/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Library for launching Spark applications programmatically.
 *
 * <p>
 * There are two ways to start applications with this library: as a child process, using
 * {@link org.apache.spark.launcher.SparkLauncher}, or in-process, using
 * {@link org.apache.spark.launcher.InProcessLauncher}.
 * </p>
 *
 * <p>
 * The {@link org.apache.spark.launcher.AbstractLauncher#startApplication(
 * org.apache.spark.launcher.SparkAppHandle.Listener...)} method can be used to start Spark and
 * provide a handle to monitor and control the running application:
 * </p>
 *
 * <pre>
 * {@code
 *   import org.apache.spark.launcher.SparkAppHandle;
 *   import org.apache.spark.launcher.SparkLauncher;
 *
 *   public class MyLauncher {
 *     public static void main(String[] args) throws Exception {
 *       SparkAppHandle handle = new SparkLauncher()
 *         .setAppResource("/my/app.jar")
 *         .setMainClass("my.spark.app.Main")
 *         .setMaster("local")
 *         .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
 *         .startApplication();
 *       // Use handle API to monitor / control application.
 *     }
 *   }
 * }
 * </pre>
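 *
 * <p>
 * The handle can also be driven by a listener passed to {@code startApplication}. The following
 * is a minimal sketch (the class and resource names are placeholders) that waits for the
 * application to reach a terminal state using the {@code stateChanged} and {@code infoChanged}
 * callbacks of {@link org.apache.spark.launcher.SparkAppHandle.Listener}:
 * </p>
 *
 * <pre>
 * {@code
 *   import java.util.concurrent.CountDownLatch;
 *
 *   import org.apache.spark.launcher.SparkAppHandle;
 *   import org.apache.spark.launcher.SparkLauncher;
 *
 *   public class MyListeningLauncher {
 *     public static void main(String[] args) throws Exception {
 *       CountDownLatch done = new CountDownLatch(1);
 *       SparkAppHandle handle = new SparkLauncher()
 *         .setAppResource("/my/app.jar")
 *         .setMainClass("my.spark.app.Main")
 *         .setMaster("local")
 *         .startApplication(new SparkAppHandle.Listener() {
 *           public void stateChanged(SparkAppHandle h) {
 *             // Release the latch once the application reaches a final state.
 *             if (h.getState().isFinal()) {
 *               done.countDown();
 *             }
 *           }
 *           public void infoChanged(SparkAppHandle h) {
 *             // Called when application info (e.g. the app ID) changes; nothing to do here.
 *           }
 *         });
 *       done.await();
 *     }
 *   }
 * }
 * </pre>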
 *
 * <p>
 * Launching applications as a child process requires a full Spark installation. The installation
 * directory can be provided to the launcher explicitly in the launcher's configuration, or by
 * setting the <i>SPARK_HOME</i> environment variable.
 * </p>
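 *
 * <p>
 * As an illustrative sketch (the path below is a placeholder), the installation directory can be
 * set directly on the launcher instead of relying on the environment variable:
 * </p>
 *
 * <pre>
 * {@code
 *   SparkLauncher launcher = new SparkLauncher()
 *     .setSparkHome("/opt/spark");  // used instead of the SPARK_HOME environment variable
 * }
 * </pre>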
 *
 * <p>
 * Launching applications in-process is only recommended in cluster mode, since Spark cannot run
 * multiple client-mode applications concurrently in the same process. The in-process launcher
 * requires the necessary Spark dependencies (such as spark-core and cluster manager-specific
 * modules) to be present in the caller thread's class loader.
 * </p>
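 *
 * <p>
 * A minimal in-process sketch, assuming the application is submitted in cluster mode to a
 * resource manager such as YARN and the required Spark modules are already on the classpath
 * (the resource, class and master values are placeholders):
 * </p>
 *
 * <pre>
 * {@code
 *   import org.apache.spark.launcher.InProcessLauncher;
 *   import org.apache.spark.launcher.SparkAppHandle;
 *
 *   public class MyInProcessLauncher {
 *     public static void main(String[] args) throws Exception {
 *       SparkAppHandle handle = new InProcessLauncher()
 *         .setAppResource("/my/app.jar")
 *         .setMainClass("my.spark.app.Main")
 *         .setMaster("yarn")
 *         .setDeployMode("cluster")
 *         .startApplication();
 *       // Use handle API to monitor / control application.
 *     }
 *   }
 * }
 * </pre>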
 *
 * <p>
 * It's also possible to launch a raw child process, without the extra monitoring, using the
 * {@link org.apache.spark.launcher.SparkLauncher#launch()} method:
 * </p>
 *
 * <pre>
 * {@code
 *   import org.apache.spark.launcher.SparkLauncher;
 *
 *   public class MyLauncher {
 *     public static void main(String[] args) throws Exception {
 *       Process spark = new SparkLauncher()
 *         .setAppResource("/my/app.jar")
 *         .setMainClass("my.spark.app.Main")
 *         .setMaster("local")
 *         .setConf(SparkLauncher.DRIVER_MEMORY, "2g")
 *         .launch();
 *       spark.waitFor();
 *     }
 *   }
 * }
 * </pre>
 *
 * <p>This method requires the calling code to manually manage the child process, including its
 * output streams (to avoid possible deadlocks). It's recommended that
 * {@link org.apache.spark.launcher.SparkLauncher#startApplication(
 *   org.apache.spark.launcher.SparkAppHandle.Listener...)} be used instead.</p>
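 *
 * <p>
 * When the raw child process is used anyway, one way to avoid blocking on full output buffers is
 * to redirect the child's streams before launching. A minimal sketch (the output file path is a
 * placeholder):
 * </p>
 *
 * <pre>
 * {@code
 *   import java.io.File;
 *
 *   import org.apache.spark.launcher.SparkLauncher;
 *
 *   public class MyRedirectingLauncher {
 *     public static void main(String[] args) throws Exception {
 *       Process spark = new SparkLauncher()
 *         .setAppResource("/my/app.jar")
 *         .setMainClass("my.spark.app.Main")
 *         .setMaster("local")
 *         .redirectError()                                 // merge stderr into stdout
 *         .redirectOutput(new File("/tmp/spark-app.out"))  // write the merged output to a file
 *         .launch();
 *       spark.waitFor();
 *     }
 *   }
 * }
 * </pre>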
 */
package org.apache.spark.launcher;