/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package test.org.apache.spark;

import java.io.*;

import scala.collection.immutable.List;
import scala.collection.immutable.List$;
import scala.collection.immutable.Map;
import scala.collection.immutable.Map$;

import org.junit.Test;

import org.apache.spark.api.java.*;
import org.apache.spark.*;

/**
 * Java apps can use both Java-friendly JavaSparkContext and Scala SparkContext.
 */
public class JavaSparkContextSuite implements Serializable {

  @Test
  public void javaSparkContext() {
    String[] jars = new String[] {};
    java.util.Map<String, String> environment = new java.util.HashMap<>();

    // Every JavaSparkContext constructor overload should build a working local
    // context that can then be stopped cleanly.
    new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
    new JavaSparkContext("local", "name", new SparkConf()).stop();
    new JavaSparkContext("local", "name").stop();
    new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
    new JavaSparkContext("local", "name", "sparkHome", jars).stop();
    new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
  }
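
  // A hedged addition, not part of the original suite: JavaSparkContext is a
  // thin wrapper around a Scala SparkContext, so wrapping an existing context
  // and reading it back via sc() should yield the same instance. The test name
  // is our own; the wrapping constructor and the sc() accessor are standard
  // JavaSparkContext API.
  @Test
  public void javaWrapsScalaSparkContext() {
    SparkContext sc = new SparkContext(new SparkConf().setMaster("local").setAppName("name"));
    try {
      JavaSparkContext jsc = new JavaSparkContext(sc);
      // The wrapper delegates to the exact SparkContext it was given.
      org.junit.Assert.assertSame(sc, jsc.sc());
    } finally {
      sc.stop();
    }
  }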

  @Test
  public void scalaSparkContext() {
    List<String> jars = List$.MODULE$.empty();
    Map<String, String> environment = Map$.MODULE$.empty();

    // The Scala SparkContext constructors are likewise callable directly from
    // Java, taking Scala's immutable List and Map for jars and environment.
    new SparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
    new SparkContext("local", "name", new SparkConf()).stop();
    new SparkContext("local", "name").stop();
    new SparkContext("local", "name", "sparkHome").stop();
    new SparkContext("local", "name", "sparkHome", jars).stop();
    new SparkContext("local", "name", "sparkHome", jars, environment).stop();
  }
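
  // Another hedged sketch, not in the original suite: the SparkContext
  // companion object's getOrCreate is exposed to Java as a static method and
  // should hand back the already-active context rather than constructing a
  // second one, since Spark allows only one active context per JVM.
  @Test
  public void getOrCreateReusesActiveContext() {
    SparkConf conf = new SparkConf().setMaster("local").setAppName("name");
    SparkContext sc = SparkContext.getOrCreate(conf);
    try {
      // The second call must return the same running context.
      org.junit.Assert.assertSame(sc, SparkContext.getOrCreate(conf));
    } finally {
      sc.stop();
    }
  }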
}