/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package test.org.apache.spark;

import java.io.*;

import scala.collection.immutable.List;
import scala.collection.immutable.List$;
import scala.collection.immutable.Map;
import scala.collection.immutable.Map$;

import org.junit.Test;

import org.apache.spark.api.java.*;
import org.apache.spark.*;

/**
 * Java apps can use both the Java-friendly JavaSparkContext and the Scala SparkContext.
 */
public class JavaSparkContextSuite implements Serializable {

  @Test
  public void javaSparkContext() {
    String[] jars = new String[] {};
    java.util.Map<String, String> environment = new java.util.HashMap<>();

    // Exercise each public JavaSparkContext constructor; stop() releases each
    // local context before the next one is created.
    new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
    new JavaSparkContext("local", "name", new SparkConf()).stop();
    new JavaSparkContext("local", "name").stop();
    new JavaSparkContext("local", "name", "sparkHome", "jarFile").stop();
    new JavaSparkContext("local", "name", "sparkHome", jars).stop();
    new JavaSparkContext("local", "name", "sparkHome", jars, environment).stop();
  }
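
  // A minimal sketch added for illustration, not part of the original suite:
  // it assumes only the standard JavaSparkContext.parallelize and JavaRDD.count
  // API, and shows that a constructed context can actually run a job rather
  // than merely be stopped.
  @Test
  public void javaSparkContextRunsJob() {
    JavaSparkContext sc =
      new JavaSparkContext(new SparkConf().setMaster("local").setAppName("name"));
    try {
      JavaRDD<Integer> rdd = sc.parallelize(java.util.Arrays.asList(1, 2, 3, 4));
      // count() forces evaluation, verifying the local context is usable.
      org.junit.Assert.assertEquals(4L, rdd.count());
    } finally {
      sc.stop();
    }
  }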

  @Test
  public void scalaSparkContext() {
    List<String> jars = List$.MODULE$.empty();
    Map<String, String> environment = Map$.MODULE$.empty();

    // Exercise each public Scala SparkContext constructor from Java, passing
    // empty immutable Scala collections obtained via their MODULE$ singletons.
    new SparkContext(new SparkConf().setMaster("local").setAppName("name")).stop();
    new SparkContext("local", "name", new SparkConf()).stop();
    new SparkContext("local", "name").stop();
    new SparkContext("local", "name", "sparkHome").stop();
    new SparkContext("local", "name", "sparkHome", jars).stop();
    new SparkContext("local", "name", "sparkHome", jars, environment).stop();
  }
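
  // A minimal sketch added for illustration, not part of the original suite:
  // it assumes only JavaSparkContext.fromSparkContext and JavaSparkContext.sc(),
  // the standard bridge between the Scala and Java context types, to show the
  // two APIs operate on the same underlying context.
  @Test
  public void wrapScalaSparkContext() {
    SparkContext sc = new SparkContext(new SparkConf().setMaster("local").setAppName("name"));
    try {
      JavaSparkContext jsc = JavaSparkContext.fromSparkContext(sc);
      // The wrapper shares the underlying Scala SparkContext instance.
      org.junit.Assert.assertSame(sc, jsc.sc());
    } finally {
      sc.stop();
    }
  }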
}