package org.apache.spark.examples;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import java.util.ArrayList;
import java.util.List;

/**
 * Computes an approximation to pi by sampling random points and counting how
 * many fall inside the unit circle.
 * Usage: JavaSparkPi [partitions]
 */
public final class JavaSparkPi {

  public static void main(String[] args) throws Exception {
    SparkSession spark = SparkSession
      .builder()
      .appName("JavaSparkPi")
      .getOrCreate();

    JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

    // Number of partitions to parallelize over; defaults to 2 if not given.
    int slices = (args.length == 1) ? Integer.parseInt(args[0]) : 2;
    int n = 100000 * slices;
    List<Integer> l = new ArrayList<>(n);
    for (int i = 0; i < n; i++) {
      l.add(i);
    }

    JavaRDD<Integer> dataSet = jsc.parallelize(l, slices);

    // For each element, draw a random point in the square [-1, 1] x [-1, 1]
    // and map it to 1 if it lands inside the unit circle, 0 otherwise; the
    // reduce then sums the hits across all partitions.
    int count = dataSet.map(integer -> {
      double x = Math.random() * 2 - 1;
      double y = Math.random() * 2 - 1;
      return (x * x + y * y <= 1) ? 1 : 0;
    }).reduce((integer, integer2) -> integer + integer2);

    // The circle-to-square area ratio is pi / 4, so pi is roughly 4 * count / n.
    System.out.println("Pi is roughly " + 4.0 * count / n);

    spark.stop();
  }
}
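
// Usage sketch (assumed invocation, not shown in this file): from a built Spark
// distribution, an example like this can typically be launched with the
// run-example helper, passing the optional number of partitions as the only
// argument, e.g.:
//
//   ./bin/run-example JavaSparkPi 10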