/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

0018 package org.apache.spark.ml.stat;
0019
0020 import java.io.IOException;
0021 import java.util.Arrays;
0022 import java.util.List;
0023
0024 import org.apache.commons.math3.distribution.NormalDistribution;
0025 import org.apache.spark.sql.Encoders;
0026 import org.junit.Assert;
0027 import org.junit.Test;
0028
0029 import org.apache.spark.SharedSparkSession;
0030 import org.apache.spark.api.java.function.Function;
0031 import org.apache.spark.sql.Dataset;
0032 import org.apache.spark.sql.Row;
0033
0034
0035 public class JavaKolmogorovSmirnovTestSuite extends SharedSparkSession {
0036
0037 private transient Dataset<Row> dataset;
0038
0039 @Override
0040 public void setUp() throws IOException {
0041 super.setUp();
0042 List<java.lang.Double> points = Arrays.asList(0.1, 1.1, 10.1, -1.1);
0043
0044 dataset = spark.createDataset(points, Encoders.DOUBLE()).toDF("sample");
0045 }
0046
0047 @Test
0048 public void testKSTestCDF() {
0049
0050 NormalDistribution stdNormalDist = new NormalDistribution(0, 1);
0051
0052
0053 Long seed = 10L;
0054 stdNormalDist.reseedRandomGenerator(seed);
0055 Function<Double, Double> stdNormalCDF = (x) -> stdNormalDist.cumulativeProbability(x);
0056
0057 double pThreshold = 0.05;
0058
0059
0060 Row results = KolmogorovSmirnovTest
0061 .test(dataset, "sample", stdNormalCDF).head();
0062 double pValue1 = results.getDouble(0);
0063
0064 Assert.assertTrue(pValue1 > pThreshold);
0065 }
0066
0067 @Test
0068 public void testKSTestNamedDistribution() {
0069 double pThreshold = 0.05;
0070
0071
0072 Row results = KolmogorovSmirnovTest
0073 .test(dataset, "sample", "norm", 0.0, 1.0).head();
0074 double pValue1 = results.getDouble(0);
0075
0076 Assert.assertTrue(pValue1 > pThreshold);
0077 }
0078 }