/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

0018 package org.apache.spark.examples.mllib;
0019
0020
0021 import java.util.Arrays;
0022
0023 import org.apache.spark.api.java.JavaRDD;
0024 import org.apache.spark.api.java.JavaSparkContext;
0025 import org.apache.spark.mllib.fpm.AssociationRules;
0026 import org.apache.spark.mllib.fpm.FPGrowth;
0027 import org.apache.spark.mllib.fpm.FPGrowth.FreqItemset;
0028
0029
0030 import org.apache.spark.SparkConf;
0031
0032 public class JavaAssociationRulesExample {
0033
0034 public static void main(String[] args) {
0035
0036 SparkConf sparkConf = new SparkConf().setAppName("JavaAssociationRulesExample");
0037 JavaSparkContext sc = new JavaSparkContext(sparkConf);
0038
0039
0040 JavaRDD<FPGrowth.FreqItemset<String>> freqItemsets = sc.parallelize(Arrays.asList(
0041 new FreqItemset<>(new String[] {"a"}, 15L),
0042 new FreqItemset<>(new String[] {"b"}, 35L),
0043 new FreqItemset<>(new String[] {"a", "b"}, 12L)
0044 ));
0045
0046 AssociationRules arules = new AssociationRules()
0047 .setMinConfidence(0.8);
0048 JavaRDD<AssociationRules.Rule<String>> results = arules.run(freqItemsets);
0049
0050 for (AssociationRules.Rule<String> rule : results.collect()) {
0051 System.out.println(
0052 rule.javaAntecedent() + " => " + rule.javaConsequent() + ", " + rule.confidence());
0053 }
0054
0055
0056 sc.stop();
0057 }
0058 }