/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples.mllib;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.mllib.fpm.AssociationRules;
import org.apache.spark.mllib.fpm.FPGrowth;
import org.apache.spark.mllib.fpm.FPGrowthModel;

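/**
 * Frequent-pattern mining with MLlib's FP-growth, followed by association
 * rule generation. Reads space-separated transactions from the sample file
 * data/mllib/sample_fpgrowth.txt that ships with Spark.
 *
 * One way to run it from a Spark checkout (a sketch; the exact launcher
 * invocation may vary by version):
 *
 *   bin/run-example mllib.JavaSimpleFPGrowth
 */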
public class JavaSimpleFPGrowth {

  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("FP-growth Example");
    JavaSparkContext sc = new JavaSparkContext(conf);

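    // Load the sample transactions: each input line is one transaction of
    // space-separated items, e.g. "r z h k p".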
    JavaRDD<String> data = sc.textFile("data/mllib/sample_fpgrowth.txt");

    JavaRDD<List<String>> transactions = data.map(line -> Arrays.asList(line.split(" ")));
0043
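    // Mine itemsets that appear in at least 20% of all transactions,
    // distributing the work over 10 partitions.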
    FPGrowth fpg = new FPGrowth()
      .setMinSupport(0.2)
      .setNumPartitions(10);
    FPGrowthModel<String> model = fpg.run(transactions);
0048
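    // Collect the frequent itemsets to the driver and print each one with its
    // frequency. collect() is fine here because the sample data is tiny.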
    for (FPGrowth.FreqItemset<String> itemset : model.freqItemsets().toJavaRDD().collect()) {
      System.out.println("[" + itemset.javaItems() + "], " + itemset.freq());
    }
0052
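    // Derive association rules whose confidence is at least 0.8 and print
    // each one as "antecedent => consequent, confidence".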
    double minConfidence = 0.8;
    for (AssociationRules.Rule<String> rule
        : model.generateAssociationRules(minConfidence).toJavaRDD().collect()) {
      System.out.println(
          rule.javaAntecedent() + " => " + rule.javaConsequent() + ", " + rule.confidence());
    }

    sc.stop();
  }
}