/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.examples.ml;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.ml.feature.DCT;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
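
/**
 * Example demonstrating Spark ML's Discrete Cosine Transform (DCT) feature
 * transformer. The DCT maps a length-N real-valued vector in the time domain
 * into another length-N real-valued vector in the frequency domain; Spark
 * implements the DCT-II, scaled so that the transform matrix is unitary.
 *
 * Run with:
 *   bin/run-example ml.JavaDCTExample
 */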
public class JavaDCTExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession
      .builder()
      .appName("JavaDCTExample")
      .getOrCreate();
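
    // Input data: three 4-dimensional dense feature vectors. The DCT is
    // applied to each row of the "features" column independently.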
    List<Row> data = Arrays.asList(
      RowFactory.create(Vectors.dense(0.0, 1.0, -2.0, 3.0)),
      RowFactory.create(Vectors.dense(-1.0, 2.0, 4.0, -7.0)),
      RowFactory.create(Vectors.dense(14.0, -2.0, -5.0, 1.0))
    );
    StructType schema = new StructType(new StructField[]{
      new StructField("features", new VectorUDT(), false, Metadata.empty())
    });
    Dataset<Row> df = spark.createDataFrame(data, schema);
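
    // Configure the transformer: setInverse(false) selects the forward DCT;
    // passing true would apply the inverse transform instead.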
    DCT dct = new DCT()
      .setInputCol("features")
      .setOutputCol("featuresDCT")
      .setInverse(false);
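
    // Transforming the DataFrame appends a "featuresDCT" column whose vectors
    // have the same length as the input vectors.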
    Dataset<Row> dctDf = dct.transform(df);
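
    // show(false) prints the full vector values instead of truncating them.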
    dctDf.select("featuresDCT").show(false);

    spark.stop();
  }
}