package org.apache.spark.examples.ml;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.ml.feature.MinMaxScaler;
import org.apache.spark.ml.feature.MinMaxScalerModel;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

/**
 * An example demonstrating MinMaxScaler, which rescales each feature column
 * to a common output range using column summary statistics.
 */
public class JavaMinMaxScalerExample {
  public static void main(String[] args) {
    SparkSession spark = SparkSession
      .builder()
      .appName("JavaMinMaxScalerExample")
      .getOrCreate();

    // A small input DataFrame: an id column and a column of dense feature vectors.
    List<Row> data = Arrays.asList(
      RowFactory.create(0, Vectors.dense(1.0, 0.1, -1.0)),
      RowFactory.create(1, Vectors.dense(2.0, 1.1, 1.0)),
      RowFactory.create(2, Vectors.dense(3.0, 10.1, 3.0))
    );
    StructType schema = new StructType(new StructField[]{
      new StructField("id", DataTypes.IntegerType, false, Metadata.empty()),
      new StructField("features", new VectorUDT(), false, Metadata.empty())
    });
    Dataset<Row> dataFrame = spark.createDataFrame(data, schema);
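
    // MinMaxScaler rescales each feature column independently to the configured
    // [min, max] output range (default [0.0, 1.0]) as
    //   (e - E_min) / (E_max - E_min) * (max - min) + min,
    // where E_min and E_max are the column's minimum and maximum in the fitted data.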
    MinMaxScaler scaler = new MinMaxScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures");

    // Compute the per-column minimum and maximum of the input data and
    // produce a MinMaxScalerModel that holds them.
    MinMaxScalerModel scalerModel = scaler.fit(dataFrame);

    // Rescale each feature to the configured [min, max] range.
    Dataset<Row> scaledData = scalerModel.transform(dataFrame);
    System.out.println("Features scaled to range: [" + scaler.getMin() + ", "
        + scaler.getMax() + "]");
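    // With the default output range [0.0, 1.0], the three vectors above are rescaled
    // column-wise to [0.0, 0.0, 0.0], [0.5, 0.1, 0.5], and [1.0, 1.0, 1.0].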
    scaledData.select("features", "scaledFeatures").show();

    spark.stop();
  }
}