package org.apache.spark.examples.ml;

import java.util.Arrays;
import java.util.List;

import org.apache.spark.ml.feature.MaxAbsScaler;
import org.apache.spark.ml.feature.MaxAbsScalerModel;
import org.apache.spark.ml.linalg.Vectors;
import org.apache.spark.ml.linalg.VectorUDT;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import org.apache.spark.sql.SparkSession;

public class JavaMaxAbsScalerExample {

  public static void main(String[] args) {
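    // Start a SparkSession, the entry point for the DataFrame-based ML API.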
    SparkSession spark = SparkSession
      .builder()
      .appName("JavaMaxAbsScalerExample")
      .getOrCreate();

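    // Example input: three rows, each with an id and a dense feature vector.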
    List<Row> data = Arrays.asList(
        RowFactory.create(0, Vectors.dense(1.0, 0.1, -8.0)),
        RowFactory.create(1, Vectors.dense(2.0, 1.0, -4.0)),
        RowFactory.create(2, Vectors.dense(4.0, 10.0, 8.0))
    );
    StructType schema = new StructType(new StructField[]{
      new StructField("id", DataTypes.IntegerType, false, Metadata.empty()),
      new StructField("features", new VectorUDT(), false, Metadata.empty())
    });
    Dataset<Row> dataFrame = spark.createDataFrame(data, schema);

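    // Configure a MaxAbsScaler that divides each feature by its maximum absolute value.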
    MaxAbsScaler scaler = new MaxAbsScaler()
      .setInputCol("features")
      .setOutputCol("scaledFeatures");

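    // Compute summary statistics and generate a MaxAbsScalerModel.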
    MaxAbsScalerModel scalerModel = scaler.fit(dataFrame);

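    // Rescale each feature to the range [-1, 1] and show the result.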
    Dataset<Row> scaledData = scalerModel.transform(dataFrame);
    scaledData.select("features", "scaledFeatures").show();

    spark.stop();
  }
}