#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

# $example on$
from pyspark.ml.feature import Normalizer
from pyspark.ml.linalg import Vectors
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("NormalizerExample")\
        .getOrCreate()

    # $example on$
    # A small toy dataset: three rows, each with a 3-dimensional dense feature vector.
    dataFrame = spark.createDataFrame([
        (0, Vectors.dense([1.0, 0.5, -1.0]),),
        (1, Vectors.dense([2.0, 1.0, 1.0]),),
        (2, Vectors.dense([4.0, 10.0, 2.0]),)
    ], ["id", "features"])

    # Normalize each Vector using $L^1$ norm.
    normalizer = Normalizer(inputCol="features", outputCol="normFeatures", p=1.0)
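    # Note: if p is omitted, Normalizer defaults to p=2.0 (the Euclidean norm).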
    l1NormData = normalizer.transform(dataFrame)
    print("Normalized using L^1 norm")
    l1NormData.show()
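    # Each vector is divided by its L^1 norm. For example, the first row
    # [1.0, 0.5, -1.0] has L^1 norm |1.0| + |0.5| + |-1.0| = 2.5, so it
    # normalizes to [0.4, 0.2, -0.4].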

    # Normalize each Vector using $L^\infty$ norm. Passing a param map to
    # transform() overrides p for this call only; the normalizer itself
    # still holds p=1.0.
    lInfNormData = normalizer.transform(dataFrame, {normalizer.p: float("inf")})
    print("Normalized using L^inf norm")
    lInfNormData.show()
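    # The L^inf norm is the maximum absolute value. For example, the last row
    # [4.0, 10.0, 2.0] has L^inf norm max(|4.0|, |10.0|, |2.0|) = 10.0, so it
    # normalizes to [0.4, 1.0, 0.2].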
    # $example off$

    spark.stop()
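
# A usage sketch (assumes this file is saved as normalizer_example.py and
# that $SPARK_HOME points at a Spark installation):
#
#   $SPARK_HOME/bin/spark-submit normalizer_example.py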