Back to home page

OSCL-LXR

 
 

    


0001 #
0002 # Licensed to the Apache Software Foundation (ASF) under one or more
0003 # contributor license agreements.  See the NOTICE file distributed with
0004 # this work for additional information regarding copyright ownership.
0005 # The ASF licenses this file to You under the Apache License, Version 2.0
0006 # (the "License"); you may not use this file except in compliance with
0007 # the License.  You may obtain a copy of the License at
0008 #
0009 #    http://www.apache.org/licenses/LICENSE-2.0
0010 #
0011 # Unless required by applicable law or agreed to in writing, software
0012 # distributed under the License is distributed on an "AS IS" BASIS,
0013 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0014 # See the License for the specific language governing permissions and
0015 # limitations under the License.
0016 #
0017 
0018 """
0019 An example for computing correlation matrix.
0020 Run with:
0021   bin/spark-submit examples/src/main/python/ml/correlation_example.py
0022 """
0023 from __future__ import print_function
0024 
0025 # $example on$
0026 from pyspark.ml.linalg import Vectors
0027 from pyspark.ml.stat import Correlation
0028 # $example off$
0029 from pyspark.sql import SparkSession
0030 
if __name__ == "__main__":
    # One SparkSession per application; reuses an existing one if present.
    spark = SparkSession.builder.appName("CorrelationExample").getOrCreate()

    # $example on$
    # Four observations of a 4-dimensional feature vector; sparse and dense
    # representations are interchangeable in the same column.
    rows = [
        (Vectors.sparse(4, [(0, 1.0), (3, -2.0)]),),
        (Vectors.dense([4.0, 5.0, 0.0, 3.0]),),
        (Vectors.dense([6.0, 7.0, 0.0, 8.0]),),
        (Vectors.sparse(4, [(0, 9.0), (3, 1.0)]),),
    ]
    df = spark.createDataFrame(rows, ["features"])

    # Correlation.corr returns a one-row DataFrame whose single cell holds
    # the correlation Matrix; head()[0] extracts it.
    pearson_row = Correlation.corr(df, "features").head()
    print("Pearson correlation matrix:\n" + str(pearson_row[0]))

    # Third argument selects the method; default above was Pearson.
    spearman_row = Correlation.corr(df, "features", "spearman").head()
    print("Spearman correlation matrix:\n" + str(spearman_row[0]))
    # $example off$

    spark.stop()