#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

# $example on$
from pyspark.ml.feature import Word2Vec
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("Word2VecExample")\
        .getOrCreate()

    # $example on$
    # Input data: Each row is a bag of words from a sentence or document.
    documentDF = spark.createDataFrame([
        ("Hi I heard about Spark".split(" "), ),
        ("I wish Java could use case classes".split(" "), ),
        ("Logistic regression models are neat".split(" "), )
    ], ["text"])

    # Learn a mapping from words to Vectors.
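    # vectorSize is the dimension of the learned word vectors; minCount=0 keeps
    # every token in the vocabulary, no matter how rarely it appears.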
    word2Vec = Word2Vec(vectorSize=3, minCount=0, inputCol="text", outputCol="result")
    model = word2Vec.fit(documentDF)

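    # transform() maps each document to a single vector by averaging the vectors
    # of all the words it contains.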
    result = model.transform(documentDF)
    for row in result.collect():
        text, vector = row
        print("Text: [%s] => \nVector: %s\n" % (", ".join(text), str(vector)))
    # $example off$

    spark.stop()
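
# A minimal sketch of how this example is typically launched, assuming the file
# is saved as examples/src/main/python/ml/word2vec_example.py in a Spark checkout:
#
#     ./bin/spark-submit examples/src/main/python/ml/word2vec_example.py
#
# Each printed Vector is the 3-dimensional document embedding for that row.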