#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
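# The following __future__ import is only needed for Python 2 compatibility;
# it has no effect on Python 3.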
from __future__ import print_function

# $example on$
from pyspark.ml.classification import NaiveBayes
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("NaiveBayesExample")\
        .getOrCreate()
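    # getOrCreate() reuses an existing SparkSession if one is already active
    # (e.g. in the PySpark shell); otherwise it creates a new one.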

    # $example on$
    # Load training data
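    # The "libsvm" data source reads LIBSVM-formatted text into a DataFrame
    # with "label" and "features" (sparse vector) columns.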
    data = spark.read.format("libsvm") \
        .load("data/mllib/sample_libsvm_data.txt")

    # Split the data into train and test
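    # randomSplit normalizes the weights if they do not sum to 1; the second
    # argument (1234) is a seed that makes the split reproducible.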
    splits = data.randomSplit([0.6, 0.4], 1234)
    train = splits[0]
    test = splits[1]

    # Create the trainer and set its parameters
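    # smoothing=1.0 applies additive (Laplace) smoothing; modelType can also be
    # set to "bernoulli" (and, in newer Spark releases, "complement" or "gaussian").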
    nb = NaiveBayes(smoothing=1.0, modelType="multinomial")

    # Train the model
    model = nb.fit(train)

    # Make predictions on the test set and display some example rows
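    # transform() appends "rawPrediction", "probability", and "prediction"
    # columns to the test DataFrame.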
    predictions = model.transform(test)
    predictions.show()

    # Compute accuracy on the test set
    evaluator = MulticlassClassificationEvaluator(labelCol="label", predictionCol="prediction",
                                                  metricName="accuracy")
    accuracy = evaluator.evaluate(predictions)
    print("Test set accuracy = " + str(accuracy))
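    # Other metrics can be computed by changing metricName, e.g. "f1" (the
    # evaluator's default), "weightedPrecision", or "weightedRecall":
    # f1 = evaluator.setMetricName("f1").evaluate(predictions)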
    # $example off$

    spark.stop()