0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019 from pyspark.sql.functions import sha2
0020 from pyspark.sql.utils import AnalysisException, ParseException, IllegalArgumentException
0021 from pyspark.testing.sqlutils import ReusedSQLTestCase
0022
0023
class UtilsTests(ReusedSQLTestCase):
    """Verify that JVM-side SQL errors surface as typed Python exceptions.

    Each test triggers a failure in the JVM (bad SQL, bad function argument)
    and asserts that the Py4J error is converted into the corresponding
    user-friendly exception from ``pyspark.sql.utils``.
    """

    def test_capture_analysis_exception(self):
        # Unresolved references must raise AnalysisException, not a raw Py4J error.
        self.assertRaises(AnalysisException, lambda: self.spark.sql("select abc"))
        self.assertRaises(AnalysisException, lambda: self.df.selectExpr("a + b"))

    def test_capture_user_friendly_exception(self):
        # Use assertRaises as a context manager so the test FAILS if no
        # exception is raised (the original try/except passed vacuously).
        # Also checks the non-ASCII column name round-trips into the message.
        with self.assertRaises(AnalysisException) as context:
            self.spark.sql("select `中文字段`")
        self.assertRegex(str(context.exception), "cannot resolve '`中文字段`'")

    def test_capture_parse_exception(self):
        self.assertRaises(ParseException, lambda: self.spark.sql("abc"))

    def test_capture_illegalargument_exception(self):
        # assertRaisesRegex/assertRegex replace the Regexp-suffixed aliases,
        # which are deprecated and removed in Python 3.12.
        self.assertRaisesRegex(IllegalArgumentException, "Setting negative mapred.reduce.tasks",
                               lambda: self.spark.sql("SET mapred.reduce.tasks=-1"))
        df = self.spark.createDataFrame([(1, 2)], ["a", "b"])
        self.assertRaisesRegex(IllegalArgumentException, "1024 is not in the permitted values",
                               lambda: df.select(sha2(df.a, 1024)).collect())
        # The exception must also expose the JVM-side description and stack trace.
        with self.assertRaises(IllegalArgumentException) as context:
            df.select(sha2(df.a, 1024)).collect()
        self.assertRegex(context.exception.desc, "1024 is not in the permitted values")
        self.assertRegex(context.exception.stackTrace,
                         "org.apache.spark.sql.functions")
0051
0052
if __name__ == "__main__":
    import unittest
    from pyspark.sql.tests.test_utils import *

    # Prefer the XML test runner (for CI report collection) when available;
    # otherwise fall back to unittest's default text runner.
    try:
        import xmlrunner
    except ImportError:
        runner = None
    else:
        runner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    unittest.main(testRunner=runner, verbosity=2)