0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018 """
0019 An interactive shell.
0020
0021 This file is designed to be launched as a PYTHONSTARTUP script.
0022 """
0023
0024 import atexit
0025 import os
0026 import platform
0027 import warnings
0028
0029 import py4j
0030
0031 from pyspark import SparkConf
0032 from pyspark.context import SparkContext
0033 from pyspark.sql import SparkSession, SQLContext
0034
# Mesos deployments ship a pre-built Spark distribution to executors via
# SPARK_EXECUTOR_URI; the system property must be set BEFORE the JVM-side
# context is brought up below, or it is ignored.
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])

# Spin up the py4j gateway / driver-side JVM so the session creation below
# has something to talk to.
SparkContext._ensure_initialized()

try:
    # Build (or reuse) the shell's SparkSession, honoring any configuration
    # passed through spark-submit / spark-defaults.
    spark = SparkSession._create_shell_session()
except Exception:
    # Broad catch is deliberate: this is the top-level boundary of the shell.
    # A failure here leaves the interpreter useless for Spark work, so report
    # the full traceback and exit rather than dropping the user into a
    # half-initialized shell.
    import sys
    import traceback
    warnings.warn("Failed to initialize Spark session.")
    traceback.print_exc(file=sys.stderr)
    sys.exit(1)

# Convenience names exposed in the interactive namespace.
sc = spark.sparkContext  # the underlying SparkContext
sql = spark.sql          # shortcut so users can type sql("SELECT ...")
# Stop the context when the interpreter exits.  The lambda resolves `sc`
# lazily at exit time rather than binding the current object now.
atexit.register(lambda: sc.stop())


# Legacy pre-2.0 entry points, kept for backward compatibility with old
# scripts and docs.  NOTE(review): `_wrapped` is a private SparkSession
# attribute (an SQLContext); this mirrors upstream usage but is not public API.
sqlContext = spark._wrapped
sqlCtx = sqlContext

# Startup banner.  The version is read from the live context, so it reflects
# the JVM-side Spark build actually in use.
print(r"""Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /__ / .__/\_,_/_/ /_/\_\   version %s
      /_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
    platform.python_version(),
    platform.python_build()[0],
    platform.python_build()[1]))
print("SparkSession available as 'spark'.")



# The pyspark launcher hijacks PYTHONSTARTUP to point at this file and saves
# the user's original value in OLD_PYTHONSTARTUP.  Chain-load it here so a
# user's own startup customizations still run after Spark is ready.
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
    with open(_pythonstartup) as f:
        code = compile(f.read(), _pythonstartup, 'exec')
        exec(code)