0001 #!/usr/bin/env bash
0002
0003 #
0004 # Licensed to the Apache Software Foundation (ASF) under one or more
0005 # contributor license agreements. See the NOTICE file distributed with
0006 # this work for additional information regarding copyright ownership.
0007 # The ASF licenses this file to You under the Apache License, Version 2.0
0008 # (the "License"); you may not use this file except in compliance with
0009 # the License. You may obtain a copy of the License at
0010 #
0011 # http://www.apache.org/licenses/LICENSE-2.0
0012 #
0013 # Unless required by applicable law or agreed to in writing, software
0014 # distributed under the License is distributed on an "AS IS" BASIS,
0015 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0016 # See the License for the specific language governing permissions and
0017 # limitations under the License.
0018 #
0019
0020 #
0021 # Shell script for starting the Spark Shell REPL
0022
# Detect whether we are running under Cygwin; main() applies JLine
# terminal workarounds when we are.
cygwin=false
if [[ "$(uname)" == CYGWIN* ]]; then
  cygwin=true
fi

# Enter posix mode for bash
set -o posix
0030
# Resolve SPARK_HOME unless the caller's environment already provides it.
[ -n "${SPARK_HOME}" ] || source "$(dirname "$0")"/find-spark-home

export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]

Scala REPL options:
-I <file> preload <file>, enforcing line-by-line interpretation"

# SPARK-4161: scala does not assume use of the java classpath,
# so we need to add the "-Dscala.usejavacp=true" flag manually. We
# do this specifically for the Spark shell because the scala REPL
# has its own class loader, and any additional classpath specified
# through spark.driver.extraClassPath is not automatically propagated.
SPARK_SUBMIT_OPTS="${SPARK_SUBMIT_OPTS} -Dscala.usejavacp=true"
0046
# Launch the Spark REPL via spark-submit, forwarding all arguments.
# SPARK_SUBMIT_OPTS is exported in both branches; on Cygwin the
# terminal is additionally switched into a JLine-friendly mode.
function main() {
  export SPARK_SUBMIT_OPTS
  if $cygwin; then
    # Workaround for issue involving JLine and Cygwin
    # (see http://sourceforge.net/p/jline/bugs/40/).
    # If you're using the Mintty terminal emulator in Cygwin, may need to set the
    # "Backspace sends ^H" setting in "Keys" section of the Mintty options
    # (see https://github.com/sbt/sbt/issues/562).
    stty -icanon min 1 -echo > /dev/null 2>&1
    SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
    stty icanon echo > /dev/null 2>&1
  else
    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
  fi
}
0063
# Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
# binary distribution of Spark where Scala is not installed
exit_status=127
saved_stty=""

# restore stty settings (echo in particular)
function restoreSttySettings() {
  # `stty -g` emits a single colon-separated token, but quote it anyway
  # (ShellCheck SC2086) so an unexpected value can never be word-split
  # into multiple stty arguments.
  stty "$saved_stty"
  saved_stty=""
}
0074
# Exit handler: restore terminal settings when we captured them, then
# propagate the recorded exit status.
function onExit() {
  [[ -z "$saved_stty" ]] || restoreSttySettings
  exit "$exit_status"
}
0081
# to reenable echo if we are interrupted before completing.
trap onExit INT

# Save terminal settings so onExit can restore them; clear the variable
# if `stty -g` fails (e.g. stdin is not a terminal) so we never try to
# restore bogus settings.
#
# BUG FIX: the previous check was `if [[ ! $? ]]`, which tests whether
# the *string* "$?" is non-empty.  "$?" is always non-empty ("0", "1",
# ...), so the branch could never fire and a failed `stty -g` went
# undetected.  Test the command's exit status directly instead.
if ! saved_stty=$(stty -g 2>/dev/null); then
  saved_stty=""
fi

main "$@"

# record the exit status lest it be overwritten:
# then reenable echo and propagate the code.
exit_status=$?
onExit
0098