0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020
0021
0022
0023
# Enter POSIX mode for better compatibility with /bin/sh semantics.
set -o posix

# Resolve SPARK_HOME to the parent of this script's directory unless the
# caller has already exported it. $(...) replaces the deprecated backtick
# form, which does not nest and is harder to read.
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")"/..; pwd)"
fi

# Main class handed to spark-daemon.sh at the bottom of this script.
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
0033
# Print combined usage: spark-submit's options followed by the Thrift
# server class's own options.
# Reads globals: SPARK_HOME, CLASS.
# Output: the leading usage line, a blank line, and the section header go
# to stdout; all filtered help text is redirected to stderr (1>&2).
usage() {
  echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"

  # Lines matching this pattern are launcher/log4j noise rather than
  # Thrift server options; they are stripped from the --help output below.
  # 'local' keeps the accumulator from leaking into the global scope.
  local pattern="usage"
  pattern+="\|Spark assembly has been built with Hive"
  pattern+="\|NOTE: SPARK_PREPEND_CLASSES is set"
  pattern+="\|Spark Command: "
  pattern+="\|======="
  pattern+="\|--help"
  pattern+="\|Using Spark's default log4j profile:"
  pattern+="\|^log4j:"
  pattern+="\|Started daemon with process name"
  pattern+="\|Registered signal handler for"

  "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  echo
  echo "Thrift server options:"
  # Quote "$CLASS" so an empty/odd value cannot word-split the command.
  "${SPARK_HOME}"/bin/spark-class "$CLASS" --help 2>&1 | grep -v "$pattern" 1>&2
}
0052
# Show help when any argument requests it. The previous test,
# [[ "$@" = *--help ]], joined all arguments into one string and only
# matched when that joined string *ended* in --help/-h (it also
# false-positived on any trailing argument ending in "-h", e.g. --fetch).
# Scanning each argument for an exact match fixes both problems.
for arg in "$@"; do
  case "$arg" in
    --help | -h)
      usage
      exit 1
      ;;
  esac
done

# spark-submit calls this function to print usage on argument errors.
export SUBMIT_USAGE_FUNCTION=usage

# Hand off to the daemon launcher: "submit" mode, instance number 1,
# forwarding all remaining user arguments.
exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit "$CLASS" 1 --name "Thrift JDBC/ODBC Server" "$@"