Back to home page

OSCL-LXR

 
 

    


0001 #!/usr/bin/env bash
0002 
0003 #
0004 # Licensed to the Apache Software Foundation (ASF) under one or more
0005 # contributor license agreements.  See the NOTICE file distributed with
0006 # this work for additional information regarding copyright ownership.
0007 # The ASF licenses this file to You under the Apache License, Version 2.0
0008 # (the "License"); you may not use this file except in compliance with
0009 # the License.  You may obtain a copy of the License at
0010 #
0011 #    http://www.apache.org/licenses/LICENSE-2.0
0012 #
0013 # Unless required by applicable law or agreed to in writing, software
0014 # distributed under the License is distributed on an "AS IS" BASIS,
0015 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0016 # See the License for the specific language governing permissions and
0017 # limitations under the License.
0018 #
0019 
0020 #
0021 # Shell script for starting the Spark SQL Thrift server
0022 
# Enter posix mode for bash
set -o posix

# Resolve SPARK_HOME to the parent of this script's directory unless the
# caller already exported it.  Nested $(...) replaces the deprecated
# backtick form, and '&& pwd' guarantees we never capture the wrong
# directory if the 'cd' fails.
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")/.." && pwd)"
fi

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
CLASS="org.apache.spark.sql.hive.thriftserver.HiveThriftServer2"
0033 
# Print the launcher's usage line, then spark-submit's generic options and
# the thrift server's own options.  Helper output is filtered for launcher
# noise and sent to stderr, matching the other sbin scripts.  The final
# filter string is left in the global 'pattern', as before.
function usage {
  echo "Usage: ./sbin/start-thriftserver [options] [thrift server options]"

  # Assemble a grep -v alternation of known noise lines emitted by
  # spark-class around the class's real --help text.
  local noise
  pattern="usage"
  for noise in \
      "Spark assembly has been built with Hive" \
      "NOTE: SPARK_PREPEND_CLASSES is set" \
      "Spark Command: " \
      "=======" \
      "--help" \
      "Using Spark's default log4j profile:" \
      "^log4j:" \
      "Started daemon with process name" \
      "Registered signal handler for"; do
    pattern="${pattern}\|${noise}"
  done

  "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
  echo
  echo "Thrift server options:"
  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
}
0052 
# Show usage if the caller asked for help anywhere on the command line.
# The previous test ([[ "$@" = *--help ]]) glob-matched against all
# arguments joined into one string, so it only fired when the *last*
# argument ended in the flag and '*-h' could false-positive on unrelated
# words ending in '-h'.  Scan each argument for an exact match instead.
for arg in "$@"; do
  case "$arg" in
    -h|--help)
      usage
      exit 1
      ;;
  esac
done

# spark-submit calls this function by name when it needs to print usage.
export SUBMIT_USAGE_FUNCTION=usage

# Hand control to the daemon launcher ('submit' mode, instance number 1);
# exec replaces this shell so the daemon script owns the process.
exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 --name "Thrift JDBC/ODBC Server" "$@"