#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Starts a slave on the machine this script is executed on.
#
# Environment Variables
#
#   SPARK_WORKER_INSTANCES  The number of worker instances to run on this
#                           slave.  Default is 1. Note it has been deprecated
#                           since Spark 3.0.
#   SPARK_WORKER_PORT       The base port number for the first worker.  If set,
#                           subsequent workers will increment this number.  If
#                           unset, Spark will find a valid port number, but
#                           with no guarantee of a predictable pattern.
#   SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
#                           worker.  Subsequent workers will increment this
#                           number.  Default is 8081.

# Resolve SPARK_HOME to the parent of this script's directory unless the
# caller has already exported it.  $(…) replaces the legacy nested backticks,
# which interact badly with the surrounding double quotes.
if [ -z "${SPARK_HOME}" ]; then
  export SPARK_HOME="$(cd "$(dirname "$0")"/.. && pwd)"
fi

# NOTE: This exact class name is matched downstream by SparkSubmit.
# Any changes need to be reflected there.
CLASS="org.apache.spark.deploy.worker.Worker"

# Show usage (delegating to the Worker class's own --help output, filtered of
# startup noise) when no master is given or a help flag appears anywhere in
# the arguments.  The original test `[[ "$@" = *--help ]]` only matched when
# the space-joined argument string ENDED in --help (and also falsely matched
# arguments like "foo--help"); scan the arguments individually instead.
show_usage=0
if [[ $# -lt 1 ]]; then
  show_usage=1
else
  for arg in "$@"; do
    case "$arg" in
      --help|-h)
        show_usage=1
        break
        ;;
    esac
  done
fi

if [[ $show_usage -eq 1 ]]; then
  echo "Usage: ./sbin/start-slave.sh <master> [options]"
  pattern="Usage:"
  pattern+="\|Using Spark's default log4j profile:"
  pattern+="\|Started daemon with process name"
  pattern+="\|Registered signal handler for"

  "${SPARK_HOME}"/bin/spark-class "$CLASS" --help 2>&1 | grep -v "$pattern" 1>&2
  exit 1
fi

. "${SPARK_HOME}/sbin/spark-config.sh"

. "${SPARK_HOME}/bin/load-spark-env.sh"

# First argument should be the master; we need to store it aside because we may
# need to insert arguments between it and the other arguments
MASTER=$1
shift

# Determine desired worker web UI base port (default 8081).
if [ -z "$SPARK_WORKER_WEBUI_PORT" ]; then
  SPARK_WORKER_WEBUI_PORT=8081
fi

# Start up the appropriate number of workers on this machine.
# Quick local function to start a worker.  $1 is the 1-based instance number;
# remaining arguments are passed through to the Worker class.
function start_instance {
  local worker_num=$1
  shift

  # Build the optional --port flag as an array so the empty case drops both
  # words cleanly while non-empty values stay safely quoted.
  local -a port_args=()
  if [ -n "$SPARK_WORKER_PORT" ]; then
    port_args=(--port "$(( SPARK_WORKER_PORT + worker_num - 1 ))")
  fi
  local webui_port=$(( SPARK_WORKER_WEBUI_PORT + worker_num - 1 ))

  "${SPARK_HOME}/sbin"/spark-daemon.sh start "$CLASS" "$worker_num" \
     --webui-port "$webui_port" "${port_args[@]}" "$MASTER" "$@"
}

if [ -z "$SPARK_WORKER_INSTANCES" ]; then
  start_instance 1 "$@"
else
  for (( i = 0; i < SPARK_WORKER_INSTANCES; i++ )); do
    start_instance $(( 1 + i )) "$@"
  done
fi