#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -ex
TEST_ROOT_DIR=$(git rev-parse --show-toplevel)
UNPACKED_SPARK_TGZ="$TEST_ROOT_DIR/target/spark-dist-unpacked"
IMAGE_TAG_OUTPUT_FILE="$TEST_ROOT_DIR/target/image-tag.txt"
DEPLOY_MODE="minikube"
IMAGE_REPO="docker.io/kubespark"
IMAGE_TAG="N/A"
JAVA_IMAGE_TAG="8-jre-slim"
SPARK_TGZ="N/A"
MVN="$TEST_ROOT_DIR/build/mvn"
EXCLUDE_TAGS=""

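# Example invocation (illustrative only; the script path and tarball name are
# placeholders, and any option may be omitted to fall back to the defaults above):
#
#   ./setup-integration-test-env.sh \
#     --deploy-mode minikube \
#     --image-repo docker.io/kubespark \
#     --spark-tgz "$TEST_ROOT_DIR/spark-x.y.z-bin-hadoopX.tgz" \
#     --test-exclude-tags r
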
# Parse arguments
while (( "$#" )); do
  case $1 in
    --unpacked-spark-tgz)
      UNPACKED_SPARK_TGZ="$2"
      shift
      ;;
    --image-repo)
      IMAGE_REPO="$2"
      shift
      ;;
    --image-tag)
      IMAGE_TAG="$2"
      shift
      ;;
    --java-image-tag)
      JAVA_IMAGE_TAG="$2"
      shift
      ;;
    --image-tag-output-file)
      IMAGE_TAG_OUTPUT_FILE="$2"
      shift
      ;;
    --deploy-mode)
      DEPLOY_MODE="$2"
      shift
      ;;
    --spark-tgz)
      SPARK_TGZ="$2"
      shift
      ;;
    --test-exclude-tags)
      EXCLUDE_TAGS="$2"
      shift
      ;;
    *)
      break
      ;;
  esac
  shift
done
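# Each recognized option and its value are consumed by the two shifts above (one
# in the case body, one at the bottom of the loop); the first unrecognized
# argument stops parsing and everything after it is ignored.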

rm -rf "$UNPACKED_SPARK_TGZ"
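# Three input scenarios are handled:
#   1. Neither --spark-tgz nor --image-tag given: build images from the current
#      source checkout.
#   2. --spark-tgz given but no --image-tag: unpack the distribution tarball and
#      build images from it.
#   3. --image-tag given: a prebuilt image is used and the build below is skipped.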
if [[ $SPARK_TGZ == "N/A" && $IMAGE_TAG == "N/A" ]];
then
  # If there is no Spark image tag to test with and no src dir, build from the current checkout
  SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
  SPARK_INPUT_DIR="$(cd "$SCRIPT_DIR/"../../../../  >/dev/null 2>&1 && pwd )"
  DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/resource-managers/kubernetes/docker/src/main/dockerfiles/spark"
elif [[ $IMAGE_TAG == "N/A" ]];
then
  # If there is a test src tarball and no image tag, build from that tarball
  mkdir -p "$UNPACKED_SPARK_TGZ"
  tar -xzvf "$SPARK_TGZ" --strip-components=1 -C "$UNPACKED_SPARK_TGZ"
  SPARK_INPUT_DIR="$UNPACKED_SPARK_TGZ"
  DOCKER_FILE_BASE_PATH="$SPARK_INPUT_DIR/kubernetes/dockerfiles/spark"
fi


# If there is a specific Spark image, skip building and extraction/copy
if [[ $IMAGE_TAG == "N/A" ]];
then
  VERSION=$("$MVN" help:evaluate -Dexpression=project.version \
    | grep -v "INFO"\
    | grep -v "WARNING"\
    | tail -n 1)
  IMAGE_TAG=${VERSION}_$(uuidgen)
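  # The tag combines the Maven project version with a random UUID so that each
  # test run gets a unique image tag.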
  cd "$SPARK_INPUT_DIR"

  # OpenJDK base-image tag (e.g. 8-jre-slim, 11-jre-slim)
  JAVA_IMAGE_TAG_BUILD_ARG="-b java_image_tag=$JAVA_IMAGE_TAG"

  # Build PySpark image
  LANGUAGE_BINDING_BUILD_ARGS="-p $DOCKER_FILE_BASE_PATH/bindings/python/Dockerfile"

  # Build SparkR image
  tags=(${EXCLUDE_TAGS//,/ })
  if [[ ! ${tags[@]} =~ "r" ]]; then
    LANGUAGE_BINDING_BUILD_ARGS="$LANGUAGE_BINDING_BUILD_ARGS -R $DOCKER_FILE_BASE_PATH/bindings/R/Dockerfile"
  fi
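  # For example, passing --test-exclude-tags r skips the SparkR image, so only
  # the base and PySpark images are built.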

  # Unset SPARK_HOME so the docker-image-tool script detects it on its own;
  # otherwise it would not treat the unpacked directory as its home. See SPARK-28550.
  unset SPARK_HOME

  case $DEPLOY_MODE in
    cloud)
      # Build images
      "$SPARK_INPUT_DIR/bin/docker-image-tool.sh" -r $IMAGE_REPO -t $IMAGE_TAG $JAVA_IMAGE_TAG_BUILD_ARG $LANGUAGE_BINDING_BUILD_ARGS build

      # Push images appropriately
      if [[ $IMAGE_REPO == gcr.io* ]];
      then
        gcloud docker -- push $IMAGE_REPO/spark:$IMAGE_TAG
      else
        "$SPARK_INPUT_DIR/bin/docker-image-tool.sh" -r $IMAGE_REPO -t $IMAGE_TAG push
      fi
      ;;

    docker-for-desktop)
      # Building is enough: the image lands in the local Docker daemon, which is
      # all Docker for Desktop needs, so there is nothing to push.
      "$SPARK_INPUT_DIR/bin/docker-image-tool.sh" -r $IMAGE_REPO -t $IMAGE_TAG $JAVA_IMAGE_TAG_BUILD_ARG $LANGUAGE_BINDING_BUILD_ARGS build
      ;;

    minikube)
      # Building is enough: with the -m option the images are built directly
      # against the minikube Docker daemon, so there is nothing to push.
      "$SPARK_INPUT_DIR/bin/docker-image-tool.sh" -m -r $IMAGE_REPO -t $IMAGE_TAG $JAVA_IMAGE_TAG_BUILD_ARG $LANGUAGE_BINDING_BUILD_ARGS build
      ;;
    *)
      echo "Unrecognized deploy mode $DEPLOY_MODE" && exit 1
      ;;
  esac
  cd -
fi

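# Record the tag of the images that were just built (or supplied via --image-tag)
# so the integration test runner can read it back from $IMAGE_TAG_OUTPUT_FILE.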
rm -f "$IMAGE_TAG_OUTPUT_FILE"
echo -n "$IMAGE_TAG" > "$IMAGE_TAG_OUTPUT_FILE"