#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

SELF=$(cd "$(dirname "$0")" && pwd)
. "$SELF/release-util.sh"

function exit_with_usage {
  local NAME=$(basename "$0")
  cat << EOF
usage: $NAME
Tags a Spark release on a particular branch.
You must push the tags afterwards.

Inputs are specified with the following environment variables:
ASF_USERNAME - Apache username
ASF_PASSWORD - Apache password
GIT_NAME - Name to use with git
GIT_EMAIL - E-mail address to use with git
GIT_BRANCH - Git branch on which to make the release
RELEASE_VERSION - Version used in pom files for the release
RELEASE_TAG - Name of the release tag
NEXT_VERSION - Development version after the release
EOF
  exit 1
}
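
# Example invocation (illustrative only; the values below and the file name
# release-tag.sh are placeholders, not taken from a real release):
#
#   ASF_USERNAME=jdoe GIT_NAME="Jane Doe" GIT_EMAIL=jdoe@apache.org \
#   GIT_BRANCH=branch-3.5 RELEASE_VERSION=3.5.1 RELEASE_TAG=v3.5.1-rc1 \
#   NEXT_VERSION=3.5.2-SNAPSHOT ./release-tag.sh
#
# ASF_PASSWORD is prompted for interactively below when it is not already set.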

set -e
set -o pipefail

if [[ $@ == *"help"* ]]; then
  exit_with_usage
fi

if [[ -z "$ASF_PASSWORD" ]]; then
  echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
  echo
  stty -echo && printf "ASF password: " && read ASF_PASSWORD && printf '\n' && stty echo
fi

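# Every input listed in the usage message must be set. ${!env} below is bash
# indirect expansion: it yields the value of the variable whose name is stored in $env.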
for env in ASF_USERNAME ASF_PASSWORD RELEASE_VERSION RELEASE_TAG NEXT_VERSION GIT_EMAIL GIT_NAME GIT_BRANCH; do
  if [ -z "${!env}" ]; then
    echo "$env must be set to run this script"
    exit 1
  fi
done

init_java
init_maven_sbt

ASF_SPARK_REPO="gitbox.apache.org/repos/asf/spark.git"

rm -rf spark
git clone "https://$ASF_USERNAME:$ASF_PASSWORD@$ASF_SPARK_REPO" -b "$GIT_BRANCH"
cd spark

git config user.name "$GIT_NAME"
git config user.email "$GIT_EMAIL"

# Create release version
$MVN versions:set -DnewVersion=$RELEASE_VERSION | grep -v "no value" # silence logs
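# (versions:set is the Maven Versions Plugin goal that rewrites the project version
# in the parent pom.xml and propagates it to the child modules.)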
if [[ $RELEASE_VERSION != *"preview"* ]]; then
  # Set the release version in R/pkg/DESCRIPTION
  sed -i".tmp1" 's/Version.*$/Version: '"$RELEASE_VERSION"'/g' R/pkg/DESCRIPTION
else
  sed -i".tmp1" 's/-SNAPSHOT/'"-$(cut -d "-" -f 2 <<< $RELEASE_VERSION)"'/g' R/pkg/R/sparkR.R
fi
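# Illustration with a made-up preview version: for RELEASE_VERSION=3.5.0-preview1 the
# cut above yields "preview1", so "-SNAPSHOT" in R/pkg/R/sparkR.R becomes "-preview1".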
# Set the release version in docs
sed -i".tmp1" 's/SPARK_VERSION:.*$/SPARK_VERSION: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp2" 's/SPARK_VERSION_SHORT:.*$/SPARK_VERSION_SHORT: '"$RELEASE_VERSION"'/g' docs/_config.yml
sed -i".tmp3" 's/__version__ = .*$/__version__ = "'"$RELEASE_VERSION"'"/' python/pyspark/version.py

git commit -a -m "Preparing Spark release $RELEASE_TAG"
echo "Creating tag $RELEASE_TAG at the head of $GIT_BRANCH"
git tag "$RELEASE_TAG"

# Create next version
$MVN versions:set -DnewVersion=$NEXT_VERSION | grep -v "no value" # silence logs
# Remove -SNAPSHOT before setting the R version, as R expects version strings to contain only numbers
R_NEXT_VERSION=$(echo "$NEXT_VERSION" | sed 's/-SNAPSHOT//g')
sed -i".tmp4" 's/Version.*$/Version: '"$R_NEXT_VERSION"'/g' R/pkg/DESCRIPTION
# Write out R_NEXT_VERSION to the PySpark version info; we use dev0 instead of SNAPSHOT
# to be closer to PEP 440.
sed -i".tmp5" 's/__version__ = .*$/__version__ = "'"$R_NEXT_VERSION.dev0"'"/' python/pyspark/version.py
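# For example, with hypothetical values: NEXT_VERSION=3.5.2-SNAPSHOT gives
# R_NEXT_VERSION=3.5.2, so R/pkg/DESCRIPTION reads "Version: 3.5.2" and
# python/pyspark/version.py reads __version__ = "3.5.2.dev0".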

# Update docs with next version
sed -i".tmp6" 's/SPARK_VERSION:.*$/SPARK_VERSION: '"$NEXT_VERSION"'/g' docs/_config.yml
# Use R version for short version
sed -i".tmp7" 's/SPARK_VERSION_SHORT:.*$/SPARK_VERSION_SHORT: '"$R_NEXT_VERSION"'/g' docs/_config.yml

git commit -a -m "Preparing development version $NEXT_VERSION"

cd ..
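# is_dry_run is provided by release-util.sh (sourced above). In a dry run, nothing has
# been pushed; the tagged clone is simply kept locally for inspection.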
if is_dry_run; then
  mv spark spark.tag
  echo "Clone with version changes and tag available as spark.tag in the output directory."
fi