Back to home page

OSCL-LXR

 
 

    


0001 #!/usr/bin/env bash
0002 
0003 #
0004 # Licensed to the Apache Software Foundation (ASF) under one or more
0005 # contributor license agreements.  See the NOTICE file distributed with
0006 # this work for additional information regarding copyright ownership.
0007 # The ASF licenses this file to You under the Apache License, Version 2.0
0008 # (the "License"); you may not use this file except in compliance with
0009 # the License.  You may obtain a copy of the License at
0010 #
0011 #    http://www.apache.org/licenses/LICENSE-2.0
0012 #
0013 # Unless required by applicable law or agreed to in writing, software
0014 # distributed under the License is distributed on an "AS IS" BASIS,
0015 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0016 # See the License for the specific language governing permissions and
0017 # limitations under the License.
0018 #
0019 
set -e

# Resolve the project root (the parent of this script's directory) and run
# everything relative to it. Use $(…) and quote "$0"/"$(dirname …)" so the
# script still works when checked out under a path containing spaces
# (the original backtick form with unquoted $0 broke there).
FWDIR="$(cd "$(dirname "$0")"/..; pwd)"
cd "$FWDIR"

# Explicitly set locale in order to make `sort` output consistent across machines.
# See https://stackoverflow.com/questions/28881 for more details.
export LC_ALL=C

# TODO: This would be much nicer to do in SBT, once SBT supports Maven-style resolution.

# NOTE: These should match those in the release publishing script.
# A space-separated string of -P flags, deliberately left unquoted at use
# sites so it word-splits into individual Maven arguments.
HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pmesos -Pkubernetes -Pyarn -Phive"
MVN="build/mvn"
# Combined "<hadoop>-<hive>" profile names; split back into separate
# -P flags inside the manifest-generation loop below.
HADOOP_HIVE_PROFILES=(
    hadoop-2.7-hive-1.2
    hadoop-2.7-hive-2.3
    hadoop-3.2-hive-2.3
)
0039 
# We'll switch the version to a temp. one, publish POMs using that new version, then switch back to
# the old version. We need to do this because the `dependency:build-classpath` task needs to
# resolve Spark's internal submodule dependencies.

# From http://stackoverflow.com/a/26514030
set +e
OLD_VERSION=$($MVN -q \
    -Dexec.executable="echo" \
    -Dexec.args='${project.version}' \
    --non-recursive \
    org.codehaus.mojo:exec-maven-plugin:1.6.0:exec | grep -E '[0-9]+\.[0-9]+\.[0-9]+')
# NOTE: $? here is the exit status of the final pipeline stage (grep), which is
# non-zero when no version-like string was found — including the case where
# Maven itself failed and produced no output.
if [ $? != 0 ]; then
    # Diagnostics belong on stderr so callers capturing stdout aren't polluted.
    echo -e "Error while getting version string from Maven:\n$OLD_VERSION" 1>&2
    exit 1
fi
set -e
# A unique throwaway version so the temporary POMs can't collide with any
# real artifacts already in the local repo.
TEMP_VERSION="spark-$(python -S -c "import random; print(random.randrange(100000, 999999))")"
0057 
#######################################
# EXIT-trap cleanup: remove the temporary POMs published to the local Maven
# repo and restore the original project version.
# Globals: HOME, TEMP_VERSION, OLD_VERSION, MVN (all read)
#######################################
function reset_version {
  # Delete everything under ~/.m2 whose path contains the temp version.
  # Using find's own matching and -exec (instead of `find | grep | xargs`)
  # handles paths with whitespace safely and is a no-op when nothing matches;
  # -depth deletes children before their directories.
  find "$HOME/.m2/" -depth -path "*$TEMP_VERSION*" -exec rm -rf -- {} +

  # Restore the original version number:
  $MVN -q versions:set -DnewVersion="$OLD_VERSION" -DgenerateBackupPoms=false > /dev/null
}
0065 trap reset_version EXIT
0066 
0067 $MVN -q versions:set -DnewVersion=$TEMP_VERSION -DgenerateBackupPoms=false > /dev/null
0068 
# Generate manifests for each Hadoop profile:
for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
  # Split the combined "<hadoop>-<hive>" name into the two separate -P flags.
  # (Inside [[ ]], `**x**` is the same glob as `*x*`: each `*` matches anything.)
  if [[ $HADOOP_HIVE_PROFILE == **hadoop-3.2-hive-2.3** ]]; then
    HADOOP_PROFILE=hadoop-3.2
    HIVE_PROFILE=hive-2.3
  elif [[ $HADOOP_HIVE_PROFILE == **hadoop-2.7-hive-2.3** ]]; then
    HADOOP_PROFILE=hadoop-2.7
    HIVE_PROFILE=hive-2.3
  else
    HADOOP_PROFILE=hadoop-2.7
    HIVE_PROFILE=hive-1.2
  fi
  # Install all modules locally under the temp version so the classpath
  # resolution below can see Spark's internal submodule dependencies.
  echo "Performing Maven install for $HADOOP_HIVE_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE jar:jar jar:test-jar install:install clean -q

  echo "Performing Maven validate for $HADOOP_HIVE_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE validate -q

  echo "Generating dependency manifest for $HADOOP_HIVE_PROFILE"
  mkdir -p dev/pr-deps
  # Build the full runtime classpath of the assembly module, then normalize it
  # into one "artifact_id/version/classifier/jar_name" line per dependency.
  # NOTE(review): without `set -o pipefail` a Maven failure here is masked by
  # the later pipeline stages — confirm whether that is intentional.
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE dependency:build-classpath -pl assembly -am \
    | grep "Dependencies classpath:" -A 1 \
    | tail -n 1 | tr ":" "\n" | awk -F '/' '{
      # For each dependency classpath, we fetch the last three parts split by "/": artifact id, version, and jar name.
      # Since classifier, if exists, always sits between "artifact_id-version-" and ".jar" suffix in the jar name,
      # we extract classifier and put it right before the jar name explicitly.
      # For example, `orc-core/1.5.5/nohive/orc-core-1.5.5-nohive.jar`
      #                              ^^^^^^
      #                              extracted classifier
      #               `okio/1.15.0//okio-1.15.0.jar`
      #                           ^
      #                           empty for dependencies without classifier
      artifact_id=$(NF-2);
      version=$(NF-1);
      jar_name=$NF;
      classifier_start_index=length(artifact_id"-"version"-") + 1;
      classifier_end_index=index(jar_name, ".jar") - 1;
      classifier=substr(jar_name, classifier_start_index, classifier_end_index - classifier_start_index + 1);
      print artifact_id"/"version"/"classifier"/"jar_name
    }' | sort | grep -v spark > dev/pr-deps/spark-deps-$HADOOP_HIVE_PROFILE
done
0110 
# If any argument contains "replace-manifest", promote the freshly generated
# manifests to be the new committed baseline and stop here.
# `"$*"` joins all arguments into one string for the substring glob match —
# the documented form of what the original's unquoted `$@` inside [[ ]] and
# redundant `**…**` glob achieved implicitly.
if [[ "$*" == *replace-manifest* ]]; then
  echo "Replacing manifests and creating new files at dev/deps"
  rm -rf dev/deps
  mv dev/pr-deps dev/deps
  exit 0
fi
0117 
# Compare each generated manifest against the committed baseline; any
# difference fails the build with the diff printed for the developer.
for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
  # `git diff --no-index` exits non-zero when the files differ, so suspend
  # errexit around it and key off the captured diff text instead.
  set +e
  dep_diff="$(
    git diff \
    --no-index \
    "dev/deps/spark-deps-$HADOOP_HIVE_PROFILE" \
    "dev/pr-deps/spark-deps-$HADOOP_HIVE_PROFILE" \
  )"
  set -e
  if [[ -n "$dep_diff" ]]; then
    echo "Spark's published dependencies DO NOT MATCH the manifest file (dev/spark-deps)."
    echo "To update the manifest file, run './dev/test-dependencies.sh --replace-manifest'."
    echo "$dep_diff"
    rm -rf dev/pr-deps
    exit 1
  fi
done

exit 0