#!/usr/bin/env bash
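
# Checks that the dependency manifests checked in under dev/deps/ match the
# dependencies that Maven actually resolves for each supported Hadoop/Hive
# profile combination. Run with --replace-manifest to regenerate the
# manifests instead of diffing against them.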

set -e

FWDIR="$(cd "$(dirname "$0")"/..; pwd)"
cd "$FWDIR"
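
# Explicitly set the locale so that the output of `sort` below is consistent
# across machines.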
export LC_ALL=C
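
# NOTE: the module profiles here should stay in sync with the profiles used
# by the release build, so that the manifests reflect what is published.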
HADOOP2_MODULE_PROFILES="-Phive-thriftserver -Pmesos -Pkubernetes -Pyarn -Phive"
MVN="build/mvn"
HADOOP_HIVE_PROFILES=(
    hadoop-2.7-hive-1.2
    hadoop-2.7-hive-2.3
    hadoop-3.2-hive-2.3
)
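
# Ask Maven for the current project version; `grep -E` trims the exec
# plugin's output down to the bare x.y.z version string. -e is suspended so
# that a Maven failure can be reported instead of silently killing the script.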
set +e
OLD_VERSION=$($MVN -q \
    -Dexec.executable="echo" \
    -Dexec.args='${project.version}' \
    --non-recursive \
    org.codehaus.mojo:exec-maven-plugin:1.6.0:exec | grep -E '[0-9]+\.[0-9]+\.[0-9]+')
if [ $? -ne 0 ]; then
  echo -e "Error while getting version string from Maven:\n$OLD_VERSION"
  exit 1
fi
set -e
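
# Install the artifacts under a random, throwaway version so that this run
# neither clobbers nor picks up real Spark artifacts already present in the
# local Maven repository.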
TEMP_VERSION="spark-$(python -S -c "import random; print(random.randrange(100000, 999999))")"

function reset_version {
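  # Remove the throwaway artifacts from the local Maven repository and
  # restore the original version string in the POMs.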
  find "$HOME/.m2/" | grep "$TEMP_VERSION" | xargs rm -rf
  $MVN -q versions:set -DnewVersion=$OLD_VERSION -DgenerateBackupPoms=false > /dev/null
}
trap reset_version EXIT
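
# Switch the POMs to the temporary version for the duration of this run; the
# EXIT trap above restores the original version.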
$MVN -q versions:set -DnewVersion=$TEMP_VERSION -DgenerateBackupPoms=false > /dev/null
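
# Generate a dependency manifest under dev/pr-deps for each Hadoop/Hive
# profile combination.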
for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
  if [[ $HADOOP_HIVE_PROFILE == *hadoop-3.2-hive-2.3* ]]; then
    HADOOP_PROFILE=hadoop-3.2
    HIVE_PROFILE=hive-2.3
  elif [[ $HADOOP_HIVE_PROFILE == *hadoop-2.7-hive-2.3* ]]; then
    HADOOP_PROFILE=hadoop-2.7
    HIVE_PROFILE=hive-2.3
  else
    HADOOP_PROFILE=hadoop-2.7
    HIVE_PROFILE=hive-1.2
  fi
  echo "Performing Maven install for $HADOOP_HIVE_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE jar:jar jar:test-jar install:install clean -q

  echo "Performing Maven validate for $HADOOP_HIVE_PROFILE"
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE validate -q

  echo "Generating dependency manifest for $HADOOP_HIVE_PROFILE"
  mkdir -p dev/pr-deps
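  # `dependency:build-classpath` prints the assembly module's resolved
  # classpath; normalize it into one artifact per line, sorted, with lines
  # containing "spark" filtered out so only third-party artifacts remain.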
  $MVN $HADOOP2_MODULE_PROFILES -P$HADOOP_PROFILE -P$HIVE_PROFILE dependency:build-classpath -pl assembly -am \
    | grep "Dependencies classpath:" -A 1 \
    | tail -n 1 | tr ":" "\n" | awk -F '/' '{
      # For each classpath entry, take the last three "/"-separated parts:
      # the artifact id, the version, and the jar name. Since the classifier,
      # when present, always sits between "artifact_id-version-" and the
      # ".jar" suffix of the jar name, extract it and print it explicitly
      # right before the jar name. For example:
      #   orc-core/1.5.5/nohive/orc-core-1.5.5-nohive.jar
      #                 ^^^^^^ extracted classifier
      #   okio/1.15.0//okio-1.15.0.jar
      #              ^ empty for dependencies without a classifier
      artifact_id=$(NF-2);
      version=$(NF-1);
      jar_name=$NF;
      classifier_start_index=length(artifact_id"-"version"-") + 1;
      classifier_end_index=index(jar_name, ".jar") - 1;
      classifier=substr(jar_name, classifier_start_index, classifier_end_index - classifier_start_index + 1);
      print artifact_id"/"version"/"classifier"/"jar_name
    }' | sort | grep -v spark > dev/pr-deps/spark-deps-$HADOOP_HIVE_PROFILE
done
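
# With --replace-manifest, promote the freshly generated manifests to be the
# new checked-in manifests instead of diffing against them.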
if [[ $@ == *replace-manifest* ]]; then
  echo "Replacing manifests and creating new files at dev/deps"
  rm -rf dev/deps
  mv dev/pr-deps dev/deps
  exit 0
fi
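
# Otherwise, compare each generated manifest against the checked-in one and
# fail on the first mismatch.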
for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
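  # `git diff --no-index` exits non-zero when the two files differ, which is
  # exactly the case being detected here, so suspend -e around it.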
  set +e
  dep_diff="$(
    git diff \
    --no-index \
    dev/deps/spark-deps-$HADOOP_HIVE_PROFILE \
    dev/pr-deps/spark-deps-$HADOOP_HIVE_PROFILE \
  )"
  set -e
  if [ "$dep_diff" != "" ]; then
    echo "Spark's published dependencies DO NOT MATCH the manifest file (dev/deps/spark-deps-$HADOOP_HIVE_PROFILE)."
    echo "To update the manifest file, run './dev/test-dependencies.sh --replace-manifest'."
    echo "$dep_diff"
    rm -rf dev/pr-deps
    exit 1
  fi
done

exit 0