0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
set -o pipefail
set -e

# Absolute path of the directory containing this script. $(...) is used
# instead of backticks: it nests cleanly and keeps the inner quoting
# unambiguous (backticks inside double quotes are easy to misread).
FWDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)"
pushd "$FWDIR" > /dev/null

# Sourced (runs in this shell). NOTE(review): assumed to locate the R
# toolchain and set R_SCRIPT_PATH, which is used below — confirm in find-r.sh.
. "$FWDIR/find-r.sh"

# Sourced build/install of the SparkR package so the tarball produced below
# reflects the current tree. NOTE(review): assumed from the script name —
# confirm against install-dev.sh.
. "$FWDIR/install-dev.sh"
0031
0032
# SPARK_HOME is the parent of this script's directory; load-spark-env.sh is
# expected to set SPARK_SCALA_VERSION (used below) among other env vars.
SPARK_HOME="$(cd "${FWDIR}"/..; pwd)"
. "${SPARK_HOME}/bin/load-spark-env.sh"

# A released distribution (marked by a RELEASE file) keeps jars at the top
# level; a development tree keeps them under the assembly module for the
# active Scala version.
if [ -f "${SPARK_HOME}/RELEASE" ]; then
  SPARK_JARS_DIR="${SPARK_HOME}/jars"
else
  SPARK_JARS_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars"
fi

if [ -d "$SPARK_JARS_DIR" ]; then
  # Build the source tarball (SparkR_<version>.tar.gz) in the current dir.
  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD build "$FWDIR/pkg"

  # Building vignettes leaves intermediate artifacts in pkg/vignettes; keep
  # only the sources (*.Rmd, *.md) and rendered outputs (*.pdf, *.html).
  find pkg/vignettes/. -not -name '.' -not -name '*.Rmd' -not -name '*.md' -not -name '*.pdf' -not -name '*.html' -delete
else
  # Diagnostics belong on stderr so callers can separate them from output.
  echo "Error: Spark JARs not found in '$SPARK_HOME'" >&2
  exit 1
fi
0050
0051
# Package version: last field of the "Version: x.y.z" line in DESCRIPTION.
# $(...) replaces the legacy backtick form.
VERSION=$(grep Version "$FWDIR/pkg/DESCRIPTION" | awk '{print $NF}')

# Base options for "R CMD check"; env vars below can relax the check.
CRAN_CHECK_OPTIONS="--as-cran"

# NO_TESTS (any non-empty value) skips running the package test suite.
if [ -n "$NO_TESTS" ]
then
  CRAN_CHECK_OPTIONS="$CRAN_CHECK_OPTIONS --no-tests"
fi

# NO_MANUAL (any non-empty value) skips building the PDF manual and the
# vignettes, which avoids requiring a LaTeX toolchain.
if [ -n "$NO_MANUAL" ]
then
  CRAN_CHECK_OPTIONS="$CRAN_CHECK_OPTIONS --no-manual --no-vignettes"
fi

echo "Running CRAN check with $CRAN_CHECK_OPTIONS options"
0067
0068
0069
# Do not fail the check merely because Suggests-only packages are absent.
export _R_CHECK_FORCE_SUGGESTS_=FALSE

# When both the tests and the manual/vignettes are skipped, SPARK_HOME is not
# forwarded to the R child process — presumably because nothing in that
# reduced check needs a Spark installation (confirm against the test setup).
if [[ -n "$NO_TESTS" && -n "$NO_MANUAL" ]]; then
  # CRAN_CHECK_OPTIONS holds several flags; word-splitting is intentional.
  "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
else
  # Full check: pass SPARK_HOME through so the checked package can find Spark.
  SPARK_HOME="${SPARK_HOME}" "$R_SCRIPT_PATH/R" CMD check $CRAN_CHECK_OPTIONS "SparkR_$VERSION.tar.gz"
fi

popd > /dev/null