#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

set -o pipefail
set -e

# This variable selects which coverage executable to use when combining
# coverage data and generating HTML reports, for example 'coverage3' for Python 3.
COV_EXEC="${COV_EXEC:-coverage}"
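
# For example, to combine and report with Python 3's coverage tool
# (an illustrative invocation; it assumes this script is saved as
# run-tests-with-coverage):
#
#   COV_EXEC=coverage3 ./run-tests-with-coverage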
FWDIR="$(cd "$(dirname "$0")" && pwd)"
pushd "$FWDIR" > /dev/null

# Ensure that the coverage executable is installed.
if ! hash "$COV_EXEC" 2>/dev/null; then
  echo "Missing coverage executable in your path; cannot run PySpark coverage"
  exit 1
fi
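
# If the check above fails, coverage.py is available from PyPI and can be
# installed with, e.g.:
#
#   pip install coverage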

# Set up the directories for coverage results.
export COVERAGE_DIR="$FWDIR/test_coverage"
rm -fr "$COVERAGE_DIR/coverage_data"
rm -fr "$COVERAGE_DIR/htmlcov"
mkdir -p "$COVERAGE_DIR/coverage_data"

# Prepend the current directory to the Python path so that the tests do not
# pick up our built PySpark zip library first.
export PYTHONPATH="$FWDIR:$PYTHONPATH"
# Also put our sitecustomize.py and coverage_daemon.py on the path.
export PYTHONPATH="$COVERAGE_DIR:$PYTHONPATH"
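
# For context: coverage.py's documented process_startup() hook is what makes
# per-process coverage work. A minimal sitecustomize.py along these lines
# would enable it (a sketch of the idea, not necessarily the exact contents
# of the file shipped under test_coverage):
#
#   import coverage
#   coverage.process_startup()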

# Use the 'spark.python.daemon.module' configuration to swap in the
# coverage-enabled Python workers.
export SPARK_CONF_DIR="$COVERAGE_DIR/conf"
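
# The conf directory above is expected to provide a spark-defaults.conf that
# wires the coverage daemon into the workers, roughly as follows (an
# assumption about its contents, shown for illustration):
#
#   spark.python.daemon.module coverage_daemon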

# This environment variable enables coverage collection in the Python
# processes spawned during the tests.
export COVERAGE_PROCESS_START="$FWDIR/.coveragerc"
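
# COVERAGE_PROCESS_START is coverage.py's standard hook: each process reads
# the named config file on startup. A minimal .coveragerc for this layout
# might look like the following (an illustrative assumption, not the actual
# file contents; coverage.py expands ${...} environment variables in config
# values, and 'parallel' gives each process its own data file to combine):
#
#   [run]
#   parallel = true
#   data_file = ${COVERAGE_DIR}/coverage_data/coverage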

./run-tests "$@"
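
# Any arguments are forwarded verbatim to ./run-tests, e.g. (the flag names
# are an assumption about run-tests, shown for illustration only):
#
#   ./run-tests-with-coverage --python-executables=python3 --modules=pyspark-sql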

# Don't run coverage for the coverage command itself.
unset COVERAGE_PROCESS_START

# Coverage can generate empty coverage data files; remove them to avoid
# warnings when combining. 'rm -f' keeps the pipeline from failing under
# 'set -e' when there is nothing to delete.
find "$COVERAGE_DIR/coverage_data" -size 0 -print0 | xargs -0 rm -f
echo "Combining collected coverage data under $COVERAGE_DIR/coverage_data"
"$COV_EXEC" combine
echo "Reporting the coverage data at $COVERAGE_DIR/coverage_data/coverage"
"$COV_EXEC" report --include "pyspark/*"
echo "Generating HTML files for PySpark coverage under $COVERAGE_DIR/htmlcov"
"$COV_EXEC" html --ignore-errors --include "pyspark/*" --directory "$COVERAGE_DIR/htmlcov"
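
# The HTML report can then be opened locally, e.g.:
#
#   open "$COVERAGE_DIR/htmlcov/index.html"   # macOS; use xdg-open on Linux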

popd