0001 #
0002 # Licensed to the Apache Software Foundation (ASF) under one or more
0003 # contributor license agreements. See the NOTICE file distributed with
0004 # this work for additional information regarding copyright ownership.
0005 # The ASF licenses this file to You under the Apache License, Version 2.0
0006 # (the "License"); you may not use this file except in compliance with
0007 # the License. You may obtain a copy of the License at
0008 #
0009 # http://www.apache.org/licenses/LICENSE-2.0
0010 #
0011 # Unless required by applicable law or agreed to in writing, software
0012 # distributed under the License is distributed on an "AS IS" BASIS,
0013 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
0014 # See the License for the specific language governing permissions and
0015 # limitations under the License.
0016 #
0017
0018 library(testthat)
0019 library(SparkR)
0020
# SPARK-25572
# Top-level driver for the SparkR test suite. The entire script is gated on
# the NOT_CRAN environment variable: on CRAN check machines (where NOT_CRAN
# is unset) nothing below runs at all.
if (identical(Sys.getenv("NOT_CRAN"), "true")) {
# Turn all warnings into errors
# warn = 2 promotes every warning raised during the run to an error, so any
# warning fails the suite loudly instead of scrolling past.
options("warn" = 2)

# Pin the timezone on Windows; presumably for timezone-sensitive tests where
# the Windows default TZ handling differs -- TODO confirm against fulltests.
if (.Platform$OS.type == "windows") {
Sys.setenv(TZ = "GMT")
}

# Setup global test environment
# Install Spark first to set SPARK_HOME

# NOTE(shivaram): We set overwrite to handle any old tar.gz files or directories left behind on
# CRAN machines. For Jenkins we should already have SPARK_HOME set.
install.spark(overwrite = TRUE)

sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
# Delete any SQL state (warehouse dir, Derby metastore) left behind by a
# previous run so each run starts from a clean slate.
sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
invisible(lapply(sparkRWhitelistSQLDirs,
function(x) { unlink(file.path(sparkRDir, x), recursive = TRUE, force = TRUE)}))
# Snapshot the R directory contents before the run. Nothing in this file reads
# it afterwards -- presumably a test file compares against it to detect stray
# files created during the run; verify against the fulltests.
sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)

# Globals apparently consumed by the individual test files when they create
# their Spark sessions -- TODO confirm the consumers in pkg/tests/fulltests.
sparkRTestMaster <- "local[1]"
sparkRTestConfig <- list()
# NOTE(review): this inner condition repeats the outer NOT_CRAN guard above,
# so inside this block it is always TRUE and the else (CRAN-only) branch
# below is effectively unreachable. Kept as-is; removing it would change the
# script's behavior if the outer gate is ever relaxed.
if (identical(Sys.getenv("NOT_CRAN"), "true")) {
# Empty string instead of "local[1]" -- presumably lets the session pick the
# master up from the environment on Jenkins; confirm against sparkR.session.
sparkRTestMaster <- ""
} else {
# Disable hsperfdata on CRAN
# Prepend -XX:-UsePerfData while preserving any pre-existing JVM options.
old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
# Route JVM temp files into R's session temp dir so nothing is left behind
# on the check machine.
tmpDir <- tempdir()
tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
spark.executor.extraJavaOptions = tmpArg)
}

# Run the unit tests bundled inside the installed SparkR package.
test_package("SparkR")

# Second stage: run the full (non-CRAN-safe) test suite from the source tree.
if (identical(Sys.getenv("NOT_CRAN"), "true")) {
# set random seed for predictable results. mostly for base's sample() in tree and classification
set.seed(42)

# TODO (SPARK-30663) To be removed once testthat 1.x is removed from all builds
# Both branches reach into testthat internals (:::), so this dispatch is
# version-fragile by design -- hence the removal TODO above. The call below
# passes (package, path, filter, reporter) positionally; presumably both
# internals accept that shape -- verify against the pinned testthat versions.
if (grepl("^1\\..*", packageVersion("testthat"))) {
# testthat 1.x
test_runner <- testthat:::run_tests
reporter <- "summary"

} else {
# testthat >= 2.0.0
test_runner <- testthat:::test_package_dir
reporter <- testthat::default_reporter()
}

# NULL third argument: presumably "no filename filter" -- confirm against the
# testthat internal signatures referenced above.
test_runner("SparkR",
file.path(sparkRDir, "pkg", "tests", "fulltests"),
NULL,
reporter)
}

# Clean up the Spark distribution fetched by install.spark() at the top.
SparkR:::uninstallDownloadedSpark()

}