# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

version: "{build}-{branch}"

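# Fetch only the commit being built rather than the full repository history to speed up checkout.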
shallow_clone: true

platform: x64
configuration: Debug

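# Build only commits on the master branch.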
branches:
  only:
    - master

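# Trigger a build only when a commit touches SparkR-related files or the AppVeyor build scripts listed below.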
only_commits:
  files:
    - appveyor.yml
    - dev/appveyor-install-dependencies.ps1
    - R/
    - sql/core/src/main/scala/org/apache/spark/sql/api/r/
    - core/src/main/scala/org/apache/spark/api/r/
    - mllib/src/main/scala/org/apache/spark/ml/r/
    - core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
    - bin/*.cmd

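# Keep the local Maven repository between builds so dependencies are not re-downloaded every time.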
cache:
  - C:\Users\appveyor\.m2

install:
  # Install Maven and other build dependencies
  - ps: .\dev\appveyor-install-dependencies.ps1
  # Required packages for R unit tests
  - cmd: R -e "install.packages(c('knitr', 'rmarkdown', 'testthat', 'e1071', 'survival', 'arrow'), repos='https://cloud.r-project.org/')"
  - cmd: R -e "packageVersion('knitr'); packageVersion('rmarkdown'); packageVersion('testthat'); packageVersion('e1071'); packageVersion('survival'); packageVersion('arrow')"

build_script:
  # '-Djna.nosys=true' is required to avoid kernel32.dll load failure.
  # See SPARK-28759.
  # Ideally we should check the tests related to Hive in SparkR as well (SPARK-31745).
  - cmd: mvn -DskipTests -Psparkr -Djna.nosys=true package

environment:
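  # Run tests that testthat would otherwise skip on CRAN (skip_on_cran()).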
  NOT_CRAN: true
  # See SPARK-27848. Currently installing some dependent packages causes
  # "(converted from warning) unable to identify current timezone 'C':" for an unknown reason.
  # This environment variable works around the issue so that SparkR can be tested against a higher version.
  R_REMOTES_NO_ERRORS_FROM_WARNINGS: true

test_script:
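  # Run the SparkR test suite via spark-submit; '%CD:\=/%' rewrites backslashes in the current
  # directory path to forward slashes so it fits into the 'file:///' log4j configuration URI.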
  - cmd: .\bin\spark-submit2.cmd --driver-java-options "-Dlog4j.configuration=file:///%CD:\=/%/R/log4j.properties" --conf spark.hadoop.fs.defaultFS="file:///" R\pkg\tests\run-all.R

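# E-mail notifications are disabled for every build outcome.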
notifications:
  - provider: Email
    on_build_success: false
    on_build_failure: false
    on_build_status_changed: false