# dsBase tests' suite (#175)
# NOTE(review): the GitHub web-viewer banner about hidden/bidirectional Unicode
# characters was viewer boilerplate, not file content; kept here only as a comment.
################################################################################
# DataSHIELD GHA test suite - dsBase
# Adapted from `azure-pipelines.yml` by Roberto Villegas-Diaz
#
# Inside the root directory $(Pipeline.Workspace) will be a file tree like:
#   /dsBase       <- Checked out version of datashield/dsBase
#   /dsBase/logs  <- Where results of tests and logs are collated
#   /testStatus   <- Checked out version of datashield/testStatus
#
# As of Jul 2025 this takes ~ 9 mins to run.
################################################################################
name: dsBase tests' suite

on:
  push:
  schedule:
    - cron: '0 0 * * 0' # Weekly
    - cron: '0 1 * * *' # Nightly

jobs:
  dsBase_test_suite:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    permissions:
      contents: write
    # These should all be constant, except TEST_FILTER. This can be used to test
    # subsets of test files in the testthat directory. Options are like:
    # '*'            <- Run all tests.
    # 'asNumericDS*' <- Run all asNumericDS tests, i.e. all the arg, etc. tests.
    # '*_smk_*'      <- Run all the smoke tests for all functions.
    env:
      TEST_FILTER: '*'
      # R's internal check settings are case-sensitive environment variables;
      # this must be upper case (_R_CHECK_SYSTEM_CLOCK_) to be honoured by R CMD check.
      _R_CHECK_SYSTEM_CLOCK_: 0
      WORKFLOW_ID: ${{ github.run_id }}-${{ github.run_attempt }}
      PROJECT_NAME: dsBase
      BRANCH_NAME: ${{ github.ref_name }}
      REPO_OWNER: ${{ github.repository_owner }}
      R_KEEP_PKG_SOURCE: yes
      # github.token is always defined in GHA; the fallback only matters for
      # local runners (act) where the expression may evaluate empty.
      GITHUB_TOKEN: ${{ github.token || 'placeholder-token' }}

    steps:
      - name: Checkout dsBase
        uses: actions/checkout@v4
        with:
          path: dsBase

      - name: Checkout testStatus
        if: ${{ github.actor != 'nektos/act' }} # for local deployment only
        uses: actions/checkout@v4
        with:
          repository: ${{ env.REPO_OWNER }}/testStatus
          ref: master
          path: testStatus
          persist-credentials: false
          token: ${{ env.GITHUB_TOKEN }}

      - uses: r-lib/actions/setup-pandoc@v2

      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: release
          http-user-agent: release
          use-public-rspm: true

      # Install runtime dependencies first, then the checked-out package itself.
      - name: Install dsBase
        run: |
          Rscript -e 'install.packages(c("RANN", "stringr", "lme4", "dplyr", "reshape2", "polycor", "gamlss", "gamlss.dist", "mice", "childsds", "usethis", "devtools"), dependencies = TRUE)'
          R CMD INSTALL ./dsBase

      - uses: r-lib/actions/setup-r-dependencies@v2
        with:
          dependencies: 'c("Imports")'
          extra-packages: |
            any::rcmdcheck
            cran::devtools
            cran::git2r
            cran::RCurl
            cran::readr
            cran::magrittr
            cran::xml2
            cran::purrr
            cran::dplyr
            cran::stringr
            cran::tidyr
            cran::quarto
            cran::knitr
            cran::kableExtra
            cran::rmarkdown
            cran::downlit
          needs: check

      # Regenerate the man/ pages and compare checksums: if devtools::document()
      # changes anything, the committed .Rd files were stale.
      - name: Check man files up-to-date
        run: |
          orig_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          R -e "devtools::document()"
          new_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          if [ "$orig_sum" != "$new_sum" ]; then
            echo "Your committed manual files (man/*.Rd) are out of sync with the R files. Run devtools::document() locally then commit."
            exit 1
          else
            echo "Documentation up-to-date."
          fi
        working-directory: dsBase
        continue-on-error: true

      # Informational only (continue-on-error): fail signal comes from grep on
      # the captured check output, not from devtools::check itself.
      - name: Run devtools::check
        run: |
          R -q -e "library('devtools'); devtools::check(args = c('--no-tests', '--no-examples'))" | tee ../check.Rout
          grep -q "^0 errors" ../check.Rout && grep -q " 0 warnings" ../check.Rout && grep -q " 0 notes" ../check.Rout
        working-directory: dsBase
        continue-on-error: true

      # Runs testthat under covr. The '"'"' sequences end/re-open the shell's
      # single quotes so the inner R code can itself be single-quoted.
      - name: Run tests with coverage & JUnit report
        run: |
          mkdir -p logs
          R -q -e "devtools::reload();"
          R -q -e '
            write.csv(
              covr::coverage_to_list(
                covr::package_coverage(
                  type = c("none"),
                  code = c('"'"'
                    output_file <- file("test_console_output.txt");
                    sink(output_file);
                    sink(output_file, type = "message");
                    junit_rep <- testthat::JunitReporter$new(file = file.path(getwd(), "test_results.xml"));
                    progress_rep <- testthat::ProgressReporter$new(max_failures = 999999);
                    multi_rep <- testthat::MultiReporter$new(reporters = list(progress_rep, junit_rep));
                    testthat::test_package("${{ env.PROJECT_NAME }}", filter = "${{ env.TEST_FILTER }}", reporter = multi_rep, stop_on_failure = FALSE)'"'"'
                )
              )
            ),
            "coveragelist.csv"
          )'
          mv coveragelist.csv logs/
          mv test_* logs/
          grep -q " FAIL 0 " logs/test_console_output.txt
        working-directory: dsBase

      # Strip testsuites with zero failures/errors, then count what remains;
      # a non-zero count becomes the step's exit status.
      - name: Check for JUnit errors
        run: |
          issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | grep -c errors= || true)
          echo "Number of testsuites with issues: $issue_count"
          sed 's/failures="0" errors="0"//' test_results.xml | grep errors= > issues.log || true
          cat issues.log || true
          exit $issue_count
        working-directory: dsBase/logs

      - name: Write versions to file
        run: |
          echo "branch:${{ env.BRANCH_NAME }}" > ${{ env.WORKFLOW_ID }}.txt
          echo "os:$(lsb_release -ds)" >> ${{ env.WORKFLOW_ID }}.txt
          echo "R:$(R --version | head -n1)" >> ${{ env.WORKFLOW_ID }}.txt
        working-directory: dsBase/logs

      - name: Parse results from testthat and covr
        run: |
          Rscript --verbose --vanilla ../testStatus/source/parse_test_report.R logs/
        working-directory: dsBase

      # PROJECT_NAME / BRANCH_NAME / WORKFLOW_ID are inherited from the
      # workflow-level env above; no step-level re-declaration is needed.
      - name: Render report
        run: |
          cd testStatus
          mkdir -p new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest/
          # Copy logs to new logs directory location
          cp -rv ../${{ env.PROJECT_NAME }}/logs/* new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          cp -rv ../${{ env.PROJECT_NAME }}/logs/${{ env.WORKFLOW_ID }}.txt new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          R -e 'input_dir <- file.path("../new/logs", Sys.getenv("PROJECT_NAME"), Sys.getenv("BRANCH_NAME"), Sys.getenv("WORKFLOW_ID")); quarto::quarto_render("source/test_report.qmd", execute_params = list(input_dir = input_dir))'
          mv source/test_report.html new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/index.html
          cp -r new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/* new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest

      - name: Upload test logs
        uses: actions/upload-artifact@v4
        with:
          name: dsbase-logs
          path: testStatus/new

      # Diagnostics only: dump host and R session details for postmortems.
      - name: Dump environment info
        run: |
          echo -e "\n#############################"
          echo -e "ls /: ######################"
          ls -al .
          echo -e "\n#############################"
          echo -e "lscpu: ######################"
          lscpu
          echo -e "\n#############################"
          echo -e "memory: #####################"
          free -m
          echo -e "\n#############################"
          echo -e "env: ########################"
          env
          echo -e "\n#############################"
          echo -e "R sessionInfo(): ############"
          R -e 'sessionInfo()'
          sudo apt install tree -y
          tree .