################################################################################
# DataSHIELD GHA test suite - dsBase
# Adapted from `azure-pipelines.yml` by Roberto Villegas-Diaz
#
# Inside the workspace root ($GITHUB_WORKSPACE) the job builds a file tree like:
#   /dsBase       <- Checked-out copy of datashield/dsBase
#   /dsBase/logs  <- Where test results and logs are collated
#   /testStatus   <- Checked-out copy of datashield/testStatus
#
# As of Jul 2025 this takes ~9 minutes to run.
################################################################################
name: dsBase test suite

on:
  push:
  schedule:
    - cron: '0 0 * * 0' # Weekly, at 00:00 on Sunday

jobs:
  dsBase_test_suite:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    permissions:
      contents: write

    # These should all be constant, except TEST_FILTER, which can be used to run
    # a subset of the test files in the testthat directory, e.g.:
    #   '*'            <- Run all tests.
    #   'asNumericDS*' <- Run all asNumericDS tests, i.e. the arg, smoke, etc. tests.
    #   '*_smk_*'      <- Run the smoke tests for all functions.
    env:
      TEST_FILTER: '*'
      _R_CHECK_SYSTEM_CLOCK_: 0
      WORKFLOW_ID: ${{ github.run_id }}-${{ github.run_attempt }}
      PROJECT_NAME: dsBase
      BRANCH_NAME: ${{ github.ref_name }}
      REPO_OWNER: ${{ github.repository_owner }}
      R_KEEP_PKG_SOURCE: yes
      GITHUB_TOKEN: ${{ github.token || 'placeholder-token' }}

    steps:
      - name: Checkout dsBase
        uses: actions/checkout@v4
        with:
          path: dsBase

      - name: Checkout testStatus
        if: ${{ github.actor != 'nektos/act' }} # skipped when running locally with act
        uses: actions/checkout@v4
        with:
          repository: ${{ env.REPO_OWNER }}/testStatus
          ref: master
          path: testStatus
          persist-credentials: false
          token: ${{ env.GITHUB_TOKEN }}

      - uses: r-lib/actions/setup-pandoc@v2

      - uses: r-lib/actions/setup-r@v2
        with:
          r-version: release
          http-user-agent: release
          use-public-rspm: true

      - name: Install dsBase
        run: |
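          # Install dsBase's CRAN dependencies, then install the checked-out
          # package itself from the working tree.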
          Rscript -e 'install.packages(c("RANN", "stringr", "lme4", "dplyr", "reshape2", "polycor", "gamlss", "gamlss.dist", "mice", "childsds", "usethis", "devtools"), dependencies = TRUE)'
          R CMD INSTALL ./dsBase

      - uses: r-lib/actions/setup-r-dependencies@v2
        with:
          dependencies: 'c("Imports")'
          extra-packages: |
            any::rcmdcheck
            cran::devtools
            cran::git2r
            cran::RCurl
            cran::readr
            cran::magrittr
            cran::xml2
            cran::purrr
            cran::dplyr
            cran::stringr
            cran::tidyr
            cran::quarto
            cran::knitr
            cran::kableExtra
            cran::rmarkdown
            cran::downlit
          needs: check

      - name: Check man files up-to-date
        run: |
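          # Hash the committed Rd files, regenerate them with devtools::document(),
          # and hash again; differing checksums mean man/ is out of sync with the
          # roxygen comments in the R/ files.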
          orig_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          R -e "devtools::document()"
          new_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          if [ "$orig_sum" != "$new_sum" ]; then
            echo "The committed manual files (man/*.Rd) are out of sync with the R files. Run devtools::document() locally, then commit the result."
            exit 1
          else
            echo "Documentation up-to-date."
          fi
        working-directory: dsBase
        continue-on-error: true

      - name: Run devtools::check
        run: |
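          # Run R CMD check without tests or examples (those run in their own
          # steps); the greps flag any errors/warnings/notes, although the step
          # is continue-on-error so it will not fail the job by itself.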
          R -q -e "library('devtools'); devtools::check(args = c('--no-tests', '--no-examples'))" | tee ../check.Rout
          grep -q "^0 errors" ../check.Rout && grep -q " 0 warnings" ../check.Rout && grep -q " 0 notes" ../check.Rout
        working-directory: dsBase
        continue-on-error: true

      - name: Run tests with coverage & JUnit report
        run: |
          mkdir -p logs
          R -q -e "devtools::reload();"
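          # covr::package_coverage() executes the testthat suite passed in via
          # `code=` and records line coverage alongside the JUnit and console
          # output. The '"'"' sequences close and reopen the shell's single
          # quotes so literal single quotes can appear inside the R script.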
          R -q -e '
            write.csv(
              covr::coverage_to_list(
                covr::package_coverage(
                  type = c("none"),
                  code = c('"'"'
                    output_file <- file("test_console_output.txt");
                    sink(output_file);
                    sink(output_file, type = "message");
                    junit_rep <- testthat::JunitReporter$new(file = file.path(getwd(), "test_results.xml"));
                    progress_rep <- testthat::ProgressReporter$new(max_failures = 999999);
                    multi_rep <- testthat::MultiReporter$new(reporters = list(progress_rep, junit_rep));
                    testthat::test_package("${{ env.PROJECT_NAME }}", filter = "${{ env.TEST_FILTER }}", reporter = multi_rep, stop_on_failure = FALSE)'"'"'
                  )
                )
              ),
              "coveragelist.csv"
            )'

          mv coveragelist.csv logs/
          mv test_* logs/
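          # The ProgressReporter summary must contain "FAIL 0", otherwise this
          # step (and hence the job) fails.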
          grep -q " FAIL 0 " logs/test_console_output.txt
        working-directory: dsBase

      - name: Check for JUnit errors
        run: |
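          # Passing <testsuite> tags carry failures="0" errors="0". Scrub that
          # pair, count the lines still mentioning errors=, and exit with the
          # count, so any suite with failures or errors fails the step.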
          issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | grep -c errors= || true)
          echo "Number of testsuites with issues: $issue_count"
          sed 's/failures="0" errors="0"//' test_results.xml | grep errors= > issues.log || true
          cat issues.log || true
          exit $issue_count
        working-directory: dsBase/logs

      - name: Write versions to file
        run: |
          echo "branch:${{ env.BRANCH_NAME }}" > ${{ env.WORKFLOW_ID }}.txt
          echo "os:$(lsb_release -ds)" >> ${{ env.WORKFLOW_ID }}.txt
          echo "R:$(R --version | head -n1)" >> ${{ env.WORKFLOW_ID }}.txt
          Rscript --vanilla -e 'sessionInfo()' >> session_info_${{ env.WORKFLOW_ID }}.txt
        working-directory: dsBase/logs

      - name: Parse results from testthat and covr
        run: |
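          # parse_test_report.R (in the testStatus repo) collates the JUnit XML
          # and coverage CSV in logs/; the URL is used to build source links, and
          # the two trailing regexes appear to be the patterns it uses to pull
          # function/test names out of the testthat identifiers.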
          Rscript --verbose --vanilla ../testStatus/source/parse_test_report.R logs/ logs/ https://github.com/datashield/${{ env.PROJECT_NAME }}/blob/${{ env.BRANCH_NAME }} '[^-:.]+' '(?<=::)[^:]+(?=::)'
        working-directory: dsBase

      - name: Render report
        run: |
          cd testStatus

          mkdir -p new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest/

          # Copy the collated logs into the new logs directory location
          cp -rv ../${{ env.PROJECT_NAME }}/logs/* new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
          cp -rv ../${{ env.PROJECT_NAME }}/logs/${{ env.WORKFLOW_ID }}.txt new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/

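          # Render the Quarto report against the logs copied above, then publish
          # it as the branch's "latest" page; the leading ../ in input_dir
          # suggests paths are resolved from source/, where the .qmd lives.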
          R -e 'input_dir <- file.path("../new/logs", Sys.getenv("PROJECT_NAME"), Sys.getenv("BRANCH_NAME"), Sys.getenv("WORKFLOW_ID")); quarto::quarto_render("source/test_report.qmd", execute_params = list(input_dir = input_dir))'
          mv source/test_report.html new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest/index.html

      - name: Upload test logs
        uses: actions/upload-artifact@v4
        with:
          name: dsbase-logs
          path: testStatus/new

      - name: Dump environment info
        run: |
          echo -e "\n#############################"
          echo -e "ls .: #######################"
          ls -al .
          echo -e "\n#############################"
          echo -e "lscpu: ######################"
          lscpu
          echo -e "\n#############################"
          echo -e "memory: #####################"
          free -m
          echo -e "\n#############################"
          echo -e "env: ########################"
          env
          echo -e "\n#############################"
          echo -e "R sessionInfo(): ############"
          R -e 'sessionInfo()'
          echo -e "\n#############################"
          echo -e "file tree: ##################"
          sudo apt-get install -y tree
          tree .