├── .Rbuildignore ├── .github ├── .gitignore └── workflows │ ├── R-CMD-check-dev.yaml │ ├── R-CMD-check-status.yaml │ ├── R-CMD-check.yaml │ ├── check │ └── action.yml │ ├── commit │ └── action.yml │ ├── covr │ └── action.yml │ ├── dep-matrix │ └── action.yml │ ├── dep-suggests-matrix │ ├── action.R │ └── action.yml │ ├── fledge.yaml │ ├── get-extra │ └── action.yml │ ├── git-identity │ └── action.yml │ ├── install │ └── action.yml │ ├── lock.yaml │ ├── matrix-check │ └── action.yml │ ├── pkgdown-build │ └── action.yml │ ├── pkgdown-deploy │ └── action.yml │ ├── pkgdown.yaml │ ├── pr-commands.yaml │ ├── rate-limit │ └── action.yml │ ├── revdep.yaml │ ├── roxygenize │ └── action.yml │ ├── style │ └── action.yml │ ├── update-snapshots │ └── action.yml │ └── versions-matrix │ ├── action.R │ └── action.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── DESCRIPTION ├── NAMESPACE ├── NEWS.md ├── R ├── Connection.R ├── Driver.R ├── Result.R ├── deparse.R ├── import.R └── make-log-call.R ├── README.Rmd ├── README.md ├── dblog.Rproj ├── man ├── DBI.Rd ├── dblog.Rd └── get_default_logger.Rd └── tests ├── testthat.R └── testthat └── test-deparse.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^README\.Rmd$ 2 | ^dblog\.Rproj$ 3 | ^\.Rproj\.user$ 4 | ^CODE_OF_CONDUCT\.md$ 5 | ^\.github$ 6 | -------------------------------------------------------------------------------- /.github/.gitignore: -------------------------------------------------------------------------------- 1 | /pkg.lock 2 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check-dev.yaml: -------------------------------------------------------------------------------- 1 | # This workflow calls the GitHub API very frequently. 
2 | # Can't be run as part of commits 3 | on: 4 | schedule: 5 | - cron: "0 5 * * *" # 05:00 UTC every day only run on main branch 6 | push: 7 | branches: 8 | - "cran-*" 9 | tags: 10 | - "v*" 11 | 12 | name: rcc dev 13 | 14 | jobs: 15 | matrix: 16 | runs-on: ubuntu-22.04 17 | outputs: 18 | matrix: ${{ steps.set-matrix.outputs.matrix }} 19 | 20 | name: Collect deps 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | 25 | - uses: ./.github/workflows/rate-limit 26 | with: 27 | token: ${{ secrets.GITHUB_TOKEN }} 28 | 29 | - uses: r-lib/actions/setup-r@v2 30 | 31 | - id: set-matrix 32 | uses: ./.github/workflows/dep-matrix 33 | 34 | check-matrix: 35 | runs-on: ubuntu-22.04 36 | needs: matrix 37 | 38 | name: Check deps 39 | 40 | steps: 41 | - name: Install json2yaml 42 | run: | 43 | sudo npm install -g json2yaml 44 | 45 | - name: Check matrix definition 46 | run: | 47 | matrix='${{ needs.matrix.outputs.matrix }}' 48 | echo $matrix 49 | echo $matrix | jq . 50 | echo $matrix | json2yaml 51 | 52 | R-CMD-check-base: 53 | runs-on: ubuntu-22.04 54 | 55 | name: base 56 | 57 | # Begin custom: services 58 | # End custom: services 59 | 60 | strategy: 61 | fail-fast: false 62 | 63 | steps: 64 | - uses: actions/checkout@v4 65 | 66 | - uses: ./.github/workflows/custom/before-install 67 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 68 | 69 | - uses: ./.github/workflows/install 70 | with: 71 | cache-version: rcc-dev-base-1 72 | needs: build, check 73 | extra-packages: "any::rcmdcheck any::remotes ." 
74 | token: ${{ secrets.GITHUB_TOKEN }} 75 | 76 | - name: Session info 77 | run: | 78 | options(width = 100) 79 | if (!requireNamespace("sessioninfo", quietly = TRUE)) install.packages("sessioninfo") 80 | pkgs <- installed.packages()[, "Package"] 81 | sessioninfo::session_info(pkgs, include_base = TRUE) 82 | shell: Rscript {0} 83 | 84 | - uses: ./.github/workflows/custom/after-install 85 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 86 | 87 | - uses: ./.github/workflows/update-snapshots 88 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository 89 | 90 | - uses: ./.github/workflows/check 91 | with: 92 | results: ${{ matrix.package }} 93 | 94 | R-CMD-check-dev: 95 | needs: 96 | - matrix 97 | - R-CMD-check-base 98 | 99 | runs-on: ubuntu-22.04 100 | 101 | name: 'rcc-dev: ${{ matrix.package }}' 102 | 103 | # Begin custom: services 104 | # End custom: services 105 | 106 | strategy: 107 | fail-fast: false 108 | matrix: ${{fromJson(needs.matrix.outputs.matrix)}} 109 | 110 | steps: 111 | - uses: actions/checkout@v4 112 | 113 | - uses: ./.github/workflows/custom/before-install 114 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 115 | 116 | - uses: ./.github/workflows/install 117 | with: 118 | cache-version: rcc-dev-${{ matrix.package }}-1 119 | needs: build, check 120 | extra-packages: "any::rcmdcheck any::remotes ." 
121 | token: ${{ secrets.GITHUB_TOKEN }} 122 | 123 | - name: Install dev version of ${{ matrix.package }} 124 | env: 125 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 126 | run: | 127 | remotes::install_dev("${{ matrix.package }}", "https://cloud.r-project.org", upgrade = "always") 128 | shell: Rscript {0} 129 | 130 | - name: Session info 131 | run: | 132 | options(width = 100) 133 | if (!requireNamespace("sessioninfo", quietly = TRUE)) install.packages("sessioninfo") 134 | pkgs <- installed.packages()[, "Package"] 135 | sessioninfo::session_info(pkgs, include_base = TRUE) 136 | shell: Rscript {0} 137 | 138 | - uses: ./.github/workflows/custom/after-install 139 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 140 | 141 | - uses: ./.github/workflows/update-snapshots 142 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository 143 | 144 | - uses: ./.github/workflows/check 145 | with: 146 | results: ${{ matrix.package }} 147 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check-status.yaml: -------------------------------------------------------------------------------- 1 | # Workflow to update the status of a commit for the R-CMD-check workflow 2 | # Necessary because remote PRs cannot update the status of the commit 3 | on: 4 | workflow_run: 5 | workflows: 6 | - rcc 7 | types: 8 | - requested 9 | - completed 10 | 11 | name: rcc-status 12 | 13 | jobs: 14 | rcc-status: 15 | runs-on: ubuntu-24.04 16 | 17 | name: "Update commit status" 18 | 19 | permissions: 20 | contents: read 21 | statuses: write 22 | 23 | steps: 24 | - name: "Update commit status" 25 | # Only run if triggered by rcc workflow 26 | if: github.event.workflow_run.name == 'rcc' 27 | env: 28 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | run: | 30 | set -x 31 | 32 | if [ "${{ github.event.workflow_run.status }}" == "completed" ]; then 33 | if [ "${{ 
github.event.workflow_run.conclusion }}" == "success" ]; then 34 | state="success" 35 | else 36 | state="failure" 37 | fi 38 | 39 | # Read artifact ID 40 | artifact_id=$(gh api \ 41 | -H "Accept: application/vnd.github+json" \ 42 | -H "X-GitHub-Api-Version: 2022-11-28" \ 43 | repos/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }}/artifacts | jq -r '.artifacts[] | select(.name == "rcc-smoke-sha") | .id') 44 | 45 | if [ -n "${artifact_id}" ]; then 46 | # Download artifact 47 | curl -L -o rcc-smoke-sha.zip \ 48 | -H "Accept: application/vnd.github+json" \ 49 | -H "Authorization: Bearer ${GH_TOKEN}" \ 50 | -H "X-GitHub-Api-Version: 2022-11-28" \ 51 | https://api.github.com/repos/${{ github.repository }}/actions/artifacts/${artifact_id}/zip 52 | 53 | # Unzip artifact 54 | unzip rcc-smoke-sha.zip 55 | 56 | # Read artifact 57 | sha=$(cat rcc-smoke-sha.txt) 58 | 59 | # Clean up 60 | rm rcc-smoke-sha.zip rcc-smoke-sha.txt 61 | fi 62 | else 63 | state="pending" 64 | fi 65 | 66 | if [ -z "${sha}" ]; then 67 | sha=${{ github.event.workflow_run.head_sha }} 68 | fi 69 | 70 | html_url=${{ github.event.workflow_run.html_url }} 71 | description=${{ github.event.workflow_run.name }} 72 | 73 | gh api \ 74 | --method POST \ 75 | -H "Accept: application/vnd.github+json" \ 76 | -H "X-GitHub-Api-Version: 2022-11-28" \ 77 | repos/${{ github.repository }}/statuses/${sha} \ 78 | -f "state=${state}" -f "target_url=${html_url}" -f "description=${description}" -f "context=rcc" 79 | shell: bash 80 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | # 4 | # NOTE: This workflow is overkill for most R packages and 5 | # check-standard.yaml is likely a better choice. 6 | # usethis::use_github_action("check-standard") will install it. 7 | on: 8 | push: 9 | branches: 10 | - main 11 | - master 12 | - release 13 | - cran-* 14 | pull_request: 15 | branches: 16 | - main 17 | - master 18 | workflow_dispatch: 19 | inputs: 20 | ref: 21 | description: "Branch, tag, or commit to check out" 22 | required: false 23 | default: "main" 24 | versions-matrix: 25 | description: "Create a matrix of R versions" 26 | type: boolean 27 | default: false 28 | dep-suggests-matrix: 29 | description: "Create a matrix of suggested dependencies" 30 | type: boolean 31 | default: false 32 | merge_group: 33 | types: 34 | - checks_requested 35 | schedule: 36 | - cron: "10 1 * * *" 37 | 38 | concurrency: 39 | group: ${{ github.workflow }}-${{ github.ref }}-${{ inputs.ref || github.head_ref || github.sha }}-${{ github.base_ref || '' }} 40 | cancel-in-progress: true 41 | 42 | name: rcc 43 | 44 | jobs: 45 | rcc-smoke: 46 | runs-on: ubuntu-24.04 47 | 48 | outputs: 49 | sha: ${{ steps.commit.outputs.sha }} 50 | versions-matrix: ${{ steps.versions-matrix.outputs.matrix }} 51 | dep-suggests-matrix: ${{ steps.dep-suggests-matrix.outputs.matrix }} 52 | 53 | name: "Smoke test: stock R" 54 | 55 | permissions: 56 | contents: write 57 | statuses: write 58 | pull-requests: write 59 | actions: write 60 | 61 | # Begin custom: services 62 | # End custom: services 63 | 64 | steps: 65 | - uses: actions/checkout@v4 66 | with: 67 | ref: ${{ inputs.ref }} 68 | 69 | - name: Update status for rcc 70 | # FIXME: Wrap into action 71 | if: github.event_name == 'workflow_dispatch' 72 | env: 73 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 74 | run: | 75 | # Check status of this workflow 76 | state="pending" 77 | sha=${{ inputs.ref }} 78 | if [ -z "${sha}" ]; then 79 | sha=${{ github.head_ref }} 80 | fi 81 | if [ -z 
"${sha}" ]; then 82 | sha=${{ github.sha }} 83 | fi 84 | sha=$(git rev-parse ${sha}) 85 | 86 | html_url=$(gh api \ 87 | -H "Accept: application/vnd.github+json" \ 88 | -H "X-GitHub-Api-Version: 2022-11-28" \ 89 | repos/${{ github.repository }}/actions/runs/${{ github.run_id }} | jq -r .html_url) 90 | 91 | description="${{ github.workflow }} / ${{ github.job }}" 92 | 93 | gh api \ 94 | --method POST \ 95 | -H "Accept: application/vnd.github+json" \ 96 | -H "X-GitHub-Api-Version: 2022-11-28" \ 97 | repos/${{ github.repository }}/statuses/${sha} \ 98 | -f "state=${state}" -f "target_url=${html_url}" -f "description=${description}" -f "context=rcc" 99 | shell: bash 100 | 101 | - uses: ./.github/workflows/rate-limit 102 | with: 103 | token: ${{ secrets.GITHUB_TOKEN }} 104 | 105 | - uses: ./.github/workflows/git-identity 106 | 107 | - uses: ./.github/workflows/custom/before-install 108 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 109 | 110 | - uses: ./.github/workflows/install 111 | with: 112 | token: ${{ secrets.GITHUB_TOKEN }} 113 | cache-version: rcc-smoke-2 114 | needs: build, check, website 115 | # Beware of using dev pkgdown here, has brought in dev dependencies in the past 116 | extra-packages: any::rcmdcheck r-lib/roxygen2 any::decor r-lib/styler r-lib/pkgdown deps::. 117 | 118 | - uses: ./.github/workflows/custom/after-install 119 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 120 | 121 | # Must come after the custom after-install workflow 122 | - name: Install package 123 | run: | 124 | _R_SHLIB_STRIP_=true R CMD INSTALL . 
125 | shell: bash 126 | 127 | - id: versions-matrix 128 | # Only run for pull requests if the base repo is different from the head repo, not for workflow_dispatch if not requested, always run for other events 129 | if: (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository) && (github.event_name != 'workflow_dispatch' || inputs.versions-matrix) 130 | uses: ./.github/workflows/versions-matrix 131 | 132 | - id: dep-suggests-matrix 133 | # Not for workflow_dispatch if not requested, always run for other events 134 | if: github.event_name != 'workflow_dispatch' || inputs.dep-suggests-matrix 135 | uses: ./.github/workflows/dep-suggests-matrix 136 | 137 | - uses: ./.github/workflows/update-snapshots 138 | with: 139 | base: ${{ inputs.ref || github.head_ref }} 140 | 141 | - uses: ./.github/workflows/style 142 | 143 | - uses: ./.github/workflows/roxygenize 144 | 145 | - name: Remove config files from previous iteration 146 | run: | 147 | rm -f .github/dep-suggests-matrix.json .github/versions-matrix.json 148 | shell: bash 149 | 150 | - id: commit 151 | uses: ./.github/workflows/commit 152 | with: 153 | token: ${{ secrets.GITHUB_TOKEN }} 154 | 155 | - uses: ./.github/workflows/check 156 | with: 157 | results: ${{ runner.os }}-smoke-test 158 | 159 | - uses: ./.github/workflows/pkgdown-build 160 | if: github.event_name != 'push' 161 | 162 | - uses: ./.github/workflows/pkgdown-deploy 163 | if: github.event_name == 'push' 164 | 165 | # Upload sha as artifact 166 | - run: | 167 | echo -n "${{ steps.commit.outputs.sha }}" > rcc-smoke-sha.txt 168 | shell: bash 169 | 170 | - uses: actions/upload-artifact@v4 171 | with: 172 | name: rcc-smoke-sha 173 | path: rcc-smoke-sha.txt 174 | 175 | - name: Update status for rcc 176 | # FIXME: Wrap into action 177 | if: always() && github.event_name == 'workflow_dispatch' 178 | env: 179 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 180 | run: | 181 | # Check status of this workflow 182 | if [ "${{ 
job.status }}" == "success" ]; then 183 | state="success" 184 | else 185 | state="failure" 186 | fi 187 | 188 | sha=${{ steps.commit.outputs.sha }} 189 | if [ -z "${sha}" ]; then 190 | sha=${{ inputs.ref }} 191 | fi 192 | if [ -z "${sha}" ]; then 193 | sha=${{ github.head_ref }} 194 | fi 195 | if [ -z "${sha}" ]; then 196 | sha=${{ github.sha }} 197 | fi 198 | sha=$(git rev-parse ${sha}) 199 | 200 | html_url=$(gh api \ 201 | -H "Accept: application/vnd.github+json" \ 202 | -H "X-GitHub-Api-Version: 2022-11-28" \ 203 | repos/${{ github.repository }}/actions/runs/${{ github.run_id }} | jq -r .html_url) 204 | 205 | description="${{ github.workflow }} / ${{ github.job }}" 206 | 207 | gh api \ 208 | --method POST \ 209 | -H "Accept: application/vnd.github+json" \ 210 | -H "X-GitHub-Api-Version: 2022-11-28" \ 211 | repos/${{ github.repository }}/statuses/${sha} \ 212 | -f "state=${state}" -f "target_url=${html_url}" -f "description=${description}" -f "context=rcc" 213 | shell: bash 214 | 215 | rcc-smoke-check-matrix: 216 | runs-on: ubuntu-24.04 217 | 218 | name: "Check matrix" 219 | 220 | needs: 221 | - rcc-smoke 222 | 223 | steps: 224 | - uses: actions/checkout@v4 225 | with: 226 | ref: ${{ needs.rcc-smoke.outputs.sha }} 227 | 228 | - uses: ./.github/workflows/matrix-check 229 | with: 230 | matrix: ${{ needs.rcc-smoke.outputs.versions-matrix }} 231 | 232 | - uses: ./.github/workflows/matrix-check 233 | with: 234 | matrix: ${{ needs.rcc-smoke.outputs.dep-suggests-matrix }} 235 | 236 | rcc-full: 237 | needs: 238 | - rcc-smoke 239 | 240 | runs-on: ${{ matrix.os }} 241 | 242 | if: ${{ needs.rcc-smoke.outputs.versions-matrix != '' }} 243 | 244 | name: 'rcc: ${{ matrix.os }} (${{ matrix.r }}) ${{ matrix.desc }}' 245 | 246 | # Begin custom: services 247 | # End custom: services 248 | 249 | strategy: 250 | fail-fast: false 251 | matrix: ${{fromJson(needs.rcc-smoke.outputs.versions-matrix)}} 252 | 253 | steps: 254 | - uses: actions/checkout@v4 255 | with: 256 | ref: ${{ 
needs.rcc-smoke.outputs.sha }} 257 | 258 | - uses: ./.github/workflows/custom/before-install 259 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 260 | 261 | - uses: ./.github/workflows/install 262 | with: 263 | r-version: ${{ matrix.r }} 264 | cache-version: rcc-full-1 265 | token: ${{ secrets.GITHUB_TOKEN }} 266 | needs: build, check 267 | 268 | - uses: ./.github/workflows/custom/after-install 269 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 270 | 271 | - name: Must allow NOTEs if packages are missing, even with _R_CHECK_FORCE_SUGGESTS_ 272 | run: | 273 | if (Sys.getenv("RCMDCHECK_ERROR_ON") %in% c("", "note")) { 274 | pkgs <- setdiff(desc::desc_get_deps()$package, "R") 275 | installable <- vapply(pkgs, FUN.VALUE = logical(1), requireNamespace, quietly = TRUE) 276 | if (any(!installable)) { 277 | message("Missing packages: ", paste(pkgs[!installable], collapse = ", ")) 278 | cat('RCMDCHECK_ERROR_ON="warning"\n', file = Sys.getenv("GITHUB_ENV"), append = TRUE) 279 | } 280 | } 281 | shell: Rscript {0} 282 | 283 | - uses: ./.github/workflows/update-snapshots 284 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository 285 | 286 | - uses: ./.github/workflows/check 287 | if: ${{ ! 
matrix.covr }} 288 | with: 289 | results: ${{ runner.os }}-r${{ matrix.r }} 290 | 291 | - uses: ./.github/workflows/covr 292 | if: ${{ matrix.covr }} 293 | with: 294 | token: ${{ secrets.CODECOV_TOKEN }} 295 | 296 | # The status update is taken care of by R-CMD-check-status.yaml 297 | 298 | rcc-suggests: 299 | needs: 300 | - rcc-smoke 301 | 302 | runs-on: ubuntu-22.04 303 | 304 | if: ${{ needs.rcc-smoke.outputs.dep-suggests-matrix != '' }} 305 | 306 | name: Without ${{ matrix.package }} 307 | 308 | # Begin custom: services 309 | # End custom: services 310 | 311 | strategy: 312 | fail-fast: false 313 | matrix: ${{fromJson(needs.rcc-smoke.outputs.dep-suggests-matrix)}} 314 | 315 | steps: 316 | - uses: actions/checkout@v4 317 | 318 | - uses: ./.github/workflows/custom/before-install 319 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 320 | 321 | - uses: ./.github/workflows/install 322 | with: 323 | cache-version: rcc-dev-${{ matrix.package }}-1 324 | needs: build, check 325 | extra-packages: "any::rcmdcheck any::remotes ." 
326 | token: ${{ secrets.GITHUB_TOKEN }} 327 | 328 | - name: Remove ${{ matrix.package }} and all strong dependencies 329 | run: | 330 | pkg <- "${{ matrix.package }}" 331 | pkgs <- tools::package_dependencies(pkg, reverse = TRUE)[[1]] 332 | installed <- rownames(utils::installed.packages()) 333 | to_remove <- c(pkg, intersect(pkgs, installed)) 334 | print(to_remove) 335 | remove.packages(to_remove) 336 | shell: Rscript {0} 337 | 338 | - name: Session info 339 | run: | 340 | options(width = 100) 341 | if (!requireNamespace("sessioninfo", quietly = TRUE)) install.packages("sessioninfo") 342 | pkgs <- installed.packages()[, "Package"] 343 | sessioninfo::session_info(pkgs, include_base = TRUE) 344 | shell: Rscript {0} 345 | 346 | - uses: ./.github/workflows/custom/after-install 347 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 348 | 349 | - name: Must allow NOTEs, even with _R_CHECK_FORCE_SUGGESTS_ 350 | run: | 351 | if (Sys.getenv("RCMDCHECK_ERROR_ON") %in% c("", "note")) { 352 | cat('RCMDCHECK_ERROR_ON="warning"\n', file = Sys.getenv("GITHUB_ENV"), append = TRUE) 353 | } 354 | shell: Rscript {0} 355 | 356 | - name: Check env vars 357 | run: | 358 | print(Sys.getenv('_R_CHECK_FORCE_SUGGESTS_')) 359 | print(Sys.getenv('RCMDCHECK_ERROR_ON')) 360 | shell: Rscript {0} 361 | 362 | - uses: ./.github/workflows/check 363 | with: 364 | results: ${{ matrix.package }} 365 | 366 | # The status update is taken care of by R-CMD-check-status.yaml 367 | -------------------------------------------------------------------------------- /.github/workflows/check/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to check an R package" 2 | inputs: 3 | results: 4 | description: Slug for check results 5 | required: true 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - uses: r-lib/actions/check-r-package@v2 11 | with: 12 | # Fails on R 3.6 on Windows, remove when this job is removed? 
13 | args: 'c("--no-manual", "--as-cran", "--no-multiarch")' 14 | error-on: ${{ env.RCMDCHECK_ERROR_ON || '"note"' }} 15 | 16 | - name: Show test output 17 | if: always() 18 | run: | 19 | ## -- Show test output -- 20 | echo "::group::Test output" 21 | find check -name '*.Rout*' -exec head -n 1000000 '{}' \; || true 22 | echo "::endgroup::" 23 | shell: bash 24 | 25 | - name: Upload check results 26 | if: failure() 27 | uses: actions/upload-artifact@main 28 | with: 29 | name: ${{ inputs.results }}-results 30 | path: check 31 | -------------------------------------------------------------------------------- /.github/workflows/commit/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to commit changes to the repository" 2 | inputs: 3 | token: 4 | description: "GitHub token" 5 | required: true 6 | outputs: 7 | sha: 8 | description: "SHA of generated commit" 9 | value: ${{ steps.commit.outputs.sha }} 10 | 11 | runs: 12 | using: "composite" 13 | steps: 14 | - name: Commit if changed, create a PR if protected 15 | id: commit 16 | env: 17 | GITHUB_TOKEN: ${{ inputs.token }} 18 | run: | 19 | set -x 20 | if [ -n "$(git status --porcelain)" ]; then 21 | echo "Changed" 22 | protected=${{ github.ref_protected }} 23 | foreign=${{ github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository }} 24 | if [ "${foreign}" = "true" ]; then 25 | # https://github.com/krlmlr/actions-sync/issues/44 26 | echo "Can't push to foreign branch" 27 | elif [ "${protected}" = "true" ]; then 28 | current_branch=$(git branch --show-current) 29 | new_branch=gha-commit-$(git rev-parse --short HEAD) 30 | git checkout -b ${new_branch} 31 | git add . 
32 | git commit -m "chore: Auto-update from GitHub Actions"$'\n'$'\n'"Run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 33 | # Force-push, used in only one place 34 | # Alternative: separate branch names for each usage 35 | git push -u origin HEAD -f 36 | 37 | existing_pr=$(gh pr list --state open --base main --head ${new_branch} --json number --jq '.[] | .number') 38 | if [ -n "${existing_pr}" ]; then 39 | echo "Existing PR: ${existing_pr}" 40 | else 41 | gh pr create --base main --head ${new_branch} --title "chore: Auto-update from GitHub Actions" --body "Run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 42 | fi 43 | 44 | gh workflow run rcc -f ref=$(git rev-parse HEAD) 45 | gh pr merge --merge --auto 46 | else 47 | git fetch 48 | if [ -n "${GITHUB_HEAD_REF}" ]; then 49 | git add . 50 | git stash save 51 | git switch ${GITHUB_HEAD_REF} 52 | git merge origin/${GITHUB_BASE_REF} --no-edit 53 | git stash pop 54 | fi 55 | git add . 56 | git commit -m "chore: Auto-update from GitHub Actions"$'\n'$'\n'"Run: ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 57 | git push -u origin HEAD 58 | 59 | # Only set output if changed 60 | echo sha=$(git rev-parse HEAD) >> $GITHUB_OUTPUT 61 | fi 62 | fi 63 | shell: bash 64 | -------------------------------------------------------------------------------- /.github/workflows/covr/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to run covr for an R package" 2 | inputs: 3 | token: 4 | description: codecov token 5 | required: false 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - name: Run coverage check 11 | run: | 12 | if (dir.exists("tests/testthat")) { 13 | cov <- covr::package_coverage( 14 | quiet = FALSE, 15 | clean = FALSE, 16 | install_path = file.path(normalizePath(Sys.getenv("RUNNER_TEMP"), winslash = "/"), "package") 17 | ) 18 | covr::to_cobertura(cov) 19 | } else { 20 | 
message("No tests found, coverage not tested.") 21 | } 22 | shell: Rscript {0} 23 | 24 | - uses: codecov/codecov-action@v5 25 | with: 26 | # Fail if token is given 27 | fail_ci_if_error: ${{ inputs.token != '' }} 28 | files: ./cobertura.xml 29 | plugins: noop 30 | disable_search: true 31 | token: ${{ inputs.token }} 32 | 33 | - name: Show testthat output 34 | if: always() 35 | run: | 36 | ## -------------------------------------------------------------------- 37 | find '${{ runner.temp }}/package' -name 'testthat.Rout*' -exec cat '{}' \; || true 38 | shell: bash 39 | 40 | - name: Upload test results 41 | if: failure() 42 | uses: actions/upload-artifact@v4 43 | with: 44 | name: coverage-test-failures 45 | path: ${{ runner.temp }}/package 46 | -------------------------------------------------------------------------------- /.github/workflows/dep-matrix/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to compute a matrix with all dependent packages" 2 | outputs: 3 | matrix: 4 | description: "Generated matrix" 5 | value: ${{ steps.set-matrix.outputs.matrix }} 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - id: set-matrix 11 | run: | 12 | # Determine package dependencies 13 | # From remotes 14 | read_dcf <- function(path) { 15 | fields <- colnames(read.dcf(path)) 16 | as.list(read.dcf(path, keep.white = fields)[1, ]) 17 | } 18 | 19 | re_match <- function(text, pattern, perl = TRUE, ...) { 20 | 21 | stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern)) 22 | text <- as.character(text) 23 | 24 | match <- regexpr(pattern, text, perl = perl, ...) 
 25 | 26 | start <- as.vector(match) 27 | length <- attr(match, "match.length") 28 | end <- start + length - 1L 29 | 30 | matchstr <- substring(text, start, end) 31 | matchstr[ start == -1 ] <- NA_character_ 32 | 33 | res <- data.frame( 34 | stringsAsFactors = FALSE, 35 | .text = text, 36 | .match = matchstr 37 | ) 38 | 39 | if (!is.null(attr(match, "capture.start"))) { 40 | 41 | gstart <- attr(match, "capture.start") 42 | glength <- attr(match, "capture.length") 43 | gend <- gstart + glength - 1L 44 | 45 | groupstr <- substring(text, gstart, gend) 46 | groupstr[ gstart == -1 ] <- NA_character_ 47 | dim(groupstr) <- dim(gstart) 48 | 49 | res <- cbind(groupstr, res, stringsAsFactors = FALSE) 50 | } 51 | 52 | names(res) <- c(attr(match, "capture.names"), ".text", ".match") 53 | class(res) <- c("tbl_df", "tbl", class(res)) 54 | res 55 | } 56 | 57 | dev_split_ref <- function(x) { 58 | re_match(x, "^(?<pkg>[^@#]+)(?<ref>[@#].*)?$") 59 | } 60 | 61 | has_dev_dep <- function(package) { 62 | cran_url <- "https://cloud.r-project.org" 63 | 64 | refs <- dev_split_ref(package) 65 | url <- file.path(cran_url, "web", "packages", refs[["pkg"]], "DESCRIPTION") 66 | 67 | f <- tempfile() 68 | on.exit(unlink(f)) 69 | 70 | utils::download.file(url, f) 71 | desc <- read_dcf(f) 72 | 73 | url_fields <- c(desc$URL, desc$BugReports) 74 | 75 | if (length(url_fields) == 0) { 76 | return(FALSE) 77 | } 78 | 79 | pkg_urls <- unlist(strsplit(url_fields, "[[:space:]]*,[[:space:]]*")) 80 | 81 | # Remove trailing "/issues" from the BugReports URL 82 | pkg_urls <- sub("/issues$", "", pkg_urls) 83 | 84 | valid_domains <- c("github[.]com", "gitlab[.]com", "bitbucket[.]org") 85 | 86 | parts <- 87 | re_match(pkg_urls, 88 | sprintf("^https?://(?<domain>%s)/(?<username>%s)/(?<repo>%s)(?:/(?<subdir>%s))?", 89 | domain = paste0(valid_domains, collapse = "|"), 90 | username = "[^/]+", 91 | repo = "[^/@#]+", 92 | subdir = "[^/@$ ]+" 93 | ) 94 | )[c("domain", "username", "repo", "subdir")] 95 | 96 | # Remove cases which don't match and duplicates 97 | 
98 | parts <- unique(stats::na.omit(parts)) 99 | 100 | nrow(parts) == 1 101 | } 102 | 103 | if (!requireNamespace("desc", quietly = TRUE)) { 104 | install.packages("desc") 105 | } 106 | 107 | deps_df <- desc::desc_get_deps() 108 | deps_df <- deps_df[deps_df$type %in% c("Depends", "Imports", "LinkingTo", "Suggests"), ] 109 | 110 | packages <- sort(deps_df$package) 111 | packages <- intersect(packages, rownames(available.packages())) 112 | 113 | valid_dev_dep <- vapply(packages, has_dev_dep, logical(1)) 114 | 115 | # https://github.com/r-lib/remotes/issues/576 116 | valid_dev_dep[packages %in% c("igraph", "duckdb", "logging")] <- FALSE 117 | 118 | deps <- packages[valid_dev_dep] 119 | if (any(!valid_dev_dep)) { 120 | msg <- paste0( 121 | "Could not determine development repository for packages: ", 122 | paste(packages[!valid_dev_dep], collapse = ", ") 123 | ) 124 | writeLines(paste0("::warning::", msg)) 125 | } 126 | 127 | json <- paste0( 128 | '{"package":[', 129 | paste0('"', deps, '"', collapse = ","), 130 | ']}' 131 | ) 132 | writeLines(json) 133 | writeLines(paste0("matrix=", json), Sys.getenv("GITHUB_OUTPUT")) 134 | shell: Rscript {0} 135 | -------------------------------------------------------------------------------- /.github/workflows/dep-suggests-matrix/action.R: -------------------------------------------------------------------------------- 1 | # FIXME: Dynamic lookup by parsing https://svn.r-project.org/R/tags/ 2 | get_deps <- function() { 3 | # Determine package dependencies 4 | if (!requireNamespace("desc", quietly = TRUE)) { 5 | install.packages("desc") 6 | } 7 | 8 | deps_df <- desc::desc_get_deps() 9 | deps_df_optional <- deps_df$package[deps_df$type %in% c("Suggests", "Enhances")] 10 | deps_df_hard <- deps_df$package[deps_df$type %in% c("Depends", "Imports", "LinkingTo")] 11 | deps_df_base <- unlist(tools::standard_package_names(), use.names = FALSE) 12 | 13 | packages <- sort(deps_df_optional) 14 | packages <- intersect(packages, 
rownames(available.packages())) 15 | 16 | # Too big to fail, or can't be avoided: 17 | off_limits <- c("testthat", "rmarkdown", "rcmdcheck", deps_df_hard, deps_df_base) 18 | off_limits_dep <- unlist(tools::package_dependencies(off_limits, recursive = TRUE, which = "strong")) 19 | setdiff(packages, c(off_limits, off_limits_dep)) 20 | } 21 | 22 | if (Sys.getenv("GITHUB_BASE_REF") != "") { 23 | print(Sys.getenv("GITHUB_BASE_REF")) 24 | system("git fetch origin ${GITHUB_BASE_REF}") 25 | # Use .. to avoid having to fetch the entire history 26 | # https://github.com/krlmlr/actions-sync/issues/45 27 | diff_cmd <- "git diff origin/${GITHUB_BASE_REF}.. -- R/ tests/ | egrep '^[+][^+]' | grep -q ::" 28 | diff_lines <- system(diff_cmd, intern = TRUE) 29 | if (length(diff_lines) > 0) { 30 | writeLines("Changes using :: in R/ or tests/:") 31 | writeLines(diff_lines) 32 | packages <- get_deps() 33 | } else { 34 | writeLines("No changes using :: found in R/ or tests/, not checking without suggested packages") 35 | packages <- character() 36 | } 37 | } else { 38 | writeLines("No GITHUB_BASE_REF, checking without suggested packages") 39 | packages <- get_deps() 40 | } 41 | 42 | if (length(packages) > 0) { 43 | json <- paste0( 44 | '{"package":[', 45 | paste0('"', packages, '"', collapse = ","), 46 | "]}" 47 | ) 48 | writeLines(paste0("matrix=", json), Sys.getenv("GITHUB_OUTPUT")) 49 | writeLines(json) 50 | } else { 51 | writeLines("No suggested packages found.") 52 | } 53 | -------------------------------------------------------------------------------- /.github/workflows/dep-suggests-matrix/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to compute a matrix with all suggested packages" 2 | outputs: 3 | matrix: 4 | description: "Generated matrix" 5 | value: ${{ steps.set-matrix.outputs.matrix }} 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - id: set-matrix 11 | run: | 12 | Rscript 
./.github/workflows/dep-suggests-matrix/action.R 13 | shell: bash 14 | -------------------------------------------------------------------------------- /.github/workflows/fledge.yaml: -------------------------------------------------------------------------------- 1 | name: fledge 2 | 3 | on: 4 | # for manual triggers 5 | workflow_dispatch: 6 | inputs: 7 | pr: 8 | description: "Create PR" 9 | required: false 10 | type: boolean 11 | default: false 12 | # daily run 13 | schedule: 14 | - cron: "30 0 * * *" 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || '' }}-${{ github.base_ref || '' }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | check_fork: 22 | runs-on: ubuntu-24.04 23 | outputs: 24 | is_forked: ${{ steps.check.outputs.is_forked }} 25 | steps: 26 | - name: Check if the repo is forked 27 | id: check 28 | env: 29 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 30 | run: | 31 | is_forked=$(gh api repos/${{ github.repository }} | jq .fork) 32 | echo "is_forked=${is_forked}" >> $GITHUB_OUTPUT 33 | shell: bash 34 | 35 | fledge: 36 | runs-on: ubuntu-24.04 37 | needs: check_fork 38 | if: needs.check_fork.outputs.is_forked == 'false' 39 | permissions: 40 | contents: write 41 | pull-requests: write 42 | actions: write 43 | env: 44 | FLEDGE_GHA_CI: true 45 | steps: 46 | - uses: actions/checkout@v4 47 | with: 48 | fetch-depth: 0 49 | fetch-tags: true 50 | 51 | - name: Configure Git identity 52 | run: | 53 | env | sort 54 | git config --local user.name "$GITHUB_ACTOR" 55 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 56 | shell: bash 57 | 58 | - name: Update apt 59 | run: | 60 | sudo apt-get update 61 | shell: bash 62 | 63 | - uses: r-lib/actions/setup-r@v2 64 | with: 65 | use-public-rspm: true 66 | 67 | - uses: r-lib/actions/setup-r-dependencies@v2 68 | env: 69 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 70 | with: 71 | pak-version: devel 72 | packages: cynkra/fledge 73 | cache-version: fledge-1 74 | 75 
| - name: Count rulesets 76 | # Assume that branch is protected if ruleset exists 77 | id: rulesets 78 | env: 79 | GH_TOKEN: ${{ github.token }} 80 | run: | 81 | n_rulesets=$(gh api repos/${{ github.repository }}/rulesets -q length) 82 | echo "count=${n_rulesets}" >> $GITHUB_OUTPUT 83 | shell: bash 84 | 85 | - name: Switch to branch if branch protection is enabled 86 | if: github.ref_protected == 'true' || inputs.pr == 'true' || steps.rulesets.outputs.count > 0 87 | run: | 88 | git checkout -b fledge 89 | git push -f -u origin HEAD 90 | shell: bash 91 | 92 | - name: Bump version 93 | env: 94 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 95 | run: | 96 | check_default_branch <- ("${{ github.ref_protected == 'true' || inputs.pr == 'true' || steps.rulesets.outputs.count > 0 }}" != "true") 97 | if (fledge::bump_version(which = "dev", no_change_behavior = "noop", check_default_branch = check_default_branch)) { 98 | fledge::finalize_version(push = TRUE) 99 | } 100 | shell: Rscript {0} 101 | 102 | - name: Create and merge PR if branch protection is enabled 103 | if: github.ref_protected == 'true' || inputs.pr == 'true' || steps.rulesets.outputs.count > 0 104 | env: 105 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 106 | run: | 107 | set -ex 108 | if [ -n "$(git diff main --numstat)" ]; then 109 | gh pr create --base main --head fledge --fill-first 110 | gh workflow run rcc -f ref=$(git rev-parse HEAD) 111 | gh pr merge --squash --auto 112 | else 113 | echo "No changes." 
114 | fi 115 | shell: bash 116 | 117 | - name: Check release 118 | env: 119 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 120 | run: | 121 | fledge:::release_after_cran_built_binaries() 122 | shell: Rscript {0} 123 | -------------------------------------------------------------------------------- /.github/workflows/get-extra/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to determine extra packages to be installed" 2 | outputs: 3 | packages: 4 | description: "List of extra packages" 5 | value: ${{ steps.get-extra.outputs.packages }} 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - name: Get extra packages 11 | id: get-extra 12 | run: | 13 | set -x 14 | packages=$( ( grep Config/gha/extra-packages DESCRIPTION || true ) | cut -d " " -f 2) 15 | echo packages=$packages >> $GITHUB_OUTPUT 16 | shell: bash 17 | -------------------------------------------------------------------------------- /.github/workflows/git-identity/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to set up a Git identity" 2 | 3 | runs: 4 | using: "composite" 5 | steps: 6 | - name: Configure Git identity 7 | run: | 8 | env | sort 9 | git config --local user.name "$GITHUB_ACTOR" 10 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 11 | shell: bash 12 | -------------------------------------------------------------------------------- /.github/workflows/install/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to run for installing R packages" 2 | inputs: 3 | token: 4 | description: GitHub token, set to secrets.GITHUB_TOKEN 5 | required: true 6 | r-version: 7 | description: Passed on to r-lib/actions/setup-r@v2 8 | required: false 9 | default: release 10 | install-r: 11 | description: Passed on to r-lib/actions/setup-r@v2 12 | required: false 13 | default: true 14 | needs: 15 | description: 
Passed on to r-lib/actions/setup-r-dependencies@v2 16 | required: false 17 | default: "" 18 | packages: 19 | description: Passed on to r-lib/actions/setup-r-dependencies@v2 20 | required: false 21 | default: deps::., any::sessioninfo 22 | extra-packages: 23 | description: Passed on to r-lib/actions/setup-r-dependencies@v2 24 | required: false 25 | default: any::rcmdcheck 26 | cache-version: 27 | description: Passed on to r-lib/actions/setup-r-dependencies@v2 28 | required: false 29 | default: 1 30 | 31 | runs: 32 | using: "composite" 33 | steps: 34 | - name: Set environment variables 35 | run: | 36 | echo "R_REMOTES_NO_ERRORS_FROM_WARNINGS=true" | tee -a $GITHUB_ENV 37 | echo "R_KEEP_PKG_SOURCE=yes" | tee -a $GITHUB_ENV 38 | echo "_R_CHECK_SYSTEM_CLOCK_=false" | tee -a $GITHUB_ENV 39 | echo "_R_CHECK_FUTURE_FILE_TIMESTAMPS_=false" | tee -a $GITHUB_ENV 40 | # prevent rgl issues because no X11 display is available 41 | echo "RGL_USE_NULL=true" | tee -a $GITHUB_ENV 42 | # from https://github.com/r-devel/r-dev-web/blob/main/CRAN/QA/Kurt/lib/R/Scripts/check_CRAN_incoming.R 43 | echo "_R_CHECK_CRAN_INCOMING_CHECK_FILE_URIS_=true" | tee -a $GITHUB_ENV 44 | echo "_R_CHECK_CRAN_INCOMING_NOTE_GNU_MAKE_=true" | tee -a $GITHUB_ENV 45 | echo "_R_CHECK_PACKAGE_DEPENDS_IGNORE_MISSING_ENHANCES_=true" | tee -a $GITHUB_ENV 46 | echo "_R_CHECK_CODE_CLASS_IS_STRING_=true" | tee -a $GITHUB_ENV 47 | echo "_R_CHECK_CODOC_VARIABLES_IN_USAGES_=true" | tee -a $GITHUB_ENV 48 | echo "_R_CHECK_CONNECTIONS_LEFT_OPEN_=true" | tee -a $GITHUB_ENV 49 | echo "_R_CHECK_DATALIST_=true" | tee -a $GITHUB_ENV 50 | echo "_R_CHECK_NEWS_IN_PLAIN_TEXT_=true" | tee -a $GITHUB_ENV 51 | echo "_R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_=true" | tee -a $GITHUB_ENV 52 | echo "_R_CHECK_RD_CONTENTS_KEYWORDS_=true" | tee -a $GITHUB_ENV 53 | echo "_R_CHECK_R_DEPENDS_=warn" | tee -a $GITHUB_ENV 54 | echo "_R_CHECK_S3_METHODS_SHOW_POSSIBLE_ISSUES_=true" | tee -a $GITHUB_ENV 55 | echo "_R_CHECK_THINGS_IN_TEMP_DIR_=true" 
| tee -a $GITHUB_ENV 56 | echo "_R_CHECK_UNDOC_USE_ALL_NAMES_=true" | tee -a $GITHUB_ENV 57 | echo "_R_CHECK_URLS_SHOW_301_STATUS_=true" | tee -a $GITHUB_ENV 58 | echo "_R_CXX_USE_NO_REMAP_=true" | tee -a $GITHUB_ENV 59 | # There is no way to disable recency and frequency checks when the incoming checks are run 60 | # echo "_R_CHECK_CRAN_INCOMING_=true" | tee -a $GITHUB_ENV 61 | echo "_R_CHECK_CRAN_INCOMING_SKIP_LARGE_VERSION_=true" | tee -a $GITHUB_ENV 62 | echo "_R_CHECK_FORCE_SUGGESTS_=false" | tee -a $GITHUB_ENV 63 | shell: bash 64 | 65 | - name: Set environment variables (non-Windows only) 66 | if: runner.os != 'Windows' 67 | run: | 68 | echo "_R_CHECK_BASHISMS_=true" | tee -a $GITHUB_ENV 69 | shell: bash 70 | 71 | - name: Update apt 72 | if: runner.os == 'Linux' 73 | run: | 74 | sudo apt-get update 75 | sudo apt-get install -y aspell 76 | echo "_R_CHECK_CRAN_INCOMING_USE_ASPELL_=true" | tee -a $GITHUB_ENV 77 | shell: bash 78 | 79 | - name: Remove pkg-config@0.29.2 80 | if: runner.os == 'macOS' 81 | run: | 82 | brew uninstall pkg-config@0.29.2 || true 83 | shell: bash 84 | 85 | - uses: r-lib/actions/setup-pandoc@v2 86 | 87 | - uses: r-lib/actions/setup-r@v2 88 | with: 89 | r-version: ${{ inputs.r-version }} 90 | install-r: ${{ inputs.install-r }} 91 | http-user-agent: ${{ matrix.config.http-user-agent }} 92 | use-public-rspm: true 93 | 94 | - id: get-extra 95 | run: | 96 | set -x 97 | packages=$( ( grep Config/gha/extra-packages DESCRIPTION || true ) | cut -d " " -f 2) 98 | echo packages=$packages >> $GITHUB_OUTPUT 99 | shell: bash 100 | 101 | - uses: r-lib/actions/setup-r-dependencies@v2 102 | env: 103 | GITHUB_PAT: ${{ inputs.token }} 104 | with: 105 | pak-version: stable 106 | needs: ${{ inputs.needs }} 107 | packages: ${{ inputs.packages }} 108 | extra-packages: ${{ inputs.extra-packages }} ${{ ( matrix.covr && 'covr xml2' ) || '' }} ${{ steps.get-extra.outputs.packages }} 109 | cache-version: ${{ inputs.cache-version }} 110 | 111 | - name: Add pkg.lock to 
.gitignore 112 | run: | 113 | set -x 114 | if ! [ -f .github/.gitignore ] || [ -z "$(grep '^/pkg.lock$' .github/.gitignore)" ]; then 115 | echo /pkg.lock >> .github/.gitignore 116 | fi 117 | shell: bash 118 | 119 | - name: Add fake qpdf and checkbashisms 120 | if: runner.os == 'Linux' 121 | run: | 122 | sudo ln -s $(which true) /usr/local/bin/qpdf 123 | sudo ln -s $(which true) /usr/local/bin/checkbashisms 124 | shell: bash 125 | 126 | - name: Install ccache 127 | uses: krlmlr/ccache-action@parallel-dir 128 | with: 129 | max-size: 10G 130 | verbose: 1 131 | save: false 132 | restore: false 133 | 134 | - name: Use ccache for compiling R code, and parallelize 135 | run: | 136 | mkdir -p ~/.R 137 | echo 'CC := ccache $(CC)' >> ~/.R/Makevars 138 | echo 'CXX := ccache $(CXX)' >> ~/.R/Makevars 139 | echo 'CXX11 := ccache $(CXX11)' >> ~/.R/Makevars 140 | echo 'CXX14 := ccache $(CXX14)' >> ~/.R/Makevars 141 | echo 'CXX17 := ccache $(CXX17)' >> ~/.R/Makevars 142 | echo 'MAKEFLAGS = -j2' >> ~/.R/Makevars 143 | cat ~/.R/Makevars 144 | 145 | echo 'CCACHE_SLOPPINESS=locale,time_macros' | tee -a $GITHUB_ENV 146 | 147 | # echo 'CCACHE_DEBUG=true' | tee -a $GITHUB_ENV 148 | # echo "CCACHE_DEBUGDIR=$(dirname $(pwd))/ccache-debug" | tee -a $GITHUB_ENV 149 | # mkdir -p $(dirname $(pwd))/.ccache-debug 150 | 151 | echo 'PKG_BUILD_EXTRA_FLAGS=false' | tee -a $GITHUB_ENV 152 | 153 | # Repair 154 | git rm -rf .ccache || true 155 | rm -rf .ccache 156 | shell: bash 157 | 158 | - name: Show R CMD config --all 159 | run: | 160 | R CMD config --all 161 | shell: bash 162 | -------------------------------------------------------------------------------- /.github/workflows/lock.yaml: -------------------------------------------------------------------------------- 1 | name: "Lock threads" 2 | permissions: 3 | issues: write 4 | pull-requests: write 5 | discussions: write 6 | on: 7 | workflow_dispatch: 8 | schedule: 9 | - cron: "37 2 * * *" 10 | 11 | jobs: 12 | lock: 13 | runs-on: ubuntu-24.04 14 | 
steps: 15 | - uses: krlmlr/lock-threads@patch-1 16 | with: 17 | github-token: ${{ github.token }} 18 | issue-inactive-days: "365" 19 | issue-lock-reason: "" 20 | issue-comment: > 21 | This old thread has been automatically locked. If you think you have 22 | found something related to this, please open a new issue and link to this 23 | old issue if necessary. 24 | -------------------------------------------------------------------------------- /.github/workflows/matrix-check/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to check a matrix with all R and OS versions, computed with the versions-matrix action" 2 | inputs: 3 | matrix: 4 | description: "Generated matrix" 5 | required: true 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - name: Install json2yaml 11 | run: | 12 | sudo npm install -g json2yaml 13 | shell: bash 14 | 15 | - run: | 16 | matrix='${{ inputs.matrix }}' 17 | if [ -n "${matrix}" ]; then 18 | echo $matrix | jq . 
19 | echo $matrix | json2yaml 20 | else 21 | echo "No matrix found" 22 | fi 23 | shell: bash 24 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown-build/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to build a pkgdown website" 2 | 3 | runs: 4 | using: "composite" 5 | steps: 6 | - name: Build site 7 | run: | 8 | pkgdown::build_site() 9 | shell: Rscript {0} 10 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown-deploy/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to deploy a pkgdown website" 2 | 3 | runs: 4 | using: "composite" 5 | steps: 6 | - name: Deploy site 7 | uses: nick-fields/retry@v3 8 | with: 9 | timeout_minutes: 15 10 | max_attempts: 10 11 | command: | 12 | R -q -e 'pkgdown::deploy_to_branch(new_process = FALSE)' 13 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Also included in R-CMD-check.yaml, this workflow only listens to pushes to branches 3 | # that start with "docs*" or "cran-*" and does not need to act on pushes to the main branch. 4 | on: 5 | push: 6 | branches: 7 | - "docs*" 8 | - "cran-*" 9 | # The main branch is excluded here, it is handled by the R-CMD-check workflow. 10 | # This workflow is only for handling pushes to designated branches. 
11 | workflow_dispatch: 12 | 13 | name: pkgdown 14 | 15 | concurrency: 16 | group: ${{ github.workflow }}-${{ github.ref }}-${{ github.head_ref || github.sha }}-${{ github.base_ref || '' }} 17 | cancel-in-progress: true 18 | 19 | jobs: 20 | pkgdown: 21 | runs-on: ubuntu-24.04 22 | 23 | name: "pkgdown" 24 | 25 | # Begin custom: services 26 | # End custom: services 27 | 28 | steps: 29 | - uses: actions/checkout@v4 30 | 31 | - uses: ./.github/workflows/rate-limit 32 | with: 33 | token: ${{ secrets.GITHUB_TOKEN }} 34 | 35 | - uses: ./.github/workflows/git-identity 36 | if: github.event_name == 'push' 37 | 38 | - uses: ./.github/workflows/custom/before-install 39 | if: hashFiles('.github/workflows/custom/before-install/action.yml') != '' 40 | 41 | - uses: ./.github/workflows/install 42 | with: 43 | token: ${{ secrets.GITHUB_TOKEN }} 44 | cache-version: pkgdown-2 45 | needs: website 46 | extra-packages: r-lib/pkgdown local::. 47 | 48 | - uses: ./.github/workflows/custom/after-install 49 | if: hashFiles('.github/workflows/custom/after-install/action.yml') != '' 50 | 51 | - uses: ./.github/workflows/pkgdown-build 52 | if: github.event_name != 'push' 53 | 54 | - uses: ./.github/workflows/pkgdown-deploy 55 | if: github.event_name == 'push' 56 | -------------------------------------------------------------------------------- /.github/workflows/pr-commands.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | issue_comment: 3 | types: [created] 4 | name: Commands 5 | jobs: 6 | document: 7 | if: startsWith(github.event.comment.body, '/document') 8 | name: document 9 | # macos is actually better here due to native binary packages 10 | runs-on: macos-latest 11 | env: 12 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 13 | steps: 14 | - uses: actions/checkout@v4 15 | - uses: r-lib/actions/pr-fetch@v2 16 | with: 17 | repo-token: ${{ secrets.GITHUB_TOKEN }} 18 | - uses: r-lib/actions/setup-r@v2 19 | - name: Configure Git identity 20 | run: | 21 
| env | sort 22 | git config --local user.name "$GITHUB_ACTOR" 23 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 24 | shell: bash 25 | - name: Install dependencies 26 | run: | 27 | install.packages(c("remotes", "roxygen2"), type = "binary") 28 | remotes::install_deps(dependencies = TRUE) 29 | shell: Rscript {0} 30 | - name: Document 31 | run: | 32 | roxygen2::roxygenise() 33 | shell: Rscript {0} 34 | - name: commit 35 | run: | 36 | if [ -n "$(git status --porcelain man/ NAMESPACE)" ]; then 37 | git add man/ NAMESPACE 38 | git commit -m 'Document' 39 | fi 40 | - uses: r-lib/actions/pr-push@v2 41 | with: 42 | repo-token: ${{ secrets.GITHUB_TOKEN }} 43 | style: 44 | if: startsWith(github.event.comment.body, '/style') 45 | name: style 46 | # macos is actually better here due to native binary packages 47 | runs-on: macos-latest 48 | env: 49 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 50 | steps: 51 | - uses: actions/checkout@v4 52 | - uses: r-lib/actions/pr-fetch@v2 53 | with: 54 | repo-token: ${{ secrets.GITHUB_TOKEN }} 55 | - uses: r-lib/actions/setup-r@v2 56 | - name: Configure Git identity 57 | run: | 58 | env | sort 59 | git config --local user.name "$GITHUB_ACTOR" 60 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 61 | shell: bash 62 | - name: Install dependencies 63 | run: | 64 | install.packages(c("styler", "roxygen2"), type = "binary") 65 | shell: Rscript {0} 66 | - name: Style 67 | run: | 68 | styler::style_pkg(strict = FALSE) 69 | shell: Rscript {0} 70 | - name: commit 71 | run: | 72 | if [ -n "$(git status --porcelain '*.R' '*.Rmd')" ]; then 73 | git add '*.R' '*.Rmd' 74 | git commit -m 'Style' 75 | fi 76 | - uses: r-lib/actions/pr-push@v2 77 | with: 78 | repo-token: ${{ secrets.GITHUB_TOKEN }} 79 | merge: 80 | if: startsWith(github.event.comment.body, '/merge') 81 | name: merge 82 | runs-on: ubuntu-22.04 83 | steps: 84 | - name: Create and merge pull request 85 | run: | 86 | set -exo pipefail 87 | 
PR_DETAILS=$( curl -s --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.issue.number }} ) 88 | echo "$PR_DETAILS" | jq . 89 | PR_BASE=$(echo "$PR_DETAILS" | jq -r .base.ref) 90 | PR_HEAD=$(echo "$PR_DETAILS" | jq -r .head.ref) 91 | PR_URL=$(curl -s -X POST --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" --data '{ "head": "'$PR_BASE'", "base": "'$PR_HEAD'", "title": "Merge back PR target branch", "body": "Target: #${{ github.event.issue.number }}" }' https://api.github.com/repos/${{ github.repository }}/pulls | jq -r .url ) 92 | echo $PR_URL 93 | # Merging here won't run CI/CD 94 | # curl -s -X PUT --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" $PR_URL/merge 95 | # A mock job just to ensure we have a successful build status 96 | finish: 97 | runs-on: ubuntu-22.04 98 | steps: 99 | - run: true 100 | -------------------------------------------------------------------------------- /.github/workflows/rate-limit/action.yml: -------------------------------------------------------------------------------- 1 | name: "Check GitHub rate limits" 2 | inputs: 3 | token: # id of input 4 | description: GitHub token, pass secrets.GITHUB_TOKEN 5 | required: true 6 | 7 | runs: 8 | using: "composite" 9 | steps: 10 | - name: Check rate limits 11 | run: | 12 | curl -s --header "authorization: Bearer ${{ inputs.token }}" https://api.github.com/rate_limit 13 | shell: bash 14 | -------------------------------------------------------------------------------- /.github/workflows/revdep.yaml: -------------------------------------------------------------------------------- 1 | # This workflow creates many jobs, run only when a branch is created 2 | on: 3 | push: 4 | branches: 5 | - "revdep*" # never run automatically on main branch 6 | 7 | name: revdep 8 | 9 | jobs: 10 | matrix: 11 | runs-on: ubuntu-22.04 12 | outputs: 13 | matrix: ${{ steps.set-matrix.outputs.matrix }} 14 | 15 | 
name: Collect revdeps 16 | 17 | env: 18 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 19 | RSPM: https://packagemanager.rstudio.com/cran/__linux__/bionic/latest 20 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 21 | # prevent rgl issues because no X11 display is available 22 | RGL_USE_NULL: true 23 | # Begin custom: env vars 24 | # End custom: env vars 25 | 26 | steps: 27 | - name: Check rate limits 28 | run: | 29 | curl -s --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/rate_limit 30 | shell: bash 31 | 32 | - uses: actions/checkout@v4 33 | 34 | # FIXME: Avoid reissuing successful jobs 35 | # https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#list-jobs-for-a-workflow-run 36 | # https://docs.github.com/en/free-pro-team@latest/rest/reference/actions#workflow-runs 37 | - id: set-matrix 38 | run: | 39 | package <- read.dcf("DESCRIPTION")[, "Package"][[1]] 40 | deps <- tools:::package_dependencies(package, reverse = TRUE, which = c("Depends", "Imports", "LinkingTo", "Suggests"))[[1]] 41 | json <- paste0( 42 | '{"package":[', 43 | paste0('"', deps, '"', collapse = ","), 44 | ']}' 45 | ) 46 | writeLines(json) 47 | writeLines(paste0("matrix=", json), Sys.getenv("GITHUB_OUTPUT")) 48 | shell: Rscript {0} 49 | 50 | check-matrix: 51 | runs-on: ubuntu-22.04 52 | needs: matrix 53 | steps: 54 | - name: Install json2yaml 55 | run: | 56 | sudo npm install -g json2yaml 57 | 58 | - name: Check matrix definition 59 | run: | 60 | matrix='${{ needs.matrix.outputs.matrix }}' 61 | echo $matrix 62 | echo $matrix | jq .
63 | echo $matrix | json2yaml 64 | 65 | R-CMD-check: 66 | needs: matrix 67 | 68 | runs-on: ubuntu-22.04 69 | 70 | name: 'revdep: ${{ matrix.package }}' 71 | 72 | # Begin custom: services 73 | # End custom: services 74 | 75 | strategy: 76 | fail-fast: false 77 | matrix: ${{fromJson(needs.matrix.outputs.matrix)}} 78 | 79 | env: 80 | R_REMOTES_NO_ERRORS_FROM_WARNINGS: true 81 | RSPM: https://packagemanager.rstudio.com/cran/__linux__/bionic/latest 82 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 83 | # prevent rgl issues because no X11 display is available 84 | RGL_USE_NULL: true 85 | # Begin custom: env vars 86 | # End custom: env vars 87 | 88 | steps: 89 | - name: Check rate limits 90 | run: | 91 | curl -s --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/rate_limit 92 | shell: bash 93 | 94 | - uses: actions/checkout@v4 95 | 96 | # Begin custom: before install 97 | # End custom: before install 98 | 99 | - name: Use RSPM 100 | run: | 101 | mkdir -p /home/runner/work/_temp/Library 102 | echo 'local({release <- system2("lsb_release", "-sc", stdout = TRUE); options(repos=c(CRAN = paste0("https://packagemanager.rstudio.com/all/__linux__/", release, "/latest")), HTTPUserAgent = sprintf("R/%s R (%s)", getRversion(), paste(getRversion(), R.version$platform, R.version$arch, R.version$os)))}); .libPaths("/home/runner/work/_temp/Library")' | sudo tee /etc/R/Rprofile.site 103 | 104 | - name: Install remotes 105 | run: | 106 | if (!requireNamespace("curl", quietly = TRUE)) install.packages("curl") 107 | if (!requireNamespace("remotes", quietly = TRUE)) install.packages("remotes") 108 | shell: Rscript {0} 109 | 110 | - uses: r-lib/actions/setup-pandoc@v2 111 | 112 | - name: Install system dependencies 113 | if: runner.os == 'Linux' 114 | run: | 115 | sudo apt-get update -y 116 | Rscript -e 'writeLines(remotes::system_requirements("ubuntu", "22.04")); package <- "${{ matrix.package }}"; deps <- tools::package_dependencies(package, which = 
"Suggests")[[1]]; lapply(c(package, deps), function(x) { writeLines(remotes::system_requirements("ubuntu", "22.04", package = x)) })' | sort | uniq > .github/deps.sh 117 | cat .github/deps.sh 118 | sudo sh < .github/deps.sh 119 | 120 | - name: Install package 121 | run: | 122 | package <- "${{ matrix.package }}" 123 | install.packages(package, dependencies = TRUE) 124 | remotes::install_cran("rcmdcheck") 125 | shell: Rscript {0} 126 | 127 | - name: Session info old 128 | run: | 129 | options(width = 100) 130 | if (!requireNamespace("sessioninfo", quietly = TRUE)) install.packages("sessioninfo") 131 | pkgs <- installed.packages()[, "Package"] 132 | sessioninfo::session_info(pkgs, include_base = TRUE) 133 | shell: Rscript {0} 134 | 135 | # Begin custom: after install 136 | # End custom: after install 137 | 138 | - name: Check old 139 | env: 140 | _R_CHECK_CRAN_INCOMING_: false 141 | _R_CHECK_SYSTEM_CLOCK_: false 142 | _R_CHECK_FUTURE_FILE_TIMESTAMPS_: false 143 | # Avoid downloading binary package from RSPM 144 | run: | 145 | package <- "${{ matrix.package }}" 146 | options(HTTPUserAgent = "gha") 147 | path <- download.packages(package, destdir = ".github")[, 2] 148 | print(path) 149 | 150 | dir <- file.path("revdep", package) 151 | dir.create(dir, showWarnings = FALSE, recursive = TRUE) 152 | check <- rcmdcheck::rcmdcheck(path, args = c("--no-manual", "--as-cran"), error_on = "never", check_dir = file.path(dir, "check")) 153 | file.rename(file.path(dir, "check"), file.path(dir, "old")) 154 | saveRDS(check, file.path(dir, "old.rds")) 155 | shell: Rscript {0} 156 | 157 | - name: Install local package 158 | run: | 159 | remotes::install_local(".", force = TRUE) 160 | shell: Rscript {0} 161 | 162 | - name: Session info new 163 | run: | 164 | options(width = 100) 165 | pkgs <- installed.packages()[, "Package"] 166 | sessioninfo::session_info(pkgs, include_base = TRUE) 167 | shell: Rscript {0} 168 | 169 | - name: Check new 170 | env: 171 | _R_CHECK_CRAN_INCOMING_: false 
172 | _R_CHECK_SYSTEM_CLOCK_: false 173 | _R_CHECK_FUTURE_FILE_TIMESTAMPS_: false 174 | run: | 175 | package <- "${{ matrix.package }}" 176 | path <- dir(".github", pattern = paste0("^", package), full.names = TRUE)[[1]] 177 | print(path) 178 | 179 | dir <- file.path("revdep", package) 180 | check <- rcmdcheck::rcmdcheck(path, args = c("--no-manual", "--as-cran"), error_on = "never", check_dir = file.path(dir, "check")) 181 | file.rename(file.path(dir, "check"), file.path(dir, "new")) 182 | saveRDS(check, file.path(dir, "new.rds")) 183 | shell: Rscript {0} 184 | 185 | - name: Compare 186 | run: | 187 | package <- "${{ matrix.package }}" 188 | dir <- file.path("revdep", package) 189 | old <- readRDS(file.path(dir, "old.rds")) 190 | new <- readRDS(file.path(dir, "new.rds")) 191 | compare <- rcmdcheck::compare_checks(old, new) 192 | compare 193 | cmp <- compare$cmp 194 | if (!identical(cmp[cmp$which == "old", "output"], cmp[cmp$which == "new", "output"])) { 195 | if (!requireNamespace("waldo", quietly = TRUE)) install.packages("waldo") 196 | print(waldo::compare(old, new)) 197 | 198 | stop("Check output differs.") 199 | } 200 | shell: Rscript {0} 201 | 202 | - name: Upload check results 203 | if: failure() 204 | uses: actions/upload-artifact@main 205 | with: 206 | name: ${{ matrix.package }}-results 207 | path: revdep/${{ matrix.package }} 208 | 209 | - name: Check rate limits 210 | if: always() 211 | run: | 212 | curl -s --header "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/rate_limit 213 | shell: bash 214 | -------------------------------------------------------------------------------- /.github/workflows/roxygenize/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to create documentation with roxygen2" 2 | 3 | runs: 4 | using: "composite" 5 | steps: 6 | - name: Roxygenize 7 | run: | 8 | try(roxygen2::roxygenize()) 9 | shell: Rscript {0} 10 | 
-------------------------------------------------------------------------------- /.github/workflows/style/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to auto-style a package" 2 | 3 | runs: 4 | using: "composite" 5 | steps: 6 | - name: Check styler options 7 | id: check 8 | run: | 9 | set -x 10 | scope=$( ( grep Config/autostyle/scope DESCRIPTION || true ) | cut -d " " -f 2) 11 | strict=$( ( grep Config/autostyle/strict DESCRIPTION || true ) | cut -d " " -f 2) 12 | rmd=$( ( grep Config/autostyle/rmd DESCRIPTION || true ) | cut -d " " -f 2) 13 | echo scope=$scope >> $GITHUB_OUTPUT 14 | echo strict=$strict >> $GITHUB_OUTPUT 15 | echo rmd=$rmd >> $GITHUB_OUTPUT 16 | shell: bash 17 | 18 | - uses: actions/cache@v4 19 | if: ${{ steps.check.outputs.scope }} 20 | with: 21 | path: | 22 | ~/.cache/R/R.cache 23 | key: ${{ runner.os }}-2-${{ github.run_id }}- 24 | restore-keys: | 25 | ${{ runner.os }}-2- 26 | 27 | - name: Imprint run ID 28 | if: ${{ steps.check.outputs.scope }} 29 | run: | 30 | mkdir -p ~/.cache/R/R.cache/styler 31 | touch ~/.cache/R/R.cache/${{ github.run_id }} 32 | shell: bash 33 | 34 | - name: Show cache 35 | if: ${{ steps.check.outputs.scope }} 36 | run: | 37 | ls -l ~/.cache/R/R.cache 38 | ls -l ~/.cache/R/R.cache/styler 39 | shell: bash 40 | 41 | - name: Enable styler cache 42 | if: ${{ steps.check.outputs.scope }} 43 | run: | 44 | styler::cache_activate(verbose = TRUE) 45 | shell: Rscript {0} 46 | 47 | - name: Run styler 48 | if: ${{ steps.check.outputs.scope }} 49 | run: | 50 | strict <- as.logical("${{ steps.check.outputs.strict }}") 51 | if (is.na(strict)) { 52 | strict <- FALSE 53 | } 54 | rmd <- as.logical("${{ steps.check.outputs.rmd }}") 55 | if (is.na(rmd)) { 56 | rmd <- TRUE 57 | } 58 | styler::style_pkg( 59 | scope = "${{ steps.check.outputs.scope }}", 60 | strict = strict, 61 | filetype = c("R", "Rprofile", if (rmd) c("Rmd", "Rmarkdown", "Rnw", "Qmd")) 62 | ) 63 | shell: 
Rscript {0} 64 | 65 | - name: Show cache again 66 | if: ${{ steps.check.outputs.scope }} 67 | run: | 68 | ls -l ~/.cache/R/R.cache 69 | ls -l ~/.cache/R/R.cache/styler 70 | gdu -s --inodes ~/.cache/R/R.cache/styler/* || du -s --inodes ~/.cache/R/R.cache/styler/* 71 | shell: bash 72 | -------------------------------------------------------------------------------- /.github/workflows/update-snapshots/action.yml: -------------------------------------------------------------------------------- 1 | name: "Action to create pull requests for updated testthat snapshots" 2 | description: > 3 | This action will run `testthat::test_local()` for tests that seem to use snapshots, 4 | this is determined by reading and grepping the test files. 5 | If the tests are failing, snapshots are updated, and a pull request is opened. 6 | inputs: 7 | base: 8 | description: "The base branch to create the pull request against." 9 | required: false 10 | default: "main" 11 | 12 | runs: 13 | using: "composite" 14 | steps: 15 | - name: Run tests on test files that use snapshots 16 | id: run-tests 17 | run: | 18 | ## -- Run tests on test files that use snapshots -- 19 | rx <- "^test-(.*)[.][rR]$" 20 | files <- dir("tests/testthat", pattern = rx) 21 | has_snapshot <- vapply(files, function(.x) any(grepl("snapshot", readLines(file.path("tests/testthat", .x)), fixed = TRUE)), logical(1)) 22 | if (any(has_snapshot)) { 23 | patterns <- gsub(rx, "^\\1$", files[has_snapshot]) 24 | pattern <- paste0(patterns, collapse = "|") 25 | tryCatch( 26 | { 27 | result <- as.data.frame(testthat::test_local(pattern = pattern, reporter = "silent", stop_on_failure = FALSE)) 28 | print(result) 29 | failures <- result[result$failed + result$warning > 0, ] 30 | if (nrow(failures) > 0) { 31 | writeLines("Snapshot tests failed/warned.") 32 | print(failures[names(failures) != "result"]) 33 | print(failures$result) 34 | testthat::snapshot_accept() 35 | writeLines("changed=true", Sys.getenv("GITHUB_OUTPUT")) 36 | } else { 37 
| writeLines("Snapshot tests ran successfully.") 38 | } 39 | }, 40 | error = print 41 | ) 42 | } else { 43 | writeLines("No snapshots found.") 44 | } 45 | shell: Rscript {0} 46 | 47 | - name: Add snapshots to Git 48 | if: ${{ steps.run-tests.outputs.changed }} 49 | run: | 50 | ## -- Add snapshots to Git -- 51 | mkdir -p tests/testthat/_snaps 52 | git add -- tests/testthat/_snaps 53 | shell: bash 54 | 55 | - name: Check changed files 56 | if: ${{ steps.run-tests.outputs.changed }} 57 | id: check-changed 58 | run: | 59 | echo "changed=$(git status --porcelain -- tests/testthat/_snaps | head -n 1)" >> $GITHUB_OUTPUT 60 | shell: bash 61 | 62 | - name: Derive branch name 63 | if: ${{ steps.check-changed.outputs.changed }} 64 | id: matrix-desc 65 | run: | 66 | config=$(echo '${{ toJSON(matrix) }}' | jq -c .) 67 | echo "text=$(echo ${config})" >> $GITHUB_OUTPUT 68 | echo "branch=$(echo ${config} | sed -r 's/[^0-9a-zA-Z]+/-/g;s/^-//;s/-$//')" >> $GITHUB_OUTPUT 69 | shell: bash 70 | 71 | - name: Create pull request 72 | if: ${{ steps.check-changed.outputs.changed }} 73 | id: cpr 74 | uses: peter-evans/create-pull-request@v6 75 | with: 76 | base: ${{ inputs.base }} 77 | branch: snapshot-${{ inputs.base }}-${{ github.job }}-${{ steps.matrix-desc.outputs.branch }} 78 | delete-branch: true 79 | title: "test: Snapshot updates for ${{ github.job }} (${{ steps.matrix-desc.outputs.text }})" 80 | body: "Automated changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action${{ github.event.number && format(' for #{0}', github.event.number) || '' }}." 
81 | add-paths: | 82 | tests/testthat/_snaps 83 | 84 | - name: Fail if pull request created 85 | if: ${{ steps.cpr.outputs.pull-request-number }} 86 | run: | 87 | false 88 | shell: bash 89 | -------------------------------------------------------------------------------- /.github/workflows/versions-matrix/action.R: -------------------------------------------------------------------------------- 1 | # Determine active versions of R to test against 2 | tags <- xml2::read_html("https://svn.r-project.org/R/tags/") 3 | 4 | bullets <- 5 | tags |> 6 | xml2::xml_find_all("//li") |> 7 | xml2::xml_text() 8 | 9 | version_bullets <- grep("^R-([0-9]+-[0-9]+-[0-9]+)/$", bullets, value = TRUE) 10 | versions <- unique(gsub("^R-([0-9]+)-([0-9]+)-[0-9]+/$", "\\1.\\2", version_bullets)) 11 | 12 | r_release <- head(sort(as.package_version(versions), decreasing = TRUE), 5) 13 | 14 | deps <- desc::desc_get_deps() 15 | r_crit <- deps$version[deps$package == "R"] 16 | if (length(r_crit) == 1) { 17 | min_r <- as.package_version(gsub("^>= ([0-9]+[.][0-9]+)(?:.*)$", "\\1", r_crit)) 18 | r_release <- r_release[r_release >= min_r] 19 | } 20 | 21 | r_versions <- c("devel", as.character(r_release)) 22 | 23 | macos <- data.frame(os = "macos-latest", r = r_versions[2:3]) 24 | windows <- data.frame(os = "windows-latest", r = r_versions[1:3]) 25 | linux_devel <- data.frame(os = "ubuntu-22.04", r = r_versions[1], `http-user-agent` = "release", check.names = FALSE) 26 | linux <- data.frame(os = "ubuntu-22.04", r = r_versions[-1]) 27 | covr <- data.frame(os = "ubuntu-22.04", r = r_versions[2], covr = "true", desc = "with covr") 28 | 29 | include_list <- list(macos, windows, linux_devel, linux, covr) 30 | 31 | if (file.exists(".github/versions-matrix.R")) { 32 | custom <- source(".github/versions-matrix.R")$value 33 | if (is.data.frame(custom)) { 34 | custom <- list(custom) 35 | } 36 | include_list <- c(include_list, custom) 37 | } 38 | 39 | print(include_list) 40 | 41 | filter <- 
read.dcf("DESCRIPTION")[1, ]["Config/gha/filter"] 42 | if (!is.na(filter)) { 43 | filter_expr <- parse(text = filter)[[1]] 44 | subset_fun_expr <- bquote(function(x) subset(x, .(filter_expr))) 45 | subset_fun <- eval(subset_fun_expr) 46 | include_list <- lapply(include_list, subset_fun) 47 | print(include_list) 48 | } 49 | 50 | to_json <- function(x) { 51 | if (nrow(x) == 0) return(character()) 52 | parallel <- vector("list", length(x)) 53 | for (i in seq_along(x)) { 54 | parallel[[i]] <- paste0('"', names(x)[[i]], '":"', x[[i]], '"') 55 | } 56 | paste0("{", do.call(paste, c(parallel, sep = ",")), "}") 57 | } 58 | 59 | configs <- unlist(lapply(include_list, to_json)) 60 | json <- paste0('{"include":[', paste(configs, collapse = ","), "]}") 61 | 62 | if (Sys.getenv("GITHUB_OUTPUT") != "") { 63 | writeLines(paste0("matrix=", json), Sys.getenv("GITHUB_OUTPUT")) 64 | } 65 | writeLines(json) 66 | -------------------------------------------------------------------------------- /.github/workflows/versions-matrix/action.yml: -------------------------------------------------------------------------------- 1 | name: "Actions to compute a matrix with all R and OS versions" 2 | 3 | outputs: 4 | matrix: 5 | description: "Generated matrix" 6 | value: ${{ steps.set-matrix.outputs.matrix }} 7 | 8 | runs: 9 | using: "composite" 10 | steps: 11 | - name: Install json2yaml 12 | run: | 13 | sudo npm install -g json2yaml 14 | shell: bash 15 | 16 | - id: set-matrix 17 | run: | 18 | Rscript ./.github/workflows/versions-matrix/action.R 19 | shell: bash 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | As contributors and 
maintainers of this project, we pledge to respect all people who 4 | contribute through reporting issues, posting feature requests, updating documentation, 5 | submitting pull requests or patches, and other activities. 6 | 7 | We are committed to making participation in this project a harassment-free experience for 8 | everyone, regardless of level of experience, gender, gender identity and expression, 9 | sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion. 10 | 11 | Examples of unacceptable behavior by participants include the use of sexual language or 12 | imagery, derogatory comments or personal attacks, trolling, public or private harassment, 13 | insults, or other unprofessional conduct. 14 | 15 | Project maintainers have the right and responsibility to remove, edit, or reject comments, 16 | commits, code, wiki edits, issues, and other contributions that are not aligned to this 17 | Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed 18 | from the project team. 19 | 20 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 21 | opening an issue or contacting one or more of the project maintainers. 22 | 23 | This Code of Conduct is adapted from the Contributor Covenant 24 | (https://www.contributor-covenant.org), version 1.0.0, available at 25 | https://contributor-covenant.org/version/1/0/0/. 26 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: dblog 2 | Title: Logging for DBI 3 | Version: 0.0.0.9026 4 | Date: 2024-12-09 5 | Authors@R: 6 | person(given = "Kirill", 7 | family = "Müller", 8 | role = c("aut", "cre"), 9 | email = "krlmlr+r@mailbox.org", 10 | comment = c(ORCID = "0000-0002-1416-3412")) 11 | Description: Provides logging of DBI methods for arbitrary backends. 
12 | License: GPL-3 13 | URL: https://dblog.r-dbi.org, https://github.com/r-dbi/dblog 14 | BugReports: https://github.com/r-dbi/dblog/issues 15 | Imports: 16 | collections (>= 0.1.4), 17 | DBI, 18 | glue, 19 | methods, 20 | purrr, 21 | rlang 22 | Encoding: UTF-8 23 | Roxygen: list(markdown = TRUE) 24 | RoxygenNote: 7.3.2.9000 25 | Collate: 26 | 'Driver.R' 27 | 'Connection.R' 28 | 'Result.R' 29 | 'deparse.R' 30 | 'import.R' 31 | 'make-log-call.R' 32 | Suggests: 33 | testthat (>= 3.0.0) 34 | Config/testthat/edition: 3 35 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(format,dblogConnection) 4 | export(dblog) 5 | export(dblog_cnr) 6 | export(get_default_logger) 7 | export(make_collect_logger) 8 | export(make_text_logger) 9 | import(DBI) 10 | import(methods) 11 | import(rlang) 12 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # dblog 0.0.0.9026 4 | 5 | ## Continuous integration 6 | 7 | - Avoid failure in fledge workflow if no changes (#28). 8 | 9 | 10 | # dblog 0.0.0.9025 11 | 12 | ## Continuous integration 13 | 14 | - Fetch tags for fledge workflow to avoid unnecessary NEWS entries (#27). 15 | 16 | 17 | # dblog 0.0.0.9024 18 | 19 | ## Continuous integration 20 | 21 | - Use larger retry count for lock-threads workflow (#26). 22 | 23 | 24 | # dblog 0.0.0.9023 25 | 26 | ## Continuous integration 27 | 28 | - Ignore errors when removing pkg-config on macOS (#25). 29 | 30 | 31 | # dblog 0.0.0.9022 32 | 33 | ## Continuous integration 34 | 35 | - Explicit permissions (#24). 36 | 37 | 38 | # dblog 0.0.0.9021 39 | 40 | ## Continuous integration 41 | 42 | - Use styler from main branch (#23). 
43 | 44 | 45 | # dblog 0.0.0.9020 46 | 47 | ## Continuous integration 48 | 49 | - Need to install R on Ubuntu 24.04 (#22). 50 | 51 | - Trigger run (#16). 52 | 53 | - Use Ubuntu 24.04 and styler PR (#20). 54 | 55 | 56 | # dblog 0.0.0.9019 57 | 58 | ## Continuous integration 59 | 60 | - Correctly detect branch protection (#19). 61 | 62 | 63 | # dblog 0.0.0.9018 64 | 65 | ## Continuous integration 66 | 67 | - Use stable pak (#18). 68 | 69 | 70 | # dblog 0.0.0.9017 71 | 72 | ## Continuous integration 73 | 74 | - Trigger run (#17). 75 | 76 | - ci: Trigger run 77 | 78 | - ci: Latest changes 79 | 80 | 81 | # dblog 0.0.0.9016 82 | 83 | ## Continuous integration 84 | 85 | - Trigger run (#16). 86 | 87 | - Use pkgdown branch (#15). 88 | 89 | - ci: Use pkgdown branch 90 | 91 | - ci: Updates from duckdb 92 | 93 | 94 | # dblog 0.0.0.9015 95 | 96 | ## Continuous integration 97 | 98 | - Install via R CMD INSTALL ., not pak (#14). 99 | 100 | - ci: Install via R CMD INSTALL ., not pak 101 | 102 | - ci: Bump version of upload-artifact action 103 | 104 | 105 | # dblog 0.0.0.9014 106 | 107 | ## Chore 108 | 109 | - Auto-update from GitHub Actions. 110 | 111 | Run: https://github.com/r-dbi/dblog/actions/runs/10425482998 112 | 113 | - Auto-update from GitHub Actions. 114 | 115 | Run: https://github.com/r-dbi/dblog/actions/runs/10200110967 116 | 117 | - Auto-update from GitHub Actions. 118 | 119 | Run: https://github.com/r-dbi/dblog/actions/runs/9728444611 120 | 121 | - Auto-update from GitHub Actions. 122 | 123 | Run: https://github.com/r-dbi/dblog/actions/runs/9691619402 124 | 125 | ## Continuous integration 126 | 127 | - Install local package for pkgdown builds. 128 | 129 | - Improve support for protected branches with fledge. 130 | 131 | - Improve support for protected branches, without fledge. 132 | 133 | - Sync with latest developments. 134 | 135 | - Use v2 instead of master. 136 | 137 | - Inline action. 138 | 139 | - Use dev roxygen2 and decor. 
140 | 141 | - Fix on Windows, tweak lock workflow. 142 | 143 | - Avoid checking bashisms on Windows. 144 | 145 | - Better commit message. 146 | 147 | - Bump versions, better default, consume custom matrix. 148 | 149 | - Recent updates. 150 | 151 | 152 | # dblog 0.0.0.9013 153 | 154 | - Internal changes only. 155 | 156 | 157 | # dblog 0.0.0.9012 158 | 159 | - Internal changes only. 160 | 161 | 162 | # dblog 0.0.0.9011 163 | 164 | - Internal changes only. 165 | 166 | 167 | # dblog 0.0.0.9010 168 | 169 | - Internal changes only. 170 | 171 | 172 | # dblog 0.0.0.9009 173 | 174 | - Harmonize yaml formatting. 175 | 176 | - Revert changes to matrix section. 177 | 178 | - Reduce parallelism. 179 | 180 | - Also check dev on cran-* branches. 181 | 182 | - Update hash key for dev. 183 | 184 | - Remove R 3.3. 185 | 186 | - Merge pull request #12 from r-dbi/f-safe-deparse. 187 | 188 | - Work around problems with names containing backslashes in `deparse()` (#12). 189 | 190 | - Work around duckdb storing external pointers in the driver object. 191 | 192 | - Add dbCreateTable() and dbAppendTable() methods. 193 | 194 | 195 | # dblog 0.0.0.9008 196 | 197 | - New `dblog_cnr()` (#8). 198 | 199 | 200 | # dblog 0.0.0.9007 201 | 202 | - Add `display` argument to collecting logger to combine printing and collecting (#7). 203 | - Override `dbQuoteString()` for `"SQL"` class. 204 | - Override `dbQuoteLiteral()`. 205 | 206 | 207 | # dblog 0.0.0.9006 208 | 209 | - Dynamic instantiation of S4 classes. 210 | - Failed calls are wrapped with `try()`. 211 | 212 | 213 | # dblog 0.0.0.9005 214 | 215 | - Fix logging output to file, add line break. 216 | - Switch to `deparse(backtick = FALSE)` to create parseable code. 217 | - Fix error when wrapped DBI method fails. 218 | 219 | 220 | # dblog 0.0.0.9004 221 | 222 | - Same as previous version. 223 | 224 | 225 | # dblog 0.0.0.9003 226 | 227 | - Support logging of calls to DBIResult. 228 | - Remove dependency on evaluate package. 
229 | - New `make_text_logger()`, renamed from `make_console_logger()`. 230 | - `make_console_logger()` gains `path` argument, is useful again. 231 | 232 | 233 | # dblog 0.0.0.9001 234 | 235 | Initial prototype. Exported functions: 236 | 237 | - `dblog()` wraps an arbitrary DBI driver with a custom logger. 238 | - `get_default_logger()` is the default console logger. 239 | - `make_console_logger()` creates a new console logger (for internal use only). 240 | - `make_collect_logger()` creates a new logger that collects logging output and emits all at once. 241 | -------------------------------------------------------------------------------- /R/Connection.R: -------------------------------------------------------------------------------- 1 | #' @include Driver.R 2 | NULL 3 | 4 | setClass("dblogConnection") 5 | 6 | #' @export 7 | format.dblogConnection <- function(x, ...) { 8 | paste0("Logging<", format(x@conn), ">") 9 | } 10 | 11 | make_connection_class <- function(base_class) { 12 | 13 | template_name <- "dblogConnection" 14 | class_name <- paste0(template_name, "-", base_class) 15 | all_base_classes <- c(template_name, base_class) 16 | 17 | if (isClass(class_name)) { 18 | return(class_name) 19 | } 20 | 21 | where <- parent.frame() 22 | 23 | setClass <- function(...) { 24 | methods::setClass(..., where = where, package = .packageName) 25 | } 26 | 27 | setMethod <- function(...) { 28 | methods::setMethod(..., where = where) 29 | } 30 | 31 | class <- setClass(class_name, 32 | contains = all_base_classes, slots = list(conn = base_class, log_call = "function")) 33 | 34 | setMethod( 35 | "show", class_name, 36 | function(object) { 37 | cat("\n") 38 | show(object@conn) 39 | }) 40 | 41 | setMethod( 42 | "dbIsValid", class_name, 43 | function(dbObj, ...) { 44 | dbObj@log_call(dbIsValid(dbObj@conn, !!! enquos(...))) 45 | }) 46 | 47 | setMethod( 48 | "dbDisconnect", class_name, 49 | function(conn, ...) { 50 | conn@log_call(dbDisconnect(conn@conn, !!! 
enquos(...))) 51 | }) 52 | 53 | setMethod( 54 | "dbSendQuery", c(class_name, "character"), 55 | function(conn, statement, ...) { 56 | conn@log_call(dbSendQuery(conn@conn, statement, !!! enquos(...))) 57 | }) 58 | 59 | setMethod( 60 | "dbGetQuery", c(class_name, "character"), 61 | function(conn, statement, ...) { 62 | conn@log_call(dbGetQuery(conn@conn, statement, !!! enquos(...))) 63 | }) 64 | 65 | setMethod( 66 | "dbSendStatement", c(class_name, "character"), 67 | function(conn, statement, ...) { 68 | conn@log_call(dbSendStatement(conn@conn, statement, !!! enquos(...))) 69 | }) 70 | 71 | setMethod( 72 | "dbDataType", class_name, 73 | function(dbObj, obj, ...) { 74 | dbObj@log_call(dbDataType(dbObj@conn, obj, !!! enquos(...))) 75 | }) 76 | 77 | setMethod( 78 | "dbQuoteString", c(class_name, "character"), 79 | function(conn, x, ...) { 80 | conn@log_call(dbQuoteString(conn@conn, x, !!! enquos(...))) 81 | }) 82 | 83 | setMethod( 84 | "dbQuoteString", c(class_name, "SQL"), 85 | function(conn, x, ...) { 86 | conn@log_call(dbQuoteString(conn@conn, x, !!! enquos(...))) 87 | }) 88 | 89 | setMethod( 90 | "dbQuoteIdentifier", class_name, 91 | function(conn, x, ...) { 92 | conn@log_call(dbQuoteIdentifier(conn@conn, x, !!! enquos(...))) 93 | }) 94 | 95 | setMethod( 96 | "dbQuoteIdentifier", c(class_name, "character"), 97 | function(conn, x, ...) { 98 | conn@log_call(dbQuoteIdentifier(conn@conn, x, !!! enquos(...))) 99 | }) 100 | 101 | setMethod( 102 | "dbQuoteIdentifier", c(class_name, "SQL"), 103 | function(conn, x, ...) { 104 | conn@log_call(dbQuoteIdentifier(conn@conn, x, !!! enquos(...))) 105 | }) 106 | 107 | setMethod( 108 | "dbQuoteIdentifier", c(class_name, "Id"), 109 | function(conn, x, ...) { 110 | conn@log_call(dbQuoteIdentifier(conn@conn, x, !!! enquos(...))) 111 | }) 112 | 113 | setMethod( 114 | "dbUnquoteIdentifier", class_name, 115 | function(conn, x, ...) { 116 | conn@log_call(dbUnquoteIdentifier(conn@conn, x, !!! 
enquos(...))) 117 | }) 118 | 119 | setMethod( 120 | "dbQuoteLiteral", class_name, 121 | function(conn, x, ...) { 122 | conn@log_call(dbQuoteLiteral(conn@conn, x, !!! enquos(...))) 123 | }) 124 | 125 | setMethod( 126 | "dbWriteTable", c(class_name, "character", "data.frame"), 127 | function(conn, name, value, overwrite = FALSE, append = FALSE, ...) { 128 | conn@log_call(dbWriteTable(conn@conn, name = name, value = value, overwrite = overwrite, append = append, !!! enquos(...))) 129 | }) 130 | 131 | setMethod( 132 | "dbReadTable", c(class_name, "character"), 133 | function(conn, name, ...) { 134 | conn@log_call(dbReadTable(conn@conn, name = name, !!! enquos(...))) 135 | }) 136 | 137 | setMethod( 138 | "dbListTables", class_name, 139 | function(conn, ...) { 140 | conn@log_call(dbListTables(conn@conn, !!! enquos(...))) 141 | }) 142 | 143 | setMethod( 144 | "dbExistsTable", c(class_name, "character"), 145 | function(conn, name, ...) { 146 | conn@log_call(dbExistsTable(conn@conn, name, !!! enquos(...))) 147 | }) 148 | 149 | setMethod( 150 | "dbListFields", c(class_name, "character"), 151 | function(conn, name, ...) { 152 | conn@log_call(dbListFields(conn@conn, name, !!! enquos(...))) 153 | }) 154 | 155 | setMethod( 156 | "dbCreateTable", c(class_name), 157 | function(conn, name, fields, ...) { 158 | conn@log_call(dbCreateTable(conn@conn, name = name, fields = fields, !!! enquos(...))) 159 | }) 160 | 161 | setMethod( 162 | "dbAppendTable", c(class_name), 163 | function(conn, name, value, ..., row.names = NULL) { 164 | conn@log_call(dbAppendTable(conn@conn, name = name, value = value, !!! enquos(...), row.names = row.names)) 165 | }) 166 | 167 | setMethod( 168 | "dbRemoveTable", c(class_name, "character"), 169 | function(conn, name, ...) { 170 | conn@log_call(dbRemoveTable(conn@conn, name, !!! enquos(...))) 171 | }) 172 | 173 | setMethod( 174 | "dbGetInfo", class_name, 175 | function(dbObj, ...) { 176 | dbObj@log_call(dbGetInfo(dbObj@conn, !!! 
enquos(...))) 177 | }) 178 | 179 | setMethod( 180 | "dbBegin", class_name, 181 | function(conn, ...) { 182 | conn@log_call(dbBegin(conn@conn, !!! enquos(...))) 183 | }) 184 | 185 | setMethod( 186 | "dbCommit", class_name, 187 | function(conn, ...) { 188 | conn@log_call(dbCommit(conn@conn, !!! enquos(...))) 189 | }) 190 | 191 | setMethod( 192 | "dbRollback", class_name, 193 | function(conn, ...) { 194 | conn@log_call(dbRollback(conn@conn, !!! enquos(...))) 195 | }) 196 | 197 | class_name 198 | } 199 | -------------------------------------------------------------------------------- /R/Driver.R: -------------------------------------------------------------------------------- 1 | #' DBI methods 2 | #' 3 | #' Implementations of pure virtual functions defined in the `DBI` package. 4 | #' @name DBI 5 | NULL 6 | 7 | #' dblog driver and connector 8 | #' 9 | #' TBD. 10 | #' 11 | #' @export 12 | #' @param drv Driver to be wrapped, object of class [DBI::DBIDriver-class]. 13 | #' @param logger Logger object, defaults to [get_default_logger()]. 14 | #' @import methods DBI 15 | #' @examples 16 | #' \dontrun{ 17 | #' library(DBI) 18 | #' drv <- dblog(RSQLite::SQLite()) 19 | #' } 20 | dblog <- function(drv, logger = get_default_logger()) { 21 | if (is(drv, "duckdb_driver")) { 22 | # duckdb::duckdb() 23 | # Avoid suggesting package 24 | return(logger$log_call(!! new_call(call("::", sym("duckdb"), sym("duckdb"))))) 25 | } 26 | expr <- gsub(", [)]$", ")", deparse(drv)) 27 | quo <- parse(text = expr)[[1]] 28 | logger$log_call(!! quo) 29 | } 30 | 31 | #' @param cnr Connector to be wrapped, object of class [DBI::DBIConnector-class].
32 | #' @rdname dblog 33 | #' @export 34 | dblog_cnr <- function(cnr, logger = get_default_logger()) { 35 | new( 36 | "DBIConnector", 37 | .drv = dblog(cnr@.drv, logger = logger), 38 | .conn_args = cnr@.conn_args 39 | ) 40 | } 41 | 42 | setClass("dblogDriver") 43 | 44 | make_driver_class <- function(base_class) { 45 | 46 | template_name <- "dblogDriver" 47 | class_name <- paste0(template_name, "-", base_class) 48 | all_base_classes <- c(template_name, base_class) 49 | 50 | if (isClass(class_name)) { 51 | return(class_name) 52 | } 53 | 54 | where <- parent.frame() 55 | 56 | setClass <- function(...) { 57 | methods::setClass(..., where = where, package = .packageName) 58 | } 59 | 60 | setMethod <- function(...) { 61 | methods::setMethod(..., where = where) 62 | } 63 | 64 | class <- setClass(class_name, 65 | contains = all_base_classes, slots = list(drv = base_class, log_call = "function")) 66 | 67 | setMethod( 68 | "show", class_name, 69 | function(object) { 70 | cat("\n") 71 | show(object@drv) 72 | }) 73 | 74 | setMethod( 75 | "dbConnect", class_name, 76 | function(drv, ...) { 77 | drv@log_call(dbConnect(drv@drv, !!! enquos(...))) 78 | } 79 | ) 80 | 81 | setMethod( 82 | "dbDataType", class_name, 83 | function(dbObj, obj, ...) { 84 | dbObj@log_call(dbDataType(dbObj@drv, obj, !!! enquos(...))) 85 | }) 86 | 87 | setMethod( 88 | "dbIsValid", class_name, 89 | function(dbObj, ...) { 90 | dbObj@log_call(dbIsValid(dbObj@drv, !!! enquos(...))) 91 | }) 92 | 93 | setMethod( 94 | "dbGetInfo", class_name, 95 | function(dbObj, ...) { 96 | dbObj@log_call(dbGetInfo(dbObj@drv, !!! 
enquos(...))) 97 | }) 98 | 99 | class_name 100 | } 101 | -------------------------------------------------------------------------------- /R/Result.R: -------------------------------------------------------------------------------- 1 | #' @include Connection.R 2 | NULL 3 | 4 | setClass("dblogResult") 5 | 6 | make_result_class <- function(base_class) { 7 | 8 | template_name <- "dblogResult" 9 | class_name <- paste0(template_name, "-", base_class) 10 | all_base_classes <- c(template_name, base_class) 11 | 12 | if (isClass(class_name)) { 13 | return(class_name) 14 | } 15 | 16 | where <- parent.frame() 17 | 18 | setClass <- function(...) { 19 | methods::setClass(..., where = where, package = .packageName) 20 | } 21 | 22 | setMethod <- function(...) { 23 | methods::setMethod(..., where = where) 24 | } 25 | 26 | class <- setClass(class_name, 27 | contains = all_base_classes, slots = list(res = base_class, log_call = "function")) 28 | 29 | setMethod( 30 | "show", class_name, 31 | function(object) { 32 | cat("\n") 33 | show(object@res) 34 | }) 35 | 36 | setMethod( 37 | "dbClearResult", class_name, 38 | function(res, ...) { 39 | res@log_call(dbClearResult(res@res, !!! enquos(...))) 40 | }) 41 | 42 | setMethod( 43 | "dbFetch", class_name, 44 | function(res, n = -1, ...) { 45 | res@log_call(dbFetch(res@res, n = n, !!! enquos(...))) 46 | }) 47 | 48 | setMethod( 49 | "dbHasCompleted", class_name, 50 | function(res, ...) { 51 | res@log_call(dbHasCompleted(res@res, !!! enquos(...))) 52 | }) 53 | 54 | setMethod( 55 | "dbGetInfo", class_name, 56 | function(dbObj, ...) { 57 | dbObj@log_call(dbGetInfo(dbObj@res, !!! enquos(...))) 58 | }) 59 | 60 | setMethod( 61 | "dbIsValid", class_name, 62 | function(dbObj, ...) { 63 | dbObj@log_call(dbIsValid(dbObj@res, !!! enquos(...))) 64 | }) 65 | 66 | setMethod( 67 | "dbGetStatement", class_name, 68 | function(res, ...) { 69 | res@log_call(dbGetStatement(res@res, !!! 
enquos(...))) 70 | }) 71 | 72 | setMethod( 73 | "dbColumnInfo", class_name, 74 | function(res, ...) { 75 | res@log_call(dbColumnInfo(res@res, !!! enquos(...))) 76 | }) 77 | 78 | setMethod( 79 | "dbGetRowCount", class_name, 80 | function(res, ...) { 81 | res@log_call(dbGetRowCount(res@res, !!! enquos(...))) 82 | }) 83 | 84 | setMethod( 85 | "dbGetRowsAffected", class_name, 86 | function(res, ...) { 87 | res@log_call(dbGetRowsAffected(res@res, !!! enquos(...))) 88 | }) 89 | 90 | setMethod( 91 | "dbBind", class_name, 92 | function(res, params, ...) { 93 | res@log_call(dbBind(res@res, params, !!! enquos(...))) 94 | invisible(res) 95 | }) 96 | 97 | class_name 98 | } 99 | -------------------------------------------------------------------------------- /R/deparse.R: -------------------------------------------------------------------------------- 1 | safe_deparse <- function(x, width = getOption("width")) { 2 | out <- deparse(x, width.cutoff = width, backtick = TRUE) 3 | same <- tryCatch( 4 | identical(x, parse(text = out)[[1]]), 5 | error = function(e) FALSE 6 | ) 7 | if (same) { 8 | return(glue::as_glue(out)) 9 | } 10 | 11 | # Workaround for weird names like "" and "\n": 12 | glue::as_glue(deparse( 13 | x, width.cutoff = width, backtick = TRUE, 14 | control = c("keepNA", "keepInteger", "showAttributes") 15 | )) 16 | } 17 | 18 | expect_deparse <- function(x) { 19 | if (is.call(x)) { 20 | testthat::expect_identical(parse(text = safe_deparse(x))[[1]], x) 21 | } else { 22 | testthat::expect_identical(eval(parse(text = safe_deparse(x))[[1]]), x) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /R/import.R: -------------------------------------------------------------------------------- 1 | #' @import rlang 2 | NULL 3 | -------------------------------------------------------------------------------- /R/make-log-call.R: -------------------------------------------------------------------------------- 1 | make_log_call <- 
function(obj_name, log_obj) { 2 | force(obj_name) 3 | force(log_obj) 4 | 5 | s4_dict <- collections::Stack() 6 | 7 | find_s4_dict <- function(x) { 8 | all_s4 <- s4_dict$as_list() 9 | for (i in seq_along(all_s4)) { 10 | s4_i <- all_s4[[i]] 11 | if (identical(s4_i$obj, x)) { 12 | return(s4_i$name) 13 | } 14 | } 15 | 16 | # Not found 17 | x 18 | } 19 | 20 | add_s4_dict <- function(x) { 21 | if (inherits(x, "DBIResult")) prefix <- "res" 22 | else if (inherits(x, "DBIConnection")) prefix <- "conn" 23 | else if (inherits(x, "DBIDriver")) prefix <- "drv" 24 | else return(NULL) 25 | 26 | # Doesn't work yet (?) 27 | #if (!is.null(find_s4_dict(x))) return(NULL) 28 | 29 | all_s4 <- s4_dict$as_list() 30 | all_names <- purrr::map_chr(purrr::map(all_s4, "name"), as_string) 31 | 32 | prefix_names <- grep(paste0("^", prefix), all_names, value = TRUE) 33 | suffixes <- as.integer(gsub(paste0("^", prefix), "", prefix_names)) 34 | 35 | max_prefix <- max(c(suffixes, 0L)) 36 | 37 | new_name <- as.name(paste0(prefix, max_prefix + 1L)) 38 | 39 | s4_dict$push( 40 | list( 41 | obj = x, 42 | name = new_name 43 | ) 44 | ) 45 | 46 | new_name 47 | } 48 | 49 | clear_s4_dict <- function() { 50 | s4_dict$clear() 51 | } 52 | 53 | 54 | log_call <- function(call) { 55 | quo <- enquo(call) 56 | expr <- quo_get_expr(quo) 57 | env <- quo_get_env(quo) 58 | 59 | args <- purrr::map(as.list(expr[-1]), ~ eval_tidy(., env = env)) 60 | if (!is.null(obj_name)) { 61 | args[[1]] <- obj_name 62 | } 63 | 64 | args <- purrr::map(args, find_s4_dict) 65 | new_call <- call2(expr[[1]], !!!args) 66 | result <- NULL 67 | on.exit(log_obj$log(new_call, result)) 68 | 69 | visible_quo <- rlang::new_quosure(call2(withVisible, expr), env) 70 | result <- eval_tidy(visible_quo) 71 | 72 | new_obj <- add_s4_dict(result$value) 73 | if (!is.null(new_obj)) { 74 | new_call <- call2("<-", new_obj, new_call) 75 | result$value <- wrap(result$value, new_obj, log_obj) 76 | result$visible <- FALSE 77 | } 78 | 79 | if (result$visible) { 80 | 
result$value 81 | } else { 82 | invisible(result$value) 83 | } 84 | } 85 | 86 | log_call 87 | } 88 | 89 | wrap <- function(x, name, log_obj) { 90 | if (inherits(x, "DBIDriver")) { 91 | class_name <- make_driver_class(class(x)[[1]]) 92 | new(class_name, drv = x, log_call = make_log_call(name, log_obj)) 93 | } else if (inherits(x, "DBIConnection")) { 94 | class_name <- make_connection_class(class(x)[[1]]) 95 | new(class_name, conn = x, log_call = make_log_call(name, log_obj)) 96 | } else if (inherits(x, "DBIResult")) { 97 | class_name <- make_result_class(class(x)[[1]]) 98 | new(class_name, res = x, log_call = make_log_call(name, log_obj)) 99 | } else { 100 | abort(paste0("Unknown class: ", paste(class(x), collapse = "/"))) 101 | } 102 | } 103 | 104 | make_logger <- function(...) { 105 | logger <- list2(...) 106 | 107 | logger$log_call <- make_log_call(NULL, logger) 108 | logger 109 | } 110 | 111 | #' Logging parameters 112 | #' 113 | #' TBD. 114 | #' 115 | #' @export 116 | get_default_logger <- function() { 117 | default_logger 118 | } 119 | 120 | format_console <- function(call, result, width = 80) { 121 | local_options(width = width) 122 | 123 | if (is.null(result)) { 124 | call <- call("try", call) 125 | } 126 | 127 | call_fmt <- safe_deparse(call, width = width) 128 | if (isTRUE(result$visible)) { 129 | output <- utils::capture.output(print(result$value)) 130 | result_fmt <- paste0("## ", output) 131 | } else { 132 | result_fmt <- NULL 133 | } 134 | 135 | paste(c(call_fmt, result_fmt), collapse = "\n") 136 | } 137 | 138 | #' @export 139 | #' @param path Passed on to [cat()] for the output. Default: console output. 
140 | #' @rdname get_default_logger 141 | make_text_logger <- function(path = NULL) { 142 | if (is.null(path)) { 143 | path <- "" 144 | } 145 | 146 | make_logger( 147 | log = function(call, result) { 148 | cat(format_console(call, result), "\n", sep = "", file = path, append = TRUE) 149 | } 150 | ) 151 | } 152 | 153 | #' @export 154 | #' @rdname get_default_logger 155 | #' @param display `[logical(1)]`\cr 156 | #' Set to `TRUE` to display log entries in addition to writing 157 | #' to a file. 158 | make_collect_logger <- function(display = FALSE) { 159 | queue <- collections::Queue() 160 | force(display) 161 | 162 | make_logger( 163 | log = function(call, result) { 164 | fmt <- format_console(call, result) 165 | 166 | if (isTRUE(display)) { 167 | writeLines(fmt) 168 | } 169 | 170 | queue$push(fmt) 171 | }, 172 | 173 | retrieve = function() { 174 | glue::glue_collapse(as.character(queue$as_list()), sep = "\n") 175 | } 176 | ) 177 | } 178 | 179 | default_logger <- make_text_logger() 180 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: github_document 3 | --- 4 | 5 | 6 | 7 | ```{r, include = FALSE} 8 | knitr::opts_chunk$set( 9 | collapse = TRUE, 10 | comment = "#>", 11 | fig.path = "man/figures/README-", 12 | out.width = "100%" 13 | ) 14 | pkgload::load_all() 15 | ``` 16 | 17 | # dblog 18 | 19 | 20 | [![Lifecycle: experimental](https://img.shields.io/badge/lifecycle-experimental-orange.svg)](https://www.tidyverse.org/lifecycle/#experimental) 21 | [![CRAN status](https://www.r-pkg.org/badges/version/dblog)](https://cran.r-project.org/package=dblog) 22 | 23 | 24 | The goal of dblog is to implement logging for arbitrary DBI backends, similarly to Perl's [DBI::Log](https://metacpan.org/pod/DBI::Log). 25 | This is useful for troubleshooting and auditing codes that access a database. 
26 | The initial use case for this package is to help debugging DBItest tests. 27 | 28 | ## Installation 29 | 30 | You can install the released version of dblog from [CRAN](https://CRAN.R-project.org) with: 31 | 32 | ``` r 33 | install.packages("dblog") 34 | ``` 35 | 36 | Install the development version from GitHub using 37 | 38 | ``` r 39 | # install.packages("devtools") 40 | devtools::install_github("r-dbi/dblog") 41 | ``` 42 | 43 | 44 | ## Basic example 45 | 46 | The `dblog` driver wraps arbitrary drivers: 47 | 48 | ```{r init} 49 | library(dblog) 50 | drv <- dblog(RSQLite::SQLite()) 51 | ``` 52 | 53 | 54 | All calls to DBI methods are logged, by default to the console. 55 | 56 | ```{r console} 57 | conn <- dbConnect(drv, file = ":memory:") 58 | dbWriteTable(conn, "iris", iris[1:3, ]) 59 | data <- dbGetQuery(conn, "SELECT * FROM iris") 60 | dbDisconnect(conn) 61 | 62 | data 63 | ``` 64 | 65 | The log is runnable R code! 66 | Run it in a fresh session to repeat the operations, step by step or in an otherwise controlled fashion. 67 | 68 | dblog is smart about DBI objects created or returned, and will assign a new variable name to each new object. 69 | Cleared results or closed connections are not removed automatically. 70 | 71 | ## Logging options 72 | 73 | Logging can be redirected to a file, optionally all outputs may be logged as well. 74 | For example, use a collecting logger to output all calls and results after the fact. 
75 | 76 | 77 | ```{r collect} 78 | collecting_logger <- make_collect_logger() 79 | 80 | drv <- dblog(RSQLite::SQLite(), logger = collecting_logger) 81 | conn <- dbConnect(drv, file = ":memory:") 82 | dbWriteTable(conn, "iris", iris[1:3, ]) 83 | data <- dbGetQuery(conn, "SELECT * FROM iris") 84 | dbDisconnect(conn) 85 | 86 | collecting_logger$retrieve() 87 | 88 | ev <- evaluate::evaluate(collecting_logger$retrieve()) 89 | cat(unlist(ev, use.names = FALSE), sep = "\n") 90 | ``` 91 | 92 | ## Logging complex operations 93 | 94 | The full power is demonstrated when running with code where the underlying _DBI_ operations are not obvious: 95 | 96 | ```{r dplyr} 97 | library(dplyr) 98 | 99 | drv <- dblog(RSQLite::SQLite()) 100 | conn <- dbConnect(drv, file = ":memory:") 101 | dbWriteTable(conn, "iris", iris[1:3, ]) 102 | 103 | src <- dbplyr::src_dbi(conn) 104 | iris_tbl <- tbl(src, "iris") 105 | iris_tbl %>% 106 | summarize_if(is.numeric, mean) 107 | ``` 108 | 109 | ## Inheritance hierarchy 110 | 111 | Despite the common suggestion to [prefer composition over inheritance](https://en.wikipedia.org/wiki/Composition_over_inheritance), the new logging classes are implemented as subclasses of the actual DBI classes. 112 | Moreover, the class definitions are created on demand: for each different database backend, different subclasses are defined, to make sure dispatch is routed to the right methods. 113 | 114 | The reason for this is that other methods, unknown to this package, might dispatch on the DBI class. 115 | One such example is _dbplyr_ that introduces specialized behaviors for many classes. 116 | The `explain()` method calls the internal `db_explain()` method which uses `EXPLAIN QUERY PLAN` for SQLite connections but `EXPLAIN` for unspecified database connections. 117 | Without inheritance, _dbplyr_ would use the default method. 118 | This might lead to errors for other databases that do not understand `EXPLAIN`. 
119 | 120 | ```{r dplyr-explain} 121 | iris_tbl %>% 122 | summarize_if(is.numeric, mean) %>% 123 | explain() 124 | ``` 125 | 126 | --- 127 | 128 | Please note that the 'dblog' project is released with a 129 | [Contributor Code of Conduct](CODE_OF_CONDUCT.md). 130 | By contributing to this project, you agree to abide by its terms. 131 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # dblog 5 | 6 | 7 | 8 | [![Lifecycle: 9 | experimental](https://img.shields.io/badge/lifecycle-experimental-orange.svg)](https://www.tidyverse.org/lifecycle/#experimental) 10 | [![CRAN 11 | status](https://www.r-pkg.org/badges/version/dblog)](https://cran.r-project.org/package=dblog) 12 | 13 | 14 | The goal of dblog is to implement logging for arbitrary DBI backends, 15 | similarly to Perl’s [DBI::Log](https://metacpan.org/pod/DBI::Log). This 16 | is useful for troubleshooting and auditing codes that access a database. 17 | The initial use case for this package is to help debugging DBItest 18 | tests. 19 | 20 | ## Installation 21 | 22 | You can install the released version of dblog from 23 | [CRAN](https://CRAN.R-project.org) with: 24 | 25 | ``` r 26 | install.packages("dblog") 27 | ``` 28 | 29 | Install the development version from GitHub using 30 | 31 | ``` r 32 | # install.packages("devtools") 33 | devtools::install_github("r-dbi/dblog") 34 | ``` 35 | 36 | ## Basic example 37 | 38 | The `dblog` driver wraps arbitrary drivers: 39 | 40 | ``` r 41 | library(dblog) 42 | drv <- dblog(RSQLite::SQLite()) 43 | #> drv1 <- RSQLite::SQLite() 44 | ``` 45 | 46 | All calls to DBI methods are logged, by default to the console. 
47 | 48 | ``` r 49 | conn <- dbConnect(drv, file = ":memory:") 50 | #> conn1 <- dbConnect(drv1, file = ":memory:") 51 | dbWriteTable(conn, "iris", iris[1:3, ]) 52 | #> dbWriteTable(conn1, name = "iris", value = structure(list(Sepal.Length = c(5.1, 4.9, 53 | #> 4.7), Sepal.Width = c(3.5, 3, 3.2), Petal.Length = c(1.4, 1.4, 1.3), Petal.Width = c(0.2, 54 | #> 0.2, 0.2), Species = structure(c(1L, 1L, 1L), .Label = c("setosa", "versicolor", 55 | #> "virginica"), class = "factor")), row.names = c(NA, 3L), class = "data.frame"), overwrite = FALSE, 56 | #> append = FALSE) 57 | data <- dbGetQuery(conn, "SELECT * FROM iris") 58 | #> dbGetQuery(conn1, "SELECT * FROM iris") 59 | #> ## Sepal.Length Sepal.Width Petal.Length Petal.Width Species 60 | #> ## 1 5.1 3.5 1.4 0.2 setosa 61 | #> ## 2 4.9 3.0 1.4 0.2 setosa 62 | #> ## 3 4.7 3.2 1.3 0.2 setosa 63 | dbDisconnect(conn) 64 | #> dbDisconnect(conn1) 65 | 66 | data 67 | #> Sepal.Length Sepal.Width Petal.Length Petal.Width Species 68 | #> 1 5.1 3.5 1.4 0.2 setosa 69 | #> 2 4.9 3.0 1.4 0.2 setosa 70 | #> 3 4.7 3.2 1.3 0.2 setosa 71 | ``` 72 | 73 | The log is runnable R code\! Run it in a fresh session to repeat the 74 | operations, step by step or in an otherwise controlled fashion. 75 | 76 | dblog is smart about DBI objects created or returned, and will assign a 77 | new variable name to each new object. Cleared results or closed 78 | connections are not removed automatically. 79 | 80 | ## Logging options 81 | 82 | Logging can be redirected to a file, optionally all outputs may be 83 | logged as well. For example, use a collecting logger to output all calls 84 | and results after the fact. 
85 | 86 | ``` r 87 | collecting_logger <- make_collect_logger() 88 | 89 | drv <- dblog(RSQLite::SQLite(), logger = collecting_logger) 90 | conn <- dbConnect(drv, file = ":memory:") 91 | dbWriteTable(conn, "iris", iris[1:3, ]) 92 | data <- dbGetQuery(conn, "SELECT * FROM iris") 93 | dbDisconnect(conn) 94 | 95 | collecting_logger$retrieve() 96 | #> drv1 <- RSQLite::SQLite() 97 | #> conn1 <- dbConnect(drv1, file = ":memory:") 98 | #> dbWriteTable(conn1, name = "iris", value = structure(list(Sepal.Length = c(5.1, 4.9, 99 | #> 4.7), Sepal.Width = c(3.5, 3, 3.2), Petal.Length = c(1.4, 1.4, 1.3), Petal.Width = c(0.2, 100 | #> 0.2, 0.2), Species = structure(c(1L, 1L, 1L), .Label = c("setosa", "versicolor", 101 | #> "virginica"), class = "factor")), row.names = c(NA, 3L), class = "data.frame"), overwrite = FALSE, 102 | #> append = FALSE) 103 | #> dbGetQuery(conn1, "SELECT * FROM iris") 104 | #> ## Sepal.Length Sepal.Width Petal.Length Petal.Width Species 105 | #> ## 1 5.1 3.5 1.4 0.2 setosa 106 | #> ## 2 4.9 3.0 1.4 0.2 setosa 107 | #> ## 3 4.7 3.2 1.3 0.2 setosa 108 | #> dbDisconnect(conn1) 109 | 110 | ev <- evaluate::evaluate(collecting_logger$retrieve()) 111 | cat(unlist(ev, use.names = FALSE), sep = "\n") 112 | #> drv1 <- RSQLite::SQLite() 113 | #> 114 | #> conn1 <- dbConnect(drv1, file = ":memory:") 115 | #> 116 | #> dbWriteTable(conn1, name = "iris", value = structure(list(Sepal.Length = c(5.1, 4.9, 117 | #> 4.7), Sepal.Width = c(3.5, 3, 3.2), Petal.Length = c(1.4, 1.4, 1.3), Petal.Width = c(0.2, 118 | #> 0.2, 0.2), Species = structure(c(1L, 1L, 1L), .Label = c("setosa", "versicolor", 119 | #> "virginica"), class = "factor")), row.names = c(NA, 3L), class = "data.frame"), overwrite = FALSE, 120 | #> append = FALSE) 121 | #> 122 | #> dbGetQuery(conn1, "SELECT * FROM iris") 123 | #> 124 | #> Sepal.Length Sepal.Width Petal.Length Petal.Width Species 125 | #> 1 5.1 3.5 1.4 0.2 setosa 126 | #> 2 4.9 3.0 1.4 0.2 setosa 127 | #> 3 4.7 3.2 1.3 0.2 setosa 128 | #> 129 | #> ## 
Sepal.Length Sepal.Width Petal.Length Petal.Width Species 130 | #> 131 | #> ## 1 5.1 3.5 1.4 0.2 setosa 132 | #> 133 | #> ## 2 4.9 3.0 1.4 0.2 setosa 134 | #> 135 | #> ## 3 4.7 3.2 1.3 0.2 setosa 136 | #> 137 | #> dbDisconnect(conn1) 138 | ``` 139 | 140 | ## Logging complex operations 141 | 142 | The full power is demonstrated when running with code where the 143 | underlying *DBI* operations are not obvious: 144 | 145 | ``` r 146 | library(dplyr) 147 | #> 148 | #> Attaching package: 'dplyr' 149 | #> The following objects are masked from 'package:stats': 150 | #> 151 | #> filter, lag 152 | #> The following objects are masked from 'package:base': 153 | #> 154 | #> intersect, setdiff, setequal, union 155 | 156 | drv <- dblog(RSQLite::SQLite()) 157 | #> drv2 <- RSQLite::SQLite() 158 | conn <- dbConnect(drv, file = ":memory:") 159 | #> conn1 <- dbConnect(drv2, file = ":memory:") 160 | dbWriteTable(conn, "iris", iris[1:3, ]) 161 | #> dbWriteTable(conn1, name = "iris", value = structure(list(Sepal.Length = c(5.1, 4.9, 162 | #> 4.7), Sepal.Width = c(3.5, 3, 3.2), Petal.Length = c(1.4, 1.4, 1.3), Petal.Width = c(0.2, 163 | #> 0.2, 0.2), Species = structure(c(1L, 1L, 1L), .Label = c("setosa", "versicolor", 164 | #> "virginica"), class = "factor")), row.names = c(NA, 3L), class = "data.frame"), overwrite = FALSE, 165 | #> append = FALSE) 166 | 167 | src <- dbplyr::src_dbi(conn) 168 | iris_tbl <- tbl(src, "iris") 169 | #> res1 <- dbSendQuery(conn1, structure("SELECT *\nFROM `iris` AS `zzz1`\nWHERE (0 = 1)", class = c("sql", 170 | #> "character"))) 171 | #> dbFetch(res1, n = 0) 172 | #> ## [1] Sepal.Length Sepal.Width Petal.Length Petal.Width Species 173 | #> ## <0 rows> (or 0-length row.names) 174 | #> dbClearResult(res1) 175 | iris_tbl %>% 176 | summarize_if(is.numeric, mean) 177 | #> Applying predicate on the first 100 rows 178 | #> res2 <- dbSendQuery(conn1, structure("SELECT *\nFROM `iris`\nLIMIT 100", class = c("sql", 179 | #> "character"))) 180 | #> dbFetch(res2, n = 
100) 181 | #> ## Sepal.Length Sepal.Width Petal.Length Petal.Width Species 182 | #> ## 1 5.1 3.5 1.4 0.2 setosa 183 | #> ## 2 4.9 3.0 1.4 0.2 setosa 184 | #> ## 3 4.7 3.2 1.3 0.2 setosa 185 | #> dbHasCompleted(res2) 186 | #> ## [1] TRUE 187 | #> dbClearResult(res2) 188 | #> Warning: Missing values are always removed in SQL. 189 | #> Use `mean(x, na.rm = TRUE)` to silence this warning 190 | #> This warning is displayed only once per session. 191 | #> res3 <- dbSendQuery(conn1, structure("SELECT AVG(`Sepal.Length`) AS `Sepal.Length`, AVG(`Sepal.Width`) AS `Sepal.Width`, AVG(`Petal.Length`) AS `Petal.Length`, AVG(`Petal.Width`) AS `Petal.Width`\nFROM `iris`\nLIMIT 11", class = c("sql", 192 | #> "character"))) 193 | #> dbFetch(res3, n = -1) 194 | #> ## Sepal.Length Sepal.Width Petal.Length Petal.Width 195 | #> ## 1 4.9 3.233333 1.366667 0.2 196 | #> dbHasCompleted(res3) 197 | #> ## [1] TRUE 198 | #> dbClearResult(res3) 199 | #> # Source: lazy query [?? x 4] 200 | #> # Database: sqlite 3.29.0 [] 201 | #> Sepal.Length Sepal.Width Petal.Length Petal.Width 202 | #> 203 | #> 1 4.90 3.23 1.37 0.2 204 | ``` 205 | 206 | ## Inheritance hierarchy 207 | 208 | Despite the common suggestion to [prefer composition over 209 | inheritance](https://en.wikipedia.org/wiki/Composition_over_inheritance), 210 | the new logging classes are implemented as subclasses of the actual DBI 211 | classes. Moreover, the class definitions are created on demand: for each 212 | different database backend, different subclasses are defined, to make 213 | sure dispatch is routed to the right methods. 214 | 215 | The reason for this is that other methods, unknown to this package, 216 | might dispatch on the DBI class. One such example is *dbplyr* that 217 | introduces specialized behaviors for many classes. The `explain()` 218 | method calls the internal `db_explain()` method which uses `EXPLAIN 219 | QUERY PLAN` for SQLite connections but `EXPLAIN` for unspecified 220 | database connections. 
Without inheritance, *dbplyr* would use the 221 | default method. This might lead to errors for other databases that do 222 | not understand `EXPLAIN`. 223 | 224 | ``` r 225 | iris_tbl %>% 226 | summarize_if(is.numeric, mean) %>% 227 | explain() 228 | #> Applying predicate on the first 100 rows 229 | #> res4 <- dbSendQuery(conn1, structure("SELECT *\nFROM `iris`\nLIMIT 100", class = c("sql", 230 | #> "character"))) 231 | #> dbFetch(res4, n = 100) 232 | #> ## Sepal.Length Sepal.Width Petal.Length Petal.Width Species 233 | #> ## 1 5.1 3.5 1.4 0.2 setosa 234 | #> ## 2 4.9 3.0 1.4 0.2 setosa 235 | #> ## 3 4.7 3.2 1.3 0.2 setosa 236 | #> dbHasCompleted(res4) 237 | #> ## [1] TRUE 238 | #> dbClearResult(res4) 239 | #> 240 | #> SELECT AVG(`Sepal.Length`) AS `Sepal.Length`, AVG(`Sepal.Width`) AS `Sepal.Width`, AVG(`Petal.Length`) AS `Petal.Length`, AVG(`Petal.Width`) AS `Petal.Width` 241 | #> FROM `iris` 242 | #> 243 | #> 244 | #> dbGetQuery(conn1, structure("EXPLAIN QUERY PLAN SELECT AVG(`Sepal.Length`) AS `Sepal.Length`, AVG(`Sepal.Width`) AS `Sepal.Width`, AVG(`Petal.Length`) AS `Petal.Length`, AVG(`Petal.Width`) AS `Petal.Width`\nFROM `iris`", class = c("sql", 245 | #> "character"))) 246 | #> ## id parent notused detail 247 | #> ## 1 3 0 0 SCAN TABLE iris 248 | #> id parent notused detail 249 | #> 1 3 0 0 SCAN TABLE iris 250 | ``` 251 | 252 | ----- 253 | 254 | Please note that the ‘dblog’ project is released with a [Contributor 255 | Code of Conduct](CODE_OF_CONDUCT.md). By contributing to this project, 256 | you agree to abide by its terms. 
257 | -------------------------------------------------------------------------------- /dblog.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: No 4 | SaveWorkspace: No 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | 18 | BuildType: Package 19 | PackageUseDevtools: Yes 20 | PackageInstallArgs: --no-multiarch --with-keep.source --no-byte-compile 21 | PackageRoxygenize: rd,collate,namespace 22 | -------------------------------------------------------------------------------- /man/DBI.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/Driver.R 3 | \name{DBI} 4 | \alias{DBI} 5 | \title{DBI methods} 6 | \description{ 7 | Implementations of pure virtual functions defined in the \code{DBI} package. 8 | } 9 | -------------------------------------------------------------------------------- /man/dblog.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/Driver.R 3 | \name{dblog} 4 | \alias{dblog} 5 | \alias{dblog_cnr} 6 | \title{dblog driver and connector} 7 | \usage{ 8 | dblog(drv, logger = get_default_logger()) 9 | 10 | dblog_cnr(cnr, logger = get_default_logger()) 11 | } 12 | \arguments{ 13 | \item{drv}{Driver to be wrapped, object of class \link[DBI:DBIDriver-class]{DBI::DBIDriver}.} 14 | 15 | \item{logger}{Logger object, defaults to \code{\link[=get_default_logger]{get_default_logger()}}.} 16 | 17 | \item{cnr}{Connector to be wrapped, object of class \link[DBI:DBIConnector-class]{DBI::DBIConnector}.} 18 | } 19 | \description{ 20 | TBD. 
21 | } 22 | \examples{ 23 | \dontrun{ 24 | library(DBI) 25 | dblog::dblog() 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /man/get_default_logger.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/make-log-call.R 3 | \name{get_default_logger} 4 | \alias{get_default_logger} 5 | \alias{make_text_logger} 6 | \alias{make_collect_logger} 7 | \title{Logging parameters} 8 | \usage{ 9 | get_default_logger() 10 | 11 | make_text_logger(path = NULL) 12 | 13 | make_collect_logger(display = FALSE) 14 | } 15 | \arguments{ 16 | \item{path}{Passed on to \code{\link[=cat]{cat()}} for the output. Default: console output.} 17 | 18 | \item{display}{\verb{[logical(1)]}\cr 19 | Set to \code{TRUE} to display log entries in addition to writing 20 | to a file.} 21 | } 22 | \description{ 23 | TBD. 24 | } 25 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | if (require(testthat)) { 2 | library(dblog) 3 | test_check("dblog") 4 | } else { 5 | message("testthat not available.") 6 | } 7 | -------------------------------------------------------------------------------- /tests/testthat/test-deparse.R: -------------------------------------------------------------------------------- 1 | test_that("output", { 2 | # Can't use expect_snapshot() because it uses deparse() itself 3 | 4 | expect_deparse(list(foo = "bar")) 5 | expect_deparse(list("\n" = 1)) 6 | expect_deparse(list("\\n" = 1)) 7 | expect_deparse(quote(list("\n" = 1))) 8 | expect_deparse(quote(list("\\n" = 1))) 9 | expect_deparse(rlang::set_names(list(1), "")) 10 | }) 11 | --------------------------------------------------------------------------------