├── .Rbuildignore ├── .covrignore ├── .github ├── .gitignore ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE.md ├── SUPPORT.md └── workflows │ ├── R-CMD-check.yaml │ ├── live-api.yml │ ├── pkgdown.yaml │ ├── pr-commands.yaml │ └── test-coverage.yaml ├── .gitignore ├── .vscode └── settings.json ├── DESCRIPTION ├── LICENSE ├── LICENSE.md ├── NAMESPACE ├── NEWS.md ├── R ├── bigrquery-package.R ├── bq-auth.R ├── bq-dataset.R ├── bq-download.R ├── bq-field.R ├── bq-job.R ├── bq-param.R ├── bq-parse.R ├── bq-perform.R ├── bq-project.R ├── bq-projects.R ├── bq-query.R ├── bq-refs.R ├── bq-request.R ├── bq-table.R ├── bq-test.R ├── camelCase.R ├── connections-page.R ├── cpp11.R ├── dbi-connection.R ├── dbi-driver.R ├── dbi-result.R ├── dplyr.R ├── gs-object.R ├── import-standalone-obj-type.R ├── import-standalone-s3-register.R ├── import-standalone-types-check.R ├── utils.R └── zzz.R ├── README.Rmd ├── README.md ├── _pkgdown.yml ├── air.toml ├── bigrquery.Rproj ├── codecov.yml ├── cran-comments.md ├── inst ├── extdata │ └── data │ │ └── client_secret_123.googleusercontent.com.json ├── icons │ ├── bigquery-512-color.png │ ├── dataset.png │ ├── table.png │ └── view.png ├── rstudio │ ├── connections.dcf │ └── connections │ │ └── BigQuery.R └── secret │ └── bigrquery-testing.json ├── man ├── DBI.Rd ├── api-dataset.Rd ├── api-job.Rd ├── api-perform.Rd ├── api-project.Rd ├── api-table.Rd ├── bigquery.Rd ├── bigrquery-package.Rd ├── bq_auth.Rd ├── bq_auth_configure.Rd ├── bq_deauth.Rd ├── bq_field.Rd ├── bq_has_token.Rd ├── bq_oauth_app.Rd ├── bq_param.Rd ├── bq_projects.Rd ├── bq_query.Rd ├── bq_refs.Rd ├── bq_table_download.Rd ├── bq_test_project.Rd ├── bq_token.Rd ├── bq_user.Rd ├── collect.tbl_BigQueryConnection.Rd ├── figures │ ├── lifecycle-archived.svg │ ├── lifecycle-defunct.svg │ ├── lifecycle-deprecated.svg │ ├── lifecycle-experimental.svg │ ├── lifecycle-maturing.svg │ ├── lifecycle-questioning.svg │ ├── lifecycle-soft-deprecated.svg │ ├── lifecycle-stable.svg │ └── lifecycle-superseded.svg └── src_bigquery.Rd ├── revdep ├── .gitignore ├── README.md ├── cran.md ├── email.yml ├── failures.md └── problems.md ├── src ├── .gitignore ├── BqField.cpp ├── base64.c ├── base64.h ├── cpp11.cpp └── integer64.h └── tests ├── testthat.R └── testthat ├── .gitignore ├── _snaps ├── bq-auth.md ├── bq-dataset.md ├── bq-download.md ├── bq-field.md ├── bq-job.md ├── bq-param.md ├── bq-perform.md ├── bq-query.md ├── bq-refs.md ├── bq-request.md ├── bq-test.md ├── dbi-connection.md ├── dbi-result.md ├── dplyr.md ├── gs-object.md └── utils.md ├── dbi-result-print.txt ├── helper-auth.R ├── parse-schema-array-struct.json ├── parse-schema-array.json ├── parse-schema-struct-array.json ├── parse-schema-struct.json ├── parse-values-array-struct.json ├── parse-values-array.json ├── parse-values-struct-array.json ├── parse-values-struct.json ├── test-bq-auth.R ├── test-bq-dataset.R ├── test-bq-download.R ├── test-bq-field.R ├── test-bq-job.R ├── test-bq-param.R ├── test-bq-parse.R ├── test-bq-perform.R ├── test-bq-project.R ├── test-bq-projects.R ├── test-bq-query.R ├── test-bq-refs.R ├── test-bq-request.R ├── test-bq-table.R ├── test-bq-test.R ├── test-camelCase.R ├── test-dbi-connection.R ├── test-dbi-driver.R ├── test-dbi-result.R ├── test-dplyr.R ├── test-gs-object.R └── test-utils.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^data-raw$ 2 | ^pkgdown$ 3 | ^_pkgdown\.yml$ 4 | ^docs$ 5 | ^.*\.Rproj$ 6 | ^\.Rproj\.user$ 7 | ^\.httr-oauth$ 
8 | ^\.travis\.yml$ 9 | ^cran-comments\.md$ 10 | ^revdep$ 11 | ^codecov\.yml$ 12 | ^README\.Rmd$ 13 | ^\.covrignore$ 14 | ^CRAN-RELEASE$ 15 | ^\.github$ 16 | ^LICENSE\.md$ 17 | ^appveyor\.yml$ 18 | ^internal$ 19 | ^CRAN-SUBMISSION$ 20 | ^air.toml$ 21 | -------------------------------------------------------------------------------- /.covrignore: -------------------------------------------------------------------------------- 1 | R/old-*.R 2 | R/zzz.R 3 | R/deprec-*.R 4 | R/compat-*.R 5 | -------------------------------------------------------------------------------- /.github/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # CODEOWNERS for bigrquery 2 | # https://www.tidyverse.org/development/understudies 3 | .github/CODEOWNERS @hadley @jennybc 4 | -------------------------------------------------------------------------------- /.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, caste, color, religion, or sexual 10 | identity and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the overall 26 | community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or advances of 31 | any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email address, 35 | without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at codeofconduct@posit.co.
63 | All complaints will be reviewed and investigated promptly and fairly.
64 |
65 | All community leaders are obligated to respect the privacy and security of the
66 | reporter of any incident.
67 |
68 | ## Enforcement Guidelines
69 |
70 | Community leaders will follow these Community Impact Guidelines in determining
71 | the consequences for any action they deem in violation of this Code of Conduct:
72 |
73 | ### 1. Correction
74 |
75 | **Community Impact**: Use of inappropriate language or other behavior deemed
76 | unprofessional or unwelcome in the community.
77 |
78 | **Consequence**: A private, written warning from community leaders, providing
79 | clarity around the nature of the violation and an explanation of why the
80 | behavior was inappropriate. A public apology may be requested.
81 |
82 | ### 2. Warning
83 |
84 | **Community Impact**: A violation through a single incident or series of
85 | actions.
86 |
87 | **Consequence**: A warning with consequences for continued behavior. No
88 | interaction with the people involved, including unsolicited interaction with
89 | those enforcing the Code of Conduct, for a specified period of time. This
90 | includes avoiding interactions in community spaces as well as external channels
91 | like social media. Violating these terms may lead to a temporary or permanent
92 | ban.
93 |
94 | ### 3. Temporary Ban
95 |
96 | **Community Impact**: A serious violation of community standards, including
97 | sustained inappropriate behavior.
98 |
99 | **Consequence**: A temporary ban from any sort of interaction or public
100 | communication with the community for a specified period of time. No public or
101 | private interaction with the people involved, including unsolicited interaction
102 | with those enforcing the Code of Conduct, is allowed during this period.
103 | Violating these terms may lead to a permanent ban.
104 |
105 | ### 4. Permanent Ban
106 |
107 | **Community Impact**: Demonstrating a pattern of violation of community
108 | standards, including sustained inappropriate behavior, harassment of an
109 | individual, or aggression toward or disparagement of classes of individuals.
110 |
111 | **Consequence**: A permanent ban from any sort of public interaction within the
112 | community.
113 |
114 | ## Attribution
115 |
116 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
117 | version 2.1, available at
118 | <https://www.contributor-covenant.org/version/2/1/code_of_conduct.html>.
119 |
120 | Community Impact Guidelines were inspired by
121 | [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/inclusion).
122 |
123 | For answers to common questions about this code of conduct, see the FAQ at
124 | <https://www.contributor-covenant.org/faq>. Translations are available at <https://www.contributor-covenant.org/translations>.
125 |
126 | [homepage]: https://www.contributor-covenant.org
127 |
-------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: --------------------------------------------------------------------------------
1 | # Contributing to bigrquery
2 |
3 | This outlines how to propose a change to bigrquery. For more detailed
4 | info about contributing to this, and other tidyverse packages, please see the
5 | [**development contributing guide**](https://rstd.io/tidy-contrib).
6 |
7 | ### Fixing typos
8 |
9 | Small typos or grammatical errors in documentation may be edited directly using
10 | the GitHub web interface, so long as the changes are made in the _source_ file.
11 |
12 | * YES: you edit a roxygen comment in a `.R` file below `R/`.
13 | * NO: you edit an `.Rd` file below `man/`.
14 |
15 | ### Prerequisites
16 |
17 | Before you make a substantial pull request, you should always file an issue and
18 | make sure someone from the team agrees that it’s a problem. If you’ve found a
19 | bug, create an associated issue and illustrate the bug with a minimal
20 | [reprex](https://www.tidyverse.org/help/#reprex).
21 |
22 | ### Pull request process
23 |
24 | * We recommend that you create a Git branch for each pull request (PR).
25 | * Look at the GitHub Actions build status before and after making changes.
26 | The `README` should contain badges for any continuous integration services used
27 | by the package.
28 | * New code should follow the tidyverse [style guide](https://style.tidyverse.org).
29 | You can use the [styler](https://CRAN.R-project.org/package=styler) package to
30 | apply these styles, but please don't restyle code that has nothing to do with
31 | your PR.
32 | * We use [roxygen2](https://cran.r-project.org/package=roxygen2), with
33 | [Markdown syntax](https://cran.r-project.org/web/packages/roxygen2/vignettes/markdown.html),
34 | for documentation.
35 | * We use [testthat](https://cran.r-project.org/package=testthat). Contributions
36 | with test cases included are easier to accept.
37 | * For user-facing changes, add a bullet to the top of `NEWS.md` below the
38 | current development version header describing the changes made, followed by your
39 | GitHub username and links to relevant issue(s)/PR(s).
40 |
41 | ### Test project and bucket
42 |
43 | See the internal help for [`bq_test_project()`](https://bigrquery.r-dbi.org/reference/bq_test_project.html) for information about setting up a test project and test bucket.
44 |
45 | ### Testing token
46 |
47 | The overall approach to managing the service account token used in tests is described in the gargle article [Managing tokens securely](https://gargle.r-lib.org/articles/articles/managing-tokens-securely.html).
48 |
49 | ### Code of Conduct
50 |
51 | Please note that the bigrquery project is released with a
52 | [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By contributing to this
53 | project you agree to abide by its terms.
54 |
55 | See the tidyverse [development contributing guide](https://rstd.io/tidy-contrib)
56 | for further details.
57 |
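As a concrete starting point, here is a minimal sketch of the pull request process above, using usethis and devtools helpers (the branch name is hypothetical; any equivalent Git tooling works just as well):

```r
# Fork and clone bigrquery, then start a branch for your PR
usethis::create_from_github("r-dbi/bigrquery", fork = TRUE)
usethis::pr_init("my-fix") # hypothetical branch name

# After editing roxygen comments in R/, regenerate man/ and NAMESPACE
devtools::document()

# Run the test suite locally before pushing
devtools::test()

# Push the branch and open the pull request on GitHub
usethis::pr_push()
```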
-------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: --------------------------------------------------------------------------------
1 | Please briefly describe your problem and what output you expect. If you have a question, please don't use this form. Instead, ask on <https://community.rstudio.com/> or <https://stackoverflow.com/>.
2 |
3 | Please include a minimal reproducible example (AKA a reprex). If you've never heard of a [reprex](https://reprex.tidyverse.org/) before, start by reading <https://www.tidyverse.org/help/#reprex>.
4 |
5 | ---
6 |
7 | Brief description of the problem
8 |
9 | ```r
10 | # insert reprex here
11 | ```
12 |
-------------------------------------------------------------------------------- /.github/SUPPORT.md: --------------------------------------------------------------------------------
1 | # Getting help with bigrquery
2 |
3 | Thanks for using bigrquery. Before filing an issue, there are a few places
4 | to explore and pieces to put together to make the process as smooth as possible.
5 |
6 | Start by making a minimal **repr**oducible **ex**ample using the
7 | [reprex](https://reprex.tidyverse.org/) package. If you haven't heard of or used
8 | reprex before, you're in for a treat! Seriously, reprex will make all of your
9 | R-question-asking endeavors easier (which is a pretty insane ROI for the five to
10 | ten minutes it'll take you to learn what it's all about). For additional reprex
11 | pointers, check out the [Get help!](https://www.tidyverse.org/help/) section of
12 | the tidyverse site.
13 |
14 | Armed with your reprex, the next step is to figure out [where to ask](https://www.tidyverse.org/help/#where-to-ask).
15 |
16 | * If it's a question: start with [community.rstudio.com](https://community.rstudio.com/),
17 | and/or StackOverflow. There are more people there to answer questions.
18 | * If it's a bug: you're in the right place, file an issue.
19 | * If you're not sure: let the community help you figure it out! If your
20 | problem _is_ a bug or a feature request, you can easily return here and
21 | report it.
22 |
23 | Before opening a new issue, be sure to [search issues and pull requests](https://github.com/r-dbi/bigrquery/issues) to make sure the
24 | bug hasn't been reported and/or already fixed in the development version. By
25 | default, the search will be pre-populated with `is:issue is:open`. You can
26 | [edit the qualifiers](https://help.github.com/articles/searching-issues-and-pull-requests/)
27 | (e.g. `is:pr`, `is:closed`) as needed. For example, you'd simply
28 | remove `is:open` to search _all_ issues in the repo, open or closed.
29 |
30 |
31 | If you _are_ in the right place, and need to file an issue, please review the
32 | ["File issues"](https://www.tidyverse.org/contribute/#issues) paragraph from
33 | the tidyverse contributing guidelines.
34 |
35 | Thanks for your help!
36 |
-------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: --------------------------------------------------------------------------------
1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples
2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
3 | #
4 | # NOTE: This workflow is overkill for most R packages and
5 | # check-standard.yaml is likely a better choice.
6 | # usethis::use_github_action("check-standard") will install it.
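#
# As a sketch, swapping to the leaner workflow looks like this (run locally
# in the package directory, assuming the usethis package is installed, then
# commit the regenerated file):
#
#   usethis::use_github_action("check-standard")
#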
7 | on: 8 | push: 9 | branches: [main, master] 10 | pull_request: 11 | branches: [main, master] 12 | 13 | name: R-CMD-check 14 | 15 | jobs: 16 | R-CMD-check: 17 | runs-on: ${{ matrix.config.os }} 18 | 19 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 20 | 21 | strategy: 22 | fail-fast: false 23 | matrix: 24 | config: 25 | - {os: macos-latest, r: 'release'} 26 | 27 | - {os: windows-latest, r: 'release'} 28 | # use 4.1 to check with rtools40's older compiler 29 | - {os: windows-latest, r: '4.1'} 30 | 31 | - {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'} 32 | - {os: ubuntu-latest, r: 'release'} 33 | - {os: ubuntu-latest, r: 'oldrel-1'} 34 | - {os: ubuntu-latest, r: 'oldrel-2'} 35 | - {os: ubuntu-latest, r: 'oldrel-3'} 36 | - {os: ubuntu-latest, r: 'oldrel-4'} 37 | 38 | env: 39 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 40 | R_KEEP_PKG_SOURCE: yes 41 | 42 | steps: 43 | - uses: actions/checkout@v3 44 | 45 | - uses: r-lib/actions/setup-pandoc@v2 46 | 47 | - uses: r-lib/actions/setup-r@v2 48 | with: 49 | r-version: ${{ matrix.config.r }} 50 | http-user-agent: ${{ matrix.config.http-user-agent }} 51 | use-public-rspm: true 52 | 53 | - uses: r-lib/actions/setup-r-dependencies@v2 54 | with: 55 | extra-packages: any::rcmdcheck 56 | needs: check 57 | 58 | - uses: r-lib/actions/check-r-package@v2 59 | with: 60 | upload-snapshots: true 61 | -------------------------------------------------------------------------------- /.github/workflows/live-api.yml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | # 4 | # NOTE: This workflow is overkill for most R packages and 5 | # check-standard.yaml is likely a better choice. 6 | # usethis::use_github_action("check-standard") will install it. 7 | on: 8 | push: 9 | branches: [main, master] 10 | pull_request: 11 | branches: [main, master] 12 | 13 | name: live-api 14 | 15 | jobs: 16 | live-api: 17 | runs-on: ubuntu-latest 18 | 19 | env: 20 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 21 | R_KEEP_PKG_SOURCE: yes 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | 26 | - uses: r-lib/actions/setup-pandoc@v2 27 | 28 | - uses: r-lib/actions/setup-r@v2 29 | with: 30 | r-version: release 31 | http-user-agent: ${{ matrix.config.http-user-agent }} 32 | use-public-rspm: true 33 | 34 | - uses: r-lib/actions/setup-r-dependencies@v2 35 | with: 36 | extra-packages: any::rcmdcheck 37 | needs: check 38 | 39 | - uses: r-lib/actions/check-r-package@v2 40 | with: 41 | upload-snapshots: true 42 | env: 43 | BIGRQUERY_KEY: ${{ secrets.BIGRQUERY_KEY }} 44 | BIGQUERY_TEST_PROJECT: gargle-169921 45 | BIGQUERY_TEST_BUCKET: bigrquery-test-bucket-calliope 46 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | release: 9 | types: [published] 10 | workflow_dispatch: 11 | 12 | name: pkgdown 13 | 14 | jobs: 15 | pkgdown: 16 | runs-on: ubuntu-latest 17 | # Only restrict concurrency for non-PR jobs 18 | concurrency: 19 | group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} 20 | env: 21 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 22 | permissions: 23 | contents: write 24 | steps: 25 | - uses: actions/checkout@v3 26 | 27 | - uses: r-lib/actions/setup-pandoc@v2 28 | 29 | - uses: r-lib/actions/setup-r@v2 30 | with: 31 | use-public-rspm: true 32 | 33 | - uses: r-lib/actions/setup-r-dependencies@v2 34 | with: 35 | extra-packages: any::pkgdown, local::. 36 | needs: website 37 | 38 | - name: Build site 39 | run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) 40 | shell: Rscript {0} 41 | env: 42 | BIGRQUERY_KEY: ${{ secrets.BIGRQUERY_KEY }} 43 | BIGQUERY_TEST_PROJECT: gargle-169921 44 | BIGQUERY_TEST_BUCKET: bigrquery-test-bucket-calliope 45 | 46 | - name: Deploy to GitHub pages 🚀 47 | if: github.event_name != 'pull_request' 48 | uses: JamesIves/github-pages-deploy-action@v4.4.1 49 | with: 50 | clean: false 51 | branch: gh-pages 52 | folder: docs 53 | -------------------------------------------------------------------------------- /.github/workflows/pr-commands.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | issue_comment: 5 | types: [created] 6 | 7 | name: Commands 8 | 9 | jobs: 10 | document: 11 | if: ${{ github.event.issue.pull_request && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER') && startsWith(github.event.comment.body, '/document') }} 12 | name: document 13 | runs-on: ubuntu-latest 14 | env: 15 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 16 | steps: 17 | - uses: actions/checkout@v3 18 | 19 | - uses: r-lib/actions/pr-fetch@v2 20 | with: 21 | repo-token: ${{ secrets.GITHUB_TOKEN }} 22 | 23 | - uses: r-lib/actions/setup-r@v2 24 | with: 25 | use-public-rspm: true 26 | 27 | - uses: r-lib/actions/setup-r-dependencies@v2 28 | with: 29 | extra-packages: any::roxygen2 30 | needs: pr-document 31 | 32 | - name: Document 33 | run: roxygen2::roxygenise() 34 | shell: Rscript {0} 35 | 36 | - name: commit 37 | run: | 38 | git config --local user.name "$GITHUB_ACTOR" 39 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 40 | git add man/\* NAMESPACE 41 | git commit -m 'Document' 42 | 43 | - uses: r-lib/actions/pr-push@v2 44 | with: 45 | repo-token: ${{ secrets.GITHUB_TOKEN }} 46 | 47 | style: 48 | if: ${{ github.event.issue.pull_request && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER') && startsWith(github.event.comment.body, '/style') }} 49 | name: style 50 | runs-on: ubuntu-latest 51 | env: 52 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 53 | steps: 54 | - uses: actions/checkout@v3 55 | 56 | - uses: r-lib/actions/pr-fetch@v2 57 | with: 58 | repo-token: ${{ secrets.GITHUB_TOKEN }} 59 | 60 | - uses: r-lib/actions/setup-r@v2 61 | 62 | - name: Install dependencies 63 | run: install.packages("styler") 64 | shell: Rscript {0} 65 | 66 | - name: Style 67 | 
run: styler::style_pkg() 68 | shell: Rscript {0} 69 | 70 | - name: commit 71 | run: | 72 | git config --local user.name "$GITHUB_ACTOR" 73 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 74 | git add \*.R 75 | git commit -m 'Style' 76 | 77 | - uses: r-lib/actions/pr-push@v2 78 | with: 79 | repo-token: ${{ secrets.GITHUB_TOKEN }} 80 | -------------------------------------------------------------------------------- /.github/workflows/test-coverage.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | 9 | name: test-coverage 10 | 11 | jobs: 12 | test-coverage: 13 | runs-on: ubuntu-latest 14 | env: 15 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 16 | 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - uses: r-lib/actions/setup-r@v2 21 | with: 22 | use-public-rspm: true 23 | 24 | - uses: r-lib/actions/setup-r-dependencies@v2 25 | with: 26 | extra-packages: any::covr 27 | needs: coverage 28 | 29 | - name: Test coverage 30 | run: | 31 | covr::codecov( 32 | quiet = FALSE, 33 | clean = FALSE, 34 | install_path = file.path(normalizePath(Sys.getenv("RUNNER_TEMP"), winslash = "/"), "package") 35 | ) 36 | shell: Rscript {0} 37 | 38 | - name: Show testthat output 39 | if: always() 40 | run: | 41 | ## -------------------------------------------------------------------- 42 | find ${{ runner.temp }}/package -name 'testthat.Rout*' -exec cat '{}' \; || true 43 | shell: bash 44 | 45 | - name: Upload test results 46 | if: failure() 47 | uses: actions/upload-artifact@v3 48 | with: 49 | name: coverage-test-failures 50 | path: ${{ runner.temp }}/package 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .httr-oauth 5 | docs 6 | internal/ 7 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[r]": { 3 | "editor.formatOnSave": true, 4 | "editor.defaultFormatter": "Posit.air-vscode" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: bigrquery 2 | Title: An Interface to Google's 'BigQuery' 'API' 3 | Version: 1.5.1.9000 4 | Authors@R: c( 5 | person("Hadley", "Wickham", , "hadley@posit.co", role = c("aut", "cre"), 6 | comment = c(ORCID = "0000-0003-4757-117X")), 7 | person("Jennifer", "Bryan", , "jenny@posit.co", role = "aut", 8 | comment = c(ORCID = "0000-0002-6983-2759")), 9 | person("Posit Software, PBC", role = c("cph", "fnd")) 10 | ) 11 | Description: Easily talk to Google's 'BigQuery' database from R. 
12 | License: MIT + file LICENSE 13 | URL: https://bigrquery.r-dbi.org, https://github.com/r-dbi/bigrquery 14 | BugReports: https://github.com/r-dbi/bigrquery/issues 15 | Depends: 16 | R (>= 4.0) 17 | Imports: 18 | bit64, 19 | brio, 20 | cli, 21 | clock, 22 | curl, 23 | DBI, 24 | gargle (>= 1.5.0), 25 | httr, 26 | jsonlite, 27 | lifecycle, 28 | methods, 29 | prettyunits, 30 | rlang (>= 1.1.0), 31 | tibble, 32 | nanoparquet (>= 0.3.1) 33 | Suggests: 34 | bigrquerystorage (>= 1.2.1), 35 | blob, 36 | covr, 37 | dbplyr (>= 2.4.0), 38 | dplyr (>= 1.1.0), 39 | hms, 40 | readr, 41 | sodium, 42 | testthat (>= 3.1.5), 43 | wk (>= 0.3.2), 44 | withr 45 | LinkingTo: 46 | cli, 47 | cpp11, 48 | rapidjsonr 49 | Config/Needs/website: tidyverse/tidytemplate 50 | Config/testthat/edition: 3 51 | Config/testthat/parallel: TRUE 52 | Config/testthat/start-first: bq-table, dplyr 53 | Encoding: UTF-8 54 | Roxygen: list(markdown = TRUE) 55 | RoxygenNote: 7.3.2 56 | Collate: 57 | 'bigrquery-package.R' 58 | 'bq-auth.R' 59 | 'bq-dataset.R' 60 | 'bq-download.R' 61 | 'bq-field.R' 62 | 'bq-job.R' 63 | 'bq-param.R' 64 | 'bq-parse.R' 65 | 'bq-perform.R' 66 | 'bq-project.R' 67 | 'bq-projects.R' 68 | 'bq-query.R' 69 | 'bq-refs.R' 70 | 'bq-request.R' 71 | 'bq-table.R' 72 | 'bq-test.R' 73 | 'camelCase.R' 74 | 'connections-page.R' 75 | 'cpp11.R' 76 | 'dbi-driver.R' 77 | 'dbi-connection.R' 78 | 'dbi-result.R' 79 | 'dplyr.R' 80 | 'gs-object.R' 81 | 'import-standalone-obj-type.R' 82 | 'import-standalone-s3-register.R' 83 | 'import-standalone-types-check.R' 84 | 'utils.R' 85 | 'zzz.R' 86 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | YEAR: 2023 2 | COPYRIGHT HOLDER: bigrquery authors 3 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # MIT License 2 | 3 | Copyright (c) 2023 bigrquery authors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(as.character,bq_job) 4 | S3method(as.character,gs_object) 5 | S3method(as_bq_dataset,BigQueryConnection) 6 | S3method(as_bq_dataset,bq_dataset) 7 | S3method(as_bq_dataset,character) 8 | S3method(as_bq_dataset,default) 9 | S3method(as_bq_dataset,list) 10 | S3method(as_bq_field,bq_field) 11 | S3method(as_bq_field,list) 12 | S3method(as_bq_fields,"NULL") 13 | S3method(as_bq_fields,bq_fields) 14 | S3method(as_bq_fields,data.frame) 15 | S3method(as_bq_fields,list) 16 | S3method(as_bq_job,bq_job) 17 | S3method(as_bq_job,character) 18 | S3method(as_bq_job,default) 19 | S3method(as_bq_job,list) 20 | S3method(as_bq_table,BigQueryConnection) 21 | S3method(as_bq_table,bq_table) 22 | S3method(as_bq_table,character) 23 | S3method(as_bq_table,default) 24 | S3method(as_bq_table,list) 25 | S3method(as_json,"NULL") 26 | S3method(as_json,bq_field) 27 | S3method(as_json,bq_fields) 28 | S3method(as_json,bq_param_array) 29 | S3method(as_json,bq_param_scalar) 30 | S3method(as_json,bq_params) 31 | S3method(format,bq_field) 32 | S3method(format,bq_fields) 33 | S3method(format,gs_object) 34 | S3method(op_can_download,lazy_base_query) 35 | S3method(op_can_download,lazy_query) 36 | S3method(op_can_download,lazy_select_query) 37 | S3method(op_can_download,tbl_lazy) 38 | S3method(op_rows,lazy_base_query) 39 | S3method(op_rows,lazy_select_query) 40 | S3method(op_rows,tbl_lazy) 41 | S3method(op_table,lazy_base_remote_query) 42 | S3method(op_table,lazy_query) 43 | S3method(op_table,lazy_select_query) 44 | S3method(op_table,tbl_lazy) 45 | S3method(print,bq_bytes) 46 | S3method(print,bq_dataset) 47 | S3method(print,bq_field) 48 | S3method(print,bq_fields) 49 | S3method(print,bq_job) 50 | S3method(print,bq_param) 51 | S3method(print,bq_params) 52 | S3method(print,bq_table) 53 | S3method(print,gs_object) 54 | S3method(toString,bq_table) 55 | export(as_bq_dataset) 56 | export(as_bq_field) 57 | export(as_bq_fields) 58 | export(as_bq_job) 59 | export(as_bq_table) 60 | export(bigquery) 61 | export(bq_auth) 62 | export(bq_auth_configure) 63 | export(bq_authable) 64 | export(bq_dataset) 65 | export(bq_dataset_create) 66 | export(bq_dataset_delete) 67 | export(bq_dataset_exists) 68 | export(bq_dataset_meta) 69 | export(bq_dataset_query) 70 | export(bq_dataset_tables) 71 | export(bq_dataset_update) 72 | export(bq_deauth) 73 | export(bq_field) 74 | export(bq_fields) 75 | export(bq_has_token) 76 | export(bq_job) 77 | export(bq_job_meta) 78 | export(bq_job_show_statistics) 79 | export(bq_job_status) 80 | export(bq_job_wait) 81 | export(bq_oauth_app) 82 | export(bq_oauth_client) 83 | export(bq_param) 84 | export(bq_param_array) 85 | export(bq_param_scalar) 86 | export(bq_perform_copy) 87 | export(bq_perform_extract) 88 | export(bq_perform_load) 89 | export(bq_perform_query) 90 | export(bq_perform_query_dry_run) 91 | export(bq_perform_query_schema) 92 | export(bq_perform_upload) 93 | export(bq_project_datasets) 94 | export(bq_project_jobs) 95 | export(bq_project_query) 96 | export(bq_projects) 97 | export(bq_table) 98 | export(bq_table_copy) 99 | export(bq_table_create) 100 | export(bq_table_delete) 101 | export(bq_table_download) 102 | export(bq_table_exists) 103 | export(bq_table_fields) 104 | export(bq_table_load) 105 | export(bq_table_meta) 106 | export(bq_table_nrow) 107 | 
export(bq_table_patch) 108 | export(bq_table_save) 109 | export(bq_table_size) 110 | export(bq_table_upload) 111 | export(bq_test_dataset) 112 | export(bq_test_init) 113 | export(bq_test_project) 114 | export(bq_testable) 115 | export(bq_token) 116 | export(bq_user) 117 | export(dbi_driver) 118 | export(gs_test_bucket) 119 | export(gs_test_object) 120 | export(src_bigquery) 121 | exportClasses(BigQueryConnection) 122 | exportClasses(BigQueryDriver) 123 | exportClasses(BigQueryResult) 124 | exportMethods(dbAppendTable) 125 | exportMethods(dbBegin) 126 | exportMethods(dbBind) 127 | exportMethods(dbClearResult) 128 | exportMethods(dbColumnInfo) 129 | exportMethods(dbCommit) 130 | exportMethods(dbConnect) 131 | exportMethods(dbCreateTable) 132 | exportMethods(dbDataType) 133 | exportMethods(dbDisconnect) 134 | exportMethods(dbExecute) 135 | exportMethods(dbExistsTable) 136 | exportMethods(dbFetch) 137 | exportMethods(dbGetInfo) 138 | exportMethods(dbGetRowCount) 139 | exportMethods(dbGetRowsAffected) 140 | exportMethods(dbGetStatement) 141 | exportMethods(dbHasCompleted) 142 | exportMethods(dbIsValid) 143 | exportMethods(dbListFields) 144 | exportMethods(dbListTables) 145 | exportMethods(dbQuoteIdentifier) 146 | exportMethods(dbQuoteLiteral) 147 | exportMethods(dbQuoteString) 148 | exportMethods(dbReadTable) 149 | exportMethods(dbRemoveTable) 150 | exportMethods(dbRollback) 151 | exportMethods(dbSendQuery) 152 | exportMethods(dbWriteTable) 153 | exportMethods(show) 154 | import(DBI) 155 | import(methods) 156 | import(rlang, except = unbox) 157 | importFrom(bit64,integer64) 158 | importFrom(gargle,token_fetch) 159 | importFrom(httr,DELETE) 160 | importFrom(httr,GET) 161 | importFrom(httr,PATCH) 162 | importFrom(httr,POST) 163 | importFrom(httr,PUT) 164 | importFrom(httr,add_headers) 165 | importFrom(httr,config) 166 | importFrom(httr,content) 167 | importFrom(httr,headers) 168 | importFrom(httr,http_status) 169 | importFrom(httr,parse_media) 170 | importFrom(httr,status_code) 171 | importFrom(jsonlite,unbox) 172 | importFrom(lifecycle,deprecated) 173 | importFrom(tibble,tibble) 174 | useDynLib(bigrquery, .registration = TRUE) 175 | -------------------------------------------------------------------------------- /R/bigrquery-package.R: -------------------------------------------------------------------------------- 1 | #' @section Package options: 2 | #' \describe{ 3 | #' \item{`bigrquery.quiet`}{Verbose output during processing? The default 4 | #' value, `NA`, turns on verbose output for queries that run longer than 5 | #' two seconds. Use `FALSE` for immediate verbose output, `TRUE` 6 | #' for quiet operation.} 7 | #' 8 | #' \item{`bigrquery.page.size`}{Default page size for fetching data, 9 | #' defaults to 1e4.} 10 | #' } 11 | #' 12 | #' @keywords internal 13 | #' @useDynLib bigrquery, .registration = TRUE 14 | #' @rawNamespace import(rlang, except = unbox) 15 | "_PACKAGE" 16 | 17 | the <- new_environment() 18 | 19 | # We import rlang this way because jsonlite's unbox() is used extensively. 20 | 21 | ## usethis namespace: start 22 | #' @importFrom bit64 integer64 23 | #' @importFrom jsonlite unbox 24 | #' @importFrom lifecycle deprecated 25 | #' @importFrom tibble tibble 26 | ## usethis namespace: end 27 | NULL 28 | -------------------------------------------------------------------------------- /R/bq-dataset.R: -------------------------------------------------------------------------------- 1 | #' BigQuery datasets 2 | #' 3 | #' Basic create-read-update-delete verbs for datasets. 
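#'
#' As a quick sketch of the four verbs (the project and dataset names here
#' are hypothetical, and the updated field is illustrative):
#'
#' ```r
#' ds <- bq_dataset("my-project", "my_dataset")
#' bq_dataset_create(ds)                            # create
#' bq_dataset_meta(ds)                              # read
#' bq_dataset_update(ds, friendly_name = "My data") # update
#' bq_dataset_delete(ds)                            # delete
#' ```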
4 | #' 5 | #' @param x A [bq_dataset] 6 | #' @inheritParams api-job 7 | #' @inheritParams api-perform 8 | #' @inheritParams bq_projects 9 | #' 10 | #' @section Google BigQuery API documentation: 11 | #' * [get](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/get) 12 | #' * [insert](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert) 13 | #' * [delete](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete) 14 | #' * [list](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list) 15 | #' @examplesIf bq_testable() 16 | #' ds <- bq_dataset(bq_test_project(), "dataset_api") 17 | #' bq_dataset_exists(ds) 18 | #' 19 | #' bq_dataset_create(ds) 20 | #' bq_dataset_exists(ds) 21 | #' str(bq_dataset_meta(ds)) 22 | #' 23 | #' bq_dataset_delete(ds) 24 | #' bq_dataset_exists(ds) 25 | #' 26 | #' # Use bq_test_dataset() to create a temporary dataset that will 27 | #' # be automatically deleted 28 | #' ds <- bq_test_dataset() 29 | #' bq_table_create(bq_table(ds, "x1")) 30 | #' bq_table_create(bq_table(ds, "x2")) 31 | #' bq_table_create(bq_table(ds, "x3")) 32 | #' bq_dataset_tables(ds) 33 | #' @name api-dataset 34 | NULL 35 | 36 | #' @export 37 | #' @rdname api-dataset 38 | #' @param location Dataset location 39 | bq_dataset_create <- function(x, location = "US", ...) { 40 | x <- as_bq_dataset(x) 41 | 42 | url <- bq_path(x$project, "") 43 | body <- list( 44 | datasetReference = datasetReference(x), 45 | location = location 46 | ) 47 | bq_post(url, body = bq_body(body, ...)) 48 | 49 | x 50 | } 51 | 52 | #' @export 53 | #' @rdname api-dataset 54 | bq_dataset_meta <- function(x, fields = NULL) { 55 | x <- as_bq_dataset(x) 56 | 57 | url <- bq_path(x$project, x$dataset) 58 | bq_get(url, query = list(fields = fields)) 59 | } 60 | 61 | #' @export 62 | #' @rdname api-dataset 63 | bq_dataset_exists <- function(x) { 64 | x <- as_bq_dataset(x) 65 | 66 | url <- bq_path(x$project, x$dataset) 67 | bq_exists(url) 68 | } 69 | 70 | #' @export 71 | #' @rdname api-dataset 72 | bq_dataset_update <- function(x, ...) { 73 | x <- as_bq_dataset(x) 74 | 75 | url <- bq_path(x$project, x$dataset) 76 | body <- list(datasetReference = datasetReference(x)) 77 | bq_patch(url, body = bq_body(body, ...)) 78 | 79 | invisible(x) 80 | } 81 | 82 | #' @export 83 | #' @rdname api-dataset 84 | #' @param delete_contents If `TRUE`, will recursively delete all tables in 85 | #' the dataset. Set to `FALSE` by default for safety. 86 | bq_dataset_delete <- function(x, delete_contents = FALSE) { 87 | x <- as_bq_dataset(x) 88 | 89 | url <- bq_path(x$project, x$dataset) 90 | query <- list(deleteContents = delete_contents) 91 | bq_delete(url, query = query) 92 | 93 | invisible(x) 94 | } 95 | 96 | #' @export 97 | #' @rdname api-dataset 98 | bq_dataset_tables <- function( 99 | x, 100 | page_size = 50, 101 | max_pages = Inf, 102 | warn = TRUE, 103 | ... 
104 | ) {
105 |   x <- as_bq_dataset(x)
106 |   url <- bq_path(x$project, x$dataset, "")
107 |
108 |   data <- bq_get_paginated(
109 |     url,
110 |     query = list(fields = "tables(tableReference,type)"),
111 |     page_size = page_size,
112 |     max_pages = max_pages,
113 |     warn = warn
114 |   )
115 |
116 |   tables <- unlist(lapply(data, function(x) x$tables), recursive = FALSE)
117 |   lapply(tables, function(x) as_bq_table(x$tableReference, type = x$type))
118 | }
119 |
-------------------------------------------------------------------------------- /R/bq-field.R: --------------------------------------------------------------------------------
1 | #' BigQuery field (and fields) class
2 | #'
3 | #' `bq_field()` and `bq_fields()` create; `as_bq_field()` and `as_bq_fields()`
4 | #' coerce from lists.
5 | #'
6 | #' @param name The field name. The name must contain only letters (a-z, A-Z),
7 | #'   numbers (0-9), or underscores (_), and must start with a letter or
8 | #'   underscore. The maximum length is 300 characters.
9 | #' @param type The field data type. Possible values include:
10 | #'   `"STRING"`, `"BYTES"`, `"INTEGER"`, `"FLOAT"`, `"BOOLEAN"`, `"TIMESTAMP"`,
11 | #'   `"DATE"`, `"TIME"`, `"DATETIME"`, `"GEOGRAPHY"`, `"NUMERIC"`,
12 | #'   `"BIGNUMERIC"`, `"JSON"`, `"RECORD"`.
13 | #' @param mode The field mode. Possible values include: `"NULLABLE"`,
14 | #'   `"REQUIRED"`, and `"REPEATED"`.
15 | #' @param fields For a field of type "record", a list of sub-fields.
16 | #' @param description The field description. The maximum length is 1,024
17 | #'   characters.
18 | #' @param x A list of `bq_fields`
19 | #' @seealso `bq_field()` corresponds to a `TableFieldSchema`, see
20 | #'   <https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema>
21 | #'   for more details.
22 | #' @export
23 | #' @examples
24 | #' bq_field("name", "string")
25 | #'
26 | #' as_bq_fields(list(
27 | #'   list(name = "name", type = "string"),
28 | #'   bq_field("age", "integer")
29 | #' ))
30 | #'
31 | #' # as_bq_fields() can also take a data frame
32 | #' as_bq_fields(mtcars)
33 | bq_field <- function(
34 |   name,
35 |   type,
36 |   mode = "NULLABLE",
37 |   fields = list(),
38 |   description = NULL
39 | ) {
40 |   check_string(name)
41 |   check_string(type)
42 |   check_string(mode)
43 |   check_string(description, allow_null = TRUE)
44 |
45 |   structure(
46 |     list(
47 |       name = name,
48 |       type = toupper(type),
49 |       mode = toupper(mode),
50 |       fields = as_bq_fields(fields),
51 |       description = description %||% ""
52 |     ),
53 |     class = "bq_field"
54 |   )
55 | }
56 |
57 | #' @export
58 | as_json.bq_field <- function(x) {
59 |   list(
60 |     name = unbox(x$name),
61 |     type = unbox(x$type),
62 |     mode = unbox(x$mode),
63 |     fields = as_json(x$fields),
64 |     description = unbox(x$description)
65 |   )
66 | }
67 |
68 | #' @export
69 | #' @rdname bq_field
70 | bq_fields <- function(x) {
71 |   structure(x, class = "bq_fields")
72 | }
73 |
74 | #' @export
75 | as_json.bq_fields <- function(x) {
76 |   lapply(x, as_json)
77 | }
78 |
79 | #' @export
80 | #' @rdname bq_field
81 | as_bq_field <- function(x) UseMethod("as_bq_field")
82 |
83 | #' @export
84 | as_bq_field.bq_field <- function(x) x
85 |
86 | #' @export
87 | as_bq_field.list <- function(x) {
88 |   bq_field(
89 |     name = x$name,
90 |     type = x$type,
91 |     mode = x$mode %||% "NULLABLE",
92 |     fields = lapply(x$fields, as_bq_field),
93 |     description = x$description %||% ""
94 |   )
95 | }
96 |
97 | #' @export
98 | #' @rdname bq_field
99 | as_bq_fields <- function(x) UseMethod("as_bq_fields")
100 |
101 | #' @export
102 | as_bq_fields.NULL <- function(x) x
103 |
104 | #' @export
105 | as_bq_fields.bq_fields <- function(x) x
106 |
107 | #' @export
108 | as_bq_fields.data.frame <- function(x) {
109 |   types <- vapply(x, data_type, character(1))
110 |   fields <- Map(function(type, name) bq_field(name, type), types, names(x))
111 |   bq_fields(unname(fields))
112 | }
113 |
114 | #' @export
115 | as_bq_fields.list <- function(x) {
116 |   bq_fields(lapply(x, as_bq_field))
117 | }
118 |
119 | #' @export
120 | format.bq_fields <- function(x, ...) {
121 |   if (length(x) == 0) {
122 |     return("")
123 |   }
124 |
125 |   fields <- lapply(x, format)
126 |   gsub("\\n\\s+$", "\n", indent(paste0(fields, collapse = "")))
127 | }
128 |
129 | #' @export
130 | print.bq_fields <- function(x, ...) {
131 |   cat_line("<bq_fields>\n", format(x, ...))
132 |   invisible(x)
133 | }
134 |
135 | #' @export
136 | format.bq_field <- function(x, ...) {
137 |   type <- x$type
138 |   if (x$mode != "NULLABLE") {
139 |     type <- paste0(type, ": ", tolower(x$mode))
140 |   }
141 |
142 |   paste0(x$name, " <", type, ">", "\n", format(x$fields))
143 | }
144 |
145 | #' @export
146 | print.bq_field <- function(x, ...) {
147 |   cat_line("<bq_field> ", format(x, ...))
148 |   invisible(x)
149 | }
150 |
151 | data_type <- function(x) {
152 |   if (is.factor(x)) return("STRING")
153 |   if (inherits(x, "POSIXt")) return("TIMESTAMP")
154 |   if (inherits(x, "hms")) return("TIME")
155 |   if (inherits(x, "wk_wkt")) return("GEOGRAPHY")
156 |   if (inherits(x, "blob")) return("BYTES")
157 |   if (inherits(x, "Date")) return("DATE")
158 |
159 |   switch(
160 |     typeof(x),
161 |     character = "STRING",
162 |     logical = "BOOLEAN",
163 |     double = "FLOAT",
164 |     integer = "INTEGER",
165 |     cli::cli_abort("Unsupported type {.str {typeof(x)}}.")
166 |   )
167 | }
168 |
-------------------------------------------------------------------------------- /R/bq-job.R: --------------------------------------------------------------------------------
1 | #' BigQuery job: retrieve metadata
2 | #'
3 | #' To perform a job, see [api-perform]. These functions all retrieve metadata
4 | #' (in various forms) about an existing job.
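#'
#' As a quick sketch (assuming `job` holds an existing [bq_job], e.g. one
#' returned by `bq_project_jobs()`):
#'
#' ```r
#' meta <- bq_job_meta(job)     # full metadata list
#' bq_job_status(job)$state     # just the status component, e.g. "DONE"
#' bq_job_show_statistics(job)  # prints input/output/billed bytes
#' ```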
5 | #'
6 | #' @section Google BigQuery API documentation:
7 | #' * [get](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get)
8 | #'
9 | #' @examplesIf bq_testable()
10 | #' jobs <- bq_project_jobs(bq_test_project())
11 | #' jobs[[1]]
12 | #'
13 | #' # Show statistics about job
14 | #' bq_job_show_statistics(jobs[[1]])
15 | #'
16 | #' # Wait for job to complete
17 | #' bq_job_wait(jobs[[1]])
18 | #' @name api-job
19 | NULL
20 |
21 | #' @export
22 | #' @name api-job
23 | #' @param x A [bq_job]
24 | #' @param fields An optional field specification for
25 | #'   [partial response](https://cloud.google.com/bigquery/docs/api-performance#partial-response)
26 | bq_job_meta <- function(x, fields = NULL) {
27 |   x <- as_bq_job(x)
28 |   bq_get(
29 |     bq_path(x$project, jobs = x$job),
30 |     query = list(
31 |       location = x$location,
32 |       fields = fields
33 |     )
34 |   )
35 | }
36 |
37 | bq_job_table <- function(x) {
38 |   meta <- bq_job_meta(x, "configuration(query(destinationTable))")
39 |   as_bq_table(meta$configuration$query$destinationTable)
40 | }
41 |
42 | #' @export
43 | #' @name api-job
44 | bq_job_status <- function(x) {
45 |   bq_job_meta(x, "status")$status
46 | }
47 |
48 | #' @export
49 | #' @name api-job
50 | bq_job_show_statistics <- function(x) {
51 |   stats <- bq_job_meta(x, "statistics")$statistics
52 |
53 |   if ("load" %in% names(stats)) {
54 |     in_bytes <- as.numeric(stats$load$inputFileBytes)
55 |     out_bytes <- as.numeric(stats$load$outputBytes)
56 |     cli::cli_inform("Input: {prettyunits::pretty_bytes(in_bytes)}")
57 |     cli::cli_inform("Output: {prettyunits::pretty_bytes(out_bytes)}")
58 |   }
59 |
60 |   if ("query" %in% names(stats)) {
61 |     bytes <- as.numeric(stats$query$totalBytesBilled)
62 |     cli::cli_inform("Billed: {prettyunits::pretty_bytes(bytes)}")
63 |   }
64 |
65 |   invisible(x)
66 | }
67 |
68 | #' @param quiet If `FALSE`, displays a progress bar; if `TRUE`, is silent;
69 | #' if `NA`, picks based on whether or not you're in an interactive context.
70 | #' @param pause amount of time to wait between status requests 71 | #' @export 72 | #' @name api-job 73 | #' @inheritParams rlang::args_error_context 74 | bq_job_wait <- function( 75 | x, 76 | quiet = getOption("bigrquery.quiet"), 77 | pause = 0.5, 78 | call = caller_env() 79 | ) { 80 | x <- as_bq_job(x) 81 | quiet <- check_quiet(quiet) 82 | check_number_decimal(pause) 83 | 84 | if (!quiet) { 85 | cli::cli_progress_bar( 86 | format = "Running job {x} {cli::pb_spin} {cli::pb_elapsed}", 87 | total = NA, 88 | clear = FALSE 89 | ) 90 | } 91 | 92 | repeat { 93 | if (!quiet) cli::cli_progress_update() 94 | # https://cloud.google.com/bigquery/docs/error-messages 95 | # Switch to req_retry() when we move to httr2 96 | status <- tryCatch( 97 | bq_job_status(x), 98 | bigrquery_http_503 = function(err) NULL 99 | ) 100 | if (!quiet) cli::cli_progress_update() 101 | 102 | if (!is.null(status) && status$state == "DONE") break 103 | Sys.sleep(pause) 104 | } 105 | if (!quiet) cli::cli_progress_done() 106 | 107 | errors <- status$errors 108 | if (length(errors) > 0) { 109 | if (length(errors) > 1) { 110 | # First error says to look in errors[] 111 | errors <- errors[-1] 112 | } 113 | 114 | bullets <- map_chr(errors, function(x) { 115 | paste0(x$message, " [", x$reason, "]") 116 | }) 117 | bullets <- set_names(bullets, "x") 118 | cli::cli_abort(c("Job {x} failed", bullets), call = call) 119 | } 120 | 121 | if (!quiet) { 122 | cli::cli_inform("Job complete") 123 | bq_job_show_statistics(x) 124 | } 125 | 126 | invisible(x) 127 | } 128 | -------------------------------------------------------------------------------- /R/bq-param.R: -------------------------------------------------------------------------------- 1 | #' Explicitly define query parameters 2 | #' 3 | #' By default, bigrquery will assume vectors of length 1 are scalars, 4 | #' and longer vectors are arrays. If you need to pass a length-1 array, 5 | #' you'll need to explicitly use `bq_param_array()`. 
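#'
#' Parameters are referenced in the query with an `@` prefix; as a sketch
#' (the project, table, and values here are hypothetical):
#'
#' ```r
#' bq_project_query(
#'   "my-project",
#'   "SELECT * FROM my_dataset.my_table WHERE state IN UNNEST(@states)",
#'   parameters = list(states = bq_param_array("KS"))
#' )
#' ```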
6 | #' 7 | #' @param value vector of parameter values 8 | #' @param name name of the parameter in the query, omitting the `@` 9 | #' @param type BigQuery type of the parameter 10 | #' @keywords internal 11 | #' @export 12 | #' @examples 13 | #' # bq_param() automatically picks scalar vs array based on length 14 | #' bq_param("a") 15 | #' bq_param(c("a", "b", "c")) 16 | #' 17 | #' # use bq_param_array() to create a length-1 array 18 | #' bq_param_array("a") 19 | bq_param <- function(value, type = NULL, name = NULL) { 20 | if (length(value) > 1) { 21 | bq_param_array(value, type, name) 22 | } else { 23 | bq_param_scalar(value, type, name) 24 | } 25 | } 26 | 27 | #' @rdname bq_param 28 | #' @export 29 | bq_param_scalar <- function(value, type = NULL, name = NULL) { 30 | if (length(value) != 1) { 31 | cli::cli_abort("{.arg value} must be length 1, not {length(value)}.") 32 | } 33 | 34 | if (is.null(type)) { 35 | type <- data_type(value) 36 | } 37 | structure( 38 | list(value = value, name = name, type = type), 39 | class = c("bq_param_scalar", "bq_param") 40 | ) 41 | } 42 | 43 | #' @rdname bq_param 44 | #' @export 45 | bq_param_array <- function(value, type = NULL, name = NULL) { 46 | if (length(value) == 0) { 47 | cli::cli_abort("{.arg value} can't be zero-length.") 48 | } 49 | 50 | if (is.null(type)) { 51 | type <- data_type(value) 52 | } 53 | structure( 54 | list(value = value, name = name, type = type), 55 | class = c("bq_param_array", "bq_param") 56 | ) 57 | } 58 | 59 | as_bq_param <- function(x, name) { 60 | if (inherits(x, "bq_param")) { 61 | if (!is.null(name) & is.null(x$name)) { 62 | x$name <- name 63 | } 64 | x 65 | } else { 66 | bq_param(name = name, x) 67 | } 68 | } 69 | 70 | bq_params <- function(x) { 71 | structure(x, class = "bq_params") 72 | } 73 | 74 | as_bq_params <- function(x) { 75 | params <- lapply(names(x), function(name) { 76 | as_bq_param(x[[name]], name) 77 | }) 78 | bq_params(params) 79 | } 80 | 81 | #' @export 82 | as_json.bq_params <- function(x) { 83 | json <- lapply(x, as_json) 84 | unname(json) 85 | } 86 | 87 | 88 | #' @export 89 | as_json.bq_param_scalar <- function(x) { 90 | list( 91 | name = x$name, 92 | parameterType = list(type = unbox(x$type)), 93 | parameterValue = list(value = unbox(x$value)) 94 | ) 95 | } 96 | 97 | #' @export 98 | as_json.bq_param_array <- function(x) { 99 | values <- unname(c(x$value)) 100 | values <- lapply(values, function(x) list(value = unbox(x))) 101 | list( 102 | name = x$name, 103 | parameterType = list( 104 | type = "ARRAY", 105 | arrayType = list(type = unbox(x$type)) 106 | ), 107 | parameterValue = list(arrayValues = values) 108 | ) 109 | } 110 | 111 | #' @export 112 | print.bq_param <- function(x, ...) { 113 | cat(show_json(as_json(x))) 114 | invisible(x) 115 | } 116 | 117 | #' @export 118 | print.bq_params <- function(x, ...) { 119 | cat(show_json(as_json(x))) 120 | invisible(x) 121 | } 122 | -------------------------------------------------------------------------------- /R/bq-parse.R: -------------------------------------------------------------------------------- 1 | bq_parse_single <- function(value, type, ...) { 2 | field <- bq_field("", type, ...) 3 | field_j <- jsonlite::toJSON(as_json(field)) 4 | value_j <- jsonlite::toJSON(value, auto_unbox = TRUE) 5 | 6 | bq_field_init(field_j, value_j) 7 | } 8 | 9 | v <- function(x) list(v = x) 10 | vs <- function(...) lapply(list(...), v) 11 | f <- function(...) 
list(f = list(...))
12 |
-------------------------------------------------------------------------------- /R/bq-project.R: --------------------------------------------------------------------------------
1 | #' BigQuery project methods
2 | #'
3 | #' Projects have two primary components: datasets and jobs. Unlike other
4 | #' BigQuery objects, there is no accompanying `bq_project` S3 class because a
5 | #' project is a simple string.
6 | #'
7 | #' @section Google BigQuery API documentation:
8 | #' * [datasets](https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list)
9 | #' * [jobs](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list)
10 | #'
11 | #' One day we might also expose the general [project metadata](https://cloud.google.com/resource-manager/reference/rest/v1/projects).
12 | #'
13 | #' @return
14 | #' * `bq_project_datasets()`: a list of [bq_dataset]s
15 | #' * `bq_project_jobs()`: a list of [bq_job]s.
16 | #'
17 | #' @name api-project
18 | #' @examplesIf bq_testable()
19 | #' bq_project_datasets("bigquery-public-data")
20 | #' bq_project_datasets("githubarchive")
21 | #'
22 | #' bq_project_jobs(bq_test_project(), page_size = 10)
23 | NULL
24 |
25 | #' @export
26 | #' @rdname api-project
27 | #' @param x A string giving a project name.
28 | #' @inheritParams bq_projects
29 | bq_project_datasets <- function(
30 |   x,
31 |   page_size = 100,
32 |   max_pages = 1,
33 |   warn = TRUE
34 | ) {
35 |   check_string(x)
36 |   check_number_whole(page_size, min = 1)
37 |   check_number_whole(max_pages, min = 1, allow_infinite = TRUE)
38 |   check_bool(warn)
39 |
40 |   pages <- bq_get_paginated(
41 |     bq_path(x, ""),
42 |     query = list(fields = "datasets(datasetReference)"),
43 |     page_size = page_size,
44 |     max_pages = max_pages,
45 |     warn = warn
46 |   )
47 |
48 |   datasets <- unlist(lapply(pages, function(x) x$datasets), recursive = FALSE)
49 |   lapply(datasets, function(x) {
50 |     ref <- x$datasetReference
51 |     bq_dataset(ref$projectId, ref$datasetId)
52 |   })
53 | }
54 |
55 | #' @export
56 | #' @rdname api-project
57 | bq_project_jobs <- function(x, page_size = 100, max_pages = 1, warn = TRUE) {
58 |   check_string(x)
59 |   check_number_whole(page_size, min = 1)
60 |   check_number_whole(max_pages, min = 1, allow_infinite = TRUE)
61 |   check_bool(warn)
62 |
63 |   pages <- bq_get_paginated(
64 |     bq_path(x, jobs = ""),
65 |     query = list(fields = "jobs(jobReference)"),
66 |     page_size = page_size,
67 |     max_pages = max_pages,
68 |     warn = warn
69 |   )
70 |   jobs <- unlist(lapply(pages, function(x) x$jobs), recursive = FALSE)
71 |   lapply(jobs, function(x) as_bq_job(x$jobReference))
72 | }
73 |
-------------------------------------------------------------------------------- /R/bq-projects.R: --------------------------------------------------------------------------------
1 | #' List available projects
2 | #'
3 | #' List all projects that you have access to. You can also work with
4 | #' [public datasets](https://cloud.google.com/bigquery/public-data/),
5 | #' but you will need to provide a `billing` project whenever you perform
6 | #' any non-free operation.
7 | #'
8 | #' @section Google BigQuery API documentation:
9 | #' * [list](https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list)
10 | #' @param page_size Number of items per page.
11 | #' @param max_pages Maximum number of pages to retrieve. Use `Inf` to retrieve
12 | #'   all pages (this may take a long time!)
13 | #' @param warn If `TRUE`, warn when there are unretrieved pages.
14 | #' @return A character vector.
15 | #' @export
16 | #' @examplesIf bq_testable()
17 | #' bq_projects()
18 | bq_projects <- function(page_size = 100, max_pages = 1, warn = TRUE) {
19 |   pages <- bq_get_paginated(
20 |     "projects",
21 |     query = list(fields = "projects(projectReference(projectId))"),
22 |     page_size = page_size,
23 |     max_pages = max_pages,
24 |     warn = warn
25 |   )
26 |   projects <- unlist(lapply(pages, function(x) x$projects), recursive = FALSE)
27 |   map_chr(projects, function(x) x$projectReference$projectId)
28 | }
29 |
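# As a sketch, this fetches the id of every accessible project, however many
# pages that takes (assumes you have already authenticated with bq_auth()):
#
#   bq_projects(page_size = 500, max_pages = Inf)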
-------------------------------------------------------------------------------- /R/bq-query.R: --------------------------------------------------------------------------------
1 | #' Submit query to BigQuery
2 | #'
3 | #' These submit a query (using [bq_perform_query()]) and then wait for it
4 | #' to complete (with [bq_job_wait()]). All BigQuery queries save their results
5 | #' into a table (temporary or otherwise), so these functions return a [bq_table]
6 | #' which you can then query for more information.
7 | #'
8 | #' @param x Either a project (a string) or a [bq_dataset].
9 | #' @param billing If you query a dataset that you only have read access
10 | #'   for, such as a public dataset, you must also submit a `billing` project.
11 | #' @inheritParams bq_perform_query
12 | #' @inheritParams api-job
13 | #' @param ... Passed on to [bq_perform_query()]
14 | #' @name bq_query
15 | #' @return A [bq_table]
16 | #' @examplesIf bq_testable()
17 | #' # Querying a project requires full name in query
18 | #' tb <- bq_project_query(
19 | #'   bq_test_project(),
20 | #'   "SELECT count(*) FROM publicdata.samples.natality"
21 | #' )
22 | #' bq_table_fields(tb)
23 | #' bq_table_download(tb)
24 | #'
25 | #' # Querying a dataset sets default dataset so you can use bare table name,
26 | #' # but for public data, you'll need to set a project to bill.
27 | #' ds <- bq_dataset("publicdata", "samples")
28 | #' tb <- bq_dataset_query(ds,
29 | #'   query = "SELECT count(*) FROM natality",
30 | #'   billing = bq_test_project()
31 | #' )
32 | #' bq_table_download(tb)
33 | #'
34 | #' tb <- bq_dataset_query(ds,
35 | #'   query = "SELECT count(*) FROM natality WHERE state = @state",
36 | #'   parameters = list(state = "KS"),
37 | #'   billing = bq_test_project()
38 | #' )
39 | #' bq_table_download(tb)
40 | NULL
41 |
42 | #' @export
43 | #' @rdname bq_query
44 | bq_project_query <- function(
45 |   x,
46 |   query,
47 |   destination_table = NULL,
48 |   ...,
49 |   quiet = NA
50 | ) {
51 |   check_string(x)
52 |   query <- as_query(query)
53 |   if (!is.null(destination_table)) {
54 |     destination_table <- as_bq_table(destination_table)
55 |   }
56 |   check_bool(quiet, allow_na = TRUE)
57 |
58 |   job <- bq_perform_query(
59 |     query,
60 |     billing = x,
61 |     destination_table = destination_table,
62 |     ...
63 |   )
64 |   bq_job_wait(job, quiet = quiet)
65 |   bq_job_table(job)
66 | }
67 |
68 | #' @export
69 | #' @rdname bq_query
70 | bq_dataset_query <- function(
71 |   x,
72 |   query,
73 |   destination_table = NULL,
74 |   ...,
75 |   billing = NULL,
76 |   quiet = NA
77 | ) {
78 |   x <- as_bq_dataset(x)
79 |   query <- as_query(query)
80 |   if (!is.null(destination_table)) {
81 |     destination_table <- as_bq_table(destination_table)
82 |   }
83 |   check_string(billing, allow_null = TRUE)
84 |   check_bool(quiet, allow_na = TRUE)
85 |
86 |   job <- bq_perform_query(
87 |     query,
88 |     billing = billing %||% x$project,
89 |     destination_table = destination_table,
90 |     default_dataset = x,
91 |     ...
92 |   )
93 |   bq_job_wait(job, quiet = quiet)
94 |   bq_job_table(job)
95 | }
96 |
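# As a sketch, results can be persisted to a named table instead of the
# default temporary table (the project/dataset/table names are hypothetical):
#
#   bq_project_query(
#     "my-project",
#     "SELECT state, count(*) AS n FROM publicdata.samples.natality GROUP BY state",
#     destination_table = bq_table("my-project", "my_dataset", "natality_counts")
#   )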
92 | ) 93 | bq_job_wait(job, quiet = quiet) 94 | bq_job_table(job) 95 | } 96 | -------------------------------------------------------------------------------- /R/bq-table.R: -------------------------------------------------------------------------------- 1 | #' BigQuery tables 2 | #' 3 | #' Basic create-read-update-delete verbs for tables, as well as functions 4 | #' uploading data (`bq_table_upload()`), saving to/loading from Google 5 | #' Cloud Storage (`bq_table_load()`, `bq_table_save()`), and getting 6 | #' various values from the metadata. 7 | #' 8 | #' @param x A [bq_table], or an object coercible to a `bq_table`. 9 | #' @inheritParams api-job 10 | #' @inheritParams api-perform 11 | #' @inheritParams bq_projects 12 | #' @section Google BigQuery API documentation: 13 | #' * [insert](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert) 14 | #' * [get](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get) 15 | #' * [delete](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete) 16 | #' @return 17 | #' * `bq_table_copy()`, `bq_table_create()`, `bq_table_delete()`, `bq_table_upload()`: 18 | #' an invisible [bq_table] 19 | #' * `bq_table_exists()`: either `TRUE` or `FALSE`. 20 | #' * `bq_table_size()`: the size of the table in bytes 21 | #' * `bq_table_fields()`: a [bq_fields]. 22 | #' 23 | #' @examplesIf bq_testable() 24 | #' ds <- bq_test_dataset() 25 | #' 26 | #' bq_mtcars <- bq_table(ds, "mtcars") 27 | #' bq_table_exists(bq_mtcars) 28 | #' 29 | #' bq_table_create( 30 | #' bq_mtcars, 31 | #' fields = mtcars, 32 | #' friendly_name = "Motor Trend Car Road Tests", 33 | #' description = "The data was extracted from the 1974 Motor Trend US magazine", 34 | #' labels = list(category = "example") 35 | #' ) 36 | #' bq_table_exists(bq_mtcars) 37 | #' 38 | #' bq_table_upload(bq_mtcars, mtcars) 39 | #' 40 | #' bq_table_fields(bq_mtcars) 41 | #' bq_table_size(bq_mtcars) 42 | #' str(bq_table_meta(bq_mtcars)) 43 | #' 44 | #' bq_table_delete(bq_mtcars) 45 | #' bq_table_exists(bq_mtcars) 46 | #' 47 | #' my_natality <- bq_table(ds, "mynatality") 48 | #' bq_table_copy("publicdata.samples.natality", my_natality) 49 | #' @name api-table 50 | NULL 51 | 52 | #' @export 53 | #' @rdname api-table 54 | #' @param fields A [bq_fields] specification, or something coercible to it 55 | #' (like a data frame). 56 | bq_table_create <- function(x, fields = NULL, ...) 
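# Implementation sketch: build the dataset's tables-collection URL, assemble
# a body holding the tableReference (plus a schema when `fields` is given),
# and POST it; extra arguments in `...` are folded into the body by bq_body().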
{ 57 | x <- as_bq_table(x) 58 | 59 | url <- bq_path(x$project, x$dataset, "") 60 | body <- list( 61 | tableReference = tableReference(x) 62 | ) 63 | if (!is.null(fields)) { 64 | fields <- as_bq_fields(fields) 65 | body$schema <- list(fields = as_json(fields)) 66 | } 67 | 68 | bq_post(url, body = bq_body(body, ...)) 69 | 70 | x 71 | } 72 | 73 | #' @export 74 | #' @rdname api-table 75 | #' @inheritParams api-job 76 | bq_table_meta <- function(x, fields = NULL) { 77 | x <- as_bq_table(x) 78 | url <- bq_path(x$project, x$dataset, x$table) 79 | bq_get(url, query = list(fields = fields)) 80 | } 81 | 82 | #' @export 83 | #' @rdname api-table 84 | bq_table_fields <- function(x) { 85 | meta <- bq_table_meta(x, fields = "schema") 86 | fields <- meta$schema$fields 87 | 88 | bq_fields(lapply(fields, as_bq_field)) 89 | } 90 | 91 | #' @export 92 | #' @rdname api-table 93 | bq_table_size <- function(x) { 94 | meta <- bq_table_meta(x, fields = "numBytes") 95 | bytes <- as.numeric(meta$numBytes) 96 | structure(bytes, class = "bq_bytes") 97 | } 98 | 99 | #' @export 100 | #' @rdname api-table 101 | bq_table_nrow <- function(x) { 102 | meta <- bq_table_meta(x, fields = "numRows") 103 | as.numeric(meta$numRows) 104 | } 105 | 106 | #' @export 107 | #' @rdname api-table 108 | bq_table_exists <- function(x) { 109 | x <- as_bq_table(x) 110 | url <- bq_path(x$project, x$dataset, x$table) 111 | bq_exists(url) 112 | } 113 | 114 | #' @export 115 | #' @rdname api-table 116 | bq_table_delete <- function(x) { 117 | x <- as_bq_table(x) 118 | url <- bq_path(x$project, x$dataset, x$table) 119 | invisible(bq_delete(url)) 120 | } 121 | 122 | #' @export 123 | #' @rdname api-table 124 | #' @inheritParams bq_perform_copy 125 | #' @param dest Source and destination [bq_table]s. 126 | bq_table_copy <- function(x, dest, ..., quiet = NA) { 127 | x <- as_bq_table(x) 128 | dest <- as_bq_table(dest) 129 | 130 | job <- bq_perform_copy(x, dest, ...) 131 | bq_job_wait(job, quiet = quiet) 132 | 133 | dest 134 | } 135 | 136 | #' @export 137 | #' @rdname api-table 138 | #' @inheritParams api-perform 139 | bq_table_upload <- function(x, values, ..., quiet = NA) { 140 | x <- as_bq_table(x) 141 | 142 | job <- bq_perform_upload(x, values, ...) 143 | bq_job_wait(job, quiet = quiet) 144 | 145 | invisible(x) 146 | } 147 | 148 | #' @export 149 | #' @rdname api-table 150 | bq_table_save <- function(x, destination_uris, ..., quiet = NA) { 151 | x <- as_bq_table(x) 152 | 153 | job <- bq_perform_extract(x, destination_uris = destination_uris, ...) 154 | bq_job_wait(job, quiet = quiet) 155 | 156 | invisible(x) 157 | } 158 | 159 | #' @export 160 | #' @rdname api-table 161 | bq_table_load <- function(x, source_uris, ..., quiet = NA) { 162 | x <- as_bq_table(x) 163 | 164 | job <- bq_perform_load(x, source_uris = source_uris, ...) 
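  # bq_perform_load() only submits the load job; bq_job_wait() polls until
  # BigQuery reports completion, so the table is ready once this returns.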
165 |   bq_job_wait(job, quiet = quiet)
166 | 
167 |   invisible(x)
168 | }
169 | 
170 | #' @export
171 | #' @rdname api-table
172 | bq_table_patch <- function(x, fields) {
173 |   x <- as_bq_table(x)
174 | 
175 |   url <- bq_path(x$project, x$dataset, x$table)
176 |   body <- list(
177 |     tableReference = tableReference(x)
178 |   )
179 |   fields <- as_bq_fields(fields)
180 |   body$schema <- list(fields = as_json(fields))
181 |   bq_patch(url, body)
182 | }
183 | 
--------------------------------------------------------------------------------
/R/bq-test.R:
--------------------------------------------------------------------------------
 1 | #' Project to use for testing bigrquery
 2 | #'
 3 | #' @description
 4 | #' You'll need to set the `BIGQUERY_TEST_PROJECT` (name of a project) and
 5 | #' `BIGQUERY_TEST_BUCKET` (name of a bucket) env vars in order to run bigrquery
 6 | #' tests locally. I recommend creating a new project because the tests involve
 7 | #' both reading and writing in BigQuery and Cloud Storage.
 8 | #'
 9 | #' The `BIGQUERY_TEST_PROJECT` must have billing enabled. While
10 | #' logged in, via `bq_auth()`, as a user with permission to work in
11 | #' `BIGQUERY_TEST_PROJECT`, run `bq_test_init()` once to perform some setup.
12 | #'
13 | #' @section Testing:
14 | #' In tests, `bq_test_project()` (and hence `bq_test_dataset()`) will
15 | #' automatically skip if auth and a test project are not available.
16 | #'
17 | #' @return `bq_test_project()` returns the name of a project suitable for use in
18 | #'   testing. `bq_test_dataset()` creates a temporary dataset whose lifetime is
19 | #'   tied to the lifetime of the object that it returns.
20 | #' @export
21 | #' @keywords internal
22 | #' @examplesIf bq_testable()
23 | #' ds <- bq_test_dataset()
24 | #' bq_mtcars <- bq_table_upload(bq_table(ds, "mtcars"), mtcars)
25 | #'
26 | #' # dataset and table will be automatically deleted when ds is GC'd
27 | bq_test_project <- function() {
28 |   if (is_testing() && !bq_authable()) {
29 |     testthat::skip("No BigQuery access credentials available")
30 |   }
31 | 
32 |   env <- Sys.getenv("BIGQUERY_TEST_PROJECT")
33 |   if (!identical(env, "")) {
34 |     return(env)
35 |   }
36 | 
37 |   if (is_testing()) {
38 |     testthat::skip("BIGQUERY_TEST_PROJECT not set")
39 |   } else {
40 |     cli::cli_abort(
41 |       "{.envvar BIGQUERY_TEST_PROJECT} envvar must be set to a project name."
42 |     )
43 |   }
44 | }
45 | 
46 | #' @export
47 | #' @rdname bq_test_project
48 | #' @param name Dataset name - used only for testing.
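#' @param location Dataset location, passed on to [bq_dataset_create()] by
#'   `bq_test_dataset()` when it creates the temporary dataset.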
49 | bq_test_init <- function(name = "basedata") { 50 | proj <- bq_test_project() 51 | 52 | basedata <- bq_dataset(proj, name) 53 | if (!bq_dataset_exists(basedata)) { 54 | bq_dataset_create(basedata) 55 | } 56 | 57 | bq_mtcars <- bq_table(basedata, "mtcars") 58 | if (!bq_table_exists(bq_mtcars)) { 59 | job <- bq_table_upload(bq_mtcars, values = datasets::mtcars) 60 | } 61 | } 62 | 63 | #' @export 64 | #' @rdname bq_test_project 65 | bq_test_dataset <- function(name = random_name(), location = "US") { 66 | ds <- bq_dataset(bq_test_project(), name) 67 | bq_dataset_create(ds, location = location) 68 | 69 | env <- new.env() 70 | reg.finalizer( 71 | env, 72 | function(e) bq_dataset_delete(ds, delete_contents = TRUE), 73 | onexit = TRUE 74 | ) 75 | attr(ds, "env") <- env 76 | 77 | ds 78 | } 79 | 80 | bq_test_table <- function() { 81 | ds <- env_cache(the, "test_dataset", bq_test_dataset()) 82 | bq_table(ds, random_name()) 83 | } 84 | 85 | #' @export 86 | #' @rdname bq_test_project 87 | bq_testable <- function() { 88 | bq_authable() && !identical(Sys.getenv("BIGQUERY_TEST_PROJECT"), "") 89 | } 90 | 91 | #' @export 92 | #' @rdname bq_test_project 93 | bq_authable <- function() { 94 | bq_has_token() || (is_interactive() && !is_testing()) 95 | } 96 | 97 | #' @export 98 | #' @rdname bq_test_project 99 | gs_test_bucket <- function() { 100 | env <- Sys.getenv("BIGQUERY_TEST_BUCKET") 101 | if (!identical(env, "")) { 102 | return(env) 103 | } 104 | 105 | if (is_testing()) { 106 | testthat::skip("BIGQUERY_TEST_BUCKET not set") 107 | } else { 108 | cli::cli_abort( 109 | "{.envvar BIGQUERY_TEST_BUCKET} must be set to a bucket name." 110 | ) 111 | } 112 | } 113 | 114 | 115 | #' @export 116 | #' @rdname bq_test_project 117 | gs_test_object <- function(name = random_name()) { 118 | gs_object(gs_test_bucket(), name) 119 | } 120 | 121 | 122 | random_name <- function(n = 10) { 123 | paste0("TESTING_", paste(sample(letters, n, replace = TRUE), collapse = "")) 124 | } 125 | 126 | is_testing <- function() identical(Sys.getenv("TESTTHAT"), "true") 127 | 128 | is_snapshot <- function() identical(Sys.getenv("TESTTHAT_IS_SNAPSHOT"), "true") 129 | 130 | skip_if_no_auth <- function() { 131 | testthat::skip_if_not(bq_has_token(), "Authentication not available") 132 | } 133 | -------------------------------------------------------------------------------- /R/camelCase.R: -------------------------------------------------------------------------------- 1 | camelCase <- function(x) { 2 | gsub("_(.)", "\\U\\1", x, perl = TRUE) 3 | } 4 | 5 | toCamel <- function(x) { 6 | if (is.list(x)) { 7 | x[] <- lapply(x, toCamel) 8 | } 9 | 10 | if (!is.null(names(x))) { 11 | names(x) <- camelCase(names(x)) 12 | } 13 | 14 | x 15 | } 16 | -------------------------------------------------------------------------------- /R/connections-page.R: -------------------------------------------------------------------------------- 1 | # nocov start 2 | 3 | # Capture connection expression for pane 4 | connection_capture <- function() { 5 | if (is.null(getOption("connectionObserver"))) { 6 | return() 7 | } 8 | 9 | addTaskCallback(function(expr, ...) { 10 | tryCatch( 11 | { 12 | # notify if this is an assignment we can replay 13 | if (is_call(expr, c("<-", "=")) && is_call(expr[[3]], "dbConnect")) { 14 | on_connection_opened( 15 | eval(expr[[2]]), 16 | paste(c("library(bigrquery)", deparse(expr)), collapse = "\n") 17 | ) 18 | } 19 | }, 20 | error = function(e) { 21 | warning( 22 | "Could not notify connection observer. ", 23 | e$message, 24 | call. 
= FALSE 25 | ) 26 | } 27 | ) 28 | 29 | # always return false so the task callback is run at most once 30 | FALSE 31 | }) 32 | } 33 | 34 | # https://rstudio.github.io/rstudio-extensions/connections-contract.html#connection-closed 35 | on_connection_closed <- function(con) { 36 | observer <- getOption("connectionObserver") 37 | if (is.null(observer)) { 38 | return(invisible(NULL)) 39 | } 40 | 41 | observer$connectionClosed(bq_type, con@project) 42 | } 43 | 44 | # https://rstudio.github.io/rstudio-extensions/connections-contract.html#connection-updated 45 | on_connection_updated <- function(con, hint) { 46 | observer <- getOption("connectionObserver") 47 | if (is.null(observer)) { 48 | return(invisible(NULL)) 49 | } 50 | 51 | observer$connectionUpdated(bq_type, con@project, hint = hint) 52 | } 53 | 54 | # https://rstudio.github.io/rstudio-extensions/connections-contract.html#connection-opened 55 | on_connection_opened <- function(con, code) { 56 | observer <- getOption("connectionObserver") 57 | if (is.null(observer)) { 58 | return(invisible(NULL)) 59 | } 60 | 61 | observer$connectionOpened( 62 | type = bq_type, 63 | displayName = paste0(bq_type, ": ", con@project), 64 | host = con@project, 65 | icon = system.file("icons/bigquery-512-color.png", package = "bigrquery"), 66 | 67 | # connection code 68 | connectCode = code, 69 | 70 | # only action is to close connections pane 71 | disconnect = function() { 72 | }, 73 | 74 | listObjectTypes = function() { 75 | list( 76 | dataset = list( 77 | icon = system.file("icons/dataset.png", package = "bigrquery"), 78 | contains = list( 79 | table = list( 80 | icon = system.file("icons/table.png", package = "bigrquery"), 81 | contains = "data" 82 | ), 83 | view = list( 84 | icon = system.file("icons/view.png", package = "bigrquery"), 85 | contains = "data" 86 | ) 87 | ) 88 | ) 89 | ) 90 | }, 91 | 92 | # table enumeration code 93 | listObjects = function(...) { 94 | list_bigquery_objects(con, ...) 95 | }, 96 | 97 | # column enumeration code 98 | listColumns = function( 99 | dataset = con@dataset, 100 | table = NULL, 101 | view = NULL, 102 | ... 103 | ) { 104 | x <- bq_table(con@project, dataset, paste0(table, view)) 105 | fields <- bq_table_fields(x) 106 | 107 | tibble::tibble( 108 | name = vapply(fields, `[[`, character(1), "name"), 109 | type = vapply(fields, `[[`, character(1), "type") 110 | ) 111 | }, 112 | 113 | # table preview code 114 | previewObject = function( 115 | rowLimit, 116 | dataset = con@dataset, 117 | table = NULL, 118 | view = NULL, 119 | ... 120 | ) { 121 | x <- bq_table(con@project, dataset, paste0(table, view)) 122 | bq_table_download(x, max_results = rowLimit) 123 | }, 124 | 125 | # no actions 126 | 127 | # raw connection object 128 | connectionObject = con 129 | ) 130 | } 131 | 132 | list_bigquery_objects <- function(con, dataset = con@dataset, ...) 
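# Two modes for the connections pane: with no dataset, list the project's
# datasets; given a dataset, list its tables and views. API errors are caught
# and turned into empty lists so VPC/permission problems degrade gracefully.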
{ 133 | if (is.null(dataset)) { 134 | # Catching VPC/Permission errors to crash gracefully 135 | bq_datasets <- tryCatch( 136 | bq_project_datasets(con@project, warn = FALSE), 137 | error = function(e) list() 138 | ) 139 | datasets <- map_chr(bq_datasets, `[[`, "dataset") 140 | 141 | tibble::tibble(type = "dataset", name = datasets) 142 | } else { 143 | # Catching VPC/Permission errors to crash gracefully 144 | ds <- bq_dataset(con@project, dataset) 145 | bq_tables <- tryCatch(bq_dataset_tables(ds), error = function(e) list()) 146 | tables <- map_chr(bq_tables, `[[`, "table") 147 | types <- map_chr(bq_tables, `[[`, "type") 148 | types <- grepl("VIEW$", types) + 1L 149 | types <- c("table", "view")[types] 150 | tibble::tibble(type = types, name = tables) 151 | } 152 | } 153 | 154 | bq_type <- "BigQuery" 155 | 156 | # nocov end 157 | -------------------------------------------------------------------------------- /R/cpp11.R: -------------------------------------------------------------------------------- 1 | # Generated by cpp11: do not edit by hand 2 | 3 | bq_parse <- function(meta_s, data_s) { 4 | .Call(`_bigrquery_bq_parse`, meta_s, data_s) 5 | } 6 | 7 | bq_field_init <- function(json, value) { 8 | .Call(`_bigrquery_bq_field_init`, json, value) 9 | } 10 | 11 | bq_parse_files <- function(schema_path, file_paths, n, quiet) { 12 | .Call(`_bigrquery_bq_parse_files`, schema_path, file_paths, n, quiet) 13 | } 14 | -------------------------------------------------------------------------------- /R/dbi-driver.R: -------------------------------------------------------------------------------- 1 | #' DBI methods 2 | #' 3 | #' Implementations of pure virtual functions defined in the `DBI` package. 4 | #' @name DBI 5 | #' @keywords internal 6 | NULL 7 | 8 | #' BigQuery DBI driver 9 | #' 10 | #' Creates a BigQuery DBI driver for use in [DBI::dbConnect()]. 11 | #' 12 | #' @export 13 | #' @usage NULL 14 | #' @import methods DBI 15 | #' @examplesIf bq_testable() 16 | #' con <- DBI::dbConnect( 17 | #' bigquery(), 18 | #' project = "publicdata", 19 | #' dataset = "samples", 20 | #' billing = bq_test_project() 21 | #' ) 22 | #' con 23 | #' DBI::dbListTables(con) 24 | #' DBI::dbReadTable(con, "natality", n_max = 10) 25 | #' 26 | #' # Create a temporary dataset to explore 27 | #' ds <- bq_test_dataset() 28 | #' con <- DBI::dbConnect( 29 | #' bigquery(), 30 | #' project = ds$project, 31 | #' dataset = ds$dataset 32 | #' ) 33 | #' DBI::dbWriteTable(con, "mtcars", mtcars) 34 | #' DBI::dbReadTable(con, "mtcars")[1:6, ] 35 | #' 36 | #' DBI::dbGetQuery(con, "SELECT count(*) FROM mtcars") 37 | #' 38 | #' res <- DBI::dbSendQuery(con, "SELECT cyl, mpg FROM mtcars") 39 | #' dbColumnInfo(res) 40 | #' dbFetch(res, 10) 41 | #' dbFetch(res, -1) 42 | #' DBI::dbHasCompleted(res) 43 | bigquery <- function() { 44 | new("BigQueryDriver") 45 | } 46 | 47 | #' @export 48 | #' @rdname bigquery 49 | #' @usage NULL 50 | dbi_driver <- function() { 51 | warning( 52 | "`dbi_driver()` deprecated; please use `bigquery()` instead`", 53 | call. = FALSE 54 | ) 55 | new("BigQueryDriver") 56 | } 57 | 58 | #' @rdname DBI 59 | #' @export 60 | setClass("BigQueryDriver", contains = "DBIDriver") 61 | 62 | #' @rdname bigquery 63 | #' @inheritParams DBI::dbConnect 64 | #' @param project,dataset Project and dataset identifiers 65 | #' @inheritParams bq_perform_query 66 | #' @inheritParams bq_projects 67 | #' @inheritParams api-job 68 | #' @param bigint The R type that BigQuery's 64-bit integer types should be mapped to. 
69 | #' The default is `"integer"` which returns R's `integer` type but results in `NA` for 70 | #' values above/below +/- 2147483647. `"integer64"` returns a [bit64::integer64], 71 | #' which allows the full range of 64 bit integers. 72 | #' @param ... Other arguments for compatibility with generic; currently ignored. 73 | #' @export 74 | setMethod( 75 | "dbConnect", 76 | "BigQueryDriver", 77 | function( 78 | drv, 79 | project, 80 | dataset = NULL, 81 | billing = project, 82 | page_size = 1e4, 83 | quiet = NA, 84 | use_legacy_sql = FALSE, 85 | bigint = c("integer", "integer64", "numeric", "character"), 86 | ... 87 | ) { 88 | check_string(project) 89 | check_string(dataset, allow_null = TRUE) 90 | check_string(billing) 91 | check_number_whole(page_size, min = 1) 92 | check_bool(quiet, allow_na = TRUE) 93 | check_bool(use_legacy_sql) 94 | bigint <- arg_match(bigint) 95 | 96 | BigQueryConnection( 97 | project = project, 98 | dataset = dataset, 99 | billing = billing, 100 | page_size = page_size, 101 | quiet = quiet, 102 | use_legacy_sql = use_legacy_sql, 103 | bigint = bigint 104 | ) 105 | } 106 | ) 107 | 108 | 109 | #' @rdname DBI 110 | #' @export 111 | setMethod( 112 | "dbConnect", 113 | "bq_dataset", 114 | function(drv, ...) { 115 | DBI::dbConnect( 116 | bigquery(), 117 | project = drv$project, 118 | dataset = drv$dataset, 119 | ... 120 | ) 121 | } 122 | ) 123 | 124 | 125 | # Included for DBI compatibility ------------------------------------------ 126 | # nocov start 127 | #' @rdname DBI 128 | #' @inheritParams methods::show 129 | #' @export 130 | setMethod( 131 | "show", 132 | "BigQueryDriver", 133 | function(object) { 134 | cat("\n") 135 | } 136 | ) 137 | 138 | #' @rdname DBI 139 | #' @inheritParams DBI::dbGetInfo 140 | #' @export 141 | setMethod( 142 | "dbGetInfo", 143 | "BigQueryDriver", 144 | function(dbObj, ...) { 145 | list( 146 | driver.version = PACKAGE_VERSION, 147 | client.version = NA, 148 | max.connections = NA 149 | ) 150 | } 151 | ) 152 | 153 | #' @rdname DBI 154 | #' @inheritParams DBI::dbIsValid 155 | #' @export 156 | setMethod( 157 | "dbIsValid", 158 | "BigQueryDriver", 159 | function(dbObj, ...) { 160 | TRUE 161 | } 162 | ) 163 | 164 | #' @rdname DBI 165 | #' @inheritParams DBI::dbDataType 166 | #' @export 167 | setMethod( 168 | "dbDataType", 169 | "BigQueryDriver", 170 | function(dbObj, obj, ...) { 171 | data_type(obj) 172 | } 173 | ) 174 | # nocov end 175 | -------------------------------------------------------------------------------- /R/dbi-result.R: -------------------------------------------------------------------------------- 1 | #' @include dbi-connection.R 2 | NULL 3 | 4 | BigQueryResult <- function(conn, sql, params = NULL, ...) { 5 | ds <- if (!is.null(conn@dataset)) as_bq_dataset(conn) 6 | job <- bq_perform_query( 7 | sql, 8 | billing = conn@billing, 9 | default_dataset = ds, 10 | quiet = conn@quiet, 11 | parameters = params, 12 | ... 
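    # Extra arguments to BigQueryResult() pass straight through to
    # bq_perform_query().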
13 | ) 14 | 15 | bq_job_wait(job, quiet = conn@quiet) 16 | meta <- bq_job_meta( 17 | job, 18 | paste0( 19 | "configuration(query(destinationTable)),", 20 | "statistics(query(statementType,numDmlAffectedRows))" 21 | ) 22 | ) 23 | 24 | tb <- as_bq_table(meta$configuration$query$destinationTable) 25 | 26 | if (meta$statistics$query$statementType == "SELECT") { 27 | nrow <- bq_table_nrow(tb) 28 | } else { 29 | nrow <- 0 30 | } 31 | 32 | affected <- as.numeric(meta$statistics$query$numDmlAffectedRows %||% 0) 33 | 34 | res <- new( 35 | "BigQueryResult", 36 | bq_table = tb, 37 | statement = sql, 38 | nrow = nrow, 39 | affected = affected, 40 | page_size = conn@page_size, 41 | quiet = conn@quiet, 42 | cursor = cursor(nrow), 43 | bigint = conn@bigint, 44 | billing = conn@billing 45 | ) 46 | res 47 | } 48 | 49 | #' @rdname DBI 50 | #' @export 51 | setClass( 52 | "BigQueryResult", 53 | contains = "DBIResult", 54 | slots = list( 55 | bq_table = "bq_table", 56 | statement = "character", 57 | nrow = "numeric", 58 | affected = "numeric", 59 | page_size = "numeric", 60 | quiet = "logical", 61 | cursor = "list", 62 | bigint = "character", 63 | billing = "character" 64 | ) 65 | ) 66 | 67 | #' @rdname DBI 68 | #' @inheritParams methods::show 69 | #' @export 70 | setMethod( 71 | "show", 72 | "BigQueryResult", 73 | function(object) { 74 | cat_line( 75 | "\n", 76 | " Query: ", 77 | dbGetStatement(object), 78 | "\n Has completed: ", 79 | dbHasCompleted(object), 80 | "\n Rows fetched: ", 81 | dbGetRowCount(object) 82 | ) 83 | } 84 | ) 85 | 86 | #' @rdname DBI 87 | #' @inheritParams DBI::dbIsValid 88 | #' @export 89 | setMethod( 90 | "dbIsValid", 91 | "BigQueryResult", 92 | function(dbObj, ...) TRUE 93 | ) 94 | 95 | #' @rdname DBI 96 | #' @inheritParams DBI::dbClearResult 97 | #' @export 98 | setMethod( 99 | "dbClearResult", 100 | "BigQueryResult", 101 | function(res, ...) { 102 | invisible(TRUE) 103 | } 104 | ) 105 | 106 | #' @rdname DBI 107 | #' @inheritParams DBI::dbFetch 108 | #' @export 109 | setMethod( 110 | "dbFetch", 111 | "BigQueryResult", 112 | function(res, n = -1, ...) { 113 | check_number_whole(n, min = -1, allow_infinite = TRUE) 114 | if (n == -1) n <- Inf 115 | 116 | if (has_bigrquerystorage() && n == Inf && res@cursor$cur() == 0) { 117 | # https://github.com/meztez/bigrquerystorage/issues/48 118 | n <- res@cursor$left() 119 | 120 | # If possible, download complete dataset using arrow 121 | data <- bq_table_download( 122 | res@bq_table, 123 | n_max = n, 124 | bigint = res@bigint, 125 | quiet = res@quiet, 126 | api = "arrow" 127 | ) 128 | } else { 129 | # Otherwise, fall back to slower JSON API 130 | data <- bq_table_download( 131 | res@bq_table, 132 | n_max = n, 133 | start_index = res@cursor$cur(), 134 | page_size = res@page_size, 135 | bigint = res@bigint, 136 | quiet = res@quiet, 137 | api = "json" 138 | ) 139 | } 140 | 141 | res@cursor$adv(nrow(data)) 142 | 143 | data 144 | } 145 | ) 146 | 147 | #' @rdname DBI 148 | #' @inheritParams DBI::dbHasCompleted 149 | #' @export 150 | setMethod( 151 | "dbHasCompleted", 152 | "BigQueryResult", 153 | function(res, ...) { 154 | res@cursor$left() == 0 155 | } 156 | ) 157 | 158 | #' @rdname DBI 159 | #' @inheritParams DBI::dbGetStatement 160 | #' @export 161 | setMethod( 162 | "dbGetStatement", 163 | "BigQueryResult", 164 | function(res, ...) { 165 | res@statement 166 | } 167 | ) 168 | 169 | #' @rdname DBI 170 | #' @inheritParams DBI::dbColumnInfo 171 | #' @export 172 | setMethod( 173 | "dbColumnInfo", 174 | "BigQueryResult", 175 | function(res, ...) 
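# Column names and types come from the destination table's schema, so they
# are available before any rows have been fetched.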
{
176 |   fields <- bq_table_fields(res@bq_table)
177 | 
178 |   data.frame(
179 |     name = vapply(fields, function(x) x$name, character(1)),
180 |     type = vapply(fields, function(x) x$type, character(1)),
181 |     stringsAsFactors = FALSE
182 |   )
183 | }
184 | )
185 | 
186 | #' @rdname DBI
187 | #' @inheritParams DBI::dbGetRowCount
188 | #' @export
189 | setMethod(
190 |   "dbGetRowCount",
191 |   "BigQueryResult",
192 |   function(res, ...) {
193 |     res@cursor$cur()
194 |   }
195 | )
196 | 
197 | #' @rdname DBI
198 | #' @inheritParams DBI::dbGetRowsAffected
199 | #' @export
200 | setMethod(
201 |   "dbGetRowsAffected",
202 |   "BigQueryResult",
203 |   function(res, ...) {
204 |     res@affected
205 |   }
206 | )
207 | 
208 | #' @rdname DBI
209 | #' @inheritParams DBI::dbBind
210 | #' @export
211 | setMethod(
212 |   "dbBind",
213 |   "BigQueryResult",
214 |   function(res, params, ...) {
215 |     testthat::skip("Not yet implemented: dbBind(Result)")
216 |   }
217 | )
218 | 
219 | 
220 | cursor <- function(nrow) {
221 |   pos <- 0
222 | 
223 |   list(
224 |     cur = function() pos,
225 |     adv = function(i) {
226 |       pos <<- pos + i
227 |     },
228 |     left = function() nrow - pos
229 |   )
230 | }
231 | 
--------------------------------------------------------------------------------
/R/gs-object.R:
--------------------------------------------------------------------------------
 1 | gs_object <- function(bucket, object) {
 2 |   structure(
 3 |     list(bucket = bucket, object = object),
 4 |     class = "gs_object"
 5 |   )
 6 | }
 7 | 
 8 | #' @export
 9 | as.character.gs_object <- function(x, ...) {
10 |   format(x)
11 | }
12 | 
13 | #' @export
14 | format.gs_object <- function(x, ...) {
15 |   sprintf("gs://%s/%s", x$bucket, x$object)
16 | }
17 | 
18 | #' @export
19 | print.gs_object <- function(x, ...) {
20 |   cat_line("<gs_object> ", format(x))
21 |   invisible(x)
22 | }
23 | 
24 | gs_object_delete <- function(x, token = bq_token()) {
25 |   url <- sprintf(
26 |     "https://storage.googleapis.com/storage/v1/b/%s/o/%s",
27 |     x$bucket,
28 |     x$object
29 |   )
30 |   req <- httr::DELETE(url, token, httr::user_agent(bq_ua()))
31 |   process_request(req)
32 | }
33 | 
34 | gs_object_exists <- function(x, token = bq_token()) {
35 |   url <- sprintf(
36 |     "https://storage.googleapis.com/storage/v1/b/%s/o/%s",
37 |     x$bucket,
38 |     x$object
39 |   )
40 |   req <- httr::GET(url, token, httr::user_agent(bq_ua()))
41 |   req$status_code != 404
42 | }
43 | 
--------------------------------------------------------------------------------
/R/utils.R:
--------------------------------------------------------------------------------
 1 | as_df <- function(x) {
 2 |   class(x) <- "data.frame"
 3 |   attr(x, "row.names") <- c(NA_integer_, -length(x[[1]]))
 4 | 
 5 |   x
 6 | }
 7 | 
 8 | check_quiet <- function(x, arg = caller_arg(x), call = caller_env()) {
 9 |   check_bool(x, allow_na = TRUE, arg = arg, call = call)
10 | 
11 |   if (is.na(x)) {
12 |     !(is_interactive() || is_snapshot())
13 |   } else {
14 |     x
15 |   }
16 | }
17 | 
18 | bq_check_namespace <- function(pkg, bq_type) {
19 |   check_installed(pkg, sprintf("to parse BigQuery '%s' fields.", bq_type))
20 | }
21 | 
22 | isFALSE <- function(x) identical(x, FALSE)
23 | 
24 | cat_line <- function(...) {
25 |   cat(paste0(..., "\n", collapse = ""))
26 | }
27 | 
28 | big_mark <- function(x, ...) {
29 |   mark <- if (identical(getOption("OutDec"), ",")) "." else ","
30 |   format(x, big.mark = mark, scientific = FALSE, ...)
31 | }
32 | 
33 | map_chr <- function(x, f, ...)
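# Minimal stand-in for purrr::map_chr(): vapply() constrained to return
# exactly one character value per element of x.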
{ 34 | vapply(x, f, ..., FUN.VALUE = character(1)) 35 | } 36 | 37 | indent <- function(x, n = 2) { 38 | space <- paste(rep(" ", n), collapse = "") 39 | paste0(space, gsub("\n", paste0("\n", space), x, fixed = TRUE)) 40 | } 41 | 42 | as_json <- function(x) UseMethod("as_json") 43 | 44 | #' @export 45 | as_json.NULL <- function(x) NULL 46 | 47 | # nocov start 48 | show_json <- function(x) { 49 | jsonlite::toJSON(x, pretty = TRUE, auto_unbox = TRUE) 50 | } 51 | 52 | #' @export 53 | print.bq_bytes <- function(x, ...) { 54 | cat_line(prettyunits::pretty_bytes(unclass(x))) 55 | } 56 | # nocov end 57 | 58 | defer <- function(expr, env = caller_env(), after = FALSE) { 59 | thunk <- as.call(list(function() expr)) 60 | do.call(on.exit, list(thunk, TRUE, after), envir = env) 61 | } 62 | 63 | in_pkgdown <- function() { 64 | identical(Sys.getenv("IN_PKGDOWN"), "true") 65 | } 66 | 67 | as_query <- function(x, error_arg = caller_arg(x), error_call = caller_env()) { 68 | if (is(x, "SQL")) { 69 | x <- x@.Data 70 | } 71 | check_string(x, arg = error_arg, call = error_call) 72 | x 73 | } 74 | 75 | has_bigrquerystorage <- function() { 76 | is_installed("bigrquerystorage") 77 | } 78 | -------------------------------------------------------------------------------- /R/zzz.R: -------------------------------------------------------------------------------- 1 | .onLoad <- function(libname, pkgname) { 2 | .auth <<- gargle::init_AuthState( 3 | package = "bigrquery", 4 | auth_active = TRUE 5 | ) 6 | 7 | if (has_internal_auth() && in_pkgdown()) { 8 | bq_auth_internal() 9 | } 10 | 11 | # S3 methods -------------------------------------------------------------- 12 | s3_register("dplyr::tbl", "BigQueryConnection") 13 | s3_register("dplyr::collect", "tbl_BigQueryConnection") 14 | s3_register("dplyr::same_src", "tbl_BigQueryConnection") 15 | 16 | s3_register("dbplyr::dbplyr_edition", "BigQueryConnection") 17 | s3_register("dbplyr::db_compute", "BigQueryConnection") 18 | s3_register("dbplyr::db_copy_to", "BigQueryConnection") 19 | s3_register("dbplyr::sql_join_suffix", "BigQueryConnection") 20 | s3_register("dbplyr::sql_translation", "BigQueryConnection") 21 | 22 | # Default options -------------------------------------------------------- 23 | op <- options() 24 | defaults <- list( 25 | bigrquery.quiet = NA, 26 | bigrquery.page.size = 1e4 27 | ) 28 | toset <- !(names(defaults) %in% names(op)) 29 | if (any(toset)) options(defaults[toset]) 30 | 31 | invisible() 32 | } 33 | 34 | PACKAGE_NAME <- utils::packageName() 35 | PACKAGE_VERSION <- utils::packageVersion(PACKAGE_NAME) 36 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: github_document 3 | --- 4 | 5 | 6 | 7 | ```{r setup, include = FALSE} 8 | knitr::opts_chunk$set( 9 | collapse = TRUE, 10 | comment = "#>", 11 | fig.path = "man/figures/README-", 12 | out.width = "100%" 13 | ) 14 | 15 | if (bigrquery:::has_internal_auth()) { 16 | bigrquery:::bq_auth_internal() 17 | } else { 18 | knitr::opts_chunk$set(eval = FALSE) 19 | } 20 | ``` 21 | # bigrquery 22 | 23 | 24 | [![CRAN Status](https://www.r-pkg.org/badges/version/bigrquery)](https://cran.r-project.org/package=bigrquery) 25 | [![R-CMD-check](https://github.com/r-dbi/bigrquery/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/r-dbi/bigrquery/actions/workflows/R-CMD-check.yaml) 26 | [![Codecov test 
coverage](https://codecov.io/gh/r-dbi/bigrquery/branch/main/graph/badge.svg)](https://app.codecov.io/gh/r-dbi/bigrquery?branch=main)
 27 | 
 28 | 
 29 | The bigrquery package makes it easy to work with data stored in
 30 | [Google BigQuery](https://cloud.google.com/bigquery/docs) by allowing you to query BigQuery tables and retrieve metadata about your projects, datasets, tables, and jobs. The bigrquery package provides three levels of abstraction on top of BigQuery:
 31 | 
 32 | * The low-level API provides thin wrappers over the underlying REST API. All
 33 |   the low-level functions start with `bq_`, and mostly have the form
 34 |   `bq_noun_verb()`. This level of abstraction is most appropriate if you're
 35 |   familiar with the REST API and you want to do something not supported in the
 36 |   higher-level APIs.
 37 | 
 38 | * The [DBI interface](https://r-dbi.org) wraps the low-level API and
 39 |   makes working with BigQuery like working with any other database system.
 40 |   This is the most convenient layer if you want to execute SQL queries in
 41 |   BigQuery or upload smaller amounts (i.e. <100 MB) of data.
 42 | 
 43 | * The [dplyr interface](https://dbplyr.tidyverse.org/) lets you treat BigQuery
 44 |   tables as if they are in-memory data frames. This is the most convenient
 45 |   layer if you don't want to write SQL, but instead want dbplyr to write it
 46 |   for you.
 47 | 
 48 | ## Installation
 49 | 
 50 | The current bigrquery release can be installed from CRAN:
 51 | 
 52 | ```{r eval = FALSE}
 53 | install.packages("bigrquery")
 54 | ```
 55 | 
 56 | The newest development release can be installed from GitHub:
 57 | 
 58 | ```{r eval = FALSE}
 59 | # install.packages("pak")
 60 | pak::pak("r-dbi/bigrquery")
 61 | ```
 62 | 
 63 | ## Usage
 64 | 
 65 | ### Low-level API
 66 | 
 67 | ```{r}
 68 | library(bigrquery)
 69 | billing <- bq_test_project() # replace this with your project ID
 70 | sql <- "SELECT year, month, day, weight_pounds FROM `publicdata.samples.natality`"
 71 | 
 72 | tb <- bq_project_query(billing, sql)
 73 | bq_table_download(tb, n_max = 10)
 74 | ```
 75 | 
 76 | ### DBI
 77 | 
 78 | ```{r, warning = FALSE}
 79 | library(DBI)
 80 | 
 81 | con <- dbConnect(
 82 |   bigrquery::bigquery(),
 83 |   project = "publicdata",
 84 |   dataset = "samples",
 85 |   billing = billing
 86 | )
 87 | con
 88 | 
 89 | dbListTables(con)
 90 | 
 91 | dbGetQuery(con, sql, n = 10)
 92 | ```
 93 | 
 94 | ### dplyr
 95 | 
 96 | ```{r, message = FALSE}
 97 | library(dplyr)
 98 | 
 99 | natality <- tbl(con, "natality")
100 | 
101 | natality %>%
102 |   select(year, month, day, weight_pounds) %>%
103 |   head(10) %>%
104 |   collect()
105 | ```
106 | 
107 | ## Important details
108 | 
109 | ### BigQuery account
110 | 
111 | To use bigrquery, you'll need a BigQuery project. Fortunately, if you just want to play around with the BigQuery API, it's easy to start with Google's free [public data](https://cloud.google.com/bigquery/public-data) and the [BigQuery sandbox](https://cloud.google.com/bigquery/docs/sandbox). This gives you some fun data to play with along with enough free compute (1 TB of queries & 10 GB of storage per month) to learn the ropes.
112 | 
113 | To get started, open the Google Cloud console and create a project. Make a note of the "Project ID": you'll use it as the `billing` project whenever you work with free sample data, and as the `project` when you work with your own data.
114 | 
115 | ### Authentication and authorization
116 | 
117 | When using bigrquery interactively, you'll be prompted to [authorize bigrquery](https://cloud.google.com/bigquery/docs/authorization) in the browser.
You'll be asked if you want to cache tokens for reuse in future sessions. For non-interactive usage, it is preferred to use a service account token, if possible. More places to learn about auth: 118 | 119 | * Help for [`bigrquery::bq_auth()`](https://bigrquery.r-dbi.org/reference/bq_auth.html). 120 | * [How gargle gets tokens](https://gargle.r-lib.org/articles/how-gargle-gets-tokens.html). 121 | - bigrquery obtains a token with `gargle::token_fetch()`, which supports 122 | a variety of token flows. This article provides full details, such as how 123 | to take advantage of Application Default Credentials or service accounts 124 | on GCE VMs. 125 | * [Non-interactive auth](https://gargle.r-lib.org/articles/non-interactive-auth.html). Explains 126 | how to set up a project when code must run without any user interaction. 127 | * [How to get your own API credentials](https://gargle.r-lib.org/articles/get-api-credentials.html). Instructions for getting your own OAuth client or service account token. 128 | 129 | Note that bigrquery requests permission to modify your data; but it will never do so unless you explicitly request it (e.g. by calling `bq_table_delete()` or `bq_table_upload()`). Our [Privacy policy](https://www.tidyverse.org/google_privacy_policy) provides more info. 130 | 131 | ## Useful links 132 | 133 | * [SQL reference](https://cloud.google.com/bigquery/docs/reference/standard-sql/functions-and-operators) 134 | * [API reference](https://cloud.google.com/bigquery/docs/reference/rest) 135 | * [Query/job console](https://console.cloud.google.com/bigquery/) 136 | * [Billing console](https://console.cloud.google.com/) 137 | 138 | ## Policies 139 | 140 | Please note that the 'bigrquery' project is released with a [Contributor Code of Conduct](https://bigrquery.r-dbi.org/CODE_OF_CONDUCT.html). By contributing to this project, you agree to abide by its terms. 
141 | 
142 | [Privacy policy](https://www.tidyverse.org/google_privacy_policy)
143 | 
--------------------------------------------------------------------------------
/_pkgdown.yml:
--------------------------------------------------------------------------------
 1 | url: https://bigrquery.r-dbi.org
 2 | 
 3 | template:
 4 |   package: tidytemplate
 5 |   bootstrap: 5
 6 | 
 7 |   includes:
 8 |     in_header: |
 9 | 
10 | 
11 | development:
12 |   mode: auto
13 | 
14 | reference:
15 |   - title: DBI and dplyr
16 |     contents:
17 |       - src_bigquery
18 |       - bigquery
19 |       - collect.tbl_BigQueryConnection
20 | 
21 |   - title: Low-level API
22 |     contents:
23 |       - starts_with("bq")
24 | 
25 | news:
26 |   releases:
27 |     - text: "bigrquery 1.5.0"
28 |       href: https://www.tidyverse.org/blog/2024/01/bigrquery-1-5-0/
29 |     - text: "bigrquery 1.4.0"
30 |       href: https://www.tidyverse.org/blog/2021/08/bigrquery-1-4-0/
31 |     - text: "bigrquery 1.1.0"
32 |       href: https://www.tidyverse.org/blog/2019/02/bigrquery-1-1-0/
33 |     - text: "bigrquery 1.0.0"
34 |       href: https://www.tidyverse.org/blog/2018/04/bigrquery-1-0-0/
35 | 
--------------------------------------------------------------------------------
/air.toml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/r-dbi/bigrquery/7cbf430f9c2497fff3cf2dcb1f87f252b000f107/air.toml
--------------------------------------------------------------------------------
/bigrquery.Rproj:
--------------------------------------------------------------------------------
 1 | Version: 1.0
 2 | 
 3 | RestoreWorkspace: Default
 4 | SaveWorkspace: Default
 5 | AlwaysSaveHistory: Default
 6 | 
 7 | EnableCodeIndexing: Yes
 8 | UseSpacesForTab: Yes
 9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 | 
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 | 
15 | AutoAppendNewline: Yes
16 | StripTrailingWhitespace: Yes
17 | 
18 | BuildType: Package
19 | PackageUseDevtools: Yes
20 | PackageInstallArgs: --no-multiarch --with-keep.source
21 | PackageRoxygenize: rd,collate,namespace
22 | 
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
 1 | comment: false
 2 | 
 3 | coverage:
 4 |   status:
 5 |     project:
 6 |       default:
 7 |         target: auto
 8 |         threshold: 1%
 9 |         informational: true
10 |     patch:
11 |       default:
12 |         target: auto
13 |         threshold: 1%
14 |         informational: true
15 | 
--------------------------------------------------------------------------------
/cran-comments.md:
--------------------------------------------------------------------------------
 1 | ## R CMD check results
 2 | 
 3 | 0 errors | 0 warnings | 0 notes
 4 | 
 5 | ## revdepcheck results
 6 | 
 7 | We checked 14 reverse dependencies (10 from CRAN + 4 from Bioconductor), comparing R CMD check results across CRAN and dev versions of this package.
 8 | 
 9 | * We saw 0 new problems
10 | * We failed to check 1 package
11 | 
12 | Issues with CRAN packages are summarised below.
13 | 14 | ### Failed to check 15 | 16 | * bigrquerystorage (NA) 17 | -------------------------------------------------------------------------------- /inst/extdata/data/client_secret_123.googleusercontent.com.json: -------------------------------------------------------------------------------- 1 | {"installed":{"client_id":"abc.apps.googleusercontent.com","project_id":"a_project","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://accounts.google.com/o/oauth2/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"ssshh-i-am-a-secret","redirect_uris":["http://localhost"]}} 2 | -------------------------------------------------------------------------------- /inst/icons/bigquery-512-color.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r-dbi/bigrquery/7cbf430f9c2497fff3cf2dcb1f87f252b000f107/inst/icons/bigquery-512-color.png -------------------------------------------------------------------------------- /inst/icons/dataset.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r-dbi/bigrquery/7cbf430f9c2497fff3cf2dcb1f87f252b000f107/inst/icons/dataset.png -------------------------------------------------------------------------------- /inst/icons/table.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r-dbi/bigrquery/7cbf430f9c2497fff3cf2dcb1f87f252b000f107/inst/icons/table.png -------------------------------------------------------------------------------- /inst/icons/view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/r-dbi/bigrquery/7cbf430f9c2497fff3cf2dcb1f87f252b000f107/inst/icons/view.png -------------------------------------------------------------------------------- /inst/rstudio/connections.dcf: -------------------------------------------------------------------------------- 1 | Name: BigQuery 2 | HelpUrl: https://cloud.google.com/bigquery/ 3 | Icon: icons/bigquery-512-color.png 4 | -------------------------------------------------------------------------------- /inst/rstudio/connections/BigQuery.R: -------------------------------------------------------------------------------- 1 | library(bigrquery) 2 | con <- dbConnect( 3 | bigquery(), 4 | project = "${1:Project}" 5 | ) 6 | -------------------------------------------------------------------------------- /inst/secret/bigrquery-testing.json: -------------------------------------------------------------------------------- 1 | 
Nn-4MMxMM8enqGFDciFb768FCNjLz5Ne0QiLZz3UMO0nGj3tiRzFvpoeUEFWcHsZ22U_TK6lkxQkMhhA50RAAFHumHyvkfkl-31py5ZgJpiyGO-_TOkdiMidw_gbUn3BWYE5z0EwE806pS2zfGQ4o5N1pjR-eDRNOJgPbS9CspwATg81WhDc-Of3XzlQgB6aEemLqqDFMF9XN8st0B-JIelyT3Wr32RED1tcfG5tP_pPooYrh_UNrFN-ZW4PpOdJw_T3ROk37uHAeFYtIZ5rYThjd2RkrEHnGu_zSlHPQfsNJ8iiSmS-LzedoYJdyg6x0bclZb7TgewQpDnaSJWnC5aK9yzUNXhNrdKyn33okmYO6prqXSQrmHMyp-e1-HkrZNecK5a0lqnn2qsvoWlJOldRDtd_m_A9YL9TybLxMainvvsYViCi5CnakAIxc-6T66-GuQMNvTGiuJccv1bdEMCX-9U7oO3WnBXZTJb2Eh0sQxj7cm7BGRTftxOTLsbx9w_9N2H32DCsPiGVgUVmJRbHkqixOmlaXKjqUJkQEClW216-AVakEuDW3OLUgK_pg3VgfonPxtP0o8sL2pZ4cRizzHlvpfVfXih9-qFtUIm4Y-8gcV3J8d0_0UlHYVtuIpVaIPPrYa_C_XG6YNNM1JimPXKqCjI4jUEb8saoTU2MUhK_2PBmm7e31ZCG3XGmxr1dmqPjyvnGy4u6LjH_1bKbkHdhF7IJcTFOox3bHrBtdYQ97qR6Il5aO4EF7NAEfWWOS9KPMveEl_1j3mw76F6MmHz5v8FFht5wpvE8EELw1KNUBYs7gGshZDpk7Ji3zk7iKIuws42n3A6p8TD9mTstBv9xfCrkI5alpY419ftEimYEfuE2Mi-KCNcicf87B6ML_cEeQR2NGl7Z2PU-MgsR1JOunEwT1sjoFHLzy95kZsXWB-iTp4IFiVRA_Y_7cu9QD1mZZdCUE8eVLKWLu5ORV048yXY1e23UHTU48y85JF3Qo7JSEthWs_b9_YhGDN9bORry2Xvoznmb1jyeKt1t0TS-HO8zKveqyMMyMoOxtm_J0xiOjZDXtLNfzsFMj0j4vMH_l5V6x64THS1R3Wq53Ob-h7VyuXCzFSfFr6kUDRcKEy_ina0nAl7WFyZC61xVKyXcZdxD63QTyg2JiiK9g8zjCLAxO4SvJI9p-eLLG3UmGRHEWVkc36LlghjLygRTY8v57ePSsUtEkh-gCyTBV8a2jRm_yMcVgmur7A08FG3MsnLycgkDRLEH1ON-a01GGg-iNCf_DtGNtdFZIYGxc_PjBmzlLRAaASeDz4ntannL9W5hTRcOiMcN8XZcqWuf9-YQ8R1xHmjc-AFCyZ41OU2cubfO2Zl5GdLLu9ep46kDsE6aHz-8pdxFnh1Zju8TrgpJQ0lnhHNTt4NcGaN1GKMiuweKvmutxW9ieZgYaKTZ7VQIryBIpN6xGnSRX51hamXjkIzkCyzfWFKe3_PDr3jnMWBkPePdQxANwj4i53ybtG48JFV111kcqy1M--z35vVDTkD0xBjb5qUBsK-RuezbZGuWcp1IbvtLLVBsMWeJiRMYF_gmP_6QAV0BvJcGsgoV_t-_pjY7x-m4apTqlJu8RF9b7tS9YKMeI5kmfknBzjYYdQp_pEwHyMVnyHMfnvq4r08MJZ9OZ0Ntj9bRPEycTGCcF2Q0rGYgy3pv-Eod_FaHkFz7QgoKRlPG1XTJC_B-YAXi7TwnIyheCaQ7D9yxYN9xnrpDvprtHLq2UfJTx8MtJ2rpipshFSamETyu_AgrjpqsXFqX1lLJF_g6ZIVkrBzrw8OERQ7QYDMfCUcfh-CMcybwWy2lE3r8zytJstwzTYSv4NuIeC4Qa660TUldQWXShh3Q_7nX2AlJta_U_z0hIyBhnPPyQjuc09lx71cyRpa9mAop_PEj7qOMUfYb_bPCEHT7seeSTDupS_DR5OkP5kQvVJ3YrJfGXF_5dmdErcTyGUfFKRZ5ztX3hbI-VrAGikDISTt1IFxEW6byY-Kstvx5GwWQwtLzAtdOrmI4FjdbRM761T4u1n1H7_6z7rFra6RPGZRLqxymQLDoSq9cIT2ARJrO6QgdcIqAsCUZtaTDGmZ2iAbu-IoeajoScLnaBsuNqg4mWvYTcC3KauIpwRDPLknFhExMO9OfsotQewfMDx0Kw4a8nj0e6wSrDuhCl4iDEKBKUokAdkAKvsh1unzGkcEO-BFFEdGWmLqPM3U_GrkBnL9c7ntHezrVhOaN0d2gc6-uUu6Jadj5jMHgbywD0mbRqEtlJs1BYLFDJNM7RV6t6RUpV952Hjw2v_go3CEseKNY9Vz_NmAK8ZHMHqXpguNL4Ysfp5OVDzE4Foo2IGj3N6iZlZv-AKEJVeP20m7VaGsKS-2WocVVn8h_RPYTPAJxixhudwhKeLrax1Vs_AZ-t-hur3NJSF-foWVzZzAFH_PiyF32DkI71_NSdCz4P3blpNMijeVVXGPZ_i0CznEn-2bP_cLqaL8S6miTFHbMYsr6t148kKsfaGzvQ1seUoUqP4Z6vZi14Rx-8qbE0dBAz2trplZqPGydD1si_6rnwLe6izD-5LAtSFP7EueNr2ADuWS9VW-RCBTlDBExguqJQmuqJyGkJ5RXo2ynWJbWJlOOsHa0JFVYeblubNlm9caO9-tIorI9cO8T0J0cEbOVLrze9iEgRizaa7dbk2jcU23Z-uAHni5tNmm3mnW9OCo23AMww38DpdM9vaHSg2gC2k2ScKFb6xbh9T894TxGJr8aMRoysGNVeLSeM2eKSw9y7ytqkezuvk7BF9LDYK5yMUuzUWvAT7L6gIWb5SBkv7N0b66aYMavZBEwT2YLFO0EzfII_hq8ynNmcq7b-FrAcru6gGGb60V5MTpb-iJZtT8_th6rvc_k0sHZTHSkmSBO6sTryGx9DB0ziHRlGjnEWPqqmZ2odThFl1HfMWYTnzu57AUnbRgmF0kSp1aN21VtuqbEi5r06xax7UulMHPqoTcphNDy5sNYXvhyXKykW2mVqPg3tU7RBQd1AzQibqtgD6SN8qtwxmV2kMW795Gzo3pB2Ben-wUHlB4x2Y4AueSKU4i1kHNG -------------------------------------------------------------------------------- /man/api-dataset.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-dataset.R 3 | \name{api-dataset} 4 | \alias{api-dataset} 5 | \alias{bq_dataset_create} 6 | \alias{bq_dataset_meta} 7 | \alias{bq_dataset_exists} 8 | 
\alias{bq_dataset_update} 9 | \alias{bq_dataset_delete} 10 | \alias{bq_dataset_tables} 11 | \title{BigQuery datasets} 12 | \usage{ 13 | bq_dataset_create(x, location = "US", ...) 14 | 15 | bq_dataset_meta(x, fields = NULL) 16 | 17 | bq_dataset_exists(x) 18 | 19 | bq_dataset_update(x, ...) 20 | 21 | bq_dataset_delete(x, delete_contents = FALSE) 22 | 23 | bq_dataset_tables(x, page_size = 50, max_pages = Inf, warn = TRUE, ...) 24 | } 25 | \arguments{ 26 | \item{x}{A \link{bq_dataset}} 27 | 28 | \item{location}{Dataset location} 29 | 30 | \item{...}{Additional arguments passed on to the underlying API call. 31 | snake_case names are automatically converted to camelCase.} 32 | 33 | \item{fields}{An optional field specification for 34 | \href{https://cloud.google.com/bigquery/docs/api-performance#partial-response}{partial response}} 35 | 36 | \item{delete_contents}{If \code{TRUE}, will recursively delete all tables in 37 | the dataset. Set to \code{FALSE} by default for safety.} 38 | 39 | \item{page_size}{Number of items per page.} 40 | 41 | \item{max_pages}{Maximum number of pages to retrieve. Use \code{Inf} to retrieve 42 | all pages (this may take a long time!)} 43 | 44 | \item{warn}{If \code{TRUE}, warn when there are unretrieved pages.} 45 | } 46 | \description{ 47 | Basic create-read-update-delete verbs for datasets. 48 | } 49 | \section{Google BigQuery API documentation}{ 50 | 51 | \itemize{ 52 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/get}{get} 53 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert}{insert} 54 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete}{delete} 55 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list}{list} 56 | } 57 | } 58 | 59 | \examples{ 60 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 61 | ds <- bq_dataset(bq_test_project(), "dataset_api") 62 | bq_dataset_exists(ds) 63 | 64 | bq_dataset_create(ds) 65 | bq_dataset_exists(ds) 66 | str(bq_dataset_meta(ds)) 67 | 68 | bq_dataset_delete(ds) 69 | bq_dataset_exists(ds) 70 | 71 | # Use bq_test_dataset() to create a temporary dataset that will 72 | # be automatically deleted 73 | ds <- bq_test_dataset() 74 | bq_table_create(bq_table(ds, "x1")) 75 | bq_table_create(bq_table(ds, "x2")) 76 | bq_table_create(bq_table(ds, "x3")) 77 | bq_dataset_tables(ds) 78 | \dontshow{\}) # examplesIf} 79 | } 80 | -------------------------------------------------------------------------------- /man/api-job.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-job.R 3 | \name{api-job} 4 | \alias{api-job} 5 | \alias{bq_job_meta} 6 | \alias{bq_job_status} 7 | \alias{bq_job_show_statistics} 8 | \alias{bq_job_wait} 9 | \title{BigQuery job: retrieve metadata} 10 | \usage{ 11 | bq_job_meta(x, fields = NULL) 12 | 13 | bq_job_status(x) 14 | 15 | bq_job_show_statistics(x) 16 | 17 | bq_job_wait( 18 | x, 19 | quiet = getOption("bigrquery.quiet"), 20 | pause = 0.5, 21 | call = caller_env() 22 | ) 23 | } 24 | \arguments{ 25 | \item{x}{A \link{bq_job}} 26 | 27 | \item{fields}{An optional field specification for 28 | \href{https://cloud.google.com/bigquery/docs/api-performance#partial-response}{partial response}} 29 | 30 | \item{quiet}{If \code{FALSE}, displays progress bar; if \code{TRUE} is silent; 31 | if \code{NA} picks based on whether or 
not you're in an interactive context.}
32 | 
33 | \item{pause}{Amount of time to wait between status requests.}
34 | 
35 | \item{call}{The execution environment of a currently
36 | running function, e.g. \code{caller_env()}. The function will be
37 | mentioned in error messages as the source of the error. See the
38 | \code{call} argument of \code{\link[rlang:abort]{abort()}} for more information.}
39 | }
40 | \description{
41 | To perform a job, see \link{api-perform}. These functions all retrieve metadata
42 | (in various forms) about an existing job.
43 | }
44 | \section{Google BigQuery API documentation}{
45 | 
46 | \itemize{
47 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get}{get}
48 | }
49 | }
50 | 
51 | \examples{
52 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
53 | jobs <- bq_project_jobs(bq_test_project())
54 | jobs[[1]]
55 | 
56 | # Show statistics about the job
57 | bq_job_show_statistics(jobs[[1]])
58 | 
59 | # Wait for the job to complete
60 | bq_job_wait(jobs[[1]])
61 | \dontshow{\}) # examplesIf}
62 | }
63 | 
--------------------------------------------------------------------------------
/man/api-project.Rd:
--------------------------------------------------------------------------------
 1 | % Generated by roxygen2: do not edit by hand
 2 | % Please edit documentation in R/bq-project.R
 3 | \name{api-project}
 4 | \alias{api-project}
 5 | \alias{bq_project_datasets}
 6 | \alias{bq_project_jobs}
 7 | \title{BigQuery project methods}
 8 | \usage{
 9 | bq_project_datasets(x, page_size = 100, max_pages = 1, warn = TRUE)
10 | 
11 | bq_project_jobs(x, page_size = 100, max_pages = 1, warn = TRUE)
12 | }
13 | \arguments{
14 | \item{x}{A string giving a project name.}
15 | 
16 | \item{page_size}{Number of items per page.}
17 | 
18 | \item{max_pages}{Maximum number of pages to retrieve. Use \code{Inf} to retrieve
19 | all pages (this may take a long time!)}
20 | 
21 | \item{warn}{If \code{TRUE}, warn when there are unretrieved pages.}
22 | }
23 | \value{
24 | \itemize{
25 | \item \code{bq_project_datasets()}: a list of \link{bq_dataset}s.
26 | \item \code{bq_project_jobs()}: a list of \link{bq_job}s.
27 | }
28 | }
29 | \description{
30 | Projects have two primary components: datasets and jobs. Unlike other
31 | BigQuery objects, there is no accompanying \code{bq_project} S3 class because a
32 | project is a simple string.
33 | }
34 | \section{Google BigQuery API documentation}{
35 | 
36 | \itemize{
37 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list}{datasets}
38 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list}{jobs}
39 | }
40 | 
41 | One day we might also expose the general \href{https://cloud.google.com/resource-manager/reference/rest/v1/projects}{project metadata}.
42 | } 43 | 44 | \examples{ 45 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 46 | bq_project_datasets("bigquery-public-data") 47 | bq_project_datasets("githubarchive") 48 | 49 | bq_project_jobs(bq_test_project(), page_size = 10) 50 | \dontshow{\}) # examplesIf} 51 | } 52 | -------------------------------------------------------------------------------- /man/api-table.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-table.R 3 | \name{api-table} 4 | \alias{api-table} 5 | \alias{bq_table_create} 6 | \alias{bq_table_meta} 7 | \alias{bq_table_fields} 8 | \alias{bq_table_size} 9 | \alias{bq_table_nrow} 10 | \alias{bq_table_exists} 11 | \alias{bq_table_delete} 12 | \alias{bq_table_copy} 13 | \alias{bq_table_upload} 14 | \alias{bq_table_save} 15 | \alias{bq_table_load} 16 | \alias{bq_table_patch} 17 | \title{BigQuery tables} 18 | \usage{ 19 | bq_table_create(x, fields = NULL, ...) 20 | 21 | bq_table_meta(x, fields = NULL) 22 | 23 | bq_table_fields(x) 24 | 25 | bq_table_size(x) 26 | 27 | bq_table_nrow(x) 28 | 29 | bq_table_exists(x) 30 | 31 | bq_table_delete(x) 32 | 33 | bq_table_copy(x, dest, ..., quiet = NA) 34 | 35 | bq_table_upload(x, values, ..., quiet = NA) 36 | 37 | bq_table_save(x, destination_uris, ..., quiet = NA) 38 | 39 | bq_table_load(x, source_uris, ..., quiet = NA) 40 | 41 | bq_table_patch(x, fields) 42 | } 43 | \arguments{ 44 | \item{x}{A \link{bq_table}, or an object coercible to a \code{bq_table}.} 45 | 46 | \item{fields}{A \link{bq_fields} specification, or something coercible to it 47 | (like a data frame).} 48 | 49 | \item{...}{Additional arguments passed on to the underlying API call. 50 | snake_case names are automatically converted to camelCase.} 51 | 52 | \item{dest}{Source and destination \link{bq_table}s.} 53 | 54 | \item{quiet}{If \code{FALSE}, displays progress bar; if \code{TRUE} is silent; 55 | if \code{NA} picks based on whether or not you're in an interactive context.} 56 | 57 | \item{values}{Data frame of values to insert.} 58 | 59 | \item{destination_uris}{A character vector of fully-qualified Google Cloud 60 | Storage URIs where the extracted table should be written. Can export 61 | up to 1 Gb of data per file. Use a wild card URI (e.g. 62 | \verb{gs://[YOUR_BUCKET]/file-name-*.json}) to automatically create any 63 | number of files.} 64 | 65 | \item{source_uris}{The fully-qualified URIs that point to your data in 66 | Google Cloud. 67 | 68 | For Google Cloud Storage URIs: Each URI can contain one 69 | \code{'*'} wildcard character and it must come after the 'bucket' name. 70 | Size limits related to load jobs apply to external data sources. 71 | 72 | For Google Cloud Bigtable URIs: Exactly one URI can be specified and 73 | it has be a fully specified and valid HTTPS URL for a Google Cloud 74 | Bigtable table. For Google Cloud Datastore backups: Exactly one URI 75 | can be specified. Also, the '*' wildcard character is not allowed.} 76 | } 77 | \value{ 78 | \itemize{ 79 | \item \code{bq_table_copy()}, \code{bq_table_create()}, \code{bq_table_delete()}, \code{bq_table_upload()}: 80 | an invisible \link{bq_table} 81 | \item \code{bq_table_exists()}: either \code{TRUE} or \code{FALSE}. 82 | \item \code{bq_table_size()}: the size of the table in bytes 83 | \item \code{bq_table_fields()}: a \link{bq_fields}. 
84 | } 85 | } 86 | \description{ 87 | Basic create-read-update-delete verbs for tables, as well as functions 88 | uploading data (\code{bq_table_upload()}), saving to/loading from Google 89 | Cloud Storage (\code{bq_table_load()}, \code{bq_table_save()}), and getting 90 | various values from the metadata. 91 | } 92 | \section{Google BigQuery API documentation}{ 93 | 94 | \itemize{ 95 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/insert}{insert} 96 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get}{get} 97 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete}{delete} 98 | } 99 | } 100 | 101 | \examples{ 102 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 103 | ds <- bq_test_dataset() 104 | 105 | bq_mtcars <- bq_table(ds, "mtcars") 106 | bq_table_exists(bq_mtcars) 107 | 108 | bq_table_create( 109 | bq_mtcars, 110 | fields = mtcars, 111 | friendly_name = "Motor Trend Car Road Tests", 112 | description = "The data was extracted from the 1974 Motor Trend US magazine", 113 | labels = list(category = "example") 114 | ) 115 | bq_table_exists(bq_mtcars) 116 | 117 | bq_table_upload(bq_mtcars, mtcars) 118 | 119 | bq_table_fields(bq_mtcars) 120 | bq_table_size(bq_mtcars) 121 | str(bq_table_meta(bq_mtcars)) 122 | 123 | bq_table_delete(bq_mtcars) 124 | bq_table_exists(bq_mtcars) 125 | 126 | my_natality <- bq_table(ds, "mynatality") 127 | bq_table_copy("publicdata.samples.natality", my_natality) 128 | \dontshow{\}) # examplesIf} 129 | } 130 | -------------------------------------------------------------------------------- /man/bigquery.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/dbi-driver.R 3 | \name{bigquery} 4 | \alias{bigquery} 5 | \alias{dbi_driver} 6 | \alias{dbConnect,BigQueryDriver-method} 7 | \title{BigQuery DBI driver} 8 | \usage{ 9 | \S4method{dbConnect}{BigQueryDriver}( 10 | drv, 11 | project, 12 | dataset = NULL, 13 | billing = project, 14 | page_size = 10000, 15 | quiet = NA, 16 | use_legacy_sql = FALSE, 17 | bigint = c("integer", "integer64", "numeric", "character"), 18 | ... 19 | ) 20 | } 21 | \arguments{ 22 | \item{drv}{an object that inherits from \linkS4class{DBIDriver}, 23 | or an existing \linkS4class{DBIConnection} 24 | object (in order to clone an existing connection).} 25 | 26 | \item{project, dataset}{Project and dataset identifiers} 27 | 28 | \item{billing}{Identifier of project to bill.} 29 | 30 | \item{page_size}{Number of items per page.} 31 | 32 | \item{quiet}{If \code{FALSE}, displays progress bar; if \code{TRUE} is silent; 33 | if \code{NA} picks based on whether or not you're in an interactive context.} 34 | 35 | \item{use_legacy_sql}{If \code{TRUE} will use BigQuery's legacy SQL format.} 36 | 37 | \item{bigint}{The R type that BigQuery's 64-bit integer types should be mapped to. 38 | The default is \code{"integer"} which returns R's \code{integer} type but results in \code{NA} for 39 | values above/below +/- 2147483647. \code{"integer64"} returns a \link[bit64:bit64-package]{bit64::integer64}, 40 | which allows the full range of 64 bit integers.} 41 | 42 | \item{...}{Other arguments for compatibility with generic; currently ignored.} 43 | } 44 | \description{ 45 | Creates a BigQuery DBI driver for use in \code{\link[DBI:dbConnect]{DBI::dbConnect()}}. 
46 | } 47 | \examples{ 48 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 49 | con <- DBI::dbConnect( 50 | bigquery(), 51 | project = "publicdata", 52 | dataset = "samples", 53 | billing = bq_test_project() 54 | ) 55 | con 56 | DBI::dbListTables(con) 57 | DBI::dbReadTable(con, "natality", n_max = 10) 58 | 59 | # Create a temporary dataset to explore 60 | ds <- bq_test_dataset() 61 | con <- DBI::dbConnect( 62 | bigquery(), 63 | project = ds$project, 64 | dataset = ds$dataset 65 | ) 66 | DBI::dbWriteTable(con, "mtcars", mtcars) 67 | DBI::dbReadTable(con, "mtcars")[1:6, ] 68 | 69 | DBI::dbGetQuery(con, "SELECT count(*) FROM mtcars") 70 | 71 | res <- DBI::dbSendQuery(con, "SELECT cyl, mpg FROM mtcars") 72 | DBI::dbColumnInfo(res) 73 | DBI::dbFetch(res, 10) 74 | DBI::dbFetch(res, -1) 75 | DBI::dbHasCompleted(res) 76 | \dontshow{\}) # examplesIf} 77 | } 78 | -------------------------------------------------------------------------------- /man/bigrquery-package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bigrquery-package.R 3 | \docType{package} 4 | \name{bigrquery-package} 5 | \alias{bigrquery} 6 | \alias{bigrquery-package} 7 | \title{bigrquery: An Interface to Google's 'BigQuery' 'API'} 8 | \description{ 9 | Easily talk to Google's 'BigQuery' database from R. 10 | } 11 | \section{Package options}{ 12 | 13 | \describe{ 14 | \item{\code{bigrquery.quiet}}{Verbose output during processing? The default 15 | value, \code{NA}, turns on verbose output for queries that run longer than 16 | two seconds. Use \code{FALSE} for immediate verbose output, \code{TRUE} 17 | for quiet operation.} 18 | 19 | \item{\code{bigrquery.page.size}}{Default page size for fetching data; 20 | defaults to 1e4.} 21 | } 22 | } 23 | 24 | \seealso{ 25 | Useful links: 26 | \itemize{ 27 | \item \url{https://bigrquery.r-dbi.org} 28 | \item \url{https://github.com/r-dbi/bigrquery} 29 | \item Report bugs at \url{https://github.com/r-dbi/bigrquery/issues} 30 | } 31 | 32 | } 33 | \author{ 34 | \strong{Maintainer}: Hadley Wickham \email{hadley@posit.co} (\href{https://orcid.org/0000-0003-4757-117X}{ORCID}) 35 | 36 | Authors: 37 | \itemize{ 38 | \item Jennifer Bryan \email{jenny@posit.co} (\href{https://orcid.org/0000-0002-6983-2759}{ORCID}) 39 | } 40 | 41 | Other contributors: 42 | \itemize{ 43 | \item Posit Software, PBC [copyright holder, funder] 44 | } 45 | 46 | } 47 | \keyword{internal} 48 | -------------------------------------------------------------------------------- /man/bq_auth_configure.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_auth_configure} 4 | \alias{bq_auth_configure} 5 | \alias{bq_oauth_client} 6 | \title{Edit and view auth configuration} 7 | \usage{ 8 | bq_auth_configure(client, path, app = deprecated()) 9 | 10 | bq_oauth_client() 11 | } 12 | \arguments{ 13 | \item{client}{A Google OAuth client, presumably constructed via 14 | \code{\link[gargle:gargle_oauth_client_from_json]{gargle::gargle_oauth_client_from_json()}}.
Note, however, that it is 15 | preferred to specify the client with JSON, using the \code{path} argument.} 16 | 17 | \item{path}{JSON downloaded from \href{https://console.cloud.google.com}{Google Cloud Console}, containing a client id and 18 | secret, in one of the forms supported for the \code{txt} argument of 19 | \code{\link[jsonlite:fromJSON]{jsonlite::fromJSON()}} (typically, a file path or JSON string).} 20 | 21 | \item{app}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} Replaced by the \code{client} 22 | argument.} 23 | } 24 | \value{ 25 | \itemize{ 26 | \item \code{bq_auth_configure()}: An object of R6 class 27 | \link[gargle:AuthState-class]{gargle::AuthState}, invisibly. 28 | \item \code{bq_oauth_client()}: the current user-configured OAuth client. 29 | } 30 | } 31 | \description{ 32 | These functions give more control over and visibility into the auth 33 | configuration than \code{\link[=bq_auth]{bq_auth()}} does. \code{bq_auth_configure()} 34 | lets the user specify their own: 35 | \itemize{ 36 | \item OAuth client, which is used when obtaining a user token. 37 | } 38 | 39 | See the \code{vignette("get-api-credentials", package = "gargle")} 40 | for more. 41 | If the user does not configure these settings, internal defaults 42 | are used. 43 | 44 | \code{bq_oauth_client()} retrieves the currently configured OAuth client. 45 | } 46 | \examples{ 47 | # see and store the current user-configured OAuth client (probably `NULL`) 48 | (original_client <- bq_oauth_client()) 49 | 50 | # the preferred way to configure your own client is via a JSON file 51 | # downloaded from Google Developers Console 52 | # this example JSON is indicative, but fake 53 | path_to_json <- system.file( 54 | "extdata", "data", "client_secret_123.googleusercontent.com.json", 55 | package = "bigrquery" 56 | ) 57 | bq_auth_configure(path = path_to_json) 58 | 59 | # confirm the changes 60 | bq_oauth_client() 61 | 62 | # restore original auth config 63 | bq_auth_configure(client = original_client) 64 | } 65 | \seealso{ 66 | Other auth functions: 67 | \code{\link{bq_auth}()}, 68 | \code{\link{bq_deauth}()} 69 | } 70 | \concept{auth functions} 71 | -------------------------------------------------------------------------------- /man/bq_deauth.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_deauth} 4 | \alias{bq_deauth} 5 | \title{Clear current token} 6 | \usage{ 7 | bq_deauth() 8 | } 9 | \description{ 10 | Clears any currently stored token. The next time bigrquery needs a token, the 11 | token acquisition process starts over, with a fresh call to \code{\link[=bq_auth]{bq_auth()}} and, 12 | therefore, internally, a call to \code{\link[gargle:token_fetch]{gargle::token_fetch()}}. Unlike some other 13 | packages that use gargle, bigrquery is not usable in a de-authorized state. 14 | Therefore, calling \code{bq_deauth()} only clears the token, i.e. it does NOT 15 | imply that subsequent requests are made with an API key in lieu of a token. 
16 | } 17 | \examples{ 18 | \dontrun{ 19 | bq_deauth() 20 | } 21 | } 22 | \seealso{ 23 | Other auth functions: 24 | \code{\link{bq_auth}()}, 25 | \code{\link{bq_auth_configure}()} 26 | } 27 | \concept{auth functions} 28 | -------------------------------------------------------------------------------- /man/bq_field.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-field.R 3 | \name{bq_field} 4 | \alias{bq_field} 5 | \alias{bq_fields} 6 | \alias{as_bq_field} 7 | \alias{as_bq_fields} 8 | \title{BigQuery field (and fields) class} 9 | \usage{ 10 | bq_field(name, type, mode = "NULLABLE", fields = list(), description = NULL) 11 | 12 | bq_fields(x) 13 | 14 | as_bq_field(x) 15 | 16 | as_bq_fields(x) 17 | } 18 | \arguments{ 19 | \item{name}{The field name. The name must contain only letters (a-z, A-Z), 20 | numbers (0-9), or underscores (_), and must start with a letter or 21 | underscore. The maximum length is 300 characters.} 22 | 23 | \item{type}{The field data type. Possible values include: 24 | \code{"STRING"}, \code{"BYTES"}, \code{"INTEGER"}, \code{"FLOAT"}, \code{"BOOLEAN"}, \code{"TIMESTAMP"}, 25 | \code{"DATE"}, \code{"TIME"}, \code{"DATETIME"}, \code{"GEOGRAPHY"}, \code{"NUMERIC"}, 26 | \code{"BIGNUMERIC"}, \code{"JSON"}, \code{"RECORD"}.} 27 | 28 | \item{mode}{The field mode. Possible values include: \code{"NULLABLE"}, 29 | \code{"REQUIRED"}, and \code{"REPEATED"}.} 30 | 31 | \item{fields}{For a field of type "record", a list of sub-fields.} 32 | 33 | \item{description}{The field description. The maximum length is 1,024 34 | characters.} 35 | 36 | \item{x}{A list of \code{bq_fields}} 37 | } 38 | \description{ 39 | \code{bq_field()} and \code{bq_fields()} create; \code{as_bq_field()} and \code{as_bq_fields()} 40 | coerce from lists. 41 | } 42 | \examples{ 43 | bq_field("name", "string") 44 | 45 | as_bq_fields(list( 46 | list(name = "name", type = "string"), 47 | bq_field("age", "integer") 48 | )) 49 | 50 | # as_bq_fields() can also take a data frame 51 | as_bq_fields(mtcars) 52 | } 53 | \seealso{ 54 | \code{bq_field()} corresponds to a \code{TableFieldSchema}; see 55 | \url{https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema} 56 | for more details. 57 | } 58 | -------------------------------------------------------------------------------- /man/bq_has_token.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_has_token} 4 | \alias{bq_has_token} 5 | \title{Is there a token on hand?} 6 | \usage{ 7 | bq_has_token() 8 | } 9 | \value{ 10 | Logical. 11 | } 12 | \description{ 13 | Reports whether bigrquery has stored a token, ready for use in downstream 14 | requests.
15 | } 16 | \examples{ 17 | bq_has_token() 18 | } 19 | \seealso{ 20 | Other low-level API functions: 21 | \code{\link{bq_token}()} 22 | } 23 | \concept{low-level API functions} 24 | -------------------------------------------------------------------------------- /man/bq_oauth_app.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_oauth_app} 4 | \alias{bq_oauth_app} 5 | \title{Get currently configured OAuth app (deprecated)} 6 | \usage{ 7 | bq_oauth_app() 8 | } 9 | \description{ 10 | \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} 11 | 12 | In light of the new \code{\link[gargle:gargle_oauth_client_from_json]{gargle::gargle_oauth_client()}} constructor and class of 13 | the same name, \code{bq_oauth_app()} is being replaced by 14 | \code{\link[=bq_oauth_client]{bq_oauth_client()}}. 15 | } 16 | \keyword{internal} 17 | -------------------------------------------------------------------------------- /man/bq_param.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-param.R 3 | \name{bq_param} 4 | \alias{bq_param} 5 | \alias{bq_param_scalar} 6 | \alias{bq_param_array} 7 | \title{Explicitly define query parameters} 8 | \usage{ 9 | bq_param(value, type = NULL, name = NULL) 10 | 11 | bq_param_scalar(value, type = NULL, name = NULL) 12 | 13 | bq_param_array(value, type = NULL, name = NULL) 14 | } 15 | \arguments{ 16 | \item{value}{vector of parameter values} 17 | 18 | \item{type}{BigQuery type of the parameter} 19 | 20 | \item{name}{name of the parameter in the query, omitting the \code{@}} 21 | } 22 | \description{ 23 | By default, bigrquery will assume vectors of length 1 are scalars, 24 | and longer vectors are arrays. If you need to pass a length-1 array, 25 | you'll need to explicitly use \code{bq_param_array()}. 26 | } 27 | \examples{ 28 | # bq_param() automatically picks scalar vs array based on length 29 | bq_param("a") 30 | bq_param(c("a", "b", "c")) 31 | 32 | # use bq_param_array() to create a length-1 array 33 | bq_param_array("a") 34 | } 35 | \keyword{internal} 36 | -------------------------------------------------------------------------------- /man/bq_projects.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-projects.R 3 | \name{bq_projects} 4 | \alias{bq_projects} 5 | \title{List available projects} 6 | \usage{ 7 | bq_projects(page_size = 100, max_pages = 1, warn = TRUE) 8 | } 9 | \arguments{ 10 | \item{page_size}{Number of items per page.} 11 | 12 | \item{max_pages}{Maximum number of pages to retrieve. Use \code{Inf} to retrieve 13 | all pages (this may take a long time!)} 14 | 15 | \item{warn}{If \code{TRUE}, warn when there are unretrieved pages.} 16 | } 17 | \value{ 18 | A character vector. 19 | } 20 | \description{ 21 | List all projects that you have access to. You can also work with 22 | \href{https://cloud.google.com/bigquery/public-data/}{public datasets}, 23 | but you will need to provide a \code{billing} project whenever you perform 24 | any non-free operation. 
25 | } 26 | \section{Google BigQuery API documentation}{ 27 | 28 | \itemize{ 29 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list}{list} 30 | } 31 | } 32 | 33 | \examples{ 34 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 35 | bq_projects() 36 | \dontshow{\}) # examplesIf} 37 | } 38 | -------------------------------------------------------------------------------- /man/bq_query.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-query.R 3 | \name{bq_query} 4 | \alias{bq_query} 5 | \alias{bq_project_query} 6 | \alias{bq_dataset_query} 7 | \title{Submit query to BigQuery} 8 | \usage{ 9 | bq_project_query(x, query, destination_table = NULL, ..., quiet = NA) 10 | 11 | bq_dataset_query( 12 | x, 13 | query, 14 | destination_table = NULL, 15 | ..., 16 | billing = NULL, 17 | quiet = NA 18 | ) 19 | } 20 | \arguments{ 21 | \item{x}{Either a project (a string) or a \link{bq_dataset}.} 22 | 23 | \item{query}{SQL query string.} 24 | 25 | \item{destination_table}{A \link{bq_table} where results should be stored. 26 | If not supplied, results will be saved to a temporary table that lives 27 | in a special dataset. You must supply this parameter for large 28 | queries (> 128 MB compressed).} 29 | 30 | \item{...}{Passed on to \code{\link[=bq_perform_query]{bq_perform_query()}}} 31 | 32 | \item{quiet}{If \code{FALSE}, displays a progress bar; if \code{TRUE}, is silent; 33 | if \code{NA}, picks based on whether or not you're in an interactive context.} 34 | 35 | \item{billing}{If you query a dataset that you only have read access 36 | to, such as a public dataset, you must also supply a \code{billing} project.} 37 | } 38 | \value{ 39 | A \link{bq_table}. 40 | } 41 | \description{ 42 | These submit a query (using \code{\link[=bq_perform_query]{bq_perform_query()}}) and then wait for it to 43 | complete (with \code{\link[=bq_job_wait]{bq_job_wait()}}). All BigQuery queries save their results 44 | into a table (temporary or otherwise), so these functions return a \link{bq_table} 45 | which you can then query for more information. 46 | } 47 | \examples{ 48 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 49 | # Querying a project requires the full table name in the query 50 | tb <- bq_project_query( 51 | bq_test_project(), 52 | "SELECT count(*) FROM publicdata.samples.natality" 53 | ) 54 | bq_table_fields(tb) 55 | bq_table_download(tb) 56 | 57 | # Querying a dataset sets the default dataset so you can use a bare table name, 58 | # but for public data, you'll need to set a project to bill.
59 | ds <- bq_dataset("publicdata", "samples") 60 | tb <- bq_dataset_query(ds, 61 | query = "SELECT count(*) FROM natality", 62 | billing = bq_test_project() 63 | ) 64 | bq_table_download(tb) 65 | 66 | tb <- bq_dataset_query(ds, 67 | query = "SELECT count(*) FROM natality WHERE state = @state", 68 | parameters = list(state = "KS"), 69 | billing = bq_test_project() 70 | ) 71 | bq_table_download(tb) 72 | \dontshow{\}) # examplesIf} 73 | } 74 | -------------------------------------------------------------------------------- /man/bq_refs.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-refs.R 3 | \name{bq_refs} 4 | \alias{bq_dataset} 5 | \alias{as_bq_dataset} 6 | \alias{bq_table} 7 | \alias{as_bq_table} 8 | \alias{bq_job} 9 | \alias{as_bq_job} 10 | \title{S3 classes for BigQuery datasets, tables and jobs} 11 | \usage{ 12 | bq_dataset(project, dataset) 13 | 14 | as_bq_dataset(x, ..., error_arg = caller_arg(x), error_call = caller_env()) 15 | 16 | bq_table(project, dataset, table = NULL, type = "TABLE") 17 | 18 | as_bq_table(x, ..., error_arg = caller_arg(x), error_call = caller_env()) 19 | 20 | bq_job(project, job, location = "US") 21 | 22 | as_bq_job(x, ..., error_arg = caller_arg(x), error_call = caller_env()) 23 | } 24 | \arguments{ 25 | \item{project, dataset, table, job, type}{Individual project, dataset, table, 26 | job identifiers and table type (strings). 27 | 28 | For \code{bq_table()}, if you supply a \code{bq_dataset} as the first argument, 29 | the second argument will be interpreted as the \code{table}.} 30 | 31 | \item{x}{An object to coerce to a \code{bq_job}, \code{bq_dataset}, or \code{bq_table}. 32 | Built-in methods handle strings and lists.} 33 | 34 | \item{...}{Other arguments passed on to methods.} 35 | 36 | \item{error_arg}{An argument name as a string. This argument 37 | will be mentioned in error messages as the input that is at the 38 | origin of a problem.} 39 | 40 | \item{error_call}{The execution environment of a currently 41 | running function, e.g. \code{caller_env()}. The function will be 42 | mentioned in error messages as the source of the error. See the 43 | \code{call} argument of \code{\link[rlang:abort]{abort()}} for more information.} 44 | 45 | \item{location}{Job location.} 46 | } 47 | \description{ 48 | Create references to BigQuery datasets, jobs, and tables. Each class 49 | has a constructor function (\code{bq_dataset()}, \code{bq_table()}, \code{bq_job()}) 50 | and a coercion function (\code{as_bq_dataset()}, \code{as_bq_table()}, \code{as_bq_job()}). 51 | The coercion functions come with methods for strings (which find components 52 | by splitting on \code{.}) and lists (which look for named components like 53 | \code{projectId} or \code{project_id}). 54 | 55 | All \code{bq_table_}, \code{bq_dataset_}, and \code{bq_job_} functions call the appropriate 56 | coercion functions on their first argument, allowing you to flexibly specify 57 | their inputs.
58 | } 59 | \examples{ 60 | # Creation ------------------------------------------------ 61 | samples <- bq_dataset("publicdata", "samples") 62 | natality <- bq_table("publicdata", "samples", "natality") 63 | natality 64 | 65 | # Or 66 | bq_table(samples, "natality") 67 | 68 | bq_job("bigrquery-examples", "m0SgFu2ycbbge6jgcvzvflBJ_Wft") 69 | 70 | # Coercion ------------------------------------------------ 71 | as_bq_dataset("publicdata.shakespeare") 72 | as_bq_table("publicdata.samples.natality") 73 | 74 | as_bq_table(list( 75 | project_id = "publicdata", 76 | dataset_id = "samples", 77 | table_id = "natality" 78 | )) 79 | 80 | as_bq_job(list( 81 | projectId = "bigrquery-examples", 82 | jobId = "job_m0SgFu2ycbbge6jgcvzvflBJ_Wft", 83 | location = "US" 84 | )) 85 | 86 | } 87 | \seealso{ 88 | \link{api-job}, \link{api-perform}, \link{api-dataset}, and \link{api-table} for 89 | functions that work with these objects. 90 | } 91 | -------------------------------------------------------------------------------- /man/bq_table_download.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-download.R 3 | \name{bq_table_download} 4 | \alias{bq_table_download} 5 | \title{Download table data} 6 | \usage{ 7 | bq_table_download( 8 | x, 9 | n_max = Inf, 10 | page_size = NULL, 11 | start_index = 0L, 12 | max_connections = 6L, 13 | quiet = NA, 14 | bigint = c("integer", "integer64", "numeric", "character"), 15 | api = c("json", "arrow"), 16 | billing = x$project, 17 | max_results = deprecated() 18 | ) 19 | } 20 | \arguments{ 21 | \item{x}{A \link{bq_table}} 22 | 23 | \item{n_max}{Maximum number of results to retrieve. Use \code{Inf} to retrieve all 24 | rows.} 25 | 26 | \item{page_size}{(JSON only) The number of rows requested per chunk. It is 27 | recommended to leave this unspecified until you have evidence that the 28 | \code{page_size} selected automatically by \code{bq_table_download()} is problematic. 29 | 30 | When \code{page_size = NULL}, bigrquery determines a conservative, natural chunk 31 | size empirically. If you specify the \code{page_size}, it is important that each 32 | chunk fits on one page, i.e. that the requested row limit is low enough to 33 | prevent the API from paginating based on response size.} 34 | 35 | \item{start_index}{(JSON only) Starting row index (zero-based).} 36 | 37 | \item{max_connections}{(JSON only) Maximum number of simultaneous 38 | connections to BigQuery servers.} 39 | 40 | \item{quiet}{If \code{FALSE}, displays a progress bar; if \code{TRUE}, is silent; 41 | if \code{NA}, picks based on whether or not you're in an interactive context.} 42 | 43 | \item{bigint}{The R type that BigQuery's 64-bit integer types should be 44 | mapped to. The default is \code{"integer"}, which returns R's \code{integer} type, 45 | but results in \code{NA} for values above/below +/- 2147483647. \code{"integer64"} 46 | returns a \link[bit64:bit64-package]{bit64::integer64}, which allows the full range of 64-bit 47 | integers.} 48 | 49 | \item{api}{Which API to use? The \code{"json"} API works wherever bigrquery 50 | does, but is slow and can require fiddling with the \code{page_size} parameter. 51 | The \code{"arrow"} API is faster and more reliable, but only works if you 52 | have also installed the bigrquerystorage package.
53 | 54 | Because the \code{"arrow"} API is so much faster, it will be used automatically 55 | if the bigrquerystorage package is installed.} 56 | 57 | \item{billing}{(Arrow only) Project to bill; defaults to the project of \code{x}, 58 | and typically only needs to be specified if you're working with public 59 | datasets.} 60 | 61 | \item{max_results}{\ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} Please use 62 | \code{n_max} instead.} 63 | } 64 | \value{ 65 | Because data retrieval may generate list-columns and the \code{data.frame} 66 | print method can have problems with list-columns, this method returns 67 | a tibble. If you need a \code{data.frame}, coerce the results with 68 | \code{\link[=as.data.frame]{as.data.frame()}}. 69 | } 70 | \description{ 71 | This function provides two ways to download data from BigQuery, transferring 72 | data using either JSON or arrow, depending on the \code{api} argument. If 73 | bigrquerystorage is installed, \code{api = "arrow"} will be used (because it's 74 | so much faster, but see the limitations below); otherwise you can select 75 | deliberately by using \code{api = "json"} or \code{api = "arrow"}. 76 | \subsection{Arrow API}{ 77 | 78 | The arrow API is much faster, but has heavier dependencies: bigrquerystorage 79 | requires the arrow package, which can be tricky to compile on Linux (but you 80 | should usually be able to get a binary from 81 | \href{https://posit.co/products/cloud/public-package-manager/}{Posit Public Package Manager}). 82 | 83 | There's one known limitation of \code{api = "arrow"}: when querying public data, 84 | you'll now need to provide a \code{billing} project. 85 | } 86 | 87 | \subsection{JSON API}{ 88 | 89 | The JSON API retrieves rows in chunks of \code{page_size}. It is most suitable 90 | for results of smaller queries (<100 MB, say). Unfortunately, due to 91 | limitations in the BigQuery API, you may need to vary this parameter 92 | depending on the complexity of the underlying data. 93 | 94 | The JSON API will convert nested and repeated columns into list-columns 95 | as follows: 96 | \itemize{ 97 | \item Repeated values (arrays) will become a list-column of vectors. 98 | \item Records will become list-columns of named lists. 99 | \item Repeated records will become list-columns of data frames.
100 | } 101 | } 102 | } 103 | \section{Google BigQuery API documentation}{ 104 | 105 | \itemize{ 106 | \item \href{https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list}{list} 107 | } 108 | } 109 | 110 | \examples{ 111 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 112 | df <- bq_table_download("publicdata.samples.natality", n_max = 35000, billing = bq_test_project()) 113 | \dontshow{\}) # examplesIf} 114 | } 115 | -------------------------------------------------------------------------------- /man/bq_test_project.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-test.R 3 | \name{bq_test_project} 4 | \alias{bq_test_project} 5 | \alias{bq_test_init} 6 | \alias{bq_test_dataset} 7 | \alias{bq_testable} 8 | \alias{bq_authable} 9 | \alias{gs_test_bucket} 10 | \alias{gs_test_object} 11 | \title{Project to use for testing bigrquery} 12 | \usage{ 13 | bq_test_project() 14 | 15 | bq_test_init(name = "basedata") 16 | 17 | bq_test_dataset(name = random_name(), location = "US") 18 | 19 | bq_testable() 20 | 21 | bq_authable() 22 | 23 | gs_test_bucket() 24 | 25 | gs_test_object(name = random_name()) 26 | } 27 | \arguments{ 28 | \item{name}{Dataset name - used only for testing.} 29 | } 30 | \value{ 31 | \code{bq_test_project()} returns the name of a project suitable for use in 32 | testing. \code{bq_test_dataset()} creates a temporary dataset whose lifetime is 33 | tied to the lifetime of the object that it returns. 34 | } 35 | \description{ 36 | You'll need to set the \code{BIGQUERY_TEST_PROJECT} (name of a project) and 37 | \code{BIGQUERY_TEST_BUCKET} (name of a bucket) env vars in order to run bigrquery 38 | tests locally. I recommend creating a new project because the tests involve 39 | both reading and writing in BigQuery and Cloud Storage. 40 | 41 | The \code{BIGQUERY_TEST_PROJECT} project must have billing enabled. While 42 | logged in via \code{bq_auth()} as a user with permission to work in 43 | \code{BIGQUERY_TEST_PROJECT}, run \code{bq_test_init()} once to perform some setup. 44 | } 45 | \section{Testing}{ 46 | In tests, \code{bq_test_project()} (and hence \code{bq_test_dataset()}) will 47 | automatically skip if auth and a test project are not available. 48 | } 49 | 50 | \examples{ 51 | \dontshow{if (bq_testable()) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 52 | ds <- bq_test_dataset() 53 | bq_mtcars <- bq_table_upload(bq_table(ds, "mtcars"), mtcars) 54 | 55 | # dataset and table will be automatically deleted when ds is GC'd 56 | \dontshow{\}) # examplesIf} 57 | } 58 | \keyword{internal} 59 | -------------------------------------------------------------------------------- /man/bq_token.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_token} 4 | \alias{bq_token} 5 | \title{Produce configured token} 6 | \usage{ 7 | bq_token() 8 | } 9 | \value{ 10 | A \code{request} object (an S3 class provided by \link[httr:httr-package]{httr}). 11 | } 12 | \description{ 13 | For internal use or for those programming around the BigQuery API. 14 | Returns a token pre-processed with \code{\link[httr:config]{httr::config()}}.
Most users 15 | do not need to handle tokens "by hand"; even if they need some 16 | control, \code{\link[=bq_auth]{bq_auth()}} is usually all they need. If there is no current 17 | token, \code{\link[=bq_auth]{bq_auth()}} is called to either load a token from the cache or 18 | initiate the OAuth 2.0 flow. 19 | If auth has been deactivated via \code{\link[=bq_deauth]{bq_deauth()}}, \code{bq_token()} 20 | returns \code{NULL}. 21 | } 22 | \examples{ 23 | \dontrun{ 24 | bq_token() 25 | } 26 | } 27 | \seealso{ 28 | Other low-level API functions: 29 | \code{\link{bq_has_token}()} 30 | } 31 | \concept{low-level API functions} 32 | -------------------------------------------------------------------------------- /man/bq_user.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bq-auth.R 3 | \name{bq_user} 4 | \alias{bq_user} 5 | \title{Get info on current user} 6 | \usage{ 7 | bq_user() 8 | } 9 | \value{ 10 | An email address or, if no token has been loaded, \code{NULL}. 11 | } 12 | \description{ 13 | Reveals the email address of the user associated with the current token. 14 | If no token has been loaded yet, this function does not initiate auth. 15 | } 16 | \examples{ 17 | \dontrun{ 18 | bq_user() 19 | } 20 | } 21 | \seealso{ 22 | \code{\link[gargle:token-info]{gargle::token_userinfo()}}, \code{\link[gargle:token-info]{gargle::token_email()}}, 23 | \code{\link[gargle:token-info]{gargle::token_tokeninfo()}} 24 | } 25 | -------------------------------------------------------------------------------- /man/collect.tbl_BigQueryConnection.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/dplyr.R 3 | \name{collect.tbl_BigQueryConnection} 4 | \alias{collect.tbl_BigQueryConnection} 5 | \title{Collect a BigQuery table} 6 | \usage{ 7 | collect.tbl_BigQueryConnection( 8 | x, 9 | ..., 10 | n = Inf, 11 | api = c("json", "arrow"), 12 | page_size = NULL, 13 | max_connections = 6L 14 | ) 15 | } 16 | \arguments{ 17 | \item{x}{A data frame, data frame extension (e.g. a tibble), or a lazy 18 | data frame (e.g. from dbplyr or dtplyr). See \emph{Methods}, below, for more 19 | details.} 20 | 21 | \item{...}{Other arguments passed on to 22 | \code{bq_project_query()}/\code{bq_dataset_query()}} 23 | 24 | \item{n}{Maximum number of results to retrieve. 25 | The default, \code{Inf}, will retrieve all rows.} 26 | 27 | \item{api}{Which API to use? The \code{"json"} API works wherever bigrquery 28 | does, but is slow and can require fiddling with the \code{page_size} parameter. 29 | The \code{"arrow"} API is faster and more reliable, but only works if you 30 | have also installed the bigrquerystorage package. 31 | 32 | Because the \code{"arrow"} API is so much faster, it will be used automatically 33 | if the bigrquerystorage package is installed.} 34 | 35 | \item{page_size}{(JSON only) The number of rows requested per chunk. It is 36 | recommended to leave this unspecified until you have evidence that the 37 | \code{page_size} selected automatically by \code{bq_table_download()} is problematic. 38 | 39 | When \code{page_size = NULL}, bigrquery determines a conservative, natural chunk 40 | size empirically. If you specify the \code{page_size}, it is important that each 41 | chunk fits on one page, i.e.
that the requested row limit is low enough to 42 | prevent the API from paginating based on response size.} 43 | 44 | \item{max_connections}{(JSON only) Maximum number of simultaneous 45 | connections to BigQuery servers.} 46 | } 47 | \description{ 48 | This collect method is specialised for BigQuery tables, generating the 49 | SQL from your dplyr commands, then calling \code{\link[=bq_project_query]{bq_project_query()}} 50 | or \code{\link[=bq_dataset_query]{bq_dataset_query()}} to run the query, then \code{\link[=bq_table_download]{bq_table_download()}} 51 | to download the results. Thus the arguments are a combination of the 52 | arguments to \code{\link[dplyr:compute]{dplyr::collect()}}, \code{bq_project_query()}/\code{bq_dataset_query()}, 53 | and \code{bq_table_download()}. 54 | } 55 | -------------------------------------------------------------------------------- /man/figures/lifecycle-archived.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: archived"] -------------------------------------------------------------------------------- /man/figures/lifecycle-defunct.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: defunct"] -------------------------------------------------------------------------------- /man/figures/lifecycle-deprecated.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: deprecated"] -------------------------------------------------------------------------------- /man/figures/lifecycle-experimental.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: experimental"] -------------------------------------------------------------------------------- /man/figures/lifecycle-maturing.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: maturing"] -------------------------------------------------------------------------------- /man/figures/lifecycle-questioning.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: questioning"] -------------------------------------------------------------------------------- /man/figures/lifecycle-soft-deprecated.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: soft-deprecated"] -------------------------------------------------------------------------------- /man/figures/lifecycle-stable.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: stable"]
-------------------------------------------------------------------------------- /man/figures/lifecycle-superseded.svg: -------------------------------------------------------------------------------- [SVG badge: "lifecycle: superseded"] -------------------------------------------------------------------------------- /man/src_bigquery.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/dplyr.R 3 | \name{src_bigquery} 4 | \alias{src_bigquery} 5 | \title{A BigQuery data source for dplyr.} 6 | \usage{ 7 | src_bigquery(project, dataset, billing = project, max_pages = 10) 8 | } 9 | \arguments{ 10 | \item{project}{project id or name} 11 | 12 | \item{dataset}{dataset name} 13 | 14 | \item{billing}{billing project, if different from \code{project}} 15 | 16 | \item{max_pages}{(IGNORED) maximum pages returned by a query} 17 | } 18 | \description{ 19 | Create the connection to the database with \code{DBI::dbConnect()} then 20 | use \code{\link[dplyr:tbl]{dplyr::tbl()}} to connect to tables within that database. Generally, 21 | it's best to provide the fully qualified name of the table (i.e. 22 | \code{project.dataset.table}) but if you supply a default \code{dataset} in the 23 | connection, you can use just the table name. (This, however, will 24 | prevent you from making joins across datasets.) 25 | } 26 | \examples{ 27 | \dontrun{ 28 | library(dplyr) 29 | 30 | # To run this example, replace billing with the id of one of your projects 31 | # set up for billing 32 | con <- DBI::dbConnect(bigquery(), project = bq_test_project()) 33 | 34 | shakespeare <- con \%>\% tbl(I("publicdata.samples.shakespeare")) 35 | shakespeare 36 | shakespeare \%>\% 37 | group_by(word) \%>\% 38 | summarise(n = sum(word_count, na.rm = TRUE)) \%>\% 39 | arrange(desc(n)) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /revdep/.gitignore: -------------------------------------------------------------------------------- 1 | checks 2 | library 3 | checks.noindex 4 | library.noindex 5 | cloud.noindex 6 | data.sqlite 7 | *.html 8 | -------------------------------------------------------------------------------- /revdep/README.md: -------------------------------------------------------------------------------- 1 | # Revdeps 2 | 3 | ## Failed to check (5) 4 | 5 | |package |version |error |warning |note | 6 | |:----------------|:-------|:-----|:-------|:----| 7 | |AnVILBilling |? | | | | 8 | |bigrquerystorage |1.0.0 |1 | | | 9 | |BiocOncoTK |? | | | | 10 | |restfulSE |? | | | | 11 | |SQLDataFrame |? | | | | 12 | 13 | -------------------------------------------------------------------------------- /revdep/cran.md: -------------------------------------------------------------------------------- 1 | ## revdepcheck results 2 | 3 | We checked 14 reverse dependencies (10 from CRAN + 4 from Bioconductor), comparing R CMD check results across CRAN and dev versions of this package. 4 | 5 | * We saw 0 new problems 6 | * We failed to check 1 package 7 | 8 | Issues with CRAN packages are summarised below.
9 | 10 | ### Failed to check 11 | 12 | * bigrquerystorage (NA) 13 | -------------------------------------------------------------------------------- /revdep/email.yml: -------------------------------------------------------------------------------- 1 | release_date: ??? 2 | rel_release_date: ??? 3 | my_news_url: ??? 4 | release_version: ??? 5 | release_details: ??? 6 | -------------------------------------------------------------------------------- /revdep/failures.md: -------------------------------------------------------------------------------- 1 | # AnVILBilling 2 | 3 |
4 | 5 | * Version: NA 6 | * GitHub: NA 7 | * Source code: https://github.com/cran/AnVILBilling 8 | * Number of recursive dependencies: 106 9 | 10 | Run `revdepcheck::cloud_details(, "AnVILBilling")` for more info 11 | 12 |
13 | 14 | ## Error before installation 15 | 16 | ### Devel 17 | 18 | ``` 19 | 20 | 21 | 22 | 23 | 24 | 25 | ``` 26 | ### CRAN 27 | 28 | ``` 29 | 30 | 31 | 32 | 33 | 34 | 35 | ``` 36 | # bigrquerystorage 37 | 38 |
39 | 40 | * Version: 1.0.0 41 | * GitHub: https://github.com/meztez/bigrquerystorage 42 | * Source code: https://github.com/cran/bigrquerystorage 43 | * Date/Publication: 2024-02-26 19:00:05 UTC 44 | * Number of recursive dependencies: 56 45 | 46 | Run `revdepcheck::cloud_details(, "bigrquerystorage")` for more info 47 | 48 |
49 | 50 | ## In both 51 | 52 | * checking whether package ‘bigrquerystorage’ can be installed ... ERROR 53 | ``` 54 | Installation failed. 55 | See ‘/tmp/workdir/bigrquerystorage/new/bigrquerystorage.Rcheck/00install.out’ for details. 56 | ``` 57 | 58 | ## Installation 59 | 60 | ### Devel 61 | 62 | ``` 63 | * installing *source* package ‘bigrquerystorage’ ... 64 | ** package ‘bigrquerystorage’ successfully unpacked and MD5 sums checked 65 | ** using staged installation 66 | Package grpc++ was not found in the pkg-config search path. 67 | Perhaps you should add the directory containing `grpc++.pc' 68 | to the PKG_CONFIG_PATH environment variable 69 | No package 'grpc++' found 70 | Using PKG_CFLAGS= 71 | Using PKG_LIBS=-lgrpc++ -lgrpc -lprotobuf 72 | Found C++17 compiler: g++ -std=gnu++17 -g -O2 73 | ... 74 | If protobuf grpc++ is already installed, check that 'pkg-config' is in your 75 | PATH and PKG_CONFIG_PATH contains a protobuf grpc++.pc file. If pkg-config 76 | is unavailable you can set INCLUDE_DIR and LIB_DIR manually via: 77 | R CMD INSTALL --configure-vars='INCLUDE_DIR=... LIB_DIR=...' 78 | ----------------------------[ ERROR MESSAGE ]---------------------------- 79 | :1:10: fatal error: grpc/grpc.h: No such file or directory 80 | compilation terminated. 81 | ------------------------------------------------------------------------ 82 | ERROR: configuration failed for package ‘bigrquerystorage’ 83 | * removing ‘/tmp/workdir/bigrquerystorage/new/bigrquerystorage.Rcheck/bigrquerystorage’ 84 | 85 | 86 | ``` 87 | ### CRAN 88 | 89 | ``` 90 | * installing *source* package ‘bigrquerystorage’ ... 91 | ** package ‘bigrquerystorage’ successfully unpacked and MD5 sums checked 92 | ** using staged installation 93 | Package grpc++ was not found in the pkg-config search path. 94 | Perhaps you should add the directory containing `grpc++.pc' 95 | to the PKG_CONFIG_PATH environment variable 96 | No package 'grpc++' found 97 | Using PKG_CFLAGS= 98 | Using PKG_LIBS=-lgrpc++ -lgrpc -lprotobuf 99 | Found C++17 compiler: g++ -std=gnu++17 -g -O2 100 | ... 101 | If protobuf grpc++ is already installed, check that 'pkg-config' is in your 102 | PATH and PKG_CONFIG_PATH contains a protobuf grpc++.pc file. If pkg-config 103 | is unavailable you can set INCLUDE_DIR and LIB_DIR manually via: 104 | R CMD INSTALL --configure-vars='INCLUDE_DIR=... LIB_DIR=...' 105 | ----------------------------[ ERROR MESSAGE ]---------------------------- 106 | :1:10: fatal error: grpc/grpc.h: No such file or directory 107 | compilation terminated. 108 | ------------------------------------------------------------------------ 109 | ERROR: configuration failed for package ‘bigrquerystorage’ 110 | * removing ‘/tmp/workdir/bigrquerystorage/old/bigrquerystorage.Rcheck/bigrquerystorage’ 111 | 112 | 113 | ``` 114 | # BiocOncoTK 115 | 116 |
117 | 118 | * Version: NA 119 | * GitHub: NA 120 | * Source code: https://github.com/cran/BiocOncoTK 121 | * Number of recursive dependencies: 218 122 | 123 | Run `revdepcheck::cloud_details(, "BiocOncoTK")` for more info 124 | 125 |
126 | 127 | ## Error before installation 128 | 129 | ### Devel 130 | 131 | ``` 132 | 133 | 134 | 135 | 136 | 137 | 138 | ``` 139 | ### CRAN 140 | 141 | ``` 142 | 143 | 144 | 145 | 146 | 147 | 148 | ``` 149 | # restfulSE 150 | 151 |
152 | 153 | * Version: NA 154 | * GitHub: NA 155 | * Source code: https://github.com/cran/restfulSE 156 | * Number of recursive dependencies: 132 157 | 158 | Run `revdepcheck::cloud_details(, "restfulSE")` for more info 159 | 160 |
161 | 162 | ## Error before installation 163 | 164 | ### Devel 165 | 166 | ``` 167 | 168 | 169 | 170 | 171 | 172 | 173 | ``` 174 | ### CRAN 175 | 176 | ``` 177 | 178 | 179 | 180 | 181 | 182 | 183 | ``` 184 | # SQLDataFrame 185 | 186 |
187 | 188 | * Version: NA 189 | * GitHub: NA 190 | * Source code: https://github.com/cran/SQLDataFrame 191 | * Number of recursive dependencies: 93 192 | 193 | Run `revdepcheck::cloud_details(, "SQLDataFrame")` for more info 194 | 195 |
196 | 197 | ## Error before installation 198 | 199 | ### Devel 200 | 201 | ``` 202 | 203 | 204 | 205 | 206 | 207 | 208 | ``` 209 | ### CRAN 210 | 211 | ``` 212 | 213 | 214 | 215 | 216 | 217 | 218 | ``` 219 | -------------------------------------------------------------------------------- /revdep/problems.md: -------------------------------------------------------------------------------- 1 | *Wow, no problems at all. :)* -------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | *.o 2 | *.so 3 | *.dll 4 | -------------------------------------------------------------------------------- /src/base64.c: -------------------------------------------------------------------------------- 1 | 2 | // implementation by Gábor Csárdi from the processx package: 3 | // https://github.com/r-lib/processx/blob/master/src/base64.c 4 | 5 | #ifndef _GNU_SOURCE 6 | #define _GNU_SOURCE 1 7 | #endif 8 | 9 | #include 10 | #include 11 | 12 | #define BASE64_ENCODE_OUT_SIZE(s) ((unsigned int)((((s) + 2) / 3) * 4)) 13 | #define BASE64_DECODE_OUT_SIZE(s) ((unsigned int)(((s) / 4) * 3)) 14 | 15 | #define BASE64_PAD '=' 16 | 17 | /* BASE 64 encode table */ 18 | static const char base64en[] = { 19 | 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 20 | 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 21 | 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 22 | 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 23 | 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 24 | 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 25 | 'w', 'x', 'y', 'z', '0', '1', '2', '3', 26 | '4', '5', '6', '7', '8', '9', '+', '/', 27 | }; 28 | 29 | /* ASCII order for BASE 64 decode, 255 in unused character */ 30 | static const unsigned char base64de[] = { 31 | /* nul, soh, stx, etx, eot, enq, ack, bel, */ 32 | 255, 255, 255, 255, 255, 255, 255, 255, 33 | 34 | /* bs, ht, nl, vt, np, cr, so, si, */ 35 | 255, 255, 255, 255, 255, 255, 255, 255, 36 | 37 | /* dle, dc1, dc2, dc3, dc4, nak, syn, etb, */ 38 | 255, 255, 255, 255, 255, 255, 255, 255, 39 | 40 | /* can, em, sub, esc, fs, gs, rs, us, */ 41 | 255, 255, 255, 255, 255, 255, 255, 255, 42 | 43 | /* sp, '!', '"', '#', '$', '%', '&', ''', */ 44 | 255, 255, 255, 255, 255, 255, 255, 255, 45 | 46 | /* '(', ')', '*', '+', ',', '-', '.', '/', */ 47 | 255, 255, 255, 62, 255, 255, 255, 63, 48 | 49 | /* '0', '1', '2', '3', '4', '5', '6', '7', */ 50 | 52, 53, 54, 55, 56, 57, 58, 59, 51 | 52 | /* '8', '9', ':', ';', '<', '=', '>', '?', */ 53 | 60, 61, 255, 255, 255, 255, 255, 255, 54 | 55 | /* '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', */ 56 | 255, 0, 1, 2, 3, 4, 5, 6, 57 | 58 | /* 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', */ 59 | 7, 8, 9, 10, 11, 12, 13, 14, 60 | 61 | /* 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', */ 62 | 15, 16, 17, 18, 19, 20, 21, 22, 63 | 64 | /* 'X', 'Y', 'Z', '[', '\', ']', '^', '_', */ 65 | 23, 24, 25, 255, 255, 255, 255, 255, 66 | 67 | /* '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', */ 68 | 255, 26, 27, 28, 29, 30, 31, 32, 69 | 70 | /* 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', */ 71 | 33, 34, 35, 36, 37, 38, 39, 40, 72 | 73 | /* 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', */ 74 | 41, 42, 43, 44, 45, 46, 47, 48, 75 | 76 | /* 'x', 'y', 'z', '{', '|', '}', '~', del, */ 77 | 49, 50, 51, 255, 255, 255, 255, 255 78 | }; 79 | 80 | SEXP base64_encode(SEXP array) { 81 | 82 | const unsigned char *in = RAW(array); 83 | unsigned int inlen = LENGTH(array); 84 | unsigned int outlen = BASE64_ENCODE_OUT_SIZE(inlen); 85 | SEXP rout = 
PROTECT(allocVector(RAWSXP, outlen)); 86 | unsigned char *out = (unsigned char*) RAW(rout); 87 | 88 | int s; 89 | unsigned int i; 90 | unsigned int j; 91 | unsigned char c; 92 | unsigned char l; 93 | 94 | s = 0; 95 | l = 0; 96 | for (i = j = 0; i < inlen; i++) { 97 | c = in[i]; 98 | 99 | switch (s) { 100 | case 0: 101 | s = 1; 102 | out[j++] = base64en[(c >> 2) & 0x3F]; 103 | break; 104 | case 1: 105 | s = 2; 106 | out[j++] = base64en[((l & 0x3) << 4) | ((c >> 4) & 0xF)]; 107 | break; 108 | case 2: 109 | s = 0; 110 | out[j++] = base64en[((l & 0xF) << 2) | ((c >> 6) & 0x3)]; 111 | out[j++] = base64en[c & 0x3F]; 112 | break; 113 | } 114 | l = c; 115 | } 116 | 117 | switch (s) { 118 | case 1: 119 | out[j++] = base64en[(l & 0x3) << 4]; 120 | out[j++] = BASE64_PAD; 121 | out[j++] = BASE64_PAD; 122 | break; 123 | case 2: 124 | out[j++] = base64en[(l & 0xF) << 2]; 125 | out[j++] = BASE64_PAD; 126 | break; 127 | } 128 | 129 | 130 | UNPROTECT(1); 131 | return rout; 132 | } 133 | 134 | SEXP base64_decode(const char *in, unsigned int inlen) { 135 | // const unsigned char *in = (const unsigned char*) RAW(array); 136 | // unsigned int inlen = LENGTH(array); 137 | unsigned int outlen = BASE64_DECODE_OUT_SIZE(inlen); 138 | SEXP rout = PROTECT(allocVector(RAWSXP, outlen)); 139 | unsigned char *out = RAW(rout); 140 | 141 | unsigned int i; 142 | unsigned int j; 143 | unsigned char c; 144 | 145 | if (inlen & 0x3) { 146 | UNPROTECT(1); 147 | return rout; 148 | } 149 | 150 | for (i = j = 0; i < inlen; i++) { 151 | if (in[i] == BASE64_PAD) { 152 | break; 153 | } 154 | if (in[i] < 0) { 155 | UNPROTECT(1); 156 | return rout; 157 | } 158 | 159 | c = base64de[(unsigned) in[i]]; 160 | if (c == 255) { 161 | UNPROTECT(1); 162 | return rout; 163 | } 164 | 165 | switch (i & 0x3) { 166 | case 0: 167 | out[j] = (c << 2) & 0xFF; 168 | break; 169 | case 1: 170 | out[j++] |= (c >> 4) & 0x3; 171 | out[j] = (unsigned char)((c & 0xF) << 4); 172 | break; 173 | case 2: 174 | out[j++] |= (c >> 2) & 0xF; 175 | out[j] = (unsigned char)((c & 0x3) << 6); 176 | break; 177 | case 3: 178 | out[j++] |= c; 179 | break; 180 | } 181 | } 182 | 183 | /* We might have allocated to much space, because of the padding... 
*/ 184 | if (j + 1 < outlen) { 185 | SEXP rout2 = PROTECT(allocVector(RAWSXP, j)); 186 | memcpy(RAW(rout2), RAW(rout), j); 187 | UNPROTECT(2); 188 | return rout2; 189 | } else { 190 | UNPROTECT(1); 191 | return rout; 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/base64.h: -------------------------------------------------------------------------------- 1 | 2 | // implementation by Gábor Csárdi from the processx package: 3 | // https://github.com/r-lib/processx/blob/master/src/base64.c 4 | 5 | #ifndef BASE64_H 6 | #define BASE64_H 7 | 8 | #ifdef __cplusplus 9 | extern "C" { 10 | #endif 11 | 12 | SEXP base64_encode(SEXP array); 13 | SEXP base64_decode(const char *in, unsigned int inlen); 14 | #ifdef __cplusplus 15 | } 16 | #endif 17 | 18 | #endif /* BASE64_H */ 19 | -------------------------------------------------------------------------------- /src/cpp11.cpp: -------------------------------------------------------------------------------- 1 | // Generated by cpp11: do not edit by hand 2 | // clang-format off 3 | 4 | 5 | #include "cpp11/declarations.hpp" 6 | #include 7 | 8 | // BqField.cpp 9 | SEXP bq_parse(std::string meta_s, std::string data_s); 10 | extern "C" SEXP _bigrquery_bq_parse(SEXP meta_s, SEXP data_s) { 11 | BEGIN_CPP11 12 | return cpp11::as_sexp(bq_parse(cpp11::as_cpp>(meta_s), cpp11::as_cpp>(data_s))); 13 | END_CPP11 14 | } 15 | // BqField.cpp 16 | SEXP bq_field_init(std::string json, std::string value); 17 | extern "C" SEXP _bigrquery_bq_field_init(SEXP json, SEXP value) { 18 | BEGIN_CPP11 19 | return cpp11::as_sexp(bq_field_init(cpp11::as_cpp>(json), cpp11::as_cpp>(value))); 20 | END_CPP11 21 | } 22 | // BqField.cpp 23 | SEXP bq_parse_files(std::string schema_path, std::vector file_paths, int n, bool quiet); 24 | extern "C" SEXP _bigrquery_bq_parse_files(SEXP schema_path, SEXP file_paths, SEXP n, SEXP quiet) { 25 | BEGIN_CPP11 26 | return cpp11::as_sexp(bq_parse_files(cpp11::as_cpp>(schema_path), cpp11::as_cpp>>(file_paths), cpp11::as_cpp>(n), cpp11::as_cpp>(quiet))); 27 | END_CPP11 28 | } 29 | 30 | extern "C" { 31 | static const R_CallMethodDef CallEntries[] = { 32 | {"_bigrquery_bq_field_init", (DL_FUNC) &_bigrquery_bq_field_init, 2}, 33 | {"_bigrquery_bq_parse", (DL_FUNC) &_bigrquery_bq_parse, 2}, 34 | {"_bigrquery_bq_parse_files", (DL_FUNC) &_bigrquery_bq_parse_files, 4}, 35 | {NULL, NULL, 0} 36 | }; 37 | } 38 | 39 | extern "C" attribute_visible void R_init_bigrquery(DllInfo* dll){ 40 | R_registerRoutines(dll, NULL, CallEntries, NULL, NULL); 41 | R_useDynamicSymbols(dll, FALSE); 42 | R_forceSymbols(dll, TRUE); 43 | } 44 | -------------------------------------------------------------------------------- /src/integer64.h: -------------------------------------------------------------------------------- 1 | #ifndef BIGRQUERY_INTEGER64_H 2 | #define BIGRQUERY_INTEGER64_H 3 | 4 | #define INT64SXP REALSXP 5 | 6 | #define NA_INTEGER64 (0x8000000000000000) 7 | 8 | inline int64_t* INTEGER64(SEXP x) { 9 | return reinterpret_cast(REAL(x)); 10 | } 11 | 12 | #endif // BIGRQUERY_INTEGER64_H 13 | 14 | // This file is a modified version of 15 | // https://github.com/r-dbi/RPostgres/blob/master/src/integer64.h 16 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | # This file is part of the standard setup for testthat. 2 | # It is recommended that you do not modify it. 
3 | # 4 | # Where should you do additional test configuration? 5 | # Learn more about the roles of various files in: 6 | # * https://r-pkgs.org/tests.html 7 | # * https://testthat.r-lib.org/reference/test_package.html#special-files 8 | 9 | library(testthat) 10 | library(bigrquery) 11 | 12 | test_check("bigrquery") 13 | -------------------------------------------------------------------------------- /tests/testthat/.gitignore: -------------------------------------------------------------------------------- 1 | service-token.json 2 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-auth.md: -------------------------------------------------------------------------------- 1 | # useful error if can't auth 2 | 3 | Code 4 | bq_auth() 5 | Condition 6 | Error in `bq_auth()`: 7 | ! Can't get Google credentials. 8 | i Try calling `bq_auth()` directly with necessary specifics. 9 | 10 | # bq_auth_configure checks its inputs 11 | 12 | Code 13 | bq_auth_configure(1, 1) 14 | Condition 15 | Error in `bq_auth_configure()`: 16 | ! Exactly one of `client` or `path` must be supplied. 17 | Code 18 | bq_auth_configure(client = 1) 19 | Condition 20 | Error in `bq_auth_configure()`: 21 | ! `client` must be a gargle OAuth client or `NULL`, not the number 1. 22 | Code 23 | bq_auth_configure(path = 1) 24 | Condition 25 | Error in `bq_auth_configure()`: 26 | ! `path` must be a single string, not the number 1. 27 | 28 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-dataset.md: -------------------------------------------------------------------------------- 1 | # by default can not delete dataset containing tables 2 | 3 | Code 4 | bq_dataset_delete(ds) 5 | Condition 6 | Error in `bq_delete()`: 7 | ! Dataset gargle-169921: is still in use [resourceInUse] 8 | 9 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-download.md: -------------------------------------------------------------------------------- 1 | # errors when table is known to be incomplete 2 | 3 | Code 4 | bq_table_download(tb, n_max = 35000, page_size = 35000, bigint = "integer64", 5 | api = "json") 6 | Message 7 | Downloading first chunk of data. 8 | Condition 9 | Error in `bq_table_download()`: 10 | ! First chunk is incomplete: 11 | x 35,000 rows were requested, but only {n} rows were received. 12 | i Leave `page_size` unspecified or use an even smaller value. 13 | 14 | # warns if supplying unnused arguments 15 | 16 | Code 17 | . <- bq_table_download(tb, api = "arrow", page_size = 1, start_index = 1, 18 | max_connections = 1) 19 | Condition 20 | Warning in `bq_table_download()`: 21 | `page_size` is ignored when `api == "arrow"` 22 | Warning in `bq_table_download()`: 23 | `start_index` is ignored when `api == "arrow"` 24 | Warning in `bq_table_download()`: 25 | `max_connections` is ignored when `api == "arrow"` 26 | 27 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-field.md: -------------------------------------------------------------------------------- 1 | # recursive printing of subfields 2 | 3 | Code 4 | print(z3) 5 | Output 6 | z3 7 | z2 8 | z1 9 | x 10 | y 11 | 12 | Code 13 | print(z3$fields) 14 | Output 15 | 16 | z2 17 | z1 18 | x 19 | y 20 | 21 | 22 | # tests its inputs 23 | 24 | Code 25 | bq_field(1) 26 | Condition 27 | Error in `bq_field()`: 28 | ! `name` must be a single string, not the number 1. 
29 | Code 30 | bq_field("x", 1) 31 | Condition 32 | Error in `bq_field()`: 33 | ! `type` must be a single string, not the number 1. 34 | Code 35 | bq_field("x", "y", mode = 1) 36 | Condition 37 | Error in `bq_field()`: 38 | ! `mode` must be a single string, not the number 1. 39 | Code 40 | bq_field("x", "y", description = 1) 41 | Condition 42 | Error in `bq_field()`: 43 | ! `description` must be a single string or `NULL`, not the number 1. 44 | 45 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-job.md: -------------------------------------------------------------------------------- 1 | # can control chattiness of bq_job_wait 2 | 3 | Code 4 | bq_job_wait(job, quiet = TRUE) 5 | bq_job_wait(job, quiet = FALSE) 6 | Message 7 | Job complete 8 | Billed: 0 B 9 | 10 | # informative errors on failure 11 | 12 | Code 13 | # One error 14 | bq_dataset_query(ds, "SELECT 1 +") 15 | Condition 16 | Error in `bq_dataset_query()`: 17 | ! Job failed 18 | x Syntax error: Unexpected end of script at [1:11] [invalidQuery] 19 | Code 20 | # Multiple errors 21 | bq_table_upload(tb, data.frame(x = "x", y = 1:5)) 22 | Condition 23 | Error in `bq_table_upload()`: 24 | ! Job failed 25 | x Error while reading data, error message: JSON processing encountered too many errors, giving up. Rows: 1; errors: 1; max bad: 0; error percent: 0 [invalid] 26 | x Error while reading data, error message: JSON parsing error in row starting at position 0: Could not convert value 'string_value: "x"' to integer. Field: x; Value: x [invalid] 27 | 28 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-param.md: -------------------------------------------------------------------------------- 1 | # parameter json doesn't change without notice 2 | 3 | Code 4 | as_bq_params(list(scalar = "a", vector = c("a", "b", "c"))) 5 | Output 6 | [ 7 | { 8 | "name": "scalar", 9 | "parameterType": { 10 | "type": "STRING" 11 | }, 12 | "parameterValue": { 13 | "value": "a" 14 | } 15 | }, 16 | { 17 | "name": "vector", 18 | "parameterType": { 19 | "type": "ARRAY", 20 | "arrayType": { 21 | "type": "STRING" 22 | } 23 | }, 24 | "parameterValue": { 25 | "arrayValues": [ 26 | { 27 | "value": "a" 28 | }, 29 | { 30 | "value": "b" 31 | }, 32 | { 33 | "value": "c" 34 | } 35 | ] 36 | } 37 | } 38 | ] 39 | 40 | # checks inputs 41 | 42 | Code 43 | bq_param_scalar(1:3) 44 | Condition 45 | Error in `bq_param_scalar()`: 46 | ! `value` must be length 1, not 3. 47 | Code 48 | bq_param_array(integer()) 49 | Condition 50 | Error in `bq_param_array()`: 51 | ! `value` can't be zero-length. 
52 | 53 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-perform.md: -------------------------------------------------------------------------------- 1 | # bq_perform_upload creates job that succeeds 2 | 3 | Code 4 | bq_job_wait(job, quiet = FALSE) 5 | Message 6 | Job complete 7 | Input: 3.44 kB 8 | Output: 2.82 kB 9 | Code 10 | bq_job_wait(job, quiet = FALSE) 11 | Message 12 | Job complete 13 | Input: 3.44 kB 14 | Output: 2.82 kB 15 | 16 | # bq_perform_copy creates job that succeeds 17 | 18 | Code 19 | bq_job_wait(job, quiet = FALSE) 20 | Message 21 | Job complete 22 | 23 | # bq_perform_query creates job that succeeds 24 | 25 | Code 26 | bq_job_wait(job, quiet = FALSE) 27 | Message 28 | Job complete 29 | Billed: 0 B 30 | 31 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-query.md: -------------------------------------------------------------------------------- 1 | # bq_project_query inputs are checked 2 | 3 | Code 4 | bq_project_query(1) 5 | Condition 6 | Error in `bq_project_query()`: 7 | ! `x` must be a single string, not the number 1. 8 | Code 9 | bq_project_query("abc", 1) 10 | Condition 11 | Error in `bq_project_query()`: 12 | ! `query` must be a single string, not the number 1. 13 | Code 14 | bq_project_query("abc", "SELECT *", destination_table = 1) 15 | Condition 16 | Error in `bq_project_query()`: 17 | ! `destination_table` must be a string, list, or `bq_table()`. 18 | Code 19 | bq_project_query("abc", "SELECT *", destination_table = "a") 20 | Condition 21 | Error in `bq_project_query()`: 22 | ! When `destination_table` is a string, it must contain 3 components separated by ".". 23 | Code 24 | bq_project_query("abc", "SELECT *", destination_table = list()) 25 | Condition 26 | Error in `bq_project_query()`: 27 | ! When `destination_table` is a list, it must have components "projectId", "datasetId", and "tableId". 28 | Code 29 | bq_project_query("abc", "SELECT *", quiet = 1) 30 | Condition 31 | Error in `bq_project_query()`: 32 | ! `quiet` must be `TRUE`, `FALSE`, or `NA`, not the number 1. 33 | 34 | # bq_dataset_query inputs are checked 35 | 36 | Code 37 | bq_dataset_query(1) 38 | Condition 39 | Error in `bq_dataset_query()`: 40 | ! `x` must be a string, list, or `bq_dataset()`. 41 | Code 42 | bq_dataset_query("abc") 43 | Condition 44 | Error in `bq_dataset_query()`: 45 | ! When `x` is a string, it must contain 2 components separated by ".". 46 | Code 47 | bq_dataset_query("abc.def", 1) 48 | Condition 49 | Error in `bq_dataset_query()`: 50 | ! `query` must be a single string, not the number 1. 51 | Code 52 | bq_dataset_query("abc.def", "SELECT *", destination_table = 1) 53 | Condition 54 | Error in `bq_dataset_query()`: 55 | ! `destination_table` must be a string, list, or `bq_table()`. 56 | Code 57 | bq_dataset_query("abc.def", "SELECT *", destination_table = "a") 58 | Condition 59 | Error in `bq_dataset_query()`: 60 | ! When `destination_table` is a string, it must contain 3 components separated by ".". 61 | Code 62 | bq_dataset_query("abc.def", "SELECT *", destination_table = list()) 63 | Condition 64 | Error in `bq_dataset_query()`: 65 | ! When `destination_table` is a list, it must have components "projectId", "datasetId", and "tableId". 66 | Code 67 | bq_dataset_query("abc.def", "SELECT *", billing = 1) 68 | Condition 69 | Error in `bq_dataset_query()`: 70 | ! `billing` must be a single string or `NULL`, not the number 1.
71 | Code 72 | bq_dataset_query("abc.def", "SELECT *", quiet = 1) 73 | Condition 74 | Error in `bq_dataset_query()`: 75 | ! `quiet` must be `TRUE`, `FALSE`, or `NA`, not the number 1. 76 | 77 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-refs.md: -------------------------------------------------------------------------------- 1 | # can make table with dataset 2 | 3 | Code 4 | bq_table(ds, 1) 5 | Condition 6 | Error in `bq_table()`: 7 | ! `dataset` must be a single string, not the number 1. 8 | 9 | # objects have helpful print methods 10 | 11 | Code 12 | as_bq_job("x.y.US") 13 | Output 14 | x.y.US 15 | Code 16 | as_bq_dataset("x.y") 17 | Output 18 | x.y 19 | Code 20 | as_bq_table("x.y.z") 21 | Output 22 | x.y.z 23 | 24 | # useful error for non-strings 25 | 26 | Code 27 | as_bq_job(1) 28 | Condition 29 | Error: 30 | ! `1` must be a string, list, or `bq_job()`. 31 | Code 32 | as_bq_dataset(1) 33 | Condition 34 | Error: 35 | ! `1` must be a string, list, or `bq_dataset()`. 36 | Code 37 | as_bq_table(1) 38 | Condition 39 | Error: 40 | ! `1` must be a string, list, or `bq_table()`. 41 | 42 | # string coercion error on invalid number of components 43 | 44 | Code 45 | as_bq_table("x") 46 | Condition 47 | Error: 48 | ! When `"x"` is a string, it must contain 3 components separated by ".". 49 | Code 50 | as_bq_table("a.b.c.d") 51 | Condition 52 | Error: 53 | ! When `"a.b.c.d"` is a string, it must contain 3 components separated by ".". 54 | Code 55 | as_bq_job("x") 56 | Condition 57 | Error: 58 | ! When `"x"` is a string, it must contain 3 components separated by ".". 59 | Code 60 | as_bq_dataset("x") 61 | Condition 62 | Error: 63 | ! When `"x"` is a string, it must contain 2 components separated by ".". 64 | 65 | # list coercion errors with bad names 66 | 67 | Code 68 | as_bq_table(list()) 69 | Condition 70 | Error: 71 | ! When `list()` is a list, it must have components "projectId", "datasetId", and "tableId". 72 | Code 73 | as_bq_dataset(list()) 74 | Condition 75 | Error: 76 | ! When `list()` is a list, it must have components "projectId" and "datasetId". 77 | Code 78 | as_bq_job(list()) 79 | Condition 80 | Error: 81 | ! When `list()` is a list, it must have components "projectId", "jobId", and "location". 82 | 83 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-request.md: -------------------------------------------------------------------------------- 1 | # error call is forwarded all the way down 2 | 3 | Code 4 | bq_job_meta("a.b.c") 5 | Condition 6 | Error in `bq_get()`: 7 | ! Invalid value for location: c is not a valid value [invalid] 8 | 9 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/bq-test.md: -------------------------------------------------------------------------------- 1 | # error if env var not set 2 | 3 | Code 4 | bq_test_project() 5 | Condition 6 | Error in `bq_test_project()`: 7 | ! `BIGQUERY_TEST_PROJECT` envvar must be set to a project name. 8 | Code 9 | gs_test_bucket() 10 | Condition 11 | Error in `gs_test_bucket()`: 12 | ! `BIGQUERY_TEST_BUCKET` must be set to a bucket name.
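Both messages above come from the test helpers when the live-API environment variables are unset. A sketch of the expected setup, with placeholder values (in practice these usually live in ~/.Renviron rather than in a script):

# Placeholder values; bq_test_project() and gs_test_bucket() read these.
Sys.setenv(
  BIGQUERY_TEST_PROJECT = "my-test-project",
  BIGQUERY_TEST_BUCKET = "my-test-bucket"
)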
13 | 14 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/dbi-connection.md: -------------------------------------------------------------------------------- 1 | # useful print with and without dataset 2 | 3 | Code 4 | # With dataset 5 | con1 6 | Output 7 | 8 | Dataset: p.x 9 | Billing: b 10 | Code 11 | # Without dataset 12 | con2 13 | Output 14 | 15 | Billing: p 16 | 17 | # dbQuoteIdentifier validates inputs 18 | 19 | Code 20 | DBI::dbQuoteIdentifier(con, c("x", NA)) 21 | Condition 22 | Error in `DBI::dbQuoteIdentifier()`: 23 | ! `x` must not contain missing values. 24 | 25 | # dbWriteTable errors on unsupported arguments 26 | 27 | Code 28 | DBI::dbWriteTable(con, "x", df, field.types = list()) 29 | Condition 30 | Error in `DBI::dbWriteTable()`: 31 | ! `field.types` not supported by bigrquery. 32 | Code 33 | DBI::dbWriteTable(con, "x", df, temporary = TRUE) 34 | Condition 35 | Error in `DBI::dbWriteTable()`: 36 | ! `temporary = FALSE` not supported by bigrquery. 37 | 38 | # dataset is optional 39 | 40 | Code 41 | DBI::dbListTables(con) 42 | Condition 43 | Error in `DBI::dbListTables()`: 44 | ! Can't list tables without a connection `dataset`. 45 | 46 | --- 47 | 48 | Code 49 | DBI::dbReadTable(con, "natality", n_max = 10) 50 | Condition 51 | Error in `as_bq_table()`: 52 | ! `name` ("natality") must have 2 or 3 components if the connection doesn't have a dataset. 53 | 54 | # can create bq_table from connection + name 55 | 56 | Code 57 | as_bq_table(con1, "x") 58 | Condition 59 | Error in `as_bq_table()`: 60 | ! `name` ("x") must have 2 or 3 components if the connection doesn't have a dataset. 61 | 62 | --- 63 | 64 | Code 65 | as_bq_table(con1, "a.b.c.d") 66 | Condition 67 | Error in `as_bq_table()`: 68 | ! `name` ("a.b.c.d") must have 1-3 components. 69 | 70 | # as_bq_table checks its input types 71 | 72 | Code 73 | as_bq_table(con1, letters) 74 | Condition 75 | Error in `as_bq_table()`: 76 | ! `name` must be a string or a dbplyr_table_ident. 77 | 78 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/dbi-result.md: -------------------------------------------------------------------------------- 1 | # can retrieve query in pieces and that quiet is respected 2 | 3 | Code 4 | DBI::dbFetch(res, NA) 5 | Condition 6 | Error in `DBI::dbFetch()`: 7 | ! `n` must be a whole number, not `NA`. 8 | 9 | --- 10 | 11 | Code 12 | df <- DBI::dbFetch(res, 10) 13 | Message 14 | Downloading first chunk of data. 15 | First chunk includes all requested rows. 16 | 17 | --- 18 | 19 | Code 20 | df <- DBI::dbFetch(res, -1) 21 | 22 | # can get metadata 23 | 24 | Code 25 | res 26 | Output 27 | 28 | Query: SELECT cyl, mpg FROM mtcars 29 | Has completed: FALSE 30 | Rows fetched: 0 31 | 32 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/dplyr.md: -------------------------------------------------------------------------------- 1 | # can copy_to 2 | 3 | Code 4 | dplyr::copy_to(con, mtcars) 5 | Condition 6 | Error in `db_copy_to()`: 7 | ! 
BigQuery does not support temporary tables 8 | 9 | # string functions correctly 10 | 11 | Code 12 | dbplyr::translate_sql(grepl("a.c", x), con = con) 13 | Output 14 | REGEXP_CONTAINS(`x`, 'a.c') 15 | Code 16 | dbplyr::translate_sql(gsub("a.c", "", x), con = con) 17 | Output 18 | REGEXP_REPLACE(`x`, 'a.c', '') 19 | 20 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/gs-object.md: -------------------------------------------------------------------------------- 1 | # has useful print method 2 | 3 | Code 4 | gs_object("xxx", "yyy") 5 | Output 6 | gs://xxx/yyy 7 | 8 | -------------------------------------------------------------------------------- /tests/testthat/_snaps/utils.md: -------------------------------------------------------------------------------- 1 | # bq_check_namespace() works 2 | 3 | Code 4 | bq_check_namespace("invalid package name", "FIELD_TYPE") 5 | Condition 6 | Error in `bq_check_namespace()`: 7 | ! The package "invalid package name" is required to parse BigQuery 'FIELD_TYPE' fields. 8 | 9 | -------------------------------------------------------------------------------- /tests/testthat/dbi-result-print.txt: -------------------------------------------------------------------------------- 1 | 2 | Query: SELECT cyl, mpg FROM mtcars 3 | Has completed: FALSE 4 | Rows fetched: 0 5 | -------------------------------------------------------------------------------- /tests/testthat/helper-auth.R: -------------------------------------------------------------------------------- 1 | if (has_internal_auth()) { 2 | bq_auth_internal() 3 | } 4 | -------------------------------------------------------------------------------- /tests/testthat/parse-schema-array-struct.json: -------------------------------------------------------------------------------- 1 | { 2 | "schema": { 3 | "fields": [ 4 | { 5 | "name": "x", 6 | "type": "RECORD", 7 | "mode": "REPEATED", 8 | "fields": [ 9 | { 10 | "name": "a", 11 | "type": "INTEGER", 12 | "mode": "NULLABLE" 13 | }, 14 | { 15 | "name": "b", 16 | "type": "STRING", 17 | "mode": "NULLABLE" 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /tests/testthat/parse-schema-array.json: -------------------------------------------------------------------------------- 1 | { 2 | "schema": { 3 | "fields": [ 4 | { 5 | "name": "x", 6 | "type": "INTEGER", 7 | "mode": "REPEATED" 8 | } 9 | ] 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /tests/testthat/parse-schema-struct-array.json: -------------------------------------------------------------------------------- 1 | { 2 | "schema": { 3 | "fields": [ 4 | { 5 | "name": "x", 6 | "type": "RECORD", 7 | "mode": "NULLABLE", 8 | "fields": [ 9 | { 10 | "name": "a", 11 | "type": "INTEGER", 12 | "mode": "REPEATED" 13 | }, 14 | { 15 | "name": "b", 16 | "type": "STRING", 17 | "mode": "REPEATED" 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /tests/testthat/parse-schema-struct.json: -------------------------------------------------------------------------------- 1 | { 2 | "schema": { 3 | "fields": [ 4 | { 5 | "name": "x", 6 | "type": "RECORD", 7 | "mode": "NULLABLE", 8 | "fields": [ 9 | { 10 | "name": "a", 11 | "type": "INTEGER", 12 | "mode": "NULLABLE" 13 | }, 14 | { 15 | "name": "b", 16 | "type": "STRING", 17 | "mode": "NULLABLE" 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | } 
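The four parse-schema-*.json fixtures above cover the mode/type combinations the parser must handle: a REPEATED scalar, a NULLABLE RECORD, a RECORD of REPEATED scalars, and a REPEATED RECORD. As a rough sketch of the R-side shapes these are expected to parse into, inferred from the list-column round-trip tests in test-bq-table.R rather than pinned by the fixtures themselves:

# REPEATED INTEGER -> a list-column of integer vectors
tibble::tibble(x = list(c(1L, 2L, 3L)))

# NULLABLE RECORD -> a list-column of named lists
tibble::tibble(x = list(list(a = 1L, b = "abc")))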
24 | -------------------------------------------------------------------------------- /tests/testthat/parse-values-array-struct.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "bigquery#tableDataList", 3 | "etag": "\"Of60aSRqAJUNFSgRZSQLdr125eE/O-wKGy4ThFRXlLvNoqf9sBdNeJw\"", 4 | "totalRows": "1", 5 | "rows": [ 6 | { 7 | "f": [ 8 | { 9 | "v": [ 10 | { 11 | "v": { 12 | "f": [ 13 | { 14 | "v": "1" 15 | }, 16 | { 17 | "v": "a" 18 | } 19 | ] 20 | } 21 | }, 22 | { 23 | "v": { 24 | "f": [ 25 | { 26 | "v": "2" 27 | }, 28 | { 29 | "v": "b" 30 | } 31 | ] 32 | } 33 | }, 34 | { 35 | "v": { 36 | "f": [ 37 | { 38 | "v": "3" 39 | }, 40 | { 41 | "v": "c" 42 | } 43 | ] 44 | } 45 | } 46 | ] 47 | } 48 | ] 49 | } 50 | ] 51 | } 52 | -------------------------------------------------------------------------------- /tests/testthat/parse-values-array.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "bigquery#tableDataList", 3 | "etag": "\"Of60aSRqAJUNFSgRZSQLdr125eE/ks_Rm4lhbTcow-jlwec1clKIBNA\"", 4 | "totalRows": "1", 5 | "rows": [ 6 | { 7 | "f": [ 8 | { 9 | "v": [ 10 | { 11 | "v": "1" 12 | }, 13 | { 14 | "v": "2" 15 | }, 16 | { 17 | "v": "3" 18 | } 19 | ] 20 | } 21 | ] 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /tests/testthat/parse-values-struct-array.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "bigquery#tableDataList", 3 | "etag": "\"Of60aSRqAJUNFSgRZSQLdr125eE/P9HdWmG-Wy4wKnTS-bv2y7xzt0k\"", 4 | "totalRows": "1", 5 | "rows": [ 6 | { 7 | "f": [ 8 | { 9 | "v": { 10 | "f": [ 11 | { 12 | "v": [ 13 | { 14 | "v": "1" 15 | }, 16 | { 17 | "v": "2" 18 | }, 19 | { 20 | "v": "3" 21 | } 22 | ] 23 | }, 24 | { 25 | "v": [ 26 | { 27 | "v": "a" 28 | }, 29 | { 30 | "v": "b" 31 | } 32 | ] 33 | } 34 | ] 35 | } 36 | } 37 | ] 38 | } 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /tests/testthat/parse-values-struct.json: -------------------------------------------------------------------------------- 1 | { 2 | "kind": "bigquery#tableDataList", 3 | "etag": "\"Of60aSRqAJUNFSgRZSQLdr125eE/oX2SJnXHlwLF7oPVeFhLk3UgiY0\"", 4 | "totalRows": "1", 5 | "rows": [ 6 | { 7 | "f": [ 8 | { 9 | "v": { 10 | "f": [ 11 | { 12 | "v": "1" 13 | }, 14 | { 15 | "v": "abc" 16 | } 17 | ] 18 | } 19 | } 20 | ] 21 | } 22 | ] 23 | } 24 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-auth.R: -------------------------------------------------------------------------------- 1 | test_that("bq_user() works", { 2 | skip_if_no_auth() 3 | expect_match(bq_user(), "@.*[.]iam[.]gserviceaccount[.]com") 4 | }) 5 | 6 | test_that("useful error if can't auth", { 7 | local_mocked_bindings(token_fetch = function(...) 
NULL) 8 | 9 | expect_snapshot(bq_auth(), error = TRUE) 10 | }) 11 | 12 | test_that("bq_auth_configure checks its inputs", { 13 | expect_snapshot(error = TRUE, { 14 | bq_auth_configure(1, 1) 15 | bq_auth_configure(client = 1) 16 | bq_auth_configure(path = 1) 17 | }) 18 | }) 19 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-dataset.R: -------------------------------------------------------------------------------- 1 | test_that("can create and delete datasets", { 2 | ds <- bq_dataset(bq_test_project(), random_name()) 3 | expect_false(bq_dataset_exists(ds)) 4 | 5 | bq_dataset_create(ds) 6 | expect_true(bq_dataset_exists(ds)) 7 | 8 | bq_dataset_delete(ds) 9 | expect_false(bq_dataset_exists(ds)) 10 | }) 11 | 12 | test_that("can update dataset metadata", { 13 | ds <- bq_dataset(bq_test_project(), random_name()) 14 | defer(bq_dataset_delete(ds)) 15 | 16 | bq_dataset_create(ds, description = "a", friendly_name = "b") 17 | bq_dataset_update(ds, description = "b") 18 | 19 | meta <- bq_dataset_meta(ds, "description,friendlyName") 20 | expect_equal(meta$description, "b") 21 | expect_equal(meta$friendlyName, "b") 22 | }) 23 | 24 | test_that("by default can not delete dataset containing tables", { 25 | ds <- bq_test_dataset() 26 | 27 | bq_table_create(bq_table(ds, "testing")) 28 | expect_snapshot( 29 | bq_dataset_delete(ds), 30 | error = TRUE, 31 | transform = function(x) gsub(ds$dataset, "", x) 32 | ) 33 | }) 34 | 35 | test_that("can list tables in a dataset", { 36 | ds <- bq_dataset(bq_test_project(), "basedata") 37 | 38 | expect_equal( 39 | bq_dataset_tables(ds), 40 | list(bq_table(ds, "mtcars"), bq_table(ds, "simple-five")) 41 | ) 42 | }) 43 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-field.R: -------------------------------------------------------------------------------- 1 | test_that("can create and coerce a single field", { 2 | x1 <- bq_field("x", "string") 3 | x2 <- as_bq_field(list(name = "x", type = "string")) 4 | 5 | expect_s3_class(x1, "bq_field") 6 | expect_equal(x1$name, "x") 7 | expect_equal(x1, x2) 8 | }) 9 | 10 | test_that("can compute fields from data frame", { 11 | df <- data.frame(x = 1, y = "a") 12 | fs <- as_bq_fields(df) 13 | 14 | expect_length(fs, 2) 15 | expect_equal(fs[[1]], bq_field("x", "float")) 16 | expect_equal(fs[[2]], bq_field("y", "string")) 17 | }) 18 | 19 | test_that("recursive printing of subfields", { 20 | x1 <- bq_field("x", "string") 21 | x2 <- bq_field("y", "integer") 22 | 23 | z1 <- bq_field("z1", "record", fields = list(x1, x2)) 24 | z2 <- bq_field("z2", "record", fields = list(z1)) 25 | z3 <- bq_field("z3", "record", fields = list(z2)) 26 | 27 | expect_snapshot({ 28 | print(z3) 29 | 30 | print(z3$fields) 31 | }) 32 | }) 33 | 34 | test_that("tests its inputs", { 35 | expect_snapshot(error = TRUE, { 36 | bq_field(1) 37 | bq_field("x", 1) 38 | bq_field("x", "y", mode = 1) 39 | bq_field("x", "y", description = 1) 40 | }) 41 | }) 42 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-job.R: -------------------------------------------------------------------------------- 1 | test_that("can control chattiness of bq_job_wait", { 2 | job <- bq_perform_query("SELECT 1 + 1", bq_test_project()) 3 | 4 | expect_snapshot({ 5 | bq_job_wait(job, quiet = TRUE) 6 | bq_job_wait(job, quiet = FALSE) 7 | }) 8 | }) 9 | 10 | test_that("informative errors on failure", { 11 | 
withr::local_options(cli.progress_show_after = 10) 12 | ds <- bq_test_dataset() 13 | 14 | tb <- bq_test_table() 15 | bq_table_create( 16 | tb, 17 | fields = list(bq_field("x", "integer"), bq_field("y", "string")) 18 | ) 19 | 20 | expect_snapshot( 21 | { 22 | "One error" 23 | bq_dataset_query(ds, "SELECT 1 +") 24 | 25 | "Multiple errors" 26 | bq_table_upload(tb, data.frame(x = "x", y = 1:5)) 27 | }, 28 | error = TRUE, 29 | transform = function(x) gsub("Job (.*?) failed", "Job failed", x) 30 | ) 31 | }) 32 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-param.R: -------------------------------------------------------------------------------- 1 | test_that("can create parameters from list", { 2 | x <- list(a = bq_param(1, "integer"), b = "x", c = 1:3) 3 | p <- as_bq_params(x) 4 | 5 | expect_length(p, 3) 6 | expect_equal(p[[1]], bq_param_scalar(1, "integer", name = "a")) 7 | expect_equal(p[[2]], bq_param_scalar("x", name = "b")) 8 | expect_equal(p[[3]], bq_param_array(1:3, name = "c")) 9 | }) 10 | 11 | test_that("parameter json doesn't change without notice", { 12 | expect_snapshot({ 13 | as_bq_params(list( 14 | scalar = "a", 15 | vector = c("a", "b", "c") 16 | )) 17 | }) 18 | }) 19 | 20 | test_that("checks inputs", { 21 | expect_snapshot(error = TRUE, { 22 | bq_param_scalar(1:3) 23 | bq_param_array(integer()) 24 | }) 25 | }) 26 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-perform.R: -------------------------------------------------------------------------------- 1 | test_that("bq_perform_upload creates job that succeeds", { 2 | withr::local_options(cli.progress_show_after = 10) 3 | 4 | bq_mtcars <- bq_test_table() 5 | job <- bq_perform_upload(bq_mtcars, mtcars) 6 | 7 | expect_s3_class(job, "bq_job") 8 | expect_snapshot({ 9 | bq_job_wait(job, quiet = FALSE) 10 | bq_job_wait(job, quiet = FALSE) 11 | }) 12 | 13 | expect_true(bq_table_exists(bq_mtcars)) 14 | }) 15 | 16 | test_that("bq_perform_copy creates job that succeeds", { 17 | withr::local_options(cli.progress_show_after = 10) 18 | 19 | src <- as_bq_table("bigquery-public-data.moon_phases.moon_phases") 20 | dst <- bq_test_table() 21 | 22 | job <- bq_perform_copy(src, dst) 23 | expect_s3_class(job, "bq_job") 24 | 25 | # Doesn't return any statistics to show 26 | expect_snapshot({ 27 | bq_job_wait(job, quiet = FALSE) 28 | }) 29 | 30 | expect_true(bq_table_exists(dst)) 31 | }) 32 | 33 | 34 | # Load / extract ---------------------------------------------------------- 35 | 36 | test_that("can round trip extract + load", { 37 | ds_public <- bq_dataset("bigquery-public-data", "moon_phases") 38 | 39 | tb <- bq_dataset_query( 40 | ds_public, 41 | query = "SELECT COUNT(*) as count FROM moon_phases", 42 | billing = bq_test_project() 43 | ) 44 | 45 | tmp <- gs_test_object() 46 | # on.exit(gs_object_delete(tmp)) 47 | 48 | job <- bq_perform_extract(tb, tmp) 49 | bq_job_wait(job) 50 | 51 | tb_ks <- bq_test_table() 52 | job <- bq_perform_load(tb_ks, tmp) 53 | bq_job_wait(job) 54 | 55 | df <- bq_table_download(tb_ks) 56 | expect_equal(nrow(df), 1) 57 | expect_named(df, "count") 58 | }) 59 | 60 | # Queries ----------------------------------------------------------------- 61 | 62 | test_that("bq_perform_query creates job that succeeds", { 63 | ds <- as_bq_dataset("bigquery-public-data.moon_phases") 64 | job <- bq_perform_query( 65 | "SELECT count(*) FROM moon_phases", 66 | billing = bq_test_project(), 67 | default_dataset = ds 68 | ) 69 | 70 
| expect_s3_class(job, "bq_job") 71 | expect_snapshot({ 72 | bq_job_wait(job, quiet = FALSE) 73 | }) 74 | 75 | job_tb <- bq_job_table(job) 76 | expect_true(bq_table_exists(job_tb)) 77 | }) 78 | 79 | test_that("can supply scalar parameters", { 80 | job <- bq_project_query( 81 | bq_test_project(), 82 | "SELECT 1 + @x", 83 | parameters = list(x = bq_param_scalar(1)) 84 | ) 85 | df <- bq_table_download(job) 86 | expect_setequal(df[[1]], 2) 87 | }) 88 | 89 | test_that("can supply array parameters", { 90 | job <- bq_project_query( 91 | bq_test_project(), 92 | "SELECT values FROM UNNEST(@x) values", 93 | parameters = list(x = bq_param_array(c("a", "b"))) 94 | ) 95 | df <- bq_table_download(job) 96 | expect_setequal(df$values, c("a", "b")) 97 | }) 98 | 99 | test_that("can estimate cost and get schema", { 100 | cost <- bq_perform_query_dry_run( 101 | "SELECT count(*) FROM bigquery-public-data.moon_phases.moon_phases", 102 | billing = bq_test_project() 103 | ) 104 | expect_equal(cost, structure(0, class = "bq_bytes")) 105 | 106 | schema <- bq_perform_query_schema( 107 | "SELECT * FROM bigquery-public-data.moon_phases.moon_phases", 108 | billing = bq_test_project() 109 | ) 110 | names <- vapply(schema, function(x) x$name, character(1)) 111 | expect_equal(names, c("phase", "phase_emoji", "peak_datetime")) 112 | 113 | types <- vapply(schema, function(x) x$type, character(1)) 114 | expect_equal(types, c("STRING", "STRING", "DATETIME")) 115 | }) 116 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-project.R: -------------------------------------------------------------------------------- 1 | test_that("public datasets includes baseball", { 2 | skip_if_not(bq_authable()) 3 | 4 | public <- suppressWarnings(bq_project_datasets("bigquery-public-data")) 5 | names <- map_chr(public, function(x) x$dataset) 6 | 7 | expect_true("baseball" %in% names) 8 | }) 9 | 10 | test_that("test project has at least one job", { 11 | jobs <- bq_project_jobs(bq_test_project(), warn = FALSE) 12 | expect_gte(length(jobs), 1) 13 | expect_s3_class(jobs[[1]], "bq_job") 14 | }) 15 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-projects.R: -------------------------------------------------------------------------------- 1 | test_that("available projects should include test project", { 2 | proj <- bq_test_project() 3 | expect_true(proj %in% bq_projects()) 4 | }) 5 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-query.R: -------------------------------------------------------------------------------- 1 | test_that("bq_project_query inputs are checked", { 2 | expect_snapshot(error = TRUE, { 3 | bq_project_query(1) 4 | bq_project_query("abc", 1) 5 | bq_project_query("abc", "SELECT *", destination_table = 1) 6 | bq_project_query("abc", "SELECT *", destination_table = "a") 7 | bq_project_query("abc", "SELECT *", destination_table = list()) 8 | bq_project_query("abc", "SELECT *", quiet = 1) 9 | }) 10 | }) 11 | 12 | test_that("bq_dataset_query inputs are checked", { 13 | expect_snapshot(error = TRUE, { 14 | bq_dataset_query(1) 15 | bq_dataset_query("abc") 16 | bq_dataset_query("abc.def", 1) 17 | bq_dataset_query("abc.def", "SELECT *", destination_table = 1) 18 | bq_dataset_query("abc.def", "SELECT *", destination_table = "a") 19 | bq_dataset_query("abc.def", "SELECT *", destination_table = list()) 20 | bq_dataset_query("abc.def", "SELECT *", billing = 1) 21 | 
bq_dataset_query("abc.def", "SELECT *", quiet = 1) 22 | }) 23 | }) 24 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-refs.R: -------------------------------------------------------------------------------- 1 | test_that("job coercion equivalent to construction", { 2 | ref <- bq_job("a", "b", "c") 3 | l <- list(projectId = "a", jobId = "b", location = "c") 4 | 5 | expect_equal(as_bq_job("a.b.c"), ref) 6 | expect_equal(as_bq_job(l), ref) 7 | expect_equal(as_bq_job(ref), ref) 8 | }) 9 | 10 | test_that("dataset coercion equivalent to construction", { 11 | ref <- bq_dataset("a", "b") 12 | l <- list(projectId = "a", datasetId = "b") 13 | 14 | expect_equal(as_bq_dataset("a.b"), ref) 15 | expect_equal(as_bq_dataset(l), ref) 16 | expect_equal(as_bq_dataset(ref), ref) 17 | }) 18 | 19 | test_that("table equivalent to construction", { 20 | ref <- bq_table("a", "b", "c") 21 | l <- list(projectId = "a", datasetId = "b", tableId = "c") 22 | 23 | expect_equal(as_bq_table("a.b.c"), ref) 24 | expect_equal(as_bq_table(l), ref) 25 | expect_equal(as_bq_table(ref), ref) 26 | }) 27 | 28 | test_that("can make table with dataset", { 29 | ds <- bq_dataset("a", "b") 30 | expect_equal(bq_table(ds, "c"), bq_table("a", "b", "c")) 31 | 32 | expect_snapshot(bq_table(ds, 1), error = TRUE) 33 | }) 34 | 35 | test_that("objects have helpful print methods", { 36 | expect_snapshot({ 37 | as_bq_job("x.y.US") 38 | as_bq_dataset("x.y") 39 | as_bq_table("x.y.z") 40 | }) 41 | }) 42 | 43 | test_that("useful error for non-strings", { 44 | expect_snapshot(error = TRUE, { 45 | as_bq_job(1) 46 | as_bq_dataset(1) 47 | as_bq_table(1) 48 | }) 49 | }) 50 | 51 | test_that("string coercion error on invalid number of components", { 52 | expect_snapshot(error = TRUE, { 53 | as_bq_table("x") 54 | as_bq_table("a.b.c.d") 55 | as_bq_job("x") 56 | as_bq_dataset("x") 57 | }) 58 | }) 59 | 60 | test_that("list coercion errors with bad names", { 61 | expect_snapshot(error = TRUE, { 62 | as_bq_table(list()) 63 | as_bq_dataset(list()) 64 | as_bq_job(list()) 65 | }) 66 | }) 67 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-request.R: -------------------------------------------------------------------------------- 1 | test_that("api keys are added when present", { 2 | key <- "my.secret.key" 3 | withr::local_envvar(list(BIGRQUERY_API_KEY = key)) 4 | 5 | expect_equal(prepare_bq_query(NULL), list(key = key)) 6 | expect_equal( 7 | prepare_bq_query(list(herring_color = "red")), 8 | list(herring_color = "red", key = key) 9 | ) 10 | }) 11 | 12 | test_that("explicit api keys override env vars", { 13 | key <- "my.secret.key" 14 | withr::local_envvar(list(BIGRQUERY_API_KEY = key)) 15 | 16 | expect_equal( 17 | prepare_bq_query(list(key = "my.other.key")), 18 | list(key = "my.other.key") 19 | ) 20 | }) 21 | 22 | test_that("pagination warns if pages left on server", { 23 | skip_if_no_auth() 24 | 25 | expect_warning( 26 | bq_get_paginated( 27 | bq_path("bigquery-public-data", ""), 28 | query = list(fields = "datasets(datasetReference)"), 29 | page_size = 10, 30 | max_pages = 2 31 | ), 32 | "Only first 20 results" 33 | ) 34 | }) 35 | 36 | test_that("error call is forwarded all the way down", { 37 | skip_if_not(bq_authable()) 38 | 39 | expect_snapshot(bq_job_meta("a.b.c"), error = TRUE) 40 | }) 41 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-table.R: 
-------------------------------------------------------------------------------- 1 | test_that("can create and delete tables", { 2 | bq_mtcars <- bq_test_table() 3 | expect_false(bq_table_exists(bq_mtcars)) 4 | 5 | bq_table_create( 6 | bq_mtcars, 7 | mtcars, 8 | friendly_name = "Motor Trend Car Road Tests", 9 | description = "The data was extracted from the 1974 Motor Trend US magazine", 10 | labels = list(category = "test") 11 | ) 12 | expect_true(bq_table_exists(bq_mtcars)) 13 | 14 | bq_table_delete(bq_mtcars) 15 | expect_false(bq_table_exists(bq_mtcars)) 16 | }) 17 | 18 | test_that("can retrieve table size information", { 19 | bq_mtcars <- bq_table(bq_test_project(), "basedata", "mtcars") 20 | expect_equal(bq_table_nrow(bq_mtcars), 32) 21 | expect_equal(as.numeric(bq_table_size(bq_mtcars)), 2816) 22 | }) 23 | 24 | test_that("can create table with schema", { 25 | tb <- bq_test_table() 26 | 27 | df <- data.frame(x = 1, y = "a") 28 | bq_table_create(tb, "df", fields = df) 29 | 30 | fields <- bq_table_fields(tb) 31 | expect_equal(fields, as_bq_fields(df)) 32 | }) 33 | 34 | test_that("can round trip to non-default location", { 35 | dallas <- bq_test_dataset(location = "us-south1") 36 | df1 <- tibble(x = 1:10, y = letters[1:10]) 37 | 38 | bq_df <- bq_table(dallas, "df") 39 | bq_table_upload(bq_df, df1) 40 | 41 | df2 <- bq_table_download(bq_df, api = "json") 42 | df2 <- df2[order(df2$x), names(df1)] # BQ doesn't guarantee order 43 | rownames(df2) <- NULL 44 | 45 | expect_equal(df1, df2) 46 | }) 47 | 48 | test_that("can roundtrip via save + load", { 49 | tb1 <- bq_table(bq_test_project(), "basedata", "mtcars") 50 | tb2 <- bq_test_table() 51 | gs <- gs_test_object() 52 | 53 | bq_table_save(tb1, gs) 54 | defer(gs_object_delete(gs)) 55 | bq_table_load(tb2, gs) 56 | 57 | df <- bq_table_download(tb2, api = "json") 58 | expect_equal(dim(df), c(32, 11)) 59 | }) 60 | 61 | test_that("can copy table from public dataset", { 62 | my_natality <- bq_test_table() 63 | 64 | out <- bq_table_copy("publicdata.samples.natality", my_natality) 65 | expect_equal(out, my_natality) 66 | expect_true(bq_table_exists(my_natality)) 67 | }) 68 | 69 | # data-types -------------------------------------------------------------- 70 | 71 | test_that("can round trip atomic vectors", { 72 | df1 <- tibble( 73 | lgl = c(FALSE, TRUE, NA), 74 | int = c(-1, 1, NA), 75 | dbl = c(-1.5, 1.5, NA), 76 | chr = c("A", "B", NA) 77 | ) 78 | 79 | bq_df <- bq_test_table() 80 | bq_table_upload(bq_df, df1) 81 | 82 | df2 <- bq_table_download(bq_df, bigint = "integer", api = "json") 83 | df2 <- df2[order(df2[[1]]), names(df1)] # BQ doesn't guarantee order 84 | rownames(df2) <- NULL 85 | 86 | expect_equal(df1, df2) 87 | }) 88 | 89 | test_that("can round-trip POSIXt to either TIMESTAMP or DATETIME", { 90 | df <- tibble(datetime = as.POSIXct("2020-01-01 09:00", tz = "UTC")) 91 | 92 | tb1 <- bq_table_create( 93 | bq_test_table(), 94 | bq_fields(list(bq_field("datetime", "TIMESTAMP"))) 95 | ) 96 | bq_table_upload(tb1, df) 97 | df1 <- bq_table_download(tb1, api = "json") 98 | expect_equal(df1, df) 99 | 100 | tb2 <- bq_table_create( 101 | bq_test_table(), 102 | bq_fields(list(bq_field("datetime", "DATETIME"))) 103 | ) 104 | bq_table_upload(tb2, df) 105 | df2 <- bq_table_download(tb2, api = "json") 106 | expect_equal(df2, df) 107 | }) 108 | 109 | test_that("can round trip data frame with list-cols", { 110 | tb <- bq_test_table() 111 | 112 | df1 <- tibble::tibble( 113 | val = 1.5, 114 | array = list(1L:5L), 115 | struct = list(list(x = "a", y = 1.5, z = 2L)),
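# REPEATED RECORD ("array of structs"): expected to round-trip as a list of tibbles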
116 | array_struct = list(tibble::tibble(x = "a", y = 1.5, z = 2L)) 117 | ) 118 | bq_table_upload(tb, df1) 119 | 120 | df2 <- bq_table_download(tb, bigint = "integer", api = "json") 121 | # restore column order 122 | df2 <- df2[names(df1)] 123 | df2$struct[[1]] <- df2$struct[[1]][c("x", "y", "z")] 124 | df2$array_struct[[1]] <- df2$array_struct[[1]][c("x", "y", "z")] 125 | 126 | # Converting to dataframe to avoid getting the error: 127 | # Can't join on 'array' x 'array' because of incompatible types (list / list) 128 | df1 <- as.data.frame(df1) 129 | df2 <- as.data.frame(df2) 130 | expect_equal(df1, df2) 131 | }) 132 | 133 | test_that("can create table field description", { 134 | partition_table <- bq_test_table() 135 | 136 | bq_table_create( 137 | partition_table, 138 | fields = bq_fields(list(bq_field( 139 | "id", 140 | "integer", 141 | description = "Key field" 142 | ))) 143 | ) 144 | 145 | meta <- bq_table_meta(partition_table) 146 | expect_equal(meta$schema$fields[[1]]$description, "Key field") 147 | }) 148 | 149 | test_that("can patch table with new fields in the schema", { 150 | tb <- bq_test_table() 151 | df <- data.frame(id = 1) 152 | bq_table_create(tb, fields = df) 153 | 154 | df.patch <- data.frame(id = 1, title = "record name") 155 | bq_table_patch(tb, fields = df.patch) 156 | 157 | tb.meta <- bq_table_meta(tb) 158 | expect_equal( 159 | tb.meta$schema$fields[[2]]$name, 160 | "title" 161 | ) 162 | }) 163 | 164 | test_that("can round-trip GEOGRAPHY", { 165 | skip_if_not_installed("wk") 166 | 167 | df <- tibble(geography = wk::wkt("POINT(30 10)")) 168 | 169 | tb1 <- bq_table_create(bq_test_table(), as_bq_fields(df)) 170 | bq_table_upload(tb1, df) 171 | df1 <- bq_table_download(tb1, api = "json") 172 | expect_equal(df1, df) 173 | }) 174 | 175 | test_that("can round-trip BYTES", { 176 | df <- tibble(x = blob::blob(charToRaw("hi!"), charToRaw("bye"))) 177 | 178 | tb1 <- bq_table_create(bq_test_table(), as_bq_fields(df)) 179 | bq_table_upload(tb1, df) 180 | df1 <- bq_table_download(tb1, api = "json") 181 | expect_equal(df1, df) 182 | }) 183 | -------------------------------------------------------------------------------- /tests/testthat/test-bq-test.R: -------------------------------------------------------------------------------- 1 | test_that("can init and clean up dataset", { 2 | ds <- bq_test_dataset() 3 | expect_true(bq_dataset_exists(ds)) 4 | 5 | bq_test_init(ds$dataset) 6 | expect_true(bq_table_exists(bq_table(ds, "mtcars"))) 7 | 8 | attr(ds, "env") <- NULL 9 | gc() 10 | expect_false(bq_dataset_exists(ds)) 11 | }) 12 | 13 | test_that("error if env var not set", { 14 | withr::local_envvar(c( 15 | BIGQUERY_TEST_PROJECT = "", 16 | BIGQUERY_TEST_BUCKET = "", 17 | TESTTHAT = "" 18 | )) 19 | 20 | expect_snapshot(error = TRUE, { 21 | bq_test_project() 22 | gs_test_bucket() 23 | }) 24 | }) 25 | -------------------------------------------------------------------------------- /tests/testthat/test-camelCase.R: -------------------------------------------------------------------------------- 1 | test_that("toCamel recursively renamed", { 2 | x1 <- list(a_b = 1, c_d = list(e_f = 2)) 3 | x2 <- list(aB = 1, cD = list(eF = 2)) 4 | 5 | expect_equal(toCamel(x1), x2) 6 | }) 7 | 8 | test_that("unnamed objects left as is", { 9 | x <- 1:5 10 | expect_null(names(toCamel(x))) 11 | }) 12 | -------------------------------------------------------------------------------- /tests/testthat/test-dbi-driver.R: -------------------------------------------------------------------------------- 1 | 
test_that("driver is always valid", { 2 | expect_true(DBI::dbIsValid(bigquery())) 3 | }) 4 | 5 | test_that("dbi_driver is deprecated", { 6 | expect_warning(dbi_driver(), "deprecated") 7 | }) 8 | 9 | test_that("connecting yields a BigQueryConnection", { 10 | con <- dbConnect(bigquery(), project = bq_test_project()) 11 | expect_s4_class(con, "BigQueryConnection") 12 | }) 13 | -------------------------------------------------------------------------------- /tests/testthat/test-dbi-result.R: -------------------------------------------------------------------------------- 1 | test_that("can retrieve full query results", { 2 | con <- DBI::dbConnect( 3 | bigquery(), 4 | project = bq_test_project(), 5 | dataset = "basedata", 6 | bigint = "integer" 7 | ) 8 | 9 | df <- DBI::dbGetQuery(con, "SELECT count(*) as count FROM mtcars") 10 | expect_equal(df, tibble(count = 32L)) 11 | }) 12 | 13 | test_that("can retrieve without dataset", { 14 | con <- DBI::dbConnect( 15 | bigquery(), 16 | project = bq_test_project(), 17 | bigint = "integer" 18 | ) 19 | df <- DBI::dbGetQuery(con, "SELECT count(*) as count FROM `basedata.mtcars`") 20 | expect_equal(df, tibble(count = 32L)) 21 | }) 22 | 23 | test_that("can retrieve query in pieces and that quiet is respected", { 24 | con <- DBI::dbConnect( 25 | bigquery(), 26 | project = bq_test_project(), 27 | dataset = "basedata" 28 | ) 29 | 30 | res <- DBI::dbSendQuery(con, "SELECT cyl, mpg FROM mtcars") 31 | expect_equal(DBI::dbGetRowCount(res), 0L) 32 | 33 | expect_snapshot(DBI::dbFetch(res, NA), error = TRUE) 34 | 35 | res@quiet <- FALSE 36 | expect_snapshot(df <- DBI::dbFetch(res, 10)) 37 | 38 | expect_equal(nrow(df), 10) 39 | expect_false(DBI::dbHasCompleted(res)) 40 | expect_equal(DBI::dbGetRowCount(res), 10L) 41 | 42 | res@quiet <- TRUE 43 | expect_snapshot(df <- DBI::dbFetch(res, -1)) 44 | expect_equal(nrow(df), 22) 45 | expect_true(DBI::dbHasCompleted(res)) 46 | }) 47 | 48 | test_that("dbHasCompleted() is accurate if you fetch past end", { 49 | con <- DBI::dbConnect( 50 | bigquery(), 51 | project = bq_test_project(), 52 | dataset = "basedata" 53 | ) 54 | 55 | res <- DBI::dbSendQuery(con, "SELECT cyl FROM mtcars LIMIT 5") 56 | df <- DBI::dbFetch(res, 10) 57 | 58 | expect_equal(DBI::dbGetRowCount(res), 5) 59 | expect_true(DBI::dbHasCompleted(res)) 60 | }) 61 | 62 | test_that("can get metadata", { 63 | con <- DBI::dbConnect( 64 | bigquery(), 65 | project = bq_test_project(), 66 | dataset = "basedata" 67 | ) 68 | sql <- "SELECT cyl, mpg FROM mtcars" 69 | res <- DBI::dbSendQuery(con, sql) 70 | expect_snapshot(res) 71 | 72 | col_info <- DBI::dbColumnInfo(res) 73 | expect_equal(dim(col_info), c(2, 2)) 74 | expect_equal(col_info$name, c("cyl", "mpg")) 75 | 76 | expect_equal(DBI::dbGetStatement(res), sql) 77 | }) 78 | 79 | test_that("dbExecute returns modified rows", { 80 | ds <- bq_test_dataset() 81 | con <- DBI::dbConnect(ds) 82 | 83 | DBI::dbExecute(con, "CREATE TABLE foo (a INT64)") 84 | expect_equal(DBI::dbExecute(con, "INSERT INTO foo VALUES (1), (2), (3)"), 3) 85 | expect_equal(DBI::dbExecute(con, "DELETE FROM foo WHERE a >= 2"), 2) 86 | DBI::dbExecute(con, "DROP TABLE foo") 87 | }) 88 | -------------------------------------------------------------------------------- /tests/testthat/test-gs-object.R: -------------------------------------------------------------------------------- 1 | test_that("can delete objects", { 2 | skip_if_no_auth() 3 | tb <- bq_table(bq_test_project(), "basedata", "mtcars") 4 | 5 | gs <- gs_test_object() 6 | expect_false(gs_object_exists(gs)) 
7 | 8 | bq_table_save(tb, gs) 9 | expect_true(gs_object_exists(gs)) 10 | 11 | gs_object_delete(gs) 12 | expect_false(gs_object_exists(gs)) 13 | }) 14 | 15 | test_that("has useful print method", { 16 | expect_snapshot({ 17 | gs_object("xxx", "yyy") 18 | }) 19 | }) 20 | 21 | test_that("coercing to character gives URI", { 22 | gs <- gs_object("xxx", "yyy") 23 | expect_equal(as.character(gs), "gs://xxx/yyy") 24 | }) 25 | -------------------------------------------------------------------------------- /tests/testthat/test-utils.R: -------------------------------------------------------------------------------- 1 | test_that("bq_check_namespace() works", { 2 | expect_no_error(bq_check_namespace("bigrquery", "FIELD_TYPE")) 3 | expect_snapshot( 4 | bq_check_namespace("invalid package name", "FIELD_TYPE"), 5 | error = TRUE 6 | ) 7 | }) 8 | --------------------------------------------------------------------------------
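Taken together, tests/testthat.R plus the helpers, fixtures, and snapshots above form the whole live test harness. A minimal sketch of a local run, assuming the environment variables and service token described earlier are in place:

# From a package checkout; test_check("bigrquery") is what testthat.R runs.
devtools::test()                                        # whole suite
testthat::test_file("tests/testthat/test-bq-param.R")   # or a single file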