├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── .OwlBot.lock.yaml ├── .OwlBot.yaml ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ ├── documentation_request.yml │ ├── feature_request.yml │ ├── processs_request.md │ ├── questions.md │ └── support_request.md ├── PULL_REQUEST_TEMPLATE.md ├── auto-approve.yml ├── auto-label.yaml ├── blunderbuss.yml ├── generated-files-bot.yml ├── release-please.yml ├── release-trigger.yml ├── scripts │ ├── close-invalid-link.cjs │ ├── close-unresponsive.cjs │ ├── fixtures │ │ ├── invalidIssueBody.txt │ │ ├── validIssueBody.txt │ │ └── validIssueBodyDifferentLinkLocation.txt │ ├── package.json │ ├── remove-response-label.cjs │ └── tests │ │ ├── close-invalid-link.test.cjs │ │ └── close-or-remove-response-label.test.cjs ├── sync-repo-settings.yaml └── workflows │ ├── ci.yaml │ ├── discovery.yaml │ ├── issues-no-repro.yaml │ └── response.yaml ├── .gitignore ├── .jsdoc.js ├── .kokoro ├── .gitattributes ├── common.cfg ├── continuous │ └── node18 │ │ ├── common.cfg │ │ ├── lint.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg ├── docs.sh ├── lint.sh ├── populate-secrets.sh ├── presubmit │ ├── node18 │ │ ├── common.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg │ └── windows │ │ ├── common.cfg │ │ └── test.cfg ├── publish.sh ├── release │ ├── common.cfg │ ├── docs-devsite.cfg │ ├── docs-devsite.sh │ ├── docs.cfg │ ├── docs.sh │ └── publish.cfg ├── samples-test.sh ├── system-test.sh ├── test.bat ├── test.sh ├── trampoline.sh └── trampoline_v2.sh ├── .mocharc.js ├── .nycrc ├── .prettierignore ├── .prettierrc.js ├── .repo-metadata.json ├── .trampolinerc ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── benchmark ├── README.md ├── bench.ts └── queries.json ├── linkinator.config.json ├── owlbot.py ├── package.json ├── renovate.json ├── samples ├── .eslintrc.yml ├── README.md ├── 
addColumnLoadAppend.js ├── addColumnQueryAppend.js ├── addEmptyColumn.js ├── auth-user-sample │ ├── authUserFlow.js │ └── oauth2.keys.json ├── authViewTutorial.js ├── browseTable.js ├── cancelJob.js ├── clientJSONCredentials.js ├── copyTable.js ├── copyTableMultipleSource.js ├── createDataset.js ├── createJob.js ├── createModel.js ├── createRoutine.js ├── createRoutineDDL.js ├── createTable.js ├── createTableClustered.js ├── createTableColumnACL.js ├── createTablePartitioned.js ├── createTableRangePartitioned.js ├── createView.js ├── ddlCreateView.js ├── deleteDataset.js ├── deleteLabelDataset.js ├── deleteLabelTable.js ├── deleteModel.js ├── deleteRoutine.js ├── deleteTable.js ├── extractTableCompressed.js ├── extractTableJSON.js ├── extractTableToGCS.js ├── getDataset.js ├── getDatasetLabels.js ├── getJob.js ├── getModel.js ├── getRoutine.js ├── getTable.js ├── getTableLabels.js ├── getView.js ├── insertRowsAsStream.js ├── insertingDataTypes.js ├── labelDataset.js ├── labelTable.js ├── listDatasets.js ├── listDatasetsByLabel.js ├── listJobs.js ├── listModels.js ├── listModelsStreaming.js ├── listRoutines.js ├── listTables.js ├── loadCSVFromGCS.js ├── loadCSVFromGCSAutodetect.js ├── loadCSVFromGCSTruncate.js ├── loadJSONFromGCS.js ├── loadJSONFromGCSAutodetect.js ├── loadJSONFromGCSTruncate.js ├── loadLocalFile.js ├── loadOrcFromGCSTruncate.js ├── loadParquetFromGCSTruncate.js ├── loadTableClustered.js ├── loadTableGCSAvro.js ├── loadTableGCSAvroTruncate.js ├── loadTableGCSORC.js ├── loadTableGCSParquet.js ├── loadTablePartitioned.js ├── loadTableURIFirestore.js ├── nestedRepeatedSchema.js ├── package.json ├── query.js ├── queryBatch.js ├── queryClusteredTable.js ├── queryDestinationTable.js ├── queryDisableCache.js ├── queryDryRun.js ├── queryExternalGCSPerm.js ├── queryExternalGCSTemp.js ├── queryJobOptional.js ├── queryLegacy.js ├── queryLegacyLargeResults.js ├── queryPagination.js ├── queryParamsArrays.js ├── queryParamsNamed.js ├── queryParamsNamedTypes.js 
├── queryParamsPositional.js ├── queryParamsPositionalTypes.js ├── queryParamsStructs.js ├── queryParamsTimestamps.js ├── queryStackOverflow.js ├── quickstart.js ├── relaxColumn.js ├── relaxColumnLoadAppend.js ├── relaxColumnQueryAppend.js ├── removeTableClustering.js ├── resources │ ├── avro-schema.json │ ├── data.avro │ ├── data.csv │ ├── data.json │ ├── partialdata.csv │ ├── schema.json │ └── streamrows.json ├── setClientEndpoint.js ├── setUserAgent.js ├── tableExists.js ├── test │ ├── auth.test.js │ ├── authViewTutorial.test.js │ ├── clients.test.js │ ├── datasets.test.js │ ├── jobs.test.js │ ├── models.test.js │ ├── queries.test.js │ ├── quickstart.test.js │ ├── routines.test.js │ └── tables.test.js ├── undeleteTable.js ├── updateDatasetAccess.js ├── updateDatasetDescription.js ├── updateDatasetExpiration.js ├── updateModel.js ├── updateRoutine.js ├── updateTableColumnACL.js ├── updateTableDescription.js ├── updateTableExpiration.js └── updateViewQuery.js ├── scripts └── gen-types.js ├── src ├── bigquery.ts ├── dataset.ts ├── index.ts ├── job.ts ├── logger.ts ├── model.ts ├── routine.ts ├── rowBatch.ts ├── rowQueue.ts ├── table.ts ├── types.d.ts └── util.ts ├── system-test ├── bigquery.ts ├── data │ ├── kitten-test-data.json │ ├── location-test-data.json │ ├── schema-test-data.json │ └── schema.json ├── fixtures │ └── sample │ │ └── src │ │ └── index.ts └── install.ts ├── test ├── bigquery.ts ├── dataset.ts ├── job.ts ├── model.ts ├── routine.ts ├── rowBatch.ts ├── rowQueue.ts ├── table.ts └── testdata │ └── testfile.json └── tsconfig.json /.eslintignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/coverage 3 | test/fixtures 4 | build/ 5 | docs/ 6 | protos/ 7 | samples/generated/ 8 | system-test/**/fixtures 9 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"extends": "./node_modules/gts" 3 | } 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ts text eol=lf 2 | *.js text eol=lf 3 | protos/* linguist-generated 4 | **/api-extractor.json linguist-language=JSON-with-Comments 5 | -------------------------------------------------------------------------------- /.github/.OwlBot.lock.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | docker: 15 | image: 'gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest' 16 | digest: 'sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee' 17 | -------------------------------------------------------------------------------- /.github/.OwlBot.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | docker: 15 | image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest 16 | 17 | 18 | begin-after-commit-hash: 397c0bfd367a2427104f988d5329bc117caafd95 19 | 20 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners file. 2 | # This file controls who is tagged for review for any given pull request. 3 | # 4 | # For syntax help see: 5 | # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax 6 | 7 | 8 | # Unless specified, the jsteam is the default owner for nodejs repositories. 9 | * @googleapis/api-bigquery @googleapis/jsteam -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Google Cloud Support 3 | url: https://cloud.google.com/support/ 4 | about: If you have a support contract with Google, please use the Google Cloud Support portal. 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/processs_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Process Request 3 | about: Submit a process request to the library. 
Process requests are any requests related to library infrastructure, for example CI/CD, publishing, releasing, broken links. 4 | --- 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/questions.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: If you have a question, please use Discussions 4 | 5 | --- 6 | 7 | If you have a general question that goes beyond the library itself, we encourage you to use [Discussions](https://github.com//discussions) 8 | to engage with fellow community members! 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/support_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Support request 3 | about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. 4 | 5 | --- 6 | 7 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 8 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | > Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: 2 | 3 | ## Description 4 | 5 | > Please provide a detailed description for the change. 6 | > As much as possible, please try to keep changes separate by purpose. For example, try not to make a one-line bug fix in a feature request, or add an irrelevant README change to a bug fix. 7 | 8 | ## Impact 9 | 10 | > What's the impact of this change? 11 | 12 | ## Testing 13 | 14 | > Have you added unit and integration tests if necessary? 
15 | > Were any tests changed? Are any breaking changes necessary? 16 | 17 | ## Additional Information 18 | 19 | > Any additional details that we should be aware of? 20 | 21 | ## Checklist 22 | 23 | - [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/nodejs-bigquery/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea 24 | - [ ] Ensure the tests and linter pass 25 | - [ ] Code coverage does not decrease 26 | - [ ] Appropriate docs were updated 27 | - [ ] Appropriate comments were added, particularly in complex areas or places that require background 28 | - [ ] No new warnings or issues will be generated from this change 29 | 30 | Fixes #issue_number_goes_here 🦕 31 | -------------------------------------------------------------------------------- /.github/auto-approve.yml: -------------------------------------------------------------------------------- 1 | processes: 2 | - "NodeDependency" -------------------------------------------------------------------------------- /.github/auto-label.yaml: -------------------------------------------------------------------------------- 1 | requestsize: 2 | enabled: true 3 | -------------------------------------------------------------------------------- /.github/blunderbuss.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/nodejs-bigquery/9729d1e4494fc266b6effbe204b6ff1996511abc/.github/blunderbuss.yml -------------------------------------------------------------------------------- /.github/generated-files-bot.yml: -------------------------------------------------------------------------------- 1 | generatedFiles: 2 | - path: '.kokoro/**' 3 | message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 4 | - path: '.github/CODEOWNERS' 5 | message: 'CODEOWNERS should instead be modified 
via the `codeowner_team` property in .repo-metadata.json' 6 | - path: '.github/workflows/ci.yaml' 7 | message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 8 | - path: '.github/generated-files-bot.+(yml|yaml)' 9 | message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 10 | - path: 'README.md' 11 | message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 12 | - path: 'samples/README.md' 13 | message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 14 | ignoreAuthors: 15 | - 'gcf-owl-bot[bot]' 16 | - 'yoshi-automation' 17 | -------------------------------------------------------------------------------- /.github/release-please.yml: -------------------------------------------------------------------------------- 1 | handleGHRelease: true 2 | releaseType: node 3 | -------------------------------------------------------------------------------- /.github/release-trigger.yml: -------------------------------------------------------------------------------- 1 | enabled: true 2 | multiScmName: nodejs-bigquery -------------------------------------------------------------------------------- /.github/scripts/close-unresponsive.cjs: -------------------------------------------------------------------------------- 1 | /// Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | function labeledEvent(data) { 16 | return data.event === "labeled" && data.label.name === "needs more info"; 17 | } 18 | 19 | const numberOfDaysLimit = 15; 20 | const close_message = `This has been closed since a request for information has \ 21 | not been answered for ${numberOfDaysLimit} days. It can be reopened when the \ 22 | requested information is provided.`; 23 | 24 | module.exports = async ({ github, context }) => { 25 | const owner = context.repo.owner; 26 | const repo = context.repo.repo; 27 | 28 | const issues = await github.rest.issues.listForRepo({ 29 | owner: owner, 30 | repo: repo, 31 | labels: "needs more info", 32 | }); 33 | const numbers = issues.data.map((e) => e.number); 34 | 35 | for (const number of numbers) { 36 | const events = await github.paginate( 37 | github.rest.issues.listEventsForTimeline, 38 | { 39 | owner: owner, 40 | repo: repo, 41 | issue_number: number, 42 | }, 43 | (response) => response.data.filter(labeledEvent) 44 | ); 45 | 46 | const latest_response_label = events[events.length - 1]; 47 | 48 | const created_at = new Date(latest_response_label.created_at); 49 | const now = new Date(); 50 | const diff = now - created_at; 51 | const diffDays = diff / (1000 * 60 * 60 * 24); 52 | 53 | if (diffDays > numberOfDaysLimit) { 54 | await github.rest.issues.update({ 55 | owner: owner, 56 | repo: repo, 57 | issue_number: number, 58 | state: "closed", 59 | }); 60 | 61 | await github.rest.issues.createComment({ 62 | owner: owner, 63 | repo: repo, 64 | issue_number: number, 65 | body: 
close_message, 66 | }); 67 | } 68 | } 69 | }; -------------------------------------------------------------------------------- /.github/scripts/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tests", 3 | "private": true, 4 | "description": "tests for script", 5 | "scripts": { 6 | "test": "mocha tests/close-invalid-link.test.cjs && mocha tests/close-or-remove-response-label.test.cjs" 7 | }, 8 | "author": "Google Inc.", 9 | "license": "Apache-2.0", 10 | "engines": { 11 | "node": ">=18" 12 | }, 13 | "dependencies": { 14 | "js-yaml": "^4.1.0" 15 | }, 16 | "devDependencies": { 17 | "@octokit/rest": "^19.0.0", 18 | "mocha": "^10.0.0", 19 | "sinon": "^18.0.0" 20 | } 21 | } -------------------------------------------------------------------------------- /.github/scripts/remove-response-label.cjs: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | module.exports = async ({ github, context }) => { 16 | const commenter = context.actor; 17 | const issue = await github.rest.issues.get({ 18 | owner: context.repo.owner, 19 | repo: context.repo.repo, 20 | issue_number: context.issue.number, 21 | }); 22 | const author = issue.data.user.login; 23 | const labels = issue.data.labels.map((e) => e.name); 24 | 25 | if (author === commenter && labels.includes("needs more info")) { 26 | await github.rest.issues.removeLabel({ 27 | owner: context.repo.owner, 28 | repo: context.repo.repo, 29 | issue_number: context.issue.number, 30 | name: "needs more info", 31 | }); 32 | } 33 | }; -------------------------------------------------------------------------------- /.github/sync-repo-settings.yaml: -------------------------------------------------------------------------------- 1 | branchProtectionRules: 2 | - pattern: main 3 | isAdminEnforced: true 4 | requiredApprovingReviewCount: 1 5 | requiresCodeOwnerReviews: true 6 | requiresStrictStatusChecks: true 7 | requiredStatusCheckContexts: 8 | - "ci/kokoro: Samples test" 9 | - "ci/kokoro: System test" 10 | - lint 11 | - test (18) 12 | - test (20) 13 | - test (22) 14 | - cla/google 15 | - windows 16 | - OwlBot Post Processor 17 | permissionRules: 18 | - team: yoshi-admins 19 | permission: admin 20 | - team: jsteam-admins 21 | permission: admin 22 | - team: jsteam 23 | permission: push 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | name: ci 7 | jobs: 8 | test: 9 | runs-on: ubuntu-latest 10 | strategy: 11 | matrix: 12 | node: [18, 20, 22] 13 | steps: 14 | - uses: actions/checkout@v4 15 | - uses: actions/setup-node@v4 16 | with: 17 | node-version: ${{ matrix.node }} 18 | - run: node --version 19 | # The first installation step ensures that all of our production 20 | # 
dependencies work on the given Node.js version, this helps us find 21 | # dependencies that don't match our engines field: 22 | - run: npm install --production --engine-strict --ignore-scripts --no-package-lock 23 | # Clean up the production install, before installing dev/production: 24 | - run: rm -rf node_modules 25 | - run: npm install --engine-strict 26 | - run: npm test 27 | env: 28 | MOCHA_THROW_DEPRECATION: false 29 | test-script: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v4 33 | - uses: actions/setup-node@v4 34 | with: 35 | node-version: 18 36 | - run: node --version 37 | - run: npm install --engine-strict 38 | working-directory: .github/scripts 39 | - run: npm test 40 | working-directory: .github/scripts 41 | env: 42 | MOCHA_THROW_DEPRECATION: false 43 | windows: 44 | runs-on: windows-latest 45 | steps: 46 | - uses: actions/checkout@v4 47 | - uses: actions/setup-node@v4 48 | with: 49 | node-version: 18 50 | - run: npm install --engine-strict 51 | - run: npm test 52 | env: 53 | MOCHA_THROW_DEPRECATION: false 54 | lint: 55 | runs-on: ubuntu-latest 56 | steps: 57 | - uses: actions/checkout@v4 58 | - uses: actions/setup-node@v4 59 | with: 60 | node-version: 18 61 | - run: npm install 62 | - run: npm run lint 63 | docs: 64 | runs-on: ubuntu-latest 65 | steps: 66 | - uses: actions/checkout@v4 67 | - uses: actions/setup-node@v4 68 | with: 69 | node-version: 18 70 | - run: npm install 71 | - run: npm run docs 72 | - uses: JustinBeckwith/linkinator-action@v1 73 | with: 74 | paths: docs/ 75 | -------------------------------------------------------------------------------- /.github/workflows/discovery.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | schedule: 3 | - cron: '0 12 * * TUE' 4 | workflow_dispatch: 5 | name: Update Discovery Generated Types 6 | jobs: 7 | sync: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 11 | 
- uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 12 | with: 13 | node-version: 16 14 | # Install all deps, including dev dependencies. 15 | - run: npm install 16 | # Generate types 17 | - run: npm run types 18 | # Install samples deps 19 | - run: cd samples/ && npm install && cd ../ 20 | # Fix formatting 21 | - run: npm run fix 22 | # Submit pull request 23 | - uses: googleapis/code-suggester@v5 24 | env: 25 | ACCESS_TOKEN: ${{ secrets.YOSHI_CODE_BOT_TOKEN }} 26 | with: 27 | command: pr 28 | upstream_owner: googleapis 29 | upstream_repo: nodejs-bigquery 30 | description: 'Automated pull-request to keep BigQuery Discovery types up-to-date.' 31 | title: 'chore: update types from Discovery' 32 | message: 'chore: update types from Discovery' 33 | branch: update-discovery-patch 34 | git_dir: '.' 35 | fork: true 36 | force: true 37 | -------------------------------------------------------------------------------- /.github/workflows/issues-no-repro.yaml: -------------------------------------------------------------------------------- 1 | name: invalid_link 2 | on: 3 | issues: 4 | types: [opened, reopened] 5 | 6 | jobs: 7 | close: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: actions/setup-node@v3 15 | with: 16 | node-version: 18 17 | - run: npm install 18 | working-directory: ./.github/scripts 19 | - uses: actions/github-script@v7 20 | with: 21 | script: | 22 | const script = require('./.github/scripts/close-invalid-link.cjs') 23 | await script({github, context}) 24 | -------------------------------------------------------------------------------- /.github/workflows/response.yaml: -------------------------------------------------------------------------------- 1 | name: no_response 2 | on: 3 | schedule: 4 | - cron: '30 1 * * *' # Run every day at 01:30 5 | workflow_dispatch: 6 | issue_comment: 7 | 8 | jobs: 9 | close: 10 | if: github.event_name 
== 'schedule' || github.event_name == 'workflow_dispatch' 11 | runs-on: ubuntu-latest 12 | permissions: 13 | issues: write 14 | pull-requests: write 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: actions/github-script@v7 18 | with: 19 | script: | 20 | const script = require('./.github/scripts/close-unresponsive.cjs') 21 | await script({github, context}) 22 | 23 | remove_label: 24 | if: github.event_name == 'issue_comment' 25 | runs-on: ubuntu-latest 26 | permissions: 27 | issues: write 28 | pull-requests: write 29 | steps: 30 | - uses: actions/checkout@v4 31 | - uses: actions/github-script@v7 32 | with: 33 | script: | 34 | const script = require('./.github/scripts/remove-response-label.cjs') 35 | await script({github, context}) 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/*.log 2 | **/node_modules 3 | .coverage 4 | .nyc_output 5 | docs/ 6 | out/ 7 | build/ 8 | system-test/secrets.js 9 | system-test/*key.json 10 | *.lock 11 | .DS_Store 12 | __pycache__ 13 | .vscode 14 | package-lock.json 15 | -------------------------------------------------------------------------------- /.jsdoc.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | // 15 | 16 | 'use strict'; 17 | 18 | module.exports = { 19 | opts: { 20 | readme: './README.md', 21 | package: './package.json', 22 | template: './node_modules/jsdoc-fresh', 23 | recurse: true, 24 | verbose: true, 25 | destination: './docs/' 26 | }, 27 | plugins: [ 28 | 'plugins/markdown', 29 | 'jsdoc-region-tag' 30 | ], 31 | source: { 32 | excludePattern: '(^|\\/|\\\\)[._]', 33 | include: [ 34 | 'build/src' 35 | ], 36 | includePattern: '\\.js$' 37 | }, 38 | templates: { 39 | copyright: 'Copyright 2019 Google, LLC.', 40 | includeDate: false, 41 | sourceFiles: false, 42 | systemName: '@google-cloud/bigquery', 43 | theme: 'lumen', 44 | default: { 45 | "outputSourceFiles": false 46 | } 47 | }, 48 | markdown: { 49 | idInHeadings: true 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /.kokoro/.gitattributes: -------------------------------------------------------------------------------- 1 | * linguist-generated=true 2 | -------------------------------------------------------------------------------- /.kokoro/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 
17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/nodejs-bigquery/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/nodejs-bigquery/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/lint.cfg: -------------------------------------------------------------------------------- 1 | env_vars: { 2 | key: "TRAMPOLINE_BUILD_FILE" 3 | value: "github/nodejs-bigquery/.kokoro/lint.sh" 4 | } 5 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/nodejs-bigquery/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node18/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/nodejs-bigquery/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node18/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/nodejs-bigquery/9729d1e4494fc266b6effbe204b6ff1996511abc/.kokoro/continuous/node18/test.cfg -------------------------------------------------------------------------------- /.kokoro/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 22 | 23 | npm install 24 | 25 | npm run docs-test 26 | -------------------------------------------------------------------------------- /.kokoro/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 22 | 23 | npm install 24 | 25 | # Install and link samples 26 | if [ -f samples/package.json ]; then 27 | cd samples/ 28 | npm link ../ 29 | npm install 30 | cd .. 31 | fi 32 | 33 | npm run lint 34 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 
14 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/nodejs-bigquery/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/nodejs-bigquery/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/nodejs-bigquery/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/nodejs-bigquery/9729d1e4494fc266b6effbe204b6ff1996511abc/.kokoro/presubmit/node18/test.cfg -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/test.cfg: -------------------------------------------------------------------------------- 1 | # Use the test file directly 2 | build_file: "nodejs-bigquery/.kokoro/test.bat" 3 | -------------------------------------------------------------------------------- /.kokoro/publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Start the releasetool reporter 22 | python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script 23 | 24 | cd $(dirname $0)/.. 25 | 26 | NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) 27 | echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc 28 | 29 | npm install 30 | npm pack . 31 | # npm provides no way to specify, observe, or predict the name of the tarball 32 | # file it generates. We have to look in the current directory for the freshest 33 | # .tgz file. 34 | TARBALL=$(ls -1 -t *.tgz | head -1) 35 | 36 | npm publish --access=public --registry=https://wombat-dressing-room.appspot.com "$TARBALL" 37 | 38 | # Kokoro collects *.tgz and package-lock.json files and stores them in Placer 39 | # so we can generate SBOMs and attestations. 40 | # However, we *don't* want Kokoro to collect package-lock.json and *.tgz files 41 | # that happened to be installed with dependencies. 42 | find node_modules -name package-lock.json -o -name "*.tgz" | xargs rm -f -------------------------------------------------------------------------------- /.kokoro/release/common.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "yoshi-automation-github-key" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 
2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 21 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/nodejs-bigquery/.kokoro/release/docs-devsite.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2021 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | if [[ -z "$CREDENTIALS" ]]; then 20 | # if CREDENTIALS are explicitly set, assume we're testing locally 21 | # and don't set NPM_CONFIG_PREFIX. 22 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 23 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 24 | cd $(dirname $0)/../.. 
25 | fi 26 | 27 | npm install 28 | npm install --no-save @google-cloud/cloud-rad@^0.4.0 29 | # publish docs to devsite 30 | npx @google-cloud/cloud-rad . cloud-rad 31 | -------------------------------------------------------------------------------- /.kokoro/release/docs.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 21 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/nodejs-bigquery/.kokoro/release/docs.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2019 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | 17 | set -eo pipefail 18 | 19 | # build jsdocs (Python is installed on the Node 18 docker image). 20 | if [[ -z "$CREDENTIALS" ]]; then 21 | # if CREDENTIALS are explicitly set, assume we're testing locally 22 | # and don't set NPM_CONFIG_PREFIX. 23 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 24 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 25 | cd $(dirname $0)/../.. 26 | fi 27 | npm install 28 | npm run docs 29 | 30 | # create docs.metadata, based on package.json and .repo-metadata.json. 31 | npm i json@9.0.6 -g 32 | python3 -m docuploader create-metadata \ 33 | --name=$(cat .repo-metadata.json | json name) \ 34 | --version=$(cat package.json | json version) \ 35 | --language=$(cat .repo-metadata.json | json language) \ 36 | --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ 37 | --product-page=$(cat .repo-metadata.json | json product_documentation) \ 38 | --github-repository=$(cat .repo-metadata.json | json repo) \ 39 | --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) 40 | cp docs.metadata ./docs/docs.metadata 41 | 42 | # deploy the docs. 
43 | if [[ -z "$CREDENTIALS" ]]; then 44 | CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account 45 | fi 46 | if [[ -z "$BUCKET" ]]; then 47 | BUCKET=docs-staging 48 | fi 49 | python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET 50 | -------------------------------------------------------------------------------- /.kokoro/release/publish.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "docuploader_service_account" 6 | } 7 | } 8 | } 9 | 10 | before_action { 11 | fetch_keystore { 12 | keystore_resource { 13 | keystore_config_id: 73713 14 | keyname: "google-cloud-npm-token-1" 15 | } 16 | } 17 | } 18 | 19 | env_vars: { 20 | key: "SECRET_MANAGER_KEYS" 21 | value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" 22 | } 23 | 24 | # Download trampoline resources. 25 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 26 | 27 | # Use the trampoline script to run in docker. 28 | build_file: "nodejs-bigquery/.kokoro/trampoline_v2.sh" 29 | 30 | # Configure the docker image for kokoro-trampoline. 31 | env_vars: { 32 | key: "TRAMPOLINE_IMAGE" 33 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 34 | } 35 | 36 | env_vars: { 37 | key: "TRAMPOLINE_BUILD_FILE" 38 | value: "github/nodejs-bigquery/.kokoro/publish.sh" 39 | } 40 | 41 | # Store the packages we uploaded to npmjs.org and their corresponding 42 | # package-lock.jsons in Placer. That way, we have a record of exactly 43 | # what we published, and which version of which tools we used to publish 44 | # it, which we can use to generate SBOMs and attestations. 
45 | action { 46 | define_artifacts { 47 | regex: "github/**/*.tgz" 48 | regex: "github/**/package-lock.json" 49 | strip_prefix: "github" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /.kokoro/system-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Setup service account credentials. 22 | export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account 23 | export GCLOUD_PROJECT=long-door-651 24 | 25 | cd $(dirname $0)/.. 26 | 27 | # Run a pre-test hook, if a pre-system-test.sh is in the project 28 | if [ -f .kokoro/pre-system-test.sh ]; then 29 | set +x 30 | . 
.kokoro/pre-system-test.sh 31 | set -x 32 | fi 33 | 34 | npm install 35 | 36 | # If tests are running against main branch, configure flakybot 37 | # to open issues on failures: 38 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 39 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 40 | export MOCHA_REPORTER=xunit 41 | cleanup() { 42 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 43 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 44 | } 45 | trap cleanup EXIT HUP 46 | fi 47 | 48 | npm run system-test 49 | 50 | # codecov combines coverage across integration and unit tests. Include 51 | # the logic below for any environment you wish to collect coverage for: 52 | COVERAGE_NODE=18 53 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 54 | NYC_BIN=./node_modules/nyc/bin/nyc.js 55 | if [ -f "$NYC_BIN" ]; then 56 | $NYC_BIN report || true 57 | fi 58 | bash $KOKORO_GFILE_DIR/codecov.sh 59 | else 60 | echo "coverage is only reported for Node $COVERAGE_NODE" 61 | fi 62 | -------------------------------------------------------------------------------- /.kokoro/test.bat: -------------------------------------------------------------------------------- 1 | @rem Copyright 2018 Google LLC. All rights reserved. 2 | @rem 3 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 4 | @rem you may not use this file except in compliance with the License. 5 | @rem You may obtain a copy of the License at 6 | @rem 7 | @rem http://www.apache.org/licenses/LICENSE-2.0 8 | @rem 9 | @rem Unless required by applicable law or agreed to in writing, software 10 | @rem distributed under the License is distributed on an "AS IS" BASIS, 11 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | @rem See the License for the specific language governing permissions and 13 | @rem limitations under the License. 
14 | 15 | @echo "Starting Windows build" 16 | 17 | cd /d %~dp0 18 | cd .. 19 | 20 | @rem npm path is not currently set in our image, we should fix this next time 21 | @rem we upgrade Node.js in the image: 22 | SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm 23 | 24 | call nvm use 18 25 | call which node 26 | 27 | call npm install || goto :error 28 | call npm run test || goto :error 29 | 30 | goto :EOF 31 | 32 | :error 33 | exit /b 1 34 | -------------------------------------------------------------------------------- /.kokoro/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 
22 | 23 | npm install 24 | # If tests are running against main branch, configure flakybot 25 | # to open issues on failures: 26 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 27 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 28 | export MOCHA_REPORTER=xunit 29 | cleanup() { 30 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 31 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 32 | } 33 | trap cleanup EXIT HUP 34 | fi 35 | # Unit tests exercise the entire API surface, which may include 36 | # deprecation warnings: 37 | export MOCHA_THROW_DEPRECATION=false 38 | npm test 39 | 40 | # codecov combines coverage across integration and unit tests. Include 41 | # the logic below for any environment you wish to collect coverage for: 42 | COVERAGE_NODE=18 43 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 44 | NYC_BIN=./node_modules/nyc/bin/nyc.js 45 | if [ -f "$NYC_BIN" ]; then 46 | $NYC_BIN report || true 47 | fi 48 | bash $KOKORO_GFILE_DIR/codecov.sh 49 | else 50 | echo "coverage is only reported for Node $COVERAGE_NODE" 51 | fi 52 | -------------------------------------------------------------------------------- /.kokoro/trampoline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2017 Google Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | # This file is not used any more, but we keep this file for making it 17 | # easy to roll back. 18 | # TODO: Remove this file from the template. 19 | 20 | set -eo pipefail 21 | 22 | # Always run the cleanup script, regardless of the success of bouncing into 23 | # the container. 24 | function cleanup() { 25 | chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 26 | ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 27 | echo "cleanup"; 28 | } 29 | trap cleanup EXIT 30 | 31 | $(dirname $0)/populate-secrets.sh # Secret Manager secrets. 32 | python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" 33 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | const config = { 15 | "enable-source-maps": true, 16 | "throw-deprecation": true, 17 | "timeout": 10000, 18 | "recursive": true 19 | } 20 | if (process.env.MOCHA_THROW_DEPRECATION === 'false') { 21 | delete config['throw-deprecation']; 22 | } 23 | if (process.env.MOCHA_REPORTER) { 24 | config.reporter = process.env.MOCHA_REPORTER; 25 | } 26 | if (process.env.MOCHA_REPORTER_OUTPUT) { 27 | config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; 28 | } 29 | module.exports = config 30 | -------------------------------------------------------------------------------- /.nycrc: -------------------------------------------------------------------------------- 1 | { 2 | "report-dir": "./.coverage", 3 | "reporter": ["text", "lcov"], 4 | "exclude": [ 5 | "**/*-test", 6 | "**/.coverage", 7 | "**/apis", 8 | "**/benchmark", 9 | "**/conformance", 10 | "**/docs", 11 | "**/samples", 12 | "**/scripts", 13 | "**/protos", 14 | "**/test", 15 | "**/*.d.ts", 16 | ".jsdoc.js", 17 | "**/.jsdoc.js", 18 | "karma.conf.js", 19 | "webpack-tests.config.js", 20 | "webpack.config.js" 21 | ], 22 | "exclude-after-remap": false, 23 | "all": true 24 | } 25 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/coverage 3 | test/fixtures 4 | build/ 5 | docs/ 6 | protos/ 7 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | module.exports = { 16 | ...require('gts/.prettierrc.json') 17 | } 18 | -------------------------------------------------------------------------------- /.repo-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "bigquery", 3 | "name_pretty": "Google BigQuery", 4 | "product_documentation": "https://cloud.google.com/bigquery", 5 | "client_documentation": "https://cloud.google.com/nodejs/docs/reference/bigquery/latest", 6 | "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", 7 | "release_level": "stable", 8 | "language": "nodejs", 9 | "repo": "googleapis/nodejs-bigquery", 10 | "distribution_name": "@google-cloud/bigquery", 11 | "api_id": "bigquery.googleapis.com", 12 | "requires_billing": false, 13 | "codeowner_team": "@googleapis/api-bigquery", 14 | "api_shortname": "bigquery", 15 | "library_type": "GAPIC_MANUAL" 16 | } 17 | -------------------------------------------------------------------------------- /.trampolinerc: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Template for .trampolinerc 16 | 17 | # Add required env vars here. 18 | required_envvars+=( 19 | ) 20 | 21 | # Add env vars which are passed down into the container here. 22 | pass_down_envvars+=( 23 | "AUTORELEASE_PR" 24 | "VERSION" 25 | ) 26 | 27 | # Prevent unintentional override on the default image. 28 | if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ 29 | [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 30 | echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." 31 | exit 1 32 | fi 33 | 34 | # Define the default value if it makes sense. 35 | if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then 36 | TRAMPOLINE_IMAGE_UPLOAD="" 37 | fi 38 | 39 | if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 40 | TRAMPOLINE_IMAGE="" 41 | fi 42 | 43 | if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then 44 | TRAMPOLINE_DOCKERFILE="" 45 | fi 46 | 47 | if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then 48 | TRAMPOLINE_BUILD_FILE="" 49 | fi 50 | 51 | # Secret Manager secrets. 52 | source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh 53 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). 4 | 5 | The Google Security Team will respond within 5 working days of your report on g.co/vulnz. 
6 | 7 | We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 8 | -------------------------------------------------------------------------------- /benchmark/README.md: -------------------------------------------------------------------------------- 1 | # BigQuery Benchmark 2 | This directory contains benchmarks for BigQuery client. 3 | 4 | ## Usage 5 | `node bench.js queries.json` 6 | 7 | BigQuery service caches requests so the benchmark should be run 8 | at least twice, disregarding the first result. 9 | -------------------------------------------------------------------------------- /benchmark/queries.json: -------------------------------------------------------------------------------- 1 | [ 2 | "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 10000", 3 | "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 100000", 4 | "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 1000000", 5 | "SELECT title FROM `bigquery-public-data.samples.wikipedia` ORDER BY title LIMIT 1000", 6 | "SELECT title, id, timestamp, contributor_ip FROM `bigquery-public-data.samples.wikipedia` WHERE title like 'Blo%' ORDER BY id", 7 | "SELECT * FROM `bigquery-public-data.baseball.games_post_wide` ORDER BY gameId", 8 | "SELECT * FROM `bigquery-public-data.samples.github_nested` WHERE repository.has_downloads ORDER BY repository.created_at LIMIT 10000", 9 | "SELECT repo_name, path FROM `bigquery-public-data.github_repos.files` WHERE path LIKE '%.java' ORDER BY id LIMIT 1000000" 10 | ] 11 | -------------------------------------------------------------------------------- /linkinator.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "recurse": true, 3 | "skip": [ 4 | "https://codecov.io/gh/googleapis/", 5 | "www.googleapis.com", 6 | "http://goo.gl/f2SXcb", 7 | "img.shields.io" 8 | ], 9 | "silent": true, 10 | "concurrency": 10 11 | } 12 | 
-------------------------------------------------------------------------------- /owlbot.py: -------------------------------------------------------------------------------- 1 | # Copyright 2022 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | """This script is used to synthesize generated parts of this library.""" 16 | 17 | import synthtool.languages.node as node 18 | from synthtool import shell 19 | 20 | node.owlbot_main(templates_excludes=['.github/bug-report.md']) 21 | 22 | # Regenerate Discovery types. 
23 | shell.run(('npm','run','types')) 24 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | "docker:disable", 5 | ":disableDependencyDashboard" 6 | ], 7 | "constraintsFiltering": "strict", 8 | "pinVersions": false, 9 | "rebaseStalePrs": true, 10 | "schedule": [ 11 | "after 9am and before 3pm" 12 | ], 13 | "gitAuthor": null, 14 | "packageRules": [ 15 | { 16 | "extends": "packages:linters", 17 | "groupName": "linters" 18 | } 19 | ], 20 | "ignoreDeps": ["typescript"] 21 | } 22 | -------------------------------------------------------------------------------- /samples/.eslintrc.yml: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | no-console: off 4 | -------------------------------------------------------------------------------- /samples/addEmptyColumn.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_add_empty_column] 19 | 20 | // Import the Google Cloud client library and create a client 21 | const {BigQuery} = require('@google-cloud/bigquery'); 22 | const bigquery = new BigQuery(); 23 | 24 | async function addEmptyColumn() { 25 | // Adds an empty column to the schema. 26 | 27 | /** 28 | * TODO(developer): Uncomment the following lines before running the sample. 29 | */ 30 | // const datasetId = 'my_dataset'; 31 | // const tableId = 'my_table'; 32 | const column = {name: 'size', type: 'STRING'}; 33 | 34 | // Retrieve current table metadata 35 | const table = bigquery.dataset(datasetId).table(tableId); 36 | const [metadata] = await table.getMetadata(); 37 | 38 | // Update table schema 39 | const schema = metadata.schema; 40 | const new_schema = schema; 41 | new_schema.fields.push(column); 42 | metadata.schema = new_schema; 43 | 44 | const [result] = await table.setMetadata(metadata); 45 | console.log(result.schema.fields); 46 | } 47 | // [END bigquery_add_empty_column] 48 | addEmptyColumn(); 49 | } 50 | 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/auth-user-sample/oauth2.keys.json: -------------------------------------------------------------------------------- 1 | {"installed":{"client_id":"my_client_id","project_id":"my_project","auth_uri":"my_uri","token_uri":"https://oauth2.googleapis.com/token","auth_provider_x509_cert_url":"my_url","client_secret":"my_secret","redirect_uris":["http://localhost","http://localhost:3000/oauth2callback"]}} 2 | -------------------------------------------------------------------------------- /samples/cancelJob.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not 
use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(jobId = 'existing-job-id') { 18 | // [START bigquery_cancel_job] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function cancelJob() { 24 | // Attempts to cancel a job. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 28 | */ 29 | // const jobId = "existing-job-id"; 30 | 31 | // Create a job reference 32 | const job = bigquery.job(jobId); 33 | 34 | // Attempt to cancel job 35 | const [apiResult] = await job.cancel(); 36 | 37 | console.log(apiResult.job.status); 38 | } 39 | // [END bigquery_cancel_job] 40 | cancelJob(); 41 | } 42 | main(...process.argv.slice(2)); 43 | -------------------------------------------------------------------------------- /samples/clientJSONCredentials.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_client_json_credentials] 19 | // Create a BigQuery client explicitly using service account credentials. 20 | // by specifying the private key file. 21 | const {BigQuery} = require('@google-cloud/bigquery'); 22 | 23 | const options = { 24 | keyFilename: 'path/to/service_account.json', 25 | projectId: 'my_project', 26 | }; 27 | 28 | const bigquery = new BigQuery(options); 29 | // [END bigquery_client_json_credentials] 30 | async function query() { 31 | // Queries the U.S. given names dataset for the state of Texas. 32 | 33 | const query = `SELECT name 34 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 35 | WHERE state = 'TX' 36 | LIMIT 100`; 37 | 38 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 39 | const options = { 40 | query: query, 41 | // Location must match that of the dataset(s) referenced in the query. 
42 | location: 'US', 43 | }; 44 | 45 | // Run the query as a job 46 | const [job] = await bigquery.createQueryJob(options); 47 | console.log(`Job ${job.id} started.`); 48 | 49 | // Wait for the query to finish 50 | const [rows] = await job.getQueryResults(); 51 | 52 | // Print the results 53 | console.log('Rows:'); 54 | rows.forEach(row => console.log(row)); 55 | } 56 | query(); 57 | } 58 | 59 | main(...process.argv.slice(2)); 60 | -------------------------------------------------------------------------------- /samples/copyTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | srcDatasetId = 'my_src_dataset', 19 | srcTableId = 'my_src_table', 20 | destDatasetId = 'my_dest_dataset', 21 | destTableId = 'my_dest_table', 22 | ) { 23 | // [START bigquery_copy_table] 24 | // Import the Google Cloud client library and create a client 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function copyTable() { 29 | // Copies src_dataset:src_table to dest_dataset:dest_table. 
30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample 33 | */ 34 | // const srcDatasetId = "my_src_dataset"; 35 | // const srcTableId = "my_src_table"; 36 | // const destDatasetId = "my_dest_dataset"; 37 | // const destTableId = "my_dest_table"; 38 | 39 | // Copy the table contents into another table 40 | const [job] = await bigquery 41 | .dataset(srcDatasetId) 42 | .table(srcTableId) 43 | .copy(bigquery.dataset(destDatasetId).table(destTableId)); 44 | 45 | console.log(`Job ${job.id} completed.`); 46 | 47 | // Check the job's status for errors 48 | const errors = job.status.errors; 49 | if (errors && errors.length > 0) { 50 | throw errors; 51 | } 52 | } 53 | // [END bigquery_copy_table] 54 | copyTable(); 55 | } 56 | main(...process.argv.slice(2)); 57 | -------------------------------------------------------------------------------- /samples/copyTableMultipleSource.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | sourceTable = 'my_table', // Existing table to copy from 20 | destinationTable = 'testing', // Existing table to copy to 21 | ) { 22 | // [START bigquery_copy_table_multiple_source] 23 | // Import the Google Cloud client library 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function copyTableMultipleSource() { 28 | // Copies a source table to a destination table, overwriting the destination's contents. 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 32 | */ 33 | // const datasetId = "my_dataset"; 34 | // const sourceTable = 'my_table'; 35 | // const destinationTable = 'testing'; 36 | 37 | // Create a reference to the existing dataset 38 | const dataset = bigquery.dataset(datasetId); 39 | // Copy-job configuration: never create the destination, truncate it on write 40 | const metadata = { 41 | createDisposition: 'CREATE_NEVER', 42 | writeDisposition: 'WRITE_TRUNCATE', 43 | }; 44 | 45 | // Create table references 46 | const table = dataset.table(sourceTable); 47 | const yourTable = dataset.table(destinationTable); 48 | 49 | // Copy table 50 | const [apiResponse] = await table.copy(yourTable, metadata); 51 | console.log(apiResponse.configuration.copy); 52 | } 53 | // [END bigquery_copy_table_multiple_source] 54 | copyTableMultipleSource(); 55 | } 56 | 57 | main(...process.argv.slice(2)); 58 | -------------------------------------------------------------------------------- /samples/createDataset.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_new_dataset') { 18 | // [START bigquery_create_dataset] 19 | // Import the Google Cloud client library and create a client 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function createDataset() { 24 | // Creates a new dataset named "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 28 | */ 29 | // const datasetId = "my_new_dataset"; 30 | 31 | // Specify the geographic location where the dataset should reside 32 | const options = { 33 | location: 'US', 34 | }; 35 | 36 | // Create a new dataset 37 | const [dataset] = await bigquery.createDataset(datasetId, options); 38 | console.log(`Dataset ${dataset.id} created.`); 39 | } 40 | createDataset(); 41 | // [END bigquery_create_dataset] 42 | } 43 | 44 | main(...process.argv.slice(2)); 45 | -------------------------------------------------------------------------------- /samples/createJob.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_create_job] 19 | // Import the Google Cloud client library and create a client 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function createJob() { 24 | // Run a BigQuery query job. 25 | 26 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/Job 27 | const options = { 28 | // Specify a job configuration to set optional job resource properties. 29 | configuration: { 30 | query: { 31 | query: `SELECT country_name 32 | FROM \`bigquery-public-data.utility_us.country_code_iso\` 33 | LIMIT 10`, 34 | useLegacySql: false, 35 | }, 36 | labels: {'example-label': 'example-value'}, 37 | }, 38 | }; 39 | 40 | // Make API request. 
41 | const response = await bigquery.createJob(options); 42 | const job = response[0]; 43 | 44 | // Wait for the query to finish 45 | const [rows] = await job.getQueryResults(job); 46 | 47 | // Print the results 48 | console.log('Rows:'); 49 | rows.forEach(row => console.log(row)); 50 | } 51 | // [END bigquery_create_job] 52 | createJob(); 53 | } 54 | main(...process.argv.slice(2)); 55 | -------------------------------------------------------------------------------- /samples/createRoutine.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | routineId = 'my_routine', // Routine to be created 20 | ) { 21 | // [START bigquery_create_routine] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function createRoutine() { 27 | // Creates a new routine named "my_routine" in "my_dataset". 28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample. 
31 | */ 32 | // const datasetId = 'my_dataset'; 33 | // const routineId = 'my_routine'; 34 | 35 | const dataset = bigquery.dataset(datasetId); 36 | 37 | // Create routine reference 38 | let routine = dataset.routine(routineId); 39 | 40 | const config = { 41 | arguments: [ 42 | { 43 | name: 'x', 44 | dataType: { 45 | typeKind: 'INT64', 46 | }, 47 | }, 48 | ], 49 | definitionBody: 'x * 3', 50 | routineType: 'SCALAR_FUNCTION', 51 | returnType: { 52 | typeKind: 'INT64', 53 | }, 54 | }; 55 | 56 | // Make API call 57 | [routine] = await routine.create(config); 58 | 59 | console.log(`Routine ${routineId} created.`); 60 | } 61 | createRoutine(); 62 | // [END bigquery_create_routine] 63 | } 64 | main(...process.argv.slice(2)); 65 | -------------------------------------------------------------------------------- /samples/createRoutineDDL.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | projectId = 'my_project', // GCP project 19 | datasetId = 'my_dataset', // Existing dataset 20 | routineId = 'my_routine', // Routine to be created 21 | ) { 22 | // [START bigquery_create_routine_ddl] 23 | // Import the Google Cloud client library and create a client 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function createRoutineDDL() { 28 | // Creates a routine using DDL. 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 32 | */ 33 | // projectId = 'my_project'; 34 | // const datasetId = 'my_dataset'; 35 | // const routineId = 'my_routine'; 36 | 37 | const query = `CREATE FUNCTION \`${projectId}.${datasetId}.${routineId}\`( 38 | arr ARRAY> 39 | ) AS ( 40 | (SELECT SUM(IF(elem.name = "foo",elem.val,null)) FROM UNNEST(arr) AS elem) 41 | )`; 42 | 43 | const options = { 44 | query: query, 45 | }; 46 | 47 | // Run the query as a job 48 | const [job] = await bigquery.createQueryJob(options); 49 | console.log(`Job ${job.id} started.`); 50 | 51 | // Wait for the query to finish 52 | await job.getQueryResults(); 53 | 54 | console.log(`Routine ${routineId} created.`); 55 | } 56 | createRoutineDDL(); 57 | // [END bigquery_create_routine_ddl] 58 | } 59 | main(...process.argv.slice(2)); 60 | -------------------------------------------------------------------------------- /samples/createTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | tableId = 'my_new_table', // Table to be created 20 | schema = [ 21 | {name: 'Name', type: 'STRING', mode: 'REQUIRED'}, 22 | {name: 'Age', type: 'INTEGER'}, 23 | {name: 'Weight', type: 'FLOAT'}, 24 | {name: 'IsMagic', type: 'BOOLEAN'}, 25 | ], 26 | ) { 27 | // [START bigquery_create_table] 28 | // Import the Google Cloud client library and create a client 29 | const {BigQuery} = require('@google-cloud/bigquery'); 30 | const bigquery = new BigQuery(); 31 | 32 | async function createTable() { 33 | // Creates a new table named "my_table" in "my_dataset". 34 | 35 | /** 36 | * TODO(developer): Uncomment the following lines before running the sample. 
37 | */ 38 | // const datasetId = "my_dataset"; 39 | // const tableId = "my_table"; 40 | // const schema = 'Name:string, Age:integer, Weight:float, IsMagic:boolean'; 41 | 42 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 43 | const options = { 44 | schema: schema, 45 | location: 'US', 46 | }; 47 | 48 | // Create a new table in the dataset 49 | const [table] = await bigquery 50 | .dataset(datasetId) 51 | .createTable(tableId, options); 52 | 53 | console.log(`Table ${table.id} created.`); 54 | } 55 | // [END bigquery_create_table] 56 | createTable(); 57 | } 58 | main(...process.argv.slice(2)); 59 | -------------------------------------------------------------------------------- /samples/createTableClustered.js: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_create_table_clustered] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function createTableClustered() { 24 | // Creates a new clustered table named "my_table" in "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const tableId = "my_table"; 31 | const schema = 'name:string, city:string, zipcode:integer'; 32 | 33 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 34 | const options = { 35 | schema: schema, 36 | clustering: { 37 | fields: ['city', 'zipcode'], 38 | }, 39 | }; 40 | 41 | // Create a new table in the dataset 42 | const [table] = await bigquery 43 | .dataset(datasetId) 44 | .createTable(tableId, options); 45 | console.log(`Table ${table.id} created with clustering:`); 46 | console.log(table.metadata.clustering); 47 | } 48 | // [END bigquery_create_table_clustered] 49 | createTableClustered(datasetId, tableId); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/createTablePartitioned.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_create_table_partitioned] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function createTablePartitioned() { 24 | // Creates a new partitioned table named "my_table" in "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const tableId = "my_table"; 31 | const schema = 'Name:string, Post_Abbr:string, Date:date'; 32 | 33 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 34 | const options = { 35 | schema: schema, 36 | location: 'US', 37 | timePartitioning: { 38 | type: 'DAY', 39 | expirationMs: '7776000000', 40 | field: 'date', 41 | }, 42 | }; 43 | 44 | // Create a new table in the dataset 45 | const [table] = await bigquery 46 | .dataset(datasetId) 47 | .createTable(tableId, options); 48 | console.log(`Table ${table.id} created with partitioning: `); 49 | console.log(table.metadata.timePartitioning); 50 | } 51 | // [END bigquery_create_table_partitioned] 52 | createTablePartitioned(datasetId, tableId); 53 | } 54 | main(...process.argv.slice(2)); 55 | -------------------------------------------------------------------------------- /samples/createView.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | myDatasetId = 'my_dataset', // Existing dataset 19 | myTableId = 'my_shared_view', // View to be created 20 | projectId = 'bigquery-public-data', // Source GCP project ID 21 | sourceDatasetId = 'usa_names', // Source dataset ID 22 | sourceTableId = 'usa_1910_current', //Source table ID 23 | ) { 24 | // [START bigquery_create_view] 25 | // Import the Google Cloud client library and create a client 26 | const {BigQuery} = require('@google-cloud/bigquery'); 27 | const bigquery = new BigQuery(); 28 | 29 | async function createView() { 30 | // Creates a new view named "my_shared_view" in "my_dataset". 31 | 32 | /** 33 | * TODO(developer): Uncomment the following lines before running the sample. 
34 | */ 35 | // const myDatasetId = "my_dataset" 36 | // const myTableId = "my_shared_view" 37 | // const projectId = "bigquery-public-data"; 38 | // const sourceDatasetId = "usa_names" 39 | // const sourceTableId = "usa_1910_current"; 40 | const myDataset = await bigquery.dataset(myDatasetId); 41 | 42 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 43 | const options = { 44 | view: `SELECT name 45 | FROM \`${projectId}.${sourceDatasetId}.${sourceTableId}\` 46 | LIMIT 10`, 47 | }; 48 | 49 | // Create a new view in the dataset 50 | const [view] = await myDataset.createTable(myTableId, options); 51 | 52 | console.log(`View ${view.id} created.`); 53 | } 54 | // [END bigquery_create_view] 55 | createView(); 56 | } 57 | main(...process.argv.slice(2)); 58 | -------------------------------------------------------------------------------- /samples/ddlCreateView.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | projectId = 'my_project', // GCP Project ID 19 | datasetId = 'my_dataset', // Existing dataset ID 20 | tableId = 'my_new_view', // View to be created 21 | ) { 22 | // [START bigquery_ddl_create_view] 23 | // Import the Google Cloud client library and create a client 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function ddlCreateView() { 28 | // Creates a view via a DDL query 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 32 | */ 33 | // const projectId = "my_project" 34 | // const datasetId = "my_dataset" 35 | // const tableId = "my_new_view" 36 | 37 | const query = ` 38 | CREATE VIEW \`${projectId}.${datasetId}.${tableId}\` 39 | OPTIONS( 40 | expiration_timestamp=TIMESTAMP_ADD( 41 | CURRENT_TIMESTAMP(), INTERVAL 48 HOUR), 42 | friendly_name="new_view", 43 | description="a view that expires in 2 days", 44 | labels=[("org_unit", "development")] 45 | ) 46 | AS SELECT name, state, year, number 47 | FROM \`bigquery-public-data.usa_names.usa_1910_current\` 48 | WHERE state LIKE 'W%'`; 49 | 50 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 51 | const options = { 52 | query: query, 53 | }; 54 | 55 | // Run the query as a job 56 | const [job] = await bigquery.createQueryJob(options); 57 | // Log the new view's name once the job emits its 'complete' event (no await here). 58 | job.on('complete', metadata => { 59 | console.log(`Created new view ${tableId} via job ${metadata.id}`); 60 | }); 61 | } 62 | // [END bigquery_ddl_create_view] 63 | ddlCreateView(); 64 | } 65 | main(...process.argv.slice(2)); 66 | -------------------------------------------------------------------------------- /samples/deleteDataset.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in
compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_delete_dataset] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function deleteDataset() { 24 | // Deletes a dataset named "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 28 | */ 29 | // const datasetId = 'my_dataset'; 30 | 31 | // Create a reference to the existing dataset 32 | const dataset = bigquery.dataset(datasetId); 33 | 34 | // Delete the dataset and its contents 35 | await dataset.delete({force: true}); 36 | console.log(`Dataset ${dataset.id} deleted.`); 37 | } 38 | // [END bigquery_delete_dataset] 39 | deleteDataset(); 40 | } 41 | 42 | main(...process.argv.slice(2)); 43 | -------------------------------------------------------------------------------- /samples/deleteLabelDataset.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: Delete Label Dataset 19 | // description: Deletes a label on a dataset. 20 | // usage: node deleteLabelDataset.js 21 | 22 | function main( 23 | datasetId = 'my_dataset', // Existing dataset 24 | ) { 25 | // [START bigquery_delete_label_dataset] 26 | // Import the Google Cloud client library 27 | const {BigQuery} = require('@google-cloud/bigquery'); 28 | const bigquery = new BigQuery(); 29 | 30 | async function deleteLabelDataset() { 31 | // Deletes a label on a dataset. 32 | // This example dataset starts with existing label { color: 'green' } 33 | 34 | /** 35 | * TODO(developer): Uncomment the following lines before running the sample. 36 | */ 37 | // const datasetId = 'my_dataset'; 38 | 39 | // Retrieve current dataset metadata. 
40 | const dataset = bigquery.dataset(datasetId); 41 | const [metadata] = await dataset.getMetadata(); 42 | 43 | // Setting a label's value to null deletes that label from the dataset 44 | metadata.labels = {color: null}; 45 | const [apiResponse] = await dataset.setMetadata(metadata); 46 | 47 | console.log(`${datasetId} labels:`); 48 | console.log(apiResponse.labels); 49 | } 50 | // [END bigquery_delete_label_dataset] 51 | deleteLabelDataset(); 52 | } 53 | main(...process.argv.slice(2)); 54 | -------------------------------------------------------------------------------- /samples/deleteLabelTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_delete_label_table] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function deleteLabelTable() { 24 | // Deletes a label from an existing table. 25 | // This example dataset starts with existing label { color: 'green' } 26 | 27 | /** 28 | * TODO(developer): Uncomment the following lines before running the sample.
29 | */ 30 | // const datasetId = "my_dataset"; 31 | // const tableId = "my_table"; 32 | 33 | const dataset = bigquery.dataset(datasetId); 34 | const [table] = await dataset.table(tableId).get(); 35 | 36 | // Retrieve current table metadata 37 | const [metadata] = await table.getMetadata(); 38 | 39 | // Add label to table metadata 40 | metadata.labels = {color: null}; 41 | const [apiResponse] = await table.setMetadata(metadata); 42 | 43 | console.log(`${tableId} labels:`); 44 | console.log(apiResponse.labels); 45 | } 46 | // [END bigquery_delete_label_table] 47 | deleteLabelTable(); 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/deleteModel.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', modelId = 'my_model') { 18 | // [START bigquery_delete_model] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function deleteModel() { 24 | // Deletes a model named "my_model" from "my_dataset". 
25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample 28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const modelId = "my_model"; 31 | 32 | const dataset = bigquery.dataset(datasetId); 33 | const model = dataset.model(modelId); 34 | await model.delete(); 35 | 36 | console.log(`Model ${modelId} deleted.`); 37 | } 38 | // [END bigquery_delete_model] 39 | deleteModel(); 40 | } 41 | main(...process.argv.slice(2)); 42 | -------------------------------------------------------------------------------- /samples/deleteRoutine.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | routineId = 'my_routine', // Routine to be deleted 20 | ) { 21 | // [START bigquery_delete_routine] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function deleteRoutine() { 27 | // Deletes a routine named "my_routine" in "my_dataset". 28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample. 
31 | */ 32 | // const datasetId = 'my_dataset'; 33 | // const routineId = 'my_routine'; 34 | 35 | const dataset = bigquery.dataset(datasetId); 36 | 37 | // Create routine reference 38 | let routine = dataset.routine(routineId); 39 | 40 | // Make API call 41 | [routine] = await routine.delete(); 42 | 43 | console.log(`Routine ${routineId} deleted.`); 44 | } 45 | deleteRoutine(); 46 | // [END bigquery_delete_routine] 47 | } 48 | main(...process.argv.slice(2)); 49 | -------------------------------------------------------------------------------- /samples/deleteTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_delete_table] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function deleteTable() { 24 | // Deletes "my_table" from "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const tableId = "my_table"; 31 | 32 | // Delete the table 33 | await bigquery.dataset(datasetId).table(tableId).delete(); 34 | 35 | console.log(`Table ${tableId} deleted.`); 36 | } 37 | // [END bigquery_delete_table] 38 | deleteTable(); 39 | } 40 | 41 | main(...process.argv.slice(2)); 42 | -------------------------------------------------------------------------------- /samples/extractTableCompressed.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', 19 | tableId = 'my_table', 20 | bucketName = 'my-bucket', 21 | filename = 'file.csv', 22 | ) { 23 | // [START bigquery_extract_table_compressed] 24 | // Import the Google Cloud client libraries 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const {Storage} = require('@google-cloud/storage'); 27 | 28 | const bigquery = new BigQuery(); 29 | const storage = new Storage(); 30 | 31 | async function extractTableCompressed() { 32 | // Exports my_dataset:my_table to gcs://my-bucket/my-file as a compressed file. 33 | 34 | /** 35 | * TODO(developer): Uncomment the following lines before running the sample. 
36 | */ 37 | // const datasetId = "my_dataset"; 38 | // const tableId = "my_table"; 39 | // const bucketName = "my-bucket"; 40 | // const filename = "file.csv"; 41 | 42 | // Location must match that of the source table. 43 | const options = { 44 | location: 'US', 45 | gzip: true, 46 | }; 47 | 48 | // Export data from the table into a Google Cloud Storage file 49 | const [job] = await bigquery 50 | .dataset(datasetId) 51 | .table(tableId) 52 | .extract(storage.bucket(bucketName).file(filename), options); 53 | 54 | console.log(`Job ${job.id} created.`); 55 | 56 | // Check the job's status for errors 57 | const errors = job.status.errors; 58 | if (errors && errors.length > 0) { 59 | throw errors; 60 | } 61 | } 62 | // [END bigquery_extract_table_compressed] 63 | extractTableCompressed(); 64 | } 65 | main(...process.argv.slice(2)); 66 | -------------------------------------------------------------------------------- /samples/extractTableJSON.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', 19 | tableId = 'my_table', 20 | bucketName = 'my-bucket', 21 | filename = 'file.json', 22 | ) { 23 | // [START bigquery_extract_table_json] 24 | // Import the Google Cloud client libraries 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const {Storage} = require('@google-cloud/storage'); 27 | 28 | const bigquery = new BigQuery(); 29 | const storage = new Storage(); 30 | 31 | async function extractTableJSON() { 32 | // Exports my_dataset:my_table to gcs://my-bucket/my-file as JSON. 33 | 34 | /** 35 | * TODO(developer): Uncomment the following lines before running the sample. 36 | */ 37 | // const datasetId = "my_dataset"; 38 | // const tableId = "my_table"; 39 | // const bucketName = "my-bucket"; 40 | // const filename = "file.json"; 41 | 42 | // Location must match that of the source table. 43 | const options = { 44 | format: 'json', 45 | location: 'US', 46 | }; 47 | 48 | // Export data from the table into a Google Cloud Storage file 49 | const [job] = await bigquery 50 | .dataset(datasetId) 51 | .table(tableId) 52 | .extract(storage.bucket(bucketName).file(filename), options); 53 | 54 | console.log(`Job ${job.id} created.`); 55 | 56 | // Check the job's status for errors 57 | const errors = job.status.errors; 58 | if (errors && errors.length > 0) { 59 | throw errors; 60 | } 61 | } 62 | // [END bigquery_extract_table_json] 63 | extractTableJSON(); 64 | } 65 | main(...process.argv.slice(2)); 66 | -------------------------------------------------------------------------------- /samples/extractTableToGCS.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', 19 | tableId = 'my_table', 20 | bucketName = 'my-bucket', 21 | filename = 'file.csv', 22 | ) { 23 | // [START bigquery_extract_table] 24 | // Import the Google Cloud client libraries 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const {Storage} = require('@google-cloud/storage'); 27 | 28 | const bigquery = new BigQuery(); 29 | const storage = new Storage(); 30 | 31 | async function extractTableToGCS() { 32 | // Exports my_dataset:my_table to gcs://my-bucket/my-file as raw CSV. 33 | 34 | /** 35 | * TODO(developer): Uncomment the following lines before running the sample. 36 | */ 37 | // const datasetId = "my_dataset"; 38 | // const tableId = "my_table"; 39 | // const bucketName = "my-bucket"; 40 | // const filename = "file.csv"; 41 | 42 | // Location must match that of the source table. 
43 | const options = { 44 | location: 'US', 45 | }; 46 | 47 | // Export data from the table into a Google Cloud Storage file 48 | const [job] = await bigquery 49 | .dataset(datasetId) 50 | .table(tableId) 51 | .extract(storage.bucket(bucketName).file(filename), options); 52 | 53 | console.log(`Job ${job.id} created.`); 54 | 55 | // Check the job's status for errors 56 | const errors = job.status.errors; 57 | if (errors && errors.length > 0) { 58 | throw errors; 59 | } 60 | } 61 | // [END bigquery_extract_table] 62 | extractTableToGCS(); 63 | } 64 | main(...process.argv.slice(2)); 65 | -------------------------------------------------------------------------------- /samples/getDataset.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_get_dataset] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function getDataset() { 24 | // Retrieves dataset named "my_dataset". 
25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample 28 | */ 29 | // const datasetId = "my_dataset"; 30 | 31 | // Retrieve dataset reference 32 | const [dataset] = await bigquery.dataset(datasetId).get(); 33 | 34 | console.log('Dataset:'); 35 | console.log(dataset.metadata.datasetReference); 36 | } 37 | getDataset(); 38 | // [END bigquery_get_dataset] 39 | } 40 | main(...process.argv.slice(2)); 41 | -------------------------------------------------------------------------------- /samples/getDatasetLabels.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: Get Dataset Labels 19 | // description: Gets labels on a dataset. 20 | // usage: node getDatasetLabels.js 21 | 22 | function main(datasetId = 'my_dataset') { 23 | // [START bigquery_get_dataset_labels] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function getDatasetLabels() { 29 | // Gets labels on a dataset. 30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample. 33 | */ 34 | // const datasetId = "my_dataset"; 35 | 36 | // Retrieve current dataset metadata. 
37 | const dataset = bigquery.dataset(datasetId); 38 | const [metadata] = await dataset.getMetadata(); 39 | const labels = metadata.labels; 40 | 41 | console.log(`${datasetId} Labels:`); 42 | for (const [key, value] of Object.entries(labels)) { 43 | console.log(`${key}: ${value}`); 44 | } 45 | } 46 | getDatasetLabels(); 47 | // [END bigquery_get_dataset_labels] 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/getJob.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(jobId = 'existing-job-id') { 18 | // [START bigquery_get_job] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function getJob() { 24 | // Get job properties. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const jobId = "existing-job-id"; 30 | 31 | // Create a job reference 32 | const job = bigquery.job(jobId); 33 | 34 | // Retrieve job 35 | const [jobResult] = await job.get(); 36 | 37 | console.log(jobResult.metadata.jobReference); 38 | } 39 | // [END bigquery_get_job] 40 | getJob(); 41 | } 42 | main(...process.argv.slice(2)); 43 | -------------------------------------------------------------------------------- /samples/getModel.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery Get Model 19 | // description: Retrieves an existing model from a dataset. 20 | // usage: node getModel.js 21 | 22 | function main(datasetId = 'my_dataset', modelId = 'my_existing_model') { 23 | // [START bigquery_get_model] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function getModel() { 29 | // Retrieves model named "my_existing_model" in "my_dataset". 
30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample 33 | */ 34 | // const datasetId = "my_dataset"; 35 | // const modelId = "my_existing_model"; 36 | 37 | const dataset = bigquery.dataset(datasetId); 38 | const [model] = await dataset.model(modelId).get(); 39 | 40 | console.log('Model:'); 41 | console.log(model.metadata.modelReference); 42 | } 43 | // [END bigquery_get_model] 44 | getModel(); 45 | } 46 | main(...process.argv.slice(2)); 47 | -------------------------------------------------------------------------------- /samples/getRoutine.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | routineId = 'my_routine', // Existing routine 20 | ) { 21 | // [START bigquery_get_routine] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function getRoutine() { 27 | // Gets an existing routine named "my_routine" in "my_dataset". 28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample. 
31 | */ 32 | // const datasetId = 'my_dataset'; 33 | // const routineId = 'my_routine'; 34 | 35 | const dataset = bigquery.dataset(datasetId); 36 | 37 | // Create routine reference and make API call 38 | const [routine] = await dataset.routine(routineId).get(); 39 | 40 | console.log( 41 | `Routine ${routine.metadata.routineReference.routineId} retrieved.`, 42 | ); 43 | } 44 | getRoutine(); 45 | // [END bigquery_get_routine] 46 | } 47 | main(...process.argv.slice(2)); 48 | -------------------------------------------------------------------------------- /samples/getTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery Get Table 19 | // description: Retrieves an existing table from a dataset. 20 | // usage: node getTable.js 21 | 22 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 23 | // [START bigquery_get_table] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function getTable() { 29 | // Retrieves table named "my_table" in "my_dataset". 
30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample 33 | */ 34 | // const datasetId = "my_dataset"; 35 | // const tableId = "my_table"; 36 | 37 | // Retrieve table reference 38 | const dataset = bigquery.dataset(datasetId); 39 | const [table] = await dataset.table(tableId).get(); 40 | 41 | console.log('Table:'); 42 | console.log(table.metadata.tableReference); 43 | } 44 | getTable(); 45 | // [END bigquery_get_table] 46 | } 47 | main(...process.argv.slice(2)); 48 | -------------------------------------------------------------------------------- /samples/getTableLabels.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: Get Table Labels 19 | // description: Gets labels on a dataset. 20 | // usage: node getTableLabels.js 21 | 22 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 23 | // [START bigquery_get_table_labels] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function getTableLabels() { 29 | // Gets labels on a dataset. 30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample. 
33 | */ 34 | // const datasetId = "my_dataset"; 35 | // const tableId = "my_table"; 36 | 37 | // Retrieve current dataset metadata. 38 | const table = bigquery.dataset(datasetId).table(tableId); 39 | const [metadata] = await table.getMetadata(); 40 | const labels = metadata.labels; 41 | 42 | console.log(`${tableId} Labels:`); 43 | for (const [key, value] of Object.entries(labels)) { 44 | console.log(`${key}: ${value}`); 45 | } 46 | } 47 | getTableLabels(); 48 | // [END bigquery_get_table_labels] 49 | } 50 | main(...process.argv.slice(2)); 51 | -------------------------------------------------------------------------------- /samples/getView.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset ID 19 | tableId = 'my_view', // Existing table ID 20 | ) { 21 | // [START bigquery_get_view] 22 | // Import the Google Cloud client library 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function getView() { 27 | // Retrieves view properties. 
28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample 31 | */ 32 | // const datasetId = "my_dataset"; 33 | // const tableId = "my_view"; 34 | 35 | // Retrieve view 36 | const dataset = bigquery.dataset(datasetId); 37 | const [view] = await dataset.table(tableId).get(); 38 | 39 | const fullTableId = view.metadata.id; 40 | const viewQuery = view.metadata.view.query; 41 | 42 | // Display view properties 43 | console.log(`View at ${fullTableId}`); 44 | console.log(`View query: ${viewQuery}`); 45 | } 46 | getView(); 47 | // [END bigquery_get_view] 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/insertRowsAsStream.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_table_insert_rows] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function insertRowsAsStream() { 24 | // Inserts the JSON objects into my_dataset:my_table. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = 'my_dataset'; 30 | // const tableId = 'my_table'; 31 | const rows = [ 32 | {name: 'Tom', age: 30}, 33 | {name: 'Jane', age: 32}, 34 | ]; 35 | 36 | // Insert data into a table 37 | await bigquery.dataset(datasetId).table(tableId).insert(rows); 38 | console.log(`Inserted ${rows.length} rows`); 39 | } 40 | // [END bigquery_table_insert_rows] 41 | insertRowsAsStream(); 42 | } 43 | main(...process.argv.slice(2)); 44 | -------------------------------------------------------------------------------- /samples/labelDataset.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery Label Dataset 19 | // description: Updates a label on a dataset. 20 | // usage: node labelDataset.js 21 | 22 | function main(datasetId = 'my_dataset') { 23 | // [START bigquery_label_dataset] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function labelDataset() { 29 | // Updates a label on a dataset. 30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample 33 | */ 34 | // const datasetId = "my_dataset"; 35 | 36 | // Retrieve current dataset metadata. 
37 | const dataset = bigquery.dataset(datasetId); 38 | const [metadata] = await dataset.getMetadata(); 39 | 40 | // Add label to dataset metadata 41 | metadata.labels = {color: 'green'}; 42 | const [apiResponse] = await dataset.setMetadata(metadata); 43 | 44 | console.log(`${datasetId} labels:`); 45 | console.log(apiResponse.labels); 46 | } 47 | // [END bigquery_label_dataset] 48 | labelDataset(); 49 | } 50 | main(...process.argv.slice(2)); 51 | -------------------------------------------------------------------------------- /samples/labelTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_label_table] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function labelTable() { 24 | // Adds a label to an existing table. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = 'my_dataset'; 30 | // const tableId = 'my_table'; 31 | 32 | const dataset = bigquery.dataset(datasetId); 33 | const [table] = await dataset.table(tableId).get(); 34 | 35 | // Retrieve current table metadata 36 | const [metadata] = await table.getMetadata(); 37 | 38 | // Add label to table metadata 39 | metadata.labels = {color: 'green'}; 40 | const [apiResponse] = await table.setMetadata(metadata); 41 | 42 | console.log(`${tableId} labels:`); 43 | console.log(apiResponse.labels); 44 | } 45 | // [END bigquery_label_table] 46 | labelTable(); 47 | } 48 | main(...process.argv.slice(2)); 49 | -------------------------------------------------------------------------------- /samples/listDatasets.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(projectId) { 18 | // [START bigquery_list_datasets] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function listDatasets() { 24 | /** 25 | * TODO(developer): Uncomment the following lines before running the sample. 26 | */ 27 | // const projectId = "my_project_id"; 28 | 29 | // Lists all datasets in the specified project. 
30 | // If projectId is not specified, this method will take 31 | // the projectId from the authenticated BigQuery Client. 32 | const [datasets] = await bigquery.getDatasets({projectId}); 33 | console.log('Datasets:'); 34 | datasets.forEach(dataset => console.log(dataset.id)); 35 | } 36 | // [END bigquery_list_datasets] 37 | listDatasets(); 38 | } 39 | main(...process.argv.slice(2)); 40 | -------------------------------------------------------------------------------- /samples/listDatasetsByLabel.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_list_datasets_by_label] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function listDatasetsByLabel() { 24 | // Lists all datasets in current GCP project, filtering by label color:green. 
25 | 26 | const options = { 27 | filter: 'labels.color:green', 28 | }; 29 | // Lists all datasets in the specified project 30 | const [datasets] = await bigquery.getDatasets(options); 31 | 32 | console.log('Datasets:'); 33 | datasets.forEach(dataset => console.log(dataset.id)); 34 | } 35 | // [END bigquery_list_datasets_by_label] 36 | listDatasetsByLabel(); 37 | } 38 | main(...process.argv.slice(2)); 39 | -------------------------------------------------------------------------------- /samples/listJobs.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_list_jobs] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function listJobs() { 24 | // Lists all jobs in current GCP project. 25 | 26 | // List the 10 most recent jobs in reverse chronological order. 27 | // Omit the maxResults option to list jobs from the past 6 months.
28 | const options = {maxResults: 10}; 29 | const [jobs] = await bigquery.getJobs(options); 30 | 31 | console.log('Jobs:'); 32 | jobs.forEach(job => console.log(job.id)); 33 | } 34 | // [END bigquery_list_jobs] 35 | listJobs(); 36 | } 37 | main(...process.argv.slice(2)); 38 | -------------------------------------------------------------------------------- /samples/listModels.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery List Models 19 | // description: Lists all existing models in the dataset. 20 | // usage: node listModels.js 21 | 22 | function main(datasetId = 'my_dataset') { 23 | // [START bigquery_list_models] 24 | 25 | // Import the Google Cloud client library 26 | const {BigQuery} = require('@google-cloud/bigquery'); 27 | const bigquery = new BigQuery(); 28 | 29 | async function listModels() { 30 | // Lists all existing models in the dataset. 31 | 32 | /** 33 | * TODO(developer): Uncomment the following lines before running the sample. 
34 | */ 35 | // const datasetId = "my_dataset"; 36 | 37 | const dataset = bigquery.dataset(datasetId); 38 | 39 | dataset.getModels().then(data => { 40 | const models = data[0]; 41 | console.log('Models:'); 42 | models.forEach(model => console.log(model.metadata)); 43 | }); 44 | } 45 | // [END bigquery_list_models] 46 | listModels(); 47 | } 48 | 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/listModelsStreaming.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery List Models Streaming 19 | // description: Lists all existing models in the dataset using streaming method. 20 | // usage: node listModelsStreaming.js 21 | 22 | function main(datasetId = 'my_dataset') { 23 | // [START bigquery_list_models_streaming] 24 | 25 | // Import the Google Cloud client library 26 | const {BigQuery} = require('@google-cloud/bigquery'); 27 | const bigquery = new BigQuery(); 28 | 29 | async function listModels() { 30 | // Lists all existing models in the dataset using streaming method. 31 | 32 | /** 33 | * TODO(developer): Uncomment the following lines before running the sample. 
34 | */ 35 | // const datasetId = "my_dataset"; 36 | 37 | const dataset = bigquery.dataset(datasetId); 38 | 39 | dataset 40 | .getModelsStream() 41 | .on('error', console.error) 42 | .on('data', model => { 43 | console.log(model.metadata); 44 | }) 45 | .on('end', () => { 46 | console.log('All models have been retrieved.'); 47 | }); 48 | } 49 | // [END bigquery_list_models_streaming] 50 | listModels(); 51 | } 52 | 53 | main(...process.argv.slice(2)); 54 | -------------------------------------------------------------------------------- /samples/listRoutines.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | ) { 20 | // [START bigquery_list_routines] 21 | // Import the Google Cloud client library and create a client 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function listRoutines() { 26 | // Lists routines in "my_dataset". 27 | 28 | /** 29 | * TODO(developer): Uncomment the following lines before running the sample. 
30 | */ 31 | // const datasetId = 'my_dataset'; 32 | 33 | // List all routines in the dataset 34 | const [routines] = await bigquery.dataset(datasetId).getRoutines(); 35 | 36 | console.log('Routines:'); 37 | routines.forEach(routine => console.log(routine.id)); 38 | } 39 | listRoutines(); 40 | // [END bigquery_list_routines] 41 | } 42 | main(...process.argv.slice(2)); 43 | -------------------------------------------------------------------------------- /samples/listTables.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_list_tables] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function listTables() { 24 | // Lists tables in 'my_dataset'. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = 'my_dataset'; 30 | 31 | // List all tables in the dataset 32 | const [tables] = await bigquery.dataset(datasetId).getTables(); 33 | 34 | console.log('Tables:'); 35 | tables.forEach(table => console.log(table.id)); 36 | } 37 | // [END bigquery_list_tables] 38 | listTables(); 39 | } 40 | 41 | main(...process.argv.slice(2)); 42 | -------------------------------------------------------------------------------- /samples/loadLocalFile.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', 19 | tableId = 'my_table', 20 | filename = '/path/to/file.csv', 21 | ) { 22 | // [START bigquery_load_from_file] 23 | // Imports the Google Cloud client library 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function loadLocalFile() { 28 | // Imports a local file into a table. 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 
32 | */ 33 | // const filename = '/path/to/file.csv'; 34 | // const datasetId = 'my_dataset'; 35 | // const tableId = 'my_table'; 36 | 37 | // Load data from a local file into the table 38 | const [job] = await bigquery 39 | .dataset(datasetId) 40 | .table(tableId) 41 | .load(filename); 42 | 43 | console.log(`Job ${job.id} completed.`); 44 | } 45 | // [END bigquery_load_from_file] 46 | loadLocalFile(); 47 | } 48 | 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/loadTableGCSORC.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_load_table_gcs_orc] 19 | // Import the Google Cloud client libraries 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const {Storage} = require('@google-cloud/storage'); 22 | 23 | // Instantiate clients 24 | const bigquery = new BigQuery(); 25 | const storage = new Storage(); 26 | 27 | /** 28 | * This sample loads the ORC file at 29 | * https://storage.googleapis.com/cloud-samples-data/bigquery/us-states/us-states.orc 30 | * 31 | * TODO(developer): Replace the following lines with the path to your file. 
32 | */ 33 | const bucketName = 'cloud-samples-data'; 34 | const filename = 'bigquery/us-states/us-states.orc'; 35 | 36 | async function loadTableGCSORC() { 37 | // Imports a GCS file into a table with ORC source format. 38 | 39 | /** 40 | * TODO(developer): Uncomment the following line before running the sample. 41 | */ 42 | // const datasetId = 'my_dataset'; 43 | // const tableId = 'my_table' 44 | 45 | // Configure the load job. For full list of options, see: 46 | // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad 47 | const metadata = { 48 | sourceFormat: 'ORC', 49 | location: 'US', 50 | }; 51 | 52 | // Load data from a Google Cloud Storage file into the table 53 | const [job] = await bigquery 54 | .dataset(datasetId) 55 | .table(tableId) 56 | .load(storage.bucket(bucketName).file(filename), metadata); 57 | 58 | // load() waits for the job to finish 59 | console.log(`Job ${job.id} completed.`); 60 | } 61 | // [END bigquery_load_table_gcs_orc] 62 | loadTableGCSORC(); 63 | } 64 | main(...process.argv.slice(2)); 65 | -------------------------------------------------------------------------------- /samples/loadTableURIFirestore.js: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_load_table_uri_firestore] 19 | // Import the Google Cloud client libraries 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const {Storage} = require('@google-cloud/storage'); 22 | 23 | // Instantiate clients 24 | const bigquery = new BigQuery(); 25 | const storage = new Storage(); 26 | 27 | /** 28 | * TODO(developer): Replace the following lines with the path to your file. 29 | */ 30 | const bucketName = 'cloud-samples-data'; 31 | const filename = 32 | 'bigquery/us-states/2021-07-02T16:04:48_70344/all_namespaces/kind_us-states/all_namespaces_kind_us-states.export_metadata'; 33 | 34 | async function loadTableURIFirestore() { 35 | // Imports a GCS Firestore export file into a table. 36 | 37 | /** 38 | * TODO(developer): Uncomment the following lines before running the sample. 39 | */ 40 | // const datasetId = 'my_dataset'; 41 | // const tableId = 'my_table'; 42 | 43 | // Load data from a Google Cloud Storage file into the table 44 | const [job] = await bigquery 45 | .dataset(datasetId) 46 | .table(tableId) 47 | .load(storage.bucket(bucketName).file(filename)); 48 | 49 | // load() waits for the job to finish 50 | console.log(`Job ${job.id} completed.`); 51 | } 52 | // [END bigquery_load_table_uri_firestore] 53 | loadTableURIFirestore(); 54 | } 55 | main(...process.argv.slice(2)); 56 | -------------------------------------------------------------------------------- /samples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nodejs-docs-samples-bigquery", 3 | "files": [ 4 | "*.js", 5 | "resources/", 6 | "auth-user-sample/oauth2.keys.json" 7 | ], 8 | "private": true, 9 | "license": "Apache-2.0", 10 | "author": "Google LLC", 11 | "repository": "googleapis/nodejs-bigquery", 12 | "engines": { 13 | "node": ">=14.0.0" 14 | }, 15 | "scripts": { 16 | "test": "mocha --timeout 
200000", 17 | "fix": "gts fix" 18 | }, 19 | "dependencies": { 20 | "@google-cloud/bigquery": "^8.1.0", 21 | "@google-cloud/storage": "^7.0.0", 22 | "google-auth-library": "^9.6.0", 23 | "readline-promise": "^1.0.4", 24 | "yargs": "^17.0.0" 25 | }, 26 | "devDependencies": { 27 | "@google-cloud/datacatalog": "^4.0.0", 28 | "chai": "^4.2.0", 29 | "gts": "^5.0.0", 30 | "mocha": "^8.0.0", 31 | "proxyquire": "^2.1.3", 32 | "sinon": "^20.0.0", 33 | "uuid": "^9.0.0" 34 | } 35 | } -------------------------------------------------------------------------------- /samples/query.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query] 19 | // [START bigquery_client_default_credentials] 20 | // Import the Google Cloud client library using default credentials 21 | const {BigQuery} = require('@google-cloud/bigquery'); 22 | const bigquery = new BigQuery(); 23 | // [END bigquery_client_default_credentials] 24 | async function query() { 25 | // Queries the U.S. given names dataset for the state of Texas. 
26 | 27 | const query = `SELECT name 28 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 29 | WHERE state = 'TX' 30 | LIMIT 100`; 31 | 32 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 33 | const options = { 34 | query: query, 35 | // Location must match that of the dataset(s) referenced in the query. 36 | location: 'US', 37 | }; 38 | 39 | // Run the query as a job 40 | const [job] = await bigquery.createQueryJob(options); 41 | console.log(`Job ${job.id} started.`); 42 | 43 | // Wait for the query to finish 44 | const [rows] = await job.getQueryResults(); 45 | 46 | // Print the results 47 | console.log('Rows:'); 48 | rows.forEach(row => console.log(row)); 49 | } 50 | // [END bigquery_query] 51 | query(); 52 | } 53 | main(...process.argv.slice(2)); 54 | -------------------------------------------------------------------------------- /samples/queryBatch.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_batch] 19 | // Import the Google Cloud client library and create a client 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryBatch() { 24 | // Runs a query at batch priority. 
25 | 26 | // Create query job configuration. For all options, see 27 | // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery 28 | const queryJobConfig = { 29 | query: `SELECT corpus 30 | FROM \`bigquery-public-data.samples.shakespeare\` 31 | LIMIT 10`, 32 | useLegacySql: false, 33 | priority: 'BATCH', 34 | }; 35 | 36 | // Create job configuration. For all options, see 37 | // https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfiguration 38 | const jobConfig = { 39 | // Specify a job configuration to set optional job resource properties. 40 | configuration: { 41 | query: queryJobConfig, 42 | }, 43 | }; 44 | 45 | // Make API request. 46 | const [job] = await bigquery.createJob(jobConfig); 47 | 48 | const jobId = job.metadata.id; 49 | const state = job.metadata.status.state; 50 | console.log(`Job ${jobId} is currently in state ${state}`); 51 | } 52 | // [END bigquery_query_batch] 53 | queryBatch(); 54 | } 55 | main(...process.argv.slice(2)); 56 | -------------------------------------------------------------------------------- /samples/queryClusteredTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_query_clustered_table] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryClusteredTable() { 24 | // Queries a table that has a clustering specification. 25 | 26 | // Create destination table reference 27 | const dataset = bigquery.dataset(datasetId); 28 | const destinationTableId = dataset.table(tableId); 29 | 30 | const query = 'SELECT * FROM `bigquery-public-data.samples.shakespeare`'; 31 | const fields = ['corpus']; 32 | 33 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 34 | const options = { 35 | query: query, 36 | // Location must match that of the dataset(s) referenced in the query. 37 | location: 'US', 38 | destination: destinationTableId, 39 | clusterFields: fields, 40 | }; 41 | 42 | // Run the query as a job 43 | const [job] = await bigquery.createQueryJob(options); 44 | 45 | console.log(`Job ${job.id} started.`); 46 | 47 | // Print the status and statistics 48 | console.log('Status:'); 49 | console.log(job.metadata.status); 50 | console.log('\nJob Statistics:'); 51 | console.log(job.metadata.statistics); 52 | } 53 | // [END bigquery_query_clustered_table] 54 | queryClusteredTable(); 55 | } 56 | main(...process.argv.slice(2)); 57 | -------------------------------------------------------------------------------- /samples/queryDestinationTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_query_destination_table] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryDestinationTable() { 24 | // Queries the U.S. given names dataset for the state of Texas 25 | // and saves results to permanent table. 26 | 27 | /** 28 | * TODO(developer): Uncomment the following lines before running the sample. 29 | */ 30 | // const datasetId = 'my_dataset'; 31 | // const tableId = 'my_table'; 32 | 33 | // Create destination table reference 34 | const dataset = bigquery.dataset(datasetId); 35 | const destinationTable = dataset.table(tableId); 36 | 37 | const query = `SELECT name 38 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 39 | WHERE state = 'TX' 40 | LIMIT 100`; 41 | 42 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 43 | const options = { 44 | query: query, 45 | // Location must match that of the dataset(s) referenced in the query. 
46 | location: 'US', 47 | destination: destinationTable, 48 | }; 49 | 50 | // Run the query as a job 51 | const [job] = await bigquery.createQueryJob(options); 52 | 53 | console.log(`Job ${job.id} started.`); 54 | console.log(`Query results loaded to table ${destinationTable.id}`); 55 | } 56 | // [END bigquery_query_destination_table] 57 | queryDestinationTable(datasetId, tableId); 58 | } 59 | main(...process.argv.slice(2)); 60 | -------------------------------------------------------------------------------- /samples/queryDisableCache.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_no_cache] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | 22 | async function queryDisableCache() { 23 | // Queries the Shakespeare dataset with the cache disabled. 24 | 25 | // Create a client 26 | const bigquery = new BigQuery(); 27 | 28 | const query = `SELECT corpus 29 | FROM \`bigquery-public-data.samples.shakespeare\` 30 | GROUP BY corpus`; 31 | const options = { 32 | query: query, 33 | // Location must match that of the dataset(s) referenced in the query. 
34 | location: 'US', 35 | useQueryCache: false, 36 | }; 37 | 38 | // Run the query as a job 39 | const [job] = await bigquery.createQueryJob(options); 40 | console.log(`Job ${job.id} started.`); 41 | 42 | // Wait for the query to finish 43 | const [rows] = await job.getQueryResults(); 44 | 45 | // Print the results 46 | console.log('Rows:'); 47 | rows.forEach(row => console.log(row)); 48 | } 49 | // [END bigquery_query_no_cache] 50 | queryDisableCache(); 51 | } 52 | main(...process.argv.slice(2)); 53 | -------------------------------------------------------------------------------- /samples/queryDryRun.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_dry_run] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryDryRun() { 24 | // Runs a dry query of the U.S. given names dataset for the state of Texas. 
25 | 26 | const query = `SELECT name 27 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 28 | WHERE state = 'TX' 29 | LIMIT 100`; 30 | 31 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 32 | const options = { 33 | query: query, 34 | // Location must match that of the dataset(s) referenced in the query. 35 | location: 'US', 36 | dryRun: true, 37 | }; 38 | 39 | // Run the query as a job 40 | const [job] = await bigquery.createQueryJob(options); 41 | 42 | // Print the status and statistics 43 | console.log('Status:'); 44 | console.log(job.metadata.status); 45 | console.log('\nJob Statistics:'); 46 | console.log(job.metadata.statistics); 47 | } 48 | // [END bigquery_query_dry_run] 49 | queryDryRun(); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/queryExternalGCSTemp.js: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | schema = [ 19 | {name: 'name', type: 'STRING'}, 20 | {name: 'post_abbr', type: 'STRING'}, 21 | ], 22 | ) { 23 | // [START bigquery_query_external_gcs_temp] 24 | // Import the Google Cloud client library and create a client 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function queryExternalGCSTemp() { 29 | // Queries an external data source using a temporary table. 30 | 31 | const tableId = 'us_states'; 32 | 33 | // Configure the external data source 34 | const externalDataConfig = { 35 | sourceFormat: 'CSV', 36 | sourceUris: ['gs://cloud-samples-data/bigquery/us-states/us-states.csv'], 37 | // Optionally skip header row. 38 | csvOptions: {skipLeadingRows: 1}, 39 | schema: {fields: schema}, 40 | }; 41 | 42 | // Example query to find states starting with 'W' 43 | const query = `SELECT post_abbr 44 | FROM \`${tableId}\` 45 | WHERE name LIKE 'W%'`; 46 | 47 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 48 | const options = { 49 | query, 50 | tableDefinitions: {[tableId]: externalDataConfig}, 51 | }; 52 | 53 | // Run the query as a job 54 | const [job] = await bigquery.createQueryJob(options); 55 | console.log(`Job ${job.id} started.`); 56 | 57 | // Wait for the query to finish 58 | const [rows] = await job.getQueryResults(); 59 | 60 | // Print the results 61 | console.log('Rows:'); 62 | console.log(rows); 63 | } 64 | // [END bigquery_query_external_gcs_temp] 65 | queryExternalGCSTemp(); 66 | } 67 | main(...process.argv.slice(2)); 68 | -------------------------------------------------------------------------------- /samples/queryJobOptional.js: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_job_optional] 19 | // Demonstrates issuing a query that may be run in short query mode. 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery({ 24 | // default behavior is to create jobs when using the jobs.query API 25 | defaultJobCreationMode: 'JOB_CREATION_REQUIRED', 26 | }); 27 | 28 | async function queryJobOptional() { 29 | // SQL query to run. 30 | 31 | const sqlQuery = ` 32 | SELECT name, gender, SUM(number) AS total 33 | FROM bigquery-public-data.usa_names.usa_1910_2013 34 | GROUP BY name, gender 35 | ORDER BY total DESC 36 | LIMIT 10`; 37 | 38 | // Run the query 39 | const [rows, , res] = await bigquery.query({ 40 | query: sqlQuery, 41 | // Skip job creation to enable short mode. 42 | jobCreationMode: 'JOB_CREATION_OPTIONAL', 43 | }); 44 | 45 | if (!res.jobReference) { 46 | console.log(`Query was run in short mode. Query ID: ${res.queryId}`); 47 | } else { 48 | const jobRef = res.jobReference; 49 | const qualifiedId = `${jobRef.projectId}.${jobRef.location}.${jobRef.jobId}`; 50 | console.log( 51 | `Query was run with job state. 
Job ID: ${qualifiedId}, Query ID: ${res.queryId}`, 52 | ); 53 | } 54 | // Print the results 55 | console.log('Rows:'); 56 | rows.forEach(row => console.log(row)); 57 | } 58 | // [END bigquery_query_job_optional] 59 | queryJobOptional(); 60 | } 61 | main(...process.argv.slice(2)); 62 | -------------------------------------------------------------------------------- /samples/queryLegacy.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_legacy] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryLegacy() { 24 | // Queries the public Shakespeare samples dataset using legacy SQL syntax. 25 | 26 | const query = 27 | 'SELECT word FROM [bigquery-public-data:samples.shakespeare] LIMIT 10;'; 28 | 29 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 30 | const options = { 31 | query: query, 32 | // Location must match that of the dataset(s) referenced in the query. 
33 | location: 'US', 34 | useLegacySql: true, 35 | }; 36 | 37 | // Run the query as a job 38 | const [job] = await bigquery.createQueryJob(options); 39 | console.log(`Job ${job.id} started.`); 40 | 41 | // Wait for the query to finish 42 | const [rows] = await job.getQueryResults(); 43 | 44 | // Print the results 45 | console.log('Rows:'); 46 | rows.forEach(row => console.log(row)); 47 | } 48 | // [END bigquery_query_legacy] 49 | queryLegacy(); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/queryLegacyLargeResults.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', 19 | tableId = 'my_table', 20 | projectId = 'my_project', 21 | ) { 22 | // [START bigquery_query_legacy_large_results] 23 | // Import the Google Cloud client library 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function queryLegacyLargeResults() { 28 | // Query enables large result sets. 
29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample 32 | */ 33 | // const projectId = "my_project" 34 | // const datasetId = "my_dataset"; 35 | // const tableId = "my_table"; 36 | 37 | const query = 38 | 'SELECT word FROM [bigquery-public-data:samples.shakespeare] LIMIT 10;'; 39 | 40 | // For all options, see https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query 41 | const options = { 42 | query: query, 43 | // Location must match that of the dataset(s) referenced 44 | // in the query and of the destination table. 45 | useLegacySql: true, 46 | allowLargeResult: true, 47 | destinationTable: { 48 | projectId: projectId, 49 | datasetId: datasetId, 50 | tableId: tableId, 51 | }, 52 | }; 53 | 54 | const [job] = await bigquery.createQueryJob(options); 55 | console.log(`Job ${job.id} started.`); 56 | 57 | // Wait for the query to finish 58 | const [rows] = await job.getQueryResults(); 59 | 60 | // Print the results 61 | console.log('Rows:'); 62 | rows.forEach(row => console.log(row)); 63 | } 64 | // [END bigquery_query_legacy_large_results] 65 | queryLegacyLargeResults(); 66 | } 67 | main(...process.argv.slice(2)); 68 | -------------------------------------------------------------------------------- /samples/queryPagination.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_pagination] 19 | // Import the Google Cloud client library using default credentials 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryPagination() { 24 | // Run a query and get rows using automatic pagination. 25 | 26 | const query = `SELECT name, SUM(number) as total_people 27 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 28 | GROUP BY name 29 | ORDER BY total_people DESC 30 | LIMIT 100`; 31 | 32 | // Run the query as a job. 33 | const [job] = await bigquery.createQueryJob(query); 34 | 35 | // Wait for job to complete and get rows. 36 | // The client library automatically handles pagination. 37 | // See more info on how to configure paging calls at: 38 | // * https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md#auto-pagination 39 | // * https://cloud.google.com/bigquery/docs/paging-results#iterate_through_client_libraries_results 40 | const [rows] = await job.getQueryResults(); 41 | 42 | console.log('Query results:'); 43 | rows.forEach(row => { 44 | console.log(`name: ${row.name}, ${row.total_people} total people`); 45 | }); 46 | } 47 | queryPagination(); 48 | // [END bigquery_query_pagination] 49 | } 50 | main(...process.argv.slice(2)); 51 | -------------------------------------------------------------------------------- /samples/queryParamsArrays.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_arrays] 19 | // Run a query using array query parameters 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function queryParamsArrays() { 26 | // The SQL query to run 27 | const sqlQuery = `SELECT name, sum(number) as count 28 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 29 | WHERE gender = @gender 30 | AND state IN UNNEST(@states) 31 | GROUP BY name 32 | ORDER BY count DESC 33 | LIMIT 10;`; 34 | 35 | const options = { 36 | query: sqlQuery, 37 | // Location must match that of the dataset(s) referenced in the query. 38 | location: 'US', 39 | params: {gender: 'M', states: ['WA', 'WI', 'WV', 'WY']}, 40 | }; 41 | 42 | // Run the query 43 | const [rows] = await bigquery.query(options); 44 | 45 | console.log('Rows:'); 46 | rows.forEach(row => console.log(row)); 47 | } 48 | // [END bigquery_query_params_arrays] 49 | queryParamsArrays(); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/queryParamsNamed.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_named] 19 | // Run a query using named query parameters 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function queryParamsNamed() { 26 | // The SQL query to run 27 | const sqlQuery = `SELECT word, word_count 28 | FROM \`bigquery-public-data.samples.shakespeare\` 29 | WHERE corpus = @corpus 30 | AND word_count >= @min_word_count 31 | ORDER BY word_count DESC`; 32 | 33 | const options = { 34 | query: sqlQuery, 35 | // Location must match that of the dataset(s) referenced in the query. 36 | location: 'US', 37 | params: {corpus: 'romeoandjuliet', min_word_count: 250}, 38 | }; 39 | 40 | // Run the query 41 | const [rows] = await bigquery.query(options); 42 | 43 | console.log('Rows:'); 44 | rows.forEach(row => console.log(row)); 45 | } 46 | // [END bigquery_query_params_named] 47 | queryParamsNamed(); 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/queryParamsNamedTypes.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_named_types] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryParamsNamedTypes() { 24 | // Run a query using named query parameters and provided parameter types. 25 | 26 | // The SQL query to run 27 | const sqlQuery = `SELECT word, word_count 28 | FROM \`bigquery-public-data.samples.shakespeare\` 29 | WHERE word IN UNNEST(@wordList) 30 | AND corpus = @corpus 31 | AND word_count >= @minWordCount 32 | ORDER BY word_count DESC`; 33 | 34 | const queryOptions = { 35 | query: sqlQuery, 36 | params: { 37 | wordList: ['and', 'is', 'the', 'moon'], 38 | corpus: 'romeoandjuliet', 39 | minWordCount: 250, 40 | }, 41 | types: {wordList: ['STRING'], corpus: 'STRING', minWordCount: 'INT64'}, 42 | }; 43 | 44 | // Run the query 45 | const [rows] = await bigquery.query(queryOptions); 46 | 47 | console.log('Rows:'); 48 | rows.forEach(row => console.log(row)); 49 | } 50 | // [END bigquery_query_params_named_types] 51 | queryParamsNamedTypes(); 52 | } 53 | main(...process.argv.slice(2)); 54 | -------------------------------------------------------------------------------- /samples/queryParamsPositional.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file 
except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_positional] 19 | // Run a query using positional query parameters 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function queryParamsPositional() { 26 | // The SQL query to run 27 | const sqlQuery = `SELECT word, word_count 28 | FROM \`bigquery-public-data.samples.shakespeare\` 29 | WHERE corpus = ? 30 | AND word_count >= ? 31 | ORDER BY word_count DESC`; 32 | 33 | const options = { 34 | query: sqlQuery, 35 | // Location must match that of the dataset(s) referenced in the query. 36 | location: 'US', 37 | params: ['romeoandjuliet', 250], 38 | }; 39 | 40 | // Run the query 41 | const [rows] = await bigquery.query(options); 42 | 43 | console.log('Rows:'); 44 | rows.forEach(row => console.log(row)); 45 | } 46 | // [END bigquery_query_params_positional] 47 | queryParamsPositional(); 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/queryParamsPositionalTypes.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_positional_types] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function queryParamsPositionalTypes() { 24 | // Run a query using positional query parameters and provided parameter types. 25 | 26 | // The SQL query to run 27 | const sqlQuery = `SELECT word, word_count 28 | FROM \`bigquery-public-data.samples.shakespeare\` 29 | WHERE word IN UNNEST(?) 30 | AND corpus = ? 31 | AND word_count >= ? 32 | ORDER BY word_count DESC`; 33 | 34 | const queryOptions = { 35 | query: sqlQuery, 36 | params: [['and', 'is', 'the', 'moon'], 'romeoandjuliet', 250], 37 | types: [['STRING'], 'STRING', 'INT64'], 38 | }; 39 | 40 | // Run the query 41 | const [rows] = await bigquery.query(queryOptions); 42 | 43 | console.log('Rows:'); 44 | rows.forEach(row => console.log(row)); 45 | } 46 | // [END bigquery_query_params_positional_types] 47 | queryParamsPositionalTypes(); 48 | } 49 | main(...process.argv.slice(2)); 50 | -------------------------------------------------------------------------------- /samples/queryParamsStructs.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_structs] 19 | // Run a query using struct query parameters 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function queryParamsStructs() { 26 | // The SQL query to run 27 | const sqlQuery = 'SELECT @struct_value AS struct_obj;'; 28 | 29 | const options = { 30 | query: sqlQuery, 31 | // Location must match that of the dataset(s) referenced in the query. 32 | location: 'US', 33 | params: {struct_value: {x: 1, y: 'foo'}}, 34 | }; 35 | 36 | // Run the query 37 | const [rows] = await bigquery.query(options); 38 | 39 | console.log('Rows:'); 40 | rows.forEach(row => console.log(row.struct_obj.y)); 41 | } 42 | // [END bigquery_query_params_structs] 43 | queryParamsStructs(); 44 | } 45 | main(...process.argv.slice(2)); 46 | -------------------------------------------------------------------------------- /samples/queryParamsTimestamps.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_query_params_timestamps] 19 | // Run a query using timestamp parameters 20 | 21 | // Import the Google Cloud client library 22 | const {BigQuery} = require('@google-cloud/bigquery'); 23 | const bigquery = new BigQuery(); 24 | 25 | async function queryParamsTimestamps() { 26 | // The SQL query to run 27 | const sqlQuery = 'SELECT TIMESTAMP_ADD(@ts_value, INTERVAL 1 HOUR);'; 28 | 29 | const options = { 30 | query: sqlQuery, 31 | // Location must match that of the dataset(s) referenced in the query. 32 | location: 'US', 33 | params: {ts_value: new Date()}, 34 | }; 35 | 36 | // Run the query 37 | const [rows] = await bigquery.query(options); 38 | 39 | console.log('Rows:'); 40 | rows.forEach(row => console.log(row.f0_)); 41 | } 42 | // [END bigquery_query_params_timestamps] 43 | queryParamsTimestamps(); 44 | } 45 | main(...process.argv.slice(2)); 46 | -------------------------------------------------------------------------------- /samples/queryStackOverflow.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_simple_app_all] 19 | // [START bigquery_simple_app_deps] 20 | // Import the Google Cloud client library 21 | const {BigQuery} = require('@google-cloud/bigquery'); 22 | // [END bigquery_simple_app_deps] 23 | 24 | async function queryStackOverflow() { 25 | // Queries a public Stack Overflow dataset. 26 | 27 | // [START bigquery_simple_app_client] 28 | // Create a client 29 | const bigqueryClient = new BigQuery(); 30 | // [END bigquery_simple_app_client] 31 | 32 | // [START bigquery_simple_app_query] 33 | // The SQL query to run 34 | const sqlQuery = `SELECT 35 | CONCAT( 36 | 'https://stackoverflow.com/questions/', 37 | CAST(id as STRING)) as url, 38 | view_count 39 | FROM \`bigquery-public-data.stackoverflow.posts_questions\` 40 | WHERE tags like '%google-bigquery%' 41 | ORDER BY view_count DESC 42 | LIMIT 10`; 43 | 44 | const options = { 45 | query: sqlQuery, 46 | // Location must match that of the dataset(s) referenced in the query. 
47 | location: 'US', 48 | }; 49 | 50 | // Run the query 51 | const [rows] = await bigqueryClient.query(options); 52 | // [END bigquery_simple_app_query] 53 | 54 | // [START bigquery_simple_app_print] 55 | console.log('Query Results:'); 56 | rows.forEach(row => { 57 | const url = row['url']; 58 | const viewCount = row['view_count']; 59 | console.log(`url: ${url}, ${viewCount} views`); 60 | }); 61 | // [END bigquery_simple_app_print] 62 | } 63 | queryStackOverflow(); 64 | // [END bigquery_simple_app_all] 65 | } 66 | 67 | main(...process.argv.slice(2)); 68 | -------------------------------------------------------------------------------- /samples/quickstart.js: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetName = 'my_new_dataset') { 18 | // [START bigquery_quickstart] 19 | // Imports the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | 22 | async function createDataset() { 23 | // Creates a client 24 | const bigqueryClient = new BigQuery(); 25 | 26 | // Create the dataset 27 | const [dataset] = await bigqueryClient.createDataset(datasetName); 28 | console.log(`Dataset ${dataset.id} created.`); 29 | } 30 | createDataset(); 31 | // [END bigquery_quickstart] 32 | } 33 | 34 | const args = process.argv.slice(2); 35 | main(...args); 36 | -------------------------------------------------------------------------------- /samples/relaxColumn.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | tableId = 'my_new_table', // Table to be created 20 | ) { 21 | // [START bigquery_relax_column] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function relaxColumn() { 27 | /** 28 | * Changes columns from required to nullable. 
29 | * Assumes existing table with the following schema: 30 | * [{name: 'Name', type: 'STRING', mode: 'REQUIRED'}, 31 | * {name: 'Age', type: 'INTEGER'}, 32 | * {name: 'Weight', type: 'FLOAT'}, 33 | * {name: 'IsMagic', type: 'BOOLEAN'}]; 34 | */ 35 | 36 | /** 37 | * TODO(developer): Uncomment the following lines before running the sample. 38 | */ 39 | // const datasetId = 'my_dataset'; 40 | // const tableId = 'my_table'; 41 | 42 | const newSchema = [ 43 | {name: 'Name', type: 'STRING', mode: 'NULLABLE'}, 44 | {name: 'Age', type: 'INTEGER'}, 45 | {name: 'Weight', type: 'FLOAT'}, 46 | {name: 'IsMagic', type: 'BOOLEAN'}, 47 | ]; 48 | 49 | // Retrieve current table metadata 50 | const table = bigquery.dataset(datasetId).table(tableId); 51 | const [metadata] = await table.getMetadata(); 52 | 53 | // Update schema 54 | metadata.schema = newSchema; 55 | const [apiResponse] = await table.setMetadata(metadata); 56 | 57 | console.log(apiResponse.schema.fields); 58 | } 59 | // [END bigquery_relax_column] 60 | relaxColumn(); 61 | } 62 | main(...process.argv.slice(2)); 63 | -------------------------------------------------------------------------------- /samples/removeTableClustering.js: -------------------------------------------------------------------------------- 1 | // Copyright 2021 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_remove_table_clustering] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function removeTableClustering() { 24 | // Removes clustering from a new clustered table named "my_table" in "my_dataset". 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const tableId = "my_table"; 31 | const schema = 'name:string, city:string, zipcode:integer'; 32 | 33 | // For all options, see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource 34 | const options = { 35 | schema: schema, 36 | clustering: { 37 | fields: ['city', 'zipcode'], 38 | }, 39 | }; 40 | 41 | // Create a new table in the dataset with clustering. 42 | const [table] = await bigquery 43 | .dataset(datasetId) 44 | .createTable(tableId, options); 45 | console.log(`Table ${tableId} created with clustering.`); 46 | 47 | // Remove clustering from table. 
48 | const metadata = table.metadata; 49 | metadata.clustering = null; 50 | const [apiResponse] = await table.setMetadata(metadata); 51 | console.log(`Table ${tableId} updated clustering:`); 52 | console.log(apiResponse.clustering); 53 | } 54 | // [END bigquery_remove_table_clustering] 55 | removeTableClustering(datasetId, tableId); 56 | } 57 | main(...process.argv.slice(2)); 58 | -------------------------------------------------------------------------------- /samples/resources/avro-schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "characters", 4 | "namespace": "com.example.avro.characters", 5 | "fields": [ 6 | {"type":"string","name":"Name"}, 7 | {"type":"long","name":"Age"}, 8 | {"type":"double","name":"Weight"}, 9 | {"type":"boolean","name":"IsMagic"} 10 | ], 11 | "doc": "A list of fantasy characters." 12 | } -------------------------------------------------------------------------------- /samples/resources/data.avro: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/nodejs-bigquery/9729d1e4494fc266b6effbe204b6ff1996511abc/samples/resources/data.avro -------------------------------------------------------------------------------- /samples/resources/data.csv: -------------------------------------------------------------------------------- 1 | Gandalf,2000,140.0,TRUE -------------------------------------------------------------------------------- /samples/resources/data.json: -------------------------------------------------------------------------------- 1 | {"Name":"Gandalf","Age":2000,"Weight":140.0,"IsMagic":true} -------------------------------------------------------------------------------- /samples/resources/partialdata.csv: -------------------------------------------------------------------------------- 1 | 2000,140.0,TRUE 
-------------------------------------------------------------------------------- /samples/resources/schema.json: -------------------------------------------------------------------------------- 1 | [{"type":"STRING","name":"Name"},{"type":"INTEGER","name":"Age"},{"type":"FLOAT","name":"Weight"},{"type":"BOOLEAN","name":"IsMagic"}] -------------------------------------------------------------------------------- /samples/resources/streamrows.json: -------------------------------------------------------------------------------- 1 | [ 2 | {"Name":"test","Age":0,"Weight":100.0,"IsMagic":false}, 3 | {"Name":"test","Age":1,"Weight":100.0,"IsMagic":false}, 4 | {"Name":"test","Age":2,"Weight":100.0,"IsMagic":false}, 5 | {"Name":"test","Age":3,"Weight":100.0,"IsMagic":false}, 6 | {"Name":"test","Age":0,"Weight":100.0,"IsMagic":false} 7 | ] -------------------------------------------------------------------------------- /samples/setClientEndpoint.js: -------------------------------------------------------------------------------- 1 | // Copyright 2023 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(region = 'my-region') { 18 | // [START bigquery_set_client_endpoint] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | 22 | function setClientEndpoint() { 23 | // Create a bigquery client pointing to a specific endpoint 24 | 25 | /** 26 | * TODO(developer): Uncomment the following line of code and fill in your region before running the sample. 27 | */ 28 | // const region = 'my-region'; 29 | 30 | const bigquery = new BigQuery({ 31 | apiEndpoint: `${region}-bigquery.googleapis.com`, 32 | }); 33 | 34 | console.log('API Endpoint:'); 35 | console.log(bigquery.apiEndpoint); 36 | } 37 | // [END bigquery_set_client_endpoint] 38 | setClientEndpoint(); 39 | } 40 | main(...process.argv.slice(2)); 41 | -------------------------------------------------------------------------------- /samples/setUserAgent.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main() { 18 | // [START bigquery_set_user_agent] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | // Create a client and set the user agent 22 | const bigquery = new BigQuery({userAgent: 'my-user-agent'}); 23 | 24 | console.log('User agent:'); 25 | console.log(bigquery.providedUserAgent); 26 | // [END bigquery_set_user_agent] 27 | } 28 | main(...process.argv.slice(2)); 29 | -------------------------------------------------------------------------------- /samples/tableExists.js: -------------------------------------------------------------------------------- 1 | // Copyright 2022 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_table_exists] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function tableExists() { 24 | // Checks whether table named "my_table" in "my_dataset" exists. 
25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample 28 | */ 29 | // const datasetId = "my_dataset"; 30 | // const tableId = "my_table"; 31 | 32 | // Retrieve table reference 33 | const dataset = bigquery.dataset(datasetId); 34 | 35 | try { 36 | await dataset.table(tableId).get(); 37 | console.log(`Table ${tableId} exists.`); 38 | } catch (e) { 39 | console.log(e.message); 40 | } 41 | } 42 | tableExists(); 43 | // [END bigquery_table_exists] 44 | } 45 | main(...process.argv.slice(2)); 46 | -------------------------------------------------------------------------------- /samples/test/clients.test.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | const {assert} = require('chai'); 18 | const {describe, it} = require('mocha'); 19 | const cp = require('child_process'); 20 | 21 | const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); 22 | 23 | describe('Client', () => { 24 | it('should should set providedUserAgent', async () => { 25 | const output = execSync('node setUserAgent.js'); 26 | assert.match(output, /User agent:/); 27 | assert.match(output, /my-user-agent/); 28 | }); 29 | it('should should set client endpoint', async () => { 30 | let output = execSync('node setClientEndpoint.js us-east4'); 31 | assert.match(output, /API Endpoint:/); 32 | assert.match(output, /https:\/\/us-east4-bigquery.googleapis.com/); 33 | 34 | output = execSync('node setClientEndpoint.js eu'); 35 | assert.match(output, /API Endpoint:/); 36 | assert.match(output, /https:\/\/eu-bigquery.googleapis.com/); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /samples/test/jobs.test.js: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | const {BigQuery} = require('@google-cloud/bigquery'); 18 | const {assert} = require('chai'); 19 | const {describe, it, before, beforeEach} = require('mocha'); 20 | const cp = require('child_process'); 21 | 22 | const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); 23 | 24 | const bigquery = new BigQuery(); 25 | let jobId; 26 | 27 | describe('Jobs', () => { 28 | before(async () => { 29 | const query = `SELECT name 30 | FROM \`bigquery-public-data.usa_names.usa_1910_2013\` 31 | WHERE state = 'TX' 32 | LIMIT 100`; 33 | 34 | const queryOptions = { 35 | query: query, 36 | }; 37 | beforeEach(async function () { 38 | this.currentTest.retries(2); 39 | }); 40 | 41 | const [job] = await bigquery.createQueryJob(queryOptions); 42 | jobId = job.metadata.jobReference.jobId; 43 | }); 44 | 45 | it('should list jobs', async () => { 46 | const output = execSync('node listJobs.js'); 47 | assert.match(output, /Jobs:/); 48 | assert.include(output, jobId); 49 | }); 50 | 51 | it('should retrieve a job', async () => { 52 | const output = execSync(`node getJob.js ${jobId}`); 53 | assert.include(output, `jobId: '${jobId}'`); 54 | }); 55 | 56 | it('should attempt to cancel a job', async () => { 57 | const output = execSync(`node cancelJob.js ${jobId}`); 58 | assert.include(output, 'state:'); 59 | }); 60 | 61 | it('should create a job', async () => { 62 | const output = execSync('node createJob.js'); 63 | assert.include(output, 'Rows:'); 64 | }); 65 | 66 | it('should auto-paginate through query result rows', async () => { 67 | const output = execSync('node queryPagination.js'); 68 | assert.match(output, /name/); 69 | assert.match(output, /total people/); 70 | }); 71 | }); 72 | -------------------------------------------------------------------------------- /samples/test/quickstart.test.js: -------------------------------------------------------------------------------- 1 | // Copyright 2017 Google LLC 2 | // 3 | // Licensed under the Apache License, 
Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | const {assert} = require('chai'); 18 | const {describe, it, after, beforeEach} = require('mocha'); 19 | const {randomUUID} = require('crypto'); 20 | const cp = require('child_process'); 21 | const {BigQuery} = require('@google-cloud/bigquery'); 22 | 23 | const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); 24 | 25 | const bigquery = new BigQuery(); 26 | 27 | describe('Quickstart', () => { 28 | const datasetName = `nodejs_samples_tests_quickstart_${randomUUID()}`.replace( 29 | /-/gi, 30 | '_', 31 | ); 32 | beforeEach(async function () { 33 | this.currentTest.retries(2); 34 | }); 35 | after(async () => { 36 | await bigquery.dataset(datasetName).delete({force: true}); 37 | }); 38 | 39 | it('quickstart should create a dataset', async () => { 40 | const output = execSync(`node quickstart ${datasetName}`); 41 | assert.include(output, `Dataset ${datasetName} created.`); 42 | }); 43 | }); 44 | -------------------------------------------------------------------------------- /samples/undeleteTable.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Dataset 19 | tableId = 'my_table_to_undelete', // Table to recover 20 | recoveredTableId = 'my_recovered_table', // Recovered table 21 | ) { 22 | // [START bigquery_undelete_table] 23 | // Import the Google Cloud client library 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function undeleteTable() { 28 | // Undeletes "my_table_to_undelete" from "my_dataset". 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 32 | */ 33 | // const datasetId = "my_dataset"; 34 | // const tableId = "my_table_to_undelete"; 35 | // const recoveredTableId = "my_recovered_table"; 36 | 37 | /** 38 | * TODO(developer): Choose an appropriate snapshot point as epoch milliseconds. 39 | * For this example, we choose the current time as we're about to delete the 40 | * table immediately afterwards. 41 | */ 42 | const snapshotEpoch = Date.now(); 43 | 44 | // Delete the table 45 | await bigquery.dataset(datasetId).table(tableId).delete(); 46 | 47 | console.log(`Table ${tableId} deleted.`); 48 | 49 | // Construct the restore-from table ID using a snapshot decorator. 50 | const snapshotTableId = `${tableId}@${snapshotEpoch}`; 51 | 52 | // Construct and run a copy job. 
53 | await bigquery 54 | .dataset(datasetId) 55 | .table(snapshotTableId) 56 | .copy(bigquery.dataset(datasetId).table(recoveredTableId)); 57 | 58 | console.log( 59 | `Copied data from deleted table ${tableId} to ${recoveredTableId}`, 60 | ); 61 | } 62 | // [END bigquery_undelete_table] 63 | undeleteTable(); 64 | } 65 | 66 | main(...process.argv.slice(2)); 67 | -------------------------------------------------------------------------------- /samples/updateDatasetAccess.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_update_dataset_access] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function updateDatasetAccess() { 24 | // Update a datasets's access controls. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = "my_dataset"; 30 | 31 | // Create new role metadata 32 | const newRole = { 33 | role: 'READER', 34 | entity_type: 'userByEmail', 35 | userByEmail: 'sample.bigquery.dev@gmail.com', 36 | }; 37 | 38 | // Retreive current dataset metadata 39 | const dataset = bigquery.dataset(datasetId); 40 | const [metadata] = await dataset.getMetadata(); 41 | 42 | // Add new role to role acess array 43 | metadata.access.push(newRole); 44 | const [apiResponse] = await dataset.setMetadata(metadata); 45 | const newAccessRoles = apiResponse.access; 46 | newAccessRoles.forEach(role => console.log(role)); 47 | } 48 | // [END bigquery_update_dataset_access] 49 | updateDatasetAccess(); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/updateDatasetDescription.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_update_dataset_description] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function updateDatasetDescription() { 24 | // Updates a dataset's description. 
25 | 26 | // Retreive current dataset metadata 27 | const dataset = bigquery.dataset(datasetId); 28 | const [metadata] = await dataset.getMetadata(); 29 | 30 | // Set new dataset description 31 | const description = 'New dataset description.'; 32 | metadata.description = description; 33 | 34 | const [apiResponse] = await dataset.setMetadata(metadata); 35 | const newDescription = apiResponse.description; 36 | 37 | console.log(`${datasetId} description: ${newDescription}`); 38 | } 39 | // [END bigquery_update_dataset_description] 40 | updateDatasetDescription(); 41 | } 42 | main(...process.argv.slice(2)); 43 | -------------------------------------------------------------------------------- /samples/updateDatasetExpiration.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset') { 18 | // [START bigquery_update_dataset_expiration] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function updateDatasetExpiration() { 24 | // Updates the lifetime of all tables in the dataset, in milliseconds. 25 | 26 | /** 27 | * TODO(developer): Uncomment the following lines before running the sample. 
28 | */ 29 | // const datasetId = "my_dataset"; 30 | 31 | // Retreive current dataset metadata 32 | const dataset = bigquery.dataset(datasetId); 33 | const [metadata] = await dataset.getMetadata(); 34 | 35 | // Set new dataset metadata 36 | const expirationTime = 24 * 60 * 60 * 1000; 37 | metadata.defaultTableExpirationMs = expirationTime.toString(); 38 | 39 | const [apiResponse] = await dataset.setMetadata(metadata); 40 | const newExpirationTime = apiResponse.defaultTableExpirationMs; 41 | 42 | console.log(`${datasetId} expiration: ${newExpirationTime}`); 43 | } 44 | // [END bigquery_update_dataset_expiration] 45 | updateDatasetExpiration(); 46 | } 47 | main(...process.argv.slice(2)); 48 | -------------------------------------------------------------------------------- /samples/updateModel.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | // sample-metadata: 18 | // title: BigQuery Update Model 19 | // description: Updates a model's metadata. 
20 | // usage: node updateModel.js 21 | 22 | function main(datasetId = 'my_datset', modelId = 'my_model') { 23 | // [START bigquery_update_model_description] 24 | // Import the Google Cloud client library 25 | const {BigQuery} = require('@google-cloud/bigquery'); 26 | const bigquery = new BigQuery(); 27 | 28 | async function updateModel() { 29 | // Updates a model's metadata. 30 | 31 | /** 32 | * TODO(developer): Uncomment the following lines before running the sample 33 | */ 34 | // const datasetId = "my_dataset"; 35 | // const modelId = "my__model"; 36 | 37 | const metadata = { 38 | description: 'A really great model.', 39 | }; 40 | 41 | const dataset = bigquery.dataset(datasetId); 42 | const [apiResponse] = await dataset.model(modelId).setMetadata(metadata); 43 | const newDescription = apiResponse.description; 44 | 45 | console.log(`${modelId} description: ${newDescription}`); 46 | } 47 | // [END bigquery_update_model_description] 48 | updateModel(); 49 | } 50 | main(...process.argv.slice(2)); 51 | -------------------------------------------------------------------------------- /samples/updateRoutine.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | routineId = 'my_routine', // Existing routine 20 | ) { 21 | // [START bigquery_update_routine] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function updateRoutine() { 27 | // Updates a routine named "my_routine" in "my_dataset". 28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample. 31 | */ 32 | // const datasetId = 'my_dataset'; 33 | // const routineId = 'my_routine'; 34 | 35 | const updates = { 36 | description: 'New description', 37 | }; 38 | 39 | const dataset = bigquery.dataset(datasetId); 40 | 41 | // Create routine reference 42 | let routine = dataset.routine(routineId); 43 | 44 | // Make API call 45 | [routine] = await routine.setMetadata(updates); 46 | 47 | console.log(`Routine description: ${routine.description}`); 48 | } 49 | updateRoutine(); 50 | // [END bigquery_update_routine] 51 | } 52 | main(...process.argv.slice(2)); 53 | -------------------------------------------------------------------------------- /samples/updateTableDescription.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main(datasetId = 'my_dataset', tableId = 'my_table') { 18 | // [START bigquery_update_table_description] 19 | // Import the Google Cloud client library 20 | const {BigQuery} = require('@google-cloud/bigquery'); 21 | const bigquery = new BigQuery(); 22 | 23 | async function updateTableDescription() { 24 | // Updates a table's description. 25 | 26 | // Retreive current table metadata 27 | const table = bigquery.dataset(datasetId).table(tableId); 28 | const [metadata] = await table.getMetadata(); 29 | 30 | // Set new table description 31 | const description = 'New table description.'; 32 | metadata.description = description; 33 | const [apiResponse] = await table.setMetadata(metadata); 34 | const newDescription = apiResponse.description; 35 | 36 | console.log(`${tableId} description: ${newDescription}`); 37 | } 38 | // [END bigquery_update_table_description] 39 | updateTableDescription(); 40 | } 41 | main(...process.argv.slice(2)); 42 | -------------------------------------------------------------------------------- /samples/updateTableExpiration.js: -------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset 19 | tableId = 'my_table', // Existing table 20 | expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5, // 5 days from current time in ms 21 | ) { 22 | // [START bigquery_update_table_expiration] 23 | // Import the Google Cloud client library 24 | const {BigQuery} = require('@google-cloud/bigquery'); 25 | const bigquery = new BigQuery(); 26 | 27 | async function updateTableExpiration() { 28 | // Updates a table's expiration. 29 | 30 | /** 31 | * TODO(developer): Uncomment the following lines before running the sample. 32 | */ 33 | // const datasetId = 'my_dataset', // Existing dataset 34 | // const tableId = 'my_table', // Existing table 35 | // const expirationTime = Date.now() + 1000 * 60 * 60 * 24 * 5 // 5 days from current time in ms 36 | 37 | // Retreive current table metadata 38 | const table = bigquery.dataset(datasetId).table(tableId); 39 | const [metadata] = await table.getMetadata(); 40 | 41 | // Set new table expiration to 5 days from current time 42 | metadata.expirationTime = expirationTime.toString(); 43 | const [apiResponse] = await table.setMetadata(metadata); 44 | 45 | const newExpirationTime = apiResponse.expirationTime; 46 | console.log(`${tableId} expiration: ${newExpirationTime}`); 47 | } 48 | // [END bigquery_update_table_expiration] 49 | updateTableExpiration(); 50 | } 51 | main(...process.argv.slice(2)); 52 | -------------------------------------------------------------------------------- /samples/updateViewQuery.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | function main( 18 | datasetId = 'my_dataset', // Existing dataset ID 19 | tableId = 'my_existing_view', // Existing view ID 20 | ) { 21 | // [START bigquery_update_view_query] 22 | // Import the Google Cloud client library and create a client 23 | const {BigQuery} = require('@google-cloud/bigquery'); 24 | const bigquery = new BigQuery(); 25 | 26 | async function updateViewQuery() { 27 | // Updates a view named "my_existing_view" in "my_dataset". 28 | 29 | /** 30 | * TODO(developer): Uncomment the following lines before running the sample. 31 | */ 32 | // const datasetId = "my_existing_dataset" 33 | // const tableId = "my_existing_table" 34 | const dataset = await bigquery.dataset(datasetId); 35 | 36 | // This example updates a view into the USA names dataset to include state. 
37 | const newViewQuery = `SELECT name, state 38 | FROM \`bigquery-public-data.usa_names.usa_1910_current\` 39 | LIMIT 10`; 40 | 41 | // Retrieve existing view 42 | const [view] = await dataset.table(tableId).get(); 43 | 44 | // Retrieve existing view metadata 45 | const [metadata] = await view.getMetadata(); 46 | 47 | // Update view query 48 | metadata.view = newViewQuery; 49 | 50 | // Set metadata 51 | await view.setMetadata(metadata); 52 | 53 | console.log(`View ${tableId} updated.`); 54 | } 55 | // [END bigquery_update_view_query] 56 | updateViewQuery(); 57 | } 58 | main(...process.argv.slice(2)); 59 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | import * as util from 'util'; 16 | 17 | /*! The external function used to emit logs. */ 18 | let logFunction: ((msg: string) => void) | null = null; 19 | 20 | /** 21 | * Log function to use for debug output. By default, we don't perform any 22 | * logging. 
23 | * 24 | * @private 25 | * @internal 26 | */ 27 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 28 | export function logger(source: string, msg: string, ...otherArgs: any[]) { 29 | if (logFunction) { 30 | const time = new Date().toISOString(); 31 | const formattedMsg = util.format( 32 | `D ${time} | ${source} | ${msg} |`, 33 | ...otherArgs, 34 | ); 35 | logFunction(formattedMsg); 36 | } 37 | } 38 | 39 | /** 40 | * Sets or disables the log function for all active BigQuery instances. 41 | * 42 | * @param logger A log function that takes a message (such as `console.log`) or 43 | * `null` to turn off logging. 44 | */ 45 | export function setLogFunction(logger: ((msg: string) => void) | null): void { 46 | logFunction = logger; 47 | } 48 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | // Copyright 2025 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | /** 16 | * Convert a value to an array. 
Replacement to arrify 17 | * @internal 18 | */ 19 | export function toArray(value: any) { 20 | if (value === null || value === undefined) { 21 | return []; 22 | } 23 | 24 | if (Array.isArray(value)) { 25 | return value; 26 | } 27 | 28 | if (typeof value === 'string') { 29 | return [value]; 30 | } 31 | 32 | if (typeof value[Symbol.iterator] === 'function') { 33 | return [...value]; 34 | } 35 | 36 | return [value]; 37 | } 38 | -------------------------------------------------------------------------------- /system-test/data/kitten-test-data.json: -------------------------------------------------------------------------------- 1 | { "name": "silvano", "breed": "the cat kind", "id": 1, "dob": "2014-10-30T02:05:59.011Z", "around": false, "buffer": "dGVzdA==", "arrayOfInts": [1, 3, 5], "recordOfRecords": { "records": [{ "record": true }] } } 2 | { "name": "ryan", "breed": "golden retriever?", "id": 2, "dob": "2014-10-30T02:05:59.012Z", "around": false, "buffer": "dGVzdA==", "arrayOfInts": [1, 3, 5], "recordOfRecords": { "records": [{ "record": true }] } } 3 | { "name": "stephen", "breed": "idkanycatbreeds", "id": 3, "dob": "2014-10-30T02:05:59.013Z", "around": true, "buffer": "dGVzdA==", "arrayOfInts": [1, 3, 5], "recordOfRecords": { "records": [{ "record": true }] } } 4 | -------------------------------------------------------------------------------- /system-test/data/schema.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "mode": "NULLABLE", 4 | "name": "Name", 5 | "type": "STRING" 6 | }, 7 | { 8 | "mode": "NULLABLE", 9 | "name": "Age", 10 | "type": "INTEGER" 11 | }, 12 | { 13 | "mode": "NULLABLE", 14 | "name": "Weight", 15 | "type": "FLOAT" 16 | }, 17 | { 18 | "mode": "NULLABLE", 19 | "name": "IsMagic", 20 | "type": "BOOLEAN" 21 | }, 22 | { 23 | "fields": [ 24 | { 25 | "mode": "NULLABLE", 26 | "name": "Name", 27 | "type": "STRING" 28 | }, 29 | { 30 | "mode": "NULLABLE", 31 | "name": "LastUsed", 32 | "type": 
"TIMESTAMP" 33 | }, 34 | { 35 | "mode": "NULLABLE", 36 | "name": "DiscoveredBy", 37 | "type": "STRING" 38 | }, 39 | { 40 | "fields": [ 41 | { 42 | "mode": "NULLABLE", 43 | "name": "Name", 44 | "type": "STRING" 45 | }, 46 | { 47 | "mode": "NULLABLE", 48 | "name": "Power", 49 | "type": "FLOAT" 50 | } 51 | ], 52 | "mode": "REPEATED", 53 | "name": "Properties", 54 | "type": "RECORD" 55 | }, 56 | { 57 | "mode": "NULLABLE", 58 | "name": "Icon", 59 | "type": "BYTES" 60 | } 61 | ], 62 | "mode": "REPEATED", 63 | "name": "Spells", 64 | "type": "RECORD" 65 | }, 66 | { 67 | "mode": "NULLABLE", 68 | "name": "TeaTime", 69 | "type": "TIME" 70 | }, 71 | { 72 | "mode": "NULLABLE", 73 | "name": "NextVacation", 74 | "type": "DATE" 75 | }, 76 | { 77 | "mode": "NULLABLE", 78 | "name": "FavoriteTime", 79 | "type": "DATETIME" 80 | }, 81 | { 82 | "mode": "NULLABLE", 83 | "name": "FavoriteNumeric", 84 | "type": "NUMERIC" 85 | } 86 | ] 87 | -------------------------------------------------------------------------------- /system-test/fixtures/sample/src/index.ts: -------------------------------------------------------------------------------- 1 | // Copyright 2023 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 
import {BigQuery} from '@google-cloud/bigquery';

/** Smoke test entry point: the packed types import and instantiate cleanly. */
function main() {
  const client = new BigQuery();
  console.log(client);
}

main();
-------------------------------------------------------------------------------- /system-test/install.ts: --------------------------------------------------------------------------------
// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {packNTest} from 'pack-n-play';
import {readFileSync} from 'fs';
import {describe, it} from 'mocha';

describe('📦 pack-n-play test', () => {
  // NOTE: a `function` expression (not an arrow) is required so that mocha's
  // `this.timeout` is reachable.
  it('TypeScript code', async function () {
    // Packing and installing the tarball is slow; allow up to five minutes.
    this.timeout(300000);
    const sampleSource = readFileSync(
      './system-test/fixtures/sample/src/index.ts',
    ).toString();
    const options = {
      packageDir: process.cwd(),
      sample: {
        description: 'TypeScript user can use the type definitions',
        ts: sampleSource,
        dependencies: ['@types/node'],
      },
    };
    try {
      await packNTest(options);
    } catch (err) {
      // Surface the full pack-n-play error before failing the test.
      console.error('TS install failed:\n', err);
      throw err;
    }
  });
});
-------------------------------------------------------------------------------- /test/testdata/testfile.json: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/googleapis/nodejs-bigquery/9729d1e4494fc266b6effbe204b6ff1996511abc/test/testdata/testfile.json -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": "build", 6 | "resolveJsonModule": true, 7 | "lib": [ 8 | "es2023", 9 | "dom" 10 | ] 11 | }, 12 | "include": [ 13 | "src/*.ts", 14 | "src/**/*.ts", 15 | "test/*.ts", 16 | "test/**/*.ts", 17 | "system-test/*.ts", 18 | "benchmark/*.ts" 19 | ] 20 | } 21 | --------------------------------------------------------------------------------