├── .compodocrc ├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── .OwlBot.lock.yaml ├── .OwlBot.yaml ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── documentation_request.yml │ ├── feature_request.yml │ ├── processs_request.md │ ├── questions.md │ └── support_request.md ├── PULL_REQUEST_TEMPLATE.md ├── auto-approve.yml ├── auto-label.yaml ├── generated-files-bot.yml ├── release-please.yml ├── release-trigger.yml ├── scripts │ ├── close-invalid-link.cjs │ ├── close-unresponsive.cjs │ ├── fixtures │ │ ├── invalidIssueBody.txt │ │ ├── validIssueBody.txt │ │ └── validIssueBodyDifferentLinkLocation.txt │ ├── remove-response-label.cjs │ └── tests │ │ ├── close-invalid-link.test.cjs │ │ └── close-or-remove-response-label.test.cjs ├── sync-repo-settings.yaml └── workflows │ ├── ci.yaml │ ├── issues-no-repro.yaml │ └── response.yaml ├── .gitignore ├── .jsdoc.js ├── .kokoro ├── .gitattributes ├── common.cfg ├── continuous │ └── node18 │ │ ├── common.cfg │ │ ├── lint.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg ├── docs.sh ├── lint.sh ├── populate-secrets.sh ├── presubmit │ ├── node18 │ │ ├── common.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg │ └── windows │ │ ├── common.cfg │ │ └── test.cfg ├── publish.sh ├── release │ ├── common.cfg │ ├── docs-devsite.cfg │ ├── docs-devsite.sh │ ├── docs.cfg │ ├── docs.sh │ └── publish.cfg ├── samples-test.sh ├── system-test.sh ├── test.bat ├── test.sh ├── trampoline.sh └── trampoline_v2.sh ├── .mocharc.js ├── .nycrc ├── .prettierignore ├── .prettierrc.js ├── .readme-partials.yaml ├── .repo-metadata.json ├── .trampolinerc ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── linkinator.config.json ├── owlbot.py ├── package.json ├── renovate.json ├── samples ├── .eslintrc.yml ├── README.md ├── package.json ├── quickstart.js └── test │ └── test.js ├── src ├── gcp-residency.ts └── index.ts ├── system-test 
├── any.d.ts ├── fixtures │ ├── .eslintrc.yml │ ├── cloudbuild │ │ ├── cloudbuild.yaml │ │ ├── index.js │ │ └── package.json │ ├── hook │ │ ├── .gcloudignore │ │ ├── index.js │ │ └── package.json │ └── kitchen │ │ ├── package.json │ │ ├── src │ │ └── index.ts │ │ └── tsconfig.json ├── kitchen.test.ts └── system.ts ├── test ├── gcp-residency.test.ts ├── index.test.ts └── utils │ └── gcp-residency.ts └── tsconfig.json /.compodocrc: -------------------------------------------------------------------------------- 1 | --- 2 | tsconfig: ./tsconfig.json 3 | output: ./docs 4 | theme: material 5 | hideGenerator: true 6 | disablePrivate: true 7 | disableProtected: true 8 | disableInternal: true 9 | disableCoverage: true 10 | disableGraph: true 11 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/coverage 3 | test/fixtures 4 | build/ 5 | docs/ 6 | protos/ 7 | samples/generated/ 8 | system-test/**/fixtures 9 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts" 3 | } 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ts text eol=lf 2 | *.js text eol=lf 3 | protos/* linguist-generated 4 | **/api-extractor.json linguist-language=JSON-with-Comments 5 | -------------------------------------------------------------------------------- /.github/.OwlBot.lock.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2025 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | docker: 15 | image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest 16 | digest: sha256:c7e4968cfc97a204a4b2381f3ecb55cabc40c4cccf88b1ef8bef0d976be87fee 17 | # created: 2025-04-08T17:33:08.498793944Z 18 | -------------------------------------------------------------------------------- /.github/.OwlBot.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | docker: 15 | image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest 16 | 17 | 18 | begin-after-commit-hash: 397c0bfd367a2427104f988d5329bc117caafd95 19 | 20 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners file. 2 | # This file controls who is tagged for review for any given pull request. 
3 | # 4 | # For syntax help see: 5 | # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax 6 | 7 | 8 | # Unless specified, the jsteam is the default owner for nodejs repositories. 9 | * @googleapis/jsteam -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Create a report to help us improve 3 | labels: 4 | - bug 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: > 9 | **PLEASE READ**: If you have a support contract with Google, please 10 | create an issue in the [support 11 | console](https://cloud.google.com/support/) instead of filing on GitHub. 12 | This will ensure a timely response. Otherwise, please make sure to 13 | follow the steps below. 14 | - type: checkboxes 15 | attributes: 16 | label: Please make sure you have searched for information in the following 17 | guides. 
18 | options: 19 | - label: "Search the issues already opened: 20 | https://github.com/GoogleCloudPlatform/google-cloud-node/issues" 21 | required: true 22 | - label: "Search StackOverflow: 23 | http://stackoverflow.com/questions/tagged/google-cloud-platform+nod\ 24 | e.js" 25 | required: true 26 | - label: "Check our Troubleshooting guide: 27 | https://github.com/googleapis/google-cloud-node/blob/main/docs/trou\ 28 | bleshooting.md" 29 | required: true 30 | - label: "Check our FAQ: 31 | https://github.com/googleapis/google-cloud-node/blob/main/docs/faq.\ 32 | md" 33 | required: true 34 | - label: "Check our libraries HOW-TO: 35 | https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ 36 | .md" 37 | required: true 38 | - label: "Check out our authentication guide: 39 | https://github.com/googleapis/google-auth-library-nodejs" 40 | required: true 41 | - label: "Check out handwritten samples for many of our APIs: 42 | https://github.com/GoogleCloudPlatform/nodejs-docs-samples" 43 | required: true 44 | - type: input 45 | attributes: 46 | label: > 47 | Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal 48 | reproduction. 49 | - type: textarea 50 | attributes: 51 | label: > 52 | A step-by-step description of how to reproduce the issue, based on 53 | the linked reproduction. 54 | description: > 55 | Screenshots can be provided in the issue body below. 56 | placeholder: | 57 | 2. Click X 58 | 3. Y will happen 59 | validations: 60 | required: true 61 | - type: textarea 62 | attributes: 63 | label: A clear and concise description of what the bug is, and what you 64 | expected to happen. 
65 | placeholder: Following the steps from the previous section, I expected A to 66 | happen, but I observed B instead 67 | validations: 68 | required: true 69 | 70 | - type: textarea 71 | attributes: 72 | label: A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** 73 | placeholder: 'Documentation here(link) states that B should happen instead of A' 74 | validations: 75 | required: true 76 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation_request.yml: -------------------------------------------------------------------------------- 1 | name: Documentation Requests 2 | description: Requests for more information 3 | body: 4 | - type: markdown 5 | attributes: 6 | value: > 7 | Please use this issue type to log documentation requests against the library itself. 8 | These requests should involve documentation on Github (`.md` files), and should relate to the library 9 | itself. If you have questions or documentation requests for an API, please 10 | reach out to the API tracker itself. 11 | 12 | Please submit an issue to the API team, either by submitting an 13 | issue in its issue tracker https://cloud.google.com/support/docs/issue-trackers), or by 14 | submitting an issue in its linked tracker in the .repo-metadata.json 15 | file in the API under packages/* ([example](https://github.com/googleapis/gcp-metadata/issues)). 16 | You can also submit a request to documentation on cloud.google.com itself with the "Send Feedback" 17 | on the bottom of the page. 18 | 19 | 20 | Please note that documentation requests and questions for specific APIs 21 | will be closed. 22 | - type: checkboxes 23 | attributes: 24 | label: Please make sure you have searched for information in the following 25 | guides. 
26 | options: 27 | - label: "Search the issues already opened: 28 | https://github.com/GoogleCloudPlatform/google-cloud-node/issues" 29 | required: true 30 | - label: "Check our Troubleshooting guide: 31 | https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ 32 | es/troubleshooting" 33 | required: true 34 | - label: "Check our FAQ: 35 | https://googlecloudplatform.github.io/google-cloud-node/#/docs/guid\ 36 | es/faq" 37 | required: true 38 | - label: "Check our libraries HOW-TO: 39 | https://github.com/googleapis/gax-nodejs/blob/main/client-libraries\ 40 | .md" 41 | required: true 42 | - label: "Check out our authentication guide: 43 | https://github.com/googleapis/google-auth-library-nodejs" 44 | required: true 45 | - label: "Check out handwritten samples for many of our APIs: 46 | https://github.com/GoogleCloudPlatform/nodejs-docs-samples" 47 | required: true 48 | - type: textarea 49 | attributes: 50 | label: > 51 | Documentation Request 52 | validations: 53 | required: true 54 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Suggest an idea for this library 3 | labels: 4 | - feature request 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: > 9 | **PLEASE READ**: If you have a support contract with Google, please 10 | create an issue in the [support 11 | console](https://cloud.google.com/support/) instead of filing on GitHub. 12 | This will ensure a timely response. Otherwise, please make sure to 13 | follow the steps below. 14 | - type: textarea 15 | attributes: 16 | label: > 17 | What would you like to see in the library? 18 | description: > 19 | Screenshots can be provided in the issue body below. 20 | placeholder: | 21 | 1. Set up authentication like so 22 | 2. Run the program like so 23 | 3. 
X would be nice to happen 24 | 25 | - type: textarea 26 | attributes: 27 | label: Describe alternatives you've considered 28 | 29 | - type: textarea 30 | attributes: 31 | label: Additional context/notes 32 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/processs_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Process Request 3 | about: Submit a process request to the library. Process requests are any requests related to library infrastructure, for example CI/CD, publishing, releasing, broken links. 4 | --- 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/questions.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: If you have a question, please use Discussions 4 | 5 | --- 6 | 7 | If you have a general question that goes beyond the library itself, we encourage you to use [Discussions](https://github.com//discussions) 8 | to engage with fellow community members! 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/support_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Support request 3 | about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. 4 | 5 | --- 6 | 7 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 8 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | > Thank you for opening a Pull Request! 
Before submitting your PR, there are a few things you can do to make sure it goes smoothly: 2 | 3 | ## Description 4 | 5 | > Please provide a detailed description for the change. 6 | > As much as possible, please try to keep changes separate by purpose. For example, try not to make a one-line bug fix in a feature request, or add an irrelevant README change to a bug fix. 7 | 8 | ## Impact 9 | 10 | > What's the impact of this change? 11 | 12 | ## Testing 13 | 14 | > Have you added unit and integration tests if necessary? 15 | > Were any tests changed? Are any breaking changes necessary? 16 | 17 | ## Additional Information 18 | 19 | > Any additional details that we should be aware of? 20 | 21 | ## Checklist 22 | 23 | - [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/gcp-metadata/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea 24 | - [ ] Ensure the tests and linter pass 25 | - [ ] Code coverage does not decrease 26 | - [ ] Appropriate docs were updated 27 | - [ ] Appropriate comments were added, particularly in complex areas or places that require background 28 | - [ ] No new warnings or issues will be generated from this change 29 | 30 | Fixes #issue_number_goes_here 🦕 31 | -------------------------------------------------------------------------------- /.github/auto-approve.yml: -------------------------------------------------------------------------------- 1 | processes: 2 | - "NodeDependency" -------------------------------------------------------------------------------- /.github/auto-label.yaml: -------------------------------------------------------------------------------- 1 | requestsize: 2 | enabled: true 3 | -------------------------------------------------------------------------------- /.github/generated-files-bot.yml: -------------------------------------------------------------------------------- 1 | generatedFiles: 2 | - path: '.kokoro/**' 3 | 
message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 4 | - path: '.github/CODEOWNERS' 5 | message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' 6 | - path: '.github/workflows/ci.yaml' 7 | message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 8 | - path: '.github/generated-files-bot.+(yml|yaml)' 9 | message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 10 | - path: 'README.md' 11 | message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 12 | - path: 'samples/README.md' 13 | message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 14 | ignoreAuthors: 15 | - 'gcf-owl-bot[bot]' 16 | - 'yoshi-automation' 17 | -------------------------------------------------------------------------------- /.github/release-please.yml: -------------------------------------------------------------------------------- 1 | handleGHRelease: true 2 | releaseType: node 3 | branches: 4 | - handleGHRelease: true 5 | releaseType: node 6 | branch: 6.x 7 | -------------------------------------------------------------------------------- /.github/release-trigger.yml: -------------------------------------------------------------------------------- 1 | enabled: true 2 | multiScmName: gcp-metadata -------------------------------------------------------------------------------- /.github/scripts/close-invalid-link.cjs: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | async function closeIssue(github, owner, repo, number) { 16 | await github.rest.issues.createComment({ 17 | owner: owner, 18 | repo: repo, 19 | issue_number: number, 20 | body: 'Issue was opened with an invalid reproduction link. 
Please make sure the repository is a valid, publicly-accessible github repository, and make sure the url is complete (example: https://github.com/googleapis/google-cloud-node)' 21 | }); 22 | await github.rest.issues.update({ 23 | owner: owner, 24 | repo: repo, 25 | issue_number: number, 26 | state: 'closed' 27 | }); 28 | } 29 | module.exports = async ({github, context}) => { 30 | const owner = context.repo.owner; 31 | const repo = context.repo.repo; 32 | const number = context.issue.number; 33 | 34 | const issue = await github.rest.issues.get({ 35 | owner: owner, 36 | repo: repo, 37 | issue_number: number, 38 | }); 39 | 40 | const isBugTemplate = issue.data.body.includes('Link to the code that reproduces this issue'); 41 | 42 | if (isBugTemplate) { 43 | console.log(`Issue ${number} is a bug template`) 44 | try { 45 | const link = issue.data.body.split('\n')[18].match(/(https?:\/\/(gist\.)?github.com\/.*)/)[0]; 46 | console.log(`Issue ${number} contains this link: ${link}`) 47 | const isValidLink = (await fetch(link)).ok; 48 | console.log(`Issue ${number} has a ${isValidLink ? 'valid' : 'invalid'} link`) 49 | if (!isValidLink) { 50 | await closeIssue(github, owner, repo, number); 51 | } 52 | } catch (err) { 53 | await closeIssue(github, owner, repo, number); 54 | } 55 | } 56 | }; 57 | -------------------------------------------------------------------------------- /.github/scripts/close-unresponsive.cjs: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | function labeledEvent(data) { 16 | return data.event === 'labeled' && data.label.name === 'needs more info'; 17 | } 18 | 19 | const numberOfDaysLimit = 15; 20 | const close_message = `This has been closed since a request for information has \ 21 | not been answered for ${numberOfDaysLimit} days. It can be reopened when the \ 22 | requested information is provided.`; 23 | 24 | module.exports = async ({github, context}) => { 25 | const owner = context.repo.owner; 26 | const repo = context.repo.repo; 27 | 28 | const issues = await github.rest.issues.listForRepo({ 29 | owner: owner, 30 | repo: repo, 31 | labels: 'needs more info', 32 | }); 33 | const numbers = issues.data.map((e) => e.number); 34 | 35 | for (const number of numbers) { 36 | const events = await github.paginate( 37 | github.rest.issues.listEventsForTimeline, 38 | { 39 | owner: owner, 40 | repo: repo, 41 | issue_number: number, 42 | }, 43 | (response) => response.data.filter(labeledEvent) 44 | ); 45 | 46 | const latest_response_label = events[events.length - 1]; 47 | 48 | const created_at = new Date(latest_response_label.created_at); 49 | const now = new Date(); 50 | const diff = now - created_at; 51 | const diffDays = diff / (1000 * 60 * 60 * 24); 52 | 53 | if (diffDays > numberOfDaysLimit) { 54 | await github.rest.issues.update({ 55 | owner: owner, 56 | repo: repo, 57 | issue_number: number, 58 | state: 'closed', 59 | }); 60 | 61 | await github.rest.issues.createComment({ 62 | owner: owner, 63 | repo: repo, 64 | issue_number: number, 65 | body: 
close_message, 66 | }); 67 | } 68 | } 69 | }; 70 | -------------------------------------------------------------------------------- /.github/scripts/fixtures/invalidIssueBody.txt: -------------------------------------------------------------------------------- 1 | ### Please make sure you have searched for information in the following guides. 2 | 3 | - [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues 4 | - [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js 5 | - [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting 6 | - [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq 7 | - [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md 8 | - [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs 9 | - [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples 10 | 11 | ### A screenshot that you have tested with "Try this API". 12 | 13 | 14 | N/A 15 | 16 | ### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. 17 | 18 | not-a-link 19 | 20 | ### A step-by-step description of how to reproduce the issue, based on the linked reproduction. 21 | 22 | 23 | Change MY_PROJECT to your project name, add credentials if needed and run. 24 | 25 | ### A clear and concise description of what the bug is, and what you expected to happen. 26 | 27 | The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. 28 | TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object 29 | at _write (node:internal/streams/writable:474:13) 30 | at Writable.write (node:internal/streams/writable:502:10) 31 | at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) 32 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 33 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 34 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 35 | at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) 36 | at Object.onceWrapper (node:events:633:26) 37 | at Pumpify.emit (node:events:518:28) 38 | at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) 39 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 40 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 41 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 42 | at PassThrough.ondata (node:internal/streams/readable:1007:22) 43 | at PassThrough.emit (node:events:518:28) 44 | at addChunk (node:internal/streams/readable:559:12) { 45 | code: 'ERR_INVALID_ARG_TYPE' 46 | 47 | 48 | ### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** 49 | 50 | No library should crash an application this way. -------------------------------------------------------------------------------- /.github/scripts/fixtures/validIssueBody.txt: -------------------------------------------------------------------------------- 1 | ### Please make sure you have searched for information in the following guides. 
2 | 3 | - [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues 4 | - [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js 5 | - [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting 6 | - [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq 7 | - [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md 8 | - [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs 9 | - [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples 10 | 11 | ### A screenshot that you have tested with "Try this API". 12 | 13 | 14 | N/A 15 | 16 | ### Link to the code that reproduces this issue. A link to a **public** Github Repository or gist with a minimal reproduction. 17 | 18 | https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 19 | 20 | ### A step-by-step description of how to reproduce the issue, based on the linked reproduction. 21 | 22 | 23 | Change MY_PROJECT to your project name, add credentials if needed and run. 24 | 25 | ### A clear and concise description of what the bug is, and what you expected to happen. 26 | 27 | The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. 28 | TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object 29 | at _write (node:internal/streams/writable:474:13) 30 | at Writable.write (node:internal/streams/writable:502:10) 31 | at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) 32 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 33 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 34 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 35 | at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) 36 | at Object.onceWrapper (node:events:633:26) 37 | at Pumpify.emit (node:events:518:28) 38 | at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) 39 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 40 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 41 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 42 | at PassThrough.ondata (node:internal/streams/readable:1007:22) 43 | at PassThrough.emit (node:events:518:28) 44 | at addChunk (node:internal/streams/readable:559:12) { 45 | code: 'ERR_INVALID_ARG_TYPE' 46 | 47 | 48 | ### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** 49 | 50 | No library should crash an application this way. -------------------------------------------------------------------------------- /.github/scripts/fixtures/validIssueBodyDifferentLinkLocation.txt: -------------------------------------------------------------------------------- 1 | ### Please make sure you have searched for information in the following guides. 
2 | 3 | - [X] Search the issues already opened: https://github.com/GoogleCloudPlatform/google-cloud-node/issues 4 | - [X] Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js 5 | - [X] Check our Troubleshooting guide: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/troubleshooting 6 | - [X] Check our FAQ: https://googlecloudplatform.github.io/google-cloud-node/#/docs/guides/faq 7 | - [X] Check our libraries HOW-TO: https://github.com/googleapis/gax-nodejs/blob/main/client-libraries.md 8 | - [X] Check out our authentication guide: https://github.com/googleapis/google-auth-library-nodejs 9 | - [X] Check out handwritten samples for many of our APIs: https://github.com/GoogleCloudPlatform/nodejs-docs-samples 10 | 11 | ### A screenshot that you have tested with "Try this API". 12 | 13 | 14 | N/A 15 | 16 | ### A step-by-step description of how to reproduce the issue, based on the linked reproduction. 17 | 18 | 19 | Change MY_PROJECT to your project name, add credentials if needed and run. 20 | 21 | ### A clear and concise description of what the bug is, and what you expected to happen. 22 | 23 | The application crashes with the following exception (which there is no way to catch). It should just emit error, and allow graceful handling. 24 | TypeError [ERR_INVALID_ARG_TYPE]: The "chunk" argument must be of type string or an instance of Buffer or Uint8Array. 
Received an instance of Object 25 | at _write (node:internal/streams/writable:474:13) 26 | at Writable.write (node:internal/streams/writable:502:10) 27 | at Duplexify._write (/project/node_modules/duplexify/index.js:212:22) 28 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 29 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 30 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 31 | at Pumpify. (/project/node_modules/@google-cloud/speech/build/src/helpers.js:79:27) 32 | at Object.onceWrapper (node:events:633:26) 33 | at Pumpify.emit (node:events:518:28) 34 | at obj. [as _write] (/project/node_modules/stubs/index.js:28:22) 35 | at doWrite (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:390:139) 36 | at writeOrBuffer (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:381:5) 37 | at Writable.write (/project/node_modules/duplexify/node_modules/readable-stream/lib/_stream_writable.js:302:11) 38 | at PassThrough.ondata (node:internal/streams/readable:1007:22) 39 | at PassThrough.emit (node:events:518:28) 40 | at addChunk (node:internal/streams/readable:559:12) { 41 | code: 'ERR_INVALID_ARG_TYPE' 42 | 43 | ### Link to the code that reproduces this issue. A link to a **public** Github Repository with a minimal reproduction. 44 | 45 | 46 | https://gist.github.com/orgads/13cbf44c91923da27d8772b5f10489c9 47 | 48 | ### A clear and concise description WHY you expect this behavior, i.e., was it a recent change, there is documentation that points to this behavior, etc. ** 49 | 50 | No library should crash an application this way. 
-------------------------------------------------------------------------------- /.github/scripts/remove-response-label.cjs: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | module.exports = async ({ github, context }) => { 16 | const commenter = context.actor; 17 | const issue = await github.rest.issues.get({ 18 | owner: context.repo.owner, 19 | repo: context.repo.repo, 20 | issue_number: context.issue.number, 21 | }); 22 | const author = issue.data.user.login; 23 | const labels = issue.data.labels.map((e) => e.name); 24 | 25 | if (author === commenter && labels.includes('needs more info')) { 26 | await github.rest.issues.removeLabel({ 27 | owner: context.repo.owner, 28 | repo: context.repo.repo, 29 | issue_number: context.issue.number, 30 | name: 'needs more info', 31 | }); 32 | } 33 | }; 34 | -------------------------------------------------------------------------------- /.github/scripts/tests/close-invalid-link.test.cjs: -------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | const { describe, it } = require('mocha'); 18 | const closeInvalidLink = require('../close-invalid-link.cjs'); 19 | const fs = require('fs'); 20 | const sinon = require('sinon'); 21 | 22 | describe('close issues with invalid links', () => { 23 | let octokitStub; 24 | let issuesStub; 25 | 26 | beforeEach(() => { 27 | issuesStub = { 28 | get: sinon.stub(), 29 | createComment: sinon.stub(), 30 | update: sinon.stub(), 31 | }; 32 | octokitStub = { 33 | rest: { 34 | issues: issuesStub, 35 | }, 36 | }; 37 | }); 38 | 39 | afterEach(() => { 40 | sinon.restore(); 41 | }); 42 | 43 | it('does not do anything if it is not a bug', async () => { 44 | const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; 45 | issuesStub.get.resolves({ data: { body: "I'm having a problem with this." 
} }); 46 | 47 | await closeInvalidLink({ github: octokitStub, context }); 48 | 49 | sinon.assert.calledOnce(issuesStub.get); 50 | sinon.assert.notCalled(issuesStub.createComment); 51 | sinon.assert.notCalled(issuesStub.update); 52 | }); 53 | 54 | it('does not do anything if it is a bug with an appropriate link', async () => { 55 | const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; 56 | issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBody.txt', 'utf-8') } }); 57 | 58 | await closeInvalidLink({ github: octokitStub, context }); 59 | 60 | sinon.assert.calledOnce(issuesStub.get); 61 | sinon.assert.notCalled(issuesStub.createComment); 62 | sinon.assert.notCalled(issuesStub.update); 63 | }); 64 | 65 | it('does not do anything if it is a bug with an appropriate link and the template changes', async () => { 66 | const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; 67 | issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/validIssueBodyDifferentLinkLocation.txt', 'utf-8') } }); 68 | 69 | await closeInvalidLink({ github: octokitStub, context }); 70 | 71 | sinon.assert.calledOnce(issuesStub.get); 72 | sinon.assert.notCalled(issuesStub.createComment); 73 | sinon.assert.notCalled(issuesStub.update); 74 | }); 75 | 76 | it('closes the issue if the link is invalid', async () => { 77 | const context = { repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; 78 | issuesStub.get.resolves({ data: { body: fs.readFileSync('./fixtures/invalidIssueBody.txt', 'utf-8') } }); 79 | 80 | await closeInvalidLink({ github: octokitStub, context }); 81 | 82 | sinon.assert.calledOnce(issuesStub.get); 83 | sinon.assert.calledOnce(issuesStub.createComment); 84 | sinon.assert.calledOnce(issuesStub.update); 85 | }); 86 | }); -------------------------------------------------------------------------------- /.github/scripts/tests/close-or-remove-response-label.test.cjs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2024 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | 'use strict'; 16 | 17 | const { describe, it, beforeEach, afterEach } = require('mocha'); 18 | const removeResponseLabel = require('../remove-response-label.cjs'); 19 | const closeUnresponsive = require('../close-unresponsive.cjs'); 20 | const sinon = require('sinon'); 21 | 22 | function getISODateDaysAgo(days) { 23 | const today = new Date(); 24 | const daysAgo = new Date(today.setDate(today.getDate() - days)); 25 | return daysAgo.toISOString(); 26 | } 27 | 28 | describe('close issues or remove needs more info labels', () => { 29 | let octokitStub; 30 | let issuesStub; 31 | let paginateStub; 32 | 33 | beforeEach(() => { 34 | issuesStub = { 35 | listForRepo: sinon.stub(), 36 | update: sinon.stub(), 37 | createComment: sinon.stub(), 38 | get: sinon.stub(), 39 | removeLabel: sinon.stub(), 40 | }; 41 | paginateStub = sinon.stub(); 42 | octokitStub = { 43 | rest: { 44 | issues: issuesStub, 45 | }, 46 | paginate: paginateStub, 47 | }; 48 | }); 49 | 50 | afterEach(() => { 51 | sinon.restore(); 52 | }); 53 | 54 | it('closes the issue if the OP has not responded within the allotted time and there is a needs-more-info label', async () => { 55 | const context = { owner: 'testOrg', repo: 'testRepo' }; 56 | const issuesInRepo = [{ user: { login: 'OP' }, 
labels: [{ name: 'needs more info' }] }]; 57 | const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(16) }]; 58 | 59 | issuesStub.listForRepo.resolves({ data: issuesInRepo }); 60 | paginateStub.resolves(eventsInIssue); 61 | 62 | await closeUnresponsive({ github: octokitStub, context }); 63 | 64 | sinon.assert.calledOnce(issuesStub.listForRepo); 65 | sinon.assert.calledOnce(paginateStub); 66 | sinon.assert.calledOnce(issuesStub.update); 67 | sinon.assert.calledOnce(issuesStub.createComment); 68 | }); 69 | 70 | it('does nothing if not enough time has passed and there is a needs-more-info label', async () => { 71 | const context = { owner: 'testOrg', repo: 'testRepo' }; 72 | const issuesInRepo = [{ user: { login: 'OP' }, labels: [{ name: 'needs more info' }] }]; 73 | const eventsInIssue = [{ event: 'labeled', label: { name: 'needs more info' }, created_at: getISODateDaysAgo(14) }]; 74 | 75 | issuesStub.listForRepo.resolves({ data: issuesInRepo }); 76 | paginateStub.resolves(eventsInIssue); 77 | 78 | await closeUnresponsive({ github: octokitStub, context }); 79 | 80 | sinon.assert.calledOnce(issuesStub.listForRepo); 81 | sinon.assert.calledOnce(paginateStub); 82 | sinon.assert.notCalled(issuesStub.update); 83 | sinon.assert.notCalled(issuesStub.createComment); 84 | }); 85 | 86 | it('removes the label if OP responded', async () => { 87 | const context = { actor: 'OP', repo: { owner: 'testOrg', repo: 'testRepo' }, issue: { number: 1 } }; 88 | const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; 89 | 90 | issuesStub.get.resolves({ data: issueContext }); 91 | 92 | await removeResponseLabel({ github: octokitStub, context }); 93 | 94 | sinon.assert.calledOnce(issuesStub.get); 95 | sinon.assert.calledOnce(issuesStub.removeLabel); 96 | }); 97 | 98 | it('does not remove the label if author responded', async () => { 99 | const context = { actor: 'repo-maintainer', repo: { owner: 'testOrg', 
repo: 'testRepo' }, issue: { number: 1 } }; 100 | const issueContext = { user: {login: 'OP'}, labels: [{ name: 'needs more info' }] }; 101 | 102 | issuesStub.get.resolves({ data: issueContext }); 103 | 104 | await removeResponseLabel({ github: octokitStub, context }); 105 | 106 | sinon.assert.calledOnce(issuesStub.get); 107 | sinon.assert.notCalled(issuesStub.removeLabel); 108 | }); 109 | }); -------------------------------------------------------------------------------- /.github/sync-repo-settings.yaml: -------------------------------------------------------------------------------- 1 | branchProtectionRules: 2 | - pattern: main 3 | isAdminEnforced: true 4 | requiredApprovingReviewCount: 1 5 | requiresCodeOwnerReviews: true 6 | requiresStrictStatusChecks: true 7 | requiredStatusCheckContexts: 8 | - 'ci/kokoro: Samples test' 9 | - 'ci/kokoro: System test' 10 | - lint 11 | - test (18) 12 | - test (20) 13 | - test (22) 14 | - cla/google 15 | - windows 16 | - OwlBot Post Processor 17 | - pattern: 6.x 18 | isAdminEnforced: true 19 | requiredApprovingReviewCount: 1 20 | requiresCodeOwnerReviews: true 21 | requiresStrictStatusChecks: true 22 | requiredStatusCheckContexts: 23 | - 'ci/kokoro: Samples test' 24 | - 'ci/kokoro: System test' 25 | - "ci/kokoro: Samples test" 26 | - "ci/kokoro: System test" 27 | - lint 28 | - test (18) 29 | - test (20) 30 | - test (22) 31 | - cla/google 32 | - windows 33 | - OwlBot Post Processor 34 | permissionRules: 35 | - team: yoshi-admins 36 | permission: admin 37 | - team: jsteam-admins 38 | permission: admin 39 | - team: jsteam 40 | permission: push 41 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | name: ci 7 | jobs: 8 | test: 9 | runs-on: ubuntu-latest 10 | strategy: 11 | matrix: 12 | node: [18, 20, 22] 13 | steps: 14 | - uses: 
actions/checkout@v4 15 | - uses: actions/setup-node@v4 16 | with: 17 | node-version: ${{ matrix.node }} 18 | - run: node --version 19 | # The first installation step ensures that all of our production 20 | # dependencies work on the given Node.js version, this helps us find 21 | # dependencies that don't match our engines field: 22 | - run: npm install --production --engine-strict --ignore-scripts --no-package-lock 23 | # Clean up the production install, before installing dev/production: 24 | - run: rm -rf node_modules 25 | - run: npm install --engine-strict 26 | - run: npm test 27 | env: 28 | MOCHA_THROW_DEPRECATION: false 29 | test-script: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v4 33 | - uses: actions/setup-node@v4 34 | with: 35 | node-version: 18 36 | - run: node --version 37 | - run: npm install --engine-strict 38 | working-directory: .github/scripts 39 | - run: npm test 40 | working-directory: .github/scripts 41 | env: 42 | MOCHA_THROW_DEPRECATION: false 43 | windows: 44 | runs-on: windows-latest 45 | steps: 46 | - uses: actions/checkout@v4 47 | - uses: actions/setup-node@v4 48 | with: 49 | node-version: 18 50 | - run: npm install --engine-strict 51 | - run: npm test 52 | env: 53 | MOCHA_THROW_DEPRECATION: false 54 | lint: 55 | runs-on: ubuntu-latest 56 | steps: 57 | - uses: actions/checkout@v4 58 | - uses: actions/setup-node@v4 59 | with: 60 | node-version: 18 61 | - run: npm install 62 | - run: npm run lint 63 | docs: 64 | runs-on: ubuntu-latest 65 | steps: 66 | - uses: actions/checkout@v4 67 | - uses: actions/setup-node@v4 68 | with: 69 | node-version: 18 70 | - run: npm install 71 | - run: npm run docs 72 | - uses: JustinBeckwith/linkinator-action@v1 73 | with: 74 | paths: docs/ 75 | -------------------------------------------------------------------------------- /.github/workflows/issues-no-repro.yaml: -------------------------------------------------------------------------------- 1 | name: invalid_link 2 | on: 3 | issues: 
4 | types: [opened, reopened] 5 | 6 | jobs: 7 | close: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | steps: 13 | - uses: actions/checkout@v4 14 | - uses: actions/github-script@v7 15 | with: 16 | script: | 17 | const script = require('./.github/scripts/close-invalid-link.cjs') 18 | await script({github, context}) 19 | -------------------------------------------------------------------------------- /.github/workflows/response.yaml: -------------------------------------------------------------------------------- 1 | name: no_response 2 | on: 3 | schedule: 4 | - cron: '30 1 * * *' # Run every day at 01:30 5 | workflow_dispatch: 6 | issue_comment: 7 | 8 | jobs: 9 | close: 10 | if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' 11 | runs-on: ubuntu-latest 12 | permissions: 13 | issues: write 14 | pull-requests: write 15 | steps: 16 | - uses: actions/checkout@v4 17 | - uses: actions/github-script@v7 18 | with: 19 | script: | 20 | const script = require('./.github/scripts/close-unresponsive.cjs') 21 | await script({github, context}) 22 | 23 | remove_label: 24 | if: github.event_name == 'issue_comment' 25 | runs-on: ubuntu-latest 26 | permissions: 27 | issues: write 28 | pull-requests: write 29 | steps: 30 | - uses: actions/checkout@v4 31 | - uses: actions/github-script@v7 32 | with: 33 | script: | 34 | const script = require('./.github/scripts/remove-response-label.cjs') 35 | await script({github, context}) 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | .vscode 4 | package-lock.json 5 | .nyc_output 6 | .coverage 7 | package-lock.json 8 | gcp-metadata.tgz 9 | docs/ 10 | __pycache__ 11 | .DS_Store 12 | *.tgz -------------------------------------------------------------------------------- /.jsdoc.js: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | // 15 | 16 | 'use strict'; 17 | 18 | module.exports = { 19 | opts: { 20 | readme: './README.md', 21 | package: './package.json', 22 | template: './node_modules/jsdoc-fresh', 23 | recurse: true, 24 | verbose: true, 25 | destination: './docs/' 26 | }, 27 | plugins: [ 28 | 'plugins/markdown', 29 | 'jsdoc-region-tag' 30 | ], 31 | source: { 32 | excludePattern: '(^|\\/|\\\\)[._]', 33 | include: [ 34 | 'build/src', 35 | ], 36 | includePattern: '\\.js$' 37 | }, 38 | templates: { 39 | copyright: 'Copyright 2019 Google, LLC.', 40 | includeDate: false, 41 | sourceFiles: false, 42 | systemName: 'gcp-metadata', 43 | theme: 'lumen', 44 | default: { 45 | outputSourceFiles: false 46 | } 47 | }, 48 | markdown: { 49 | idInHeadings: true 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /.kokoro/.gitattributes: -------------------------------------------------------------------------------- 1 | * linguist-generated=true 2 | -------------------------------------------------------------------------------- /.kokoro/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | 
define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcp-metadata/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 
17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcp-metadata/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/lint.cfg: -------------------------------------------------------------------------------- 1 | env_vars: { 2 | key: "TRAMPOLINE_BUILD_FILE" 3 | value: "github/gcp-metadata/.kokoro/lint.sh" 4 | } 5 | -------------------------------------------------------------------------------- /.kokoro/continuous/node18/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcp-metadata/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node18/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcp-metadata/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node18/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/gcp-metadata/3376111f89b5242392e128ba27a03d62b14ad3eb/.kokoro/continuous/node18/test.cfg -------------------------------------------------------------------------------- /.kokoro/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 
22 | 23 | npm install 24 | 25 | npm run docs-test 26 | -------------------------------------------------------------------------------- /.kokoro/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 22 | 23 | npm install 24 | 25 | # Install and link samples 26 | if [ -f samples/package.json ]; then 27 | cd samples/ 28 | npm link ../ 29 | npm install 30 | cd .. 31 | fi 32 | 33 | npm run lint 34 | -------------------------------------------------------------------------------- /.kokoro/populate-secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2020 Google LLC. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # This file is called in the early stage of `trampoline_v2.sh` to 17 | # populate secrets needed for the CI builds. 18 | 19 | set -eo pipefail 20 | 21 | function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} 22 | function msg { println "$*" >&2 ;} 23 | function println { printf '%s\n' "$(now) $*" ;} 24 | 25 | # Populates requested secrets set in SECRET_MANAGER_KEYS 26 | 27 | # In Kokoro CI builds, we use the service account attached to the 28 | # Kokoro VM. This means we need to setup auth on other CI systems. 29 | # For local run, we just use the gcloud command for retrieving the 30 | # secrets. 31 | 32 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 33 | GCLOUD_COMMANDS=( 34 | "docker" 35 | "run" 36 | "--entrypoint=gcloud" 37 | "--volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR}" 38 | "gcr.io/google.com/cloudsdktool/cloud-sdk" 39 | ) 40 | if [[ "${TRAMPOLINE_CI:-}" == "kokoro" ]]; then 41 | SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" 42 | else 43 | echo "Authentication for this CI system is not implemented yet." 44 | exit 2 45 | # TODO: Determine appropriate SECRET_LOCATION and the GCLOUD_COMMANDS. 46 | fi 47 | else 48 | # For local run, use /dev/shm or temporary directory for 49 | # KOKORO_GFILE_DIR. 50 | if [[ -d "/dev/shm" ]]; then 51 | export KOKORO_GFILE_DIR=/dev/shm 52 | else 53 | export KOKORO_GFILE_DIR=$(mktemp -d -t ci-XXXXXXXX) 54 | fi 55 | SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" 56 | GCLOUD_COMMANDS=("gcloud") 57 | fi 58 | 59 | msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" 60 | mkdir -p ${SECRET_LOCATION} 61 | 62 | for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") 63 | do 64 | msg "Retrieving secret ${key}" 65 | "${GCLOUD_COMMANDS[@]}" \ 66 | secrets versions access latest \ 67 | --project cloud-devrel-kokoro-resources \ 68 | --secret $key > \ 69 | "$SECRET_LOCATION/$key" 70 | if [[ $? 
== 0 ]]; then 71 | msg "Secret written to ${SECRET_LOCATION}/${key}" 72 | else 73 | msg "Error retrieving secret ${key}" 74 | exit 2 75 | fi 76 | done 77 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcp-metadata/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcp-metadata/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcp-metadata/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node18/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/gcp-metadata/3376111f89b5242392e128ba27a03d62b14ad3eb/.kokoro/presubmit/node18/test.cfg -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/test.cfg: -------------------------------------------------------------------------------- 1 | # Use the test file directly 2 | build_file: "gcp-metadata/.kokoro/test.bat" 3 | -------------------------------------------------------------------------------- /.kokoro/publish.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Start the releasetool reporter 22 | python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script 23 | 24 | cd $(dirname $0)/.. 25 | 26 | NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) 27 | echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc 28 | 29 | npm install 30 | npm pack . 31 | # npm provides no way to specify, observe, or predict the name of the tarball 32 | # file it generates. We have to look in the current directory for the freshest 33 | # .tgz file. 34 | TARBALL=$(ls -1 -t *.tgz | head -1) 35 | 36 | npm publish --access=public --registry=https://wombat-dressing-room.appspot.com "$TARBALL" 37 | 38 | # Kokoro collects *.tgz and package-lock.json files and stores them in Placer 39 | # so we can generate SBOMs and attestations. 40 | # However, we *don't* want Kokoro to collect package-lock.json and *.tgz files 41 | # that happened to be installed with dependencies. 42 | find node_modules -name package-lock.json -o -name "*.tgz" | xargs rm -f -------------------------------------------------------------------------------- /.kokoro/release/common.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "yoshi-automation-github-key" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 
2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 21 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/gcp-metadata/.kokoro/release/docs-devsite.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2021 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | if [[ -z "$CREDENTIALS" ]]; then 20 | # if CREDENTIALS are explicitly set, assume we're testing locally 21 | # and don't set NPM_CONFIG_PREFIX. 22 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 23 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 24 | cd $(dirname $0)/../.. 25 | fi 26 | 27 | npm install 28 | npm install --no-save @google-cloud/cloud-rad@^0.4.0 29 | # publish docs to devsite 30 | npx @google-cloud/cloud-rad . 
cloud-rad 31 | -------------------------------------------------------------------------------- /.kokoro/release/docs.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 21 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/gcp-metadata/.kokoro/release/docs.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2019 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | # build jsdocs (Python is installed on the Node 18 docker image). 
20 | if [[ -z "$CREDENTIALS" ]]; then 21 | # if CREDENTIALS are explicitly set, assume we're testing locally 22 | # and don't set NPM_CONFIG_PREFIX. 23 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 24 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 25 | cd $(dirname $0)/../.. 26 | fi 27 | npm install 28 | npm run docs 29 | 30 | # create docs.metadata, based on package.json and .repo-metadata.json. 31 | npm i json@9.0.6 -g 32 | python3 -m docuploader create-metadata \ 33 | --name=$(cat .repo-metadata.json | json name) \ 34 | --version=$(cat package.json | json version) \ 35 | --language=$(cat .repo-metadata.json | json language) \ 36 | --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ 37 | --product-page=$(cat .repo-metadata.json | json product_documentation) \ 38 | --github-repository=$(cat .repo-metadata.json | json repo) \ 39 | --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) 40 | cp docs.metadata ./docs/docs.metadata 41 | 42 | # deploy the docs. 
43 | if [[ -z "$CREDENTIALS" ]]; then 44 | CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account 45 | fi 46 | if [[ -z "$BUCKET" ]]; then 47 | BUCKET=docs-staging 48 | fi 49 | python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET 50 | -------------------------------------------------------------------------------- /.kokoro/release/publish.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "docuploader_service_account" 6 | } 7 | } 8 | } 9 | 10 | before_action { 11 | fetch_keystore { 12 | keystore_resource { 13 | keystore_config_id: 73713 14 | keyname: "google-cloud-npm-token-1" 15 | } 16 | } 17 | } 18 | 19 | env_vars: { 20 | key: "SECRET_MANAGER_KEYS" 21 | value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" 22 | } 23 | 24 | # Download trampoline resources. 25 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 26 | 27 | # Use the trampoline script to run in docker. 28 | build_file: "gcp-metadata/.kokoro/trampoline_v2.sh" 29 | 30 | # Configure the docker image for kokoro-trampoline. 31 | env_vars: { 32 | key: "TRAMPOLINE_IMAGE" 33 | value: "gcr.io/cloud-devrel-kokoro-resources/node:18-user" 34 | } 35 | 36 | env_vars: { 37 | key: "TRAMPOLINE_BUILD_FILE" 38 | value: "github/gcp-metadata/.kokoro/publish.sh" 39 | } 40 | 41 | # Store the packages we uploaded to npmjs.org and their corresponding 42 | # package-lock.jsons in Placer. That way, we have a record of exactly 43 | # what we published, and which version of which tools we used to publish 44 | # it, which we can use to generate SBOMs and attestations. 
45 | action { 46 | define_artifacts { 47 | regex: "github/**/*.tgz" 48 | regex: "github/**/package-lock.json" 49 | strip_prefix: "github" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /.kokoro/samples-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | # Ensure the npm global directory is writable, otherwise rebuild `npm` 20 | mkdir -p $NPM_CONFIG_PREFIX 21 | npm config -g ls || npm i -g npm@`npm --version` 22 | 23 | echo $(npm -v) 24 | echo $(which npm) 25 | echo $(npm prefix -g) 26 | 27 | # Setup service account credentials. 28 | export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account 29 | export GCLOUD_PROJECT=long-door-651 30 | 31 | cd $(dirname $0)/.. 32 | 33 | # Run a pre-test hook, if a pre-samples-test.sh is in the project 34 | if [ -f .kokoro/pre-samples-test.sh ]; then 35 | set +x 36 | . .kokoro/pre-samples-test.sh 37 | set -x 38 | fi 39 | 40 | if [ -f samples/package.json ]; then 41 | npm install 42 | 43 | # Install and link samples 44 | cd samples/ 45 | npm link ../ 46 | npm install 47 | cd .. 
48 | # If tests are running against main branch, configure flakybot 49 | # to open issues on failures: 50 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 51 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 52 | export MOCHA_REPORTER=xunit 53 | cleanup() { 54 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 55 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 56 | } 57 | trap cleanup EXIT HUP 58 | fi 59 | 60 | npm run samples-test 61 | fi 62 | 63 | # codecov combines coverage across integration and unit tests. Include 64 | # the logic below for any environment you wish to collect coverage for: 65 | COVERAGE_NODE=18 66 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 67 | NYC_BIN=./node_modules/nyc/bin/nyc.js 68 | if [ -f "$NYC_BIN" ]; then 69 | $NYC_BIN report || true 70 | fi 71 | bash $KOKORO_GFILE_DIR/codecov.sh 72 | else 73 | echo "coverage is only reported for Node $COVERAGE_NODE" 74 | fi 75 | -------------------------------------------------------------------------------- /.kokoro/system-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Setup service account credentials. 
22 | export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account 23 | export GCLOUD_PROJECT=long-door-651 24 | 25 | cd $(dirname $0)/.. 26 | 27 | # Run a pre-test hook, if a pre-system-test.sh is in the project 28 | if [ -f .kokoro/pre-system-test.sh ]; then 29 | set +x 30 | . .kokoro/pre-system-test.sh 31 | set -x 32 | fi 33 | 34 | npm install 35 | 36 | # If tests are running against main branch, configure flakybot 37 | # to open issues on failures: 38 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 39 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 40 | export MOCHA_REPORTER=xunit 41 | cleanup() { 42 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 43 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 44 | } 45 | trap cleanup EXIT HUP 46 | fi 47 | 48 | npm run system-test 49 | 50 | # codecov combines coverage across integration and unit tests. Include 51 | # the logic below for any environment you wish to collect coverage for: 52 | COVERAGE_NODE=18 53 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 54 | NYC_BIN=./node_modules/nyc/bin/nyc.js 55 | if [ -f "$NYC_BIN" ]; then 56 | $NYC_BIN report || true 57 | fi 58 | bash $KOKORO_GFILE_DIR/codecov.sh 59 | else 60 | echo "coverage is only reported for Node $COVERAGE_NODE" 61 | fi 62 | -------------------------------------------------------------------------------- /.kokoro/test.bat: -------------------------------------------------------------------------------- 1 | @rem Copyright 2018 Google LLC. All rights reserved. 2 | @rem 3 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 4 | @rem you may not use this file except in compliance with the License. 
5 | @rem You may obtain a copy of the License at 6 | @rem 7 | @rem http://www.apache.org/licenses/LICENSE-2.0 8 | @rem 9 | @rem Unless required by applicable law or agreed to in writing, software 10 | @rem distributed under the License is distributed on an "AS IS" BASIS, 11 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | @rem See the License for the specific language governing permissions and 13 | @rem limitations under the License. 14 | 15 | @echo "Starting Windows build" 16 | 17 | cd /d %~dp0 18 | cd .. 19 | 20 | @rem npm path is not currently set in our image, we should fix this next time 21 | @rem we upgrade Node.js in the image: 22 | SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm 23 | 24 | call nvm use 18 25 | call which node 26 | 27 | call npm install || goto :error 28 | call npm run test || goto :error 29 | 30 | goto :EOF 31 | 32 | :error 33 | exit /b 1 34 | -------------------------------------------------------------------------------- /.kokoro/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 
22 | 23 | npm install 24 | # If tests are running against main branch, configure flakybot 25 | # to open issues on failures: 26 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 27 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 28 | export MOCHA_REPORTER=xunit 29 | cleanup() { 30 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 31 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 32 | } 33 | trap cleanup EXIT HUP 34 | fi 35 | # Unit tests exercise the entire API surface, which may include 36 | # deprecation warnings: 37 | export MOCHA_THROW_DEPRECATION=false 38 | npm test 39 | 40 | # codecov combines coverage across integration and unit tests. Include 41 | # the logic below for any environment you wish to collect coverage for: 42 | COVERAGE_NODE=18 43 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 44 | NYC_BIN=./node_modules/nyc/bin/nyc.js 45 | if [ -f "$NYC_BIN" ]; then 46 | $NYC_BIN report || true 47 | fi 48 | bash $KOKORO_GFILE_DIR/codecov.sh 49 | else 50 | echo "coverage is only reported for Node $COVERAGE_NODE" 51 | fi 52 | -------------------------------------------------------------------------------- /.kokoro/trampoline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2017 Google Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | # This file is not used any more, but we keep this file for making it 17 | # easy to roll back. 18 | # TODO: Remove this file from the template. 19 | 20 | set -eo pipefail 21 | 22 | # Always run the cleanup script, regardless of the success of bouncing into 23 | # the container. 24 | function cleanup() { 25 | chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 26 | ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 27 | echo "cleanup"; 28 | } 29 | trap cleanup EXIT 30 | 31 | $(dirname $0)/populate-secrets.sh # Secret Manager secrets. 32 | python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" 33 | -------------------------------------------------------------------------------- /.kokoro/trampoline_v2.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # trampoline_v2.sh 17 | # 18 | # If you want to make a change to this file, consider doing so at: 19 | # https://github.com/googlecloudplatform/docker-ci-helper 20 | # 21 | # This script is for running CI builds. For Kokoro builds, we 22 | # set this script to `build_file` field in the Kokoro configuration. 23 | 24 | # This script does 3 things. 25 | # 26 | # 1. Prepare the Docker image for the test 27 | # 2. Run the Docker with appropriate flags to run the test 28 | # 3. 
Upload the newly built Docker image 29 | # 30 | # in a way that is somewhat compatible with trampoline_v1. 31 | # 32 | # These environment variables are required: 33 | # TRAMPOLINE_IMAGE: The docker image to use. 34 | # TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 35 | # 36 | # You can optionally change these environment variables: 37 | # TRAMPOLINE_IMAGE_UPLOAD: 38 | # (true|false): Whether to upload the Docker image after the 39 | # successful builds. 40 | # TRAMPOLINE_BUILD_FILE: The script to run in the docker container. 41 | # TRAMPOLINE_WORKSPACE: The workspace path in the docker container. 42 | # Defaults to /workspace. 43 | # Potentially there are some repo specific envvars in .trampolinerc in 44 | # the project root. 45 | # 46 | # Here is an example for running this script. 47 | # TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:18-user \ 48 | # TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ 49 | # .kokoro/trampoline_v2.sh 50 | 51 | set -euo pipefail 52 | 53 | TRAMPOLINE_VERSION="2.0.7" 54 | 55 | if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then 56 | readonly IO_COLOR_RED="$(tput setaf 1)" 57 | readonly IO_COLOR_GREEN="$(tput setaf 2)" 58 | readonly IO_COLOR_YELLOW="$(tput setaf 3)" 59 | readonly IO_COLOR_RESET="$(tput sgr0)" 60 | else 61 | readonly IO_COLOR_RED="" 62 | readonly IO_COLOR_GREEN="" 63 | readonly IO_COLOR_YELLOW="" 64 | readonly IO_COLOR_RESET="" 65 | fi 66 | 67 | function function_exists { 68 | [ $(LC_ALL=C type -t $1)"" == "function" ] 69 | } 70 | 71 | # Logs a message using the given color. The first argument must be one 72 | # of the IO_COLOR_* variables defined above, such as 73 | # "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the 74 | # given color. The log message will also have an RFC-3339 timestamp 75 | # prepended (in UTC). You can disable the color output by setting 76 | # TERM=vt100. 
77 | function log_impl() { 78 | local color="$1" 79 | shift 80 | local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" 81 | echo "================================================================" 82 | echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" 83 | echo "================================================================" 84 | } 85 | 86 | # Logs the given message with normal coloring and a timestamp. 87 | function log() { 88 | log_impl "${IO_COLOR_RESET}" "$@" 89 | } 90 | 91 | # Logs the given message in green with a timestamp. 92 | function log_green() { 93 | log_impl "${IO_COLOR_GREEN}" "$@" 94 | } 95 | 96 | # Logs the given message in yellow with a timestamp. 97 | function log_yellow() { 98 | log_impl "${IO_COLOR_YELLOW}" "$@" 99 | } 100 | 101 | # Logs the given message in red with a timestamp. 102 | function log_red() { 103 | log_impl "${IO_COLOR_RED}" "$@" 104 | } 105 | 106 | readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) 107 | readonly tmphome="${tmpdir}/h" 108 | mkdir -p "${tmphome}" 109 | 110 | function cleanup() { 111 | rm -rf "${tmpdir}" 112 | } 113 | trap cleanup EXIT 114 | 115 | RUNNING_IN_CI="${RUNNING_IN_CI:-false}" 116 | 117 | # The workspace in the container, defaults to /workspace. 118 | TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" 119 | 120 | pass_down_envvars=( 121 | # TRAMPOLINE_V2 variables. 122 | # Tells scripts whether they are running as part of CI or not. 123 | "RUNNING_IN_CI" 124 | # Indicates which CI system we're in. 125 | "TRAMPOLINE_CI" 126 | # Indicates the version of the script. 127 | "TRAMPOLINE_VERSION" 128 | # Contains path to build artifacts being executed. 129 | "KOKORO_BUILD_ARTIFACTS_SUBDIR" 130 | ) 131 | 132 | log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" 133 | 134 | # Detect which CI systems we're in. If we're in any of the CI systems 135 | # we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be 136 | # the name of the CI system. 
Both envvars will be passing down to the 137 | # container for telling which CI system we're in. 138 | if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then 139 | # descriptive env var for indicating it's on CI. 140 | RUNNING_IN_CI="true" 141 | TRAMPOLINE_CI="kokoro" 142 | if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then 143 | if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then 144 | log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." 145 | exit 1 146 | fi 147 | # This service account will be activated later. 148 | TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" 149 | else 150 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 151 | gcloud auth list 152 | fi 153 | log_yellow "Configuring Container Registry access" 154 | gcloud auth configure-docker --quiet 155 | fi 156 | pass_down_envvars+=( 157 | # KOKORO dynamic variables. 
158 | "KOKORO_BUILD_NUMBER" 159 | "KOKORO_BUILD_ID" 160 | "KOKORO_JOB_NAME" 161 | "KOKORO_GIT_COMMIT" 162 | "KOKORO_GITHUB_COMMIT" 163 | "KOKORO_GITHUB_PULL_REQUEST_NUMBER" 164 | "KOKORO_GITHUB_PULL_REQUEST_COMMIT" 165 | # For flakybot 166 | "KOKORO_GITHUB_COMMIT_URL" 167 | "KOKORO_GITHUB_PULL_REQUEST_URL" 168 | ) 169 | elif [[ "${TRAVIS:-}" == "true" ]]; then 170 | RUNNING_IN_CI="true" 171 | TRAMPOLINE_CI="travis" 172 | pass_down_envvars+=( 173 | "TRAVIS_BRANCH" 174 | "TRAVIS_BUILD_ID" 175 | "TRAVIS_BUILD_NUMBER" 176 | "TRAVIS_BUILD_WEB_URL" 177 | "TRAVIS_COMMIT" 178 | "TRAVIS_COMMIT_MESSAGE" 179 | "TRAVIS_COMMIT_RANGE" 180 | "TRAVIS_JOB_NAME" 181 | "TRAVIS_JOB_NUMBER" 182 | "TRAVIS_JOB_WEB_URL" 183 | "TRAVIS_PULL_REQUEST" 184 | "TRAVIS_PULL_REQUEST_BRANCH" 185 | "TRAVIS_PULL_REQUEST_SHA" 186 | "TRAVIS_PULL_REQUEST_SLUG" 187 | "TRAVIS_REPO_SLUG" 188 | "TRAVIS_SECURE_ENV_VARS" 189 | "TRAVIS_TAG" 190 | ) 191 | elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then 192 | RUNNING_IN_CI="true" 193 | TRAMPOLINE_CI="github-workflow" 194 | pass_down_envvars+=( 195 | "GITHUB_WORKFLOW" 196 | "GITHUB_RUN_ID" 197 | "GITHUB_RUN_NUMBER" 198 | "GITHUB_ACTION" 199 | "GITHUB_ACTIONS" 200 | "GITHUB_ACTOR" 201 | "GITHUB_REPOSITORY" 202 | "GITHUB_EVENT_NAME" 203 | "GITHUB_EVENT_PATH" 204 | "GITHUB_SHA" 205 | "GITHUB_REF" 206 | "GITHUB_HEAD_REF" 207 | "GITHUB_BASE_REF" 208 | ) 209 | elif [[ "${CIRCLECI:-}" == "true" ]]; then 210 | RUNNING_IN_CI="true" 211 | TRAMPOLINE_CI="circleci" 212 | pass_down_envvars+=( 213 | "CIRCLE_BRANCH" 214 | "CIRCLE_BUILD_NUM" 215 | "CIRCLE_BUILD_URL" 216 | "CIRCLE_COMPARE_URL" 217 | "CIRCLE_JOB" 218 | "CIRCLE_NODE_INDEX" 219 | "CIRCLE_NODE_TOTAL" 220 | "CIRCLE_PREVIOUS_BUILD_NUM" 221 | "CIRCLE_PROJECT_REPONAME" 222 | "CIRCLE_PROJECT_USERNAME" 223 | "CIRCLE_REPOSITORY_URL" 224 | "CIRCLE_SHA1" 225 | "CIRCLE_STAGE" 226 | "CIRCLE_USERNAME" 227 | "CIRCLE_WORKFLOW_ID" 228 | "CIRCLE_WORKFLOW_JOB_ID" 229 | "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" 230 | "CIRCLE_WORKFLOW_WORKSPACE_ID" 
231 | ) 232 | fi 233 | 234 | # Configure the service account for pulling the docker image. 235 | function repo_root() { 236 | local dir="$1" 237 | while [[ ! -d "${dir}/.git" ]]; do 238 | dir="$(dirname "$dir")" 239 | done 240 | echo "${dir}" 241 | } 242 | 243 | # Detect the project root. In CI builds, we assume the script is in 244 | # the git tree and traverse from there, otherwise, traverse from `pwd` 245 | # to find `.git` directory. 246 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 247 | PROGRAM_PATH="$(realpath "$0")" 248 | PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" 249 | PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" 250 | else 251 | PROJECT_ROOT="$(repo_root $(pwd))" 252 | fi 253 | 254 | log_yellow "Changing to the project root: ${PROJECT_ROOT}." 255 | cd "${PROJECT_ROOT}" 256 | 257 | # To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need 258 | # to use this environment variable in `PROJECT_ROOT`. 259 | if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then 260 | 261 | mkdir -p "${tmpdir}/gcloud" 262 | gcloud_config_dir="${tmpdir}/gcloud" 263 | 264 | log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." 265 | export CLOUDSDK_CONFIG="${gcloud_config_dir}" 266 | 267 | log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." 268 | gcloud auth activate-service-account \ 269 | --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" 270 | log_yellow "Configuring Container Registry access" 271 | gcloud auth configure-docker --quiet 272 | fi 273 | 274 | required_envvars=( 275 | # The basic trampoline configurations. 276 | "TRAMPOLINE_IMAGE" 277 | "TRAMPOLINE_BUILD_FILE" 278 | ) 279 | 280 | if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then 281 | source "${PROJECT_ROOT}/.trampolinerc" 282 | fi 283 | 284 | log_yellow "Checking environment variables." 285 | for e in "${required_envvars[@]}" 286 | do 287 | if [[ -z "${!e:-}" ]]; then 288 | log "Missing ${e} env var. Aborting." 
289 | exit 1 290 | fi 291 | done 292 | 293 | # We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 294 | # script: e.g. "github/repo-name/.kokoro/run_tests.sh" 295 | TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" 296 | log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" 297 | 298 | # ignore error on docker operations and test execution 299 | set +e 300 | 301 | log_yellow "Preparing Docker image." 302 | # We only download the docker image in CI builds. 303 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 304 | # Download the docker image specified by `TRAMPOLINE_IMAGE` 305 | 306 | # We may want to add --max-concurrent-downloads flag. 307 | 308 | log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." 309 | if docker pull "${TRAMPOLINE_IMAGE}"; then 310 | log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." 311 | has_image="true" 312 | else 313 | log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." 314 | has_image="false" 315 | fi 316 | else 317 | # For local run, check if we have the image. 318 | if docker images "${TRAMPOLINE_IMAGE}" | grep "${TRAMPOLINE_IMAGE%:*}"; then 319 | has_image="true" 320 | else 321 | has_image="false" 322 | fi 323 | fi 324 | 325 | 326 | # The default user for a Docker container has uid 0 (root). To avoid 327 | # creating root-owned files in the build directory we tell docker to 328 | # use the current user ID. 329 | user_uid="$(id -u)" 330 | user_gid="$(id -g)" 331 | user_name="$(id -un)" 332 | 333 | # To allow docker in docker, we add the user to the docker group in 334 | # the host os. 335 | docker_gid=$(cut -d: -f3 < <(getent group docker)) 336 | 337 | update_cache="false" 338 | if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then 339 | # Build the Docker image from the source. 
340 | context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") 341 | docker_build_flags=( 342 | "-f" "${TRAMPOLINE_DOCKERFILE}" 343 | "-t" "${TRAMPOLINE_IMAGE}" 344 | "--build-arg" "UID=${user_uid}" 345 | "--build-arg" "USERNAME=${user_name}" 346 | ) 347 | if [[ "${has_image}" == "true" ]]; then 348 | docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") 349 | fi 350 | 351 | log_yellow "Start building the docker image." 352 | if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then 353 | echo "docker build" "${docker_build_flags[@]}" "${context_dir}" 354 | fi 355 | 356 | # ON CI systems, we want to suppress docker build logs, only 357 | # output the logs when it fails. 358 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 359 | if docker build "${docker_build_flags[@]}" "${context_dir}" \ 360 | > "${tmpdir}/docker_build.log" 2>&1; then 361 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 362 | cat "${tmpdir}/docker_build.log" 363 | fi 364 | 365 | log_green "Finished building the docker image." 366 | update_cache="true" 367 | else 368 | log_red "Failed to build the Docker image, aborting." 369 | log_yellow "Dumping the build logs:" 370 | cat "${tmpdir}/docker_build.log" 371 | exit 1 372 | fi 373 | else 374 | if docker build "${docker_build_flags[@]}" "${context_dir}"; then 375 | log_green "Finished building the docker image." 376 | update_cache="true" 377 | else 378 | log_red "Failed to build the Docker image, aborting." 379 | exit 1 380 | fi 381 | fi 382 | else 383 | if [[ "${has_image}" != "true" ]]; then 384 | log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." 385 | exit 1 386 | fi 387 | fi 388 | 389 | # We use an array for the flags so they are easier to document. 390 | docker_flags=( 391 | # Remove the container after it exists. 392 | "--rm" 393 | 394 | # Use the host network. 395 | "--network=host" 396 | 397 | # Run in priviledged mode. We are not using docker for sandboxing or 398 | # isolation, just for packaging our dev tools. 
399 | "--privileged" 400 | 401 | # Run the docker script with the user id. Because the docker image gets to 402 | # write in ${PWD} you typically want this to be your user id. 403 | # To allow docker in docker, we need to use docker gid on the host. 404 | "--user" "${user_uid}:${docker_gid}" 405 | 406 | # Pass down the USER. 407 | "--env" "USER=${user_name}" 408 | 409 | # Mount the project directory inside the Docker container. 410 | "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" 411 | "--workdir" "${TRAMPOLINE_WORKSPACE}" 412 | "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" 413 | 414 | # Mount the temporary home directory. 415 | "--volume" "${tmphome}:/h" 416 | "--env" "HOME=/h" 417 | 418 | # Allow docker in docker. 419 | "--volume" "/var/run/docker.sock:/var/run/docker.sock" 420 | 421 | # Mount the /tmp so that docker in docker can mount the files 422 | # there correctly. 423 | "--volume" "/tmp:/tmp" 424 | # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR 425 | # TODO(tmatsuo): This part is not portable. 426 | "--env" "TRAMPOLINE_SECRET_DIR=/secrets" 427 | "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" 428 | "--env" "KOKORO_GFILE_DIR=/secrets/gfile" 429 | "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" 430 | "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" 431 | ) 432 | 433 | # Add an option for nicer output if the build gets a tty. 434 | if [[ -t 0 ]]; then 435 | docker_flags+=("-it") 436 | fi 437 | 438 | # Passing down env vars 439 | for e in "${pass_down_envvars[@]}" 440 | do 441 | if [[ -n "${!e:-}" ]]; then 442 | docker_flags+=("--env" "${e}=${!e}") 443 | fi 444 | done 445 | 446 | # If arguments are given, all arguments will become the commands run 447 | # in the container, otherwise run TRAMPOLINE_BUILD_FILE. 448 | if [[ $# -ge 1 ]]; then 449 | log_yellow "Running the given commands '" "${@:1}" "' in the container." 
450 | readonly commands=("${@:1}") 451 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 452 | echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" 453 | fi 454 | docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" 455 | else 456 | log_yellow "Running the tests in a Docker container." 457 | docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") 458 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 459 | echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" 460 | fi 461 | docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" 462 | fi 463 | 464 | 465 | test_retval=$? 466 | 467 | if [[ ${test_retval} -eq 0 ]]; then 468 | log_green "Build finished with ${test_retval}" 469 | else 470 | log_red "Build finished with ${test_retval}" 471 | fi 472 | 473 | # Only upload it when the test passes. 474 | if [[ "${update_cache}" == "true" ]] && \ 475 | [[ $test_retval == 0 ]] && \ 476 | [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then 477 | log_yellow "Uploading the Docker image." 478 | if docker push "${TRAMPOLINE_IMAGE}"; then 479 | log_green "Finished uploading the Docker image." 480 | else 481 | log_red "Failed uploading the Docker image." 482 | fi 483 | # Call trampoline_after_upload_hook if it's defined. 484 | if function_exists trampoline_after_upload_hook; then 485 | trampoline_after_upload_hook 486 | fi 487 | 488 | fi 489 | 490 | exit "${test_retval}" 491 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Mocha configuration shared by every test run in this repo. A few
// environment variables let CI adjust behavior without editing this file.
const config = {
  'enable-source-maps': true,
  'throw-deprecation': true,
  timeout: 10000,
  recursive: true,
};

// Opt out of throwing on deprecation warnings when explicitly requested
// (some dependency chains emit deprecations we cannot control in CI).
if (process.env.MOCHA_THROW_DEPRECATION === 'false') {
  delete config['throw-deprecation'];
}

// Optional reporter overrides, e.g. for machine-readable CI output.
if (process.env.MOCHA_REPORTER) {
  config.reporter = process.env.MOCHA_REPORTER;
}
if (process.env.MOCHA_REPORTER_OUTPUT) {
  config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`;
}

module.exports = config;
-------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | module.exports = { 16 | ...require('gts/.prettierrc.json') 17 | } 18 | -------------------------------------------------------------------------------- /.readme-partials.yaml: -------------------------------------------------------------------------------- 1 | body: |- 2 | 3 | #### Check to see if the metadata server is available 4 | ```js 5 | const isAvailable = await gcpMetadata.isAvailable(); 6 | ``` 7 | 8 | #### Access all metadata 9 | 10 | ```js 11 | const data = await gcpMetadata.instance(); 12 | console.log(data); // ... 
All metadata properties 13 | ``` 14 | 15 | #### Access specific properties 16 | ```js 17 | const data = await gcpMetadata.instance('hostname'); 18 | console.log(data); // ...Instance hostname 19 | const projectId = await gcpMetadata.project('project-id'); 20 | console.log(projectId); // ...Project ID of the running instance 21 | ``` 22 | 23 | #### Access nested properties with the relative path 24 | ```js 25 | const data = await gcpMetadata.instance('service-accounts/default/email'); 26 | console.log(data); // ...Email address of the Compute identity service account 27 | ``` 28 | 29 | #### Access specific properties with query parameters 30 | ```js 31 | const data = await gcpMetadata.instance({ 32 | property: 'tags', 33 | params: { alt: 'text' } 34 | }); 35 | console.log(data) // ...Tags as newline-delimited list 36 | ``` 37 | 38 | #### Access with custom headers 39 | ```js 40 | await gcpMetadata.instance({ 41 | headers: { 'no-trace': '1' } 42 | }); // ...Request is untraced 43 | ``` 44 | 45 | ### Take care with large number valued properties 46 | 47 | In some cases number valued properties returned by the Metadata Service may be 48 | too large to be representable as JavaScript numbers. In such cases we return 49 | those values as `BigNumber` objects (from the [bignumber.js](https://github.com/MikeMcl/bignumber.js) library). Numbers 50 | that fit within the JavaScript number range will be returned as normal number 51 | values. 52 | 53 | ```js 54 | const id = await gcpMetadata.instance('id'); 55 | console.log(id) // ... BigNumber { s: 1, e: 18, c: [ 45200, 31799277581759 ] } 56 | console.log(id.toString()) // ... 4520031799277581759 57 | ``` 58 | 59 | ### Environment variables 60 | 61 | * `GCE_METADATA_HOST`: provide an alternate host or IP to perform lookup against (useful, for example, you're connecting through a custom proxy server). 
62 | 63 | For example: 64 | ``` 65 | export GCE_METADATA_HOST='169.254.169.254' 66 | ``` 67 | 68 | * `DETECT_GCP_RETRIES`: number representing number of retries that should be attempted on metadata lookup. 69 | 70 | * `DEBUG_AUTH`: emit debugging logs 71 | 72 | * `METADATA_SERVER_DETECTION`: configure desired metadata server availability check behavior. 73 | 74 | * `assume-present`: don't try to ping the metadata server, but assume it's present 75 | * `none`: don't try to ping the metadata server, but don't try to use it either 76 | * `bios-only`: treat the result of a BIOS probe as canonical (don't fall back to pinging) 77 | * `ping-only`: skip the BIOS probe, and go straight to pinging 78 | -------------------------------------------------------------------------------- /.repo-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcp-metadata", 3 | "name_pretty": "GCP Metadata", 4 | "product_documentation": "https://cloud.google.com/compute/docs/storing-retrieving-metadata", 5 | "client_documentation": "https://cloud.google.com/nodejs/docs/reference/gcp-metadata/latest", 6 | "issue_tracker": "https://github.com/googleapis/gcp-metadata/issues", 7 | "release_level": "stable", 8 | "language": "nodejs", 9 | "repo": "googleapis/gcp-metadata", 10 | "distribution_name": "gcp-metadata", 11 | "requires_billing": false, 12 | "library_type": "CORE" 13 | } 14 | -------------------------------------------------------------------------------- /.trampolinerc: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Template for .trampolinerc 16 | 17 | # Add required env vars here. 18 | required_envvars+=( 19 | ) 20 | 21 | # Add env vars which are passed down into the container here. 22 | pass_down_envvars+=( 23 | "AUTORELEASE_PR" 24 | "VERSION" 25 | ) 26 | 27 | # Prevent unintentional override on the default image. 28 | if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ 29 | [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 30 | echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." 31 | exit 1 32 | fi 33 | 34 | # Define the default value if it makes sense. 35 | if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then 36 | TRAMPOLINE_IMAGE_UPLOAD="" 37 | fi 38 | 39 | if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 40 | TRAMPOLINE_IMAGE="" 41 | fi 42 | 43 | if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then 44 | TRAMPOLINE_DOCKERFILE="" 45 | fi 46 | 47 | if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then 48 | TRAMPOLINE_BUILD_FILE="" 49 | fi 50 | 51 | # Secret Manager secrets. 
52 | source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh 53 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | In the interest of fostering an open and welcoming environment, we as 7 | contributors and maintainers pledge to making participation in our project and 8 | our community a harassment-free experience for everyone, regardless of age, body 9 | size, disability, ethnicity, gender identity and expression, level of 10 | experience, education, socio-economic status, nationality, personal appearance, 11 | race, religion, or sexual identity and orientation. 12 | 13 | ## Our Standards 14 | 15 | Examples of behavior that contributes to creating a positive environment 16 | include: 17 | 18 | * Using welcoming and inclusive language 19 | * Being respectful of differing viewpoints and experiences 20 | * Gracefully accepting constructive criticism 21 | * Focusing on what is best for the community 22 | * Showing empathy towards other community members 23 | 24 | Examples of unacceptable behavior by participants include: 25 | 26 | * The use of sexualized language or imagery and unwelcome sexual attention or 27 | advances 28 | * Trolling, insulting/derogatory comments, and personal or political attacks 29 | * Public or private harassment 30 | * Publishing others' private information, such as a physical or electronic 31 | address, without explicit permission 32 | * Other conduct which could reasonably be considered inappropriate in a 33 | professional setting 34 | 35 | ## Our Responsibilities 36 | 37 | Project maintainers are responsible for clarifying the standards of acceptable 38 | behavior and are expected to take appropriate and fair corrective action in 39 | response to any instances of unacceptable behavior. 
40 | 41 | Project maintainers have the right and responsibility to remove, edit, or reject 42 | comments, commits, code, wiki edits, issues, and other contributions that are 43 | not aligned to this Code of Conduct, or to ban temporarily or permanently any 44 | contributor for other behaviors that they deem inappropriate, threatening, 45 | offensive, or harmful. 46 | 47 | ## Scope 48 | 49 | This Code of Conduct applies both within project spaces and in public spaces 50 | when an individual is representing the project or its community. Examples of 51 | representing a project or community include using an official project e-mail 52 | address, posting via an official social media account, or acting as an appointed 53 | representative at an online or offline event. Representation of a project may be 54 | further defined and clarified by project maintainers. 55 | 56 | This Code of Conduct also applies outside the project spaces when the Project 57 | Steward has a reasonable belief that an individual's behavior may have a 58 | negative impact on the project or its community. 59 | 60 | ## Conflict Resolution 61 | 62 | We do not believe that all conflict is bad; healthy debate and disagreement 63 | often yield positive results. However, it is never okay to be disrespectful or 64 | to engage in behavior that violates the project’s code of conduct. 65 | 66 | If you see someone violating the code of conduct, you are encouraged to address 67 | the behavior directly with those involved. Many issues can be resolved quickly 68 | and easily, and this gives people more control over the outcome of their 69 | dispute. If you are unable to resolve the matter for any reason, or if the 70 | behavior is threatening or harassing, report it. We are dedicated to providing 71 | an environment where participants feel welcome and safe. 72 | 73 | Reports should be directed to *googleapis-stewards@google.com*, the 74 | Project Steward(s) for *Google Cloud Client Libraries*. 
It is the Project Steward’s duty to 75 | receive and address reported violations of the code of conduct. They will then 76 | work with a committee consisting of representatives from the Open Source 77 | Programs Office and the Google Open Source Strategy team. If for any reason you 78 | are uncomfortable reaching out to the Project Steward, please email 79 | opensource@google.com. 80 | 81 | We will investigate every complaint, but you may not receive a direct response. 82 | We will use our discretion in determining when and how to follow up on reported 83 | incidents, which may range from not taking action to permanent expulsion from 84 | the project and project-sponsored spaces. We will notify the accused of the 85 | report and provide them an opportunity to discuss it before any action is taken. 86 | The identity of the reporter will be omitted from the details of the report 87 | supplied to the accused. In potentially harmful situations, such as ongoing 88 | harassment or threats to anyone's safety, we may take action without notice. 89 | 90 | ## Attribution 91 | 92 | This Code of Conduct is adapted from the Contributor Covenant, version 1.4, 93 | available at 94 | https://www.contributor-covenant.org/version/1/4/code-of-conduct.html -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to become a contributor and submit your own code 2 | 3 | **Table of contents** 4 | 5 | * [Contributor License Agreements](#contributor-license-agreements) 6 | * [Contributing a patch](#contributing-a-patch) 7 | * [Running the tests](#running-the-tests) 8 | * [Releasing the library](#releasing-the-library) 9 | 10 | ## Contributor License Agreements 11 | 12 | We'd love to accept your sample apps and patches! Before we can take them, we 13 | have to jump a couple of legal hurdles. 
14 | 15 | Please fill out either the individual or corporate Contributor License Agreement 16 | (CLA). 17 | 18 | * If you are an individual writing original source code and you're sure you 19 | own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). 20 | * If you work for a company that wants to allow you to contribute your work, 21 | then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). 22 | 23 | Follow either of the two links above to access the appropriate CLA and 24 | instructions for how to sign and return it. Once we receive it, we'll be able to 25 | accept your pull requests. 26 | 27 | ## Contributing A Patch 28 | 29 | 1. Submit an issue describing your proposed change to the repo in question. 30 | 1. The repo owner will respond to your issue promptly. 31 | 1. If your proposed change is accepted, and you haven't already done so, sign a 32 | Contributor License Agreement (see details above). 33 | 1. Fork the desired repo, develop and test your code changes. 34 | 1. Ensure that your code adheres to the existing style in the code to which 35 | you are contributing. 36 | 1. Ensure that your code has an appropriate set of tests which all pass. 37 | 1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. 38 | 1. Submit a pull request. 39 | 40 | ### Before you begin 41 | 42 | 1. [Select or create a Cloud Platform project][projects]. 43 | 1. [Set up authentication with a service account][auth] so you can access the 44 | API from your local workstation. 45 | 46 | 47 | ## Running the tests 48 | 49 | 1. [Prepare your environment for Node.js setup][setup]. 50 | 51 | 1. Install dependencies: 52 | 53 | npm install 54 | 55 | 1. Run the tests: 56 | 57 | # Run unit tests. 58 | npm test 59 | 60 | # Run sample integration tests. 61 | npm run samples-test 62 | 63 | # Run all system tests. 
64 | npm run system-test 65 | 66 | 1. Lint (and maybe fix) any changes: 67 | 68 | npm run fix 69 | 70 | [setup]: https://cloud.google.com/nodejs/docs/setup 71 | [projects]: https://console.cloud.google.com/project 72 | [billing]: https://support.google.com/cloud/answer/6293499#enable-billing 73 | 74 | [auth]: https://cloud.google.com/docs/authentication/getting-started -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 
30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 
62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [//]: # "This README.md file is auto-generated, all changes to this file will be lost." 2 | [//]: # "To regenerate it, use `python -m synthtool`." 3 | Google Cloud Platform logo 4 | 5 | # [GCP Metadata: Node.js Client](https://github.com/googleapis/gcp-metadata) 6 | 7 | [![release level](https://img.shields.io/badge/release%20level-stable-brightgreen.svg?style=flat)](https://cloud.google.com/terms/launch-stages) 8 | [![npm version](https://img.shields.io/npm/v/gcp-metadata.svg)](https://www.npmjs.org/package/gcp-metadata) 9 | 10 | 11 | 12 | 13 | Get the metadata from a Google Cloud Platform environment 14 | 15 | 16 | A comprehensive list of changes in each version may be found in 17 | [the CHANGELOG](https://github.com/googleapis/gcp-metadata/blob/main/CHANGELOG.md). 
18 | 19 | * [GCP Metadata Node.js Client API Reference][client-docs] 20 | * [GCP Metadata Documentation][product-docs] 21 | * [github.com/googleapis/gcp-metadata](https://github.com/googleapis/gcp-metadata) 22 | 23 | Read more about the client libraries for Cloud APIs, including the older 24 | Google APIs Client Libraries, in [Client Libraries Explained][explained]. 25 | 26 | [explained]: https://cloud.google.com/apis/docs/client-libraries-explained 27 | 28 | **Table of contents:** 29 | 30 | 31 | * [Quickstart](#quickstart) 32 | 33 | * [Installing the client library](#installing-the-client-library) 34 | * [Using the client library](#using-the-client-library) 35 | * [Samples](#samples) 36 | * [Versioning](#versioning) 37 | * [Contributing](#contributing) 38 | * [License](#license) 39 | 40 | ## Quickstart 41 | 42 | ### Installing the client library 43 | 44 | ```bash 45 | npm install gcp-metadata 46 | ``` 47 | 48 | 49 | ### Using the client library 50 | 51 | ```javascript 52 | const gcpMetadata = require('gcp-metadata'); 53 | 54 | async function quickstart() { 55 | // check to see if this code can access a metadata server 56 | const isAvailable = await gcpMetadata.isAvailable(); 57 | console.log(`Is available: ${isAvailable}`); 58 | 59 | // Instance and Project level metadata will only be available if 60 | // running inside of a Google Cloud compute environment such as 61 | // Cloud Functions, App Engine, Kubernetes Engine, or Compute Engine. 
62 | // To learn more about the differences between instance and project 63 | // level metadata, see: 64 | // https://cloud.google.com/compute/docs/storing-retrieving-metadata#project-instance-metadata 65 | if (isAvailable) { 66 | // grab all top level metadata from the service 67 | const instanceMetadata = await gcpMetadata.instance(); 68 | console.log('Instance metadata:'); 69 | console.log(instanceMetadata); 70 | 71 | // get all project level metadata 72 | const projectMetadata = await gcpMetadata.project(); 73 | console.log('Project metadata:'); 74 | console.log(projectMetadata); 75 | } 76 | } 77 | 78 | quickstart(); 79 | 80 | ``` 81 | 82 | #### Check to see if the metadata server is available 83 | ```js 84 | const isAvailable = await gcpMetadata.isAvailable(); 85 | ``` 86 | 87 | #### Access all metadata 88 | 89 | ```js 90 | const data = await gcpMetadata.instance(); 91 | console.log(data); // ... All metadata properties 92 | ``` 93 | 94 | #### Access specific properties 95 | ```js 96 | const data = await gcpMetadata.instance('hostname'); 97 | console.log(data); // ...Instance hostname 98 | const projectId = await gcpMetadata.project('project-id'); 99 | console.log(projectId); // ...Project ID of the running instance 100 | ``` 101 | 102 | #### Access nested properties with the relative path 103 | ```js 104 | const data = await gcpMetadata.instance('service-accounts/default/email'); 105 | console.log(data); // ...Email address of the Compute identity service account 106 | ``` 107 | 108 | #### Access specific properties with query parameters 109 | ```js 110 | const data = await gcpMetadata.instance({ 111 | property: 'tags', 112 | params: { alt: 'text' } 113 | }); 114 | console.log(data) // ...Tags as newline-delimited list 115 | ``` 116 | 117 | #### Access with custom headers 118 | ```js 119 | await gcpMetadata.instance({ 120 | headers: { 'no-trace': '1' } 121 | }); // ...Request is untraced 122 | ``` 123 | 124 | ### Take care with large number valued properties 
125 | 126 | In some cases number valued properties returned by the Metadata Service may be 127 | too large to be representable as JavaScript numbers. In such cases we return 128 | those values as `BigNumber` objects (from the [bignumber.js](https://github.com/MikeMcl/bignumber.js) library). Numbers 129 | that fit within the JavaScript number range will be returned as normal number 130 | values. 131 | 132 | ```js 133 | const id = await gcpMetadata.instance('id'); 134 | console.log(id) // ... BigNumber { s: 1, e: 18, c: [ 45200, 31799277581759 ] } 135 | console.log(id.toString()) // ... 4520031799277581759 136 | ``` 137 | 138 | ### Environment variables 139 | 140 | * `GCE_METADATA_HOST`: provide an alternate host or IP to perform lookup against (useful, for example, if you're connecting through a custom proxy server). 141 | 142 | For example: 143 | ``` 144 | export GCE_METADATA_HOST='169.254.169.254' 145 | ``` 146 | 147 | * `DETECT_GCP_RETRIES`: number representing the number of retries that should be attempted on metadata lookup. 148 | 149 | * `DEBUG_AUTH`: emit debugging logs 150 | 151 | * `METADATA_SERVER_DETECTION`: configure desired metadata server availability check behavior. 152 | 153 | * `assume-present`: don't try to ping the metadata server, but assume it's present 154 | * `none`: don't try to ping the metadata server, but don't try to use it either 155 | * `bios-only`: treat the result of a BIOS probe as canonical (don't fall back to pinging) 156 | * `ping-only`: skip the BIOS probe, and go straight to pinging 157 | 158 | 159 | ## Samples 160 | 161 | Samples are in the [`samples/`](https://github.com/googleapis/gcp-metadata/tree/main/samples) directory. Each sample's `README.md` has instructions for running its sample. 
162 | 163 | | Sample | Source Code | Try it | 164 | | --------------------------- | --------------------------------- | ------ | 165 | | Quickstart | [source code](https://github.com/googleapis/gcp-metadata/blob/main/samples/quickstart.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/gcp-metadata&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) | 166 | 167 | 168 | 169 | The [GCP Metadata Node.js Client API Reference][client-docs] documentation 170 | also contains samples. 171 | 172 | ## Supported Node.js Versions 173 | 174 | Our client libraries follow the [Node.js release schedule](https://github.com/nodejs/release#release-schedule). 175 | Libraries are compatible with all current _active_ and _maintenance_ versions of 176 | Node.js. 177 | If you are using an end-of-life version of Node.js, we recommend that you update 178 | as soon as possible to an actively supported LTS version. 179 | 180 | Google's client libraries support legacy versions of Node.js runtimes on a 181 | best-efforts basis with the following warnings: 182 | 183 | * Legacy versions are not tested in continuous integration. 184 | * Some security patches and features cannot be backported. 185 | * Dependencies cannot be kept up-to-date. 186 | 187 | Client libraries targeting some end-of-life versions of Node.js are available, and 188 | can be installed through npm [dist-tags](https://docs.npmjs.com/cli/dist-tag). 189 | The dist-tags follow the naming convention `legacy-(version)`. 190 | For example, `npm install gcp-metadata@legacy-8` installs client libraries 191 | for versions compatible with Node.js 8. 192 | 193 | ## Versioning 194 | 195 | This library follows [Semantic Versioning](http://semver.org/). 196 | 197 | 198 | 199 | This library is considered to be **stable**. The code surface will not change in backwards-incompatible ways 200 | unless absolutely necessary (e.g. 
because of critical security issues) or with 201 | an extensive deprecation period. Issues and requests against **stable** libraries 202 | are addressed with the highest priority. 203 | 204 | 205 | 206 | 207 | 208 | 209 | More Information: [Google Cloud Platform Launch Stages][launch_stages] 210 | 211 | [launch_stages]: https://cloud.google.com/terms/launch-stages 212 | 213 | ## Contributing 214 | 215 | Contributions welcome! See the [Contributing Guide](https://github.com/googleapis/gcp-metadata/blob/main/CONTRIBUTING.md). 216 | 217 | Please note that this `README.md`, the `samples/README.md`, 218 | and a variety of configuration files in this repository (including `.nycrc` and `tsconfig.json`) 219 | are generated from a central template. To edit one of these files, make an edit 220 | to its templates in the 221 | [synthtool repository](https://github.com/googleapis/synthtool). 222 | 223 | ## License 224 | 225 | Apache Version 2.0 226 | 227 | See [LICENSE](https://github.com/googleapis/gcp-metadata/blob/main/LICENSE) 228 | 229 | [client-docs]: https://cloud.google.com/nodejs/docs/reference/gcp-metadata/latest 230 | [product-docs]: https://cloud.google.com/compute/docs/storing-retrieving-metadata 231 | [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png 232 | [projects]: https://console.cloud.google.com/project 233 | [billing]: https://support.google.com/cloud/answer/6293499#enable-billing 234 | 235 | [auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local 236 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). 4 | 5 | The Google Security Team will respond within 5 working days of your report on g.co/vulnz. 
6 | 7 | We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 8 | -------------------------------------------------------------------------------- /linkinator.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "recurse": true, 3 | "skip": [ 4 | "https://codecov.io/gh/googleapis/", 5 | "www.googleapis.com", 6 | "img.shields.io" 7 | ], 8 | "silent": true, 9 | "concurrency": 10 10 | } 11 | -------------------------------------------------------------------------------- /owlbot.py: -------------------------------------------------------------------------------- 1 | # See the License for the specific language governing permissions and 2 | # limitations under the License. 3 | import synthtool as s 4 | import synthtool.gcp as gcp 5 | import synthtool.languages.node as node 6 | 7 | common_templates = gcp.CommonTemplates() 8 | templates = common_templates.node_library() 9 | s.copy(sources=templates, excludes=[".github/ISSUE_TEMPLATE", ".github/scripts", ".github/workflows/issues-no-repro.yaml", ".kokoro/samples-test.sh", ".github/release-please.yml", ".github/sync-repo-settings.yaml"]) 10 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcp-metadata", 3 | "version": "6.1.0", 4 | "description": "Get the metadata from a Google Cloud Platform environment", 5 | "repository": "googleapis/gcp-metadata", 6 | "main": "./build/src/index.js", 7 | "types": "./build/src/index.d.ts", 8 | "module": "commonjs", 9 | "files": [ 10 | "build/src" 11 | ], 12 | "scripts": { 13 | "compile": "cross-env NODE_OPTIONS=--max-old-space-size=8192 tsc -p .", 14 | "fix": "gts fix", 15 | "pretest": "npm run compile", 16 | "prepare": "npm run compile", 17 | "samples-test": "npm link && cd samples/ && npm link ../ && 
npm test && cd ../", 18 | "presystem-test": "npm run compile", 19 | "system-test": "mocha build/system-test --timeout 600000", 20 | "test": "c8 mocha --timeout=5000 build/test", 21 | "docs": "jsdoc -c .jsdoc.js", 22 | "lint": "gts check", 23 | "docs-test": "linkinator docs", 24 | "predocs-test": "npm run docs", 25 | "prelint": "cd samples; npm link ../; npm install", 26 | "clean": "gts clean", 27 | "precompile": "gts clean" 28 | }, 29 | "keywords": [ 30 | "google cloud platform", 31 | "google cloud", 32 | "google", 33 | "app engine", 34 | "compute engine", 35 | "metadata server", 36 | "metadata" 37 | ], 38 | "author": "Google LLC", 39 | "license": "Apache-2.0", 40 | "dependencies": { 41 | "gaxios": "^7.0.0-rc.5", 42 | "google-logging-utils": "^1.0.0", 43 | "json-bigint": "^1.0.0" 44 | }, 45 | "devDependencies": { 46 | "@google-cloud/functions": "^3.6.0", 47 | "@types/json-bigint": "^1.0.4", 48 | "@types/mocha": "^10.0.9", 49 | "@types/ncp": "^2.0.8", 50 | "@types/node": "^22.9.0", 51 | "@types/sinon": "^17.0.3", 52 | "@types/tmp": "^0.2.6", 53 | "c8": "^10.1.2", 54 | "cross-env": "^7.0.3", 55 | "gcbuild": "^1.3.39", 56 | "gcx": "^2.0.27", 57 | "gts": "^6.0.2", 58 | "jsdoc": "^4.0.4", 59 | "jsdoc-fresh": "^3.0.0", 60 | "jsdoc-region-tag": "^3.0.0", 61 | "linkinator": "^6.1.2", 62 | "mocha": "^11.1.0", 63 | "ncp": "^2.0.0", 64 | "nock": "^14.0.1", 65 | "sinon": "^20.0.0", 66 | "tmp": "^0.2.3", 67 | "typescript": "^5.6.3" 68 | }, 69 | "engines": { 70 | "node": ">=18" 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | "docker:disable", 5 | ":disableDependencyDashboard" 6 | ], 7 | "constraintsFiltering": "strict", 8 | "pinVersions": false, 9 | "rebaseStalePrs": true, 10 | "schedule": [ 11 | "after 9am and before 3pm" 12 | ], 13 | "gitAuthor": null, 14 | "packageRules": [ 15 | { 16 
| "extends": "packages:linters", 17 | "groupName": "linters" 18 | } 19 | ], 20 | "ignoreDeps": ["typescript"] 21 | } 22 | -------------------------------------------------------------------------------- /samples/.eslintrc.yml: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | no-console: off 4 | -------------------------------------------------------------------------------- /samples/README.md: -------------------------------------------------------------------------------- 1 | [//]: # "This README.md file is auto-generated, all changes to this file will be lost." 2 | [//]: # "To regenerate it, use `python -m synthtool`." 3 | Google Cloud Platform logo 4 | 5 | # [GCP Metadata: Node.js Samples](https://github.com/googleapis/gcp-metadata) 6 | 7 | [![Open in Cloud Shell][shell_img]][shell_link] 8 | 9 | 10 | 11 | ## Table of Contents 12 | 13 | * [Before you begin](#before-you-begin) 14 | * [Samples](#samples) 15 | * [Quickstart](#quickstart) 16 | 17 | ## Before you begin 18 | 19 | Before running the samples, make sure you've followed the steps outlined in 20 | [Using the client library](https://github.com/googleapis/gcp-metadata#using-the-client-library). 21 | 22 | `cd samples` 23 | 24 | `npm install` 25 | 26 | `cd ..` 27 | 28 | ## Samples 29 | 30 | 31 | 32 | ### Quickstart 33 | 34 | View the [source code](https://github.com/googleapis/gcp-metadata/blob/main/samples/quickstart.js). 
35 | 36 | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/gcp-metadata&page=editor&open_in_editor=samples/quickstart.js,samples/README.md) 37 | 38 | __Usage:__ 39 | 40 | 41 | `node samples/quickstart.js` 42 | 43 | 44 | 45 | 46 | 47 | 48 | [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png 49 | [shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/gcp-metadata&page=editor&open_in_editor=samples/README.md 50 | [product-docs]: https://cloud.google.com/compute/docs/storing-retrieving-metadata 51 | -------------------------------------------------------------------------------- /samples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Samples for the gcp-metadata npm module.", 3 | "license": "Apache-2.0", 4 | "author": "Google LLC", 5 | "engines": { 6 | "node": ">=18" 7 | }, 8 | "files": [ 9 | "*.js" 10 | ], 11 | "repository": "googleapis/gcp-metadata", 12 | "private": true, 13 | "scripts": { 14 | "test": "mocha" 15 | }, 16 | "dependencies": { 17 | "gcp-metadata": "^6.1.0" 18 | }, 19 | "devDependencies": { 20 | "chai": "^4.2.0", 21 | "mocha": "^8.0.0" 22 | } 23 | } -------------------------------------------------------------------------------- /samples/quickstart.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | 'use strict'; 18 | 19 | function main() { 20 | // [START gcpmetadata_quickstart] 21 | const gcpMetadata = require('gcp-metadata'); 22 | 23 | async function quickstart() { 24 | // check to see if this code can access a metadata server 25 | const isAvailable = await gcpMetadata.isAvailable(); 26 | console.log(`Is available: ${isAvailable}`); 27 | 28 | // Instance and Project level metadata will only be available if 29 | // running inside of a Google Cloud compute environment such as 30 | // Cloud Functions, App Engine, Kubernetes Engine, or Compute Engine. 31 | // To learn more about the differences between instance and project 32 | // level metadata, see: 33 | // https://cloud.google.com/compute/docs/storing-retrieving-metadata#project-instance-metadata 34 | if (isAvailable) { 35 | // grab all top level metadata from the service 36 | const instanceMetadata = await gcpMetadata.instance(); 37 | console.log('Instance metadata:'); 38 | console.log(instanceMetadata); 39 | 40 | // get all project level metadata 41 | const projectMetadata = await gcpMetadata.project(); 42 | console.log('Project metadata:'); 43 | console.log(projectMetadata); 44 | } 45 | } 46 | 47 | quickstart(); 48 | // [END gcpmetadata_quickstart] 49 | } 50 | 51 | main(); 52 | -------------------------------------------------------------------------------- /samples/test/test.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | const {assert} = require('chai'); 18 | const {describe, it} = require('mocha'); 19 | const cp = require('child_process'); 20 | 21 | const execSync = cmd => cp.execSync(cmd, {encoding: 'utf-8'}); 22 | 23 | describe('gcp-metadata samples', () => { 24 | it('should run the quickstart', async () => { 25 | const stdout = execSync('node quickstart'); 26 | assert.match(stdout, /Is available/); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /src/gcp-residency.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2022 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import {readFileSync, statSync} from 'fs'; 18 | import {networkInterfaces, platform} from 'os'; 19 | 20 | /** 21 | * Known paths unique to Google Compute Engine Linux instances 22 | */ 23 | export const GCE_LINUX_BIOS_PATHS = { 24 | BIOS_DATE: '/sys/class/dmi/id/bios_date', 25 | BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', 26 | }; 27 | 28 | const GCE_MAC_ADDRESS_REGEX = /^42:01/; 29 | 30 | /** 31 | * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). 32 | * 33 | * Uses the: 34 | * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. 35 | * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. 36 | * 37 | * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. 38 | */ 39 | export function isGoogleCloudServerless(): boolean { 40 | /** 41 | * `CLOUD_RUN_JOB` is used for Cloud Run Jobs 42 | * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. 43 | * 44 | * `FUNCTION_NAME` is used in older Cloud Functions environments: 45 | * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. 46 | * 47 | * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: 48 | * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. 49 | * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. 50 | */ 51 | const isGFEnvironment = 52 | process.env.CLOUD_RUN_JOB || 53 | process.env.FUNCTION_NAME || 54 | process.env.K_SERVICE; 55 | 56 | return !!isGFEnvironment; 57 | } 58 | 59 | /** 60 | * Determines if the process is running on a Linux Google Compute Engine instance. 61 | * 62 | * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. 
63 | */ 64 | export function isGoogleComputeEngineLinux(): boolean { 65 | if (platform() !== 'linux') return false; 66 | 67 | try { 68 | // ensure this file exist 69 | statSync(GCE_LINUX_BIOS_PATHS.BIOS_DATE); 70 | 71 | // ensure this file exist and matches 72 | const biosVendor = readFileSync(GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); 73 | 74 | return /Google/.test(biosVendor); 75 | } catch { 76 | return false; 77 | } 78 | } 79 | 80 | /** 81 | * Determines if the process is running on a Google Compute Engine instance with a known 82 | * MAC address. 83 | * 84 | * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. 85 | */ 86 | export function isGoogleComputeEngineMACAddress(): boolean { 87 | const interfaces = networkInterfaces(); 88 | 89 | for (const item of Object.values(interfaces)) { 90 | if (!item) continue; 91 | 92 | for (const {mac} of item) { 93 | if (GCE_MAC_ADDRESS_REGEX.test(mac)) { 94 | return true; 95 | } 96 | } 97 | } 98 | 99 | return false; 100 | } 101 | 102 | /** 103 | * Determines if the process is running on a Google Compute Engine instance. 104 | * 105 | * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. 106 | */ 107 | export function isGoogleComputeEngine(): boolean { 108 | return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); 109 | } 110 | 111 | /** 112 | * Determines if the process is running on Google Cloud Platform. 113 | * 114 | * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. 
115 | */ 116 | export function detectGCPResidency(): boolean { 117 | return isGoogleCloudServerless() || isGoogleComputeEngine(); 118 | } 119 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import {GaxiosError, GaxiosOptions, GaxiosResponse, request} from 'gaxios'; 18 | import jsonBigint = require('json-bigint'); 19 | import {detectGCPResidency} from './gcp-residency'; 20 | import * as logger from 'google-logging-utils'; 21 | 22 | export const BASE_PATH = '/computeMetadata/v1'; 23 | export const HOST_ADDRESS = 'http://169.254.169.254'; 24 | export const SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; 25 | 26 | export const HEADER_NAME = 'Metadata-Flavor'; 27 | export const HEADER_VALUE = 'Google'; 28 | export const HEADERS = Object.freeze({[HEADER_NAME]: HEADER_VALUE}); 29 | 30 | const log = logger.log('gcp-metadata'); 31 | 32 | /** 33 | * Metadata server detection override options. 34 | * 35 | * Available via `process.env.METADATA_SERVER_DETECTION`. 
36 | */ 37 | export const METADATA_SERVER_DETECTION = Object.freeze({ 38 | 'assume-present': 39 | "don't try to ping the metadata server, but assume it's present", 40 | none: "don't try to ping the metadata server, but don't try to use it either", 41 | 'bios-only': 42 | "treat the result of a BIOS probe as canonical (don't fall back to pinging)", 43 | 'ping-only': 'skip the BIOS probe, and go straight to pinging', 44 | }); 45 | 46 | // TypeScript does not have `HeadersInit` outside of DOM types yet 47 | type HeadersInit = ConstructorParameters[0]; 48 | 49 | export interface Options { 50 | params?: {[index: string]: string}; 51 | property?: string; 52 | headers?: HeadersInit; 53 | } 54 | 55 | export interface MetadataAccessor { 56 | /** 57 | * 58 | * @example 59 | * 60 | * // equivalent to `project('project-id')`; 61 | * const metadataKey = 'project/project-id'; 62 | */ 63 | metadataKey: string; 64 | params?: Options['params']; 65 | headers?: Options['headers']; 66 | noResponseRetries?: number; 67 | fastFail?: boolean; 68 | } 69 | 70 | export type BulkResults = { 71 | [key in T[number]['metadataKey']]: ReturnType; 72 | }; 73 | 74 | /** 75 | * Returns the base URL while taking into account the GCE_METADATA_HOST 76 | * environment variable if it exists. 77 | * 78 | * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. 79 | */ 80 | function getBaseUrl(baseUrl?: string) { 81 | if (!baseUrl) { 82 | baseUrl = 83 | process.env.GCE_METADATA_IP || 84 | process.env.GCE_METADATA_HOST || 85 | HOST_ADDRESS; 86 | } 87 | // If no scheme is provided default to HTTP: 88 | if (!/^https?:\/\//.test(baseUrl)) { 89 | baseUrl = `http://${baseUrl}`; 90 | } 91 | return new URL(BASE_PATH, baseUrl).href; 92 | } 93 | 94 | // Accepts an options object passed from the user to the API. In previous 95 | // versions of the API, it referred to a `Request` or an `Axios` request 96 | // options object. Now it refers to an object with very limited property 97 | // names. 
This is here to help ensure users don't pass invalid options when 98 | // they upgrade from 0.4 to 0.5 to 0.8. 99 | function validate(options: Options) { 100 | Object.keys(options).forEach(key => { 101 | switch (key) { 102 | case 'params': 103 | case 'property': 104 | case 'headers': 105 | break; 106 | case 'qs': 107 | throw new Error( 108 | "'qs' is not a valid configuration option. Please use 'params' instead.", 109 | ); 110 | default: 111 | throw new Error(`'${key}' is not a valid configuration option.`); 112 | } 113 | }); 114 | } 115 | 116 | async function metadataAccessor( 117 | type: string, 118 | options?: string | Options, 119 | noResponseRetries?: number, 120 | fastFail?: boolean, 121 | ): Promise; 122 | async function metadataAccessor(metadata: MetadataAccessor): Promise; 123 | async function metadataAccessor( 124 | type: MetadataAccessor | string, 125 | options: string | Options = {}, 126 | noResponseRetries = 3, 127 | fastFail = false, 128 | ): Promise { 129 | const headers = new Headers(HEADERS); 130 | let metadataKey = ''; 131 | let params: {} = {}; 132 | 133 | if (typeof type === 'object') { 134 | const metadataAccessor: MetadataAccessor = type; 135 | 136 | new Headers(metadataAccessor.headers).forEach((value, key) => 137 | headers.set(key, value), 138 | ); 139 | 140 | metadataKey = metadataAccessor.metadataKey; 141 | params = metadataAccessor.params || params; 142 | noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; 143 | fastFail = metadataAccessor.fastFail || fastFail; 144 | } else { 145 | metadataKey = type; 146 | } 147 | 148 | if (typeof options === 'string') { 149 | metadataKey += `/${options}`; 150 | } else { 151 | validate(options); 152 | 153 | if (options.property) { 154 | metadataKey += `/${options.property}`; 155 | } 156 | 157 | new Headers(options.headers).forEach((value, key) => 158 | headers.set(key, value), 159 | ); 160 | params = options.params || params; 161 | } 162 | 163 | const requestMethod = fastFail ? 
fastFailMetadataRequest : request; 164 | const req: GaxiosOptions = { 165 | url: `${getBaseUrl()}/${metadataKey}`, 166 | headers, 167 | retryConfig: {noResponseRetries}, 168 | params, 169 | responseType: 'text', 170 | timeout: requestTimeout(), 171 | } as GaxiosOptions; 172 | log.info('instance request %j', req); 173 | 174 | const res = await requestMethod(req); 175 | log.info('instance metadata is %s', res.data); 176 | 177 | const metadataFlavor = res.headers.get(HEADER_NAME); 178 | if (metadataFlavor !== HEADER_VALUE) { 179 | throw new RangeError( 180 | `Invalid response from metadata service: incorrect ${HEADER_NAME} header. Expected '${HEADER_VALUE}', got ${metadataFlavor ? `'${metadataFlavor}'` : 'no header'}`, 181 | ); 182 | } 183 | 184 | if (typeof res.data === 'string') { 185 | try { 186 | return jsonBigint.parse(res.data); 187 | } catch { 188 | /* ignore */ 189 | } 190 | } 191 | 192 | return res.data; 193 | } 194 | 195 | async function fastFailMetadataRequest( 196 | options: GaxiosOptions, 197 | ): Promise { 198 | const secondaryOptions = { 199 | ...options, 200 | url: options.url 201 | ?.toString() 202 | .replace(getBaseUrl(), getBaseUrl(SECONDARY_HOST_ADDRESS)), 203 | }; 204 | // We race a connection between DNS/IP to metadata server. There are a couple 205 | // reasons for this: 206 | // 207 | // 1. the DNS is slow in some GCP environments; by checking both, we might 208 | // detect the runtime environment significantly faster. 209 | // 2. we can't just check the IP, which is tarpitted and slow to respond 210 | // on a user's local machine. 211 | // 212 | // Returns first resolved promise or if all promises get rejected we return an AggregateError. 213 | // 214 | // Note, however, if a failure happens prior to a success, a rejection should 215 | // occur, this is for folks running locally. 
216 | // 217 | const r1: Promise = request(options); 218 | const r2: Promise = request(secondaryOptions); 219 | return Promise.any([r1, r2]); 220 | } 221 | 222 | /** 223 | * Obtain metadata for the current GCE instance. 224 | * 225 | * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} 226 | * 227 | * @example 228 | * ``` 229 | * const serviceAccount: {} = await instance('service-accounts/'); 230 | * const serviceAccountEmail: string = await instance('service-accounts/default/email'); 231 | * ``` 232 | */ 233 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 234 | export function instance(options?: string | Options) { 235 | return metadataAccessor('instance', options); 236 | } 237 | 238 | /** 239 | * Obtain metadata for the current GCP project. 240 | * 241 | * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} 242 | * 243 | * @example 244 | * ``` 245 | * const projectId: string = await project('project-id'); 246 | * const numericProjectId: number = await project('numeric-project-id'); 247 | * ``` 248 | */ 249 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 250 | export function project(options?: string | Options) { 251 | return metadataAccessor('project', options); 252 | } 253 | 254 | /** 255 | * Obtain metadata for the current universe. 256 | * 257 | * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} 258 | * 259 | * @example 260 | * ``` 261 | * const universeDomain: string = await universe('universe-domain'); 262 | * ``` 263 | */ 264 | export function universe(options?: string | Options) { 265 | return metadataAccessor('universe', options); 266 | } 267 | 268 | /** 269 | * Retrieve metadata items in parallel. 
270 | * 271 | * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} 272 | * 273 | * @example 274 | * ``` 275 | * const data = await bulk([ 276 | * { 277 | * metadataKey: 'instance', 278 | * }, 279 | * { 280 | * metadataKey: 'project/project-id', 281 | * }, 282 | * ] as const); 283 | * 284 | * // data.instance; 285 | * // data['project/project-id']; 286 | * ``` 287 | * 288 | * @param properties The metadata properties to retrieve 289 | * @returns The metadata in `metadatakey:value` format 290 | */ 291 | export async function bulk< 292 | T extends readonly Readonly[], 293 | R extends BulkResults = BulkResults, 294 | >(properties: T): Promise { 295 | const r = {} as BulkResults; 296 | 297 | await Promise.all( 298 | properties.map(item => { 299 | return (async () => { 300 | const res = await metadataAccessor(item); 301 | const key = item.metadataKey as keyof typeof r; 302 | 303 | r[key] = res; 304 | })(); 305 | }), 306 | ); 307 | 308 | return r as R; 309 | } 310 | 311 | /* 312 | * How many times should we retry detecting GCP environment. 313 | */ 314 | function detectGCPAvailableRetries(): number { 315 | return process.env.DETECT_GCP_RETRIES 316 | ? Number(process.env.DETECT_GCP_RETRIES) 317 | : 0; 318 | } 319 | 320 | let cachedIsAvailableResponse: Promise | undefined; 321 | 322 | /** 323 | * Determine if the metadata server is currently available. 324 | */ 325 | export async function isAvailable() { 326 | if (process.env.METADATA_SERVER_DETECTION) { 327 | const value = 328 | process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); 329 | 330 | if (!(value in METADATA_SERVER_DETECTION)) { 331 | throw new RangeError( 332 | `Unknown \`METADATA_SERVER_DETECTION\` env variable. 
Got \`${value}\`, but it should be \`${Object.keys( 333 | METADATA_SERVER_DETECTION, 334 | ).join('`, `')}\`, or unset`, 335 | ); 336 | } 337 | 338 | switch (value as keyof typeof METADATA_SERVER_DETECTION) { 339 | case 'assume-present': 340 | return true; 341 | case 'none': 342 | return false; 343 | case 'bios-only': 344 | return getGCPResidency(); 345 | case 'ping-only': 346 | // continue, we want to ping the server 347 | } 348 | } 349 | 350 | try { 351 | // If a user is instantiating several GCP libraries at the same time, 352 | // this may result in multiple calls to isAvailable(), to detect the 353 | // runtime environment. We use the same promise for each of these calls 354 | // to reduce the network load. 355 | if (cachedIsAvailableResponse === undefined) { 356 | cachedIsAvailableResponse = metadataAccessor( 357 | 'instance', 358 | undefined, 359 | detectGCPAvailableRetries(), 360 | // If the default HOST_ADDRESS has been overridden, we should not 361 | // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in 362 | // a non-GCP environment): 363 | !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST), 364 | ); 365 | } 366 | await cachedIsAvailableResponse; 367 | return true; 368 | } catch (e) { 369 | const err = e as GaxiosError & {type: string}; 370 | if (process.env.DEBUG_AUTH) { 371 | console.info(err); 372 | } 373 | 374 | if (err.type === 'request-timeout') { 375 | // If running in a GCP environment, metadata endpoint should return 376 | // within ms. 
377 | return false; 378 | } 379 | if (err.response && err.response.status === 404) { 380 | return false; 381 | } else { 382 | if ( 383 | !(err.response && err.response.status === 404) && 384 | // A warning is emitted if we see an unexpected err.code, or err.code 385 | // is not populated: 386 | (!err.code || 387 | ![ 388 | 'EHOSTDOWN', 389 | 'EHOSTUNREACH', 390 | 'ENETUNREACH', 391 | 'ENOENT', 392 | 'ENOTFOUND', 393 | 'ECONNREFUSED', 394 | ].includes(err.code.toString())) 395 | ) { 396 | let code = 'UNKNOWN'; 397 | if (err.code) code = err.code.toString(); 398 | process.emitWarning( 399 | `received unexpected error = ${err.message} code = ${code}`, 400 | 'MetadataLookupWarning', 401 | ); 402 | } 403 | 404 | // Failure to resolve the metadata service means that it is not available. 405 | return false; 406 | } 407 | } 408 | } 409 | 410 | /** 411 | * reset the memoized isAvailable() lookup. 412 | */ 413 | export function resetIsAvailableCache() { 414 | cachedIsAvailableResponse = undefined; 415 | } 416 | 417 | /** 418 | * A cache for the detected GCP Residency. 419 | */ 420 | export let gcpResidencyCache: boolean | null = null; 421 | 422 | /** 423 | * Detects GCP Residency. 424 | * Caches results to reduce costs for subsequent calls. 425 | * 426 | * @see setGCPResidency for setting 427 | */ 428 | export function getGCPResidency(): boolean { 429 | if (gcpResidencyCache === null) { 430 | setGCPResidency(); 431 | } 432 | 433 | return gcpResidencyCache!; 434 | } 435 | 436 | /** 437 | * Sets the detected GCP Residency. 438 | * Useful for forcing metadata server detection behavior. 439 | * 440 | * Set `null` to autodetect the environment (default behavior). 441 | * @see getGCPResidency for getting 442 | */ 443 | export function setGCPResidency(value: boolean | null = null) { 444 | gcpResidencyCache = value !== null ? value : detectGCPResidency(); 445 | } 446 | 447 | /** 448 | * Obtain the timeout for requests to the metadata server. 
449 | * 450 | * In certain environments and conditions requests can take longer than 451 | * the default timeout to complete. This function will determine the 452 | * appropriate timeout based on the environment. 453 | * 454 | * @returns {number} a request timeout duration in milliseconds. 455 | */ 456 | export function requestTimeout(): number { 457 | return getGCPResidency() ? 0 : 3000; 458 | } 459 | 460 | export * from './gcp-residency'; 461 | -------------------------------------------------------------------------------- /system-test/any.d.ts: -------------------------------------------------------------------------------- 1 | declare module '*'; 2 | -------------------------------------------------------------------------------- /system-test/fixtures/.eslintrc.yml: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | no-console: off 4 | -------------------------------------------------------------------------------- /system-test/fixtures/cloudbuild/cloudbuild.yaml: -------------------------------------------------------------------------------- 1 | steps: 2 | - name: 'node:22-alpine' 3 | args: ['npm', 'install'] 4 | - name: 'node:22-alpine' 5 | args: ['npm', 'start'] 6 | -------------------------------------------------------------------------------- /system-test/fixtures/cloudbuild/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | const gcpMetadata = require('gcp-metadata'); 18 | 19 | async function main() { 20 | const isAvailable = await gcpMetadata.isAvailable(); 21 | console.log(`isAvailable: ${isAvailable}`); 22 | await gcpMetadata.instance('service-accounts/default/token'); 23 | const svc = await gcpMetadata.instance({ 24 | property: 'service-accounts/', 25 | params: {recursive: 'true'}, 26 | }); 27 | console.log('serviceAccounts:'); 28 | console.log(JSON.stringify(svc).split('\n').join()); 29 | } 30 | 31 | main().catch(e => { 32 | console.error(e); 33 | throw e; 34 | }); 35 | -------------------------------------------------------------------------------- /system-test/fixtures/cloudbuild/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcb-test-fixtures", 3 | "private": true, 4 | "main": "index.js", 5 | "license": "Apache-2.0", 6 | "scripts": { 7 | "start": "node index.js" 8 | }, 9 | "engines": { 10 | "node": ">=18" 11 | }, 12 | "dependencies": { 13 | "gcp-metadata": "file:./gcp-metadata.tgz" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /system-test/fixtures/hook/.gcloudignore: -------------------------------------------------------------------------------- 1 | # This file specifies files that are *not* uploaded to Google Cloud Platform 2 | # using gcloud. It follows the same syntax as .gitignore, with the addition of 3 | # "#!include" directives (which insert the entries of the given .gitignore-style 4 | # file at that point). 
5 | # 6 | # For more information, run: 7 | # $ gcloud topic gcloudignore 8 | # 9 | .gcloudignore 10 | # If you would like to upload your .git directory, .gitignore file or files 11 | # from your .gitignore file, remove the corresponding line 12 | # below: 13 | .git 14 | .gitignore 15 | 16 | node_modules 17 | #!include:.gitignore 18 | 19 | test/ 20 | -------------------------------------------------------------------------------- /system-test/fixtures/hook/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | const gcpMetadata = require('gcp-metadata'); 18 | 19 | exports.getMetadata = async (req, res) => { 20 | const isAvailable = await gcpMetadata.isAvailable(); 21 | const instance = await gcpMetadata.instance(); 22 | const svc = await gcpMetadata.instance({ 23 | property: 'service-accounts/', 24 | params: {recursive: 'true'}, 25 | }); 26 | res.status(200).send({isAvailable, instance, svc}); 27 | }; 28 | -------------------------------------------------------------------------------- /system-test/fixtures/hook/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcf-kitchen-test", 3 | "private": true, 4 | "main": "index.js", 5 | "license": "Apache-2.0", 6 | "engines": { 7 | "node": ">=18" 8 | }, 9 | "dependencies": { 10 | "gcp-metadata": "file:./gcp-metadata.tgz" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /system-test/fixtures/kitchen/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcp-metadata-fixture", 3 | "version": "1.0.0", 4 | "description": "An app we're using to test the library. 
", 5 | "scripts": { 6 | "check": "gts check", 7 | "clean": "gts clean", 8 | "compile": "tsc -p .", 9 | "fix": "gts fix", 10 | "prepare": "npm run compile", 11 | "pretest": "npm run compile", 12 | "posttest": "npm run check", 13 | "start": "node build/src/index.js" 14 | }, 15 | "license": "Apache-2.0", 16 | "dependencies": { 17 | "gcp-metadata": "file:./gcp-metadata.tgz" 18 | }, 19 | "devDependencies": { 20 | "@types/node": "^22.0.0", 21 | "gts": "^6.0.0", 22 | "typescript": "^5.0.2" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /system-test/fixtures/kitchen/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import * as gcp from 'gcp-metadata'; 18 | // uncomment the line below during development 19 | // import * as gcp from '../../../../build/src/index'; 20 | 21 | const header = gcp.HEADER_NAME; 22 | const headers = gcp.HEADERS; 23 | 24 | async function main() { 25 | return Promise.allSettled([ 26 | gcp.project('project-id'), 27 | gcp.universe('universe-domain'), 28 | ]); 29 | } 30 | 31 | main().catch(console.error); 32 | -------------------------------------------------------------------------------- /system-test/fixtures/kitchen/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": "build" 6 | }, 7 | "include": ["src/*.ts", "src/**/*.ts"] 8 | } 9 | -------------------------------------------------------------------------------- /system-test/kitchen.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import {ncp} from 'ncp'; 18 | import * as tmp from 'tmp'; 19 | import {promisify} from 'util'; 20 | import {execSync} from 'child_process'; 21 | 22 | import {describe, it, after} from 'mocha'; 23 | 24 | describe('installation', () => { 25 | const ncpp = promisify(ncp); 26 | const keep = !!process.env.GCPM_KEEP_TEMPDIRS; 27 | const stagingDir = tmp.dirSync({keep, unsafeCleanup: true}); 28 | const stagingPath = stagingDir.name; 29 | const pkg = require('../../package.json'); // eslint-disable-line 30 | 31 | /** 32 | * Create a staging directory with temp fixtures used to test on a fresh 33 | * application. 34 | */ 35 | it('should be able to use the d.ts', async () => { 36 | console.log(`${__filename} staging area: ${stagingPath}`); 37 | execSync('npm pack', {stdio: 'inherit'}); 38 | const tarball = `${pkg.name}-${pkg.version}.tgz`; 39 | await ncpp(tarball, `${stagingPath}/${pkg.name}.tgz`); 40 | await ncpp('system-test/fixtures/kitchen', `${stagingPath}/`); 41 | execSync('npm install', {cwd: `${stagingPath}/`, stdio: 'inherit'}); 42 | }); 43 | 44 | /** 45 | * CLEAN UP - remove the staging directory when done. 46 | */ 47 | after('cleanup staging', () => { 48 | if (!keep) { 49 | stagingDir.removeCallback(); 50 | } 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /system-test/system.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import assert from 'assert'; 18 | import {before, after, describe, it} from 'mocha'; 19 | import fs from 'fs'; 20 | import * as gcbuild from 'gcbuild'; 21 | import {CloudFunctionsServiceClient} from '@google-cloud/functions'; 22 | import * as path from 'path'; 23 | import {promisify} from 'util'; 24 | import {execSync} from 'child_process'; 25 | import {request} from 'gaxios'; 26 | 27 | const loadGcx = () => import('gcx'); 28 | 29 | const copy = promisify(fs.copyFile); 30 | const pkg = require('../../package.json'); // eslint-disable-line 31 | 32 | let gcf: CloudFunctionsServiceClient; 33 | let projectId: string; 34 | const shortPrefix = 'gcloud-tests'; 35 | const randomUUID = () => 36 | globalThis.crypto?.randomUUID() || require('crypto').randomUUID(); 37 | const fullPrefix = `${shortPrefix}-${randomUUID().split('-')[0]}`; 38 | 39 | describe('gcp metadata', () => { 40 | before(async () => { 41 | // pack up the gcp-metadata module and copy to the target dir 42 | await packModule(); 43 | gcf = new CloudFunctionsServiceClient(); 44 | projectId = await gcf.auth.getProjectId(); 45 | }); 46 | 47 | describe('cloud functions', () => { 48 | before(async () => { 49 | // Clean up any old cloud functions just hanging out 50 | await pruneFunctions(false); 51 | 52 | // deploy the function to GCF 53 | await deployApp(); 54 | // cloud functions now require authentication by default, see: 55 | // https://cloud.google.com/functions/docs/release-notes 56 | await gcf.setIamPolicy({ 57 | resource: `projects/${projectId}/locations/us-central1/functions/${fullPrefix}`, 58 | policy: { 59 | bindings: [ 60 | {members: ['allUsers'], role: 'roles/cloudfunctions.invoker'}, 61 | ], 62 | }, 63 | }); 64 | }); 65 | 66 | it('should access the metadata service on GCF', async () => { 67 | const url = `https://us-central1-${projectId}.cloudfunctions.net/${fullPrefix}`; 68 
| const res = await request<{isAvailable: boolean}>({url}); 69 | console.dir(res.data); 70 | assert.strictEqual(res.data.isAvailable, true); 71 | }); 72 | 73 | after(() => pruneFunctions(true)); 74 | }); 75 | 76 | describe('cloud build', () => { 77 | it('should access the metadata service on GCB', async () => { 78 | const result = await gcbuild.build({ 79 | sourcePath: path.join( 80 | __dirname, 81 | '../../system-test/fixtures/cloudbuild', 82 | ), 83 | }); 84 | console.log(result.log); 85 | assert.ok(/isAvailable: true/.test(result.log)); 86 | assert.ok( 87 | result.log.includes('"default":{"aliases":["default"],"email"'), 88 | ); 89 | }); 90 | }); 91 | }); 92 | 93 | /** 94 | * Delete all cloud functions created in the project by this 95 | * test suite. It can delete ones created in this session, and 96 | * also delete any of them created > 7 days ago by tests. 97 | * @param sessionOnly Only prune functions created in this session. 98 | */ 99 | async function pruneFunctions(sessionOnly: boolean) { 100 | console.log('Pruning leaked functions...'); 101 | const [fns] = await gcf.listFunctions({ 102 | parent: `projects/${projectId}/locations/-`, 103 | }); 104 | await Promise.all( 105 | fns 106 | .filter(fn => { 107 | if (sessionOnly) { 108 | return fn.name!.includes(fullPrefix); 109 | } 110 | const updateDate = ((fn.updateTime?.seconds as number) || 0) * 1000; 111 | const currentDate = Date.now(); 112 | const minutesSinceUpdate = (currentDate - updateDate) / 1000 / 60; 113 | return minutesSinceUpdate > 60 && fn.name!.includes(shortPrefix); 114 | }) 115 | .map(async fn => { 116 | await gcf.deleteFunction({name: fn.name}).catch(e => { 117 | console.error(`There was a problem deleting function ${fn.name}.`); 118 | console.error(e); 119 | }); 120 | }), 121 | ); 122 | } 123 | 124 | /** 125 | * Deploy the hook app to GCF. 
126 | */ 127 | async function deployApp() { 128 | const targetDir = path.join(__dirname, '../../system-test/fixtures/hook'); 129 | const gcx = await loadGcx(); 130 | await gcx.deploy({ 131 | name: fullPrefix, 132 | entryPoint: 'getMetadata', 133 | triggerHTTP: true, 134 | runtime: 'nodejs18', 135 | region: 'us-central1', 136 | targetDir, 137 | }); 138 | } 139 | 140 | /** 141 | * Runs `npm pack` on the root directory, and copies the resulting 142 | * `gcp-metadata.tgz` over to the target directories in fixtures. 143 | */ 144 | async function packModule() { 145 | execSync('npm pack', {stdio: 'inherit'}); 146 | const from = `${pkg.name}-${pkg.version}.tgz`; 147 | const targets = ['hook', 'cloudbuild']; 148 | await Promise.all( 149 | targets.map(target => { 150 | const to = `system-test/fixtures/${target}/${pkg.name}.tgz`; 151 | return copy(from, to); 152 | }), 153 | ); 154 | } 155 | -------------------------------------------------------------------------------- /test/gcp-residency.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2022 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import {strict as assert} from 'assert'; 18 | 19 | import {beforeEach, describe, it} from 'mocha'; 20 | import {SinonSandbox, createSandbox} from 'sinon'; 21 | 22 | import * as gcpResidency from '../src/gcp-residency'; 23 | import {GCPResidencyUtil} from './utils/gcp-residency'; 24 | 25 | describe('gcp-residency', () => { 26 | let sandbox: SinonSandbox; 27 | let residency: GCPResidencyUtil; 28 | 29 | beforeEach(() => { 30 | sandbox = createSandbox(); 31 | residency = new GCPResidencyUtil(sandbox); 32 | 33 | // Default to non-GCP 34 | residency.setNonGCP(); 35 | }); 36 | 37 | afterEach(() => { 38 | sandbox.restore(); 39 | }); 40 | 41 | describe('isGoogleCloudServerless', () => { 42 | it('should return `true` if `CLOUD_RUN_JOB` env is set', () => { 43 | process.env.CLOUD_RUN_JOB = '1'; 44 | 45 | assert(gcpResidency.isGoogleCloudServerless()); 46 | }); 47 | 48 | it('should return `true` if `FUNCTION_NAME` env is set', () => { 49 | process.env.FUNCTION_NAME = '1'; 50 | 51 | assert(gcpResidency.isGoogleCloudServerless()); 52 | }); 53 | 54 | it('should return `true` if `K_SERVICE` env is set', () => { 55 | process.env.K_SERVICE = '1'; 56 | 57 | assert(gcpResidency.isGoogleCloudServerless()); 58 | }); 59 | 60 | it('should return `false` if none of the envs are set', () => { 61 | assert.equal(gcpResidency.isGoogleCloudServerless(), false); 62 | }); 63 | }); 64 | 65 | describe('isGoogleComputeEngine', () => { 66 | it('should return `true` if on Linux and has the expected BIOS files', () => { 67 | residency.setGCENetworkInterface(false); 68 | residency.setGCEPlatform('linux'); 69 | residency.setGCELinuxBios(true); 70 | 71 | assert.equal(gcpResidency.isGoogleComputeEngine(), true); 72 | }); 73 | 74 | it('should return `false` if on Linux and the expected BIOS files are not GCE', () => { 75 | residency.setGCENetworkInterface(false); 76 | residency.setGCEPlatform('linux'); 77 | residency.setGCELinuxBios(false); 78 | 79 | 
assert.equal(gcpResidency.isGoogleComputeEngine(), false); 80 | }); 81 | 82 | it('should return `false` if on Linux and the BIOS files do not exist', () => { 83 | residency.setGCENetworkInterface(false); 84 | residency.setGCEPlatform('linux'); 85 | residency.setGCELinuxBios(null); 86 | 87 | assert.equal(gcpResidency.isGoogleComputeEngine(), false); 88 | }); 89 | 90 | it('should return `true` if the host MAC address begins with `42:01`', () => { 91 | residency.setGCENetworkInterface(true); 92 | residency.setGCEPlatform('win32'); 93 | residency.setGCELinuxBios(null); 94 | 95 | assert.equal(gcpResidency.isGoogleComputeEngine(), true); 96 | }); 97 | 98 | it('should return `false` if the host MAC address does not begin with `42:01` & is not Linux', () => { 99 | residency.setGCENetworkInterface(false); 100 | residency.setGCEPlatform('win32'); 101 | residency.setGCELinuxBios(null); 102 | 103 | assert.equal(gcpResidency.isGoogleComputeEngine(), false); 104 | }); 105 | }); 106 | 107 | describe('detectGCPResidency', () => { 108 | it('should return `true` if `isGoogleCloudServerless`', () => { 109 | // `isGoogleCloudServerless` = true 110 | process.env.K_SERVICE = '1'; 111 | 112 | // `isGoogleComputeEngine` = false 113 | residency.setGCENetworkInterface(false); 114 | 115 | assert(gcpResidency.detectGCPResidency()); 116 | }); 117 | 118 | it('should return `true` if `isGoogleComputeEngine`', () => { 119 | // `isGoogleCloudServerless` = false 120 | residency.removeServerlessEnvironmentVariables(); 121 | 122 | // `isGoogleComputeEngine` = true 123 | residency.setGCENetworkInterface(true); 124 | 125 | assert(gcpResidency.detectGCPResidency()); 126 | }); 127 | 128 | it('should return `false` !`isGoogleCloudServerless` && !`isGoogleComputeEngine`', () => { 129 | // `isGoogleCloudServerless` = false 130 | residency.removeServerlessEnvironmentVariables(); 131 | 132 | // `isGoogleComputeEngine` = false 133 | residency.setGCENetworkInterface(false); 134 | 
residency.setGCELinuxBios(false); 135 | 136 | assert.equal(gcpResidency.detectGCPResidency(), false); 137 | }); 138 | }); 139 | }); 140 | -------------------------------------------------------------------------------- /test/index.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2018 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import assert from 'assert'; 18 | 19 | import {beforeEach, afterEach, describe, it} from 'mocha'; 20 | import nock from 'nock'; 21 | import {SinonSandbox, createSandbox} from 'sinon'; 22 | 23 | import * as gcp from '../src'; 24 | import {GCPResidencyUtil} from './utils/gcp-residency'; 25 | import {GaxiosError} from 'gaxios'; 26 | 27 | // the metadata IP entry: 28 | const HOST = gcp.HOST_ADDRESS; 29 | // the metadata DNS entry: 30 | const SECONDARY_HOST = gcp.SECONDARY_HOST_ADDRESS; 31 | const PATH = gcp.BASE_PATH; 32 | const TYPE = 'instance'; 33 | const PROPERTY = 'property'; 34 | 35 | // NOTE: nodejs switches all incoming header names to lower case. 
36 | const HEADERS = { 37 | [gcp.HEADER_NAME.toLowerCase()]: gcp.HEADER_VALUE, 38 | }; 39 | 40 | describe('unit test', () => { 41 | const originalGceMetadataIp = process.env.GCE_METADATA_HOST; 42 | let sandbox: SinonSandbox; 43 | let residency: GCPResidencyUtil; 44 | 45 | before(() => { 46 | nock.disableNetConnect(); 47 | process.removeAllListeners('warning'); 48 | }); 49 | 50 | beforeEach(() => { 51 | // Clear this environment variable to ensure it does not affect 52 | // expected test outcome. 53 | delete process.env.GCE_METADATA_HOST; 54 | delete process.env.GCE_METADATA_IP; 55 | delete process.env.METADATA_SERVER_DETECTION; 56 | 57 | gcp.resetIsAvailableCache(); 58 | 59 | sandbox = createSandbox(); 60 | residency = new GCPResidencyUtil(sandbox); 61 | 62 | residency.setNonGCP(); 63 | }); 64 | 65 | afterEach(() => { 66 | // Restore environment variable if it previously existed. 67 | process.env.GCE_METADATA_HOST = originalGceMetadataIp; 68 | nock.cleanAll(); 69 | sandbox.restore(); 70 | }); 71 | 72 | it('should create the correct accessors', async () => { 73 | assert(typeof gcp.instance, 'function'); 74 | assert(typeof gcp.project, 'function'); 75 | }); 76 | 77 | it('should access all the metadata properly', async () => { 78 | const scope = nock(HOST) 79 | .get(`${PATH}/${TYPE}`, undefined, HEADERS) 80 | .reply(200, {}, HEADERS); 81 | await gcp.instance(); 82 | scope.done(); 83 | }); 84 | 85 | it('should access a specific metadata property', async () => { 86 | const scope = nock(HOST) 87 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 88 | .reply(200, {}, HEADERS); 89 | await gcp.instance(PROPERTY); 90 | scope.done(); 91 | }); 92 | 93 | it('should use GCE_METADATA_IP if available', async () => { 94 | process.env.GCE_METADATA_IP = '127.0.0.1:8080'; 95 | const scope = nock(`http://${process.env.GCE_METADATA_IP}`) 96 | .get(`${PATH}/${TYPE}/${PROPERTY}`, undefined, HEADERS) 97 | .reply(200, {}, HEADERS); 98 | await gcp.instance(PROPERTY); 99 | scope.done(); 100 | }); 101 | 
102 | it('should use GCE_METADATA_HOST if available', async () => { 103 | process.env.GCE_METADATA_HOST = '127.0.0.1:8080'; 104 | const scope = nock(`http://${process.env.GCE_METADATA_HOST}`) 105 | .get(`${PATH}/${TYPE}/${PROPERTY}`, undefined, HEADERS) 106 | .reply(200, {}, HEADERS); 107 | await gcp.instance(PROPERTY); 108 | scope.done(); 109 | }); 110 | 111 | it('should set custom headers when supplied', async () => { 112 | const headers = {human: 'phone', monkey: 'banana'}; 113 | const scope = nock(HOST, {reqheaders: headers}) 114 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 115 | .reply(200, {}, HEADERS); 116 | await gcp.instance({property: PROPERTY, headers}); 117 | scope.done(); 118 | }); 119 | 120 | it('should throw a valuable error when headers do not match', async () => { 121 | const scope = nock(HOST) 122 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 123 | .reply( 124 | 200, 125 | {}, 126 | { 127 | [gcp.HEADER_NAME.toLowerCase()]: 'wrongHeader', 128 | }, 129 | ); 130 | 131 | await assert.rejects( 132 | async () => { 133 | await gcp.instance({property: PROPERTY}); 134 | }, 135 | err => { 136 | assert(err instanceof Error); 137 | assert.strictEqual( 138 | err.message, 139 | "Invalid response from metadata service: incorrect Metadata-Flavor header. Expected 'Google', got 'wrongHeader'", 140 | ); 141 | scope.done(); 142 | return true; 143 | }, 144 | ); 145 | }); 146 | 147 | it('should throw a valuable error when header does not exist', async () => { 148 | const scope = nock(HOST).get(`${PATH}/${TYPE}/${PROPERTY}`).reply(200, {}); 149 | 150 | await assert.rejects( 151 | async () => { 152 | await gcp.instance({property: PROPERTY}); 153 | }, 154 | err => { 155 | assert(err instanceof Error); 156 | assert.strictEqual( 157 | err.message, 158 | "Invalid response from metadata service: incorrect Metadata-Flavor header. 
Expected 'Google', got no header", 159 | ); 160 | scope.done(); 161 | return true; 162 | }, 163 | ); 164 | }); 165 | 166 | it('should return large numbers as BigNumber values', async () => { 167 | const BIG_NUMBER_STRING = '3279739563200103600'; 168 | const scope = nock(HOST) 169 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 170 | .reply(200, BIG_NUMBER_STRING, HEADERS); 171 | const property = await gcp.instance(PROPERTY); 172 | // property should be a BigNumber. 173 | assert.strictEqual(property.valueOf(), BIG_NUMBER_STRING); 174 | scope.done(); 175 | }); 176 | 177 | it('should return small numbers normally', async () => { 178 | const NUMBER = 32797; 179 | const scope = nock(HOST) 180 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 181 | .reply(200, `${NUMBER}`, HEADERS); 182 | const property = await gcp.instance(PROPERTY); 183 | assert.strictEqual(typeof property, 'number'); 184 | assert.strictEqual(property, NUMBER); 185 | scope.done(); 186 | }); 187 | 188 | it('should deal with nested large numbers', async () => { 189 | const BIG_NUMBER_STRING = '3279739563200103600'; 190 | const RESPONSE = `{ "v1": true, "v2": ${BIG_NUMBER_STRING} }`; 191 | const scope = nock(HOST) 192 | .get(`${PATH}/${TYPE}/${PROPERTY}`) 193 | .reply(200, RESPONSE, HEADERS); 194 | const response = await gcp.instance(PROPERTY); 195 | assert.strictEqual(response.v2.valueOf(), BIG_NUMBER_STRING); 196 | scope.done(); 197 | }); 198 | 199 | it('should accept an object with property and query fields', async () => { 200 | const QUERY = {key: 'value'}; 201 | const scope = nock(HOST) 202 | .get(`${PATH}/project/${PROPERTY}`) 203 | .query(QUERY) 204 | .reply(200, {}, HEADERS); 205 | await gcp.project({property: PROPERTY, params: QUERY}); 206 | scope.done(); 207 | }); 208 | 209 | it('should query the `universe` type', async () => { 210 | const PROPERTY = 'universe-domain'; 211 | const VALUE = 'my-domain.com'; 212 | 213 | const scope = nock(HOST) 214 | .get(`${PATH}/universe/${PROPERTY}`) 215 | .reply(200, VALUE, 
HEADERS); 216 | 217 | assert(await gcp.universe(PROPERTY), VALUE); 218 | 219 | scope.done(); 220 | }); 221 | 222 | it('should return error when flavor header is incorrect', async () => { 223 | const scope = nock(HOST) 224 | .get(`${PATH}/${TYPE}`) 225 | .reply(200, {}, {[gcp.HEADER_NAME.toLowerCase()]: 'Hazelnut'}); 226 | await assert.rejects( 227 | gcp.instance(), 228 | /Invalid response from metadata service: incorrect Metadata-Flavor header./, 229 | ); 230 | scope.done(); 231 | }); 232 | 233 | it('should return the request error', async () => { 234 | const scope = nock(HOST) 235 | .get(`${PATH}/${TYPE}`) 236 | .reply(404, undefined, HEADERS); 237 | 238 | try { 239 | await gcp.instance(); 240 | } catch (err) { 241 | assert(err instanceof GaxiosError); 242 | assert.strictEqual(err.status, 404); 243 | } 244 | 245 | scope.done(); 246 | }); 247 | 248 | it('should retry if the initial request fails', async () => { 249 | const scope = nock(HOST) 250 | .get(`${PATH}/${TYPE}`) 251 | .times(2) 252 | .reply(500) 253 | .get(`${PATH}/${TYPE}`) 254 | .reply(200, {}, HEADERS); 255 | await gcp.instance(); 256 | scope.done(); 257 | }); 258 | 259 | it('should retry with GCE_METADATA_HOST if first request fails', async () => { 260 | process.env.GCE_METADATA_HOST = '127.0.0.1:8080'; 261 | const scope = nock(`http://${process.env.GCE_METADATA_HOST}`) 262 | .get(`${PATH}/${TYPE}`) 263 | .times(2) 264 | .reply(500) 265 | .get(`${PATH}/${TYPE}`) 266 | .reply(200, {}, HEADERS); 267 | await gcp.instance(); 268 | scope.done(); 269 | }); 270 | 271 | it('should throw if request options are passed', async () => { 272 | await assert.rejects( 273 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 274 | gcp.instance({qs: {one: 'two'}} as any), 275 | /'qs' is not a valid configuration option. 
Please use 'params' instead\./, 276 | ); 277 | }); 278 | 279 | it('should throw if invalid options are passed', async () => { 280 | await assert.rejects( 281 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 282 | gcp.instance({fake: 'news'} as any), 283 | /'fake' is not a valid/, 284 | ); 285 | }); 286 | 287 | it('should retry on DNS errors', async () => { 288 | const scope = nock(HOST) 289 | .get(`${PATH}/${TYPE}`) 290 | .reply(500, {code: 'ETIMEDOUT'}) 291 | .get(`${PATH}/${TYPE}`) 292 | .reply(200, {}, HEADERS); 293 | const data = await gcp.instance(); 294 | scope.done(); 295 | assert(data); 296 | }); 297 | 298 | async function secondaryHostRequest( 299 | delay: number, 300 | responseType = 'success', 301 | ): Promise { 302 | let secondary: nock.Scope; 303 | if (responseType === 'success') { 304 | secondary = nock(SECONDARY_HOST) 305 | .get(`${PATH}/${TYPE}`) 306 | .delayConnection(delay) 307 | .reply(200, {}, HEADERS); 308 | } else { 309 | secondary = nock(SECONDARY_HOST) 310 | .get(`${PATH}/${TYPE}`) 311 | .delayConnection(delay) 312 | .reply(500, {code: responseType}); 313 | } 314 | return new Promise((resolve, reject) => { 315 | setTimeout(() => { 316 | try { 317 | secondary.done(); 318 | return resolve(); 319 | } catch (err) { 320 | console.info(err); 321 | return reject(err); 322 | } 323 | }, delay + 50); 324 | }); 325 | } 326 | 327 | it('should make requests in bulk', async () => { 328 | const INSTANCE_VALUE = {instance: 'data'}; 329 | const UNIVERSE_DOMAIN = 'my-domain.com'; 330 | 331 | const scopes = [ 332 | nock(HOST) 333 | .get(`${PATH}/universe/universe-domain`) 334 | .reply(200, UNIVERSE_DOMAIN, HEADERS), 335 | nock(HOST).get(`${PATH}/instance`).reply(200, INSTANCE_VALUE, HEADERS), 336 | ]; 337 | 338 | const data = await gcp.bulk([ 339 | { 340 | metadataKey: 'instance', 341 | }, 342 | { 343 | metadataKey: 'universe/universe-domain', 344 | }, 345 | ] as const); 346 | 347 | assert.deepEqual(data.instance, INSTANCE_VALUE); 348 | 
assert.deepEqual(data['universe/universe-domain'], UNIVERSE_DOMAIN); 349 | 350 | scopes.map(scope => scope.done()); 351 | }); 352 | 353 | describe('METADATA_SERVER_DETECTION', () => { 354 | it('should respect `assume-present`', async () => { 355 | process.env.METADATA_SERVER_DETECTION = 'assume-present'; 356 | 357 | // if this is called, the test would fail. 358 | const scope = nock(HOST); 359 | 360 | const isGCE = await gcp.isAvailable(); 361 | assert.strictEqual(isGCE, true); 362 | 363 | scope.done(); 364 | }); 365 | 366 | it('should respect `bios-only` (residency = true)', async () => { 367 | process.env.METADATA_SERVER_DETECTION = 'bios-only'; 368 | 369 | // if this is called, the test would fail. 370 | const scope = nock(HOST); 371 | 372 | gcp.setGCPResidency(true); 373 | const isGCE = await gcp.isAvailable(); 374 | assert.strictEqual(isGCE, true); 375 | 376 | scope.done(); 377 | }); 378 | 379 | it('should respect `bios-only` (residency = false)', async () => { 380 | process.env.METADATA_SERVER_DETECTION = 'bios-only'; 381 | 382 | // if either is called, the test would fail. 383 | nock(HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 384 | nock(SECONDARY_HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 385 | 386 | gcp.setGCPResidency(false); 387 | const isGCE = await gcp.isAvailable(); 388 | assert.strictEqual(isGCE, false); 389 | 390 | nock.cleanAll(); 391 | }); 392 | 393 | it('should respect `none`', async () => { 394 | process.env.METADATA_SERVER_DETECTION = 'none'; 395 | 396 | // if either is called, the test would fail. 397 | nock(HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 398 | nock(SECONDARY_HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 399 | 400 | // if this is referenced, this test would fail. 
401 | gcp.setGCPResidency(true); 402 | 403 | const isGCE = await gcp.isAvailable(); 404 | assert.strictEqual(isGCE, false); 405 | }); 406 | 407 | it('should respect `ping-only`', async () => { 408 | process.env.METADATA_SERVER_DETECTION = 'ping-only'; 409 | 410 | gcp.resetIsAvailableCache(); 411 | nock(HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 412 | nock(SECONDARY_HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 413 | 414 | // if this is referenced, this test would fail. 415 | gcp.setGCPResidency(false); 416 | 417 | const isGCE = await gcp.isAvailable(); 418 | assert.strictEqual(isGCE, true); 419 | 420 | nock.cleanAll(); 421 | }); 422 | 423 | it('should ignore spaces and capitalization', async () => { 424 | process.env.METADATA_SERVER_DETECTION = ' ASSUME-present\t'; 425 | 426 | // if this is called, the test would fail. 427 | const scope = nock(HOST); 428 | 429 | const isGCE = await gcp.isAvailable(); 430 | assert.strictEqual(isGCE, true); 431 | 432 | scope.done(); 433 | }); 434 | 435 | it('should throw on unknown values', async () => { 436 | process.env.METADATA_SERVER_DETECTION = 'abc'; 437 | 438 | await assert.rejects(gcp.isAvailable, RangeError); 439 | }); 440 | }); 441 | 442 | it('should report isGCE if primary server returns 500 followed by 200', async () => { 443 | const secondary = secondaryHostRequest(500); 444 | const primary = nock(HOST) 445 | .get(`${PATH}/${TYPE}`) 446 | .reply(500) 447 | .get(`${PATH}/${TYPE}`) 448 | .reply(200, {}, HEADERS); 449 | const isGCE = await gcp.isAvailable(); 450 | await secondary; 451 | primary.done(); 452 | assert.strictEqual(isGCE, true); 453 | }); 454 | 455 | it('should log error if DEBUG_AUTH is set', async () => { 456 | process.env.DEBUG_AUTH = 'true'; 457 | 458 | const info = console.info; 459 | let err: Error | null = null; 460 | console.info = (_err: Error) => { 461 | err = _err; 462 | }; 463 | 464 | const secondary = secondaryHostRequest(500, 'ENOTFOUND'); 465 | const primary = nock(HOST) 
466 | .get(`${PATH}/${TYPE}`) 467 | .replyWithError({code: 'ENOTFOUND'}); 468 | await gcp.isAvailable(); 469 | await secondary; 470 | primary.done(); 471 | console.info = info; 472 | delete process.env.DEBUG_AUTH; 473 | assert.strictEqual(/All promises were rejected/.test(err!.message), true); 474 | }); 475 | 476 | [ 477 | 'EHOSTDOWN', 478 | 'EHOSTUNREACH', 479 | 'ENETUNREACH', 480 | 'ENOENT', 481 | 'ENOTFOUND', 482 | 'ECONNREFUSED', 483 | ].forEach(errorCode => { 484 | it(`should fail on isAvailable if ${errorCode} is returned for primary and secondary requests`, async () => { 485 | const secondary = secondaryHostRequest(500, errorCode); 486 | const primary = nock(HOST) 487 | .get(`${PATH}/${TYPE}`) 488 | .replyWithError({code: errorCode}); 489 | const isGCE = await gcp.isAvailable(); 490 | await secondary; 491 | primary.done(); 492 | assert.strictEqual(false, isGCE); 493 | }); 494 | }); 495 | 496 | it('should return first successful response', async () => { 497 | const secondary = secondaryHostRequest(500); 498 | const primary = nock(HOST).get(`${PATH}/${TYPE}`).reply(404); 499 | const isGCE = await gcp.isAvailable(); 500 | await secondary; 501 | primary.done(); 502 | assert.strictEqual(true, isGCE); 503 | }); 504 | 505 | it('should fail fast with GCE_METADATA_HOST 404 on isAvailable', async () => { 506 | process.env.GCE_METADATA_HOST = '127.0.0.1:8080'; 507 | const primary = nock(`http://${process.env.GCE_METADATA_HOST}`) 508 | .get(`${PATH}/${TYPE}`) 509 | .reply(404); 510 | const isGCE = await gcp.isAvailable(); 511 | primary.done(); 512 | assert.strictEqual(false, isGCE); 513 | }); 514 | 515 | it('should fail on isAvailable if request times out', async () => { 516 | void secondaryHostRequest(5000); 517 | const primary = nock(HOST) 518 | .get(`${PATH}/${TYPE}`) 519 | .delayConnection(3500) 520 | // this should never get called, as the 3000 timeout will trigger. 
521 | .reply(200, {}, HEADERS); 522 | const isGCE = await gcp.isAvailable(); 523 | // secondary is allowed to simply timeout in the aether, to avoid 524 | // having a test that waits 5000 ms. 525 | primary.done(); 526 | assert.strictEqual(false, isGCE); 527 | }); 528 | 529 | it('should fail on isAvailable if GCE_METADATA_HOST times out', async () => { 530 | process.env.GCE_METADATA_HOST = '127.0.0.1:8080'; 531 | void secondaryHostRequest(5000); 532 | const primary = nock(`http://${process.env.GCE_METADATA_HOST}`) 533 | .get(`${PATH}/${TYPE}`) 534 | .delayConnection(3500) 535 | // this should never get called, as the 3000 timeout will trigger. 536 | .reply(200, {}, HEADERS); 537 | const isGCE = await gcp.isAvailable(); 538 | // secondary is allowed to simply timeout in the aether, to avoid 539 | // having a test that waits 5000 ms. 540 | primary.done(); 541 | assert.strictEqual(false, isGCE); 542 | }); 543 | 544 | it('should report isGCE if GCE_METADATA_HOST responds with 200', async () => { 545 | process.env.GCE_METADATA_HOST = '127.0.0.1:8080'; 546 | const scope = nock(`http://${process.env.GCE_METADATA_HOST}`) 547 | .get(`${PATH}/${TYPE}`) 548 | .reply(200, {}, HEADERS); 549 | assert.strictEqual(await gcp.isAvailable(), true); 550 | scope.done(); 551 | }); 552 | 553 | it('should report isGCE if secondary responds before primary', async () => { 554 | const secondary = secondaryHostRequest(10); 555 | const primary = nock(HOST) 556 | .get(`${PATH}/${TYPE}`) 557 | .delayConnection(3500) 558 | // this should never get called, as the 3000 timeout will trigger. 
559 | .reply(200, {}, HEADERS); 560 | const isGCE = await gcp.isAvailable(); 561 | await secondary; 562 | primary.done(); 563 | assert.strictEqual(isGCE, true); 564 | }); 565 | 566 | it('should fail fast on isAvailable if ENOENT is returned by secondary', async () => { 567 | const secondary = secondaryHostRequest(10, 'ENOENT'); 568 | const primary = nock(HOST) 569 | .get(`${PATH}/${TYPE}`) 570 | .delayConnection(250) 571 | .replyWithError({code: 'ENOENT'}); 572 | const isGCE = await gcp.isAvailable(); 573 | await secondary; 574 | primary.done(); 575 | assert.strictEqual(false, isGCE); 576 | }); 577 | 578 | it('should return false on unexpected errors and warn', async () => { 579 | const primary = nock(HOST) 580 | .get(`${PATH}/${TYPE}`) 581 | .replyWithError({code: '🤡'}); 582 | const secondary = nock(SECONDARY_HOST) 583 | .get(`${PATH}/${TYPE}`) 584 | .replyWithError({code: '🤡'}); 585 | const done = new Promise(resolve => { 586 | process.on('warning', warning => { 587 | assert.strictEqual( 588 | warning.toString().includes('unexpected error'), 589 | true, 590 | ); 591 | return resolve(); 592 | }); 593 | }); 594 | assert.strictEqual(await gcp.isAvailable(), false); 595 | primary.done(); 596 | secondary.done(); 597 | return done; 598 | }); 599 | 600 | it('should report isGCE if secondary succeeds before primary fails', async () => { 601 | const secondary = secondaryHostRequest(10); 602 | const primary = nock(HOST) 603 | .get(`${PATH}/${TYPE}`) 604 | .delayConnection(200) 605 | // this should never get called, as the 3000 timeout will trigger. 
606 | .reply(500, {}, HEADERS); 607 | await gcp.isAvailable(); 608 | await secondary; 609 | await new Promise(resolve => { 610 | setTimeout(() => { 611 | primary.done(); 612 | return resolve(); 613 | }, 500); 614 | }); 615 | }); 616 | 617 | it('should retry environment detection if DETECT_GCP_RETRIES >= 2', async () => { 618 | process.env.DETECT_GCP_RETRIES = '2'; 619 | const primary = nock(HOST) 620 | .get(`${PATH}/${TYPE}`) 621 | .reply(500, {code: 'ENETUNREACH'}) 622 | .get(`${PATH}/${TYPE}`) 623 | .reply(200, {}, HEADERS); 624 | const isGCE = await gcp.isAvailable(); 625 | primary.done(); 626 | assert.strictEqual(true, isGCE); 627 | delete process.env.DETECT_GCP_RETRIES; 628 | }); 629 | 630 | it('should cache response from first isAvailable() call', async () => { 631 | const secondary = secondaryHostRequest(500); 632 | const primary = nock(HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 633 | await gcp.isAvailable(); 634 | // because we haven't created additional mocks, we expect this to fail 635 | // if we were not caching the first isAvailable() call: 636 | const isGCE = await gcp.isAvailable(); 637 | await secondary; 638 | primary.done(); 639 | assert.strictEqual(isGCE, true); 640 | }); 641 | 642 | it('should only make one outbound request, if isAvailable() called in rapid succession', async () => { 643 | const secondary = secondaryHostRequest(500); 644 | const primary = nock(HOST).get(`${PATH}/${TYPE}`).reply(200, {}, HEADERS); 645 | void gcp.isAvailable(); 646 | // because we haven't created additional mocks, we expect this to fail 647 | // if we were not caching the first isAvailable() call: 648 | const isGCE = await gcp.isAvailable(); 649 | await secondary; 650 | primary.done(); 651 | assert.strictEqual(isGCE, true); 652 | }); 653 | 654 | it('resets cache when resetIsAvailableCache() is called', async () => { 655 | // we will attempt to hit the secondary and primary server twice, 656 | // mock accordingly. 
657 | const secondary = secondaryHostRequest(250, 'ENOENT'); 658 | const secondary2 = secondaryHostRequest(500, 'ENOENT'); 659 | const primary = nock(HOST) 660 | .get(`${PATH}/${TYPE}`) 661 | .reply(200, {}, HEADERS) 662 | .get(`${PATH}/${TYPE}`) 663 | .replyWithError({code: 'ENOENT'}); 664 | 665 | // Check whether we're in a GCP environment twice, resetting the cache 666 | // in between: 667 | await gcp.isAvailable(); 668 | gcp.resetIsAvailableCache(); 669 | const isGCE = await gcp.isAvailable(); 670 | 671 | await secondary; 672 | await secondary2; 673 | primary.done(); 674 | assert.strictEqual(isGCE, false); 675 | }); 676 | 677 | describe('getGCPResidency', () => { 678 | it('should set and use `gcpResidencyCache`', () => { 679 | gcp.setGCPResidency(false); 680 | assert.equal(gcp.getGCPResidency(), false); 681 | assert.equal(gcp.gcpResidencyCache, false); 682 | 683 | gcp.setGCPResidency(true); 684 | assert.equal(gcp.getGCPResidency(), true); 685 | assert.equal(gcp.gcpResidencyCache, true); 686 | 687 | gcp.setGCPResidency(null); 688 | assert.equal(gcp.getGCPResidency(), gcp.gcpResidencyCache); 689 | }); 690 | }); 691 | 692 | describe('setGCPResidency', () => { 693 | it('should set `gcpResidencyCache`', () => { 694 | gcp.setGCPResidency(true); 695 | assert.equal(gcp.gcpResidencyCache, true); 696 | 697 | gcp.setGCPResidency(false); 698 | assert.equal(gcp.gcpResidencyCache, false); 699 | }); 700 | 701 | it('should match gcp residency results by default', () => { 702 | // Set as GCP 703 | residency.setGCENetworkInterface(true); 704 | gcp.setGCPResidency(); 705 | assert.equal(gcp.gcpResidencyCache, true); 706 | 707 | // Set as non-GCP 708 | residency.setNonGCP(); 709 | gcp.setGCPResidency(); 710 | assert.equal(gcp.gcpResidencyCache, false); 711 | }); 712 | }); 713 | 714 | describe('requestTimeout', () => { 715 | it('should return a request timeout of `0` when running on GCP', () => { 716 | gcp.setGCPResidency(true); 717 | assert.strictEqual(gcp.requestTimeout(), 0); 718 
| }); 719 | 720 | it('should return a request timeout of `3000` when not running on GCP', () => { 721 | gcp.setGCPResidency(false); 722 | assert.strictEqual(gcp.requestTimeout(), 3000); 723 | }); 724 | }); 725 | }); 726 | -------------------------------------------------------------------------------- /test/utils/gcp-residency.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2023 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import {strict as assert} from 'assert'; 18 | import fs from 'fs'; 19 | import os from 'os'; 20 | 21 | import {SinonSandbox, SinonStub} from 'sinon'; 22 | 23 | import * as gcpResidency from '../../src/gcp-residency'; 24 | 25 | export class GCPResidencyUtil { 26 | /** 27 | * Stubs used in this utility. These are used within the provided sandbox. 28 | * */ 29 | stubs: { 30 | [key in 31 | | 'fsReadFileSync' 32 | | 'fsStatSync' 33 | | 'processEnv' 34 | | 'osNetworkInterfaces' 35 | | 'osPlatform']?: SinonStub | void; 36 | } = {}; 37 | 38 | constructor(public sandbox: SinonSandbox) {} 39 | 40 | /** 41 | * A simple utility for stubbing the networkInterface for GCE emulation. 42 | * 43 | * @param isGCE determines if the address should begin with `42:01` or not 44 | */ 45 | setGCENetworkInterface(isGCE = true) { 46 | const mac = isGCE ? 
'42:01:00:00:00:00' : '00:00:00:00:00:00'; 47 | 48 | this.stubs.osNetworkInterfaces ??= this.sandbox.stub( 49 | os, 50 | 'networkInterfaces', 51 | ); 52 | this.stubs.osNetworkInterfaces.returns({ 53 | 'test-interface': [{mac} as os.NetworkInterfaceInfo], 54 | }); 55 | } 56 | 57 | /** 58 | * A simple utility for stubbing the platform for GCE emulation. 59 | * 60 | * @param platform a Node.js platform 61 | */ 62 | setGCEPlatform(platform: NodeJS.Platform = 'linux') { 63 | this.stubs.osPlatform ??= this.sandbox.stub(os, 'platform'); 64 | 65 | this.stubs.osPlatform.returns(platform); 66 | } 67 | 68 | /** 69 | * A simple utility for stubbing the Linux BIOS files for GCE emulation. 70 | * 71 | * @param isGCE options: 72 | * - set `true` to simulate the files exist and are GCE 73 | * - set `false` for exist, but are not GCE 74 | * - set `null` for simulate ENOENT 75 | */ 76 | setGCELinuxBios(isGCE: boolean | null) { 77 | this.stubs.fsReadFileSync ??= this.sandbox.stub(fs, 'readFileSync'); 78 | this.stubs.fsStatSync ??= this.sandbox.stub(fs, 'statSync'); 79 | 80 | this.stubs.fsStatSync.callsFake(path => { 81 | assert.equal(path, gcpResidency.GCE_LINUX_BIOS_PATHS.BIOS_DATE); 82 | 83 | return undefined; 84 | }); 85 | 86 | this.stubs.fsReadFileSync.callsFake((path, encoding) => { 87 | assert.equal(path, gcpResidency.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR); 88 | assert.equal(encoding, 'utf8'); 89 | 90 | if (isGCE === true) { 91 | return 'x Google x'; 92 | } else if (isGCE === false) { 93 | return 'Sandwich Co.'; 94 | } else { 95 | throw new Error("File doesn't exist"); 96 | } 97 | }); 98 | } 99 | 100 | /** 101 | * Removes serverless-related environment variables from the 102 | * environment (such as Cloud Run and Cloud Functions). 
103 | */ 104 | removeServerlessEnvironmentVariables() { 105 | const customEnv = {...process.env}; 106 | 107 | delete customEnv.CLOUD_RUN_JOB; 108 | delete customEnv.FUNCTION_NAME; 109 | delete customEnv.K_SERVICE; 110 | 111 | this.stubs.processEnv ??= this.sandbox.stub(process, 'env'); 112 | this.stubs.processEnv.value(customEnv); 113 | } 114 | 115 | /** 116 | * Sets the environment as non-GCP by stubbing and setting/removing the 117 | * environment variables. 118 | */ 119 | setNonGCP() { 120 | // `isGoogleCloudServerless` = false 121 | this.removeServerlessEnvironmentVariables(); 122 | 123 | // `isGoogleComputeEngine` = false 124 | this.setGCENetworkInterface(false); 125 | this.setGCELinuxBios(false); 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "lib": ["ES2023", "dom"], 5 | "module": "Node16", 6 | "moduleResolution": "Node16", 7 | "outDir": "build", 8 | "rootDir": "." 9 | }, 10 | "include": ["src/*.ts", "test/*.ts", "system-test/*.ts", "test/**/*.ts"], 11 | "exclude": ["test/fixtures"] 12 | } 13 | --------------------------------------------------------------------------------