├── .compodocrc ├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── .OwlBot.lock.yaml ├── .OwlBot.yaml ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ ├── feature_request.md │ ├── question.md │ └── support_request.md ├── PULL_REQUEST_TEMPLATE.md ├── auto-approve.yml ├── auto-label.yaml ├── generated-files-bot.yml ├── release-please.yml ├── release-trigger.yml ├── sync-repo-settings.yaml └── workflows │ └── ci.yaml ├── .gitignore ├── .jsdoc.js ├── .kokoro ├── .gitattributes ├── common.cfg ├── continuous │ └── node14 │ │ ├── common.cfg │ │ ├── lint.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg ├── docs.sh ├── lint.sh ├── populate-secrets.sh ├── presubmit │ ├── node14 │ │ ├── common.cfg │ │ ├── samples-test.cfg │ │ ├── system-test.cfg │ │ └── test.cfg │ └── windows │ │ ├── common.cfg │ │ └── test.cfg ├── publish.sh ├── release │ ├── common.cfg │ ├── docs-devsite.cfg │ ├── docs-devsite.sh │ ├── docs.cfg │ ├── docs.sh │ └── publish.cfg ├── samples-test.sh ├── system-test.sh ├── test.bat ├── test.sh ├── trampoline.sh └── trampoline_v2.sh ├── .mocharc.js ├── .nycrc ├── .prettierignore ├── .prettierrc.js ├── .repo-metadata.json ├── .trampolinerc ├── 20MB.zip ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── linkinator.config.json ├── owlbot.py ├── package.json ├── renovate.json ├── samples ├── package.json ├── quickstart.js └── system-test │ └── system.js ├── src ├── cli.ts └── index.ts ├── system-test ├── kitchen.ts └── util.ts ├── test ├── fixtures │ └── keys.json └── test.ts └── tsconfig.json /.compodocrc: -------------------------------------------------------------------------------- 1 | --- 2 | tsconfig: ./tsconfig.json 3 | output: ./docs 4 | theme: material 5 | hideGenerator: true 6 | disablePrivate: true 7 | disableProtected: true 8 | disableInternal: true 9 | disableCoverage: true 10 | disableGraph: true 11 | 
-------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/coverage 3 | test/fixtures 4 | build/ 5 | docs/ 6 | protos/ 7 | samples/generated/ 8 | system-test/**/fixtures 9 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts" 3 | } 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ts text eol=lf 2 | *.js text eol=lf 3 | protos/* linguist-generated 4 | **/api-extractor.json linguist-language=JSON-with-Comments 5 | -------------------------------------------------------------------------------- /.github/.OwlBot.lock.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2023 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | docker: 15 | image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest 16 | digest: sha256:bfe4592953269bfa8d135200ca1b17809f106a337a885d7ecc12cd2a9998e98a 17 | # created: 2023-11-15T20:00:24.246072277Z 18 | -------------------------------------------------------------------------------- /.github/.OwlBot.yaml: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | docker: 15 | image: gcr.io/cloud-devrel-public-resources/owlbot-nodejs:latest 16 | 17 | 18 | begin-after-commit-hash: 397c0bfd367a2427104f988d5329bc117caafd95 19 | 20 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Code owners file. 2 | # This file controls who is tagged for review for any given pull request. 3 | # 4 | # For syntax help see: 5 | # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax 6 | 7 | 8 | # The yoshi-nodejs team is the default owner for nodejs repositories. 9 | * @googleapis/yoshi-nodejs @googleapis/cloud-storage-dpe 10 | 11 | # The github automation team is the default owner for the auto-approve file. 
12 | .github/auto-approve.yml @googleapis/github-automation 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | labels: 'type: bug, priority: p2' 5 | --- 6 | 7 | Thanks for stopping by to let us know something could be better! 8 | 9 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 10 | 11 | 1) Is this a client library issue or a product issue? 12 | This is the client library for . We will only be able to assist with issues that pertain to the behaviors of this library. If the issue you're experiencing is due to the behavior of the product itself, please visit the [ Support page]() to reach the most relevant engineers. 13 | 14 | 2) Did someone already solve this? 15 | - Search the issues already opened: https://github.com/googleapis/gcs-resumable-upload/issues 16 | - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-node 17 | - Search or ask on StackOverflow (engineers monitor these tags): http://stackoverflow.com/questions/tagged/google-cloud-platform+node.js 18 | 19 | 3) Do you have a support contract? 20 | Please create an issue in the [support console](https://cloud.google.com/support/) to ensure a timely response. 21 | 22 | If the support paths suggested above still do not result in a resolution, please provide the following details. 23 | 24 | #### Environment details 25 | 26 | - OS: 27 | - Node.js version: 28 | - npm version: 29 | - `gcs-resumable-upload` version: 30 | 31 | #### Steps to reproduce 32 | 33 | 1. ? 34 | 2. ? 35 | 36 | Making sure to follow these steps will guarantee the quickest resolution possible. 37 | 38 | Thanks! 
39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Google Cloud Support 3 | url: https://cloud.google.com/support/ 4 | about: If you have a support contract with Google, please use the Google Cloud Support portal. 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this library 4 | labels: 'type: feature request, priority: p3' 5 | --- 6 | 7 | Thanks for stopping by to let us know something could be better! 8 | 9 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 10 | 11 | **Is your feature request related to a problem? Please describe.** 12 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | **Describe alternatives you've considered** 16 | A clear and concise description of any alternative solutions or features you've considered. 17 | **Additional context** 18 | Add any other context or screenshots about the feature request here. 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask a question 4 | labels: 'type: question, priority: p3' 5 | --- 6 | 7 | Thanks for stopping by to ask us a question! 
Please make sure to include: 8 | - What you're trying to do 9 | - What code you've already tried 10 | - Any error messages you're getting 11 | 12 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/support_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Support request 3 | about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. 4 | 5 | --- 6 | 7 | **PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 8 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: 2 | - [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/gcs-resumable-upload/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea 3 | - [ ] Ensure the tests and linter pass 4 | - [ ] Code coverage does not decrease (if any source code was changed) 5 | - [ ] Appropriate docs were updated (if necessary) 6 | 7 | Fixes # 🦕 8 | -------------------------------------------------------------------------------- /.github/auto-approve.yml: -------------------------------------------------------------------------------- 1 | processes: 2 | - "NodeDependency" 3 | - "OwlBotTemplateChanges" 4 | -------------------------------------------------------------------------------- /.github/auto-label.yaml: -------------------------------------------------------------------------------- 1 | requestsize: 2 | enabled: true 3 | -------------------------------------------------------------------------------- /.github/generated-files-bot.yml: -------------------------------------------------------------------------------- 1 | generatedFiles: 2 | - path: '.kokoro/**' 3 | message: '`.kokoro` files are templated and should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 4 | - path: '.github/CODEOWNERS' 5 | message: 'CODEOWNERS should instead be modified via the `codeowner_team` property in .repo-metadata.json' 6 | - path: '.github/workflows/ci.yaml' 7 | message: '`.github/workflows/ci.yaml` (GitHub Actions) should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 8 | - path: '.github/generated-files-bot.+(yml|yaml)' 9 | message: '`.github/generated-files-bot.(yml|yaml)` should be updated in [`synthtool`](https://github.com/googleapis/synthtool)' 10 | - path: 'README.md' 11 | message: '`README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). 
However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 12 | - path: 'samples/README.md' 13 | message: '`samples/README.md` is managed by [`synthtool`](https://github.com/googleapis/synthtool). However, a partials file can be used to update the README, e.g.: https://github.com/googleapis/nodejs-storage/blob/main/.readme-partials.yaml' 14 | ignoreAuthors: 15 | - 'gcf-owl-bot[bot]' 16 | - 'yoshi-automation' 17 | -------------------------------------------------------------------------------- /.github/release-please.yml: -------------------------------------------------------------------------------- 1 | handleGHRelease: true 2 | releaseType: node 3 | -------------------------------------------------------------------------------- /.github/release-trigger.yml: -------------------------------------------------------------------------------- 1 | enabled: true 2 | -------------------------------------------------------------------------------- /.github/sync-repo-settings.yaml: -------------------------------------------------------------------------------- 1 | branchProtectionRules: 2 | - pattern: main 3 | isAdminEnforced: true 4 | requiredApprovingReviewCount: 1 5 | requiresCodeOwnerReviews: true 6 | requiresStrictStatusChecks: true 7 | requiredStatusCheckContexts: 8 | - "ci/kokoro: Samples test" 9 | - "ci/kokoro: System test" 10 | - lint 11 | - test (14) 12 | - test (16) 13 | - test (18) 14 | - cla/google 15 | - windows 16 | - OwlBot Post Processor 17 | permissionRules: 18 | - team: yoshi-admins 19 | permission: admin 20 | - team: jsteam-admins 21 | permission: admin 22 | - team: jsteam 23 | permission: push 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | name: ci 7 | jobs: 8 
| test: 9 | runs-on: ubuntu-latest 10 | strategy: 11 | matrix: 12 | node: [14, 16, 18, 20] 13 | steps: 14 | - uses: actions/checkout@v3 15 | - uses: actions/setup-node@v3 16 | with: 17 | node-version: ${{ matrix.node }} 18 | - run: node --version 19 | # The first installation step ensures that all of our production 20 | # dependencies work on the given Node.js version, this helps us find 21 | # dependencies that don't match our engines field: 22 | - run: npm install --production --engine-strict --ignore-scripts --no-package-lock 23 | # Clean up the production install, before installing dev/production: 24 | - run: rm -rf node_modules 25 | - run: npm install 26 | - run: npm test 27 | env: 28 | MOCHA_THROW_DEPRECATION: false 29 | windows: 30 | runs-on: windows-latest 31 | steps: 32 | - uses: actions/checkout@v3 33 | - uses: actions/setup-node@v3 34 | with: 35 | node-version: 14 36 | - run: npm install 37 | - run: npm test 38 | env: 39 | MOCHA_THROW_DEPRECATION: false 40 | lint: 41 | runs-on: ubuntu-latest 42 | steps: 43 | - uses: actions/checkout@v3 44 | - uses: actions/setup-node@v3 45 | with: 46 | node-version: 14 47 | - run: npm install 48 | - run: npm run lint 49 | docs: 50 | runs-on: ubuntu-latest 51 | steps: 52 | - uses: actions/checkout@v3 53 | - uses: actions/setup-node@v3 54 | with: 55 | node-version: 14 56 | - run: npm install 57 | - run: npm run docs 58 | - uses: JustinBeckwith/linkinator-action@v1 59 | with: 60 | paths: docs/ 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | key.json 3 | package-lock.json 4 | .vscode 5 | build 6 | package-lock.json 7 | __pycache__ 8 | .coverage 9 | .nyc_output 10 | docs/ 11 | .idea 12 | -------------------------------------------------------------------------------- /.jsdoc.js: -------------------------------------------------------------------------------- 1 | // Copyright 
2019 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | // 15 | 16 | 'use strict'; 17 | 18 | module.exports = { 19 | opts: { 20 | readme: './README.md', 21 | package: './package.json', 22 | template: './node_modules/jsdoc-fresh', 23 | recurse: true, 24 | verbose: true, 25 | destination: './docs/' 26 | }, 27 | plugins: [ 28 | 'plugins/markdown', 29 | 'jsdoc-region-tag' 30 | ], 31 | source: { 32 | excludePattern: '(^|\\/|\\\\)[._]', 33 | include: [ 34 | 'src' 35 | ], 36 | includePattern: '\\.js$' 37 | }, 38 | templates: { 39 | copyright: 'Copyright 2019 Google, LLC.', 40 | includeDate: false, 41 | sourceFiles: false, 42 | systemName: 'gcs-resumable-upload', 43 | theme: 'lumen', 44 | default: { 45 | "outputSourceFiles": false 46 | } 47 | }, 48 | markdown: { 49 | idInHeadings: true 50 | } 51 | }; 52 | -------------------------------------------------------------------------------- /.kokoro/.gitattributes: -------------------------------------------------------------------------------- 1 | * linguist-generated=true 2 | -------------------------------------------------------------------------------- /.kokoro/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 
11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcs-resumable-upload/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node14/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcs-resumable-upload/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/continuous/node14/lint.cfg: -------------------------------------------------------------------------------- 1 | env_vars: { 2 | key: "TRAMPOLINE_BUILD_FILE" 3 | value: "github/gcs-resumable-upload/.kokoro/lint.sh" 4 | } 5 | -------------------------------------------------------------------------------- /.kokoro/continuous/node14/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcs-resumable-upload/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node14/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcs-resumable-upload/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/continuous/node14/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/gcs-resumable-upload/0932e6843b3139db580a202c9318a2c7139f0dad/.kokoro/continuous/node14/test.cfg -------------------------------------------------------------------------------- /.kokoro/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 22 | 23 | npm install 24 | 25 | npm run docs-test 26 | -------------------------------------------------------------------------------- /.kokoro/lint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 22 | 23 | npm install 24 | 25 | # Install and link samples 26 | if [ -f samples/package.json ]; then 27 | cd samples/ 28 | npm link ../ 29 | npm install 30 | cd .. 31 | fi 32 | 33 | npm run lint 34 | -------------------------------------------------------------------------------- /.kokoro/populate-secrets.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2020 Google LLC. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 
6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # This file is called in the early stage of `trampoline_v2.sh` to 17 | # populate secrets needed for the CI builds. 18 | 19 | set -eo pipefail 20 | 21 | function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} 22 | function msg { println "$*" >&2 ;} 23 | function println { printf '%s\n' "$(now) $*" ;} 24 | 25 | # Populates requested secrets set in SECRET_MANAGER_KEYS 26 | 27 | # In Kokoro CI builds, we use the service account attached to the 28 | # Kokoro VM. This means we need to setup auth on other CI systems. 29 | # For local run, we just use the gcloud command for retrieving the 30 | # secrets. 31 | 32 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 33 | GCLOUD_COMMANDS=( 34 | "docker" 35 | "run" 36 | "--entrypoint=gcloud" 37 | "--volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR}" 38 | "gcr.io/google.com/cloudsdktool/cloud-sdk" 39 | ) 40 | if [[ "${TRAMPOLINE_CI:-}" == "kokoro" ]]; then 41 | SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" 42 | else 43 | echo "Authentication for this CI system is not implemented yet." 44 | exit 2 45 | # TODO: Determine appropriate SECRET_LOCATION and the GCLOUD_COMMANDS. 46 | fi 47 | else 48 | # For local run, use /dev/shm or temporary directory for 49 | # KOKORO_GFILE_DIR. 
50 | if [[ -d "/dev/shm" ]]; then 51 | export KOKORO_GFILE_DIR=/dev/shm 52 | else 53 | export KOKORO_GFILE_DIR=$(mktemp -d -t ci-XXXXXXXX) 54 | fi 55 | SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" 56 | GCLOUD_COMMANDS=("gcloud") 57 | fi 58 | 59 | msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" 60 | mkdir -p ${SECRET_LOCATION} 61 | 62 | for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") 63 | do 64 | msg "Retrieving secret ${key}" 65 | "${GCLOUD_COMMANDS[@]}" \ 66 | secrets versions access latest \ 67 | --project cloud-devrel-kokoro-resources \ 68 | --secret $key > \ 69 | "$SECRET_LOCATION/$key" 70 | if [[ $? == 0 ]]; then 71 | msg "Secret written to ${SECRET_LOCATION}/${key}" 72 | else 73 | msg "Error retrieving secret ${key}" 74 | exit 2 75 | fi 76 | done 77 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node14/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | # Build logs will be here 4 | action { 5 | define_artifacts { 6 | regex: "**/*sponge_log.xml" 7 | } 8 | } 9 | 10 | # Download trampoline resources. 11 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 12 | 13 | # Use the trampoline script to run in docker. 14 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 15 | 16 | # Configure the docker image for kokoro-trampoline. 17 | env_vars: { 18 | key: "TRAMPOLINE_IMAGE" 19 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 20 | } 21 | env_vars: { 22 | key: "TRAMPOLINE_BUILD_FILE" 23 | value: "github/gcs-resumable-upload/.kokoro/test.sh" 24 | } 25 | -------------------------------------------------------------------------------- /.kokoro/presubmit/node14/samples-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 
2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcs-resumable-upload/.kokoro/samples-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node14/system-test.cfg: -------------------------------------------------------------------------------- 1 | # Download resources for system tests (service account key, etc.) 2 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-nodejs" 3 | 4 | env_vars: { 5 | key: "TRAMPOLINE_BUILD_FILE" 6 | value: "github/gcs-resumable-upload/.kokoro/system-test.sh" 7 | } 8 | 9 | env_vars: { 10 | key: "SECRET_MANAGER_KEYS" 11 | value: "long-door-651-kokoro-system-test-service-account" 12 | } -------------------------------------------------------------------------------- /.kokoro/presubmit/node14/test.cfg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/gcs-resumable-upload/0932e6843b3139db580a202c9318a2c7139f0dad/.kokoro/presubmit/node14/test.cfg -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/common.cfg: -------------------------------------------------------------------------------- 1 | # Format: //devtools/kokoro/config/proto/build.proto 2 | 3 | -------------------------------------------------------------------------------- /.kokoro/presubmit/windows/test.cfg: -------------------------------------------------------------------------------- 1 | # Use the test file directly 2 | build_file: "gcs-resumable-upload/.kokoro/test.bat" 3 | -------------------------------------------------------------------------------- /.kokoro/publish.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Start the releasetool reporter 22 | python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script 23 | 24 | cd $(dirname $0)/.. 25 | 26 | NPM_TOKEN=$(cat $KOKORO_KEYSTORE_DIR/73713_google-cloud-npm-token-1) 27 | echo "//wombat-dressing-room.appspot.com/:_authToken=${NPM_TOKEN}" > ~/.npmrc 28 | 29 | npm install 30 | npm pack . 31 | # npm provides no way to specify, observe, or predict the name of the tarball 32 | # file it generates. We have to look in the current directory for the freshest 33 | # .tgz file. 34 | TARBALL=$(ls -1 -t *.tgz | head -1) 35 | 36 | npm publish --access=public --registry=https://wombat-dressing-room.appspot.com "$TARBALL" 37 | 38 | # Kokoro collects *.tgz and package-lock.json files and stores them in Placer 39 | # so we can generate SBOMs and attestations. 40 | # However, we *don't* want Kokoro to collect package-lock.json and *.tgz files 41 | # that happened to be installed with dependencies. 
42 | find node_modules -name package-lock.json -o -name "*.tgz" | xargs rm -f -------------------------------------------------------------------------------- /.kokoro/release/common.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "yoshi-automation-github-key" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 21 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/gcs-resumable-upload/.kokoro/release/docs-devsite.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs-devsite.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2021 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | if [[ -z "$CREDENTIALS" ]]; then 20 | # if CREDENTIALS are explicitly set, assume we're testing locally 21 | # and don't set NPM_CONFIG_PREFIX. 22 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 23 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 24 | cd $(dirname $0)/../.. 25 | fi 26 | 27 | npm install 28 | npm install --no-save @google-cloud/cloud-rad@^0.4.0 29 | # publish docs to devsite 30 | npx @google-cloud/cloud-rad . cloud-rad 31 | -------------------------------------------------------------------------------- /.kokoro/release/docs.cfg: -------------------------------------------------------------------------------- 1 | # service account used to publish up-to-date docs. 2 | before_action { 3 | fetch_keystore { 4 | keystore_resource { 5 | keystore_config_id: 73713 6 | keyname: "docuploader_service_account" 7 | } 8 | } 9 | } 10 | 11 | # doc publications use a Python image. 12 | env_vars: { 13 | key: "TRAMPOLINE_IMAGE" 14 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 15 | } 16 | 17 | # Download trampoline resources. 18 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 19 | 20 | # Use the trampoline script to run in docker. 
21 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 22 | 23 | env_vars: { 24 | key: "TRAMPOLINE_BUILD_FILE" 25 | value: "github/gcs-resumable-upload/.kokoro/release/docs.sh" 26 | } 27 | -------------------------------------------------------------------------------- /.kokoro/release/docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2019 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | # build jsdocs (Python is installed on the Node 10 docker image). 20 | if [[ -z "$CREDENTIALS" ]]; then 21 | # if CREDENTIALS are explicitly set, assume we're testing locally 22 | # and don't set NPM_CONFIG_PREFIX. 23 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 24 | export PATH="$PATH:${NPM_CONFIG_PREFIX}/bin" 25 | cd $(dirname $0)/../.. 26 | fi 27 | npm install 28 | npm run docs 29 | 30 | # create docs.metadata, based on package.json and .repo-metadata.json. 
31 | npm i json@9.0.6 -g 32 | python3 -m docuploader create-metadata \ 33 | --name=$(cat .repo-metadata.json | json name) \ 34 | --version=$(cat package.json | json version) \ 35 | --language=$(cat .repo-metadata.json | json language) \ 36 | --distribution-name=$(cat .repo-metadata.json | json distribution_name) \ 37 | --product-page=$(cat .repo-metadata.json | json product_documentation) \ 38 | --github-repository=$(cat .repo-metadata.json | json repo) \ 39 | --issue-tracker=$(cat .repo-metadata.json | json issue_tracker) 40 | cp docs.metadata ./docs/docs.metadata 41 | 42 | # deploy the docs. 43 | if [[ -z "$CREDENTIALS" ]]; then 44 | CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account 45 | fi 46 | if [[ -z "$BUCKET" ]]; then 47 | BUCKET=docs-staging 48 | fi 49 | python3 -m docuploader upload ./docs --credentials $CREDENTIALS --staging-bucket $BUCKET 50 | -------------------------------------------------------------------------------- /.kokoro/release/publish.cfg: -------------------------------------------------------------------------------- 1 | before_action { 2 | fetch_keystore { 3 | keystore_resource { 4 | keystore_config_id: 73713 5 | keyname: "docuploader_service_account" 6 | } 7 | } 8 | } 9 | 10 | before_action { 11 | fetch_keystore { 12 | keystore_resource { 13 | keystore_config_id: 73713 14 | keyname: "google-cloud-npm-token-1" 15 | } 16 | } 17 | } 18 | 19 | env_vars: { 20 | key: "SECRET_MANAGER_KEYS" 21 | value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" 22 | } 23 | 24 | # Download trampoline resources. 25 | gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" 26 | 27 | # Use the trampoline script to run in docker. 28 | build_file: "gcs-resumable-upload/.kokoro/trampoline_v2.sh" 29 | 30 | # Configure the docker image for kokoro-trampoline. 
31 | env_vars: { 32 | key: "TRAMPOLINE_IMAGE" 33 | value: "gcr.io/cloud-devrel-kokoro-resources/node:14-user" 34 | } 35 | 36 | env_vars: { 37 | key: "TRAMPOLINE_BUILD_FILE" 38 | value: "github/gcs-resumable-upload/.kokoro/publish.sh" 39 | } 40 | 41 | # Store the packages we uploaded to npmjs.org and their corresponding 42 | # package-lock.jsons in Placer. That way, we have a record of exactly 43 | # what we published, and which version of which tools we used to publish 44 | # it, which we can use to generate SBOMs and attestations. 45 | action { 46 | define_artifacts { 47 | regex: "github/**/*.tgz" 48 | regex: "github/**/package-lock.json" 49 | strip_prefix: "github" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /.kokoro/samples-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Setup service account credentials. 22 | export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account 23 | export GCLOUD_PROJECT=long-door-651 24 | 25 | cd $(dirname $0)/.. 
26 | 27 | # Run a pre-test hook, if a pre-samples-test.sh is in the project 28 | if [ -f .kokoro/pre-samples-test.sh ]; then 29 | set +x 30 | . .kokoro/pre-samples-test.sh 31 | set -x 32 | fi 33 | 34 | if [ -f samples/package.json ]; then 35 | npm install 36 | 37 | # Install and link samples 38 | cd samples/ 39 | npm link ../ 40 | npm install 41 | cd .. 42 | # If tests are running against main branch, configure flakybot 43 | # to open issues on failures: 44 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 45 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 46 | export MOCHA_REPORTER=xunit 47 | cleanup() { 48 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 49 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 50 | } 51 | trap cleanup EXIT HUP 52 | fi 53 | 54 | npm run samples-test 55 | fi 56 | 57 | # codecov combines coverage across integration and unit tests. Include 58 | # the logic below for any environment you wish to collect coverage for: 59 | COVERAGE_NODE=14 60 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 61 | NYC_BIN=./node_modules/nyc/bin/nyc.js 62 | if [ -f "$NYC_BIN" ]; then 63 | $NYC_BIN report || true 64 | fi 65 | bash $KOKORO_GFILE_DIR/codecov.sh 66 | else 67 | echo "coverage is only reported for Node $COVERAGE_NODE" 68 | fi 69 | -------------------------------------------------------------------------------- /.kokoro/system-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | # Setup service account credentials. 22 | export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/secret_manager/long-door-651-kokoro-system-test-service-account 23 | export GCLOUD_PROJECT=long-door-651 24 | 25 | cd $(dirname $0)/.. 26 | 27 | # Run a pre-test hook, if a pre-system-test.sh is in the project 28 | if [ -f .kokoro/pre-system-test.sh ]; then 29 | set +x 30 | . .kokoro/pre-system-test.sh 31 | set -x 32 | fi 33 | 34 | npm install 35 | 36 | # If tests are running against main branch, configure flakybot 37 | # to open issues on failures: 38 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 39 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 40 | export MOCHA_REPORTER=xunit 41 | cleanup() { 42 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 43 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 44 | } 45 | trap cleanup EXIT HUP 46 | fi 47 | 48 | npm run system-test 49 | 50 | # codecov combines coverage across integration and unit tests. 
Include 51 | # the logic below for any environment you wish to collect coverage for: 52 | COVERAGE_NODE=14 53 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 54 | NYC_BIN=./node_modules/nyc/bin/nyc.js 55 | if [ -f "$NYC_BIN" ]; then 56 | $NYC_BIN report || true 57 | fi 58 | bash $KOKORO_GFILE_DIR/codecov.sh 59 | else 60 | echo "coverage is only reported for Node $COVERAGE_NODE" 61 | fi 62 | -------------------------------------------------------------------------------- /.kokoro/test.bat: -------------------------------------------------------------------------------- 1 | @rem Copyright 2018 Google LLC. All rights reserved. 2 | @rem 3 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 4 | @rem you may not use this file except in compliance with the License. 5 | @rem You may obtain a copy of the License at 6 | @rem 7 | @rem http://www.apache.org/licenses/LICENSE-2.0 8 | @rem 9 | @rem Unless required by applicable law or agreed to in writing, software 10 | @rem distributed under the License is distributed on an "AS IS" BASIS, 11 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | @rem See the License for the specific language governing permissions and 13 | @rem limitations under the License. 14 | 15 | @echo "Starting Windows build" 16 | 17 | cd /d %~dp0 18 | cd .. 
19 | 20 | @rem npm path is not currently set in our image, we should fix this next time 21 | @rem we upgrade Node.js in the image: 22 | SET PATH=%PATH%;/cygdrive/c/Program Files/nodejs/npm 23 | 24 | call nvm use v14.17.3 25 | call which node 26 | 27 | call npm install || goto :error 28 | call npm run test || goto :error 29 | 30 | goto :EOF 31 | 32 | :error 33 | exit /b 1 34 | -------------------------------------------------------------------------------- /.kokoro/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2018 Google LLC 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 7 | # You may obtain a copy of the License at 8 | # 9 | # https://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | 17 | set -eo pipefail 18 | 19 | export NPM_CONFIG_PREFIX=${HOME}/.npm-global 20 | 21 | cd $(dirname $0)/.. 
22 | 23 | npm install 24 | # If tests are running against main branch, configure flakybot 25 | # to open issues on failures: 26 | if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]] || [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"nightly"* ]]; then 27 | export MOCHA_REPORTER_OUTPUT=test_output_sponge_log.xml 28 | export MOCHA_REPORTER=xunit 29 | cleanup() { 30 | chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot 31 | $KOKORO_GFILE_DIR/linux_amd64/flakybot 32 | } 33 | trap cleanup EXIT HUP 34 | fi 35 | # Unit tests exercise the entire API surface, which may include 36 | # deprecation warnings: 37 | export MOCHA_THROW_DEPRECATION=false 38 | npm test 39 | 40 | # codecov combines coverage across integration and unit tests. Include 41 | # the logic below for any environment you wish to collect coverage for: 42 | COVERAGE_NODE=14 43 | if npx check-node-version@3.3.0 --silent --node $COVERAGE_NODE; then 44 | NYC_BIN=./node_modules/nyc/bin/nyc.js 45 | if [ -f "$NYC_BIN" ]; then 46 | $NYC_BIN report || true 47 | fi 48 | bash $KOKORO_GFILE_DIR/codecov.sh 49 | else 50 | echo "coverage is only reported for Node $COVERAGE_NODE" 51 | fi 52 | -------------------------------------------------------------------------------- /.kokoro/trampoline.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Copyright 2017 Google Inc. 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 
15 | 16 | # This file is not used any more, but we keep this file for making it 17 | # easy to roll back. 18 | # TODO: Remove this file from the template. 19 | 20 | set -eo pipefail 21 | 22 | # Always run the cleanup script, regardless of the success of bouncing into 23 | # the container. 24 | function cleanup() { 25 | chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 26 | ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh 27 | echo "cleanup"; 28 | } 29 | trap cleanup EXIT 30 | 31 | $(dirname $0)/populate-secrets.sh # Secret Manager secrets. 32 | python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" 33 | -------------------------------------------------------------------------------- /.kokoro/trampoline_v2.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Copyright 2020 Google LLC 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | # trampoline_v2.sh 17 | # 18 | # If you want to make a change to this file, consider doing so at: 19 | # https://github.com/googlecloudplatform/docker-ci-helper 20 | # 21 | # This script is for running CI builds. For Kokoro builds, we 22 | # set this script to `build_file` field in the Kokoro configuration. 23 | 24 | # This script does 3 things. 25 | # 26 | # 1. Prepare the Docker image for the test 27 | # 2. Run the Docker with appropriate flags to run the test 28 | # 3. 
Upload the newly built Docker image 29 | # 30 | # in a way that is somewhat compatible with trampoline_v1. 31 | # 32 | # These environment variables are required: 33 | # TRAMPOLINE_IMAGE: The docker image to use. 34 | # TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 35 | # 36 | # You can optionally change these environment variables: 37 | # TRAMPOLINE_IMAGE_UPLOAD: 38 | # (true|false): Whether to upload the Docker image after the 39 | # successful builds. 40 | # TRAMPOLINE_BUILD_FILE: The script to run in the docker container. 41 | # TRAMPOLINE_WORKSPACE: The workspace path in the docker container. 42 | # Defaults to /workspace. 43 | # Potentially there are some repo specific envvars in .trampolinerc in 44 | # the project root. 45 | # 46 | # Here is an example for running this script. 47 | # TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ 48 | # TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ 49 | # .kokoro/trampoline_v2.sh 50 | 51 | set -euo pipefail 52 | 53 | TRAMPOLINE_VERSION="2.0.7" 54 | 55 | if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then 56 | readonly IO_COLOR_RED="$(tput setaf 1)" 57 | readonly IO_COLOR_GREEN="$(tput setaf 2)" 58 | readonly IO_COLOR_YELLOW="$(tput setaf 3)" 59 | readonly IO_COLOR_RESET="$(tput sgr0)" 60 | else 61 | readonly IO_COLOR_RED="" 62 | readonly IO_COLOR_GREEN="" 63 | readonly IO_COLOR_YELLOW="" 64 | readonly IO_COLOR_RESET="" 65 | fi 66 | 67 | function function_exists { 68 | [ $(LC_ALL=C type -t $1)"" == "function" ] 69 | } 70 | 71 | # Logs a message using the given color. The first argument must be one 72 | # of the IO_COLOR_* variables defined above, such as 73 | # "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the 74 | # given color. The log message will also have an RFC-3339 timestamp 75 | # prepended (in UTC). You can disable the color output by setting 76 | # TERM=vt100. 
77 | function log_impl() { 78 | local color="$1" 79 | shift 80 | local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" 81 | echo "================================================================" 82 | echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" 83 | echo "================================================================" 84 | } 85 | 86 | # Logs the given message with normal coloring and a timestamp. 87 | function log() { 88 | log_impl "${IO_COLOR_RESET}" "$@" 89 | } 90 | 91 | # Logs the given message in green with a timestamp. 92 | function log_green() { 93 | log_impl "${IO_COLOR_GREEN}" "$@" 94 | } 95 | 96 | # Logs the given message in yellow with a timestamp. 97 | function log_yellow() { 98 | log_impl "${IO_COLOR_YELLOW}" "$@" 99 | } 100 | 101 | # Logs the given message in red with a timestamp. 102 | function log_red() { 103 | log_impl "${IO_COLOR_RED}" "$@" 104 | } 105 | 106 | readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) 107 | readonly tmphome="${tmpdir}/h" 108 | mkdir -p "${tmphome}" 109 | 110 | function cleanup() { 111 | rm -rf "${tmpdir}" 112 | } 113 | trap cleanup EXIT 114 | 115 | RUNNING_IN_CI="${RUNNING_IN_CI:-false}" 116 | 117 | # The workspace in the container, defaults to /workspace. 118 | TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" 119 | 120 | pass_down_envvars=( 121 | # TRAMPOLINE_V2 variables. 122 | # Tells scripts whether they are running as part of CI or not. 123 | "RUNNING_IN_CI" 124 | # Indicates which CI system we're in. 125 | "TRAMPOLINE_CI" 126 | # Indicates the version of the script. 127 | "TRAMPOLINE_VERSION" 128 | # Contains path to build artifacts being executed. 129 | "KOKORO_BUILD_ARTIFACTS_SUBDIR" 130 | ) 131 | 132 | log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" 133 | 134 | # Detect which CI systems we're in. If we're in any of the CI systems 135 | # we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be 136 | # the name of the CI system. 
Both envvars will be passing down to the 137 | # container for telling which CI system we're in. 138 | if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then 139 | # descriptive env var for indicating it's on CI. 140 | RUNNING_IN_CI="true" 141 | TRAMPOLINE_CI="kokoro" 142 | if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then 143 | if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then 144 | log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." 145 | exit 1 146 | fi 147 | # This service account will be activated later. 148 | TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" 149 | else 150 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 151 | gcloud auth list 152 | fi 153 | log_yellow "Configuring Container Registry access" 154 | gcloud auth configure-docker --quiet 155 | fi 156 | pass_down_envvars+=( 157 | # KOKORO dynamic variables. 
158 | "KOKORO_BUILD_NUMBER" 159 | "KOKORO_BUILD_ID" 160 | "KOKORO_JOB_NAME" 161 | "KOKORO_GIT_COMMIT" 162 | "KOKORO_GITHUB_COMMIT" 163 | "KOKORO_GITHUB_PULL_REQUEST_NUMBER" 164 | "KOKORO_GITHUB_PULL_REQUEST_COMMIT" 165 | # For flakybot 166 | "KOKORO_GITHUB_COMMIT_URL" 167 | "KOKORO_GITHUB_PULL_REQUEST_URL" 168 | ) 169 | elif [[ "${TRAVIS:-}" == "true" ]]; then 170 | RUNNING_IN_CI="true" 171 | TRAMPOLINE_CI="travis" 172 | pass_down_envvars+=( 173 | "TRAVIS_BRANCH" 174 | "TRAVIS_BUILD_ID" 175 | "TRAVIS_BUILD_NUMBER" 176 | "TRAVIS_BUILD_WEB_URL" 177 | "TRAVIS_COMMIT" 178 | "TRAVIS_COMMIT_MESSAGE" 179 | "TRAVIS_COMMIT_RANGE" 180 | "TRAVIS_JOB_NAME" 181 | "TRAVIS_JOB_NUMBER" 182 | "TRAVIS_JOB_WEB_URL" 183 | "TRAVIS_PULL_REQUEST" 184 | "TRAVIS_PULL_REQUEST_BRANCH" 185 | "TRAVIS_PULL_REQUEST_SHA" 186 | "TRAVIS_PULL_REQUEST_SLUG" 187 | "TRAVIS_REPO_SLUG" 188 | "TRAVIS_SECURE_ENV_VARS" 189 | "TRAVIS_TAG" 190 | ) 191 | elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then 192 | RUNNING_IN_CI="true" 193 | TRAMPOLINE_CI="github-workflow" 194 | pass_down_envvars+=( 195 | "GITHUB_WORKFLOW" 196 | "GITHUB_RUN_ID" 197 | "GITHUB_RUN_NUMBER" 198 | "GITHUB_ACTION" 199 | "GITHUB_ACTIONS" 200 | "GITHUB_ACTOR" 201 | "GITHUB_REPOSITORY" 202 | "GITHUB_EVENT_NAME" 203 | "GITHUB_EVENT_PATH" 204 | "GITHUB_SHA" 205 | "GITHUB_REF" 206 | "GITHUB_HEAD_REF" 207 | "GITHUB_BASE_REF" 208 | ) 209 | elif [[ "${CIRCLECI:-}" == "true" ]]; then 210 | RUNNING_IN_CI="true" 211 | TRAMPOLINE_CI="circleci" 212 | pass_down_envvars+=( 213 | "CIRCLE_BRANCH" 214 | "CIRCLE_BUILD_NUM" 215 | "CIRCLE_BUILD_URL" 216 | "CIRCLE_COMPARE_URL" 217 | "CIRCLE_JOB" 218 | "CIRCLE_NODE_INDEX" 219 | "CIRCLE_NODE_TOTAL" 220 | "CIRCLE_PREVIOUS_BUILD_NUM" 221 | "CIRCLE_PROJECT_REPONAME" 222 | "CIRCLE_PROJECT_USERNAME" 223 | "CIRCLE_REPOSITORY_URL" 224 | "CIRCLE_SHA1" 225 | "CIRCLE_STAGE" 226 | "CIRCLE_USERNAME" 227 | "CIRCLE_WORKFLOW_ID" 228 | "CIRCLE_WORKFLOW_JOB_ID" 229 | "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" 230 | "CIRCLE_WORKFLOW_WORKSPACE_ID" 
231 | ) 232 | fi 233 | 234 | # Configure the service account for pulling the docker image. 235 | function repo_root() { 236 | local dir="$1" 237 | while [[ ! -d "${dir}/.git" ]]; do 238 | dir="$(dirname "$dir")" 239 | done 240 | echo "${dir}" 241 | } 242 | 243 | # Detect the project root. In CI builds, we assume the script is in 244 | # the git tree and traverse from there, otherwise, traverse from `pwd` 245 | # to find `.git` directory. 246 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 247 | PROGRAM_PATH="$(realpath "$0")" 248 | PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" 249 | PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" 250 | else 251 | PROJECT_ROOT="$(repo_root $(pwd))" 252 | fi 253 | 254 | log_yellow "Changing to the project root: ${PROJECT_ROOT}." 255 | cd "${PROJECT_ROOT}" 256 | 257 | # To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need 258 | # to use this environment variable in `PROJECT_ROOT`. 259 | if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then 260 | 261 | mkdir -p "${tmpdir}/gcloud" 262 | gcloud_config_dir="${tmpdir}/gcloud" 263 | 264 | log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." 265 | export CLOUDSDK_CONFIG="${gcloud_config_dir}" 266 | 267 | log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." 268 | gcloud auth activate-service-account \ 269 | --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" 270 | log_yellow "Configuring Container Registry access" 271 | gcloud auth configure-docker --quiet 272 | fi 273 | 274 | required_envvars=( 275 | # The basic trampoline configurations. 276 | "TRAMPOLINE_IMAGE" 277 | "TRAMPOLINE_BUILD_FILE" 278 | ) 279 | 280 | if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then 281 | source "${PROJECT_ROOT}/.trampolinerc" 282 | fi 283 | 284 | log_yellow "Checking environment variables." 285 | for e in "${required_envvars[@]}" 286 | do 287 | if [[ -z "${!e:-}" ]]; then 288 | log "Missing ${e} env var. Aborting." 
289 | exit 1 290 | fi 291 | done 292 | 293 | # We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 294 | # script: e.g. "github/repo-name/.kokoro/run_tests.sh" 295 | TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" 296 | log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" 297 | 298 | # ignore error on docker operations and test execution 299 | set +e 300 | 301 | log_yellow "Preparing Docker image." 302 | # We only download the docker image in CI builds. 303 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 304 | # Download the docker image specified by `TRAMPOLINE_IMAGE` 305 | 306 | # We may want to add --max-concurrent-downloads flag. 307 | 308 | log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." 309 | if docker pull "${TRAMPOLINE_IMAGE}"; then 310 | log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." 311 | has_image="true" 312 | else 313 | log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." 314 | has_image="false" 315 | fi 316 | else 317 | # For local run, check if we have the image. 318 | if docker images "${TRAMPOLINE_IMAGE}" | grep "${TRAMPOLINE_IMAGE%:*}"; then 319 | has_image="true" 320 | else 321 | has_image="false" 322 | fi 323 | fi 324 | 325 | 326 | # The default user for a Docker container has uid 0 (root). To avoid 327 | # creating root-owned files in the build directory we tell docker to 328 | # use the current user ID. 329 | user_uid="$(id -u)" 330 | user_gid="$(id -g)" 331 | user_name="$(id -un)" 332 | 333 | # To allow docker in docker, we add the user to the docker group in 334 | # the host os. 335 | docker_gid=$(cut -d: -f3 < <(getent group docker)) 336 | 337 | update_cache="false" 338 | if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then 339 | # Build the Docker image from the source. 
340 | context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") 341 | docker_build_flags=( 342 | "-f" "${TRAMPOLINE_DOCKERFILE}" 343 | "-t" "${TRAMPOLINE_IMAGE}" 344 | "--build-arg" "UID=${user_uid}" 345 | "--build-arg" "USERNAME=${user_name}" 346 | ) 347 | if [[ "${has_image}" == "true" ]]; then 348 | docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") 349 | fi 350 | 351 | log_yellow "Start building the docker image." 352 | if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then 353 | echo "docker build" "${docker_build_flags[@]}" "${context_dir}" 354 | fi 355 | 356 | # ON CI systems, we want to suppress docker build logs, only 357 | # output the logs when it fails. 358 | if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then 359 | if docker build "${docker_build_flags[@]}" "${context_dir}" \ 360 | > "${tmpdir}/docker_build.log" 2>&1; then 361 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 362 | cat "${tmpdir}/docker_build.log" 363 | fi 364 | 365 | log_green "Finished building the docker image." 366 | update_cache="true" 367 | else 368 | log_red "Failed to build the Docker image, aborting." 369 | log_yellow "Dumping the build logs:" 370 | cat "${tmpdir}/docker_build.log" 371 | exit 1 372 | fi 373 | else 374 | if docker build "${docker_build_flags[@]}" "${context_dir}"; then 375 | log_green "Finished building the docker image." 376 | update_cache="true" 377 | else 378 | log_red "Failed to build the Docker image, aborting." 379 | exit 1 380 | fi 381 | fi 382 | else 383 | if [[ "${has_image}" != "true" ]]; then 384 | log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." 385 | exit 1 386 | fi 387 | fi 388 | 389 | # We use an array for the flags so they are easier to document. 390 | docker_flags=( 391 | # Remove the container after it exists. 392 | "--rm" 393 | 394 | # Use the host network. 395 | "--network=host" 396 | 397 | # Run in priviledged mode. We are not using docker for sandboxing or 398 | # isolation, just for packaging our dev tools. 
399 | "--privileged" 400 | 401 | # Run the docker script with the user id. Because the docker image gets to 402 | # write in ${PWD} you typically want this to be your user id. 403 | # To allow docker in docker, we need to use docker gid on the host. 404 | "--user" "${user_uid}:${docker_gid}" 405 | 406 | # Pass down the USER. 407 | "--env" "USER=${user_name}" 408 | 409 | # Mount the project directory inside the Docker container. 410 | "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" 411 | "--workdir" "${TRAMPOLINE_WORKSPACE}" 412 | "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" 413 | 414 | # Mount the temporary home directory. 415 | "--volume" "${tmphome}:/h" 416 | "--env" "HOME=/h" 417 | 418 | # Allow docker in docker. 419 | "--volume" "/var/run/docker.sock:/var/run/docker.sock" 420 | 421 | # Mount the /tmp so that docker in docker can mount the files 422 | # there correctly. 423 | "--volume" "/tmp:/tmp" 424 | # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR 425 | # TODO(tmatsuo): This part is not portable. 426 | "--env" "TRAMPOLINE_SECRET_DIR=/secrets" 427 | "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" 428 | "--env" "KOKORO_GFILE_DIR=/secrets/gfile" 429 | "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" 430 | "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" 431 | ) 432 | 433 | # Add an option for nicer output if the build gets a tty. 434 | if [[ -t 0 ]]; then 435 | docker_flags+=("-it") 436 | fi 437 | 438 | # Passing down env vars 439 | for e in "${pass_down_envvars[@]}" 440 | do 441 | if [[ -n "${!e:-}" ]]; then 442 | docker_flags+=("--env" "${e}=${!e}") 443 | fi 444 | done 445 | 446 | # If arguments are given, all arguments will become the commands run 447 | # in the container, otherwise run TRAMPOLINE_BUILD_FILE. 448 | if [[ $# -ge 1 ]]; then 449 | log_yellow "Running the given commands '" "${@:1}" "' in the container." 
450 | readonly commands=("${@:1}") 451 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 452 | echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" 453 | fi 454 | docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" 455 | else 456 | log_yellow "Running the tests in a Docker container." 457 | docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") 458 | if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then 459 | echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" 460 | fi 461 | docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" 462 | fi 463 | 464 | 465 | test_retval=$? 466 | 467 | if [[ ${test_retval} -eq 0 ]]; then 468 | log_green "Build finished with ${test_retval}" 469 | else 470 | log_red "Build finished with ${test_retval}" 471 | fi 472 | 473 | # Only upload it when the test passes. 474 | if [[ "${update_cache}" == "true" ]] && \ 475 | [[ $test_retval == 0 ]] && \ 476 | [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then 477 | log_yellow "Uploading the Docker image." 478 | if docker push "${TRAMPOLINE_IMAGE}"; then 479 | log_green "Finished uploading the Docker image." 480 | else 481 | log_red "Failed uploading the Docker image." 482 | fi 483 | # Call trampoline_after_upload_hook if it's defined. 484 | if function_exists trampoline_after_upload_hook; then 485 | trampoline_after_upload_hook 486 | fi 487 | 488 | fi 489 | 490 | exit "${test_retval}" 491 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 
5 | // You may obtain a copy of the License at 6 | // 7 | // http://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | const config = { 15 | "enable-source-maps": true, 16 | "throw-deprecation": true, 17 | "timeout": 10000, 18 | "recursive": true 19 | } 20 | if (process.env.MOCHA_THROW_DEPRECATION === 'false') { 21 | delete config['throw-deprecation']; 22 | } 23 | if (process.env.MOCHA_REPORTER) { 24 | config.reporter = process.env.MOCHA_REPORTER; 25 | } 26 | if (process.env.MOCHA_REPORTER_OUTPUT) { 27 | config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; 28 | } 29 | module.exports = config 30 | -------------------------------------------------------------------------------- /.nycrc: -------------------------------------------------------------------------------- 1 | { 2 | "report-dir": "./.coverage", 3 | "reporter": ["text", "lcov"], 4 | "exclude": [ 5 | "**/*-test", 6 | "**/.coverage", 7 | "**/apis", 8 | "**/benchmark", 9 | "**/conformance", 10 | "**/docs", 11 | "**/samples", 12 | "**/scripts", 13 | "**/protos", 14 | "**/test", 15 | "**/*.d.ts", 16 | ".jsdoc.js", 17 | "**/.jsdoc.js", 18 | "karma.conf.js", 19 | "webpack-tests.config.js", 20 | "webpack.config.js" 21 | ], 22 | "exclude-after-remap": false, 23 | "all": true 24 | } 25 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | **/coverage 3 | test/fixtures 4 | build/ 5 | docs/ 6 | protos/ 7 | -------------------------------------------------------------------------------- /.prettierrc.js: 
-------------------------------------------------------------------------------- 1 | // Copyright 2020 Google LLC 2 | // 3 | // Licensed under the Apache License, Version 2.0 (the "License"); 4 | // you may not use this file except in compliance with the License. 5 | // You may obtain a copy of the License at 6 | // 7 | // https://www.apache.org/licenses/LICENSE-2.0 8 | // 9 | // Unless required by applicable law or agreed to in writing, software 10 | // distributed under the License is distributed on an "AS IS" BASIS, 11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | // See the License for the specific language governing permissions and 13 | // limitations under the License. 14 | 15 | module.exports = { 16 | ...require('gts/.prettierrc.json') 17 | } 18 | -------------------------------------------------------------------------------- /.repo-metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcs-resumable-upload", 3 | "name_pretty": "Google Cloud Storage Resumable Upload", 4 | "release_level": "stable", 5 | "language": "nodejs", 6 | "repo": "googleapis/gcs-resumable-upload", 7 | "distribution_name": "gcs-resumable-upload", 8 | "codeowner_team": "@googleapis/cloud-storage-dpe", 9 | "client_documentation": "https://cloud.google.com/nodejs/docs/reference/gcs-resumable-upload/latest", 10 | "library_type": "CORE" 11 | } 12 | -------------------------------------------------------------------------------- /.trampolinerc: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | # Template for .trampolinerc 16 | 17 | # Add required env vars here. 18 | required_envvars+=( 19 | ) 20 | 21 | # Add env vars which are passed down into the container here. 22 | pass_down_envvars+=( 23 | "AUTORELEASE_PR" 24 | "VERSION" 25 | ) 26 | 27 | # Prevent unintentional override on the default image. 28 | if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ 29 | [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 30 | echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." 31 | exit 1 32 | fi 33 | 34 | # Define the default value if it makes sense. 35 | if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then 36 | TRAMPOLINE_IMAGE_UPLOAD="" 37 | fi 38 | 39 | if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then 40 | TRAMPOLINE_IMAGE="" 41 | fi 42 | 43 | if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then 44 | TRAMPOLINE_DOCKERFILE="" 45 | fi 46 | 47 | if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then 48 | TRAMPOLINE_BUILD_FILE="" 49 | fi 50 | 51 | # Secret Manager secrets. 
52 | source ${PROJECT_ROOT}/.kokoro/populate-secrets.sh 53 | -------------------------------------------------------------------------------- /20MB.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/googleapis/gcs-resumable-upload/0932e6843b3139db580a202c9318a2c7139f0dad/20MB.zip -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | [npm history][1] 4 | 5 | [1]: https://www.npmjs.com/package/gcs-resumable-upload?activeTab=versions 6 | 7 | ## [6.0.0](https://github.com/googleapis/gcs-resumable-upload/compare/v5.0.1...v6.0.0) (2023-08-10) 8 | 9 | 10 | ### ⚠ BREAKING CHANGES 11 | 12 | * update to Node 14 ([#549](https://github.com/googleapis/gcs-resumable-upload/issues/549)) 13 | 14 | ### Miscellaneous Chores 15 | 16 | * Update to Node 14 ([#549](https://github.com/googleapis/gcs-resumable-upload/issues/549)) ([be55cae](https://github.com/googleapis/gcs-resumable-upload/commit/be55cae1e4cb8c479ebae8e0ace75484405eb86c)) 17 | 18 | ## [5.0.1](https://github.com/googleapis/gcs-resumable-upload/compare/v5.0.0...v5.0.1) (2022-08-23) 19 | 20 | 21 | ### Bug Fixes 22 | 23 | * remove pip install statements ([#1546](https://github.com/googleapis/gcs-resumable-upload/issues/1546)) ([#525](https://github.com/googleapis/gcs-resumable-upload/issues/525)) ([50a11d0](https://github.com/googleapis/gcs-resumable-upload/commit/50a11d0bea4ffbad39d71d18931c485564e81c3c)) 24 | 25 | ## [5.0.0](https://github.com/googleapis/gcs-resumable-upload/compare/v4.0.2...v5.0.0) (2022-05-20) 26 | 27 | 28 | ### ⚠ BREAKING CHANGES 29 | 30 | * update library to use Node 12 (#520) 31 | 32 | ### Build System 33 | 34 | * update library to use Node 12 ([#520](https://github.com/googleapis/gcs-resumable-upload/issues/520)) 
([f90175d](https://github.com/googleapis/gcs-resumable-upload/commit/f90175d7779f69a53ee0ba584b5bf98847a1469f)) 35 | 36 | ### [4.0.2](https://github.com/googleapis/gcs-resumable-upload/compare/v4.0.1...v4.0.2) (2022-01-26) 37 | 38 | 39 | ### Bug Fixes 40 | 41 | * Stop Duplicate Response Handlers on Retries ([#502](https://github.com/googleapis/gcs-resumable-upload/issues/502)) ([c5b3059](https://github.com/googleapis/gcs-resumable-upload/commit/c5b30594783e772c43b5eb5da01feb7cdb4d6094)) 42 | 43 | ### [4.0.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v4.0.0...v4.0.1) (2022-01-06) 44 | 45 | 46 | ### Bug Fixes 47 | 48 | * Fix support for streams without content-length property ([#491](https://www.github.com/googleapis/gcs-resumable-upload/issues/491)) ([ac2f73b](https://www.github.com/googleapis/gcs-resumable-upload/commit/ac2f73b73e3afbc218ca99cbba3c598af4aefa9e)) 49 | 50 | ## [4.0.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.6.0...v4.0.0) (2021-12-30) 51 | 52 | 53 | ### ⚠ BREAKING CHANGES 54 | 55 | * Multiple Chunk Upload Support (#486) 56 | 57 | ### Features 58 | 59 | * Multiple Chunk Upload Support ([#486](https://www.github.com/googleapis/gcs-resumable-upload/issues/486)) ([dba1a39](https://www.github.com/googleapis/gcs-resumable-upload/commit/dba1a39793d2f26e796130df55c99821e9145a21)) 60 | 61 | ## [3.6.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.5.1...v3.6.0) (2021-11-09) 62 | 63 | 64 | ### Features 65 | 66 | * add error code to error in startUploading ([#474](https://www.github.com/googleapis/gcs-resumable-upload/issues/474)) ([1ae6987](https://www.github.com/googleapis/gcs-resumable-upload/commit/1ae69870a4ad52340eb2e67c4d3d13391002c6fa)) 67 | 68 | ### [3.5.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.5.0...v3.5.1) (2021-11-04) 69 | 70 | 71 | ### Bug Fixes 72 | 73 | * cast code to string ([#481](https://www.github.com/googleapis/gcs-resumable-upload/issues/481)) 
([93d5faf](https://www.github.com/googleapis/gcs-resumable-upload/commit/93d5fafe129de9e5fd9b1f6670c109091b543c4c)) 74 | 75 | ## [3.5.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.4.0...v3.5.0) (2021-11-04) 76 | 77 | 78 | ### Features 79 | 80 | * retry econnreset ([#479](https://www.github.com/googleapis/gcs-resumable-upload/issues/479)) ([0d230cb](https://www.github.com/googleapis/gcs-resumable-upload/commit/0d230cbcd662ff759105e382ff5b671594776d19)) 81 | 82 | ## [3.4.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.3.1...v3.4.0) (2021-11-03) 83 | 84 | 85 | ### Features 86 | 87 | * retry URI creation ([#475](https://www.github.com/googleapis/gcs-resumable-upload/issues/475)) ([e3d380f](https://www.github.com/googleapis/gcs-resumable-upload/commit/e3d380f892e75d95595fddac70fb9551700283e7)) 88 | 89 | 90 | ### Bug Fixes 91 | 92 | * throw informative error in the case that retries run out ([#477](https://www.github.com/googleapis/gcs-resumable-upload/issues/477)) ([4b3db66](https://www.github.com/googleapis/gcs-resumable-upload/commit/4b3db6651ae09e8c8abaa75720747e97cab79966)) 93 | 94 | ### [3.3.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.3.0...v3.3.1) (2021-09-02) 95 | 96 | 97 | ### Bug Fixes 98 | 99 | * **build:** switch primary branch to main ([#451](https://www.github.com/googleapis/gcs-resumable-upload/issues/451)) ([c180f66](https://www.github.com/googleapis/gcs-resumable-upload/commit/c180f6638ba85e9b0fe96e2f76749f6707807747)) 100 | 101 | ## [3.3.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.2.1...v3.3.0) (2021-07-19) 102 | 103 | 104 | ### Features 105 | 106 | * customization options for retries ([#441](https://www.github.com/googleapis/gcs-resumable-upload/issues/441)) ([2007234](https://www.github.com/googleapis/gcs-resumable-upload/commit/200723407881c24f77e5b4dc3ed4527799945133)) 107 | 108 | ### 
[3.2.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.2.0...v3.2.1) (2021-06-28) 109 | 110 | 111 | ### Bug Fixes 112 | 113 | * createURI() should not persist uri ([#437](https://www.github.com/googleapis/gcs-resumable-upload/issues/437)) ([5fbad65](https://www.github.com/googleapis/gcs-resumable-upload/commit/5fbad65b7f7b7a7e4f7f8ba5ba1573405b31afff)) 114 | 115 | ## [3.2.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.1.4...v3.2.0) (2021-06-10) 116 | 117 | 118 | ### Features 119 | 120 | * add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#421](https://www.github.com/googleapis/gcs-resumable-upload/issues/421)) ([b842b41](https://www.github.com/googleapis/gcs-resumable-upload/commit/b842b417d319af06ec2cc11b4078dd3f3a65c31b)) 121 | 122 | ### [3.1.4](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.1.3...v3.1.4) (2021-05-03) 123 | 124 | 125 | ### Bug Fixes 126 | 127 | * bypass auth when using emulator ([#416](https://www.github.com/googleapis/gcs-resumable-upload/issues/416)) ([9b9ecce](https://www.github.com/googleapis/gcs-resumable-upload/commit/9b9ecceb39ec46e4bb0dcf356e518c0b03d1c1ae)) 128 | 129 | ### [3.1.3](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.1.2...v3.1.3) (2021-02-12) 130 | 131 | 132 | ### Bug Fixes 133 | 134 | * **deps:** update dependency google-auth-library to v7 ([#404](https://www.github.com/googleapis/gcs-resumable-upload/issues/404)) ([8d4dd15](https://www.github.com/googleapis/gcs-resumable-upload/commit/8d4dd15b33a70d9589a58a48c510603eced85912)) 135 | 136 | ### [3.1.2](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.1.1...v3.1.2) (2020-12-22) 137 | 138 | 139 | ### Bug Fixes 140 | 141 | * **deps:** update dependency gaxios to v4 ([#389](https://www.github.com/googleapis/gcs-resumable-upload/issues/389)) ([05f3af2](https://www.github.com/googleapis/gcs-resumable-upload/commit/05f3af2820590551a5212df67130eb17e144d9a2)) 142 | 143 | ### 
[3.1.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.1.0...v3.1.1) (2020-07-09) 144 | 145 | 146 | ### Bug Fixes 147 | 148 | * typeo in nodejs .gitattribute ([#362](https://www.github.com/googleapis/gcs-resumable-upload/issues/362)) ([bd44456](https://www.github.com/googleapis/gcs-resumable-upload/commit/bd44456b92941237ac824dbb5365e704e4a1faea)) 149 | 150 | ## [3.1.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v3.0.0...v3.1.0) (2020-06-24) 151 | 152 | 153 | ### Features 154 | 155 | * allow custom request options ([#356](https://www.github.com/googleapis/gcs-resumable-upload/issues/356)) ([c44d956](https://www.github.com/googleapis/gcs-resumable-upload/commit/c44d956a69f3f2bd740575da15449786f519e1d6)) 156 | 157 | ## [3.0.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.3.3...v3.0.0) (2020-05-12) 158 | 159 | 160 | ### ⚠ BREAKING CHANGES 161 | 162 | * **dep:** upgrade gts 2.0.0/drop Node 8 (#329) 163 | * drop support for Node 8 (see: https://github.com/googleapis/gcs-resumable-upload/pull/330) (#333) 164 | 165 | ### Features 166 | 167 | * allow user-specified protocol for options.apiEndpoint ([#349](https://www.github.com/googleapis/gcs-resumable-upload/issues/349)) ([47f76d0](https://www.github.com/googleapis/gcs-resumable-upload/commit/47f76d0ac189d70341410dc5fc52374681ad15e9)) 168 | 169 | 170 | ### Bug Fixes 171 | 172 | * **deps:** update dependency gaxios to v3 ([#325](https://www.github.com/googleapis/gcs-resumable-upload/issues/325)) ([584ea71](https://www.github.com/googleapis/gcs-resumable-upload/commit/584ea7109d1af161af47785e48ca2498597db2e8)) 173 | * **deps:** update dependency google-auth-library to v6 ([#327](https://www.github.com/googleapis/gcs-resumable-upload/issues/327)) ([609bf9a](https://www.github.com/googleapis/gcs-resumable-upload/commit/609bf9aeda5c40bfedefa0a7b934173014ea3d39)) 174 | 175 | 176 | ### Miscellaneous Chores 177 | 178 | * **dep:** upgrade gts 2.0.0/drop Node 8 
([#329](https://www.github.com/googleapis/gcs-resumable-upload/issues/329)) ([f69a184](https://www.github.com/googleapis/gcs-resumable-upload/commit/f69a18494646206fb634b2400ec21348b690d78e)) 179 | 180 | 181 | ### Build System 182 | 183 | * drop support for Node 8 (see: https://github.com/googleapis/gcs-resumable-upload/pull/330) ([#333](https://www.github.com/googleapis/gcs-resumable-upload/issues/333)) ([165cd3b](https://www.github.com/googleapis/gcs-resumable-upload/commit/165cd3bdde1c3d9988dcd7349710da23545d68c3)), closes [#330](https://www.github.com/googleapis/gcs-resumable-upload/issues/330) 184 | 185 | ### [2.3.3](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.3.2...v2.3.3) (2020-03-06) 186 | 187 | 188 | ### Bug Fixes 189 | 190 | * **docs:** progress event in readme file ([#313](https://www.github.com/googleapis/gcs-resumable-upload/issues/313)) ([0dfdd07](https://www.github.com/googleapis/gcs-resumable-upload/commit/0dfdd07d7472e63eb19e5c216171a13a517bf29a)) 191 | 192 | ### [2.3.2](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.3.1...v2.3.2) (2019-12-05) 193 | 194 | 195 | ### Bug Fixes 196 | 197 | * **deps:** TypeScript 3.7.0 causes breaking change in typings ([#285](https://www.github.com/googleapis/gcs-resumable-upload/issues/285)) ([3e671b2](https://www.github.com/googleapis/gcs-resumable-upload/commit/3e671b262a7ef5383fbc5e5b6232d7bcd2c6641e)) 198 | * **typescript:** add return type for base uri getter ([#286](https://www.github.com/googleapis/gcs-resumable-upload/issues/286)) ([7121624](https://www.github.com/googleapis/gcs-resumable-upload/commit/71216249ee781714b73826ee7902ed1847402102)) 199 | 200 | ### [2.3.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.3.0...v2.3.1) (2019-11-14) 201 | 202 | 203 | ### Bug Fixes 204 | 205 | * **docs:** add jsdoc-region-tag plugin ([#282](https://www.github.com/googleapis/gcs-resumable-upload/issues/282)) 
([4942669](https://www.github.com/googleapis/gcs-resumable-upload/commit/494266901b0cdcc9956dda721a4ca19d3b4ced53)) 206 | 207 | ## [2.3.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.5...v2.3.0) (2019-10-09) 208 | 209 | 210 | ### Features 211 | 212 | * support all query parameters during URI creation ([#275](https://www.github.com/googleapis/gcs-resumable-upload/issues/275)) ([383a490](https://www.github.com/googleapis/gcs-resumable-upload/commit/383a490)) 213 | 214 | ### [2.2.5](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.4...v2.2.5) (2019-09-07) 215 | 216 | 217 | ### Bug Fixes 218 | 219 | * typecast metadata.size from string to number ([#263](https://www.github.com/googleapis/gcs-resumable-upload/issues/263)) ([64ea7a1](https://www.github.com/googleapis/gcs-resumable-upload/commit/64ea7a1)) 220 | 221 | ### [2.2.4](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.3...v2.2.4) (2019-08-15) 222 | 223 | 224 | ### Bug Fixes 225 | 226 | * let gaxios handle API errors ([#255](https://www.github.com/googleapis/gcs-resumable-upload/issues/255)) ([7a913ef](https://www.github.com/googleapis/gcs-resumable-upload/commit/7a913ef)) 227 | 228 | ### [2.2.3](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.2...v2.2.3) (2019-07-26) 229 | 230 | 231 | ### Bug Fixes 232 | 233 | * **deps:** update dependency google-auth-library to v5 ([#250](https://www.github.com/googleapis/gcs-resumable-upload/issues/250)) ([8bc4798](https://www.github.com/googleapis/gcs-resumable-upload/commit/8bc4798)) 234 | 235 | ### [2.2.2](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.1...v2.2.2) (2019-07-17) 236 | 237 | 238 | ### Bug Fixes 239 | 240 | * **deps:** update dependency pumpify to v2 ([#237](https://www.github.com/googleapis/gcs-resumable-upload/issues/237)) ([a2a2636](https://www.github.com/googleapis/gcs-resumable-upload/commit/a2a2636)) 241 | 242 | ### 
[2.2.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.2.0...v2.2.1) (2019-07-17) 243 | 244 | 245 | ### Bug Fixes 246 | 247 | * handle a `0` generation ([#247](https://www.github.com/googleapis/gcs-resumable-upload/issues/247)) ([0b991d5](https://www.github.com/googleapis/gcs-resumable-upload/commit/0b991d5)), closes [#246](https://www.github.com/googleapis/gcs-resumable-upload/issues/246) 248 | 249 | ## [2.2.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.1.1...v2.2.0) (2019-07-14) 250 | 251 | 252 | ### Bug Fixes 253 | 254 | * **docs:** make anchors work in jsdoc ([#238](https://www.github.com/googleapis/gcs-resumable-upload/issues/238)) ([86e4433](https://www.github.com/googleapis/gcs-resumable-upload/commit/86e4433)) 255 | * expose 'Retry limit exceeded' server error message ([#240](https://www.github.com/googleapis/gcs-resumable-upload/issues/240)) ([40a1306](https://www.github.com/googleapis/gcs-resumable-upload/commit/40a1306)) 256 | * make cache key unique by including generation ([#243](https://www.github.com/googleapis/gcs-resumable-upload/issues/243)) ([85f80ab](https://www.github.com/googleapis/gcs-resumable-upload/commit/85f80ab)) 257 | 258 | 259 | ### Features 260 | 261 | * allow removing cache file ([#244](https://www.github.com/googleapis/gcs-resumable-upload/issues/244)) ([eb8976a](https://www.github.com/googleapis/gcs-resumable-upload/commit/eb8976a)) 262 | 263 | ### [2.1.1](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.1.0...v2.1.1) (2019-06-20) 264 | 265 | 266 | ### Bug Fixes 267 | 268 | * **deps:** update dependency configstore to v5 ([#234](https://www.github.com/googleapis/gcs-resumable-upload/issues/234)) ([9b957c6](https://www.github.com/googleapis/gcs-resumable-upload/commit/9b957c6)) 269 | 270 | ## [2.1.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v2.0.0...v2.1.0) (2019-06-19) 271 | 272 | 273 | ### Features 274 | 275 | * support apiEndpoint override 
([#230](https://www.github.com/googleapis/gcs-resumable-upload/issues/230)) ([41325ac](https://www.github.com/googleapis/gcs-resumable-upload/commit/41325ac)) 276 | 277 | ## [2.0.0](https://www.github.com/googleapis/gcs-resumable-upload/compare/v1.1.0...v2.0.0) (2019-05-09) 278 | 279 | 280 | ### Bug Fixes 281 | 282 | * **deps:** update dependency abort-controller to v3 ([0c4f6c0](https://www.github.com/googleapis/gcs-resumable-upload/commit/0c4f6c0)) 283 | * **deps:** update dependency gaxios to v2 ([#210](https://www.github.com/googleapis/gcs-resumable-upload/issues/210)) ([d5a1a5c](https://www.github.com/googleapis/gcs-resumable-upload/commit/d5a1a5c)) 284 | * **deps:** update dependency google-auth-library to v4 ([#219](https://www.github.com/googleapis/gcs-resumable-upload/issues/219)) ([1e60178](https://www.github.com/googleapis/gcs-resumable-upload/commit/1e60178)) 285 | 286 | 287 | ### Build System 288 | 289 | * upgrade engines field to >=8.10.0 ([#213](https://www.github.com/googleapis/gcs-resumable-upload/issues/213)) ([5a81a8b](https://www.github.com/googleapis/gcs-resumable-upload/commit/5a81a8b)) 290 | 291 | 292 | ### BREAKING CHANGES 293 | 294 | * upgrade engines field to >=8.10.0 (#213) 295 | 296 | ## v1.1.0 297 | 298 | 03-26-2019 07:13 PDT 299 | 300 | ### New Features 301 | - feat: support ConfigStore configPath option ([#194](https://github.com/googleapis/gcs-resumable-upload/pull/194)) 302 | 303 | ### Internal / Testing Changes 304 | - chore: publish to npm using wombat ([#197](https://github.com/googleapis/gcs-resumable-upload/pull/197)) 305 | - build: use per-repo npm publish token ([#195](https://github.com/googleapis/gcs-resumable-upload/pull/195)) 306 | - build: Add docuploader credentials to node publish jobs ([#192](https://github.com/googleapis/gcs-resumable-upload/pull/192)) 307 | - build: use node10 to run samples-test, system-test etc ([#190](https://github.com/googleapis/gcs-resumable-upload/pull/190)) 308 | - build: update release 
configuration 309 | 310 | ## v1.0.0 311 | 312 | 02-28-2019 06:27 PST 313 | 314 | **This release has breaking changes**. The underlying transport library was changed from [request](https://github.com/request/request) to [gaxios](https://github.com/JustinBeckwith/gaxios). Any `response` objects returned via the API will now return a [`GaxiosResponse`](https://github.com/JustinBeckwith/gaxios/blob/88a47e000625d8192689acac5c40c0b1e1d963a2/src/gaxios.ts#L197-L203) object. 315 | 316 | 317 | #### Old Code 318 | ```js 319 | .on('response', function (resp, metadata) { 320 | console.log(resp.statusCode); 321 | }) 322 | ``` 323 | 324 | #### New Code 325 | ```js 326 | .on('response', function (resp) { 327 | console.log(resp.status); 328 | }); 329 | ``` 330 | 331 | ### Implementation Changes 332 | - fix: replace request with gaxios ([#174](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/174)) 333 | 334 | ### Documentation 335 | - docs: update links in contrib guide ([#184](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/184)) 336 | - docs: add lint/fix example to contributing guide ([#177](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/177)) 337 | 338 | ### Internal / Testing Changes 339 | - chore(deps): update dependency mocha to v6 ([#185](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/185)) 340 | - build: use linkinator for docs test ([#183](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/183)) 341 | - build: create docs test npm scripts ([#182](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/182)) 342 | - build: test using @grpc/grpc-js in CI ([#181](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/181)) 343 | - chore: move CONTRIBUTING.md to root ([#179](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/179)) 344 | - chore(deps): update dependency typescript to ~3.3.0 
([#176](https://github.com/GoogleCloudPlatform/gcs-resumable-upload/pull/176)) 345 | 346 | ## v0.14.1 347 | 348 | 01-25-2019 10:39 PST 349 | 350 | ### Implementation Changes 351 | 352 | - fix: return GaxiosError directly ([#171](https://github.com/googleapis/gcs-resumable-upload/pull/171)) 353 | 354 | ### Documentation 355 | 356 | - build: exclude googleapis in 404 check. ([#172](https://github.com/googleapis/gcs-resumable-upload/pull/172)) 357 | - build: exclude googleapis.com checks in 404 checker ([#170](https://github.com/googleapis/gcs-resumable-upload/pull/170)) 358 | 359 | ## v0.14.0 360 | 361 | 01-23-2019 17:57 PST 362 | 363 | ### New Features 364 | - feat: support async functions ([#164](https://github.com/googleapis/gcs-resumable-upload/pull/164)) 365 | - fix: use the reject handler for promises ([#144](https://github.com/googleapis/gcs-resumable-upload/pull/144)) 366 | - feat: add progress events ([#135](https://github.com/googleapis/gcs-resumable-upload/pull/135)) 367 | 368 | ### Dependencies 369 | - fix(deps): update dependency google-auth-library to v3 ([#165](https://github.com/googleapis/gcs-resumable-upload/pull/165)) 370 | - refactor: use teeny-request (part 1) ([#141](https://github.com/googleapis/gcs-resumable-upload/pull/141)) 371 | - chore(deps): update dependency @types/configstore to v4 ([#145](https://github.com/googleapis/gcs-resumable-upload/pull/145)) 372 | - chore(deps): update dependency typescript to ~3.2.0 ([#140](https://github.com/googleapis/gcs-resumable-upload/pull/140)) 373 | - chore(deps): update dependency gts to ^0.9.0 ([#137](https://github.com/googleapis/gcs-resumable-upload/pull/137)) 374 | - chore(deps): update dependency through2 to v3 ([#131](https://github.com/googleapis/gcs-resumable-upload/pull/131)) 375 | - refactor: move from axios back to request ([#123](https://github.com/googleapis/gcs-resumable-upload/pull/123)) 376 | - chore(deps): update dependency nock to v10 
([#113](https://github.com/googleapis/gcs-resumable-upload/pull/113)) 377 | - chore: update the version of typescript ([#106](https://github.com/googleapis/gcs-resumable-upload/pull/106)) 378 | 379 | ### Documentation 380 | - build: ignore googleapis.com in doc link checker ([#166](https://github.com/googleapis/gcs-resumable-upload/pull/166)) 381 | - build: check broken links in generated docs ([#162](https://github.com/googleapis/gcs-resumable-upload/pull/162)) 382 | 383 | ### Internal / Testing Changes 384 | - fix: fix the unit tests ([#161](https://github.com/googleapis/gcs-resumable-upload/pull/161)) 385 | - chore(build): inject yoshi automation key ([#160](https://github.com/googleapis/gcs-resumable-upload/pull/160)) 386 | - chore: update nyc and eslint configs ([#159](https://github.com/googleapis/gcs-resumable-upload/pull/159)) 387 | - chore: fix publish.sh permission +x ([#156](https://github.com/googleapis/gcs-resumable-upload/pull/156)) 388 | - fix(build): fix Kokoro release script ([#155](https://github.com/googleapis/gcs-resumable-upload/pull/155)) 389 | - build: add Kokoro configs for autorelease ([#154](https://github.com/googleapis/gcs-resumable-upload/pull/154)) 390 | - chore: always nyc report before calling codecov ([#153](https://github.com/googleapis/gcs-resumable-upload/pull/153)) 391 | - chore: nyc ignore build/test by default ([#152](https://github.com/googleapis/gcs-resumable-upload/pull/152)) 392 | - chore: update synth and common config ([#150](https://github.com/googleapis/gcs-resumable-upload/pull/150)) 393 | - fix(build): fix system key decryption ([#142](https://github.com/googleapis/gcs-resumable-upload/pull/142)) 394 | - chore: add synth.metadata 395 | - chore: update eslintignore config ([#136](https://github.com/googleapis/gcs-resumable-upload/pull/136)) 396 | - chore: use latest npm on Windows ([#134](https://github.com/googleapis/gcs-resumable-upload/pull/134)) 397 | - chore: update CircleCI config 
([#129](https://github.com/googleapis/gcs-resumable-upload/pull/129)) 398 | - chore: include build in eslintignore ([#126](https://github.com/googleapis/gcs-resumable-upload/pull/126)) 399 | - chore: update issue templates ([#121](https://github.com/googleapis/gcs-resumable-upload/pull/121)) 400 | - chore: remove old issue template ([#119](https://github.com/googleapis/gcs-resumable-upload/pull/119)) 401 | - build: run tests on node11 ([#118](https://github.com/googleapis/gcs-resumable-upload/pull/118)) 402 | - chores(build): run codecov on continuous builds ([#112](https://github.com/googleapis/gcs-resumable-upload/pull/112)) 403 | - chores(build): do not collect sponge.xml from windows builds ([#114](https://github.com/googleapis/gcs-resumable-upload/pull/114)) 404 | - chore: update new issue template ([#111](https://github.com/googleapis/gcs-resumable-upload/pull/111)) 405 | - build: fix codecov uploading on Kokoro ([#108](https://github.com/googleapis/gcs-resumable-upload/pull/108)) 406 | - Update kokoro config ([#105](https://github.com/googleapis/gcs-resumable-upload/pull/105)) 407 | - Update CI config ([#103](https://github.com/googleapis/gcs-resumable-upload/pull/103)) 408 | - Update kokoro config ([#101](https://github.com/googleapis/gcs-resumable-upload/pull/101)) 409 | - test: remove appveyor config ([#100](https://github.com/googleapis/gcs-resumable-upload/pull/100)) 410 | - Update kokoro config ([#99](https://github.com/googleapis/gcs-resumable-upload/pull/99)) 411 | - Enable prefer-const in the eslint config ([#98](https://github.com/googleapis/gcs-resumable-upload/pull/98)) 412 | - Enable no-var in eslint ([#97](https://github.com/googleapis/gcs-resumable-upload/pull/97)) 413 | - Update to new repo location ([#96](https://github.com/googleapis/gcs-resumable-upload/pull/96)) 414 | - Update CI config ([#95](https://github.com/googleapis/gcs-resumable-upload/pull/95)) 415 | 416 | ## v0.13.0 417 | 418 | ### Dependencies 419 | - fix(deps): update 
dependency google-auth-library to v2 (#89) 420 | - chore(deps): update dependency nyc to v13 (#86) 421 | 422 | ### Docs 423 | - docs: update the README (#79) 424 | 425 | ### Internal / Testing Changes 426 | - Retry npm install in CI (#92) 427 | - Update CI config (#90) 428 | - Update CI config (#88) 429 | - Update the CI config (#85) 430 | - chore: update CircleCI config 431 | - chore: ignore package-lock.json (#83) 432 | - chore: update renovate config (#81) 433 | - chore: enable noImplicitThis (#82) 434 | - chore: enable CI and synth script (#77) 435 | 436 | ## v0.12.0 437 | 438 | ### Implementation Changes 439 | BREAKING CHANGE: 440 | - chore: drop support for node.js 4 (#75) 441 | 442 | ### Dependencies 443 | - chore(deps): update dependency gts to ^0.8.0 (#71) 444 | - fix(deps): update dependency configstore to v4 (#72) 445 | - chore(deps): update dependency typescript to v3 (#74) 446 | 447 | ### Internal / Testing Changes 448 | - chore: make it OSPO compliant (#73) 449 | - fix: quarantine axios types (#70) 450 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | In the interest of fostering an open and welcoming environment, we as 7 | contributors and maintainers pledge to making participation in our project and 8 | our community a harassment-free experience for everyone, regardless of age, body 9 | size, disability, ethnicity, gender identity and expression, level of 10 | experience, education, socio-economic status, nationality, personal appearance, 11 | race, religion, or sexual identity and orientation.
12 | 13 | ## Our Standards 14 | 15 | Examples of behavior that contributes to creating a positive environment 16 | include: 17 | 18 | * Using welcoming and inclusive language 19 | * Being respectful of differing viewpoints and experiences 20 | * Gracefully accepting constructive criticism 21 | * Focusing on what is best for the community 22 | * Showing empathy towards other community members 23 | 24 | Examples of unacceptable behavior by participants include: 25 | 26 | * The use of sexualized language or imagery and unwelcome sexual attention or 27 | advances 28 | * Trolling, insulting/derogatory comments, and personal or political attacks 29 | * Public or private harassment 30 | * Publishing others' private information, such as a physical or electronic 31 | address, without explicit permission 32 | * Other conduct which could reasonably be considered inappropriate in a 33 | professional setting 34 | 35 | ## Our Responsibilities 36 | 37 | Project maintainers are responsible for clarifying the standards of acceptable 38 | behavior and are expected to take appropriate and fair corrective action in 39 | response to any instances of unacceptable behavior. 40 | 41 | Project maintainers have the right and responsibility to remove, edit, or reject 42 | comments, commits, code, wiki edits, issues, and other contributions that are 43 | not aligned to this Code of Conduct, or to ban temporarily or permanently any 44 | contributor for other behaviors that they deem inappropriate, threatening, 45 | offensive, or harmful. 46 | 47 | ## Scope 48 | 49 | This Code of Conduct applies both within project spaces and in public spaces 50 | when an individual is representing the project or its community. Examples of 51 | representing a project or community include using an official project e-mail 52 | address, posting via an official social media account, or acting as an appointed 53 | representative at an online or offline event. 
Representation of a project may be 54 | further defined and clarified by project maintainers. 55 | 56 | This Code of Conduct also applies outside the project spaces when the Project 57 | Steward has a reasonable belief that an individual's behavior may have a 58 | negative impact on the project or its community. 59 | 60 | ## Conflict Resolution 61 | 62 | We do not believe that all conflict is bad; healthy debate and disagreement 63 | often yield positive results. However, it is never okay to be disrespectful or 64 | to engage in behavior that violates the project’s code of conduct. 65 | 66 | If you see someone violating the code of conduct, you are encouraged to address 67 | the behavior directly with those involved. Many issues can be resolved quickly 68 | and easily, and this gives people more control over the outcome of their 69 | dispute. If you are unable to resolve the matter for any reason, or if the 70 | behavior is threatening or harassing, report it. We are dedicated to providing 71 | an environment where participants feel welcome and safe. 72 | 73 | Reports should be directed to *googleapis-stewards@google.com*, the 74 | Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to 75 | receive and address reported violations of the code of conduct. They will then 76 | work with a committee consisting of representatives from the Open Source 77 | Programs Office and the Google Open Source Strategy team. If for any reason you 78 | are uncomfortable reaching out to the Project Steward, please email 79 | opensource@google.com. 80 | 81 | We will investigate every complaint, but you may not receive a direct response. 82 | We will use our discretion in determining when and how to follow up on reported 83 | incidents, which may range from not taking action to permanent expulsion from 84 | the project and project-sponsored spaces. 
We will notify the accused of the 85 | report and provide them an opportunity to discuss it before any action is taken. 86 | The identity of the reporter will be omitted from the details of the report 87 | supplied to the accused. In potentially harmful situations, such as ongoing 88 | harassment or threats to anyone's safety, we may take action without notice. 89 | 90 | ## Attribution 91 | 92 | This Code of Conduct is adapted from the Contributor Covenant, version 1.4, 93 | available at 94 | https://www.contributor-covenant.org/version/1/4/code-of-conduct.html -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # How to become a contributor and submit your own code 2 | 3 | **Table of contents** 4 | 5 | * [Contributor License Agreements](#contributor-license-agreements) 6 | * [Contributing a patch](#contributing-a-patch) 7 | * [Running the tests](#running-the-tests) 8 | * [Releasing the library](#releasing-the-library) 9 | 10 | ## Contributor License Agreements 11 | 12 | We'd love to accept your sample apps and patches! Before we can take them, we 13 | have to jump a couple of legal hurdles. 14 | 15 | Please fill out either the individual or corporate Contributor License Agreement 16 | (CLA). 17 | 18 | * If you are an individual writing original source code and you're sure you 19 | own the intellectual property, then you'll need to sign an [individual CLA](https://developers.google.com/open-source/cla/individual). 20 | * If you work for a company that wants to allow you to contribute your work, 21 | then you'll need to sign a [corporate CLA](https://developers.google.com/open-source/cla/corporate). 22 | 23 | Follow either of the two links above to access the appropriate CLA and 24 | instructions for how to sign and return it. Once we receive it, we'll be able to 25 | accept your pull requests. 
26 | 27 | ## Contributing A Patch 28 | 29 | 1. Submit an issue describing your proposed change to the repo in question. 30 | 1. The repo owner will respond to your issue promptly. 31 | 1. If your proposed change is accepted, and you haven't already done so, sign a 32 | Contributor License Agreement (see details above). 33 | 1. Fork the desired repo, develop and test your code changes. 34 | 1. Ensure that your code adheres to the existing style in the code to which 35 | you are contributing. 36 | 1. Ensure that your code has an appropriate set of tests which all pass. 37 | 1. Title your pull request following [Conventional Commits](https://www.conventionalcommits.org/) styling. 38 | 1. Submit a pull request. 39 | 40 | ### Before you begin 41 | 42 | 1. [Select or create a Cloud Platform project][projects]. 43 | 1. [Set up authentication with a service account][auth] so you can access the 44 | API from your local workstation. 45 | 46 | 47 | ## Running the tests 48 | 49 | 1. [Prepare your environment for Node.js setup][setup]. 50 | 51 | 1. Install dependencies: 52 | 53 | npm install 54 | 55 | 1. Run the tests: 56 | 57 | # Run unit tests. 58 | npm test 59 | 60 | # Run sample integration tests. 61 | npm run samples-test 62 | 63 | # Run all system tests. 64 | npm run system-test 65 | 66 | 1. 
Lint (and maybe fix) any changes: 67 | 68 | npm run fix 69 | 70 | [setup]: https://cloud.google.com/nodejs/docs/setup 71 | [projects]: https://console.cloud.google.com/project 72 | [billing]: https://support.google.com/cloud/answer/6293499#enable-billing 73 | 74 | [auth]: https://cloud.google.com/docs/authentication/getting-started -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2018 Google LLC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # [DEPRECATED] gcs-resumable-upload 2 | ***This repository has been deprecated. 
Support will end on November 1, 2023.*** 3 | 4 | > Upload a file to Google Cloud Storage with built-in resumable behavior 5 | 6 | ```sh 7 | $ npm install gcs-resumable-upload 8 | ``` 9 | ```js 10 | const {upload} = require('gcs-resumable-upload'); 11 | const fs = require('fs'); 12 | 13 | fs.createReadStream('titanic.mov') 14 | .pipe(upload({ bucket: 'legally-owned-movies', file: 'titanic.mov' })) 15 | .on('progress', (progress) => { 16 | console.log('Progress event:') 17 | console.log('\t bytes: ', progress.bytesWritten); 18 | }) 19 | .on('finish', () => { 20 | // Uploaded! 21 | }); 22 | ``` 23 | 24 | Or from the command line: 25 | 26 | ```sh 27 | $ npm install -g gcs-resumable-upload 28 | $ cat titanic.mov | gcs-upload legally-owned-movies titanic.mov 29 | ``` 30 | 31 | If somewhere during the operation, you lose your connection to the internet or your tough-guy brother slammed your laptop shut when he saw what you were uploading, the next time you try to upload to that file, it will resume automatically from where you left off. 32 | 33 | ## How it works 34 | 35 | This module stores a file using [ConfigStore](https://www.npmjs.com/package/configstore) that is written to when you first start an upload. It is aliased by the file name you are uploading to and holds the first 16kb chunk of data* as well as the unique resumable upload URI. ([Resumable uploads are complicated](https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resumable)) 36 | 37 | If your upload was interrupted, next time you run the code, we ask the API how much data it has already, then simply dump all of the data coming through the pipe that it already has. 38 | 39 | After the upload completes, the entry in the config file is removed. Done! 40 | 41 | \* The first 16kb chunk is stored to validate if you are sending the same data when you resume the upload. If not, a new resumable upload is started with the new data. 42 | 43 | ## Authentication 44 | 45 | Oh, right. 
This module uses [google-auth-library](https://www.npmjs.com/package/google-auth-library) and accepts all of the configuration that module does to strike up a connection as `config.authConfig`. See [`authConfig`](https://github.com/google/google-auth-library-nodejs/#choosing-the-correct-credential-type-automatically). 46 | 47 | ## API 48 | 49 | ```js 50 | const {gcsResumableUpload} = require('gcs-resumable-upload') 51 | const upload = gcsResumableUpload(config) 52 | ``` 53 | 54 | `upload` is an instance of [`Duplexify`](https://www.npmjs.com/package/duplexify). 55 | 56 | --- 57 | 58 | ### Methods 59 | 60 | #### upload.createURI(callback) 61 | 62 | ##### callback(err, resumableURI) 63 | 64 | ###### callback.err 65 | 66 | - Type: `Error` 67 | 68 | Invoked if the authorization failed or the request to start a resumable session failed. 69 | 70 | ###### callback.resumableURI 71 | 72 | - Type: `String` 73 | 74 | The resumable upload session URI. 75 | 76 | 77 | #### upload.deleteConfig() 78 | 79 | This will remove the config data associated with the provided file. 80 | 81 | --- 82 | 83 | ### Configuration 84 | 85 | #### config 86 | 87 | - Type: `object` 88 | 89 | Configuration object. 90 | 91 | ##### config.authClient 92 | 93 | - Type: [`GoogleAuth`](https://www.npmjs.com/package/google-auth-library) 94 | - *Optional* 95 | 96 | If you want to re-use an auth client from [google-auth-library](https://www.npmjs.com/package/google-auth-library), pass an instance here. 97 | 98 | ##### config.authConfig 99 | 100 | - Type: `object` 101 | - *Optional* 102 | 103 | See [`authConfig`](https://github.com/google/google-auth-library-nodejs/#choosing-the-correct-credential-type-automatically). 104 | 105 | ##### config.bucket 106 | 107 | - Type: `string` 108 | - **Required** 109 | 110 | The name of the destination bucket. 
111 | 112 | ##### config.configPath 113 | 114 | - Type: `string` 115 | - *Optional* 116 | 117 | Where the gcs-resumable-upload configuration file should be stored on your system. This maps to the [configstore option by the same name](https://github.com/yeoman/configstore/tree/0df1ec950d952b1f0dfb39ce22af8e505dffc71a#configpath). 118 | 119 | ##### config.customRequestOptions 120 | 121 | - Type: `object` 122 | - *Optional* 123 | 124 | For each API request we send, you may specify custom request options that we'll add onto the request. The request options follow the gaxios API: https://github.com/googleapis/gaxios#request-options. 125 | 126 | For example, to set your own HTTP headers: 127 | 128 | ```js 129 | const stream = upload({ 130 | customRequestOptions: { 131 | headers: { 132 | 'X-My-Header': 'My custom value', 133 | }, 134 | }, 135 | }) 136 | ``` 137 | 138 | ##### config.file 139 | 140 | - Type: `string` 141 | - **Required** 142 | 143 | The name of the destination file. 144 | 145 | ##### config.generation 146 | 147 | - Type: `number` 148 | - *Optional* 149 | 150 | This will cause the upload to fail if the current generation of the remote object does not match the one provided here. 151 | 152 | ##### config.key 153 | 154 | - Type: `string|buffer` 155 | - *Optional* 156 | 157 | A [customer-supplied encryption key](https://cloud.google.com/storage/docs/encryption#customer-supplied). 158 | 159 | ##### config.kmsKeyName 160 | 161 | - Type: `string` 162 | - *Optional* 163 | 164 | Resource name of the Cloud KMS key, of the form `projects/my-project/locations/global/keyRings/my-kr/cryptoKeys/my-key`, that will be used to encrypt the object. Overrides the object metadata's `kms_key_name` value, if any. 165 | 166 | ##### config.metadata 167 | 168 | - Type: `object` 169 | - *Optional* 170 | 171 | Any metadata you wish to set on the object. 172 | 173 | ###### *config.metadata.contentLength* 174 | 175 | Set the length of the file being uploaded. 
176 | 177 | ###### *config.metadata.contentType* 178 | 179 | Set the content type of the incoming data. 180 | 181 | ##### config.offset 182 | 183 | - Type: `number` 184 | - *Optional* 185 | 186 | The starting byte of the upload stream, for [resuming an interrupted upload](https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload). 187 | 188 | ##### config.origin 189 | 190 | - Type: `string` 191 | - *Optional* 192 | 193 | Set an Origin header when creating the resumable upload URI. 194 | 195 | ##### config.predefinedAcl 196 | 197 | - Type: `string` 198 | - *Optional* 199 | 200 | Apply a predefined set of access controls to the created file. 201 | 202 | Acceptable values are: 203 | 204 | - **`authenticatedRead`** - Object owner gets `OWNER` access, and `allAuthenticatedUsers` get `READER` access. 205 | - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and project team owners get `OWNER` access. 206 | - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project team owners get `READER` access. 207 | - **`private`** - Object owner gets `OWNER` access. 208 | - **`projectPrivate`** - Object owner gets `OWNER` access, and project team members get access according to their roles. 209 | - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` get `READER` access. 210 | 211 | ##### config.private 212 | 213 | - Type: `boolean` 214 | - *Optional* 215 | 216 | Make the uploaded file private. (Alias for `config.predefinedAcl = 'private'`) 217 | 218 | ##### config.public 219 | 220 | - Type: `boolean` 221 | - *Optional* 222 | 223 | Make the uploaded file public. (Alias for `config.predefinedAcl = 'publicRead'`) 224 | 225 | ##### config.uri 226 | 227 | - Type: `string` 228 | - *Optional* 229 | 230 | If you already have a resumable URI from a previously-created resumable upload, just pass it in here and we'll use that. 
231 | 232 | ##### config.userProject 233 | 234 | - Type: `string` 235 | - *Optional* 236 | 237 | If the bucket being accessed has `requesterPays` functionality enabled, this can be set to control which project is billed for the access of this file. 238 | 239 | ##### config.retryOptions 240 | 241 | - Type: `object` 242 | - *Optional* 243 | 244 | Parameters used to control retrying operations. 245 | 246 | ```js 247 | interface RetryOptions { 248 | retryDelayMultiplier?: number; 249 | totalTimeout?: number; 250 | maxRetryDelay?: number; 251 | autoRetry?: boolean; 252 | maxRetries?: number; 253 | retryableErrorFn?: (err: ApiError) => boolean; 254 | } 255 | ``` 256 | 257 | ##### config.retryOptions.retryDelayMultiplier 258 | 259 | - Type: `number` 260 | - *Optional* 261 | 262 | Base number used for exponential backoff. Default 2. 263 | 264 | ##### config.retryOptions.totalTimeout 265 | 266 | - Type: `number` 267 | - *Optional* 268 | 269 | Upper bound on the total amount of time to attempt retrying, in seconds. Default: 600. 270 | 271 | ##### config.retryOptions.maxRetryDelay 272 | 273 | - Type: `number` 274 | - *Optional* 275 | 276 | The maximum time to delay between retries, in seconds. Default: 64. 277 | 278 | ##### config.retryOptions.autoRetry 279 | 280 | - Type: `boolean` 281 | - *Optional* 282 | 283 | Whether or not errors should be retried. Default: true. 284 | 285 | ##### config.retryOptions.maxRetries 286 | 287 | - Type: `number` 288 | - *Optional* 289 | 290 | The maximum number of retries to attempt. Default: 5. 291 | 292 | ##### config.retryOptions.retryableErrorFn 293 | 294 | - Type: `function` 295 | - *Optional* 296 | 297 | Custom function returning a boolean indicating whether or not to retry an error. 
298 | 299 | 300 | ##### config.chunkSize 301 | 302 | - Type: `number` 303 | - *Optional* 304 | 305 | Enables [Multiple chunk upload](https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload) mode and sets each request size to this amount. 306 | 307 | This only makes sense to use for larger files. The chunk size should be a multiple of 256 KiB (256 x 1024 bytes). Larger chunk sizes typically make uploads more efficient. We recommend using at least 8 MiB for the chunk size. 308 | 309 | Review [documentation](https://cloud.google.com/storage/docs/performing-resumable-uploads) for guidance and best practices. 310 | 311 | --- 312 | 313 | ### Events 314 | 315 | #### .on('error', function (err) {}) 316 | 317 | ##### err 318 | 319 | - Type: `Error` 320 | 321 | Invoked if the authorization failed, the request failed, or the file wasn't successfully uploaded. 322 | 323 | #### .on('response', function (response) {}) 324 | 325 | ##### response 326 | 327 | - Type: `Object` 328 | 329 | The [response object from Gaxios](https://github.com/JustinBeckwith/gaxios/blob/88a47e000625d8192689acac5c40c0b1e1d963a2/src/gaxios.ts#L197-L203). 330 | 331 | ##### metadata 332 | 333 | - Type: `Object` 334 | 335 | The file's new metadata. 336 | 337 | #### .on('progress', function (progress) {}) 338 | 339 | ##### progress 340 | 341 | - Type: `Object` 342 | 343 | ##### progress.bytesWritten 344 | 345 | - Type: `number` 346 | 347 | ##### progress.contentLength 348 | 349 | - Type: `number` 350 | 351 | Progress event provides upload stats like Transferred Bytes and content length. 352 | 353 | #### .on('finish', function () {}) 354 | 355 | The file was uploaded successfully. 
356 | 357 | --- 358 | 359 | ### Static Methods 360 | 361 | ```js 362 | const {createURI} = require('gcs-resumable-upload') 363 | ``` 364 | 365 | #### createURI([config](#config), callback) 366 | 367 | ##### callback(err, resumableURI) 368 | 369 | ###### callback.err 370 | 371 | - Type: `Error` 372 | 373 | Invoked if the authorization failed or the request to start a resumable session failed. 374 | 375 | ###### callback.resumableURI 376 | 377 | - Type: `String` 378 | 379 | The resumable upload session URI. 380 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). 4 | 5 | The Google Security Team will respond within 5 working days of your report on g.co/vulnz. 6 | 7 | We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 8 | -------------------------------------------------------------------------------- /linkinator.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "recurse": true, 3 | "skip": [ 4 | "https://codecov.io/gh/googleapis/", 5 | "www.googleapis.com", 6 | "img.shields.io" 7 | ], 8 | "silent": true, 9 | "concurrency": 10 10 | } 11 | -------------------------------------------------------------------------------- /owlbot.py: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # https://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | import synthtool.languages.node as node 16 | 17 | node.owlbot_main(templates_excludes=["LICENSE", "README.md"]) 18 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gcs-resumable-upload", 3 | "version": "6.0.0", 4 | "description": "Upload a file to Google Cloud Storage with built-in resumable behavior", 5 | "repository": "googleapis/gcs-resumable-upload", 6 | "main": "build/src/index.js", 7 | "types": "build/src/index.d.ts", 8 | "bin": { 9 | "gcs-upload": "build/src/cli.js" 10 | }, 11 | "scripts": { 12 | "test": "c8 mocha build/test", 13 | "lint": "gts check", 14 | "clean": "gts clean", 15 | "compile": "tsc -p .", 16 | "fix": "gts fix", 17 | "prepare": "npm run compile", 18 | "pretest": "npm run compile", 19 | "system-test": "mocha build/system-test --timeout 60000", 20 | "samples-test": "echo no samples 🤷‍♂️", 21 | "presystem-test": "npm run compile", 22 | "docs": "compodoc src/", 23 | "docs-test": "linkinator docs", 24 | "predocs-test": "npm run docs", 25 | "prelint": "cd samples; npm link ../; npm install", 26 | "precompile": "gts clean" 27 | }, 28 | "keywords": [ 29 | "google", 30 | "gcloud", 31 | "storage", 32 | "gcs", 33 | "upload", 34 | "resumable" 35 | ], 36 | "files": [ 37 | "build/src" 38 | ], 39 | "author": "Stephen Sawchuk ", 40 | "license": "MIT", 41 | "engines": { 42 | "node": ">=14.0.0" 43 | }, 44 | "dependencies": { 45 | "abort-controller": "^3.0.0", 46 | 
"async-retry": "^1.3.3", 47 | "configstore": "^5.0.0", 48 | "extend": "^3.0.2", 49 | "gaxios": "^6.0.4", 50 | "google-auth-library": "^9.0.0", 51 | "pumpify": "^2.0.0", 52 | "stream-events": "^1.0.4" 53 | }, 54 | "devDependencies": { 55 | "@compodoc/compodoc": "^1.1.7", 56 | "@types/async-retry": "^1.4.3", 57 | "@types/configstore": "^5.0.0", 58 | "@types/extend": "^3.0.1", 59 | "@types/mocha": "^9.0.0", 60 | "@types/mockery": "^1.4.29", 61 | "@types/node": "^20.4.9", 62 | "@types/pumpify": "^1.4.1", 63 | "@types/sinon": "^10.0.0", 64 | "c8": "^9.0.0", 65 | "gts": "^5.0.0", 66 | "linkinator": "^4.0.0", 67 | "mocha": "^9.2.2", 68 | "mockery": "^2.1.0", 69 | "nock": "^13.0.0", 70 | "sinon": "^15.0.0", 71 | "typescript": "^5.1.6" 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base", 4 | "docker:disable", 5 | ":disableDependencyDashboard" 6 | ], 7 | "pinVersions": false, 8 | "rebaseStalePrs": true, 9 | "schedule": [ 10 | "after 9am and before 3pm" 11 | ], 12 | "gitAuthor": null, 13 | "packageRules": [ 14 | { 15 | "extends": "packages:linters", 16 | "groupName": "linters" 17 | } 18 | ], 19 | "ignoreDeps": ["typescript"] 20 | } 21 | -------------------------------------------------------------------------------- /samples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Samples for the gcs-resumable-upload npm module.", 3 | "license": "MIT", 4 | "author": "Google LLC", 5 | "engines": { 6 | "node": ">=14.0.0" 7 | }, 8 | "repository": "googleapis/gcs-resumable-upload", 9 | "private": true, 10 | "scripts": { 11 | "test": "mocha system-test" 12 | }, 13 | "dependencies": { 14 | "gcs-resumable-upload": "^6.0.0" 15 | }, 16 | "devDependencies": { 17 | "mocha": "^8.0.0" 18 | } 19 | } 
-------------------------------------------------------------------------------- /samples/quickstart.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Copyright 2018 Google LLC 3 | * 4 | * Use of this source code is governed by an MIT-style 5 | * license that can be found in the LICENSE file or at 6 | * https://opensource.org/licenses/MIT. 7 | */ 8 | 9 | /** 10 | * @fileoverview 11 | * This is an example of a command line application that takes a file path 12 | * and a bucket, and then uploads the file to GCS. It will show progress 13 | * information as the file as uploaded. To use it, try running: 14 | * 15 | * @example 16 | * $ node samples/quickstart.js path/to/file.ext name-of-bucket 17 | */ 18 | 19 | // eslint-disable-next-line node/no-missing-require 20 | const {upload} = require('gcs-resumable-upload'); 21 | const fs = require('fs'); 22 | const util = require('util'); 23 | 24 | async function main() { 25 | if (process.argv.length < 4) { 26 | throw new Error('Usage: node quickstart.js ${bucketName} ${filePath}'); 27 | } 28 | const bucket = process.argv[3]; 29 | const file = process.argv[2]; 30 | 31 | const stat = util.promisify(fs.stat); 32 | const fileSize = (await stat(file)).size; 33 | console.log(`Uploading '${file}' to '${bucket}' ...`); 34 | 35 | return new Promise((resolve, reject) => { 36 | fs.createReadStream(file) 37 | .pipe(upload({bucket, file})) 38 | .on('progress', progress => { 39 | console.log('Progress event:'); 40 | console.log('\t bytes: ', progress.bytesWritten); 41 | const pct = Math.round((progress.bytesWritten / fileSize) * 100); 42 | console.log(`\t ${pct}%`); 43 | }) 44 | .on('finish', () => { 45 | console.log('Upload complete!'); 46 | resolve(); 47 | }) 48 | .on('error', err => { 49 | console.error('There was a problem uploading the file :('); 50 | reject(err); 51 | }); 52 | }); 53 | } 54 | 55 | main().catch(console.error); 56 | 
-------------------------------------------------------------------------------- /samples/system-test/system.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * Copyright 2018 Google LLC 3 | * 4 | * Use of this source code is governed by an MIT-style 5 | * license that can be found in the LICENSE file or at 6 | * https://opensource.org/licenses/MIT. 7 | */ 8 | 9 | console.warn('no samples 🤷‍♂️'); 10 | -------------------------------------------------------------------------------- /src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /*! 4 | * Copyright 2018 Google LLC 5 | * 6 | * Use of this source code is governed by an MIT-style 7 | * license that can be found in the LICENSE file or at 8 | * https://opensource.org/licenses/MIT. 9 | */ 10 | 11 | import {upload} from '.'; 12 | 13 | const args = process.argv.slice(2); 14 | const opts = { 15 | bucket: args[0], 16 | file: args[1], 17 | }; 18 | 19 | process.stdin 20 | .pipe(upload(opts)) 21 | .on('error', console.error) 22 | .on('response', (resp, metadata) => { 23 | if (!metadata || !metadata.mediaLink) return; 24 | console.log('uploaded!'); 25 | console.log(metadata.mediaLink); 26 | }); 27 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /*! 2 | * Copyright 2018 Google LLC 3 | * 4 | * Use of this source code is governed by an MIT-style 5 | * license that can be found in the LICENSE file or at 6 | * https://opensource.org/licenses/MIT. 
7 | */ 8 | 9 | import AbortController from 'abort-controller'; 10 | import * as ConfigStore from 'configstore'; 11 | import {createHash} from 'crypto'; 12 | import * as extend from 'extend'; 13 | import { 14 | GaxiosOptions, 15 | GaxiosPromise, 16 | GaxiosResponse, 17 | GaxiosError, 18 | } from 'gaxios'; 19 | import * as gaxios from 'gaxios'; 20 | import {GoogleAuth, GoogleAuthOptions} from 'google-auth-library'; 21 | import * as Pumpify from 'pumpify'; 22 | import {Duplex, PassThrough, Readable} from 'stream'; 23 | import * as streamEvents from 'stream-events'; 24 | import retry = require('async-retry'); 25 | 26 | const NOT_FOUND_STATUS_CODE = 404; 27 | const TERMINATED_UPLOAD_STATUS_CODE = 410; 28 | const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; 29 | const RETRY_LIMIT = 5; 30 | const DEFAULT_API_ENDPOINT_REGEX = /.*\.googleapis\.com/; 31 | const MAX_RETRY_DELAY = 64; 32 | const RETRY_DELAY_MULTIPLIER = 2; 33 | const MAX_TOTAL_RETRY_TIMEOUT = 600; 34 | const AUTO_RETRY_VALUE = true; 35 | 36 | export const PROTOCOL_REGEX = /^(\w*):\/\//; 37 | 38 | export interface ErrorWithCode extends Error { 39 | status: number; 40 | } 41 | 42 | export type CreateUriCallback = (err: Error | null, uri?: string) => void; 43 | 44 | export interface Encryption { 45 | key: {}; 46 | hash: {}; 47 | } 48 | 49 | export type PredefinedAcl = 50 | | 'authenticatedRead' 51 | | 'bucketOwnerFullControl' 52 | | 'bucketOwnerRead' 53 | | 'private' 54 | | 'projectPrivate' 55 | | 'publicRead'; 56 | 57 | export interface QueryParameters { 58 | contentEncoding?: string; 59 | ifGenerationMatch?: number; 60 | ifGenerationNotMatch?: number; 61 | ifMetagenerationMatch?: number; 62 | ifMetagenerationNotMatch?: number; 63 | kmsKeyName?: string; 64 | predefinedAcl?: PredefinedAcl; 65 | projection?: 'full' | 'noAcl'; 66 | userProject?: string; 67 | } 68 | 69 | export interface UploadConfig { 70 | /** 71 | * The API endpoint used for the request. 72 | * Defaults to `storage.googleapis.com`. 
73 | * **Warning**: 74 | * If this value does not match the pattern *.googleapis.com, 75 | * an emulator context will be assumed and authentication will be bypassed. 76 | */ 77 | apiEndpoint?: string; 78 | 79 | /** 80 | * The name of the destination bucket. 81 | */ 82 | bucket: string; 83 | 84 | /** 85 | * The name of the destination file. 86 | */ 87 | file: string; 88 | 89 | /** 90 | * The GoogleAuthOptions passed to google-auth-library 91 | */ 92 | authConfig?: GoogleAuthOptions; 93 | 94 | /** 95 | * If you want to re-use an auth client from google-auto-auth, pass an 96 | * instance here. 97 | * Defaults to GoogleAuth and gets automatically overridden if an 98 | * emulator context is detected. 99 | */ 100 | authClient?: { 101 | request: ( 102 | opts: GaxiosOptions 103 | ) => Promise> | GaxiosPromise; 104 | }; 105 | 106 | /** 107 | * Where the gcs-resumable-upload configuration file should be stored on your 108 | * system. This maps to the configstore option by the same name. 109 | */ 110 | configPath?: string; 111 | 112 | /** 113 | * Create a separate request per chunk. 114 | * 115 | * Should be a multiple of 256 KiB (2^18). 116 | * We recommend using at least 8 MiB for the chunk size. 117 | * 118 | * @link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload 119 | */ 120 | chunkSize?: number; 121 | 122 | /** 123 | * For each API request we send, you may specify custom request options that 124 | * we'll add onto the request. The request options follow the gaxios API: 125 | * https://github.com/googleapis/gaxios#request-options. 126 | */ 127 | customRequestOptions?: GaxiosOptions; 128 | 129 | /** 130 | * This will cause the upload to fail if the current generation of the remote 131 | * object does not match the one provided here. 132 | */ 133 | generation?: number; 134 | 135 | /** 136 | * A customer-supplied encryption key. See 137 | * https://cloud.google.com/storage/docs/encryption#customer-supplied. 
138 | */ 139 | key?: string | Buffer; 140 | 141 | /** 142 | * Resource name of the Cloud KMS key, of the form 143 | * `projects/my-project/locations/global/keyRings/my-kr/cryptoKeys/my-key`, 144 | * that will be used to encrypt the object. Overrides the object metadata's 145 | * `kms_key_name` value, if any. 146 | */ 147 | kmsKeyName?: string; 148 | 149 | /** 150 | * Any metadata you wish to set on the object. 151 | */ 152 | metadata?: ConfigMetadata; 153 | 154 | /** 155 | * The starting byte of the upload stream, for resuming an interrupted upload. 156 | * See 157 | * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload#resume-upload. 158 | */ 159 | offset?: number; 160 | 161 | /** 162 | * Set an Origin header when creating the resumable upload URI. 163 | */ 164 | origin?: string; 165 | 166 | /** 167 | * Specify query parameters that go along with the initial upload request. See 168 | * https://cloud.google.com/storage/docs/json_api/v1/objects/insert#parameters 169 | */ 170 | params?: QueryParameters; 171 | 172 | /** 173 | * Apply a predefined set of access controls to the created file. 174 | */ 175 | predefinedAcl?: PredefinedAcl; 176 | 177 | /** 178 | * Make the uploaded file private. (Alias for config.predefinedAcl = 179 | * 'private') 180 | */ 181 | private?: boolean; 182 | 183 | /** 184 | * Make the uploaded file public. (Alias for config.predefinedAcl = 185 | * 'publicRead') 186 | */ 187 | public?: boolean; 188 | 189 | /** 190 | * If you already have a resumable URI from a previously-created resumable 191 | * upload, just pass it in here and we'll use that. 192 | */ 193 | uri?: string; 194 | 195 | /** 196 | * If the bucket being accessed has requesterPays functionality enabled, this 197 | * can be set to control which project is billed for the access of this file. 198 | */ 199 | userProject?: string; 200 | 201 | /** 202 | * Configuration options for retrying retryable errors. 
203 | */ 204 | retryOptions?: RetryOptions; 205 | } 206 | 207 | export interface ConfigMetadata { 208 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 209 | [key: string]: any; 210 | 211 | /** 212 | * Set the length of the file being uploaded. 213 | */ 214 | contentLength?: number; 215 | 216 | /** 217 | * Set the content type of the incoming data. 218 | */ 219 | contentType?: string; 220 | } 221 | 222 | export interface RetryOptions { 223 | retryDelayMultiplier?: number; 224 | totalTimeout?: number; 225 | maxRetryDelay?: number; 226 | autoRetry?: boolean; 227 | maxRetries?: number; 228 | retryableErrorFn?: (err: ApiError) => boolean; 229 | } 230 | 231 | export interface GoogleInnerError { 232 | reason?: string; 233 | } 234 | 235 | export interface ApiError extends Error { 236 | code?: number; 237 | errors?: GoogleInnerError[]; 238 | } 239 | 240 | export class Upload extends Pumpify { 241 | bucket: string; 242 | file: string; 243 | apiEndpoint: string; 244 | baseURI: string; 245 | authConfig?: {scopes?: string[]}; 246 | /* 247 | * Defaults to GoogleAuth and gets automatically overridden if an 248 | * emulator context is detected. 
249 | */ 250 | authClient: { 251 | request: ( 252 | opts: GaxiosOptions 253 | ) => Promise> | GaxiosPromise; 254 | }; 255 | cacheKey: string; 256 | chunkSize?: number; 257 | customRequestOptions: GaxiosOptions; 258 | generation?: number; 259 | key?: string | Buffer; 260 | kmsKeyName?: string; 261 | metadata: ConfigMetadata; 262 | offset?: number; 263 | origin?: string; 264 | params: QueryParameters; 265 | predefinedAcl?: PredefinedAcl; 266 | private?: boolean; 267 | public?: boolean; 268 | uri?: string; 269 | userProject?: string; 270 | encryption?: Encryption; 271 | configStore: ConfigStore; 272 | uriProvidedManually: boolean; 273 | numBytesWritten = 0; 274 | numRetries = 0; 275 | contentLength: number | '*'; 276 | retryLimit: number = RETRY_LIMIT; 277 | maxRetryDelay: number = MAX_RETRY_DELAY; 278 | retryDelayMultiplier: number = RETRY_DELAY_MULTIPLIER; 279 | maxRetryTotalTimeout: number = MAX_TOTAL_RETRY_TIMEOUT; 280 | timeOfFirstRequest: number; 281 | retryableErrorFn?: (err: ApiError) => boolean; 282 | private upstreamChunkBuffer: Buffer = Buffer.alloc(0); 283 | private chunkBufferEncoding?: BufferEncoding = undefined; 284 | private numChunksReadInRequest = 0; 285 | /** 286 | * A chunk used for caching the most recent upload chunk. 287 | * We should not assume that the server received all bytes sent in the request. 
288 | * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload 289 | */ 290 | private lastChunkSent = Buffer.alloc(0); 291 | private upstreamEnded = false; 292 | 293 | constructor(cfg: UploadConfig) { 294 | super(); 295 | streamEvents(this); 296 | 297 | cfg = cfg || {}; 298 | 299 | if (!cfg.bucket || !cfg.file) { 300 | throw new Error('A bucket and file name are required'); 301 | } 302 | 303 | cfg.authConfig = cfg.authConfig || {}; 304 | cfg.authConfig.scopes = [ 305 | 'https://www.googleapis.com/auth/devstorage.full_control', 306 | ]; 307 | this.authClient = cfg.authClient || new GoogleAuth(cfg.authConfig); 308 | 309 | this.apiEndpoint = 'https://storage.googleapis.com'; 310 | if (cfg.apiEndpoint) { 311 | this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); 312 | if (!DEFAULT_API_ENDPOINT_REGEX.test(cfg.apiEndpoint)) { 313 | this.authClient = gaxios; 314 | } 315 | } 316 | 317 | this.baseURI = `${this.apiEndpoint}/upload/storage/v1/b`; 318 | this.bucket = cfg.bucket; 319 | 320 | const cacheKeyElements = [cfg.bucket, cfg.file]; 321 | if (typeof cfg.generation === 'number') { 322 | cacheKeyElements.push(`${cfg.generation}`); 323 | } 324 | 325 | this.cacheKey = cacheKeyElements.join('/'); 326 | 327 | this.customRequestOptions = cfg.customRequestOptions || {}; 328 | this.file = cfg.file; 329 | this.generation = cfg.generation; 330 | this.kmsKeyName = cfg.kmsKeyName; 331 | this.metadata = cfg.metadata || {}; 332 | this.offset = cfg.offset; 333 | this.origin = cfg.origin; 334 | this.params = cfg.params || {}; 335 | this.userProject = cfg.userProject; 336 | this.chunkSize = cfg.chunkSize; 337 | 338 | if (cfg.key) { 339 | /** 340 | * NOTE: This is `as string` because there appears to be some weird kind 341 | * of TypeScript bug as 2.8. 
Tracking the issue here: 342 | * https://github.com/Microsoft/TypeScript/issues/23155 343 | */ 344 | const base64Key = Buffer.from(cfg.key as string).toString('base64'); 345 | this.encryption = { 346 | key: base64Key, 347 | hash: createHash('sha256').update(cfg.key).digest('base64'), 348 | }; 349 | } 350 | 351 | this.predefinedAcl = cfg.predefinedAcl; 352 | if (cfg.private) this.predefinedAcl = 'private'; 353 | if (cfg.public) this.predefinedAcl = 'publicRead'; 354 | 355 | const configPath = cfg.configPath; 356 | this.configStore = new ConfigStore('gcs-resumable-upload', null, { 357 | configPath, 358 | }); 359 | 360 | /* `??`, not `||`: an explicit `autoRetry: false` must disable retries; `||` treated `false` the same as "unset" and silently fell back to the default (true). */ const autoRetry = cfg?.retryOptions?.autoRetry ?? AUTO_RETRY_VALUE; 361 | this.uriProvidedManually = !!cfg.uri; 362 | this.uri = cfg.uri || this.get('uri'); 363 | this.numBytesWritten = 0; 364 | this.numRetries = 0; //counter for number of retries currently executed 365 | 366 | if (autoRetry && cfg?.retryOptions?.maxRetries !== undefined) { 367 | this.retryLimit = cfg.retryOptions.maxRetries; 368 | } else if (!autoRetry) { 369 | this.retryLimit = 0; 370 | } 371 | 372 | if (cfg?.retryOptions?.maxRetryDelay !== undefined) { 373 | this.maxRetryDelay = cfg.retryOptions.maxRetryDelay; 374 | } 375 | 376 | if (cfg?.retryOptions?.retryDelayMultiplier !== undefined) { 377 | this.retryDelayMultiplier = cfg.retryOptions.retryDelayMultiplier; 378 | } 379 | 380 | if (cfg?.retryOptions?.totalTimeout !== undefined) { 381 | this.maxRetryTotalTimeout = cfg.retryOptions.totalTimeout; 382 | } 383 | 384 | this.timeOfFirstRequest = Date.now(); 385 | this.retryableErrorFn = cfg?.retryOptions?.retryableErrorFn; 386 | 387 | const contentLength = cfg.metadata 388 | ? Number(cfg.metadata.contentLength) 389 | : NaN; 390 | this.contentLength = isNaN(contentLength) ?
'*' : contentLength; 391 | 392 | this.upstream.on('end', () => { 393 | this.upstreamEnded = true; 394 | }); 395 | 396 | this.on('prefinish', () => { 397 | this.upstreamEnded = true; 398 | }); 399 | 400 | this.once('writing', () => { 401 | // Now that someone is writing to this object, let's attach 402 | // some duplexes. These duplexes enable this object to be 403 | // better managed in terms of 'end'/'finish' control and 404 | // buffering writes downstream if someone enables multi- 405 | // chunk upload support (`chunkSize`) w/o adding too much into 406 | // memory. 407 | this.setPipeline(this.upstream, new PassThrough()); 408 | 409 | if (this.uri) { 410 | this.continueUploading(); 411 | } else { 412 | this.createURI((err, uri) => { 413 | if (err) { 414 | return this.destroy(err); 415 | } 416 | this.set({uri}); 417 | this.startUploading(); 418 | return; 419 | }); 420 | } 421 | }); 422 | } 423 | 424 | /** A stream representing the incoming data to upload */ 425 | private readonly upstream = new Duplex({ 426 | read: async () => { 427 | this.once('prepareFinish', () => { 428 | // Allows this (`Upload`) to finish/end once the upload has succeeded. 429 | this.upstream.push(null); 430 | }); 431 | }, 432 | write: this.writeToChunkBuffer.bind(this), 433 | }); 434 | 435 | /** 436 | * A handler for `upstream` to write and buffer its data. 437 | * 438 | * @param chunk The chunk to append to the buffer 439 | * @param encoding The encoding of the chunk 440 | * @param readCallback A callback for when the buffer has been read downstream 441 | */ 442 | private writeToChunkBuffer( 443 | chunk: Buffer | string, 444 | encoding: BufferEncoding, 445 | readCallback: () => void 446 | ) { 447 | this.upstreamChunkBuffer = Buffer.concat([ 448 | this.upstreamChunkBuffer, 449 | typeof chunk === 'string' ? 
Buffer.from(chunk, encoding) : chunk, 450 | ]); 451 | this.chunkBufferEncoding = encoding; 452 | 453 | this.once('readFromChunkBuffer', readCallback); 454 | process.nextTick(() => this.emit('wroteToChunkBuffer')); 455 | } 456 | 457 | /** 458 | * Prepends data back to the upstream chunk buffer. 459 | * 460 | * @param chunk The data to prepend 461 | */ 462 | private unshiftChunkBuffer(chunk: Buffer) { 463 | this.upstreamChunkBuffer = Buffer.concat([chunk, this.upstreamChunkBuffer]); 464 | } 465 | 466 | /** 467 | * Retrieves data from upstream's buffer. 468 | * 469 | * @param limit The maximum amount to return from the buffer. 470 | * @returns The data requested. 471 | */ 472 | private pullFromChunkBuffer(limit: number) { 473 | const chunk = this.upstreamChunkBuffer.slice(0, limit); 474 | this.upstreamChunkBuffer = this.upstreamChunkBuffer.slice(limit); 475 | 476 | // notify upstream we've read from the buffer so it can potentially 477 | // send more data down. 478 | process.nextTick(() => this.emit('readFromChunkBuffer')); 479 | 480 | return chunk; 481 | } 482 | 483 | /** 484 | * A handler for determining if data is ready to be read from upstream. 485 | * 486 | * @returns If there will be more chunks to read in the future 487 | */ 488 | private async waitForNextChunk(): Promise { 489 | const willBeMoreChunks = await new Promise(resolve => { 490 | // There's data available - it should be digested 491 | if (this.upstreamChunkBuffer.byteLength) { 492 | return resolve(true); 493 | } 494 | 495 | // The upstream writable ended, we shouldn't expect any more data. 496 | if (this.upstreamEnded) { 497 | return resolve(false); 498 | } 499 | 500 | // Nothing immediate seems to be determined. We need to prepare some 501 | // listeners to determine next steps... 
502 | 503 | const wroteToChunkBufferCallback = () => { 504 | removeListeners(); 505 | return resolve(true); 506 | }; 507 | 508 | const upstreamFinishedCallback = () => { 509 | removeListeners(); 510 | 511 | // this should be the last chunk, if there's anything there 512 | if (this.upstreamChunkBuffer.length) return resolve(true); 513 | 514 | return resolve(false); 515 | }; 516 | 517 | // Remove listeners when we're ready to callback. 518 | // It's important to clean-up listeners as Node has a default max number of 519 | // event listeners. Notably, The number of requests can be greater than the 520 | // number of potential listeners. 521 | // - https://nodejs.org/api/events.html#eventsdefaultmaxlisteners 522 | const removeListeners = () => { 523 | this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); 524 | this.upstream.removeListener('finish', upstreamFinishedCallback); 525 | this.removeListener('prefinish', upstreamFinishedCallback); 526 | }; 527 | 528 | // If there's data recently written it should be digested 529 | this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); 530 | 531 | // If the upstream finishes let's see if there's anything to grab 532 | this.upstream.once('finish', upstreamFinishedCallback); 533 | this.once('prefinish', upstreamFinishedCallback); 534 | }); 535 | 536 | return willBeMoreChunks; 537 | } 538 | 539 | /** 540 | * Reads data from upstream up to the provided `limit`. 541 | * Ends when the limit has reached or no data is expected to be pushed from upstream. 542 | * 543 | * @param limit The most amount of data this iterator should return. `Infinity` by default. 
544 | * @param oneChunkMode Determines if one, exhaustive chunk is yielded for the iterator 545 | */ 546 | private async *upstreamIterator(limit = Infinity, oneChunkMode?: boolean) { 547 | let completeChunk = Buffer.alloc(0); 548 | 549 | // read from upstream chunk buffer 550 | while (limit && (await this.waitForNextChunk())) { 551 | // read until end or limit has been reached 552 | const chunk = this.pullFromChunkBuffer(limit); 553 | 554 | limit -= chunk.byteLength; 555 | if (oneChunkMode) { 556 | // return 1 chunk at the end of iteration 557 | completeChunk = Buffer.concat([completeChunk, chunk]); 558 | } else { 559 | // return many chunks throughout iteration 560 | yield { 561 | chunk, 562 | encoding: this.chunkBufferEncoding, 563 | }; 564 | } 565 | } 566 | 567 | if (oneChunkMode) { 568 | yield { 569 | chunk: completeChunk, 570 | encoding: this.chunkBufferEncoding, 571 | }; 572 | } 573 | } 574 | 575 | createURI(): Promise; 576 | createURI(callback: CreateUriCallback): void; 577 | createURI(callback?: CreateUriCallback): void | Promise { 578 | if (!callback) { 579 | return this.createURIAsync(); 580 | } 581 | this.createURIAsync().then(r => callback(null, r), callback); 582 | } 583 | 584 | protected async createURIAsync(): Promise { 585 | const metadata = this.metadata; 586 | 587 | const reqOpts: GaxiosOptions = { 588 | method: 'POST', 589 | url: [this.baseURI, this.bucket, 'o'].join('/'), 590 | params: Object.assign( 591 | { 592 | name: this.file, 593 | uploadType: 'resumable', 594 | }, 595 | this.params 596 | ), 597 | data: metadata, 598 | headers: {}, 599 | }; 600 | 601 | if (metadata.contentLength) { 602 | reqOpts.headers!['X-Upload-Content-Length'] = 603 | metadata.contentLength.toString(); 604 | } 605 | 606 | if (metadata.contentType) { 607 | reqOpts.headers!['X-Upload-Content-Type'] = metadata.contentType; 608 | } 609 | 610 | if (typeof this.generation !== 'undefined') { 611 | reqOpts.params.ifGenerationMatch = this.generation; 612 | } 613 | 614 | if 
(this.kmsKeyName) { 615 | reqOpts.params.kmsKeyName = this.kmsKeyName; 616 | } 617 | 618 | if (this.predefinedAcl) { 619 | reqOpts.params.predefinedAcl = this.predefinedAcl; 620 | } 621 | 622 | if (this.origin) { 623 | reqOpts.headers!.Origin = this.origin; 624 | } 625 | const uri = await retry( 626 | async (bail: (err: Error) => void) => { 627 | try { 628 | const res = await this.makeRequest(reqOpts); 629 | return res.headers.location; 630 | } catch (err) { 631 | const e = err as GaxiosError; 632 | /* Build a minimal ApiError from the gaxios failure so the user's retryableErrorFn can classify it. */ const apiError = { 633 | code: e.response?.status, 634 | name: e.response?.statusText, 635 | message: e.response?.statusText, 636 | errors: [ 637 | { 638 | reason: e.code as string, 639 | }, 640 | ], 641 | }; 642 | /* NOTE(review): if no retryOptions.retryableErrorFn was supplied, this condition is always false and every error bails immediately — URI creation is then never retried even with retryLimit > 0, unlike the upload itself which retries via attemptDelayedRetry. Confirm this asymmetry is intended. */ if ( 643 | this.retryLimit > 0 && 644 | this.retryableErrorFn && 645 | this.retryableErrorFn!(apiError as ApiError) 646 | ) { 647 | throw e; 648 | } else { 649 | return bail(e); 650 | } 651 | } 652 | }, 653 | { 654 | retries: this.retryLimit, 655 | factor: this.retryDelayMultiplier, 656 | maxTimeout: this.maxRetryDelay! * 1000, //convert to milliseconds 657 | maxRetryTime: this.maxRetryTotalTimeout! * 1000, //convert to milliseconds 658 | } 659 | ); 660 | 661 | this.uri = uri; 662 | this.offset = 0; 663 | return uri; 664 | } 665 | 666 | private async continueUploading() { 667 | /* A known offset means we can resume directly; otherwise ask the server where to resume first. */ if (typeof this.offset === 'number') { 668 | this.startUploading(); 669 | return; 670 | } 671 | await this.getAndSetOffset(); 672 | this.startUploading(); 673 | } 674 | 675 | async startUploading() { 676 | const multiChunkMode = !!this.chunkSize; 677 | let responseReceived = false; 678 | this.numChunksReadInRequest = 0; 679 | 680 | if (!this.offset) { 681 | this.offset = 0; 682 | } 683 | 684 | // Check if we're uploading the expected object 685 | if (this.numBytesWritten === 0) { 686 | const isSameObject = await this.ensureUploadingSameObject(); 687 | if (!isSameObject) { 688 | // `ensureUploadingSameObject` will restart the upload.
689 | return; 690 | } 691 | } 692 | 693 | // Check if the offset (server) is too far behind the current stream 694 | if (this.offset < this.numBytesWritten) { 695 | this.emit( 696 | 'error', 697 | new RangeError('The offset is lower than the number of bytes written') 698 | ); 699 | return; 700 | } 701 | 702 | // Check if we should 'fast-forward' to the relevant data to upload 703 | if (this.numBytesWritten < this.offset) { 704 | // 'fast-forward' to the byte where we need to upload. 705 | // only push data from the byte after the one we left off on 706 | const fastForwardBytes = this.offset - this.numBytesWritten; 707 | 708 | for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { 709 | _chunk; // discard the data up until the point we want 710 | } 711 | 712 | this.numBytesWritten = this.offset; 713 | } 714 | 715 | let expectedUploadSize: number | undefined = undefined; 716 | 717 | // Set `expectedUploadSize` to `contentLength` if available 718 | if (typeof this.contentLength === 'number') { 719 | expectedUploadSize = this.contentLength - this.numBytesWritten; 720 | } 721 | 722 | // `expectedUploadSize` should be no more than the `chunkSize`. 723 | // It's possible this is the last chunk request for a multiple 724 | // chunk upload, thus smaller than the chunk size. 725 | if (this.chunkSize) { 726 | expectedUploadSize = expectedUploadSize 727 | ? Math.min(this.chunkSize, expectedUploadSize) 728 | : this.chunkSize; 729 | } 730 | 731 | // A queue for the upstream data 732 | const upstreamQueue = this.upstreamIterator( 733 | expectedUploadSize, 734 | multiChunkMode // multi-chunk mode should return 1 chunk per request 735 | ); 736 | 737 | // The primary read stream for this request. This stream retrieves no more 738 | // than the exact requested amount from upstream. 
739 | const requestStream = new Readable({ 740 | read: async () => { 741 | // Don't attempt to retrieve data upstream if we already have a response 742 | if (responseReceived) { requestStream.push(null); return; /* must return: awaiting the queue and pushing again after the EOF push(null) would throw "push after EOF" and discard upstream data */ } 743 | 744 | const result = await upstreamQueue.next(); 745 | 746 | if (result.value) { 747 | this.numChunksReadInRequest++; 748 | this.lastChunkSent = result.value.chunk; 749 | this.numBytesWritten += result.value.chunk.byteLength; 750 | 751 | this.emit('progress', { 752 | bytesWritten: this.numBytesWritten, 753 | contentLength: this.contentLength, 754 | }); 755 | 756 | requestStream.push(result.value.chunk, result.value.encoding); 757 | } 758 | 759 | if (result.done) { 760 | requestStream.push(null); 761 | } 762 | }, 763 | }); 764 | 765 | let headers: GaxiosOptions['headers'] = {}; 766 | 767 | // If using multiple chunk upload, set appropriate header 768 | if (multiChunkMode && expectedUploadSize) { 769 | // The '-1' is because the ending byte is inclusive in the request. 770 | const endingByte = expectedUploadSize + this.numBytesWritten - 1; 771 | headers = { 772 | 'Content-Length': expectedUploadSize, 773 | 'Content-Range': `bytes ${this.offset}-${endingByte}/${this.contentLength}`, 774 | }; 775 | } else { 776 | headers = { 777 | 'Content-Range': `bytes ${this.offset}-*/${this.contentLength}`, 778 | }; 779 | } 780 | 781 | const reqOpts: GaxiosOptions = { 782 | method: 'PUT', 783 | url: this.uri, 784 | headers, 785 | body: requestStream, 786 | }; 787 | 788 | try { 789 | const resp = await this.makeRequestStream(reqOpts); 790 | if (resp) { 791 | responseReceived = true; 792 | this.responseHandler(resp); 793 | } 794 | } catch (err) { 795 | const e = err as Error; 796 | this.destroy(e); 797 | } 798 | } 799 | 800 | // Process the API response to look for errors that came in 801 | // the response body.
802 | private responseHandler(resp: GaxiosResponse) { 803 | if (resp.data.error) { 804 | this.destroy(resp.data.error); 805 | return; 806 | } 807 | 808 | const shouldContinueWithNextMultiChunkRequest = 809 | this.chunkSize && 810 | resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && 811 | resp.headers.range; 812 | 813 | if (shouldContinueWithNextMultiChunkRequest) { 814 | // Use the upper value in this header to determine where to start the next chunk. 815 | // We should not assume that the server received all bytes sent in the request. 816 | // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload 817 | const range: string = resp.headers.range; 818 | this.offset = Number(range.split('-')[1]) + 1; 819 | 820 | // We should not assume that the server received all bytes sent in the request. 821 | // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload 822 | const missingBytes = this.numBytesWritten - this.offset; 823 | if (missingBytes) { 824 | const dataToPrependForResending = this.lastChunkSent.slice( 825 | -missingBytes 826 | ); 827 | // As multi-chunk uploads send one chunk per request and pulls one 828 | // chunk into the pipeline, prepending the missing bytes back should 829 | // be fine for the next request. 
830 | this.unshiftChunkBuffer(dataToPrependForResending); 831 | this.numBytesWritten -= missingBytes; 832 | this.lastChunkSent = Buffer.alloc(0); 833 | } 834 | 835 | // continue uploading next chunk 836 | this.continueUploading(); 837 | } else if (!this.isSuccessfulResponse(resp.status)) { 838 | const err: ApiError = { 839 | code: resp.status, 840 | name: 'Upload failed', 841 | message: 'Upload failed', 842 | }; 843 | this.destroy(err); 844 | } else { 845 | // remove the last chunk sent 846 | this.lastChunkSent = Buffer.alloc(0); 847 | 848 | if (resp && resp.data) { 849 | resp.data.size = Number(resp.data.size); 850 | } 851 | this.emit('metadata', resp.data); 852 | this.deleteConfig(); 853 | 854 | // Allow the object (Upload) to continue naturally so the user's 855 | // "finish" event fires. 856 | this.emit('prepareFinish'); 857 | } 858 | } 859 | 860 | /** 861 | * Check if this is the same content uploaded previously. This caches a 862 | * slice of the first chunk, then compares it with the first byte of 863 | * incoming data. 864 | * 865 | * @returns if the request is ok to continue as-is 866 | */ 867 | private async ensureUploadingSameObject() { 868 | // A queue for the upstream data 869 | const upstreamQueue = this.upstreamIterator( 870 | 16, 871 | true // we just want one chunk for this validation 872 | ); 873 | 874 | const upstreamChunk = await upstreamQueue.next(); 875 | const chunk = upstreamChunk.value 876 | ? upstreamChunk.value.chunk 877 | : Buffer.alloc(0); 878 | 879 | // Put the original chunk back into the buffer as we just wanted to 'peek' 880 | // at the stream for validation. 881 | this.unshiftChunkBuffer(chunk); 882 | 883 | let cachedFirstChunk = this.get('firstChunk'); 884 | const firstChunk = chunk.valueOf(); 885 | 886 | if (!cachedFirstChunk) { 887 | // This is a new upload. Cache the first chunk. 888 | this.set({uri: this.uri, firstChunk}); 889 | } else { 890 | // this continues an upload in progress. 
check if the bytes are the same 891 | cachedFirstChunk = Buffer.from(cachedFirstChunk); 892 | const nextChunk = Buffer.from(firstChunk); 893 | if (Buffer.compare(cachedFirstChunk, nextChunk) !== 0) { 894 | // this data is not the same. start a new upload 895 | this.restart(); 896 | return false; 897 | } 898 | } 899 | 900 | return true; 901 | } 902 | 903 | private async getAndSetOffset() { 904 | const opts: GaxiosOptions = { 905 | method: 'PUT', 906 | url: this.uri!, 907 | headers: {'Content-Length': 0, 'Content-Range': 'bytes */*'}, 908 | }; 909 | try { 910 | const resp = await this.makeRequest(opts); 911 | if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { 912 | if (resp.headers.range) { 913 | const range = resp.headers.range as string; 914 | this.offset = Number(range.split('-')[1]) + 1; 915 | return; 916 | } 917 | } 918 | this.offset = 0; 919 | } catch (e) { 920 | const err = e as GaxiosError; 921 | const resp = err.response; 922 | // we don't return a 404 to the user if they provided the resumable 923 | // URI. if we're just using the configstore file to tell us that this 924 | // file exists, and it turns out that it doesn't (the 404), that's 925 | // probably stale config data. 926 | if ( 927 | resp && 928 | resp.status === NOT_FOUND_STATUS_CODE && 929 | !this.uriProvidedManually 930 | ) { 931 | this.restart(); 932 | return; 933 | } 934 | 935 | // this resumable upload is unrecoverable (bad data or service error). 
936 | // - 937 | // https://github.com/googleapis/gcs-resumable-upload/issues/15 938 | // - 939 | // https://github.com/googleapis/gcs-resumable-upload/pull/16#discussion_r80363774 940 | if (resp && resp.status === TERMINATED_UPLOAD_STATUS_CODE) { 941 | this.restart(); 942 | return; 943 | } 944 | 945 | this.destroy(err); 946 | } 947 | } 948 | 949 | private async makeRequest(reqOpts: GaxiosOptions): GaxiosPromise { 950 | if (this.encryption) { 951 | reqOpts.headers = reqOpts.headers || {}; 952 | reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; 953 | reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); 954 | reqOpts.headers['x-goog-encryption-key-sha256'] = 955 | this.encryption.hash.toString(); 956 | } 957 | 958 | if (this.userProject) { 959 | reqOpts.params = reqOpts.params || {}; 960 | reqOpts.params.userProject = this.userProject; 961 | } 962 | // Let gaxios know we will handle a 308 error code ourselves. 963 | reqOpts.validateStatus = (status: number) => { 964 | return ( 965 | this.isSuccessfulResponse(status) || 966 | status === RESUMABLE_INCOMPLETE_STATUS_CODE 967 | ); 968 | }; 969 | 970 | const combinedReqOpts = extend( 971 | true, 972 | {}, 973 | this.customRequestOptions, 974 | reqOpts 975 | ); 976 | const res = await this.authClient.request<{error?: object}>( 977 | combinedReqOpts 978 | ); 979 | if (res.data && res.data.error) { 980 | throw res.data.error; 981 | } 982 | return res; 983 | } 984 | 985 | private async makeRequestStream(reqOpts: GaxiosOptions) { 986 | const controller = new AbortController(); 987 | const errorCallback = () => controller.abort(); 988 | this.once('error', errorCallback); 989 | 990 | if (this.userProject) { 991 | reqOpts.params = reqOpts.params || {}; 992 | reqOpts.params.userProject = this.userProject; 993 | } 994 | reqOpts.signal = controller.signal; 995 | reqOpts.validateStatus = () => true; 996 | 997 | const combinedReqOpts = extend( 998 | true, 999 | {}, 1000 | this.customRequestOptions, 1001 
| reqOpts 1002 | ); 1003 | const res = await this.authClient.request(combinedReqOpts); 1004 | const successfulRequest = this.onResponse(res); 1005 | this.removeListener('error', errorCallback); 1006 | 1007 | return successfulRequest ? res : null; 1008 | } 1009 | 1010 | private restart() { 1011 | if (this.numBytesWritten) { 1012 | let message = 1013 | 'Attempting to restart an upload after unrecoverable bytes have been written from upstream. '; 1014 | message += 'Stopping as this could result in data loss. '; 1015 | message += 'Create a new upload object to continue.'; 1016 | 1017 | this.emit('error', new RangeError(message)); 1018 | return; 1019 | } 1020 | 1021 | this.lastChunkSent = Buffer.alloc(0); 1022 | this.deleteConfig(); 1023 | this.createURI((err, uri) => { 1024 | if (err) { 1025 | return this.destroy(err); 1026 | } 1027 | this.set({uri}); 1028 | this.startUploading(); 1029 | return; 1030 | }); 1031 | } 1032 | 1033 | private get(prop: string) { 1034 | const store = this.configStore.get(this.cacheKey); 1035 | return store && store[prop]; 1036 | } 1037 | 1038 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 1039 | private set(props: any) { 1040 | this.configStore.set(this.cacheKey, props); 1041 | } 1042 | 1043 | deleteConfig() { 1044 | this.configStore.delete(this.cacheKey); 1045 | } 1046 | 1047 | /** 1048 | * @return {bool} is the request good? 
1049 | */ 1050 | private onResponse(resp: GaxiosResponse) { 1051 | if ( 1052 | (this.retryableErrorFn && 1053 | this.retryableErrorFn({ 1054 | code: resp.status, 1055 | message: resp.statusText, 1056 | name: resp.statusText, 1057 | })) || 1058 | resp.status === NOT_FOUND_STATUS_CODE || 1059 | this.isServerErrorResponse(resp.status) 1060 | ) { 1061 | this.attemptDelayedRetry(resp); 1062 | return false; 1063 | } 1064 | 1065 | this.emit('response', resp); 1066 | return true; 1067 | } 1068 | 1069 | /** 1070 | * @param resp GaxiosResponse object from previous attempt 1071 | */ 1072 | private attemptDelayedRetry(resp: GaxiosResponse) { 1073 | if (this.numRetries < this.retryLimit) { 1074 | if ( 1075 | resp.status === NOT_FOUND_STATUS_CODE && 1076 | this.numChunksReadInRequest === 0 1077 | ) { 1078 | this.startUploading(); 1079 | } else { 1080 | const retryDelay = this.getRetryDelay(); 1081 | 1082 | if (retryDelay <= 0) { 1083 | this.destroy( 1084 | new Error(`Retry total time limit exceeded - ${resp.data}`) 1085 | ); 1086 | return; 1087 | } 1088 | 1089 | // Unshift the most recent chunk back in case it's needed for the next 1090 | // request. 1091 | this.numBytesWritten -= this.lastChunkSent.byteLength; 1092 | this.unshiftChunkBuffer(this.lastChunkSent); 1093 | this.lastChunkSent = Buffer.alloc(0); 1094 | 1095 | // We don't know how much data has been received by the server. 1096 | // `continueUploading` will recheck the offset via `getAndSetOffset`. 1097 | // If `offset` < `numberBytesReceived` then we will raise a RangeError 1098 | // as we've streamed too much data that has been missed - this should 1099 | // not be the case for multi-chunk uploads as `lastChunkSent` is the 1100 | // body of the entire request. 
1101 | this.offset = undefined; 1102 | 1103 | setTimeout(this.continueUploading.bind(this), retryDelay); 1104 | } 1105 | this.numRetries++; 1106 | } else { 1107 | this.destroy(new Error('Retry limit exceeded - ' + resp.data)); 1108 | } 1109 | } 1110 | 1111 | /** 1112 | * @returns {number} the amount of time to wait before retrying the request 1113 | */ 1114 | private getRetryDelay(): number { 1115 | const randomMs = Math.round(Math.random() * 1000); 1116 | const waitTime = 1117 | Math.pow(this.retryDelayMultiplier, this.numRetries) * 1000 + randomMs; 1118 | const maxAllowableDelayMs = 1119 | this.maxRetryTotalTimeout * 1000 - (Date.now() - this.timeOfFirstRequest); 1120 | const maxRetryDelayMs = this.maxRetryDelay * 1000; 1121 | 1122 | return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); 1123 | } 1124 | 1125 | /* 1126 | * Prepare user-defined API endpoint for compatibility with our API. 1127 | */ 1128 | private sanitizeEndpoint(url: string) { 1129 | if (!PROTOCOL_REGEX.test(url)) { 1130 | url = `https://${url}`; 1131 | } 1132 | return url.replace(/\/+$/, ''); // Remove trailing slashes 1133 | } 1134 | 1135 | /** 1136 | * Check if a given status code is 2xx 1137 | * 1138 | * @param status The status code to check 1139 | * @returns if the status is 2xx 1140 | */ 1141 | public isSuccessfulResponse(status: number): boolean { 1142 | return status >= 200 && status < 300; 1143 | } 1144 | 1145 | /** 1146 | * Check if a given status code is 5xx 1147 | * 1148 | * @param status The status code to check 1149 | * @returns if the status is 5xx 1150 | */ 1151 | public isServerErrorResponse(status: number): boolean { 1152 | return status >= 500 && status < 600; 1153 | } 1154 | } 1155 | 1156 | export function upload(cfg: UploadConfig) { 1157 | return new Upload(cfg); 1158 | } 1159 | 1160 | export function createURI(cfg: UploadConfig): Promise; 1161 | export function createURI(cfg: UploadConfig, callback: CreateUriCallback): void; 1162 | export function createURI( 1163 | 
cfg: UploadConfig, 1164 | callback?: CreateUriCallback 1165 | ): void | Promise { 1166 | const up = new Upload(cfg); 1167 | if (!callback) { 1168 | return up.createURI(); 1169 | } 1170 | up.createURI().then(r => callback(null, r), callback); 1171 | } 1172 | -------------------------------------------------------------------------------- /system-test/kitchen.ts: -------------------------------------------------------------------------------- 1 | /*! 2 | * Copyright 2018 Google LLC 3 | * 4 | * Use of this source code is governed by an MIT-style 5 | * license that can be found in the LICENSE file or at 6 | * https://opensource.org/licenses/MIT. 7 | */ 8 | 9 | import * as assert from 'assert'; 10 | import {describe, it, beforeEach} from 'mocha'; 11 | import * as fs from 'fs'; 12 | import * as path from 'path'; 13 | import * as os from 'os'; 14 | import {Readable} from 'stream'; 15 | import {createURI, ErrorWithCode, upload} from '../src'; 16 | import delay from './util'; 17 | 18 | const bucketName = process.env.BUCKET_NAME || 'gcs-resumable-upload-test'; 19 | const fileName = '20MB.zip'; 20 | 21 | describe('end to end', () => { 22 | beforeEach(() => { 23 | upload({bucket: bucketName, file: fileName}).deleteConfig(); 24 | }); 25 | 26 | it('should work', done => { 27 | let uploadSucceeded = false; 28 | fs.createReadStream(fileName) 29 | .on('error', done) 30 | .pipe( 31 | upload({ 32 | bucket: bucketName, 33 | file: fileName, 34 | metadata: {contentType: 'image/jpg'}, 35 | }) 36 | ) 37 | .on('error', done) 38 | .on('response', resp => { 39 | uploadSucceeded = resp.status === 200; 40 | }) 41 | .on('finish', () => { 42 | assert.strictEqual(uploadSucceeded, true); 43 | done(); 44 | }); 45 | }); 46 | 47 | let retries = 0; 48 | it('should resume an interrupted upload', function (done) { 49 | this.retries(3); 50 | delay(this.test!.title, retries, () => { 51 | retries++; 52 | // If we've retried, delay. 
53 | fs.stat(fileName, (err, fd) => { 54 | assert.ifError(err); 55 | 56 | const size = fd.size; 57 | 58 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 59 | type DoUploadCallback = (...args: any[]) => void; 60 | const doUpload = ( 61 | opts: {interrupt?: boolean}, 62 | callback: DoUploadCallback 63 | ) => { 64 | let sizeStreamed = 0; 65 | let destroyed = false; 66 | 67 | const ws = upload({ 68 | bucket: bucketName, 69 | file: fileName, 70 | metadata: {contentType: 'image/jpg'}, 71 | }); 72 | 73 | fs.createReadStream(fileName) 74 | .on('error', callback) 75 | .on('data', function (this: Readable, chunk) { 76 | sizeStreamed += chunk.length; 77 | 78 | if (!destroyed && opts.interrupt && sizeStreamed >= size / 2) { 79 | // stop sending data half way through 80 | destroyed = true; 81 | this.destroy(); 82 | process.nextTick(() => ws.destroy(new Error('Interrupted'))); 83 | } 84 | }) 85 | .pipe(ws) 86 | .on('error', callback) 87 | .on('metadata', callback.bind(null, null)); 88 | }; 89 | 90 | doUpload({interrupt: true}, (err: Error) => { 91 | assert.strictEqual(err.message, 'Interrupted'); 92 | 93 | doUpload( 94 | {interrupt: false}, 95 | (err: Error, metadata: {size: number}) => { 96 | assert.ifError(err); 97 | assert.strictEqual(metadata.size, size); 98 | assert.strictEqual(typeof metadata.size, 'number'); 99 | done(); 100 | } 101 | ); 102 | }); 103 | }); 104 | }); 105 | }); 106 | 107 | it('should just make an upload URI', done => { 108 | createURI( 109 | { 110 | bucket: bucketName, 111 | file: fileName, 112 | metadata: {contentType: 'image/jpg'}, 113 | }, 114 | done 115 | ); 116 | }); 117 | 118 | it('should return a non-resumable failed upload', done => { 119 | const metadata = { 120 | metadata: {largeString: 'a'.repeat(2.1e6)}, 121 | }; 122 | 123 | fs.createReadStream(fileName) 124 | .on('error', done) 125 | .pipe( 126 | upload({ 127 | bucket: bucketName, 128 | file: fileName, 129 | metadata, 130 | }) 131 | ) 132 | .on('error', (err: ErrorWithCode) => 
{ 133 | assert.strictEqual(err.status, 400); 134 | done(); 135 | }); 136 | }); 137 | 138 | it('should set custom config file', done => { 139 | const uploadOptions = { 140 | bucket: bucketName, 141 | file: fileName, 142 | metadata: {contentType: 'image/jpg'}, 143 | configPath: path.join( 144 | os.tmpdir(), 145 | `test-gcs-resumable-${Date.now()}.json` 146 | ), 147 | }; 148 | let uploadSucceeded = false; 149 | 150 | fs.createReadStream(fileName) 151 | .on('error', done) 152 | .pipe(upload(uploadOptions)) 153 | .on('error', done) 154 | .on('response', resp => { 155 | uploadSucceeded = resp.status === 200; 156 | }) 157 | .on('finish', () => { 158 | assert.strictEqual(uploadSucceeded, true); 159 | 160 | const configData = JSON.parse( 161 | fs.readFileSync(uploadOptions.configPath, 'utf8') 162 | ); 163 | const keyName = `${uploadOptions.bucket}/${uploadOptions.file}`.replace( 164 | path.extname(fileName), 165 | '' 166 | ); 167 | assert.ok(Object.keys(configData).includes(keyName)); 168 | done(); 169 | }); 170 | }); 171 | }); 172 | -------------------------------------------------------------------------------- /system-test/util.ts: -------------------------------------------------------------------------------- 1 | /*! 2 | * Copyright 2020 Google LLC 3 | * 4 | * Use of this source code is governed by an MIT-style 5 | * license that can be found in the LICENSE file or at 6 | * https://opensource.org/licenses/MIT. 7 | */ 8 | export default async function delay( 9 | title: string, 10 | retries: number, 11 | done: Function 12 | ) { 13 | if (retries === 0) return done(); // no retry on the first failure. 
14 | // see: https://cloud.google.com/storage/docs/exponential-backoff: 15 | const ms = Math.pow(2, retries) * 1000 + Math.random() * 2000; 16 | console.info(`retrying "${title}" in ${ms}ms`); 17 | setTimeout(done, ms); 18 | } 19 | -------------------------------------------------------------------------------- /test/fixtures/keys.json: -------------------------------------------------------------------------------- 1 | { 2 | "type": "service_account", 3 | "project_id": "project-id", 4 | "private_key_id": "12345", 5 | "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC5z21IhrvlHBj7\nifRhobA9ibn25Od7DpE5OauGmqy7B+A9LQOsk1ZujAFdHItnBPcjihSVHpiYxf1a\nLpFbM8z/hRvDvYS3Hs1pyRejmpGiznoOjCyUf6Wv3T1xKelbgn0twHHjqD1o0xzW\njyUILl7yuCbsAf8QlsV6ewS3IqO3i5A9RNHfKjeap8e6A7U3s9QBtR58RrxaMQpM\nz72gw7yOdJRfElkerQfyZTbtu/EBfE6CcskOyoMoRN3YgkjQqLMr1yVdL5/phEcQ\n5hpbDN5lrafGHN7FUtsrMge2iIuIYFfWQUTqu7HtnNXVmwj1LJNq5WeI1iInWaGz\nb7c1rUT9AgMBAAECggEAEB0FicqVX3L7qk9LsBkeItgKFnfB/eaaKsTuM7K/fqCv\njjPpzlIgprQ20g+i+dYbuytC9Fjo5tFp/SNuBji3Ha7kuil56Yoe9NOJh0M6P0zP\nQj+0W1Rj1p0bB5tDhLoLh6eEDjgNde+zioUeCFhCck4MogmHnbVVfspNnba/99oD\nl36heAioqj/KODdkbQ83+ByiH+3BzqblqJ4TR/3y0wOUXtlQvCHko1qximJFIM0z\n3TNoPiit74hTiFFOYfJyHpmRsiEJ5FUUImkmCJz2gk4fbpafKrgxxOMo1m7GqlsE\nE+ybHxyAq61HYbZOoUOO8B4md1/52QXP7DgPvV7JyQKBgQD+JS5nsR4TXRl61c9G\nNxoPW9yCMCoarIjkpyPmhh0uJ7y68cj9wHFgX6ATi1QuTnG9BzJ4z27PMgvv70N+\nAK6k74sdIT2ts8wYsD8H0UyuxDxeKiAnb2JW2f5GTcXNmELQi6rKkMNMoS8jv00d\ngzLCV7UbCbdf+ng9uRPs+Fvk9wKBgQC7KpNaeYFf5dmIYRWQhlZWBRoftdm1ROH/\n5GJsURkzlEjUH1g1y9eAigBn5I+Z9hylX2q1vHLpUHqONWwDz8oQ1L1o2iLz+tkp\nkNoaLSAb9uCl6t8tpqCG2dqUrxOmy1+xj3G8KI8XuYb+IwVSy6KK2df8fWN4d+i0\ng+TBb75MqwKBgEezwcXriKq554hqblJHFYkjx7DLWfWwm+a26UAOsojlGTA9KxG8\ni8A++nDJLHTsGNbWAv1muMKoQgntnUMdeih6lOshB7/MLFcC0qWn/VSJdOa0R+IY\nYMxUMJMxOg9pV+BypzsDYLZr+1rAjEc5TsbZ6/S25w+jIO15HBANeg+9AoGAZulz\nGkVDCLq2UJGpLM1gvW2Svqrb6RrV9UDbiVlSNRUssk4Fz5akiM3YiUeYWfyEJb4A\nS6sxt+4DZRwkpzfikDyZZQTEQUjFjWBTPB9hz16AiVpKm
qxLCbrRv/1AHe8nT9di\nnyXiABaIDkatT6geWKCNbQx43C16a382EdJiXX8CgYEAqyAS2xuDi2+uoljRm1Bp\naz7Q2UBtBbcBr/CQmagEacWPXsSyCL6EySOH0e985k7ABZiW+AzWlOwKS5WMWAIb\nkncmxP0SU6WQDWl8xGbXAQ8Dw+HTu5G1n0vrl1rRO5FPwRs3pbV94ML+d5eoai6D\njHs1asOGIpdQ3OGpBpNRub0=\n-----END PRIVATE KEY-----\n", 6 | "client_email": "some-email@example.com", 7 | "client_id": "12345", 8 | "auth_uri": "https://accounts.google.com/o/oauth2/auth", 9 | "token_uri": "https://accounts.google.com/o/oauth2/token", 10 | "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", 11 | "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/el-gato%40el-gato.iam.gserviceaccount.com" 12 | } 13 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "lib": ["es2018", "dom"], 5 | "rootDir": ".", 6 | "outDir": "build" 7 | }, 8 | "include": [ 9 | "src/*.ts", 10 | "test/*.ts", 11 | "system-test/*.ts" 12 | ] 13 | } 14 | --------------------------------------------------------------------------------