├── .github
├── dependabot.yml
└── workflows
│ ├── draft-release.yml
│ ├── integration.yml
│ ├── publish.yml
│ ├── release.yml
│ └── unit.yml
├── .gitignore
├── .prettierrc.js
├── CHANGELOG.md
├── CODEOWNERS
├── LICENSE
├── README.md
├── action.yml
├── bin
└── runTests.sh
├── dist
└── index.js
├── eslint.config.mjs
├── package-lock.json
├── package.json
├── src
├── client.ts
├── headers.ts
├── main.ts
└── util.ts
├── tests
├── client.int.test.ts
├── client.test.ts
├── headers.test.ts
├── helpers.test.ts
├── main.int.test.ts
├── main.test.ts
├── testdata-unicode
│ └── 🚀
├── testdata
│ ├── nested1
│ │ ├── nested2
│ │ │ └── test3.txt
│ │ └── test1.txt
│ ├── test.css
│ ├── test.js
│ ├── test.json
│ ├── test1.txt
│ ├── test2.txt
│ └── testfile
└── util.test.ts
└── tsconfig.json
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: 'npm'
4 | directory: '/'
5 | rebase-strategy: 'disabled'
6 | schedule:
7 | interval: 'daily'
8 | commit-message:
9 | prefix: 'security: '
10 | open-pull-requests-limit: 0 # only check security updates
11 |
--------------------------------------------------------------------------------
/.github/workflows/draft-release.yml:
--------------------------------------------------------------------------------
1 | name: 'Draft release'
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | version_strategy:
7 |       description: 'Version strategy: The strategy used to update the version based on semantic versioning (more info at https://semver.org/).'
8 | required: true
9 | default: 'patch'
10 | type: 'choice'
11 | options:
12 | - 'major'
13 | - 'minor'
14 | - 'patch'
15 |
16 | jobs:
17 | draft-release:
18 | uses: 'google-github-actions/.github/.github/workflows/draft-release.yml@v3' # ratchet:exclude
19 | with:
20 | version_strategy: '${{ github.event.inputs.version_strategy }}'
21 | secrets:
22 | ACTIONS_BOT_TOKEN: '${{ secrets.ACTIONS_BOT_TOKEN }}'
23 |
--------------------------------------------------------------------------------
/.github/workflows/integration.yml:
--------------------------------------------------------------------------------
1 | name: 'Integration'
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'main'
7 | - 'release/**/*'
8 | pull_request:
9 | branches:
10 | - 'main'
11 | - 'release/**/*'
12 | workflow_dispatch:
13 |
14 | concurrency:
15 | group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}'
16 | cancel-in-progress: true
17 |
18 | permissions:
19 | contents: 'read'
20 | id-token: 'write'
21 |
22 | jobs:
23 | integration:
24 | runs-on: 'ubuntu-latest'
25 |
26 | steps:
27 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4
28 |
29 | - uses: 'actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a' # ratchet:actions/setup-node@v4
30 | with:
31 | node-version: '20.x'
32 |
33 | - name: 'npm build'
34 | run: 'npm ci && npm run build'
35 |
36 | - uses: 'google-github-actions/auth@v2' # ratchet:exclude
37 | with:
38 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}'
39 | service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}'
40 |
41 | - name: 'Create files'
42 | run: |-
43 | mkdir -p test
44 | touch test/test1.txt
45 | touch test/test2.txt
46 |
47 | - id: 'upload'
48 | name: 'Upload files'
49 | uses: './'
50 | with:
51 | path: './test'
52 | destination: '${{ vars.BUCKET_NAME }}/testprefix'
53 |
54 | - name: 'Get output'
55 | run: 'echo "${{ steps.upload.outputs.uploaded }}"'
56 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: 'Publish immutable action version'
2 |
3 | on:
4 | workflow_dispatch:
5 | release:
6 | types:
7 | - 'published'
8 |
9 | jobs:
10 | publish:
11 | runs-on: 'ubuntu-latest'
12 | permissions:
13 | contents: 'read'
14 | id-token: 'write'
15 | packages: 'write'
16 |
17 | steps:
18 | - name: 'Checkout'
19 | uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4
20 |
21 | - name: 'Publish'
22 | id: 'publish'
23 | uses: 'actions/publish-immutable-action@4bc8754ffc40f27910afb20287dbbbb675a4e978' # ratchet:actions/publish-immutable-action@v0.0.4
24 | with:
25 | github-token: '${{ secrets.GITHUB_TOKEN }}'
26 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: 'Release'
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'main'
7 | - 'release/**/*'
8 |
9 | jobs:
10 | release:
11 | uses: 'google-github-actions/.github/.github/workflows/release.yml@v3' # ratchet:exclude
12 | secrets:
13 | ACTIONS_BOT_TOKEN: '${{ secrets.ACTIONS_BOT_TOKEN }}'
14 |
--------------------------------------------------------------------------------
/.github/workflows/unit.yml:
--------------------------------------------------------------------------------
1 | name: 'Unit'
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'main'
7 | - 'release/**/*'
8 | pull_request:
9 | branches:
10 | - 'main'
11 | - 'release/**/*'
12 | workflow_dispatch:
13 |
14 | concurrency:
15 | group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}'
16 | cancel-in-progress: true
17 |
18 | jobs:
19 | unit:
20 | runs-on: '${{ matrix.os }}'
21 |
22 | permissions:
23 | contents: 'read'
24 | id-token: 'write'
25 |
26 | strategy:
27 | fail-fast: false
28 | matrix:
29 | os:
30 | - 'ubuntu-latest'
31 | - 'windows-latest'
32 | - 'macos-latest'
33 |
34 | steps:
35 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4
36 |
37 | - uses: 'actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a' # ratchet:actions/setup-node@v4
38 | with:
39 | node-version: '20.12.x' # https://github.com/nodejs/node/issues/53033
40 |
41 | - name: 'npm build'
42 | run: 'npm ci && npm run build'
43 |
44 | - name: 'npm lint'
45 | # There's no need to run the linter for each operating system, since it
46 | # will find the same thing 3x and clog up the PR review.
47 | if: ${{ matrix.os == 'ubuntu-latest' }}
48 | run: 'npm run lint'
49 |
50 | # Only authenticate if this is a full CI run.
51 | - if: ${{ github.event_name == 'push' || github.repository == github.event.pull_request.head.repo.full_name }}
52 | uses: 'google-github-actions/auth@v2' # ratchet:exclude
53 | with:
54 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}'
55 | service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}'
56 |
57 | # The secrets will only be injected in pushes to main or from maintainers.
58 | # If they aren't present, the associated steps are skipped.
59 | - name: 'npm test'
60 | run: 'npm run test'
61 | env:
62 | UPLOAD_CLOUD_STORAGE_TEST_BUCKET: '${{ vars.BUCKET_NAME }}'
63 | UPLOAD_CLOUD_STORAGE_TEST_PROJECT: '${{ vars.PROJECT_ID }}'
64 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Dependency directory
2 | node_modules
3 |
4 | # Rest pulled from https://github.com/github/gitignore/blob/main/Node.gitignore
5 | # Logs
6 | logs
7 | *.log
8 | npm-debug.log*
9 | yarn-debug.log*
10 | yarn-error.log*
11 | lerna-debug.log*
12 |
13 | # Diagnostic reports (https://nodejs.org/api/report.html)
14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
15 |
16 | # Runtime data
17 | pids
18 | *.pid
19 | *.seed
20 | *.pid.lock
21 |
22 | # Directory for instrumented libs generated by jscoverage/JSCover
23 | lib-cov
24 |
25 | # Coverage directory used by tools like istanbul
26 | coverage
27 | *.lcov
28 |
29 | # nyc test coverage
30 | .nyc_output
31 |
32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
33 | .grunt
34 |
35 | # Bower dependency directory (https://bower.io/)
36 | bower_components
37 |
38 | # node-waf configuration
39 | .lock-wscript
40 |
41 | # Compiled binary addons (https://nodejs.org/api/addons.html)
42 | build/Release
43 |
44 | # Dependency directories
45 | jspm_packages/
46 |
47 | # TypeScript v1 declaration files
48 | typings/
49 |
50 | # TypeScript cache
51 | *.tsbuildinfo
52 |
53 | # Optional npm cache directory
54 | .npm
55 |
56 | # Optional eslint cache
57 | .eslintcache
58 |
59 | # Optional REPL history
60 | .node_repl_history
61 |
62 | # Output of 'npm pack'
63 | *.tgz
64 |
65 | # Yarn Integrity file
66 | .yarn-integrity
67 |
68 | # dotenv environment variables file
69 | .env
70 | .env.test
71 |
72 | # parcel-bundler cache (https://parceljs.org/)
73 | .cache
74 |
75 | # next.js build output
76 | .next
77 |
78 | # nuxt.js build output
79 | .nuxt
80 |
81 | # vuepress build output
82 | .vuepress/dist
83 |
84 | # Serverless directories
85 | .serverless/
86 |
87 | # FuseBox cache
88 | .fusebox/
89 |
90 | # DynamoDB Local files
91 | .dynamodb/
92 |
93 | # OS metadata
94 | .DS_Store
95 | Thumbs.db
96 |
97 | # Ignore built ts files
98 | __tests__/runner/*
99 | lib/**/*
100 |
--------------------------------------------------------------------------------
/.prettierrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | arrowParens: 'always',
3 | bracketSpacing: true,
4 | endOfLine: 'auto',
5 | jsxSingleQuote: true,
6 | printWidth: 100,
7 | quoteProps: 'consistent',
8 | semi: true,
9 | singleQuote: true,
10 | tabWidth: 2,
11 | trailingComma: 'all',
12 | useTabs: false,
13 | };
14 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | Changelogs for each release are located on the [releases page](https://github.com/google-github-actions/upload-cloud-storage/releases).
4 |
5 |
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @google-github-actions/maintainers
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # upload-cloud-storage
2 |
3 | The `upload-cloud-storage` GitHub Action uploads files to a [Google Cloud
4 | Storage (GCS)][gcs] bucket.
5 |
6 | Paths to files that are successfully uploaded are set as output variables and
7 | can be used in subsequent steps.
8 |
9 | **This is not an officially supported Google product, and it is not covered by a
10 | Google Cloud support contract. To report bugs or request features in a Google
11 | Cloud product, please contact [Google Cloud
12 | support](https://cloud.google.com/support).**
13 |
14 | ## Prerequisites
15 |
16 | - This action requires Google Cloud credentials that are authorized to upload
17 | blobs to the specified bucket. See the [Authorization](#authorization)
18 | section below for more information.
19 |
20 | -   This action runs using Node 20. If you are using self-hosted GitHub Actions
21 |     runners, you must use a [runner version](https://github.com/actions/runner/releases)
22 |     that supports the Node 20 runtime.
23 |
24 | ## Usage
25 |
26 | > **⚠️ WARNING!** The Node.js runtime has [known issues with unicode characters
27 | > in filepaths on Windows][nodejs-unicode-windows]. There is nothing we can do
28 | > to fix this issue in our GitHub Action. If you use unicode or special
29 | > characters in your filenames, please use `gsutil` or `gcloud` to upload
30 | > instead.
31 |
32 | ### For uploading a file
33 |
34 | ```yaml
35 | jobs:
36 | job_id:
37 | permissions:
38 | contents: 'read'
39 | id-token: 'write'
40 |
41 | steps:
42 | - id: 'checkout'
43 | uses: 'actions/checkout@v4'
44 |
45 | - id: 'auth'
46 | uses: 'google-github-actions/auth@v2'
47 | with:
48 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider'
49 | service_account: 'my-service-account@my-project.iam.gserviceaccount.com'
50 |
51 | - id: 'upload-file'
52 | uses: 'google-github-actions/upload-cloud-storage@v2'
53 | with:
54 | path: '/path/to/file'
55 | destination: 'bucket-name'
56 |
57 | # Example of using the output
58 | - id: 'uploaded-files'
59 | uses: 'foo/bar@v1'
60 | env:
61 | file: '${{ steps.upload-file.outputs.uploaded }}'
62 | ```
63 |
64 | The file will be uploaded to `gs://bucket-name/file`
65 |
66 | ### For uploading a folder
67 |
68 | ```yaml
69 | jobs:
70 | job_id:
71 | permissions:
72 | contents: 'read'
73 | id-token: 'write'
74 |
75 | steps:
76 | - id: 'checkout'
77 | uses: 'actions/checkout@v4'
78 |
79 | - id: 'auth'
80 | uses: 'google-github-actions/auth@v2'
81 | with:
82 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider'
83 | service_account: 'my-service-account@my-project.iam.gserviceaccount.com'
84 |
85 | - id: 'upload-folder'
86 | uses: 'google-github-actions/upload-cloud-storage@v2'
87 | with:
88 | path: '/path/to/folder'
89 | destination: 'bucket-name'
90 |
91 | # Example of using the output
92 | - id: 'uploaded-files'
93 | uses: 'foo/bar@v1'
94 | env:
95 | files: '${{ steps.upload-folder.outputs.uploaded }}'
96 | ```
97 |
98 | ## Destination Filenames
99 |
100 | If the folder has the following structure:
101 |
102 | ```text
103 | .
104 | └── myfolder
105 | ├── file1
106 | └── folder2
107 | └── file2.txt
108 | ```
109 |
110 | ### Default Configuration
111 |
112 | With default configuration
113 |
114 | ```yaml
115 | - id: 'upload-files'
116 | uses: 'google-github-actions/upload-cloud-storage@v2'
117 | with:
118 | path: 'myfolder'
119 | destination: 'bucket-name'
120 | ```
121 |
122 | The files will be uploaded to `gs://bucket-name/myfolder/file1`,`gs://bucket-name/myfolder/folder2/file2.txt`
123 |
124 | Optionally, you can also specify a prefix in destination.
125 |
126 | ```yaml
127 | - id: 'upload-files'
128 | uses: 'google-github-actions/upload-cloud-storage@v2'
129 | with:
130 | path: 'myfolder'
131 | destination: 'bucket-name/myprefix'
132 | ```
133 |
134 | The files will be uploaded to `gs://bucket-name/myprefix/myfolder/file1`,`gs://bucket-name/myprefix/myfolder/folder2/file2.txt`
135 |
136 | ### Upload to bucket root
137 |
138 | To upload `myfolder` to the root of the bucket, you can set `parent` to false.
139 | Setting `parent` to false will omit `path` when uploading to bucket.
140 |
141 | ```yaml
142 | - id: 'upload-files'
143 | uses: 'google-github-actions/upload-cloud-storage@v2'
144 | with:
145 | path: 'myfolder'
146 | destination: 'bucket-name'
147 | parent: false
148 | ```
149 |
150 | The files will be uploaded to `gs://bucket-name/file1`,`gs://bucket-name/folder2/file2.txt`
151 |
152 | If path was set to `myfolder/folder2`, the file will be uploaded to `gs://bucket-name/file2.txt`
153 |
154 | Optionally, you can also specify a prefix in destination.
155 |
156 | ```yaml
157 | - id: 'upload-files'
158 | uses: 'google-github-actions/upload-cloud-storage@v2'
159 | with:
160 | path: 'myfolder'
161 | destination: 'bucket-name/myprefix'
162 | parent: false
163 | ```
164 |
165 | The files will be uploaded to `gs://bucket-name/myprefix/file1`,`gs://bucket-name/myprefix/folder2/file2.txt`
166 |
167 | ### Glob Pattern
168 |
169 | You can specify a glob pattern like
170 |
171 | ```yaml
172 | - id: 'upload-files'
173 | uses: 'google-github-actions/upload-cloud-storage@v2'
174 | with:
175 | path: 'myfolder'
176 | destination: 'bucket-name'
177 | glob: '**/*.txt'
178 | ```
179 |
180 | This particular pattern will match all text files within `myfolder`.
181 |
182 | In this case, `myfolder/folder2/file2.txt` is the only matched file and will be uploaded to `gs://bucket-name/myfolder/folder2/file2.txt`.
183 |
184 | If `parent` is set to `false`, it will be uploaded to `gs://bucket-name/folder2/file2.txt`.
185 |
186 | ## Inputs
187 |
188 |
189 |
190 | - project_id
: _(Optional)_ Google Cloud project ID to use for billing and API requests. If not
191 | provided, the project will be inferred from the environment, best-effort.
192 | To explicitly set the value:
193 |
194 | ```yaml
195 | project_id: 'my-project'
196 | ```
197 |
198 | - universe
: _(Optional, default: `googleapis.com`)_ The Google Cloud universe to use for constructing API endpoints. Trusted
199 | Partner Cloud and Google Distributed Hosted Cloud should set this to their
200 | universe address.
201 |
202 | You can also override individual API endpoints by setting the environment
203 |     variable `GHA_ENDPOINT_OVERRIDE_<endpoint>` where `<endpoint>` is the API
204 | endpoint to override. For example:
205 |
206 | ```yaml
207 | env:
208 | GHA_ENDPOINT_OVERRIDE_oauth2: 'https://oauth2.myapi.endpoint/v1'
209 | ```
210 |
211 | For more information about universes, see the Google Cloud documentation.
212 |
213 | - path
: _(Required)_ The path to a file or folder inside the action's filesystem that should be
214 | uploaded to the bucket.
215 |
216 | You can specify either the absolute path or the relative path from the
217 | action:
218 |
219 | ```yaml
220 | path: '/path/to/file'
221 | ```
222 |
223 | ```yaml
224 | path: '../path/to/file'
225 | ```
226 |
227 | - destination
: _(Required)_ The destination for the file/folder in the form bucket-name or with an
228 | optional prefix in the form `bucket-name/prefix`. For example, to upload a
229 | file named `file` to the GCS bucket `bucket-name`:
230 |
231 | ```yaml
232 | destination: 'bucket-name'
233 | ```
234 |
235 | To upload to a subfolder:
236 |
237 | ```yaml
238 | destination: 'bucket-name/prefix'
239 | ```
240 |
241 | - gzip
: _(Optional, default: `true`)_ Upload file(s) with gzip content encoding. To disable gzip
242 | content-encoding, set the value to false:
243 |
244 | ```yaml
245 | gzip: false
246 | ```
247 |
248 | - resumable
: _(Optional, default: `true`)_ Enable resumable uploads. To disable resumable uploads, set the value to
249 | false:
250 |
251 | ```yaml
252 | resumable: false
253 | ```
254 |
255 | - predefinedAcl
: _(Optional)_ Apply a predefined set of access controls to the files being uploaded. For
256 | example, to grant project team members access to the uploaded files
257 | according to their roles:
258 |
259 | ```yaml
260 | predefinedAcl: 'projectPrivate'
261 | ```
262 |
263 | Acceptable values are: `authenticatedRead`, `bucketOwnerFullControl`,
264 | `bucketOwnerRead`, `private`, `projectPrivate`, `publicRead`. See [the
265 | document](https://googleapis.dev/nodejs/storage/latest/global.html#UploadOptions)
266 | for details.
267 |
268 | - headers
: _(Optional)_ Set object metadata. For example, to set the `Content-Type` header to
269 | `application/json` and custom metadata with key `custom-field` and value
270 | `custom-value`:
271 |
272 | ```yaml
273 | headers: |-
274 | content-type: 'application/json'
275 | x-goog-meta-custom-field: 'custom-value'
276 | ```
277 |
278 | Settable fields are: `cache-control`, `content-disposition`,
279 | `content-encoding`, `content-language`, `content-type`, `custom-time`. See
280 | [the
281 | document](https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#settable-fields;-field-values)
282 | for details. All custom metadata fields must be prefixed with
283 | `x-goog-meta-`.
284 |
285 | - parent
: _(Optional, default: `true`)_ Whether the parent directory should be included in GCS destination path. To disable this:
286 |
287 | ```yaml
288 | parent: false
289 | ```
290 |
291 | - glob
: _(Optional)_ Glob pattern to match for files to upload.
292 |
293 | ```yaml
294 | glob: '*.txt'
295 | ```
296 |
297 | - concurrency
: _(Optional, default: `100`)_ Number of files to simultaneously upload.
298 |
299 | ```yaml
300 | concurrency: '10'
301 | ```
302 |
303 | - gcloudignore_path
: _(Optional, default: `.gcloudignore`)_ Path to a gcloudignore file within the repository.
304 |
305 | ```yaml
306 | gcloudignore_path: '.gcloudignore.dev'
307 | ```
308 |
309 | - process_gcloudignore
: _(Optional, default: `true`)_ Process a `.gcloudignore` file present in the top-level of the repository.
310 | If true, the file is parsed and any filepaths that match are not uploaded
311 | to the storage bucket. To disable, set the value to false:
312 |
313 | ```yaml
314 | process_gcloudignore: false
315 | ```
316 |
317 |
318 |
319 |
320 |
321 | ## Outputs
322 |
323 |
324 |
325 | - `uploaded`: Comma-separated list of files that were uploaded.
326 |
327 |
328 |
329 |
330 |
331 | ## Authorization
332 |
333 | There are a few ways to authenticate this action. The caller must have
334 | permissions to access the secrets being requested.
335 |
336 | ### Via google-github-actions/auth
337 |
338 | Use [google-github-actions/auth](https://github.com/google-github-actions/auth)
339 | to authenticate the action. You can use [Workload Identity Federation][wif] or
340 | traditional [Service Account Key JSON][sa] authentication.
341 |
342 | ```yaml
343 | jobs:
344 | job_id:
345 | permissions:
346 | contents: 'read'
347 | id-token: 'write'
348 |
349 | steps:
350 | - id: 'auth'
351 | uses: 'google-github-actions/auth@v2'
352 | with:
353 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider'
354 | service_account: 'my-service-account@my-project.iam.gserviceaccount.com'
355 |
356 | - uses: 'google-github-actions/upload-cloud-storage@v2'
357 | ```
358 |
359 | ### Via Application Default Credentials
360 |
361 | If you are hosting your own runners, **and** those runners are on Google Cloud,
362 | you can leverage the Application Default Credentials of the instance. This will
363 | authenticate requests as the service account attached to the instance. **This
364 | only works using a custom runner hosted on GCP.**
365 |
366 | ```yaml
367 | jobs:
368 | job_id:
369 | steps:
370 | - id: 'upload-file'
371 | uses: 'google-github-actions/upload-cloud-storage@v2'
372 | ```
373 |
374 | The action will automatically detect and use the Application Default
375 | Credentials.
376 |
377 | [gcs]: https://cloud.google.com/storage
378 | [wif]: https://cloud.google.com/iam/docs/workload-identity-federation
379 | [sa]: https://cloud.google.com/iam/docs/creating-managing-service-accounts
380 | [nodejs-unicode-windows]: https://github.com/nodejs/node/issues/48673
381 |
--------------------------------------------------------------------------------
/action.yml:
--------------------------------------------------------------------------------
1 | name: Cloud Storage Uploader
2 | description: Upload files or folders to GCS buckets
3 | author: Google LLC
4 |
5 | inputs:
6 | #
7 | # Google Cloud
8 | # ------------
9 | project_id:
10 | description: |-
11 | Google Cloud project ID to use for billing and API requests. If not
12 | provided, the project will be inferred from the environment, best-effort.
13 | To explicitly set the value:
14 |
15 | ```yaml
16 | project_id: 'my-project'
17 | ```
18 | required: false
19 |
20 | universe:
21 | description: |-
22 | The Google Cloud universe to use for constructing API endpoints. Trusted
23 | Partner Cloud and Google Distributed Hosted Cloud should set this to their
24 | universe address.
25 |
26 |       You can also override individual API endpoints by setting the environment
27 |       variable `GHA_ENDPOINT_OVERRIDE_<endpoint>` where `<endpoint>` is the API
28 |       endpoint to override. For example:
29 |
30 | ```yaml
31 | env:
32 | GHA_ENDPOINT_OVERRIDE_oauth2: 'https://oauth2.myapi.endpoint/v1'
33 | ```
34 |
35 | For more information about universes, see the Google Cloud documentation.
36 | default: 'googleapis.com'
37 | required: false
38 |
39 | #
40 | # GCS
41 | # ------------
42 | path:
43 | description: |-
44 | The path to a file or folder inside the action's filesystem that should be
45 | uploaded to the bucket.
46 |
47 | You can specify either the absolute path or the relative path from the
48 | action:
49 |
50 | ```yaml
51 | path: '/path/to/file'
52 | ```
53 |
54 | ```yaml
55 | path: '../path/to/file'
56 | ```
57 | required: true
58 |
59 | destination:
60 | description: |-
61 | The destination for the file/folder in the form bucket-name or with an
62 | optional prefix in the form `bucket-name/prefix`. For example, to upload a
63 | file named `file` to the GCS bucket `bucket-name`:
64 |
65 | ```yaml
66 | destination: 'bucket-name'
67 | ```
68 |
69 | To upload to a subfolder:
70 |
71 | ```yaml
72 | destination: 'bucket-name/prefix'
73 | ```
74 | required: true
75 |
76 | gzip:
77 | description: |-
78 | Upload file(s) with gzip content encoding. To disable gzip
79 | content-encoding, set the value to false:
80 |
81 | ```yaml
82 | gzip: false
83 | ```
84 | required: false
85 | default: true
86 |
87 | resumable:
88 | description: |-
89 | Enable resumable uploads. To disable resumable uploads, set the value to
90 | false:
91 |
92 | ```yaml
93 | resumable: false
94 | ```
95 | required: false
96 | default: true
97 |
98 | predefinedAcl:
99 | description: |-
100 | Apply a predefined set of access controls to the files being uploaded. For
101 | example, to grant project team members access to the uploaded files
102 | according to their roles:
103 |
104 | ```yaml
105 | predefinedAcl: 'projectPrivate'
106 | ```
107 |
108 | Acceptable values are: `authenticatedRead`, `bucketOwnerFullControl`,
109 | `bucketOwnerRead`, `private`, `projectPrivate`, `publicRead`. See [the
110 | document](https://googleapis.dev/nodejs/storage/latest/global.html#UploadOptions)
111 | for details.
112 | required: false
113 |
114 | headers:
115 | description: |-
116 | Set object metadata. For example, to set the `Content-Type` header to
117 | `application/json` and custom metadata with key `custom-field` and value
118 | `custom-value`:
119 |
120 | ```yaml
121 | headers: |-
122 | content-type: 'application/json'
123 | x-goog-meta-custom-field: 'custom-value'
124 | ```
125 |
126 | Settable fields are: `cache-control`, `content-disposition`,
127 | `content-encoding`, `content-language`, `content-type`, `custom-time`. See
128 | [the
129 | document](https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#settable-fields;-field-values)
130 | for details. All custom metadata fields must be prefixed with
131 | `x-goog-meta-`.
132 | required: false
133 |
134 | parent:
135 | description: |-
136 | Whether the parent directory should be included in GCS destination path. To disable this:
137 |
138 | ```yaml
139 | parent: false
140 | ```
141 | required: false
142 | default: true
143 |
144 | glob:
145 | description: |-
146 | Glob pattern to match for files to upload.
147 |
148 | ```yaml
149 | glob: '*.txt'
150 | ```
151 | required: false
152 |
153 | concurrency:
154 | description: |-
155 | Number of files to simultaneously upload.
156 |
157 | ```yaml
158 | concurrency: '10'
159 | ```
160 | required: false
161 | default: '100'
162 |
163 | gcloudignore_path:
164 | description: |-
165 | Path to a gcloudignore file within the repository.
166 |
167 | ```yaml
168 | gcloudignore_path: '.gcloudignore.dev'
169 | ```
170 | required: false
171 | default: '.gcloudignore'
172 |
173 | process_gcloudignore:
174 | description: |-
175 | Process a `.gcloudignore` file present in the top-level of the repository.
176 | If true, the file is parsed and any filepaths that match are not uploaded
177 | to the storage bucket. To disable, set the value to false:
178 |
179 | ```yaml
180 | process_gcloudignore: false
181 | ```
182 | required: false
183 | default: true
184 |
185 |
186 | outputs:
187 | uploaded:
188 | description: |-
189 | Comma-separated list of files that were uploaded.
190 |
191 |
192 | branding:
193 | icon: 'upload-cloud'
194 | color: 'blue'
195 |
196 |
197 | runs:
198 | using: 'node20'
199 | main: 'dist/index.js'
200 |
--------------------------------------------------------------------------------
/bin/runTests.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
set -eEuo pipefail

#
# As of Node 20, the --test parameter does not support globbing, and it does not
# support variable Windows paths. We also cannot invoke the test runner
# directly, because while it has an API, there's no way to force it to transpile
# the Typescript into JavaScript before passing it to the runner.
#
# So we're left with this solution, which shells out to Node to list all files
# that end in *.test.ts (excluding node_modules/), and then execs out to that
# process. We have to exec so the stderr/stdout and exit code is appropriately
# fed to the caller.
#

# Space-joined list of *.test.ts files, relative to the repository root.
FILES="$(node -e "process.stdout.write(require('node:fs').readdirSync('./', { recursive: true }).filter((e) => {return e.endsWith('.test.ts') && !e.startsWith('node_modules');}).sort().join(' '));")"

set -x
# NOTE: ${FILES} is intentionally unquoted - it is a space-joined list that
# must be word-split into one argument per test file (assumes no spaces in
# test filenames).
exec node --require ts-node/register --test-reporter spec --test ${FILES}
--------------------------------------------------------------------------------
/eslint.config.mjs:
--------------------------------------------------------------------------------
import js from '@eslint/js';
import ts from 'typescript-eslint';
import tsParser from '@typescript-eslint/parser';

import prettierRecommended from 'eslint-plugin-prettier/recommended';

// Flat ESLint config: base JS recommended rules, the typescript-eslint
// "eslintRecommended" adjustments, and the TypeScript parser for .ts/.tsx
// files, followed by the Prettier plugin's recommended setup.
export default ts.config(
  js.configs.recommended,
  ts.configs.eslintRecommended,
  {
    files: ['**/*.ts', '**/*.tsx'],
    languageOptions: {
      parser: tsParser,
    },
  },
  // Generated bundles (dist/) and plain JavaScript files are not linted.
  { ignores: ['dist/', '**/*.js'] },
  prettierRecommended,
);
19 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "upload-cloud-storage",
3 | "version": "2.2.2",
4 | "description": "Upload to Google Cloud Storage (GCS)",
5 | "main": "dist/index.js",
6 | "scripts": {
7 | "build": "ncc build -m src/main.ts",
8 | "docs": "./node_modules/.bin/actions-gen-readme",
9 | "lint": "eslint .",
10 | "format": "eslint . --fix",
11 | "test": "bash ./bin/runTests.sh"
12 | },
13 | "repository": {
14 | "type": "git",
15 | "url": "https://github.com/google-github-actions/upload-cloud-storage"
16 | },
17 | "keywords": [
18 | "actions",
19 | "gcs",
20 | "upload",
21 | "google cloud",
22 | "cloud storage"
23 | ],
24 | "author": "GoogleCloudPlatform",
25 | "license": "Apache-2.0",
26 | "dependencies": {
27 | "@actions/core": "^1.11.1",
28 | "@google-cloud/storage": "^7.15.0",
29 | "@google-github-actions/actions-utils": "^0.8.6",
30 | "fast-glob": "^3.3.3",
31 | "ignore": "^7.0.3"
32 | },
33 | "devDependencies": {
34 | "@eslint/eslintrc": "^3.2.0",
35 | "@eslint/js": "^9.19.0",
36 | "@types/node": "^22.13.0",
37 | "@typescript-eslint/eslint-plugin": "^8.22.0",
38 | "@vercel/ncc": "^0.38.3",
39 | "eslint-config-prettier": "^10.0.1",
40 | "eslint-plugin-prettier": "^5.2.3",
41 | "eslint": "^9.19.0",
42 | "prettier": "^3.4.2",
43 | "ts-node": "^10.9.2",
44 | "typescript-eslint": "^8.22.0",
45 | "typescript": "^5.7.3"
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/client.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import * as path from 'path';
18 |
19 | import {
20 | IdempotencyStrategy,
21 | PredefinedAcl,
22 | Storage,
23 | StorageOptions,
24 | UploadOptions,
25 | } from '@google-cloud/storage';
26 | import { errorMessage, toPlatformPath, toPosixPath } from '@google-github-actions/actions-utils';
27 |
28 | import { Metadata } from './headers';
29 | import { deepClone } from './util';
30 |
31 | // Do not listen to the linter - this can NOT be rewritten as an ES6 import statement.
32 | const { version: appVersion } = require('../package.json');
33 |
34 | // userAgent is the default user agent.
35 | const userAgent = `google-github-actions:upload-cloud-storage/${appVersion}`;
36 |
/**
 * Available options to create the client.
 *
 * projectID - Optional Google Cloud project ID. When omitted, the storage
 * client infers it from the environment.
 *
 * universe - Optional universe domain (e.g. "googleapis.com") used to
 * construct API endpoints.
 */
export type ClientOptions = {
  projectID?: string;
  universe?: string;
};
46 |
/**
 * ClientFileUpload represents a file to upload. It keeps track of the local
 * source path and remote destination.
 */
export type ClientFileUpload = {
  /**
   * source is the absolute, local path on disk to the file.
   */
  source: string;

  /**
   * destination is the remote location for the file, relative to the bucket
   * root.
   */
  destination: string;
};

/**
 * ClientUploadOptions is the list of available options during file upload.
 */
export interface ClientUploadOptions {
  /**
   * bucket is the name of the bucket in which to upload.
   */
  bucket: string;

  /**
   * files is the list of absolute file paths on local disk to upload. This list
   * must use posix path separators for files.
   */
  files: ClientFileUpload[];

  /**
   * concurrency is the maximum number of parallel upload operations that will
   * take place.
   */
  concurrency?: number;

  /**
   * metadata is object metadata to set. These are usually populated from
   * headers.
   */
  metadata?: Metadata;

  /**
   * gzip indicates whether to gzip the object when uploading.
   */
  gzip?: boolean;

  /**
   * resumable indicates whether the upload should be resumable after interrupt.
   */
  resumable?: boolean;

  /**
   * predefinedAcl defines the default ACL to apply to new objects.
   */
  predefinedAcl?: PredefinedAcl;

  /**
   * onUploadObject is called each time an object upload begins.
   **/
  onUploadObject?: FOnUploadObject;
}

/**
 * FOnUploadObject is the function interface for the upload callback signature.
 *
 * source is the local file path, destination is the joined "bucket/object"
 * path, and opts are the upload options that will be passed to the storage
 * client for this object.
 */
export interface FOnUploadObject {
  // eslint-disable-next-line no-unused-vars
  (source: string, destination: string, opts: UploadOptions): void;
}

/**
 * ClientComputeDestinationOptions is the list of options to compute file
 * destinations in a target bucket.
 */
export interface ClientComputeDestinationOptions {
  /**
   * givenRoot is the root given by the input to the function.
   */
  givenRoot: string;

  /**
   * absoluteRoot is the absolute root path, used for resolving the files.
   */
  absoluteRoot: string;

  /**
   * files is a list of filenames, for a glob expansion. All files are relative
   * to absoluteRoot.
   */
  files: string[];

  /**
   * prefix is an optional prefix to predicate on all paths.
   */
  prefix?: string;

  /**
   * includeParent indicates whether the local directory parent name (basename
   * of givenRoot) should be included in the destination path in the bucket.
   */
  includeParent?: boolean;
}
152 |
153 | /**
154 | * Handles credential lookup, registration and wraps interactions with the GCS
155 | * Helper.
156 | *
157 | * @param opts List of ClientOptions.
158 | */
159 | export class Client {
160 | readonly storage: Storage;
161 |
162 | static async build(opts?: ClientOptions): Promise {
163 | const client = new Client(opts);
164 |
165 | // We need to force the authClient to cache its internal client. Since all
166 | // our calls are done in parallel, this has to be done as part of
167 | // initialization.
168 | //
169 | // https://github.com/google-github-actions/upload-cloud-storage/issues/364
170 | await client.storage.authClient.getClient();
171 |
172 | return client;
173 | }
174 |
175 | private constructor(opts?: ClientOptions) {
176 | const options: StorageOptions = {
177 | projectId: opts?.projectID,
178 | universeDomain: opts?.universe,
179 | userAgent: userAgent,
180 |
181 | retryOptions: {
182 | autoRetry: true,
183 | idempotencyStrategy: IdempotencyStrategy.RetryAlways,
184 | maxRetries: 5,
185 | maxRetryDelay: 30,
186 | retryDelayMultiplier: 2,
187 | totalTimeout: 500,
188 | },
189 | };
190 |
191 | this.storage = new Storage(options);
192 | }
193 |
194 | /**
195 | * computeDestinations builds a collection of files to their intended upload
196 | * paths in a Cloud Storage bucket, based on the given options.
197 | *
198 | * @param opts List of inputs and files to compute.
199 | * @return List of files to upload with the source as a local file path and
200 | * the remote destination path.
201 | */
202 | static computeDestinations(opts: ClientComputeDestinationOptions): ClientFileUpload[] {
203 | const list: ClientFileUpload[] = [];
204 | for (let i = 0; i < opts.files.length; i++) {
205 | const name = opts.files[i];
206 |
207 | // Calculate destination by joining the prefix (if one exists), the parent
208 | // directory name (if includeParent is true), and the file name. path.join
209 | // ignores empty strings. We only want to do this if
210 | const base = opts.includeParent ? path.posix.basename(toPosixPath(opts.givenRoot)) : '';
211 | const destination = path.posix.join(opts.prefix || '', base, name);
212 |
213 | // Compute the absolute path of the file.
214 | const source = path.resolve(opts.absoluteRoot, toPlatformPath(name));
215 |
216 | list.push({
217 | source: source,
218 | destination: destination,
219 | });
220 | }
221 |
222 | return list;
223 | }
224 |
225 | /**
226 | * upload puts the given collection of files into the bucket. It will
227 | * overwrite any existing objects with the same name and create any new
228 | * objects. It does not delete any existing objects.
229 | *
230 | * @param opts ClientUploadOptions
231 | *
232 | * @return The list of files uploaded.
233 | */
234 | async upload(opts: ClientUploadOptions): Promise {
235 | const bucket = opts.bucket;
236 | const storageBucket = this.storage.bucket(bucket);
237 |
238 | const tasks = opts.files.map((file) => async (): Promise => {
239 | const source = file.source;
240 | const destination = file.destination;
241 |
242 | // Apparently the Cloud Storage SDK modifies this object, so we need to
243 | // make our own deep copy before passing it to upload. See #258 for more
244 | // information.
245 | const shadowedUploadOpts: UploadOptions = {
246 | destination: destination,
247 | metadata: opts.metadata || {},
248 | gzip: opts.gzip,
249 | predefinedAcl: opts.predefinedAcl,
250 | resumable: opts.resumable,
251 | };
252 | const uploadOpts = deepClone(shadowedUploadOpts);
253 |
254 | // Execute callback if defined
255 | if (opts.onUploadObject) {
256 | opts.onUploadObject(source, path.posix.join(bucket, destination), uploadOpts);
257 | }
258 |
259 | // Do the upload
260 | const response = await storageBucket.upload(source, uploadOpts);
261 | const name = response[0].name;
262 | return name;
263 | });
264 |
265 | const results = await inParallel(tasks, opts.concurrency);
266 | return results;
267 | }
268 | }
269 |
270 | /**
271 | * TODO(sethvargo): move into actions-utils
272 | */
273 | import { cpus as oscpus } from 'os';
274 |
275 | export async function inParallel Promise>, R extends ReturnType>(
276 | tasks: (() => Promise | Promise)[],
277 | concurrency: number | undefined,
278 | ): Promise {
279 | // Concurrency is the minimum of the number of arguments or concurrency. This
280 | // prevents additional undefined entries in the results array.
281 | concurrency = Math.min(concurrency || oscpus().length - 1);
282 | if (concurrency < 1) {
283 | throw new Error(`concurrency must be at least 1`);
284 | }
285 |
286 | const results: R[] = [];
287 | const errors: string[] = [];
288 |
289 | const runTasks = async (iter: IterableIterator<[number, () => Promise | Promise]>) => {
290 | for (const [idx, task] of iter) {
291 | try {
292 | results[idx] = await task();
293 | } catch (err) {
294 | errors[idx] = errorMessage(err);
295 | }
296 | }
297 | };
298 |
299 | const workers = new Array(concurrency).fill(tasks.entries()).map(runTasks);
300 | await Promise.allSettled(workers);
301 |
302 | if (errors.length > 0) {
303 | throw new Error(errors.join('\n'));
304 | }
305 |
306 | return results;
307 | }
308 |
--------------------------------------------------------------------------------
/src/headers.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
// Prefix that marks an input header as custom object metadata. The prefix is
// stripped from the key before storage (see parseHeadersInput).
const customMetadataPrefix = 'x-goog-meta-';

// Arbitrary user-supplied key/value pairs stored as custom object metadata.
interface CustomMetadata {
  [key: string]: string;
}

// Metadata mirrors the settable GCS object metadata fields, plus a bag of
// custom key/value metadata. Populated by parseHeadersInput.
export interface Metadata {
  cacheControl?: string;
  contentDisposition?: string;
  contentEncoding?: string;
  contentLanguage?: string;
  contentType?: string;
  customTime?: string;
  metadata?: CustomMetadata;
}
32 |
33 | function parseHeaderLines(input: string): Map {
34 | const map = new Map();
35 |
36 | const lines = input.split(/\r?\n/);
37 | for (let i = 0; i < lines.length; i++) {
38 | const line = (lines[i] || '').trim();
39 | if (!line) {
40 | continue;
41 | }
42 |
43 | const idx = line.indexOf(':');
44 | if (idx === -1) {
45 | throw new Error(
46 | `Failed to parse header line ${i} ("${line}") - the expected format is "key: value"`,
47 | );
48 | }
49 |
50 | const key = (line.substring(0, idx) || '').trim();
51 | const value = (line.substring(idx + 1) || '').trim();
52 | if (!key) {
53 | throw new Error(`Failed to parse header line ${i} ("${line}") - missing key`);
54 | }
55 | if (!value) {
56 | throw new Error(`Failed to parse header line ${i} ("${line}") - missing value`);
57 | }
58 |
59 | if (map.has(key)) {
60 | throw new Error(
61 | `Failed to parse header line ${i} ("${line}") - key "${key}" already ` +
62 | `exists, possibly from a previous line`,
63 | );
64 | }
65 |
66 | map.set(key, value);
67 | }
68 |
69 | return map;
70 | }
71 |
/**
 * Parses multiline headers input into the metadata object expected by the GCS
 * library.
 *
 * Known fixed keys (`cache-control`, `content-disposition`,
 * `content-encoding`, `content-language`, `content-type`, `custom-time`) map
 * to their camelCase metadata fields. Custom metadata keys must be prefixed
 * with `x-goog-meta-`; the prefix is stripped before storing.
 *
 * @param input multiline string with headers.
 * @returns The parsed metadata object.
 * @throws If a line cannot be parsed, or if a key is neither a known fixed
 *   header nor prefixed with `x-goog-meta-`.
 */
export function parseHeadersInput(input: string): Metadata {
  const headers = parseHeaderLines(input);
  const metadata: Metadata = {};
  headers.forEach((value, key) => {
    if (key.startsWith(customMetadataPrefix)) {
      // Lazily create the custom-metadata bag on first custom key.
      if (!metadata.metadata) {
        metadata.metadata = {};
      }
      metadata.metadata[key.slice(customMetadataPrefix.length)] = value;
    } else {
      switch (key) {
        case 'cache-control':
          metadata.cacheControl = value;
          break;
        case 'content-disposition':
          metadata.contentDisposition = value;
          break;
        case 'content-encoding':
          metadata.contentEncoding = value;
          break;
        case 'content-language':
          metadata.contentLanguage = value;
          break;
        case 'content-type':
          metadata.contentType = value;
          break;
        case 'custom-time':
          metadata.customTime = value;
          break;
        default:
          throw new Error(
            `Invalid header key "${key}" - custom header keys must be ` +
              `prefixed with "x-goog-meta-"`,
          );
      }
    }
  });
  return metadata;
}
121 |
--------------------------------------------------------------------------------
/src/main.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import * as core from '@actions/core';
18 | import { PredefinedAcl, UploadOptions } from '@google-cloud/storage';
19 | import {
20 | errorMessage,
21 | isPinnedToHead,
22 | parseBoolean,
23 | parseGcloudIgnore,
24 | pinnedToHeadWarning,
25 | } from '@google-github-actions/actions-utils';
26 | import ignore from 'ignore';
27 |
28 | import * as path from 'path';
29 |
30 | import { Client } from './client';
31 | import { parseHeadersInput } from './headers';
32 | import {
33 | absoluteRootAndComputedGlob,
34 | deepClone,
35 | parseBucketNameAndPrefix,
36 | expandGlob,
37 | } from './util';
38 |
39 | const NO_FILES_WARNING =
40 | `There are no files to upload! Make sure the workflow uses the "checkout"` +
41 | `step before uploading files:\n` +
42 | `\n` +
43 | ` - uses: 'actions/checkout@v4'\n` +
44 | ` # ...\n` +
45 | ` - uses: 'google-github-actions/upload-cloud-storage@v2'\n` +
46 | `\n` +
47 | `Check that the "path" points to a valid destination on disk, relative to ` +
48 | `the GitHub Workspace. Make sure your files are not being ignored via a ` +
49 | `.gcloudignore file in the repository.`;
50 |
51 | export async function run(): Promise {
52 | try {
53 | // Warn if pinned to HEAD
54 | if (isPinnedToHead()) {
55 | core.warning(pinnedToHeadWarning('v0'));
56 | }
57 |
58 | // Google Cloud inputs
59 | const projectID = core.getInput('project_id');
60 | const universe = core.getInput('universe') || 'googleapis.com';
61 |
62 | // GCS inputs
63 | const root = core.getInput('path', { required: true });
64 | const destination = core.getInput('destination', { required: true });
65 | const gzip = parseBoolean(core.getInput('gzip'));
66 | const resumable = parseBoolean(core.getInput('resumable'));
67 | const includeParent = parseBoolean(core.getInput('parent'));
68 | const glob = core.getInput('glob');
69 | const concurrency = Number(core.getInput('concurrency'));
70 | const predefinedAclInput = core.getInput('predefinedAcl');
71 | const predefinedAcl =
72 | predefinedAclInput === '' ? undefined : (predefinedAclInput as PredefinedAcl);
73 | const headersInput = core.getInput('headers');
74 | const gcloudIgnorePath = core.getInput('gcloudignore_path') || '.gcloudignore';
75 | const processGcloudIgnore = parseBoolean(core.getInput('process_gcloudignore'));
76 | const metadata = headersInput === '' ? {} : parseHeadersInput(headersInput);
77 |
78 | // Compute the absolute root and compute the glob.
79 | const [absoluteRoot, computedGlob, rootIsDir] = await absoluteRootAndComputedGlob(root, glob);
80 | core.debug(`Computed absoluteRoot from "${root}" to "${absoluteRoot}" (isDir: ${rootIsDir})`);
81 | core.debug(`Computed computedGlob from "${glob}" to "${computedGlob}"`);
82 |
83 | // Build complete file list.
84 | const files = await expandGlob(absoluteRoot, computedGlob);
85 | core.debug(`Found ${files.length} files: ${JSON.stringify(files)}`);
86 |
87 | // Process ignores:
88 | //
89 | // - Find ignore file
90 | // - Format all files to be posix relative to input.path
91 | // - Filter out items that match
92 | if (processGcloudIgnore) {
93 | core.debug(`Processing gcloudignore at ${gcloudIgnorePath}`);
94 |
95 | const ignores = ignore();
96 |
97 | // Look for a .gcloudignore in the repository root.
98 | const githubWorkspace = process.env.GITHUB_WORKSPACE;
99 | if (githubWorkspace) {
100 | const gcloudIgnorePathAbs = path.join(githubWorkspace, gcloudIgnorePath);
101 | const ignoreList = await parseGcloudIgnore(gcloudIgnorePathAbs);
102 |
103 | if (ignoreList && ignoreList.length) {
104 | core.debug(`Using .gcloudignore at: ${gcloudIgnorePathAbs}`);
105 | core.debug(`Parsed ignore list: ${JSON.stringify(ignoreList)}`);
106 |
107 | ignores.add(ignoreList);
108 | } else {
109 | core.warning(
110 | `The "process_gcloudignore" option is true, but no .gcloudignore ` +
111 | `file was found. If you do not intend to process a ` +
112 | `gcloudignore file, set "process_gcloudignore" to false.`,
113 | );
114 | }
115 |
116 | for (let i = 0; i < files.length; i++) {
117 | const name = path.join(root, files[i]);
118 | try {
119 | if (ignores.ignores(name)) {
120 | core.debug(`Ignoring ${name} because of ignore file`);
121 | files.splice(i, 1);
122 | i--;
123 | }
124 | } catch (err) {
125 | const msg = errorMessage(err);
126 | core.error(`Failed to process ignore for ${name}, skipping: ${msg}`);
127 | }
128 | }
129 | } else {
130 | core.warning(
131 | `The "process_gcloudignore" option is true, but $GITHUB_WORKSPACE ` +
132 | `is not set. Did you forget to use "actions/checkout" before ` +
133 | `this step? If you do not intend to process a gcloudignore file, ` +
134 | `set "process_gcloudignore" to false.`,
135 | );
136 | }
137 | }
138 |
139 | core.debug(`Uploading ${files.length} files: ${JSON.stringify(files)}`);
140 |
141 | // Emit a helpful warning in case people specify the wrong directory.
142 | if (files.length === 0) {
143 | core.warning(NO_FILES_WARNING);
144 | }
145 |
146 | // Compute the bucket and prefix.
147 | const [bucket, prefix] = parseBucketNameAndPrefix(destination);
148 | core.debug(`Computed bucket as "${bucket}"`);
149 | core.debug(`Computed prefix as "${prefix}"`);
150 |
151 | // Compute the list of file destinations in the bucket based on given
152 | // parameters.
153 | const destinations = Client.computeDestinations({
154 | givenRoot: root,
155 | absoluteRoot: absoluteRoot,
156 | files: files,
157 | prefix: prefix,
158 |
159 | // Only include the parent if the given root was a directory. Without
160 | // this, uploading a single object will cause the object to be nested in
161 | // its own name: google-github-actions/upload-cloud-storage#259.
162 | includeParent: includeParent && rootIsDir,
163 | });
164 |
165 | // Create the client and upload files.
166 | core.startGroup('Upload files');
167 | const client = await Client.build({
168 | projectID: projectID,
169 | universe: universe,
170 | });
171 | const uploadResponses = await client.upload({
172 | bucket: bucket,
173 | files: destinations,
174 | concurrency: concurrency,
175 | metadata: metadata,
176 | gzip: gzip,
177 | resumable: resumable,
178 | predefinedAcl: predefinedAcl,
179 |
180 | onUploadObject: (source: string, destination: string, opts: UploadOptions) => {
181 | core.info(`Uploading ${source} to gs://${destination}`);
182 |
183 | if (core.isDebug()) {
184 | const data = deepClone(opts) as Record;
185 | data['ts'] = Date.now();
186 | data['source'] = source;
187 | data['destination'] = destination;
188 | core.debug(`Uploading: ${JSON.stringify(data)}`);
189 | }
190 | },
191 | });
192 | core.endGroup();
193 |
194 | core.setOutput('uploaded', uploadResponses.join(','));
195 | } catch (err) {
196 | const msg = errorMessage(err);
197 | core.setFailed(`google-github-actions/upload-cloud-storage failed with: ${msg}`);
198 | }
199 | }
200 |
// Execute this as the entrypoint when requested (i.e. the module is run
// directly, as in `node dist/index.js`), but not when imported, e.g. by tests.
if (require.main === module) {
  run();
}
205 |
--------------------------------------------------------------------------------
/src/util.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { promises as fs } from 'fs';
18 | import * as path from 'path';
19 | import * as v8 from 'v8';
20 |
21 | import fg from 'fast-glob';
22 | import { toPlatformPath, toPosixPath } from '@google-github-actions/actions-utils';
23 |
24 | /**
25 | * absoluteRootAndComputedGlob expands the root to an absolute path. If the
26 | * result points to a file, the root is modified to be the absolute parent
27 | * directory and the glob is updated to match only the file. Otherwise, the
28 | * absolute path and glob are returned.
29 | *
30 | * If the file/directory does not exist, it throws an error.
31 | *
32 | * If the root is a path to a file and glob is defined, it throws an error.
33 | *
34 | * @param root The root path to expand.
35 | * @param glob The glob to compute.
36 | * @return [string, string, boolean] The absolute and expanded root, the
37 | * computed glob, and a boolean indicating whether the given root was a
38 | * directory.
39 | */
40 | export async function absoluteRootAndComputedGlob(
41 | root: string,
42 | glob: string,
43 | ): Promise<[absoluteRoot: string, computedGlob: string, isFile: boolean]> {
44 | // Resolve the root input path, relative to the active workspace. If the
45 | // value was already an absolute path, this has no effect.
46 | const githubWorkspace = process.env.GITHUB_WORKSPACE;
47 | if (!githubWorkspace) {
48 | throw new Error(`$GITHUB_WORKSPACE is not set`);
49 | }
50 | const resolvedRoot = path.resolve(githubWorkspace, toPlatformPath(root));
51 |
52 | // Handle when the root is pointing to a single file instead of a directory.
53 | // In this case, set the parent directory as the root and glob as the file.
54 | const absoluteRootStat = await fs.lstat(resolvedRoot);
55 | if (absoluteRootStat.isFile()) {
56 | if (glob) {
57 | throw new Error(`root "path" points to a file, but "glob" was also given`);
58 | }
59 |
60 | const computedGlob = path.basename(resolvedRoot);
61 | const absoluteRoot = path.dirname(resolvedRoot);
62 | return [absoluteRoot, toPosixPath(computedGlob), false];
63 | }
64 |
65 | return [resolvedRoot, toPosixPath(glob), true];
66 | }
67 |
68 | /**
69 | * parseBucketNameAndPrefix parses the given name and returns the bucket
70 | * portion and any prefix (if it exists).
71 | *
72 | * @param name Name the parse.
73 | * @return The bucket and prefix (prefix will be the empty string).
74 | */
75 | export function parseBucketNameAndPrefix(name: string): [bucket: string, prefix: string] {
76 | const trimmed = (name || '').trim();
77 |
78 | const idx = trimmed.indexOf('/');
79 | if (idx > -1) {
80 | const bucket = (trimmed.substring(0, idx) || '').trim();
81 | const prefix = (trimmed.substring(idx + 1) || '').trim();
82 | return [bucket, prefix];
83 | }
84 |
85 | return [trimmed, ''];
86 | }
87 |
88 | /**
89 | * expandGlob compiles the list of all files in the given directory for the
90 | * provided glob.
91 | *
92 | * @param directoryPath The path to the directory.
93 | * @param glob Glob pattern to use for searching. If the empty string, a
94 | * match-all pattern is used instead.
95 | * @return Sorted list of relative paths of files from directoryPath, in posix
96 | * form.
97 | */
98 | export async function expandGlob(directoryPath: string, glob: string): Promise {
99 | const directoryPosix = toPosixPath(directoryPath);
100 | const search = toPosixPath(glob || '**/*');
101 | const filesList = await fg(search, {
102 | absolute: true,
103 | cwd: directoryPath,
104 | dot: true,
105 | });
106 |
107 | for (let i = 0; i < filesList.length; i++) {
108 | const rel = path.posix.relative(directoryPosix, filesList[i]);
109 | filesList[i] = rel;
110 | }
111 |
112 | return filesList.sort();
113 | }
114 |
115 | /**
116 | * deepClone makes a deep clone of the given object.
117 | *
118 | * @param obj T, object to clone
119 | * @return T a copy of the object
120 | */
121 | export function deepClone(obj: T): T {
122 | return v8.deserialize(v8.serialize(obj));
123 | }
124 |
--------------------------------------------------------------------------------
/tests/client.int.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import { inParallel, skipIfMissingEnv } from '@google-github-actions/actions-utils';
21 | import { randomBytes } from 'crypto';
22 | import { Storage } from '@google-cloud/storage';
23 |
24 | import { Client } from '../src/client';
25 |
26 | const projectID = process.env.UPLOAD_CLOUD_STORAGE_TEST_PROJECT;
27 |
28 | test(
29 | 'integration/Client#upload',
30 | {
31 | concurrency: true,
32 | skip: skipIfMissingEnv('UPLOAD_CLOUD_STORAGE_TEST_PROJECT'),
33 | },
34 | async (suite) => {
35 | let storage: Storage;
36 | let testBucket: string;
37 |
38 | suite.before(async () => {
39 | storage = new Storage({
40 | projectId: projectID,
41 | });
42 |
43 | // Create a dedicated bucket for each run.
44 | const testBucketName = `client-${randomBytes(6).toString('hex')}-${
45 | process.env.GITHUB_SHA || 'unknown'
46 | }`;
47 | const [bucket] = await storage.createBucket(testBucketName, {
48 | location: 'US',
49 | });
50 | testBucket = bucket.name;
51 | });
52 |
53 | suite.afterEach(async () => {
54 | const bucket = storage.bucket(testBucket);
55 |
56 | const [files] = await bucket.getFiles();
57 | const tasks = files.map((file) => async (): Promise => {
58 | await bucket.file(file.name).delete();
59 | });
60 | await inParallel(tasks, 50);
61 | });
62 |
63 | suite.after(async () => {
64 | const bucket = storage.bucket(testBucket);
65 | await bucket.delete();
66 | });
67 |
68 | await suite.test('throws an error on a non-existent bucket', async () => {
69 | const client = await Client.build({ projectID: projectID });
70 | await assert.rejects(async () => {
71 | await client.upload({
72 | bucket: 'definitely-not-a-real-bucket',
73 | files: [{ source: './tests/testdata/test1.txt', destination: 'test1.txt' }],
74 | });
75 | }, 'dafdaf');
76 | });
77 |
78 | await suite.test('throws an error on a non-existent file', async () => {
79 | const client = await Client.build({ projectID: projectID });
80 | await assert.rejects(async () => {
81 | await client.upload({
82 | bucket: testBucket,
83 | files: [{ source: 'test1.txt', destination: 'test1.txt' }],
84 | });
85 | }, /ENOENT/);
86 | });
87 |
88 | await suite.test('uploads a single file', async () => {
89 | const client = await Client.build({ projectID: projectID });
90 | await client.upload({
91 | bucket: testBucket,
92 | files: [{ source: './tests/testdata/test1.txt', destination: 'test1.txt' }],
93 | });
94 |
95 | const [files] = await storage.bucket(testBucket).getFiles();
96 | const list = files.map((file) => file.name);
97 | assert.deepStrictEqual(list, ['test1.txt']);
98 | });
99 |
100 | await suite.test('uploads files with the correct mime type', async () => {
101 | const client = await Client.build({ projectID: projectID });
102 | await client.upload({
103 | bucket: testBucket,
104 | files: [
105 | { source: './tests/testdata/test.css', destination: 'test.css' },
106 | { source: './tests/testdata/test.js', destination: 'test.js' },
107 | { source: './tests/testdata/test.json', destination: 'test.json' },
108 | { source: './tests/testdata/test1.txt', destination: 'test1.txt' },
109 | ],
110 | });
111 |
112 | const [files] = await storage.bucket(testBucket).getFiles();
113 | const list = files.map((file) => file.name);
114 | assert.deepStrictEqual(list, ['test.css', 'test.js', 'test.json', 'test1.txt']);
115 |
116 | const css = files[0];
117 | assert.deepStrictEqual(css?.metadata?.contentType, 'text/css');
118 |
119 | const js = files[1];
120 | assert.deepStrictEqual(js?.metadata?.contentType, 'application/javascript');
121 |
122 | const json = files[2];
123 | assert.deepStrictEqual(json?.metadata?.contentType, 'application/json');
124 |
125 | const txt = files[3];
126 | assert.deepStrictEqual(txt?.metadata?.contentType, 'text/plain');
127 | });
128 |
129 | await suite.test('uploads a single file with prefix', async () => {
130 | const client = await Client.build({ projectID: projectID });
131 | await client.upload({
132 | bucket: testBucket,
133 | files: [{ source: './tests/testdata/test1.txt', destination: 'my/prefix/test1.txt' }],
134 | });
135 |
136 | const [files] = await storage.bucket(testBucket).getFiles();
137 | const list = files.map((file) => file.name);
138 | assert.deepStrictEqual(list, ['my/prefix/test1.txt']);
139 | });
140 |
141 | await suite.test('uploads a single file without an extension', async () => {
142 | const client = await Client.build({ projectID: projectID });
143 | await client.upload({
144 | bucket: testBucket,
145 | files: [{ source: './tests/testdata/testfile', destination: 'testfile' }],
146 | });
147 |
148 | const [files] = await storage.bucket(testBucket).getFiles();
149 | const list = files.map((file) => file.name);
150 | assert.deepStrictEqual(list, ['testfile']);
151 | });
152 |
153 | await suite.test(
154 | 'uploads a file with unicode characters in the filename',
155 | { skip: process.platform === 'win32' },
156 | async () => {
157 | const client = await Client.build({ projectID: projectID });
158 | await client.upload({
159 | bucket: testBucket,
160 | files: [{ source: './tests/testdata-unicode/🚀', destination: '🚀' }],
161 | });
162 |
163 | const [files] = await storage.bucket(testBucket).getFiles();
164 | const list = files.map((file) => file.name);
165 | assert.deepStrictEqual(list, ['🚀']);
166 | },
167 | );
168 |
169 | await suite.test('uploads a single file with metadata', async () => {
170 | const client = await Client.build({ projectID: projectID });
171 | await client.upload({
172 | bucket: testBucket,
173 | files: [{ source: './tests/testdata/test1.txt', destination: 'test1.txt' }],
174 | metadata: {
175 | contentType: 'application/json',
176 | metadata: {
177 | foo: 'bar',
178 | },
179 | },
180 | });
181 |
182 | const [files] = await storage.bucket(testBucket).getFiles();
183 | const metadata = files[0]?.metadata;
184 |
185 | assert.deepStrictEqual(metadata?.contentType, 'application/json');
186 | assert.deepStrictEqual(metadata?.metadata?.foo, 'bar');
187 | });
188 | },
189 | );
190 |
--------------------------------------------------------------------------------
/tests/client.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { describe, test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import * as path from 'path';
21 |
22 | import { forceRemove, randomFilepath, writeSecureFile } from '@google-github-actions/actions-utils';
23 |
24 | import { Client } from '../src/client';
25 | import { Bucket, UploadOptions } from '@google-cloud/storage';
26 | import { GoogleAuth } from 'google-auth-library';
27 |
28 | import { mockUpload } from './helpers.test';
29 |
30 | describe('Client', { concurrency: true }, async () => {
  // Verifies that Client.build() picks up Application Default Credentials
  // from $GOOGLE_APPLICATION_CREDENTIALS.
  test('.build', async (suite) => {
    // Snapshot the environment so afterEach can restore it untouched.
    const originalEnv = Object.assign({}, process.env);
    const appCreds = {
      client_email: 'test-email@example.com',
      private_key: 'test-private-key',
    };
    // Write the fake credentials to a random temp file that ADC can point at.
    const appCredsJSON = await writeSecureFile(randomFilepath(), JSON.stringify(appCreds));

    suite.beforeEach(async () => {
      process.env.GOOGLE_APPLICATION_CREDENTIALS = appCredsJSON;
    });

    suite.afterEach(async () => {
      await forceRemove(appCredsJSON);
      process.env = originalEnv;
    });

    await suite.test('initializes with ADC', async () => {
      const client = await Client.build();
      // The auth client's parsed JSON should be exactly the credentials file.
      const result = client?.storage?.authClient?.jsonContent;
      assert.deepStrictEqual(result, appCreds);
    });
  });
54 |
55 | test('.computeDestinations', async (suite) => {
56 | const cases = [
57 | {
58 | name: 'no files',
59 | input: {
60 | givenRoot: '',
61 | absoluteRoot: '',
62 | files: [],
63 | },
64 | expected: [],
65 | },
66 |
67 | // relative
68 | {
69 | name: 'relative given root',
70 | input: {
71 | givenRoot: 'foo/bar',
72 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
73 | files: ['file1', 'nested/sub/file2'],
74 | },
75 | expected: [
76 | {
77 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
78 | destination: 'file1',
79 | },
80 | {
81 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
82 | destination: 'nested/sub/file2',
83 | },
84 | ],
85 | },
86 | {
87 | name: 'relative given root with parent',
88 | input: {
89 | givenRoot: 'foo/bar',
90 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
91 | files: ['file1', 'nested/sub/file2'],
92 | includeParent: true,
93 | },
94 | expected: [
95 | {
96 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
97 | destination: 'bar/file1',
98 | },
99 | {
100 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
101 | destination: 'bar/nested/sub/file2',
102 | },
103 | ],
104 | },
105 | {
106 | name: 'relative given root with prefix',
107 | input: {
108 | givenRoot: 'foo/bar',
109 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
110 | files: ['file1', 'nested/sub/file2'],
111 | prefix: 'prefix',
112 | },
113 | expected: [
114 | {
115 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
116 | destination: 'prefix/file1',
117 | },
118 | {
119 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
120 | destination: 'prefix/nested/sub/file2',
121 | },
122 | ],
123 | },
124 | {
125 | name: 'relative given root with parent and prefix',
126 | input: {
127 | givenRoot: 'foo/bar',
128 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
129 | files: ['file1', 'nested/sub/file2'],
130 | prefix: 'prefix',
131 | includeParent: true,
132 | },
133 | expected: [
134 | {
135 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
136 | destination: 'prefix/bar/file1',
137 | },
138 | {
139 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
140 | destination: 'prefix/bar/nested/sub/file2',
141 | },
142 | ],
143 | },
144 |
145 | // absolute
146 | {
147 | name: 'absolute given root',
148 | input: {
149 | givenRoot: path.join(process.cwd(), 'foo', 'bar'),
150 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
151 | files: ['file1', 'nested/sub/file2'],
152 | },
153 | expected: [
154 | {
155 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
156 | destination: 'file1',
157 | },
158 | {
159 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
160 | destination: 'nested/sub/file2',
161 | },
162 | ],
163 | },
164 | {
165 | name: 'absolute given root with parent',
166 | input: {
167 | givenRoot: path.join(process.cwd(), 'foo', 'bar'),
168 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
169 | files: ['file1', 'nested/sub/file2'],
170 | includeParent: true,
171 | },
172 | expected: [
173 | {
174 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
175 | destination: 'bar/file1',
176 | },
177 | {
178 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
179 | destination: 'bar/nested/sub/file2',
180 | },
181 | ],
182 | },
183 | {
184 | name: 'absolute given root with prefix',
185 | input: {
186 | givenRoot: path.join(process.cwd(), 'foo', 'bar'),
187 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
188 | files: ['file1', 'nested/sub/file2'],
189 | prefix: 'prefix',
190 | },
191 | expected: [
192 | {
193 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
194 | destination: 'prefix/file1',
195 | },
196 | {
197 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
198 | destination: 'prefix/nested/sub/file2',
199 | },
200 | ],
201 | },
202 | {
203 | name: 'absolute given root with parent and prefix',
204 | input: {
205 | givenRoot: path.join(process.cwd(), 'foo', 'bar'),
206 | absoluteRoot: path.join(process.cwd(), 'foo', 'bar'),
207 | files: ['file1', 'nested/sub/file2'],
208 | prefix: 'prefix',
209 | includeParent: true,
210 | },
211 | expected: [
212 | {
213 | source: path.join(process.cwd(), 'foo', 'bar', 'file1'),
214 | destination: 'prefix/bar/file1',
215 | },
216 | {
217 | source: path.join(process.cwd(), 'foo', 'bar', 'nested', 'sub', 'file2'),
218 | destination: 'prefix/bar/nested/sub/file2',
219 | },
220 | ],
221 | },
222 | ];
223 |
224 | for (const tc of cases) {
225 | await suite.test(tc.name, async () => {
226 | const result = Client.computeDestinations(tc.input);
227 | assert.deepStrictEqual(result, tc.expected);
228 | });
229 | }
230 | });
231 |
  // Verifies that Client#upload forwards each file and the expected options
  // to Bucket#upload, using a mocked upload (no network I/O).
  test('#upload', async (suite) => {
    await suite.test('calls uploadFile', async (t) => {
      // Stub Bucket#upload so calls are recorded instead of hitting GCS, and
      // stub auth so no credentials are required.
      const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);
      t.mock.method(GoogleAuth.prototype, 'getClient', () => {});

      // Do the upload
      const client = await Client.build();
      await client.upload({
        bucket: 'my-bucket',
        files: [
          {
            source: path.join(process.cwd(), 'file1'),
            destination: 'sub/path/to/file1',
          },
          {
            source: path.join(process.cwd(), 'nested', 'file2'),
            destination: 'sub/path/to/nested/file2',
          },
        ],
        concurrency: 10,
        metadata: {
          contentType: 'application/json',
        },
        gzip: true,
        resumable: true,
        predefinedAcl: 'authenticatedRead',
      });

      // Check call sites: first positional argument of each recorded call is
      // the local source path.
      const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0));
      assert.deepStrictEqual(uploadedFiles, [
        path.join(process.cwd(), 'file1'),
        path.join(process.cwd(), 'nested', 'file2'),
      ]);

      // Second positional argument carries the per-file upload options.
      const call = uploadMock.mock.calls.at(0)?.arguments?.at(1) as UploadOptions;
      assert.deepStrictEqual(call?.destination, 'sub/path/to/file1');
      assert.deepStrictEqual(call?.metadata, { contentType: 'application/json' });
      assert.deepStrictEqual(call?.gzip, true);
      assert.deepStrictEqual(call?.predefinedAcl, 'authenticatedRead');
      assert.deepStrictEqual(call?.resumable, true);
    });
  });
275 | });
276 |
--------------------------------------------------------------------------------
/tests/headers.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import { parseHeadersInput } from '../src/headers';
21 |
22 | test('#parseHeadersInput', { concurrency: true }, async (suite) => {
23 | const cases = [
24 | {
25 | name: 'empty string',
26 | input: ``,
27 | expected: {},
28 | },
29 | {
30 | name: 'empty string padded',
31 | input: `
32 |
33 | `,
34 | expected: {},
35 | },
36 | {
37 | name: 'empty string padded',
38 | input: `
39 | cache-control: public, max-age=3600
40 | content-disposition: attachment; filename=file.json;
41 | content-encoding: gzip
42 | content-language: en
43 | content-type: application/json
44 | custom-time: 1985-04-12T23:20:50.52Z
45 | `,
46 | expected: {
47 | cacheControl: 'public, max-age=3600',
48 | contentDisposition: 'attachment; filename=file.json;',
49 | contentEncoding: 'gzip',
50 | contentLanguage: 'en',
51 | contentType: 'application/json',
52 | customTime: '1985-04-12T23:20:50.52Z',
53 | },
54 | },
55 | {
56 | name: 'custom data',
57 | input: `
58 | x-goog-meta-foo: value1
59 | x-goog-meta-bar: value2
60 | x-goog-meta-baz: 🚀:to:the:moon
61 | `,
62 | expected: {
63 | metadata: {
64 | foo: 'value1',
65 | bar: 'value2',
66 | baz: '🚀:to:the:moon',
67 | },
68 | },
69 | },
70 | {
71 | name: 'value multiple colons',
72 | input: `
73 | x-goog-meta-foo: it::has:::fun
74 | `,
75 | expected: {
76 | metadata: {
77 | foo: 'it::has:::fun',
78 | },
79 | },
80 | },
    {
      name: 'no key',
      input: 'value',
      error: 'Failed to parse header',
    },
    {
      // NOTE(review): this input is identical to the 'no key' case above —
      // presumably it was meant to exercise a header with a key but no value
      // (e.g. 'key:'); verify against parseHeadersInput before changing.
      name: 'no value',
      input: 'value',
      error: 'Failed to parse header',
    },
91 | {
92 | name: 'duplicate',
93 | input: `
94 | one: two
95 | one: three
96 | `,
97 | error: 'key "one" already exists',
98 | },
99 | {
100 | name: 'invalid custom',
101 | input: 'invalid: value',
102 | error: 'must be prefixed with',
103 | },
104 | ];
105 |
106 | for await (const tc of cases) {
107 | await suite.test(tc.name, async () => {
108 | if (tc.error) {
109 | assert.throws(() => {
110 | parseHeadersInput(tc.input);
111 | }, tc.error);
112 | } else {
113 | const result = parseHeadersInput(tc.input);
114 | assert.deepStrictEqual(result, tc.expected);
115 | }
116 | });
117 | }
118 | });
119 |
--------------------------------------------------------------------------------
/tests/helpers.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2023 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { Bucket, File, Storage, UploadOptions, UploadResponse } from '@google-cloud/storage';
18 |
19 | /**
20 | * mockUpload is a stub for Bucket.upload()
21 | */
22 | export const mockUpload = async (p: string, opts?: UploadOptions): Promise => {
23 | const bucket = new Bucket(new Storage(), 'bucket');
24 | const file = new File(bucket, p);
25 | return [file, opts];
26 | };
27 |
--------------------------------------------------------------------------------
/tests/main.int.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2022 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import * as path from 'path';
21 | import { randomBytes } from 'crypto';
22 |
23 | import * as core from '@actions/core';
24 | import {
25 | clearEnv,
26 | inParallel,
27 | setInputs,
28 | skipIfMissingEnv,
29 | } from '@google-github-actions/actions-utils';
30 | import { Storage } from '@google-cloud/storage';
31 |
32 | import { run } from '../src/main';
33 |
34 | const projectID = process.env.UPLOAD_CLOUD_STORAGE_TEST_PROJECT;
35 |
36 | test(
37 | 'integration/main#run',
38 | {
39 | concurrency: true,
40 | skip: skipIfMissingEnv('UPLOAD_CLOUD_STORAGE_TEST_PROJECT'),
41 | },
42 | async (suite) => {
43 | let storage: Storage;
44 | let testBucket: string;
45 |
46 | suite.before(async () => {
47 | storage = new Storage({
48 | projectId: projectID,
49 | });
50 |
51 | // Create a dedicated bucket for each run.
52 | const testBucketName = `main-${randomBytes(6).toString('hex')}-${
53 | process.env.GITHUB_SHA || 'unknown'
54 | }`;
55 | const [bucket] = await storage.createBucket(testBucketName, {
56 | location: 'US',
57 | });
58 | testBucket = bucket.name;
59 |
60 | process.env.GITHUB_WORKSPACE = path.join(path.dirname(__dirname), 'tests');
61 |
62 | suite.mock.method(core, 'debug', () => {});
63 | suite.mock.method(core, 'info', () => {});
64 | suite.mock.method(core, 'warning', () => {});
65 | suite.mock.method(core, 'setOutput', () => {});
66 | suite.mock.method(core, 'setSecret', () => {});
67 | suite.mock.method(core, 'group', () => {});
68 | suite.mock.method(core, 'startGroup', () => {});
69 | suite.mock.method(core, 'endGroup', () => {});
70 | suite.mock.method(core, 'addPath', () => {});
71 | suite.mock.method(core, 'exportVariable', () => {});
72 | });
73 |
74 | suite.afterEach(async () => {
75 | clearEnv((key) => {
76 | return key.startsWith(`INPUT_`) || key.startsWith(`GITHUB_`);
77 | });
78 |
79 | const bucket = storage.bucket(testBucket);
80 | const [files] = await bucket.getFiles();
81 | const tasks = files.map((file) => async (): Promise => {
82 | await bucket.file(file.name).delete();
83 | });
84 | await inParallel(tasks, 50);
85 | });
86 |
87 | suite.after(async () => {
88 | const bucket = storage.bucket(testBucket);
89 | await bucket.delete();
90 | });
91 |
92 | await suite.test('uploads all files', async () => {
93 | setInputs({
94 | // project_id cannot actually be undefined if we got here, but
95 | // TypeScript doesn't know about Mocha's skip().
96 | project_id: projectID || '',
97 | path: './testdata',
98 | destination: `${testBucket}/sub/path`,
99 | gzip: 'true',
100 | resumable: 'true',
101 | parent: 'false',
102 | glob: '**/*',
103 | concurrency: '10',
104 | process_gcloudignore: 'false',
105 | predefinedAcl: 'authenticatedRead',
106 | });
107 |
108 | await run();
109 |
110 | const [list] = await storage.bucket(testBucket).getFiles();
111 | const names = list.map((file) => [file.name, file.metadata.contentType]);
112 | assert.deepStrictEqual(names, [
113 | ['sub/path/nested1/nested2/test3.txt', 'text/plain'],
114 | ['sub/path/nested1/test1.txt', 'text/plain'],
115 | ['sub/path/test.css', 'text/css'],
116 | ['sub/path/test.js', 'application/javascript'],
117 | ['sub/path/test.json', 'application/json'],
118 | ['sub/path/test1.txt', 'text/plain'],
119 | ['sub/path/test2.txt', 'text/plain'],
120 | ['sub/path/testfile', undefined],
121 | ]);
122 | });
123 | },
124 | );
125 |
--------------------------------------------------------------------------------
/tests/main.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2022 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import * as path from 'path';
21 | import * as os from 'os';
22 | import { promises as fs } from 'fs';
23 |
24 | import * as core from '@actions/core';
25 | import { clearEnv, forceRemove, setInputs } from '@google-github-actions/actions-utils';
26 | import { Bucket, UploadOptions } from '@google-cloud/storage';
27 | import { GoogleAuth } from 'google-auth-library';
28 |
29 | import { mockUpload } from './helpers.test';
30 |
31 | import { run } from '../src/main';
32 |
33 | /**
34 | * These are ONLY meant to be the highest-level tests that exercise the entire
35 | * workflow up to but not including the actual uploading of files.
36 | */
test('#run', { concurrency: true }, async (suite) => {
  // Temporary directory acting as the GitHub Actions workspace; recreated
  // for every test in beforeEach and deleted again in afterEach.
  let githubWorkspace: string;

  suite.before(() => {
    // Silence every Actions logging/output helper so the tests emit no
    // workflow commands or noise to stdout.
    suite.mock.method(core, 'debug', () => {});
    suite.mock.method(core, 'info', () => {});
    suite.mock.method(core, 'warning', () => {});
    suite.mock.method(core, 'setOutput', () => {});
    suite.mock.method(core, 'setSecret', () => {});
    suite.mock.method(core, 'group', () => {});
    suite.mock.method(core, 'startGroup', () => {});
    suite.mock.method(core, 'endGroup', () => {});
    suite.mock.method(core, 'addPath', () => {});
    suite.mock.method(core, 'exportVariable', () => {});

    // We do not care about authentication in the unit tests
    suite.mock.method(GoogleAuth.prototype, 'getClient', () => {});
  });

  suite.beforeEach(async () => {
    // Create a temporary directory to serve as the actions workspace
    githubWorkspace = await fs.mkdtemp(path.join(os.tmpdir(), 'gha-'));
    // Copy the checked-in fixtures into the workspace so every test mutates
    // its own private copy rather than the repository files.
    await fs.cp('tests/testdata', path.join(githubWorkspace, 'testdata'), {
      recursive: true,
      force: true,
    });
    process.env.GITHUB_WORKSPACE = githubWorkspace;
  });

  suite.afterEach(async () => {
    await forceRemove(githubWorkspace);

    // Drop any action inputs (INPUT_*) and GitHub context (GITHUB_*)
    // variables a test set, so state cannot leak between tests.
    clearEnv((key) => {
      return key.startsWith(`INPUT_`) || key.startsWith(`GITHUB_`);
    });
  });

  await suite.test('uploads all files', async (t) => {
    // Replace Bucket#upload with the mockUpload stub (presumably a no-op
    // recorder) so no real GCS traffic occurs; call arguments are inspected
    // below via uploadMock.mock.calls.
    const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);

    setInputs({
      path: './testdata',
      destination: 'my-bucket/sub/path',
      gzip: 'true',
      resumable: 'true',
      parent: 'true',
      glob: '**/*',
      concurrency: '10',
      process_gcloudignore: 'false',
      predefinedAcl: 'authenticatedRead',
      headers: 'content-type: application/json',
    });

    await run();

    // Check call sites
    const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0)).sort();
    assert.deepStrictEqual(uploadedFiles, [
      path.join(githubWorkspace, 'testdata', 'nested1', 'nested2', 'test3.txt'),
      path.join(githubWorkspace, 'testdata', 'nested1', 'test1.txt'),
      path.join(githubWorkspace, 'testdata', 'test.css'),
      path.join(githubWorkspace, 'testdata', 'test.js'),
      path.join(githubWorkspace, 'testdata', 'test.json'),
      path.join(githubWorkspace, 'testdata', 'test1.txt'),
      path.join(githubWorkspace, 'testdata', 'test2.txt'),
      path.join(githubWorkspace, 'testdata', 'testfile'),
    ]);

    // Check arguments
    // With parent=true, the top-level "testdata" directory name is kept in
    // the destination object path under the bucket prefix.
    const call = uploadMock.mock.calls.at(0)?.arguments?.at(1) as UploadOptions;
    assert.deepStrictEqual(call?.destination, 'sub/path/testdata/nested1/nested2/test3.txt');
    assert.deepStrictEqual(call?.metadata, { contentType: 'application/json' });
    assert.deepStrictEqual(call?.gzip, true);
    assert.deepStrictEqual(call?.predefinedAcl, 'authenticatedRead');
    assert.deepStrictEqual(call?.resumable, true);
  });

  await suite.test('uploads all files without a parent', async (t) => {
    const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);

    setInputs({
      path: './testdata',
      destination: 'my-bucket/sub/path',
      gzip: 'true',
      resumable: 'true',
      parent: 'false',
      glob: '**/*',
      concurrency: '10',
      process_gcloudignore: 'false',
      predefinedAcl: 'authenticatedRead',
      headers: 'content-type: application/json',
    });

    await run();

    // Check call sites
    const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0)).sort();
    assert.deepStrictEqual(uploadedFiles, [
      path.join(githubWorkspace, 'testdata', 'nested1', 'nested2', 'test3.txt'),
      path.join(githubWorkspace, 'testdata', 'nested1', 'test1.txt'),
      path.join(githubWorkspace, 'testdata', 'test.css'),
      path.join(githubWorkspace, 'testdata', 'test.js'),
      path.join(githubWorkspace, 'testdata', 'test.json'),
      path.join(githubWorkspace, 'testdata', 'test1.txt'),
      path.join(githubWorkspace, 'testdata', 'test2.txt'),
      path.join(githubWorkspace, 'testdata', 'testfile'),
    ]);

    // Check upload paths
    // With parent=false, the "testdata" directory itself is stripped from
    // the destination object names.
    const paths = uploadMock.mock.calls.map(
      (call) => (call.arguments?.at(1) as UploadOptions)?.destination,
    );
    assert.deepStrictEqual(paths, [
      'sub/path/nested1/nested2/test3.txt',
      'sub/path/nested1/test1.txt',
      'sub/path/test.css',
      'sub/path/test.js',
      'sub/path/test.json',
      'sub/path/test1.txt',
      'sub/path/test2.txt',
      'sub/path/testfile',
    ]);
  });

  await suite.test('uploads a single file', async (t) => {
    const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);

    setInputs({
      path: './testdata/test.css',
      destination: 'my-bucket/sub/path',
      gzip: 'true',
      resumable: 'true',
      // Even though this is true, the parent directory shouldn't be included
      // for direct file paths.
      parent: 'true',
      concurrency: '10',
      process_gcloudignore: 'false',
    });

    await run();

    // Check call sites
    const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0)).sort();
    assert.deepStrictEqual(uploadedFiles, [path.join(githubWorkspace, 'testdata', 'test.css')]);

    // Check arguments
    const call = uploadMock.mock.calls.at(0)?.arguments?.at(1) as UploadOptions;
    assert.deepStrictEqual(call?.destination, 'sub/path/test.css');
  });

  await suite.test('processes a gcloudignore', async (t) => {
    const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);

    setInputs({
      path: './testdata',
      destination: 'my-bucket/sub/path',
      gzip: 'true',
      resumable: 'true',
      parent: 'true',
      concurrency: '10',
      process_gcloudignore: 'true',
    });

    // Add gcloudignore
    // The default ignore file lives at the workspace root; the pattern below
    // should exclude every .txt fixture from the upload set.
    await fs.writeFile(path.join(githubWorkspace, '.gcloudignore'), 'testdata/**/*.txt');

    await run();

    // Check call sites
    const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0));
    assert.deepStrictEqual(uploadedFiles, [
      path.join(githubWorkspace, 'testdata', 'test.css'),
      path.join(githubWorkspace, 'testdata', 'test.js'),
      path.join(githubWorkspace, 'testdata', 'test.json'),
      path.join(githubWorkspace, 'testdata', 'testfile'),
    ]);

    // Check arguments
    const call = uploadMock.mock.calls.at(0)?.arguments?.at(1) as UploadOptions;
    assert.deepStrictEqual(call?.destination, 'sub/path/testdata/test.css');
  });

  await suite.test('processes a custom gcloudignore path', async (t) => {
    const uploadMock = t.mock.method(Bucket.prototype, 'upload', mockUpload);
    // Same behavior as the default-.gcloudignore test above, but the ignore
    // file lives at a non-default name supplied via gcloudignore_path.
    const gcloudIgnorePath = path.join(githubWorkspace, '.gcloudignore-other');

    setInputs({
      path: './testdata',
      destination: 'my-bucket/sub/path',
      gzip: 'true',
      resumable: 'true',
      parent: 'true',
      concurrency: '10',
      process_gcloudignore: 'true',
      gcloudignore_path: '.gcloudignore-other',
    });

    // Add gcloudignore
    await fs.writeFile(gcloudIgnorePath, 'testdata/**/*.txt');

    await run();

    // Check call sites
    const uploadedFiles = uploadMock.mock.calls.map((call) => call?.arguments?.at(0));
    assert.deepStrictEqual(uploadedFiles, [
      path.join(githubWorkspace, 'testdata', 'test.css'),
      path.join(githubWorkspace, 'testdata', 'test.js'),
      path.join(githubWorkspace, 'testdata', 'test.json'),
      path.join(githubWorkspace, 'testdata', 'testfile'),
    ]);

    // Check arguments
    const call = uploadMock.mock.calls.at(0)?.arguments?.at(1) as UploadOptions;
    assert.deepStrictEqual(call?.destination, 'sub/path/testdata/test.css');
  });
});
253 |
--------------------------------------------------------------------------------
/tests/testdata-unicode/🚀:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google-github-actions/upload-cloud-storage/b497d556992fddfe7d6e1672aa26657e7ed75caf/tests/testdata-unicode/🚀
--------------------------------------------------------------------------------
/tests/testdata/nested1/nested2/test3.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google-github-actions/upload-cloud-storage/b497d556992fddfe7d6e1672aa26657e7ed75caf/tests/testdata/nested1/nested2/test3.txt
--------------------------------------------------------------------------------
/tests/testdata/nested1/test1.txt:
--------------------------------------------------------------------------------
1 | hellonested
--------------------------------------------------------------------------------
/tests/testdata/test.css:
--------------------------------------------------------------------------------
1 | body {
2 | display: none;
3 | }
4 |
--------------------------------------------------------------------------------
/tests/testdata/test.js:
--------------------------------------------------------------------------------
1 | (() => {
2 | alert('hi');
3 | })();
4 |
--------------------------------------------------------------------------------
/tests/testdata/test.json:
--------------------------------------------------------------------------------
1 | { "foo":"bar" }
--------------------------------------------------------------------------------
/tests/testdata/test1.txt:
--------------------------------------------------------------------------------
1 | hello world
--------------------------------------------------------------------------------
/tests/testdata/test2.txt:
--------------------------------------------------------------------------------
1 | hello gcs
--------------------------------------------------------------------------------
/tests/testdata/testfile:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/google-github-actions/upload-cloud-storage/b497d556992fddfe7d6e1672aa26657e7ed75caf/tests/testdata/testfile
--------------------------------------------------------------------------------
/tests/util.test.ts:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | import { test } from 'node:test';
18 | import assert from 'node:assert';
19 |
20 | import { promises as fs } from 'node:fs';
21 | import * as os from 'os';
22 | import * as path from 'path';
23 |
24 | import { forceRemove, toPosixPath, toWin32Path } from '@google-github-actions/actions-utils';
25 |
26 | import { absoluteRootAndComputedGlob, expandGlob, parseBucketNameAndPrefix } from '../src/util';
27 |
28 | test('#absoluteRootAndComputedGlob', { concurrency: true }, async (suite) => {
29 | let tmpdir: string;
30 |
31 | suite.beforeEach(async () => {
32 | // Make a temporary directory for each test.
33 | tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'gha-'));
34 | process.env.GITHUB_WORKSPACE = tmpdir;
35 | });
36 |
37 | suite.afterEach(async () => {
38 | delete process.env.GITHUB_WORKSPACE;
39 | await forceRemove(tmpdir);
40 | });
41 |
42 | await suite.test('throws an error when GITHUB_WORKSPACE is unset', async () => {
43 | delete process.env.GITHUB_WORKSPACE;
44 |
45 | await assert.rejects(async () => {
46 | await absoluteRootAndComputedGlob('/not/a/real/path', '');
47 | }, /GITHUB_WORKSPACE is not set/);
48 | });
49 |
50 | await suite.test('throws an error if input path does not exist', async () => {
51 | await assert.rejects(async () => {
52 | await absoluteRootAndComputedGlob('/not/a/real/path', '');
53 | }, 'ENOENT');
54 | });
55 |
56 | await suite.test('throws an error if the input is a file and glob is defined', async () => {
57 | const file = path.join(tmpdir, 'my-file');
58 | await fs.writeFile(file, 'test');
59 |
60 | await assert.rejects(async () => {
61 | await absoluteRootAndComputedGlob(file, '*.md');
62 | }, 'root "path" points to a file');
63 | });
64 |
65 | await suite.test('modifies the directory and glob when given a relative file', async () => {
66 | const file = path.join(tmpdir, 'my-file');
67 | await fs.writeFile(file, 'test');
68 |
69 | const result = await absoluteRootAndComputedGlob(path.basename(file), '');
70 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
71 | });
72 |
73 | await suite.test(
74 | 'modifies the directory and glob when given a relative file in a subpath',
75 | async () => {
76 | const subdir = await fs.mkdtemp(path.join(tmpdir, 'sub-'));
77 | const file = path.join(subdir, 'my-file');
78 | await fs.writeFile(file, 'test');
79 |
80 | const name = path.join(path.basename(subdir), path.basename(file));
81 | const result = await absoluteRootAndComputedGlob(name, '');
82 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
83 | },
84 | );
85 |
86 | await suite.test('modifies the directory and glob when given an absolute file', async () => {
87 | const file = path.join(tmpdir, 'my-file');
88 | await fs.writeFile(file, 'test');
89 |
90 | const result = await absoluteRootAndComputedGlob(file, '');
91 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
92 | });
93 |
94 | await suite.test('resolves a relative directory', async () => {
95 | const subdir = await fs.mkdtemp(path.join(tmpdir, 'sub-'));
96 | const rel = path.basename(subdir);
97 |
98 | const result = await absoluteRootAndComputedGlob(rel, '*.md');
99 | assert.deepStrictEqual(result, [subdir, '*.md', true]);
100 | });
101 |
102 | await suite.test('does not resolve an absolute directory', async () => {
103 | const subdir = await fs.mkdtemp(path.join(tmpdir, 'sub-'));
104 |
105 | const result = await absoluteRootAndComputedGlob(subdir, '*.md');
106 | assert.deepStrictEqual(result, [subdir, '*.md', true]);
107 | });
108 |
109 | await suite.test('always returns a posix glob', async () => {
110 | const result = await absoluteRootAndComputedGlob(tmpdir, 'foo\\bar\\*.txt');
111 | assert.deepStrictEqual(result, [tmpdir, 'foo/bar/*.txt', true]);
112 | });
113 |
114 | await suite.test('resolves a win32-style absolute root', async () => {
115 | const file = path.join(tmpdir, 'my-file');
116 | await fs.writeFile(file, 'test');
117 |
118 | const result = await absoluteRootAndComputedGlob(toWin32Path(file), '');
119 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
120 | });
121 |
122 | await suite.test('resolves a win32-style relative root', async () => {
123 | const file = path.join(tmpdir, 'my-file');
124 | await fs.writeFile(file, 'test');
125 |
126 | const result = await absoluteRootAndComputedGlob(toWin32Path(path.basename(file)), '');
127 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
128 | });
129 |
130 | await suite.test('resolves a posix-style absolute root', async () => {
131 | const file = path.join(tmpdir, 'my-file');
132 | await fs.writeFile(file, 'test');
133 |
134 | const result = await absoluteRootAndComputedGlob(toPosixPath(file), '');
135 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
136 | });
137 |
138 | await suite.test('resolves a posix-style relative root', async () => {
139 | const file = path.join(tmpdir, 'my-file');
140 | await fs.writeFile(file, 'test');
141 |
142 | const result = await absoluteRootAndComputedGlob(toPosixPath(path.basename(file)), '');
143 | assert.deepStrictEqual(result, [path.dirname(file), 'my-file', false]);
144 | });
145 | });
146 |
147 | test('#expandGlob', { concurrency: true }, async (suite) => {
148 | let tmpdir: string;
149 |
150 | suite.beforeEach(async () => {
151 | // Make a temporary directory for each test.
152 | tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'gha-'));
153 | });
154 |
155 | suite.afterEach(async () => {
156 | await forceRemove(tmpdir);
157 | });
158 |
159 | await suite.test('returns an empty array when the directory does not exist', async () => {
160 | const result = await expandGlob(path.join('dir', 'does', 'not', 'exist'), '');
161 | assert.deepStrictEqual(result, []);
162 | });
163 |
164 | await suite.test('returns an empty array when the directory is empty', async () => {
165 | const result = await expandGlob(tmpdir, '');
166 | assert.deepStrictEqual(result, []);
167 | });
168 |
169 | await suite.test('returns one file in a directory', async () => {
170 | const a = path.join(tmpdir, 'a');
171 | await fs.writeFile(a, 'test');
172 | const result = await expandGlob(tmpdir, '');
173 | assert.deepStrictEqual(result, [toPosixPath('a')]);
174 | });
175 |
176 | await suite.test('returns multiple files in a directory', async () => {
177 | const a = path.join(tmpdir, 'a');
178 | await fs.writeFile(a, 'test');
179 |
180 | const b = path.join(tmpdir, 'b');
181 | await fs.writeFile(b, 'test');
182 |
183 | const result = await expandGlob(tmpdir, '');
184 | assert.deepStrictEqual(result, [toPosixPath('a'), toPosixPath('b')]);
185 | });
186 |
187 | await suite.test('returns files in subdirectories', async () => {
188 | const a = path.join(tmpdir, 'a');
189 | await fs.writeFile(a, 'test');
190 |
191 | const pth = path.join(tmpdir, 'sub', 'directory');
192 | await fs.mkdir(pth, { recursive: true });
193 | const b = path.join(pth, 'b');
194 | await fs.writeFile(b, 'test');
195 |
196 | const result = await expandGlob(tmpdir, '');
197 | assert.deepStrictEqual(result, [toPosixPath('a'), toPosixPath('sub/directory/b')]);
198 | });
199 |
200 | await suite.test('returns files beginning with a dot', async () => {
201 | const a = path.join(tmpdir, '.a');
202 | await fs.writeFile(a, 'test');
203 |
204 | const pth = path.join(tmpdir, 'sub', 'directory');
205 | await fs.mkdir(pth, { recursive: true });
206 | const b = path.join(pth, '.b');
207 | await fs.writeFile(b, 'test');
208 |
209 | const result = await expandGlob(tmpdir, '');
210 | assert.deepStrictEqual(result, [toPosixPath('.a'), toPosixPath('sub/directory/.b')]);
211 | });
212 |
213 | await suite.test(
214 | 'returns files with unicode characters in the filename',
215 | { skip: process.platform === 'win32' },
216 | async () => {
217 | const a = path.join(tmpdir, '🚀');
218 | await fs.writeFile(a, 'test');
219 |
220 | const pth = path.join(tmpdir, 'sub', 'directory');
221 | await fs.mkdir(pth, { recursive: true });
222 | const b = path.join(pth, '.🚀');
223 | await fs.writeFile(b, 'test');
224 |
225 | const result = await expandGlob(tmpdir, '');
226 | assert.deepStrictEqual(result, [toPosixPath('sub/directory/.🚀'), toPosixPath('🚀')]);
227 | },
228 | );
229 |
230 | await suite.test('returns files when given a relative path', async () => {
231 | const a = path.join(tmpdir, 'a');
232 | await fs.writeFile(a, 'test');
233 |
234 | const pth = path.join(tmpdir, 'sub', 'directory');
235 | await fs.mkdir(pth, { recursive: true });
236 | const b = path.join(pth, 'b');
237 | await fs.writeFile(b, 'test');
238 |
239 | const rel = path.relative(process.cwd(), tmpdir);
240 | const result = await expandGlob(rel, '');
241 | assert.deepStrictEqual(result, [toPosixPath('a'), toPosixPath('sub/directory/b')]);
242 | });
243 |
244 | await suite.test('only returns files', async () => {
245 | const a = path.join(tmpdir, '.a');
246 | await fs.writeFile(a, 'test');
247 |
248 | const b = path.join(tmpdir, 'b');
249 | await fs.writeFile(b, 'test');
250 |
251 | const pth = path.join(tmpdir, 'sub', 'directory');
252 | await fs.mkdir(pth, { recursive: true });
253 |
254 | // "sub/directory" should not be included because it has no files.
255 | const result = await expandGlob(tmpdir, '');
256 | assert.deepStrictEqual(result, [toPosixPath('.a'), toPosixPath('b')]);
257 | });
258 |
259 | await suite.test('honors the glob pattern', async () => {
260 | const a = path.join(tmpdir, '.a');
261 | await fs.writeFile(a, 'test');
262 |
263 | const b = path.join(tmpdir, 'b');
264 | await fs.writeFile(b, 'test');
265 |
266 | // The list should only contain a, since the glob only includes files
267 | // starting with a ".".
268 | const result = await expandGlob(tmpdir, '.*');
269 | assert.deepStrictEqual(result, [toPosixPath('.a')]);
270 | });
271 | });
272 |
273 | test('#parseBucketNameAndPrefix', { concurrency: true }, async (suite) => {
274 | const cases = [
275 | {
276 | name: 'empty string',
277 | input: '',
278 | expected: ['', ''],
279 | },
280 | {
281 | name: 'spaces',
282 | input: ' ',
283 | expected: ['', ''],
284 | },
285 | {
286 | name: 'spaces slash',
287 | input: ' / ',
288 | expected: ['', ''],
289 | },
290 | {
291 | name: 'only bucket name',
292 | input: 'foobar',
293 | expected: ['foobar', ''],
294 | },
295 | {
296 | name: 'bucket and prefix',
297 | input: 'foo/bar',
298 | expected: ['foo', 'bar'],
299 | },
300 | {
301 | name: 'bucket and long prefix',
302 | input: 'foo/bar/baz/zip/zap',
303 | expected: ['foo', 'bar/baz/zip/zap'],
304 | },
305 | ];
306 |
307 | for await (const tc of cases) {
308 | await suite.test(tc.name, async () => {
309 | const result = parseBucketNameAndPrefix(tc.input);
310 | assert.deepStrictEqual(result, tc.expected);
311 | });
312 | }
313 | });
314 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | {
17 | "compilerOptions": {
18 | "target": "es6",
19 | "module": "commonjs",
20 | "lib": ["es6"],
21 | "outDir": "./dist",
22 | "rootDir": "./src",
23 | "strict": true,
24 | "noImplicitAny": true,
25 | "esModuleInterop": true
26 | },
27 | "exclude": ["node_modules/", "tests/"]
28 | }
29 |
--------------------------------------------------------------------------------