├── .github ├── dependabot.yml └── workflows │ ├── cleanup.yml │ ├── draft-release.yml │ ├── integration.yml │ ├── publish.yml │ ├── release.yml │ └── unit.yml ├── .gitignore ├── .prettierrc.js ├── CHANGELOG.md ├── CODEOWNERS ├── LICENSE ├── README.md ├── action.yml ├── bin └── runTests.sh ├── dist └── index.js ├── eslint.config.mjs ├── package-lock.json ├── package.json ├── src ├── client.ts ├── main.ts ├── secret.ts └── util.ts ├── tests ├── client.test.ts ├── secret.test.ts ├── test-func-ignore-node │ ├── .gcloudignore │ ├── bar │ │ ├── bar.txt │ │ └── baz │ │ │ └── baz.txt │ ├── foo │ │ └── data.txt │ ├── index.js │ ├── notIgnored.txt │ └── package.json ├── test-func-ignore │ ├── .gcloudignore │ ├── ignore.txt │ ├── index.js │ └── package.json ├── test-node-func │ ├── .dotfile │ ├── index.js │ └── package.json └── util.test.ts └── tsconfig.json /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: 'npm' 4 | directory: '/' 5 | rebase-strategy: 'disabled' 6 | schedule: 7 | interval: 'daily' 8 | commit-message: 9 | prefix: 'security: ' 10 | open-pull-requests-limit: 0 # only check security updates 11 | -------------------------------------------------------------------------------- /.github/workflows/cleanup.yml: -------------------------------------------------------------------------------- 1 | name: 'Cleanup' 2 | 3 | on: 4 | schedule: 5 | - cron: '0 */6 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | contents: 'read' 10 | id-token: 'write' 11 | 12 | jobs: 13 | cleanup: 14 | runs-on: 'ubuntu-latest' 15 | 16 | steps: 17 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 18 | 19 | - uses: 'google-github-actions/auth@v2' # ratchet:exclude 20 | with: 21 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}' 22 | service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' 23 | 24 | - uses: 'google-github-actions/setup-gcloud@v2' # ratchet:exclude 25 | 26 | - name: Delete services 27 | run: |- 28 | gcloud config set core/project "${{ vars.PROJECT_ID }}" 29 | gcloud config set functions/region "us-central1" 30 | 31 | # List and delete all functions that were deployed 30 minutes ago or 32 | # earlier. The date math here is a little weird, but we're looking for 33 | # deployments "earlier than" 30 minutes ago, so it's less than since 34 | # time increases. 35 | (IFS=$'\n'; for NAME in $(gcloud functions list --format="value(name)" --filter="updateTime < '-pt30m'"); do 36 | echo "Deleting ${NAME}..." 37 | gcloud functions delete ${NAME} --quiet 38 | done) 39 | -------------------------------------------------------------------------------- /.github/workflows/draft-release.yml: -------------------------------------------------------------------------------- 1 | name: 'Draft release' 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | version_strategy: 7 | description: 'Version strategy: The strategy to used to update the version based on semantic versioning (more info at https://semver.org/).' 
8 | required: true 9 | default: 'patch' 10 | type: 'choice' 11 | options: 12 | - 'major' 13 | - 'minor' 14 | - 'patch' 15 | 16 | jobs: 17 | draft-release: 18 | uses: 'google-github-actions/.github/.github/workflows/draft-release.yml@v3' # ratchet:exclude 19 | with: 20 | version_strategy: '${{ github.event.inputs.version_strategy }}' 21 | secrets: 22 | ACTIONS_BOT_TOKEN: '${{ secrets.ACTIONS_BOT_TOKEN }}' 23 | -------------------------------------------------------------------------------- /.github/workflows/integration.yml: -------------------------------------------------------------------------------- 1 | name: 'Integration' 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | - 'release/**/*' 8 | pull_request: 9 | branches: 10 | - 'main' 11 | - 'release/**/*' 12 | workflow_dispatch: 13 | 14 | concurrency: 15 | group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}' 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | https_trigger: 20 | timeout-minutes: 10 21 | permissions: 22 | contents: 'read' 23 | id-token: 'write' 24 | runs-on: 'ubuntu-latest' 25 | steps: 26 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 27 | 28 | - uses: 'actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a' # ratchet:actions/setup-node@v4 29 | with: 30 | node-version: '20.x' 31 | 32 | - name: 'npm build' 33 | run: 'npm ci && npm run build' 34 | 35 | - uses: 'google-github-actions/auth@v2' # ratchet:exclude 36 | with: 37 | project_id: '${{ vars.PROJECT_ID }}' 38 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}' 39 | 40 | - id: 'deploy' 41 | uses: './' 42 | with: 43 | name: 'integration-https-trigger-${{ github.run_number }}-${{ github.run_attempt }}' 44 | runtime: 'nodejs22' 45 | entry_point: 'helloWorld' 46 | source_dir: './tests/test-node-func/' 47 | service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' 48 | 49 | event_trigger: 50 | timeout-minutes: 10 51 | permissions: 52 | contents: 'read' 53 | id-token: 'write' 54 | runs-on: 'ubuntu-latest' 55 | steps: 56 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 57 | 58 | - uses: 'actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a' # ratchet:actions/setup-node@v4 59 | with: 60 | node-version: '20.x' 61 | 62 | - name: 'npm build' 63 | run: 'npm ci && npm run build' 64 | 65 | - uses: 'google-github-actions/auth@v2' # ratchet:exclude 66 | with: 67 | project_id: '${{ vars.PROJECT_ID }}' 68 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}' 69 | 70 | - id: 'deploy' 71 | uses: './' 72 | with: 73 | name: 'integration-event-trigger-${{ github.run_number }}-${{ github.run_attempt }}' 74 | runtime: 'nodejs22' 75 | entry_point: 'helloWorld' 76 | source_dir: './tests/test-node-func/' 77 | event_trigger_type: 'google.cloud.audit.log.v1.written' 78 | event_trigger_filters: |- 79 | serviceName=storage.googleapis.com 80 | methodName=storage.objects.create 81 | event_trigger_retry: true 82 | event_trigger_service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' 83 | environment_variables: |- 84 | FOO=bar 85 | ZIP=zap 86 | build_environment_variables: |- 87 | FRUIT=apple 88 | MEAT=bacon 89 | secrets: |- 90 | SECRET_FOO=${{ vars.SECRET_VERSION_NAME }} 91 | SECRET_BAR=${{ vars.SECRET_NAME }} 92 | /etc/secrets/foo=${{ vars.SECRET_VERSION_NAME }} 93 | service_account: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' 94 | min_instance_count: 2 95 | max_instance_count: 5 96 | -------------------------------------------------------------------------------- 
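The integration jobs above stop at the `deploy` step, but the action also exposes `name` and `url` outputs that later steps in the same job can read. A minimal, illustrative follow-up step is sketched below — it is not part of integration.yml, the step name is hypothetical, and because newly deployed functions are private by default, actually invoking the printed URL would additionally require an authenticated request:

```yaml
      # Hypothetical follow-up step for the https_trigger job above; it assumes
      # the deploy step's id is 'deploy', as in the jobs in this workflow.
      - name: 'Show deploy outputs'
        run: |-
          echo "name: ${{ steps.deploy.outputs.name }}"
          echo "url:  ${{ steps.deploy.outputs.url }}"
```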
/.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: 'Publish immutable action version' 2 | 3 | on: 4 | workflow_dispatch: 5 | release: 6 | types: 7 | - 'published' 8 | 9 | jobs: 10 | publish: 11 | runs-on: 'ubuntu-latest' 12 | permissions: 13 | contents: 'read' 14 | id-token: 'write' 15 | packages: 'write' 16 | 17 | steps: 18 | - name: 'Checkout' 19 | uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 20 | 21 | - name: 'Publish' 22 | id: 'publish' 23 | uses: 'actions/publish-immutable-action@4bc8754ffc40f27910afb20287dbbbb675a4e978' # ratchet:actions/publish-immutable-action@v0.0.4 24 | with: 25 | github-token: '${{ secrets.GITHUB_TOKEN }}' 26 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: 'Release' 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | - 'release/**/*' 8 | 9 | jobs: 10 | release: 11 | uses: 'google-github-actions/.github/.github/workflows/release.yml@v3' # ratchet:exclude 12 | secrets: 13 | ACTIONS_BOT_TOKEN: '${{ secrets.ACTIONS_BOT_TOKEN }}' 14 | -------------------------------------------------------------------------------- /.github/workflows/unit.yml: -------------------------------------------------------------------------------- 1 | name: 'Unit' 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'main' 7 | - 'release/**/*' 8 | pull_request: 9 | branches: 10 | - 'main' 11 | - 'release/**/*' 12 | workflow_dispatch: 13 | 14 | concurrency: 15 | group: '${{ github.workflow }}-${{ github.head_ref || github.ref }}' 16 | cancel-in-progress: true 17 | 18 | jobs: 19 | unit: 20 | permissions: 21 | contents: 'read' 22 | id-token: 'write' 23 | 24 | strategy: 25 | fail-fast: false 26 | matrix: 27 | os: 28 | - 'ubuntu-latest' 29 | - 'windows-latest' 30 | - 'macos-latest' 31 | runs-on: '${{ matrix.os }}' 32 | 33 | steps: 34 | - uses: 'actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683' # ratchet:actions/checkout@v4 35 | 36 | - uses: 'actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a' # ratchet:actions/setup-node@v4 37 | with: 38 | node-version: '20.x' 39 | 40 | - name: 'npm build' 41 | run: 'npm ci && npm run build' 42 | 43 | - name: 'npm lint' 44 | # There's no need to run the linter for each operating system, since it 45 | # will find the same thing 3x and clog up the PR review. 
46 | if: ${{ matrix.os == 'ubuntu-latest' }} 47 | run: 'npm run lint' 48 | 49 | - id: 'auth' 50 | uses: 'google-github-actions/auth@v2' # ratchet:exclude 51 | if: ${{ github.event_name == 'push' || github.repository == github.event.pull_request.head.repo.full_name && github.actor != 'dependabot[bot]' }} 52 | with: 53 | project_id: '${{ vars.PROJECT_ID }}' 54 | workload_identity_provider: '${{ vars.WIF_PROVIDER_NAME }}' 55 | 56 | - name: 'npm test' 57 | env: 58 | TEST_AUTHENTICATED: '${{ !!steps.auth.outputs.auth_token }}' 59 | TEST_PROJECT_ID: '${{ vars.PROJECT_ID }}' 60 | TEST_SERVICE_ACCOUNT_EMAIL: '${{ vars.SERVICE_ACCOUNT_EMAIL }}' 61 | TEST_SECRET_VERSION_NAME: '${{ vars.SECRET_VERSION_NAME }}' 62 | run: 'npm run test' 63 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | runner/ 3 | 4 | # Rest of the file pulled from https://github.com/github/gitignore/blob/main/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # TypeScript v1 declaration files 30 | typings/ 31 | 32 | # TypeScript cache 33 | *.tsbuildinfo 34 | 35 | # Optional npm cache directory 36 | .npm 37 | 38 | # Optional eslint cache 39 | .eslintcache 40 | 41 | # Optional REPL history 42 | .node_repl_history 43 | 44 | # Output of 'npm pack' 45 | *.tgz 46 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | arrowParens: 'always', 3 | bracketSpacing: true, 4 | endOfLine: 'auto', 5 | jsxSingleQuote: true, 6 | printWidth: 100, 7 | quoteProps: 'consistent', 8 | semi: true, 9 | singleQuote: true, 10 | tabWidth: 2, 11 | trailingComma: 'all', 12 | useTabs: false, 13 | }; 14 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | Changelogs for each release are located on the [releases page](https://github.com/google-github-actions/deploy-cloud-functions/releases). 4 | 5 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @google-github-actions/maintainers 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # deploy-cloud-functions 2 | 3 | This action deploys your function source code to [Cloud Functions][cloud-functions] and makes the URL 4 | available to later build steps via outputs. 5 | 6 | > [!CAUTION] 7 | > 8 | > **This README corresponds to the "v3" GitHub Action**, which is currently in 9 | > beta. If you are using "v2", see the [documentation for 10 | > google-github-actions/deploy-cloud-functions@v2](https://github.com/google-github-actions/deploy-cloud-functions/tree/release/v2). 11 | 12 | **This is not an officially supported Google product, and it is not covered by a 13 | Google Cloud support contract. To report bugs or request features in a Google 14 | Cloud product, please contact [Google Cloud 15 | support](https://cloud.google.com/support).** 16 | 17 | 18 | ## Prerequisites 19 | 20 | - This action requires Google Cloud credentials that are authorized to access 21 | the secrets being requested. See [Authorization](#authorization) for more 22 | information. 23 | 24 | - This action runs using Node 20. If you are using self-hosted GitHub Actions 25 | runners, you must use a version of the GitHub Actions runner that supports 26 | Node 20 or higher. 27 | 28 | 29 | ## Usage 30 | 31 | ```yaml 32 | jobs: 33 | job_id: 34 | runs-on: 'ubuntu-latest' 35 | permissions: 36 | contents: 'read' 37 | id-token: 'write' 38 | 39 | steps: 40 | - uses: 'actions/checkout@v4' 41 | 42 | - id: 'auth' 43 | uses: 'google-github-actions/auth@v2' 44 | with: 45 | project_id: 'my-project' 46 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider' 47 | 48 | - id: 'deploy' 49 | uses: 'google-github-actions/deploy-cloud-functions@v3' 50 | timeout-minutes: 10 51 | with: 52 | name: 'my-function' 53 | runtime: 'nodejs22' 54 | 55 | # Example of using the output 56 | - id: 'test' 57 | run: 'curl "${{ steps.deploy.outputs.url }}"' 58 | ``` 59 | 60 | ## Inputs 61 | 62 | > [!IMPORTANT] 63 | > 64 | > In addition to these inputs, we **highly recommend** setting [job and 65 | > step-level 66 | > timeouts](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepstimeout-minutes), 67 | > which can be used to control total deployment time. 68 | 69 | 70 | 71 | - project_id: _(Optional)_ ID of the Google Cloud project in which to deploy the service. The default 72 | value is computed from the environment. 73 | 74 | - region: _(Optional, default: `us-central1`)_ Region in which the function should be deployed. 75 | 76 | - universe: _(Optional, default: `googleapis.com`)_ The Google Cloud universe to use for constructing API endpoints. Trusted 77 | Partner Cloud and Google Distributed Hosted Cloud should set this to their 78 | universe address. 
79 | 80 | You can also override individual API endpoints by setting the environment 81 | variable `GHA_ENDPOINT_OVERRIDE_` where `` is the API 82 | endpoint to override. For example: 83 | 84 | ```yaml 85 | env: 86 | GHA_ENDPOINT_OVERRIDE_oauth2: 'https://oauth2.myapi.endpoint/v1' 87 | ``` 88 | 89 | For more information about universes, see the Google Cloud documentation. 90 | 91 | - name: _(Required)_ Name of the Cloud Function. 92 | 93 | - description: _(Optional)_ Human-friendly description of the Cloud Function. 94 | 95 | - environment: _(Optional, default: `GEN_2`)_ Runtime environment for the Cloud Function. Allowed values are "GEN_1" and 96 | "GEN_2", but this GitHub Action only provides support for "GEN_2". 97 | 98 | - kms_key_name: _(Optional)_ Resource name of a Google Cloud KMS crypto key used to encrypt/decrypt 99 | function resources. If specified, you must also provide an artifact 100 | registry repository using the 'docker_repository' field that was created 101 | with the same key. 102 | 103 | - labels: _(Optional)_ List of labels that should be set on the function. These are 104 | comma-separated or newline-separated `KEY=VALUE`. Keys or values that 105 | contain separators must be escaped with a backslash (e.g. `\,` or `\\n`) 106 | unless quoted. Any leading or trailing whitespace is trimmed unless values 107 | are quoted. 108 | 109 | ```yaml 110 | labels: |- 111 | labela=my-label 112 | labelb=my-other-label 113 | ``` 114 | 115 | This value will only be set if the input is a non-empty value. If a 116 | non-empty value is given, the field values will be overwritten (not 117 | merged). To remove all values, set the value to the literal string `{}`. 118 | 119 | Google Cloud restricts the allowed values and length for labels. Please 120 | see the Google Cloud documentation for labels for more information. 121 | 122 | - source_dir: _(Optional, default: `./`)_ Path on disk to the root of the function's source code. Defaults to 123 | current directory. This does NOT follow symlinks to directories or files 124 | when generating the upload artifact. 125 | 126 | **NOTE:** The function source code must exist on the GitHub Actions 127 | filesystem. This means you must have `uses: actions/checkout@v4` before the 128 | deployment step! 129 | 130 | - runtime: _(Required)_ Runtime for the function, such as "nodejs20". For a list of all available 131 | runtimes, run: 132 | 133 | $ gcloud functions runtimes list 134 | 135 | The available runtimes change over time. 136 | 137 | - build_environment_variables: _(Optional)_ List of environment variables that should be set in the build environment. 138 | These are comma-separated or newline-separated `KEY=VALUE`. Keys or values 139 | that contain separators must be escaped with a backslash (e.g. `\,` or 140 | `\\n`) unless quoted. Any leading or trailing whitespace is trimmed unless 141 | values are quoted. 142 | 143 | ```yaml 144 | build_environment_variables: |- 145 | FRUIT=apple 146 | SENTENCE=" this will retain leading and trailing spaces " 147 | ``` 148 | 149 | This value will only be set if the input is a non-empty value. If a 150 | non-empty value is given, the field values will be overwritten (not 151 | merged). To remove all values, set the value to the literal string `{}`. 152 | 153 | Previous versions of this GitHub Action also included a separate input for 154 | sourcing values from a value, but this is no longer supported.
Use a 155 | community action or script to read the file in a separate step and import 156 | the contents as an output. 157 | 158 | - build_service_account: _(Optional)_ Service account to be used for building the container. 159 | 160 | - build_worker_pool: _(Optional)_ Name of the Cloud Build Custom Worker Pool that should be used to build 161 | the function. The format of this field is: 162 | 163 | projects//locations//workerPools/ 164 | 165 | where `` and `` are the project id and region 166 | respectively where the worker pool is defined and `` is the 167 | short name of the worker pool. 168 | 169 | If the project ID is not the same as the function, then the Cloud 170 | Functions Service Agent must be granted the role Cloud Build Custom 171 | Workers Builder in the project. 172 | 173 | - docker_repository: _(Optional)_ Repository in Artifact Registry to which the function docker image will be 174 | pushed after it is built by Cloud Build. If unspecified, Cloud Functions 175 | will create and use a repository named 'gcf-artifacts' for every deployed 176 | region. 177 | 178 | The value must match the pattern: 179 | 180 | projects//locations//repositories/. 181 | 182 | Cross-project repositories are not supported. Cross-location repositories 183 | are not supported. Repository format must be 'DOCKER'. 184 | 185 | - entry_point: _(Optional)_ Name of a Google Cloud Function (as defined in source code) that will be 186 | executed. Defaults to the resource name suffix (ID of the function), if 187 | not specified. 188 | 189 | - all_traffic_on_latest_revision: _(Optional, default: `true`)_ If true, the latest function revision will be served all traffic. 190 | 191 | - cpu: _(Optional)_ The number of available CPUs to set (e.g. 0.5, 2, 2000m). By default, a 192 | new function's available CPUs is determined based on its memory value. 193 | 194 | - memory: _(Optional)_ The amount of memory available for the function to use. Allowed values are 195 | of the format: with allowed units of "k", "M", "G", "Ki", 196 | "Mi", "Gi" (e.g 128M, 10Mb, 1024Gib). 197 | 198 | For all generations, the default value is 256MB of memory. 199 | 200 | - environment_variables: _(Optional)_ List of environment variables that should be set in the runtime 201 | environment. These are comma-separated or newline-separated `KEY=VALUE`. 202 | Keys or values that contain separators must be escaped with a backslash 203 | (e.g. `\,` or `\\n`) unless quoted. Any leading or trailing whitespace is 204 | trimmed unless values are quoted. 205 | 206 | ```yaml 207 | environment_variables: |- 208 | FRUIT=apple 209 | SENTENCE=" this will retain leading and trailing spaces " 210 | ``` 211 | 212 | This value will only be set if the input is a non-empty value. If a 213 | non-empty value is given, the field values will be overwritten (not 214 | merged). To remove all values, set the value to the literal string `{}`. 215 | 216 | Previous versions of this GitHub Action also included a separate input for 217 | sourcing values from a value, but this is no longer supported. Use a 218 | community action or script to read the file in a separate step and import 219 | the contents as an output. 220 | 221 | - ingress_settings: _(Optional, default: `ALLOW_ALL`)_ Ingress settings controls what traffic can reach the function. Valid 222 | values are "ALLOW_ALL", "ALLOW_INTERNAL_ONLY", and 223 | "ALLOW_INTERNAL_AND_GCLB". 224 | 225 | - max_instance_count: _(Optional)_ Sets the maximum number of instances for the function. 
A function 226 | execution that would exceed max-instances times out. 227 | 228 | - max_instance_request_concurrency: _(Optional)_ Sets the maximum number of concurrent requests allowed per container 229 | instance. 230 | 231 | - min_instance_count: _(Optional)_ Sets the minimum number of instances for the function. This is helpful for 232 | reducing cold start times. 233 | 234 | - secrets: _(Optional)_ List of KEY=VALUE pairs to use as secrets. These are comma-separated or 235 | newline-separated `KEY=VALUE`. Keys or values that contain separators must 236 | be escaped with a backslash (e.g. `\,` or `\\n`) unless quoted. Any 237 | leading or trailing whitespace is trimmed unless values are quoted. 238 | 239 | These can either be injected as environment variables or mounted as 240 | volumes. Keys starting with a forward slash '/' are mount paths. All other 241 | keys correspond to environment variables: 242 | 243 | 244 | ```yaml 245 | with: 246 | secrets: |- 247 | # As an environment variable: 248 | KEY1=secret-key-1:latest 249 | 250 | # As a volume mount: 251 | /secrets/api/key=secret-key-2:latest 252 | ``` 253 | 254 | This value will only be set if the input is a non-empty value. If a 255 | non-empty value is given, the field values will be overwritten (not 256 | merged). To remove all values, set the value to the literal string `{}`. 257 | 258 | - service_account: _(Optional)_ The email address of the IAM service account associated with the Cloud Run 259 | service for the function. The service account represents the identity of 260 | the running function, and determines what permissions the function has. If 261 | not provided, the function will use the project's default service account 262 | for Compute Engine. 263 | 264 | Note this differs from the service account used to deploy the Cloud 265 | Function, which is the currently-authenticated principal. However, the 266 | deploying service account must have permission to impersonate the runtime 267 | service account, which can be achieved by granting the deployment service 268 | account "roles/iam.serviceAccountUser" permission on the runtime service 269 | account. 270 | 271 | - service_timeout: _(Optional, default: `60s`)_ The function execution timeout, specified as a time duration (e.g. "30s" 272 | for 30 seconds). 273 | 274 | - vpc_connector: _(Optional)_ ID of the connector or fully qualified identifier for the connector. 275 | 276 | - vpc_connector_egress_settings: _(Optional, default: `PRIVATE_RANGES_ONLY`)_ Egress settings controls what traffic is diverted through the VPC Access 277 | Connector resource. Allowed values are "PRIVATE_RANGES_ONLY" and 278 | "ALL_TRAFFIC". 279 | 280 | - event_trigger_location: _(Optional)_ The location of the trigger, which must be a region or multi-region where 281 | the relevant events originate. 282 | 283 | - event_trigger_type: _(Optional)_ Specifies which action should trigger the function. For a list of 284 | acceptable values, run: 285 | 286 | $ gcloud functions event-types list 287 | 288 | This usually requires the eventarc API to be enabled: 289 | 290 | $ gcloud services enable eventarc.googleapis.com 291 | 292 | The available trigger types may change over time. 293 | 294 | - event_trigger_filters: _(Optional)_ List of event filters that the trigger should monitor. An event that 295 | matches all the filters will trigger calls to the function. These are 296 | comma-separated or newline-separated `ATTRIBUTE=VALUE`.
Attributes or 297 | values that contain separators must be escaped with a backslash (e.g. `\,` 298 | or `\\n`) unless quoted. To treat a value as a path pattern, prefix the 299 | value with the literal string `PATTERN:`. Any leading or trailing 300 | whitespace is trimmed unless values are quoted. 301 | 302 | ```yaml 303 | event_trigger_type: 'google.cloud.audit.log.v1.written' 304 | event_trigger_filters: |- 305 | serviceName=compute.googleapis.com 306 | methodName=PATTERN:compute.instances.* 307 | ``` 308 | 309 | This value will only be set if the input is a non-empty value. If a 310 | non-empty value is given, the field values will be overwritten (not 311 | merged). To remove all values, set the value to the literal string `{}`. 312 | 313 | For more information, see [Eventarc 314 | Triggers](https://cloud.google.com/functions/docs/calling/eventarc) and 315 | [Eventarc Path 316 | Patterns](https://cloud.google.com/eventarc/docs/path-patterns). 317 | 318 | - event_trigger_pubsub_topic: _(Optional)_ Name of Google Cloud Pub/Sub topic. Every message published in this topic 319 | will trigger function execution with message contents passed as input 320 | data of the format: 321 | 322 | projects//topics/ 323 | 324 | The service account must have permissions on this topic. 325 | 326 | - event_trigger_service_account: _(Optional)_ The email address of the IAM service account associated with the Eventarc 327 | trigger for the function. This is used for authenticated invocation. 328 | 329 | - event_trigger_retry: _(Optional, default: `true`)_ Describes whether event triggers should retry in case of function's 330 | execution failure. 331 | 332 | - event_trigger_channel: _(Optional)_ The name of the channel associated with the trigger in the format: 333 | 334 | projects//locations//channels/ 335 | 336 | You must provide a channel to receive events from Eventarc SaaS partners. 337 | 338 | 339 | 340 | 341 | 342 | ### Allowing unauthenticated requests 343 | 344 | The Cloud Functions product recommendation is that CI/CD systems not set or 345 | change settings for allowing unauthenticated invocations. New deployments are 346 | automatically private services, while deploying a revision of a public 347 | (unauthenticated) service will preserve the IAM setting of public 348 | (unauthenticated). For more information, see [Controlling access on an 349 | individual 350 | service](https://cloud.google.com/functions/docs/securing/managing-access-iam). 351 | 352 | ## Outputs 353 | 354 | 355 | 356 | - `name`: Full resource name of the Cloud Function, of the format: 357 | 358 | projects//locations//functions/ 359 | 360 | - `url`: The URL of your Cloud Function. 361 | 362 | 363 | 364 | 365 | 366 | ## Authorization 367 | 368 | The _deployment_ service account must have the following IAM permissions: 369 | 370 | - Cloud Functions Developer (`roles/cloudfunctions.developer`) 371 | 372 | Additionally, the _deployment_ service account must have permissions to act as 373 | (impersonate) the _runtime_ service account, which can be achieved by granting 374 | the deployment _service_ account "roles/iam.serviceAccountUser" permissions on 375 | the _runtime_ service account. 
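For example, a deploy step that pins the runtime service account might look like the sketch below. The account and project names are placeholders, and the sketch assumes the deploying principal already holds the roles described above:

```yaml
    - id: 'deploy'
      uses: 'google-github-actions/deploy-cloud-functions@v3'
      with:
        name: 'my-function'
        runtime: 'nodejs22'
        # Placeholder runtime service account; the deploying principal needs
        # "roles/iam.serviceAccountUser" on this account.
        service_account: 'runtime-sa@my-project.iam.gserviceaccount.com'
```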
See the Google Cloud documentation to [learn more 376 | about custom runtime service 377 | accounts](https://cloud.google.com/functions/docs/securing/function-identity#individual) 378 | and [additional configuration for 379 | deployment](https://cloud.google.com/functions/docs/reference/iam/roles#additional-configuration) 380 | 381 | ### Via google-github-actions/auth 382 | 383 | Use [google-github-actions/auth](https://github.com/google-github-actions/auth) 384 | to authenticate the action. You can use [Workload Identity Federation][wif] or 385 | traditional [Service Account Key JSON][sa] authentication. 386 | 387 | #### Authenticating via Workload Identity Federation 388 | 389 | ```yaml 390 | jobs: 391 | job_id: 392 | permissions: 393 | contents: 'read' 394 | id-token: 'write' 395 | 396 | steps: 397 | - uses: 'actions/checkout@v4' 398 | 399 | - id: 'auth' 400 | uses: 'google-github-actions/auth@v2' 401 | with: 402 | project_id: 'my-project' 403 | workload_identity_provider: 'projects/123456789/locations/global/workloadIdentityPools/my-pool/providers/my-provider' 404 | 405 | - id: 'deploy' 406 | uses: 'google-github-actions/deploy-cloud-functions@v3' 407 | timeout-minutes: 10 408 | with: 409 | name: 'my-function' 410 | runtime: 'nodejs22' 411 | ``` 412 | 413 | ### Via Application Default Credentials 414 | 415 | If you are hosting your own runners, **and** those runners are on Google Cloud, 416 | you can leverage the Application Default Credentials of the instance. This will 417 | authenticate requests as the service account attached to the instance. **This 418 | only works using a custom runner hosted on GCP.** 419 | 420 | ```yaml 421 | jobs: 422 | job_id: 423 | steps: 424 | - uses: 'actions/checkout@v4' 425 | 426 | - id: 'deploy' 427 | uses: 'google-github-actions/deploy-cloud-functions@v3' 428 | timeout-minutes: 10 429 | with: 430 | name: 'my-function' 431 | runtime: 'nodejs22' 432 | ``` 433 | 434 | The action will automatically detect and use the Application Default 435 | Credentials. 436 | 437 | [cloud-functions]: https://cloud.google.com/functions 438 | [memory]: https://cloud.google.com/sdk/gcloud/reference/functions/deploy#--memory 439 | [sm]: https://cloud.google.com/secret-manager 440 | [wif]: https://cloud.google.com/iam/docs/workload-identity-federation 441 | [sa]: https://cloud.google.com/iam/docs/creating-managing-service-accounts 442 | [gh-runners]: https://help.github.com/en/actions/hosting-your-own-runners/about-self-hosted-runners 443 | [gh-secret]: https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets 444 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2020 Google LLC 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | name: 'Deploy to Cloud Functions' 16 | author: 'Google LLC' 17 | description: |- 18 | Use this action to deploy code to Google Cloud Functions. 19 | 20 | inputs: 21 | # 22 | # Google Cloud 23 | # ------------ 24 | project_id: 25 | description: |- 26 | ID of the Google Cloud project in which to deploy the service. The default 27 | value is computed from the environment. 28 | required: false 29 | 30 | region: 31 | description: |- 32 | Region in which the function should be deployed. 33 | default: 'us-central1' 34 | required: false 35 | 36 | universe: 37 | description: |- 38 | The Google Cloud universe to use for constructing API endpoints. Trusted 39 | Partner Cloud and Google Distributed Hosted Cloud should set this to their 40 | universe address. 41 | 42 | You can also override individual API endpoints by setting the environment 43 | variable `GHA_ENDPOINT_OVERRIDE_` where `` is the API 44 | endpoint to override. For example: 45 | 46 | ```yaml 47 | env: 48 | GHA_ENDPOINT_OVERRIDE_oauth2: 'https://oauth2.myapi.endpoint/v1' 49 | ``` 50 | 51 | For more information about universes, see the Google Cloud documentation. 52 | default: 'googleapis.com' 53 | required: false 54 | 55 | 56 | # 57 | # Top-level 58 | # --------- 59 | name: 60 | description: |- 61 | Name of the Cloud Function. 62 | required: true 63 | 64 | description: 65 | description: |- 66 | Human-friendly description of the Cloud Function. 67 | required: false 68 | 69 | environment: 70 | description: |- 71 | Runtime environment for the Cloud Function. Allowed values are "GEN_1" and 72 | "GEN_2", but this GitHub Action only provides support for "GEN_2". 73 | default: 'GEN_2' 74 | required: false 75 | 76 | kms_key_name: 77 | description: |- 78 | Resource name of a Google Cloud KMS crypto key used to encrypt/decrypt 79 | function resources. If specified, you must also provide an artifact 80 | registry repository using the 'docker_repository' field that was created 81 | with the same key. 82 | required: false 83 | 84 | labels: 85 | description: |- 86 | List of labels that should be set on the function. These are 87 | comma-separated or newline-separated `KEY=VALUE`. Keys or values that 88 | contain separators must be escaped with a backslash (e.g. `\,` or `\\n`) 89 | unless quoted. Any leading or trailing whitespace is trimmed unless values 90 | are quoted. 91 | 92 | ```yaml 93 | labels: |- 94 | labela=my-label 95 | labelb=my-other-label 96 | ``` 97 | 98 | This value will only be set if the input is a non-empty value. If a 99 | non-empty value is given, the field values will be overwritten (not 100 | merged). To remove all values, set the value to the literal string `{}`. 101 | 102 | Google Cloud restricts the allowed values and length for labels. Please 103 | see the Google Cloud documentation for labels for more information. 104 | required: false 105 | 106 | source_dir: 107 | description: |- 108 | Path on disk to the root of the the function's source code. Defaults to 109 | current directory. This does NOT follow symlinks to directories or files 110 | when generating the upload artifact. 111 | 112 | **NOTE:** The function source code must exist on the GitHub Actions 113 | filesystem. This means you must have `use: actions/checkout@v4` before the 114 | deployment step!. 115 | default: './' 116 | required: false 117 | 118 | 119 | # 120 | # buildConfig 121 | # ----------- 122 | runtime: 123 | description: |- 124 | Runtime for the function, such as "nodejs20". 
For a list of all available 125 | runtimes, run: 126 | 127 | $ gcloud functions runtimes list 128 | 129 | The available runtimes change over time. 130 | required: true 131 | 132 | build_environment_variables: 133 | description: |- 134 | List of environment variables that should be set in the build environment. 135 | These are comma-separated or newline-separated `KEY=VALUE`. Keys or values 136 | that contain separators must be escaped with a backslash (e.g. `\,` or 137 | `\\n`) unless quoted. Any leading or trailing whitespace is trimmed unless 138 | values are quoted. 139 | 140 | ```yaml 141 | build_environment_variables: |- 142 | FRUIT=apple 143 | SENTENCE=" this will retain leading and trailing spaces " 144 | ``` 145 | 146 | This value will only be set if the input is a non-empty value. If a 147 | non-empty value is given, the field values will be overwritten (not 148 | merged). To remove all values, set the value to the literal string `{}`. 149 | 150 | Previous versions of this GitHub Action also included a separate input for 151 | sourcing values from a value, but this is no longer supported. Use a 152 | community action or script to read the file in a separate step and import 153 | the contents as an output. 154 | required: false 155 | 156 | build_service_account: 157 | description: |- 158 | Service account to be used for building the container. 159 | required: false 160 | 161 | build_worker_pool: 162 | description: |- 163 | Name of the Cloud Build Custom Worker Pool that should be used to build 164 | the function. The format of this field is: 165 | 166 | projects//locations//workerPools/ 167 | 168 | where `` and `` are the project id and region 169 | respectively where the worker pool is defined and `` is the 170 | short name of the worker pool. 171 | 172 | If the project ID is not the same as the function, then the Cloud 173 | Functions Service Agent must be granted the role Cloud Build Custom 174 | Workers Builder in the project. 175 | required: false 176 | 177 | docker_repository: 178 | description: |- 179 | Repository in Artifact Registry to which the function docker image will be 180 | pushed after it is built by Cloud Build. If unspecified, Cloud Functions 181 | will create and use a repository named 'gcf-artifacts' for every deployed 182 | region. 183 | 184 | The value must match the pattern: 185 | 186 | projects//locations//repositories/. 187 | 188 | Cross-project repositories are not supported. Cross-location repositories 189 | are not supported. Repository format must be 'DOCKER'. 190 | required: false 191 | 192 | entry_point: 193 | description: |- 194 | Name of a Google Cloud Function (as defined in source code) that will be 195 | executed. Defaults to the resource name suffix (ID of the function), if 196 | not specified. 197 | required: false 198 | 199 | # 200 | # serviceConfig 201 | # ------------- 202 | all_traffic_on_latest_revision: 203 | description: |- 204 | If true, the latest function revision will be served all traffic. 205 | default: true 206 | required: false 207 | 208 | cpu: 209 | description: |- 210 | The number of available CPUs to set (e.g. 0.5, 2, 2000m). By default, a 211 | new function's available CPUs is determined based on its memory value. 212 | required: false 213 | 214 | memory: 215 | description: |- 216 | The amount of memory available for the function to use. Allowed values are 217 | of the format: with allowed units of "k", "M", "G", "Ki", 218 | "Mi", "Gi" (e.g 128M, 10Mb, 1024Gib). 
219 | 220 | For all generations, the default value is 256MB of memory. 221 | required: false 222 | 223 | environment_variables: 224 | description: |- 225 | List of environment variables that should be set in the runtime 226 | environment. These are comma-separated or newline-separated `KEY=VALUE`. 227 | Keys or values that contain separators must be escaped with a backslash 228 | (e.g. `\,` or `\\n`) unless quoted. Any leading or trailing whitespace is 229 | trimmed unless values are quoted. 230 | 231 | ```yaml 232 | environment_variables: |- 233 | FRUIT=apple 234 | SENTENCE=" this will retain leading and trailing spaces " 235 | ``` 236 | 237 | This value will only be set if the input is a non-empty value. If a 238 | non-empty value is given, the field values will be overwritten (not 239 | merged). To remove all values, set the value to the literal string `{}`. 240 | 241 | Previous versions of this GitHub Action also included a separate input for 242 | sourcing values from a value, but this is no longer supported. Use a 243 | community action or script to read the file in a separate step and import 244 | the contents as an output. 245 | required: false 246 | 247 | ingress_settings: 248 | description: |- 249 | Ingress settings controls what traffic can reach the function. Valid 250 | values are "ALLOW_ALL", "ALLOW_INTERNAL_ONLY", and 251 | "ALLOW_INTERNAL_AND_GCLB". 252 | default: 'ALLOW_ALL' 253 | required: false 254 | 255 | max_instance_count: 256 | description: |- 257 | Sets the maximum number of instances for the function. A function 258 | execution that would exceed max-instances times out. 259 | required: false 260 | 261 | max_instance_request_concurrency: 262 | description: |- 263 | Sets the maximum number of concurrent requests allowed per container 264 | instance. 265 | required: false 266 | 267 | min_instance_count: 268 | description: |- 269 | Sets the minimum number of instances for the function. This is helpful for 270 | reducing cold start times. 271 | required: false 272 | 273 | secrets: 274 | description: |- 275 | List of KEY=VALUE pairs to use as secrets. These are comma-separated or 276 | newline-separated `KEY=VALUE`. Keys or values that contain separators must 277 | be escaped with a backslash (e.g. `\,` or `\\n`) unless quoted. Any 278 | leading or trailing whitespace is trimmed unless values are quoted. 279 | 280 | These can either be injected as environment variables or mounted as 281 | volumes. Keys starting with a forward slash '/' are mount paths. All other 282 | keys correspond to environment variables: 283 | 284 | 285 | ```yaml 286 | with: 287 | secrets: |- 288 | # As an environment variable: 289 | KEY1=secret-key-1:latest 290 | 291 | # As a volume mount: 292 | /secrets/api/key=secret-key-2:latest 293 | ``` 294 | 295 | This value will only be set if the input is a non-empty value. If a 296 | non-empty value is given, the field values will be overwritten (not 297 | merged). To remove all values, set the value to the literal string `{}`. 298 | required: false 299 | 300 | service_account: 301 | description: |- 302 | The email address of the IAM service account associated with the Cloud Run 303 | service for the function. The service account represents the identity of 304 | the running function, and determines what permissions the function has. If 305 | not provided, the function will use the project's default service account 306 | for Compute Engine. 
307 | 308 | Note this differs from the service account used to deploy the Cloud 309 | Function, which is the currently-authenticated principal. However, the 310 | deploying service account must have permission to impersonate the runtime 311 | service account, which can be achieved by granting the deployment service 312 | account "roles/iam.serviceAccountUser" permission on the runtime service 313 | account. 314 | required: false 315 | 316 | service_timeout: 317 | description: |- 318 | The function execution timeout, specified as a time duration (e.g. "30s" 319 | for 30 seconds). 320 | default: '60s' 321 | required: false 322 | 323 | vpc_connector: 324 | description: |- 325 | ID of the connector or fully qualified identifier for the connector. 326 | required: false 327 | 328 | vpc_connector_egress_settings: 329 | description: |- 330 | Egress settings controls what traffic is diverted through the VPC Access 331 | Connector resource. Allowed values are "PRIVATE_RANGES_ONLY" and 332 | "ALL_TRAFFIC". 333 | default: 'PRIVATE_RANGES_ONLY' 334 | required: false 335 | 336 | # 337 | # eventTrigger 338 | # ------------- 339 | event_trigger_location: 340 | description: |- 341 | The location of the trigger, which must be a region or multi-region where 342 | the relevant events originate. 343 | required: false 344 | 345 | event_trigger_type: 346 | description: |- 347 | Specifies which action should trigger the function. For a list of 348 | acceptable values, run: 349 | 350 | $ gcloud functions event-types list 351 | 352 | This usually requires the eventarc API to be enabled: 353 | 354 | $ gcloud services enable eventarc.googleapis.com 355 | 356 | The available trigger types may change over time. 357 | required: false 358 | 359 | event_trigger_filters: 360 | description: |- 361 | List of event filters that the trigger should monitor. An event that 362 | matches all the filteres will trigger calls to the function. These are 363 | comma-separated or newline-separated `ATTRIBUTE=VALUE`. Attributes or 364 | values that contain separators must be escaped with a backslash (e.g. `\,` 365 | or `\\n`) unless quoted. To treat a value as a path pattern, prefix the 366 | value with the literal string `PATTERN:`. Any leading or trailing 367 | whitespace is trimmed unless values are quoted. 368 | 369 | ```yaml 370 | event_trigger_type: 'google.cloud.audit.log.v1.written' 371 | event_trigger_filters: |- 372 | serviceName=compute.googleapis.com 373 | methodName=PATTERN:compute.instances.* 374 | ``` 375 | 376 | This value will only be set if the input is a non-empty value. If a 377 | non-empty value is given, the field values will be overwritten (not 378 | merged). To remove all values, set the value to the literal string `{}`. 379 | 380 | For more information, see [Eventarc 381 | Triggers](https://cloud.google.com/functions/docs/calling/eventarc) and 382 | [Eventarc Path 383 | Patterns](https://cloud.google.com/eventarc/docs/path-patterns). 384 | 385 | event_trigger_pubsub_topic: 386 | description: |- 387 | Name of Google Cloud Pub/Sub topic. Every message published in this topic 388 | will trigger function execution with message contents passed as input 389 | data of the format: 390 | 391 | projects//topics/ 392 | 393 | The service account must have permissions on this topic. 394 | required: false 395 | 396 | event_trigger_service_account: 397 | description: |- 398 | The email address of the IAM service account associated with the Eventarc 399 | trigger for the function. This is used for authenticated invocation. 
400 | required: false 401 | 402 | event_trigger_retry: 403 | description: |- 404 | Describes whether event triggers should retry in case of function's 405 | execution failure. 406 | default: true 407 | required: false 408 | 409 | event_trigger_channel: 410 | description: |- 411 | The name of the channel associated with the trigger in the format: 412 | 413 | projects//locations//channels/ 414 | 415 | You must provide a channel to receive events from Eventarc SaaS partners. 416 | required: false 417 | 418 | 419 | outputs: 420 | name: 421 | description: |- 422 | Full resource name of the Cloud Function, of the format: 423 | 424 | projects//locations//functions/ 425 | 426 | url: 427 | description: |- 428 | The URL of your Cloud Function. 429 | 430 | 431 | branding: 432 | icon: 'code' 433 | color: 'blue' 434 | 435 | runs: 436 | using: 'node20' 437 | main: 'dist/index.js' 438 | -------------------------------------------------------------------------------- /bin/runTests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eEuo pipefail 3 | 4 | # 5 | # As of Node 20, the --test parameter does not support globbing, and it does not 6 | # support variable Windows paths. We also cannot invoke the test runner 7 | # directly, because while it has an API, there's no way to force it to transpile 8 | # the Typescript into JavaScript before passing it to the runner. 9 | # 10 | # So we're left with this solution, which shells out to Node to list all files 11 | # that end in *.test.ts (excluding node_modules/), and then execs out to that 12 | # process. We have to exec so the stderr/stdout and exit code is appropriately 13 | # fed to the caller. 14 | # 15 | 16 | FILES="$(node -e "process.stdout.write(require('node:fs').readdirSync('./', { recursive: true }).filter((e) => {return e.endsWith('.test.ts') && !e.startsWith('node_modules');}).sort().join(' '));")" 17 | 18 | set -x 19 | exec node --require ts-node/register --test-reporter spec --test ${FILES} 20 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import ts from 'typescript-eslint'; 3 | import tsParser from '@typescript-eslint/parser'; 4 | 5 | import prettierRecommended from 'eslint-plugin-prettier/recommended'; 6 | 7 | export default ts.config( 8 | js.configs.recommended, 9 | ts.configs.eslintRecommended, 10 | { 11 | files: ['**/*.ts', '**/*.tsx'], 12 | languageOptions: { 13 | parser: tsParser, 14 | }, 15 | }, 16 | { ignores: ['dist/', '**/*.js'] }, 17 | { 18 | rules: { 19 | 'no-unused-vars': 'off', 20 | }, 21 | }, 22 | prettierRecommended, 23 | ); 24 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@google-github-actions/deploy-cloud-functions", 3 | "version": "3.0.8", 4 | "description": "Deploy a Cloud Function", 5 | "main": "dist/index.js", 6 | "scripts": { 7 | "build": "ncc build -m src/main.ts", 8 | "docs": "./node_modules/.bin/actions-gen-readme", 9 | "lint": "eslint .", 10 | "format": "eslint . 
--fix", 11 | "test": "bash ./bin/runTests.sh" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "https://github.com/google-github-actions/deploy-cloud-functions" 16 | }, 17 | "keywords": [ 18 | "actions", 19 | "gcf", 20 | "functions", 21 | "google cloud", 22 | "cloud function" 23 | ], 24 | "author": "Google LLC", 25 | "license": "Apache-2.0", 26 | "dependencies": { 27 | "@actions/core": "^1.11.1", 28 | "@actions/http-client": "^2.2.3", 29 | "@google-github-actions/actions-utils": "^0.8.6", 30 | "archiver": "^7.0.1", 31 | "google-auth-library": "^9.15.1", 32 | "ignore": "^7.0.3" 33 | }, 34 | "devDependencies": { 35 | "@eslint/eslintrc": "^3.2.0", 36 | "@eslint/js": "^9.19.0", 37 | "@types/archiver": "^6.0.3", 38 | "@types/node": "^22.13.0", 39 | "@typescript-eslint/eslint-plugin": "^8.22.0", 40 | "@typescript-eslint/parser": "^8.22.0", 41 | "@vercel/ncc": "^0.38.3", 42 | "eslint-config-prettier": "^10.0.1", 43 | "eslint-plugin-prettier": "^5.2.3", 44 | "eslint": "^9.19.0", 45 | "node-stream-zip": "^1.15.0", 46 | "prettier": "^3.4.2", 47 | "ts-node": "^10.9.2", 48 | "typescript-eslint": "^8.22.0", 49 | "typescript": "^5.7.3" 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/client.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import { randomBytes } from 'crypto'; 18 | import fs from 'fs'; 19 | import * as path from 'path'; 20 | import { tmpdir } from 'os'; 21 | 22 | import { HttpClient } from '@actions/http-client'; 23 | import { GoogleAuth } from 'google-auth-library'; 24 | import { 25 | errorMessage, 26 | expandUniverseEndpoints, 27 | forceRemove, 28 | KVPair, 29 | } from '@google-github-actions/actions-utils'; 30 | 31 | import { zipDir, ZipOptions } from './util'; 32 | 33 | // Do not listen to the linter - this can NOT be rewritten as an ES6 import statement. 34 | const { version: appVersion } = require('../package.json'); 35 | 36 | // userAgent is the default user agent. 37 | const userAgent = `google-github-actions:deploy-cloud-functions/${appVersion}`; 38 | 39 | // cloudFunctionResourceNamePattern is the regular expression to use to match 40 | // resource names. 
41 | const cloudFunctionResourceNamePattern = new RegExp( 42 | /^projects\/.+\/locations\/.+\/functions\/.+$/gi, 43 | ); 44 | 45 | export type CloudFunctionClientOptions = { 46 | projectID?: string; 47 | location?: string; 48 | universe?: string; 49 | }; 50 | 51 | export type PollOperationOptions = { 52 | onPoll?: OnFunction; 53 | onDebug?: OnDebugFunction; 54 | }; 55 | 56 | export type Operation = { 57 | name: string; 58 | metadata: Record; 59 | done: boolean; 60 | error: OperationStatus; 61 | response?: CloudFunctionResponse; 62 | }; 63 | 64 | export type OperationStatus = { 65 | code: number; 66 | message: string; 67 | }; 68 | 69 | export type CloudFunctionResponse = CloudFunction & { 70 | buildConfig: { 71 | build: string; 72 | }; 73 | serviceConfig: { 74 | service: string; 75 | uri: string; 76 | revision: string; 77 | }; 78 | eventTrigger: { 79 | trigger: string; 80 | }; 81 | state: 'STATE_UNSPECIFIED' | 'ACTIVE' | 'FAILED' | 'DEPLOYING' | 'DELETING' | 'UNKNOWN'; 82 | updateTime: string; 83 | stateMessages: { 84 | severity: 'SEVERITY_UNSPECIFIED' | 'ERROR' | 'WARNING' | 'INFO'; 85 | type: string; 86 | message: string; 87 | }[]; 88 | url: string; 89 | }; 90 | 91 | type GenerateUploadUrlResponse = { 92 | uploadUrl: string; 93 | storageSource: StorageSource; 94 | }; 95 | 96 | export type StorageSource = { 97 | bucket: string; 98 | object: string; 99 | generation?: string; 100 | }; 101 | 102 | export type SecretEnvVar = { 103 | key: string; 104 | projectId: string; 105 | secret: string; 106 | version: string; 107 | }; 108 | 109 | export type SecretVolume = { 110 | mountPath: string; 111 | projectId: string; 112 | secret: string; 113 | versions: { 114 | path: string; 115 | version: string; 116 | }[]; 117 | }; 118 | 119 | export type EventFilter = { 120 | attribute: string; 121 | value: string; 122 | operator?: string; 123 | }; 124 | 125 | export enum Environment { 126 | GEN_1 = 'GEN_1', 127 | GEN_2 = 'GEN_2', 128 | } 129 | 130 | export enum VpcConnectorEgressSettings { 131 | PRIVATE_RANGES_ONLY = 'PRIVATE_RANGES_ONLY', 132 | ALL_TRAFFIC = 'ALL_TRAFFIC', 133 | } 134 | 135 | export enum IngressSettings { 136 | ALLOW_ALL = 'ALLOW_ALL', 137 | ALLOW_INTERNAL_ONLY = 'ALLOW_INTERNAL_ONLY', 138 | ALLOW_INTERNAL_AND_GCLB = 'ALLOW_INTERNAL_AND_GCLB', 139 | } 140 | 141 | export enum RetryPolicy { 142 | RETRY_POLICY_DO_NOT_RETRY = 'RETRY_POLICY_DO_NOT_RETRY', 143 | RETRY_POLICY_RETRY = 'RETRY_POLICY_RETRY', 144 | } 145 | 146 | export type CloudFunction = { 147 | name: string; 148 | description?: string; 149 | environment?: Environment; 150 | kmsKeyName?: string; 151 | labels?: KVPair; 152 | 153 | buildConfig?: { 154 | runtime?: string; 155 | entryPoint?: string; 156 | source?: { 157 | storageSource?: StorageSource; 158 | }; 159 | dockerRepository?: string; 160 | environmentVariables?: KVPair; 161 | serviceAccount?: string; 162 | workerPool?: string; 163 | }; 164 | 165 | serviceConfig?: { 166 | allTrafficOnLatestRevision?: boolean; 167 | availableCpu?: string; 168 | availableMemory?: string; 169 | environmentVariables?: KVPair; 170 | ingressSettings: IngressSettings; 171 | maxInstanceCount?: number; 172 | maxInstanceRequestConcurrency?: number; 173 | minInstanceCount?: number; 174 | secretEnvironmentVariables?: SecretEnvVar[]; 175 | secretVolumes?: SecretVolume[]; 176 | serviceAccountEmail?: string; 177 | timeoutSeconds?: number; 178 | vpcConnector?: string; 179 | vpcConnectorEgressSettings?: VpcConnectorEgressSettings; 180 | }; 181 | 182 | eventTrigger?: { 183 | triggerRegion?: string; 184 | 
eventType?: string; 185 | eventFilters?: EventFilter[]; 186 | pubsubTopic?: string; 187 | serviceAccountEmail?: string; 188 | retryPolicy?: RetryPolicy; 189 | channel?: string; 190 | service?: string; 191 | }; 192 | }; 193 | 194 | export type CreateOptions = { 195 | onPoll?: OnFunction; 196 | onDebug?: OnDebugFunction; 197 | }; 198 | 199 | export type DeleteOptions = { 200 | onPoll?: OnFunction; 201 | onDebug?: OnDebugFunction; 202 | }; 203 | 204 | export type PatchOptions = { 205 | onPoll?: OnFunction; 206 | onDebug?: OnDebugFunction; 207 | }; 208 | 209 | export type DeployOptions = { 210 | onPoll?: OnFunction; 211 | onZip?: OnZipFunction; 212 | onNew?: OnFunction; 213 | onExisting?: OnFunction; 214 | onDebug?: OnDebugFunction; 215 | } & ZipOptions; 216 | 217 | export type OnFunction = () => void; 218 | export type OnDebugFunction = (f: () => string) => void; 219 | export type OnZipFunction = (sourceDir: string, zipPath: string) => void; 220 | 221 | export class CloudFunctionsClient { 222 | /** 223 | * auth is the authentication client. 224 | */ 225 | readonly #auth: GoogleAuth; 226 | 227 | /** 228 | * projectID and location are hints to the client if a Cloud Function resource 229 | * name does not include the full resource name. If a full resource name is 230 | * given (e.g. `projects/p/locations/l/functions/f`), then that is used. 231 | * However, if just a name is given (e.g. `f`), these values will be used to 232 | * construct the full resource name. 233 | */ 234 | readonly #projectID?: string; 235 | readonly #location?: string; 236 | 237 | /** 238 | * client is the HTTP client. 239 | */ 240 | readonly #client: HttpClient; 241 | 242 | /** 243 | * endpoints are the universe-aware API endpoints. 244 | */ 245 | readonly #endpoints = { 246 | cloudfunctions: 'https://cloudfunctions.{universe}/v2', 247 | }; 248 | 249 | constructor(opts?: CloudFunctionClientOptions) { 250 | this.#auth = new GoogleAuth({ 251 | scopes: ['https://www.googleapis.com/auth/cloud-platform'], 252 | projectId: opts?.projectID, 253 | }); 254 | 255 | this.#projectID = opts?.projectID; 256 | this.#location = opts?.location; 257 | 258 | this.#client = new HttpClient(userAgent); 259 | this.#endpoints = expandUniverseEndpoints(this.#endpoints, opts?.universe); 260 | } 261 | 262 | async #request(method: string, url: string, data?: any) { 263 | const authToken = await this.#auth.getAccessToken(); 264 | if (!authToken) { 265 | throw new Error(`Failed to get auth token for ${method} ${url}`); 266 | } 267 | 268 | const headers = { 269 | 'Authorization': `Bearer ${authToken}`, 270 | 'Accept': 'application/json', 271 | 'Content-Type': 'application/json', 272 | }; 273 | 274 | try { 275 | const response = await this.#client.request(method, url, data, headers); 276 | const body = await response.readBody(); 277 | const statusCode = response.message.statusCode || 500; 278 | if (statusCode >= 400) { 279 | throw new Error(`(${statusCode}) ${body}`); 280 | } 281 | return JSON.parse(body); 282 | } catch (err) { 283 | const msg = errorMessage(err); 284 | throw new Error(`Failed to ${method} ${url}: ${msg}`); 285 | } 286 | } 287 | 288 | /** 289 | * pollOperation polls the operation, calling pollFn on each attempt. 290 | * 291 | * @param name Name of the operation, of the format `operations/{name}`. 292 | * @param opts Options for polling 293 | */ 294 | async #pollOperation(name: string, opts: PollOperationOptions): Promise { 295 | const pollInterval = 5000; // ms 296 | 297 | for (;;) { 298 | // If a poll function was given, call it. 
299 | if (opts.onPoll) opts.onPoll(); 300 | 301 | const resp = await this.getOperation(name); 302 | if (resp.error) { 303 | throw new Error(`Operation failed: ${resp.error.message}`); 304 | } 305 | if (resp.done) { 306 | return resp; 307 | } 308 | 309 | await new Promise((resolve) => setTimeout(resolve, pollInterval)); 310 | } 311 | } 312 | 313 | /** 314 | * getOperation fetches the operation by name. 315 | * 316 | * @param name Name of the operation, of the format `operations/{name}`. 317 | */ 318 | async getOperation(name: string): Promise { 319 | if (name.startsWith('operations/')) { 320 | name.slice(11); 321 | } 322 | 323 | const u = `${this.#endpoints.cloudfunctions}/${name}`; 324 | const resp: Operation = await this.#request('GET', u); 325 | return resp; 326 | } 327 | 328 | /** 329 | * create creates a new Cloud Function. 330 | * 331 | * @param cf Cloud Function to deploy. 332 | */ 333 | async create(cf: CloudFunction, opts?: CreateOptions): Promise { 334 | const resourceName = this.fullResourceName(cf.name); 335 | cf.name = resourceName; 336 | if (opts?.onDebug) { 337 | opts.onDebug((): string => { 338 | return `create: computed Cloud Function:\n${JSON.stringify(cf, null, 2)}`; 339 | }); 340 | } 341 | 342 | const parent = this.parentFromName(resourceName); 343 | const functionName = resourceName.split('/').at(-1); 344 | 345 | const u = `${this.#endpoints.cloudfunctions}/${parent}/functions?functionId=${functionName}`; 346 | const body = JSON.stringify(cf); 347 | 348 | const resp: Operation = await this.#request('POST', u, body); 349 | const op = await this.#pollOperation(resp.name, { 350 | onPoll: opts?.onPoll, 351 | onDebug: opts?.onDebug, 352 | }); 353 | 354 | if (!op.response) { 355 | throw new Error(`create operation result did not include function`); 356 | } 357 | return op.response; 358 | } 359 | 360 | /** 361 | * delete removes a function with the given name. 362 | * 363 | * @param name Full resource name of the Cloud Function. 364 | */ 365 | async delete(name: string, opts?: DeleteOptions): Promise { 366 | const resourceName = this.fullResourceName(name); 367 | const u = `${this.#endpoints.cloudfunctions}/${resourceName}`; 368 | const resp: Operation = await this.#request('DELETE', u); 369 | return await this.#pollOperation(resp.name, { 370 | onPoll: opts?.onPoll, 371 | onDebug: opts?.onDebug, 372 | }); 373 | } 374 | 375 | /** 376 | * generateUploadURL generates a signed URL for which to upload the blob. 377 | * 378 | * @param parent Name of the location in which to deploy the function, of the 379 | * format `projects/p/locations/l`. 380 | */ 381 | async generateUploadURL(parent: string): Promise { 382 | const u = `${this.#endpoints.cloudfunctions}/${parent}/functions:generateUploadUrl`; 383 | const body = JSON.stringify({ 384 | environment: Environment.GEN_2, 385 | }); 386 | const resp: GenerateUploadUrlResponse = await this.#request('POST', u, body); 387 | return resp; 388 | } 389 | 390 | /** 391 | * get returns a function with the given name. 392 | * 393 | * @param name Name of the function to get, of the format 394 | * `projects/p/locations/l/functions/f`. 395 | */ 396 | async get(name: string): Promise { 397 | const resourceName = this.fullResourceName(name); 398 | const u = `${this.#endpoints.cloudfunctions}/${resourceName}`; 399 | const resp: CloudFunctionResponse = await this.#request('GET', u); 400 | return resp; 401 | } 402 | 403 | /** 404 | * getSafe attempts to get the existing function by resource name. 405 | * If the function exists, it returns the function. 
If the function does not 406 | * exist, it returns null. If there are any errors besides a 404 returned, it 407 | * throws that error. 408 | */ 409 | async getSafe(name: string): Promise { 410 | try { 411 | return await this.get(name); 412 | } catch (err) { 413 | const msg = errorMessage(err); 414 | if (!msg.includes('404') && !msg.includes('NOT_FOUND')) { 415 | throw new Error( 416 | `Failed to lookup existing function - does the caller have ` + 417 | `cloudfunctions.functions.get permissions? ${err}`, 418 | ); 419 | } 420 | return null; 421 | } 422 | } 423 | 424 | /** 425 | * patch updates fields on the function. 426 | * 427 | * @param cf Cloud Function to patch 428 | */ 429 | async patch(cf: CloudFunction, opts?: PatchOptions): Promise { 430 | const resourceName = this.fullResourceName(cf.name); 431 | cf.name = resourceName; 432 | if (opts?.onDebug) { 433 | opts.onDebug((): string => { 434 | return `patch: computed Cloud Function:\n${JSON.stringify(cf, null, 2)}`; 435 | }); 436 | } 437 | 438 | const updateMask = this.computeUpdateMask(cf); 439 | if (opts?.onDebug) { 440 | opts.onDebug((): string => { 441 | return `Computed updateMask: ${updateMask}`; 442 | }); 443 | } 444 | 445 | const u = `${this.#endpoints.cloudfunctions}/${resourceName}?updateMask=${updateMask}`; 446 | const body = JSON.stringify(cf); 447 | const resp: Operation = await this.#request('PATCH', u, body); 448 | const op = await this.#pollOperation(resp.name, { 449 | onPoll: opts?.onPoll, 450 | onDebug: opts?.onDebug, 451 | }); 452 | 453 | if (!op.response) { 454 | throw new Error(`patch operation result did not include function`); 455 | } 456 | return op.response; 457 | } 458 | 459 | /** 460 | * deployFromLocalSource deploys a function. If the function already exists, it deploys a new 461 | * version. If the function does not already exist, it creates a new one. This is not an API method, but rather a helper around a collection of API methods. 462 | * 463 | * @param cf Cloud Function. 464 | * @param sourceDir Path on local disk to the source to deploy. 465 | */ 466 | async deployFromLocalSource( 467 | cf: CloudFunction, 468 | sourceDir: string, 469 | opts?: DeployOptions, 470 | ): Promise { 471 | const randomName = randomBytes(12).toString('hex'); 472 | const zipPath = path.join(tmpdir(), `cfsrc-${randomName}.zip`); 473 | try { 474 | await zipDir(sourceDir, zipPath, opts); 475 | if (opts?.onZip) opts.onZip(sourceDir, zipPath); 476 | } catch (err) { 477 | throw new Error(`Zip file ${zipPath} creation failed: ${err}`); 478 | } 479 | 480 | const resourceName = this.fullResourceName(cf.name); 481 | cf.name = resourceName; 482 | 483 | // Extract the parent from the name attribute. 484 | const parent = this.parentFromName(resourceName); 485 | 486 | // Upload source code to the upload URL. 487 | let sourceUploadResp: GenerateUploadUrlResponse; 488 | try { 489 | sourceUploadResp = await this.generateUploadURL(parent); 490 | await this.uploadSource(sourceUploadResp.uploadUrl, zipPath); 491 | } catch (err) { 492 | throw new Error(`Failed to upload zip file: ${err}`); 493 | } 494 | 495 | // Delete temp zip file after upload 496 | await forceRemove(zipPath); 497 | if (!cf.buildConfig) { 498 | cf.buildConfig = {}; 499 | } 500 | if (!cf.buildConfig.source) { 501 | cf.buildConfig.source = {}; 502 | } 503 | cf.buildConfig.source.storageSource = sourceUploadResp.storageSource; 504 | 505 | // Get the existing function data. 
506 | const existingFunction = await this.getSafe(resourceName); 507 | 508 | // If the function already exists, create a new version 509 | if (existingFunction) { 510 | if (opts?.onExisting) opts.onExisting(); 511 | const resp: CloudFunctionResponse = await this.patch(cf, { 512 | onPoll: opts?.onPoll, 513 | onDebug: opts?.onDebug, 514 | }); 515 | return resp; 516 | } else { 517 | if (opts?.onNew) opts.onNew(); 518 | const resp: CloudFunctionResponse = await this.create(cf, { 519 | onPoll: opts?.onPoll, 520 | onDebug: opts?.onDebug, 521 | }); 522 | return resp; 523 | } 524 | } 525 | 526 | /** 527 | * Upload a file to a Signed URL. 528 | * 529 | * @param uploadURL Signed URL. 530 | * @param zipPath File to upload. 531 | * @returns uploaded URL. 532 | */ 533 | async uploadSource(uploadURL: string, zipPath: string): Promise { 534 | const zipFile = fs.createReadStream(zipPath); 535 | 536 | try { 537 | // This is different logic than the primary request function, and it does 538 | // not return JSON. 539 | const response = await this.#client.request('PUT', uploadURL, zipFile, { 540 | 'content-type': 'application/zip', 541 | }); 542 | 543 | const body = await response.readBody(); 544 | const statusCode = response.message.statusCode || 500; 545 | if (statusCode >= 400) { 546 | throw new Error(`(${statusCode}) ${body}`); 547 | } 548 | } catch (err) { 549 | const msg = errorMessage(err); 550 | throw new Error(`Failed to upload source: ${msg}`); 551 | } 552 | } 553 | 554 | fullResourceName(name: string): string { 555 | if (!name) { 556 | name = ''; 557 | } 558 | 559 | name = name.trim(); 560 | if (!name) { 561 | throw new Error(`Failed to parse resource name: name cannot be empty`); 562 | } 563 | 564 | if (name.includes('/')) { 565 | if (name.match(cloudFunctionResourceNamePattern)) { 566 | return name; 567 | } else { 568 | throw new Error(`Invalid resource name '${name}'`); 569 | } 570 | } 571 | 572 | const projectID = this.#projectID; 573 | if (!projectID) { 574 | throw new Error(`Failed to get project ID to build resource name. Try setting 'project_id'.`); 575 | } 576 | 577 | const location = this.#location; 578 | if (!location) { 579 | throw new Error( 580 | `Failed to get location (region) to build resource name. 
Try setting 'region'.`, 581 | ); 582 | } 583 | 584 | return `projects/${projectID}/locations/${location}/functions/${name}`; 585 | } 586 | 587 | parentFromName(name: string): string { 588 | const parts = name.split('/'); 589 | if (parts.length < 3) { 590 | throw new Error( 591 | `Invalid or missing name '${name}' (expected 'projects/p/locations/l/functions/f')`, 592 | ); 593 | } 594 | const parent = parts.slice(0, parts.length - 2).join('/'); 595 | return parent; 596 | } 597 | 598 | computeUpdateMask(cf: CloudFunction): string { 599 | const keys: string[] = []; 600 | 601 | if (cf.name !== undefined) keys.push('name'); 602 | if (cf.description !== undefined) keys.push('description'); 603 | if (cf.environment !== undefined) keys.push('environment'); 604 | if (cf.kmsKeyName !== undefined) keys.push('kmsKeyName'); 605 | if (cf.labels !== undefined) keys.push('labels'); 606 | 607 | if (cf.buildConfig?.runtime !== undefined) keys.push('buildConfig.runtime'); 608 | if (cf.buildConfig?.entryPoint !== undefined) keys.push('buildConfig.entryPoint'); 609 | if (cf.buildConfig?.source !== undefined) keys.push('buildConfig.source'); 610 | if (cf.buildConfig?.dockerRepository !== undefined) keys.push('buildConfig.dockerRepository'); 611 | if (cf.buildConfig?.environmentVariables !== undefined) 612 | keys.push('buildConfig.environmentVariables'); 613 | if (cf.buildConfig?.serviceAccount !== undefined) keys.push('buildConfig.serviceAccount'); 614 | if (cf.buildConfig?.workerPool !== undefined) keys.push('buildConfig.workerPool'); 615 | 616 | if (cf.serviceConfig?.allTrafficOnLatestRevision !== undefined) 617 | keys.push('serviceConfig.allTrafficOnLatestRevision'); 618 | if (cf.serviceConfig?.availableCpu !== undefined) keys.push('serviceConfig.availableCpu'); 619 | if (cf.serviceConfig?.availableMemory !== undefined) keys.push('serviceConfig.availableMemory'); 620 | if (cf.serviceConfig?.environmentVariables !== undefined) 621 | keys.push('serviceConfig.environmentVariables'); 622 | if (cf.serviceConfig?.ingressSettings !== undefined) keys.push('serviceConfig.ingressSettings'); 623 | if (cf.serviceConfig?.maxInstanceCount !== undefined) 624 | keys.push('serviceConfig.maxInstanceCount'); 625 | if (cf.serviceConfig?.maxInstanceRequestConcurrency !== undefined) 626 | keys.push('serviceConfig.maxInstanceRequestConcurrency'); 627 | if (cf.serviceConfig?.minInstanceCount !== undefined) 628 | keys.push('serviceConfig.minInstanceCount'); 629 | if (cf.serviceConfig?.secretEnvironmentVariables !== undefined) 630 | keys.push('serviceConfig.secretEnvironmentVariables'); 631 | if (cf.serviceConfig?.secretVolumes !== undefined) keys.push('serviceConfig.secretVolumes'); 632 | if (cf.serviceConfig?.serviceAccountEmail !== undefined) 633 | keys.push('serviceConfig.serviceAccountEmail'); 634 | if (cf.serviceConfig?.timeoutSeconds !== undefined) keys.push('serviceConfig.timeoutSeconds'); 635 | if (cf.serviceConfig?.vpcConnector !== undefined) keys.push('serviceConfig.vpcConnector'); 636 | if (cf.serviceConfig?.vpcConnectorEgressSettings !== undefined) 637 | keys.push('serviceConfig.vpcConnectorEgressSettings'); 638 | 639 | if (cf.eventTrigger?.triggerRegion !== undefined) keys.push('eventTrigger.triggerRegion'); 640 | if (cf.eventTrigger?.eventType !== undefined) keys.push('eventTrigger.eventType'); 641 | if (cf.eventTrigger?.eventFilters !== undefined) keys.push('eventTrigger.eventFilters'); 642 | if (cf.eventTrigger?.pubsubTopic !== undefined) keys.push('eventTrigger.pubsubTopic'); 643 | if 
(cf.eventTrigger?.serviceAccountEmail !== undefined) 644 | keys.push('eventTrigger.serviceAccountEmail'); 645 | if (cf.eventTrigger?.retryPolicy !== undefined) keys.push('eventTrigger.retryPolicy'); 646 | if (cf.eventTrigger?.channel !== undefined) keys.push('eventTrigger.channel'); 647 | if (cf.eventTrigger?.service !== undefined) keys.push('eventTrigger.service'); 648 | 649 | return keys.join(','); 650 | } 651 | } 652 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import { EntryData } from 'archiver'; 18 | import { 19 | debug as logDebug, 20 | getInput, 21 | info as logInfo, 22 | isDebug, 23 | setFailed, 24 | setOutput, 25 | } from '@actions/core'; 26 | import { 27 | errorMessage, 28 | parseBoolean, 29 | parseDuration, 30 | parseKVString, 31 | presence, 32 | toEnum, 33 | } from '@google-github-actions/actions-utils'; 34 | 35 | import { 36 | CloudFunction, 37 | CloudFunctionsClient, 38 | Environment, 39 | IngressSettings, 40 | RetryPolicy, 41 | VpcConnectorEgressSettings, 42 | } from './client'; 43 | import { formatEntry, parseEventTriggerFilters, parseSecrets, stringToInt } from './util'; 44 | 45 | async function run() { 46 | try { 47 | // Google Cloud inputs 48 | const projectID = presence(getInput('project_id')) || presence(process.env?.GCLOUD_PROJECT); 49 | const region = presence(getInput('region')) || 'us-central1'; 50 | const universe = getInput('universe') || 'googleapis.com'; 51 | 52 | // top-level inputs 53 | const name = getInput('name', { required: true }); 54 | const description = presence(getInput('description')); 55 | const environment = toEnum(Environment, getInput('environment') || Environment.GEN_2); 56 | const kmsKeyName = presence(getInput('kms_key_name')); 57 | const labels = parseKVString(getInput('labels')); 58 | const sourceDir = presence(getInput('source_dir')) || process.cwd(); 59 | 60 | // buildConfig 61 | const runtime = getInput('runtime', { required: true }); 62 | const buildEnvironmentVariables = parseKVString(getInput('build_environment_variables')); 63 | const buildServiceAccount = presence(getInput('build_service_account')); 64 | const buildWorkerPool = presence(getInput('build_worker_pool')); 65 | const dockerRepository = presence(getInput('docker_repository')); 66 | const entryPoint = presence(getInput('entry_point')); 67 | 68 | // serviceConfig 69 | const allTrafficOnLatestRevision = parseBoolean( 70 | getInput('all_traffic_on_latest_revision'), 71 | true, 72 | ); 73 | const availableCpu = presence(getInput('cpu')); 74 | const availableMemory = presence(getInput('memory')) || '256Mi'; 75 | const environmentVariables = parseKVString(getInput('environment_variables')); 76 | const ingressSettings = toEnum( 77 | IngressSettings, 78 | getInput('ingress_settings') || 
IngressSettings.ALLOW_ALL, 79 | ); 80 | const maxInstanceCount = presence(getInput('max_instance_count')); 81 | const maxInstanceRequestConcurrency = stringToInt(getInput('max_instance_request_concurrency')); 82 | const minInstanceCount = presence(getInput('min_instance_count')); 83 | const [secretEnvironmentVariables, secretVolumes] = parseSecrets(getInput('secrets')); 84 | const serviceAccount = presence(getInput('service_account')); 85 | const serviceTimeout = parseDuration(getInput('service_timeout')); 86 | const vpcConnector = presence(getInput('vpc_connector')); 87 | const vpcConnectorEgressSettings = toEnum( 88 | VpcConnectorEgressSettings, 89 | getInput('vpc_connector_egress_settings') || VpcConnectorEgressSettings.PRIVATE_RANGES_ONLY, 90 | ); 91 | 92 | // eventTrigger 93 | const eventTriggerLocation = presence(getInput('event_trigger_location')); 94 | const eventTriggerType = presence(getInput('event_trigger_type')); 95 | const eventTriggerFilters = parseEventTriggerFilters(getInput('event_trigger_filters')); 96 | const eventTriggerPubSubTopic = presence(getInput('event_trigger_pubsub_topic')); 97 | const eventTriggerServiceAccount = presence(getInput('event_trigger_service_account')); 98 | const eventTriggerRetryPolicy = parseBoolean(getInput('event_trigger_retry'), true) 99 | ? RetryPolicy.RETRY_POLICY_RETRY 100 | : RetryPolicy.RETRY_POLICY_DO_NOT_RETRY; 101 | const eventTriggerChannel = presence(getInput('event_trigger_channel')); 102 | 103 | // Validation 104 | if (serviceTimeout <= 0) { 105 | throw new Error( 106 | `The 'service_timeout' parameter must be > 0 seconds (got ${serviceTimeout})`, 107 | ); 108 | } 109 | 110 | // Create Cloud Functions client 111 | const client = new CloudFunctionsClient({ 112 | projectID: projectID, 113 | location: region, 114 | universe: universe, 115 | }); 116 | 117 | // Create Function definition 118 | const cf: CloudFunction = { 119 | name: name, 120 | description: description, 121 | environment: environment, 122 | kmsKeyName: kmsKeyName, 123 | labels: labels, 124 | 125 | buildConfig: { 126 | runtime: runtime, 127 | entryPoint: entryPoint, 128 | dockerRepository: dockerRepository, 129 | environmentVariables: buildEnvironmentVariables, 130 | serviceAccount: buildServiceAccount, 131 | workerPool: buildWorkerPool, 132 | }, 133 | 134 | serviceConfig: { 135 | allTrafficOnLatestRevision: allTrafficOnLatestRevision, 136 | availableCpu: availableCpu, 137 | availableMemory: availableMemory, 138 | environmentVariables: environmentVariables, 139 | ingressSettings: ingressSettings, 140 | maxInstanceCount: maxInstanceCount ? +maxInstanceCount : undefined, 141 | maxInstanceRequestConcurrency: maxInstanceRequestConcurrency, 142 | minInstanceCount: minInstanceCount ? 
+minInstanceCount : undefined, 143 | secretEnvironmentVariables: secretEnvironmentVariables, 144 | secretVolumes: secretVolumes, 145 | serviceAccountEmail: serviceAccount, 146 | timeoutSeconds: serviceTimeout, 147 | vpcConnector: vpcConnector, 148 | vpcConnectorEgressSettings: vpcConnectorEgressSettings, 149 | }, 150 | 151 | eventTrigger: { 152 | triggerRegion: eventTriggerLocation, 153 | eventType: eventTriggerType, 154 | eventFilters: eventTriggerFilters, 155 | pubsubTopic: eventTriggerPubSubTopic, 156 | serviceAccountEmail: eventTriggerServiceAccount, 157 | retryPolicy: eventTriggerRetryPolicy, 158 | channel: eventTriggerChannel, 159 | }, 160 | }; 161 | 162 | // Ensure eventTrigger isn't set if no eventTrigger was given 163 | if (!cf.eventTrigger?.eventType) { 164 | delete cf.eventTrigger; 165 | } 166 | 167 | // Ensure vpcConnectorEgressSettings isn't set if no vpcConnector was given 168 | if (!cf.serviceConfig?.vpcConnector) { 169 | delete cf.serviceConfig?.vpcConnectorEgressSettings; 170 | } 171 | 172 | // Deploy the Cloud Function 173 | const resp = await client.deployFromLocalSource(cf, sourceDir, { 174 | onZip: (sourceDir: string, zipPath: string) => { 175 | logInfo(`Created zip file from '${sourceDir}' at '${zipPath}'`); 176 | }, 177 | onZipAddEntry: (entry: EntryData) => { 178 | logDebug(formatEntry(entry)); 179 | }, 180 | onZipIgnoreEntry: (entry: EntryData) => { 181 | logDebug(`Ignoring ${entry.name}`); 182 | }, 183 | onNew: () => { 184 | logInfo('Creating new Cloud Functions deployment'); 185 | }, 186 | onExisting: () => { 187 | logInfo('Updating existing Cloud Functions deployment'); 188 | }, 189 | onPoll: ((): (() => void) => { 190 | let iteration = 0; 191 | return () => { 192 | if (iteration === 0) { 193 | process.stdout.write(`Deploying Cloud Function...`); 194 | } else { 195 | process.stdout.write(`.`); 196 | } 197 | iteration++; 198 | }; 199 | })(), 200 | onDebug: (f) => { 201 | if (isDebug()) { 202 | logDebug(f()); 203 | } 204 | }, 205 | }); 206 | 207 | if (resp.state !== 'ACTIVE') { 208 | throw new Error( 209 | `Cloud Function deployment finished, but the function not in the ` + 210 | `"ACTIVE" status. The current status is "${resp.state}", which ` + 211 | `could indicate a failed deployment. Check the Cloud Function ` + 212 | `logs for more information.`, 213 | ); 214 | } 215 | 216 | setOutput('name', resp.name); 217 | setOutput('url', resp.url); 218 | } catch (err) { 219 | const msg = errorMessage(err); 220 | setFailed(`google-github-actions/deploy-cloud-functions failed with: ${msg}`); 221 | } 222 | } 223 | 224 | if (require.main === module) { 225 | run(); 226 | } 227 | -------------------------------------------------------------------------------- /src/secret.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | /** 18 | * Parses a string into a Google Secret Manager reference. 
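The accepted reference formats are easiest to see by example. The values and results below mirror the cases exercised in `tests/secret.test.ts` later in this repository:

```ts
import { SecretName } from './secret';

// Each accepted format, with the fields it resolves to:
new SecretName('projects/fruits/secrets/apple/versions/123');
// -> project: 'fruits', name: 'apple', version: '123'
new SecretName('projects/fruits/secrets/apple');
// -> project: 'fruits', name: 'apple', version: 'latest'
new SecretName('fruits/apple/123');
// -> project: 'fruits', name: 'apple', version: '123'
new SecretName('fruits/apple');
// -> project: 'fruits', name: 'apple', version: 'latest'
```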
19 | * 20 | * @param s String reference to parse 21 | * @returns Reference 22 | */ 23 | export class SecretName { 24 | // project, name, and version are the secret ref 25 | readonly project: string; 26 | readonly name: string; 27 | readonly version: string; 28 | 29 | constructor(s: string | null | undefined) { 30 | s = (s || '').trim(); 31 | if (!s) { 32 | throw new Error(`Missing secret name`); 33 | } 34 | 35 | const refParts = s.split('/'); 36 | switch (refParts.length) { 37 | // projects/

<project>/secrets/<name>/versions/<version> 38 | case 6: { 39 | this.project = refParts[1]; 40 | this.name = refParts[3]; 41 | this.version = refParts[5]; 42 | break; 43 | } 44 | // projects/<project>/secrets/<name>
45 | case 4: { 46 | this.project = refParts[1]; 47 | this.name = refParts[3]; 48 | this.version = 'latest'; 49 | break; 50 | } 51 | // <project>/<name>/<version>
52 | case 3: { 53 | this.project = refParts[0]; 54 | this.name = refParts[1]; 55 | this.version = refParts[2]; 56 | break; 57 | } 58 | // <project>/<name>
/ 59 | case 2: { 60 | this.project = refParts[0]; 61 | this.name = refParts[1]; 62 | this.version = 'latest'; 63 | break; 64 | } 65 | default: { 66 | throw new TypeError( 67 | `Failed to parse secret reference "${s}": unknown format. Secrets ` + 68 | `should be of the format "projects/p/secrets/s/versions/v".`, 69 | ); 70 | } 71 | } 72 | } 73 | 74 | /** 75 | * Returns the full GCP self link. 76 | * 77 | * @returns String self link. 78 | */ 79 | public selfLink(): string { 80 | return `projects/${this.project}/secrets/${this.name}/versions/${this.version}`; 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import fs from 'fs'; 18 | import { posix } from 'path'; 19 | import * as path from 'path'; 20 | 21 | import * as Archiver from 'archiver'; 22 | import { 23 | parseGcloudIgnore, 24 | parseKVString, 25 | toPlatformPath, 26 | } from '@google-github-actions/actions-utils'; 27 | import ignore from 'ignore'; 28 | 29 | import { EventFilter, SecretEnvVar, SecretVolume } from './client'; 30 | import { SecretName } from './secret'; 31 | 32 | /** 33 | * OnZipEntryFunction is a function that is called for each entry in the 34 | * archive. 35 | */ 36 | export type OnZipEntryFunction = (entry: Archiver.EntryData) => void; 37 | 38 | /** 39 | * ZipOptions is used as input to the zip function. 40 | */ 41 | export type ZipOptions = { 42 | /** 43 | * onZipAddEntry is called when an entry is added to the archive. 44 | */ 45 | onZipAddEntry?: OnZipEntryFunction; 46 | 47 | /** 48 | * onZipIgnoreEntry is called when an entry is ignored due to an ignore 49 | * specification. 50 | */ 51 | onZipIgnoreEntry?: OnZipEntryFunction; 52 | }; 53 | 54 | /** 55 | * Zip a directory. 56 | * 57 | * @param dirPath Directory to zip. 58 | * @param outputPath Path to output file. 59 | * @param opts Options with which to invoke the zip. 60 | * @returns filepath of the created zip file. 
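A typical call, modeled on how the unit tests drive this helper (the directory, output name, and log handler are illustrative):

```ts
import os from 'os';
import * as path from 'path';
import crypto from 'crypto';
import { zipDir } from './util';

async function example(): Promise<void> {
  // Archive a source directory into a temp file; entries matched by a
  // .gcloudignore in that directory are skipped.
  const outputPath = path.join(os.tmpdir(), crypto.randomBytes(12).toString('hex') + '.zip');
  const zipPath = await zipDir('./tests/test-node-func', outputPath, {
    onZipIgnoreEntry: (entry) => console.log(`ignored: ${entry.name}`),
  });
  console.log(`wrote ${zipPath}`);
}

example().catch(console.error);
```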
61 | */ 62 | export async function zipDir( 63 | dirPath: string, 64 | outputPath: string, 65 | opts?: ZipOptions, 66 | ): Promise { 67 | // Check dirpath 68 | if (!fs.existsSync(dirPath)) { 69 | throw new Error(`Unable to find ${dirPath}`); 70 | } 71 | 72 | // Create output file stream 73 | const output = fs.createWriteStream(outputPath); 74 | 75 | // Process gcloudignore 76 | const ignoreFile = toPlatformPath(path.join(dirPath, '.gcloudignore')); 77 | const ignores = await parseGcloudIgnore(ignoreFile); 78 | const ignorer = ignore().add(ignores); 79 | const ignoreFn = (entry: Archiver.EntryData): false | Archiver.EntryData => { 80 | if (ignorer.ignores(entry.name)) { 81 | if (opts?.onZipIgnoreEntry) opts.onZipIgnoreEntry(entry); 82 | return false; 83 | } 84 | return entry; 85 | }; 86 | 87 | return new Promise((resolve, reject) => { 88 | // Initialize archive 89 | const archive = Archiver.create('zip', { zlib: { level: 7 } }); 90 | archive.on('entry', (entry) => { 91 | // For some reason, TypeScript complains if this guard is outside the 92 | // closure. It would be more performant just not create this listener, but 93 | // alas... 94 | if (opts?.onZipAddEntry) opts.onZipAddEntry(entry); 95 | }); 96 | archive.on('warning', (err) => reject(err)); 97 | archive.on('error', (err) => reject(err)); 98 | output.on('finish', () => resolve(outputPath)); 99 | 100 | // Pipe all archive data to be written 101 | archive.pipe(output); 102 | 103 | // Add files in dir to archive iff file not ignored 104 | archive.directory(dirPath, false, ignoreFn); 105 | 106 | // Finish writing files 107 | archive.finalize(); 108 | }); 109 | } 110 | 111 | /** 112 | * RealEntryData is an extended form of entry data. 113 | */ 114 | type RealEntryData = Archiver.EntryData & { 115 | sourcePath?: string; 116 | type?: string; 117 | }; 118 | 119 | /** 120 | * formatEntry formats the given entry data into a single-line string. 121 | * @returns string 122 | */ 123 | export function formatEntry(entry: RealEntryData): string { 124 | const name = entry.name; 125 | const mode = entry.mode || '000'; 126 | const sourcePath = entry.sourcePath || 'unknown'; 127 | const type = (entry.type || 'unknown').toUpperCase()[0]; 128 | return `[${type}] (${mode}) ${name} => ${sourcePath}`; 129 | } 130 | 131 | /** 132 | * stringToInt is a helper that converts the given string into an integer. If 133 | * the given string is empty, it returns undefined. If the string is not empty 134 | * and parseInt fails (returns NaN), it throws an error. Otherwise, it returns 135 | * the integer value. 136 | * 137 | * @param str String to parse as an int. 138 | * @returns Parsed integer or undefined if the input was the empty string. 139 | */ 140 | export function stringToInt(str: string): number | undefined { 141 | str = (str || '').trim().replace(/[_,]/g, ''); 142 | if (str === '') { 143 | return undefined; 144 | } 145 | 146 | const result = parseInt(str); 147 | if (isNaN(result)) { 148 | throw new Error(`input "${str}" is not a number`); 149 | } 150 | return result; 151 | } 152 | 153 | /** 154 | * parseEventTriggerFilters is a helper that parses the inputs into a list of event 155 | * filters. 
156 | */ 157 | export function parseEventTriggerFilters(val: string): EventFilter[] | undefined { 158 | const kv = parseKVString(val); 159 | if (kv === undefined) { 160 | return undefined; 161 | } 162 | 163 | const result: EventFilter[] = []; 164 | for (const [key, value] of Object.entries(kv)) { 165 | if (value.startsWith('PATTERN:')) { 166 | result.push({ 167 | attribute: key, 168 | value: value.slice(8), 169 | operator: 'match-path-pattern', 170 | }); 171 | } else { 172 | result.push({ 173 | attribute: key, 174 | value: value, 175 | }); 176 | } 177 | } 178 | 179 | return result; 180 | } 181 | 182 | /** 183 | * parseSecrets parses the input as environment variable and volume mounted 184 | * secrets. 185 | */ 186 | export function parseSecrets( 187 | val: string, 188 | ): [SecretEnvVar[] | undefined, SecretVolume[] | undefined] { 189 | const kv = parseKVString(val); 190 | if (kv === undefined) { 191 | return [undefined, undefined]; 192 | } 193 | 194 | const secretEnvVars: SecretEnvVar[] = []; 195 | const secretVolumes: SecretVolume[] = []; 196 | for (const [key, value] of Object.entries(kv)) { 197 | const secretRef = new SecretName(value); 198 | 199 | if (key.startsWith('/')) { 200 | // SecretVolume 201 | const mountPath = posix.dirname(key); 202 | const pth = posix.basename(key); 203 | 204 | secretVolumes.push({ 205 | mountPath: mountPath, 206 | projectId: secretRef.project, 207 | secret: secretRef.name, 208 | versions: [ 209 | { 210 | path: pth, 211 | version: secretRef.version, 212 | }, 213 | ], 214 | }); 215 | } else { 216 | // SecretEnvVar 217 | secretEnvVars.push({ 218 | key: key, 219 | projectId: secretRef.project, 220 | secret: secretRef.name, 221 | version: secretRef.version, 222 | }); 223 | } 224 | } 225 | 226 | return [secretEnvVars, secretVolumes]; 227 | } 228 | -------------------------------------------------------------------------------- /tests/client.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2024 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | import { test } from 'node:test'; 18 | import assert from 'node:assert'; 19 | 20 | import os from 'os'; 21 | import path from 'path'; 22 | import crypto from 'crypto'; 23 | 24 | import { skipIfMissingEnv } from '@google-github-actions/actions-utils'; 25 | 26 | import { CloudFunctionsClient, CloudFunction, Environment, IngressSettings } from '../src/client'; 27 | import { SecretName } from '../src/secret'; 28 | import { zipDir } from '../src/util'; 29 | 30 | const { TEST_PROJECT_ID, TEST_SERVICE_ACCOUNT_EMAIL, TEST_SECRET_VERSION_NAME } = process.env; 31 | const TEST_LOCATION = 'us-central1'; 32 | const TEST_SEED = crypto.randomBytes(12).toString('hex').toLowerCase(); 33 | const TEST_SEED_UPPER = TEST_SEED.toUpperCase(); 34 | const TEST_FUNCTION_NAME = `unit-${TEST_SEED}`; 35 | 36 | test( 37 | 'lifecycle', 38 | { 39 | concurrency: true, 40 | skip: skipIfMissingEnv('TEST_AUTHENTICATED'), 41 | }, 42 | async (suite) => { 43 | // Always try to delete the function 44 | suite.after(async function () { 45 | try { 46 | const client = new CloudFunctionsClient({ 47 | projectID: TEST_PROJECT_ID, 48 | location: TEST_LOCATION, 49 | }); 50 | 51 | await client.delete(TEST_FUNCTION_NAME); 52 | } catch { 53 | // do nothing 54 | } 55 | }); 56 | 57 | await suite.test('can create, read, update, and delete', async () => { 58 | const secret = new SecretName(TEST_SECRET_VERSION_NAME); 59 | 60 | const client = new CloudFunctionsClient({ 61 | projectID: TEST_PROJECT_ID, 62 | location: TEST_LOCATION, 63 | }); 64 | 65 | const outputPath = path.join(os.tmpdir(), crypto.randomBytes(12).toString('hex')); 66 | const zipPath = await zipDir('tests/test-node-func', outputPath); 67 | 68 | // Generate upload URL 69 | const sourceUploadResp = await client.generateUploadURL( 70 | `projects/${TEST_PROJECT_ID}/locations/${TEST_LOCATION}`, 71 | ); 72 | 73 | // Upload source 74 | await client.uploadSource(sourceUploadResp.uploadUrl, zipPath); 75 | 76 | const cf: CloudFunction = { 77 | name: TEST_FUNCTION_NAME, 78 | description: 'test function', 79 | environment: Environment.GEN_2, 80 | labels: { 81 | [`label1-${TEST_SEED}`]: `value1_${TEST_SEED}`, 82 | [`label2-${TEST_SEED}`]: `value2_${TEST_SEED}`, 83 | }, 84 | 85 | buildConfig: { 86 | runtime: 'nodejs22', 87 | entryPoint: 'helloWorld', 88 | source: { 89 | storageSource: sourceUploadResp.storageSource, 90 | }, 91 | environmentVariables: { 92 | [`BUILD_ENV_KEY1_${TEST_SEED_UPPER}`]: `VALUE1_${TEST_SEED}`, 93 | [`BUILD_ENV_KEY2_${TEST_SEED_UPPER}`]: `VALUE2_${TEST_SEED}`, 94 | }, 95 | }, 96 | 97 | serviceConfig: { 98 | allTrafficOnLatestRevision: true, 99 | availableCpu: '1', 100 | availableMemory: '512Mi', 101 | environmentVariables: { 102 | [`SERVICE_ENV_KEY1_${TEST_SEED_UPPER}`]: `VALUE1_${TEST_SEED}`, 103 | [`SERVICE_ENV_KEY2_${TEST_SEED_UPPER}`]: `VALUE2_${TEST_SEED}`, 104 | }, 105 | ingressSettings: IngressSettings.ALLOW_ALL, 106 | maxInstanceCount: 5, 107 | minInstanceCount: 2, 108 | secretEnvironmentVariables: [ 109 | { 110 | key: `SECRET1_${TEST_SEED_UPPER}`, 111 | projectId: secret.project, 112 | secret: secret.name, 113 | version: secret.version, 114 | }, 115 | ], 116 | secretVolumes: [ 117 | { 118 | mountPath: `/etc/secrets/one_${TEST_SEED}`, 119 | projectId: secret.project, 120 | secret: secret.name, 121 | versions: [ 122 | { 123 | path: 'value1', 124 | version: secret.version, 125 | }, 126 | ], 127 | }, 128 | ], 129 | serviceAccountEmail: TEST_SERVICE_ACCOUNT_EMAIL, 130 | timeoutSeconds: 300, 131 | }, 132 | }; 133 | 134 | // Create 135 | const 
createResp = await client.create(cf, { 136 | onDebug: (f) => { 137 | process.stdout.write('\n\n\n\n'); 138 | process.stdout.write(f()); 139 | process.stdout.write('\n\n\n\n'); 140 | }, 141 | }); 142 | assert.ok(createResp?.url); 143 | 144 | // Read 145 | const getResp = await client.get(cf.name); 146 | assert.ok(getResp.name.endsWith(TEST_FUNCTION_NAME)); // The response is the fully-qualified name 147 | assert.deepStrictEqual(getResp.description, 'test function'); 148 | assert.deepStrictEqual(getResp.labels, { 149 | [`label1-${TEST_SEED}`]: `value1_${TEST_SEED}`, 150 | [`label2-${TEST_SEED}`]: `value2_${TEST_SEED}`, 151 | }); 152 | assert.deepStrictEqual(getResp.buildConfig.runtime, 'nodejs22'); 153 | assert.deepStrictEqual(getResp.buildConfig.environmentVariables, { 154 | [`BUILD_ENV_KEY1_${TEST_SEED_UPPER}`]: `VALUE1_${TEST_SEED}`, 155 | [`BUILD_ENV_KEY2_${TEST_SEED_UPPER}`]: `VALUE2_${TEST_SEED}`, 156 | }); 157 | assert.deepStrictEqual(getResp.buildConfig.entryPoint, 'helloWorld'); 158 | assert.deepStrictEqual(getResp.serviceConfig.availableCpu, '1'); 159 | assert.deepStrictEqual(getResp.serviceConfig.availableMemory, '512Mi'); 160 | assert.deepStrictEqual(getResp.serviceConfig.environmentVariables, { 161 | LOG_EXECUTION_ID: 'true', // inserted by GCP 162 | [`SERVICE_ENV_KEY1_${TEST_SEED_UPPER}`]: `VALUE1_${TEST_SEED}`, 163 | [`SERVICE_ENV_KEY2_${TEST_SEED_UPPER}`]: `VALUE2_${TEST_SEED}`, 164 | }); 165 | assert.deepStrictEqual(getResp.serviceConfig.ingressSettings, 'ALLOW_ALL'); 166 | assert.deepStrictEqual(getResp.serviceConfig.maxInstanceCount, 5); 167 | assert.deepStrictEqual(getResp.serviceConfig.minInstanceCount, 2); 168 | assert.deepStrictEqual(getResp.serviceConfig.secretEnvironmentVariables, [ 169 | { 170 | key: `SECRET1_${TEST_SEED_UPPER}`, 171 | projectId: secret.project, 172 | secret: secret.name, 173 | version: secret.version, 174 | }, 175 | ]); 176 | assert.deepStrictEqual(getResp.serviceConfig.secretVolumes, [ 177 | { 178 | mountPath: `/etc/secrets/one_${TEST_SEED}`, 179 | projectId: secret.project, 180 | secret: secret.name, 181 | versions: [ 182 | { 183 | path: 'value1', 184 | version: secret.version, 185 | }, 186 | ], 187 | }, 188 | ]); 189 | assert.deepStrictEqual(getResp.serviceConfig.serviceAccountEmail, TEST_SERVICE_ACCOUNT_EMAIL); 190 | assert.deepStrictEqual(getResp.serviceConfig.timeoutSeconds, 300); 191 | 192 | // Update 193 | const sourceUploadUpdateResp = await client.generateUploadURL( 194 | `projects/${TEST_PROJECT_ID}/locations/${TEST_LOCATION}`, 195 | ); 196 | await client.uploadSource(sourceUploadUpdateResp.uploadUrl, zipPath); 197 | 198 | const cf2: CloudFunction = { 199 | name: TEST_FUNCTION_NAME, 200 | description: 'test function2', 201 | labels: { 202 | [`label3-${TEST_SEED}`]: `value3_${TEST_SEED}`, 203 | [`label4-${TEST_SEED}`]: `value4_${TEST_SEED}`, 204 | }, 205 | 206 | buildConfig: { 207 | runtime: 'nodejs20', 208 | entryPoint: 'helloWorld', 209 | source: { 210 | storageSource: sourceUploadResp.storageSource, 211 | }, 212 | environmentVariables: { 213 | [`BUILD_ENV_KEY3_${TEST_SEED_UPPER}`]: `VALUE3_${TEST_SEED}`, 214 | [`BUILD_ENV_KEY4_${TEST_SEED_UPPER}`]: `VALUE4_${TEST_SEED}`, 215 | }, 216 | }, 217 | 218 | serviceConfig: { 219 | allTrafficOnLatestRevision: true, 220 | availableMemory: '1Gi', 221 | environmentVariables: { 222 | [`SERVICE_ENV_KEY3_${TEST_SEED_UPPER}`]: `VALUE3_${TEST_SEED}`, 223 | [`SERVICE_ENV_KEY4_${TEST_SEED_UPPER}`]: `VALUE4_${TEST_SEED}`, 224 | }, 225 | ingressSettings: IngressSettings.ALLOW_INTERNAL_AND_GCLB, 226 | 
maxInstanceCount: 3, 227 | minInstanceCount: 1, 228 | secretEnvironmentVariables: [ 229 | { 230 | key: `SECRET2_${TEST_SEED_UPPER}`, 231 | projectId: secret.project, 232 | secret: secret.name, 233 | version: secret.version, 234 | }, 235 | ], 236 | secretVolumes: [ 237 | { 238 | mountPath: `/etc/secrets/two_${TEST_SEED}`, 239 | projectId: secret.project, 240 | secret: secret.name, 241 | versions: [ 242 | { 243 | path: 'value2', 244 | version: secret.version, 245 | }, 246 | ], 247 | }, 248 | ], 249 | serviceAccountEmail: TEST_SERVICE_ACCOUNT_EMAIL, 250 | timeoutSeconds: 30, 251 | }, 252 | }; 253 | 254 | const patchResp = await client.patch(cf2, { 255 | onDebug: (f) => { 256 | process.stdout.write('\n\n\n\n'); 257 | process.stdout.write(f()); 258 | process.stdout.write('\n\n\n\n'); 259 | }, 260 | }); 261 | assert.ok(patchResp.name.endsWith(TEST_FUNCTION_NAME)); // The response is the fully-qualified name 262 | assert.deepStrictEqual(patchResp.description, 'test function2'); 263 | assert.deepStrictEqual(patchResp.labels, { 264 | [`label3-${TEST_SEED}`]: `value3_${TEST_SEED}`, 265 | [`label4-${TEST_SEED}`]: `value4_${TEST_SEED}`, 266 | }); 267 | assert.deepStrictEqual(patchResp.buildConfig.runtime, 'nodejs20'); 268 | assert.deepStrictEqual(patchResp.buildConfig.entryPoint, 'helloWorld'); 269 | assert.deepStrictEqual(patchResp.buildConfig.environmentVariables, { 270 | [`BUILD_ENV_KEY3_${TEST_SEED_UPPER}`]: `VALUE3_${TEST_SEED}`, 271 | [`BUILD_ENV_KEY4_${TEST_SEED_UPPER}`]: `VALUE4_${TEST_SEED}`, 272 | }); 273 | assert.deepStrictEqual(patchResp.serviceConfig.availableMemory, '1Gi'); 274 | assert.deepStrictEqual(patchResp.serviceConfig.environmentVariables, { 275 | LOG_EXECUTION_ID: 'true', // inserted by GCP 276 | [`SERVICE_ENV_KEY3_${TEST_SEED_UPPER}`]: `VALUE3_${TEST_SEED}`, 277 | [`SERVICE_ENV_KEY4_${TEST_SEED_UPPER}`]: `VALUE4_${TEST_SEED}`, 278 | }); 279 | assert.deepStrictEqual(patchResp.serviceConfig.ingressSettings, 'ALLOW_INTERNAL_AND_GCLB'); 280 | assert.deepStrictEqual(patchResp.serviceConfig.maxInstanceCount, 3); 281 | assert.deepStrictEqual(patchResp.serviceConfig.minInstanceCount, 1); 282 | assert.deepStrictEqual(patchResp.serviceConfig.secretEnvironmentVariables, [ 283 | { 284 | key: `SECRET2_${TEST_SEED_UPPER}`, 285 | projectId: secret.project, 286 | secret: secret.name, 287 | version: secret.version, 288 | }, 289 | ]); 290 | assert.deepStrictEqual(patchResp.serviceConfig.secretVolumes, [ 291 | { 292 | mountPath: `/etc/secrets/two_${TEST_SEED}`, 293 | projectId: secret.project, 294 | secret: secret.name, 295 | versions: [ 296 | { 297 | path: 'value2', 298 | version: secret.version, 299 | }, 300 | ], 301 | }, 302 | ]); 303 | assert.deepStrictEqual( 304 | patchResp.serviceConfig.serviceAccountEmail, 305 | TEST_SERVICE_ACCOUNT_EMAIL, 306 | ); 307 | assert.deepStrictEqual(patchResp.serviceConfig.timeoutSeconds, 30); 308 | 309 | // Delete 310 | const deleteResp = await client.delete(createResp.name); 311 | assert.ok(deleteResp.done); 312 | }); 313 | }, 314 | ); 315 | 316 | test('#getSafe', { concurrency: true }, async (suite) => { 317 | await suite.test('does not error on a 404', async (t) => { 318 | t.mock.method(CloudFunctionsClient.prototype, 'get', () => { 319 | throw new Error(` 320 | { 321 | "error": { 322 | "code": 404, 323 | "message": "Function my-function does not exist", 324 | "status": "NOT_FOUND" 325 | } 326 | } 327 | `); 328 | }); 329 | 330 | const client = new CloudFunctionsClient(); 331 | const result = await client.getSafe('projects/p/functions/f'); 332 | 
assert.deepStrictEqual(result, null); 333 | }); 334 | 335 | await suite.test('errors on a 403', async (t) => { 336 | t.mock.method(CloudFunctionsClient.prototype, 'get', () => { 337 | throw new Error(` 338 | { 339 | "error": { 340 | "code": 403, 341 | "message": "Permission denied", 342 | "status": "PERMISSION_DENIED" 343 | } 344 | } 345 | `); 346 | }); 347 | 348 | const client = new CloudFunctionsClient(); 349 | await assert.rejects(async () => { 350 | await client.getSafe('projects/p/functions/f'); 351 | }, 'failed to lookup existing function'); 352 | }); 353 | }); 354 | 355 | test('#fullResourceName', { concurrency: true }, async (suite) => { 356 | const cases = [ 357 | { 358 | name: 'empty name', 359 | client: new CloudFunctionsClient(), 360 | input: '', 361 | error: 'name cannot be empty', 362 | }, 363 | { 364 | name: 'empty name spaces', 365 | client: new CloudFunctionsClient(), 366 | input: ' ', 367 | error: 'name cannot be empty', 368 | }, 369 | { 370 | name: 'client missing project id', 371 | client: new CloudFunctionsClient({ projectID: '' }), 372 | input: 'f', 373 | error: 'Failed to get project ID to build resource name', 374 | }, 375 | { 376 | name: 'client missing location', 377 | client: new CloudFunctionsClient({ projectID: 'p', location: '' }), 378 | input: 'f', 379 | error: 'Failed to get location', 380 | }, 381 | { 382 | name: 'invalid resource name', 383 | client: new CloudFunctionsClient(), 384 | input: 'projects/foo', 385 | error: 'Invalid resource name', 386 | }, 387 | { 388 | name: 'full resource name', 389 | client: new CloudFunctionsClient(), 390 | input: 'projects/p/locations/l/functions/f', 391 | expected: 'projects/p/locations/l/functions/f', 392 | }, 393 | { 394 | name: 'builds location', 395 | client: new CloudFunctionsClient({ projectID: 'p', location: 'l' }), 396 | input: 'f', 397 | expected: 'projects/p/locations/l/functions/f', 398 | }, 399 | ]; 400 | 401 | for await (const tc of cases) { 402 | await suite.test(tc.name, async () => { 403 | if (tc.expected) { 404 | const actual = tc.client.fullResourceName(tc.input); 405 | assert.deepStrictEqual(actual, tc.expected); 406 | } else if (tc.error) { 407 | assert.throws(() => { 408 | tc.client.fullResourceName(tc.input); 409 | }, new RegExp(tc.error)); 410 | } 411 | }); 412 | } 413 | }); 414 | 415 | test('#parentFromName', { concurrency: true }, async (suite) => { 416 | const client = new CloudFunctionsClient(); 417 | 418 | const cases = [ 419 | { 420 | name: 'empty string', 421 | input: '', 422 | error: 'Invalid or missing name', 423 | }, 424 | { 425 | name: 'not enough parts', 426 | input: 'foo/bar', 427 | error: 'Invalid or missing name', 428 | }, 429 | { 430 | name: 'extracts parent', 431 | input: 'projects/p/locations/l/functions/f', 432 | expected: 'projects/p/locations/l', 433 | }, 434 | ]; 435 | 436 | for await (const tc of cases) { 437 | await suite.test(tc.name, async () => { 438 | if (tc.expected) { 439 | const actual = client.parentFromName(tc.input); 440 | assert.deepStrictEqual(actual, tc.expected); 441 | } else if (tc.error) { 442 | assert.throws(() => { 443 | client.parentFromName(tc.input); 444 | }, new RegExp(tc.error)); 445 | } 446 | }); 447 | } 448 | }); 449 | -------------------------------------------------------------------------------- /tests/secret.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file 
except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import { test } from 'node:test'; 18 | import assert from 'node:assert'; 19 | 20 | import { SecretName } from '../src/secret'; 21 | 22 | test('SecretName', { concurrency: true }, async (suite) => { 23 | const cases = [ 24 | { 25 | name: 'empty string', 26 | input: '', 27 | error: 'Missing secret name', 28 | }, 29 | { 30 | name: 'null', 31 | input: null, 32 | error: 'Missing secret name', 33 | }, 34 | { 35 | name: 'undefined', 36 | input: undefined, 37 | error: 'Missing secret name', 38 | }, 39 | { 40 | name: 'bad resource name', 41 | input: 'projects/fruits/secrets/apple/versions/123/subversions/5', 42 | error: 'Failed to parse secret reference', 43 | }, 44 | { 45 | name: 'bad resource name', 46 | input: 'projects/fruits/secrets/apple/banana/bacon/pants', 47 | error: 'Failed to parse secret reference', 48 | }, 49 | { 50 | name: 'full resource name', 51 | input: 'projects/fruits/secrets/apple/versions/123', 52 | expected: { 53 | project: 'fruits', 54 | secret: 'apple', 55 | version: '123', 56 | }, 57 | }, 58 | { 59 | name: 'full resource name without version', 60 | input: 'projects/fruits/secrets/apple', 61 | expected: { 62 | project: 'fruits', 63 | secret: 'apple', 64 | version: 'latest', 65 | }, 66 | }, 67 | { 68 | name: 'short ref', 69 | input: 'fruits/apple/123', 70 | expected: { 71 | project: 'fruits', 72 | secret: 'apple', 73 | version: '123', 74 | }, 75 | }, 76 | { 77 | name: 'short ref without version', 78 | input: 'fruits/apple', 79 | expected: { 80 | project: 'fruits', 81 | secret: 'apple', 82 | version: 'latest', 83 | }, 84 | }, 85 | ]; 86 | 87 | for await (const tc of cases) { 88 | await suite.test(tc.name, async () => { 89 | if (tc.error) { 90 | assert.throws(() => { 91 | new SecretName(tc.input); 92 | }, new RegExp(tc.error)); 93 | } else { 94 | const secret = new SecretName(tc.input); 95 | assert.deepStrictEqual(secret.project, tc.expected?.project); 96 | assert.deepStrictEqual(secret.name, tc.expected?.secret); 97 | assert.deepStrictEqual(secret.version, tc.expected?.version); 98 | } 99 | }); 100 | } 101 | }); 102 | -------------------------------------------------------------------------------- /tests/test-func-ignore-node/.gcloudignore: -------------------------------------------------------------------------------- 1 | bar/ 2 | -------------------------------------------------------------------------------- /tests/test-func-ignore-node/bar/bar.txt: -------------------------------------------------------------------------------- 1 | test -------------------------------------------------------------------------------- /tests/test-func-ignore-node/bar/baz/baz.txt: -------------------------------------------------------------------------------- 1 | test -------------------------------------------------------------------------------- /tests/test-func-ignore-node/foo/data.txt: -------------------------------------------------------------------------------- 1 | data -------------------------------------------------------------------------------- /tests/test-func-ignore-node/index.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * Responds to any HTTP request. 3 | * 4 | * @param {!express:Request} req HTTP request context. 5 | * @param {!express:Response} res HTTP response context. 6 | */ 7 | exports.helloWorld = (req, res) => { 8 | let message = req.query.message || req.body.message || 'Hello World!!'; 9 | res.status(200).send(message); 10 | }; 11 | -------------------------------------------------------------------------------- /tests/test-func-ignore-node/notIgnored.txt: -------------------------------------------------------------------------------- 1 | baz 2 | -------------------------------------------------------------------------------- /tests/test-func-ignore-node/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample-http", 3 | "version": "0.0.1" 4 | } 5 | -------------------------------------------------------------------------------- /tests/test-func-ignore/.gcloudignore: -------------------------------------------------------------------------------- 1 | *.txt 2 | .gcloudignore -------------------------------------------------------------------------------- /tests/test-func-ignore/ignore.txt: -------------------------------------------------------------------------------- 1 | foo -------------------------------------------------------------------------------- /tests/test-func-ignore/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Responds to any HTTP request. 3 | * 4 | * @param {!express:Request} req HTTP request context. 5 | * @param {!express:Response} res HTTP response context. 6 | */ 7 | exports.helloWorld = (req, res) => { 8 | let message = req.query.message || req.body.message || 'Hello World!!'; 9 | res.status(200).send(message); 10 | }; 11 | -------------------------------------------------------------------------------- /tests/test-func-ignore/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample-http", 3 | "version": "0.0.1" 4 | } 5 | -------------------------------------------------------------------------------- /tests/test-node-func/.dotfile: -------------------------------------------------------------------------------- 1 | I exist! 2 | -------------------------------------------------------------------------------- /tests/test-node-func/index.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Responds to any HTTP request. 3 | * 4 | * @param {!express:Request} req HTTP request context. 5 | * @param {!express:Response} res HTTP response context. 
6 | */ 7 | 8 | let fs = require('fs'); 9 | 10 | exports.helloWorld = (req, res) => { 11 | // Still send a 200 so we get the response (gaxios and other libraries barf on 12 | // non-200) 13 | if (!fs.existsSync('.dotfile')) { 14 | res.status(200).send('Dotfile does not exist!'); 15 | return; 16 | } 17 | 18 | let message = req.query.message || req.body.message || 'Hello World!!'; 19 | res.status(200).send(message); 20 | }; 21 | -------------------------------------------------------------------------------- /tests/test-node-func/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sample-http", 3 | "version": "0.0.1" 4 | } 5 | -------------------------------------------------------------------------------- /tests/util.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2024 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | import { test } from 'node:test'; 18 | import assert from 'node:assert'; 19 | 20 | import StreamZip from 'node-stream-zip'; 21 | import { assertMembers, randomFilepath } from '@google-github-actions/actions-utils'; 22 | 23 | import { parseEventTriggerFilters, stringToInt, zipDir } from '../src/util'; 24 | 25 | test('#zipDir', { concurrency: true }, async (suite) => { 26 | const cases = [ 27 | { 28 | name: 'throws an error if sourceDir does not exist', 29 | zipDir: '/not/a/real/path', 30 | expectedFiles: [], 31 | error: 'Unable to find', 32 | }, 33 | { 34 | name: 'creates a zipfile with correct files without gcloudignore', 35 | zipDir: 'tests/test-node-func', 36 | expectedFiles: ['.dotfile', 'index.js', 'package.json'], 37 | }, 38 | { 39 | name: 'creates a zipfile with correct files with simple gcloudignore', 40 | zipDir: 'tests/test-func-ignore', 41 | expectedFiles: ['index.js', 'package.json'], 42 | }, 43 | { 44 | name: 'creates a zipfile with correct files with simple gcloudignore', 45 | zipDir: 'tests/test-func-ignore-node', 46 | expectedFiles: [ 47 | '.gcloudignore', 48 | 'foo/data.txt', 49 | 'index.js', 50 | 'notIgnored.txt', 51 | 'package.json', 52 | ], 53 | }, 54 | ]; 55 | 56 | for await (const tc of cases) { 57 | await suite.test(tc.name, async () => { 58 | if (tc.error) { 59 | await assert.rejects(async () => { 60 | await zipDir(tc.zipDir, randomFilepath()); 61 | }, new RegExp(tc.error)); 62 | } else { 63 | const zf = await zipDir(tc.zipDir, randomFilepath()); 64 | const filesInsideZip = await getFilesInZip(zf); 65 | assertMembers(filesInsideZip, tc.expectedFiles); 66 | } 67 | }); 68 | } 69 | }); 70 | 71 | test('#stringToInt', { concurrency: true }, async (suite) => { 72 | const cases = [ 73 | { 74 | name: 'empty', 75 | input: '', 76 | expected: undefined, 77 | }, 78 | { 79 | name: 'spaces', 80 | input: ' ', 81 | expected: undefined, 82 | }, 83 | { 84 | name: 'digit', 85 | input: '1', 86 | expected: 1, 87 | }, 88 | { 89 | name: 'multi-digit', 90 | input: '123', 91 | expected: 123, 
92 | }, 93 | { 94 | name: 'suffix', 95 | input: '100MB', 96 | expected: 100, 97 | }, 98 | { 99 | name: 'comma', 100 | input: '1,000', 101 | expected: 1000, 102 | }, 103 | { 104 | name: 'NaN', 105 | input: 'this is definitely not a number', 106 | error: 'input "this is definitely not a number" is not a number', 107 | }, 108 | ]; 109 | 110 | for await (const tc of cases) { 111 | await suite.test(tc.name, async () => { 112 | if (tc.error) { 113 | assert.throws(() => { 114 | stringToInt(tc.input); 115 | }, new RegExp(tc.error)); 116 | } else { 117 | const actual = stringToInt(tc.input); 118 | assert.deepStrictEqual(actual, tc.expected); 119 | } 120 | }); 121 | } 122 | }); 123 | 124 | test('#parseEventTriggerFilters', { concurrency: true }, async (suite) => { 125 | const cases = [ 126 | { 127 | name: 'empty', 128 | input: '', 129 | expected: undefined, 130 | }, 131 | { 132 | name: 'braces', 133 | input: '{}', 134 | expected: [], 135 | }, 136 | { 137 | name: 'braces', 138 | input: ` 139 | type=google.cloud.audit.log.v1.written 140 | serviceName=compute.googleapis.com 141 | methodName=PATTERN:compute.instances.* 142 | `, 143 | expected: [ 144 | { 145 | attribute: 'type', 146 | value: 'google.cloud.audit.log.v1.written', 147 | }, 148 | { 149 | attribute: 'serviceName', 150 | value: 'compute.googleapis.com', 151 | }, 152 | { 153 | attribute: 'methodName', 154 | value: 'compute.instances.*', 155 | operator: 'match-path-pattern', 156 | }, 157 | ], 158 | }, 159 | ]; 160 | 161 | for await (const tc of cases) { 162 | await suite.test(tc.name, async () => { 163 | const actual = parseEventTriggerFilters(tc.input); 164 | assert.deepStrictEqual(actual, tc.expected); 165 | }); 166 | } 167 | }); 168 | 169 | /** 170 | * 171 | * @param zipFilePath path to zipfile 172 | * @returns list of files within zipfile 173 | */ 174 | async function getFilesInZip(zipFilePath: string): Promise<string[]> { 175 | const uzf = new StreamZip.async({ file: zipFilePath }); 176 | const zipEntries = await uzf.entries(); 177 | const filesInsideZip: string[] = []; 178 | for (const k in zipEntries) { 179 | if (zipEntries[k].isFile) { 180 | filesInsideZip.push(zipEntries[k].name); 181 | } 182 | } 183 | return filesInsideZip; 184 | } 185 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Google LLC 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | { 17 | "compilerOptions": { 18 | "alwaysStrict": true, 19 | "target": "es6", 20 | "module": "commonjs", 21 | "lib": [ 22 | "es6" 23 | ], 24 | "outDir": "./dist", 25 | "rootDir": "./src", 26 | "strict": true, 27 | "noImplicitAny": true, 28 | "esModuleInterop": true 29 | }, 30 | "exclude": ["node_modules/", "tests/"] 31 | } 32 | --------------------------------------------------------------------------------