├── .node-version ├── .gitattributes ├── .prettierignore ├── .eslintignore ├── CODEOWNERS ├── src ├── index.ts ├── api-client.ts ├── config.ts ├── fs-helper.ts ├── oci-container.ts ├── main.ts └── ghcr-client.ts ├── .github ├── linters │ ├── .markdown-lint.yml │ ├── .yaml-lint.yml │ ├── tsconfig.json │ └── .eslintrc.yml ├── dependabot.yml └── workflows │ ├── release.yml │ ├── release-new-action-version.yml │ ├── ci.yml │ ├── codeql-analysis.yml │ ├── linter.yml │ └── check-dist.yml ├── .prettierrc.json ├── __tests__ ├── index.test.ts ├── api-client.test.ts ├── oci-container.test.ts ├── fs-helper.test.ts ├── config.test.ts ├── ghcr-client.test.ts └── main.test.ts ├── tsconfig.json ├── action.yml ├── badges └── coverage.svg ├── LICENSE ├── script └── release ├── .gitignore ├── README.md └── package.json /.node-version: -------------------------------------------------------------------------------- 1 | 20.6.0 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true 2 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | node_modules/ 3 | coverage/ 4 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | lib/ 2 | dist/ 3 | node_modules/ 4 | coverage/ 5 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Repository CODEOWNERS 2 | 3 | * @actions/actions-sudo 4 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * The entrypoint for the action. 
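 * Importing this module calls run() immediately; the floating promise below is
 * intentional, since run() handles its own failures via core.setFailed.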
3 | */ 4 | import { run } from './main' 5 | 6 | // eslint-disable-next-line @typescript-eslint/no-floating-promises 7 | run() 8 | -------------------------------------------------------------------------------- /.github/linters/.markdown-lint.yml: -------------------------------------------------------------------------------- 1 | # Unordered list style 2 | MD004: 3 | style: dash 4 | 5 | # Increase the max line length limit 6 | MD013: 7 | line_length: 200 8 | 9 | # Ordered list item prefix 10 | MD029: 11 | style: one 12 | 13 | -------------------------------------------------------------------------------- /.github/linters/.yaml-lint.yml: -------------------------------------------------------------------------------- 1 | rules: 2 | document-end: disable 3 | document-start: 4 | level: warning 5 | present: false 6 | line-length: 7 | level: warning 8 | max: 80 9 | allow-non-breakable-words: true 10 | allow-non-breakable-inline-mappings: true 11 | -------------------------------------------------------------------------------- /.github/linters/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "extends": "../../tsconfig.json", 4 | "compilerOptions": { 5 | "noEmit": true 6 | }, 7 | "include": ["../../__tests__/**/*", "../../src/**/*"], 8 | "exclude": ["../../dist", "../../node_modules", "../../coverage", "*.json"] 9 | } 10 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: / 5 | labels: 6 | - dependabot 7 | - actions 8 | schedule: 9 | interval: daily 10 | 11 | - package-ecosystem: npm 12 | directory: / 13 | labels: 14 | - dependabot 15 | - npm 16 | schedule: 17 | interval: daily 18 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": false, 6 | "singleQuote": true, 7 | "quoteProps": "as-needed", 8 | "jsxSingleQuote": false, 9 | "trailingComma": "none", 10 | "bracketSpacing": true, 11 | "bracketSameLine": true, 12 | "arrowParens": "avoid", 13 | "proseWrap": "always", 14 | "htmlWhitespaceSensitivity": "css", 15 | "endOfLine": "lf" 16 | } 17 | -------------------------------------------------------------------------------- /__tests__/index.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Unit tests for the action's entrypoint, src/index.ts 3 | */ 4 | 5 | import * as main from '../src/main' 6 | 7 | // Mock the action's entrypoint 8 | const runMock = jest.spyOn(main, 'run').mockImplementation() 9 | 10 | describe('index', () => { 11 | it('calls run when imported', async () => { 12 | // eslint-disable-next-line @typescript-eslint/no-require-imports 13 | require('../src/index') 14 | 15 | expect(runMock).toHaveBeenCalled() 16 | }) 17 | }) 18 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "compilerOptions": { 4 | "target": "ES2022", 5 | "module": "NodeNext", 6 | "rootDir": "./src", 7 | "moduleResolution": "NodeNext", 8 | 
"baseUrl": "./", 9 | "sourceMap": true, 10 | "outDir": "./dist", 11 | "noImplicitAny": true, 12 | "esModuleInterop": true, 13 | "forceConsistentCasingInFileNames": true, 14 | "strict": true, 15 | "skipLibCheck": true, 16 | "newLine": "lf" 17 | }, 18 | "exclude": ["./dist", "./node_modules", "./__tests__", "./coverage"] 19 | } 20 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # Package and publish the action when a new release is published 2 | # Since this is the publishing action itself, we can use the current checkout as the action 3 | name: 'Publish Immutable Action Version' 4 | on: 5 | release: 6 | types: [published] 7 | permissions: 8 | contents: read 9 | id-token: write 10 | packages: write 11 | jobs: 12 | package-and-publish: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - name: Check out repository 16 | uses: actions/checkout@v4 17 | - name: Publish Immutable Action Version 18 | uses: ./ 19 | -------------------------------------------------------------------------------- /.github/workflows/release-new-action-version.yml: -------------------------------------------------------------------------------- 1 | name: Release new action version 2 | on: 3 | release: 4 | types: [released] 5 | 6 | env: 7 | TAG_NAME: ${{ github.event.release.tag_name }} 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | update_tag: 13 | name: Update the major tag to include the ${{ github.event.release.tag_name }} changes 14 | environment: 15 | name: releaseNewActionVersion 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Update the ${{ env.TAG_NAME }} tag 19 | id: update-major-tag 20 | uses: actions/publish-action@v0.3.0 21 | with: 22 | source-tag: ${{ env.TAG_NAME }} 23 | slack-webhook: ${{ secrets.SLACK_WEBHOOK }} 24 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: 'Package and Publish' 2 | description: 'Publish actions as OCI artifacts to GHCR' 3 | 4 | # TODO: Add your action's branding here. This will appear on the GitHub Marketplace. 5 | branding: 6 | icon: 'heart' 7 | color: 'red' 8 | 9 | inputs: 10 | github-token: 11 | description: 'The GitHub actions token used to authenticate with GitHub APIs' 12 | default: ${{ github.token }} 13 | 14 | outputs: 15 | package-manifest-sha: 16 | description: 'A sha256 hash of the package manifest' 17 | attestation-manifest-sha: 18 | description: 'The sha256 of the provenance attestation uploaded to GHCR. This is not present if the package is not attested, e.g. in enterprise environments.' 19 | referrer-index-manifest-sha: 20 | description: 'The sha256 of the referrer index uploaded to GHCR. This is not present if the package is not attested, e.g. in enterprise environments.' 
21 | 22 | runs: 23 | using: node20 24 | main: dist/index.js 25 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | - 'releases/*' 9 | 10 | permissions: 11 | contents: read 12 | packages: read 13 | 14 | jobs: 15 | test-typescript: 16 | name: TypeScript Tests 17 | runs-on: ubuntu-latest 18 | 19 | steps: 20 | - name: Checkout 21 | id: checkout 22 | uses: actions/checkout@v4 23 | 24 | - name: Setup Node.js 25 | id: setup-node 26 | uses: actions/setup-node@v4 27 | with: 28 | node-version-file: .node-version 29 | cache: npm 30 | 31 | - name: Install Dependencies 32 | id: npm-ci 33 | run: npm ci 34 | 35 | - name: Check Format 36 | id: npm-format-check 37 | run: npm run format:check 38 | 39 | - name: Lint 40 | id: npm-lint 41 | run: npm run lint 42 | 43 | - name: Test 44 | id: npm-ci-test 45 | run: npm run ci-test 46 | 47 | -------------------------------------------------------------------------------- /badges/coverage.svg: -------------------------------------------------------------------------------- 1 | Coverage: 97.06%Coverage97.06% -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 immutable-actions 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: CodeQL 2 | 3 | on: 4 | workflow_dispatch: 5 | # Disable until this is a public repo since advanced security is not enabled 6 | # push: 7 | # branches: 8 | # - main 9 | # pull_request: 10 | # branches: 11 | # - main 12 | # schedule: 13 | # - cron: '31 7 * * 3' 14 | 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | jobs: 21 | analyze: 22 | name: Analyze 23 | runs-on: ubuntu-latest 24 | 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | language: 29 | - TypeScript 30 | 31 | steps: 32 | - name: Checkout 33 | id: checkout 34 | uses: actions/checkout@v4 35 | 36 | - name: Initialize CodeQL 37 | id: initialize 38 | uses: github/codeql-action/init@v3 39 | with: 40 | languages: ${{ matrix.language }} 41 | source-root: src 42 | 43 | - name: Autobuild 44 | id: autobuild 45 | uses: github/codeql-action/autobuild@v3 46 | 47 | - name: Perform CodeQL Analysis 48 | id: analyze 49 | uses: github/codeql-action/analyze@v3 50 | -------------------------------------------------------------------------------- /.github/workflows/linter.yml: -------------------------------------------------------------------------------- 1 | name: Lint Code Base 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | 11 | permissions: 12 | contents: read 13 | statuses: write 14 | packages: read 15 | 16 | jobs: 17 | lint: 18 | name: Lint Code Base 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - name: Checkout 23 | id: checkout 24 | uses: actions/checkout@v4 25 | # this is necessary based on https://github.com/super-linter/super-linter?tab=readme-ov-file#get-started 26 | with: 27 | fetch-depth: 0 28 | 29 | - name: Setup Node.js 30 | id: setup-node 31 | uses: actions/setup-node@v4 32 | with: 33 | node-version-file: .node-version 34 | cache: npm 35 | 36 | - name: Install Dependencies 37 | id: install 38 | run: npm ci 39 | 40 | - name: Lint Code Base 41 | id: super-linter 42 | uses: super-linter/super-linter/slim@v6 43 | env: 44 | DEFAULT_BRANCH: main 45 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 46 | VALIDATE_TYPESCRIPT_STANDARD: false 47 | VALIDATE_JSCPD: false 48 | FILTER_REGEX_EXCLUDE: .*/licenses\.txt$ 49 | -------------------------------------------------------------------------------- /script/release: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # About: 4 | # This is a helper script to tag and push a new release. 5 | # GitHub Actions use release tags to allow users to select a specific version of the action to use. 6 | # This script will do the following: 7 | # 1. Get the latest release tag 8 | # 2. Prompt the user for a new release tag (while displaying the latest release tag, and a regex to validate the new tag) 9 | # 3. Tag the new release 10 | # 4. 
Push the new tag to the remote 11 | 12 | # Usage: 13 | # script/release 14 | 15 | # COLORS 16 | OFF='\033[0m' 17 | RED='\033[0;31m' 18 | GREEN='\033[0;32m' 19 | BLUE='\033[0;34m' 20 | 21 | latest_tag=$(git describe --tags "$(git rev-list --tags --max-count=1)") 22 | 23 | # if the latest_tag is empty, then there are no tags - let the user know 24 | if [[ -z "$latest_tag" ]]; then 25 | echo -e "No tags found (yet) - continue to create your first tag and push it" 26 | latest_tag="[unknown]" 27 | fi 28 | 29 | echo -e "The latest release tag is: ${BLUE}${latest_tag}${OFF}" 30 | read -r -p 'New Release Tag (vX.X.X format): ' new_tag 31 | 32 | tag_regex='v[0-9]+\.[0-9]+\.[0-9]+$' 33 | if echo "$new_tag" | grep -q -E "$tag_regex"; then 34 | echo -e "Tag: ${BLUE}$new_tag${OFF} is valid" 35 | else 36 | echo -e "Tag: ${BLUE}$new_tag${OFF} is ${RED}not valid${OFF} (must be in vX.X.X format)" 37 | exit 1 38 | fi 39 | 40 | git tag -a "$new_tag" -m "$new_tag Release" 41 | 42 | echo -e "${GREEN}OK${OFF} - Tagged: $new_tag" 43 | 44 | git push --tags 45 | 46 | echo -e "${GREEN}OK${OFF} - Tags pushed to remote!" 47 | echo -e "${GREEN}DONE${OFF}" 48 | -------------------------------------------------------------------------------- /src/api-client.ts: -------------------------------------------------------------------------------- 1 | export async function getRepositoryMetadata( 2 | githubAPIURL: string, 3 | repository: string, 4 | token: string 5 | ): Promise<{ repoId: string; ownerId: string; visibility: string }> { 6 | const response = await fetch(`${githubAPIURL}/repos/${repository}`, { 7 | method: 'GET', 8 | headers: { 9 | Authorization: `Bearer ${token}`, 10 | Accept: 'application/vnd.github.v3+json' 11 | } 12 | }) 13 | 14 | if (!response.ok) { 15 | throw new Error( 16 | `Failed to fetch repository metadata due to bad status code: ${response.status}` 17 | ) 18 | } 19 | 20 | const data = await response.json() 21 | 22 | // Check that the response contains the expected data 23 | if (!data.id || !data.owner.id) { 24 | throw new Error( 25 | `Failed to fetch repository metadata: unexpected response format` 26 | ) 27 | } 28 | 29 | return { 30 | repoId: String(data.id), 31 | ownerId: String(data.owner.id), 32 | visibility: String(data.visibility) 33 | } 34 | } 35 | 36 | export async function getContainerRegistryURL( 37 | githubAPIURL: string, 38 | token: string 39 | ): Promise { 40 | const response = await fetch( 41 | `${githubAPIURL}/packages/container-registry-url`, 42 | { 43 | method: 'GET', 44 | headers: { 45 | Authorization: `Bearer ${token}`, 46 | Accept: 'application/vnd.github.v3+json' 47 | } 48 | } 49 | ) 50 | if (!response.ok) { 51 | throw new Error( 52 | `Failed to fetch container registry url due to bad status code: ${response.status}` 53 | ) 54 | } 55 | const data = await response.json() 56 | 57 | if (!data.url) { 58 | throw new Error( 59 | `Failed to fetch repository metadata: unexpected response format` 60 | ) 61 | } 62 | 63 | const registryURL: URL = new URL(data.url) 64 | return registryURL 65 | } 66 | -------------------------------------------------------------------------------- /.github/workflows/check-dist.yml: -------------------------------------------------------------------------------- 1 | # In TypeScript actions, `dist/index.js` is a special file. When you reference 2 | # an action with `uses:`, `dist/index.js` is the code that will be run. For this 3 | # project, the `dist/index.js` file is generated from other source files through 4 | # the build process. 
We need to make sure that the checked-in `dist/index.js` 5 | # file matches what is expected from the build. 6 | # 7 | # This workflow will fail if the checked-in `dist/index.js` file does not match 8 | # what is expected from the build. 9 | name: Check dist/ 10 | 11 | on: 12 | push: 13 | branches: 14 | - main 15 | pull_request: 16 | workflow_dispatch: 17 | 18 | permissions: 19 | contents: read 20 | packages: read 21 | 22 | jobs: 23 | check-dist: 24 | name: Check dist/ 25 | runs-on: ubuntu-latest 26 | 27 | steps: 28 | - name: Checkout 29 | id: checkout 30 | uses: actions/checkout@v4 31 | 32 | - name: Setup Node.js 33 | uses: actions/setup-node@v4 34 | with: 35 | node-version-file: .node-version 36 | cache: npm 37 | 38 | - name: Install Dependencies 39 | id: install 40 | run: npm ci 41 | 42 | - name: Build dist/ Directory 43 | id: build 44 | run: npm run bundle 45 | 46 | - name: Compare Expected and Actual Directories 47 | id: diff 48 | run: | 49 | if [ "$(git diff --ignore-space-at-eol --text dist/ | wc -l)" -gt "0" ]; then 50 | echo "Detected uncommitted changes after build. See status below:" 51 | git diff --ignore-space-at-eol --text dist/ 52 | exit 1 53 | fi 54 | 55 | # If index.js was different than expected, upload the expected version as 56 | # a workflow artifact. 57 | - uses: actions/upload-artifact@v4 58 | if: ${{ failure() && steps.diff.conclusion == 'failure' }} 59 | with: 60 | name: dist 61 | path: dist/ 62 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | .npmrc 5 | 6 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 7 | # Logs 8 | logs 9 | *.log 10 | npm-debug.log* 11 | yarn-debug.log* 12 | yarn-error.log* 13 | lerna-debug.log* 14 | 15 | # Diagnostic reports (https://nodejs.org/api/report.html) 16 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 17 | 18 | # Runtime data 19 | pids 20 | *.pid 21 | *.seed 22 | *.pid.lock 23 | 24 | # Directory for instrumented libs generated by jscoverage/JSCover 25 | lib-cov 26 | 27 | # Coverage directory used by tools like istanbul 28 | coverage 29 | *.lcov 30 | 31 | # nyc test coverage 32 | .nyc_output 33 | 34 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 35 | .grunt 36 | 37 | # Bower dependency directory (https://bower.io/) 38 | bower_components 39 | 40 | # node-waf configuration 41 | .lock-wscript 42 | 43 | # Compiled binary addons (https://nodejs.org/api/addons.html) 44 | build/Release 45 | 46 | # Dependency directories 47 | jspm_packages/ 48 | 49 | # TypeScript v1 declaration files 50 | typings/ 51 | 52 | # TypeScript cache 53 | *.tsbuildinfo 54 | 55 | # Optional npm cache directory 56 | .npm 57 | 58 | # Optional eslint cache 59 | .eslintcache 60 | 61 | # Optional REPL history 62 | .node_repl_history 63 | 64 | # Output of 'npm pack' 65 | *.tgz 66 | 67 | # Yarn Integrity file 68 | .yarn-integrity 69 | 70 | # dotenv environment variables file 71 | .env 72 | .env.test 73 | 74 | # parcel-bundler cache (https://parceljs.org/) 75 | .cache 76 | 77 | # next.js build output 78 | .next 79 | 80 | # nuxt.js build output 81 | .nuxt 82 | 83 | # vuepress build output 84 | .vuepress/dist 85 | 86 | # Serverless directories 87 | .serverless/ 88 | 89 | # FuseBox cache 90 | .fusebox/ 91 | 92 | # DynamoDB Local files 93 | .dynamodb/ 94 | 95 | # OS metadata 96 | .DS_Store 97 | Thumbs.db 98 | 99 | # Ignore built ts 
files 100 | __tests__/runner/* 101 | 102 | # IDE files 103 | .idea 104 | .vscode 105 | *.code-workspace 106 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Publish Immutable Action 2 | 3 | > [!IMPORTANT] 4 | > This action is **not ready for public use**. It is part of an upcoming public roadmap item (see [GitHub Actions: Immutable actions publishing](https://github.com/github/roadmap/issues/592)). 5 | > Attempts to use this action to upload an OCI artifact will not work until this feature has been fully released to the public. Please do not attempt to use it until that time. 6 | 7 | This action packages _your action_ as an [OCI container](https://opencontainers.org/) and publishes it to the [GitHub Container registry](https://ghcr.io). 8 | This allows your action to be consumed as an _immutable_ package if a [SemVer](https://semver.org/) is specified in the consumer's workflow file. 9 | 10 | Your workflow can be triggered by any [event](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows) which has a `GITHUB_REF` that points to a Git tag. 11 | Some examples of these events are: 12 | 13 | - [`release`](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#release) (uses tag associated with release) 14 | - [`push`](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#push) (only applies to pushed tags) 15 | - [`workflow_dispatch`](https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_dispatch) (only applies if subject of dispatch is a tag) 16 | 17 | The associated tag must follow [semantic versioning](https://semver.org/) - this tag value will be used to create a package version. 18 | 19 | Consumers of your action will then be able to specify that version to consume your action from the package, e.g. 20 | 21 | - `- uses: your-name/your-action@v1.2.3` 22 | - `- uses: your-name/your-action@v1` 23 | 24 | Such packages will come with stronger security guarantees for consumers than existing git-based action resolution, such as: 25 | 26 | - Provenance attestations generated using the [`@actions/attest`](https://github.com/actions/toolkit/tree/main/packages/attest) package 27 | - Tag immutability - it will not be possible to overwrite tags once published, ensuring versions of an action can't change once in use 28 | - Namespace immutability - it will not be possible to delete and recreate the package with different content; this would undermine tag immutability 29 | 30 | ## Usage 31 | 32 | An actions workflow file like the following should be placed in your action repository: 33 | 34 | 35 | ```yaml 36 | name: "Publish Immutable Action Version" 37 | 38 | on: 39 | release: 40 | types: [published] 41 | 42 | jobs: 43 | publish: 44 | runs-on: ubuntu-latest 45 | permissions: 46 | contents: read 47 | id-token: write 48 | packages: write 49 | steps: 50 | - name: Check out repo 51 | uses: actions/checkout@v4 52 | - name: Publish 53 | id: publish 54 | uses: actions/publish-immutable-action@0.0.3 55 | ``` 56 | 57 | 58 | ## License 59 | 60 | The scripts and documentation in this project are released under the [MIT License](LICENSE). 
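## Outputs

On success the action reports the digests of what it published via the step outputs defined in
`action.yml` (`package-manifest-sha`, plus `attestation-manifest-sha` and
`referrer-index-manifest-sha` when a provenance attestation is generated). The snippet below is
an illustrative sketch of reading one of them from a later step, reusing the `publish` step id
from the example above:

```yaml
      - name: Publish
        id: publish
        uses: actions/publish-immutable-action@0.0.3
      - name: Print package manifest digest
        run: echo "Published manifest digest: ${{ steps.publish.outputs.package-manifest-sha }}"
```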
61 | -------------------------------------------------------------------------------- /.github/linters/.eslintrc.yml: -------------------------------------------------------------------------------- 1 | env: 2 | node: true 3 | es6: true 4 | jest: true 5 | 6 | globals: 7 | Atomics: readonly 8 | SharedArrayBuffer: readonly 9 | 10 | ignorePatterns: 11 | - '!.*' 12 | - '**/node_modules/.*' 13 | - '**/dist/.*' 14 | - '**/coverage/.*' 15 | - '*.json' 16 | 17 | parser: '@typescript-eslint/parser' 18 | 19 | parserOptions: 20 | ecmaVersion: 2023 21 | sourceType: module 22 | project: 23 | - './.github/linters/tsconfig.json' 24 | - './tsconfig.json' 25 | 26 | plugins: 27 | - jest 28 | - '@typescript-eslint' 29 | 30 | extends: 31 | - eslint:recommended 32 | - plugin:@typescript-eslint/eslint-recommended 33 | - plugin:@typescript-eslint/recommended 34 | - plugin:github/recommended 35 | - plugin:jest/recommended 36 | 37 | rules: 38 | { 39 | 'camelcase': 'off', 40 | 'eslint-comments/no-use': 'off', 41 | 'eslint-comments/no-unused-disable': 'off', 42 | 'i18n-text/no-en': 'off', 43 | 'import/no-namespace': 'off', 44 | 'no-console': 'off', 45 | 'no-unused-vars': 'off', 46 | 'prettier/prettier': 'error', 47 | 'semi': 'off', 48 | '@typescript-eslint/array-type': 'error', 49 | '@typescript-eslint/await-thenable': 'error', 50 | '@typescript-eslint/ban-ts-comment': 'error', 51 | '@typescript-eslint/consistent-type-assertions': 'error', 52 | '@typescript-eslint/explicit-member-accessibility': 53 | ['error', { 'accessibility': 'no-public' }], 54 | '@typescript-eslint/explicit-function-return-type': 55 | ['error', { 'allowExpressions': true }], 56 | '@typescript-eslint/func-call-spacing': ['error', 'never'], 57 | '@typescript-eslint/no-array-constructor': 'error', 58 | '@typescript-eslint/no-empty-interface': 'error', 59 | '@typescript-eslint/no-explicit-any': 'error', 60 | '@typescript-eslint/no-extraneous-class': 'error', 61 | '@typescript-eslint/no-for-in-array': 'error', 62 | '@typescript-eslint/no-inferrable-types': 'error', 63 | '@typescript-eslint/no-misused-new': 'error', 64 | '@typescript-eslint/no-namespace': 'error', 65 | '@typescript-eslint/no-non-null-assertion': 'warn', 66 | '@typescript-eslint/no-require-imports': 'error', 67 | '@typescript-eslint/no-unnecessary-qualifier': 'error', 68 | '@typescript-eslint/no-unnecessary-type-assertion': 'error', 69 | '@typescript-eslint/no-unused-vars': 'error', 70 | '@typescript-eslint/no-useless-constructor': 'error', 71 | '@typescript-eslint/no-var-requires': 'error', 72 | '@typescript-eslint/prefer-for-of': 'warn', 73 | '@typescript-eslint/prefer-function-type': 'warn', 74 | '@typescript-eslint/prefer-includes': 'error', 75 | '@typescript-eslint/prefer-string-starts-ends-with': 'error', 76 | '@typescript-eslint/promise-function-async': 'error', 77 | '@typescript-eslint/require-array-sort-compare': 'error', 78 | '@typescript-eslint/restrict-plus-operands': 'error', 79 | '@typescript-eslint/semi': ['error', 'never'], 80 | '@typescript-eslint/space-before-function-paren': 'off', 81 | '@typescript-eslint/type-annotation-spacing': 'error', 82 | '@typescript-eslint/unbound-method': 'error' 83 | } 84 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "typescript-action", 3 | "description": "GitHub Actions TypeScript template", 4 | "version": "0.0.0", 5 | "author": "", 6 | "private": true, 7 | "homepage": 
"https://github.com/actions/typescript-action", 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/actions/typescript-action.git" 11 | }, 12 | "bugs": { 13 | "url": "https://github.com/actions/typescript-action/issues" 14 | }, 15 | "keywords": [ 16 | "actions", 17 | "node", 18 | "setup" 19 | ], 20 | "exports": { 21 | ".": "./dist/index.js" 22 | }, 23 | "engines": { 24 | "node": ">=20" 25 | }, 26 | "scripts": { 27 | "bundle": "npm run format:write && npm run package", 28 | "ci-test": "jest", 29 | "coverage": "make-coverage-badge --output-path ./badges/coverage.svg", 30 | "format:write": "prettier --write **/*.ts", 31 | "format:check": "prettier --check **/*.ts", 32 | "lint": "npx eslint . -c ./.github/linters/.eslintrc.yml", 33 | "package": "rm -rf dist && ncc build src/index.ts --license licenses.txt", 34 | "package:watch": "npm run package -- --watch", 35 | "test": "jest", 36 | "start": "node dist/index.js", 37 | "all": "npm run format:write && npm run lint && npm run test && npm run coverage && npm run package" 38 | }, 39 | "license": "MIT", 40 | "jest": { 41 | "preset": "ts-jest", 42 | "verbose": true, 43 | "clearMocks": true, 44 | "testEnvironment": "node", 45 | "moduleFileExtensions": [ 46 | "js", 47 | "ts" 48 | ], 49 | "testMatch": [ 50 | "**/*.test.ts" 51 | ], 52 | "testPathIgnorePatterns": [ 53 | "/node_modules/", 54 | "/dist/" 55 | ], 56 | "transform": { 57 | "^.+\\.ts$": "ts-jest" 58 | }, 59 | "coverageReporters": [ 60 | "json-summary", 61 | "text", 62 | "lcov" 63 | ], 64 | "collectCoverage": true, 65 | "collectCoverageFrom": [ 66 | "./src/**" 67 | ] 68 | }, 69 | "dependencies": { 70 | "@actions/attest": "^1.4.0", 71 | "@actions/core": "^1.10.1", 72 | "@actions/exec": "^1.1.1", 73 | "@actions/github": "^6.0.0", 74 | "@sigstore/oci": "^0.3.7", 75 | "@types/fs-extra": "^11.0.4", 76 | "archiver": "^7.0.1", 77 | "fs-extra": "^11.2.0", 78 | "simple-git": "^3.22.0", 79 | "tar": "^7.4.3" 80 | }, 81 | "devDependencies": { 82 | "@types/archiver": "^6.0.2", 83 | "@types/axios": "^0.14.0", 84 | "@types/jest": "^29.5.12", 85 | "@types/minimist": "^1.2.5", 86 | "@types/node": "^22.0.0", 87 | "@types/tar": "^6.1.13", 88 | "@typescript-eslint/eslint-plugin": "^6.21.0", 89 | "@typescript-eslint/parser": "^6.21.0", 90 | "@vercel/ncc": "^0.38.1", 91 | "eslint": "^8.57.0", 92 | "eslint-plugin-github": "^4.10.1", 93 | "eslint-plugin-jest": "^28.6.0", 94 | "eslint-plugin-jsonc": "^2.13.0", 95 | "eslint-plugin-prettier": "^5.2.1", 96 | "jest": "^29.7.0", 97 | "js-yaml": "^4.1.0", 98 | "make-coverage-badge": "^1.2.0", 99 | "prettier": "^3.3.3", 100 | "prettier-eslint": "^16.3.0", 101 | "ts-jest": "^29.2.3", 102 | "typescript": "^5.5.4" 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /__tests__/api-client.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | getRepositoryMetadata, 3 | getContainerRegistryURL 4 | } from '../src/api-client' 5 | 6 | const url = 'https://registry.example.com' 7 | const test_token = 'test_token' 8 | 9 | let fetchMock: jest.SpyInstance 10 | 11 | beforeEach(() => { 12 | fetchMock = jest.spyOn(global, 'fetch') 13 | }) 14 | 15 | afterEach(() => { 16 | fetchMock.mockRestore() 17 | }) 18 | 19 | describe('getRepositoryMetadata', () => { 20 | it('returns repository metadata when the fetch response is ok', async () => { 21 | fetchMock.mockResolvedValueOnce( 22 | new Response( 23 | JSON.stringify({ 24 | id: '123', 25 | owner: { id: '456' }, 26 | 
visibility: 'public' 27 | }) 28 | ) 29 | ) 30 | const result = await getRepositoryMetadata(url, 'repository', test_token) 31 | expect(result).toEqual({ 32 | repoId: '123', 33 | ownerId: '456', 34 | visibility: 'public' 35 | }) 36 | 37 | expect(fetchMock).toHaveBeenCalledWith( 38 | 'https://registry.example.com/repos/repository', 39 | { 40 | method: 'GET', 41 | headers: { 42 | Authorization: `Bearer ${test_token}`, 43 | Accept: 'application/vnd.github.v3+json' 44 | } 45 | } 46 | ) 47 | }) 48 | 49 | it('throws an error when the fetch errors', async () => { 50 | fetchMock.mockRejectedValueOnce(new Error('API is down')) 51 | await expect( 52 | getRepositoryMetadata(url, 'repository', 'token') 53 | ).rejects.toThrow('API is down') 54 | }) 55 | 56 | it('throws an error when the response status is not ok', async () => { 57 | fetchMock.mockResolvedValueOnce(new Response(null, { status: 500 })) 58 | await expect( 59 | getRepositoryMetadata(url, 'repository', 'token') 60 | ).rejects.toThrow( 61 | 'Failed to fetch repository metadata due to bad status code: 500' 62 | ) 63 | }) 64 | 65 | it('throws an error when the response data is in the wrong format', async () => { 66 | fetchMock.mockResolvedValueOnce( 67 | new Response(JSON.stringify({ wrong: 'format' })) 68 | ) 69 | await expect( 70 | getRepositoryMetadata(url, 'repository', 'token') 71 | ).rejects.toThrow( 72 | 'Failed to fetch repository metadata: unexpected response format' 73 | ) 74 | }) 75 | }) 76 | 77 | describe('getContainerRegistryURL', () => { 78 | it('returns container registry URL when the fetch response is ok', async () => { 79 | fetchMock.mockResolvedValueOnce( 80 | new Response(JSON.stringify({ url: 'https://registry.example.com' })) 81 | ) 82 | const result = await getContainerRegistryURL(url, test_token) 83 | 84 | expect(result).toEqual(new URL('https://registry.example.com')) 85 | 86 | expect(fetchMock).toHaveBeenCalledWith( 87 | 'https://registry.example.com/packages/container-registry-url', 88 | { 89 | method: 'GET', 90 | headers: { 91 | Authorization: `Bearer ${test_token}`, 92 | Accept: 'application/vnd.github.v3+json' 93 | } 94 | } 95 | ) 96 | }) 97 | 98 | it('throws an error when the fetch errors', async () => { 99 | fetchMock.mockRejectedValueOnce(new Error('API is down')) 100 | await expect(getContainerRegistryURL(url, test_token)).rejects.toThrow( 101 | 'API is down' 102 | ) 103 | }) 104 | 105 | it('throws an error when the response status is not ok', async () => { 106 | fetchMock.mockResolvedValueOnce(new Response(null, { status: 500 })) 107 | await expect(getContainerRegistryURL(url, test_token)).rejects.toThrow( 108 | 'Failed to fetch container registry url due to bad status code: 500' 109 | ) 110 | }) 111 | 112 | it('throws an error when the response data is in the wrong format', async () => { 113 | fetchMock.mockResolvedValueOnce( 114 | new Response(JSON.stringify({ wrong: 'format' })) 115 | ) 116 | await expect(getContainerRegistryURL(url, test_token)).rejects.toThrow( 117 | 'Failed to fetch repository metadata: unexpected response format' 118 | ) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import * as apiClient from './api-client' 2 | import * as core from '@actions/core' 3 | import * as github from '@actions/github' 4 | 5 | // All the environment options required to run the action 6 | export interface PublishActionOptions { 7 | // The name of the 
repository in the format owner/repo 8 | nameWithOwner: string 9 | // The GitHub token to use for API requests 10 | token: string 11 | // The base URL for the GitHub API 12 | apiBaseUrl: string 13 | // The base URL for the GitHub Container Registry 14 | containerRegistryUrl: URL 15 | // The directory where the action is running, used for git operations 16 | workspaceDir: string 17 | // The directory set up to be used for temporary files by the runner 18 | runnerTempDir: string 19 | // Whether this action is running in enterprise, determined from the github URL 20 | isEnterprise: boolean 21 | // The visibility of the action repository ("public", "internal" or "private") 22 | repositoryVisibility: string 23 | // The repository ID of the action repository 24 | repositoryId: string 25 | // The owner ID of the action repository 26 | repositoryOwnerId: string 27 | // The event that triggered the action 28 | event: string 29 | // The ref that triggered the action, associated with the event 30 | ref: string 31 | // The commit SHA associated with the ref that triggered the action 32 | sha: string 33 | } 34 | 35 | export async function resolvePublishActionOptions(): Promise { 36 | // Action Inputs 37 | const token: string = core.getInput('github-token') || '' 38 | if (token === '') { 39 | throw new Error(`Could not find GITHUB_TOKEN.`) 40 | } 41 | 42 | // Context Inputs 43 | const event: string = github.context.eventName 44 | if (event === '') { 45 | throw new Error(`Could not find event name.`) 46 | } 47 | 48 | const ref: string = github.context.ref || '' 49 | if (ref === '') { 50 | throw new Error(`Could not find GITHUB_REF.`) 51 | } 52 | 53 | const nameWithOwner: string = 54 | github.context.payload.repository?.full_name || '' 55 | if (nameWithOwner === '') { 56 | throw new Error(`Could not find Repository.`) 57 | } 58 | 59 | const sha: string = github.context.sha || '' 60 | if (sha === '') { 61 | throw new Error(`Could not find GITHUB_SHA.`) 62 | } 63 | 64 | const apiBaseUrl: string = github.context.apiUrl || '' 65 | if (apiBaseUrl === '') { 66 | throw new Error(`Could not find GITHUB_API_URL.`) 67 | } 68 | 69 | const githubServerUrl = github.context.serverUrl || '' 70 | if (githubServerUrl === '') { 71 | throw new Error(`Could not find GITHUB_SERVER_URL.`) 72 | } 73 | 74 | // Environment Variables 75 | const workspaceDir: string = process.env.GITHUB_WORKSPACE || '' 76 | if (workspaceDir === '') { 77 | throw new Error(`Could not find GITHUB_WORKSPACE.`) 78 | } 79 | 80 | const runnerTempDir: string = process.env.RUNNER_TEMP || '' 81 | if (runnerTempDir === '') { 82 | throw new Error(`Could not find RUNNER_TEMP.`) 83 | } 84 | 85 | const repositoryId = process.env.GITHUB_REPOSITORY_ID || '' 86 | if (repositoryId === '') { 87 | throw new Error(`Could not find GITHUB_REPOSITORY_ID.`) 88 | } 89 | 90 | const repositoryOwnerId = process.env.GITHUB_REPOSITORY_OWNER_ID || '' 91 | if (repositoryOwnerId === '') { 92 | throw new Error(`Could not find GITHUB_REPOSITORY_OWNER_ID.`) 93 | } 94 | 95 | // Required Values fetched from the GitHub API 96 | const containerRegistryUrl: URL = await apiClient.getContainerRegistryURL( 97 | apiBaseUrl, 98 | token 99 | ) 100 | 101 | const isEnterprise = 102 | !githubServerUrl.includes('https://github.com') && 103 | !githubServerUrl.endsWith('.ghe.com') 104 | 105 | const repoMetadata = await apiClient.getRepositoryMetadata( 106 | apiBaseUrl, 107 | nameWithOwner, 108 | token 109 | ) 110 | 111 | if (repoMetadata.visibility === '') { 112 | throw new Error(`Could not find 
repository visibility.`) 113 | } 114 | 115 | if (repoMetadata.repoId !== repositoryId) { 116 | throw new Error(`Repository ID mismatch.`) 117 | } 118 | 119 | if (repoMetadata.ownerId !== repositoryOwnerId) { 120 | throw new Error(`Repository Owner ID mismatch.`) 121 | } 122 | 123 | const repositoryVisibility = repoMetadata.visibility 124 | 125 | return { 126 | event, 127 | ref, 128 | workspaceDir, 129 | nameWithOwner, 130 | token, 131 | apiBaseUrl, 132 | runnerTempDir, 133 | sha, 134 | containerRegistryUrl, 135 | isEnterprise, 136 | repositoryVisibility, 137 | repositoryId, 138 | repositoryOwnerId 139 | } 140 | } 141 | 142 | // When printing this object, we want to hide some of them from being displayed 143 | const internalKeys = new Set([ 144 | 'token', 145 | 'runnerTempDir', 146 | 'repositoryId', 147 | 'repositoryOwnerId' 148 | ]) 149 | 150 | export function serializeOptions(options: PublishActionOptions): string { 151 | return JSON.stringify( 152 | options, 153 | (key: string, value: unknown) => 154 | internalKeys.has(key) ? undefined : value, 155 | 2 // 2 spaces for pretty-printing 156 | ) 157 | } 158 | -------------------------------------------------------------------------------- /src/fs-helper.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs' 2 | import fsExtra from 'fs-extra' 3 | import * as path from 'path' 4 | import * as tar from 'tar' 5 | import * as archiver from 'archiver' 6 | import * as crypto from 'crypto' 7 | import * as simpleGit from 'simple-git' 8 | 9 | export interface FileMetadata { 10 | path: string 11 | size: number 12 | sha256: string 13 | } 14 | 15 | // Simple convenience around creating subdirectories in the same base temporary directory 16 | export function createTempDir(tmpDirPath: string, subDirName: string): string { 17 | const tempDir = path.join(tmpDirPath, subDirName) 18 | 19 | if (!fs.existsSync(tempDir)) { 20 | fs.mkdirSync(tempDir, { recursive: true }) 21 | } 22 | 23 | return tempDir 24 | } 25 | 26 | // Creates both a tar.gz and zip archive of the given directory and returns the paths to both archives (stored in the provided target directory) 27 | // as well as the size/sha256 hash of each file. 
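// Illustrative usage (names follow the call in src/main.ts):
//   const archives = await createArchives(stagedActionFilesDir, archiveDir)
//   // archives.tarFile.sha256 and archives.zipFile.sha256 are "sha256:<hex>" digests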
28 | export async function createArchives( 29 | distPath: string, 30 | archiveTargetPath: string 31 | ): Promise<{ zipFile: FileMetadata; tarFile: FileMetadata }> { 32 | const zipPath = path.join(archiveTargetPath, `archive.zip`) 33 | const tarPath = path.join(archiveTargetPath, `archive.tar.gz`) 34 | 35 | const createZipPromise = new Promise((resolve, reject) => { 36 | const output = fs.createWriteStream(zipPath) 37 | const archive = archiver.create('zip') 38 | 39 | output.on('error', (err: Error) => { 40 | reject(err) 41 | }) 42 | 43 | archive.on('error', (err: Error) => { 44 | reject(err) 45 | }) 46 | 47 | output.on('close', () => { 48 | resolve(fileMetadata(zipPath)) 49 | }) 50 | 51 | archive.pipe(output) 52 | archive.directory(distPath, 'action') 53 | archive.finalize() 54 | }) 55 | 56 | const createTarPromise = new Promise((resolve, reject) => { 57 | tar 58 | .c( 59 | { 60 | file: tarPath, 61 | C: distPath, 62 | gzip: true, 63 | prefix: 'action' 64 | }, 65 | ['.'] 66 | ) 67 | // eslint-disable-next-line github/no-then 68 | .catch(err => { 69 | reject(err) 70 | }) 71 | // eslint-disable-next-line github/no-then 72 | .then(() => { 73 | resolve(fileMetadata(tarPath)) 74 | }) 75 | }) 76 | 77 | const [zipFile, tarFile] = await Promise.all([ 78 | createZipPromise, 79 | createTarPromise 80 | ]) 81 | 82 | return { zipFile, tarFile } 83 | } 84 | 85 | export function isDirectory(dirPath: string): boolean { 86 | return fs.existsSync(dirPath) && fs.lstatSync(dirPath).isDirectory() 87 | } 88 | 89 | export function readFileContents(filePath: string): Buffer { 90 | return fs.readFileSync(filePath) 91 | } 92 | 93 | // Copy actions files from sourceDir to targetDir, excluding the .git folder. 94 | export function stageActionFiles(actionDir: string, targetDir: string): void { 95 | fsExtra.copySync(actionDir, targetDir, { 96 | filter: (src: string) => { 97 | const basename = path.basename(src) 98 | 99 | // Filter out the .git folder. 100 | if (basename === '.git') { 101 | return false 102 | } 103 | 104 | return true 105 | } 106 | }) 107 | } 108 | 109 | // Ensure the correct SHA is checked out for the tag by inspecting the git metadata in the workspace 110 | // and comparing it to the information actions provided us. 111 | // Provided ref should be in format refs/tags/. 
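// (i.e. the full tag ref, such as refs/tags/v1.2.3). Illustrative call, mirroring src/main.ts:
//   await ensureTagAndRefCheckedOut(options.ref, options.sha, options.workspaceDir)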
112 | export async function ensureTagAndRefCheckedOut( 113 | tagRef: string, 114 | expectedSha: string, 115 | gitDir: string 116 | ): Promise { 117 | if (!tagRef.startsWith('refs/tags/')) { 118 | throw new Error(`Tag ref provided is not in expected format.`) 119 | } 120 | 121 | const git: simpleGit.SimpleGit = simpleGit.simpleGit(gitDir) 122 | 123 | let tagCommitSha: string 124 | 125 | try { 126 | tagCommitSha = await git.raw(['rev-parse', '--verify', tagRef]) 127 | } catch (err) { 128 | throw new Error(`Error retrieving commit associated with tag: ${err}`) 129 | } 130 | if (tagCommitSha.trim() !== expectedSha) { 131 | throw new Error( 132 | `The commit associated with the tag ${tagRef} does not match the SHA of the commit provided by the actions context.` 133 | ) 134 | } 135 | 136 | let currentlyCheckedOutSha: string 137 | try { 138 | currentlyCheckedOutSha = await git.revparse(['HEAD']) 139 | } catch (err) { 140 | throw new Error(`Error validating checked out tag and ref: ${err}`) 141 | } 142 | if (currentlyCheckedOutSha.trim() !== expectedSha) { 143 | throw new Error( 144 | `The expected commit associated with the tag ${tagRef} is not checked out.` 145 | ) 146 | } 147 | 148 | // Call git status to check for any changes in the working directory 149 | // This version of this action only supports uploading actions packages 150 | // which contain the same content as the repository at the appropriate source commit. 151 | let status: simpleGit.StatusResult 152 | try { 153 | status = await git.status() 154 | } catch (err) { 155 | throw new Error(`Error checking git status: ${err}`) 156 | } 157 | if (!status.isClean()) { 158 | throw new Error( 159 | `The working directory has uncommitted changes. Uploading modified code from the checked out repository is not supported by this action.` 160 | ) 161 | } 162 | } 163 | 164 | // Converts a file path to a filemetadata object by querying the fs for relevant metadata. 
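// The sha256 field is returned with a "sha256:" prefix so it can be used directly as an OCI digest.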
165 | async function fileMetadata(filePath: string): Promise { 166 | const stats = fs.statSync(filePath) 167 | const size = stats.size 168 | const hash = crypto.createHash('sha256') 169 | const fileStream = fs.createReadStream(filePath) 170 | return new Promise((resolve, reject) => { 171 | fileStream.on('data', data => { 172 | hash.update(data) 173 | }) 174 | fileStream.on('end', () => { 175 | const sha256 = hash.digest('hex') 176 | resolve({ 177 | path: filePath, 178 | size, 179 | sha256: `sha256:${sha256}` 180 | }) 181 | }) 182 | fileStream.on('error', err => { 183 | reject(err) 184 | }) 185 | }) 186 | } 187 | -------------------------------------------------------------------------------- /src/oci-container.ts: -------------------------------------------------------------------------------- 1 | import { FileMetadata } from './fs-helper' 2 | import * as crypto from 'crypto' 3 | 4 | export const imageIndexMediaType = 'application/vnd.oci.image.index.v1+json' 5 | export const imageManifestMediaType = 6 | 'application/vnd.oci.image.manifest.v1+json' 7 | export const actionsPackageMediaType = 8 | 'application/vnd.github.actions.package.v1+json' 9 | export const actionsPackageTarLayerMediaType = 10 | 'application/vnd.github.actions.package.layer.v1.tar+gzip' 11 | export const actionsPackageZipLayerMediaType = 12 | 'application/vnd.github.actions.package.layer.v1.zip' 13 | 14 | export const actionPackageAnnotationValue = 'actions_oci_pkg' 15 | export const actionPackageAttestationAnnotationValue = 16 | 'actions_oci_pkg_attestation' 17 | export const actionPackageReferrerTagAnnotationValue = 18 | 'actions_oci_pkg_referrer_index' 19 | 20 | export const ociEmptyMediaType = 'application/vnd.oci.empty.v1+json' 21 | export const emptyConfigSize = 2 22 | export const emptyConfigSha = 23 | 'sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a' 24 | 25 | export interface OCIImageManifest { 26 | schemaVersion: number 27 | mediaType: string 28 | artifactType: string 29 | config: Descriptor 30 | layers: Descriptor[] 31 | subject?: Descriptor 32 | annotations: { [key: string]: string } 33 | } 34 | 35 | export interface OCIIndexManifest { 36 | schemaVersion: number 37 | mediaType: string 38 | manifests: Descriptor[] 39 | annotations: { [key: string]: string } 40 | } 41 | 42 | export interface Descriptor { 43 | mediaType: string 44 | size: number 45 | digest: string 46 | artifactType?: string 47 | annotations?: { [key: string]: string } 48 | } 49 | 50 | // Given a name and archive metadata, creates a manifest in the format expected by GHCR for an Actions Package. 
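// Illustrative call (argument values are examples; see __tests__/oci-container.test.ts for a full one):
//   createActionPackageManifest(tarFile, zipFile, 'my-org/my-repo', '123', '456', commitSha, '1.2.3', new Date())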
51 | export function createActionPackageManifest( 52 | tarFile: FileMetadata, 53 | zipFile: FileMetadata, 54 | repository: string, 55 | repoId: string, 56 | ownerId: string, 57 | sourceCommit: string, 58 | version: string, 59 | created: Date = new Date() 60 | ): OCIImageManifest { 61 | const configLayer = createEmptyConfigLayer() 62 | const sanitizedRepo = sanitizeRepository(repository) 63 | const tarLayer = createTarLayer(tarFile, sanitizedRepo, version) 64 | const zipLayer = createZipLayer(zipFile, sanitizedRepo, version) 65 | 66 | const manifest: OCIImageManifest = { 67 | schemaVersion: 2, 68 | mediaType: imageManifestMediaType, 69 | artifactType: actionsPackageMediaType, 70 | config: configLayer, 71 | layers: [tarLayer, zipLayer], 72 | annotations: { 73 | 'org.opencontainers.image.created': created.toISOString(), 74 | 'action.tar.gz.digest': tarFile.sha256, 75 | 'action.zip.digest': zipFile.sha256, 76 | 'com.github.package.type': actionPackageAnnotationValue, 77 | 'com.github.package.version': version, 78 | 'com.github.source.repo.id': repoId, 79 | 'com.github.source.repo.owner.id': ownerId, 80 | 'com.github.source.commit': sourceCommit 81 | } 82 | } 83 | 84 | return manifest 85 | } 86 | 87 | export function createSigstoreAttestationManifest( 88 | bundleSize: number, 89 | bundleDigest: string, 90 | bundleMediaType: string, 91 | bundlePredicateType: string, 92 | subjectSize: number, 93 | subjectDigest: string, 94 | created: Date = new Date() 95 | ): OCIImageManifest { 96 | const configLayer = createEmptyConfigLayer() 97 | 98 | const sigstoreAttestationLayer: Descriptor = { 99 | mediaType: bundleMediaType, 100 | size: bundleSize, 101 | digest: bundleDigest 102 | } 103 | 104 | const subject: Descriptor = { 105 | mediaType: imageManifestMediaType, 106 | size: subjectSize, 107 | digest: subjectDigest 108 | } 109 | 110 | const manifest: OCIImageManifest = { 111 | schemaVersion: 2, 112 | mediaType: imageManifestMediaType, 113 | artifactType: bundleMediaType, 114 | config: configLayer, 115 | layers: [sigstoreAttestationLayer], 116 | subject, 117 | 118 | annotations: { 119 | 'dev.sigstore.bundle.content': 'dsse-envelope', 120 | 'dev.sigstore.bundle.predicateType': bundlePredicateType, 121 | 'com.github.package.type': actionPackageAttestationAnnotationValue, 122 | 'org.opencontainers.image.created': created.toISOString() 123 | } 124 | } 125 | 126 | return manifest 127 | } 128 | 129 | export function createReferrerTagManifest( 130 | attestationDigest: string, 131 | attestationSize: number, 132 | bundleMediaType: string, 133 | bundlePredicateType: string, 134 | attestationCreated: Date, 135 | created: Date = new Date() 136 | ): OCIIndexManifest { 137 | const manifest: OCIIndexManifest = { 138 | schemaVersion: 2, 139 | mediaType: imageIndexMediaType, 140 | manifests: [ 141 | { 142 | mediaType: imageManifestMediaType, 143 | artifactType: bundleMediaType, 144 | size: attestationSize, 145 | digest: attestationDigest, 146 | annotations: { 147 | 'com.github.package.type': actionPackageAttestationAnnotationValue, 148 | 'org.opencontainers.image.created': attestationCreated.toISOString(), 149 | 'dev.sigstore.bundle.content': 'dsse-envelope', 150 | 'dev.sigstore.bundle.predicateType': bundlePredicateType 151 | } 152 | } 153 | ], 154 | annotations: { 155 | 'com.github.package.type': actionPackageReferrerTagAnnotationValue, 156 | 'org.opencontainers.image.created': created.toISOString() 157 | } 158 | } 159 | 160 | return manifest 161 | } 162 | 163 | // Calculate the SHA256 digest of a given manifest. 
164 | // This should match the digest which the GitHub container registry calculates for this manifest. 165 | export function sha256Digest( 166 | manifest: OCIImageManifest | OCIIndexManifest 167 | ): string { 168 | const data = JSON.stringify(manifest) 169 | const buffer = Buffer.from(data, 'utf8') 170 | const hash = crypto.createHash('sha256') 171 | hash.update(buffer) 172 | const hexHash = hash.digest('hex') 173 | return `sha256:${hexHash}` 174 | } 175 | 176 | export function sizeInBytes( 177 | manifest: OCIImageManifest | OCIIndexManifest 178 | ): number { 179 | const data = JSON.stringify(manifest) 180 | return Buffer.byteLength(data, 'utf8') 181 | } 182 | 183 | export function createEmptyConfigLayer(): Descriptor { 184 | const configLayer: Descriptor = { 185 | mediaType: ociEmptyMediaType, 186 | size: emptyConfigSize, 187 | digest: emptyConfigSha 188 | } 189 | 190 | return configLayer 191 | } 192 | 193 | function createZipLayer( 194 | zipFile: FileMetadata, 195 | repository: string, 196 | version: string 197 | ): Descriptor { 198 | const zipLayer: Descriptor = { 199 | mediaType: actionsPackageZipLayerMediaType, 200 | size: zipFile.size, 201 | digest: zipFile.sha256, 202 | annotations: { 203 | 'org.opencontainers.image.title': `${repository}_${version}.zip` 204 | } 205 | } 206 | 207 | return zipLayer 208 | } 209 | 210 | function createTarLayer( 211 | tarFile: FileMetadata, 212 | repository: string, 213 | version: string 214 | ): Descriptor { 215 | const tarLayer: Descriptor = { 216 | mediaType: actionsPackageTarLayerMediaType, 217 | size: tarFile.size, 218 | digest: tarFile.sha256, 219 | annotations: { 220 | 'org.opencontainers.image.title': `${repository}_${version}.tar.gz` 221 | } 222 | } 223 | 224 | return tarLayer 225 | } 226 | 227 | // Remove slashes so we can use the repository in a filename 228 | // repository usually includes the namespace too, e.g. 
my-org/my-repo 229 | function sanitizeRepository(repository: string): string { 230 | return repository.replace('/', '-') 231 | } 232 | -------------------------------------------------------------------------------- /__tests__/oci-container.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createActionPackageManifest, 3 | sha256Digest, 4 | sizeInBytes, 5 | OCIImageManifest, 6 | createSigstoreAttestationManifest, 7 | OCIIndexManifest, 8 | createReferrerTagManifest 9 | } from '../src/oci-container' 10 | import { FileMetadata } from '../src/fs-helper' 11 | 12 | const createdTimestamp = '2021-01-01T00:00:00.000Z' 13 | 14 | describe('sha256Digest', () => { 15 | it('calculates the SHA256 digest of the provided manifest', () => { 16 | const { manifest } = testActionPackageManifest() 17 | const digest = sha256Digest(manifest) 18 | const expectedDigest = 19 | 'sha256:1af9bf993bf068a51fbb54822471ab7507b07c553bcac09a7c91328740d8ed69' 20 | 21 | expect(digest).toEqual(expectedDigest) 22 | }) 23 | }) 24 | 25 | describe('size', () => { 26 | it('returns the total size of the provided manifest', () => { 27 | const { manifest } = testActionPackageManifest() 28 | const size = sizeInBytes(manifest) 29 | expect(size).toBe(991) 30 | }) 31 | }) 32 | 33 | describe('createActionPackageManifest', () => { 34 | it('creates a manifest containing the provided information', () => { 35 | const { manifest, zipFile, tarFile } = testActionPackageManifest() 36 | 37 | const expectedJSON = `{ 38 | "schemaVersion": 2, 39 | "mediaType": "application/vnd.oci.image.manifest.v1+json", 40 | "artifactType": "application/vnd.github.actions.package.v1+json", 41 | "config": { 42 | "mediaType":"application/vnd.oci.empty.v1+json", 43 | "size":2, 44 | "digest":"sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a" 45 | }, 46 | "layers":[ 47 | { 48 | "mediaType":"application/vnd.github.actions.package.layer.v1.tar+gzip", 49 | "size":${tarFile.size}, 50 | "digest":"${tarFile.sha256}", 51 | "annotations":{ 52 | "org.opencontainers.image.title":"test-org-test-repo_1.2.3.tar.gz" 53 | } 54 | }, 55 | { 56 | "mediaType":"application/vnd.github.actions.package.layer.v1.zip", 57 | "size":${zipFile.size}, 58 | "digest":"${zipFile.sha256}", 59 | "annotations":{ 60 | "org.opencontainers.image.title":"test-org-test-repo_1.2.3.zip" 61 | } 62 | } 63 | ], 64 | "annotations":{ 65 | "org.opencontainers.image.created":"${createdTimestamp}", 66 | "action.tar.gz.digest":"${tarFile.sha256}", 67 | "action.zip.digest":"${zipFile.sha256}", 68 | "com.github.package.type":"actions_oci_pkg", 69 | "com.github.package.version":"1.2.3", 70 | "com.github.source.repo.id":"123", 71 | "com.github.source.repo.owner.id":"456", 72 | "com.github.source.commit":"abc" 73 | } 74 | }` 75 | 76 | const manifestJSON = JSON.stringify(manifest) 77 | expect(manifestJSON).toEqual(expectedJSON.replace(/\s/g, '')) 78 | }) 79 | 80 | it('uses the current time if no created date is provided', () => { 81 | const { manifest } = testActionPackageManifest(false) 82 | expect( 83 | manifest.annotations['org.opencontainers.image.created'] 84 | ).toBeDefined() 85 | }) 86 | }) 87 | 88 | describe('createSigstoreAttestationManifest', () => { 89 | it('creates a manifest containing the provided information', () => { 90 | const manifest = testAttestationManifest() 91 | 92 | const expectedJSON = `{ 93 | "schemaVersion": 2, 94 | "mediaType": "application/vnd.oci.image.manifest.v1+json", 95 | "artifactType": 
"application/vnd.dev.sigstore.bundle.v0.3+json", 96 | "config": { 97 | "mediaType": "application/vnd.oci.empty.v1+json", 98 | "size": 2, 99 | "digest": "sha256:44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a" 100 | }, 101 | "layers": [ 102 | { 103 | "mediaType": "application/vnd.dev.sigstore.bundle.v0.3+json", 104 | "size": 10, 105 | "digest": "bundleDigest" 106 | } 107 | ], 108 | "subject": { 109 | "mediaType": "application/vnd.oci.image.manifest.v1+json", 110 | "size": 100, 111 | "digest": "subjectDigest" 112 | }, 113 | "annotations": { 114 | "dev.sigstore.bundle.content": "dsse-envelope", 115 | "dev.sigstore.bundle.predicateType": "https://slsa.dev/provenance/v1", 116 | "com.github.package.type": "actions_oci_pkg_attestation", 117 | "org.opencontainers.image.created": "2021-01-01T00:00:00.000Z" 118 | } 119 | } 120 | ` 121 | 122 | const manifestJSON = JSON.stringify(manifest) 123 | 124 | expect(manifestJSON).toEqual(expectedJSON.replace(/\s/g, '')) 125 | }) 126 | 127 | it('uses the current time if no created date is provided', () => { 128 | const manifest = testAttestationManifest(false) 129 | expect( 130 | manifest.annotations['org.opencontainers.image.created'] 131 | ).toBeDefined() 132 | }) 133 | }) 134 | 135 | describe('createReferrerIndexManifest', () => { 136 | it('creates a manifest containing the provided information', () => { 137 | const manifest = testReferrerIndexManifest() 138 | 139 | const expectedJSON = ` 140 | { 141 | "schemaVersion": 2, 142 | "mediaType": "application/vnd.oci.image.index.v1+json", 143 | "manifests": [ 144 | { 145 | "mediaType": "application/vnd.oci.image.manifest.v1+json", 146 | "artifactType": "application/vnd.dev.sigstore.bundle.v0.3+json", 147 | "size": 100, 148 | "digest": "attDigest", 149 | "annotations": { 150 | "com.github.package.type": "actions_oci_pkg_attestation", 151 | "org.opencontainers.image.created": "2021-01-01T00:00:00.000Z", 152 | "dev.sigstore.bundle.content": "dsse-envelope", 153 | "dev.sigstore.bundle.predicateType": "https://slsa.dev/provenance/v1" 154 | } 155 | } 156 | ], 157 | "annotations": { 158 | "com.github.package.type": "actions_oci_pkg_referrer_index", 159 | "org.opencontainers.image.created": "2021-01-01T00:00:00.000Z" 160 | } 161 | } 162 | ` 163 | 164 | const manifestJSON = JSON.stringify(manifest) 165 | 166 | expect(manifestJSON).toEqual(expectedJSON.replace(/\s/g, '')) 167 | }) 168 | 169 | it('uses the current time if no created date is provided', () => { 170 | const manifest = testReferrerIndexManifest(false) 171 | expect( 172 | manifest.annotations['org.opencontainers.image.created'] 173 | ).toBeDefined() 174 | }) 175 | }) 176 | 177 | function testActionPackageManifest(setCreated = true): { 178 | manifest: OCIImageManifest 179 | tarFile: FileMetadata 180 | zipFile: FileMetadata 181 | } { 182 | const date = new Date('2021-01-01T00:00:00Z') 183 | const repo = 'test-org/test-repo' 184 | const version = '1.2.3' 185 | const repoId = '123' 186 | const ownerId = '456' 187 | const sourceCommit = 'abc' 188 | const tarFile: FileMetadata = { 189 | path: '/test/test/test.tar.gz', 190 | sha256: 'tarSha', 191 | size: 123 192 | } 193 | const zipFile: FileMetadata = { 194 | path: '/test/test/test.zip', 195 | sha256: 'zipSha', 196 | size: 456 197 | } 198 | 199 | const manifest = createActionPackageManifest( 200 | tarFile, 201 | zipFile, 202 | repo, 203 | repoId, 204 | ownerId, 205 | sourceCommit, 206 | version, 207 | setCreated ? 
date : undefined 208 | ) 209 | 210 | return { 211 | manifest, 212 | tarFile, 213 | zipFile 214 | } 215 | } 216 | 217 | function testAttestationManifest(setCreated = true): OCIImageManifest { 218 | const date = new Date(createdTimestamp) 219 | return createSigstoreAttestationManifest( 220 | 10, 221 | 'bundleDigest', 222 | 'application/vnd.dev.sigstore.bundle.v0.3+json', 223 | 'https://slsa.dev/provenance/v1', 224 | 100, 225 | 'subjectDigest', 226 | setCreated ? date : undefined 227 | ) 228 | } 229 | 230 | function testReferrerIndexManifest(setCreated = true): OCIIndexManifest { 231 | const date = new Date(createdTimestamp) 232 | return createReferrerTagManifest( 233 | 'attDigest', 234 | 100, 235 | 'application/vnd.dev.sigstore.bundle.v0.3+json', 236 | 'https://slsa.dev/provenance/v1', 237 | date, 238 | setCreated ? date : undefined 239 | ) 240 | } 241 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import semver from 'semver' 3 | import * as fsHelper from './fs-helper' 4 | import * as ociContainer from './oci-container' 5 | import * as ghcr from './ghcr-client' 6 | import * as attest from '@actions/attest' 7 | import * as cfg from './config' 8 | import * as crypto from 'crypto' 9 | 10 | /** 11 | * The main function for the action. 12 | * @returns {Promise} Resolves when the action is complete. 13 | */ 14 | export async function run(): Promise { 15 | try { 16 | const options: cfg.PublishActionOptions = 17 | await cfg.resolvePublishActionOptions() 18 | 19 | core.info(`Publishing action package version with options:`) 20 | core.info(cfg.serializeOptions(options)) 21 | 22 | const semverTag: semver.SemVer = parseSemverTagFromRef(options) 23 | 24 | // Ensure the correct SHA is checked out for the tag we're parsing, otherwise the bundled content will be incorrect. 25 | await fsHelper.ensureTagAndRefCheckedOut( 26 | options.ref, 27 | options.sha, 28 | options.workspaceDir 29 | ) 30 | 31 | const stagedActionFilesDir = fsHelper.createTempDir( 32 | options.runnerTempDir, 33 | 'staging' 34 | ) 35 | fsHelper.stageActionFiles(options.workspaceDir, stagedActionFilesDir) 36 | 37 | const archiveDir = fsHelper.createTempDir(options.runnerTempDir, 'archives') 38 | const archives = await fsHelper.createArchives( 39 | stagedActionFilesDir, 40 | archiveDir 41 | ) 42 | 43 | const manifest = ociContainer.createActionPackageManifest( 44 | archives.tarFile, 45 | archives.zipFile, 46 | options.nameWithOwner, 47 | options.repositoryId, 48 | options.repositoryOwnerId, 49 | options.sha, 50 | semverTag.raw, 51 | new Date() 52 | ) 53 | 54 | const manifestDigest = ociContainer.sha256Digest(manifest) 55 | 56 | const ghcrClient = new ghcr.Client( 57 | options.token, 58 | options.containerRegistryUrl 59 | ) 60 | 61 | // Attestations are not supported in GHES. 
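// Note (editorial sketch, grounded in config.test.ts below): options.isEnterprise is resolved
// from the server URL and is true only when the host is neither github.com nor ghe.com, so the
// attestation generation and upload in the following block run solely for those two hosts.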
62 | if (!options.isEnterprise) { 63 | const { bundle, bundleDigest, bundleMediaType, bundlePredicateType } = 64 | await generateAttestation(manifestDigest, semverTag.raw, options) 65 | 66 | const attestationCreated = new Date() 67 | const attestationManifest = 68 | ociContainer.createSigstoreAttestationManifest( 69 | bundle.length, 70 | bundleDigest, 71 | bundleMediaType, 72 | bundlePredicateType, 73 | ociContainer.sizeInBytes(manifest), 74 | manifestDigest, 75 | attestationCreated 76 | ) 77 | 78 | const referrerIndexManifest = ociContainer.createReferrerTagManifest( 79 | ociContainer.sha256Digest(attestationManifest), 80 | ociContainer.sizeInBytes(attestationManifest), 81 | bundleMediaType, 82 | bundlePredicateType, 83 | attestationCreated 84 | ) 85 | 86 | const { attestationSHA, referrerIndexSHA } = await publishAttestation( 87 | ghcrClient, 88 | options.nameWithOwner, 89 | bundle, 90 | bundleDigest, 91 | manifest, 92 | attestationManifest, 93 | referrerIndexManifest 94 | ) 95 | 96 | if (attestationSHA !== undefined) { 97 | core.info(`Uploaded attestation ${attestationSHA}`) 98 | core.setOutput('attestation-manifest-sha', attestationSHA) 99 | } 100 | if (referrerIndexSHA !== undefined) { 101 | core.info(`Uploaded referrer index ${referrerIndexSHA}`) 102 | core.setOutput('referrer-index-manifest-sha', referrerIndexSHA) 103 | } 104 | } 105 | 106 | const publishedDigest = await publishImmutableActionVersion( 107 | ghcrClient, 108 | options.nameWithOwner, 109 | semverTag.raw, 110 | archives.zipFile, 111 | archives.tarFile, 112 | manifest 113 | ) 114 | 115 | core.setOutput('package-manifest-sha', publishedDigest) 116 | } catch (error) { 117 | // Fail the workflow run if an error occurs 118 | if (error instanceof Error) core.setFailed(error.message) 119 | } 120 | } 121 | 122 | // This action can be triggered by any workflow that specifies a tag as its GITHUB_REF. 123 | // This includes releases, creating or pushing tags, or workflow_dispatch. 124 | // See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#about-events-that-trigger-workflows. 
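// For example, a workflow triggered for the tag ref "refs/tags/v1.2.3" yields the raw tag
// "v1.2.3"; the leading "v" is stripped and the remainder is parsed as the semver "1.2.3".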
125 | function parseSemverTagFromRef(opts: cfg.PublishActionOptions): semver.SemVer { 126 | const ref = opts.ref 127 | 128 | if (!ref.startsWith('refs/tags/')) { 129 | throw new Error(`The ref ${ref} is not a valid tag reference.`) 130 | } 131 | 132 | const rawTag = ref.replace(/^refs\/tags\//, '') 133 | const semverTag = semver.parse(rawTag.replace(/^v/, '')) 134 | if (!semverTag) { 135 | throw new Error( 136 | `${rawTag} is not a valid semantic version tag, and so cannot be uploaded to the action package.` 137 | ) 138 | } 139 | 140 | return semverTag 141 | } 142 | 143 | async function publishImmutableActionVersion( 144 | client: ghcr.Client, 145 | nameWithOwner: string, 146 | semverTag: string, 147 | zipFile: fsHelper.FileMetadata, 148 | tarFile: fsHelper.FileMetadata, 149 | manifest: ociContainer.OCIImageManifest 150 | ): Promise { 151 | const manifestDigest = ociContainer.sha256Digest(manifest) 152 | 153 | core.info( 154 | `Creating GHCR package ${manifestDigest} for release with semver: ${semverTag}.` 155 | ) 156 | 157 | const files = new Map() 158 | files.set(zipFile.sha256, fsHelper.readFileContents(zipFile.path)) 159 | files.set(tarFile.sha256, fsHelper.readFileContents(tarFile.path)) 160 | files.set(ociContainer.emptyConfigSha, Buffer.from('{}')) 161 | 162 | return await client.uploadOCIImageManifest( 163 | nameWithOwner, 164 | manifest, 165 | files, 166 | semverTag 167 | ) 168 | } 169 | 170 | async function publishAttestation( 171 | client: ghcr.Client, 172 | nameWithOwner: string, 173 | bundle: Buffer, 174 | bundleDigest: string, 175 | subjectManifest: ociContainer.OCIImageManifest, 176 | attestationManifest: ociContainer.OCIImageManifest, 177 | referrerIndexManifest: ociContainer.OCIIndexManifest 178 | ): Promise<{ 179 | attestationSHA: string 180 | referrerIndexSHA: string 181 | }> { 182 | const attestationManifestDigest = 183 | ociContainer.sha256Digest(attestationManifest) 184 | const subjectManifestDigest = ociContainer.sha256Digest(subjectManifest) 185 | const referrerIndexManifestDigest = ociContainer.sha256Digest( 186 | referrerIndexManifest 187 | ) 188 | 189 | core.info( 190 | `Publishing attestation ${attestationManifestDigest} for subject ${subjectManifestDigest}.` 191 | ) 192 | 193 | const files = new Map() 194 | files.set(ociContainer.emptyConfigSha, Buffer.from('{}')) 195 | files.set(bundleDigest, bundle) 196 | 197 | const attestationSHA = await client.uploadOCIImageManifest( 198 | nameWithOwner, 199 | attestationManifest, 200 | files 201 | ) 202 | 203 | // The referrer index is tagged with the subject's digest in format sha256- 204 | const referrerTag = subjectManifestDigest.replace(':', '-') 205 | 206 | core.info( 207 | `Publishing referrer index ${referrerIndexManifestDigest} with tag ${referrerTag} for attestation ${attestationManifestDigest} and subject ${subjectManifestDigest}.` 208 | ) 209 | 210 | const referrerIndexSHA = await client.uploadOCIIndexManifest( 211 | nameWithOwner, 212 | referrerIndexManifest, 213 | referrerTag 214 | ) 215 | 216 | return { attestationSHA, referrerIndexSHA } 217 | } 218 | 219 | async function generateAttestation( 220 | manifestDigest: string, 221 | semverTag: string, 222 | options: cfg.PublishActionOptions 223 | ): Promise<{ 224 | bundle: Buffer 225 | bundleDigest: string 226 | bundleMediaType: string 227 | bundlePredicateType: string 228 | }> { 229 | const subjectName = `${options.nameWithOwner}@${semverTag}` 230 | const subjectDigest = removePrefix(manifestDigest, 'sha256:') 231 | 232 | core.info(`Generating attestation 
${subjectName} for digest ${subjectDigest}`) 233 | 234 | const attestation = await attest.attestProvenance({ 235 | subjectName, 236 | subjectDigest: { sha256: subjectDigest }, 237 | token: options.token, 238 | sigstore: 'github', 239 | skipWrite: true // We will upload attestations to GHCR 240 | }) 241 | 242 | const bundleArtifact = Buffer.from(JSON.stringify(attestation.bundle)) 243 | 244 | const hash = crypto.createHash('sha256') 245 | hash.update(bundleArtifact) 246 | const bundleSHA = hash.digest('hex') 247 | 248 | // We must base64 decode the dsse envelope to grab the predicate type 249 | const dsseEnvelopeArtifact = attestation.bundle.dsseEnvelope 250 | if (dsseEnvelopeArtifact === undefined) { 251 | throw new Error('Attestation bundle is missing dsseEnvelope artifact') 252 | } 253 | 254 | const dsseEnvelope = JSON.parse( 255 | Buffer.from(dsseEnvelopeArtifact.payload, 'base64').toString('utf-8') 256 | ) 257 | const predicateType = dsseEnvelope.predicateType 258 | if (predicateType === undefined) { 259 | throw new Error('Attestation bundle is missing predicateType') 260 | } 261 | 262 | return { 263 | bundle: bundleArtifact, 264 | bundleDigest: `sha256:${bundleSHA}`, 265 | bundleMediaType: attestation.bundle.mediaType, 266 | bundlePredicateType: predicateType 267 | } 268 | } 269 | 270 | function removePrefix(str: string, prefix: string): string { 271 | if (str.startsWith(prefix)) { 272 | return str.slice(prefix.length) 273 | } 274 | return str 275 | } 276 | -------------------------------------------------------------------------------- /__tests__/fs-helper.test.ts: -------------------------------------------------------------------------------- 1 | import * as fsHelper from '../src/fs-helper' 2 | import * as fs from 'fs' 3 | import * as os from 'os' 4 | import { execSync } from 'child_process' 5 | 6 | const fileContent = 'This is the content of the file' 7 | const tmpFileDir = '/tmp' 8 | 9 | describe('stageActionFiles', () => { 10 | let sourceDir: string 11 | let stagingDir: string 12 | 13 | beforeEach(() => { 14 | sourceDir = fsHelper.createTempDir(tmpFileDir, 'source') 15 | fs.mkdirSync(`${sourceDir}/src`) 16 | fs.writeFileSync(`${sourceDir}/src/main.js`, fileContent) 17 | fs.writeFileSync(`${sourceDir}/src/other.js`, fileContent) 18 | 19 | stagingDir = fsHelper.createTempDir(tmpFileDir, 'staging') 20 | }) 21 | 22 | afterEach(() => { 23 | fs.rmSync(sourceDir, { recursive: true }) 24 | fs.rmSync(stagingDir, { recursive: true }) 25 | }) 26 | 27 | it('copies all files (excluding the .git folder) to the staging directory', () => { 28 | fs.writeFileSync(`${sourceDir}/action.yml`, fileContent) 29 | 30 | fs.mkdirSync(`${sourceDir}/.git`) 31 | fs.writeFileSync(`${sourceDir}/.git/HEAD`, fileContent) 32 | 33 | fs.mkdirSync(`${sourceDir}/.github/workflows`, { recursive: true }) 34 | fs.writeFileSync(`${sourceDir}/.github/workflows/workflow.yml`, fileContent) 35 | 36 | fsHelper.stageActionFiles(sourceDir, stagingDir) 37 | expect(fs.existsSync(`${stagingDir}/action.yml`)).toBe(true) 38 | expect(fs.existsSync(`${stagingDir}/src/main.js`)).toBe(true) 39 | expect(fs.existsSync(`${stagingDir}/src/other.js`)).toBe(true) 40 | 41 | // Hidden files are copied 42 | expect(fs.existsSync(`${stagingDir}/.github`)).toBe(true) 43 | 44 | // .git folder is not copied 45 | expect(fs.existsSync(`${stagingDir}/.git`)).toBe(false) 46 | }) 47 | }) 48 | 49 | describe('createArchives', () => { 50 | let stageDir: string 51 | let archiveDir: string 52 | 53 | beforeAll(() => { 54 | stageDir = 
fsHelper.createTempDir(tmpFileDir, 'staging') 55 | fs.writeFileSync(`${stageDir}/hello.txt`, fileContent) 56 | fs.writeFileSync(`${stageDir}/world.txt`, fileContent) 57 | }) 58 | 59 | beforeEach(() => { 60 | archiveDir = fsHelper.createTempDir(tmpFileDir, 'archive') 61 | }) 62 | 63 | afterEach(() => { 64 | fs.rmSync(archiveDir, { recursive: true }) 65 | }) 66 | 67 | afterAll(() => { 68 | fs.rmSync(stageDir, { recursive: true }) 69 | }) 70 | 71 | it('creates archives', async () => { 72 | const { zipFile, tarFile } = await fsHelper.createArchives( 73 | stageDir, 74 | archiveDir 75 | ) 76 | 77 | expect(zipFile.path).toEqual(`${archiveDir}/archive.zip`) 78 | expect(fs.existsSync(zipFile.path)).toEqual(true) 79 | expect(fs.statSync(zipFile.path).size).toBeGreaterThan(0) 80 | expect(zipFile.sha256.startsWith('sha256:')).toEqual(true) 81 | 82 | expect(tarFile.path).toEqual(`${archiveDir}/archive.tar.gz`) 83 | expect(fs.existsSync(tarFile.path)).toEqual(true) 84 | expect(fs.statSync(tarFile.path).size).toBeGreaterThan(0) 85 | expect(tarFile.sha256.startsWith('sha256:')).toEqual(true) 86 | 87 | // Validate the hashes by comparing to the output of the system's hashing utility 88 | const zipSHA = zipFile.sha256.substring(7) // remove "sha256:" prefix 89 | const tarSHA = tarFile.sha256.substring(7) // remove "sha256:" prefix 90 | 91 | // sha256 hash is 64 characters long 92 | expect(zipSHA).toHaveLength(64) 93 | expect(tarSHA).toHaveLength(64) 94 | 95 | let systemZipHash: string 96 | let systemTarHash: string 97 | 98 | if (os.platform() === 'win32') { 99 | // Windows 100 | systemZipHash = execSync(`CertUtil -hashfile ${zipFile.path} SHA256`) 101 | .toString() 102 | .split(' ')[1] 103 | .trim() 104 | systemTarHash = execSync(`CertUtil -hashfile ${tarFile.path} SHA256`) 105 | .toString() 106 | .split(' ')[1] 107 | .trim() 108 | } else { 109 | // Unix-based systems 110 | systemZipHash = execSync(`shasum -a 256 ${zipFile.path}`) 111 | .toString() 112 | .split(' ')[0] 113 | systemTarHash = execSync(`shasum -a 256 ${tarFile.path}`) 114 | .toString() 115 | .split(' ')[0] 116 | } 117 | 118 | expect(zipSHA).toEqual(systemZipHash) 119 | expect(tarSHA).toEqual(systemTarHash) 120 | }) 121 | }) 122 | 123 | describe('createTempDir', () => { 124 | let dirs: string[] = [] 125 | 126 | beforeEach(() => { 127 | dirs = [] 128 | }) 129 | 130 | afterEach(() => { 131 | for (const dir of dirs) { 132 | fs.rmSync(dir, { recursive: true }) 133 | } 134 | }) 135 | 136 | it('creates a temporary directory', () => { 137 | const tmpDir = fsHelper.createTempDir(tmpFileDir, 'subdir') 138 | 139 | expect(fs.existsSync(tmpDir)).toEqual(true) 140 | expect(fs.statSync(tmpDir).isDirectory()).toEqual(true) 141 | }) 142 | 143 | it('creates a unique temporary directory', () => { 144 | const dir1 = fsHelper.createTempDir(tmpFileDir, 'dir1') 145 | dirs.push(dir1) 146 | 147 | const dir2 = fsHelper.createTempDir(tmpFileDir, 'dir2') 148 | dirs.push(dir2) 149 | 150 | expect(dir1).not.toEqual(dir2) 151 | }) 152 | }) 153 | 154 | describe('isDirectory', () => { 155 | let dir: string 156 | 157 | beforeEach(() => { 158 | dir = fsHelper.createTempDir(tmpFileDir, 'subdir') 159 | }) 160 | 161 | afterEach(() => { 162 | fs.rmSync(dir, { recursive: true }) 163 | }) 164 | 165 | it('returns true if the path is a directory', () => { 166 | expect(fsHelper.isDirectory(dir)).toEqual(true) 167 | }) 168 | 169 | it('returns false if the path is not a directory', () => { 170 | const tempFile = `${dir}/file.txt` 171 | fs.writeFileSync(tempFile, fileContent) 172 | 
expect(fsHelper.isDirectory(tempFile)).toEqual(false) 173 | }) 174 | }) 175 | 176 | describe('readFileContents', () => { 177 | let dir: string 178 | 179 | beforeEach(() => { 180 | dir = fsHelper.createTempDir(tmpFileDir, 'subdir') 181 | }) 182 | 183 | afterEach(() => { 184 | fs.rmSync(dir, { recursive: true }) 185 | }) 186 | 187 | it('reads the contents of a file', () => { 188 | const tempFile = `${dir}/file.txt` 189 | fs.writeFileSync(tempFile, fileContent) 190 | 191 | expect(fsHelper.readFileContents(tempFile).toString()).toEqual(fileContent) 192 | }) 193 | }) 194 | 195 | describe('ensureCorrectShaCheckedOut', () => { 196 | let dir: string 197 | let commit1: string 198 | let commit2: string 199 | const tag1 = 'tag1' 200 | const tag2 = 'tag2' 201 | 202 | beforeEach(() => { 203 | dir = fsHelper.createTempDir(tmpFileDir, 'subdir') 204 | 205 | // Set up a git repository 206 | execSync('git init', { cwd: dir }) 207 | 208 | // Set user and email in this git repo (not globally) 209 | execSync('git config user.email monalisa@github.com', { cwd: dir }) 210 | execSync('git config user.name Mona', { cwd: dir }) 211 | 212 | // Add a file to the repo 213 | fs.writeFileSync(`${dir}/file1.txt`, fileContent) 214 | execSync('git add .', { cwd: dir }) 215 | 216 | // Add two commits 217 | execSync('git commit --allow-empty -m "test"', { cwd: dir }) 218 | execSync('git commit --allow-empty -m "test"', { cwd: dir }) 219 | 220 | // Grab the two commits 221 | commit1 = execSync('git rev-parse HEAD~1', { cwd: dir }).toString().trim() 222 | commit2 = execSync('git rev-parse HEAD', { cwd: dir }).toString().trim() 223 | 224 | // Create a tag for each commit 225 | execSync(`git tag ${tag1} ${commit1}`, { cwd: dir }) 226 | execSync(`git tag ${tag2} ${commit2}`, { cwd: dir }) 227 | }) 228 | 229 | afterEach(() => { 230 | fs.rmSync(dir, { recursive: true }) 231 | }) 232 | 233 | it('does not throw an error if the correct SHA is checked out', async () => { 234 | await expect( 235 | fsHelper.ensureTagAndRefCheckedOut(`refs/tags/${tag2}`, commit2, dir) 236 | ).resolves.toBeUndefined() 237 | }) 238 | 239 | it('throws an error if the correct SHA is not checked out', async () => { 240 | await expect( 241 | fsHelper.ensureTagAndRefCheckedOut(`refs/tags/${tag1}`, commit1, dir) 242 | ).rejects.toThrow( 243 | 'The expected commit associated with the tag refs/tags/tag1 is not checked out.' 244 | ) 245 | }) 246 | 247 | it('throws if there is an issue getting sha for tag', async () => { 248 | await expect(async () => 249 | fsHelper.ensureTagAndRefCheckedOut( 250 | `refs/tags/some-unknown-tag`, 251 | commit2, 252 | dir 253 | ) 254 | ).rejects.toThrow('Error retrieving commit associated with tag') 255 | }) 256 | 257 | it('throws an error if the sha of the tag does not match expected sha', async () => { 258 | await expect(async () => 259 | fsHelper.ensureTagAndRefCheckedOut(`refs/tags/${tag1}`, commit2, dir) 260 | ).rejects.toThrow( 261 | 'The commit associated with the tag refs/tags/tag1 does not match the SHA of the commit provided by the actions context.' 
262 | ) 263 | }) 264 | 265 | it('throws if the provided ref is not a tag ref', async () => { 266 | await expect(async () => 267 | fsHelper.ensureTagAndRefCheckedOut(`refs/heads/main`, commit2, dir) 268 | ).rejects.toThrow('Tag ref provided is not in expected format.') 269 | }) 270 | 271 | it('throws if there are untracked files in the working directory', async () => { 272 | // Add an untracked file 273 | fs.writeFileSync(`${dir}/untracked-file.txt`, fileContent) 274 | 275 | await expect(async () => 276 | fsHelper.ensureTagAndRefCheckedOut(`refs/tags/${tag2}`, commit2, dir) 277 | ).rejects.toThrow( 278 | 'The working directory has uncommitted changes. Uploading modified code from the checked out repository is not supported by this action.' 279 | ) 280 | }) 281 | 282 | it('throws if there are uncommitted changes in the working directory', async () => { 283 | // Add an untracked file 284 | fs.writeFileSync(`${dir}/file1.txt`, fileContent + fileContent) 285 | execSync('git add .', { cwd: dir }) 286 | 287 | await expect(async () => 288 | fsHelper.ensureTagAndRefCheckedOut(`refs/tags/${tag2}`, commit2, dir) 289 | ).rejects.toThrow( 290 | 'The working directory has uncommitted changes. Uploading modified code from the checked out repository is not supported by this action.' 291 | ) 292 | }) 293 | }) 294 | -------------------------------------------------------------------------------- /src/ghcr-client.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as ociContainer from './oci-container' 3 | 4 | const defaultRetries = 5 5 | const defaultBackoff = 1000 6 | const retryableStatusCodes = [408, 429, 500, 502, 503, 504] 7 | 8 | export interface RetryOptions { 9 | retries: number 10 | backoff: number 11 | } 12 | 13 | export class Client { 14 | private _b64Token: string 15 | private _registry: URL 16 | private _retryOptions: RetryOptions 17 | 18 | constructor( 19 | token: string, 20 | registry: URL, 21 | retryOptions: RetryOptions = { 22 | retries: defaultRetries, 23 | backoff: defaultBackoff 24 | } 25 | ) { 26 | this._b64Token = Buffer.from(token).toString('base64') 27 | this._registry = registry 28 | this._retryOptions = retryOptions 29 | } 30 | 31 | async uploadOCIImageManifest( 32 | repository: string, 33 | manifest: ociContainer.OCIImageManifest, 34 | blobs: Map, 35 | tag?: string 36 | ): Promise { 37 | const manifestSHA = ociContainer.sha256Digest(manifest) 38 | 39 | if (tag) { 40 | core.info( 41 | `Uploading manifest ${manifestSHA} with tag ${tag} to ${repository}.` 42 | ) 43 | } else { 44 | core.info(`Uploading manifest ${manifestSHA} to ${repository}.`) 45 | } 46 | 47 | // We must also upload the config layer 48 | const layersToUpload = manifest.layers.concat(manifest.config) 49 | 50 | const layerUploads: Promise[] = layersToUpload.map(async layer => { 51 | const blob = blobs.get(layer.digest) 52 | if (!blob) { 53 | throw new Error(`Blob for layer ${layer.digest} not found`) 54 | } 55 | return this.uploadLayer(layer, blob, repository) 56 | }) 57 | 58 | await Promise.all(layerUploads) 59 | 60 | const publishedDigest = await this.uploadManifest( 61 | JSON.stringify(manifest), 62 | manifest.mediaType, 63 | repository, 64 | tag || manifestSHA 65 | ) 66 | 67 | if (publishedDigest !== manifestSHA) { 68 | throw new Error( 69 | `Digest mismatch. 
Expected ${manifestSHA}, got ${publishedDigest}.` 70 | ) 71 | } 72 | 73 | return manifestSHA 74 | } 75 | 76 | async uploadOCIIndexManifest( 77 | repository: string, 78 | manifest: ociContainer.OCIIndexManifest, 79 | tag: string 80 | ): Promise { 81 | const manifestSHA = ociContainer.sha256Digest(manifest) 82 | 83 | core.info( 84 | `Uploading index manifest ${manifestSHA} with tag ${tag} to ${repository}.` 85 | ) 86 | 87 | const publishedDigest = await this.uploadManifest( 88 | JSON.stringify(manifest), 89 | manifest.mediaType, 90 | repository, 91 | tag 92 | ) 93 | 94 | if (publishedDigest !== manifestSHA) { 95 | throw new Error( 96 | `Digest mismatch. Expected ${manifestSHA}, got ${publishedDigest}.` 97 | ) 98 | } 99 | 100 | return manifestSHA 101 | } 102 | 103 | private async uploadLayer( 104 | layer: ociContainer.Descriptor, 105 | data: Buffer, 106 | repository: string 107 | ): Promise { 108 | const checkExistsResponse = await this.fetchWithRetries( 109 | this.checkBlobEndpoint(repository, layer.digest), 110 | { 111 | method: 'HEAD', 112 | headers: { 113 | Authorization: `Bearer ${this._b64Token}` 114 | } 115 | } 116 | ) 117 | 118 | if ( 119 | checkExistsResponse.status === 200 || 120 | checkExistsResponse.status === 202 121 | ) { 122 | core.info(`Layer ${layer.digest} already exists. Skipping upload.`) 123 | return 124 | } 125 | 126 | if (checkExistsResponse.status !== 404) { 127 | throw new Error( 128 | await errorMessageForFailedRequest( 129 | `check blob (${layer.digest}) exists`, 130 | checkExistsResponse 131 | ) 132 | ) 133 | } 134 | 135 | core.info(`Uploading layer ${layer.digest}.`) 136 | 137 | const initiateUploadBlobURL = this.uploadBlobEndpoint(repository) 138 | 139 | const initiateUploadResponse = await this.fetchWithRetries( 140 | initiateUploadBlobURL, 141 | { 142 | method: 'POST', 143 | headers: { 144 | Authorization: `Bearer ${this._b64Token}` 145 | }, 146 | body: JSON.stringify(layer) 147 | } 148 | ) 149 | 150 | if (initiateUploadResponse.status !== 202) { 151 | throw new Error( 152 | await errorMessageForFailedRequest( 153 | `initiate layer upload`, 154 | initiateUploadResponse 155 | ) 156 | ) 157 | } 158 | 159 | const locationResponseHeader = 160 | initiateUploadResponse.headers.get('location') 161 | if (locationResponseHeader === undefined) { 162 | throw new Error( 163 | `No location header in response from upload post ${initiateUploadBlobURL} for layer ${layer.digest}` 164 | ) 165 | } 166 | 167 | const pathname = `${locationResponseHeader}?digest=${layer.digest}` 168 | const uploadBlobUrl = new URL(pathname, this._registry).toString() 169 | 170 | const putResponse = await this.fetchWithRetries(uploadBlobUrl, { 171 | method: 'PUT', 172 | headers: { 173 | Authorization: `Bearer ${this._b64Token}`, 174 | 'Content-Type': 'application/octet-stream', 175 | 'Accept-Encoding': 'gzip', 176 | 'Content-Length': layer.size.toString() 177 | }, 178 | body: data 179 | }) 180 | 181 | if (putResponse.status !== 201) { 182 | throw new Error( 183 | await errorMessageForFailedRequest( 184 | `layer (${layer.digest}) upload`, 185 | putResponse 186 | ) 187 | ) 188 | } 189 | } 190 | 191 | // Uploads the manifest and returns the digest returned by GHCR 192 | private async uploadManifest( 193 | manifestJSON: string, 194 | manifestMediaType: string, 195 | repository: string, 196 | version: string 197 | ): Promise { 198 | const manifestUrl = this.manifestEndpoint(repository, version) 199 | 200 | core.info(`Uploading manifest to ${manifestUrl}.`) 201 | 202 | const putResponse = await 
this.fetchWithRetries(manifestUrl, { 203 | method: 'PUT', 204 | headers: { 205 | Authorization: `Bearer ${this._b64Token}`, 206 | 'Content-Type': manifestMediaType 207 | }, 208 | body: manifestJSON 209 | }) 210 | 211 | if (putResponse.status !== 201) { 212 | throw new Error( 213 | await errorMessageForFailedRequest(`manifest upload`, putResponse) 214 | ) 215 | } 216 | 217 | const digestResponseHeader = 218 | putResponse.headers.get('docker-content-digest') || '' 219 | 220 | return digestResponseHeader 221 | } 222 | 223 | private checkBlobEndpoint(repository: string, digest: string): string { 224 | return new URL( 225 | `v2/${repository}/blobs/${digest}`, 226 | this._registry 227 | ).toString() 228 | } 229 | 230 | private uploadBlobEndpoint(repository: string): string { 231 | return new URL(`v2/${repository}/blobs/uploads/`, this._registry).toString() 232 | } 233 | 234 | private manifestEndpoint(repository: string, version: string): string { 235 | return new URL( 236 | `v2/${repository}/manifests/${version}`, 237 | this._registry 238 | ).toString() 239 | } 240 | 241 | private async fetchWithDebug( 242 | url: string, 243 | config: RequestInit = {} 244 | ): Promise { 245 | core.debug(`Request from ${url} with config: ${JSON.stringify(config)}`) 246 | try { 247 | const response = await fetch(url, config) 248 | core.debug(`Response with ${JSON.stringify(response)}`) 249 | return response 250 | } catch (error) { 251 | core.debug(`Error with ${error}`) 252 | throw error 253 | } 254 | } 255 | 256 | private async fetchWithRetries( 257 | url: string, 258 | config: RequestInit = {} 259 | ): Promise { 260 | const allowedAttempts = this._retryOptions.retries + 1 // Initial attempt + retries 261 | 262 | for ( 263 | let attemptNumber = 1; 264 | attemptNumber <= allowedAttempts; 265 | attemptNumber++ 266 | ) { 267 | let backoff = this._retryOptions.backoff 268 | 269 | try { 270 | const response = await this.fetchWithDebug(url, config) 271 | 272 | // If this is the last attempt, just return it 273 | if (attemptNumber === allowedAttempts) { 274 | return response 275 | } 276 | 277 | // If the response is retryable, backoff and retry 278 | if (retryableStatusCodes.includes(response.status)) { 279 | const retryAfter = response.headers.get('retry-after') 280 | if (retryAfter) { 281 | backoff = parseInt(retryAfter) * 1000 // convert to ms 282 | } 283 | 284 | core.info( 285 | `Received ${response.status} response. Retrying after ${backoff}ms...` 286 | ) 287 | await new Promise(resolve => setTimeout(resolve, backoff)) 288 | continue 289 | } 290 | 291 | // Otherwise, just return the response 292 | return response 293 | } catch (error) { 294 | // If this is the last attempt, throw the error 295 | if (attemptNumber === allowedAttempts) { 296 | throw error 297 | } 298 | 299 | core.info(`Encountered error: ${error}. 
Retrying after ${backoff}ms...`) 300 | await new Promise(resolve => setTimeout(resolve, backoff)) 301 | } 302 | } 303 | 304 | // Should be unreachable 305 | throw new Error('Exhausted retries without a successful response') 306 | } 307 | } 308 | 309 | interface ghcrError { 310 | code: string 311 | message: string 312 | } 313 | 314 | // Generate an error message for a failed HTTP request 315 | async function errorMessageForFailedRequest( 316 | requestDescription: string, 317 | response: Response 318 | ): Promise { 319 | const bodyText = await response.text() 320 | 321 | // Try to parse the body as JSON and extract the expected fields returned from GHCR 322 | // Expected format: { "errors": [{"code": "BAD_REQUEST", "message": "Something went wrong."}] } 323 | // If the body does not match the expected format, just return the whole response body 324 | let errorString = `Response Body: ${bodyText}.` 325 | 326 | try { 327 | const body = JSON.parse(bodyText) 328 | const errors = body.errors 329 | 330 | if ( 331 | Array.isArray(errors) && 332 | errors.length > 0 && 333 | errors.every(isGHCRError) 334 | ) { 335 | const errorMessages = errors.map((error: ghcrError) => { 336 | return `${error.code} - ${error.message}` 337 | }) 338 | errorString = `Errors: ${errorMessages.join(', ')}` 339 | } 340 | } catch (error) { 341 | // Ignore error 342 | } 343 | 344 | return `Unexpected ${response.status} ${response.statusText} response from ${requestDescription}. ${errorString}` 345 | } 346 | 347 | // Runtime checks that parsed JSON object is in the expected format 348 | // {"code": "BAD_REQUEST", "message": "Something went wrong."} 349 | function isGHCRError(obj: unknown): boolean { 350 | return ( 351 | typeof obj === 'object' && 352 | obj !== null && 353 | 'code' in obj && 354 | typeof (obj as { code: unknown }).code === 'string' && 355 | 'message' in obj && 356 | typeof (obj as { message: unknown }).message === 'string' 357 | ) 358 | } 359 | -------------------------------------------------------------------------------- /__tests__/config.test.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | import * as cfg from '../src/config' 4 | import * as apiClient from '../src/api-client' 5 | 6 | let getContainerRegistryURLMock: jest.SpyInstance 7 | let getRepositoryMetadataMock: jest.SpyInstance 8 | let getInputMock: jest.SpyInstance 9 | 10 | const ghcrUrl = new URL('https://ghcr.io') 11 | 12 | describe('config.resolvePublishActionOptions', () => { 13 | beforeEach(() => { 14 | getContainerRegistryURLMock = jest 15 | .spyOn(apiClient, 'getContainerRegistryURL') 16 | .mockImplementation() 17 | 18 | getRepositoryMetadataMock = jest 19 | .spyOn(apiClient, 'getRepositoryMetadata') 20 | .mockImplementation() 21 | 22 | getInputMock = jest.spyOn(core, 'getInput').mockImplementation() 23 | 24 | configureEventContext() 25 | }) 26 | 27 | afterEach(() => { 28 | jest.clearAllMocks() 29 | clearEventContext() 30 | }) 31 | 32 | it('throws an error when the token is not provided', async () => { 33 | getInputMock.mockReturnValueOnce(undefined) 34 | 35 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 36 | 'Could not find GITHUB_TOKEN.' 
37 | ) 38 | }) 39 | 40 | it('throws an error when the event is not provided', async () => { 41 | getInputMock.mockReturnValueOnce('token') 42 | github.context.eventName = '' 43 | 44 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 45 | 'Could not find event name.' 46 | ) 47 | }) 48 | 49 | it('throws an error when the ref is not provided', async () => { 50 | getInputMock.mockReturnValueOnce('token') 51 | github.context.ref = '' 52 | 53 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 54 | 'Could not find GITHUB_REF.' 55 | ) 56 | }) 57 | 58 | it('throws an error when the workspaceDir is not provided', async () => { 59 | getInputMock.mockReturnValueOnce('token') 60 | process.env.GITHUB_WORKSPACE = '' 61 | 62 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 63 | 'Could not find GITHUB_WORKSPACE.' 64 | ) 65 | }) 66 | 67 | it('throws an error when the repository is not provided', async () => { 68 | getInputMock.mockReturnValueOnce('token') 69 | github.context.payload.repository = undefined 70 | 71 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 72 | 'Could not find Repository.' 73 | ) 74 | }) 75 | 76 | it('throws an error when the apiBaseUrl is not provided', async () => { 77 | getInputMock.mockReturnValueOnce('token') 78 | github.context.apiUrl = '' 79 | 80 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 81 | 'Could not find GITHUB_API_URL.' 82 | ) 83 | }) 84 | 85 | it('throws an error when the runnerTempDir is not provided', async () => { 86 | getInputMock.mockReturnValueOnce('token') 87 | process.env.RUNNER_TEMP = '' 88 | 89 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 90 | 'Could not find RUNNER_TEMP.' 91 | ) 92 | }) 93 | 94 | it('throws an error when the sha is not provided', async () => { 95 | getInputMock.mockReturnValueOnce('token') 96 | github.context.sha = '' 97 | 98 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 99 | 'Could not find GITHUB_SHA.' 100 | ) 101 | }) 102 | 103 | it('throws an error when the githubServerUrl is not provided', async () => { 104 | getInputMock.mockReturnValueOnce('token') 105 | github.context.serverUrl = '' 106 | 107 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 108 | 'Could not find GITHUB_SERVER_URL.' 109 | ) 110 | }) 111 | 112 | it('throws an error when the repositoryId is not provided', async () => { 113 | getInputMock.mockReturnValueOnce('token') 114 | process.env.GITHUB_REPOSITORY_ID = '' 115 | 116 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 117 | 'Could not find GITHUB_REPOSITORY_ID.' 118 | ) 119 | }) 120 | 121 | it('throws an error when the repositoryOwnerId is not provided', async () => { 122 | getInputMock.mockReturnValueOnce('token') 123 | process.env.GITHUB_REPOSITORY_OWNER_ID = '' 124 | 125 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 126 | 'Could not find GITHUB_REPOSITORY_OWNER_ID.' 
127 | ) 128 | }) 129 | 130 | it('throws an error when getting the container registry URL fails', async () => { 131 | getInputMock.mockReturnValueOnce('token') 132 | getContainerRegistryURLMock.mockRejectedValue( 133 | new Error('Failed to get container registry URL') 134 | ) 135 | 136 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 137 | 'Failed to get container registry URL' 138 | ) 139 | }) 140 | 141 | it('throws an error when getting the repository metadata fails', async () => { 142 | getInputMock.mockReturnValueOnce('token') 143 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 144 | getRepositoryMetadataMock.mockRejectedValue( 145 | new Error('Failed to get repository metadata') 146 | ) 147 | 148 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 149 | 'Failed to get repository metadata' 150 | ) 151 | }) 152 | 153 | it('throws an error when returned repository visibility is empty', async () => { 154 | getInputMock.mockReturnValueOnce('token') 155 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 156 | getRepositoryMetadataMock.mockResolvedValue({ 157 | visibility: '' 158 | }) 159 | 160 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 161 | 'Could not find repository visibility.' 162 | ) 163 | }) 164 | 165 | it('throws an error when returned repository id does not match env var', async () => { 166 | getInputMock.mockReturnValueOnce('token') 167 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 168 | getRepositoryMetadataMock.mockResolvedValue({ 169 | visibility: 'public', 170 | ownerId: '12345', 171 | repoId: '54321' 172 | }) 173 | 174 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 175 | 'Repository ID mismatch.' 176 | ) 177 | }) 178 | 179 | it('throws an error when returned repository owner id does not match env var', async () => { 180 | getInputMock.mockReturnValueOnce('token') 181 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 182 | getRepositoryMetadataMock.mockResolvedValue({ 183 | visibility: 'public', 184 | ownerId: '123124', 185 | repoId: 'repositoryId' 186 | }) 187 | 188 | await expect(cfg.resolvePublishActionOptions()).rejects.toThrow( 189 | 'Repository Owner ID mismatch.' 
190 | ) 191 | }) 192 | 193 | it('returns options when all values are present', async () => { 194 | getInputMock.mockImplementation((name: string) => { 195 | expect(name).toBe('github-token') 196 | return 'token' 197 | }) 198 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 199 | 200 | getRepositoryMetadataMock.mockResolvedValue({ 201 | visibility: 'public', 202 | repoId: 'repositoryId', 203 | ownerId: 'repositoryOwnerId' 204 | }) 205 | 206 | const options = await cfg.resolvePublishActionOptions() 207 | 208 | expect(options).toEqual({ 209 | nameWithOwner: 'nameWithOwner', 210 | ref: 'ref', 211 | workspaceDir: 'workspaceDir', 212 | event: 'release', 213 | apiBaseUrl: 'apiBaseUrl', 214 | runnerTempDir: 'runnerTempDir', 215 | sha: 'sha', 216 | repositoryVisibility: 'public', 217 | repositoryId: 'repositoryId', 218 | repositoryOwnerId: 'repositoryOwnerId', 219 | isEnterprise: false, 220 | containerRegistryUrl: ghcrUrl, 221 | token: 'token' 222 | }) 223 | }) 224 | 225 | it('sets enterprise to true when the server URL is not github.com or ghe.com', async () => { 226 | getInputMock.mockImplementation((name: string) => { 227 | expect(name).toBe('github-token') 228 | return 'token' 229 | }) 230 | getContainerRegistryURLMock.mockResolvedValue(ghcrUrl) 231 | 232 | getRepositoryMetadataMock.mockResolvedValue({ 233 | visibility: 'public', 234 | repoId: 'repositoryId', 235 | ownerId: 'repositoryOwnerId' 236 | }) 237 | 238 | github.context.serverUrl = 'https://github-enterprise.com' 239 | 240 | const options = await cfg.resolvePublishActionOptions() 241 | 242 | expect(options).toEqual({ 243 | nameWithOwner: 'nameWithOwner', 244 | ref: 'ref', 245 | workspaceDir: 'workspaceDir', 246 | event: 'release', 247 | apiBaseUrl: 'apiBaseUrl', 248 | runnerTempDir: 'runnerTempDir', 249 | sha: 'sha', 250 | repositoryId: 'repositoryId', 251 | repositoryOwnerId: 'repositoryOwnerId', 252 | isEnterprise: true, 253 | containerRegistryUrl: ghcrUrl, 254 | token: 'token', 255 | repositoryVisibility: 'public' 256 | }) 257 | }) 258 | }) 259 | 260 | describe('config.serializeOptions', () => { 261 | it('serializes the options, ignoring internal keys', () => { 262 | const options: cfg.PublishActionOptions = { 263 | nameWithOwner: 'nameWithOwner', 264 | ref: 'ref', 265 | workspaceDir: 'workspaceDir', 266 | event: 'release', 267 | apiBaseUrl: 'apiBaseUrl', 268 | runnerTempDir: 'runnerTempDir', 269 | sha: 'sha', 270 | repositoryId: 'repositoryId', 271 | repositoryOwnerId: 'repositoryOwnerId', 272 | isEnterprise: false, 273 | containerRegistryUrl: ghcrUrl, 274 | token: 'token', 275 | repositoryVisibility: 'public' 276 | } 277 | 278 | const serialized = cfg.serializeOptions(options) 279 | 280 | // Parse the JSON 281 | const parsed = JSON.parse(serialized) 282 | 283 | expect(parsed.nameWithOwner).toBe('nameWithOwner') 284 | expect(parsed.ref).toBe('ref') 285 | expect(parsed.workspaceDir).toBe('workspaceDir') 286 | expect(parsed.event).toBe('release') 287 | expect(parsed.apiBaseUrl).toBe('apiBaseUrl') 288 | expect(parsed.sha).toBe('sha') 289 | expect(parsed.isEnterprise).toBe(false) 290 | expect(parsed.containerRegistryUrl).toBe(ghcrUrl.toString()) 291 | expect(parsed.token).toBeUndefined() 292 | expect(parsed.repositoryId).toBeUndefined() 293 | expect(parsed.repositoryOwnerId).toBeUndefined() 294 | expect(parsed.runnerTempDir).toBeUndefined() 295 | }) 296 | }) 297 | 298 | function configureEventContext(): void { 299 | github.context.ref = 'ref' 300 | github.context.eventName = 'release' 301 | github.context.apiUrl = 'apiBaseUrl' 
302 | github.context.sha = 'sha' 303 | github.context.serverUrl = 'https://github.com/' 304 | github.context.payload = { 305 | repository: { 306 | full_name: 'nameWithOwner', 307 | name: 'name', 308 | owner: { 309 | login: 'owner' 310 | } 311 | } 312 | } 313 | 314 | process.env.RUNNER_TEMP = 'runnerTempDir' 315 | process.env.GITHUB_WORKSPACE = 'workspaceDir' 316 | process.env.GITHUB_REPOSITORY_ID = 'repositoryId' 317 | process.env.GITHUB_REPOSITORY_OWNER_ID = 'repositoryOwnerId' 318 | } 319 | 320 | function clearEventContext(): void { 321 | github.context.ref = '' 322 | github.context.eventName = '' 323 | github.context.apiUrl = '' 324 | github.context.sha = '' 325 | github.context.serverUrl = '' 326 | github.context.payload = {} 327 | process.env.RUNNER_TEMP = '' 328 | process.env.GITHUB_WORKSPACE = '' 329 | process.env.GITHUB_REPOSITORY_ID = '' 330 | process.env.GITHUB_REPOSITORY_OWNER_ID = '' 331 | } 332 | -------------------------------------------------------------------------------- /__tests__/ghcr-client.test.ts: -------------------------------------------------------------------------------- 1 | import { Client } from '../src/ghcr-client' 2 | import * as ociContainer from '../src/oci-container' 3 | import * as crypto from 'crypto' 4 | 5 | // Mocks 6 | let fetchMock: jest.SpyInstance 7 | 8 | let client: Client 9 | 10 | const token = 'test-token' 11 | const registry = new URL('https://ghcr.io') 12 | const repository = 'test-org/test-repo' 13 | const semver = '1.2.3' 14 | const genericSha = '1234567890' // We should look at using different shas here to catch bug, but that make location validation harder 15 | 16 | const checkBlobNoExistingBlobs = (): object => { 17 | // Simulate none of the blobs existing currently 18 | return { 19 | text() { 20 | return '{"errors": [{"code": "NOT_FOUND", "message": "blob not found."}]}' 21 | }, 22 | status: 404, 23 | statusText: 'Not Found' 24 | } 25 | } 26 | 27 | const checkBlobAllExistingBlobs = (): object => { 28 | // Simulate all of the blobs existing currently 29 | return { 30 | status: 200, 31 | statusText: 'OK' 32 | } 33 | } 34 | 35 | let count = 0 36 | const checkBlobSomeExistingBlobs = (): object => { 37 | count++ 38 | // report one as existing 39 | if (count === 1) { 40 | return { 41 | status: 200, 42 | statusText: 'OK' 43 | } 44 | } else { 45 | // report all others are missing 46 | return { 47 | text() { 48 | return '{"errors": [{"code": "NOT_FOUND", "message": "blob not found."}]}' 49 | }, 50 | status: 404, 51 | statusText: 'Not Found' 52 | } 53 | } 54 | } 55 | 56 | const checkBlobFailure = (): object => { 57 | return { 58 | text() { 59 | // In this case we'll simulate a response which does not use the expected error format 60 | return '503 Service Unavailable' 61 | }, 62 | status: 503, 63 | statusText: 'Service Unavailable' 64 | } 65 | } 66 | 67 | const initiateBlobUploadSuccessForAllBlobs = (): object => { 68 | // Simulate successful initiation of uploads for all blobs & return location 69 | return { 70 | status: 202, 71 | headers: { 72 | get: (header: string) => { 73 | if (header === 'location') { 74 | return `https://ghcr.io/v2/${repository}/blobs/uploads/${genericSha}` 75 | } 76 | } 77 | } 78 | } 79 | } 80 | 81 | const initiateBlobUploadFailureForAllBlobs = (): object => { 82 | // Simulate failed initiation of uploads 83 | return { 84 | text() { 85 | // In this case we'll simulate a response which does not use the expected error format 86 | return '503 Service Unavailable' 87 | }, 88 | status: 503, 89 | statusText: 'Service 
Unavailable' 90 | } 91 | } 92 | 93 | const initiateBlobUploadNoLocationHeader = (): object => { 94 | return { 95 | status: 202, 96 | headers: { 97 | get: () => {} 98 | } 99 | } 100 | } 101 | 102 | const putManifestSuccessful = ( 103 | digestToReturn: string, 104 | expectedVersion: string 105 | ): ((url: string) => object) => { 106 | return (url: string): object => { 107 | expect(url.endsWith(`manifests/${expectedVersion}`)).toBeTruthy() 108 | 109 | return { 110 | status: 201, 111 | headers: { 112 | get: (header: string) => { 113 | if (header === 'docker-content-digest') { 114 | return digestToReturn 115 | } 116 | } 117 | } 118 | } 119 | } 120 | } 121 | 122 | const putBlobSuccess = (): object => { 123 | return { 124 | status: 201 125 | } 126 | } 127 | 128 | const putManifestFailure = (): object => { 129 | // Simulate fails upload of all blobs & manifest 130 | return { 131 | text() { 132 | return '{"errors": [{"code": "BAD_REQUEST", "message": "tag already exists."}]}' 133 | }, 134 | status: 400, 135 | statusText: 'Bad Request' 136 | } 137 | } 138 | 139 | const putBlobFailure = (): object => { 140 | // Simulate fails upload of all blobs & manifest 141 | return { 142 | text() { 143 | return '{"errors": [{"code": "BAD_REQUEST", "message": "digest issue."}]}' 144 | }, 145 | status: 400, 146 | statusText: 'Bad Request' 147 | } 148 | } 149 | 150 | type MethodHandlers = { 151 | checkBlobMock?: (url: string, options: { method: string }) => object 152 | initiateBlobUploadMock?: (url: string, options: { method: string }) => object 153 | putManifestMock?: (url: string, options: { method: string }) => object 154 | putBlobMock?: (url: string, options: { method: string }) => object 155 | } 156 | 157 | type ForcedRetries = { 158 | checkBlob: number 159 | initiateBlobUpload: number 160 | putBlob: number 161 | putManifest: number 162 | } 163 | 164 | function configureFetchMock( 165 | fetchMockInstance: jest.SpyInstance, 166 | methodHandlers: MethodHandlers, 167 | forcedRetries: ForcedRetries = { 168 | checkBlob: 0, 169 | initiateBlobUpload: 0, 170 | putBlob: 0, 171 | putManifest: 0 172 | } 173 | ): void { 174 | const retriableError = async (retries: number): Promise => { 175 | if (retries % 2 === 0) { 176 | throw new Error('Network Error') 177 | } else { 178 | return { 179 | status: 429, 180 | statusText: 'Too Many Requests', 181 | headers: { 182 | get: (header: string) => { 183 | if (header === 'retry-after') { 184 | return '0.1' 185 | } 186 | } 187 | } 188 | } 189 | } 190 | } 191 | 192 | fetchMockInstance.mockImplementation( 193 | async (url: string, options: { method: string }) => { 194 | // Simulate retries for every request until the number of forced retries is exhausted. 195 | // We'll simulate both failing status codes and network errors for full coverage. 
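// For example, with forcedRetries.checkBlob = 2 the first HEAD request receives a 429 with a
// Retry-After header, the second throws a network error, and the third reaches checkBlobMock.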
196 | validateRequestConfig(url, options) 197 | switch (options.method) { 198 | case 'HEAD': 199 | if (forcedRetries.checkBlob > 0) { 200 | forcedRetries.checkBlob-- 201 | return retriableError(forcedRetries.checkBlob) 202 | } 203 | 204 | return methodHandlers.checkBlobMock?.(url, options) 205 | case 'POST': 206 | if (forcedRetries.initiateBlobUpload > 0) { 207 | forcedRetries.initiateBlobUpload-- 208 | return retriableError(forcedRetries.initiateBlobUpload) 209 | } 210 | 211 | return methodHandlers.initiateBlobUploadMock?.(url, options) 212 | case 'PUT': 213 | if (url.includes('manifest')) { 214 | if (forcedRetries.putManifest > 0) { 215 | forcedRetries.putManifest-- 216 | return retriableError(forcedRetries.putManifest) 217 | } 218 | 219 | return methodHandlers.putManifestMock?.(url, options) 220 | } else { 221 | if (forcedRetries.putBlob > 0) { 222 | forcedRetries.putBlob-- 223 | return retriableError(forcedRetries.putBlob) 224 | } 225 | 226 | return methodHandlers.putBlobMock?.(url, options) 227 | } 228 | } 229 | } 230 | ) 231 | } 232 | 233 | describe('uploadOCIIndexManifest', () => { 234 | beforeEach(() => { 235 | jest.clearAllMocks() 236 | fetchMock = jest.spyOn(global, 'fetch').mockImplementation() 237 | 238 | client = new Client(token, registry, { 239 | retries: 5, 240 | backoff: 1 241 | }) 242 | }) 243 | 244 | it('uploads the tagged manifest with the appropriate tag', async () => { 245 | const { manifest, sha } = testIndexManifest() 246 | const tag = 'sha-1234' 247 | 248 | configureFetchMock(fetchMock, { 249 | putManifestMock: putManifestSuccessful(sha, tag) 250 | }) 251 | 252 | await client.uploadOCIIndexManifest(repository, manifest, tag) 253 | 254 | expect(fetchMock).toHaveBeenCalledTimes(1) 255 | expect( 256 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 257 | ).toHaveLength(1) 258 | }) 259 | 260 | it('throws an error if a manifest upload fails', async () => { 261 | const { manifest, blobs } = testImageManifest() 262 | 263 | configureFetchMock(fetchMock, { 264 | checkBlobMock: checkBlobAllExistingBlobs, 265 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 266 | putBlobMock: putBlobSuccess, 267 | putManifestMock: putManifestFailure 268 | }) 269 | 270 | await expect( 271 | client.uploadOCIImageManifest(repository, manifest, blobs) 272 | ).rejects.toThrow( 273 | 'Unexpected 400 Bad Request response from manifest upload. Errors: BAD_REQUEST - tag already exists.' 274 | ) 275 | }) 276 | 277 | it('throws an error if the returned digest does not match the precalculated one', async () => { 278 | const { manifest, sha } = testIndexManifest() 279 | 280 | const tag = 'sha-1234' 281 | 282 | configureFetchMock(fetchMock, { 283 | checkBlobMock: checkBlobAllExistingBlobs, 284 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 285 | putBlobMock: putBlobSuccess, 286 | putManifestMock: putManifestSuccessful('some-garbage-digest', tag) 287 | }) 288 | 289 | await expect( 290 | client.uploadOCIIndexManifest(repository, manifest, tag) 291 | ).rejects.toThrow( 292 | `Digest mismatch. 
Expected ${sha}, got some-garbage-digest.` 293 | ) 294 | }) 295 | }) 296 | 297 | describe('uploadOCIImageManifest', () => { 298 | beforeEach(() => { 299 | jest.clearAllMocks() 300 | fetchMock = jest.spyOn(global, 'fetch').mockImplementation() 301 | }) 302 | 303 | it('uploads blobs then untagged manifest to the provided registry', async () => { 304 | const { manifest, sha, blobs } = testImageManifest() 305 | 306 | configureFetchMock(fetchMock, { 307 | checkBlobMock: checkBlobNoExistingBlobs, 308 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 309 | putBlobMock: putBlobSuccess, 310 | putManifestMock: putManifestSuccessful(sha, sha) 311 | }) 312 | 313 | await client.uploadOCIImageManifest(repository, manifest, blobs) 314 | 315 | expect(fetchMock).toHaveBeenCalledTimes(10) 316 | expect( 317 | fetchMock.mock.calls.filter(call => call[1].method === 'HEAD') 318 | ).toHaveLength(3) 319 | expect( 320 | fetchMock.mock.calls.filter(call => call[1].method === 'POST') 321 | ).toHaveLength(3) 322 | expect( 323 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 324 | ).toHaveLength(4) 325 | }) 326 | 327 | it('uploads blobs then tagged manifest to the provided registry', async () => { 328 | const { manifest, sha, blobs } = testImageManifest() 329 | 330 | configureFetchMock(fetchMock, { 331 | checkBlobMock: checkBlobNoExistingBlobs, 332 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 333 | putBlobMock: putBlobSuccess, 334 | putManifestMock: putManifestSuccessful(sha, semver) 335 | }) 336 | 337 | await client.uploadOCIImageManifest(repository, manifest, blobs, semver) 338 | 339 | expect(fetchMock).toHaveBeenCalledTimes(10) 340 | expect( 341 | fetchMock.mock.calls.filter(call => call[1].method === 'HEAD') 342 | ).toHaveLength(3) 343 | expect( 344 | fetchMock.mock.calls.filter(call => call[1].method === 'POST') 345 | ).toHaveLength(3) 346 | expect( 347 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 348 | ).toHaveLength(4) 349 | }) 350 | 351 | it('uploads everything to the provided registry by retrying requests', async () => { 352 | const { manifest, sha, blobs } = testImageManifest() 353 | 354 | configureFetchMock( 355 | fetchMock, 356 | { 357 | checkBlobMock: checkBlobNoExistingBlobs, 358 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 359 | putBlobMock: putBlobSuccess, 360 | putManifestMock: putManifestSuccessful(sha, sha) 361 | }, 362 | { 363 | checkBlob: 2, 364 | initiateBlobUpload: 2, 365 | putBlob: 2, 366 | putManifest: 2 367 | } 368 | ) // Fail each request twice before succeeding 369 | 370 | await client.uploadOCIImageManifest(repository, manifest, blobs) 371 | 372 | // 8 Additional requests - 2 for each of the 4 failed request types 373 | expect(fetchMock).toHaveBeenCalledTimes(18) 374 | expect( 375 | fetchMock.mock.calls.filter(call => call[1].method === 'HEAD') 376 | ).toHaveLength(5) 377 | expect( 378 | fetchMock.mock.calls.filter(call => call[1].method === 'POST') 379 | ).toHaveLength(5) 380 | expect( 381 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 382 | ).toHaveLength(8) 383 | }) 384 | 385 | it('skips blob uploads if all blobs already exist', async () => { 386 | const { manifest, sha, blobs } = testImageManifest() 387 | 388 | configureFetchMock(fetchMock, { 389 | checkBlobMock: checkBlobAllExistingBlobs, 390 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 391 | putBlobMock: putBlobSuccess, 392 | putManifestMock: putManifestSuccessful(sha, sha) 393 | }) 394 | 395 | await 
client.uploadOCIImageManifest(repository, manifest, blobs) 396 | 397 | expect(fetchMock).toHaveBeenCalledTimes(4) 398 | expect( 399 | fetchMock.mock.calls.filter(call => call[1].method === 'HEAD') 400 | ).toHaveLength(3) 401 | expect( 402 | fetchMock.mock.calls.filter(call => call[1].method === 'POST') 403 | ).toHaveLength(0) 404 | expect( 405 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 406 | ).toHaveLength(1) 407 | }) 408 | 409 | it('skips blob uploads if some blobs already exist', async () => { 410 | const { manifest, sha, blobs } = testImageManifest() 411 | 412 | configureFetchMock(fetchMock, { 413 | checkBlobMock: checkBlobSomeExistingBlobs, 414 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 415 | putBlobMock: putBlobSuccess, 416 | putManifestMock: putManifestSuccessful(sha, sha) 417 | }) 418 | 419 | await client.uploadOCIImageManifest(repository, manifest, blobs) 420 | 421 | expect(fetchMock).toHaveBeenCalledTimes(8) 422 | expect( 423 | fetchMock.mock.calls.filter(call => call[1].method === 'HEAD') 424 | ).toHaveLength(3) 425 | expect( 426 | fetchMock.mock.calls.filter(call => call[1].method === 'POST') 427 | ).toHaveLength(2) 428 | expect( 429 | fetchMock.mock.calls.filter(call => call[1].method === 'PUT') 430 | ).toHaveLength(3) 431 | }) 432 | 433 | it('throws an error if checking for existing blobs fails', async () => { 434 | const { manifest, sha, blobs } = testImageManifest() 435 | 436 | configureFetchMock(fetchMock, { 437 | checkBlobMock: checkBlobFailure, 438 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 439 | putBlobMock: putBlobSuccess, 440 | putManifestMock: putManifestSuccessful(sha, sha) 441 | }) 442 | 443 | await expect( 444 | client.uploadOCIImageManifest(repository, manifest, blobs) 445 | ).rejects.toThrow( 446 | /^Unexpected 503 Service Unavailable response from check blob/ 447 | ) 448 | }) 449 | 450 | it('throws an error if a blob file is not provided', async () => { 451 | const { manifest, sha } = testImageManifest() 452 | 453 | configureFetchMock(fetchMock, { 454 | checkBlobMock: checkBlobNoExistingBlobs, 455 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 456 | putBlobMock: putBlobSuccess, 457 | putManifestMock: putManifestSuccessful(sha, sha) 458 | }) 459 | 460 | await expect( 461 | client.uploadOCIImageManifest( 462 | repository, 463 | manifest, 464 | new Map() 465 | ) 466 | ).rejects.toThrow(/^Blob for layer sha256:[a-zA-Z0-9]+ not found/) 467 | }) 468 | 469 | it('throws an error if initiating layer upload fails', async () => { 470 | const { manifest, sha, blobs } = testImageManifest() 471 | 472 | configureFetchMock(fetchMock, { 473 | checkBlobMock: checkBlobNoExistingBlobs, 474 | initiateBlobUploadMock: initiateBlobUploadFailureForAllBlobs, 475 | putBlobMock: putBlobSuccess, 476 | putManifestMock: putManifestSuccessful(sha, sha) 477 | }) 478 | 479 | await expect( 480 | client.uploadOCIImageManifest(repository, manifest, blobs) 481 | ).rejects.toThrow( 482 | 'Unexpected 503 Service Unavailable response from initiate layer upload. Response Body: 503 Service Unavailable.' 
483 | ) 484 | }) 485 | 486 | it('throws an error if the upload endpoint does not return a location', async () => { 487 | const { manifest, sha, blobs } = testImageManifest() 488 | 489 | configureFetchMock(fetchMock, { 490 | checkBlobMock: checkBlobNoExistingBlobs, 491 | initiateBlobUploadMock: initiateBlobUploadNoLocationHeader, 492 | putBlobMock: putBlobSuccess, 493 | putManifestMock: putManifestSuccessful(sha, sha) 494 | }) 495 | 496 | await expect( 497 | client.uploadOCIImageManifest(repository, manifest, blobs) 498 | ).rejects.toThrow(/^No location header in response from upload post/) 499 | }) 500 | 501 | it('throws an error if a layer upload fails', async () => { 502 | const { manifest, sha, blobs } = testImageManifest() 503 | 504 | configureFetchMock(fetchMock, { 505 | checkBlobMock: checkBlobNoExistingBlobs, 506 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 507 | putBlobMock: putBlobFailure, 508 | putManifestMock: putManifestSuccessful(sha, sha) 509 | }) 510 | 511 | await expect( 512 | client.uploadOCIImageManifest(repository, manifest, blobs) 513 | ).rejects.toThrow(/^Unexpected 400 Bad Request response from layer/) 514 | }) 515 | 516 | it('throws an error if a manifest upload fails', async () => { 517 | const { manifest, blobs } = testImageManifest() 518 | 519 | configureFetchMock(fetchMock, { 520 | checkBlobMock: checkBlobAllExistingBlobs, 521 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 522 | putBlobMock: putBlobSuccess, 523 | putManifestMock: putManifestFailure 524 | }) 525 | 526 | await expect( 527 | client.uploadOCIImageManifest(repository, manifest, blobs) 528 | ).rejects.toThrow( 529 | 'Unexpected 400 Bad Request response from manifest upload. Errors: BAD_REQUEST - tag already exists.' 530 | ) 531 | }) 532 | 533 | it('throws an error if the returned digest does not match the precalculated one', async () => { 534 | const { manifest, sha, blobs } = testImageManifest() 535 | 536 | configureFetchMock(fetchMock, { 537 | checkBlobMock: checkBlobAllExistingBlobs, 538 | initiateBlobUploadMock: initiateBlobUploadSuccessForAllBlobs, 539 | putBlobMock: putBlobSuccess, 540 | putManifestMock: putManifestSuccessful('some-garbage-digest', sha) 541 | }) 542 | 543 | await expect( 544 | client.uploadOCIImageManifest(repository, manifest, blobs) 545 | ).rejects.toThrow( 546 | `Digest mismatch. 
Expected ${sha}, got some-garbage-digest.` 547 | ) 548 | }) 549 | }) 550 | 551 | function testImageManifest(): { 552 | manifest: ociContainer.OCIImageManifest 553 | sha: string 554 | blobs: Map<string, Buffer> 555 | } { 556 | const blobs = new Map<string, Buffer>() 557 | blobs.set(ociContainer.emptyConfigSha, Buffer.from('{}')) 558 | 559 | const firstFile = Buffer.from('test1') 560 | const secondFile = Buffer.from('test2') 561 | 562 | const firstFileDigest = `sha256:${crypto 563 | .createHash('sha256') 564 | .update(firstFile) 565 | .digest('hex')}` 566 | 567 | const secondFileDigest = `sha256:${crypto 568 | .createHash('sha256') 569 | .update(secondFile) 570 | .digest('hex')}` 571 | 572 | blobs.set(firstFileDigest, firstFile) 573 | blobs.set(secondFileDigest, secondFile) 574 | 575 | const manifest: ociContainer.OCIImageManifest = { 576 | schemaVersion: 2, 577 | mediaType: ociContainer.imageManifestMediaType, 578 | artifactType: ociContainer.imageManifestMediaType, 579 | config: ociContainer.createEmptyConfigLayer(), 580 | layers: [ 581 | { 582 | mediaType: 'application/octet-stream', 583 | size: firstFile.length, 584 | digest: firstFileDigest 585 | }, 586 | { 587 | mediaType: 'application/octet-stream', 588 | size: secondFile.length, 589 | digest: secondFileDigest 590 | } 591 | ], 592 | annotations: { 593 | 'org.opencontainers.image.created': new Date().toISOString() 594 | } 595 | } 596 | 597 | const sha = ociContainer.sha256Digest(manifest) 598 | 599 | return { manifest, sha, blobs } 600 | } 601 | 602 | function testIndexManifest(): { 603 | manifest: ociContainer.OCIIndexManifest 604 | sha: string 605 | } { 606 | const manifest = ociContainer.createReferrerTagManifest( 607 | 'attestation-digest', 608 | 1234, 609 | 'bundle-media-type', 610 | 'bundle-predicate-type', 611 | new Date(), 612 | new Date() 613 | ) 614 | const sha = ociContainer.sha256Digest(manifest) 615 | return { manifest, sha } 616 | } 617 | 618 | // We expect all fetch calls to have auth headers set. 619 | // This function verifies that for a given request config. 620 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 621 | function validateRequestConfig(url: string, config: any): void { 622 | // Basic URL checks 623 | expect(url).toBeDefined() 624 | if (!url.startsWith(registry.toString())) { 625 | console.log(`${url} does not start with ${registry}`) 626 | } 627 | // if this expect fails, run the test again with `-- --silent=false` 628 | // the console.log above should give a clue about which URL is failing 629 | expect(url.startsWith(registry.toString())).toBeTruthy() 630 | 631 | // Config checks 632 | expect(config).toBeDefined() 633 | 634 | expect(config.headers).toBeDefined() 635 | if (config.headers) { 636 | // Check the auth header is set 637 | expect(config.headers.Authorization).toBeDefined() 638 | // Check the auth header is the base 64 encoded token 639 | expect(config.headers.Authorization).toBe( 640 | `Bearer ${Buffer.from(token).toString('base64')}` 641 | ) 642 | } 643 | } 644 | -------------------------------------------------------------------------------- /__tests__/main.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Unit tests for the action's main functionality, src/main.ts 3 | * 4 | * These should be run as if the action was called from a workflow. 5 | * Specifically, the inputs listed in `action.yml` should be set as environment 6 | * variables following the pattern `INPUT_<INPUT_NAME>`. 
7 | */ 8 | 9 | import * as core from '@actions/core' 10 | import * as attest from '@actions/attest' 11 | import * as main from '../src/main' 12 | import * as cfg from '../src/config' 13 | import * as fsHelper from '../src/fs-helper' 14 | import * as ghcr from '../src/ghcr-client' 15 | import * as ociContainer from '../src/oci-container' 16 | 17 | const ghcrUrl = new URL('https://ghcr.io') 18 | const predicateType = 'https://slsa.dev/provenance/v1' 19 | const bundleMediaType = 'application/vnd.dev.sigstore.bundle.v0.3+json' 20 | 21 | // Mock the GitHub Actions core library 22 | let setFailedMock: jest.SpyInstance 23 | let setOutputMock: jest.SpyInstance 24 | 25 | // Mock the FS Helper 26 | let createTempDirMock: jest.SpyInstance 27 | let createArchivesMock: jest.SpyInstance 28 | let stageActionFilesMock: jest.SpyInstance 29 | let ensureCorrectShaCheckedOutMock: jest.SpyInstance 30 | let readFileContentsMock: jest.SpyInstance 31 | 32 | // Mock OCI container lib 33 | let calculateManifestDigestMock: jest.SpyInstance 34 | 35 | // Mock GHCR client 36 | let client: ghcr.Client 37 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 38 | let createGHCRClient: jest.SpyInstance 39 | let uploadOCIImageManifestMock: jest.SpyInstance 40 | let uploadOCIIndexManifestMock: jest.SpyInstance 41 | 42 | // Mock the config resolution 43 | let resolvePublishActionOptionsMock: jest.SpyInstance 44 | 45 | // Mock generating attestation 46 | let generateAttestationMock: jest.SpyInstance 47 | 48 | describe('run', () => { 49 | beforeEach(() => { 50 | jest.clearAllMocks() 51 | 52 | client = new ghcr.Client('token', ghcrUrl) 53 | 54 | // Core mocks 55 | setFailedMock = jest.spyOn(core, 'setFailed').mockImplementation() 56 | setOutputMock = jest.spyOn(core, 'setOutput').mockImplementation() 57 | 58 | // FS mocks 59 | createTempDirMock = jest 60 | .spyOn(fsHelper, 'createTempDir') 61 | .mockImplementation() 62 | createArchivesMock = jest 63 | .spyOn(fsHelper, 'createArchives') 64 | .mockImplementation() 65 | stageActionFilesMock = jest 66 | .spyOn(fsHelper, 'stageActionFiles') 67 | .mockImplementation() 68 | ensureCorrectShaCheckedOutMock = jest 69 | .spyOn(fsHelper, 'ensureTagAndRefCheckedOut') 70 | .mockImplementation() 71 | readFileContentsMock = jest 72 | .spyOn(fsHelper, 'readFileContents') 73 | .mockImplementation() 74 | 75 | // OCI Container mocks 76 | calculateManifestDigestMock = jest 77 | .spyOn(ociContainer, 'sha256Digest') 78 | .mockImplementation() 79 | 80 | // GHCR Client mocks 81 | createGHCRClient = jest 82 | .spyOn(ghcr, 'Client') 83 | .mockImplementation(() => client) 84 | 85 | uploadOCIImageManifestMock = jest 86 | .spyOn(client, 'uploadOCIImageManifest') 87 | .mockImplementation() 88 | uploadOCIIndexManifestMock = jest 89 | .spyOn(client, 'uploadOCIIndexManifest') 90 | .mockImplementation() 91 | 92 | // Config mocks 93 | resolvePublishActionOptionsMock = jest 94 | .spyOn(cfg, 'resolvePublishActionOptions') 95 | .mockImplementation() 96 | 97 | // Attestation mocks 98 | generateAttestationMock = jest 99 | .spyOn(attest, 'attestProvenance') 100 | .mockImplementation() 101 | }) 102 | 103 | it('fails if the action ref is not a tag', async () => { 104 | const options = baseOptions() 105 | options.ref = 'refs/heads/main' // This is a branch, not a tag 106 | resolvePublishActionOptionsMock.mockReturnValueOnce(options) 107 | 108 | await main.run() 109 | 110 | expect(setFailedMock).toHaveBeenCalledWith( 111 | 'The ref refs/heads/main is not a valid tag reference.' 
112 | ) 113 | }) 114 | 115 | it('fails if the value of the tag ref is not a valid semver', async () => { 116 | const tags = ['test', 'v1.0', 'chicken', '111111'] 117 | 118 | for (const tag of tags) { 119 | const options = baseOptions() 120 | options.ref = `refs/tags/${tag}` 121 | resolvePublishActionOptionsMock.mockReturnValueOnce(options) 122 | 123 | await main.run() 124 | expect(setFailedMock).toHaveBeenCalledWith( 125 | `${tag} is not a valid semantic version tag, and so cannot be uploaded to the action package.` 126 | ) 127 | } 128 | }) 129 | 130 | it('fails if ensuring the correct SHA is checked out errors', async () => { 131 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 132 | 133 | ensureCorrectShaCheckedOutMock.mockImplementation(() => { 134 | throw new Error('Something went wrong') 135 | }) 136 | 137 | // Run the action 138 | await main.run() 139 | 140 | // Check the results 141 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 142 | }) 143 | 144 | it('fails if creating staging temp directory fails', async () => { 145 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 146 | 147 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 148 | createTempDirMock.mockImplementation(() => { 149 | throw new Error('Something went wrong') 150 | }) 151 | 152 | // Run the action 153 | await main.run() 154 | 155 | // Check the results 156 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 157 | }) 158 | 159 | it('fails if staging files fails', async () => { 160 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 161 | 162 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 163 | 164 | createTempDirMock.mockImplementation(() => { 165 | return 'tmpDir/staging' 166 | }) 167 | 168 | stageActionFilesMock.mockImplementation(() => { 169 | throw new Error('Something went wrong') 170 | }) 171 | 172 | // Run the action 173 | await main.run() 174 | 175 | // Check the results 176 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 177 | }) 178 | 179 | it('fails if creating archives temp directory fails', async () => { 180 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 181 | 182 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 183 | 184 | createTempDirMock.mockImplementation((_, path: string) => { 185 | if (path === 'staging') { 186 | return 'staging' 187 | } 188 | throw new Error('Something went wrong') 189 | }) 190 | 191 | stageActionFilesMock.mockImplementation(() => {}) 192 | 193 | // Run the action 194 | await main.run() 195 | 196 | // Check the results 197 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 198 | }) 199 | 200 | it('fails if creating archives fails', async () => { 201 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 202 | 203 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 204 | 205 | createTempDirMock.mockImplementation(() => { 206 | return 'stagingOrArchivesDir' 207 | }) 208 | 209 | stageActionFilesMock.mockImplementation(() => {}) 210 | 211 | createArchivesMock.mockImplementation(() => { 212 | throw new Error('Something went wrong') 213 | }) 214 | 215 | // Run the action 216 | await main.run() 217 | 218 | // Check the results 219 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 220 | }) 221 | 222 | it('fails if creating attestation fails', async () => { 223 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 224 | 225 | 
ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 226 | 227 | createTempDirMock.mockImplementation(() => { 228 | return 'stagingOrArchivesDir' 229 | }) 230 | 231 | stageActionFilesMock.mockImplementation(() => {}) 232 | 233 | calculateManifestDigestMock.mockImplementation(() => { 234 | return 'sha256:my-test-digest' 235 | }) 236 | 237 | createArchivesMock.mockImplementation(() => { 238 | return { 239 | zipFile: { 240 | path: 'test', 241 | size: 5, 242 | sha256: '123' 243 | }, 244 | tarFile: { 245 | path: 'test2', 246 | size: 52, 247 | sha256: '1234' 248 | } 249 | } 250 | }) 251 | 252 | uploadOCIImageManifestMock.mockImplementation(() => { 253 | return { 254 | packageURL: 'https://ghcr.io/v2/test-org/test-repo:1.2.3', 255 | publishedDigest: 'sha256:my-test-digest' 256 | } 257 | }) 258 | 259 | generateAttestationMock.mockImplementation(async () => { 260 | throw new Error('Something went wrong') 261 | }) 262 | 263 | // Run the action 264 | await main.run() 265 | 266 | // Check the results 267 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 268 | }) 269 | 270 | it('fails if uploading attestation to GHCR fails', async () => { 271 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 272 | 273 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 274 | 275 | createTempDirMock.mockImplementation(() => { 276 | return 'stagingOrArchivesDir' 277 | }) 278 | 279 | stageActionFilesMock.mockImplementation(() => {}) 280 | 281 | createArchivesMock.mockImplementation(() => { 282 | return { 283 | zipFile: { 284 | path: 'test', 285 | size: 5, 286 | sha256: '123' 287 | }, 288 | tarFile: { 289 | path: 'test2', 290 | size: 52, 291 | sha256: '1234' 292 | } 293 | } 294 | }) 295 | 296 | calculateManifestDigestMock.mockImplementation(() => { 297 | return 'sha256:my-test-digest' 298 | }) 299 | 300 | generateAttestationMock.mockImplementation(async options => { 301 | expect(options).toHaveProperty('skipWrite', true) 302 | 303 | return { 304 | attestationID: 'test-attestation-id', 305 | certificate: 'test', 306 | bundle: { 307 | mediaType: bundleMediaType, 308 | verificationMaterial: { 309 | publicKey: { 310 | hint: 'test-hint' 311 | } 312 | }, 313 | dsseEnvelope: { 314 | payload: btoa(`{"predicateType": "${predicateType}"}`) 315 | } 316 | } 317 | } 318 | }) 319 | 320 | uploadOCIImageManifestMock.mockImplementation(() => { 321 | throw new Error('Something went wrong') 322 | }) 323 | 324 | // Run the action 325 | await main.run() 326 | 327 | // Check the results 328 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 329 | }) 330 | 331 | it('fails if uploading referrer index manifest to GHCR fails', async () => { 332 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 333 | 334 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 335 | 336 | createTempDirMock.mockImplementation(() => { 337 | return 'stagingOrArchivesDir' 338 | }) 339 | 340 | stageActionFilesMock.mockImplementation(() => {}) 341 | 342 | createArchivesMock.mockImplementation(() => { 343 | return { 344 | zipFile: { 345 | path: 'test', 346 | size: 5, 347 | sha256: '123' 348 | }, 349 | tarFile: { 350 | path: 'test2', 351 | size: 52, 352 | sha256: '1234' 353 | } 354 | } 355 | }) 356 | 357 | calculateManifestDigestMock.mockImplementation(() => { 358 | return 'sha256:my-test-digest' 359 | }) 360 | 361 | generateAttestationMock.mockImplementation(async options => { 362 | expect(options).toHaveProperty('skipWrite', true) 363 | 364 | return { 365 | attestationID: 
'test-attestation-id', 366 | certificate: 'test', 367 | bundle: { 368 | mediaType: bundleMediaType, 369 | verificationMaterial: { 370 | publicKey: { 371 | hint: 'test-hint' 372 | } 373 | }, 374 | dsseEnvelope: { 375 | payload: btoa(`{"predicateType": "${predicateType}"}`) 376 | } 377 | } 378 | } 379 | }) 380 | 381 | uploadOCIImageManifestMock.mockImplementation(() => { 382 | return 'attestation-digest' 383 | }) 384 | 385 | uploadOCIIndexManifestMock.mockImplementation(() => { 386 | throw new Error('Something went wrong') 387 | }) 388 | 389 | // Run the action 390 | await main.run() 391 | 392 | // Check the results 393 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 394 | }) 395 | 396 | it('fails if publishing action package version fails', async () => { 397 | resolvePublishActionOptionsMock.mockReturnValue(baseOptions()) 398 | 399 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 400 | 401 | createTempDirMock.mockImplementation(() => { 402 | return 'stagingOrArchivesDir' 403 | }) 404 | 405 | stageActionFilesMock.mockImplementation(() => {}) 406 | 407 | createArchivesMock.mockImplementation(() => { 408 | return { 409 | zipFile: { 410 | path: 'test', 411 | size: 5, 412 | sha256: '123' 413 | }, 414 | tarFile: { 415 | path: 'test2', 416 | size: 52, 417 | sha256: '1234' 418 | } 419 | } 420 | }) 421 | 422 | readFileContentsMock.mockImplementation(() => { 423 | return Buffer.from('test') 424 | }) 425 | 426 | calculateManifestDigestMock.mockImplementation(() => { 427 | return 'sha256:my-test-digest' 428 | }) 429 | 430 | generateAttestationMock.mockImplementation(async options => { 431 | expect(options).toHaveProperty('skipWrite', true) 432 | 433 | return { 434 | attestationID: 'test-attestation-id', 435 | certificate: 'test', 436 | bundle: { 437 | mediaType: bundleMediaType, 438 | verificationMaterial: { 439 | publicKey: { 440 | hint: 'test-hint' 441 | } 442 | }, 443 | dsseEnvelope: { 444 | payload: btoa(`{"predicateType": "${predicateType}"}`) 445 | } 446 | } 447 | } 448 | }) 449 | 450 | uploadOCIImageManifestMock.mockImplementation( 451 | (repo, manifest, blobs, tag) => { 452 | if (tag === undefined) { 453 | return 'attestation-digest' 454 | } else { 455 | throw new Error('Something went wrong') 456 | } 457 | } 458 | ) 459 | 460 | uploadOCIIndexManifestMock.mockImplementation(() => { 461 | return 'referrer-index-digest' 462 | }) 463 | 464 | // Run the action 465 | await main.run() 466 | 467 | // Check the results 468 | expect(setFailedMock).toHaveBeenCalledWith('Something went wrong') 469 | }) 470 | 471 | it('uploads the artifact, returns package metadata from GHCR, and skips writing attestation in enterprise', async () => { 472 | const options = baseOptions() 473 | options.isEnterprise = true 474 | resolvePublishActionOptionsMock.mockReturnValue(options) 475 | 476 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 477 | 478 | createTempDirMock.mockImplementation(() => { 479 | return 'stagingOrArchivesDir' 480 | }) 481 | 482 | stageActionFilesMock.mockImplementation(() => {}) 483 | 484 | createArchivesMock.mockImplementation(() => { 485 | return { 486 | zipFile: { 487 | path: 'zip', 488 | size: 5, 489 | sha256: '123' 490 | }, 491 | tarFile: { 492 | path: 'tar', 493 | size: 52, 494 | sha256: '1234' 495 | } 496 | } 497 | }) 498 | 499 | readFileContentsMock.mockImplementation(filepath => { 500 | return Buffer.from(`${filepath}`) 501 | }) 502 | 503 | calculateManifestDigestMock.mockImplementation(() => { 504 | return 'sha256:my-test-digest' 505 | }) 506 | 
507 | uploadOCIImageManifestMock.mockImplementation( 508 | (repository, manifest, blobs, tag) => { 509 | expect(repository).toBe(options.nameWithOwner) 510 | expect(tag).toBe('1.2.3') 511 | expect(blobs.size).toBe(3) 512 | expect(blobs.has(ociContainer.emptyConfigSha)).toBeTruthy() 513 | expect(blobs.has('123')).toBeTruthy() 514 | expect(blobs.has('1234')).toBeTruthy() 515 | expect(manifest.mediaType).toBe(ociContainer.imageManifestMediaType) 516 | expect(manifest.layers.length).toBe(2) 517 | expect(manifest.annotations['com.github.package.type']).toBe( 518 | ociContainer.actionPackageAnnotationValue 519 | ) 520 | 521 | return 'sha256:my-test-digest' 522 | } 523 | ) 524 | 525 | // Run the action 526 | await main.run() 527 | 528 | // Check the results 529 | expect(uploadOCIImageManifestMock).toHaveBeenCalledTimes(1) 530 | 531 | // Check outputs 532 | expect(setOutputMock).toHaveBeenCalledTimes(1) 533 | 534 | expect(setOutputMock).toHaveBeenCalledWith( 535 | 'package-manifest-sha', 536 | 'sha256:my-test-digest' 537 | ) 538 | }) 539 | 540 | it('uploads the artifact, returns package metadata from GHCR, and creates an attestation in non-enterprise', async () => { 541 | const options = baseOptions() 542 | resolvePublishActionOptionsMock.mockReturnValue(options) 543 | 544 | ensureCorrectShaCheckedOutMock.mockImplementation(() => {}) 545 | 546 | createTempDirMock.mockImplementation(() => { 547 | return 'stagingOrArchivesDir' 548 | }) 549 | 550 | stageActionFilesMock.mockImplementation(() => {}) 551 | 552 | createArchivesMock.mockImplementation(() => { 553 | return { 554 | zipFile: { 555 | path: 'test', 556 | size: 5, 557 | sha256: '123' 558 | }, 559 | tarFile: { 560 | path: 'test2', 561 | size: 52, 562 | sha256: '1234' 563 | } 564 | } 565 | }) 566 | 567 | readFileContentsMock.mockImplementation(() => { 568 | return Buffer.from('test') 569 | }) 570 | 571 | calculateManifestDigestMock.mockImplementation(() => { 572 | return 'sha256:my-test-digest' 573 | }) 574 | 575 | generateAttestationMock.mockImplementation(async opts => { 576 | expect(opts).toHaveProperty('skipWrite', true) 577 | 578 | return { 579 | attestationID: 'test-attestation-id', 580 | certificate: 'test', 581 | bundle: { 582 | mediaType: bundleMediaType, 583 | verificationMaterial: { 584 | publicKey: { 585 | hint: 'test-hint' 586 | } 587 | }, 588 | dsseEnvelope: { 589 | payload: btoa(`{"predicateType": "${predicateType}"}`) 590 | } 591 | } 592 | } 593 | }) 594 | 595 | uploadOCIIndexManifestMock.mockImplementation( 596 | async (repository, manifest, tag) => { 597 | expect(repository).toBe(options.nameWithOwner) 598 | expect(tag).toBe('sha256-my-test-digest') 599 | expect(manifest.mediaType).toBe(ociContainer.imageIndexMediaType) 600 | expect(manifest.annotations['com.github.package.type']).toBe( 601 | ociContainer.actionPackageReferrerTagAnnotationValue 602 | ) 603 | expect(manifest.manifests.length).toBe(1) 604 | expect(manifest.manifests[0].mediaType).toBe( 605 | ociContainer.imageManifestMediaType 606 | ) 607 | expect(manifest.manifests[0].artifactType).toBe(bundleMediaType) 608 | expect( 609 | manifest.manifests[0].annotations['dev.sigstore.bundle.predicateType'] 610 | ).toBe(predicateType) 611 | expect( 612 | manifest.manifests[0].annotations['com.github.package.type'] 613 | ).toBe(ociContainer.actionPackageAttestationAnnotationValue) 614 | 615 | return 'sha256:referrer-index-digest' 616 | } 617 | ) 618 | 619 | uploadOCIImageManifestMock.mockImplementation( 620 | (repository, manifest, blobs, tag) => { 621 | let expectedBlobKeys: 
string[] = [] 622 | let expectedAnnotationValue = '' 623 | let expectedTagValue: string | undefined = undefined 624 | let returnValue = '' 625 | let expectedPredicateTypeValue: string | undefined = undefined 626 | 627 | let expectedSubjectMediaType: string | undefined = undefined 628 | 629 | if (tag === undefined) { 630 | expectedAnnotationValue = 631 | ociContainer.actionPackageAttestationAnnotationValue 632 | const sigStoreLayer = manifest.layers.find( 633 | (layer: ociContainer.Descriptor) => 634 | layer.mediaType === bundleMediaType 635 | ) 636 | expectedPredicateTypeValue = predicateType 637 | 638 | expectedBlobKeys = [sigStoreLayer.digest, ociContainer.emptyConfigSha] 639 | 640 | expectedSubjectMediaType = ociContainer.imageManifestMediaType 641 | 642 | returnValue = 'sha256:attestation-digest' 643 | } else { 644 | expectedAnnotationValue = ociContainer.actionPackageAnnotationValue 645 | expectedTagValue = '1.2.3' 646 | expectedBlobKeys = ['123', '1234', ociContainer.emptyConfigSha] 647 | returnValue = 'sha256:my-test-digest' 648 | } 649 | 650 | expect(repository).toBe(options.nameWithOwner) 651 | expect(manifest.mediaType).toBe(ociContainer.imageManifestMediaType) 652 | expect(manifest.annotations['com.github.package.type']).toBe( 653 | expectedAnnotationValue 654 | ) 655 | expect(manifest.annotations['dev.sigstore.bundle.predicateType']).toBe( 656 | expectedPredicateTypeValue 657 | ) 658 | expect(tag).toBe(expectedTagValue) 659 | expect(manifest.subject?.mediaType).toBe(expectedSubjectMediaType) 660 | 661 | expect(manifest.layers.length).toBe(expectedBlobKeys.length - 1) // Minus config layer 662 | expect(blobs.size).toBe(expectedBlobKeys.length) 663 | for (const expectedBlobKey of expectedBlobKeys) { 664 | expect(blobs.has(expectedBlobKey)).toBeTruthy() 665 | } 666 | 667 | return returnValue 668 | } 669 | ) 670 | 671 | // Run the action 672 | await main.run() 673 | 674 | // Check the results 675 | expect(uploadOCIImageManifestMock).toHaveBeenCalledTimes(2) 676 | expect(uploadOCIIndexManifestMock).toHaveBeenCalledTimes(1) 677 | 678 | // Check outputs 679 | expect(setOutputMock).toHaveBeenCalledTimes(3) 680 | 681 | expect(setOutputMock).toHaveBeenCalledWith( 682 | 'attestation-manifest-sha', 683 | 'sha256:attestation-digest' 684 | ) 685 | 686 | expect(setOutputMock).toHaveBeenCalledWith( 687 | 'referrer-index-manifest-sha', 688 | 'sha256:referrer-index-digest' 689 | ) 690 | 691 | expect(setOutputMock).toHaveBeenCalledWith( 692 | 'package-manifest-sha', 693 | 'sha256:my-test-digest' 694 | ) 695 | }) 696 | }) 697 | 698 | function baseOptions(): cfg.PublishActionOptions { 699 | return { 700 | nameWithOwner: 'nameWithOwner', 701 | workspaceDir: 'workspaceDir', 702 | event: 'release', 703 | apiBaseUrl: 'apiBaseUrl', 704 | runnerTempDir: 'runnerTempDir', 705 | sha: 'sha', 706 | repositoryId: 'repositoryId', 707 | repositoryOwnerId: 'repositoryOwnerId', 708 | isEnterprise: false, 709 | containerRegistryUrl: ghcrUrl, 710 | token: 'token', 711 | ref: 'refs/tags/v1.2.3', 712 | repositoryVisibility: 'public' 713 | } 714 | } 715 | --------------------------------------------------------------------------------