├── .editorconfig ├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── labeler.yml ├── release.yml └── workflows │ ├── check-dist.yml │ ├── codeql-analysis.yml │ ├── dependency-review.yml │ ├── draft-release.yml │ ├── labeler.yml │ ├── lint.yml │ ├── renovate-build.yml │ ├── scorecards.yml │ ├── test-workflow.yml │ └── test.yml ├── .gitignore ├── .markdownlint-cli2.yaml ├── .nvmrc ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.json ├── .yamllint.yaml ├── .yarnrc.yml ├── CODEOWNERS ├── LICENSE ├── README.md ├── SECURITY.md ├── __tests__ ├── docker-hub.test.ts ├── github.test.ts └── image-utils.test.ts ├── action.yml ├── dist ├── index.js ├── index.js.map ├── licenses.txt └── sourcemap-register.js ├── jest.config.js ├── package.json ├── renovate.json ├── src ├── auth.ts ├── docker-hub.ts ├── gcr.ts ├── github.ts ├── image-utils.ts ├── main.ts └── registry.ts ├── tsconfig.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | insert_final_newline = true 7 | trim_trailing_whitespace = true 8 | indent_size = 4 9 | indent_style = tab 10 | 11 | [*.{md,yml,yaml}] 12 | indent_size = 2 13 | indent_style = space 14 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | jest.config.js 5 | /.yarn/ 6 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "jest": true, 5 | "node": true 6 | }, 7 | "extends": [ 8 | "eslint:recommended", 9 | "plugin:@typescript-eslint/eslint-recommended", 10 | "plugin:@typescript-eslint/recommended", 11 | "plugin:import/errors", 12 | 
"plugin:import/typescript", 13 | "plugin:import/warnings", 14 | "plugin:jest/recommended", 15 | "plugin:prettier/recommended" 16 | ], 17 | "parser": "@typescript-eslint/parser", 18 | "parserOptions": { 19 | "ecmaVersion": 2023, 20 | "sourceType": "module" 21 | }, 22 | "plugins": [ 23 | "@typescript-eslint", 24 | "jest", 25 | "prettier" 26 | ], 27 | "rules": { 28 | "import/no-unresolved": [ 29 | "error", 30 | { 31 | "ignore": [ 32 | "csv-parse/sync", 33 | "@octokit/openapi-types" 34 | ] 35 | } 36 | ], 37 | "jest/no-disabled-tests": 0 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true 2 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | change: 2 | - head-branch: ['^change/'] 3 | 4 | enhancement: 5 | - head-branch: ['^feature/', '^feat/', '^enhancement/', '^enh/'] 6 | 7 | bug: 8 | - head-branch: ['^fix/', '^bug/'] 9 | 10 | chore: 11 | - head-branch: ['^chore/'] 12 | 13 | tests: 14 | - head-branch: ['^tests/', '^test/'] 15 | - changed-files: 16 | - any-glob-to-any-file: ['__tests__/**/*'] 17 | 18 | documentation: 19 | - head-branch: ['^docs/', '^doc/'] 20 | - changed-files: 21 | - any-glob-to-any-file: '**/*.md' 22 | 23 | dependencies: 24 | - head-branch: ['^deps/', '^dep/', '^dependabot/', '^renovate/'] 25 | - changed-files: 26 | - any-glob-to-any-file: ['package.json', 'yarn.lock'] 27 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | labels: 4 | - skip-changelog 5 | categories: 6 | - title: 🚀 Features 7 | labels: 8 | - enhancement 9 | - title: 💣 Breaking Changes 10 | labels: 11 | 
- change 12 | - title: 🐛 Bug Fixes 13 | labels: 14 | - bug 15 | - title: 📝 Documentation 16 | labels: 17 | - documentation 18 | - title: 🧪 Tests 19 | labels: 20 | - tests 21 | - title: 🔨 Maintenance 22 | labels: 23 | - chore 24 | - title: ⬆️ Dependencies 25 | labels: 26 | - dependencies 27 | - title: Other Changes 28 | labels: 29 | - "*" 30 | -------------------------------------------------------------------------------- /.github/workflows/check-dist.yml: -------------------------------------------------------------------------------- 1 | # `dist/index.js` is a special file in Actions. 2 | # When you reference an action with `uses:` in a workflow, 3 | # `index.js` is the code that will run. 4 | # For our project, we generate this file through a build process from other source files. 5 | # We need to make sure the checked-in `index.js` actually matches what we expect it to be. 6 | name: Check dist/ 7 | 8 | on: 9 | push: 10 | branches: 11 | - main 12 | paths-ignore: 13 | - '**.md' 14 | pull_request: 15 | paths-ignore: 16 | - '**.md' 17 | workflow_dispatch: # yamllint disable-line rule:empty-values 18 | 19 | permissions: 20 | contents: read 21 | 22 | concurrency: 23 | group: ${{ github.ref_name }}-check-dist 24 | cancel-in-progress: true 25 | 26 | defaults: 27 | run: 28 | shell: bash 29 | 30 | jobs: 31 | check-dist: 32 | runs-on: ubuntu-24.04 33 | 34 | steps: 35 | - name: Checkout Repository 36 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 37 | 38 | - name: Set Node.js 39 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 40 | with: 41 | node-version-file: .nvmrc 42 | 43 | - name: Install dependencies 44 | run: | 45 | corepack yarn install 46 | 47 | - name: Rebuild the dist/ directory 48 | run: | 49 | corepack yarn build 50 | 51 | 52 | - name: Compare the expected and actual dist/ directories 53 | run: | 54 | if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then 55 | echo "Detected uncommitted 
changes after build. See status below:" 56 | git diff 57 | exit 1 58 | fi 59 | id: diff 60 | 61 | # If index.js was different than expected, upload the expected version as an artifact 62 | - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 63 | if: ${{ failure() && steps.diff.conclusion == 'failure' }} 64 | with: 65 | name: dist 66 | path: dist/ 67 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | schedule: 11 | - cron: '22 4 * * 1' 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | analyze: 18 | name: Analyze (${{ matrix.language }}) 19 | runs-on: ubuntu-24.04 20 | permissions: 21 | # required for all workflows 22 | security-events: write 23 | 24 | # required to fetch internal or private CodeQL packs 25 | packages: read 26 | 27 | # only required for workflows in private repositories 28 | actions: read 29 | contents: read 30 | 31 | strategy: 32 | fail-fast: false 33 | matrix: 34 | include: 35 | - language: javascript-typescript 36 | build-mode: none 37 | # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' 38 | # Use `c-cpp` to analyze code written in C, C++ or both 39 | # Use 'java-kotlin' to analyze code written in Java, Kotlin or both 40 | # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both 41 | # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, 42 | # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. 
43 | # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how 44 | # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages 45 | steps: 46 | - name: Checkout Repository 47 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 48 | 49 | # Initializes the CodeQL tools for scanning. 50 | - name: Initialize CodeQL 51 | uses: github/codeql-action/init@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19 52 | with: 53 | languages: ${{ matrix.language }} 54 | build-mode: ${{ matrix.build-mode }} 55 | # If you wish to specify custom queries, you can do so here or in a config file. 56 | # By default, queries listed here will override any specified in a config file. 57 | # Prefix the list here with "+" to use these queries and those in the config file. 58 | 59 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 60 | # queries: security-extended,security-and-quality 61 | 62 | # If the analyze step fails for one of the languages you are analyzing with 63 | # "We were unable to automatically build your code", modify the matrix above 64 | # to set the build mode to "manual" for that language. Then modify this step 65 | # to build your code. 66 | # ℹ️ Command-line programs to run using the OS shell. 
67 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 68 | - if: matrix.build-mode == 'manual' 69 | shell: bash 70 | run: | 71 | echo 'If you are using a "manual" build mode for one or more of the' \ 72 | 'languages you are analyzing, replace this with the commands to build' \ 73 | 'your code, for example:' 74 | echo ' make bootstrap' 75 | echo ' make release' 76 | exit 1 77 | 78 | - name: Perform CodeQL Analysis 79 | uses: github/codeql-action/analyze@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19 80 | with: 81 | category: "/language:${{matrix.language}}" 82 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | name: 'Dependency Review' 2 | on: [pull_request] 3 | 4 | permissions: 5 | contents: read 6 | 7 | jobs: 8 | dependency-review: 9 | runs-on: ubuntu-24.04 10 | permissions: 11 | contents: read 12 | pull-requests: write 13 | steps: 14 | - name: Checkout Repository 15 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 16 | 17 | - name: Dependency Review 18 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1 19 | -------------------------------------------------------------------------------- /.github/workflows/draft-release.yml: -------------------------------------------------------------------------------- 1 | name: Create Draft Release Notes 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - "v[0-9]+.[0-9]+.[0-9]+*" 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | draft-release: 15 | name: Create Draft Release 16 | runs-on: ubuntu-24.04 17 | permissions: 18 | contents: write 19 | if: ${{ github.event_name != 'pull_request' }} 20 | steps: 21 | - name: Checkout Repository 22 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 23 | 
24 | - name: Draft Release Notes 25 | uses: lucacome/draft-release@38def8b74645796e9743b53e0f187d4a8915ea3e # v1.2.3 26 | with: 27 | minor-label: "enhancement" 28 | major-label: "change" 29 | publish: ${{ github.ref_type == 'tag' }} 30 | -------------------------------------------------------------------------------- /.github/workflows/labeler.yml: -------------------------------------------------------------------------------- 1 | name: "Pull Request Labeler" 2 | on: 3 | - pull_request_target 4 | 5 | permissions: 6 | contents: read 7 | 8 | jobs: 9 | triage: 10 | permissions: 11 | contents: read 12 | pull-requests: write 13 | runs-on: ubuntu-24.04 14 | steps: 15 | - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 16 | with: 17 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 18 | sync-labels: true 19 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | 11 | concurrency: 12 | group: ${{ github.ref_name }}-lint 13 | cancel-in-progress: true 14 | 15 | permissions: 16 | contents: read 17 | 18 | defaults: 19 | run: 20 | shell: bash 21 | 22 | jobs: 23 | lint: 24 | runs-on: ubuntu-24.04 25 | steps: 26 | - name: Checkout repository 27 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | 29 | - name: Set Node.js 30 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 31 | with: 32 | node-version-file: .nvmrc 33 | 34 | - name: Install dependencies 35 | run: | 36 | corepack yarn install 37 | 38 | - name: Lint the code 39 | run: | 40 | corepack yarn run lint 41 | 42 | - name: Run format check 43 | run: | 44 | corepack yarn run format 45 | 46 | - name: Check for uncommitted changes 47 | run: | 48 | if [ "$(git diff --ignore-space-at-eol | wc -l)" -gt "0" ]; then 
49 | echo "Detected uncommitted changes after linting. See status below:" 50 | git diff 51 | exit 1 52 | fi 53 | 54 | actionlint: 55 | name: Actionlint 56 | runs-on: ubuntu-24.04 57 | steps: 58 | - name: Checkout Repository 59 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 60 | 61 | - uses: reviewdog/action-actionlint@a5524e1c19e62881d79c1f1b9b6f09f16356e281 # v1.65.2 62 | with: 63 | actionlint_flags: -shellcheck "" 64 | 65 | markdown-lint: 66 | name: Markdown Lint 67 | runs-on: ubuntu-24.04 68 | steps: 69 | - name: Checkout Repository 70 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 71 | 72 | - uses: DavidAnson/markdownlint-cli2-action@992badcdf24e3b8eb7e87ff9287fe931bcb00c6e # v20.0.0 73 | with: 74 | config: .markdownlint-cli2.yaml 75 | globs: '**/*.md' 76 | fix: false 77 | -------------------------------------------------------------------------------- /.github/workflows/renovate-build.yml: -------------------------------------------------------------------------------- 1 | name: Run build for renovate PRs 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize] 6 | 7 | permissions: 8 | contents: read 9 | 10 | defaults: 11 | run: 12 | shell: bash 13 | 14 | jobs: 15 | check: 16 | runs-on: ubuntu-24.04 17 | outputs: 18 | javascript: ${{ steps.filter.outputs.javascript }} 19 | steps: 20 | - name: Checkout repository 21 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 22 | 23 | - name: Check for changes 24 | uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 25 | id: filter 26 | with: 27 | filters: | 28 | javascript: 29 | - 'package.json' 30 | - 'yarn.lock' 31 | build: 32 | if: ${{ github.actor == 'renovate[bot]' && needs.check.outputs.javascript == 'true' }} 33 | runs-on: ubuntu-24.04 34 | needs: check 35 | permissions: 36 | contents: write 37 | pull-requests: read 38 | steps: 39 | - name: Checkout repository 40 | uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 41 | with: 42 | ref: ${{ github.head_ref }} 43 | token: ${{ secrets.COMMIT_PAT }} 44 | 45 | - name: Set up Node.js 46 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 47 | with: 48 | node-version-file: .nvmrc 49 | 50 | - name: Install dependencies 51 | run: corepack yarn install 52 | 53 | - name: Run build 54 | run: corepack yarn build 55 | 56 | - name: Commit changes 57 | id: commit 58 | uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0 59 | with: 60 | commit_message: 'Update dist/ after build' 61 | -------------------------------------------------------------------------------- /.github/workflows/scorecards.yml: -------------------------------------------------------------------------------- 1 | name: OpenSSF Scorecard 2 | on: 3 | # For Branch-Protection check. Only the default branch is supported. See 4 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 5 | branch_protection_rule: # yamllint disable-line rule:empty-values 6 | # To guarantee Maintained check is occasionally updated. See 7 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 8 | schedule: 9 | - cron: '20 7 * * 2' 10 | push: 11 | branches: ["main"] 12 | 13 | # Declare default permissions as read only. 14 | permissions: read-all 15 | 16 | jobs: 17 | analysis: 18 | name: Scorecard analysis 19 | runs-on: ubuntu-24.04 20 | permissions: 21 | # Needed to upload the results to code-scanning dashboard. 22 | security-events: write 23 | # Needed to publish results and get a badge (see publish_results below). 
24 | id-token: write 25 | contents: read 26 | actions: read 27 | 28 | steps: 29 | - name: Checkout Repository 30 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 31 | with: 32 | persist-credentials: false 33 | 34 | - name: Run analysis 35 | uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2 36 | with: 37 | results_file: results.sarif 38 | results_format: sarif 39 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 40 | # - you want to enable the Branch-Protection check on a *public* repository, or 41 | # - you are installing Scorecards on a *private* repository 42 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 43 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 44 | 45 | # Public repositories: 46 | # - Publish results to OpenSSF REST API for easy access by consumers 47 | # - Allows the repository to include the Scorecard badge. 48 | # - See https://github.com/ossf/scorecard-action#publishing-results. 49 | # For private repositories: 50 | # - `publish_results` will always be set to `false`, regardless 51 | # of the value entered here. 52 | publish_results: true 53 | 54 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 55 | # format to the repository Actions tab. 56 | - name: Upload artifact 57 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 58 | with: 59 | name: SARIF file 60 | path: results.sarif 61 | retention-days: 5 62 | 63 | # Upload the results to GitHub's code scanning dashboard. 
 64 | - name: Upload to code-scanning 65 | uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19 66 | with: 67 | sarif_file: results.sarif 68 | -------------------------------------------------------------------------------- /.github/workflows/test-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Workflow Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | concurrency: 12 | group: ${{ github.ref_name }}-test-workflow 13 | cancel-in-progress: true 14 | 15 | permissions: 16 | contents: read 17 | 18 | defaults: 19 | run: 20 | shell: bash 21 | 22 | jobs: 23 | ##################################### 24 | # Single platform tests 25 | 26 | test1: 27 | name: Test Update Needed 28 | runs-on: ubuntu-24.04 29 | steps: 30 | - name: Checkout Repository 31 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 32 | 33 | - name: Test Action 34 | id: test 35 | uses: ./ 36 | with: 37 | base-image: library/nginx:1.21.0 38 | image: nginx/nginx-ingress:2.1.0 39 | platforms: linux/amd64 40 | 41 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 42 | run: | 43 | echo "Images: ${{ steps.test.outputs.diff-images }}" 44 | 45 | - name: Check value 46 | run: | 47 | if [[ "${{ steps.test.outputs.needs-updating }}" != "true" ]]; then 48 | exit 1 49 | fi 50 | 51 | test2: 52 | name: Test Update Not Needed 53 | runs-on: macos-14 54 | steps: 55 | - name: Checkout Repository 56 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 57 | 58 | - name: Test Action 59 | id: test 60 | uses: ./ 61 | with: 62 | base-image: nginx:1.21.0 63 | image: library/nginx:1.21.0 64 | platforms: linux/amd64 65 | 66 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 67 | run: | 68 | echo "Images: ${{ steps.test.outputs.diff-images }}" 69 | 70 | - name: Check value 71 | 
run: | 72 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 73 | exit 1 74 | fi 75 | 76 | test3: 77 | name: Test image with only one platform 78 | runs-on: windows-2025 79 | steps: 80 | - name: Checkout Repository 81 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 82 | 83 | - name: Test Action 84 | id: test 85 | uses: ./ 86 | with: 87 | base-image: alpine:3.17.1 88 | image: lucacome/alpine-amd64 89 | 90 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 91 | run: | 92 | echo "Images: ${{ steps.test.outputs.diff-images }}" 93 | 94 | - name: Check value 95 | run: | 96 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 97 | exit 1 98 | fi 99 | 100 | test4: 101 | name: Test Update Needed on ARM64 102 | runs-on: ubuntu-24.04 103 | steps: 104 | - name: Checkout Repository 105 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 106 | 107 | - name: Test Action 108 | id: test 109 | uses: ./ 110 | with: 111 | base-image: nginx:1.21.0 112 | image: nginx/nginx-ingress:2.1.0 113 | platforms: linux/arm64 114 | 115 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 116 | run: | 117 | echo "Images: ${{ steps.test.outputs.diff-images }}" 118 | 119 | - name: Check value 120 | run: | 121 | if [[ "${{ steps.test.outputs.needs-updating }}" != "true" ]]; then 122 | exit 1 123 | fi 124 | 125 | ##################################### 126 | # Test with multiple platforms 127 | 128 | test5: 129 | name: Test Update Needed on multiple platforms with spaces 130 | runs-on: ubuntu-24.04 131 | steps: 132 | - name: Checkout Repository 133 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 134 | 135 | - name: Test Action 136 | id: test 137 | uses: ./ 138 | with: 139 | base-image: nginx:1.21.0 140 | image: nginx/nginx-ingress:2.1.0 141 | platforms: linux/arm64, linux/amd64 142 | 143 | - name: Get Test Output 
(needs-updating=${{ steps.test.outputs.needs-updating }}) 144 | run: | 145 | echo "Images: ${{ steps.test.outputs.diff-images }}" 146 | 147 | - name: Check value 148 | run: | 149 | if [[ "${{ steps.test.outputs.needs-updating }}" != "true" ]]; then 150 | exit 1 151 | fi 152 | 153 | test6: 154 | name: Test Update Not Needed on multiple platforms 155 | runs-on: ubuntu-24.04 156 | steps: 157 | - name: Checkout Repository 158 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 159 | 160 | - name: Test Action 161 | id: test 162 | uses: ./ 163 | with: 164 | base-image: nginx:1.21.0 165 | image: library/nginx:1.21.0 166 | platforms: linux/arm64,linux/amd64 167 | 168 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 169 | run: | 170 | echo "Images: ${{ steps.test.outputs.diff-images }}" 171 | 172 | - name: Check value 173 | run: | 174 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 175 | exit 1 176 | fi 177 | 178 | test7: 179 | name: Test GitHub 180 | runs-on: ubuntu-24.04 181 | steps: 182 | - name: Checkout Repository 183 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 184 | 185 | - name: Test Action 186 | id: test 187 | uses: ./ 188 | with: 189 | base-image: nginx:1.25.2-alpine 190 | image: ghcr.io/nginxinc/nginx-gateway-fabric/nginx:1.0.0 191 | platforms: linux/arm64,linux/amd64 192 | 193 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 194 | run: | 195 | echo "Images: ${{ steps.test.outputs.diff-images }}" 196 | 197 | - name: Check value 198 | run: | 199 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 200 | exit 1 201 | fi 202 | 203 | test8: 204 | name: Test GitHub with Login 205 | runs-on: ubuntu-24.04 206 | steps: 207 | - name: Checkout Repository 208 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 209 | 210 | - name: Login to Docker Registry 211 | uses: 
docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 212 | with: 213 | username: ${{ secrets.DOCKER_USERNAME }} 214 | password: ${{ secrets.DOCKER_PASSWORD }} 215 | 216 | - name: Login to GitHub Container Registry 217 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 218 | with: 219 | registry: ghcr.io 220 | username: ${{ github.repository_owner }} 221 | password: ${{ secrets.GITHUB_TOKEN }} 222 | 223 | - name: Test Action 224 | id: test 225 | uses: ./ 226 | with: 227 | base-image: nginx:1.25.2-alpine 228 | image: ghcr.io/nginxinc/nginx-gateway-fabric/nginx:1.0.0 229 | platforms: linux/arm64,linux/amd64 230 | 231 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 232 | run: | 233 | echo "Images: ${{ steps.test.outputs.diff-images }}" 234 | 235 | - name: Check value 236 | run: | 237 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 238 | exit 1 239 | fi 240 | 241 | test9: 242 | name: Test Update Not Needed on multiple (all) platforms 243 | runs-on: ubuntu-24.04 244 | steps: 245 | - name: Checkout Repository 246 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 247 | 248 | - name: Test Action 249 | id: test 250 | uses: ./ 251 | with: 252 | base-image: nginx:1.21.0 253 | image: library/nginx:1.21.0 254 | platforms: all 255 | 256 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 257 | run: | 258 | echo "Images: ${{ steps.test.outputs.diff-images }}" 259 | 260 | - name: Check value 261 | run: | 262 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 263 | exit 1 264 | fi 265 | 266 | ##################################### 267 | # Test with platform variants 268 | 269 | test10: 270 | name: Test Update Needed (Docker login) on multiple platforms with variants 271 | runs-on: windows-2025 272 | steps: 273 | - name: Checkout Repository 274 | uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 275 | 276 | - name: Login to Docker Registry 277 | uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 278 | with: 279 | username: ${{ secrets.DOCKER_USERNAME }} 280 | password: ${{ secrets.DOCKER_PASSWORD }} 281 | 282 | - name: Test Action 283 | id: test 284 | uses: ./ 285 | with: 286 | base-image: alpine:latest 287 | image: golang:1.14.15-alpine3.13 288 | platforms: linux/arm64/v8,linux/arm/v7,linux/amd64 289 | 290 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 291 | run: | 292 | echo "Images: ${{ steps.test.outputs.diff-images }}" 293 | 294 | - name: Check value 295 | run: | 296 | if [[ "${{ steps.test.outputs.needs-updating }}" != "true" ]]; then 297 | exit 1 298 | fi 299 | 300 | test11: 301 | name: Test Update Not Needed on multiple platforms with variants 302 | runs-on: ubuntu-24.04 303 | steps: 304 | - name: Checkout Repository 305 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 306 | 307 | - name: Test Action 308 | id: test 309 | uses: ./ 310 | with: 311 | base-image: alpine:3.17.2 312 | image: alpine:3.17.2 313 | platforms: | 314 | linux/arm64/v8 315 | linux/arm/v7 316 | linux/amd64 317 | 318 | - name: Get Test Output (needs-updating=${{ steps.test.outputs.needs-updating }}) 319 | run: | 320 | echo "Images: ${{ steps.test.outputs.diff-images }}" 321 | 322 | - name: Check value 323 | run: | 324 | if [[ "${{ steps.test.outputs.needs-updating }}" != "false" ]]; then 325 | exit 1 326 | fi 327 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | push: 7 | branches: 8 | - main 9 | 10 | concurrency: 11 | group: ${{ github.ref_name }}-test 12 | cancel-in-progress: true 13 | 14 | permissions: 15 | 
contents: read 16 | 17 | jobs: 18 | test: 19 | runs-on: ubuntu-24.04 20 | steps: 21 | - name: Checkout Repository 22 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 23 | 24 | - name: Set Node.js 25 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 26 | with: 27 | node-version-file: .nvmrc 28 | 29 | - run: | 30 | corepack yarn install 31 | 32 | - run: | 33 | corepack yarn run test 34 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Optional REPL history 60 | .node_repl_history 61 | 62 | # Output of 'npm pack' 63 | *.tgz 64 | 65 | # Yarn Integrity 
file 66 | .yarn-integrity 67 | 68 | # dotenv environment variables file 69 | .env 70 | .env.test 71 | 72 | # parcel-bundler cache (https://parceljs.org/) 73 | .cache 74 | 75 | # next.js build output 76 | .next 77 | 78 | # nuxt.js build output 79 | .nuxt 80 | 81 | # vuepress build output 82 | .vuepress/dist 83 | 84 | # Serverless directories 85 | .serverless/ 86 | 87 | # FuseBox cache 88 | .fusebox/ 89 | 90 | # DynamoDB Local files 91 | .dynamodb/ 92 | 93 | # OS metadata 94 | .DS_Store 95 | Thumbs.db 96 | 97 | # Ignore built ts files 98 | __tests__/runner/* 99 | lib/**/* 100 | 101 | # yarn 102 | .yarn/cache 103 | .yarn/unplugged 104 | .yarn/build-state.yml 105 | .yarn/install-state.gz 106 | .pnp.* 107 | -------------------------------------------------------------------------------- /.markdownlint-cli2.yaml: -------------------------------------------------------------------------------- 1 | # Rule configuration. 2 | # For rule descriptions and how to fix: https://github.com/DavidAnson/markdownlint/tree/main#rules--aliases 3 | config: 4 | ul-style: 5 | style: dash 6 | no-duplicate-heading: 7 | siblings_only: true 8 | line-length: 9 | line_length: 500 10 | code_blocks: false 11 | tables: false 12 | 13 | # Define glob expressions to ignore 14 | ignores: 15 | - '.github/**' 16 | 17 | # Fix any fixable errors 18 | fix: true 19 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 20.19.2 2 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | exclude: ^$|^(dist|node_modules)/ 4 | repos: 5 | - repo: https://github.com/pre-commit/pre-commit-hooks 6 | rev: v5.0.0 7 | hooks: 8 | - id: trailing-whitespace 
9 | - id: end-of-file-fixer 10 | - id: check-yaml 11 | args: [--allow-multiple-documents] 12 | - id: check-added-large-files 13 | - id: check-merge-conflict 14 | - id: check-case-conflict 15 | - id: check-vcs-permalinks 16 | - id: check-json 17 | - id: pretty-format-json 18 | args: [--autofix, --no-sort-keys, --no-ensure-ascii] 19 | - id: mixed-line-ending 20 | args: [--fix=lf] 21 | - id: no-commit-to-branch 22 | - id: detect-private-key 23 | 24 | - repo: https://github.com/gitleaks/gitleaks 25 | rev: v8.27.0 26 | hooks: 27 | - id: gitleaks 28 | 29 | - repo: https://github.com/DavidAnson/markdownlint-cli2 30 | rev: v0.18.1 31 | hooks: 32 | - id: markdownlint-cli2 33 | 34 | - repo: https://github.com/adrienverge/yamllint.git 35 | rev: v1.37.1 36 | hooks: 37 | - id: yamllint 38 | 39 | - repo: https://github.com/rhysd/actionlint 40 | rev: v1.7.7 41 | hooks: 42 | - id: actionlint 43 | 44 | ci: 45 | autoupdate_schedule: quarterly # We use renovate for more frequent updates and there's no way to disable autoupdate 46 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "always", 3 | "bracketSpacing": false, 4 | "parser": "typescript", 5 | "printWidth": 140, 6 | "semi": false, 7 | "singleQuote": true, 8 | "tabWidth": 2, 9 | "trailingComma": "all", 10 | "useTabs": false 11 | } 12 | -------------------------------------------------------------------------------- /.yamllint.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | ignore-from-file: .gitignore 3 | 4 | extends: default 5 | 6 | rules: 7 | comments: 8 | min-spaces-from-content: 1 9 | 
comments-indentation: enable 10 | document-start: disable 11 | empty-values: enable 12 | line-length: 13 | max: 120 14 | ignore: | 15 | action.yml 16 | .github/ 17 | truthy: 18 | check-keys: false 19 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | logFilters: 2 | - code: YN0013 3 | level: discard 4 | - code: YN0019 5 | level: discard 6 | - code: YN0076 7 | level: discard 8 | 9 | nodeLinker: node-modules 10 | 11 | npmAuthToken: "${NODE_AUTH_TOKEN:-fallback}" 12 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @lucacome 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Luca Comellini 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Docker Image Update Checker Action 2 | 3 | [![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/lucacome/docker-image-update-checker/badge)](https://scorecard.dev/viewer/?uri=github.com/lucacome/docker-image-update-checker) 4 | [![Test](https://github.com/lucacome/docker-image-update-checker/actions/workflows/test.yml/badge.svg)](https://github.com/lucacome/docker-image-update-checker/actions/workflows/test.yml) 5 | [![GitHub release badge](https://badgen.net/github/release/lucacome/docker-image-update-checker/stable)](https://github.com/lucacome/docker-image-update-checker/releases/latest) 6 | [![GitHub license badge](https://badgen.net/github/license/lucacome/docker-image-update-checker)](https://github.com/lucacome/docker-image-update-checker/blob/main/LICENSE) 7 | [![GitHub Workflows badge](https://badgen.net/runkit/lucacome/lucacome-workflow)](https://github.com/search?q=docker-image-update-checker+path%3A.github%2Fworkflows%2F+language%3AYAML&type=Code) 8 | 9 | This action checks if a Docker image needs to be updated based on the base image it uses (e.g. `FROM nginx:1.21.0`). By default it checks for all platforms, but you can specify the platforms to check. 10 | 11 | ## Inputs 12 | 13 | | Name | Type | Description | 14 | |--------------|--------|----------------------------------------------------------------------------| 15 | | `base-image` | String | Base Docker Image. 
This is the image you have as `FROM` in your Dockerfile | 16 | | `image` | String | Your image based on `base-image` | 17 | | `platforms` | String | Platforms to check (default `all`), e.g. `linux/amd64,linux/arm64` | 18 | 19 | ## Output 20 | 21 | | Name | Type | Description | 22 | |------------------|--------|---------------------------------------------------------------------------------------| 23 | | `needs-updating` | String | 'true' or 'false' if the image needs to be updated or not | 24 | | `diff-images` | String | List of images (platforms) that need to be updated | 25 | | `diff-json` | String | JSON output of the images (platforms) that need to be updated with the list of layers | 26 | 27 | ## Runners 28 | 29 | The action works on `ubuntu` and `windows` runners with or without a `docker/login-action` step. Without a login step, it will perform an anonymous pull of the manifests, except for Docker Hub because the Runners already have a token provided by GitHub (I can't find any documentation on this, but the token is there and it works). 30 | 31 | It also works on `macos` runners, but because `docker` is not installed on the runners, you can't use the `docker/login-action`, so you can only use it with public images and anonymous pulls. 32 | 33 | ## Authentication 34 | 35 | To authenticate with a Docker registry, you can use the [`docker/login-action`](https://github.com/docker/login-action) in a step before this action. 
36 | 37 | ## Examples 38 | 39 | - [Minimal](#minimal) 40 | - [Single platform](#single-platform) 41 | - [Multiple platforms](#multiple-platforms) 42 | 43 | ### Minimal 44 | 45 | Check if the image `user/app:latest`, that has `nginx` as a base image, needs to be updated: 46 | 47 | ```yaml 48 | name: Check docker image 49 | 50 | on: 51 | schedule: 52 | - cron: '0 4 * * *' 53 | 54 | jobs: 55 | docker: 56 | runs-on: ubuntu-latest 57 | steps: 58 | - name: Check if update available 59 | id: check 60 | uses: lucacome/docker-image-update-checker@v2.0.0 61 | with: 62 | base-image: nginx:1.21.0 63 | image: user/app:latest 64 | 65 | - name: Check result 66 | run: echo "Needs updating: ${{ steps.check.outputs.needs-updating }}" 67 | 68 | ``` 69 | 70 | ### Single platform 71 | 72 | Check if the image `user/app:latest`, that has `nginx` as a base image, needs to be updated and build and push the image if needed: 73 | 74 | ```yaml 75 | name: Check docker image 76 | 77 | on: 78 | schedule: 79 | - cron: '0 4 * * *' 80 | 81 | jobs: 82 | docker: 83 | runs-on: ubuntu-latest 84 | steps: 85 | - name: Checkout 86 | uses: actions/checkout@v4.2.2 87 | 88 | - name: Check if update available 89 | id: check 90 | uses: lucacome/docker-image-update-checker@v2.0.0 91 | with: 92 | base-image: nginx:1.21.0 93 | image: user/app:latest 94 | platforms: linux/amd64 95 | 96 | - name: Build and push 97 | uses: docker/build-push-action@v6.18.0 98 | with: 99 | context: .
100 | push: true 101 | tags: user/app:latest 102 | if: steps.check.outputs.needs-updating == 'true' 103 | ``` 104 | 105 | ### Multiple platforms 106 | 107 | Check if the image `user/app:latest`, that has `nginx` as a base image, needs to be updated for `linux/amd64` and `linux/arm64`: 108 | 109 | ```yaml 110 | name: Check docker image for multiple platforms 111 | 112 | on: 113 | schedule: 114 | - cron: '0 4 * * *' 115 | 116 | jobs: 117 | check: 118 | runs-on: ubuntu-latest 119 | outputs: 120 | needs-updating: ${{ steps.check.outputs.needs-updating }} 121 | steps: 122 | - name: Login to Docker Registry 123 | uses: docker/login-action@v3.4.0 124 | with: 125 | username: ${{ secrets.DOCKER_USERNAME }} 126 | password: ${{ secrets.DOCKER_PASSWORD }} 127 | 128 | - name: Check if update available 129 | id: check 130 | uses: lucacome/docker-image-update-checker@v2.0.0 131 | with: 132 | base-image: nginx:1.21.0 133 | image: user/app:latest 134 | platforms: linux/amd64,linux/arm64 # Use 'all' to check all platforms 135 | 136 | build: 137 | needs: check 138 | runs-on: ubuntu-latest 139 | if: needs.check.outputs.needs-updating == 'true' 140 | steps: 141 | - name: Checkout 142 | uses: actions/checkout@v4.2.2 143 | 144 | - name: Setup QEMU 145 | uses: docker/setup-qemu-action@v3.6.0 146 | with: 147 | platforms: arm64 148 | 149 | - name: Docker Buildx 150 | uses: docker/setup-buildx-action@v3.10.0 151 | 152 | - name: Build and push 153 | uses: docker/build-push-action@v6.18.0 154 | with: 155 | context: . 156 | push: true 157 | tags: user/app:latest 158 | platforms: linux/amd64,linux/arm64 159 | ``` 160 | 161 | > **Note** 162 | > 163 | > The `platforms` input is optional and defaults to `all`. 164 | 165 | ## Debugging 166 | 167 | If something is not working as expected, you can enable debug logging to get more information (a lot more information).
168 | You can re-run the action with the `Enable debug logging` checkbox checked for a single run or set the `ACTIONS_STEP_DEBUG` secret to `true` in the repository's secrets. 169 | For more information on debugging actions, see [Enabling debug logging](https://docs.github.com/en/actions/managing-workflow-runs/enabling-debug-logging). 170 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | It is strongly recommended to always use the most recent version of the software. 6 | 7 | ## Reporting a Vulnerability 8 | 9 | To report a vulnerability, please use 10 | -------------------------------------------------------------------------------- /__tests__/docker-hub.test.ts: -------------------------------------------------------------------------------- 1 | import {DockerHub} from '../src/docker-hub' 2 | import {getDiffs} from '../src/image-utils' 3 | 4 | describe('DockerHub', () => { 5 | const dockerHub = new DockerHub() 6 | 7 | test('getImageInfo', async () => { 8 | const repository = 'library/nginx' 9 | const tag = '1.23.4' 10 | const nginxImageInfo = await dockerHub.getImageInfo({repository, tag}) 11 | 12 | expect(nginxImageInfo).not.toBeNull() 13 | expect(nginxImageInfo.has('linux|amd64|')).toBe(true) 14 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('architecture', 'amd64') 15 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('digest') 16 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('layers') 17 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('os', 'linux') 18 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('variant', undefined) 19 | 20 | const repository2 = 'nginx/nginx-ingress' 21 | const tag2 = '3.1.0' 22 | const nginxIngressImageInfo = await dockerHub.getImageInfo({repository: repository2, tag: tag2}) 23 | 24 | 
expect(nginxIngressImageInfo).not.toBeNull() 25 | expect(nginxIngressImageInfo.has('linux|amd64|')).toBe(true) 26 | expect(nginxIngressImageInfo.get('linux|amd64|')).toHaveProperty('architecture', 'amd64') 27 | expect(nginxIngressImageInfo.get('linux|amd64|')).toHaveProperty( 28 | 'digest', 29 | 'sha256:01f441a40d8782fdf82ced4b5efa58afdc4be5753217f12d15b9ad53ed5c31af', 30 | ) 31 | expect(nginxIngressImageInfo.get('linux|amd64|')).toHaveProperty('layers') 32 | expect(nginxIngressImageInfo.get('linux|amd64|')).toHaveProperty('os', 'linux') 33 | expect(nginxIngressImageInfo.get('linux|amd64|')).toHaveProperty('variant', undefined) 34 | 35 | const diffs = getDiffs(['linux/amd64'], nginxImageInfo, nginxIngressImageInfo) 36 | 37 | expect(diffs).not.toBeNull() 38 | expect(diffs.length).toBe(1) 39 | 40 | expect(diffs).toEqual([ 41 | { 42 | architecture: 'amd64', 43 | digest: 'sha256:01f441a40d8782fdf82ced4b5efa58afdc4be5753217f12d15b9ad53ed5c31af', 44 | layers: [ 45 | 'sha256:f1f26f5702560b7e591bef5c4d840f76a232bf13fd5aefc4e22077a1ae4440c7', 46 | 'sha256:84181e80d10e844350789d3324e848cf728df4f3d0f6c978789dd489f493934a', 47 | 'sha256:1ff0f94a80076ab49af75159e23f062a30a75d333a8e9c021bf39669230afcfe', 48 | 'sha256:d776269cad101c9f8e33e2baa0a05993ed0786604d86ea525f62d5d7ae7b9540', 49 | 'sha256:e9427fcfa8642f8ddf5106f742a75eca0dbac676cf8145598623d04fa45dd74e', 50 | 'sha256:d4ceccbfc2696101c94fbf2149036e4ff815e4723e518721ff85105ce5aa8afc', 51 | 'sha256:20d303c988056055cd3278497c39b934757bee14bd1ef8f830f5ea04e2db1fcd', 52 | 'sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1', 53 | 'sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1', 54 | 'sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1', 55 | 'sha256:88b0e7f3304bce7254389c8242d11cc3ce84027874ad4dcf55c7b85c32743621', 56 | 'sha256:d94cdb39e3428e3745fe1b142cff1146807d46786c234b53722d2592286ede15', 57 | ], 58 | os: 'linux', 59 | variant: undefined, 60 | }, 61 | ]) 62 
| }) 63 | 64 | test('getToken', async () => { 65 | const repository = 'library/nginx' 66 | const token = await dockerHub.getToken(repository) 67 | 68 | expect(token).toMatch(/^[A-Za-z0-9\-_]+\.[A-Za-z0-9\-_]+\.[A-Za-z0-9\-_]+$/) 69 | }) 70 | }) 71 | -------------------------------------------------------------------------------- /__tests__/github.test.ts: -------------------------------------------------------------------------------- 1 | import {GitHubContainerRegistry} from '../src/github' 2 | import {getDiffs} from '../src/image-utils' 3 | 4 | describe('GitHub', () => { 5 | const gitHubRegistry = new GitHubContainerRegistry() 6 | 7 | test('getImageInfo', async () => { 8 | const repository = 'ghcr.io/nginxinc/nginx-gateway-fabric/nginx' 9 | const tag = '1.1.0' 10 | const nginxImageInfo = await gitHubRegistry.getImageInfo({repository, tag}) 11 | 12 | expect(nginxImageInfo).not.toBeNull() 13 | expect(nginxImageInfo.has('linux|amd64|')).toBe(true) 14 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('architecture', 'amd64') 15 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('digest') 16 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('layers') 17 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('os', 'linux') 18 | expect(nginxImageInfo.get('linux|amd64|')).toHaveProperty('variant', undefined) 19 | 20 | const repository2 = 'ghcr.io/nginxinc/nginx-gateway-fabric/nginx' 21 | const tag2 = '1.3.0' 22 | const newNginxImageInfo = await gitHubRegistry.getImageInfo({repository: repository2, tag: tag2}) 23 | 24 | expect(newNginxImageInfo).not.toBeNull() 25 | expect(newNginxImageInfo.has('linux|amd64|')).toBe(true) 26 | expect(newNginxImageInfo.get('linux|amd64|')).toHaveProperty('architecture', 'amd64') 27 | expect(newNginxImageInfo.get('linux|amd64|')).toHaveProperty( 28 | 'digest', 29 | 'sha256:b54460b2a2c70743fc2d43e2ad78376cd0abaecba4fcd48dfe8c034e19c15b23', 30 | ) 31 | 
expect(newNginxImageInfo.get('linux|amd64|')).toHaveProperty('layers') 32 | expect(newNginxImageInfo.get('linux|amd64|')).toHaveProperty('os', 'linux') 33 | expect(newNginxImageInfo.get('linux|amd64|')).toHaveProperty('variant', undefined) 34 | 35 | const diffs = getDiffs(['linux/amd64'], nginxImageInfo, newNginxImageInfo) 36 | 37 | expect(diffs).not.toBeNull() 38 | expect(diffs.length).toBe(1) 39 | 40 | expect(diffs).toEqual([ 41 | { 42 | architecture: 'amd64', 43 | digest: 'sha256:b54460b2a2c70743fc2d43e2ad78376cd0abaecba4fcd48dfe8c034e19c15b23', 44 | layers: [ 45 | 'sha256:4abcf20661432fb2d719aaf90656f55c287f8ca915dc1c92ec14ff61e67fbaf8', 46 | 'sha256:b1e69ebc7f924a03f4e1d3906db5423920d8b40d8f315db72445e6a7041c6237', 47 | 'sha256:628158b45bceaf19d9e86fbfb08c925d75e1e2ab888cd9b97b7c8a8181232be4', 48 | 'sha256:346e52e95fa0a52e495913d9d99e4766d1164631ddbf3a79b1b7860c44a4582a', 49 | 'sha256:8c57fb1cd6448c27acb916942fed2522079e5256bc92466c1351f1b6d5f201e9', 50 | 'sha256:dc3800d1d0f27990204f4c7f60ef0a8fdbf41a3199d38467475aba551823ccd4', 51 | 'sha256:e3227d68030df2f1c6db2654cf30f1e42d5700dc7b5c73eb1a4585bbd588affa', 52 | 'sha256:8c50e1264d11b6f97944fb962f743063fbe75e06535780bb4919d491cf9ccde4', 53 | 'sha256:e73705b81978bae07455da98270fcf00d50303c3ced48c2d0e0412c041058906', 54 | 'sha256:08f3247bafb95323f81bd617dc9fe210b361b00e49dda69d6ce419a525423db8', 55 | 'sha256:7c33792ff5a2bb827715a58b43fd2383cc53a7c3404d7570a0d06f4502f239a4', 56 | 'sha256:e286804e58a8ad6b276e66d47563a8986ea8e95749c511980dbc5f116d90fed3', 57 | 'sha256:ba8ea7cf37cb0661494adeddd2c0c9c3bdd7b0ddc866dfa61cc48f269d6276d1', 58 | 'sha256:e6e0f1ed3be3d015d254554788a89638102f4d854dd9814204ad648269c672b6', 59 | 'sha256:caf1493c2109a431cd01ebb80dc57e53c3f39dddebc376298648b0763f44a704', 60 | ], 61 | os: 'linux', 62 | variant: undefined, 63 | }, 64 | ]) 65 | }) 66 | 67 | test('getImageInfo with variant', async () => { 68 | const repository = 'ghcr.io/nginxinc/nginx-prometheus-exporter' 69 | const tag = '1.3.0' 70 
| const nginxImageInfo = await gitHubRegistry.getImageInfo({repository, tag}) 71 | 72 | expect(nginxImageInfo).not.toBeNull() 73 | expect(nginxImageInfo.has('linux|arm|v7')).toBe(true) 74 | expect(nginxImageInfo.get('linux|arm|v7')).toHaveProperty('architecture', 'arm') 75 | expect(nginxImageInfo.get('linux|arm|v7')).toHaveProperty( 76 | 'digest', 77 | 'sha256:7f170c221a19738fb70c98d0920fcebf4581145a34179b1c935be956c4213229', 78 | ) 79 | expect(nginxImageInfo.get('linux|arm|v7')).toHaveProperty('layers') 80 | expect(nginxImageInfo.get('linux|arm|v7')).toHaveProperty('os', 'linux') 81 | expect(nginxImageInfo.get('linux|arm|v7')).toHaveProperty('variant', 'v7') 82 | }) 83 | }) 84 | -------------------------------------------------------------------------------- /__tests__/image-utils.test.ts: -------------------------------------------------------------------------------- 1 | import {findDiffImages, parseImageInput, ImageInput, getDiffs} from '../src/image-utils' 2 | import {ImageInfo, ImageMap} from '../src/registry' 3 | 4 | describe('findDiffImages', () => { 5 | test('should return diff images when layers do not match', () => { 6 | const set1: ImageMap = new Map([ 7 | [ 8 | 'linux/amd64', 9 | { 10 | os: 'linux', 11 | architecture: 'amd64', 12 | digest: 'digest1', 13 | layers: ['layer1', 'layer10', 'layer20', 'layer30'], 14 | }, 15 | ], 16 | [ 17 | 'linux/arm64/v8', 18 | { 19 | os: 'linux', 20 | architecture: 'arm64', 21 | digest: 'digest2', 22 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 23 | variant: 'v8', 24 | }, 25 | ], 26 | ]) 27 | 28 | const set2: ImageMap = new Map([ 29 | [ 30 | 'linux/amd64', 31 | { 32 | os: 'linux', 33 | architecture: 'amd64', 34 | digest: 'digest2', 35 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 36 | }, 37 | ], 38 | ]) 39 | 40 | const result = findDiffImages(set1, set2) 41 | expect(result).toEqual([ 42 | { 43 | os: 'linux', 44 | architecture: 'amd64', 45 | digest: 'digest2', 46 | layers: ['layer1', 'layer2', 'layer3', 
'layer4'], 47 | }, 48 | ]) 49 | }) 50 | 51 | test('should not return diff images when all layers from obj1 are in obj2', () => { 52 | const set1: ImageMap = new Map([ 53 | [ 54 | 'linux/arm64/v8', 55 | { 56 | os: 'linux', 57 | architecture: 'arm64', 58 | digest: 'digest1', 59 | layers: ['layer1', 'layer2', 'layer3'], 60 | variant: 'v8', 61 | }, 62 | ], 63 | ]) 64 | 65 | const set2: ImageMap = new Map([ 66 | [ 67 | 'linux/arm64/v8', 68 | { 69 | os: 'linux', 70 | architecture: 'arm64', 71 | digest: 'digest2', 72 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 73 | variant: 'v8', 74 | }, 75 | ], 76 | ]) 77 | 78 | const result = findDiffImages(set1, set2) 79 | expect(result).toEqual([]) 80 | }) 81 | }) 82 | 83 | describe('parseImageInput', () => { 84 | test('should parse image string with default registry and tag', () => { 85 | const imageString = 'nginx' 86 | const expectedResult: ImageInput = { 87 | registry: 'docker.io', 88 | image: 'library/nginx', 89 | tag: 'latest', 90 | } 91 | 92 | const result = parseImageInput(imageString) 93 | expect(result).toEqual(expectedResult) 94 | }) 95 | 96 | test('should parse image string with custom registry and default tag', () => { 97 | const imageString = 'myregistry.example.com/nginx' 98 | const expectedResult: ImageInput = { 99 | registry: 'myregistry.example.com', 100 | image: 'nginx', 101 | tag: 'latest', 102 | } 103 | 104 | const result = parseImageInput(imageString) 105 | expect(result).toEqual(expectedResult) 106 | }) 107 | 108 | test('should parse image string with custom registry, organization and default tag', () => { 109 | const imageString = 'myregistry.example.com/myorg/nginx' 110 | const expectedResult: ImageInput = { 111 | registry: 'myregistry.example.com', 112 | image: 'myorg/nginx', 113 | tag: 'latest', 114 | } 115 | 116 | const result = parseImageInput(imageString) 117 | expect(result).toEqual(expectedResult) 118 | }) 119 | 120 | test('should parse image string with custom registry, organization, and tag', 
() => { 121 | const imageString = 'myregistry.example.com/myorg/nginx:1.0.0' 122 | const expectedResult: ImageInput = { 123 | registry: 'myregistry.example.com', 124 | image: 'myorg/nginx', 125 | tag: '1.0.0', 126 | } 127 | 128 | const result = parseImageInput(imageString) 129 | expect(result).toEqual(expectedResult) 130 | }) 131 | 132 | test('should parse image string with default registry, organization, and custom tag', () => { 133 | const imageString = 'myorg/nginx:1.0.0' 134 | const expectedResult: ImageInput = { 135 | registry: 'docker.io', 136 | image: 'myorg/nginx', 137 | tag: '1.0.0', 138 | } 139 | 140 | const result = parseImageInput(imageString) 141 | expect(result).toEqual(expectedResult) 142 | }) 143 | 144 | test('should parse image string with default registry and custom tag', () => { 145 | const imageString = 'nginx:1.0.0' 146 | const expectedResult: ImageInput = { 147 | registry: 'docker.io', 148 | image: 'library/nginx', 149 | tag: '1.0.0', 150 | } 151 | 152 | const result = parseImageInput(imageString) 153 | expect(result).toEqual(expectedResult) 154 | }) 155 | }) 156 | 157 | const image1: ImageMap = new Map([ 158 | [ 159 | 'linux/amd64', 160 | { 161 | os: 'linux', 162 | architecture: 'amd64', 163 | digest: '123', 164 | layers: ['layer1', 'layer20', 'layer30'], 165 | }, 166 | ], 167 | [ 168 | 'linux/arm64/v8', 169 | { 170 | os: 'linux', 171 | architecture: 'arm64', 172 | variant: 'v8', 173 | digest: '456', 174 | layers: ['layer1', 'layer20', 'layer30'], 175 | }, 176 | ], 177 | ]) 178 | 179 | const image2: ImageMap = new Map([ 180 | [ 181 | 'linux/amd64', 182 | { 183 | os: 'linux', 184 | architecture: 'amd64', 185 | digest: '789', 186 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 187 | }, 188 | ], 189 | [ 190 | 'linux/arm64/v8', 191 | { 192 | os: 'linux', 193 | architecture: 'arm64', 194 | variant: 'v8', 195 | digest: '101112', 196 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 197 | }, 198 | ], 199 | ]) 200 | 201 | describe('getDiffs', 
() => { 202 | test('should return all diff images when platforms is "all"', () => { 203 | const expectedResult: ImageInfo[] = [ 204 | { 205 | os: 'linux', 206 | architecture: 'amd64', 207 | digest: '789', 208 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 209 | }, 210 | { 211 | os: 'linux', 212 | architecture: 'arm64', 213 | variant: 'v8', 214 | digest: '101112', 215 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 216 | }, 217 | ] 218 | 219 | const result = getDiffs(['all'], image1, image2) 220 | expect(result).toEqual(expectedResult) 221 | }) 222 | 223 | test('should return diff images for specified platform', () => { 224 | const expectedResult: ImageInfo[] = [ 225 | { 226 | os: 'linux', 227 | architecture: 'amd64', 228 | digest: '789', 229 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 230 | }, 231 | ] 232 | 233 | const result = getDiffs(['linux/amd64'], image1, image2) 234 | expect(result).toEqual(expectedResult) 235 | }) 236 | 237 | test('should return diff images for multiple specified platforms', () => { 238 | const expectedResult: ImageInfo[] = [ 239 | { 240 | os: 'linux', 241 | architecture: 'amd64', 242 | digest: '789', 243 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 244 | }, 245 | { 246 | os: 'linux', 247 | architecture: 'arm64', 248 | variant: 'v8', 249 | digest: '101112', 250 | layers: ['layer1', 'layer2', 'layer3', 'layer4'], 251 | }, 252 | ] 253 | 254 | const result = getDiffs(['linux/amd64', 'linux/arm64'], image1, image2) 255 | expect(result).toEqual(expectedResult) 256 | }) 257 | 258 | test('should return empty array when there are no diff images for specified platform', () => { 259 | const expectedResult: ImageInfo[] = [] 260 | 261 | const result = getDiffs(['windows/amd64'], image1, image2) 262 | expect(result).toEqual(expectedResult) 263 | }) 264 | }) 265 | -------------------------------------------------------------------------------- /action.yml: 
-------------------------------------------------------------------------------- 1 | name: "Docker Image Update Checker" 2 | description: "GitHub Action to check if the base Docker image that your image is based on was updated and your image needs to be updated" 3 | author: Luca Comellini 4 | 5 | inputs: 6 | base-image: 7 | description: "Docker base image" 8 | required: true 9 | image: 10 | description: "Docker image" 11 | required: true 12 | platforms: 13 | description: "Platforms to check" 14 | required: false 15 | default: "all" 16 | outputs: 17 | needs-updating: 18 | description: "Whether the image needs updating" 19 | diff-images: 20 | description: "The diff of the images" 21 | 22 | runs: 23 | using: node20 24 | main: "dist/index.js" 25 | 26 | branding: 27 | icon: anchor 28 | color: blue 29 | -------------------------------------------------------------------------------- /dist/sourcemap-register.js: -------------------------------------------------------------------------------- 1 | (()=>{var e={296:e=>{var r=Object.prototype.toString;var n=typeof Buffer!=="undefined"&&typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new 
Buffer(e)}e.exports=bufferFrom},599:(e,r,n)=>{e=n.nmd(e);var t=n(927).SourceMapConsumer;var o=n(928);var i;try{i=n(896);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(296);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var d=[];var h=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function globalProcessVersion(){if(typeof process==="object"&&process!==null){return process.version}else{return""}}function globalProcessStderr(){if(typeof process==="object"&&process!==null){return process.stderr}}function globalProcessExit(e){if(typeof process==="object"&&process!==null&&typeof process.exit==="function"){return process.exit(e)}}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var 
n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(globalProcessVersion())?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);var n=globalProcessStderr();if(n&&n._handle&&n._handle.setBlocking){n._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);globalProcessExit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=d.slice(0);var 
_=h.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){d.length=0}d.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){h.length=0}h.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){d.length=0;h.length=0;d=S.slice(0);h=_.slice(0);v=handlerExec(h);m=handlerExec(d)}},517:(e,r,n)=>{var t=n(297);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(158);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var 
c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},24:(e,r,n)=>{var t=n(297);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.P=MappingList},299:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(297);var i=n(197);var a=n(517).C;var u=n(818);var s=n(299).g;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return 
n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var 
r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return 
o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){h.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(h,o.compareByOriginalPositions);this.__originalMappings=h};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function 
SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(818);var o=n(297);var i=n(517).C;var a=n(24).P;function 
SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function 
SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. 
If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var d=0,h=g.length;d0){if(!o.compareByGeneratedPositionsInflated(c,g[d-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.x=SourceMapGenerator},565:(e,r,n)=>{var t;var o=n(163).x;var i=n(297);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function 
SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return 
urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},927:(e,r,n)=>{n(163).x;r.SourceMapConsumer=n(684).SourceMapConsumer;n(565)},896:e=>{"use strict";e.exports=require("fs")},928:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};__webpack_require__(599).install();module.exports=n})(); -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | clearMocks: true, 3 | testEnvironment: 'node', 4 
| moduleFileExtensions: ['js', 'ts'], 5 | testMatch: ['**/*.test.ts'], 6 | transform: { 7 | '^.+\\.ts$': 'ts-jest' 8 | }, 9 | verbose: true 10 | } 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": "Luca Comellini", 3 | "dependencies": { 4 | "@actions/core": "^1.11.1", 5 | "@docker/actions-toolkit": "^0.62.1", 6 | "axios": "^1.9.0" 7 | }, 8 | "description": "GitHub Action to check if a Docker image needs to be updated", 9 | "devDependencies": { 10 | "@jest/globals": "^29.7.0", 11 | "@types/jest": "^29.5.14", 12 | "@types/node": "^20.17.51", 13 | "@typescript-eslint/eslint-plugin": "^8.33.0", 14 | "@typescript-eslint/parser": "^8.33.0", 15 | "@vercel/ncc": "^0.38.3", 16 | "eslint": "^8.57.1", 17 | "eslint-config-prettier": "^10.1.5", 18 | "eslint-plugin-import": "^2.31.0", 19 | "eslint-plugin-jest": "^28.11.1", 20 | "eslint-plugin-prettier": "^5.4.0", 21 | "jest": "^29.7.0", 22 | "js-yaml": "^4.1.0", 23 | "prettier": "^3.5.3", 24 | "ts-jest": "^29.3.4", 25 | "typescript": "^5.8.3" 26 | }, 27 | "keywords": [ 28 | "actions", 29 | "docker", 30 | "update" 31 | ], 32 | "license": "MIT", 33 | "main": "src/main.ts", 34 | "name": "lucacome-docker-image-update-checker", 35 | "packageManager": "yarn@4.9.2", 36 | "repository": { 37 | "type": "git", 38 | "url": "git+https://github.com/lucacome/docker-image-update-checker.git" 39 | }, 40 | "scripts": { 41 | "all": "yarn run format && yarn run build && yarn run test", 42 | "build": "ncc build --source-map --minify --license licenses.txt", 43 | "eslint": "eslint --max-warnings=0 .", 44 | "eslint:fix": "eslint --fix .", 45 | "format": "yarn run prettier:fix && yarn run eslint:fix", 46 | "lint": "yarn run prettier && yarn run eslint", 47 | "prettier": "prettier --check \"./**/*.ts\"", 48 | "prettier:fix": "prettier --write \"./**/*.ts\"", 49 | "test": "jest" 50 | } 51 | } 52 | 
-------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "github>nginxinc/k8s-common", 5 | "schedule:weekly" 6 | ], 7 | "rangeStrategy": "bump", 8 | "packageRules": [ 9 | { 10 | "matchUpdateTypes": [ 11 | "major" 12 | ], 13 | "enabled": false, 14 | "matchPackageNames": [ 15 | "/^node$/", 16 | "/@types/node/" 17 | ] 18 | } 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /src/auth.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {Docker} from '@docker/actions-toolkit/lib/docker/docker' 3 | import {spawnSync} from 'child_process' 4 | 5 | export interface DockerAuth { 6 | username: string 7 | password: string 8 | } 9 | 10 | export function getRegistryAuth(registry: string): DockerAuth | undefined { 11 | const config = Docker.configFile() 12 | if (!config) { 13 | core.warning('No Docker config found') 14 | } 15 | 16 | const auths = config?.auths || {} 17 | const registryAuth = auths[registry] 18 | 19 | if (!registryAuth) { 20 | core.warning(`No credentials found for ${registry}`) 21 | return undefined 22 | } 23 | 24 | if (!registryAuth.username || !registryAuth.password) { 25 | core.debug(`No username or password found for ${registry}, trying auth field`) 26 | if (registryAuth.auth) { 27 | const [user, pass] = Buffer.from(registryAuth.auth, 'base64').toString('utf8').split(':') 28 | return {username: user, password: pass} 29 | } 30 | if (config?.credsStore) { 31 | core.debug('No auth field, using credential store to get credentials') 32 | const child = spawnSync(`docker-credential-${config.credsStore}`, ['get'], { 33 | input: `\n${registry}\n`, 34 | encoding: 'utf-8', 35 | }) 36 | 37 | if (child.error) { 38 | 
console.error('Error executing command:', child.error) 39 | } 40 | 41 | const creds = child.stdout 42 | if (creds) { 43 | const {Username, Secret} = JSON.parse(creds) 44 | return {username: Username, password: Secret} 45 | } 46 | } 47 | core.debug('No credentials found, returning undefined') 48 | return undefined 49 | } 50 | 51 | return {username: registryAuth.username, password: registryAuth.password} 52 | } 53 | -------------------------------------------------------------------------------- /src/docker-hub.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios' 2 | import {ContainerRegistry} from './registry' 3 | import {DockerAuth, getRegistryAuth} from './auth' 4 | import * as core from '@actions/core' 5 | 6 | export class DockerHub extends ContainerRegistry { 7 | constructor() { 8 | super('index.docker.io/v2/') 9 | } 10 | async getToken(repository: string): Promise { 11 | const auth = this.getCredentials() 12 | if (!auth) { 13 | core.info('No credentials found for Docker, using anonymous pull') 14 | } 15 | const response = await axios.get('https://auth.docker.io/token', { 16 | params: { 17 | service: 'registry.docker.io', 18 | scope: `repository:${repository}:pull`, 19 | }, 20 | auth, 21 | }) 22 | return response.data.token 23 | } 24 | 25 | getCredentials(): DockerAuth | undefined { 26 | return getRegistryAuth('https://index.docker.io/v1/') 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/gcr.ts: -------------------------------------------------------------------------------- 1 | import {ContainerRegistry} from './registry' 2 | import axios from 'axios' 3 | import {DockerAuth, getRegistryAuth} from './auth' 4 | 5 | export class GoogleContainerRegistry extends ContainerRegistry { 6 | constructor() { 7 | super('gcr.io/v2/') 8 | } 9 | 10 | async getToken(repository: string): Promise { 11 | const auth = this.getCredentials() 12 | const response = 
await axios.get('https://gcr.io/token', { 13 | params: { 14 | scope: `repository:${repository}:pull`, 15 | }, 16 | auth, 17 | }) 18 | return response.data.token 19 | } 20 | 21 | getCredentials(): DockerAuth | undefined { 22 | return getRegistryAuth('https://gcr.io/v2/') 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/github.ts: -------------------------------------------------------------------------------- 1 | import {ContainerRegistry} from './registry' 2 | import axios from 'axios' 3 | import {DockerAuth, getRegistryAuth} from './auth' 4 | import * as core from '@actions/core' 5 | 6 | export class GitHubContainerRegistry extends ContainerRegistry { 7 | constructor() { 8 | super('ghcr.io/v2/') 9 | } 10 | 11 | async getToken(repository: string): Promise { 12 | const auth = this.getCredentials() 13 | if (!auth) { 14 | core.info('No credentials found for GitHub, using anonymous pull') 15 | } 16 | const response = await axios.get(`https://ghcr.io/token`, { 17 | params: { 18 | scope: `repository:${repository}:pull`, 19 | }, 20 | auth, 21 | }) 22 | // check if the call was successful 23 | if (response.status !== 200) { 24 | core.info(response.data) 25 | throw new Error(`Failed to get token from GitHub Container Registry: ${response.status}`) 26 | } 27 | return response.data.token 28 | } 29 | 30 | getCredentials(): DockerAuth | undefined { 31 | return getRegistryAuth('ghcr.io') 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/image-utils.ts: -------------------------------------------------------------------------------- 1 | import {ImageInfo, ImageMap} from './registry' 2 | import * as core from '@actions/core' 3 | 4 | export type ImageInput = { 5 | registry: string 6 | image: string 7 | tag: string 8 | } 9 | 10 | export function findDiffImages(set1: ImageMap, set2: ImageMap): ImageInfo[] { 11 | const diffImages: ImageInfo[] = [] 12 | 13 | for (const [key, 
obj1] of set1) { 14 | const obj2 = set2.get(key) 15 | if (obj2) { 16 | const obj1LayersInObj2 = obj1.layers.every((layer) => obj2.layers.includes(layer)) 17 | if (!obj1LayersInObj2) { 18 | diffImages.push(obj2) 19 | } 20 | } 21 | } 22 | 23 | if (core.isDebug()) { 24 | core.startGroup('Diff Images') 25 | core.info(`Diff Images: ${(JSON.stringify(diffImages), null, 2)}`) 26 | core.endGroup() 27 | } 28 | 29 | return diffImages 30 | } 31 | 32 | export function parseImageInput(imageString: string): ImageInput { 33 | const defaultRegistry = 'docker.io' 34 | 35 | const [registryAndImage, tag] = imageString.split(':') 36 | const parts = registryAndImage.split('/') 37 | const registry = (parts.length > 2 || parts[0].includes('.') ? parts.shift() : defaultRegistry) ?? defaultRegistry 38 | 39 | const isOfficialImage = registry === defaultRegistry && parts.length === 1 40 | const image = isOfficialImage ? `library/${parts.join('/')}` : parts.join('/') 41 | 42 | return { 43 | registry, 44 | image, 45 | tag: tag ?? 'latest', 46 | } 47 | } 48 | 49 | export function getDiffs(platforms: string[], image1: ImageMap, image2: ImageMap): ImageInfo[] { 50 | const diffImages = findDiffImages(image1, image2) 51 | 52 | if (platforms.length === 1 && platforms[0] === 'all') { 53 | return diffImages 54 | } else { 55 | return diffImages.filter((diffImage) => { 56 | return platforms.some((platform) => { 57 | platform = platform.includes('arm64') && !platform.includes('v8') ? platform + '/v8' : platform 58 | 59 | return platform.includes(`${diffImage.os}/${diffImage.architecture}${diffImage.variant ? 
`/${diffImage.variant}` : ''}`) 60 | }) 61 | }) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {ContainerRegistry} from './registry' 3 | import {DockerHub} from './docker-hub' 4 | import {GitHubContainerRegistry} from './github' 5 | import {getDiffs, parseImageInput} from './image-utils' 6 | import {Util} from '@docker/actions-toolkit/lib/util' 7 | 8 | function getRegistryInstance(registry: string): ContainerRegistry { 9 | switch (registry.toLowerCase()) { 10 | case 'docker.io': 11 | return new DockerHub() 12 | case 'ghcr.io': 13 | return new GitHubContainerRegistry() 14 | default: 15 | throw new Error(`Invalid registry specified: ${registry}`) 16 | } 17 | } 18 | 19 | async function run(): Promise { 20 | try { 21 | const baseInput = core.getInput('base-image') 22 | const imageInput = core.getInput('image') 23 | const platformsInput = Util.getInputList('platforms') 24 | 25 | core.startGroup('Inputs') 26 | core.info(`Base image: ${baseInput}`) 27 | core.info(`Image: ${imageInput}`) 28 | core.info(`Platforms: ${platformsInput}`) 29 | core.endGroup() 30 | 31 | const base = parseImageInput(baseInput) 32 | const image = parseImageInput(imageInput) 33 | 34 | const registryBase = getRegistryInstance(base.registry) 35 | const registryImage = getRegistryInstance(image.registry) 36 | 37 | const image1 = await registryBase.getImageInfo({ 38 | repository: base.image, 39 | tag: base.tag, 40 | }) 41 | const image2 = await registryImage.getImageInfo({ 42 | repository: image.image, 43 | tag: image.tag, 44 | }) 45 | 46 | const diffs = getDiffs(platformsInput, image1, image2) 47 | core.startGroup(`Found ${diffs.length} differences`) 48 | core.debug(`Differences: ${JSON.stringify(diffs, null, 2)}`) 49 | 50 | const diffPlatforms: string[] = [] 51 | diffs.forEach((diff) => { 52 | const str 
= `${diff.os}/${diff.architecture}${diff.variant ? `/${diff.variant}` : ''}` 53 | core.info(`- ${str}`) 54 | diffPlatforms.push(str) 55 | }) 56 | core.setOutput('diff-images', diffPlatforms.toString()) 57 | core.endGroup() 58 | 59 | core.setOutput('diff-json', JSON.stringify(diffs)) 60 | core.setOutput('needs-updating', diffs.length > 0) 61 | } catch (error) { 62 | core.setFailed(`Action failed with error: ${error}`) 63 | } 64 | } 65 | 66 | run() 67 | -------------------------------------------------------------------------------- /src/registry.ts: -------------------------------------------------------------------------------- 1 | import axios, {AxiosError} from 'axios' 2 | import * as core from '@actions/core' 3 | import {DockerAuth} from './auth' 4 | 5 | export interface Image { 6 | repository: string 7 | tag: string 8 | } 9 | 10 | interface Manifest { 11 | schemaVersion: number 12 | mediaType: string 13 | config: { 14 | mediaType: string 15 | size: number 16 | digest: string 17 | } 18 | layers?: { 19 | mediaType: string 20 | size: number 21 | digest: string 22 | }[] 23 | mainfests?: { 24 | mediaType: string 25 | digest: string 26 | size: number 27 | platform: { 28 | architecture: string 29 | os: string 30 | variant?: string 31 | } 32 | }[] 33 | } 34 | 35 | interface FetchResult { 36 | headers: Record 37 | data: Manifest 38 | } 39 | 40 | export interface ImageInfo { 41 | os: string 42 | architecture: string 43 | variant?: string 44 | digest: string 45 | layers: string[] 46 | } 47 | 48 | export type ImageMap = Map 49 | 50 | function generateKey(obj: ImageInfo): string { 51 | return [obj.os, obj.architecture, obj.variant || ''].join('|') 52 | } 53 | 54 | function convertHeaders(headers: Record): Record { 55 | const result: Record = {} 56 | for (const [key, value] of Object.entries(headers)) { 57 | if (typeof value === 'string') { 58 | result[key] = value 59 | } 60 | } 61 | return result 62 | } 63 | 64 | export abstract class ContainerRegistry { 65 | 
constructor(protected baseUrl: string) {} 66 | 67 | protected abstract getToken(repository: string): Promise 68 | 69 | protected abstract getCredentials(): DockerAuth | undefined 70 | 71 | protected async getLayers(digest: string, repo: string, token: string): Promise { 72 | const url = `https://${this.baseUrl}${repo}/manifests/${digest}` 73 | const headers = { 74 | Accept: 'application/vnd.docker.distribution.manifest.v2+json,application/vnd.oci.image.manifest.v1+json', 75 | Authorization: `Bearer ${token}`, 76 | } 77 | 78 | const fetchResult = await this.fetch(url, headers) 79 | 80 | const layers = fetchResult.data.layers as unknown as {digest: string}[] 81 | 82 | return layers.map((layer) => layer.digest) 83 | } 84 | 85 | protected async fetch(url: string, headers?: Record): Promise { 86 | try { 87 | const response = await axios.get(url, {headers}) 88 | if (core.isDebug()) { 89 | core.startGroup('Fetch response') 90 | core.info(`Fetching ${url}`) 91 | core.info(`Response status: ${response.status}`) 92 | core.info(`Response headers: ${JSON.stringify(response.headers, null, 2)}`) 93 | core.info(`Response data: ${JSON.stringify(response.data, null, 2)}`) 94 | core.endGroup() 95 | } 96 | return { 97 | headers: convertHeaders(response.headers), 98 | data: response.data, 99 | } 100 | } catch (error) { 101 | if (error instanceof Error) { 102 | const axiosError = error as AxiosError 103 | if (axiosError.response) { 104 | throw new Error(`Failed to fetch ${url}: ${axiosError.response.status} ${axiosError.response.statusText}`) 105 | } 106 | throw error 107 | } 108 | throw new Error('Unknown error occurred during fetch') 109 | } 110 | } 111 | 112 | async getImageInfo(image: Image): Promise { 113 | core.debug(`Fetching token for repository: ${image.repository}`) 114 | const token = await this.getToken(image.repository) 115 | const url = `https://${this.baseUrl}${image.repository}/manifests/${image.tag}` 116 | const headers = { 117 | Accept: 118 | 
'application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.oci.image.index.v1+json,application/vnd.docker.distribution.manifest.v2+json,application/vnd.oci.image.manifest.v1+json', 119 | Authorization: `Bearer ${token}`, 120 | } 121 | 122 | core.debug(`Fetching manifest for image: ${image.repository}:${image.tag}`) 123 | const fetchResult = await this.fetch(url, headers) 124 | const contentType = fetchResult.headers['content-type'] 125 | const dockerContentDigest = fetchResult.headers['docker-content-digest'] 126 | 127 | core.debug(`Content type: ${contentType}`) 128 | core.debug(`Docker content digest: ${dockerContentDigest}`) 129 | 130 | if ( 131 | contentType === 'application/vnd.docker.distribution.manifest.list.v2+json' || 132 | contentType === 'application/vnd.oci.image.index.v1+json' 133 | ) { 134 | core.debug(`Processing manifest list for image: ${image.repository}:${image.tag}`) 135 | const manifestList = fetchResult.data as unknown as { 136 | manifests: { 137 | digest: string 138 | platform: { 139 | architecture: string 140 | os: string 141 | variant: string 142 | } 143 | }[] 144 | } 145 | 146 | const imagesInfo = new Map() 147 | core.debug(`Initial imagesInfo: ${JSON.stringify(Array.from(imagesInfo.values()), null, 2)}`) 148 | for (const manifest of manifestList.manifests) { 149 | core.debug(`Processing manifest: ${JSON.stringify(manifest, null, 2)}`) 150 | if (manifest.platform.architecture === 'unknown') { 151 | continue 152 | } 153 | const imageInfo = { 154 | architecture: manifest.platform.architecture, 155 | digest: manifest.digest, 156 | os: manifest.platform?.os, 157 | variant: manifest.platform?.variant ? manifest.platform.variant : manifest.platform.architecture === 'arm64' ? 
'v8' : undefined, 158 | layers: await this.getLayers(manifest.digest, image.repository, token), 159 | } 160 | core.debug(`Generated imageInfo: ${JSON.stringify(imageInfo, null, 2)}`) 161 | imagesInfo.set(generateKey(imageInfo), imageInfo) 162 | } 163 | core.debug(`Found ${imagesInfo.size} images in manifest list for ${image.repository}:${image.tag}`) 164 | core.debug(`Images: ${JSON.stringify(Array.from(imagesInfo.values()), null, 2)}`) 165 | return imagesInfo 166 | } else if ( 167 | contentType === 'application/vnd.docker.distribution.manifest.v2+json' || 168 | contentType === 'application/vnd.oci.image.manifest.v1+json' 169 | ) { 170 | core.debug(`Processing single manifest for image: ${image.repository}:${image.tag}`) 171 | const digest = fetchResult.data.config.digest 172 | const blobUrl = `https://${this.baseUrl}${image.repository}/blobs/${digest}` 173 | const blobHeaders = { 174 | Accept: 'application/vnd.docker.container.image.v1+json,application/vnd.oci.image.config.v1+json', 175 | Authorization: `Bearer ${token}`, 176 | } 177 | const blobFetchResult = await this.fetch(blobUrl, blobHeaders) 178 | 179 | const {architecture, os, variant} = blobFetchResult.data as unknown as { 180 | architecture: string 181 | os: string 182 | variant: string 183 | } 184 | const manifest = {architecture, os, variant} 185 | core.debug(`Manifest for ${image.repository}:${image.tag}: ${JSON.stringify(manifest, null, 2)}`) 186 | 187 | const imageInfo = { 188 | architecture: manifest.architecture, 189 | digest: dockerContentDigest, 190 | os: manifest.os, 191 | variant: manifest.variant ? manifest.variant : manifest.architecture === 'arm64' ? 
'v8' : undefined, 192 | layers: await this.getLayers(dockerContentDigest, image.repository, token), 193 | } 194 | core.debug(`Found image for ${image.repository}:${image.tag}: ${JSON.stringify(imageInfo)}`) 195 | 196 | return new Map([[generateKey(imageInfo), imageInfo]]) 197 | } else { 198 | throw new Error('Unsupported content type') 199 | } 200 | } 201 | } 202 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "esModuleInterop": true, 4 | "forceConsistentCasingInFileNames": true, 5 | "module": "commonjs", 6 | "newLine": "lf", 7 | "noImplicitAny": false, 8 | "outDir": "./lib", 9 | "resolveJsonModule": true, 10 | "rootDir": "./src", 11 | "strict": true, 12 | "target": "es6", 13 | "useUnknownInCatchVariables": false 14 | }, 15 | "exclude": [ 16 | "./__mocks__/**/*", 17 | "./__tests__/**/*", 18 | "./lib/**/*", 19 | "node_modules", 20 | "jest.config.ts" 21 | ] 22 | } 23 | --------------------------------------------------------------------------------