├── .codacy.yml ├── .eslintignore ├── .eslintrc.json ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── actions_release.yml │ ├── audit_package.yml │ ├── auto_cherry_pick.yml │ ├── codeql.yml │ ├── dependency-review.yml │ ├── guarddog.yml │ ├── matrix-example.yml │ ├── multi-job-example.yml │ ├── scorecards.yml │ ├── test.yml │ └── workflow-run-example.yml ├── .gitignore ├── .gitmodules ├── .nvmrc ├── .pre-commit-config.yaml ├── .prettierignore ├── .prettierrc.json ├── .whitesource ├── LICENSE ├── README.md ├── SECURITY.md ├── action.yml ├── dist ├── index.js ├── index.js.map ├── licenses.txt └── sourcemap-register.js ├── jest.config.js ├── jest └── setupEnv.cjs ├── package.json ├── src ├── __tests__ │ ├── __snapshots__ │ │ └── inputs.test.ts.snap │ ├── inputs.test.ts │ └── utils.test.ts ├── changedFiles.ts ├── changedFilesOutput.ts ├── commitSha.ts ├── constant.ts ├── env.ts ├── inputs.ts ├── main.ts └── utils.ts ├── test ├── [test new].txt ├── changed-files-list.txt ├── changed-files.yml ├── demo │ └── test │ │ └── test.txt ├── new.md ├── test new 1.txt ├── test new.txt ├── test rename-1.txt ├── test rename-2.txt ├── test-è.txt ├── test.txt └── test2 │ ├── test.txt │ └── test3 │ ├── new.txt │ ├── new2.txt │ └── test4 │ └── test.txt ├── tsconfig.json └── yarn.lock /.codacy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | exclude_paths: 3 | - "*.md" 4 | - "dist/**" 5 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | jest.config.js 5 | coverage/ -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | "jest", 4 | "@typescript-eslint", 5 | "github" 6 | ], 7 | "extends": [ 8 | "plugin:github/recommended", 9 | "plugin:prettier/recommended" 10 | ], 11 | "parser": "@typescript-eslint/parser", 12 | "parserOptions": { 13 | "ecmaVersion": 9, 14 | "sourceType": "module", 15 | "project": "./tsconfig.json" 16 | }, 17 | "rules": { 18 | "i18n-text/no-en": "off", 19 | "eslint-comments/no-use": "off", 20 | "import/no-namespace": "off", 21 | "no-unused-vars": "off", 22 | "@typescript-eslint/no-unused-vars": "error", 23 | "@typescript-eslint/explicit-member-accessibility": [ 24 | "error", 25 | { 26 | "accessibility": "no-public" 27 | } 28 | ], 29 | "@typescript-eslint/no-require-imports": "error", 30 | "@typescript-eslint/array-type": "error", 31 | "@typescript-eslint/await-thenable": "error", 32 | "@typescript-eslint/ban-ts-comment": "off", 33 | "camelcase": "off", 34 | "@typescript-eslint/consistent-type-assertions": "error", 35 | "@typescript-eslint/explicit-function-return-type": [ 36 | "error", 37 | { 38 | "allowExpressions": true 39 | } 40 | ], 41 | "@typescript-eslint/func-call-spacing": [ 42 | "error", 43 | "never" 44 | ], 45 | "@typescript-eslint/no-array-constructor": "error", 46 | "@typescript-eslint/no-empty-interface": "error", 47 | "@typescript-eslint/no-explicit-any": "error", 48 | "@typescript-eslint/no-extraneous-class": "error", 49 | "@typescript-eslint/no-for-in-array": "error", 50 | "@typescript-eslint/no-inferrable-types": "error", 51 | "@typescript-eslint/no-misused-new": "error", 52 | "@typescript-eslint/no-namespace": "error", 53 | "@typescript-eslint/no-non-null-assertion": "warn", 54 | 
"@typescript-eslint/no-unnecessary-qualifier": "error", 55 | "@typescript-eslint/no-unnecessary-type-assertion": "error", 56 | "@typescript-eslint/no-useless-constructor": "error", 57 | "@typescript-eslint/no-var-requires": "error", 58 | "@typescript-eslint/prefer-for-of": "warn", 59 | "@typescript-eslint/prefer-function-type": "warn", 60 | "@typescript-eslint/prefer-includes": "error", 61 | "@typescript-eslint/prefer-string-starts-ends-with": "error", 62 | "@typescript-eslint/promise-function-async": "error", 63 | "@typescript-eslint/require-array-sort-compare": "error", 64 | "@typescript-eslint/restrict-plus-operands": "error", 65 | "no-shadow": "off", 66 | "@typescript-eslint/no-shadow": "error", 67 | "semi": "off", 68 | "filenames/match-regex": [ 69 | "error", 70 | "^[a-zA-Z0-9\\-.]+$", 71 | true 72 | ], 73 | "@typescript-eslint/semi": [ 74 | "error", 75 | "never" 76 | ], 77 | "@typescript-eslint/type-annotation-spacing": "error", 78 | "@typescript-eslint/unbound-method": "error" 79 | }, 80 | "env": { 81 | "node": true, 82 | "es6": true, 83 | "jest/globals": true 84 | } 85 | } -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | versioning-strategy: widen 9 | labels: 10 | - "merge when passing" 11 | - package-ecosystem: github-actions 12 | directory: "/" 13 | schedule: 14 | interval: daily 15 | open-pull-requests-limit: 10 16 | labels: 17 | - "merge when passing" 18 | - package-ecosystem: gitsubmodule 19 | directory: / 20 | schedule: 21 | interval: daily 22 | open-pull-requests-limit: 10 23 | labels: 24 | - "merge when passing" 25 | -------------------------------------------------------------------------------- /.github/workflows/actions_release.yml: -------------------------------------------------------------------------------- 1 | name: Release GitHub Actions 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | tag: 7 | description: "Tag for the release" 8 | required: true 9 | script: 10 | description: "Script to run for the release" 11 | required: false 12 | default: "yarn build" 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | release: 19 | permissions: 20 | actions: read 21 | id-token: write 22 | contents: write 23 | uses: step-security/reusable-workflows/.github/workflows/actions_release.yaml@v1 24 | with: 25 | tag: "${{ github.event.inputs.tag }}" 26 | script: "${{ github.event.inputs.script }}" -------------------------------------------------------------------------------- /.github/workflows/audit_package.yml: -------------------------------------------------------------------------------- 1 | name: Yarn Audit Fix Run 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | base_branch: 7 | description: "Specify a base branch" 8 | required: false 9 | default: "main" 10 | package_manager: 11 | description: "Specify package manager (npm or yarn)" 12 | required: false 13 | default: "yarn" 14 | script: 15 | description: "Specify a script to run after audit fix" 16 | required: false 17 | default: "yarn build" 18 | 19 | schedule: 20 | - cron: "0 0 * * 1" 21 | 22 | jobs: 23 | 
audit-fix: 24 | uses: step-security/reusable-workflows/.github/workflows/audit_fix.yml@v1 25 | with: 26 | base_branch: ${{ inputs.base_branch || 'main' }} 27 | package_manager: "yarn" 28 | script: ${{ inputs.script || 'yarn build' }} 29 | 30 | permissions: 31 | contents: write 32 | pull-requests: write 33 | packages: read 34 | issues: write -------------------------------------------------------------------------------- /.github/workflows/auto_cherry_pick.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Auto Cherry-Pick from Upstream 3 | 4 | on: 5 | workflow_dispatch: 6 | inputs: 7 | base_branch: 8 | description: "Base branch to create the PR against" 9 | required: true 10 | default: "main" 11 | package_manager: 12 | description: "Specify package manager (npm or yarn)" 13 | required: false 14 | default: "yarn" 15 | script: 16 | description: "Specify a script to run after audit fix" 17 | required: false 18 | default: "yarn run all" 19 | 20 | permissions: 21 | contents: write 22 | pull-requests: write 23 | packages: read 24 | issues: write 25 | 26 | jobs: 27 | audit-fix: 28 | uses: step-security/reusable-workflows/.github/workflows/auto_cherry_pick.yaml@upstream-Changes-CherryPick 29 | with: 30 | original-owner: "tj-actions" 31 | repo-name: "changed-files" 32 | base_branch: ${{ inputs.base_branch }} 33 | package_manager: "yarn" 34 | script: ${{ inputs.script || 'yarn run all' }} -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "main" ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ "main" ] 20 | schedule: 21 | - cron: '44 20 * * 0' 22 | 23 | permissions: 24 | actions: read 25 | contents: read 26 | security-events: write 27 | 28 | jobs: 29 | analyze: 30 | name: Analyze 31 | runs-on: ubuntu-latest 32 | permissions: 33 | actions: read 34 | contents: read 35 | security-events: write 36 | 37 | strategy: 38 | fail-fast: false 39 | matrix: 40 | language: [ 'javascript' ] 41 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 42 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 43 | 44 | steps: 45 | - name: Harden the runner (Audit all outbound calls) 46 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 47 | with: 48 | egress-policy: audit 49 | 50 | - name: Checkout repository 51 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 52 | 53 | # Initializes the CodeQL tools for scanning. 54 | - name: Initialize CodeQL 55 | uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 56 | with: 57 | languages: ${{ matrix.language }} 58 | # If you wish to specify custom queries, you can do so here or in a config file. 
59 | # By default, queries listed here will override any specified in a config file. 60 | # Prefix the list here with "+" to use these queries and those in the config file. 61 | 62 | # For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 63 | # queries: security-extended,security-and-quality 64 | 65 | 66 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 67 | # If this step fails, then you should remove it and run the build manually (see below). 68 | - name: Autobuild 69 | uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 70 | 71 | # ℹ️ Command-line programs to run using the OS shell. 72 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 73 | 74 | # If the Autobuild fails above, remove it and uncomment the following three lines, 75 | # modifying them (or adding more) to build your code. Refer to the EXAMPLE below for guidance. 76 | 77 | # - run: | 78 | # echo "Run, Build Application using script" 79 | # ./location_of_script_within_repo/buildscript.sh 80 | 81 | - name: Perform CodeQL Analysis 82 | uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 83 | with: 84 | category: "/language:${{matrix.language}}" 85 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | # Dependency Review Action 2 | # 3 | # This Action will scan dependency manifest files that change as part of a Pull Request, 4 | # surfacing known-vulnerable versions of the packages declared or updated in the PR. 5 | # Once installed, if the workflow run is marked as required, 6 | # PRs introducing known-vulnerable packages will be blocked from merging.
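# The action works with no configuration by default; stricter gates can be
# enabled through its inputs (e.g. `fail-on-severity`). See the source
# repository link below for the full list of options.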
7 | # 8 | # Source repository: https://github.com/actions/dependency-review-action 9 | name: 'Dependency Review' 10 | on: [pull_request] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | dependency-review: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - name: Harden the runner (Audit all outbound calls) 20 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 21 | with: 22 | egress-policy: audit 23 | 24 | - name: 'Checkout Repository' 25 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 26 | - name: 'Dependency Review' 27 | uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4 28 | -------------------------------------------------------------------------------- /.github/workflows/guarddog.yml: -------------------------------------------------------------------------------- 1 | name: Run GuardDog Scan on PRs 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | permissions: 10 | contents: read 11 | 12 | jobs: 13 | call-guarddog-scan: 14 | uses: step-security/reusable-workflows/.github/workflows/guarddog.yml@v1 15 | -------------------------------------------------------------------------------- /.github/workflows/matrix-example.yml: -------------------------------------------------------------------------------- 1 | name: Matrix Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | workflow_dispatch: 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | changed-files: 14 | name: Get changed files 15 | runs-on: ubuntu-latest 16 | outputs: 17 | matrix: ${{ steps.changed-files.outputs.all_changed_files }} 18 | steps: 19 | - name: Harden the runner (Audit all outbound calls) 20 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 21 | with: 22 | egress-policy: audit 23 | 24 | - name: Checkout 25 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 26 | with: 27 | fetch-depth: 0 28 | - name: Get changed files 29 | id: changed-files 30 | uses: ./ 31 | with: 32 | matrix: true 33 | - name: List all changed files 34 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 35 | 36 | matrix-job: 37 | name: Run Matrix Job 38 | runs-on: ubuntu-latest 39 | needs: [changed-files] 40 | strategy: 41 | matrix: 42 | files: ${{ fromJSON(needs.changed-files.outputs.matrix) }} 43 | max-parallel: 4 44 | fail-fast: false 45 | steps: 46 | - name: Harden the runner (Audit all outbound calls) 47 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 48 | with: 49 | egress-policy: audit 50 | 51 | - name: Checkout 52 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 53 | - name: Test 54 | run: | 55 | echo ${{ matrix.files }} 56 | -------------------------------------------------------------------------------- /.github/workflows/multi-job-example.yml: -------------------------------------------------------------------------------- 1 | name: Multi Job Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | push: 8 | branches: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | changed-files: 16 | name: Get changed files 17 | runs-on: ubuntu-latest 18 | outputs: 19 | all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }} 20 | steps: 21 | - name: Harden the runner (Audit all outbound calls) 22 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 23 | with: 24 | 
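# `audit` logs outbound calls without blocking them; harden-runner also
# supports `egress-policy: block` together with an `allowed-endpoints`
# allow-list to enforce a strict egress policy.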
egress-policy: audit 25 | 26 | - name: Checkout 27 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 28 | with: 29 | fetch-depth: 0 30 | - name: Get changed files 31 | id: changed-files 32 | uses: ./ 33 | - name: List all changed files 34 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 35 | 36 | view-changed-files: 37 | name: View all changed files 38 | runs-on: ubuntu-latest 39 | needs: [changed-files] 40 | steps: 41 | - name: Harden the runner (Audit all outbound calls) 42 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 43 | with: 44 | egress-policy: audit 45 | 46 | - name: List all changed files 47 | run: | 48 | echo '${{ needs.changed-files.outputs.all_changed_files }}' 49 | 50 | 51 | changed-files-rest-api: 52 | name: Get changed files using REST API 53 | runs-on: ubuntu-latest 54 | outputs: 55 | all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }} 56 | steps: 57 | - name: Harden the runner (Audit all outbound calls) 58 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 59 | with: 60 | egress-policy: audit 61 | 62 | - name: Checkout 63 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 64 | with: 65 | fetch-depth: 0 66 | - name: Get changed files 67 | id: changed-files 68 | continue-on-error: ${{ github.event_name == 'push' }} 69 | uses: ./ 70 | with: 71 | use_rest_api: true 72 | - name: List all changed files 73 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 74 | 75 | view-changed-files-rest-api: 76 | name: View all changed files using REST API 77 | runs-on: ubuntu-latest 78 | needs: [changed-files-rest-api] 79 | steps: 80 | - name: Harden the runner (Audit all outbound calls) 81 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 82 | with: 83 | egress-policy: audit 84 | 85 | - name: List all changed files 86 | run: | 87 | echo '${{ needs.changed-files-rest-api.outputs.all_changed_files }}' 88 | -------------------------------------------------------------------------------- /.github/workflows/scorecards.yml: -------------------------------------------------------------------------------- 1 | # This workflow uses actions that are not certified by GitHub. They are provided 2 | # by a third-party and are governed by separate terms of service, privacy 3 | # policy, and support documentation. 4 | 5 | name: Scorecard supply-chain security 6 | on: 7 | # For Branch-Protection check. Only the default branch is supported. See 8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection 9 | branch_protection_rule: 10 | # To guarantee Maintained check is occasionally updated. See 11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained 12 | schedule: 13 | - cron: '20 7 * * 2' 14 | push: 15 | branches: ["main"] 16 | 17 | # Declare default permissions as read only. 18 | permissions: read-all 19 | 20 | jobs: 21 | analysis: 22 | name: Scorecard analysis 23 | runs-on: ubuntu-latest 24 | permissions: 25 | # Needed to upload the results to code-scanning dashboard. 26 | security-events: write 27 | # Needed to publish results and get a badge (see publish_results below). 
28 | id-token: write 29 | contents: read 30 | actions: read 31 | # To allow GraphQL ListCommits to work 32 | issues: read 33 | pull-requests: read 34 | # To detect SAST tools 35 | checks: read 36 | 37 | steps: 38 | - name: Harden the runner (Audit all outbound calls) 39 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0 40 | with: 41 | egress-policy: audit 42 | 43 | - name: "Checkout code" 44 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 45 | with: 46 | persist-credentials: false 47 | 48 | - name: "Run analysis" 49 | uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 50 | with: 51 | results_file: results.sarif 52 | results_format: sarif 53 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: 54 | # - you want to enable the Branch-Protection check on a *public* repository, or 55 | # - you are installing Scorecards on a *private* repository 56 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. 57 | # repo_token: ${{ secrets.SCORECARD_TOKEN }} 58 | 59 | # Public repositories: 60 | # - Publish results to OpenSSF REST API for easy access by consumers 61 | # - Allows the repository to include the Scorecard badge. 62 | # - See https://github.com/ossf/scorecard-action#publishing-results. 63 | # For private repositories: 64 | # - `publish_results` will always be set to `false`, regardless 65 | # of the value entered here. 66 | publish_results: true 67 | 68 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF 69 | # format to the repository Actions tab. 70 | - name: "Upload artifact" 71 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 72 | with: 73 | name: SARIF file 74 | path: results.sarif 75 | retention-days: 5 76 | 77 | # Upload the results to GitHub's code scanning dashboard. 
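# (This upload relies on the `security-events: write` permission declared above.)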
78 | - name: "Upload to code-scanning" 79 | uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17 80 | with: 81 | sarif_file: results.sarif 82 | -------------------------------------------------------------------------------- /.github/workflows/workflow-run-example.yml: -------------------------------------------------------------------------------- 1 | name: Workflow Run Example 2 | on: 3 | workflow_run: 4 | workflows: [Matrix Example] 5 | types: [completed] 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | on-success: 12 | runs-on: ubuntu-latest 13 | if: ${{ github.event.workflow_run.conclusion == 'success' }} 14 | steps: 15 | - name: Harden the runner (Audit all outbound calls) 16 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 17 | with: 18 | egress-policy: audit 19 | 20 | - name: Checkout code 21 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 22 | 23 | - name: Get changed files 24 | id: changed-files 25 | uses: ./ 26 | 27 | - name: Echo list of changed files on success 28 | run: | 29 | echo "Changed files on success:" 30 | echo "${{ steps.changed-files.outputs.all_changed_files }}" 31 | 32 | on-failure: 33 | runs-on: ubuntu-latest 34 | if: ${{ github.event.workflow_run.conclusion == 'failure' }} 35 | steps: 36 | - name: Harden the runner (Audit all outbound calls) 37 | uses: step-security/harden-runner@4d991eb9b905ef189e4c376166672c3f2f230481 # v2.11.0 38 | with: 39 | egress-policy: audit 40 | 41 | - name: Checkout code 42 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 43 | 44 | - name: Get changed files 45 | id: changed-files 46 | uses: ./ 47 | 48 | - name: Echo list of changed files on failure 49 | run: | 50 | echo "Changed files on failure:" 51 | echo "${{ steps.changed-files.outputs.all_changed_files }}" 52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Optional REPL history 60 | .node_repl_history 61 | 62 | # Output of 'npm pack' 63 | *.tgz 64 | 65 | # Yarn Integrity file 66 | .yarn-integrity 67 | 68 | # dotenv environment variables file 69 | ./.env 70 | .env/../.env 71 | 
./.env.local 72 | ./.env/../.env.local 73 | .env 74 | .env.test 75 | 76 | # parcel-bundler cache (https://parceljs.org/) 77 | .cache 78 | 79 | # next.js build output 80 | .next 81 | 82 | # nuxt.js build output 83 | .nuxt 84 | 85 | # vuepress build output 86 | .vuepress/dist 87 | 88 | # Serverless directories 89 | .serverless/ 90 | 91 | # FuseBox cache 92 | .fusebox/ 93 | 94 | # DynamoDB Local files 95 | .dynamodb/ 96 | 97 | # OS metadata 98 | .DS_Store 99 | Thumbs.db 100 | 101 | # Ignore built ts files 102 | __tests__/runner/* 103 | lib/**/* 104 | 105 | # IDEA 106 | .idea/ 107 | 108 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "test/demo"] 2 | path = test/demo 3 | url = git@github.com:step-security/demo.git 4 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/gitleaks/gitleaks 3 | rev: v8.16.3 4 | hooks: 5 | - id: gitleaks 6 | - repo: https://github.com/pre-commit/mirrors-eslint 7 | rev: v8.38.0 8 | hooks: 9 | - id: eslint 10 | - repo: https://github.com/pre-commit/pre-commit-hooks 11 | rev: v4.4.0 12 | hooks: 13 | - id: end-of-file-fixer 14 | - id: trailing-whitespace 15 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": false, 6 | "singleQuote": true, 7 | "trailingComma": "none", 8 | "bracketSpacing": false, 9 | "arrowParens": "avoid" 10 | } -------------------------------------------------------------------------------- /.whitesource: -------------------------------------------------------------------------------- 1 | { 2 | "scanSettings": { 3 | "baseBranches": [] 4 | }, 5 | "checkRunSettings": { 6 | "vulnerableCheckRunConclusionLevel": "failure", 7 | "displayMode": "diff" 8 | }, 9 | "issueSettings": { 10 | "minSeverityLevel": "LOW" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2021, Tonye Jack 4 | Copyright (c) 2023 StepSecurity 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Please report security vulnerabilities to security@stepsecurity.io 6 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: Changed Files 2 | description: Get all Added, Copied, Modified, Deleted, Renamed, Type changed, Unmerged, and Unknown files. 3 | author: step-security 4 | 5 | inputs: 6 | separator: 7 | description: "Split character for output strings." 8 | required: false 9 | default: " " 10 | include_all_old_new_renamed_files: 11 | description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: #501." 12 | required: false 13 | default: "false" 14 | old_new_separator: 15 | description: "Split character for old and new filename pairs." 16 | required: false 17 | default: "," 18 | old_new_files_separator: 19 | description: "Split character for old and new renamed filename pairs." 20 | required: false 21 | default: " " 22 | files_from_source_file: 23 | description: "Source file(s) used to populate the `files` input." 24 | required: false 25 | default: "" 26 | files_from_source_file_separator: 27 | description: "Separator used to split the `files_from_source_file` input." 28 | default: "\n" 29 | required: false 30 | files: 31 | description: | 32 | File and directory patterns used to detect changes (Defaults to the entire repo if unset). 33 | NOTE: Multiline file/directory patterns should not include quotes. 34 | required: false 35 | default: "" 36 | files_separator: 37 | description: "Separator used to split the `files` input" 38 | default: "\n" 39 | required: false 40 | files_yaml: 41 | description: "YAML used to define a set of file patterns to detect changes" 42 | required: false 43 | default: "" 44 | files_yaml_from_source_file: 45 | description: "Source file(s) used to populate the `files_yaml` input. Example: https://github.com/step-security/changed-files/blob/main/test/changed-files.yml" 46 | required: false 47 | default: "" 48 | files_yaml_from_source_file_separator: 49 | description: 'Separator used to split the `files_yaml_from_source_file` input' 50 | default: "\n" 51 | required: false 52 | files_ignore_yaml: 53 | description: "YAML used to define a set of file patterns to ignore changes" 54 | required: false 55 | default: "" 56 | files_ignore_yaml_from_source_file: 57 | description: "Source file(s) used to populate the `files_ignore_yaml` input. 
Example: https://github.com/step-security/changed-files/blob/main/test/changed-files.yml" 58 | required: false 59 | default: "" 60 | files_ignore_yaml_from_source_file_separator: 61 | description: 'Separator used to split the `files_ignore_yaml_from_source_file` input' 62 | default: "\n" 63 | required: false 64 | files_ignore: 65 | description: "Ignore changes to these file(s). NOTE: Multiline file/directory patterns should not include quotes." 66 | required: false 67 | default: "" 68 | files_ignore_separator: 69 | description: "Separator used to split the `files_ignore` input" 70 | default: "\n" 71 | required: false 72 | files_ignore_from_source_file: 73 | description: "Source file(s) used to populate the `files_ignore` input" 74 | required: false 75 | default: "" 76 | files_ignore_from_source_file_separator: 77 | description: 'Separator used to split the `files_ignore_from_source_file` input' 78 | default: "\n" 79 | required: false 80 | sha: 81 | description: "Specify a different commit SHA or branch used for comparing changes" 82 | required: false 83 | base_sha: 84 | description: "Specify a different base commit SHA or branch used for comparing changes" 85 | required: false 86 | since: 87 | description: "Get changed files for commits whose timestamp is newer than the given time." 88 | required: false 89 | default: "" 90 | until: 91 | description: "Get changed files for commits whose timestamp is earlier than the given time." 92 | required: false 93 | default: "" 94 | path: 95 | description: "Specify a relative path under `$GITHUB_WORKSPACE` to locate the repository." 96 | required: false 97 | default: "." 98 | quotepath: 99 | description: "Set this to `false` to match files containing non-ASCII characters and to output the filenames completely verbatim." 100 | default: "true" 101 | required: false 102 | diff_relative: 103 | description: "Exclude changes outside the current directory and show path names relative to it. NOTE: This requires you to specify the top-level directory via the `path` input." 104 | required: false 105 | default: "true" 106 | dir_names: 107 | default: "false" 108 | description: "Output unique changed directories instead of filenames. NOTE: This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`." 109 | required: false 110 | dir_names_max_depth: 111 | description: "Limit the directory output to a maximum depth e.g. `test/test1/test2` with max depth of `2` returns `test/test1`." 112 | required: false 113 | dir_names_exclude_current_dir: 114 | description: "Exclude the current directory represented by `.` from the output when `dir_names` is set to `true`." 115 | required: false 116 | default: "false" 117 | dir_names_include_files: 118 | description: "File and directory patterns to include in the output when `dir_names` is set to `true`. NOTE: This returns the matching files in addition to the directory names." 119 | required: false 120 | default: "" 121 | dir_names_include_files_separator: 122 | description: "Separator used to split the `dir_names_include_files` input" 123 | default: "\n" 124 | required: false 125 | dir_names_deleted_files_include_only_deleted_dirs: 126 | description: "Include only directories that have been deleted as opposed to directory names of files that have been deleted in the `deleted_files` output when `dir_names` is set to `true`."
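# Illustrative use of the `dir_names` family of inputs (the `@vN` ref below is
# a placeholder for a published release tag, not an actual version):
#
#   - uses: step-security/changed-files@vN
#     with:
#       dir_names: "true"
#       dir_names_max_depth: "2"
#       dir_names_exclude_current_dir: "true"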
127 | required: false 128 | default: "false" 129 | json: 130 | description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/step-security/changed-files/blob/main/.github/workflows/matrix-example.yml" 131 | required: false 132 | default: "false" 133 | escape_json: 134 | description: "Escape JSON output." 135 | required: false 136 | default: "true" 137 | safe_output: 138 | description: "Apply sanitization to output filenames before they are set as outputs." 139 | required: false 140 | default: "true" 141 | fetch_depth: 142 | description: "Depth of additional branch history fetched. NOTE: This can be adjusted to resolve errors with insufficient history." 143 | required: false 144 | default: "25" 145 | skip_initial_fetch: 146 | description: | 147 | Skip initially fetching additional history to improve performance for shallow repositories. 148 | NOTE: This could lead to errors with missing history. It's intended to be used when you've fetched all necessary history to perform the diff. 149 | required: false 150 | default: "false" 151 | fetch_additional_submodule_history: 152 | description: "Fetch additional history for submodules." 153 | required: false 154 | default: "false" 155 | since_last_remote_commit: 156 | description: "Use the last commit on the remote branch as the `base_sha`. Defaults to the last non-merge commit on the target branch for pull request events and the previous remote commit of the current branch for push events." 157 | required: false 158 | default: "false" 159 | write_output_files: 160 | description: "Write outputs to the `output_dir`, which defaults to the `.github/outputs` folder. NOTE: This creates a `.txt` file by default and a `.json` file if `json` is set to `true`." 161 | required: false 162 | default: "false" 163 | output_dir: 164 | description: "Directory to store output files." 165 | required: false 166 | default: ".github/outputs" 167 | output_renamed_files_as_deleted_and_added: 168 | description: "Output renamed files as deleted and added files." 169 | required: false 170 | default: "false" 171 | recover_deleted_files: 172 | description: "Recover deleted files." 173 | required: false 174 | default: "false" 175 | recover_deleted_files_to_destination: 176 | description: "Recover deleted files to a new destination directory, defaults to the original location." 177 | required: false 178 | default: "" 179 | recover_files: 180 | description: | 181 | File and directory patterns used to recover deleted files, 182 | defaults to the patterns provided via the `files`, `files_from_source_file`, `files_ignore` and `files_ignore_from_source_file` inputs 183 | or all deleted files if no patterns are provided. 184 | required: false 185 | default: "" 186 | recover_files_separator: 187 | description: "Separator used to split the `recover_files` input" 188 | default: "\n" 189 | required: false 190 | recover_files_ignore: 191 | description: "File and directory patterns to ignore when recovering deleted files." 192 | required: false 193 | default: "" 194 | recover_files_ignore_separator: 195 | description: "Separator used to split the `recover_files_ignore` input" 196 | default: "\n" 197 | required: false 198 | token: 199 | description: "GitHub token used to fetch changed files from GitHub's API." 200 | required: false 201 | default: ${{ github.token }} 202 | api_url: 203 | description: "GitHub API URL."
204 | required: false 205 | default: ${{ github.api_url }} 206 | use_rest_api: 207 | description: "Force the use of Github's REST API even when a local copy of the repository exists" 208 | required: false 209 | default: "false" 210 | fail_on_initial_diff_error: 211 | description: "Fail when the initial diff fails." 212 | required: false 213 | default: "false" 214 | fail_on_submodule_diff_error: 215 | description: "Fail when the submodule diff fails." 216 | required: false 217 | default: "false" 218 | negation_patterns_first: 219 | description: "Apply the negation patterns first. NOTE: This affects how changed files are matched." 220 | required: false 221 | default: "false" 222 | matrix: 223 | description: "Output changed files in a format that can be used for matrix jobs. Alias for setting inputs `json` to `true` and `escape_json` to `false`." 224 | required: false 225 | default: "false" 226 | exclude_submodules: 227 | description: "Exclude changes to submodules." 228 | required: false 229 | default: "false" 230 | fetch_missing_history_max_retries: 231 | description: "Maximum number of retries to fetch missing history." 232 | required: false 233 | default: "20" 234 | use_posix_path_separator: 235 | description: "Use POSIX path separator `/` for output file paths on Windows." 236 | required: false 237 | default: "false" 238 | tags_pattern: 239 | description: "Tags pattern to include." 240 | required: false 241 | default: "*" 242 | tags_ignore_pattern: 243 | description: "Tags pattern to ignore." 244 | required: false 245 | default: "" 246 | 247 | 248 | outputs: 249 | added_files: 250 | description: "Returns only files that are Added (A)." 251 | added_files_count: 252 | description: "Returns the number of `added_files`" 253 | copied_files: 254 | description: "Returns only files that are Copied (C)." 255 | copied_files_count: 256 | description: "Returns the number of `copied_files`" 257 | deleted_files: 258 | description: "Returns only files that are Deleted (D)." 259 | deleted_files_count: 260 | description: "Returns the number of `deleted_files`" 261 | modified_files: 262 | description: "Returns only files that are Modified (M)." 263 | modified_files_count: 264 | description: "Returns the number of `modified_files`" 265 | renamed_files: 266 | description: "Returns only files that are Renamed (R)." 267 | renamed_files_count: 268 | description: "Returns the number of `renamed_files`" 269 | all_old_new_renamed_files: 270 | description: "Returns only files that are Renamed and lists their old and new names. **NOTE:** This requires setting `include_all_old_new_renamed_files` to `true`. Also, keep in mind that this output is global and wouldn't be nested in outputs generated when the `*_yaml_*` input is used. (R)" 271 | all_old_new_renamed_files_count: 272 | description: "Returns the number of `all_old_new_renamed_files`" 273 | type_changed_files: 274 | description: "Returns only files that have their file type changed (T)." 275 | type_changed_files_count: 276 | description: "Returns the number of `type_changed_files`" 277 | unmerged_files: 278 | description: "Returns only files that are Unmerged (U)." 279 | unmerged_files_count: 280 | description: "Returns the number of `unmerged_files`" 281 | unknown_files: 282 | description: "Returns only files that are Unknown (X)." 283 | unknown_files_count: 284 | description: "Returns the number of `unknown_files`" 285 | all_changed_and_modified_files: 286 | description: "Returns all changed and modified files i.e. 
a combination of (ACMRDTUX)" 287 | all_changed_and_modified_files_count: 288 | description: "Returns the number of `all_changed_and_modified_files`" 289 | all_changed_files: 290 | description: "Returns all changed files i.e. a combination of all added, copied, modified and renamed files (ACMR)" 291 | all_changed_files_count: 292 | description: "Returns the number of `all_changed_files`" 293 | any_changed: 294 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have changed. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*." 295 | only_changed: 296 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*." 297 | other_changed_files: 298 | description: "Returns all other changed files not listed in the files input i.e. includes a combination of all added, copied, modified and renamed files (ACMR)." 299 | other_changed_files_count: 300 | description: "Returns the number of `other_changed_files`" 301 | all_modified_files: 302 | description: "Returns all modified files i.e. a combination of all added, copied, modified, renamed and deleted files (ACMRD)." 303 | all_modified_files_count: 304 | description: "Returns the number of `all_modified_files`" 305 | any_modified: 306 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been modified. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*." 307 | only_modified: 308 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been modified. (ACMRD)." 309 | other_modified_files: 310 | description: "Returns all other modified files not listed in the files input i.e. a combination of all added, copied, modified, renamed, and deleted files (ACMRD)" 311 | other_modified_files_count: 312 | description: "Returns the number of `other_modified_files`" 313 | any_deleted: 314 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been deleted. This defaults to `true` when no patterns are specified. (D)" 315 | only_deleted: 316 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been deleted. (D)" 317 | other_deleted_files: 318 | description: "Returns all other deleted files not listed in the files input i.e. a combination of all deleted files (D)" 319 | other_deleted_files_count: 320 | description: "Returns the number of `other_deleted_files`" 321 | modified_keys: 322 | description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has been added, copied, modified, or deleted (ACMRD)" 323 | changed_keys: 324 | description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e.
key that contains any path that has either been added, copied, modified, and renamed (ACMR)" 325 | 326 | runs: 327 | using: 'node20' 328 | main: 'dist/index.js' 329 | 330 | branding: 331 | icon: file-text 332 | color: white 333 | -------------------------------------------------------------------------------- /dist/sourcemap-register.js: -------------------------------------------------------------------------------- 1 | (()=>{var e={296:e=>{var r=Object.prototype.toString;var n=typeof Buffer!=="undefined"&&typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},599:(e,r,n)=>{e=n.nmd(e);var t=n(927).SourceMapConsumer;var o=n(928);var i;try{i=n(896);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(296);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var d=[];var h=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function globalProcessVersion(){if(typeof process==="object"&&process!==null){return process.version}else{return""}}function globalProcessStderr(){if(typeof process==="object"&&process!==null){return process.stderr}}function globalProcessExit(e){if(typeof process==="object"&&process!==null&&typeof process.exit==="function"){return process.exit(e)}}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var 
i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(globalProcessVersion())?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);var n=globalProcessStderr();if(n&&n._handle&&n._handle.setBlocking){n._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);globalProcessExit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=d.slice(0);var _=h.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){d.length=0}d.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){h.length=0}h.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){d.length=0;h.length=0;d=S.slice(0);h=_.slice(0);v=handlerExec(h);m=handlerExec(d)}},517:(e,r,n)=>{var t=n(297);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(158);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},24:(e,r,n)=>{var t=n(297);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.P=MappingList},299:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(297);var i=n(197);var 
a=n(517).C;var u=n(818);var s=n(299).g;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: 
"+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){h.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(h,o.compareByOriginalPositions);this.__originalMappings=h};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var 
i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(818);var o=n(297);var i=n(517).C;var a=n(24).P;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. 
Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var d=0,h=g.length;d0){if(!o.compareByGeneratedPositionsInflated(c,g[d-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.x=SourceMapGenerator},565:(e,r,n)=>{var t;var o=n(163).x;var i=n(297);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return 
u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},927:(e,r,n)=>{n(163).x;r.SourceMapConsumer=n(684).SourceMapConsumer;n(565)},896:e=>{"use strict";e.exports=require("fs")},928:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};__webpack_require__(599).install();module.exports=n})(); -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | clearMocks: true, 3 | moduleFileExtensions: ['js', 'ts'], 4 | testMatch: ['**/*.test.ts'], 5 | transform: { 6 | '^.+\\.ts$': 'ts-jest' 7 | }, 8 | verbose: true, 9 | testTimeout: 10000, 10 | setupFiles: [ 11 | "/jest/setupEnv.cjs" 12 | ] 13 | }; 14 | -------------------------------------------------------------------------------- /jest/setupEnv.cjs: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | process.env.GITHUB_WORKSPACE = path.join( 4 | path.resolve(__dirname, '..'), '.' 5 | ) 6 | process.env.GITHUB_ACTION_PATH = path.join( 7 | path.resolve(__dirname, '..'), '.' 
8 | ) 9 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@step-security/changed-files", 3 | "version": "45.0.7", 4 | "description": "Github action to retrieve all (added, copied, modified, deleted, renamed, type changed, unmerged, unknown) files and directories.", 5 | "main": "lib/main.js", 6 | "publishConfig": { 7 | "registry": "https://npm.pkg.github.com" 8 | }, 9 | "scripts": { 10 | "build": "tsc", 11 | "format": "prettier --write src/*.ts src/**/*.ts", 12 | "format-check": "prettier --check src/*.ts src/**/*.ts", 13 | "lint": "eslint src/*.ts src/**/*.ts --max-warnings 0", 14 | "lint:fix": "eslint --fix src/*.ts src/**/*.ts", 15 | "package": "ncc build lib/main.js --source-map --license licenses.txt", 16 | "test": "jest --coverage", 17 | "update-snapshot": "jest -u", 18 | "all": "yarn build && yarn format && yarn lint && yarn package && yarn test" 19 | }, 20 | "repository": { 21 | "type": "git", 22 | "url": "git+https://github.com/step-security/changed-files.git" 23 | }, 24 | "keywords": [ 25 | "actions", 26 | "glob", 27 | "github-actions" 28 | ], 29 | "author": "step-security", 30 | "license": "MIT", 31 | "bugs": { 32 | "url": "https://github.com/step-security/changed-files/issues" 33 | }, 34 | "homepage": "https://github.com/step-security/changed-files#readme", 35 | "dependencies": { 36 | "@actions/core": "^1.10.0", 37 | "@actions/exec": "^1.1.1", 38 | "@actions/github": "^6.0.0", 39 | "@octokit/rest": "^21.0.0", 40 | "@stdlib/utils-convert-path": "^0.2.1", 41 | "lodash": "^4.17.21", 42 | "micromatch": "^4.0.5", 43 | "yaml": "^2.3.1" 44 | }, 45 | "devDependencies": { 46 | "@types/jest": "^29.5.2", 47 | "@types/lodash": "^4.14.195", 48 | "@types/micromatch": "^4.0.2", 49 | "@types/node": "^22.0.0", 50 | "@types/uuid": "^10.0.0", 51 | "@typescript-eslint/eslint-plugin": "^7.0.0", 52 | "@typescript-eslint/parser": "^7.0.0", 53 | "@vercel/ncc": "^0.38.0", 54 | "eslint": "^8.43.0", 55 | "eslint-config-prettier": "^10.0.0", 56 | "eslint-plugin-github": "^5.0.0", 57 | "eslint-plugin-jest": "^28.0.0", 58 | "eslint-plugin-prettier": "^5.0.0-alpha.2", 59 | "jest": "^29.5.0", 60 | "prettier": "^3.0.0", 61 | "ts-jest": "^29.1.0", 62 | "typescript": "^5.1.3" 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/__tests__/__snapshots__/inputs.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`getInputs should correctly parse boolean inputs 1`] = ` 4 | { 5 | "apiUrl": "", 6 | "baseSha": "", 7 | "diffRelative": "false", 8 | "dirNames": "false", 9 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": "false", 10 | "dirNamesExcludeCurrentDir": "false", 11 | "dirNamesIncludeFiles": "", 12 | "dirNamesIncludeFilesSeparator": "", 13 | "escapeJson": false, 14 | "excludeSubmodules": "false", 15 | "failOnInitialDiffError": "false", 16 | "failOnSubmoduleDiffError": "false", 17 | "fetchAdditionalSubmoduleHistory": "false", 18 | "fetchMissingHistoryMaxRetries": 20, 19 | "files": "", 20 | "filesFromSourceFile": "", 21 | "filesFromSourceFileSeparator": "", 22 | "filesIgnore": "", 23 | "filesIgnoreFromSourceFile": "", 24 | "filesIgnoreFromSourceFileSeparator": "", 25 | "filesIgnoreSeparator": "", 26 | "filesIgnoreYaml": "", 27 | "filesIgnoreYamlFromSourceFile": "", 28 | "filesIgnoreYamlFromSourceFileSeparator": "", 29 
| "filesSeparator": "", 30 | "filesYaml": "", 31 | "filesYamlFromSourceFile": "", 32 | "filesYamlFromSourceFileSeparator": "", 33 | "includeAllOldNewRenamedFiles": "false", 34 | "json": true, 35 | "negationPatternsFirst": "false", 36 | "oldNewFilesSeparator": " ", 37 | "oldNewSeparator": ",", 38 | "outputDir": "", 39 | "outputRenamedFilesAsDeletedAndAdded": "false", 40 | "path": ".", 41 | "quotepath": "false", 42 | "recoverDeletedFiles": "false", 43 | "recoverDeletedFilesToDestination": "", 44 | "recoverFiles": "", 45 | "recoverFilesIgnore": "", 46 | "recoverFilesIgnoreSeparator": " 47 | ", 48 | "recoverFilesSeparator": " 49 | ", 50 | "safeOutput": "false", 51 | "separator": "", 52 | "sha": "", 53 | "since": "", 54 | "sinceLastRemoteCommit": "false", 55 | "skipInitialFetch": "true", 56 | "tagsIgnorePattern": "", 57 | "tagsPattern": "*", 58 | "token": "", 59 | "until": "", 60 | "usePosixPathSeparator": "false", 61 | "useRestApi": "false", 62 | "writeOutputFiles": "false", 63 | } 64 | `; 65 | 66 | exports[`getInputs should correctly parse numeric inputs 1`] = ` 67 | { 68 | "apiUrl": "", 69 | "baseSha": "", 70 | "diffRelative": true, 71 | "dirNames": false, 72 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 73 | "dirNamesExcludeCurrentDir": false, 74 | "dirNamesIncludeFiles": "", 75 | "dirNamesIncludeFilesSeparator": "", 76 | "dirNamesMaxDepth": 2, 77 | "escapeJson": false, 78 | "excludeSubmodules": false, 79 | "failOnInitialDiffError": false, 80 | "failOnSubmoduleDiffError": false, 81 | "fetchAdditionalSubmoduleHistory": false, 82 | "fetchDepth": 5, 83 | "files": "", 84 | "filesFromSourceFile": "", 85 | "filesFromSourceFileSeparator": "", 86 | "filesIgnore": "", 87 | "filesIgnoreFromSourceFile": "", 88 | "filesIgnoreFromSourceFileSeparator": "", 89 | "filesIgnoreSeparator": "", 90 | "filesIgnoreYaml": "", 91 | "filesIgnoreYamlFromSourceFile": "", 92 | "filesIgnoreYamlFromSourceFileSeparator": "", 93 | "filesSeparator": "", 94 | "filesYaml": "", 95 | "filesYamlFromSourceFile": "", 96 | "filesYamlFromSourceFileSeparator": "", 97 | "includeAllOldNewRenamedFiles": false, 98 | "json": false, 99 | "negationPatternsFirst": false, 100 | "oldNewFilesSeparator": "", 101 | "oldNewSeparator": "", 102 | "outputDir": "", 103 | "outputRenamedFilesAsDeletedAndAdded": false, 104 | "path": "", 105 | "quotepath": true, 106 | "recoverDeletedFiles": false, 107 | "recoverDeletedFilesToDestination": "", 108 | "recoverFiles": "", 109 | "recoverFilesIgnore": "", 110 | "recoverFilesIgnoreSeparator": "", 111 | "recoverFilesSeparator": "", 112 | "safeOutput": false, 113 | "separator": "", 114 | "sha": "", 115 | "since": "", 116 | "sinceLastRemoteCommit": false, 117 | "skipInitialFetch": false, 118 | "tagsIgnorePattern": "", 119 | "tagsPattern": "", 120 | "token": "", 121 | "until": "", 122 | "usePosixPathSeparator": false, 123 | "useRestApi": false, 124 | "writeOutputFiles": false, 125 | } 126 | `; 127 | 128 | exports[`getInputs should correctly parse string inputs 1`] = ` 129 | { 130 | "apiUrl": "https://api.github.com", 131 | "baseSha": "", 132 | "diffRelative": true, 133 | "dirNames": false, 134 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 135 | "dirNamesExcludeCurrentDir": false, 136 | "dirNamesIncludeFiles": "", 137 | "dirNamesIncludeFilesSeparator": "", 138 | "escapeJson": false, 139 | "excludeSubmodules": false, 140 | "failOnInitialDiffError": false, 141 | "failOnSubmoduleDiffError": false, 142 | "fetchAdditionalSubmoduleHistory": false, 143 | "files": "", 144 | "filesFromSourceFile": "", 145 | 
"filesFromSourceFileSeparator": "", 146 | "filesIgnore": "", 147 | "filesIgnoreFromSourceFile": "", 148 | "filesIgnoreFromSourceFileSeparator": "", 149 | "filesIgnoreSeparator": "", 150 | "filesIgnoreYaml": "", 151 | "filesIgnoreYamlFromSourceFile": "", 152 | "filesIgnoreYamlFromSourceFileSeparator": "", 153 | "filesSeparator": "", 154 | "filesYaml": "", 155 | "filesYamlFromSourceFile": "", 156 | "filesYamlFromSourceFileSeparator": "", 157 | "includeAllOldNewRenamedFiles": false, 158 | "json": false, 159 | "negationPatternsFirst": false, 160 | "oldNewFilesSeparator": "", 161 | "oldNewSeparator": "", 162 | "outputDir": "", 163 | "outputRenamedFilesAsDeletedAndAdded": false, 164 | "path": "", 165 | "quotepath": true, 166 | "recoverDeletedFiles": false, 167 | "recoverDeletedFilesToDestination": "", 168 | "recoverFiles": "", 169 | "recoverFilesIgnore": "", 170 | "recoverFilesIgnoreSeparator": "", 171 | "recoverFilesSeparator": "", 172 | "safeOutput": false, 173 | "separator": "", 174 | "sha": "", 175 | "since": "", 176 | "sinceLastRemoteCommit": false, 177 | "skipInitialFetch": false, 178 | "tagsIgnorePattern": "", 179 | "tagsPattern": "", 180 | "token": "token", 181 | "until": "", 182 | "usePosixPathSeparator": false, 183 | "useRestApi": false, 184 | "writeOutputFiles": false, 185 | } 186 | `; 187 | 188 | exports[`getInputs should handle invalid numeric inputs correctly 1`] = ` 189 | { 190 | "apiUrl": "", 191 | "baseSha": "", 192 | "diffRelative": true, 193 | "dirNames": false, 194 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 195 | "dirNamesExcludeCurrentDir": false, 196 | "dirNamesIncludeFiles": "", 197 | "dirNamesIncludeFilesSeparator": "", 198 | "dirNamesMaxDepth": 2, 199 | "escapeJson": false, 200 | "excludeSubmodules": false, 201 | "failOnInitialDiffError": false, 202 | "failOnSubmoduleDiffError": false, 203 | "fetchAdditionalSubmoduleHistory": false, 204 | "fetchDepth": NaN, 205 | "files": "", 206 | "filesFromSourceFile": "", 207 | "filesFromSourceFileSeparator": "", 208 | "filesIgnore": "", 209 | "filesIgnoreFromSourceFile": "", 210 | "filesIgnoreFromSourceFileSeparator": "", 211 | "filesIgnoreSeparator": "", 212 | "filesIgnoreYaml": "", 213 | "filesIgnoreYamlFromSourceFile": "", 214 | "filesIgnoreYamlFromSourceFileSeparator": "", 215 | "filesSeparator": "", 216 | "filesYaml": "", 217 | "filesYamlFromSourceFile": "", 218 | "filesYamlFromSourceFileSeparator": "", 219 | "includeAllOldNewRenamedFiles": false, 220 | "json": false, 221 | "negationPatternsFirst": false, 222 | "oldNewFilesSeparator": "", 223 | "oldNewSeparator": "", 224 | "outputDir": "", 225 | "outputRenamedFilesAsDeletedAndAdded": false, 226 | "path": "", 227 | "quotepath": true, 228 | "recoverDeletedFiles": false, 229 | "recoverDeletedFilesToDestination": "", 230 | "recoverFiles": "", 231 | "recoverFilesIgnore": "", 232 | "recoverFilesIgnoreSeparator": "", 233 | "recoverFilesSeparator": "", 234 | "safeOutput": false, 235 | "separator": "", 236 | "sha": "", 237 | "since": "", 238 | "sinceLastRemoteCommit": false, 239 | "skipInitialFetch": false, 240 | "tagsIgnorePattern": "", 241 | "tagsPattern": "", 242 | "token": "", 243 | "until": "", 244 | "usePosixPathSeparator": false, 245 | "useRestApi": false, 246 | "writeOutputFiles": false, 247 | } 248 | `; 249 | 250 | exports[`getInputs should handle negative numeric inputs correctly 1`] = ` 251 | { 252 | "apiUrl": "", 253 | "baseSha": "", 254 | "diffRelative": true, 255 | "dirNames": false, 256 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 257 | 
"dirNamesExcludeCurrentDir": false, 258 | "dirNamesIncludeFiles": "", 259 | "dirNamesIncludeFilesSeparator": "", 260 | "dirNamesMaxDepth": -2, 261 | "escapeJson": false, 262 | "excludeSubmodules": false, 263 | "failOnInitialDiffError": false, 264 | "failOnSubmoduleDiffError": false, 265 | "fetchAdditionalSubmoduleHistory": false, 266 | "fetchDepth": 2, 267 | "files": "", 268 | "filesFromSourceFile": "", 269 | "filesFromSourceFileSeparator": "", 270 | "filesIgnore": "", 271 | "filesIgnoreFromSourceFile": "", 272 | "filesIgnoreFromSourceFileSeparator": "", 273 | "filesIgnoreSeparator": "", 274 | "filesIgnoreYaml": "", 275 | "filesIgnoreYamlFromSourceFile": "", 276 | "filesIgnoreYamlFromSourceFileSeparator": "", 277 | "filesSeparator": "", 278 | "filesYaml": "", 279 | "filesYamlFromSourceFile": "", 280 | "filesYamlFromSourceFileSeparator": "", 281 | "includeAllOldNewRenamedFiles": false, 282 | "json": false, 283 | "negationPatternsFirst": false, 284 | "oldNewFilesSeparator": "", 285 | "oldNewSeparator": "", 286 | "outputDir": "", 287 | "outputRenamedFilesAsDeletedAndAdded": false, 288 | "path": "", 289 | "quotepath": true, 290 | "recoverDeletedFiles": false, 291 | "recoverDeletedFilesToDestination": "", 292 | "recoverFiles": "", 293 | "recoverFilesIgnore": "", 294 | "recoverFilesIgnoreSeparator": "", 295 | "recoverFilesSeparator": "", 296 | "safeOutput": false, 297 | "separator": "", 298 | "sha": "", 299 | "since": "", 300 | "sinceLastRemoteCommit": false, 301 | "skipInitialFetch": false, 302 | "tagsIgnorePattern": "", 303 | "tagsPattern": "", 304 | "token": "", 305 | "until": "", 306 | "usePosixPathSeparator": false, 307 | "useRestApi": false, 308 | "writeOutputFiles": false, 309 | } 310 | `; 311 | 312 | exports[`getInputs should return default values when no inputs are provided 1`] = ` 313 | { 314 | "apiUrl": "", 315 | "baseSha": "", 316 | "diffRelative": true, 317 | "dirNames": false, 318 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 319 | "dirNamesExcludeCurrentDir": false, 320 | "dirNamesIncludeFiles": "", 321 | "dirNamesIncludeFilesSeparator": "", 322 | "escapeJson": false, 323 | "excludeSubmodules": false, 324 | "failOnInitialDiffError": false, 325 | "failOnSubmoduleDiffError": false, 326 | "fetchAdditionalSubmoduleHistory": false, 327 | "fetchMissingHistoryMaxRetries": 20, 328 | "files": "", 329 | "filesFromSourceFile": "", 330 | "filesFromSourceFileSeparator": "", 331 | "filesIgnore": "", 332 | "filesIgnoreFromSourceFile": "", 333 | "filesIgnoreFromSourceFileSeparator": "", 334 | "filesIgnoreSeparator": "", 335 | "filesIgnoreYaml": "", 336 | "filesIgnoreYamlFromSourceFile": "", 337 | "filesIgnoreYamlFromSourceFileSeparator": "", 338 | "filesSeparator": "", 339 | "filesYaml": "", 340 | "filesYamlFromSourceFile": "", 341 | "filesYamlFromSourceFileSeparator": "", 342 | "includeAllOldNewRenamedFiles": false, 343 | "json": false, 344 | "negationPatternsFirst": false, 345 | "oldNewFilesSeparator": " ", 346 | "oldNewSeparator": ",", 347 | "outputDir": "", 348 | "outputRenamedFilesAsDeletedAndAdded": false, 349 | "path": ".", 350 | "quotepath": true, 351 | "recoverDeletedFiles": false, 352 | "recoverDeletedFilesToDestination": "", 353 | "recoverFiles": "", 354 | "recoverFilesIgnore": "", 355 | "recoverFilesIgnoreSeparator": " 356 | ", 357 | "recoverFilesSeparator": " 358 | ", 359 | "safeOutput": false, 360 | "separator": "", 361 | "sha": "", 362 | "since": "", 363 | "sinceLastRemoteCommit": false, 364 | "skipInitialFetch": false, 365 | "tagsIgnorePattern": "", 366 | "tagsPattern": 
"*", 367 | "token": "", 368 | "until": "", 369 | "usePosixPathSeparator": false, 370 | "useRestApi": false, 371 | "writeOutputFiles": false, 372 | } 373 | `; 374 | -------------------------------------------------------------------------------- /src/__tests__/inputs.test.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {getInputs, Inputs} from '../inputs' 3 | import {DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS} from '../constant' 4 | 5 | jest.mock('@actions/core') 6 | 7 | describe('getInputs', () => { 8 | afterEach(() => { 9 | jest.clearAllMocks() 10 | }) 11 | 12 | test('should return default values when no inputs are provided', () => { 13 | ;(core.getInput as jest.Mock).mockImplementation(name => { 14 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 15 | return g[1].toUpperCase() 16 | }) as keyof Inputs 17 | 18 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 19 | '') as string 20 | }) 21 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 22 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 23 | return g[1].toUpperCase() 24 | }) as keyof Inputs 25 | 26 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 27 | false) as boolean 28 | }) 29 | expect(getInputs()).toMatchSnapshot() 30 | }) 31 | 32 | test('should correctly parse boolean inputs', () => { 33 | ;(core.getInput as jest.Mock).mockImplementation(name => { 34 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 35 | return g[1].toUpperCase() 36 | }) as keyof Inputs 37 | 38 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 39 | '') as string 40 | }) 41 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 42 | switch (name) { 43 | case 'matrix': 44 | return 'true' 45 | case 'skip_initial_fetch': 46 | return 'true' 47 | default: 48 | return 'false' 49 | } 50 | }) 51 | expect(getInputs()).toMatchSnapshot() 52 | }) 53 | 54 | test('should handle matrix alias correctly', () => { 55 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 56 | return name === 'matrix' ? 
'true' : 'false' 57 | }) 58 | 59 | const inputs = getInputs() 60 | expect(inputs).toHaveProperty('json', true) 61 | expect(inputs).toHaveProperty('escapeJson', false) 62 | }) 63 | 64 | test('should correctly parse string inputs', () => { 65 | ;(core.getInput as jest.Mock).mockImplementation(name => { 66 | switch (name) { 67 | case 'token': 68 | return 'token' 69 | case 'api_url': 70 | return 'https://api.github.com' 71 | default: 72 | return '' 73 | } 74 | }) 75 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 76 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 77 | return g[1].toUpperCase() 78 | }) as keyof Inputs 79 | 80 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 81 | false) as boolean 82 | }) 83 | expect(getInputs()).toMatchSnapshot() 84 | }) 85 | 86 | test('should correctly parse numeric inputs', () => { 87 | ;(core.getInput as jest.Mock).mockImplementation(name => { 88 | switch (name) { 89 | case 'fetch_depth': 90 | return '5' 91 | case 'dir_names_max_depth': 92 | return '2' 93 | default: 94 | return '' 95 | } 96 | }) 97 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 98 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 99 | return g[1].toUpperCase() 100 | }) as keyof Inputs 101 | 102 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 103 | false) as boolean 104 | }) 105 | expect(getInputs()).toMatchSnapshot() 106 | }) 107 | 108 | test('should handle invalid numeric inputs correctly', () => { 109 | ;(core.getInput as jest.Mock).mockImplementation(name => { 110 | // TODO: Add validation for invalid numbers which should result in an error instead of NaN 111 | switch (name) { 112 | case 'fetch_depth': 113 | return 'invalid' 114 | case 'dir_names_max_depth': 115 | return '2' 116 | default: 117 | return '' 118 | } 119 | }) 120 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 121 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 122 | return g[1].toUpperCase() 123 | }) as keyof Inputs 124 | 125 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 126 | false) as boolean 127 | }) 128 | expect(getInputs()).toMatchSnapshot() 129 | }) 130 | 131 | test('should handle negative numeric inputs correctly', () => { 132 | ;(core.getInput as jest.Mock).mockImplementation(name => { 133 | // TODO: Add validation for negative numbers which should result in an error 134 | switch (name) { 135 | case 'fetch_depth': 136 | return '-5' 137 | case 'dir_names_max_depth': 138 | return '-2' 139 | default: 140 | return '' 141 | } 142 | }) 143 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 144 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 145 | return g[1].toUpperCase() 146 | }) as keyof Inputs 147 | 148 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 149 | false) as boolean 150 | }) 151 | expect(getInputs()).toMatchSnapshot() 152 | }) 153 | }) 154 | -------------------------------------------------------------------------------- /src/__tests__/utils.test.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as exec from '@actions/exec' 3 | import {ChangeTypeEnum} from '../changedFiles' 4 | import {Inputs} from '../inputs' 5 | import { 6 | getDirname, 7 | getDirnameMaxDepth, 8 | getFilteredChangedFiles, 9 | getPreviousGitTag, 10 | normalizeSeparators, 11 | warnUnsupportedRESTAPIInputs 12 | } from 
'../utils' 13 | 14 | const originalPlatform = process.platform 15 | 16 | function mockedPlatform(platform: string): void { 17 | Object.defineProperty(process, 'platform', { 18 | value: platform 19 | }) 20 | } 21 | 22 | describe('utils test', () => { 23 | afterEach(() => { 24 | Object.defineProperty(process, 'platform', { 25 | value: originalPlatform 26 | }) 27 | }) 28 | 29 | describe('getDirnameMaxDepth_function', () => { 30 | // Tests that the function returns the correct dirname when the relative path has multiple directories 31 | it('test_multiple_directories', () => { 32 | const result = getDirnameMaxDepth({ 33 | relativePath: 'path/to/some/file', 34 | dirNamesMaxDepth: 2, 35 | excludeCurrentDir: false 36 | }) 37 | expect(result).toEqual('path/to') 38 | }) 39 | 40 | // Tests that the function returns the correct dirname when the relative path has only one directory 41 | it('test_single_directory', () => { 42 | const result = getDirnameMaxDepth({ 43 | relativePath: 'path/to', 44 | dirNamesMaxDepth: 1, 45 | excludeCurrentDir: false 46 | }) 47 | expect(result).toEqual('path') 48 | }) 49 | 50 | // Tests that the function returns the correct dirname when the relative path has no directories 51 | it('test_no_directories', () => { 52 | const result = getDirnameMaxDepth({ 53 | relativePath: 'file.txt', 54 | dirNamesMaxDepth: 1, 55 | excludeCurrentDir: false 56 | }) 57 | expect(result).toEqual('.') 58 | }) 59 | 60 | // Tests that the function returns the correct dirname when dirNamesMaxDepth is set to a value less than the number of directories in the relative path 61 | it('test_dirnames_max_depth_less_than_num_directories', () => { 62 | const result = getDirnameMaxDepth({ 63 | relativePath: 'path/to/some/file', 64 | dirNamesMaxDepth: 1, 65 | excludeCurrentDir: false 66 | }) 67 | expect(result).toEqual('path') 68 | }) 69 | 70 | // Tests that the function returns an empty string when excludeCurrentDir is true and the output is '.' 71 | it('test_exclude_current_dir_is_true_and_output_is_dot', () => { 72 | const result = getDirnameMaxDepth({ 73 | relativePath: '.', 74 | dirNamesMaxDepth: 1, 75 | excludeCurrentDir: true 76 | }) 77 | expect(result).toEqual('') 78 | }) 79 | 80 | // Tests that the function returns the correct dirname when the relative path is a Windows drive root and excludeCurrentDir is true 81 | it('test_windows_drive_root_and_exclude_current_dir_is_true', () => { 82 | mockedPlatform('win32') 83 | const result = getDirnameMaxDepth({ 84 | relativePath: 'C:\\', 85 | dirNamesMaxDepth: 1, 86 | excludeCurrentDir: true 87 | }) 88 | expect(result).toEqual('') 89 | }) 90 | 91 | // Tests that getDirnameMaxDepth handles a relative path with a trailing separator correctly 92 | it('test_trailing_separator', () => { 93 | const input = { 94 | relativePath: 'path/to/dir/', 95 | dirNamesMaxDepth: 2, 96 | excludeCurrentDir: true 97 | } 98 | const expectedOutput = 'path/to' 99 | const actualOutput = getDirnameMaxDepth(input) 100 | expect(actualOutput).toEqual(expectedOutput) 101 | }) 102 | 103 | // Tests that getDirnameMaxDepth returns an empty string when excludeCurrentDir is true and the output is '.' 
104 | it('test_no_directory_exclude_current_dir', () => { 105 | const input = { 106 | relativePath: 'file', 107 | excludeCurrentDir: true 108 | } 109 | const expectedOutput = '' 110 | const actualOutput = getDirnameMaxDepth(input) 111 | expect(actualOutput).toEqual(expectedOutput) 112 | }) 113 | 114 | // Tests that getDirnameMaxDepth returns the correct output for a Windows UNC root path 115 | it('test_windows_unc_root', () => { 116 | mockedPlatform('win32') 117 | const input = { 118 | relativePath: '\\hello', 119 | dirNamesMaxDepth: 2, 120 | excludeCurrentDir: true 121 | } 122 | const expectedOutput = '' 123 | expect(getDirnameMaxDepth(input)).toEqual(expectedOutput) 124 | }) 125 | 126 | // Tests that getDirnameMaxDepth returns an empty string when given a Windows UNC root and excludeCurrentDir is true 127 | it('test_windows_unc_root_exclude_current_dir', () => { 128 | mockedPlatform('win32') 129 | const relativePath = '\\hello' 130 | const result = getDirnameMaxDepth({ 131 | relativePath, 132 | excludeCurrentDir: true 133 | }) 134 | expect(result).toEqual('') 135 | }) 136 | 137 | // Tests that getDirnameMaxDepth returns the correct dirname with a relative path that contains both forward and backward slashes 138 | it('test_relative_path_with_slashes', () => { 139 | const relativePath = 'path/to\\file' 140 | const expectedOutput = 'path' 141 | const actualOutput = getDirnameMaxDepth({relativePath}) 142 | expect(actualOutput).toEqual(expectedOutput) 143 | }) 144 | 145 | // Tests that getDirnameMaxDepth returns the correct dirname for a relative path that contains special characters 146 | it('test_special_characters', () => { 147 | const relativePath = 148 | 'path/with/special/characters/!@#$%^&*()_+{}|:<>?[];,./' 149 | const expectedDirname = 'path/with/special/characters' 150 | const actualDirname = getDirnameMaxDepth({relativePath}) 151 | expect(actualDirname).toEqual(expectedDirname) 152 | }) 153 | }) 154 | 155 | describe('getDirname_function', () => { 156 | // Tests that the function returns the correct dirname for a valid path 157 | it('test valid path', () => { 158 | expect(getDirname('/path/to/file')).toEqual('/path/to') 159 | }) 160 | 161 | // Tests that the function returns the correct dirname for a valid Windows UNC root path 162 | it('test windows unc root path', () => { 163 | mockedPlatform('win32') 164 | expect(getDirname('\\helloworld')).toEqual('.') 165 | }) 166 | 167 | // Tests that the function returns the correct dirname for a path with a trailing slash 168 | it('test path with trailing slash', () => { 169 | expect(getDirname('/path/to/file/')).toEqual('/path/to') 170 | }) 171 | 172 | // Tests that the function returns the correct dirname for a Windows UNC root path with a trailing slash 173 | it('test windows unc root path with trailing slash', () => { 174 | mockedPlatform('win32') 175 | expect(getDirname('\\hello\\world\\')).toEqual('.') 176 | }) 177 | 178 | // Tests that the function returns the correct dirname for a path with multiple slashes 179 | it('test path with multiple slashes', () => { 180 | expect(getDirname('/path//to/file')).toEqual('/path/to') 181 | }) 182 | 183 | // Tests that the function returns the correct dirname for a Windows UNC root path with multiple slashes 184 | it('test windows unc root path with multiple slashes', () => { 185 | mockedPlatform('win32') 186 | expect(getDirname('\\hello\\world')).toEqual('.') 187 | }) 188 | }) 189 | 190 | describe('normalizeSeparators_function', () => { 191 | // Tests that forward slashes are normalized on
Linux 192 | it('test forward slashes linux', () => { 193 | const input = 'path/to/file' 194 | const expectedOutput = 'path/to/file' 195 | const actualOutput = normalizeSeparators(input) 196 | expect(actualOutput).toEqual(expectedOutput) 197 | }) 198 | 199 | // Tests that backslashes are normalized on Windows 200 | it('test backslashes windows', () => { 201 | mockedPlatform('win32') 202 | const input = 'path\\to\\file' 203 | const expectedOutput = 'path\\to\\file' 204 | const actualOutput = normalizeSeparators(input) 205 | expect(actualOutput).toEqual(expectedOutput) 206 | }) 207 | 208 | // Tests that forward slashes are normalized on Windows 209 | it('test forward slashes windows', () => { 210 | mockedPlatform('win32') 211 | const input = 'path/to/file' 212 | const expectedOutput = 'path\\to\\file' 213 | const actualOutput = normalizeSeparators(input) 214 | expect(actualOutput).toEqual(expectedOutput) 215 | }) 216 | 217 | // Tests that mixed slashes are normalized on Windows 218 | it('test mixed slashes windows', () => { 219 | mockedPlatform('win32') 220 | const input = 'path\\to/file' 221 | const expectedOutput = 'path\\to\\file' 222 | const actualOutput = normalizeSeparators(input) 223 | expect(actualOutput).toEqual(expectedOutput) 224 | }) 225 | 226 | // Tests that an empty string returns an empty string 227 | it('test empty string', () => { 228 | const input = '' 229 | const expectedOutput = '' 230 | const actualOutput = normalizeSeparators(input) 231 | expect(actualOutput).toEqual(expectedOutput) 232 | }) 233 | 234 | // Tests that multiple consecutive slashes are removed 235 | it('test multiple consecutive slashes', () => { 236 | const input = 'path//to//file' 237 | const expectedOutput = 'path/to/file' 238 | const actualOutput = normalizeSeparators(input) 239 | expect(actualOutput).toEqual(expectedOutput) 240 | }) 241 | 242 | // Tests that UNC format is preserved on Windows 243 | it('test unc format windows', () => { 244 | mockedPlatform('win32') 245 | const input = '\\\\hello\\world' 246 | const expectedOutput = '\\\\hello\\world' 247 | const actualOutput = normalizeSeparators(input) 248 | expect(actualOutput).toEqual(expectedOutput) 249 | }) 250 | 251 | // Tests that a drive root is preserved on Windows 252 | it('test drive root windows', () => { 253 | mockedPlatform('win32') 254 | const input = 'C:\\' 255 | const expectedOutput = 'C:\\' 256 | const actualOutput = normalizeSeparators(input) 257 | expect(actualOutput).toEqual(expectedOutput) 258 | }) 259 | }) 260 | 261 | describe('getFilteredChangedFiles', () => { 262 | // Tests that the function returns an empty object when allDiffFiles and filePatterns are empty 263 | it('should return an empty object when allDiffFiles and filePatterns are empty', async () => { 264 | const result = await getFilteredChangedFiles({ 265 | allDiffFiles: { 266 | [ChangeTypeEnum.Added]: [], 267 | [ChangeTypeEnum.Copied]: [], 268 | [ChangeTypeEnum.Deleted]: [], 269 | [ChangeTypeEnum.Modified]: [], 270 | [ChangeTypeEnum.Renamed]: [], 271 | [ChangeTypeEnum.TypeChanged]: [], 272 | [ChangeTypeEnum.Unmerged]: [], 273 | [ChangeTypeEnum.Unknown]: [] 274 | }, 275 | filePatterns: [] 276 | }) 277 | expect(result).toEqual({ 278 | [ChangeTypeEnum.Added]: [], 279 | [ChangeTypeEnum.Copied]: [], 280 | [ChangeTypeEnum.Deleted]: [], 281 | [ChangeTypeEnum.Modified]: [], 282 | [ChangeTypeEnum.Renamed]: [], 283 | [ChangeTypeEnum.TypeChanged]: [], 284 | [ChangeTypeEnum.Unmerged]: [], 285 | [ChangeTypeEnum.Unknown]: [] 286 | }) 287 | }) 288 | 289 | // Tests that the function
returns allDiffFiles when filePatterns is empty 290 | it('should return allDiffFiles when filePatterns is empty', async () => { 291 | const allDiffFiles = { 292 | [ChangeTypeEnum.Added]: ['file1.txt'], 293 | [ChangeTypeEnum.Copied]: [], 294 | [ChangeTypeEnum.Deleted]: [], 295 | [ChangeTypeEnum.Modified]: [], 296 | [ChangeTypeEnum.Renamed]: [], 297 | [ChangeTypeEnum.TypeChanged]: [], 298 | [ChangeTypeEnum.Unmerged]: [], 299 | [ChangeTypeEnum.Unknown]: [] 300 | } 301 | const result = await getFilteredChangedFiles({ 302 | allDiffFiles, 303 | filePatterns: [] 304 | }) 305 | expect(result).toEqual(allDiffFiles) 306 | }) 307 | 308 | // Tests that the function returns an empty object when allDiffFiles is empty 309 | it('should return an empty object when allDiffFiles is empty', async () => { 310 | const result = await getFilteredChangedFiles({ 311 | allDiffFiles: { 312 | [ChangeTypeEnum.Added]: [], 313 | [ChangeTypeEnum.Copied]: [], 314 | [ChangeTypeEnum.Deleted]: [], 315 | [ChangeTypeEnum.Modified]: [], 316 | [ChangeTypeEnum.Renamed]: [], 317 | [ChangeTypeEnum.TypeChanged]: [], 318 | [ChangeTypeEnum.Unmerged]: [], 319 | [ChangeTypeEnum.Unknown]: [] 320 | }, 321 | filePatterns: ['*.txt'] 322 | }) 323 | expect(result).toEqual({ 324 | [ChangeTypeEnum.Added]: [], 325 | [ChangeTypeEnum.Copied]: [], 326 | [ChangeTypeEnum.Deleted]: [], 327 | [ChangeTypeEnum.Modified]: [], 328 | [ChangeTypeEnum.Renamed]: [], 329 | [ChangeTypeEnum.TypeChanged]: [], 330 | [ChangeTypeEnum.Unmerged]: [], 331 | [ChangeTypeEnum.Unknown]: [] 332 | }) 333 | }) 334 | 335 | // Tests that the function returns only the files that match the file patterns on non windows platforms 336 | it('should return only the files that match the file patterns', async () => { 337 | const allDiffFiles = { 338 | [ChangeTypeEnum.Added]: [ 339 | 'file1.txt', 340 | 'file2.md', 341 | 'file3.txt', 342 | 'test/dir/file4.txt', 343 | 'test/dir/file5.txt', 344 | 'dir/file6.md' 345 | ], 346 | [ChangeTypeEnum.Copied]: [], 347 | [ChangeTypeEnum.Deleted]: [], 348 | [ChangeTypeEnum.Modified]: [], 349 | [ChangeTypeEnum.Renamed]: [], 350 | [ChangeTypeEnum.TypeChanged]: [], 351 | [ChangeTypeEnum.Unmerged]: [], 352 | [ChangeTypeEnum.Unknown]: [] 353 | } 354 | const result = await getFilteredChangedFiles({ 355 | allDiffFiles, 356 | filePatterns: ['*.txt'] 357 | }) 358 | expect(result).toEqual({ 359 | [ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'], 360 | [ChangeTypeEnum.Copied]: [], 361 | [ChangeTypeEnum.Deleted]: [], 362 | [ChangeTypeEnum.Modified]: [], 363 | [ChangeTypeEnum.Renamed]: [], 364 | [ChangeTypeEnum.TypeChanged]: [], 365 | [ChangeTypeEnum.Unmerged]: [], 366 | [ChangeTypeEnum.Unknown]: [] 367 | }) 368 | }) 369 | 370 | // Tests that the function returns only the files that match the file patterns on windows 371 | it('should return only the files that match the file patterns on windows', async () => { 372 | mockedPlatform('win32') 373 | const allDiffFiles = { 374 | [ChangeTypeEnum.Added]: [ 375 | 'file1.txt', 376 | 'file2.md', 377 | 'file3.txt', 378 | 'test\\dir\\file4.txt', 379 | 'test\\dir\\file5.txt', 380 | 'dir\\file6.md' 381 | ], 382 | [ChangeTypeEnum.Copied]: [], 383 | [ChangeTypeEnum.Deleted]: [], 384 | [ChangeTypeEnum.Modified]: [], 385 | [ChangeTypeEnum.Renamed]: [], 386 | [ChangeTypeEnum.TypeChanged]: [], 387 | [ChangeTypeEnum.Unmerged]: [], 388 | [ChangeTypeEnum.Unknown]: [] 389 | } 390 | const result = await getFilteredChangedFiles({ 391 | allDiffFiles, 392 | filePatterns: ['*.txt'] 393 | }) 394 | 395 | expect(result).toEqual({ 396 | 
[ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'], 397 | [ChangeTypeEnum.Copied]: [], 398 | [ChangeTypeEnum.Deleted]: [], 399 | [ChangeTypeEnum.Modified]: [], 400 | [ChangeTypeEnum.Renamed]: [], 401 | [ChangeTypeEnum.TypeChanged]: [], 402 | [ChangeTypeEnum.Unmerged]: [], 403 | [ChangeTypeEnum.Unknown]: [] 404 | }) 405 | }) 406 | 407 | // Tests that the function returns only the files that match the file patterns with globstar on non windows platforms 408 | it('should return only the files that match the file patterns with globstar', async () => { 409 | const allDiffFiles = { 410 | [ChangeTypeEnum.Added]: [ 411 | 'file1.txt', 412 | 'file2.md', 413 | 'file3.txt', 414 | 'test/dir/file4.txt', 415 | 'test/dir/file5.txt', 416 | 'dir/file6.md' 417 | ], 418 | [ChangeTypeEnum.Copied]: [], 419 | [ChangeTypeEnum.Deleted]: [], 420 | [ChangeTypeEnum.Modified]: [], 421 | [ChangeTypeEnum.Renamed]: [], 422 | [ChangeTypeEnum.TypeChanged]: [], 423 | [ChangeTypeEnum.Unmerged]: [], 424 | [ChangeTypeEnum.Unknown]: [] 425 | } 426 | const result = await getFilteredChangedFiles({ 427 | allDiffFiles, 428 | filePatterns: ['**.txt'] 429 | }) 430 | expect(result).toEqual({ 431 | [ChangeTypeEnum.Added]: [ 432 | 'file1.txt', 433 | 'file3.txt', 434 | 'test/dir/file4.txt', 435 | 'test/dir/file5.txt' 436 | ], 437 | [ChangeTypeEnum.Copied]: [], 438 | [ChangeTypeEnum.Deleted]: [], 439 | [ChangeTypeEnum.Modified]: [], 440 | [ChangeTypeEnum.Renamed]: [], 441 | [ChangeTypeEnum.TypeChanged]: [], 442 | [ChangeTypeEnum.Unmerged]: [], 443 | [ChangeTypeEnum.Unknown]: [] 444 | }) 445 | }) 446 | 447 | // Tests that the function returns only the files that match the file patterns with globstar on windows 448 | it('should return only the files that match the file patterns with globstar on windows', async () => { 449 | mockedPlatform('win32') 450 | const allDiffFiles = { 451 | [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'], 452 | [ChangeTypeEnum.Copied]: [], 453 | [ChangeTypeEnum.Deleted]: [], 454 | [ChangeTypeEnum.Modified]: [], 455 | [ChangeTypeEnum.Renamed]: [], 456 | [ChangeTypeEnum.TypeChanged]: [], 457 | [ChangeTypeEnum.Unmerged]: [], 458 | [ChangeTypeEnum.Unknown]: [] 459 | } 460 | const result = await getFilteredChangedFiles({ 461 | allDiffFiles, 462 | filePatterns: ['test/**'] 463 | }) 464 | expect(result).toEqual({ 465 | [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'], 466 | [ChangeTypeEnum.Copied]: [], 467 | [ChangeTypeEnum.Deleted]: [], 468 | [ChangeTypeEnum.Modified]: [], 469 | [ChangeTypeEnum.Renamed]: [], 470 | [ChangeTypeEnum.TypeChanged]: [], 471 | [ChangeTypeEnum.Unmerged]: [], 472 | [ChangeTypeEnum.Unknown]: [] 473 | }) 474 | }) 475 | 476 | // Tests that the function returns an empty object when there are no files that match the file patterns 477 | it('should return an empty object when there are no files that match the file patterns', async () => { 478 | const allDiffFiles = { 479 | [ChangeTypeEnum.Added]: ['file1.md', 'file2.md', 'file3.md'], 480 | [ChangeTypeEnum.Copied]: [], 481 | [ChangeTypeEnum.Deleted]: [], 482 | [ChangeTypeEnum.Modified]: [], 483 | [ChangeTypeEnum.Renamed]: [], 484 | [ChangeTypeEnum.TypeChanged]: [], 485 | [ChangeTypeEnum.Unmerged]: [], 486 | [ChangeTypeEnum.Unknown]: [] 487 | } 488 | const result = await getFilteredChangedFiles({ 489 | allDiffFiles, 490 | filePatterns: ['*.txt'] 491 | }) 492 | expect(result).toEqual({ 493 | [ChangeTypeEnum.Added]: [], 494 | [ChangeTypeEnum.Copied]: [], 495 | [ChangeTypeEnum.Deleted]: [], 496 | [ChangeTypeEnum.Modified]: [], 497 | 
[ChangeTypeEnum.Renamed]: [], 498 | [ChangeTypeEnum.TypeChanged]: [], 499 | [ChangeTypeEnum.Unmerged]: [], 500 | [ChangeTypeEnum.Unknown]: [] 501 | }) 502 | }) 503 | 504 | // Tests that the function can handle file names with special characters 505 | it('should handle file names with special characters', async () => { 506 | const allDiffFiles = { 507 | [ChangeTypeEnum.Added]: [ 508 | 'file1.txt', 509 | 'file2 with spaces.txt', 510 | 'file3$$.txt' 511 | ], 512 | [ChangeTypeEnum.Copied]: [], 513 | [ChangeTypeEnum.Deleted]: [], 514 | [ChangeTypeEnum.Modified]: [], 515 | [ChangeTypeEnum.Renamed]: [], 516 | [ChangeTypeEnum.TypeChanged]: [], 517 | [ChangeTypeEnum.Unmerged]: [], 518 | [ChangeTypeEnum.Unknown]: [] 519 | } 520 | const result = await getFilteredChangedFiles({ 521 | allDiffFiles, 522 | filePatterns: ['file2*.txt'] 523 | }) 524 | expect(result).toEqual({ 525 | [ChangeTypeEnum.Added]: ['file2 with spaces.txt'], 526 | [ChangeTypeEnum.Copied]: [], 527 | [ChangeTypeEnum.Deleted]: [], 528 | [ChangeTypeEnum.Modified]: [], 529 | [ChangeTypeEnum.Renamed]: [], 530 | [ChangeTypeEnum.TypeChanged]: [], 531 | [ChangeTypeEnum.Unmerged]: [], 532 | [ChangeTypeEnum.Unknown]: [] 533 | }) 534 | }) 535 | 536 | // Tests that getFilteredChangedFiles correctly filters files using glob patterns 537 | it('should filter files using glob patterns', async () => { 538 | const allDiffFiles = { 539 | [ChangeTypeEnum.Added]: ['test/migrations/test.sql'], 540 | [ChangeTypeEnum.Copied]: [], 541 | [ChangeTypeEnum.Deleted]: [], 542 | [ChangeTypeEnum.Modified]: [], 543 | [ChangeTypeEnum.Renamed]: [], 544 | [ChangeTypeEnum.TypeChanged]: [], 545 | [ChangeTypeEnum.Unmerged]: [], 546 | [ChangeTypeEnum.Unknown]: [] 547 | } 548 | const filePatterns = ['test/migrations/**'] 549 | const filteredFiles = await getFilteredChangedFiles({ 550 | allDiffFiles, 551 | filePatterns 552 | }) 553 | expect(filteredFiles[ChangeTypeEnum.Added]).toEqual([ 554 | 'test/migrations/test.sql' 555 | ]) 556 | }) 557 | 558 | // Tests that getFilteredChangedFiles correctly filters files using ignore glob patterns 559 | it('should filter files using ignore glob patterns', async () => { 560 | const allDiffFiles = { 561 | [ChangeTypeEnum.Added]: [], 562 | [ChangeTypeEnum.Copied]: [], 563 | [ChangeTypeEnum.Deleted]: [], 564 | [ChangeTypeEnum.Modified]: [ 565 | 'assets/scripts/configure-minikube-linux.sh' 566 | ], 567 | [ChangeTypeEnum.Renamed]: [], 568 | [ChangeTypeEnum.TypeChanged]: [], 569 | [ChangeTypeEnum.Unmerged]: [], 570 | [ChangeTypeEnum.Unknown]: [] 571 | } 572 | const filePatterns = [ 573 | 'assets/scripts/**.sh', 574 | '!assets/scripts/configure-minikube-linux.sh' 575 | ] 576 | const filteredFiles = await getFilteredChangedFiles({ 577 | allDiffFiles, 578 | filePatterns 579 | }) 580 | expect(filteredFiles[ChangeTypeEnum.Modified]).toEqual([]) 581 | }) 582 | }) 583 | 584 | describe('warnUnsupportedRESTAPIInputs', () => { 585 | // Warns about unsupported inputs when using the REST API. 
586 | it('should warn about unsupported inputs when using the REST API', async () => { 587 | const inputs: Inputs = { 588 | files: '', 589 | filesSeparator: '\n', 590 | filesFromSourceFile: '', 591 | filesFromSourceFileSeparator: '\n', 592 | filesYaml: '', 593 | filesYamlFromSourceFile: '', 594 | filesYamlFromSourceFileSeparator: '\n', 595 | filesIgnore: '', 596 | filesIgnoreSeparator: '\n', 597 | filesIgnoreFromSourceFile: '', 598 | filesIgnoreFromSourceFileSeparator: '\n', 599 | filesIgnoreYaml: '', 600 | filesIgnoreYamlFromSourceFile: '', 601 | filesIgnoreYamlFromSourceFileSeparator: '\n', 602 | separator: ' ', 603 | includeAllOldNewRenamedFiles: false, 604 | oldNewSeparator: ',', 605 | oldNewFilesSeparator: ' ', 606 | sha: '1313123', 607 | baseSha: '', 608 | since: '', 609 | until: '', 610 | path: '.', 611 | quotepath: true, 612 | diffRelative: true, 613 | dirNames: false, 614 | dirNamesMaxDepth: undefined, 615 | dirNamesExcludeCurrentDir: false, 616 | dirNamesIncludeFiles: '', 617 | dirNamesIncludeFilesSeparator: '\n', 618 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: false, 619 | json: false, 620 | escapeJson: true, 621 | safeOutput: true, 622 | fetchDepth: 50, 623 | fetchAdditionalSubmoduleHistory: false, 624 | sinceLastRemoteCommit: false, 625 | writeOutputFiles: false, 626 | outputDir: '.github/outputs', 627 | outputRenamedFilesAsDeletedAndAdded: false, 628 | recoverDeletedFiles: false, 629 | recoverDeletedFilesToDestination: '', 630 | recoverFiles: '', 631 | recoverFilesSeparator: '\n', 632 | recoverFilesIgnore: '', 633 | recoverFilesIgnoreSeparator: '\n', 634 | token: '${{ github.token }}', 635 | apiUrl: '${{ github.api_url }}', 636 | skipInitialFetch: false, 637 | failOnInitialDiffError: false, 638 | failOnSubmoduleDiffError: false, 639 | negationPatternsFirst: false, 640 | useRestApi: false, 641 | excludeSubmodules: false, 642 | fetchMissingHistoryMaxRetries: 20, 643 | usePosixPathSeparator: false, 644 | tagsPattern: '*', 645 | tagsIgnorePattern: '' 646 | } 647 | 648 | const coreWarningSpy = jest.spyOn(core, 'warning') 649 | 650 | await warnUnsupportedRESTAPIInputs({ 651 | inputs 652 | }) 653 | 654 | expect(coreWarningSpy).toHaveBeenCalledWith( 655 | 'Input "sha" is not supported when using GitHub\'s REST API to get changed files' 656 | ) 657 | 658 | expect(coreWarningSpy).toHaveBeenCalledTimes(1) 659 | }) 660 | }) 661 | describe('getPreviousGitTag', () => { 662 | // const shouldSkip = !!process.env.GITHUB_EVENT_PULL_REQUEST_HEAD_REPO_FORK 663 | // Function returns the second-latest tag and its SHA 664 | // it('should return the second latest tag and its SHA when multiple tags are present', async () => { 665 | // if (shouldSkip) { 666 | // return 667 | // } 668 | // const result = await getPreviousGitTag({ 669 | // cwd: '.', 670 | // tagsPattern: '*', 671 | // tagsIgnorePattern: '', 672 | // currentBranch: 'v1.0.1' 673 | // }) 674 | // expect(result).toEqual({ 675 | // tag: 'v1.0.0', 676 | // sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 677 | // }) 678 | // }) 679 | // // Tags are filtered by a specified pattern when 'tagsPattern' is provided 680 | // it('should filter tags by the specified pattern', async () => { 681 | // if (shouldSkip) { 682 | // return 683 | // } 684 | // const result = await getPreviousGitTag({ 685 | // cwd: '.', 686 | // tagsPattern: 'v1.*', 687 | // tagsIgnorePattern: '', 688 | // currentBranch: 'v1.0.1' 689 | // }) 690 | // expect(result).toEqual({ 691 | // tag: 'v1.0.0', 692 | // sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 693 | // }) 694
| // }) 695 | // // Tags are excluded by a specified ignore pattern when 'tagsIgnorePattern' is provided 696 | // it('should exclude tags by the specified ignore pattern', async () => { 697 | // if (shouldSkip) { 698 | // return 699 | // } 700 | // const result = await getPreviousGitTag({ 701 | // cwd: '.', 702 | // tagsPattern: '*', 703 | // tagsIgnorePattern: 'v0.*.*', 704 | // currentBranch: 'v1.0.1' 705 | // }) 706 | // expect(result).toEqual({ 707 | // tag: 'v1.0.0', 708 | // sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 709 | // }) 710 | // }) 711 | 712 | // No tags are available in the repository 713 | it('should return empty values when no tags are available in the repository', async () => { 714 | jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({ 715 | stdout: '', 716 | stderr: '', 717 | exitCode: 0 718 | }) 719 | const result = await getPreviousGitTag({ 720 | cwd: '.', 721 | tagsPattern: '*', 722 | tagsIgnorePattern: '', 723 | currentBranch: '' 724 | }) 725 | expect(result).toEqual({tag: '', sha: ''}) 726 | }) 727 | 728 | // Only one tag is available, making it impossible to find a previous tag 729 | it('should return empty values when only one tag is available', async () => { 730 | jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({ 731 | stdout: 732 | 'v1.0.1|f0751de6af436d4e79016e2041cf6400e0833653|2021-01-01T00:00:00Z', 733 | stderr: '', 734 | exitCode: 0 735 | }) 736 | const result = await getPreviousGitTag({ 737 | cwd: '.', 738 | tagsPattern: '*', 739 | tagsIgnorePattern: '', 740 | currentBranch: 'v1.0.1' 741 | }) 742 | expect(result).toEqual({tag: '', sha: ''}) 743 | }) 744 | 745 | // Git commands fail and throw errors 746 | it('should throw an error when git commands fail', async () => { 747 | jest 748 | .spyOn(exec, 'getExecOutput') 749 | .mockRejectedValue(new Error('git command failed')) 750 | await expect( 751 | getPreviousGitTag({ 752 | cwd: '.', 753 | tagsPattern: '*', 754 | tagsIgnorePattern: '', 755 | currentBranch: 'v1.0.1' 756 | }) 757 | ).rejects.toThrow('git command failed') 758 | }) 759 | }) 760 | }) 761 | -------------------------------------------------------------------------------- /src/changedFiles.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | import type {RestEndpointMethodTypes} from '@octokit/rest' 4 | import flatten from 'lodash/flatten' 5 | import convertPath from '@stdlib/utils-convert-path' 6 | import mm from 'micromatch' 7 | import * as path from 'path' 8 | import {setOutputsAndGetModifiedAndChangedFilesStatus} from './changedFilesOutput' 9 | import {DiffResult} from './commitSha' 10 | import {Inputs} from './inputs' 11 | import { 12 | canDiffCommits, 13 | getAllChangedFiles, 14 | getDirnameMaxDepth, 15 | getDirNamesIncludeFilesPattern, 16 | getFilteredChangedFiles, 17 | gitRenamedFiles, 18 | gitSubmoduleDiffSHA, 19 | isWindows, 20 | jsonOutput, 21 | setArrayOutput 22 | } from './utils' 23 | 24 | export const processChangedFiles = async ({ 25 | filePatterns, 26 | allDiffFiles, 27 | inputs, 28 | yamlFilePatterns, 29 | workingDirectory 30 | }: { 31 | filePatterns: string[] 32 | allDiffFiles: ChangedFiles 33 | inputs: Inputs 34 | yamlFilePatterns: Record<string, string[]> 35 | workingDirectory?: string 36 | }): Promise<void> => { 37 | if (filePatterns.length > 0) { 38 | core.startGroup('changed-files-patterns') 39 | const allFilteredDiffFiles = await getFilteredChangedFiles({ 40 | allDiffFiles, 41 | filePatterns 42 | }) 43 |
core.debug( 44 | `All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}` 45 | ) 46 | await setOutputsAndGetModifiedAndChangedFilesStatus({ 47 | allDiffFiles, 48 | allFilteredDiffFiles, 49 | inputs, 50 | filePatterns, 51 | workingDirectory 52 | }) 53 | core.info('All Done!') 54 | core.endGroup() 55 | } 56 | 57 | if (Object.keys(yamlFilePatterns).length > 0) { 58 | const modifiedKeys: string[] = [] 59 | const changedKeys: string[] = [] 60 | 61 | for (const key of Object.keys(yamlFilePatterns)) { 62 | core.startGroup(`changed-files-yaml-${key}`) 63 | const allFilteredDiffFiles = await getFilteredChangedFiles({ 64 | allDiffFiles, 65 | filePatterns: yamlFilePatterns[key] 66 | }) 67 | core.debug( 68 | `All filtered diff files for ${key}: ${JSON.stringify( 69 | allFilteredDiffFiles 70 | )}` 71 | ) 72 | const {anyChanged, anyModified} = 73 | await setOutputsAndGetModifiedAndChangedFilesStatus({ 74 | allDiffFiles, 75 | allFilteredDiffFiles, 76 | inputs, 77 | filePatterns: yamlFilePatterns[key], 78 | outputPrefix: key, 79 | workingDirectory 80 | }) 81 | if (anyModified) { 82 | modifiedKeys.push(key) 83 | } 84 | if (anyChanged) { 85 | changedKeys.push(key) 86 | } 87 | 88 | core.info('All Done!') 89 | core.endGroup() 90 | } 91 | 92 | await setArrayOutput({ 93 | key: 'modified_keys', 94 | inputs, 95 | value: modifiedKeys 96 | }) 97 | 98 | await setArrayOutput({ 99 | key: 'changed_keys', 100 | inputs, 101 | value: changedKeys 102 | }) 103 | } 104 | 105 | if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) { 106 | core.startGroup('changed-files-all') 107 | await setOutputsAndGetModifiedAndChangedFilesStatus({ 108 | allDiffFiles, 109 | allFilteredDiffFiles: allDiffFiles, 110 | inputs, 111 | workingDirectory 112 | }) 113 | core.info('All Done!') 114 | core.endGroup() 115 | } 116 | } 117 | 118 | export const getRenamedFiles = async ({ 119 | inputs, 120 | workingDirectory, 121 | diffSubmodule, 122 | diffResult, 123 | submodulePaths 124 | }: { 125 | inputs: Inputs 126 | workingDirectory: string 127 | diffSubmodule: boolean 128 | diffResult: DiffResult 129 | submodulePaths: string[] 130 | }): Promise<{paths: string; count: string}> => { 131 | const renamedFiles = await gitRenamedFiles({ 132 | cwd: workingDirectory, 133 | sha1: diffResult.previousSha, 134 | sha2: diffResult.currentSha, 135 | diff: diffResult.diff, 136 | oldNewSeparator: inputs.oldNewSeparator 137 | }) 138 | 139 | if (diffSubmodule) { 140 | for (const submodulePath of submodulePaths) { 141 | const submoduleShaResult = await gitSubmoduleDiffSHA({ 142 | cwd: workingDirectory, 143 | parentSha1: diffResult.previousSha, 144 | parentSha2: diffResult.currentSha, 145 | submodulePath, 146 | diff: diffResult.diff 147 | }) 148 | 149 | const submoduleWorkingDirectory = path.join( 150 | workingDirectory, 151 | submodulePath 152 | ) 153 | 154 | if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) { 155 | let diff = '...' 156 | 157 | if ( 158 | !(await canDiffCommits({ 159 | cwd: submoduleWorkingDirectory, 160 | sha1: submoduleShaResult.previousSha, 161 | sha2: submoduleShaResult.currentSha, 162 | diff 163 | })) 164 | ) { 165 | let message = `Unable to use three dot diff for: ${submodulePath} submodule. Falling back to two dot diff. 
You can set 'fetch_additional_submodule_history: true' to fetch additional submodule history in order to use three dot diff` 166 | if (inputs.fetchAdditionalSubmoduleHistory) { 167 | message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input` 168 | } 169 | core.info(message) 170 | diff = '..' 171 | } 172 | 173 | const submoduleRenamedFiles = await gitRenamedFiles({ 174 | cwd: submoduleWorkingDirectory, 175 | sha1: submoduleShaResult.previousSha, 176 | sha2: submoduleShaResult.currentSha, 177 | diff, 178 | oldNewSeparator: inputs.oldNewSeparator, 179 | isSubmodule: true, 180 | parentDir: submodulePath 181 | }) 182 | renamedFiles.push(...submoduleRenamedFiles) 183 | } 184 | } 185 | } 186 | 187 | if (inputs.json) { 188 | return { 189 | paths: jsonOutput({value: renamedFiles, shouldEscape: inputs.escapeJson}), 190 | count: renamedFiles.length.toString() 191 | } 192 | } 193 | 194 | return { 195 | paths: renamedFiles.join(inputs.oldNewFilesSeparator), 196 | count: renamedFiles.length.toString() 197 | } 198 | } 199 | 200 | export enum ChangeTypeEnum { 201 | Added = 'A', 202 | Copied = 'C', 203 | Deleted = 'D', 204 | Modified = 'M', 205 | Renamed = 'R', 206 | TypeChanged = 'T', 207 | Unmerged = 'U', 208 | Unknown = 'X' 209 | } 210 | 211 | export type ChangedFiles = { 212 | [key in ChangeTypeEnum]: string[] 213 | } 214 | 215 | export const getAllDiffFiles = async ({ 216 | workingDirectory, 217 | diffSubmodule, 218 | diffResult, 219 | submodulePaths, 220 | outputRenamedFilesAsDeletedAndAdded, 221 | fetchAdditionalSubmoduleHistory, 222 | failOnInitialDiffError, 223 | failOnSubmoduleDiffError 224 | }: { 225 | workingDirectory: string 226 | diffSubmodule: boolean 227 | diffResult: DiffResult 228 | submodulePaths: string[] 229 | outputRenamedFilesAsDeletedAndAdded: boolean 230 | fetchAdditionalSubmoduleHistory: boolean 231 | failOnInitialDiffError: boolean 232 | failOnSubmoduleDiffError: boolean 233 | }): Promise<ChangedFiles> => { 234 | const files = await getAllChangedFiles({ 235 | cwd: workingDirectory, 236 | sha1: diffResult.previousSha, 237 | sha2: diffResult.currentSha, 238 | diff: diffResult.diff, 239 | outputRenamedFilesAsDeletedAndAdded, 240 | failOnInitialDiffError 241 | }) 242 | 243 | if (diffSubmodule) { 244 | for (const submodulePath of submodulePaths) { 245 | const submoduleShaResult = await gitSubmoduleDiffSHA({ 246 | cwd: workingDirectory, 247 | parentSha1: diffResult.previousSha, 248 | parentSha2: diffResult.currentSha, 249 | submodulePath, 250 | diff: diffResult.diff 251 | }) 252 | 253 | const submoduleWorkingDirectory = path.join( 254 | workingDirectory, 255 | submodulePath 256 | ) 257 | 258 | if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) { 259 | let diff = '...' 260 | 261 | if ( 262 | !(await canDiffCommits({ 263 | cwd: submoduleWorkingDirectory, 264 | sha1: submoduleShaResult.previousSha, 265 | sha2: submoduleShaResult.currentSha, 266 | diff 267 | })) 268 | ) { 269 | let message = `Set 'fetch_additional_submodule_history: true' to fetch additional submodule history for: ${submodulePath}` 270 | if (fetchAdditionalSubmoduleHistory) { 271 | message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input` 272 | } 273 | core.warning(message) 274 | diff = '..'
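// A note on the range syntax (sketch, assuming plain git semantics): the '...' and '..'
// strings above map directly onto git's revision ranges, e.g. with hypothetical BASE/HEAD refs:
//   git diff --name-status BASE...HEAD   (three dot: HEAD vs the merge base of BASE and HEAD)
//   git diff --name-status BASE..HEAD    (two dot: direct comparison of the two commits)
// The two dot form does not need the merge base in the local history, which is
// why it is the fallback when the submodule clone is too shallow.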
275 | } 276 | 277 | const submoduleFiles = await getAllChangedFiles({ 278 | cwd: submoduleWorkingDirectory, 279 | sha1: submoduleShaResult.previousSha, 280 | sha2: submoduleShaResult.currentSha, 281 | diff, 282 | isSubmodule: true, 283 | parentDir: submodulePath, 284 | outputRenamedFilesAsDeletedAndAdded, 285 | failOnSubmoduleDiffError 286 | }) 287 | 288 | for (const changeType of Object.keys( 289 | submoduleFiles 290 | ) as ChangeTypeEnum[]) { 291 | if (!files[changeType]) { 292 | files[changeType] = [] 293 | } 294 | files[changeType].push(...submoduleFiles[changeType]) 295 | } 296 | } 297 | } 298 | } 299 | 300 | return files 301 | } 302 | 303 | function* getFilePaths({ 304 | inputs, 305 | filePaths, 306 | dirNamesIncludeFilePatterns 307 | }: { 308 | inputs: Inputs 309 | filePaths: string[] 310 | dirNamesIncludeFilePatterns: string[] 311 | }): Generator<string> { 312 | for (const filePath of filePaths) { 313 | if (inputs.dirNames) { 314 | if (dirNamesIncludeFilePatterns.length > 0) { 315 | const isWin = isWindows() 316 | const matchOptions = {dot: true, windows: isWin, noext: true} 317 | if (mm.isMatch(filePath, dirNamesIncludeFilePatterns, matchOptions)) { 318 | yield filePath 319 | } 320 | } 321 | yield getDirnameMaxDepth({ 322 | relativePath: filePath, 323 | dirNamesMaxDepth: inputs.dirNamesMaxDepth, 324 | excludeCurrentDir: inputs.dirNamesExcludeCurrentDir 325 | }) 326 | } else { 327 | yield filePath 328 | } 329 | } 330 | } 331 | 332 | function* getChangeTypeFilesGenerator({ 333 | inputs, 334 | changedFiles, 335 | changeTypes 336 | }: { 337 | inputs: Inputs 338 | changedFiles: ChangedFiles 339 | changeTypes: ChangeTypeEnum[] 340 | }): Generator<string> { 341 | const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs}) 342 | core.debug( 343 | `Dir names include file patterns: ${JSON.stringify( 344 | dirNamesIncludeFilePatterns 345 | )}` 346 | ) 347 | 348 | for (const changeType of changeTypes) { 349 | const filePaths = changedFiles[changeType] || [] 350 | for (const filePath of getFilePaths({ 351 | inputs, 352 | filePaths, 353 | dirNamesIncludeFilePatterns 354 | })) { 355 | if (isWindows() && inputs.usePosixPathSeparator) { 356 | yield convertPath(filePath, 'mixed') 357 | } else { 358 | yield filePath 359 | } 360 | } 361 | } 362 | } 363 | 364 | export const getChangeTypeFiles = async ({ 365 | inputs, 366 | changedFiles, 367 | changeTypes 368 | }: { 369 | inputs: Inputs 370 | changedFiles: ChangedFiles 371 | changeTypes: ChangeTypeEnum[] 372 | }): Promise<{paths: string[] | string; count: string}> => { 373 | const files = [ 374 | ...new Set(getChangeTypeFilesGenerator({inputs, changedFiles, changeTypes})) 375 | ].filter(Boolean) 376 | 377 | const paths = inputs.json ?
files : files.join(inputs.separator) 378 | 379 | return { 380 | paths, 381 | count: files.length.toString() 382 | } 383 | } 384 | 385 | function* getAllChangeTypeFilesGenerator({ 386 | inputs, 387 | changedFiles 388 | }: { 389 | inputs: Inputs 390 | changedFiles: ChangedFiles 391 | }): Generator<string> { 392 | const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs}) 393 | core.debug( 394 | `Dir names include file patterns: ${JSON.stringify( 395 | dirNamesIncludeFilePatterns 396 | )}` 397 | ) 398 | 399 | const filePaths = flatten(Object.values(changedFiles)) 400 | 401 | for (const filePath of getFilePaths({ 402 | inputs, 403 | filePaths, 404 | dirNamesIncludeFilePatterns 405 | })) { 406 | if (isWindows() && inputs.usePosixPathSeparator) { 407 | yield convertPath(filePath, 'mixed') 408 | } else { 409 | yield filePath 410 | } 411 | } 412 | } 413 | 414 | export const getAllChangeTypeFiles = async ({ 415 | inputs, 416 | changedFiles 417 | }: { 418 | inputs: Inputs 419 | changedFiles: ChangedFiles 420 | }): Promise<{paths: string[] | string; count: string}> => { 421 | const files = [ 422 | ...new Set(getAllChangeTypeFilesGenerator({inputs, changedFiles})) 423 | ].filter(Boolean) 424 | 425 | const paths = inputs.json ? files : files.join(inputs.separator) 426 | 427 | return { 428 | paths, 429 | count: files.length.toString() 430 | } 431 | } 432 | 433 | export const getChangedFilesFromGithubAPI = async ({ 434 | inputs 435 | }: { 436 | inputs: Inputs 437 | }): Promise<ChangedFiles> => { 438 | const octokit = github.getOctokit(inputs.token, { 439 | baseUrl: inputs.apiUrl 440 | }) 441 | const changedFiles: ChangedFiles = { 442 | [ChangeTypeEnum.Added]: [], 443 | [ChangeTypeEnum.Copied]: [], 444 | [ChangeTypeEnum.Deleted]: [], 445 | [ChangeTypeEnum.Modified]: [], 446 | [ChangeTypeEnum.Renamed]: [], 447 | [ChangeTypeEnum.TypeChanged]: [], 448 | [ChangeTypeEnum.Unmerged]: [], 449 | [ChangeTypeEnum.Unknown]: [] 450 | } 451 | 452 | core.info('Getting changed files from GitHub API...') 453 | 454 | const options = octokit.rest.pulls.listFiles.endpoint.merge({ 455 | owner: github.context.repo.owner, 456 | repo: github.context.repo.repo, 457 | pull_number: github.context.payload.pull_request?.number, 458 | per_page: 100 459 | }) 460 | 461 | const paginatedResponse = 462 | await octokit.paginate< 463 | RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0] 464 | >(options) 465 | 466 | core.info(`Found ${paginatedResponse.length} changed files from GitHub API`) 467 | const statusMap: Record<string, ChangeTypeEnum> = { 468 | added: ChangeTypeEnum.Added, 469 | removed: ChangeTypeEnum.Deleted, 470 | modified: ChangeTypeEnum.Modified, 471 | renamed: ChangeTypeEnum.Renamed, 472 | copied: ChangeTypeEnum.Copied, 473 | changed: ChangeTypeEnum.TypeChanged, 474 | unchanged: ChangeTypeEnum.Unmerged 475 | } 476 | 477 | for await (const item of paginatedResponse) { 478 | const changeType: ChangeTypeEnum = 479 | statusMap[item.status] || ChangeTypeEnum.Unknown 480 | 481 | if (changeType === ChangeTypeEnum.Renamed) { 482 | if (inputs.outputRenamedFilesAsDeletedAndAdded) { 483 | changedFiles[ChangeTypeEnum.Deleted].push(item.previous_filename || '') 484 | changedFiles[ChangeTypeEnum.Added].push(item.filename) 485 | } else { 486 | changedFiles[ChangeTypeEnum.Renamed].push(item.filename) 487 | } 488 | } else { 489 | changedFiles[changeType].push(item.filename) 490 | } 491 | } 492 | 493 | return changedFiles 494 | } 495 | -------------------------------------------------------------------------------- /src/changedFilesOutput.ts:
-------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import path from 'path' 3 | import { 4 | ChangedFiles, 5 | ChangeTypeEnum, 6 | getAllChangeTypeFiles, 7 | getChangeTypeFiles 8 | } from './changedFiles' 9 | import {Inputs} from './inputs' 10 | import {getOutputKey, setArrayOutput, setOutput, exists} from './utils' 11 | 12 | const getArrayFromPaths = ( 13 | paths: string | string[], 14 | inputs: Inputs 15 | ): string[] => { 16 | return Array.isArray(paths) ? paths : paths.split(inputs.separator) 17 | } 18 | 19 | export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({ 20 | allDiffFiles, 21 | allFilteredDiffFiles, 22 | inputs, 23 | filePatterns = [], 24 | outputPrefix = '', 25 | workingDirectory 26 | }: { 27 | allDiffFiles: ChangedFiles 28 | allFilteredDiffFiles: ChangedFiles 29 | inputs: Inputs 30 | filePatterns?: string[] 31 | outputPrefix?: string 32 | workingDirectory?: string 33 | }): Promise<{anyModified: boolean; anyChanged: boolean}> => { 34 | const addedFiles = await getChangeTypeFiles({ 35 | inputs, 36 | changedFiles: allFilteredDiffFiles, 37 | changeTypes: [ChangeTypeEnum.Added] 38 | }) 39 | core.debug(`Added files: ${JSON.stringify(addedFiles)}`) 40 | await setOutput({ 41 | key: getOutputKey('added_files', outputPrefix), 42 | value: addedFiles.paths, 43 | writeOutputFiles: inputs.writeOutputFiles, 44 | outputDir: inputs.outputDir, 45 | json: inputs.json, 46 | shouldEscape: inputs.escapeJson, 47 | safeOutput: inputs.safeOutput 48 | }) 49 | await setOutput({ 50 | key: getOutputKey('added_files_count', outputPrefix), 51 | value: addedFiles.count, 52 | writeOutputFiles: inputs.writeOutputFiles, 53 | outputDir: inputs.outputDir 54 | }) 55 | 56 | const copiedFiles = await getChangeTypeFiles({ 57 | inputs, 58 | changedFiles: allFilteredDiffFiles, 59 | changeTypes: [ChangeTypeEnum.Copied] 60 | }) 61 | core.debug(`Copied files: ${JSON.stringify(copiedFiles)}`) 62 | await setOutput({ 63 | key: getOutputKey('copied_files', outputPrefix), 64 | value: copiedFiles.paths, 65 | writeOutputFiles: inputs.writeOutputFiles, 66 | outputDir: inputs.outputDir, 67 | json: inputs.json, 68 | shouldEscape: inputs.escapeJson, 69 | safeOutput: inputs.safeOutput 70 | }) 71 | 72 | await setOutput({ 73 | key: getOutputKey('copied_files_count', outputPrefix), 74 | value: copiedFiles.count, 75 | writeOutputFiles: inputs.writeOutputFiles, 76 | outputDir: inputs.outputDir 77 | }) 78 | 79 | const modifiedFiles = await getChangeTypeFiles({ 80 | inputs, 81 | changedFiles: allFilteredDiffFiles, 82 | changeTypes: [ChangeTypeEnum.Modified] 83 | }) 84 | core.debug(`Modified files: ${JSON.stringify(modifiedFiles)}`) 85 | await setOutput({ 86 | key: getOutputKey('modified_files', outputPrefix), 87 | value: modifiedFiles.paths, 88 | writeOutputFiles: inputs.writeOutputFiles, 89 | outputDir: inputs.outputDir, 90 | json: inputs.json, 91 | shouldEscape: inputs.escapeJson, 92 | safeOutput: inputs.safeOutput 93 | }) 94 | 95 | await setOutput({ 96 | key: getOutputKey('modified_files_count', outputPrefix), 97 | value: modifiedFiles.count, 98 | writeOutputFiles: inputs.writeOutputFiles, 99 | outputDir: inputs.outputDir 100 | }) 101 | 102 | const renamedFiles = await getChangeTypeFiles({ 103 | inputs, 104 | changedFiles: allFilteredDiffFiles, 105 | changeTypes: [ChangeTypeEnum.Renamed] 106 | }) 107 | core.debug(`Renamed files: ${JSON.stringify(renamedFiles)}`) 108 | await setOutput({ 109 | key: getOutputKey('renamed_files', outputPrefix), 110 | 
value: renamedFiles.paths, 111 | writeOutputFiles: inputs.writeOutputFiles, 112 | outputDir: inputs.outputDir, 113 | json: inputs.json, 114 | shouldEscape: inputs.escapeJson, 115 | safeOutput: inputs.safeOutput 116 | }) 117 | 118 | await setOutput({ 119 | key: getOutputKey('renamed_files_count', outputPrefix), 120 | value: renamedFiles.count, 121 | writeOutputFiles: inputs.writeOutputFiles, 122 | outputDir: inputs.outputDir 123 | }) 124 | 125 | const typeChangedFiles = await getChangeTypeFiles({ 126 | inputs, 127 | changedFiles: allFilteredDiffFiles, 128 | changeTypes: [ChangeTypeEnum.TypeChanged] 129 | }) 130 | core.debug(`Type changed files: ${JSON.stringify(typeChangedFiles)}`) 131 | await setOutput({ 132 | key: getOutputKey('type_changed_files', outputPrefix), 133 | value: typeChangedFiles.paths, 134 | writeOutputFiles: inputs.writeOutputFiles, 135 | outputDir: inputs.outputDir, 136 | json: inputs.json, 137 | shouldEscape: inputs.escapeJson, 138 | safeOutput: inputs.safeOutput 139 | }) 140 | 141 | await setOutput({ 142 | key: getOutputKey('type_changed_files_count', outputPrefix), 143 | value: typeChangedFiles.count, 144 | writeOutputFiles: inputs.writeOutputFiles, 145 | outputDir: inputs.outputDir 146 | }) 147 | 148 | const unmergedFiles = await getChangeTypeFiles({ 149 | inputs, 150 | changedFiles: allFilteredDiffFiles, 151 | changeTypes: [ChangeTypeEnum.Unmerged] 152 | }) 153 | core.debug(`Unmerged files: ${JSON.stringify(unmergedFiles)}`) 154 | await setOutput({ 155 | key: getOutputKey('unmerged_files', outputPrefix), 156 | value: unmergedFiles.paths, 157 | writeOutputFiles: inputs.writeOutputFiles, 158 | outputDir: inputs.outputDir, 159 | json: inputs.json, 160 | shouldEscape: inputs.escapeJson, 161 | safeOutput: inputs.safeOutput 162 | }) 163 | 164 | await setOutput({ 165 | key: getOutputKey('unmerged_files_count', outputPrefix), 166 | value: unmergedFiles.count, 167 | writeOutputFiles: inputs.writeOutputFiles, 168 | outputDir: inputs.outputDir 169 | }) 170 | 171 | const unknownFiles = await getChangeTypeFiles({ 172 | inputs, 173 | changedFiles: allFilteredDiffFiles, 174 | changeTypes: [ChangeTypeEnum.Unknown] 175 | }) 176 | core.debug(`Unknown files: ${JSON.stringify(unknownFiles)}`) 177 | await setOutput({ 178 | key: getOutputKey('unknown_files', outputPrefix), 179 | value: unknownFiles.paths, 180 | writeOutputFiles: inputs.writeOutputFiles, 181 | outputDir: inputs.outputDir, 182 | json: inputs.json, 183 | shouldEscape: inputs.escapeJson, 184 | safeOutput: inputs.safeOutput 185 | }) 186 | 187 | await setOutput({ 188 | key: getOutputKey('unknown_files_count', outputPrefix), 189 | value: unknownFiles.count, 190 | writeOutputFiles: inputs.writeOutputFiles, 191 | outputDir: inputs.outputDir 192 | }) 193 | 194 | const allChangedAndModifiedFiles = await getAllChangeTypeFiles({ 195 | inputs, 196 | changedFiles: allFilteredDiffFiles 197 | }) 198 | core.debug( 199 | `All changed and modified files: ${JSON.stringify( 200 | allChangedAndModifiedFiles 201 | )}` 202 | ) 203 | await setOutput({ 204 | key: getOutputKey('all_changed_and_modified_files', outputPrefix), 205 | value: allChangedAndModifiedFiles.paths, 206 | writeOutputFiles: inputs.writeOutputFiles, 207 | outputDir: inputs.outputDir, 208 | json: inputs.json, 209 | shouldEscape: inputs.escapeJson, 210 | safeOutput: inputs.safeOutput 211 | }) 212 | 213 | await setOutput({ 214 | key: getOutputKey('all_changed_and_modified_files_count', outputPrefix), 215 | value: allChangedAndModifiedFiles.count, 216 | writeOutputFiles: 
inputs.writeOutputFiles, 217 | outputDir: inputs.outputDir 218 | }) 219 | 220 | const allChangedFiles = await getChangeTypeFiles({ 221 | inputs, 222 | changedFiles: allFilteredDiffFiles, 223 | changeTypes: [ 224 | ChangeTypeEnum.Added, 225 | ChangeTypeEnum.Copied, 226 | ChangeTypeEnum.Modified, 227 | ChangeTypeEnum.Renamed 228 | ] 229 | }) 230 | core.debug(`All changed files: ${JSON.stringify(allChangedFiles)}`) 231 | await setOutput({ 232 | key: getOutputKey('all_changed_files', outputPrefix), 233 | value: allChangedFiles.paths, 234 | writeOutputFiles: inputs.writeOutputFiles, 235 | outputDir: inputs.outputDir, 236 | json: inputs.json, 237 | shouldEscape: inputs.escapeJson, 238 | safeOutput: inputs.safeOutput 239 | }) 240 | 241 | await setOutput({ 242 | key: getOutputKey('all_changed_files_count', outputPrefix), 243 | value: allChangedFiles.count, 244 | writeOutputFiles: inputs.writeOutputFiles, 245 | outputDir: inputs.outputDir 246 | }) 247 | 248 | await setOutput({ 249 | key: getOutputKey('any_changed', outputPrefix), 250 | value: allChangedFiles.paths.length > 0, 251 | writeOutputFiles: inputs.writeOutputFiles, 252 | outputDir: inputs.outputDir, 253 | json: inputs.json 254 | }) 255 | 256 | const allOtherChangedFiles = await getChangeTypeFiles({ 257 | inputs, 258 | changedFiles: allDiffFiles, 259 | changeTypes: [ 260 | ChangeTypeEnum.Added, 261 | ChangeTypeEnum.Copied, 262 | ChangeTypeEnum.Modified, 263 | ChangeTypeEnum.Renamed 264 | ] 265 | }) 266 | core.debug(`All other changed files: ${JSON.stringify(allOtherChangedFiles)}`) 267 | 268 | const allOtherChangedFilesPaths: string[] = getArrayFromPaths( 269 | allOtherChangedFiles.paths, 270 | inputs 271 | ) 272 | const allChangedFilesPaths: string[] = getArrayFromPaths( 273 | allChangedFiles.paths, 274 | inputs 275 | ) 276 | 277 | const otherChangedFiles = allOtherChangedFilesPaths.filter( 278 | (filePath: string) => !allChangedFilesPaths.includes(filePath) 279 | ) 280 | 281 | const onlyChanged = 282 | otherChangedFiles.length === 0 && 283 | allChangedFiles.paths.length > 0 && 284 | filePatterns.length > 0 285 | 286 | await setOutput({ 287 | key: getOutputKey('only_changed', outputPrefix), 288 | value: onlyChanged, 289 | writeOutputFiles: inputs.writeOutputFiles, 290 | outputDir: inputs.outputDir, 291 | json: inputs.json 292 | }) 293 | 294 | await setArrayOutput({ 295 | key: 'other_changed_files', 296 | inputs, 297 | value: otherChangedFiles, 298 | outputPrefix 299 | }) 300 | 301 | await setOutput({ 302 | key: getOutputKey('other_changed_files_count', outputPrefix), 303 | value: otherChangedFiles.length.toString(), 304 | writeOutputFiles: inputs.writeOutputFiles, 305 | outputDir: inputs.outputDir 306 | }) 307 | 308 | const allModifiedFiles = await getChangeTypeFiles({ 309 | inputs, 310 | changedFiles: allFilteredDiffFiles, 311 | changeTypes: [ 312 | ChangeTypeEnum.Added, 313 | ChangeTypeEnum.Copied, 314 | ChangeTypeEnum.Modified, 315 | ChangeTypeEnum.Renamed, 316 | ChangeTypeEnum.Deleted 317 | ] 318 | }) 319 | core.debug(`All modified files: ${JSON.stringify(allModifiedFiles)}`) 320 | await setOutput({ 321 | key: getOutputKey('all_modified_files', outputPrefix), 322 | value: allModifiedFiles.paths, 323 | writeOutputFiles: inputs.writeOutputFiles, 324 | outputDir: inputs.outputDir, 325 | json: inputs.json, 326 | shouldEscape: inputs.escapeJson, 327 | safeOutput: inputs.safeOutput 328 | }) 329 | 330 | await setOutput({ 331 | key: getOutputKey('all_modified_files_count', outputPrefix), 332 | value: allModifiedFiles.count, 333 | 
writeOutputFiles: inputs.writeOutputFiles, 334 | outputDir: inputs.outputDir 335 | }) 336 | 337 | await setOutput({ 338 | key: getOutputKey('any_modified', outputPrefix), 339 | value: allModifiedFiles.paths.length > 0, 340 | writeOutputFiles: inputs.writeOutputFiles, 341 | outputDir: inputs.outputDir, 342 | json: inputs.json 343 | }) 344 | 345 | const allOtherModifiedFiles = await getChangeTypeFiles({ 346 | inputs, 347 | changedFiles: allDiffFiles, 348 | changeTypes: [ 349 | ChangeTypeEnum.Added, 350 | ChangeTypeEnum.Copied, 351 | ChangeTypeEnum.Modified, 352 | ChangeTypeEnum.Renamed, 353 | ChangeTypeEnum.Deleted 354 | ] 355 | }) 356 | 357 | const allOtherModifiedFilesPaths: string[] = getArrayFromPaths( 358 | allOtherModifiedFiles.paths, 359 | inputs 360 | ) 361 | 362 | const allModifiedFilesPaths: string[] = getArrayFromPaths( 363 | allModifiedFiles.paths, 364 | inputs 365 | ) 366 | 367 | const otherModifiedFiles = allOtherModifiedFilesPaths.filter( 368 | (filePath: string) => !allModifiedFilesPaths.includes(filePath) 369 | ) 370 | 371 | const onlyModified = 372 | otherModifiedFiles.length === 0 && 373 | allModifiedFiles.paths.length > 0 && 374 | filePatterns.length > 0 375 | 376 | await setOutput({ 377 | key: getOutputKey('only_modified', outputPrefix), 378 | value: onlyModified, 379 | writeOutputFiles: inputs.writeOutputFiles, 380 | outputDir: inputs.outputDir, 381 | json: inputs.json 382 | }) 383 | 384 | await setArrayOutput({ 385 | key: 'other_modified_files', 386 | inputs, 387 | value: otherModifiedFiles, 388 | outputPrefix 389 | }) 390 | 391 | await setOutput({ 392 | key: getOutputKey('other_modified_files_count', outputPrefix), 393 | value: otherModifiedFiles.length.toString(), 394 | writeOutputFiles: inputs.writeOutputFiles, 395 | outputDir: inputs.outputDir 396 | }) 397 | 398 | const deletedFiles = await getChangeTypeFiles({ 399 | inputs, 400 | changedFiles: allFilteredDiffFiles, 401 | changeTypes: [ChangeTypeEnum.Deleted] 402 | }) 403 | core.debug(`Deleted files: ${JSON.stringify(deletedFiles)}`) 404 | 405 | if ( 406 | inputs.dirNamesDeletedFilesIncludeOnlyDeletedDirs && 407 | inputs.dirNames && 408 | workingDirectory 409 | ) { 410 | const newDeletedFilesPaths: string[] = [] 411 | for (const deletedPath of getArrayFromPaths(deletedFiles.paths, inputs)) { 412 | const dirPath = path.join(workingDirectory, deletedPath) 413 | core.debug(`Checking if directory exists: ${dirPath}`) 414 | if (!(await exists(dirPath))) { 415 | core.debug(`Directory not found: ${dirPath}`) 416 | newDeletedFilesPaths.push(deletedPath) 417 | } 418 | } 419 | deletedFiles.paths = inputs.json 420 | ? 
newDeletedFilesPaths 421 | : newDeletedFilesPaths.join(inputs.separator) 422 | deletedFiles.count = newDeletedFilesPaths.length.toString() 423 | core.debug(`New deleted files: ${JSON.stringify(deletedFiles)}`) 424 | } 425 | 426 | await setOutput({ 427 | key: getOutputKey('deleted_files', outputPrefix), 428 | value: deletedFiles.paths, 429 | writeOutputFiles: inputs.writeOutputFiles, 430 | outputDir: inputs.outputDir, 431 | json: inputs.json, 432 | shouldEscape: inputs.escapeJson, 433 | safeOutput: inputs.safeOutput 434 | }) 435 | 436 | await setOutput({ 437 | key: getOutputKey('deleted_files_count', outputPrefix), 438 | value: deletedFiles.count, 439 | writeOutputFiles: inputs.writeOutputFiles, 440 | outputDir: inputs.outputDir 441 | }) 442 | 443 | await setOutput({ 444 | key: getOutputKey('any_deleted', outputPrefix), 445 | value: deletedFiles.paths.length > 0, 446 | writeOutputFiles: inputs.writeOutputFiles, 447 | outputDir: inputs.outputDir, 448 | json: inputs.json 449 | }) 450 | 451 | const allOtherDeletedFiles = await getChangeTypeFiles({ 452 | inputs, 453 | changedFiles: allDiffFiles, 454 | changeTypes: [ChangeTypeEnum.Deleted] 455 | }) 456 | 457 | const allOtherDeletedFilesPaths: string[] = getArrayFromPaths( 458 | allOtherDeletedFiles.paths, 459 | inputs 460 | ) 461 | 462 | const deletedFilesPaths: string[] = getArrayFromPaths( 463 | deletedFiles.paths, 464 | inputs 465 | ) 466 | 467 | const otherDeletedFiles = allOtherDeletedFilesPaths.filter( 468 | filePath => !deletedFilesPaths.includes(filePath) 469 | ) 470 | 471 | const onlyDeleted = 472 | otherDeletedFiles.length === 0 && 473 | deletedFiles.paths.length > 0 && 474 | filePatterns.length > 0 475 | 476 | await setOutput({ 477 | key: getOutputKey('only_deleted', outputPrefix), 478 | value: onlyDeleted, 479 | writeOutputFiles: inputs.writeOutputFiles, 480 | outputDir: inputs.outputDir, 481 | json: inputs.json 482 | }) 483 | 484 | await setArrayOutput({ 485 | key: 'other_deleted_files', 486 | inputs, 487 | value: otherDeletedFiles, 488 | outputPrefix 489 | }) 490 | 491 | await setOutput({ 492 | key: getOutputKey('other_deleted_files_count', outputPrefix), 493 | value: otherDeletedFiles.length.toString(), 494 | writeOutputFiles: inputs.writeOutputFiles, 495 | outputDir: inputs.outputDir 496 | }) 497 | 498 | return { 499 | anyModified: allModifiedFiles.paths.length > 0, 500 | anyChanged: allChangedFiles.paths.length > 0 501 | } 502 | } 503 | -------------------------------------------------------------------------------- /src/commitSha.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | 4 | import {Env} from './env' 5 | import {Inputs} from './inputs' 6 | import { 7 | canDiffCommits, 8 | cleanShaInput, 9 | getCurrentBranchName, 10 | getHeadSha, 11 | getParentSha, 12 | getPreviousGitTag, 13 | getRemoteBranchHeadSha, 14 | gitFetch, 15 | gitFetchSubmodules, 16 | gitLog, 17 | verifyCommitSha 18 | } from './utils' 19 | 20 | const getCurrentSHA = async ({ 21 | inputs, 22 | workingDirectory 23 | }: { 24 | inputs: Inputs 25 | workingDirectory: string 26 | }): Promise<string> => { 27 | let currentSha = await cleanShaInput({ 28 | sha: inputs.sha, 29 | cwd: workingDirectory, 30 | token: inputs.token 31 | }) 32 | core.debug('Getting current SHA...') 33 | 34 | if (inputs.until) { 35 | core.debug(`Getting base SHA for '${inputs.until}'...`) 36 | try { 37 | currentSha = await gitLog({ 38 | cwd: workingDirectory, 39 | args: [ 40 |
'--format=%H', 41 | '-n', 42 | '1', 43 | '--date', 44 | 'local', 45 | '--until', 46 | inputs.until 47 | ] 48 | }) 49 | } catch (error) { 50 | core.error( 51 | `Invalid until date: ${inputs.until}. ${(error as Error).message}` 52 | ) 53 | throw error 54 | } 55 | } else { 56 | if (!currentSha) { 57 | if ( 58 | github.context.payload.pull_request?.head?.sha && 59 | (await verifyCommitSha({ 60 | sha: github.context.payload.pull_request?.head?.sha, 61 | cwd: workingDirectory, 62 | showAsErrorMessage: false 63 | })) === 0 64 | ) { 65 | currentSha = github.context.payload.pull_request?.head?.sha 66 | } else if (github.context.eventName === 'merge_group') { 67 | currentSha = github.context.payload.merge_group?.head_sha 68 | } else { 69 | currentSha = await getHeadSha({cwd: workingDirectory}) 70 | } 71 | } 72 | } 73 | 74 | await verifyCommitSha({sha: currentSha, cwd: workingDirectory}) 75 | core.debug(`Current SHA: ${currentSha}`) 76 | 77 | return currentSha 78 | } 79 | 80 | export interface DiffResult { 81 | previousSha: string 82 | currentSha: string 83 | currentBranch: string 84 | targetBranch: string 85 | diff: string 86 | initialCommit?: boolean 87 | } 88 | 89 | interface SHAForNonPullRequestEvent { 90 | inputs: Inputs 91 | env: Env 92 | workingDirectory: string 93 | isShallow: boolean 94 | diffSubmodule: boolean 95 | gitFetchExtraArgs: string[] 96 | isTag: boolean 97 | remoteName: string 98 | } 99 | 100 | export const getSHAForNonPullRequestEvent = async ({ 101 | inputs, 102 | env, 103 | workingDirectory, 104 | isShallow, 105 | diffSubmodule, 106 | gitFetchExtraArgs, 107 | isTag, 108 | remoteName 109 | }: SHAForNonPullRequestEvent): Promise<DiffResult> => { 110 | let targetBranch = env.GITHUB_REF_NAME 111 | let currentBranch = targetBranch 112 | let initialCommit = false 113 | 114 | if (!inputs.skipInitialFetch) { 115 | if (isShallow) { 116 | core.info('Repository is shallow, fetching more history...') 117 | 118 | if (isTag) { 119 | let sourceBranch = '' 120 | 121 | if (github.context.payload.base_ref) { 122 | sourceBranch = github.context.payload.base_ref.replace( 123 | 'refs/heads/', 124 | '' 125 | ) 126 | } else if (github.context.payload.release?.target_commitish) { 127 | sourceBranch = github.context.payload.release?.target_commitish 128 | } 129 | 130 | await gitFetch({ 131 | cwd: workingDirectory, 132 | args: [ 133 | ...gitFetchExtraArgs, 134 | '-u', 135 | '--progress', 136 | `--deepen=${inputs.fetchDepth}`, 137 | remoteName, 138 | `+refs/heads/${sourceBranch}:refs/remotes/${remoteName}/${sourceBranch}` 139 | ] 140 | }) 141 | } else { 142 | await gitFetch({ 143 | cwd: workingDirectory, 144 | args: [ 145 | ...gitFetchExtraArgs, 146 | '-u', 147 | '--progress', 148 | `--deepen=${inputs.fetchDepth}`, 149 | remoteName, 150 | `+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}` 151 | ] 152 | }) 153 | } 154 | 155 | if (diffSubmodule) { 156 | await gitFetchSubmodules({ 157 | cwd: workingDirectory, 158 | args: [ 159 | ...gitFetchExtraArgs, 160 | '-u', 161 | '--progress', 162 | `--deepen=${inputs.fetchDepth}` 163 | ] 164 | }) 165 | } 166 | } else { 167 | if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) { 168 | await gitFetchSubmodules({ 169 | cwd: workingDirectory, 170 | args: [ 171 | ...gitFetchExtraArgs, 172 | '-u', 173 | '--progress', 174 | `--deepen=${inputs.fetchDepth}` 175 | ] 176 | }) 177 | } 178 | } 179 | } 180 | 181 | const currentSha = await getCurrentSHA({inputs, workingDirectory}) 182 | let previousSha = await cleanShaInput({ 183 | sha: inputs.baseSha, 184 | cwd:
workingDirectory, 185 | token: inputs.token 186 | }) 187 | const diff = '..' 188 | const currentBranchName = await getCurrentBranchName({cwd: workingDirectory}) 189 | 190 | if ( 191 | currentBranchName && 192 | currentBranchName !== 'HEAD' && 193 | (currentBranchName !== targetBranch || currentBranchName !== currentBranch) 194 | ) { 195 | targetBranch = currentBranchName 196 | currentBranch = currentBranchName 197 | } 198 | 199 | if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) { 200 | if (previousSha === currentSha) { 201 | core.error( 202 | `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.` 203 | ) 204 | core.error( 205 | `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.` 206 | ) 207 | throw new Error('Similar commit hashes detected.') 208 | } 209 | 210 | core.debug(`Previous SHA: ${previousSha}`) 211 | 212 | return { 213 | previousSha, 214 | currentSha, 215 | currentBranch, 216 | targetBranch, 217 | diff 218 | } 219 | } 220 | 221 | if (!previousSha || previousSha === currentSha) { 222 | core.debug('Getting previous SHA...') 223 | if (inputs.since) { 224 | core.debug(`Getting base SHA for '${inputs.since}'...`) 225 | try { 226 | const allCommitsFrom = await gitLog({ 227 | cwd: workingDirectory, 228 | args: ['--format=%H', '--date', 'local', '--since', inputs.since] 229 | }) 230 | 231 | if (allCommitsFrom) { 232 | const allCommitsFromArray = allCommitsFrom.split('\n') 233 | previousSha = allCommitsFromArray[allCommitsFromArray.length - 1] 234 | } 235 | } catch (error) { 236 | core.error( 237 | `Invalid since date: ${inputs.since}. ${(error as Error).message}` 238 | ) 239 | throw error 240 | } 241 | } else if (isTag) { 242 | core.debug('Getting previous SHA for tag...') 243 | const {sha, tag} = await getPreviousGitTag({ 244 | cwd: workingDirectory, 245 | tagsPattern: inputs.tagsPattern, 246 | tagsIgnorePattern: inputs.tagsIgnorePattern, 247 | currentBranch 248 | }) 249 | previousSha = sha 250 | targetBranch = tag 251 | } else { 252 | if (github.context.eventName === 'merge_group') { 253 | core.debug('Getting previous SHA for merge group...') 254 | previousSha = github.context.payload.merge_group?.base_sha 255 | } else { 256 | core.debug('Getting previous SHA for last remote commit...') 257 | if ( 258 | github.context.payload.forced === 'false' || 259 | !github.context.payload.forced 260 | ) { 261 | previousSha = github.context.payload.before 262 | } 263 | } 264 | 265 | if ( 266 | !previousSha || 267 | previousSha === '0000000000000000000000000000000000000000' 268 | ) { 269 | previousSha = await getParentSha({ 270 | cwd: workingDirectory 271 | }) 272 | } else if ( 273 | (await verifyCommitSha({ 274 | sha: previousSha, 275 | cwd: workingDirectory, 276 | showAsErrorMessage: false 277 | })) !== 0 278 | ) { 279 | core.warning( 280 | `Previous commit ${previousSha} is not valid. 
Using parent commit.` 281 | ) 282 | previousSha = await getParentSha({ 283 | cwd: workingDirectory 284 | }) 285 | } 286 | 287 | if (!previousSha || previousSha === currentSha) { 288 | previousSha = await getParentSha({ 289 | cwd: workingDirectory 290 | }) 291 | 292 | if (!previousSha) { 293 | core.warning('Initial commit detected: no previous commit found.') 294 | initialCommit = true 295 | previousSha = currentSha 296 | } 297 | } 298 | } 299 | } 300 | 301 | await verifyCommitSha({sha: previousSha, cwd: workingDirectory}) 302 | core.debug(`Previous SHA: ${previousSha}`) 303 | 304 | core.debug(`Target branch: ${targetBranch}`) 305 | core.debug(`Current branch: ${currentBranch}`) 306 | 307 | if (!initialCommit && previousSha === currentSha) { 308 | core.error( 309 | `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.` 310 | ) 311 | core.error( 312 | `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.` 313 | ) 314 | throw new Error('Similar commit hashes detected.') 315 | } 316 | 317 | return { 318 | previousSha, 319 | currentSha, 320 | currentBranch, 321 | targetBranch, 322 | diff, 323 | initialCommit 324 | } 325 | } 326 | 327 | interface SHAForPullRequestEvent { 328 | inputs: Inputs 329 | workingDirectory: string 330 | isShallow: boolean 331 | diffSubmodule: boolean 332 | gitFetchExtraArgs: string[] 333 | remoteName: string 334 | } 335 | 336 | export const getSHAForPullRequestEvent = async ({ 337 | inputs, 338 | workingDirectory, 339 | isShallow, 340 | diffSubmodule, 341 | gitFetchExtraArgs, 342 | remoteName 343 | }: SHAForPullRequestEvent): Promise<DiffResult> => { 344 | let targetBranch = github.context.payload.pull_request?.base?.ref 345 | const currentBranch = github.context.payload.pull_request?.head?.ref 346 | if (inputs.sinceLastRemoteCommit) { 347 | targetBranch = currentBranch 348 | } 349 | 350 | if (!inputs.skipInitialFetch) { 351 | core.info('Repository is shallow, fetching more history...') 352 | if (isShallow) { 353 | let prFetchExitCode = await gitFetch({ 354 | cwd: workingDirectory, 355 | args: [ 356 | ...gitFetchExtraArgs, 357 | '-u', 358 | '--progress', 359 | remoteName, 360 | `pull/${github.context.payload.pull_request?.number}/head:${currentBranch}` 361 | ] 362 | }) 363 | 364 | if (prFetchExitCode !== 0) { 365 | prFetchExitCode = await gitFetch({ 366 | cwd: workingDirectory, 367 | args: [ 368 | ...gitFetchExtraArgs, 369 | '-u', 370 | '--progress', 371 | `--deepen=${inputs.fetchDepth}`, 372 | remoteName, 373 | `+refs/heads/${currentBranch}*:refs/remotes/${remoteName}/${currentBranch}*` 374 | ] 375 | }) 376 | } 377 | 378 | if (prFetchExitCode !== 0) { 379 | throw new Error( 380 | 'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository.
See: https://github.com/actions/checkout#usage' 381 | ) 382 | } 383 | core.debug('Fetching target branch...') 384 | await gitFetch({ 385 | cwd: workingDirectory, 386 | args: [ 387 | ...gitFetchExtraArgs, 388 | '-u', 389 | '--progress', 390 | `--deepen=${inputs.fetchDepth}`, 391 | remoteName, 392 | `+refs/heads/${github.context.payload.pull_request?.base?.ref}:refs/remotes/${remoteName}/${github.context.payload.pull_request?.base?.ref}` 393 | ] 394 | }) 395 | 396 | if (diffSubmodule) { 397 | await gitFetchSubmodules({ 398 | cwd: workingDirectory, 399 | args: [ 400 | ...gitFetchExtraArgs, 401 | '-u', 402 | '--progress', 403 | `--deepen=${inputs.fetchDepth}` 404 | ] 405 | }) 406 | } 407 | } else { 408 | if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) { 409 | await gitFetchSubmodules({ 410 | cwd: workingDirectory, 411 | args: [ 412 | ...gitFetchExtraArgs, 413 | '-u', 414 | '--progress', 415 | `--deepen=${inputs.fetchDepth}` 416 | ] 417 | }) 418 | } 419 | } 420 | core.info('Completed fetching more history.') 421 | } 422 | 423 | const currentSha = await getCurrentSHA({inputs, workingDirectory}) 424 | let previousSha = await cleanShaInput({ 425 | sha: inputs.baseSha, 426 | cwd: workingDirectory, 427 | token: inputs.token 428 | }) 429 | let diff = '...' 430 | 431 | if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) { 432 | if (previousSha === currentSha) { 433 | core.error( 434 | `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.` 435 | ) 436 | core.error( 437 | `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.` 438 | ) 439 | throw new Error('Similar commit hashes detected.') 440 | } 441 | 442 | core.debug(`Previous SHA: ${previousSha}`) 443 | 444 | return { 445 | previousSha, 446 | currentSha, 447 | currentBranch, 448 | targetBranch, 449 | diff 450 | } 451 | } 452 | 453 | if (!github.context.payload.pull_request?.base?.ref) { 454 | diff = '..' 455 | } 456 | 457 | if (!previousSha || previousSha === currentSha) { 458 | if (inputs.sinceLastRemoteCommit) { 459 | previousSha = github.context.payload.before 460 | 461 | if ( 462 | !previousSha || 463 | (previousSha && 464 | (await verifyCommitSha({ 465 | sha: previousSha, 466 | cwd: workingDirectory, 467 | showAsErrorMessage: false 468 | })) !== 0) 469 | ) { 470 | core.info( 471 | `Unable to locate the previous commit in the local history for ${github.context.eventName} (${github.context.payload.action}) event. Falling back to the previous commit in the local history.` 472 | ) 473 | 474 | previousSha = await getParentSha({ 475 | cwd: workingDirectory 476 | }) 477 | 478 | if ( 479 | github.context.payload.action && 480 | github.context.payload.action === 'synchronize' && 481 | previousSha && 482 | (!previousSha || 483 | (previousSha && 484 | (await verifyCommitSha({ 485 | sha: previousSha, 486 | cwd: workingDirectory, 487 | showAsErrorMessage: false 488 | })) !== 0)) 489 | ) { 490 | throw new Error( 491 | 'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. 
See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit' 492 | ) 493 | } 494 | 495 | if ( 496 | !previousSha || 497 | (previousSha && 498 | (await verifyCommitSha({ 499 | sha: previousSha, 500 | cwd: workingDirectory, 501 | showAsErrorMessage: false 502 | })) !== 0) 503 | ) { 504 | throw new Error( 505 | 'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit' 506 | ) 507 | } 508 | } 509 | } else { 510 | previousSha = github.context.payload.pull_request?.base?.sha 511 | 512 | if (!previousSha) { 513 | previousSha = await getRemoteBranchHeadSha({ 514 | cwd: workingDirectory, 515 | remoteName, 516 | branch: targetBranch 517 | }) 518 | } 519 | 520 | if (isShallow) { 521 | if ( 522 | !(await canDiffCommits({ 523 | cwd: workingDirectory, 524 | sha1: previousSha, 525 | sha2: currentSha, 526 | diff 527 | })) 528 | ) { 529 | core.info( 530 | 'Merge base is not in the local history, fetching remote target branch...' 531 | ) 532 | 533 | for ( 534 | let i = 1; 535 | i <= (inputs.fetchMissingHistoryMaxRetries || 10); 536 | i++ 537 | ) { 538 | await gitFetch({ 539 | cwd: workingDirectory, 540 | args: [ 541 | ...gitFetchExtraArgs, 542 | '-u', 543 | '--progress', 544 | `--deepen=${inputs.fetchDepth}`, 545 | remoteName, 546 | `+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}` 547 | ] 548 | }) 549 | 550 | if ( 551 | await canDiffCommits({ 552 | cwd: workingDirectory, 553 | sha1: previousSha, 554 | sha2: currentSha, 555 | diff 556 | }) 557 | ) { 558 | break 559 | } 560 | 561 | core.info( 562 | 'Merge base is not in the local history, fetching remote target branch again...' 563 | ) 564 | core.info( 565 | `Attempt ${i}/${inputs.fetchMissingHistoryMaxRetries || 10}` 566 | ) 567 | } 568 | } 569 | } 570 | } 571 | 572 | if (!previousSha || previousSha === currentSha) { 573 | previousSha = github.context.payload.pull_request?.base?.sha 574 | } 575 | } 576 | 577 | if ( 578 | !(await canDiffCommits({ 579 | cwd: workingDirectory, 580 | sha1: previousSha, 581 | sha2: currentSha, 582 | diff 583 | })) 584 | ) { 585 | diff = '..' 586 | } 587 | 588 | await verifyCommitSha({sha: previousSha, cwd: workingDirectory}) 589 | core.debug(`Previous SHA: ${previousSha}`) 590 | 591 | if ( 592 | !(await canDiffCommits({ 593 | cwd: workingDirectory, 594 | sha1: previousSha, 595 | sha2: currentSha, 596 | diff 597 | })) 598 | ) { 599 | core.warning( 600 | 'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.' 601 | ) 602 | core.warning( 603 | 'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}' 604 | ) 605 | throw new Error( 606 | `Unable to determine a difference between ${previousSha}${diff}${currentSha}` 607 | ) 608 | } 609 | 610 | if (previousSha === currentSha) { 611 | core.error( 612 | `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.` 613 | ) 614 | // This occurs if a PR is created from a forked repository and the event is pull_request_target. 
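// For example, a checkout step like the following (hypothetical snippet) reproduces the problem: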
615 | // - name: Checkout to branch 616 | // uses: actions/checkout@v3 617 | // Without setting the repository to use the same repository as the pull request will cause the previousSha 618 | // to be the same as the currentSha since the currentSha cannot be found in the local history. 619 | // The solution is to use: 620 | // - name: Checkout to branch 621 | // uses: actions/checkout@v3 622 | // with: 623 | // repository: ${{ github.event.pull_request.head.repo.full_name }} 624 | if (github.context.eventName === 'pull_request_target') { 625 | core.warning( 626 | 'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.' 627 | ) 628 | core.warning( 629 | 'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}' 630 | ) 631 | } else { 632 | core.error( 633 | `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.` 634 | ) 635 | } 636 | throw new Error('Similar commit hashes detected.') 637 | } 638 | 639 | return { 640 | previousSha, 641 | currentSha, 642 | currentBranch, 643 | targetBranch, 644 | diff 645 | } 646 | } 647 | -------------------------------------------------------------------------------- /src/constant.ts: -------------------------------------------------------------------------------- 1 | import {Inputs} from './inputs' 2 | 3 | export const DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS: Partial<Inputs> = { 4 | sha: '', 5 | baseSha: '', 6 | since: '', 7 | until: '', 8 | path: '.', 9 | quotepath: true, 10 | diffRelative: true, 11 | sinceLastRemoteCommit: false, 12 | recoverDeletedFiles: false, 13 | recoverDeletedFilesToDestination: '', 14 | recoverFiles: '', 15 | recoverFilesSeparator: '\n', 16 | recoverFilesIgnore: '', 17 | recoverFilesIgnoreSeparator: '\n', 18 | includeAllOldNewRenamedFiles: false, 19 | oldNewSeparator: ',', 20 | oldNewFilesSeparator: ' ', 21 | skipInitialFetch: false, 22 | fetchAdditionalSubmoduleHistory: false, 23 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: false, 24 | excludeSubmodules: false, 25 | fetchMissingHistoryMaxRetries: 20, 26 | usePosixPathSeparator: false, 27 | tagsPattern: '*', 28 | tagsIgnorePattern: '' 29 | } 30 | -------------------------------------------------------------------------------- /src/env.ts: -------------------------------------------------------------------------------- 1 | export type Env = { 2 | GITHUB_REF_NAME: string 3 | GITHUB_REF: string 4 | GITHUB_WORKSPACE: string 5 | } 6 | 7 | export const getEnv = async (): Promise<Env> => { 8 | return { 9 | GITHUB_REF_NAME: process.env.GITHUB_REF_NAME || '', 10 | GITHUB_REF: process.env.GITHUB_REF || '', 11 | GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || '' 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/inputs.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | 3 | export type Inputs = { 4 | files: string 5 | filesSeparator: string 6 | filesFromSourceFile: string 7 | filesFromSourceFileSeparator: string 8 | filesYaml: string 9 | filesYamlFromSourceFile: string 10 | filesYamlFromSourceFileSeparator: string 11 | filesIgnore: string 12 | filesIgnoreSeparator: string 13 | filesIgnoreFromSourceFile: string 14 | filesIgnoreFromSourceFileSeparator: string 15 | filesIgnoreYaml: string 16 | filesIgnoreYamlFromSourceFile: string 17 |
filesIgnoreYamlFromSourceFileSeparator: string 18 | separator: string 19 | includeAllOldNewRenamedFiles: boolean 20 | oldNewSeparator: string 21 | oldNewFilesSeparator: string 22 | sha: string 23 | baseSha: string 24 | since: string 25 | until: string 26 | path: string 27 | quotepath: boolean 28 | diffRelative: boolean 29 | dirNames: boolean 30 | dirNamesMaxDepth?: number 31 | dirNamesExcludeCurrentDir: boolean 32 | dirNamesIncludeFiles: string 33 | dirNamesIncludeFilesSeparator: string 34 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: boolean 35 | json: boolean 36 | escapeJson: boolean 37 | safeOutput: boolean 38 | fetchDepth?: number 39 | fetchAdditionalSubmoduleHistory: boolean 40 | sinceLastRemoteCommit: boolean 41 | writeOutputFiles: boolean 42 | outputDir: string 43 | outputRenamedFilesAsDeletedAndAdded: boolean 44 | recoverDeletedFiles: boolean 45 | recoverDeletedFilesToDestination: string 46 | recoverFiles: string 47 | recoverFilesSeparator: string 48 | recoverFilesIgnore: string 49 | recoverFilesIgnoreSeparator: string 50 | token: string 51 | apiUrl: string 52 | skipInitialFetch: boolean 53 | failOnInitialDiffError: boolean 54 | failOnSubmoduleDiffError: boolean 55 | negationPatternsFirst: boolean 56 | useRestApi: boolean 57 | excludeSubmodules: boolean 58 | fetchMissingHistoryMaxRetries?: number 59 | usePosixPathSeparator: boolean 60 | tagsPattern: string 61 | tagsIgnorePattern?: string 62 | } 63 | 64 | export const getInputs = (): Inputs => { 65 | const files = core.getInput('files', {required: false}) 66 | const filesSeparator = core.getInput('files_separator', { 67 | required: false, 68 | trimWhitespace: false 69 | }) 70 | const filesIgnore = core.getInput('files_ignore', {required: false}) 71 | const filesIgnoreSeparator = core.getInput('files_ignore_separator', { 72 | required: false, 73 | trimWhitespace: false 74 | }) 75 | const filesFromSourceFile = core.getInput('files_from_source_file', { 76 | required: false 77 | }) 78 | const filesFromSourceFileSeparator = core.getInput( 79 | 'files_from_source_file_separator', 80 | { 81 | required: false, 82 | trimWhitespace: false 83 | } 84 | ) 85 | const filesYaml = core.getInput('files_yaml', {required: false}) 86 | const filesYamlFromSourceFile = core.getInput('files_yaml_from_source_file', { 87 | required: false 88 | }) 89 | const filesYamlFromSourceFileSeparator = core.getInput( 90 | 'files_yaml_from_source_file_separator', 91 | { 92 | required: false, 93 | trimWhitespace: false 94 | } 95 | ) 96 | const filesIgnoreFromSourceFile = core.getInput( 97 | 'files_ignore_from_source_file', 98 | {required: false} 99 | ) 100 | const filesIgnoreFromSourceFileSeparator = core.getInput( 101 | 'files_ignore_from_source_file_separator', 102 | { 103 | required: false, 104 | trimWhitespace: false 105 | } 106 | ) 107 | const filesIgnoreYaml = core.getInput('files_ignore_yaml', {required: false}) 108 | const filesIgnoreYamlFromSourceFile = core.getInput( 109 | 'files_ignore_yaml_from_source_file', 110 | {required: false} 111 | ) 112 | const filesIgnoreYamlFromSourceFileSeparator = core.getInput( 113 | 'files_ignore_yaml_from_source_file_separator', 114 | { 115 | required: false, 116 | trimWhitespace: false 117 | } 118 | ) 119 | const separator = core.getInput('separator', { 120 | required: true, 121 | trimWhitespace: false 122 | }) 123 | const includeAllOldNewRenamedFiles = core.getBooleanInput( 124 | 'include_all_old_new_renamed_files', 125 | {required: false} 126 | ) 127 | const oldNewSeparator = core.getInput('old_new_separator', { 128 | 
required: true,
129 |     trimWhitespace: false
130 |   })
131 |   const oldNewFilesSeparator = core.getInput('old_new_files_separator', {
132 |     required: true,
133 |     trimWhitespace: false
134 |   })
135 |   const sha = core.getInput('sha', {required: false})
136 |   const baseSha = core.getInput('base_sha', {required: false})
137 |   const since = core.getInput('since', {required: false})
138 |   const until = core.getInput('until', {required: false})
139 |   const path = core.getInput('path', {required: false})
140 |   const quotepath = core.getBooleanInput('quotepath', {required: false})
141 |   const diffRelative = core.getBooleanInput('diff_relative', {required: false})
142 |   const dirNames = core.getBooleanInput('dir_names', {required: false})
143 |   const dirNamesMaxDepth = core.getInput('dir_names_max_depth', {
144 |     required: false
145 |   })
146 |   const dirNamesExcludeCurrentDir = core.getBooleanInput(
147 |     'dir_names_exclude_current_dir',
148 |     {
149 |       required: false
150 |     }
151 |   )
152 |   const dirNamesIncludeFiles = core.getInput('dir_names_include_files', {
153 |     required: false
154 |   })
155 |   const dirNamesIncludeFilesSeparator = core.getInput(
156 |     'dir_names_include_files_separator',
157 |     {
158 |       required: false,
159 |       trimWhitespace: false
160 |     }
161 |   )
162 |   let json = core.getBooleanInput('json', {required: false})
163 |   let escapeJson = core.getBooleanInput('escape_json', {required: false})
164 |   const matrix = core.getBooleanInput('matrix', {required: false})
165 | 
166 |   if (matrix) {
167 |     json = true
168 |     escapeJson = false
169 |   }
170 | 
171 |   const safeOutput = core.getBooleanInput('safe_output', {required: false})
172 |   const fetchDepth = core.getInput('fetch_depth', {required: false})
173 |   const sinceLastRemoteCommit = core.getBooleanInput(
174 |     'since_last_remote_commit',
175 |     {required: false}
176 |   )
177 |   const writeOutputFiles = core.getBooleanInput('write_output_files', {
178 |     required: false
179 |   })
180 |   const outputDir = core.getInput('output_dir', {required: false})
181 |   const outputRenamedFilesAsDeletedAndAdded = core.getBooleanInput(
182 |     'output_renamed_files_as_deleted_and_added',
183 |     {required: false}
184 |   )
185 |   const recoverDeletedFiles = core.getBooleanInput('recover_deleted_files', {
186 |     required: false
187 |   })
188 |   const recoverDeletedFilesToDestination = core.getInput(
189 |     'recover_deleted_files_to_destination',
190 |     {required: false}
191 |   )
192 |   const recoverFiles = core.getInput('recover_files', {required: false})
193 |   const recoverFilesSeparator = core.getInput('recover_files_separator', {
194 |     required: false,
195 |     trimWhitespace: false
196 |   })
197 |   const recoverFilesIgnore = core.getInput('recover_files_ignore', {
198 |     required: false
199 |   })
200 |   const recoverFilesIgnoreSeparator = core.getInput(
201 |     'recover_files_ignore_separator',
202 |     {
203 |       required: false,
204 |       trimWhitespace: false
205 |     }
206 |   )
207 |   const token = core.getInput('token', {required: false})
208 |   const apiUrl = core.getInput('api_url', {required: false})
209 |   const skipInitialFetch = core.getBooleanInput('skip_initial_fetch', {
210 |     required: false
211 |   })
212 |   const fetchAdditionalSubmoduleHistory = core.getBooleanInput(
213 |     'fetch_additional_submodule_history',
214 |     {
215 |       required: false
216 |     }
217 |   )
218 |   const failOnInitialDiffError = core.getBooleanInput(
219 |     'fail_on_initial_diff_error',
220 |     {
221 |       required: false
222 |     }
223 |   )
224 |   const failOnSubmoduleDiffError = core.getBooleanInput(
225 |     'fail_on_submodule_diff_error',
226 |     {
227 |       required: false
228 |     }
229 |   )
230 |   const dirNamesDeletedFilesIncludeOnlyDeletedDirs = core.getBooleanInput(
231 |     'dir_names_deleted_files_include_only_deleted_dirs',
232 |     {
233 |       required: false
234 |     }
235 |   )
236 | 
237 |   const negationPatternsFirst = core.getBooleanInput(
238 |     'negation_patterns_first',
239 |     {
240 |       required: false
241 |     }
242 |   )
243 | 
244 |   const useRestApi = core.getBooleanInput('use_rest_api', {
245 |     required: false
246 |   })
247 | 
248 |   const excludeSubmodules = core.getBooleanInput('exclude_submodules', {
249 |     required: false
250 |   })
251 | 
252 |   const fetchMissingHistoryMaxRetries = core.getInput(
253 |     'fetch_missing_history_max_retries',
254 |     {required: false}
255 |   )
256 | 
257 |   const usePosixPathSeparator = core.getBooleanInput(
258 |     'use_posix_path_separator',
259 |     {
260 |       required: false
261 |     }
262 |   )
263 | 
264 |   const tagsPattern = core.getInput('tags_pattern', {
265 |     required: false,
266 |     trimWhitespace: false
267 |   })
268 |   const tagsIgnorePattern = core.getInput('tags_ignore_pattern', {
269 |     required: false,
270 |     trimWhitespace: false
271 |   })
272 | 
273 |   const inputs: Inputs = {
274 |     files,
275 |     filesSeparator,
276 |     filesFromSourceFile,
277 |     filesFromSourceFileSeparator,
278 |     filesYaml,
279 |     filesYamlFromSourceFile,
280 |     filesYamlFromSourceFileSeparator,
281 |     filesIgnore,
282 |     filesIgnoreSeparator,
283 |     filesIgnoreFromSourceFile,
284 |     filesIgnoreFromSourceFileSeparator,
285 |     filesIgnoreYaml,
286 |     filesIgnoreYamlFromSourceFile,
287 |     filesIgnoreYamlFromSourceFileSeparator,
288 |     failOnInitialDiffError,
289 |     failOnSubmoduleDiffError,
290 |     separator,
291 |     // Not Supported via REST API
292 |     sha,
293 |     baseSha,
294 |     since,
295 |     until,
296 |     path,
297 |     quotepath,
298 |     diffRelative,
299 |     sinceLastRemoteCommit,
300 |     recoverDeletedFiles,
301 |     recoverDeletedFilesToDestination,
302 |     recoverFiles,
303 |     recoverFilesSeparator,
304 |     recoverFilesIgnore,
305 |     recoverFilesIgnoreSeparator,
306 |     includeAllOldNewRenamedFiles,
307 |     oldNewSeparator,
308 |     oldNewFilesSeparator,
309 |     skipInitialFetch,
310 |     fetchAdditionalSubmoduleHistory,
311 |     dirNamesDeletedFilesIncludeOnlyDeletedDirs,
312 |     excludeSubmodules,
313 |     usePosixPathSeparator,
314 |     tagsPattern,
315 |     tagsIgnorePattern,
316 |     // End Not Supported via REST API
317 |     dirNames,
318 |     dirNamesExcludeCurrentDir,
319 |     dirNamesIncludeFiles,
320 |     dirNamesIncludeFilesSeparator,
321 |     json,
322 |     escapeJson,
323 |     safeOutput,
324 |     writeOutputFiles,
325 |     outputDir,
326 |     outputRenamedFilesAsDeletedAndAdded,
327 |     token,
328 |     apiUrl,
329 |     negationPatternsFirst,
330 |     useRestApi
331 |   }
332 | 
333 |   if (fetchDepth) {
334 |     // Fall back to at least 2 if the fetch_depth input is less than 2
335 |     inputs.fetchDepth = Math.max(parseInt(fetchDepth, 10), 2)
336 |   }
337 | 
338 |   if (dirNamesMaxDepth) {
339 |     inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10)
340 |   }
341 | 
342 |   if (fetchMissingHistoryMaxRetries) {
343 |     // Fall back to at least 1 if the fetch_missing_history_max_retries input is less than 1
344 |     inputs.fetchMissingHistoryMaxRetries = Math.max(
345 |       parseInt(fetchMissingHistoryMaxRetries, 10),
346 |       1
347 |     )
348 |   }
349 | 
350 |   return inputs
351 | }
352 | 
--------------------------------------------------------------------------------
/src/main.ts:
--------------------------------------------------------------------------------
1 | import * as core from '@actions/core'
2 | import * as github from '@actions/github'
3 | import path from 'path'
4 | import {
5 |   processChangedFiles,
6 |   ChangeTypeEnum,
7 |   getAllDiffFiles,
8 |   getChangedFilesFromGithubAPI,
9 |   getRenamedFiles
10 | } from './changedFiles'
11 | import {
12 |   DiffResult,
13 |   getSHAForNonPullRequestEvent,
14 |   getSHAForPullRequestEvent
15 | } from './commitSha'
16 | import {Env, getEnv} from './env'
17 | import {getInputs, Inputs} from './inputs'
18 | import {
19 |   getFilePatterns,
20 |   getRecoverFilePatterns,
21 |   getSubmodulePath,
22 |   getYamlFilePatterns,
23 |   hasLocalGitDirectory,
24 |   isRepoShallow,
25 |   recoverDeletedFiles,
26 |   setOutput,
27 |   submoduleExists,
28 |   updateGitGlobalConfig,
29 |   verifyMinimumGitVersion,
30 |   warnUnsupportedRESTAPIInputs
31 | } from './utils'
32 | 
33 | const getChangedFilesFromLocalGitHistory = async ({
34 |   inputs,
35 |   env,
36 |   workingDirectory,
37 |   filePatterns,
38 |   yamlFilePatterns
39 | }: {
40 |   inputs: Inputs
41 |   env: Env
42 |   workingDirectory: string
43 |   filePatterns: string[]
44 |   yamlFilePatterns: Record<string, string[]>
45 | }): Promise<void> => {
46 |   await verifyMinimumGitVersion()
47 | 
48 |   let quotepathValue = 'on'
49 | 
50 |   if (!inputs.quotepath) {
51 |     quotepathValue = 'off'
52 |   }
53 | 
54 |   await updateGitGlobalConfig({
55 |     name: 'core.quotepath',
56 |     value: quotepathValue
57 |   })
58 | 
59 |   if (inputs.diffRelative) {
60 |     await updateGitGlobalConfig({
61 |       name: 'diff.relative',
62 |       value: 'true'
63 |     })
64 |   }
65 | 
66 |   const isShallow = await isRepoShallow({cwd: workingDirectory})
67 |   let diffSubmodule = false
68 |   let gitFetchExtraArgs = ['--no-tags', '--prune']
69 | 
70 |   if (inputs.excludeSubmodules) {
71 |     core.info('Excluding submodules from the diff')
72 |   } else {
73 |     diffSubmodule = await submoduleExists({cwd: workingDirectory})
74 |   }
75 | 
76 |   if (diffSubmodule) {
77 |     gitFetchExtraArgs.push('--recurse-submodules')
78 |   }
79 | 
80 |   const isTag = env.GITHUB_REF?.startsWith('refs/tags/')
81 |   const remoteName = 'origin'
82 |   const outputRenamedFilesAsDeletedAndAdded =
83 |     inputs.outputRenamedFilesAsDeletedAndAdded
84 |   let submodulePaths: string[] = []
85 | 
86 |   if (diffSubmodule) {
87 |     submodulePaths = await getSubmodulePath({cwd: workingDirectory})
88 |   }
89 | 
90 |   if (isTag) {
91 |     gitFetchExtraArgs = ['--prune', '--no-recurse-submodules']
92 |   }
93 | 
94 |   let diffResult: DiffResult
95 | 
96 |   if (!github.context.payload.pull_request?.base?.ref) {
97 |     core.info(`Running on a ${github.context.eventName || 'push'} event...`)
98 |     diffResult = await getSHAForNonPullRequestEvent({
99 |       inputs,
100 |       env,
101 |       workingDirectory,
102 |       isShallow,
103 |       diffSubmodule,
104 |       gitFetchExtraArgs,
105 |       isTag,
106 |       remoteName
107 |     })
108 |   } else {
109 |     core.info(
110 |       `Running on a ${github.context.eventName || 'pull_request'} (${
111 |         github.context.payload.action
112 |       }) event...`
113 |     )
114 |     diffResult = await getSHAForPullRequestEvent({
115 |       inputs,
116 |       workingDirectory,
117 |       isShallow,
118 |       diffSubmodule,
119 |       gitFetchExtraArgs,
120 |       remoteName
121 |     })
122 |   }
123 | 
124 |   if (diffResult.initialCommit) {
125 |     core.info('This is the first commit for this repository; exiting...')
126 |     core.endGroup()
127 |     return
128 |   }
129 | 
130 |   core.info(
131 |     `Retrieving changes between ${diffResult.previousSha} (${diffResult.targetBranch}) → ${diffResult.currentSha} (${diffResult.currentBranch})`
132 |   )
133 | 
134 |   const allDiffFiles = await getAllDiffFiles({
135 |     workingDirectory,
136 |     diffSubmodule,
137 |     diffResult,
138 |     submodulePaths,
139 |     outputRenamedFilesAsDeletedAndAdded,
140 |     fetchAdditionalSubmoduleHistory: inputs.fetchAdditionalSubmoduleHistory,
141 |     failOnInitialDiffError: inputs.failOnInitialDiffError,
142 |     failOnSubmoduleDiffError: inputs.failOnSubmoduleDiffError
143 |   })
144 |   core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
145 |   core.info('All Done!')
146 |   core.endGroup()
147 | 
148 |   if (inputs.recoverDeletedFiles) {
149 |     let recoverPatterns = getRecoverFilePatterns({inputs})
150 | 
151 |     if (recoverPatterns.length === 0 && filePatterns.length > 0) {
152 |       core.info('No recover patterns found; defaulting to file patterns')
153 |       recoverPatterns = filePatterns
154 |     }
155 | 
156 |     await recoverDeletedFiles({
157 |       inputs,
158 |       workingDirectory,
159 |       deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted],
160 |       recoverPatterns,
161 |       diffResult,
162 |       diffSubmodule,
163 |       submodulePaths
164 |     })
165 |   }
166 | 
167 |   await processChangedFiles({
168 |     filePatterns,
169 |     allDiffFiles,
170 |     inputs,
171 |     yamlFilePatterns,
172 |     workingDirectory
173 |   })
174 | 
175 |   if (inputs.includeAllOldNewRenamedFiles) {
176 |     core.startGroup('changed-files-all-old-new-renamed-files')
177 |     const allOldNewRenamedFiles = await getRenamedFiles({
178 |       inputs,
179 |       workingDirectory,
180 |       diffSubmodule,
181 |       diffResult,
182 |       submodulePaths
183 |     })
184 |     core.debug(`All old new renamed files: ${allOldNewRenamedFiles}`)
185 |     await setOutput({
186 |       key: 'all_old_new_renamed_files',
187 |       value: allOldNewRenamedFiles.paths,
188 |       writeOutputFiles: inputs.writeOutputFiles,
189 |       outputDir: inputs.outputDir,
190 |       json: inputs.json,
191 |       safeOutput: inputs.safeOutput
192 |     })
193 |     await setOutput({
194 |       key: 'all_old_new_renamed_files_count',
195 |       value: allOldNewRenamedFiles.count,
196 |       writeOutputFiles: inputs.writeOutputFiles,
197 |       outputDir: inputs.outputDir,
198 |       json: inputs.json
199 |     })
200 |     core.info('All Done!')
201 |     core.endGroup()
202 |   }
203 | }
204 | 
205 | const getChangedFilesFromRESTAPI = async ({
206 |   inputs,
207 |   filePatterns,
208 |   yamlFilePatterns
209 | }: {
210 |   inputs: Inputs
211 |   filePatterns: string[]
212 |   yamlFilePatterns: Record<string, string[]>
213 | }): Promise<void> => {
214 |   const allDiffFiles = await getChangedFilesFromGithubAPI({
215 |     inputs
216 |   })
217 |   core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`)
218 |   core.info('All Done!')
219 | 
220 |   await processChangedFiles({
221 |     filePatterns,
222 |     allDiffFiles,
223 |     inputs,
224 |     yamlFilePatterns
225 |   })
226 | }
227 | 
228 | export async function run(): Promise<void> {
229 |   core.startGroup('changed-files')
230 | 
231 |   const env = await getEnv()
232 |   core.debug(`Env: ${JSON.stringify(env, null, 2)}`)
233 | 
234 |   const inputs = getInputs()
235 |   core.debug(`Inputs: ${JSON.stringify(inputs, null, 2)}`)
236 | 
237 |   const workingDirectory = path.resolve(
238 |     env.GITHUB_WORKSPACE || process.cwd(),
239 |     inputs.useRestApi ? '.' : inputs.path
240 |   )
241 |   core.debug(`Working directory: ${workingDirectory}`)
242 | 
243 |   const hasGitDirectory = await hasLocalGitDirectory({workingDirectory})
244 |   core.debug(`Has git directory: ${hasGitDirectory}`)
245 | 
246 |   const filePatterns = await getFilePatterns({
247 |     inputs,
248 |     workingDirectory
249 |   })
250 |   core.debug(`File patterns: ${filePatterns}`)
251 | 
252 |   const yamlFilePatterns = await getYamlFilePatterns({
253 |     inputs,
254 |     workingDirectory
255 |   })
256 |   core.debug(`Yaml file patterns: ${JSON.stringify(yamlFilePatterns)}`)
257 | 
258 |   if (inputs.useRestApi && !github.context.payload.pull_request?.number) {
259 |     throw new Error(
260 |       "Only pull_request* events are supported when using GitHub's REST API."
261 |     )
262 |   }
263 | 
264 |   if (
265 |     inputs.token &&
266 |     github.context.payload.pull_request?.number &&
267 |     (!hasGitDirectory || inputs.useRestApi)
268 |   ) {
269 |     core.info("Using GitHub's REST API to get changed files")
270 |     await warnUnsupportedRESTAPIInputs({inputs})
271 |     await getChangedFilesFromRESTAPI({
272 |       inputs,
273 |       filePatterns,
274 |       yamlFilePatterns
275 |     })
276 |   } else {
277 |     if (!hasGitDirectory) {
278 |       throw new Error(
279 |         `Unable to locate the git repository in the given path: ${workingDirectory}.\n Please run actions/checkout before this action (make sure the 'path' input is correct).\n If you intend to use GitHub's REST API, note that only pull_request* events are supported. Current event is "${github.context.eventName}".`
280 |       )
281 |     }
282 | 
283 |     core.info('Using local .git directory')
284 |     await getChangedFilesFromLocalGitHistory({
285 |       inputs,
286 |       env,
287 |       workingDirectory,
288 |       filePatterns,
289 |       yamlFilePatterns
290 |     })
291 |   }
292 | }
293 | 
294 | // eslint-disable-next-line github/no-then
295 | run().catch(e => {
296 |   core.setFailed(e.message || e)
297 |   process.exit(1)
298 | })
299 | 
--------------------------------------------------------------------------------
/test/[test new].txt:
--------------------------------------------------------------------------------
1 | This is a test file
2 | 
--------------------------------------------------------------------------------
/test/changed-files-list.txt:
--------------------------------------------------------------------------------
1 | .github/workflows/test.yml
2 | action.yml
3 | action.yml
4 | action.yml
5 | **/test.txt
6 | !test/test/test.txt
7 | [test new].txt
8 | 
--------------------------------------------------------------------------------
/test/changed-files.yml:
--------------------------------------------------------------------------------
1 | test:
2 |   - test/**.txt
3 | src:
4 |   - src/*.ts
5 |   - '!src/__tests__/**'
6 | dist:
7 |   - dist/**
8 | shared: &shared
9 |   - .github/**
10 | common:
11 |   - *shared
12 |   - .gitignore
13 | multiline: |
14 |   test/**
15 |   src/*.ts
16 |   .github/**
17 | 
--------------------------------------------------------------------------------
/test/demo/test/test.txt:
--------------------------------------------------------------------------------
1 | test file
--------------------------------------------------------------------------------
/test/new.md:
--------------------------------------------------------------------------------
1 | This is a test markdown file
2 | 
--------------------------------------------------------------------------------
/test/test new 1.txt:
--------------------------------------------------------------------------------
1 | This is a test file
2 | 
--------------------------------------------------------------------------------
/test/test new.txt:
--------------------------------------------------------------------------------
1 | This is a test file.
2 | 
--------------------------------------------------------------------------------
/test/test rename-1.txt:
--------------------------------------------------------------------------------
1 | This is a test file 1.
2 | 
--------------------------------------------------------------------------------
/test/test rename-2.txt:
--------------------------------------------------------------------------------
1 | This is test file 2.
2 | 
--------------------------------------------------------------------------------
/test/test-è.txt:
--------------------------------------------------------------------------------
1 | This is a test file with non ASCII character in the filename.
2 | 
--------------------------------------------------------------------------------
/test/test.txt:
--------------------------------------------------------------------------------
1 | This is a test file...
2 | 
--------------------------------------------------------------------------------
/test/test2/test.txt:
--------------------------------------------------------------------------------
1 | Lorem ipsum dolor sit amet, consectetur adipiscing elit et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
--------------------------------------------------------------------------------
/test/test2/test3/new.txt:
--------------------------------------------------------------------------------
1 | Test file.
--------------------------------------------------------------------------------
/test/test2/test3/new2.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/step-security/changed-files/6bc0fede9f9cf694bc2cffb1481827cd09ef7a66/test/test2/test3/new2.txt
--------------------------------------------------------------------------------
/test/test2/test3/test4/test.txt:
--------------------------------------------------------------------------------
1 | Test file.
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "compilerOptions": {
3 |     "target": "ES2018", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
4 |     "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
5 |     "outDir": "./lib", /* Redirect output structure to the directory. */
6 |     "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
7 |     "strict": true, /* Enable all strict type-checking options. */
8 |     "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
9 |     "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
10 |   },
11 |   "exclude": ["node_modules", "jest/setEnvVars.cjs"]
12 | }
13 | 
--------------------------------------------------------------------------------