├── .nvmrc ├── CONTRIBUTING.md ├── test ├── test.txt ├── test2 │ ├── test3 │ │ ├── new.txt │ │ └── test4 │ │ │ └── test.txt │ └── test.txt ├── [test new].txt ├── test new 1.txt ├── test new.txt ├── test │ └── test.txt ├── test rename 2.txt ├── test rename-1.txt ├── test-è.txt ├── changed-files-list.txt └── changed-files.yml ├── .prettierignore ├── .gitattributes ├── .codacy.yml ├── .eslintignore ├── .gitmodules ├── .prettierrc.json ├── jest └── setupEnv.cjs ├── .whitesource ├── jest.config.js ├── src ├── env.ts ├── constant.ts ├── __tests__ │ ├── inputs.test.ts │ ├── __snapshots__ │ │ └── inputs.test.ts.snap │ └── utils.test.ts ├── main.ts ├── inputs.ts ├── changedFiles.ts ├── changedFilesOutput.ts └── commitSha.ts ├── .github ├── dependabot.yml └── workflows │ ├── greetings.yml │ ├── matrix-example.yml │ ├── workflow-run-example.yml │ ├── update-readme.yml │ ├── manual-triggered-job-example.yml │ ├── sync-release-version.yml │ ├── multi-job-example.yml │ ├── codacy-analysis.yml │ ├── codeql.yml │ └── issue-comment-job-example.yml ├── tsconfig.json ├── LICENSE ├── renovate.json ├── .gitignore ├── package.json ├── SECURITY.md ├── .eslintrc.json ├── CODE_OF_CONDUCT.md ├── .all-contributorsrc └── action.yml /.nvmrc: -------------------------------------------------------------------------------- 1 | 20 2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/test.txt: -------------------------------------------------------------------------------- 1 | This is a test file. 2 | -------------------------------------------------------------------------------- /test/test2/test3/new.txt: -------------------------------------------------------------------------------- 1 | Test file. 
-------------------------------------------------------------------------------- /test/[test new].txt: -------------------------------------------------------------------------------- 1 | This is a test file 2 | -------------------------------------------------------------------------------- /test/test new 1.txt: -------------------------------------------------------------------------------- 1 | This is a test file 2 | -------------------------------------------------------------------------------- /test/test new.txt: -------------------------------------------------------------------------------- 1 | This is a test file. 2 | -------------------------------------------------------------------------------- /test/test/test.txt: -------------------------------------------------------------------------------- 1 | This is a test file. 2 | -------------------------------------------------------------------------------- /test/test2/test3/test4/test.txt: -------------------------------------------------------------------------------- 1 | Test file. -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ -------------------------------------------------------------------------------- /test/test rename 2.txt: -------------------------------------------------------------------------------- 1 | This is test file 2. 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true -------------------------------------------------------------------------------- /test/test rename-1.txt: -------------------------------------------------------------------------------- 1 | This is a test file 1. 
2 | -------------------------------------------------------------------------------- /.codacy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | exclude_paths: 3 | - "*.md" 4 | - "dist/**" 5 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ 4 | jest.config.js 5 | coverage/ -------------------------------------------------------------------------------- /test/test-è.txt: -------------------------------------------------------------------------------- 1 | This is a test file with non ASCII character in the filename. 2 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "test/demo"] 2 | path = test/demo 3 | url = git@github.com:tj-actions/demo.git 4 | -------------------------------------------------------------------------------- /test/changed-files-list.txt: -------------------------------------------------------------------------------- 1 | .github/workflows/test.yml 2 | action.yml 3 | action.yml 4 | action.yml 5 | **/test.txt 6 | !test/test/test.txt 7 | [test new].txt 8 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": false, 6 | "singleQuote": true, 7 | "trailingComma": "none", 8 | "bracketSpacing": false, 9 | "arrowParens": "avoid" 10 | } -------------------------------------------------------------------------------- /jest/setupEnv.cjs: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | process.env.GITHUB_WORKSPACE = path.join( 4 | 
path.resolve(__dirname, '..'), '.' 5 | ) 6 | process.env.GITHUB_ACTION_PATH = path.join( 7 | path.resolve(__dirname, '..'), '.' 8 | ) 9 | -------------------------------------------------------------------------------- /.whitesource: -------------------------------------------------------------------------------- 1 | { 2 | "scanSettings": { 3 | "baseBranches": [] 4 | }, 5 | "checkRunSettings": { 6 | "vulnerableCheckRunConclusionLevel": "failure", 7 | "displayMode": "diff" 8 | }, 9 | "issueSettings": { 10 | "minSeverityLevel": "LOW" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /test/changed-files.yml: -------------------------------------------------------------------------------- 1 | test: 2 | - test/**.txt 3 | src: 4 | - src/*.ts 5 | - '!src/__tests__/**' 6 | dist: 7 | - dist/** 8 | shared: &shared 9 | - .github/** 10 | common: 11 | - *shared 12 | - .gitignore 13 | multiline: | 14 | test/** 15 | src/*.ts 16 | .github/** 17 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | clearMocks: true, 3 | moduleFileExtensions: ['js', 'ts'], 4 | testMatch: ['**/*.test.ts'], 5 | transform: { 6 | '^.+\\.ts$': 'ts-jest' 7 | }, 8 | verbose: true, 9 | testTimeout: 10000, 10 | setupFiles: [ 11 | "/jest/setupEnv.cjs" 12 | ] 13 | }; 14 | -------------------------------------------------------------------------------- /src/env.ts: -------------------------------------------------------------------------------- 1 | export type Env = { 2 | GITHUB_REF_NAME: string 3 | GITHUB_REF: string 4 | GITHUB_WORKSPACE: string 5 | } 6 | 7 | export const getEnv = async (): Promise => { 8 | return { 9 | GITHUB_REF_NAME: process.env.GITHUB_REF_NAME || '', 10 | GITHUB_REF: process.env.GITHUB_REF || '', 11 | GITHUB_WORKSPACE: process.env.GITHUB_WORKSPACE || '' 12 | } 13 | } 14 | 
-------------------------------------------------------------------------------- /test/test2/test.txt: -------------------------------------------------------------------------------- 1 | Lorem ipsum dolor sit amet, consectetur adipiscing elit et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | versioning-strategy: widen 9 | labels: 10 | - "merge when passing" 11 | - package-ecosystem: github-actions 12 | directory: "/" 13 | schedule: 14 | interval: daily 15 | open-pull-requests-limit: 10 16 | labels: 17 | - "merge when passing" 18 | - package-ecosystem: gitsubmodule 19 | directory: / 20 | schedule: 21 | interval: daily 22 | open-pull-requests-limit: 10 23 | labels: 24 | - "merge when passing" 25 | -------------------------------------------------------------------------------- /.github/workflows/greetings.yml: -------------------------------------------------------------------------------- 1 | name: Greetings 2 | 3 | on: [pull_request_target, issues] 4 | 5 | permissions: 6 | pull-requests: write 7 | issues: write 8 | 9 | jobs: 10 | greeting: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/first-interaction@v1 14 | continue-on-error: true 15 | with: 16 | repo-token: ${{ secrets.PAT_TOKEN }} 17 | issue-message: "Thanks for reporting this issue, don't forget to star this project if you haven't already to help us reach a wider audience." 
18 | pr-message: "Thanks for implementing a fix, could you ensure that the test covers your changes if applicable." 19 | -------------------------------------------------------------------------------- /src/constant.ts: -------------------------------------------------------------------------------- 1 | import {Inputs} from './inputs' 2 | 3 | export const DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS: Partial = { 4 | sha: '', 5 | baseSha: '', 6 | since: '', 7 | until: '', 8 | path: '.', 9 | quotepath: true, 10 | diffRelative: true, 11 | sinceLastRemoteCommit: false, 12 | recoverDeletedFiles: false, 13 | recoverDeletedFilesToDestination: '', 14 | recoverFiles: '', 15 | recoverFilesSeparator: '\n', 16 | recoverFilesIgnore: '', 17 | recoverFilesIgnoreSeparator: '\n', 18 | includeAllOldNewRenamedFiles: false, 19 | oldNewSeparator: ',', 20 | oldNewFilesSeparator: ' ', 21 | skipInitialFetch: false, 22 | fetchAdditionalSubmoduleHistory: false, 23 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: false, 24 | excludeSubmodules: false, 25 | fetchMissingHistoryMaxRetries: 20, 26 | usePosixPathSeparator: false, 27 | tagsPattern: '*', 28 | tagsIgnorePattern: '' 29 | } 30 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ 4 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 5 | "outDir": "./lib", /* Redirect output structure to the directory. */ 6 | "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 7 | "strict": true, /* Enable all strict type-checking options. 
*/ 8 | "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 9 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 10 | }, 11 | "exclude": ["node_modules", "jest/setEnvVars.cjs"] 12 | } 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021, Tonye Jack 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 23 | -------------------------------------------------------------------------------- /.github/workflows/matrix-example.yml: -------------------------------------------------------------------------------- 1 | name: Matrix Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | workflow_dispatch: 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | changed-files: 14 | name: Get changed files 15 | runs-on: ubuntu-latest 16 | outputs: 17 | matrix: ${{ steps.changed-files.outputs.all_changed_files }} 18 | steps: 19 | - name: Checkout 20 | uses: actions/checkout@v4 21 | with: 22 | fetch-depth: 0 23 | - name: Get changed files 24 | id: changed-files 25 | uses: ./ 26 | with: 27 | matrix: true 28 | - name: List all changed files 29 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 30 | 31 | matrix-job: 32 | name: Run Matrix Job 33 | runs-on: ubuntu-latest 34 | needs: [changed-files] 35 | strategy: 36 | matrix: 37 | files: ${{ fromJSON(needs.changed-files.outputs.matrix) }} 38 | max-parallel: 4 39 | fail-fast: false 40 | steps: 41 | - name: Checkout 42 | uses: actions/checkout@v4 43 | - name: Test 44 | run: | 45 | echo ${{ matrix.files }} 46 | -------------------------------------------------------------------------------- /.github/workflows/workflow-run-example.yml: -------------------------------------------------------------------------------- 1 | name: Workflow Run Example 2 | on: 3 | workflow_run: 4 | workflows: [Matrix Example] 5 | types: [completed] 6 | 7 | permissions: 8 | contents: read 9 | 10 | jobs: 11 | on-success: 12 | runs-on: ubuntu-latest 13 | if: ${{ github.event.workflow_run.conclusion == 'success' }} 14 | steps: 15 | - name: Checkout code 16 | uses: actions/checkout@v4 17 | 18 | - name: Get changed files 19 | id: changed-files 20 | uses: ./ 21 | 22 | - name: Echo list of changed files on success 23 | run: | 24 | echo "Changed files on success:" 25 | echo "${{ steps.changed-files.outputs.all_changed_files }}" 26 | 
27 | on-failure: 28 | runs-on: ubuntu-latest 29 | if: ${{ github.event.workflow_run.conclusion == 'failure' }} 30 | steps: 31 | - name: Checkout code 32 | uses: actions/checkout@v4 33 | 34 | - name: Get changed files 35 | id: changed-files 36 | uses: ./ 37 | 38 | - name: Echo list of changed files on failure 39 | run: | 40 | echo "Changed files on failure:" 41 | echo "${{ steps.changed-files.outputs.all_changed_files }}" 42 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "config:base" 4 | ], 5 | "enabled": true, 6 | "prHourlyLimit": 10, 7 | "prConcurrentLimit": 5, 8 | "rebaseWhen": "behind-base-branch", 9 | "addLabels": [ 10 | "dependencies", 11 | "merge when passing" 12 | ], 13 | "assignees": [ 14 | "jackton1" 15 | ], 16 | "assignAutomerge": true, 17 | "dependencyDashboard": true, 18 | "dependencyDashboardAutoclose": true, 19 | "lockFileMaintenance": { 20 | "enabled": true, 21 | "automerge": true 22 | }, 23 | "nvm": { 24 | "enabled": false 25 | }, 26 | "packageRules": [ 27 | { 28 | "matchUpdateTypes": [ 29 | "minor", 30 | "patch", 31 | "pin", 32 | "digest" 33 | ], 34 | "automerge": true, 35 | "rebaseWhen": "behind-base-branch", 36 | "addLabels": [ 37 | "merge when passing" 38 | ] 39 | }, 40 | { 41 | "description": "docker images", 42 | "matchLanguages": [ 43 | "docker" 44 | ], 45 | "matchUpdateTypes": [ 46 | "minor", 47 | "patch", 48 | "pin", 49 | "digest" 50 | ], 51 | "rebaseWhen": "behind-base-branch", 52 | "addLabels": [ 53 | "merge when passing" 54 | ], 55 | "automerge": true 56 | } 57 | ] 58 | } 59 | -------------------------------------------------------------------------------- /.github/workflows/update-readme.yml: -------------------------------------------------------------------------------- 1 | name: Format README.md 2 | 3 | permissions: 4 | contents: read 5 | pull-requests: write 6 | 7 | on: 8 | 
push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | sync-assets: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | 20 | - name: Run auto-doc 21 | uses: tj-actions/auto-doc@v3 22 | with: 23 | use_code_blocks: true 24 | use_major_version: true 25 | 26 | - name: Run remark 27 | uses: tj-actions/remark@v3 28 | 29 | - name: Verify Changed files 30 | uses: tj-actions/verify-changed-files@v20 31 | id: verify_changed_files 32 | with: 33 | files: | 34 | README.md 35 | 36 | - name: README.md changed 37 | if: steps.verify_changed_files.outputs.files_changed == 'true' 38 | run: | 39 | echo "README.md has uncommitted changes" 40 | exit 1 41 | 42 | - name: Create Pull Request 43 | if: failure() 44 | uses: peter-evans/create-pull-request@v6 45 | with: 46 | base: "main" 47 | labels: "merge when passing" 48 | title: "Updated README.md" 49 | branch: "chore/update-readme" 50 | commit-message: "Updated README.md" 51 | body: "Updated README.md" 52 | token: ${{ secrets.PAT_TOKEN }} 53 | -------------------------------------------------------------------------------- /.github/workflows/manual-triggered-job-example.yml: -------------------------------------------------------------------------------- 1 | name: Manual Triggered Job Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | workflow_dispatch: 8 | 9 | jobs: 10 | test: 11 | name: Test changed-files 12 | runs-on: ${{ matrix.platform }} 13 | strategy: 14 | fail-fast: false 15 | max-parallel: 7 16 | matrix: 17 | platform: [ubuntu-latest, windows-latest, macos-latest] 18 | 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | with: 23 | submodules: true 24 | fetch-depth: 0 25 | 26 | - name: Run changed-files with defaults 27 | id: changed-files 28 | uses: ./ 29 | 30 | - name: Show output 31 | run: | 32 | echo '${{ toJSON(steps.changed-files.outputs) }}' 33 | 34 | - name: Run changed-files with glob filtering 35 | id: changed-files-glob 36 | uses: ./ 
37 | with: 38 | files: | 39 | test/*.txt 40 | 41 | - name: Show output 42 | run: | 43 | echo '${{ toJSON(steps.changed-files-glob.outputs) }}' 44 | 45 | - name: Run changed-files with glob filtering and all_old_new_renamed_files 46 | id: changed-files-glob-all-old-new-renamed-files 47 | uses: ./ 48 | with: 49 | include_all_old_new_renamed_files: true 50 | files: | 51 | test/*.txt 52 | 53 | - name: Show output 54 | run: | 55 | echo '${{ toJSON(steps.changed-files-glob-all-old-new-renamed-files.outputs) }}' 56 | -------------------------------------------------------------------------------- /.github/workflows/sync-release-version.yml: -------------------------------------------------------------------------------- 1 | name: Update release version 2 | 3 | permissions: 4 | contents: write 5 | pull-requests: write 6 | 7 | on: 8 | release: 9 | types: [published] 10 | 11 | 12 | jobs: 13 | update-version: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | - name: Run release-tagger 20 | uses: tj-actions/release-tagger@v4 21 | - name: Sync release version. 22 | uses: tj-actions/sync-release-version@v13 23 | id: sync-release-version 24 | with: 25 | pattern: '${{ github.repository }}@' 26 | only_major: true 27 | paths: | 28 | README.md 29 | - name: Sync release package version. 
30 | uses: tj-actions/sync-release-version@v13 31 | id: sync-release-package-version 32 | with: 33 | pattern: '"version": "' 34 | strip_prefix: "v" 35 | paths: | 36 | package.json 37 | - name: Run git-cliff 38 | uses: tj-actions/git-cliff@v1 39 | - name: Create Pull Request 40 | uses: peter-evans/create-pull-request@v6.1.0 41 | with: 42 | base: "main" 43 | labels: "merge when passing" 44 | title: "Upgraded to ${{ steps.sync-release-version.outputs.new_version }}" 45 | branch: "upgrade-to-${{ steps.sync-release-version.outputs.new_version }}" 46 | commit-message: "Upgraded from ${{ steps.sync-release-version.outputs.old_version }} -> ${{ steps.sync-release-version.outputs.new_version }}" 47 | body: "View [CHANGES](https://github.com/${{ github.repository }}/compare/${{ steps.sync-release-version.outputs.old_version }}...${{ steps.sync-release-version.outputs.new_version }})" 48 | token: ${{ secrets.PAT_TOKEN }} 49 | -------------------------------------------------------------------------------- /.github/workflows/multi-job-example.yml: -------------------------------------------------------------------------------- 1 | name: Multi Job Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | push: 8 | branches: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | changed-files: 16 | name: Get changed files 17 | runs-on: ubuntu-latest 18 | outputs: 19 | all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }} 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@v4 23 | with: 24 | fetch-depth: 0 25 | - name: Get changed files 26 | id: changed-files 27 | uses: ./ 28 | - name: List all changed files 29 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 30 | 31 | view-changed-files: 32 | name: View all changed files 33 | runs-on: ubuntu-latest 34 | needs: [changed-files] 35 | steps: 36 | - name: List all changed files 37 | run: | 38 | echo '${{ needs.changed-files.outputs.all_changed_files }}' 39 | 
40 | 41 | changed-files-rest-api: 42 | name: Get changed files using REST API 43 | runs-on: ubuntu-latest 44 | outputs: 45 | all_changed_files: ${{ steps.changed-files.outputs.all_changed_files }} 46 | steps: 47 | - name: Checkout 48 | uses: actions/checkout@v4 49 | with: 50 | fetch-depth: 0 51 | - name: Get changed files 52 | id: changed-files 53 | continue-on-error: ${{ github.event_name == 'push' }} 54 | uses: ./ 55 | with: 56 | use_rest_api: true 57 | - name: List all changed files 58 | run: echo '${{ steps.changed-files.outputs.all_changed_files }}' 59 | 60 | view-changed-files-rest-api: 61 | name: View all changed files using REST API 62 | runs-on: ubuntu-latest 63 | needs: [changed-files-rest-api] 64 | steps: 65 | - name: List all changed files 66 | run: | 67 | echo '${{ needs.changed-files-rest-api.outputs.all_changed_files }}' 68 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 
42 | build/Release 43 | 44 | # Dependency directories 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Optional REPL history 60 | .node_repl_history 61 | 62 | # Output of 'npm pack' 63 | *.tgz 64 | 65 | # Yarn Integrity file 66 | .yarn-integrity 67 | 68 | # dotenv environment variables file 69 | ./.env 70 | .env/../.env 71 | ./.env.local 72 | ./.env/../.env.local 73 | .env 74 | .env.test 75 | 76 | # parcel-bundler cache (https://parceljs.org/) 77 | .cache 78 | 79 | # next.js build output 80 | .next 81 | 82 | # nuxt.js build output 83 | .nuxt 84 | 85 | # vuepress build output 86 | .vuepress/dist 87 | 88 | # Serverless directories 89 | .serverless/ 90 | 91 | # FuseBox cache 92 | .fusebox/ 93 | 94 | # DynamoDB Local files 95 | .dynamodb/ 96 | 97 | # OS metadata 98 | .DS_Store 99 | Thumbs.db 100 | 101 | # Ignore built ts files 102 | __tests__/runner/* 103 | lib/**/* 104 | 105 | # IDEA 106 | .idea/ 107 | 108 | -------------------------------------------------------------------------------- /.github/workflows/codacy-analysis.yml: -------------------------------------------------------------------------------- 1 | # This workflow checks out code, performs a Codacy security scan 2 | # and integrates the results with the 3 | # GitHub Advanced Security code scanning feature. For more information on 4 | # the Codacy security scan action usage and parameters, see 5 | # https://github.com/codacy/codacy-analysis-cli-action. 6 | # For more information on Codacy Analysis CLI in general, see 7 | # https://github.com/codacy/codacy-analysis-cli. 
8 | 9 | name: Codacy Security Scan 10 | 11 | on: 12 | push: 13 | branches: [ main ] 14 | pull_request: 15 | # The branches below must be a subset of the branches above 16 | branches: [ main ] 17 | schedule: 18 | - cron: '15 16 * * 2' 19 | 20 | permissions: 21 | actions: read 22 | contents: read 23 | security-events: write 24 | 25 | jobs: 26 | codacy-security-scan: 27 | name: Codacy Security Scan 28 | runs-on: ubuntu-latest 29 | steps: 30 | # Checkout the repository to the GitHub Actions runner 31 | - name: Checkout code 32 | uses: actions/checkout@v4 33 | 34 | # Execute Codacy Analysis CLI and generate a SARIF output with the security issues identified during the analysis 35 | - name: Run Codacy Analysis CLI 36 | continue-on-error: true 37 | uses: codacy/codacy-analysis-cli-action@v4.4.5 38 | with: 39 | # Check https://github.com/codacy/codacy-analysis-cli#project-token to get your project token from your Codacy repository 40 | # You can also omit the token and run the tools that support default configurations 41 | project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} 42 | verbose: true 43 | output: results.sarif 44 | format: sarif 45 | # Adjust severity of non-security issues 46 | gh-code-scanning-compat: true 47 | # Force 0 exit code to allow SARIF file generation 48 | # This will hand over control about PR rejection to the GitHub side 49 | max-allowed-issues: 2147483647 50 | 51 | # Upload the SARIF file generated in the previous step 52 | - name: Upload SARIF results file 53 | continue-on-error: true 54 | uses: github/codeql-action/upload-sarif@v3 55 | with: 56 | sarif_file: results.sarif 57 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@tj-actions/changed-files", 3 | "version": "44.5.7", 4 | "description": "Github action to retrieve all (added, copied, modified, deleted, renamed, type changed, unmerged, unknown) 
files and directories.", 5 | "main": "lib/main.js", 6 | "publishConfig": { 7 | "registry": "https://npm.pkg.github.com" 8 | }, 9 | "scripts": { 10 | "build": "tsc", 11 | "format": "prettier --write src/*.ts src/**/*.ts", 12 | "format-check": "prettier --check src/*.ts src/**/*.ts", 13 | "lint": "eslint src/*.ts src/**/*.ts --max-warnings 0", 14 | "lint:fix": "eslint --fix src/*.ts src/**/*.ts", 15 | "package": "ncc build lib/main.js --source-map --license licenses.txt", 16 | "test": "jest --coverage", 17 | "update-snapshot": "jest -u", 18 | "all": "yarn build && yarn format && yarn lint && yarn package && yarn test" 19 | }, 20 | "repository": { 21 | "type": "git", 22 | "url": "git+https://github.com/tj-actions/changed-files.git" 23 | }, 24 | "keywords": [ 25 | "actions", 26 | "glob", 27 | "github-actions" 28 | ], 29 | "author": "Tonye Jack", 30 | "license": "MIT", 31 | "bugs": { 32 | "url": "https://github.com/tj-actions/changed-files/issues" 33 | }, 34 | "homepage": "https://github.com/tj-actions/changed-files#readme", 35 | "dependencies": { 36 | "@actions/core": "^1.10.0", 37 | "@actions/exec": "^1.1.1", 38 | "@actions/github": "^6.0.0", 39 | "@octokit/rest": "^21.0.0", 40 | "@stdlib/utils-convert-path": "^0.2.1", 41 | "lodash": "^4.17.21", 42 | "micromatch": "^4.0.5", 43 | "yaml": "^2.3.1" 44 | }, 45 | "devDependencies": { 46 | "@types/jest": "^29.5.2", 47 | "@types/lodash": "^4.14.195", 48 | "@types/micromatch": "^4.0.2", 49 | "@types/node": "^22.0.0", 50 | "@types/uuid": "^10.0.0", 51 | "@typescript-eslint/eslint-plugin": "^7.0.0", 52 | "@typescript-eslint/parser": "^7.0.0", 53 | "@vercel/ncc": "^0.38.0", 54 | "eslint": "^8.43.0", 55 | "eslint-config-prettier": "^9.0.0", 56 | "eslint-plugin-github": "^5.0.0", 57 | "eslint-plugin-jest": "^28.0.0", 58 | "eslint-plugin-prettier": "^5.0.0-alpha.2", 59 | "jest": "^29.5.0", 60 | "prettier": "^3.0.0", 61 | "ts-jest": "^29.1.0", 62 | "typescript": "^5.1.3" 63 | } 64 | } 65 | 
-------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Proactive Security Measures 4 | 5 | To proactively detect and address security vulnerabilities, we utilize several robust tools and processes: 6 | 7 | - **Dependency Updates:** We use [Renovate](https://renovatebot.com) and [Dependabot](https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates) to keep our dependencies updated and promptly patch detected vulnerabilities through automated PRs. 8 | - **[GitHub's Security Features](https://github.com/features/security):** Our repository and dependencies are continuously monitored via GitHub's security features, which include: 9 | - **Code Scanning:** Using GitHub's CodeQL, all pull requests are scanned to identify potential vulnerabilities in our source code. 10 | - **Automated Alerts:** Dependabot identifies vulnerabilities based on the GitHub Advisory Database and opens PRs with patches, while automated [secret scanning](https://docs.github.com/en/enterprise-cloud@latest/code-security/secret-scanning/about-secret-scanning#about-secret-scanning-for-partner-patterns) provides alerts for detected secrets. 11 | - **[GitGuardian Security Checks](https://www.gitguardian.com/):** We employ GitGuardian to ensure security checks are performed on the codebase, enhancing the overall security of our project. 12 | - **Code Analysis and Security Scanning:** With the help of [Codacy Static Code Analysis](https://www.codacy.com/) and [Codacy Security Scan](https://security.codacy.com/), we conduct thorough analyses and scans of our code for potential security risks. 13 | 14 | ## Reporting Security Vulnerabilities 15 | 16 | Despite our best efforts to deliver secure software, we acknowledge the invaluable role of the community in identifying security breaches. 
17 | 18 | ### Private Vulnerability Disclosures 19 | 20 | We request all suspected vulnerabilities to be responsibly and privately disclosed by sending an email to [support@tj-actions.online](mailto:support@tj-actions.online). 21 | 22 | ### Public Vulnerability Disclosures 23 | 24 | For publicly disclosed security vulnerabilities, please **IMMEDIATELY** email [support@tj-actions.online](mailto:support@tj-actions.online) with the details for prompt action. 25 | 26 | Upon confirmation of a breach, reporters will receive full credit and recognition for their contribution. Please note that we do not offer monetary compensation for reporting vulnerabilities. 27 | 28 | ## Communication of Security Breaches 29 | 30 | We will utilize the [GitHub Security Advisory](https://github.com/tj-actions/changed-files/security/advisories) to communicate any security breaches. The advisory will be made public once a patch has been released to rectify the issue. 31 | 32 | We appreciate your cooperation and contribution to maintaining the security of our software. Remember, a secure community is a strong community. 
33 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": [ 3 | "jest", 4 | "@typescript-eslint", 5 | "github" 6 | ], 7 | "extends": [ 8 | "plugin:github/recommended", 9 | "plugin:prettier/recommended" 10 | ], 11 | "parser": "@typescript-eslint/parser", 12 | "parserOptions": { 13 | "ecmaVersion": 9, 14 | "sourceType": "module", 15 | "project": "./tsconfig.json" 16 | }, 17 | "rules": { 18 | "i18n-text/no-en": "off", 19 | "eslint-comments/no-use": "off", 20 | "import/no-namespace": "off", 21 | "no-unused-vars": "off", 22 | "@typescript-eslint/no-unused-vars": "error", 23 | "@typescript-eslint/explicit-member-accessibility": [ 24 | "error", 25 | { 26 | "accessibility": "no-public" 27 | } 28 | ], 29 | "@typescript-eslint/no-require-imports": "error", 30 | "@typescript-eslint/array-type": "error", 31 | "@typescript-eslint/await-thenable": "error", 32 | "@typescript-eslint/ban-ts-comment": "off", 33 | "camelcase": "off", 34 | "@typescript-eslint/consistent-type-assertions": "error", 35 | "@typescript-eslint/explicit-function-return-type": [ 36 | "error", 37 | { 38 | "allowExpressions": true 39 | } 40 | ], 41 | "@typescript-eslint/func-call-spacing": [ 42 | "error", 43 | "never" 44 | ], 45 | "@typescript-eslint/no-array-constructor": "error", 46 | "@typescript-eslint/no-empty-interface": "error", 47 | "@typescript-eslint/no-explicit-any": "error", 48 | "@typescript-eslint/no-extraneous-class": "error", 49 | "@typescript-eslint/no-for-in-array": "error", 50 | "@typescript-eslint/no-inferrable-types": "error", 51 | "@typescript-eslint/no-misused-new": "error", 52 | "@typescript-eslint/no-namespace": "error", 53 | "@typescript-eslint/no-non-null-assertion": "warn", 54 | "@typescript-eslint/no-unnecessary-qualifier": "error", 55 | "@typescript-eslint/no-unnecessary-type-assertion": "error", 56 | 
"@typescript-eslint/no-useless-constructor": "error", 57 | "@typescript-eslint/no-var-requires": "error", 58 | "@typescript-eslint/prefer-for-of": "warn", 59 | "@typescript-eslint/prefer-function-type": "warn", 60 | "@typescript-eslint/prefer-includes": "error", 61 | "@typescript-eslint/prefer-string-starts-ends-with": "error", 62 | "@typescript-eslint/promise-function-async": "error", 63 | "@typescript-eslint/require-array-sort-compare": "error", 64 | "@typescript-eslint/restrict-plus-operands": "error", 65 | "no-shadow": "off", 66 | "@typescript-eslint/no-shadow": "error", 67 | "semi": "off", 68 | "filenames/match-regex": [ 69 | "error", 70 | "^[a-zA-Z0-9\\-.]+$", 71 | true 72 | ], 73 | "@typescript-eslint/semi": [ 74 | "error", 75 | "never" 76 | ], 77 | "@typescript-eslint/type-annotation-spacing": "error", 78 | "@typescript-eslint/unbound-method": "error" 79 | }, 80 | "env": { 81 | "node": true, 82 | "es6": true, 83 | "jest/globals": true 84 | } 85 | } -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "main" ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ "main" ] 20 | schedule: 21 | - cron: '44 20 * * 0' 22 | 23 | permissions: 24 | actions: read 25 | contents: read 26 | security-events: write 27 | 28 | jobs: 29 | analyze: 30 | name: Analyze 31 | runs-on: ubuntu-latest 32 | permissions: 33 | actions: read 34 | contents: read 35 | security-events: write 36 | 37 | strategy: 38 | fail-fast: false 39 | matrix: 40 | language: [ 'javascript' ] 41 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 42 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 43 | 44 | steps: 45 | - name: Checkout repository 46 | uses: actions/checkout@v4 47 | 48 | # Initializes the CodeQL tools for scanning. 49 | - name: Initialize CodeQL 50 | uses: github/codeql-action/init@v3 51 | with: 52 | languages: ${{ matrix.language }} 53 | # If you wish to specify custom queries, you can do so here or in a config file. 54 | # By default, queries listed here will override any specified in a config file. 55 | # Prefix the list here with "+" to use these queries and those in the config file. 56 | 57 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 58 | # queries: security-extended,security-and-quality 59 | 60 | 61 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 62 | # If this step fails, then you should remove it and run the build manually (see below) 63 | - name: Autobuild 64 | uses: github/codeql-action/autobuild@v3 65 | 66 | # ℹ️ Command-line programs to run using the OS shell. 
67 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 68 | 69 | # If the Autobuild fails above, remove it and uncomment the following three lines. 70 | # Modify them (or add more) to build your code; please refer to the EXAMPLE below for guidance. 71 | 72 | # - run: | 73 | # echo "Run, Build Application using script" 74 | # ./location_of_script_within_repo/buildscript.sh 75 | 76 | - name: Perform CodeQL Analysis 77 | uses: github/codeql-action/analyze@v3 78 | with: 79 | category: "/language:${{matrix.language}}" 80 | -------------------------------------------------------------------------------- /src/__tests__/inputs.test.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {getInputs, Inputs} from '../inputs' 3 | import {DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS} from '../constant' 4 | 5 | jest.mock('@actions/core') 6 | 7 | describe('getInputs', () => { 8 | afterEach(() => { 9 | jest.clearAllMocks() 10 | }) 11 | 12 | test('should return default values when no inputs are provided', () => { 13 | ;(core.getInput as jest.Mock).mockImplementation(name => { 14 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 15 | return g[1].toUpperCase() 16 | }) as keyof Inputs 17 | 18 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 19 | '') as string 20 | }) 21 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 22 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 23 | return g[1].toUpperCase() 24 | }) as keyof Inputs 25 | 26 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 27 | false) as boolean 28 | }) 29 | expect(getInputs()).toMatchSnapshot() 30 | }) 31 | 32 | test('should correctly parse boolean inputs', () => { 33 | ;(core.getInput as jest.Mock).mockImplementation(name => { 34 | const camelCaseName = 
name.replace(/_([a-z])/g, (g: string[]) => { 35 | return g[1].toUpperCase() 36 | }) as keyof Inputs 37 | 38 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 39 | '') as string 40 | }) 41 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 42 | switch (name) { 43 | case 'matrix': 44 | return 'true' 45 | case 'skip_initial_fetch': 46 | return 'true' 47 | default: 48 | return 'false' 49 | } 50 | }) 51 | expect(getInputs()).toMatchSnapshot() 52 | }) 53 | 54 | test('should handle matrix alias correctly', () => { 55 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 56 | return name === 'matrix' ? 'true' : 'false' 57 | }) 58 | 59 | const inputs = getInputs() 60 | expect(inputs).toHaveProperty('json', true) 61 | expect(inputs).toHaveProperty('escapeJson', false) 62 | }) 63 | 64 | test('should correctly parse string inputs', () => { 65 | ;(core.getInput as jest.Mock).mockImplementation(name => { 66 | switch (name) { 67 | case 'token': 68 | return 'token' 69 | case 'api_url': 70 | return 'https://api.github.com' 71 | default: 72 | return '' 73 | } 74 | }) 75 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 76 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 77 | return g[1].toUpperCase() 78 | }) as keyof Inputs 79 | 80 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 81 | false) as boolean 82 | }) 83 | expect(getInputs()).toMatchSnapshot() 84 | }) 85 | 86 | test('should correctly parse numeric inputs', () => { 87 | ;(core.getInput as jest.Mock).mockImplementation(name => { 88 | switch (name) { 89 | case 'fetch_depth': 90 | return '5' 91 | case 'dir_names_max_depth': 92 | return '2' 93 | default: 94 | return '' 95 | } 96 | }) 97 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 98 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 99 | return g[1].toUpperCase() 100 | }) as keyof Inputs 101 | 102 | return 
(DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 103 | false) as boolean 104 | }) 105 | expect(getInputs()).toMatchSnapshot() 106 | }) 107 | 108 | test('should handle invalid numeric inputs correctly', () => { 109 | ;(core.getInput as jest.Mock).mockImplementation(name => { 110 | // TODO: Add validation for invalid numbers which should result in an error instead of NaN 111 | switch (name) { 112 | case 'fetch_depth': 113 | return 'invalid' 114 | case 'dir_names_max_depth': 115 | return '2' 116 | default: 117 | return '' 118 | } 119 | }) 120 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 121 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 122 | return g[1].toUpperCase() 123 | }) as keyof Inputs 124 | 125 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 126 | false) as boolean 127 | }) 128 | expect(getInputs()).toMatchSnapshot() 129 | }) 130 | 131 | test('should handle negative numeric inputs correctly', () => { 132 | ;(core.getInput as jest.Mock).mockImplementation(name => { 133 | // TODO: Add validation for negative numbers which should result in an error 134 | switch (name) { 135 | case 'fetch_depth': 136 | return '-5' 137 | case 'dir_names_max_depth': 138 | return '-2' 139 | default: 140 | return '' 141 | } 142 | }) 143 | ;(core.getBooleanInput as jest.Mock).mockImplementation(name => { 144 | const camelCaseName = name.replace(/_([a-z])/g, (g: string[]) => { 145 | return g[1].toUpperCase() 146 | }) as keyof Inputs 147 | 148 | return (DEFAULT_VALUES_OF_UNSUPPORTED_API_INPUTS[camelCaseName] || 149 | false) as boolean 150 | }) 151 | expect(getInputs()).toMatchSnapshot() 152 | }) 153 | }) 154 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to 
make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 
45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | jtonye@ymail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. 
No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 
129 | -------------------------------------------------------------------------------- /.github/workflows/issue-comment-job-example.yml: -------------------------------------------------------------------------------- 1 | name: Issue Comment Job Example 2 | 3 | permissions: 4 | contents: read 5 | 6 | on: 7 | issue_comment: 8 | 9 | jobs: 10 | pr_commented: 11 | # This job only runs for pull request comments 12 | name: PR comment 13 | if: ${{ github.event.issue.pull_request }} 14 | runs-on: ubuntu-latest 15 | steps: 16 | - run: | 17 | echo A comment on PR $NUMBER 18 | env: 19 | NUMBER: ${{ github.event.issue.number }} 20 | 21 | - name: Checkout 22 | uses: actions/checkout@v4 23 | with: 24 | submodules: true 25 | fetch-depth: 0 26 | 27 | - name: Dump GitHub context 28 | env: 29 | GITHUB_CONTEXT: ${{ toJson(github) }} 30 | run: echo "$GITHUB_CONTEXT" 31 | 32 | - name: Run changed-files with defaults 33 | id: changed-files 34 | uses: ./ 35 | 36 | - name: Show output 37 | run: | 38 | echo '${{ toJSON(steps.changed-files.outputs) }}' 39 | shell: 40 | bash 41 | 42 | - name: Run changed-files for old new filenames test rename 43 | id: changed-files-all-old-new-renamed-files 44 | uses: ./ 45 | with: 46 | base_sha: d1c0ee4 47 | sha: 4d04215 48 | fetch_depth: 60000 49 | include_all_old_new_renamed_files: true 50 | 51 | - name: Show output 52 | run: | 53 | echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}' 54 | shell: 55 | bash 56 | 57 | - name: Show output 58 | run: | 59 | echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}' 60 | shell: 61 | bash 62 | 63 | - name: Check all_old_new_renamed_files output on non windows platform 64 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'" 65 | run: | 66 | echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ 
steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})" 67 | exit 1 68 | shell: 69 | bash 70 | 71 | - name: Check all_old_new_renamed_files output on windows platform 72 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'" 73 | run: | 74 | echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})" 75 | exit 1 76 | shell: 77 | bash 78 | 79 | - name: Check the renamed_files output on non windows platform 80 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'" 81 | run: | 82 | echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})" 83 | exit 1 84 | shell: 85 | bash 86 | 87 | - name: Check the renamed_files output on windows platform 88 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'" 89 | run: | 90 | echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})" 91 | exit 1 92 | shell: 93 | bash 94 | 95 | issue_commented: 96 | # This job only runs for issue comments 97 | name: Issue comment 98 | if: ${{ !github.event.issue.pull_request }} 99 | runs-on: ubuntu-latest 100 | steps: 101 | - run: | 102 | echo A comment on issue $NUMBER 103 | env: 104 | NUMBER: ${{ github.event.issue.number }} 105 | 106 | - name: Checkout 107 | uses: actions/checkout@v4 108 | with: 109 | submodules: true 110 | fetch-depth: 0 111 | 112 | - name: Dump GitHub context 113 | env: 114 | GITHUB_CONTEXT: ${{ toJson(github) }} 115 | run: echo "$GITHUB_CONTEXT" 116 | 
117 | - name: Run changed-files with defaults 118 | id: changed-files 119 | uses: ./ 120 | 121 | - name: Show output 122 | run: | 123 | echo '${{ toJSON(steps.changed-files.outputs) }}' 124 | shell: 125 | bash 126 | 127 | - name: Run changed-files for old new filenames test rename 128 | id: changed-files-all-old-new-renamed-files 129 | uses: ./ 130 | with: 131 | base_sha: d1c0ee4 132 | sha: 4d04215 133 | fetch_depth: 60000 134 | include_all_old_new_renamed_files: true 135 | 136 | - name: Show output 137 | run: | 138 | echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}' 139 | shell: 140 | bash 141 | 142 | - name: Show output 143 | run: | 144 | echo '${{ toJSON(steps.changed-files-all-old-new-renamed-files.outputs) }}' 145 | shell: 146 | bash 147 | 148 | - name: Check all_old_new_renamed_files output on non windows platform 149 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test/test rename 1.txt,test/test rename-1.txt') && runner.os != 'Windows'" 150 | run: | 151 | echo "Invalid output: Expected to include (test/test rename 1.txt,test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})" 152 | exit 1 153 | shell: 154 | bash 155 | 156 | - name: Check all_old_new_renamed_files output on windows platform 157 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files, 'test\\test rename 1.txt,test\\test rename-1.txt') && runner.os == 'Windows'" 158 | run: | 159 | echo "Invalid output: Expected to not include (test\\test rename 1.txt,test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.all_old_new_renamed_files }})" 160 | exit 1 161 | shell: 162 | bash 163 | 164 | - name: Check the renamed_files output on non windows platform 165 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test/test rename-1.txt') && runner.os != 'Windows'" 
166 | run: | 167 | echo "Invalid output: Expected to include (test/test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})" 168 | exit 1 169 | shell: 170 | bash 171 | 172 | - name: Check the renamed_files output on windows platform 173 | if: "!contains(steps.changed-files-all-old-new-renamed-files.outputs.renamed_files, 'test\\test rename-1.txt') && runner.os == 'Windows'" 174 | run: | 175 | echo "Invalid output: Expected to not include (test\\test rename-1.txt) got (${{ steps.changed-files-all-old-new-renamed-files.outputs.renamed_files }})" 176 | exit 1 177 | shell: 178 | bash 179 | -------------------------------------------------------------------------------- /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "README.md" 4 | ], 5 | "imageSize": 100, 6 | "commit": false, 7 | "contributors": [ 8 | { 9 | "login": "jsoref", 10 | "name": "Josh Soref", 11 | "avatar_url": "https://avatars.githubusercontent.com/u/2119212?v=4", 12 | "profile": "https://github.com/jsoref", 13 | "contributions": [ 14 | "doc" 15 | ] 16 | }, 17 | { 18 | "login": "monoxgas", 19 | "name": "Nick Landers", 20 | "avatar_url": "https://avatars.githubusercontent.com/u/1223016?v=4", 21 | "profile": "https://github.com/monoxgas", 22 | "contributions": [ 23 | "code" 24 | ] 25 | }, 26 | { 27 | "login": "Kras4ooo", 28 | "name": "Krasimir Nikolov", 29 | "avatar_url": "https://avatars.githubusercontent.com/u/1948054?v=4", 30 | "profile": "https://github.com/Kras4ooo", 31 | "contributions": [ 32 | "code", 33 | "doc" 34 | ] 35 | }, 36 | { 37 | "login": "IvanPizhenko", 38 | "name": "Ivan Pizhenko", 39 | "avatar_url": "https://avatars.githubusercontent.com/u/11859904?v=4", 40 | "profile": "https://github.com/IvanPizhenko", 41 | "contributions": [ 42 | "code", 43 | "doc" 44 | ] 45 | }, 46 | { 47 | "login": "talva-tr", 48 | "name": "talva-tr", 49 | "avatar_url": 
"https://avatars.githubusercontent.com/u/82046981?v=4", 50 | "profile": "https://github.com/talva-tr", 51 | "contributions": [ 52 | "code" 53 | ] 54 | }, 55 | { 56 | "login": "eltociear", 57 | "name": "Ikko Ashimine", 58 | "avatar_url": "https://avatars.githubusercontent.com/u/22633385?v=4", 59 | "profile": "https://bandism.net/", 60 | "contributions": [ 61 | "doc" 62 | ] 63 | }, 64 | { 65 | "login": "Zamiell", 66 | "name": "James", 67 | "avatar_url": "https://avatars.githubusercontent.com/u/5511220?v=4", 68 | "profile": "https://github.com/Zamiell", 69 | "contributions": [ 70 | "doc" 71 | ] 72 | }, 73 | { 74 | "login": "wushujames", 75 | "name": "James Cheng", 76 | "avatar_url": "https://avatars.githubusercontent.com/u/677529?v=4", 77 | "profile": "https://github.com/wushujames", 78 | "contributions": [ 79 | "doc" 80 | ] 81 | }, 82 | { 83 | "login": "massongit", 84 | "name": "Masaya Suzuki", 85 | "avatar_url": "https://avatars.githubusercontent.com/u/15100604?v=4", 86 | "profile": "https://qiita.com/SUZUKI_Masaya", 87 | "contributions": [ 88 | "code" 89 | ] 90 | }, 91 | { 92 | "login": "fagai", 93 | "name": "fagai", 94 | "avatar_url": "https://avatars.githubusercontent.com/u/1772112?v=4", 95 | "profile": "https://fagai.net", 96 | "contributions": [ 97 | "doc" 98 | ] 99 | }, 100 | { 101 | "login": "pkit", 102 | "name": "Constantine Peresypkin", 103 | "avatar_url": "https://avatars.githubusercontent.com/u/805654?v=4", 104 | "profile": "https://github.com/pkit", 105 | "contributions": [ 106 | "code" 107 | ] 108 | }, 109 | { 110 | "login": "deronnax", 111 | "name": "Mathieu Dupuy", 112 | "avatar_url": "https://avatars.githubusercontent.com/u/439279?v=4", 113 | "profile": "https://github.com/deronnax", 114 | "contributions": [ 115 | "doc" 116 | ] 117 | }, 118 | { 119 | "login": "JoeOvo", 120 | "name": "Joe Moggridge", 121 | "avatar_url": "https://avatars.githubusercontent.com/u/100686542?v=4", 122 | "profile": "https://github.com/JoeOvo", 123 | "contributions": [ 124 | 
"doc" 125 | ] 126 | }, 127 | { 128 | "login": "thyarles", 129 | "name": "Charles Santos", 130 | "avatar_url": "https://avatars.githubusercontent.com/u/1340046?v=4", 131 | "profile": "https://www.credly.com/users/thyarles/badges", 132 | "contributions": [ 133 | "code" 134 | ] 135 | }, 136 | { 137 | "login": "kostiantyn-korniienko-aurea", 138 | "name": "Kostiantyn Korniienko", 139 | "avatar_url": "https://avatars.githubusercontent.com/u/37180625?v=4", 140 | "profile": "https://github.com/kostiantyn-korniienko-aurea", 141 | "contributions": [ 142 | "doc" 143 | ] 144 | }, 145 | { 146 | "login": "lpulley", 147 | "name": "Logan Pulley", 148 | "avatar_url": "https://avatars.githubusercontent.com/u/7193187?v=4", 149 | "profile": "https://github.com/lpulley", 150 | "contributions": [ 151 | "code" 152 | ] 153 | }, 154 | { 155 | "login": "kenji-miyake", 156 | "name": "Kenji Miyake", 157 | "avatar_url": "https://avatars.githubusercontent.com/u/31987104?v=4", 158 | "profile": "https://www.linkedin.com/in/kenji-miyake/", 159 | "contributions": [ 160 | "code" 161 | ] 162 | }, 163 | { 164 | "login": "adonisgarciac", 165 | "name": "adonisgarciac", 166 | "avatar_url": "https://avatars.githubusercontent.com/u/71078987?v=4", 167 | "profile": "https://github.com/adonisgarciac", 168 | "contributions": [ 169 | "code", 170 | "doc" 171 | ] 172 | }, 173 | { 174 | "login": "cfernhout", 175 | "name": "Chiel Fernhout", 176 | "avatar_url": "https://avatars.githubusercontent.com/u/22294606?v=4", 177 | "profile": "https://github.com/cfernhout", 178 | "contributions": [ 179 | "doc" 180 | ] 181 | }, 182 | { 183 | "login": "albertoperdomo2", 184 | "name": "Alberto Perdomo", 185 | "avatar_url": "https://avatars.githubusercontent.com/u/62241095?v=4", 186 | "profile": "https://github.com/albertoperdomo2", 187 | "contributions": [ 188 | "doc" 189 | ] 190 | }, 191 | { 192 | "login": "V0lantis", 193 | "name": "Arthur", 194 | "avatar_url": "https://avatars.githubusercontent.com/u/37664438?v=4", 195 | 
"profile": "https://arthurvolant.com", 196 | "contributions": [ 197 | "bug", 198 | "code" 199 | ] 200 | }, 201 | { 202 | "login": "rodrigorfk", 203 | "name": "Rodrigo Fior Kuntzer", 204 | "avatar_url": "https://avatars.githubusercontent.com/u/1995033?v=4", 205 | "profile": "https://github.com/rodrigorfk", 206 | "contributions": [ 207 | "code", 208 | "test", 209 | "bug" 210 | ] 211 | }, 212 | { 213 | "login": "levenleven", 214 | "name": "Aleksey Levenstein", 215 | "avatar_url": "https://avatars.githubusercontent.com/u/6463364?v=4", 216 | "profile": "https://github.com/levenleven", 217 | "contributions": [ 218 | "doc" 219 | ] 220 | }, 221 | { 222 | "login": "dan-hill2802", 223 | "name": "Daniel Hill", 224 | "avatar_url": "https://avatars.githubusercontent.com/u/5046322?v=4", 225 | "profile": "https://github.com/dan-hill2802", 226 | "contributions": [ 227 | "doc" 228 | ] 229 | }, 230 | { 231 | "login": "KeisukeYamashita", 232 | "name": "KeisukeYamashita", 233 | "avatar_url": "https://avatars.githubusercontent.com/u/23056537?v=4", 234 | "profile": "https://keisukeyamashita.com", 235 | "contributions": [ 236 | "doc" 237 | ] 238 | }, 239 | { 240 | "login": "codesculpture", 241 | "name": "Aravind", 242 | "avatar_url": "https://avatars.githubusercontent.com/u/63452117?v=4", 243 | "profile": "https://github.com/codesculpture", 244 | "contributions": [ 245 | "code", 246 | "bug" 247 | ] 248 | } 249 | ], 250 | "contributorsPerLine": 7, 251 | "projectName": "changed-files", 252 | "projectOwner": "tj-actions", 253 | "repoType": "github", 254 | "repoHost": "https://github.com", 255 | "skipCi": true, 256 | "commitConvention": "angular", 257 | "commitType": "docs" 258 | } 259 | -------------------------------------------------------------------------------- /src/main.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | import path from 'path' 4 | import { 5 | 
processChangedFiles, 6 | ChangeTypeEnum, 7 | getAllDiffFiles, 8 | getChangedFilesFromGithubAPI, 9 | getRenamedFiles 10 | } from './changedFiles' 11 | import { 12 | DiffResult, 13 | getSHAForNonPullRequestEvent, 14 | getSHAForPullRequestEvent 15 | } from './commitSha' 16 | import {Env, getEnv} from './env' 17 | import {getInputs, Inputs} from './inputs' 18 | import { 19 | getFilePatterns, 20 | getRecoverFilePatterns, 21 | getSubmodulePath, 22 | getYamlFilePatterns, 23 | hasLocalGitDirectory, 24 | isRepoShallow, 25 | recoverDeletedFiles, 26 | setOutput, 27 | submoduleExists, 28 | updateGitGlobalConfig, 29 | verifyMinimumGitVersion, 30 | warnUnsupportedRESTAPIInputs 31 | } from './utils' 32 | 33 | const getChangedFilesFromLocalGitHistory = async ({ 34 | inputs, 35 | env, 36 | workingDirectory, 37 | filePatterns, 38 | yamlFilePatterns 39 | }: { 40 | inputs: Inputs 41 | env: Env 42 | workingDirectory: string 43 | filePatterns: string[] 44 | yamlFilePatterns: Record 45 | }): Promise => { 46 | await verifyMinimumGitVersion() 47 | 48 | let quotepathValue = 'on' 49 | 50 | if (!inputs.quotepath) { 51 | quotepathValue = 'off' 52 | } 53 | 54 | await updateGitGlobalConfig({ 55 | name: 'core.quotepath', 56 | value: quotepathValue 57 | }) 58 | 59 | if (inputs.diffRelative) { 60 | await updateGitGlobalConfig({ 61 | name: 'diff.relative', 62 | value: 'true' 63 | }) 64 | } 65 | 66 | const isShallow = await isRepoShallow({cwd: workingDirectory}) 67 | let diffSubmodule = false 68 | let gitFetchExtraArgs = ['--no-tags', '--prune'] 69 | 70 | if (inputs.excludeSubmodules) { 71 | core.info('Excluding submodules from the diff') 72 | } else { 73 | diffSubmodule = await submoduleExists({cwd: workingDirectory}) 74 | } 75 | 76 | if (diffSubmodule) { 77 | gitFetchExtraArgs.push('--recurse-submodules') 78 | } 79 | 80 | const isTag = env.GITHUB_REF?.startsWith('refs/tags/') 81 | const remoteName = 'origin' 82 | const outputRenamedFilesAsDeletedAndAdded = 83 | 
inputs.outputRenamedFilesAsDeletedAndAdded 84 | let submodulePaths: string[] = [] 85 | 86 | if (diffSubmodule) { 87 | submodulePaths = await getSubmodulePath({cwd: workingDirectory}) 88 | } 89 | 90 | if (isTag) { 91 | gitFetchExtraArgs = ['--prune', '--no-recurse-submodules'] 92 | } 93 | 94 | let diffResult: DiffResult 95 | 96 | if (!github.context.payload.pull_request?.base?.ref) { 97 | core.info(`Running on a ${github.context.eventName || 'push'} event...`) 98 | diffResult = await getSHAForNonPullRequestEvent({ 99 | inputs, 100 | env, 101 | workingDirectory, 102 | isShallow, 103 | diffSubmodule, 104 | gitFetchExtraArgs, 105 | isTag, 106 | remoteName 107 | }) 108 | } else { 109 | core.info( 110 | `Running on a ${github.context.eventName || 'pull_request'} (${ 111 | github.context.payload.action 112 | }) event...` 113 | ) 114 | diffResult = await getSHAForPullRequestEvent({ 115 | inputs, 116 | workingDirectory, 117 | isShallow, 118 | diffSubmodule, 119 | gitFetchExtraArgs, 120 | remoteName 121 | }) 122 | } 123 | 124 | if (diffResult.initialCommit) { 125 | core.info('This is the first commit for this repository; exiting...') 126 | core.endGroup() 127 | return 128 | } 129 | 130 | core.info( 131 | `Retrieving changes between ${diffResult.previousSha} (${diffResult.targetBranch}) → ${diffResult.currentSha} (${diffResult.currentBranch})` 132 | ) 133 | 134 | const allDiffFiles = await getAllDiffFiles({ 135 | workingDirectory, 136 | diffSubmodule, 137 | diffResult, 138 | submodulePaths, 139 | outputRenamedFilesAsDeletedAndAdded, 140 | fetchAdditionalSubmoduleHistory: inputs.fetchAdditionalSubmoduleHistory, 141 | failOnInitialDiffError: inputs.failOnInitialDiffError, 142 | failOnSubmoduleDiffError: inputs.failOnSubmoduleDiffError 143 | }) 144 | core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`) 145 | core.info('All Done!') 146 | core.endGroup() 147 | 148 | if (inputs.recoverDeletedFiles) { 149 | let recoverPatterns = getRecoverFilePatterns({inputs}) 150 | 151 | 
if (recoverPatterns.length > 0 && filePatterns.length > 0) { 152 | core.info('No recover patterns found; defaulting to file patterns') 153 | recoverPatterns = filePatterns 154 | } 155 | 156 | await recoverDeletedFiles({ 157 | inputs, 158 | workingDirectory, 159 | deletedFiles: allDiffFiles[ChangeTypeEnum.Deleted], 160 | recoverPatterns, 161 | diffResult, 162 | diffSubmodule, 163 | submodulePaths 164 | }) 165 | } 166 | 167 | await processChangedFiles({ 168 | filePatterns, 169 | allDiffFiles, 170 | inputs, 171 | yamlFilePatterns, 172 | workingDirectory 173 | }) 174 | 175 | if (inputs.includeAllOldNewRenamedFiles) { 176 | core.startGroup('changed-files-all-old-new-renamed-files') 177 | const allOldNewRenamedFiles = await getRenamedFiles({ 178 | inputs, 179 | workingDirectory, 180 | diffSubmodule, 181 | diffResult, 182 | submodulePaths 183 | }) 184 | core.debug(`All old new renamed files: ${allOldNewRenamedFiles}`) 185 | await setOutput({ 186 | key: 'all_old_new_renamed_files', 187 | value: allOldNewRenamedFiles.paths, 188 | writeOutputFiles: inputs.writeOutputFiles, 189 | outputDir: inputs.outputDir, 190 | json: inputs.json, 191 | safeOutput: inputs.safeOutput 192 | }) 193 | await setOutput({ 194 | key: 'all_old_new_renamed_files_count', 195 | value: allOldNewRenamedFiles.count, 196 | writeOutputFiles: inputs.writeOutputFiles, 197 | outputDir: inputs.outputDir, 198 | json: inputs.json 199 | }) 200 | core.info('All Done!') 201 | core.endGroup() 202 | } 203 | } 204 | 205 | const getChangedFilesFromRESTAPI = async ({ 206 | inputs, 207 | filePatterns, 208 | yamlFilePatterns 209 | }: { 210 | inputs: Inputs 211 | filePatterns: string[] 212 | yamlFilePatterns: Record 213 | }): Promise => { 214 | const allDiffFiles = await getChangedFilesFromGithubAPI({ 215 | inputs 216 | }) 217 | core.debug(`All diff files: ${JSON.stringify(allDiffFiles)}`) 218 | core.info('All Done!') 219 | 220 | await processChangedFiles({ 221 | filePatterns, 222 | allDiffFiles, 223 | inputs, 224 | 
yamlFilePatterns 225 | }) 226 | } 227 | 228 | export async function run(): Promise { 229 | core.startGroup('changed-files') 230 | 231 | const env = await getEnv() 232 | core.debug(`Env: ${JSON.stringify(env, null, 2)}`) 233 | 234 | const inputs = getInputs() 235 | core.debug(`Inputs: ${JSON.stringify(inputs, null, 2)}`) 236 | 237 | const workingDirectory = path.resolve( 238 | env.GITHUB_WORKSPACE || process.cwd(), 239 | inputs.useRestApi ? '.' : inputs.path 240 | ) 241 | core.debug(`Working directory: ${workingDirectory}`) 242 | 243 | const hasGitDirectory = await hasLocalGitDirectory({workingDirectory}) 244 | core.debug(`Has git directory: ${hasGitDirectory}`) 245 | 246 | const filePatterns = await getFilePatterns({ 247 | inputs, 248 | workingDirectory 249 | }) 250 | core.debug(`File patterns: ${filePatterns}`) 251 | 252 | const yamlFilePatterns = await getYamlFilePatterns({ 253 | inputs, 254 | workingDirectory 255 | }) 256 | core.debug(`Yaml file patterns: ${JSON.stringify(yamlFilePatterns)}`) 257 | 258 | if (inputs.useRestApi && !github.context.payload.pull_request?.number) { 259 | throw new Error( 260 | "Only pull_request* events are supported when using GitHub's REST API." 261 | ) 262 | } 263 | 264 | if ( 265 | inputs.token && 266 | github.context.payload.pull_request?.number && 267 | (!hasGitDirectory || inputs.useRestApi) 268 | ) { 269 | core.info("Using GitHub's REST API to get changed files") 270 | await warnUnsupportedRESTAPIInputs({inputs}) 271 | await getChangedFilesFromRESTAPI({ 272 | inputs, 273 | filePatterns, 274 | yamlFilePatterns 275 | }) 276 | } else { 277 | if (!hasGitDirectory) { 278 | throw new Error( 279 | `Unable to locate the git repository in the given path: ${workingDirectory}.\n Please run actions/checkout before this action (Make sure the 'path' input is correct).\n If you intend to use Github's REST API note that only pull_request* events are supported. 
Current event is "${github.context.eventName}".` 280 | ) 281 | } 282 | 283 | core.info('Using local .git directory') 284 | await getChangedFilesFromLocalGitHistory({ 285 | inputs, 286 | env, 287 | workingDirectory, 288 | filePatterns, 289 | yamlFilePatterns 290 | }) 291 | } 292 | } 293 | 294 | // eslint-disable-next-line github/no-then 295 | run().catch(e => { 296 | core.setFailed(e.message || e) 297 | process.exit(1) 298 | }) 299 | -------------------------------------------------------------------------------- /src/inputs.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | 3 | export type Inputs = { 4 | files: string 5 | filesSeparator: string 6 | filesFromSourceFile: string 7 | filesFromSourceFileSeparator: string 8 | filesYaml: string 9 | filesYamlFromSourceFile: string 10 | filesYamlFromSourceFileSeparator: string 11 | filesIgnore: string 12 | filesIgnoreSeparator: string 13 | filesIgnoreFromSourceFile: string 14 | filesIgnoreFromSourceFileSeparator: string 15 | filesIgnoreYaml: string 16 | filesIgnoreYamlFromSourceFile: string 17 | filesIgnoreYamlFromSourceFileSeparator: string 18 | separator: string 19 | includeAllOldNewRenamedFiles: boolean 20 | oldNewSeparator: string 21 | oldNewFilesSeparator: string 22 | sha: string 23 | baseSha: string 24 | since: string 25 | until: string 26 | path: string 27 | quotepath: boolean 28 | diffRelative: boolean 29 | dirNames: boolean 30 | dirNamesMaxDepth?: number 31 | dirNamesExcludeCurrentDir: boolean 32 | dirNamesIncludeFiles: string 33 | dirNamesIncludeFilesSeparator: string 34 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: boolean 35 | json: boolean 36 | escapeJson: boolean 37 | safeOutput: boolean 38 | fetchDepth?: number 39 | fetchAdditionalSubmoduleHistory: boolean 40 | sinceLastRemoteCommit: boolean 41 | writeOutputFiles: boolean 42 | outputDir: string 43 | outputRenamedFilesAsDeletedAndAdded: boolean 44 | recoverDeletedFiles: boolean 45 
| recoverDeletedFilesToDestination: string 46 | recoverFiles: string 47 | recoverFilesSeparator: string 48 | recoverFilesIgnore: string 49 | recoverFilesIgnoreSeparator: string 50 | token: string 51 | apiUrl: string 52 | skipInitialFetch: boolean 53 | failOnInitialDiffError: boolean 54 | failOnSubmoduleDiffError: boolean 55 | negationPatternsFirst: boolean 56 | useRestApi: boolean 57 | excludeSubmodules: boolean 58 | fetchMissingHistoryMaxRetries?: number 59 | usePosixPathSeparator: boolean 60 | tagsPattern: string 61 | tagsIgnorePattern?: string 62 | } 63 | 64 | export const getInputs = (): Inputs => { 65 | const files = core.getInput('files', {required: false}) 66 | const filesSeparator = core.getInput('files_separator', { 67 | required: false, 68 | trimWhitespace: false 69 | }) 70 | const filesIgnore = core.getInput('files_ignore', {required: false}) 71 | const filesIgnoreSeparator = core.getInput('files_ignore_separator', { 72 | required: false, 73 | trimWhitespace: false 74 | }) 75 | const filesFromSourceFile = core.getInput('files_from_source_file', { 76 | required: false 77 | }) 78 | const filesFromSourceFileSeparator = core.getInput( 79 | 'files_from_source_file_separator', 80 | { 81 | required: false, 82 | trimWhitespace: false 83 | } 84 | ) 85 | const filesYaml = core.getInput('files_yaml', {required: false}) 86 | const filesYamlFromSourceFile = core.getInput('files_yaml_from_source_file', { 87 | required: false 88 | }) 89 | const filesYamlFromSourceFileSeparator = core.getInput( 90 | 'files_yaml_from_source_file_separator', 91 | { 92 | required: false, 93 | trimWhitespace: false 94 | } 95 | ) 96 | const filesIgnoreFromSourceFile = core.getInput( 97 | 'files_ignore_from_source_file', 98 | {required: false} 99 | ) 100 | const filesIgnoreFromSourceFileSeparator = core.getInput( 101 | 'files_ignore_from_source_file_separator', 102 | { 103 | required: false, 104 | trimWhitespace: false 105 | } 106 | ) 107 | const filesIgnoreYaml = 
core.getInput('files_ignore_yaml', {required: false}) 108 | const filesIgnoreYamlFromSourceFile = core.getInput( 109 | 'files_ignore_yaml_from_source_file', 110 | {required: false} 111 | ) 112 | const filesIgnoreYamlFromSourceFileSeparator = core.getInput( 113 | 'files_ignore_yaml_from_source_file_separator', 114 | { 115 | required: false, 116 | trimWhitespace: false 117 | } 118 | ) 119 | const separator = core.getInput('separator', { 120 | required: true, 121 | trimWhitespace: false 122 | }) 123 | const includeAllOldNewRenamedFiles = core.getBooleanInput( 124 | 'include_all_old_new_renamed_files', 125 | {required: false} 126 | ) 127 | const oldNewSeparator = core.getInput('old_new_separator', { 128 | required: true, 129 | trimWhitespace: false 130 | }) 131 | const oldNewFilesSeparator = core.getInput('old_new_files_separator', { 132 | required: true, 133 | trimWhitespace: false 134 | }) 135 | const sha = core.getInput('sha', {required: false}) 136 | const baseSha = core.getInput('base_sha', {required: false}) 137 | const since = core.getInput('since', {required: false}) 138 | const until = core.getInput('until', {required: false}) 139 | const path = core.getInput('path', {required: false}) 140 | const quotepath = core.getBooleanInput('quotepath', {required: false}) 141 | const diffRelative = core.getBooleanInput('diff_relative', {required: false}) 142 | const dirNames = core.getBooleanInput('dir_names', {required: false}) 143 | const dirNamesMaxDepth = core.getInput('dir_names_max_depth', { 144 | required: false 145 | }) 146 | const dirNamesExcludeCurrentDir = core.getBooleanInput( 147 | 'dir_names_exclude_current_dir', 148 | { 149 | required: false 150 | } 151 | ) 152 | const dirNamesIncludeFiles = core.getInput('dir_names_include_files', { 153 | required: false 154 | }) 155 | const dirNamesIncludeFilesSeparator = core.getInput( 156 | 'dir_names_include_files_separator', 157 | { 158 | required: false, 159 | trimWhitespace: false 160 | } 161 | ) 162 | let json = 
core.getBooleanInput('json', {required: false}) 163 | let escapeJson = core.getBooleanInput('escape_json', {required: false}) 164 | const matrix = core.getBooleanInput('matrix', {required: false}) 165 | 166 | if (matrix) { 167 | json = true 168 | escapeJson = false 169 | } 170 | 171 | const safeOutput = core.getBooleanInput('safe_output', {required: false}) 172 | const fetchDepth = core.getInput('fetch_depth', {required: false}) 173 | const sinceLastRemoteCommit = core.getBooleanInput( 174 | 'since_last_remote_commit', 175 | {required: false} 176 | ) 177 | const writeOutputFiles = core.getBooleanInput('write_output_files', { 178 | required: false 179 | }) 180 | const outputDir = core.getInput('output_dir', {required: false}) 181 | const outputRenamedFilesAsDeletedAndAdded = core.getBooleanInput( 182 | 'output_renamed_files_as_deleted_and_added', 183 | {required: false} 184 | ) 185 | const recoverDeletedFiles = core.getBooleanInput('recover_deleted_files', { 186 | required: false 187 | }) 188 | const recoverDeletedFilesToDestination = core.getInput( 189 | 'recover_deleted_files_to_destination', 190 | {required: false} 191 | ) 192 | const recoverFiles = core.getInput('recover_files', {required: false}) 193 | const recoverFilesSeparator = core.getInput('recover_files_separator', { 194 | required: false, 195 | trimWhitespace: false 196 | }) 197 | const recoverFilesIgnore = core.getInput('recover_files_ignore', { 198 | required: false 199 | }) 200 | const recoverFilesIgnoreSeparator = core.getInput( 201 | 'recover_files_ignore_separator', 202 | { 203 | required: false, 204 | trimWhitespace: false 205 | } 206 | ) 207 | const token = core.getInput('token', {required: false}) 208 | const apiUrl = core.getInput('api_url', {required: false}) 209 | const skipInitialFetch = core.getBooleanInput('skip_initial_fetch', { 210 | required: false 211 | }) 212 | const fetchAdditionalSubmoduleHistory = core.getBooleanInput( 213 | 'fetch_additional_submodule_history', 214 | { 215 | 
required: false 216 | } 217 | ) 218 | const failOnInitialDiffError = core.getBooleanInput( 219 | 'fail_on_initial_diff_error', 220 | { 221 | required: false 222 | } 223 | ) 224 | const failOnSubmoduleDiffError = core.getBooleanInput( 225 | 'fail_on_submodule_diff_error', 226 | { 227 | required: false 228 | } 229 | ) 230 | const dirNamesDeletedFilesIncludeOnlyDeletedDirs = core.getBooleanInput( 231 | 'dir_names_deleted_files_include_only_deleted_dirs', 232 | { 233 | required: false 234 | } 235 | ) 236 | 237 | const negationPatternsFirst = core.getBooleanInput( 238 | 'negation_patterns_first', 239 | { 240 | required: false 241 | } 242 | ) 243 | 244 | const useRestApi = core.getBooleanInput('use_rest_api', { 245 | required: false 246 | }) 247 | 248 | const excludeSubmodules = core.getBooleanInput('exclude_submodules', { 249 | required: false 250 | }) 251 | 252 | const fetchMissingHistoryMaxRetries = core.getInput( 253 | 'fetch_missing_history_max_retries', 254 | {required: false} 255 | ) 256 | 257 | const usePosixPathSeparator = core.getBooleanInput( 258 | 'use_posix_path_separator', 259 | { 260 | required: false 261 | } 262 | ) 263 | 264 | const tagsPattern = core.getInput('tags_pattern', { 265 | required: false, 266 | trimWhitespace: false 267 | }) 268 | const tagsIgnorePattern = core.getInput('tags_ignore_pattern', { 269 | required: false, 270 | trimWhitespace: false 271 | }) 272 | 273 | const inputs: Inputs = { 274 | files, 275 | filesSeparator, 276 | filesFromSourceFile, 277 | filesFromSourceFileSeparator, 278 | filesYaml, 279 | filesYamlFromSourceFile, 280 | filesYamlFromSourceFileSeparator, 281 | filesIgnore, 282 | filesIgnoreSeparator, 283 | filesIgnoreFromSourceFile, 284 | filesIgnoreFromSourceFileSeparator, 285 | filesIgnoreYaml, 286 | filesIgnoreYamlFromSourceFile, 287 | filesIgnoreYamlFromSourceFileSeparator, 288 | failOnInitialDiffError, 289 | failOnSubmoduleDiffError, 290 | separator, 291 | // Not Supported via REST API 292 | sha, 293 | baseSha, 294 | 
since, 295 | until, 296 | path, 297 | quotepath, 298 | diffRelative, 299 | sinceLastRemoteCommit, 300 | recoverDeletedFiles, 301 | recoverDeletedFilesToDestination, 302 | recoverFiles, 303 | recoverFilesSeparator, 304 | recoverFilesIgnore, 305 | recoverFilesIgnoreSeparator, 306 | includeAllOldNewRenamedFiles, 307 | oldNewSeparator, 308 | oldNewFilesSeparator, 309 | skipInitialFetch, 310 | fetchAdditionalSubmoduleHistory, 311 | dirNamesDeletedFilesIncludeOnlyDeletedDirs, 312 | excludeSubmodules, 313 | usePosixPathSeparator, 314 | tagsPattern, 315 | tagsIgnorePattern, 316 | // End Not Supported via REST API 317 | dirNames, 318 | dirNamesExcludeCurrentDir, 319 | dirNamesIncludeFiles, 320 | dirNamesIncludeFilesSeparator, 321 | json, 322 | escapeJson, 323 | safeOutput, 324 | writeOutputFiles, 325 | outputDir, 326 | outputRenamedFilesAsDeletedAndAdded, 327 | token, 328 | apiUrl, 329 | negationPatternsFirst, 330 | useRestApi 331 | } 332 | 333 | if (fetchDepth) { 334 | // Fallback to at least 2 if the fetch_depth is less than 2 335 | inputs.fetchDepth = Math.max(parseInt(fetchDepth, 10), 2) 336 | } 337 | 338 | if (dirNamesMaxDepth) { 339 | inputs.dirNamesMaxDepth = parseInt(dirNamesMaxDepth, 10) 340 | } 341 | 342 | if (fetchMissingHistoryMaxRetries) { 343 | // Fallback to at least 1 if the fetch_missing_history_max_retries is less than 1 344 | inputs.fetchMissingHistoryMaxRetries = Math.max( 345 | parseInt(fetchMissingHistoryMaxRetries, 10), 346 | 1 347 | ) 348 | } 349 | 350 | return inputs 351 | } 352 | -------------------------------------------------------------------------------- /src/__tests__/__snapshots__/inputs.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`getInputs should correctly parse boolean inputs 1`] = ` 4 | { 5 | "apiUrl": "", 6 | "baseSha": "", 7 | "diffRelative": "false", 8 | "dirNames": "false", 9 | 
"dirNamesDeletedFilesIncludeOnlyDeletedDirs": "false", 10 | "dirNamesExcludeCurrentDir": "false", 11 | "dirNamesIncludeFiles": "", 12 | "dirNamesIncludeFilesSeparator": "", 13 | "escapeJson": false, 14 | "excludeSubmodules": "false", 15 | "failOnInitialDiffError": "false", 16 | "failOnSubmoduleDiffError": "false", 17 | "fetchAdditionalSubmoduleHistory": "false", 18 | "fetchMissingHistoryMaxRetries": 20, 19 | "files": "", 20 | "filesFromSourceFile": "", 21 | "filesFromSourceFileSeparator": "", 22 | "filesIgnore": "", 23 | "filesIgnoreFromSourceFile": "", 24 | "filesIgnoreFromSourceFileSeparator": "", 25 | "filesIgnoreSeparator": "", 26 | "filesIgnoreYaml": "", 27 | "filesIgnoreYamlFromSourceFile": "", 28 | "filesIgnoreYamlFromSourceFileSeparator": "", 29 | "filesSeparator": "", 30 | "filesYaml": "", 31 | "filesYamlFromSourceFile": "", 32 | "filesYamlFromSourceFileSeparator": "", 33 | "includeAllOldNewRenamedFiles": "false", 34 | "json": true, 35 | "negationPatternsFirst": "false", 36 | "oldNewFilesSeparator": " ", 37 | "oldNewSeparator": ",", 38 | "outputDir": "", 39 | "outputRenamedFilesAsDeletedAndAdded": "false", 40 | "path": ".", 41 | "quotepath": "false", 42 | "recoverDeletedFiles": "false", 43 | "recoverDeletedFilesToDestination": "", 44 | "recoverFiles": "", 45 | "recoverFilesIgnore": "", 46 | "recoverFilesIgnoreSeparator": " 47 | ", 48 | "recoverFilesSeparator": " 49 | ", 50 | "safeOutput": "false", 51 | "separator": "", 52 | "sha": "", 53 | "since": "", 54 | "sinceLastRemoteCommit": "false", 55 | "skipInitialFetch": "true", 56 | "tagsIgnorePattern": "", 57 | "tagsPattern": "*", 58 | "token": "", 59 | "until": "", 60 | "usePosixPathSeparator": "false", 61 | "useRestApi": "false", 62 | "writeOutputFiles": "false", 63 | } 64 | `; 65 | 66 | exports[`getInputs should correctly parse numeric inputs 1`] = ` 67 | { 68 | "apiUrl": "", 69 | "baseSha": "", 70 | "diffRelative": true, 71 | "dirNames": false, 72 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 73 | 
"dirNamesExcludeCurrentDir": false, 74 | "dirNamesIncludeFiles": "", 75 | "dirNamesIncludeFilesSeparator": "", 76 | "dirNamesMaxDepth": 2, 77 | "escapeJson": false, 78 | "excludeSubmodules": false, 79 | "failOnInitialDiffError": false, 80 | "failOnSubmoduleDiffError": false, 81 | "fetchAdditionalSubmoduleHistory": false, 82 | "fetchDepth": 5, 83 | "files": "", 84 | "filesFromSourceFile": "", 85 | "filesFromSourceFileSeparator": "", 86 | "filesIgnore": "", 87 | "filesIgnoreFromSourceFile": "", 88 | "filesIgnoreFromSourceFileSeparator": "", 89 | "filesIgnoreSeparator": "", 90 | "filesIgnoreYaml": "", 91 | "filesIgnoreYamlFromSourceFile": "", 92 | "filesIgnoreYamlFromSourceFileSeparator": "", 93 | "filesSeparator": "", 94 | "filesYaml": "", 95 | "filesYamlFromSourceFile": "", 96 | "filesYamlFromSourceFileSeparator": "", 97 | "includeAllOldNewRenamedFiles": false, 98 | "json": false, 99 | "negationPatternsFirst": false, 100 | "oldNewFilesSeparator": "", 101 | "oldNewSeparator": "", 102 | "outputDir": "", 103 | "outputRenamedFilesAsDeletedAndAdded": false, 104 | "path": "", 105 | "quotepath": true, 106 | "recoverDeletedFiles": false, 107 | "recoverDeletedFilesToDestination": "", 108 | "recoverFiles": "", 109 | "recoverFilesIgnore": "", 110 | "recoverFilesIgnoreSeparator": "", 111 | "recoverFilesSeparator": "", 112 | "safeOutput": false, 113 | "separator": "", 114 | "sha": "", 115 | "since": "", 116 | "sinceLastRemoteCommit": false, 117 | "skipInitialFetch": false, 118 | "tagsIgnorePattern": "", 119 | "tagsPattern": "", 120 | "token": "", 121 | "until": "", 122 | "usePosixPathSeparator": false, 123 | "useRestApi": false, 124 | "writeOutputFiles": false, 125 | } 126 | `; 127 | 128 | exports[`getInputs should correctly parse string inputs 1`] = ` 129 | { 130 | "apiUrl": "https://api.github.com", 131 | "baseSha": "", 132 | "diffRelative": true, 133 | "dirNames": false, 134 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 135 | "dirNamesExcludeCurrentDir": false, 136 | 
"dirNamesIncludeFiles": "", 137 | "dirNamesIncludeFilesSeparator": "", 138 | "escapeJson": false, 139 | "excludeSubmodules": false, 140 | "failOnInitialDiffError": false, 141 | "failOnSubmoduleDiffError": false, 142 | "fetchAdditionalSubmoduleHistory": false, 143 | "files": "", 144 | "filesFromSourceFile": "", 145 | "filesFromSourceFileSeparator": "", 146 | "filesIgnore": "", 147 | "filesIgnoreFromSourceFile": "", 148 | "filesIgnoreFromSourceFileSeparator": "", 149 | "filesIgnoreSeparator": "", 150 | "filesIgnoreYaml": "", 151 | "filesIgnoreYamlFromSourceFile": "", 152 | "filesIgnoreYamlFromSourceFileSeparator": "", 153 | "filesSeparator": "", 154 | "filesYaml": "", 155 | "filesYamlFromSourceFile": "", 156 | "filesYamlFromSourceFileSeparator": "", 157 | "includeAllOldNewRenamedFiles": false, 158 | "json": false, 159 | "negationPatternsFirst": false, 160 | "oldNewFilesSeparator": "", 161 | "oldNewSeparator": "", 162 | "outputDir": "", 163 | "outputRenamedFilesAsDeletedAndAdded": false, 164 | "path": "", 165 | "quotepath": true, 166 | "recoverDeletedFiles": false, 167 | "recoverDeletedFilesToDestination": "", 168 | "recoverFiles": "", 169 | "recoverFilesIgnore": "", 170 | "recoverFilesIgnoreSeparator": "", 171 | "recoverFilesSeparator": "", 172 | "safeOutput": false, 173 | "separator": "", 174 | "sha": "", 175 | "since": "", 176 | "sinceLastRemoteCommit": false, 177 | "skipInitialFetch": false, 178 | "tagsIgnorePattern": "", 179 | "tagsPattern": "", 180 | "token": "token", 181 | "until": "", 182 | "usePosixPathSeparator": false, 183 | "useRestApi": false, 184 | "writeOutputFiles": false, 185 | } 186 | `; 187 | 188 | exports[`getInputs should handle invalid numeric inputs correctly 1`] = ` 189 | { 190 | "apiUrl": "", 191 | "baseSha": "", 192 | "diffRelative": true, 193 | "dirNames": false, 194 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 195 | "dirNamesExcludeCurrentDir": false, 196 | "dirNamesIncludeFiles": "", 197 | "dirNamesIncludeFilesSeparator": "", 198 
| "dirNamesMaxDepth": 2, 199 | "escapeJson": false, 200 | "excludeSubmodules": false, 201 | "failOnInitialDiffError": false, 202 | "failOnSubmoduleDiffError": false, 203 | "fetchAdditionalSubmoduleHistory": false, 204 | "fetchDepth": NaN, 205 | "files": "", 206 | "filesFromSourceFile": "", 207 | "filesFromSourceFileSeparator": "", 208 | "filesIgnore": "", 209 | "filesIgnoreFromSourceFile": "", 210 | "filesIgnoreFromSourceFileSeparator": "", 211 | "filesIgnoreSeparator": "", 212 | "filesIgnoreYaml": "", 213 | "filesIgnoreYamlFromSourceFile": "", 214 | "filesIgnoreYamlFromSourceFileSeparator": "", 215 | "filesSeparator": "", 216 | "filesYaml": "", 217 | "filesYamlFromSourceFile": "", 218 | "filesYamlFromSourceFileSeparator": "", 219 | "includeAllOldNewRenamedFiles": false, 220 | "json": false, 221 | "negationPatternsFirst": false, 222 | "oldNewFilesSeparator": "", 223 | "oldNewSeparator": "", 224 | "outputDir": "", 225 | "outputRenamedFilesAsDeletedAndAdded": false, 226 | "path": "", 227 | "quotepath": true, 228 | "recoverDeletedFiles": false, 229 | "recoverDeletedFilesToDestination": "", 230 | "recoverFiles": "", 231 | "recoverFilesIgnore": "", 232 | "recoverFilesIgnoreSeparator": "", 233 | "recoverFilesSeparator": "", 234 | "safeOutput": false, 235 | "separator": "", 236 | "sha": "", 237 | "since": "", 238 | "sinceLastRemoteCommit": false, 239 | "skipInitialFetch": false, 240 | "tagsIgnorePattern": "", 241 | "tagsPattern": "", 242 | "token": "", 243 | "until": "", 244 | "usePosixPathSeparator": false, 245 | "useRestApi": false, 246 | "writeOutputFiles": false, 247 | } 248 | `; 249 | 250 | exports[`getInputs should handle negative numeric inputs correctly 1`] = ` 251 | { 252 | "apiUrl": "", 253 | "baseSha": "", 254 | "diffRelative": true, 255 | "dirNames": false, 256 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 257 | "dirNamesExcludeCurrentDir": false, 258 | "dirNamesIncludeFiles": "", 259 | "dirNamesIncludeFilesSeparator": "", 260 | "dirNamesMaxDepth": 
-2, 261 | "escapeJson": false, 262 | "excludeSubmodules": false, 263 | "failOnInitialDiffError": false, 264 | "failOnSubmoduleDiffError": false, 265 | "fetchAdditionalSubmoduleHistory": false, 266 | "fetchDepth": 2, 267 | "files": "", 268 | "filesFromSourceFile": "", 269 | "filesFromSourceFileSeparator": "", 270 | "filesIgnore": "", 271 | "filesIgnoreFromSourceFile": "", 272 | "filesIgnoreFromSourceFileSeparator": "", 273 | "filesIgnoreSeparator": "", 274 | "filesIgnoreYaml": "", 275 | "filesIgnoreYamlFromSourceFile": "", 276 | "filesIgnoreYamlFromSourceFileSeparator": "", 277 | "filesSeparator": "", 278 | "filesYaml": "", 279 | "filesYamlFromSourceFile": "", 280 | "filesYamlFromSourceFileSeparator": "", 281 | "includeAllOldNewRenamedFiles": false, 282 | "json": false, 283 | "negationPatternsFirst": false, 284 | "oldNewFilesSeparator": "", 285 | "oldNewSeparator": "", 286 | "outputDir": "", 287 | "outputRenamedFilesAsDeletedAndAdded": false, 288 | "path": "", 289 | "quotepath": true, 290 | "recoverDeletedFiles": false, 291 | "recoverDeletedFilesToDestination": "", 292 | "recoverFiles": "", 293 | "recoverFilesIgnore": "", 294 | "recoverFilesIgnoreSeparator": "", 295 | "recoverFilesSeparator": "", 296 | "safeOutput": false, 297 | "separator": "", 298 | "sha": "", 299 | "since": "", 300 | "sinceLastRemoteCommit": false, 301 | "skipInitialFetch": false, 302 | "tagsIgnorePattern": "", 303 | "tagsPattern": "", 304 | "token": "", 305 | "until": "", 306 | "usePosixPathSeparator": false, 307 | "useRestApi": false, 308 | "writeOutputFiles": false, 309 | } 310 | `; 311 | 312 | exports[`getInputs should return default values when no inputs are provided 1`] = ` 313 | { 314 | "apiUrl": "", 315 | "baseSha": "", 316 | "diffRelative": true, 317 | "dirNames": false, 318 | "dirNamesDeletedFilesIncludeOnlyDeletedDirs": false, 319 | "dirNamesExcludeCurrentDir": false, 320 | "dirNamesIncludeFiles": "", 321 | "dirNamesIncludeFilesSeparator": "", 322 | "escapeJson": false, 323 | 
"excludeSubmodules": false, 324 | "failOnInitialDiffError": false, 325 | "failOnSubmoduleDiffError": false, 326 | "fetchAdditionalSubmoduleHistory": false, 327 | "fetchMissingHistoryMaxRetries": 20, 328 | "files": "", 329 | "filesFromSourceFile": "", 330 | "filesFromSourceFileSeparator": "", 331 | "filesIgnore": "", 332 | "filesIgnoreFromSourceFile": "", 333 | "filesIgnoreFromSourceFileSeparator": "", 334 | "filesIgnoreSeparator": "", 335 | "filesIgnoreYaml": "", 336 | "filesIgnoreYamlFromSourceFile": "", 337 | "filesIgnoreYamlFromSourceFileSeparator": "", 338 | "filesSeparator": "", 339 | "filesYaml": "", 340 | "filesYamlFromSourceFile": "", 341 | "filesYamlFromSourceFileSeparator": "", 342 | "includeAllOldNewRenamedFiles": false, 343 | "json": false, 344 | "negationPatternsFirst": false, 345 | "oldNewFilesSeparator": " ", 346 | "oldNewSeparator": ",", 347 | "outputDir": "", 348 | "outputRenamedFilesAsDeletedAndAdded": false, 349 | "path": ".", 350 | "quotepath": true, 351 | "recoverDeletedFiles": false, 352 | "recoverDeletedFilesToDestination": "", 353 | "recoverFiles": "", 354 | "recoverFilesIgnore": "", 355 | "recoverFilesIgnoreSeparator": " 356 | ", 357 | "recoverFilesSeparator": " 358 | ", 359 | "safeOutput": false, 360 | "separator": "", 361 | "sha": "", 362 | "since": "", 363 | "sinceLastRemoteCommit": false, 364 | "skipInitialFetch": false, 365 | "tagsIgnorePattern": "", 366 | "tagsPattern": "*", 367 | "token": "", 368 | "until": "", 369 | "usePosixPathSeparator": false, 370 | "useRestApi": false, 371 | "writeOutputFiles": false, 372 | } 373 | `; 374 | -------------------------------------------------------------------------------- /src/changedFiles.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | import type {RestEndpointMethodTypes} from '@octokit/rest' 4 | import flatten from 'lodash/flatten' 5 | import convertPath from 
'@stdlib/utils-convert-path'
import mm from 'micromatch'
import * as path from 'path'
import {setOutputsAndGetModifiedAndChangedFilesStatus} from './changedFilesOutput'
import {DiffResult} from './commitSha'
import {Inputs} from './inputs'
import {
  canDiffCommits,
  getAllChangedFiles,
  getDirnameMaxDepth,
  getDirNamesIncludeFilesPattern,
  getFilteredChangedFiles,
  isWindows,
  gitRenamedFiles,
  gitSubmoduleDiffSHA,
  jsonOutput,
  setArrayOutput
} from './utils'

/**
 * Filters the collected diff files and sets the action outputs.
 *
 * Three mutually compatible modes, driven by the inputs:
 *  1. `filePatterns` non-empty: set one filtered group of outputs.
 *  2. `yamlFilePatterns` non-empty: set one prefixed output group per YAML
 *     key, plus the aggregate `modified_keys` / `changed_keys` outputs.
 *  3. Neither provided: set outputs for ALL diff files, unfiltered.
 *
 * @param filePatterns - glob patterns from the `files*` inputs.
 * @param allDiffFiles - all changed files keyed by change type.
 * @param inputs - resolved action inputs.
 * @param yamlFilePatterns - per-key pattern lists from the `files_yaml*` inputs.
 * @param workingDirectory - repo root used to resolve relative paths.
 */
export const processChangedFiles = async ({
  filePatterns,
  allDiffFiles,
  inputs,
  yamlFilePatterns,
  workingDirectory
}: {
  filePatterns: string[]
  allDiffFiles: ChangedFiles
  inputs: Inputs
  yamlFilePatterns: Record<string, string[]>
  workingDirectory?: string
}): Promise<void> => {
  // Mode 1: plain `files*` patterns were supplied.
  if (filePatterns.length > 0) {
    core.startGroup('changed-files-patterns')
    const allFilteredDiffFiles = await getFilteredChangedFiles({
      allDiffFiles,
      filePatterns
    })
    core.debug(
      `All filtered diff files: ${JSON.stringify(allFilteredDiffFiles)}`
    )
    await setOutputsAndGetModifiedAndChangedFilesStatus({
      allDiffFiles,
      allFilteredDiffFiles,
      inputs,
      filePatterns,
      workingDirectory
    })
    core.info('All Done!')
    core.endGroup()
  }

  // Mode 2: `files_yaml*` — one prefixed output group per YAML key.
  if (Object.keys(yamlFilePatterns).length > 0) {
    const modifiedKeys: string[] = []
    const changedKeys: string[] = []

    for (const key of Object.keys(yamlFilePatterns)) {
      core.startGroup(`changed-files-yaml-${key}`)
      const allFilteredDiffFiles = await getFilteredChangedFiles({
        allDiffFiles,
        filePatterns: yamlFilePatterns[key]
      })
      core.debug(
        `All filtered diff files for ${key}: ${JSON.stringify(
          allFilteredDiffFiles
        )}`
      )
      const {anyChanged, anyModified} =
        await setOutputsAndGetModifiedAndChangedFilesStatus({
          allDiffFiles,
          allFilteredDiffFiles,
          inputs,
          filePatterns: yamlFilePatterns[key],
          outputPrefix: key,
          workingDirectory
        })
      // Track which keys matched so the aggregate outputs can be set below.
      if (anyModified) {
        modifiedKeys.push(key)
      }
      if (anyChanged) {
        changedKeys.push(key)
      }

      core.info('All Done!')
      core.endGroup()
    }

    if (modifiedKeys.length > 0) {
      await setArrayOutput({
        key: 'modified_keys',
        inputs,
        value: modifiedKeys
      })
    }

    if (changedKeys.length > 0) {
      await setArrayOutput({
        key: 'changed_keys',
        inputs,
        value: changedKeys
      })
    }
  }

  // Mode 3: no patterns at all — emit outputs for every diff file.
  if (filePatterns.length === 0 && Object.keys(yamlFilePatterns).length === 0) {
    core.startGroup('changed-files-all')
    await setOutputsAndGetModifiedAndChangedFilesStatus({
      allDiffFiles,
      allFilteredDiffFiles: allDiffFiles,
      inputs,
      workingDirectory
    })
    core.info('All Done!')
    core.endGroup()
  }
}

/**
 * Collects renamed files as `old<oldNewSeparator>new` pairs for the parent
 * repository and, when `diffSubmodule` is set, for each changed submodule.
 *
 * @returns `paths` — the pairs joined with `old_new_files_separator` (or a
 *          JSON string when the `json` input is set) — and the pair `count`
 *          as a string.
 */
export const getRenamedFiles = async ({
  inputs,
  workingDirectory,
  diffSubmodule,
  diffResult,
  submodulePaths
}: {
  inputs: Inputs
  workingDirectory: string
  diffSubmodule: boolean
  diffResult: DiffResult
  submodulePaths: string[]
}): Promise<{paths: string; count: string}> => {
  const renamedFiles = await gitRenamedFiles({
    cwd: workingDirectory,
    sha1: diffResult.previousSha,
    sha2: diffResult.currentSha,
    diff: diffResult.diff,
    oldNewSeparator: inputs.oldNewSeparator
  })

  if (diffSubmodule) {
    for (const submodulePath of submodulePaths) {
      // Resolve the submodule's own before/after SHAs from the parent diff.
      const submoduleShaResult = await gitSubmoduleDiffSHA({
        cwd: workingDirectory,
        parentSha1: diffResult.previousSha,
        parentSha2: diffResult.currentSha,
        submodulePath,
        diff: diffResult.diff
      })

      const submoduleWorkingDirectory = path.join(
        workingDirectory,
        submodulePath
      )

      if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
        // Prefer a three-dot diff; fall back to two-dot when the submodule
        // history is too shallow for a merge base to be found.
        let diff = '...'

        if (
          !(await canDiffCommits({
            cwd: submoduleWorkingDirectory,
            sha1: submoduleShaResult.previousSha,
            sha2: submoduleShaResult.currentSha,
            diff
          }))
        ) {
          let message = `Unable to use three dot diff for: ${submodulePath} submodule. Falling back to two dot diff. You can set 'fetch_additional_submodule_history: true' to fetch additional submodule history in order to use three dot diff`
          if (inputs.fetchAdditionalSubmoduleHistory) {
            message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
          }
          core.info(message)
          diff = '..'
        }

        const submoduleRenamedFiles = await gitRenamedFiles({
          cwd: submoduleWorkingDirectory,
          sha1: submoduleShaResult.previousSha,
          sha2: submoduleShaResult.currentSha,
          diff,
          oldNewSeparator: inputs.oldNewSeparator,
          isSubmodule: true,
          parentDir: submodulePath
        })
        renamedFiles.push(...submoduleRenamedFiles)
      }
    }
  }

  if (inputs.json) {
    return {
      paths: jsonOutput({value: renamedFiles, shouldEscape: inputs.escapeJson}),
      count: renamedFiles.length.toString()
    }
  }

  return {
    paths: renamedFiles.join(inputs.oldNewFilesSeparator),
    count: renamedFiles.length.toString()
  }
}

/** Git status letters reported by `git diff --name-status`. */
export enum ChangeTypeEnum {
  Added = 'A',
  Copied = 'C',
  Deleted = 'D',
  Modified = 'M',
  Renamed = 'R',
  TypeChanged = 'T',
  Unmerged = 'U',
  Unknown = 'X'
}

/** Map of change type -> file paths that underwent that change. */
export type ChangedFiles = {
  [key in ChangeTypeEnum]: string[]
}
/**
 * Computes all changed files (keyed by change type) for the parent
 * repository and, when `diffSubmodule` is set, merges in the changed files
 * of every submodule that differs between the two SHAs.
 *
 * @param workingDirectory - repository root to diff in.
 * @param diffSubmodule - whether to also diff changed submodules.
 * @param diffResult - previous/current SHAs and the diff style (`..`/`...`).
 * @param submodulePaths - submodule paths relative to the repo root.
 * @param outputRenamedFilesAsDeletedAndAdded - report renames as D + A.
 * @param fetchAdditionalSubmoduleHistory - toggles which hint is logged when
 *        a submodule three-dot diff is not possible.
 * @param failOnInitialDiffError - fail the action when the parent diff fails.
 * @param failOnSubmoduleDiffError - fail the action when a submodule diff fails.
 * @returns all changed files grouped by {@link ChangeTypeEnum}.
 */
export const getAllDiffFiles = async ({
  workingDirectory,
  diffSubmodule,
  diffResult,
  submodulePaths,
  outputRenamedFilesAsDeletedAndAdded,
  fetchAdditionalSubmoduleHistory,
  failOnInitialDiffError,
  failOnSubmoduleDiffError
}: {
  workingDirectory: string
  diffSubmodule: boolean
  diffResult: DiffResult
  submodulePaths: string[]
  outputRenamedFilesAsDeletedAndAdded: boolean
  fetchAdditionalSubmoduleHistory: boolean
  failOnInitialDiffError: boolean
  failOnSubmoduleDiffError: boolean
}): Promise<ChangedFiles> => {
  const files = await getAllChangedFiles({
    cwd: workingDirectory,
    sha1: diffResult.previousSha,
    sha2: diffResult.currentSha,
    diff: diffResult.diff,
    outputRenamedFilesAsDeletedAndAdded,
    failOnInitialDiffError
  })

  if (diffSubmodule) {
    for (const submodulePath of submodulePaths) {
      // Resolve the submodule's own before/after SHAs from the parent diff.
      const submoduleShaResult = await gitSubmoduleDiffSHA({
        cwd: workingDirectory,
        parentSha1: diffResult.previousSha,
        parentSha2: diffResult.currentSha,
        submodulePath,
        diff: diffResult.diff
      })

      const submoduleWorkingDirectory = path.join(
        workingDirectory,
        submodulePath
      )

      if (submoduleShaResult.currentSha && submoduleShaResult.previousSha) {
        // Prefer a three-dot diff; fall back to two-dot when the submodule
        // history is too shallow for a merge base to be found.
        let diff = '...'

        if (
          !(await canDiffCommits({
            cwd: submoduleWorkingDirectory,
            sha1: submoduleShaResult.previousSha,
            sha2: submoduleShaResult.currentSha,
            diff
          }))
        ) {
          let message = `Set 'fetch_additional_submodule_history: true' to fetch additional submodule history for: ${submodulePath}`
          if (fetchAdditionalSubmoduleHistory) {
            message = `To fetch additional submodule history for: ${submodulePath} you can increase history depth using 'fetch_depth' input`
          }
          core.warning(message)
          diff = '..'
        }

        const submoduleFiles = await getAllChangedFiles({
          cwd: submoduleWorkingDirectory,
          sha1: submoduleShaResult.previousSha,
          sha2: submoduleShaResult.currentSha,
          diff,
          isSubmodule: true,
          parentDir: submodulePath,
          outputRenamedFilesAsDeletedAndAdded,
          failOnSubmoduleDiffError
        })

        // Merge the submodule's files into the parent map, per change type.
        for (const changeType of Object.keys(
          submoduleFiles
        ) as ChangeTypeEnum[]) {
          if (!files[changeType]) {
            files[changeType] = []
          }
          files[changeType].push(...submoduleFiles[changeType])
        }
      }
    }
  }

  return files
}

/**
 * Yields output paths for `filePaths`, applying the `dir_names*`
 * transformations when `dir_names` is enabled; otherwise yields the paths
 * unchanged.
 */
function* getFilePaths({
  inputs,
  filePaths,
  dirNamesIncludeFilePatterns
}: {
  inputs: Inputs
  filePaths: string[]
  dirNamesIncludeFilePatterns: string[]
}): Generator<string> {
  for (const filePath of filePaths) {
    if (inputs.dirNames) {
      if (dirNamesIncludeFilePatterns.length > 0) {
        const isWin = isWindows()
        const matchOptions = {dot: true, windows: isWin, noext: true}
        // Per `dir_names_include_files`: emit the matching file itself...
        if (mm.isMatch(filePath, dirNamesIncludeFilePatterns, matchOptions)) {
          yield filePath
        }
      }
      // ...and always emit its (depth-limited) directory name.
      yield getDirnameMaxDepth({
        relativePath: filePath,
        dirNamesMaxDepth: inputs.dirNamesMaxDepth,
        excludeCurrentDir: inputs.dirNamesExcludeCurrentDir
      })
    } else {
      yield filePath
    }
  }
}
(const filePath of getFilePaths({ 355 | inputs, 356 | filePaths, 357 | dirNamesIncludeFilePatterns 358 | })) { 359 | if (isWindows() && inputs.usePosixPathSeparator) { 360 | yield convertPath(filePath, 'mixed') 361 | } else { 362 | yield filePath 363 | } 364 | } 365 | } 366 | } 367 | 368 | export const getChangeTypeFiles = async ({ 369 | inputs, 370 | changedFiles, 371 | changeTypes 372 | }: { 373 | inputs: Inputs 374 | changedFiles: ChangedFiles 375 | changeTypes: ChangeTypeEnum[] 376 | }): Promise<{paths: string[] | string; count: string}> => { 377 | const files = [ 378 | ...new Set(getChangeTypeFilesGenerator({inputs, changedFiles, changeTypes})) 379 | ].filter(Boolean) 380 | 381 | const paths = inputs.json ? files : files.join(inputs.separator) 382 | 383 | return { 384 | paths, 385 | count: files.length.toString() 386 | } 387 | } 388 | 389 | function* getAllChangeTypeFilesGenerator({ 390 | inputs, 391 | changedFiles 392 | }: { 393 | inputs: Inputs 394 | changedFiles: ChangedFiles 395 | }): Generator { 396 | const dirNamesIncludeFilePatterns = getDirNamesIncludeFilesPattern({inputs}) 397 | core.debug( 398 | `Dir names include file patterns: ${JSON.stringify( 399 | dirNamesIncludeFilePatterns 400 | )}` 401 | ) 402 | 403 | const filePaths = flatten(Object.values(changedFiles)) 404 | 405 | for (const filePath of getFilePaths({ 406 | inputs, 407 | filePaths, 408 | dirNamesIncludeFilePatterns 409 | })) { 410 | if (isWindows() && inputs.usePosixPathSeparator) { 411 | yield convertPath(filePath, 'mixed') 412 | } else { 413 | yield filePath 414 | } 415 | } 416 | } 417 | 418 | export const getAllChangeTypeFiles = async ({ 419 | inputs, 420 | changedFiles 421 | }: { 422 | inputs: Inputs 423 | changedFiles: ChangedFiles 424 | }): Promise<{paths: string[] | string; count: string}> => { 425 | const files = [ 426 | ...new Set(getAllChangeTypeFilesGenerator({inputs, changedFiles})) 427 | ].filter(Boolean) 428 | 429 | const paths = inputs.json ? 
files : files.join(inputs.separator) 430 | 431 | return { 432 | paths, 433 | count: files.length.toString() 434 | } 435 | } 436 | 437 | export const getChangedFilesFromGithubAPI = async ({ 438 | inputs 439 | }: { 440 | inputs: Inputs 441 | }): Promise => { 442 | const octokit = github.getOctokit(inputs.token, { 443 | baseUrl: inputs.apiUrl 444 | }) 445 | const changedFiles: ChangedFiles = { 446 | [ChangeTypeEnum.Added]: [], 447 | [ChangeTypeEnum.Copied]: [], 448 | [ChangeTypeEnum.Deleted]: [], 449 | [ChangeTypeEnum.Modified]: [], 450 | [ChangeTypeEnum.Renamed]: [], 451 | [ChangeTypeEnum.TypeChanged]: [], 452 | [ChangeTypeEnum.Unmerged]: [], 453 | [ChangeTypeEnum.Unknown]: [] 454 | } 455 | 456 | core.info('Getting changed files from GitHub API...') 457 | 458 | const options = octokit.rest.pulls.listFiles.endpoint.merge({ 459 | owner: github.context.repo.owner, 460 | repo: github.context.repo.repo, 461 | pull_number: github.context.payload.pull_request?.number, 462 | per_page: 100 463 | }) 464 | 465 | const paginatedResponse = 466 | await octokit.paginate< 467 | RestEndpointMethodTypes['pulls']['listFiles']['response']['data'][0] 468 | >(options) 469 | 470 | core.info(`Found ${paginatedResponse.length} changed files from GitHub API`) 471 | const statusMap: Record = { 472 | added: ChangeTypeEnum.Added, 473 | removed: ChangeTypeEnum.Deleted, 474 | modified: ChangeTypeEnum.Modified, 475 | renamed: ChangeTypeEnum.Renamed, 476 | copied: ChangeTypeEnum.Copied, 477 | changed: ChangeTypeEnum.TypeChanged, 478 | unchanged: ChangeTypeEnum.Unmerged 479 | } 480 | 481 | for await (const item of paginatedResponse) { 482 | const changeType: ChangeTypeEnum = 483 | statusMap[item.status] || ChangeTypeEnum.Unknown 484 | 485 | if (changeType === ChangeTypeEnum.Renamed) { 486 | if (inputs.outputRenamedFilesAsDeletedAndAdded) { 487 | changedFiles[ChangeTypeEnum.Deleted].push(item.filename) 488 | changedFiles[ChangeTypeEnum.Added].push(item.filename) 489 | } else { 490 | 
changedFiles[ChangeTypeEnum.Renamed].push(item.filename) 491 | } 492 | } else { 493 | changedFiles[changeType].push(item.filename) 494 | } 495 | } 496 | 497 | return changedFiles 498 | } 499 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: Changed Files 2 | description: Get all Added, Copied, Modified, Deleted, Renamed, Type changed, Unmerged, and Unknown files. 3 | author: tj-actions 4 | 5 | inputs: 6 | separator: 7 | description: "Split character for output strings." 8 | required: false 9 | default: " " 10 | include_all_old_new_renamed_files: 11 | description: "Include `all_old_new_renamed_files` output. Note this can generate a large output See: #501." 12 | required: false 13 | default: "false" 14 | old_new_separator: 15 | description: "Split character for old and new filename pairs." 16 | required: false 17 | default: "," 18 | old_new_files_separator: 19 | description: "Split character for old and new renamed filename pairs." 20 | required: false 21 | default: " " 22 | files_from_source_file: 23 | description: "Source file(s) used to populate the `files` input." 24 | required: false 25 | default: "" 26 | files_from_source_file_separator: 27 | description: "Separator used to split the `files_from_source_file` input." 28 | default: "\n" 29 | required: false 30 | files: 31 | description: | 32 | File and directory patterns used to detect changes (Defaults to the entire repo if unset). 33 | NOTE: Multiline file/directory patterns should not include quotes. 
34 | required: false 35 | default: "" 36 | files_separator: 37 | description: "Separator used to split the `files` input" 38 | default: "\n" 39 | required: false 40 | files_yaml: 41 | description: "YAML used to define a set of file patterns to detect changes" 42 | required: false 43 | default: "" 44 | files_yaml_from_source_file: 45 | description: "Source file(s) used to populate the `files_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml" 46 | required: false 47 | default: "" 48 | files_yaml_from_source_file_separator: 49 | description: 'Separator used to split the `files_yaml_from_source_file` input' 50 | default: "\n" 51 | required: false 52 | files_ignore_yaml: 53 | description: "YAML used to define a set of file patterns to ignore changes" 54 | required: false 55 | default: "" 56 | files_ignore_yaml_from_source_file: 57 | description: "Source file(s) used to populate the `files_ignore_yaml` input. Example: https://github.com/tj-actions/changed-files/blob/main/test/changed-files.yml" 58 | required: false 59 | default: "" 60 | files_ignore_yaml_from_source_file_separator: 61 | description: 'Separator used to split the `files_ignore_yaml_from_source_file` input' 62 | default: "\n" 63 | required: false 64 | files_ignore: 65 | description: "Ignore changes to these file(s). NOTE: Multiline file/directory patterns should not include quotes." 
66 | required: false 67 | default: "" 68 | files_ignore_separator: 69 | description: "Separator used to split the `files_ignore` input" 70 | default: "\n" 71 | required: false 72 | files_ignore_from_source_file: 73 | description: "Source file(s) used to populate the `files_ignore` input" 74 | required: false 75 | default: "" 76 | files_ignore_from_source_file_separator: 77 | description: 'Separator used to split the `files_ignore_from_source_file` input' 78 | default: "\n" 79 | required: false 80 | sha: 81 | description: "Specify a different commit SHA or branch used for comparing changes" 82 | required: false 83 | base_sha: 84 | description: "Specify a different base commit SHA or branch used for comparing changes" 85 | required: false 86 | since: 87 | description: "Get changed files for commits whose timestamp is older than the given time." 88 | required: false 89 | default: "" 90 | until: 91 | description: "Get changed files for commits whose timestamp is earlier than the given time." 92 | required: false 93 | default: "" 94 | path: 95 | description: "Specify a relative path under `$GITHUB_WORKSPACE` to locate the repository." 96 | required: false 97 | default: "." 98 | quotepath: 99 | description: "Use non-ASCII characters to match files and output the filenames completely verbatim by setting this to `false`" 100 | default: "true" 101 | required: false 102 | diff_relative: 103 | description: "Exclude changes outside the current directory and show path names relative to it. NOTE: This requires you to specify the top-level directory via the `path` input." 104 | required: false 105 | default: "true" 106 | dir_names: 107 | default: "false" 108 | description: "Output unique changed directories instead of filenames. NOTE: This returns `.` for changed files located in the current working directory which defaults to `$GITHUB_WORKSPACE`." 
109 | required: false 110 | dir_names_max_depth: 111 | description: "Limit the directory output to a maximum depth e.g `test/test1/test2` with max depth of `2` returns `test/test1`." 112 | required: false 113 | dir_names_exclude_current_dir: 114 | description: "Exclude the current directory represented by `.` from the output when `dir_names` is set to `true`." 115 | required: false 116 | default: "false" 117 | dir_names_include_files: 118 | description: "File and directory patterns to include in the output when `dir_names` is set to `true`. NOTE: This returns only the matching files and also the directory names." 119 | required: false 120 | default: "" 121 | dir_names_include_files_separator: 122 | description: "Separator used to split the `dir_names_include_files` input" 123 | default: "\n" 124 | required: false 125 | dir_names_deleted_files_include_only_deleted_dirs: 126 | description: "Include only directories that have been deleted as opposed to directory names of files that have been deleted in the `deleted_files` output when `dir_names` is set to `true`." 127 | required: false 128 | default: "false" 129 | json: 130 | description: "Output list of changed files in a JSON formatted string which can be used for matrix jobs. Example: https://github.com/tj-actions/changed-files/blob/main/.github/workflows/matrix-example.yml" 131 | required: false 132 | default: "false" 133 | escape_json: 134 | description: "Escape JSON output." 135 | required: false 136 | default: "true" 137 | safe_output: 138 | description: "Apply sanitization to output filenames before being set as output." 139 | required: false 140 | default: "true" 141 | fetch_depth: 142 | description: "Depth of additional branch history fetched. NOTE: This can be adjusted to resolve errors with insufficient history." 143 | required: false 144 | default: "25" 145 | skip_initial_fetch: 146 | description: | 147 | Skip initially fetching additional history to improve performance for shallow repositories. 
148 | NOTE: This could lead to errors with missing history. It's intended to be used when you've fetched all necessary history to perform the diff. 149 | required: false 150 | default: "false" 151 | fetch_additional_submodule_history: 152 | description: "Fetch additional history for submodules." 153 | required: false 154 | default: "false" 155 | since_last_remote_commit: 156 | description: "Use the last commit on the remote branch as the `base_sha`. Defaults to the last non-merge commit on the target branch for pull request events and the previous remote commit of the current branch for push events." 157 | required: false 158 | default: "false" 159 | write_output_files: 160 | description: "Write outputs to the `output_dir` defaults to `.github/outputs` folder. NOTE: This creates a `.txt` file by default and a `.json` file if `json` is set to `true`." 161 | required: false 162 | default: "false" 163 | output_dir: 164 | description: "Directory to store output files." 165 | required: false 166 | default: ".github/outputs" 167 | output_renamed_files_as_deleted_and_added: 168 | description: "Output renamed files as deleted and added files." 169 | required: false 170 | default: "false" 171 | recover_deleted_files: 172 | description: "Recover deleted files." 173 | required: false 174 | default: "false" 175 | recover_deleted_files_to_destination: 176 | description: "Recover deleted files to a new destination directory, defaults to the original location." 177 | required: false 178 | default: "" 179 | recover_files: 180 | description: | 181 | File and directory patterns used to recover deleted files, 182 | defaults to the patterns provided via the `files`, `files_from_source_file`, `files_ignore` and `files_ignore_from_source_file` inputs 183 | or all deleted files if no patterns are provided. 
184 | required: false 185 | default: "" 186 | recover_files_separator: 187 | description: "Separator used to split the `recover_files` input" 188 | default: "\n" 189 | required: false 190 | recover_files_ignore: 191 | description: "File and directory patterns to ignore when recovering deleted files." 192 | required: false 193 | default: "" 194 | recover_files_ignore_separator: 195 | description: "Separator used to split the `recover_files_ignore` input" 196 | default: "\n" 197 | required: false 198 | token: 199 | description: "GitHub token used to fetch changed files from Github's API." 200 | required: false 201 | default: ${{ github.token }} 202 | api_url: 203 | description: "Github API URL." 204 | required: false 205 | default: ${{ github.api_url }} 206 | use_rest_api: 207 | description: "Force the use of Github's REST API even when a local copy of the repository exists" 208 | required: false 209 | default: "false" 210 | fail_on_initial_diff_error: 211 | description: "Fail when the initial diff fails." 212 | required: false 213 | default: "false" 214 | fail_on_submodule_diff_error: 215 | description: "Fail when the submodule diff fails." 216 | required: false 217 | default: "false" 218 | negation_patterns_first: 219 | description: "Apply the negation patterns first. NOTE: This affects how changed files are matched." 220 | required: false 221 | default: "false" 222 | matrix: 223 | description: "Output changed files in a format that can be used for matrix jobs. Alias for setting inputs `json` to `true` and `escape_json` to `false`." 224 | required: false 225 | default: "false" 226 | exclude_submodules: 227 | description: "Exclude changes to submodules." 228 | required: false 229 | default: "false" 230 | fetch_missing_history_max_retries: 231 | description: "Maximum number of retries to fetch missing history." 232 | required: false 233 | default: "20" 234 | use_posix_path_separator: 235 | description: "Use POSIX path separator `/` for output file paths on Windows." 
236 | required: false 237 | default: "false" 238 | tags_pattern: 239 | description: "Tags pattern to include." 240 | required: false 241 | default: "*" 242 | tags_ignore_pattern: 243 | description: "Tags pattern to ignore." 244 | required: false 245 | default: "" 246 | 247 | 248 | outputs: 249 | added_files: 250 | description: "Returns only files that are Added (A)." 251 | added_files_count: 252 | description: "Returns the number of `added_files`" 253 | copied_files: 254 | description: "Returns only files that are Copied (C)." 255 | copied_files_count: 256 | description: "Returns the number of `copied_files`" 257 | deleted_files: 258 | description: "Returns only files that are Deleted (D)." 259 | deleted_files_count: 260 | description: "Returns the number of `deleted_files`" 261 | modified_files: 262 | description: "Returns only files that are Modified (M)." 263 | modified_files_count: 264 | description: "Returns the number of `modified_files`" 265 | renamed_files: 266 | description: "Returns only files that are Renamed (R)." 267 | renamed_files_count: 268 | description: "Returns the number of `renamed_files`" 269 | all_old_new_renamed_files: 270 | description: "Returns only files that are Renamed and lists their old and new names. **NOTE:** This requires setting `include_all_old_new_renamed_files` to `true`. Also, keep in mind that this output is global and wouldn't be nested in outputs generated when the `*_yaml_*` input is used. (R)" 271 | all_old_new_renamed_files_count: 272 | description: "Returns the number of `all_old_new_renamed_files`" 273 | type_changed_files: 274 | description: "Returns only files that have their file type changed (T)." 275 | type_changed_files_count: 276 | description: "Returns the number of `type_changed_files`" 277 | unmerged_files: 278 | description: "Returns only files that are Unmerged (U)." 
279 | unmerged_files_count: 280 | description: "Returns the number of `unmerged_files`" 281 | unknown_files: 282 | description: "Returns only files that are Unknown (X)." 283 | unknown_files_count: 284 | description: "Returns the number of `unknown_files`" 285 | all_changed_and_modified_files: 286 | description: "Returns all changed and modified files i.e. a combination of (ACMRDTUX)" 287 | all_changed_and_modified_files_count: 288 | description: "Returns the number of `all_changed_and_modified_files`" 289 | all_changed_files: 290 | description: "Returns all changed files i.e. a combination of all added, copied, modified and renamed files (ACMR)" 291 | all_changed_files_count: 292 | description: "Returns the number of `all_changed_files`" 293 | any_changed: 294 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have changed. This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*." 295 | only_changed: 296 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have changed. i.e. *includes a combination of all added, copied, modified and renamed files (ACMR)*." 297 | other_changed_files: 298 | description: "Returns all other changed files not listed in the files input i.e. includes a combination of all added, copied, modified and renamed files (ACMR)." 299 | other_changed_files_count: 300 | description: "Returns the number of `other_changed_files`" 301 | all_modified_files: 302 | description: "Returns all changed files i.e. a combination of all added, copied, modified, renamed and deleted files (ACMRD)." 303 | all_modified_files_count: 304 | description: "Returns the number of `all_modified_files`" 305 | any_modified: 306 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been modified. 
This defaults to `true` when no patterns are specified. i.e. *includes a combination of all added, copied, modified, renamed, and deleted files (ACMRD)*." 307 | only_modified: 308 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been modified. (ACMRD)." 309 | other_modified_files: 310 | description: "Returns all other modified files not listed in the files input i.e. a combination of all added, copied, modified, and deleted files (ACMRD)" 311 | other_modified_files_count: 312 | description: "Returns the number of `other_modified_files`" 313 | any_deleted: 314 | description: "Returns `true` when any of the filenames provided using the `files*` or `files_ignore*` inputs have been deleted. This defaults to `true` when no patterns are specified. (D)" 315 | only_deleted: 316 | description: "Returns `true` when only files provided using the `files*` or `files_ignore*` inputs have been deleted. (D)" 317 | other_deleted_files: 318 | description: "Returns all other deleted files not listed in the files input i.e. a combination of all deleted files (D)" 319 | other_deleted_files_count: 320 | description: "Returns the number of `other_deleted_files`" 321 | modified_keys: 322 | description: "Returns all modified YAML keys when the `files_yaml` input is used. i.e. key that contains any path that has either been added, copied, modified, and deleted (ACMRD)" 323 | changed_keys: 324 | description: "Returns all changed YAML keys when the `files_yaml` input is used. i.e. 
key that contains any path that has either been added, copied, modified, and renamed (ACMR)" 325 | 326 | runs: 327 | using: 'node20' 328 | main: 'dist/index.js' 329 | 330 | branding: 331 | icon: file-text 332 | color: white 333 | -------------------------------------------------------------------------------- /src/changedFilesOutput.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import path from 'path' 3 | import { 4 | ChangedFiles, 5 | ChangeTypeEnum, 6 | getAllChangeTypeFiles, 7 | getChangeTypeFiles 8 | } from './changedFiles' 9 | import {Inputs} from './inputs' 10 | import {getOutputKey, setArrayOutput, setOutput, exists} from './utils' 11 | 12 | const getArrayFromPaths = ( 13 | paths: string | string[], 14 | inputs: Inputs 15 | ): string[] => { 16 | return Array.isArray(paths) ? paths : paths.split(inputs.separator) 17 | } 18 | 19 | export const setOutputsAndGetModifiedAndChangedFilesStatus = async ({ 20 | allDiffFiles, 21 | allFilteredDiffFiles, 22 | inputs, 23 | filePatterns = [], 24 | outputPrefix = '', 25 | workingDirectory 26 | }: { 27 | allDiffFiles: ChangedFiles 28 | allFilteredDiffFiles: ChangedFiles 29 | inputs: Inputs 30 | filePatterns?: string[] 31 | outputPrefix?: string 32 | workingDirectory?: string 33 | }): Promise<{anyModified: boolean; anyChanged: boolean}> => { 34 | const addedFiles = await getChangeTypeFiles({ 35 | inputs, 36 | changedFiles: allFilteredDiffFiles, 37 | changeTypes: [ChangeTypeEnum.Added] 38 | }) 39 | core.debug(`Added files: ${JSON.stringify(addedFiles)}`) 40 | await setOutput({ 41 | key: getOutputKey('added_files', outputPrefix), 42 | value: addedFiles.paths, 43 | writeOutputFiles: inputs.writeOutputFiles, 44 | outputDir: inputs.outputDir, 45 | json: inputs.json, 46 | shouldEscape: inputs.escapeJson, 47 | safeOutput: inputs.safeOutput 48 | }) 49 | await setOutput({ 50 | key: getOutputKey('added_files_count', outputPrefix), 51 | value: 
addedFiles.count, 52 | writeOutputFiles: inputs.writeOutputFiles, 53 | outputDir: inputs.outputDir 54 | }) 55 | 56 | const copiedFiles = await getChangeTypeFiles({ 57 | inputs, 58 | changedFiles: allFilteredDiffFiles, 59 | changeTypes: [ChangeTypeEnum.Copied] 60 | }) 61 | core.debug(`Copied files: ${JSON.stringify(copiedFiles)}`) 62 | await setOutput({ 63 | key: getOutputKey('copied_files', outputPrefix), 64 | value: copiedFiles.paths, 65 | writeOutputFiles: inputs.writeOutputFiles, 66 | outputDir: inputs.outputDir, 67 | json: inputs.json, 68 | shouldEscape: inputs.escapeJson, 69 | safeOutput: inputs.safeOutput 70 | }) 71 | 72 | await setOutput({ 73 | key: getOutputKey('copied_files_count', outputPrefix), 74 | value: copiedFiles.count, 75 | writeOutputFiles: inputs.writeOutputFiles, 76 | outputDir: inputs.outputDir 77 | }) 78 | 79 | const modifiedFiles = await getChangeTypeFiles({ 80 | inputs, 81 | changedFiles: allFilteredDiffFiles, 82 | changeTypes: [ChangeTypeEnum.Modified] 83 | }) 84 | core.debug(`Modified files: ${JSON.stringify(modifiedFiles)}`) 85 | await setOutput({ 86 | key: getOutputKey('modified_files', outputPrefix), 87 | value: modifiedFiles.paths, 88 | writeOutputFiles: inputs.writeOutputFiles, 89 | outputDir: inputs.outputDir, 90 | json: inputs.json, 91 | shouldEscape: inputs.escapeJson, 92 | safeOutput: inputs.safeOutput 93 | }) 94 | 95 | await setOutput({ 96 | key: getOutputKey('modified_files_count', outputPrefix), 97 | value: modifiedFiles.count, 98 | writeOutputFiles: inputs.writeOutputFiles, 99 | outputDir: inputs.outputDir 100 | }) 101 | 102 | const renamedFiles = await getChangeTypeFiles({ 103 | inputs, 104 | changedFiles: allFilteredDiffFiles, 105 | changeTypes: [ChangeTypeEnum.Renamed] 106 | }) 107 | core.debug(`Renamed files: ${JSON.stringify(renamedFiles)}`) 108 | await setOutput({ 109 | key: getOutputKey('renamed_files', outputPrefix), 110 | value: renamedFiles.paths, 111 | writeOutputFiles: inputs.writeOutputFiles, 112 | outputDir: 
inputs.outputDir, 113 | json: inputs.json, 114 | shouldEscape: inputs.escapeJson, 115 | safeOutput: inputs.safeOutput 116 | }) 117 | 118 | await setOutput({ 119 | key: getOutputKey('renamed_files_count', outputPrefix), 120 | value: renamedFiles.count, 121 | writeOutputFiles: inputs.writeOutputFiles, 122 | outputDir: inputs.outputDir 123 | }) 124 | 125 | const typeChangedFiles = await getChangeTypeFiles({ 126 | inputs, 127 | changedFiles: allFilteredDiffFiles, 128 | changeTypes: [ChangeTypeEnum.TypeChanged] 129 | }) 130 | core.debug(`Type changed files: ${JSON.stringify(typeChangedFiles)}`) 131 | await setOutput({ 132 | key: getOutputKey('type_changed_files', outputPrefix), 133 | value: typeChangedFiles.paths, 134 | writeOutputFiles: inputs.writeOutputFiles, 135 | outputDir: inputs.outputDir, 136 | json: inputs.json, 137 | shouldEscape: inputs.escapeJson, 138 | safeOutput: inputs.safeOutput 139 | }) 140 | 141 | await setOutput({ 142 | key: getOutputKey('type_changed_files_count', outputPrefix), 143 | value: typeChangedFiles.count, 144 | writeOutputFiles: inputs.writeOutputFiles, 145 | outputDir: inputs.outputDir 146 | }) 147 | 148 | const unmergedFiles = await getChangeTypeFiles({ 149 | inputs, 150 | changedFiles: allFilteredDiffFiles, 151 | changeTypes: [ChangeTypeEnum.Unmerged] 152 | }) 153 | core.debug(`Unmerged files: ${JSON.stringify(unmergedFiles)}`) 154 | await setOutput({ 155 | key: getOutputKey('unmerged_files', outputPrefix), 156 | value: unmergedFiles.paths, 157 | writeOutputFiles: inputs.writeOutputFiles, 158 | outputDir: inputs.outputDir, 159 | json: inputs.json, 160 | shouldEscape: inputs.escapeJson, 161 | safeOutput: inputs.safeOutput 162 | }) 163 | 164 | await setOutput({ 165 | key: getOutputKey('unmerged_files_count', outputPrefix), 166 | value: unmergedFiles.count, 167 | writeOutputFiles: inputs.writeOutputFiles, 168 | outputDir: inputs.outputDir 169 | }) 170 | 171 | const unknownFiles = await getChangeTypeFiles({ 172 | inputs, 173 | changedFiles: 
allFilteredDiffFiles, 174 | changeTypes: [ChangeTypeEnum.Unknown] 175 | }) 176 | core.debug(`Unknown files: ${JSON.stringify(unknownFiles)}`) 177 | await setOutput({ 178 | key: getOutputKey('unknown_files', outputPrefix), 179 | value: unknownFiles.paths, 180 | writeOutputFiles: inputs.writeOutputFiles, 181 | outputDir: inputs.outputDir, 182 | json: inputs.json, 183 | shouldEscape: inputs.escapeJson, 184 | safeOutput: inputs.safeOutput 185 | }) 186 | 187 | await setOutput({ 188 | key: getOutputKey('unknown_files_count', outputPrefix), 189 | value: unknownFiles.count, 190 | writeOutputFiles: inputs.writeOutputFiles, 191 | outputDir: inputs.outputDir 192 | }) 193 | 194 | const allChangedAndModifiedFiles = await getAllChangeTypeFiles({ 195 | inputs, 196 | changedFiles: allFilteredDiffFiles 197 | }) 198 | core.debug( 199 | `All changed and modified files: ${JSON.stringify( 200 | allChangedAndModifiedFiles 201 | )}` 202 | ) 203 | await setOutput({ 204 | key: getOutputKey('all_changed_and_modified_files', outputPrefix), 205 | value: allChangedAndModifiedFiles.paths, 206 | writeOutputFiles: inputs.writeOutputFiles, 207 | outputDir: inputs.outputDir, 208 | json: inputs.json, 209 | shouldEscape: inputs.escapeJson, 210 | safeOutput: inputs.safeOutput 211 | }) 212 | 213 | await setOutput({ 214 | key: getOutputKey('all_changed_and_modified_files_count', outputPrefix), 215 | value: allChangedAndModifiedFiles.count, 216 | writeOutputFiles: inputs.writeOutputFiles, 217 | outputDir: inputs.outputDir 218 | }) 219 | 220 | const allChangedFiles = await getChangeTypeFiles({ 221 | inputs, 222 | changedFiles: allFilteredDiffFiles, 223 | changeTypes: [ 224 | ChangeTypeEnum.Added, 225 | ChangeTypeEnum.Copied, 226 | ChangeTypeEnum.Modified, 227 | ChangeTypeEnum.Renamed 228 | ] 229 | }) 230 | core.debug(`All changed files: ${JSON.stringify(allChangedFiles)}`) 231 | await setOutput({ 232 | key: getOutputKey('all_changed_files', outputPrefix), 233 | value: allChangedFiles.paths, 234 | 
writeOutputFiles: inputs.writeOutputFiles, 235 | outputDir: inputs.outputDir, 236 | json: inputs.json, 237 | shouldEscape: inputs.escapeJson, 238 | safeOutput: inputs.safeOutput 239 | }) 240 | 241 | await setOutput({ 242 | key: getOutputKey('all_changed_files_count', outputPrefix), 243 | value: allChangedFiles.count, 244 | writeOutputFiles: inputs.writeOutputFiles, 245 | outputDir: inputs.outputDir 246 | }) 247 | 248 | await setOutput({ 249 | key: getOutputKey('any_changed', outputPrefix), 250 | value: allChangedFiles.paths.length > 0, 251 | writeOutputFiles: inputs.writeOutputFiles, 252 | outputDir: inputs.outputDir, 253 | json: inputs.json 254 | }) 255 | 256 | const allOtherChangedFiles = await getChangeTypeFiles({ 257 | inputs, 258 | changedFiles: allDiffFiles, 259 | changeTypes: [ 260 | ChangeTypeEnum.Added, 261 | ChangeTypeEnum.Copied, 262 | ChangeTypeEnum.Modified, 263 | ChangeTypeEnum.Renamed 264 | ] 265 | }) 266 | core.debug(`All other changed files: ${JSON.stringify(allOtherChangedFiles)}`) 267 | 268 | const allOtherChangedFilesPaths: string[] = getArrayFromPaths( 269 | allOtherChangedFiles.paths, 270 | inputs 271 | ) 272 | const allChangedFilesPaths: string[] = getArrayFromPaths( 273 | allChangedFiles.paths, 274 | inputs 275 | ) 276 | 277 | const otherChangedFiles = allOtherChangedFilesPaths.filter( 278 | (filePath: string) => !allChangedFilesPaths.includes(filePath) 279 | ) 280 | 281 | const onlyChanged = 282 | otherChangedFiles.length === 0 && 283 | allChangedFiles.paths.length > 0 && 284 | filePatterns.length > 0 285 | 286 | await setOutput({ 287 | key: getOutputKey('only_changed', outputPrefix), 288 | value: onlyChanged, 289 | writeOutputFiles: inputs.writeOutputFiles, 290 | outputDir: inputs.outputDir, 291 | json: inputs.json 292 | }) 293 | 294 | await setArrayOutput({ 295 | key: 'other_changed_files', 296 | inputs, 297 | value: otherChangedFiles, 298 | outputPrefix 299 | }) 300 | 301 | await setOutput({ 302 | key: 
getOutputKey('other_changed_files_count', outputPrefix), 303 | value: otherChangedFiles.length.toString(), 304 | writeOutputFiles: inputs.writeOutputFiles, 305 | outputDir: inputs.outputDir 306 | }) 307 | 308 | const allModifiedFiles = await getChangeTypeFiles({ 309 | inputs, 310 | changedFiles: allFilteredDiffFiles, 311 | changeTypes: [ 312 | ChangeTypeEnum.Added, 313 | ChangeTypeEnum.Copied, 314 | ChangeTypeEnum.Modified, 315 | ChangeTypeEnum.Renamed, 316 | ChangeTypeEnum.Deleted 317 | ] 318 | }) 319 | core.debug(`All modified files: ${JSON.stringify(allModifiedFiles)}`) 320 | await setOutput({ 321 | key: getOutputKey('all_modified_files', outputPrefix), 322 | value: allModifiedFiles.paths, 323 | writeOutputFiles: inputs.writeOutputFiles, 324 | outputDir: inputs.outputDir, 325 | json: inputs.json, 326 | shouldEscape: inputs.escapeJson, 327 | safeOutput: inputs.safeOutput 328 | }) 329 | 330 | await setOutput({ 331 | key: getOutputKey('all_modified_files_count', outputPrefix), 332 | value: allModifiedFiles.count, 333 | writeOutputFiles: inputs.writeOutputFiles, 334 | outputDir: inputs.outputDir 335 | }) 336 | 337 | await setOutput({ 338 | key: getOutputKey('any_modified', outputPrefix), 339 | value: allModifiedFiles.paths.length > 0, 340 | writeOutputFiles: inputs.writeOutputFiles, 341 | outputDir: inputs.outputDir, 342 | json: inputs.json 343 | }) 344 | 345 | const allOtherModifiedFiles = await getChangeTypeFiles({ 346 | inputs, 347 | changedFiles: allDiffFiles, 348 | changeTypes: [ 349 | ChangeTypeEnum.Added, 350 | ChangeTypeEnum.Copied, 351 | ChangeTypeEnum.Modified, 352 | ChangeTypeEnum.Renamed, 353 | ChangeTypeEnum.Deleted 354 | ] 355 | }) 356 | 357 | const allOtherModifiedFilesPaths: string[] = getArrayFromPaths( 358 | allOtherModifiedFiles.paths, 359 | inputs 360 | ) 361 | 362 | const allModifiedFilesPaths: string[] = getArrayFromPaths( 363 | allModifiedFiles.paths, 364 | inputs 365 | ) 366 | 367 | const otherModifiedFiles = 
allOtherModifiedFilesPaths.filter( 368 | (filePath: string) => !allModifiedFilesPaths.includes(filePath) 369 | ) 370 | 371 | const onlyModified = 372 | otherModifiedFiles.length === 0 && 373 | allModifiedFiles.paths.length > 0 && 374 | filePatterns.length > 0 375 | 376 | await setOutput({ 377 | key: getOutputKey('only_modified', outputPrefix), 378 | value: onlyModified, 379 | writeOutputFiles: inputs.writeOutputFiles, 380 | outputDir: inputs.outputDir, 381 | json: inputs.json 382 | }) 383 | 384 | await setArrayOutput({ 385 | key: 'other_modified_files', 386 | inputs, 387 | value: otherModifiedFiles, 388 | outputPrefix 389 | }) 390 | 391 | await setOutput({ 392 | key: getOutputKey('other_modified_files_count', outputPrefix), 393 | value: otherModifiedFiles.length.toString(), 394 | writeOutputFiles: inputs.writeOutputFiles, 395 | outputDir: inputs.outputDir 396 | }) 397 | 398 | const deletedFiles = await getChangeTypeFiles({ 399 | inputs, 400 | changedFiles: allFilteredDiffFiles, 401 | changeTypes: [ChangeTypeEnum.Deleted] 402 | }) 403 | core.debug(`Deleted files: ${JSON.stringify(deletedFiles)}`) 404 | 405 | if ( 406 | inputs.dirNamesDeletedFilesIncludeOnlyDeletedDirs && 407 | inputs.dirNames && 408 | workingDirectory 409 | ) { 410 | const newDeletedFilesPaths: string[] = [] 411 | for (const deletedPath of getArrayFromPaths(deletedFiles.paths, inputs)) { 412 | const dirPath = path.join(workingDirectory, deletedPath) 413 | core.debug(`Checking if directory exists: ${dirPath}`) 414 | if (!(await exists(dirPath))) { 415 | core.debug(`Directory not found: ${dirPath}`) 416 | newDeletedFilesPaths.push(deletedPath) 417 | } 418 | } 419 | deletedFiles.paths = inputs.json 420 | ? 
newDeletedFilesPaths 421 | : newDeletedFilesPaths.join(inputs.separator) 422 | deletedFiles.count = newDeletedFilesPaths.length.toString() 423 | core.debug(`New deleted files: ${JSON.stringify(deletedFiles)}`) 424 | } 425 | 426 | await setOutput({ 427 | key: getOutputKey('deleted_files', outputPrefix), 428 | value: deletedFiles.paths, 429 | writeOutputFiles: inputs.writeOutputFiles, 430 | outputDir: inputs.outputDir, 431 | json: inputs.json, 432 | shouldEscape: inputs.escapeJson, 433 | safeOutput: inputs.safeOutput 434 | }) 435 | 436 | await setOutput({ 437 | key: getOutputKey('deleted_files_count', outputPrefix), 438 | value: deletedFiles.count, 439 | writeOutputFiles: inputs.writeOutputFiles, 440 | outputDir: inputs.outputDir 441 | }) 442 | 443 | await setOutput({ 444 | key: getOutputKey('any_deleted', outputPrefix), 445 | value: deletedFiles.paths.length > 0, 446 | writeOutputFiles: inputs.writeOutputFiles, 447 | outputDir: inputs.outputDir, 448 | json: inputs.json 449 | }) 450 | 451 | const allOtherDeletedFiles = await getChangeTypeFiles({ 452 | inputs, 453 | changedFiles: allDiffFiles, 454 | changeTypes: [ChangeTypeEnum.Deleted] 455 | }) 456 | 457 | const allOtherDeletedFilesPaths: string[] = getArrayFromPaths( 458 | allOtherDeletedFiles.paths, 459 | inputs 460 | ) 461 | 462 | const deletedFilesPaths: string[] = getArrayFromPaths( 463 | deletedFiles.paths, 464 | inputs 465 | ) 466 | 467 | const otherDeletedFiles = allOtherDeletedFilesPaths.filter( 468 | filePath => !deletedFilesPaths.includes(filePath) 469 | ) 470 | 471 | const onlyDeleted = 472 | otherDeletedFiles.length === 0 && 473 | deletedFiles.paths.length > 0 && 474 | filePatterns.length > 0 475 | 476 | await setOutput({ 477 | key: getOutputKey('only_deleted', outputPrefix), 478 | value: onlyDeleted, 479 | writeOutputFiles: inputs.writeOutputFiles, 480 | outputDir: inputs.outputDir, 481 | json: inputs.json 482 | }) 483 | 484 | await setArrayOutput({ 485 | key: 'other_deleted_files', 486 | inputs, 487 | 
value: otherDeletedFiles, 488 | outputPrefix 489 | }) 490 | 491 | await setOutput({ 492 | key: getOutputKey('other_deleted_files_count', outputPrefix), 493 | value: otherDeletedFiles.length.toString(), 494 | writeOutputFiles: inputs.writeOutputFiles, 495 | outputDir: inputs.outputDir 496 | }) 497 | 498 | return { 499 | anyModified: allModifiedFiles.paths.length > 0, 500 | anyChanged: allChangedFiles.paths.length > 0 501 | } 502 | } 503 | -------------------------------------------------------------------------------- /src/commitSha.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import * as github from '@actions/github' 3 | 4 | import {Env} from './env' 5 | import {Inputs} from './inputs' 6 | import { 7 | canDiffCommits, 8 | cleanShaInput, 9 | getCurrentBranchName, 10 | getHeadSha, 11 | getParentSha, 12 | getPreviousGitTag, 13 | getRemoteBranchHeadSha, 14 | gitFetch, 15 | gitFetchSubmodules, 16 | gitLog, 17 | verifyCommitSha 18 | } from './utils' 19 | 20 | const getCurrentSHA = async ({ 21 | inputs, 22 | workingDirectory 23 | }: { 24 | inputs: Inputs 25 | workingDirectory: string 26 | }): Promise => { 27 | let currentSha = await cleanShaInput({ 28 | sha: inputs.sha, 29 | cwd: workingDirectory, 30 | token: inputs.token 31 | }) 32 | core.debug('Getting current SHA...') 33 | 34 | if (inputs.until) { 35 | core.debug(`Getting base SHA for '${inputs.until}'...`) 36 | try { 37 | currentSha = await gitLog({ 38 | cwd: workingDirectory, 39 | args: [ 40 | '--format=%H', 41 | '-n', 42 | '1', 43 | '--date', 44 | 'local', 45 | '--until', 46 | inputs.until 47 | ] 48 | }) 49 | } catch (error) { 50 | core.error( 51 | `Invalid until date: ${inputs.until}. 
/**
 * Computes the previous/current SHA pair (and associated branches) for
 * non pull-request events (push, tag push, merge_group, schedule, ...).
 *
 * Previous-SHA resolution order:
 *   1. explicit `base_sha` input,
 *   2. `since` date (oldest commit since that date),
 *   3. previous git tag (tag pushes),
 *   4. merge_group base SHA / push payload `before`,
 *   5. parent of the current commit; for an initial commit the previous
 *      SHA is set equal to the current SHA and `initialCommit` is true.
 *
 * @throws when previous and current SHA resolve to the same commit and
 *         this is not the repository's initial commit.
 */
export const getSHAForNonPullRequestEvent = async ({
  inputs,
  env,
  workingDirectory,
  isShallow,
  diffSubmodule,
  gitFetchExtraArgs,
  isTag,
  remoteName
}: SHAForNonPullRequestEvent): Promise<DiffResult> => {
  let targetBranch = env.GITHUB_REF_NAME
  let currentBranch = targetBranch
  let initialCommit = false

  if (!inputs.skipInitialFetch) {
    if (isShallow) {
      core.info('Repository is shallow, fetching more history...')

      if (isTag) {
        // For tag pushes, deepen history along the branch the tag came from.
        let sourceBranch = ''

        if (github.context.payload.base_ref) {
          sourceBranch = github.context.payload.base_ref.replace(
            'refs/heads/',
            ''
          )
        } else if (github.context.payload.release?.target_commitish) {
          sourceBranch = github.context.payload.release?.target_commitish
        }

        await gitFetch({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`,
            remoteName,
            `+refs/heads/${sourceBranch}:refs/remotes/${remoteName}/${sourceBranch}`
          ]
        })
      } else {
        await gitFetch({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`,
            remoteName,
            `+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}`
          ]
        })
      }

      if (diffSubmodule) {
        await gitFetchSubmodules({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`
          ]
        })
      }
    } else {
      // Repo already has full history; optionally deepen submodules only.
      if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
        await gitFetchSubmodules({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`
          ]
        })
      }
    }
  }

  const currentSha = await getCurrentSHA({inputs, workingDirectory})
  let previousSha = await cleanShaInput({
    sha: inputs.baseSha,
    cwd: workingDirectory,
    token: inputs.token
  })
  // Two-dot diff: compare the two commits directly (no merge-base).
  const diff = '..'
  const currentBranchName = await getCurrentBranchName({cwd: workingDirectory})

  // Prefer the actually checked-out branch name over GITHUB_REF_NAME when
  // it differs (e.g. a workflow checked out a different branch).
  // NOTE(review): targetBranch === currentBranch at this point, so the two
  // inequality checks in the OR are equivalent — likely redundant.
  if (
    currentBranchName &&
    currentBranchName !== 'HEAD' &&
    (currentBranchName !== targetBranch || currentBranchName !== currentBranch)
  ) {
    targetBranch = currentBranchName
    currentBranch = currentBranchName
  }

  // Fast path: both endpoints were supplied explicitly by the user.
  if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
    if (previousSha === currentSha) {
      core.error(
        `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
      )
      core.error(
        `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
      )
      throw new Error('Similar commit hashes detected.')
    }

    core.debug(`Previous SHA: ${previousSha}`)

    return {
      previousSha,
      currentSha,
      currentBranch,
      targetBranch,
      diff
    }
  }

  if (!previousSha || previousSha === currentSha) {
    core.debug('Getting previous SHA...')
    if (inputs.since) {
      // `since`: take the OLDEST commit in the window (last line of git log).
      core.debug(`Getting base SHA for '${inputs.since}'...`)
      try {
        const allCommitsFrom = await gitLog({
          cwd: workingDirectory,
          args: ['--format=%H', '--date', 'local', '--since', inputs.since]
        })

        if (allCommitsFrom) {
          const allCommitsFromArray = allCommitsFrom.split('\n')
          previousSha = allCommitsFromArray[allCommitsFromArray.length - 1]
        }
      } catch (error) {
        core.error(
          `Invalid since date: ${inputs.since}. ${(error as Error).message}`
        )
        throw error
      }
    } else if (isTag) {
      // Tag push: diff against the previous matching tag.
      core.debug('Getting previous SHA for tag...')
      const {sha, tag} = await getPreviousGitTag({
        cwd: workingDirectory,
        tagsPattern: inputs.tagsPattern,
        tagsIgnorePattern: inputs.tagsIgnorePattern,
        currentBranch
      })
      previousSha = sha
      targetBranch = tag
    } else {
      if (github.context.eventName === 'merge_group') {
        core.debug('Getting previous SHA for merge group...')
        previousSha = github.context.payload.merge_group?.base_sha
      } else {
        core.debug('Getting previous SHA for last remote commit...')
        // NOTE(review): push payloads set `forced` as a boolean; the string
        // comparison to 'false' looks suspect — combined with !forced the
        // net effect is "use `before` unless this was a force push", but
        // the 'false' literal should be confirmed against the payload docs.
        if (
          github.context.payload.forced === 'false' ||
          !github.context.payload.forced
        ) {
          previousSha = github.context.payload.before
        }
      }

      // `before` is the all-zeros SHA for newly created branches.
      if (
        !previousSha ||
        previousSha === '0000000000000000000000000000000000000000'
      ) {
        previousSha = await getParentSha({
          cwd: workingDirectory
        })
      } else if (
        (await verifyCommitSha({
          sha: previousSha,
          cwd: workingDirectory,
          showAsErrorMessage: false
        })) !== 0
      ) {
        // Payload SHA exists but is not in the local (possibly shallow) history.
        core.warning(
          `Previous commit ${previousSha} is not valid. Using parent commit.`
        )
        previousSha = await getParentSha({
          cwd: workingDirectory
        })
      }

      if (!previousSha || previousSha === currentSha) {
        previousSha = await getParentSha({
          cwd: workingDirectory
        })

        if (!previousSha) {
          // No parent at all: this is the repository's first commit.
          core.warning('Initial commit detected no previous commit found.')
          initialCommit = true
          previousSha = currentSha
        }
      }
    }
  }

  await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
  core.debug(`Previous SHA: ${previousSha}`)

  core.debug(`Target branch: ${targetBranch}`)
  core.debug(`Current branch: ${currentBranch}`)

  if (!initialCommit && previousSha === currentSha) {
    core.error(
      `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
    )
    core.error(
      `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
    )
    throw new Error('Similar commit hashes detected.')
  }

  return {
    previousSha,
    currentSha,
    currentBranch,
    targetBranch,
    diff,
    initialCommit
  }
}
/**
 * Computes the previous/current SHA pair (and associated branches) for
 * pull-request events.
 *
 * Defaults to a three-dot diff (merge-base) against the PR base branch;
 * falls back to a two-dot diff when the base ref is missing or the two
 * commits cannot be three-dot diffed. With `since_last_remote_commit`,
 * diffs against the payload `before` SHA instead of the base branch.
 *
 * @throws when the PR branch cannot be fetched, no diffable previous
 *         commit can be located, or previous === current SHA.
 */
export const getSHAForPullRequestEvent = async ({
  inputs,
  workingDirectory,
  isShallow,
  diffSubmodule,
  gitFetchExtraArgs,
  remoteName
}: SHAForPullRequestEvent): Promise<DiffResult> => {
  let targetBranch = github.context.payload.pull_request?.base?.ref
  const currentBranch = github.context.payload.pull_request?.head?.ref
  if (inputs.sinceLastRemoteCommit) {
    // Diff against the last pushed commit rather than the base branch.
    targetBranch = currentBranch
  }

  if (!inputs.skipInitialFetch) {
    // NOTE(review): this message is logged even when the repository is NOT
    // shallow — it probably belongs inside the `isShallow` branch below
    // (compare getSHAForNonPullRequestEvent).
    core.info('Repository is shallow, fetching more history...')
    if (isShallow) {
      // First try the pull/<n>/head ref; fall back to the branch refspec.
      let prFetchExitCode = await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitFetchExtraArgs,
          '-u',
          '--progress',
          remoteName,
          `pull/${github.context.payload.pull_request?.number}/head:${currentBranch}`
        ]
      })

      if (prFetchExitCode !== 0) {
        prFetchExitCode = await gitFetch({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`,
            remoteName,
            `+refs/heads/${currentBranch}*:refs/remotes/${remoteName}/${currentBranch}*`
          ]
        })
      }

      if (prFetchExitCode !== 0) {
        throw new Error(
          'Failed to fetch pull request branch. Please ensure "persist-credentials" is set to "true" when checking out the repository. See: https://github.com/actions/checkout#usage'
        )
      }
      core.debug('Fetching target branch...')
      await gitFetch({
        cwd: workingDirectory,
        args: [
          ...gitFetchExtraArgs,
          '-u',
          '--progress',
          `--deepen=${inputs.fetchDepth}`,
          remoteName,
          `+refs/heads/${github.context.payload.pull_request?.base?.ref}:refs/remotes/${remoteName}/${github.context.payload.pull_request?.base?.ref}`
        ]
      })

      if (diffSubmodule) {
        await gitFetchSubmodules({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`
          ]
        })
      }
    } else {
      if (diffSubmodule && inputs.fetchAdditionalSubmoduleHistory) {
        await gitFetchSubmodules({
          cwd: workingDirectory,
          args: [
            ...gitFetchExtraArgs,
            '-u',
            '--progress',
            `--deepen=${inputs.fetchDepth}`
          ]
        })
      }
    }
    core.info('Completed fetching more history.')
  }

  const currentSha = await getCurrentSHA({inputs, workingDirectory})
  let previousSha = await cleanShaInput({
    sha: inputs.baseSha,
    cwd: workingDirectory,
    token: inputs.token
  })
  // Three-dot diff by default: changes since the merge-base with the target.
  let diff = '...'

  // Fast path: both endpoints supplied explicitly by the user.
  if (inputs.baseSha && inputs.sha && currentBranch && targetBranch) {
    if (previousSha === currentSha) {
      core.error(
        `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
      )
      core.error(
        `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
      )
      throw new Error('Similar commit hashes detected.')
    }

    core.debug(`Previous SHA: ${previousSha}`)

    return {
      previousSha,
      currentSha,
      currentBranch,
      targetBranch,
      diff
    }
  }

  // No base ref in the payload: a merge-base diff is meaningless.
  if (!github.context.payload.pull_request?.base?.ref) {
    diff = '..'
  }

  if (!previousSha || previousSha === currentSha) {
    if (inputs.sinceLastRemoteCommit) {
      previousSha = github.context.payload.before

      if (
        !previousSha ||
        (previousSha &&
          (await verifyCommitSha({
            sha: previousSha,
            cwd: workingDirectory,
            showAsErrorMessage: false
          })) !== 0)
      ) {
        core.info(
          `Unable to locate the previous commit in the local history for ${github.context.eventName} (${github.context.payload.action}) event. Falling back to the previous commit in the local history.`
        )

        previousSha = await getParentSha({
          cwd: workingDirectory
        })

        // NOTE(review): this guard and the one below perform the same
        // verification and throw the same error; the `synchronize` special
        // case appears redundant with the general check that follows.
        if (
          github.context.payload.action &&
          github.context.payload.action === 'synchronize' &&
          previousSha &&
          (!previousSha ||
            (previousSha &&
              (await verifyCommitSha({
                sha: previousSha,
                cwd: workingDirectory,
                showAsErrorMessage: false
              })) !== 0))
        ) {
          throw new Error(
            'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
          )
        }

        if (
          !previousSha ||
          (previousSha &&
            (await verifyCommitSha({
              sha: previousSha,
              cwd: workingDirectory,
              showAsErrorMessage: false
            })) !== 0)
        ) {
          throw new Error(
            'Unable to locate the previous commit in the local history. Please ensure to checkout pull request HEAD commit instead of the merge commit. See: https://github.com/actions/checkout/blob/main/README.md#checkout-pull-request-head-commit-instead-of-merge-commit'
          )
        }
      }
    } else {
      previousSha = github.context.payload.pull_request?.base?.sha

      if (!previousSha) {
        previousSha = await getRemoteBranchHeadSha({
          cwd: workingDirectory,
          remoteName,
          branch: targetBranch
        })
      }

      if (isShallow) {
        // Keep deepening until the merge-base is present locally.
        if (
          !(await canDiffCommits({
            cwd: workingDirectory,
            sha1: previousSha,
            sha2: currentSha,
            diff
          }))
        ) {
          core.info(
            'Merge base is not in the local history, fetching remote target branch...'
          )

          for (
            let i = 1;
            i <= (inputs.fetchMissingHistoryMaxRetries || 10);
            i++
          ) {
            await gitFetch({
              cwd: workingDirectory,
              args: [
                ...gitFetchExtraArgs,
                '-u',
                '--progress',
                `--deepen=${inputs.fetchDepth}`,
                remoteName,
                `+refs/heads/${targetBranch}:refs/remotes/${remoteName}/${targetBranch}`
              ]
            })

            if (
              await canDiffCommits({
                cwd: workingDirectory,
                sha1: previousSha,
                sha2: currentSha,
                diff
              })
            ) {
              break
            }

            core.info(
              'Merge base is not in the local history, fetching remote target branch again...'
            )
            // NOTE(review): the denominator is hardcoded to 10 but the loop
            // bound is inputs.fetchMissingHistoryMaxRetries || 10 — the
            // message is wrong whenever a custom retry count is configured.
            core.info(`Attempt ${i}/10`)
          }
        }
      }
    }

    // Last resort: fall back to the PR base SHA from the payload.
    if (!previousSha || previousSha === currentSha) {
      previousSha = github.context.payload.pull_request?.base?.sha
    }
  }

  // Degrade to a two-dot diff when a merge-base diff is impossible.
  if (
    !(await canDiffCommits({
      cwd: workingDirectory,
      sha1: previousSha,
      sha2: currentSha,
      diff
    }))
  ) {
    diff = '..'
  }

  await verifyCommitSha({sha: previousSha, cwd: workingDirectory})
  core.debug(`Previous SHA: ${previousSha}`)

  if (
    !(await canDiffCommits({
      cwd: workingDirectory,
      sha1: previousSha,
      sha2: currentSha,
      diff
    }))
  ) {
    core.warning(
      'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.'
    )
    core.warning(
      'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}'
    )
    throw new Error(
      `Unable to determine a difference between ${previousSha}${diff}${currentSha}`
    )
  }

  if (previousSha === currentSha) {
    core.error(
      `Similar commit hashes detected: previous sha: ${previousSha} is equivalent to the current sha: ${currentSha}.`
    )
    // This occurs if a PR is created from a forked repository and the event is pull_request_target.
    // - name: Checkout to branch
    //   uses: actions/checkout@v3
    // Without setting the repository to use the same repository as the pull request will cause the previousSha
    // to be the same as the currentSha since the currentSha cannot be found in the local history.
    // The solution is to use:
    // - name: Checkout to branch
    //   uses: actions/checkout@v3
    //   with:
    //     repository: ${{ github.event.pull_request.head.repo.full_name }}
    if (github.context.eventName === 'pull_request_target') {
      core.warning(
        'If this pull request is from a forked repository, please set the checkout action `repository` input to the same repository as the pull request.'
      )
      core.warning(
        'This can be done by setting actions/checkout `repository` to ${{ github.event.pull_request.head.repo.full_name }}'
      )
    } else {
      core.error(
        `Please verify that both commits are valid, and increase the fetch_depth to a number higher than ${inputs.fetchDepth}.`
      )
    }
    throw new Error('Similar commit hashes detected.')
  }

  return {
    previousSha,
    currentSha,
    currentBranch,
    targetBranch,
    diff
  }
}
  // Covers getDirnameMaxDepth: dirname truncated to dirNamesMaxDepth levels,
  // with optional suppression of the '.' (current-dir) result.
  describe('getDirnameMaxDepth_function', () => {
    // Tests that the function returns the correct dirname when the relative path has multiple directories
    it('test_multiple_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to/some/file',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path/to')
    })

    // Tests that the function returns the correct dirname when the relative path has only one directory
    it('test_single_directory', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path')
    })

    // Tests that the function returns '.' when the relative path has no directories
    it('test_no_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'file.txt',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('.')
    })

    // Tests that the function returns the correct dirname when dirNamesMaxDepth is set to a value less than the number of directories in the relative path
    it('test_dirnames_max_depth_less_than_num_directories', () => {
      const result = getDirnameMaxDepth({
        relativePath: 'path/to/some/file',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: false
      })
      expect(result).toEqual('path')
    })

    // Tests that the function returns an empty string when excludeCurrentDir is true and the output is '.'
    it('test_exclude_current_dir_is_true_and_output_is_dot', () => {
      const result = getDirnameMaxDepth({
        relativePath: '.',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // Tests that a Windows drive root ('C:\') is treated like the current dir and suppressed when excludeCurrentDir is true
    it('test_windows_drive_root_and_exclude_current_dir_is_true', () => {
      mockedPlatform('win32')
      const result = getDirnameMaxDepth({
        relativePath: 'C:\\',
        dirNamesMaxDepth: 1,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // Tests that getDirnameMaxDepth handles a relative path with a trailing separator correctly
    it('test_trailing_separator', () => {
      const input = {
        relativePath: 'path/to/dir/',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: true
      }
      const expectedOutput = 'path/to'
      const actualOutput = getDirnameMaxDepth(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that a bare filename (dirname '.') yields an empty string when excludeCurrentDir is true and no dirNamesMaxDepth is given
    it('test_trailing_separator_exclude_current_dir', () => {
      const input = {
        relativePath: 'file',
        excludeCurrentDir: true
      }
      const expectedOutput = ''
      const actualOutput = getDirnameMaxDepth(input)
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns the correct output for a Windows UNC root path
    it('test_windows_unc_root', () => {
      mockedPlatform('win32')
      const input = {
        relativePath: '\\hello',
        dirNamesMaxDepth: 2,
        excludeCurrentDir: true
      }
      const expectedOutput = ''
      expect(getDirnameMaxDepth(input)).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns an empty string when given a Windows UNC root and excludeCurrentDir is true
    it('test_windows_unc_root_exclude_current_dir', () => {
      mockedPlatform('win32')
      const relativePath = '\\hello'
      const result = getDirnameMaxDepth({
        relativePath,
        excludeCurrentDir: true
      })
      expect(result).toEqual('')
    })

    // NOTE(review): '\f' in this literal is a FORM-FEED escape, not a
    // backslash — the input is actually 'path/to<FF>ile', so this does not
    // exercise mixed separators as the original comment claimed. The
    // expectation ('path') still holds either way.
    it('test_relative_path_with_slashes', () => {
      const relativePath = 'path/to\file'
      const expectedOutput = 'path'
      const actualOutput = getDirnameMaxDepth({relativePath})
      expect(actualOutput).toEqual(expectedOutput)
    })

    // Tests that getDirnameMaxDepth returns the correct dirname for a relative path that contains special characters
    it('test_special_characters', () => {
      const relativePath =
        'path/with/special/characters/!@#$%^&*()_+{}|:<>?[];,./'
      const expectedDirname = 'path/with/special/characters'
      const actualDirname = getDirnameMaxDepth({relativePath})
      expect(actualDirname).toEqual(expectedDirname)
    })
  })
104 | it('test_trailing_separator_exclude_current_dir', () => { 105 | const input = { 106 | relativePath: 'file', 107 | excludeCurrentDir: true 108 | } 109 | const expectedOutput = '' 110 | const actualOutput = getDirnameMaxDepth(input) 111 | expect(actualOutput).toEqual(expectedOutput) 112 | }) 113 | 114 | // Tests that getDirnameMaxDepth returns the correct output for a Windows UNC root path 115 | it('test_windows_unc_root', () => { 116 | mockedPlatform('win32') 117 | const input = { 118 | relativePath: '\\hello', 119 | dirNamesMaxDepth: 2, 120 | excludeCurrentDir: true 121 | } 122 | const expectedOutput = '' 123 | expect(getDirnameMaxDepth(input)).toEqual(expectedOutput) 124 | }) 125 | 126 | // Tests that getDirnameMaxDepth returns an empty string when given a Windows UNC root and excludeCurrentDir is true 127 | it('test_windows_unc_root_exclude_current_dir', () => { 128 | mockedPlatform('win32') 129 | const relativePath = '\\hello' 130 | const result = getDirnameMaxDepth({ 131 | relativePath, 132 | excludeCurrentDir: true 133 | }) 134 | expect(result).toEqual('') 135 | }) 136 | 137 | // Tests that getDirnameMaxDepth returns the correct dirname with a relative path that contains both forward and backward slashes 138 | it('test_relative_path_with_slashes', () => { 139 | const relativePath = 'path/to\file' 140 | const expectedOutput = 'path' 141 | const actualOutput = getDirnameMaxDepth({relativePath}) 142 | expect(actualOutput).toEqual(expectedOutput) 143 | }) 144 | 145 | // Tests that getDirnameMaxDepth returns the correct dirname for a relative path that contains special characters 146 | it('test_special_characters', () => { 147 | const relativePath = 148 | 'path/with/special/characters/!@#$%^&*()_+{}|:<>?[];,./' 149 | const expectedDirname = 'path/with/special/characters' 150 | const actualDirname = getDirnameMaxDepth({relativePath}) 151 | expect(actualDirname).toEqual(expectedDirname) 152 | }) 153 | }) 154 | 155 | describe('getDirname_function', () => { 156 | 
// Tests that the function returns the correct dirname for a valid path 157 | it('test valid path', () => { 158 | expect(getDirname('/path/to/file')).toEqual('/path/to') 159 | }) 160 | 161 | // Tests that the function returns the correct dirname for a valid Windows UNC root path 162 | it('test windows unc root path', () => { 163 | mockedPlatform('win32') 164 | expect(getDirname('\\helloworld')).toEqual('.') 165 | }) 166 | 167 | // Tests that the function returns the correct dirname for a path with a trailing slash 168 | it('test path with trailing slash', () => { 169 | expect(getDirname('/path/to/file/')).toEqual('/path/to') 170 | }) 171 | 172 | // Tests that the function returns the correct dirname for a Windows UNC root path with a trailing slash 173 | it('test windows unc root path with trailing slash', () => { 174 | mockedPlatform('win32') 175 | expect(getDirname('\\hello\\world\\')).toEqual('.') 176 | }) 177 | 178 | // Tests that the function returns the correct dirname for a path with multiple slashes 179 | it('test path with multiple slashes', () => { 180 | expect(getDirname('/path//to/file')).toEqual('/path/to') 181 | }) 182 | 183 | // Tests that the function returns the correct dirname for a Windows UNC root path with multiple slashes 184 | it('test windows unc root path with multiple slashes', () => { 185 | mockedPlatform('win32') 186 | expect(getDirname('\\hello\\world')).toEqual('.') 187 | }) 188 | }) 189 | 190 | describe('normalizeSeparators_function', () => { 191 | // Tests that forward slashes are normalized on Linux 192 | it('test forward slashes linux', () => { 193 | const input = 'path/to/file' 194 | const expectedOutput = 'path/to/file' 195 | const actualOutput = normalizeSeparators(input) 196 | expect(actualOutput).toEqual(expectedOutput) 197 | }) 198 | 199 | // Tests that backslashes are normalized on Windows 200 | it('test backslashes windows', () => { 201 | mockedPlatform('win32') 202 | const input = 'path\\to\\file' 203 | const 
expectedOutput = 'path\\to\\file' 204 | const actualOutput = normalizeSeparators(input) 205 | expect(actualOutput).toEqual(expectedOutput) 206 | }) 207 | 208 | // Tests that forward slashes are normalized on Windows 209 | it('test mixed slashes windows', () => { 210 | mockedPlatform('win32') 211 | const input = 'path/to/file' 212 | const expectedOutput = 'path\\to\\file' 213 | const actualOutput = normalizeSeparators(input) 214 | expect(actualOutput).toEqual(expectedOutput) 215 | }) 216 | 217 | // Tests that mixed slashes are normalized on Windows 218 | it('test mixed slashes windows', () => { 219 | mockedPlatform('win32') 220 | const input = 'path\\to/file' 221 | const expectedOutput = 'path\\to\\file' 222 | const actualOutput = normalizeSeparators(input) 223 | expect(actualOutput).toEqual(expectedOutput) 224 | }) 225 | 226 | // Tests that an empty string returns an empty string 227 | it('test empty string', () => { 228 | const input = '' 229 | const expectedOutput = '' 230 | const actualOutput = normalizeSeparators(input) 231 | expect(actualOutput).toEqual(expectedOutput) 232 | }) 233 | 234 | // Tests that multiple consecutive slashes are removed 235 | it('test multiple consecutive slashes', () => { 236 | const input = 'path//to//file' 237 | const expectedOutput = 'path/to/file' 238 | const actualOutput = normalizeSeparators(input) 239 | expect(actualOutput).toEqual(expectedOutput) 240 | }) 241 | 242 | // Tests that UNC format is preserved on Windows 243 | it('test unc format windows', () => { 244 | mockedPlatform('win32') 245 | const input = '\\\\hello\\world' 246 | const expectedOutput = '\\\\hello\\world' 247 | const actualOutput = normalizeSeparators(input) 248 | expect(actualOutput).toEqual(expectedOutput) 249 | }) 250 | 251 | // Tests that a drive root is preserved on Windows 252 | it('test drive root windows', () => { 253 | mockedPlatform('win32') 254 | const input = 'C:\\' 255 | const expectedOutput = 'C:\\' 256 | const actualOutput = 
normalizeSeparators(input) 257 | expect(actualOutput).toEqual(expectedOutput) 258 | }) 259 | }) 260 | 261 | describe('getFilteredChangedFiles', () => { 262 | // Tests that the function returns an empty object when allDiffFiles and filePatterns are empty 263 | it('should return an empty object when allDiffFiles and filePatterns are empty', async () => { 264 | const result = await getFilteredChangedFiles({ 265 | allDiffFiles: { 266 | [ChangeTypeEnum.Added]: [], 267 | [ChangeTypeEnum.Copied]: [], 268 | [ChangeTypeEnum.Deleted]: [], 269 | [ChangeTypeEnum.Modified]: [], 270 | [ChangeTypeEnum.Renamed]: [], 271 | [ChangeTypeEnum.TypeChanged]: [], 272 | [ChangeTypeEnum.Unmerged]: [], 273 | [ChangeTypeEnum.Unknown]: [] 274 | }, 275 | filePatterns: [] 276 | }) 277 | expect(result).toEqual({ 278 | [ChangeTypeEnum.Added]: [], 279 | [ChangeTypeEnum.Copied]: [], 280 | [ChangeTypeEnum.Deleted]: [], 281 | [ChangeTypeEnum.Modified]: [], 282 | [ChangeTypeEnum.Renamed]: [], 283 | [ChangeTypeEnum.TypeChanged]: [], 284 | [ChangeTypeEnum.Unmerged]: [], 285 | [ChangeTypeEnum.Unknown]: [] 286 | }) 287 | }) 288 | 289 | // Tests that the function returns allDiffFiles when filePatterns is empty 290 | it('should return allDiffFiles when filePatterns is empty', async () => { 291 | const allDiffFiles = { 292 | [ChangeTypeEnum.Added]: ['file1.txt'], 293 | [ChangeTypeEnum.Copied]: [], 294 | [ChangeTypeEnum.Deleted]: [], 295 | [ChangeTypeEnum.Modified]: [], 296 | [ChangeTypeEnum.Renamed]: [], 297 | [ChangeTypeEnum.TypeChanged]: [], 298 | [ChangeTypeEnum.Unmerged]: [], 299 | [ChangeTypeEnum.Unknown]: [] 300 | } 301 | const result = await getFilteredChangedFiles({ 302 | allDiffFiles, 303 | filePatterns: [] 304 | }) 305 | expect(result).toEqual(allDiffFiles) 306 | }) 307 | 308 | // Tests that the function returns an empty object when allDiffFiles is empty 309 | it('should return an empty object when allDiffFiles is empty', async () => { 310 | const result = await getFilteredChangedFiles({ 311 | 
allDiffFiles: { 312 | [ChangeTypeEnum.Added]: [], 313 | [ChangeTypeEnum.Copied]: [], 314 | [ChangeTypeEnum.Deleted]: [], 315 | [ChangeTypeEnum.Modified]: [], 316 | [ChangeTypeEnum.Renamed]: [], 317 | [ChangeTypeEnum.TypeChanged]: [], 318 | [ChangeTypeEnum.Unmerged]: [], 319 | [ChangeTypeEnum.Unknown]: [] 320 | }, 321 | filePatterns: ['*.txt'] 322 | }) 323 | expect(result).toEqual({ 324 | [ChangeTypeEnum.Added]: [], 325 | [ChangeTypeEnum.Copied]: [], 326 | [ChangeTypeEnum.Deleted]: [], 327 | [ChangeTypeEnum.Modified]: [], 328 | [ChangeTypeEnum.Renamed]: [], 329 | [ChangeTypeEnum.TypeChanged]: [], 330 | [ChangeTypeEnum.Unmerged]: [], 331 | [ChangeTypeEnum.Unknown]: [] 332 | }) 333 | }) 334 | 335 | // Tests that the function returns only the files that match the file patterns on non windows platforms 336 | it('should return only the files that match the file patterns', async () => { 337 | const allDiffFiles = { 338 | [ChangeTypeEnum.Added]: [ 339 | 'file1.txt', 340 | 'file2.md', 341 | 'file3.txt', 342 | 'test/dir/file4.txt', 343 | 'test/dir/file5.txt', 344 | 'dir/file6.md' 345 | ], 346 | [ChangeTypeEnum.Copied]: [], 347 | [ChangeTypeEnum.Deleted]: [], 348 | [ChangeTypeEnum.Modified]: [], 349 | [ChangeTypeEnum.Renamed]: [], 350 | [ChangeTypeEnum.TypeChanged]: [], 351 | [ChangeTypeEnum.Unmerged]: [], 352 | [ChangeTypeEnum.Unknown]: [] 353 | } 354 | const result = await getFilteredChangedFiles({ 355 | allDiffFiles, 356 | filePatterns: ['*.txt'] 357 | }) 358 | expect(result).toEqual({ 359 | [ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'], 360 | [ChangeTypeEnum.Copied]: [], 361 | [ChangeTypeEnum.Deleted]: [], 362 | [ChangeTypeEnum.Modified]: [], 363 | [ChangeTypeEnum.Renamed]: [], 364 | [ChangeTypeEnum.TypeChanged]: [], 365 | [ChangeTypeEnum.Unmerged]: [], 366 | [ChangeTypeEnum.Unknown]: [] 367 | }) 368 | }) 369 | 370 | // Tests that the function returns only the files that match the file patterns on windows 371 | it('should return only the files that match the file 
patterns on windows', async () => { 372 | mockedPlatform('win32') 373 | const allDiffFiles = { 374 | [ChangeTypeEnum.Added]: [ 375 | 'file1.txt', 376 | 'file2.md', 377 | 'file3.txt', 378 | 'test\\dir\\file4.txt', 379 | 'test\\dir\\file5.txt', 380 | 'dir\\file6.md' 381 | ], 382 | [ChangeTypeEnum.Copied]: [], 383 | [ChangeTypeEnum.Deleted]: [], 384 | [ChangeTypeEnum.Modified]: [], 385 | [ChangeTypeEnum.Renamed]: [], 386 | [ChangeTypeEnum.TypeChanged]: [], 387 | [ChangeTypeEnum.Unmerged]: [], 388 | [ChangeTypeEnum.Unknown]: [] 389 | } 390 | const result = await getFilteredChangedFiles({ 391 | allDiffFiles, 392 | filePatterns: ['*.txt'] 393 | }) 394 | 395 | expect(result).toEqual({ 396 | [ChangeTypeEnum.Added]: ['file1.txt', 'file3.txt'], 397 | [ChangeTypeEnum.Copied]: [], 398 | [ChangeTypeEnum.Deleted]: [], 399 | [ChangeTypeEnum.Modified]: [], 400 | [ChangeTypeEnum.Renamed]: [], 401 | [ChangeTypeEnum.TypeChanged]: [], 402 | [ChangeTypeEnum.Unmerged]: [], 403 | [ChangeTypeEnum.Unknown]: [] 404 | }) 405 | }) 406 | 407 | // Tests that the function returns only the files that match the file patterns with globstar on non windows platforms 408 | it('should return only the files that match the file patterns with globstar', async () => { 409 | const allDiffFiles = { 410 | [ChangeTypeEnum.Added]: [ 411 | 'file1.txt', 412 | 'file2.md', 413 | 'file3.txt', 414 | 'test/dir/file4.txt', 415 | 'test/dir/file5.txt', 416 | 'dir/file6.md' 417 | ], 418 | [ChangeTypeEnum.Copied]: [], 419 | [ChangeTypeEnum.Deleted]: [], 420 | [ChangeTypeEnum.Modified]: [], 421 | [ChangeTypeEnum.Renamed]: [], 422 | [ChangeTypeEnum.TypeChanged]: [], 423 | [ChangeTypeEnum.Unmerged]: [], 424 | [ChangeTypeEnum.Unknown]: [] 425 | } 426 | const result = await getFilteredChangedFiles({ 427 | allDiffFiles, 428 | filePatterns: ['**.txt'] 429 | }) 430 | expect(result).toEqual({ 431 | [ChangeTypeEnum.Added]: [ 432 | 'file1.txt', 433 | 'file3.txt', 434 | 'test/dir/file4.txt', 435 | 'test/dir/file5.txt' 436 | ], 437 | 
[ChangeTypeEnum.Copied]: [], 438 | [ChangeTypeEnum.Deleted]: [], 439 | [ChangeTypeEnum.Modified]: [], 440 | [ChangeTypeEnum.Renamed]: [], 441 | [ChangeTypeEnum.TypeChanged]: [], 442 | [ChangeTypeEnum.Unmerged]: [], 443 | [ChangeTypeEnum.Unknown]: [] 444 | }) 445 | }) 446 | 447 | // Tests that the function returns only the files that match the file patterns with globstar on windows 448 | it('should return only the files that match the file patterns with globstar on windows', async () => { 449 | mockedPlatform('win32') 450 | const allDiffFiles = { 451 | [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'], 452 | [ChangeTypeEnum.Copied]: [], 453 | [ChangeTypeEnum.Deleted]: [], 454 | [ChangeTypeEnum.Modified]: [], 455 | [ChangeTypeEnum.Renamed]: [], 456 | [ChangeTypeEnum.TypeChanged]: [], 457 | [ChangeTypeEnum.Unmerged]: [], 458 | [ChangeTypeEnum.Unknown]: [] 459 | } 460 | const result = await getFilteredChangedFiles({ 461 | allDiffFiles, 462 | filePatterns: ['test/**'] 463 | }) 464 | expect(result).toEqual({ 465 | [ChangeTypeEnum.Added]: ['test\\test rename-1.txt'], 466 | [ChangeTypeEnum.Copied]: [], 467 | [ChangeTypeEnum.Deleted]: [], 468 | [ChangeTypeEnum.Modified]: [], 469 | [ChangeTypeEnum.Renamed]: [], 470 | [ChangeTypeEnum.TypeChanged]: [], 471 | [ChangeTypeEnum.Unmerged]: [], 472 | [ChangeTypeEnum.Unknown]: [] 473 | }) 474 | }) 475 | 476 | // Tests that the function returns an empty object when there are no files that match the file patterns 477 | it('should return an empty object when there are no files that match the file patterns', async () => { 478 | const allDiffFiles = { 479 | [ChangeTypeEnum.Added]: ['file1.md', 'file2.md', 'file3.md'], 480 | [ChangeTypeEnum.Copied]: [], 481 | [ChangeTypeEnum.Deleted]: [], 482 | [ChangeTypeEnum.Modified]: [], 483 | [ChangeTypeEnum.Renamed]: [], 484 | [ChangeTypeEnum.TypeChanged]: [], 485 | [ChangeTypeEnum.Unmerged]: [], 486 | [ChangeTypeEnum.Unknown]: [] 487 | } 488 | const result = await getFilteredChangedFiles({ 489 | 
allDiffFiles, 490 | filePatterns: ['*.txt'] 491 | }) 492 | expect(result).toEqual({ 493 | [ChangeTypeEnum.Added]: [], 494 | [ChangeTypeEnum.Copied]: [], 495 | [ChangeTypeEnum.Deleted]: [], 496 | [ChangeTypeEnum.Modified]: [], 497 | [ChangeTypeEnum.Renamed]: [], 498 | [ChangeTypeEnum.TypeChanged]: [], 499 | [ChangeTypeEnum.Unmerged]: [], 500 | [ChangeTypeEnum.Unknown]: [] 501 | }) 502 | }) 503 | 504 | // Tests that the function can handle file names with special characters 505 | it('should handle file names with special characters', async () => { 506 | const allDiffFiles = { 507 | [ChangeTypeEnum.Added]: [ 508 | 'file1.txt', 509 | 'file2 with spaces.txt', 510 | 'file3$$.txt' 511 | ], 512 | [ChangeTypeEnum.Copied]: [], 513 | [ChangeTypeEnum.Deleted]: [], 514 | [ChangeTypeEnum.Modified]: [], 515 | [ChangeTypeEnum.Renamed]: [], 516 | [ChangeTypeEnum.TypeChanged]: [], 517 | [ChangeTypeEnum.Unmerged]: [], 518 | [ChangeTypeEnum.Unknown]: [] 519 | } 520 | const result = await getFilteredChangedFiles({ 521 | allDiffFiles, 522 | filePatterns: ['file2*.txt'] 523 | }) 524 | expect(result).toEqual({ 525 | [ChangeTypeEnum.Added]: ['file2 with spaces.txt'], 526 | [ChangeTypeEnum.Copied]: [], 527 | [ChangeTypeEnum.Deleted]: [], 528 | [ChangeTypeEnum.Modified]: [], 529 | [ChangeTypeEnum.Renamed]: [], 530 | [ChangeTypeEnum.TypeChanged]: [], 531 | [ChangeTypeEnum.Unmerged]: [], 532 | [ChangeTypeEnum.Unknown]: [] 533 | }) 534 | }) 535 | 536 | // Tests that getFilteredChangedFiles correctly filters files using glob patterns 537 | it('should filter files using glob patterns', async () => { 538 | const allDiffFiles = { 539 | [ChangeTypeEnum.Added]: ['test/migrations/test.sql'], 540 | [ChangeTypeEnum.Copied]: [], 541 | [ChangeTypeEnum.Deleted]: [], 542 | [ChangeTypeEnum.Modified]: [], 543 | [ChangeTypeEnum.Renamed]: [], 544 | [ChangeTypeEnum.TypeChanged]: [], 545 | [ChangeTypeEnum.Unmerged]: [], 546 | [ChangeTypeEnum.Unknown]: [] 547 | } 548 | const filePatterns = ['test/migrations/**'] 
549 | const filteredFiles = await getFilteredChangedFiles({ 550 | allDiffFiles, 551 | filePatterns 552 | }) 553 | expect(filteredFiles[ChangeTypeEnum.Added]).toEqual([ 554 | 'test/migrations/test.sql' 555 | ]) 556 | }) 557 | 558 | // Tests that getFilteredChangedFiles correctly filters files using ignore glob patterns 559 | it('should filter files using ignore glob patterns', async () => { 560 | const allDiffFiles = { 561 | [ChangeTypeEnum.Added]: [], 562 | [ChangeTypeEnum.Copied]: [], 563 | [ChangeTypeEnum.Deleted]: [], 564 | [ChangeTypeEnum.Modified]: [ 565 | 'assets/scripts/configure-minikube-linux.sh' 566 | ], 567 | [ChangeTypeEnum.Renamed]: [], 568 | [ChangeTypeEnum.TypeChanged]: [], 569 | [ChangeTypeEnum.Unmerged]: [], 570 | [ChangeTypeEnum.Unknown]: [] 571 | } 572 | const filePatterns = [ 573 | 'assets/scripts/**.sh', 574 | '!assets/scripts/configure-minikube-linux.sh' 575 | ] 576 | const filteredFiles = await getFilteredChangedFiles({ 577 | allDiffFiles, 578 | filePatterns 579 | }) 580 | expect(filteredFiles[ChangeTypeEnum.Modified]).toEqual([]) 581 | }) 582 | }) 583 | 584 | describe('warnUnsupportedRESTAPIInputs', () => { 585 | // Warns about unsupported inputs when using the REST API. 
586 | it('should warn about unsupported inputs when all inputs are supported', async () => { 587 | const inputs: Inputs = { 588 | files: '', 589 | filesSeparator: '\n', 590 | filesFromSourceFile: '', 591 | filesFromSourceFileSeparator: '\n', 592 | filesYaml: '', 593 | filesYamlFromSourceFile: '', 594 | filesYamlFromSourceFileSeparator: '\n', 595 | filesIgnore: '', 596 | filesIgnoreSeparator: '\n', 597 | filesIgnoreFromSourceFile: '', 598 | filesIgnoreFromSourceFileSeparator: '\n', 599 | filesIgnoreYaml: '', 600 | filesIgnoreYamlFromSourceFile: '', 601 | filesIgnoreYamlFromSourceFileSeparator: '\n', 602 | separator: ' ', 603 | includeAllOldNewRenamedFiles: false, 604 | oldNewSeparator: ',', 605 | oldNewFilesSeparator: ' ', 606 | sha: '1313123', 607 | baseSha: '', 608 | since: '', 609 | until: '', 610 | path: '.', 611 | quotepath: true, 612 | diffRelative: true, 613 | dirNames: false, 614 | dirNamesMaxDepth: undefined, 615 | dirNamesExcludeCurrentDir: false, 616 | dirNamesIncludeFiles: '', 617 | dirNamesIncludeFilesSeparator: '\n', 618 | dirNamesDeletedFilesIncludeOnlyDeletedDirs: false, 619 | json: false, 620 | escapeJson: true, 621 | safeOutput: true, 622 | fetchDepth: 50, 623 | fetchAdditionalSubmoduleHistory: false, 624 | sinceLastRemoteCommit: false, 625 | writeOutputFiles: false, 626 | outputDir: '.github/outputs', 627 | outputRenamedFilesAsDeletedAndAdded: false, 628 | recoverDeletedFiles: false, 629 | recoverDeletedFilesToDestination: '', 630 | recoverFiles: '', 631 | recoverFilesSeparator: '\n', 632 | recoverFilesIgnore: '', 633 | recoverFilesIgnoreSeparator: '\n', 634 | token: '${{ github.token }}', 635 | apiUrl: '${{ github.api_url }}', 636 | skipInitialFetch: false, 637 | failOnInitialDiffError: false, 638 | failOnSubmoduleDiffError: false, 639 | negationPatternsFirst: false, 640 | useRestApi: false, 641 | excludeSubmodules: false, 642 | fetchMissingHistoryMaxRetries: 20, 643 | usePosixPathSeparator: false, 644 | tagsPattern: '*', 645 | tagsIgnorePattern: 
'' 646 | } 647 | 648 | const coreWarningSpy = jest.spyOn(core, 'warning') 649 | 650 | await warnUnsupportedRESTAPIInputs({ 651 | inputs 652 | }) 653 | 654 | expect(coreWarningSpy).toHaveBeenCalledWith( 655 | 'Input "sha" is not supported when using GitHub\'s REST API to get changed files' 656 | ) 657 | 658 | expect(coreWarningSpy).toHaveBeenCalledTimes(1) 659 | }) 660 | }) 661 | describe('getPreviousGitTag', () => { 662 | // Function returns the second-latest tag and its SHA 663 | it('should return the second latest tag and its SHA when multiple tags are present', async () => { 664 | const result = await getPreviousGitTag({ 665 | cwd: '.', 666 | tagsPattern: '*', 667 | tagsIgnorePattern: '', 668 | currentBranch: 'v1.0.1' 669 | }) 670 | expect(result).toEqual({ 671 | tag: 'v1.0.0', 672 | sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 673 | }) 674 | }) 675 | 676 | // Tags are filtered by a specified pattern when 'tagsPattern' is provided 677 | it('should filter tags by the specified pattern', async () => { 678 | const result = await getPreviousGitTag({ 679 | cwd: '.', 680 | tagsPattern: 'v1.*', 681 | tagsIgnorePattern: '', 682 | currentBranch: 'v1.0.1' 683 | }) 684 | expect(result).toEqual({ 685 | tag: 'v1.0.0', 686 | sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 687 | }) 688 | }) 689 | 690 | // Tags are excluded by a specified ignore pattern when 'tagsIgnorePattern' is provided 691 | it('should exclude tags by the specified ignore pattern', async () => { 692 | const result = await getPreviousGitTag({ 693 | cwd: '.', 694 | tagsPattern: '*', 695 | tagsIgnorePattern: 'v0.*.*', 696 | currentBranch: 'v1.0.1' 697 | }) 698 | expect(result).toEqual({ 699 | tag: 'v1.0.0', 700 | sha: 'f0751de6af436d4e79016e2041cf6400e0833653' 701 | }) 702 | }) 703 | 704 | // No tags are available in the repository 705 | it('should return empty values when no tags are available in the repository', async () => { 706 | jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({ 707 | stdout: 
'', 708 | stderr: '', 709 | exitCode: 0 710 | }) 711 | const result = await getPreviousGitTag({ 712 | cwd: '.', 713 | tagsPattern: '*', 714 | tagsIgnorePattern: '', 715 | currentBranch: '' 716 | }) 717 | expect(result).toEqual({tag: '', sha: ''}) 718 | }) 719 | 720 | // Only one tag is available, making it impossible to find a previous tag 721 | it('should return empty values when only one tag is available', async () => { 722 | jest.spyOn(exec, 'getExecOutput').mockResolvedValueOnce({ 723 | stdout: 724 | 'v1.0.1|f0751de6af436d4e79016e2041cf6400e0833653|2021-01-01T00:00:00Z', 725 | stderr: '', 726 | exitCode: 0 727 | }) 728 | const result = await getPreviousGitTag({ 729 | cwd: '.', 730 | tagsPattern: '*', 731 | tagsIgnorePattern: '', 732 | currentBranch: 'v1.0.1' 733 | }) 734 | expect(result).toEqual({tag: '', sha: ''}) 735 | }) 736 | 737 | // Git commands fail and throw errors 738 | it('should throw an error when git commands fail', async () => { 739 | jest 740 | .spyOn(exec, 'getExecOutput') 741 | .mockRejectedValue(new Error('git command failed')) 742 | await expect( 743 | getPreviousGitTag({ 744 | cwd: '.', 745 | tagsPattern: '*', 746 | tagsIgnorePattern: '', 747 | currentBranch: 'v1.0.1' 748 | }) 749 | ).rejects.toThrow('git command failed') 750 | }) 751 | }) 752 | }) 753 | --------------------------------------------------------------------------------