├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── matrix.yml │ ├── npm-run-package.yml │ ├── release-tag.yml │ └── test.yml ├── .gitignore ├── .jest └── set-env-vars.js ├── .markdownlint.json ├── .prettierignore ├── .prettierrc.json ├── .vscode └── settings.json ├── CODEOWNERS ├── LICENSE ├── README.md ├── __tests__ └── main.test.ts ├── action.yml ├── dist ├── index.js ├── index.js.map ├── licenses.txt └── sourcemap-register.js ├── eslint.config.mjs ├── jest.config.js ├── main.ts ├── package-lock.json ├── package.json ├── pre-push.hook ├── src ├── __tests__ │ └── git.test.ts ├── ci_artifacts.ts ├── downloader.ts ├── git.ts └── spawn.ts └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | dist/** -diff linguist-generated=true 2 | * eol=lf 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Enable version updates for npm 4 | - package-ecosystem: 'npm' 5 | # Look for `package.json` and `lock` files in the `root` directory 6 | directory: '/' 7 | # Check the npm registry for updates every day (weekdays) 8 | schedule: 9 | interval: 'daily' 10 | # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/keeping-your-actions-up-to-date-with-dependabot#enabling-dependabot-version-updates-for-actions 11 | - package-ecosystem: "github-actions" # See documentation for possible values 12 | directory: "/" # Location of package manifests 13 | schedule: 14 | interval: "weekly" 15 | -------------------------------------------------------------------------------- /.github/workflows/matrix.yml: -------------------------------------------------------------------------------- 1 | name: 'test all artifact flavors' 2 | on: # since this is expensive, require a manual trigger 3 | workflow_dispatch: 4 | 5 | jobs: 6 | 
test: 7 | runs-on: windows-latest 8 | strategy: 9 | fail-fast: false 10 | matrix: 11 | flavor: ['minimal', 'makepkg-git', 'build-installers', 'full'] 12 | architecture: ['i686', 'x86_64'] 13 | exclude: 14 | - flavor: minimal 15 | architecture: i686 16 | - flavor: makepkg-git 17 | architecture: i686 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: run in-place 21 | uses: ./ 22 | with: 23 | flavor: ${{ matrix.flavor }} 24 | architecture: ${{ matrix.architecture }} 25 | - name: build installer 26 | if: matrix.flavor == 'build-installers' 27 | shell: bash 28 | run: | 29 | # clone build-extra 30 | git clone --depth 1 --single-branch -b main https://github.com/git-for-windows/build-extra && 31 | # build installer 32 | ./build-extra/installer/release.sh --output=$PWD/installer-${{ matrix.architecture }} 0-test 33 | - uses: actions/upload-artifact@v4 34 | if: matrix.flavor == 'build-installers' 35 | with: 36 | name: installer-${{ matrix.architecture }} 37 | path: installer-${{ matrix.architecture }} 38 | -------------------------------------------------------------------------------- /.github/workflows/npm-run-package.yml: -------------------------------------------------------------------------------- 1 | name: 'npm run package' 2 | # Main use case: repackage when Dependabot updates a dependency 3 | on: 4 | push: 5 | branches: 6 | - 'dependabot/**' 7 | workflow_dispatch: 8 | inputs: 9 | branch: 10 | description: 'Process this branch' 11 | required: false 12 | type: string 13 | 14 | jobs: 15 | npm-run-package-and-push: # make sure build/ci work properly 16 | runs-on: ubuntu-latest 17 | if: github.event_name == 'workflow_dispatch' || github.event.repository.owner.login == 'git-for-windows' 18 | environment: git-for-windows-ci-push 19 | steps: 20 | - uses: actions/checkout@v4 21 | with: 22 | repository: ${{ github.event.repository.full_name }} 23 | ref: ${{ inputs.branch }}${{ github.event.ref }} 24 | token: ${{ secrets.GIT_FOR_WINDOWS_CI_PUSH }} 25 | - run: npm ci 
26 | - run: npm run build 27 | - run: npm run lint 28 | - run: npm run format && git diff-files 29 | - run: npm run test 30 | - run: npm run package 31 | - name: check if commit & push is needed 32 | id: check 33 | run: | 34 | git add -u -- dist/ && 35 | git diff-index --cached --exit-code HEAD -- || 36 | echo "need-to-commit=yes" >>$GITHUB_OUTPUT # `::set-output` is deprecated/removed; use the $GITHUB_OUTPUT file 37 | - name: commit & push 38 | if: steps.check.outputs.need-to-commit == 'yes' 39 | run: | 40 | git config user.name "${{github.actor}}" && 41 | git config user.email "${{github.actor}}@users.noreply.github.com" && 42 | git commit -m 'npm run build && npm run package' -- dist/ && 43 | git update-index --refresh && 44 | git diff-files --exit-code && 45 | git diff-index --cached --exit-code HEAD -- && 46 | git push 47 | -------------------------------------------------------------------------------- /.github/workflows/release-tag.yml: -------------------------------------------------------------------------------- 1 | name: Auto-publish tags 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' # Push events to release tags 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build: 13 | name: Publish GitHub Release from tag 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | token: ${{ secrets.PUSH_RELEASE_TRAINS_PAT }} 19 | fetch-depth: 0 20 | - name: Import public GPG keys to verify the tag 21 | uses: actions/github-script@v7 22 | with: 23 | github-token: ${{secrets.GITHUB_TOKEN}} 24 | script: | 25 | const { execSync } = require('child_process') 26 | 27 | for (const { key_id, raw_key } of (await github.rest.users.listGpgKeysForUser({ 28 | username: 'dscho' 29 | })).data) { 30 | execSync(`gpg ${raw_key ? 
'--import' : `--recv-keys ${key_id}`}`, 31 | { input: raw_key, stdio: [null, 'inherit', 'inherit'] }) 32 | } 33 | - name: Check prerequisites 34 | id: prerequisites 35 | run: | 36 | die () { 37 | echo "::error::$*" >&2 38 | exit 1 39 | } 40 | 41 | tag_name=${GITHUB_REF#refs/tags/} 42 | test "x$GITHUB_REF" != "x$tag_name" || die "Not a tag: $GITHUB_REF" 43 | 44 | # `actions/checkout` only downloads the peeled tag (i.e. the commit) 45 | git fetch origin +$GITHUB_REF:$GITHUB_REF 46 | 47 | train="$(echo "$tag_name" | sed -n 's|^\(v[0-9][0-9]*\)[.0-9]*$|\1|p')" 48 | test -n "$train" || die "Unexpected tag name: $tag_name" 49 | echo "$train" >train 50 | 51 | if train_rev="$(git rev-parse --verify "refs/remotes/origin/$train" 2>/dev/null)" 52 | then 53 | test 0 -eq "$(git rev-list --count "$GITHUB_REF..$train_rev")" || 54 | die "Branch '$train' does not fast-forward to tag '$tag_name'" 55 | else 56 | test "$train.0.0" = "$tag_name" || die "Branch '$train' does not yet exist?!?" 57 | fi 58 | 59 | git tag --verify "$tag_name" || die "Tag does not have a valid signature: $tag_name" 60 | 61 | test "$(git rev-parse --verify refs/remotes/origin/main 2>&1)" = \ 62 | "$(git rev-parse --verify "$GITHUB_REF^0")" || 63 | die "The tag '$tag_name' does not point to the tip of 'main'" 64 | 65 | printf '%s' "$tag_name" >tag_name 66 | git cat-file tag "$GITHUB_REF" | sed -e '1,/^$/d' -e '/-----BEGIN PGP SIGNATURE-----/,$d' >body 67 | - name: Create Release 68 | if: github.repository_owner == 'git-for-windows' 69 | uses: actions/github-script@v7 70 | with: 71 | github-token: ${{secrets.GITHUB_TOKEN}} 72 | script: | 73 | const { readFileSync } = require('fs') 74 | 75 | const tag_name = readFileSync('tag_name').toString() 76 | await github.rest.repos.createRelease({ 77 | owner: context.repo.owner, 78 | repo: context.repo.repo, 79 | tag_name: tag_name, 80 | name: tag_name, 81 | draft: false, 82 | prerelease: false, 83 | body: readFileSync('body').toString() 84 | }) 85 | - name: Push to 
release train branch 86 | if: github.repository_owner == 'git-for-windows' 87 | run: | 88 | git push origin "$GITHUB_REF^0:refs/heads/$(cat train)" 89 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: 'build-test' 2 | on: # rebuild any PRs and main branch changes 3 | pull_request: 4 | push: 5 | tags: 6 | - 'v[0-9]*' 7 | branches: 8 | - main 9 | - 'v[0-9]*' 10 | 11 | jobs: 12 | build: # make sure build/ci work properly 13 | runs-on: windows-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - run: npm ci 17 | - run: npm run build 18 | - run: npm run lint 19 | - run: npm run format && git diff-files 20 | - run: npm run test 21 | - run: npm run package 22 | - name: Verify that `dist/index.js` is up to date 23 | shell: bash 24 | run: | 25 | # `ncc` produces output with mixed line endings 26 | test -z "$(git diff -aw HEAD -- ':(exclude)dist/index.js.map' | tee diff.txt)" || { 27 | echo 'Files changed after `npm run package`' 28 | cat diff.txt 29 | exit 1 30 | } 31 | - uses: actions/upload-artifact@v4 32 | if: failure() 33 | with: 34 | name: diff.txt 35 | path: diff.txt 36 | test: # make sure the action works on a clean machine without building 37 | runs-on: windows-latest 38 | steps: 39 | - uses: actions/checkout@v4 40 | - name: Run this Action in-place 41 | uses: ./ 42 | - name: Verify that the Bash of Git for Windows' SDK is used 43 | shell: bash 44 | run: | 45 | set -ex 46 | test ! 
-e .tmp 47 | echo "This is the MSYS2 pseudo root: $(cygpath -aw /)" 48 | test "gcc is /mingw64/bin/gcc" = "$(type gcc)" 49 | test "prove is /usr/bin/core_perl/prove" = "$(type prove)" 50 | prove -V 51 | printf '%s\n' \ 52 | "#include <stdio.h>" \ 53 | '' \ 54 | 'int main()' \ 55 | '{' \ 56 | ' printf("Hello, world!\n");' \ 57 | ' return 0;' \ 58 | '}' >hello-world.c 59 | gcc -o hello-world.exe hello-world.c 60 | hello="$(./hello-world.exe)" 61 | test 'Hello, world!' = "$hello" 62 | 63 | # Verify that the locale is set, enabling `grep -P` to work 64 | test 123 = "$(printf '1248\n123\n' | grep -P '2(?!4)')" 65 | 66 | # Verify that /dev/fd/ works 67 | test hello = "$(cat <(echo hello))" 68 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directory 2 | node_modules 3 | 4 | # Rest pulled from https://github.com/github/gitignore/blob/master/Node.gitignore 5 | # Logs 6 | logs 7 | *.log 8 | npm-debug.log* 9 | yarn-debug.log* 10 | yarn-error.log* 11 | lerna-debug.log* 12 | 13 | # Diagnostic reports (https://nodejs.org/api/report.html) 14 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 15 | 16 | # Runtime data 17 | pids 18 | *.pid 19 | *.seed 20 | *.pid.lock 21 | 22 | # Directory for instrumented libs generated by jscoverage/JSCover 23 | lib-cov 24 | 25 | # Coverage directory used by tools like istanbul 26 | coverage 27 | *.lcov 28 | 29 | # nyc test coverage 30 | .nyc_output 31 | 32 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 33 | .grunt 34 | 35 | # Bower dependency directory (https://bower.io/) 36 | bower_components 37 | 38 | # node-waf configuration 39 | .lock-wscript 40 | 41 | # Compiled binary addons (https://nodejs.org/api/addons.html) 42 | build/Release 43 | 44 | # Dependency directories 45 | jspm_packages/ 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | 
*.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Optional REPL history 60 | .node_repl_history 61 | 62 | # Output of 'npm pack' 63 | *.tgz 64 | 65 | # Yarn Integrity file 66 | .yarn-integrity 67 | 68 | # dotenv environment variables file 69 | .env 70 | .env.test 71 | 72 | # parcel-bundler cache (https://parceljs.org/) 73 | .cache 74 | 75 | # next.js build output 76 | .next 77 | 78 | # nuxt.js build output 79 | .nuxt 80 | 81 | # vuepress build output 82 | .vuepress/dist 83 | 84 | # Serverless directories 85 | .serverless/ 86 | 87 | # FuseBox cache 88 | .fusebox/ 89 | 90 | # DynamoDB Local files 91 | .dynamodb/ 92 | 93 | # OS metadata 94 | .DS_Store 95 | Thumbs.db 96 | 97 | # Ignore built ts files 98 | __tests__/runner/* 99 | lib/**/* 100 | 101 | # Ignore downloaded 64-bit minimal SDK 102 | /git-sdk-64-minimal/ 103 | -------------------------------------------------------------------------------- /.jest/set-env-vars.js: -------------------------------------------------------------------------------- 1 | /* 2 | This ensures that the PATH of the machine that the tests are running on, 3 | doesn't leak into our tests. 
4 | */ 5 | process.env.PATH = '' 6 | -------------------------------------------------------------------------------- /.markdownlint.json: -------------------------------------------------------------------------------- 1 | { 2 | "line-length": false, 3 | "commands-show-output": false 4 | } -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | lib/ 3 | node_modules/ -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 80, 3 | "tabWidth": 2, 4 | "useTabs": false, 5 | "semi": false, 6 | "singleQuote": true, 7 | "trailingComma": "none", 8 | "bracketSpacing": false, 9 | "arrowParens": "avoid" 10 | } 11 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "cSpell.ignoreRegExpList": [ 3 | "--noconfirm", 4 | "CHERE_INVOKING", 5 | "LC_CTYPE", 6 | "SDK's", 7 | "aarch64", 8 | "base-devel", 9 | "clangarm64", 10 | "makepkg-git", 11 | "mingw-w64-git", 12 | "mingw32", 13 | "mingw64", 14 | "pcre2", 15 | "vstfs://.*" 16 | ], 17 | "cSpell.words": [ 18 | "artifactsize", 19 | "autodrain", 20 | "Backoff", 21 | "bitness", 22 | "Cygwin", 23 | "libiconv", 24 | "libssp", 25 | "mqueue", 26 | "MSYS", 27 | "MSYSTEM", 28 | "Pacman", 29 | "unzipper", 30 | "vercel", 31 | "WINDIR", 32 | "winsymlinks" 33 | ], 34 | "git.ignoreLimitWarning": true 35 | } 36 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @dscho 2 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | 2 | The MIT License (MIT) 3 | 4 | Copyright (c) 2018 GitHub, Inc. and contributors 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Set up a Git for Windows SDK (or a subset thereof) 2 | 3 | Use this Action to initialize an environment to develop Git for Windows. 
4 | 5 | ## Getting Started 6 | 7 | ```yaml 8 | name: Build stuff in Git for Windows' SDK 9 | on: [push] 10 | jobs: 11 | build: 12 | runs-on: windows-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Setup Git for Windows' minimal SDK 16 | uses: git-for-windows/setup-git-for-windows-sdk@v1 17 | - name: Build 18 | shell: bash 19 | run: make 20 | ``` 21 | 22 | ## Input parameters 23 | 24 | ### Available flavors 25 | 26 | This Action supports several flavors (read: subsets) of the Git for Windows SDK that can be configured like this: 27 | 28 | ```yaml 29 | - uses: git-for-windows/setup-git-for-windows-sdk 30 | with: 31 | flavor: build-installers 32 | ``` 33 | 34 | The supported flavors are: 35 | 36 | - `minimal`: 37 | 38 | This is the most useful flavor to build Git for Windows' source code and run its own test suite. Only available for x86_64. 39 | 40 | - `makepkg-git`: 41 | 42 | This flavor allows packaging `mingw-w64-git`, the Pacman package. It is only available for x86_64 but can be used to "cross-compile" for i686. 43 | 44 | - `build-installers`: 45 | 46 | In addition to building `mingw-w64-git`, this flavor allows bundling Git for Windows' artifacts such as the installer and the Portable Git. 47 | 48 | - `full`: 49 | 50 | This is the "full" SDK, [as users would install it](https://gitforwindows.org/#download-sdk), with a pre-selected set of packages pre-installed. Additional packages can be installed via `pacman -S <package>`. 51 | 52 | ### CPU architecture support 53 | 54 | Git for Windows SDK comes in variants targeting `x86_64` (AKA "64-bit"), `i686` (AKA 32-bit) and `aarch64` (AKA arm64). The default is `x86_64` and can be overridden like this: 55 | 56 | ```yaml 57 | - uses: git-for-windows/setup-git-for-windows-sdk 58 | with: 59 | flavor: build-installers 60 | architecture: i686 61 | ``` 62 | 63 | Please note that only the `build-installers` and the `full` flavors are available for `i686`. 
64 | 65 | ### Verbosity 66 | 67 | By default, this Action prints a line whenever 250 items were extracted (this does not work for the `full` flavor, where this Action is silent by default). It can be overridden by setting the input parameter `verbose`; setting it to a number will show updates whenever that many items were extracted. Setting it to `false` will suppress progress updates. Setting it to `true` will print every extracted file (this also works for the `full` flavor). 68 | 69 | ### Caching 70 | 71 | To accelerate this Action, artifacts are cached once downloaded. This can be turned off by setting the input parameter `cache` to `false`. 72 | 73 | In practice, caching the `full` artifacts does not provide much of a speed-up. Instead, it slows it down by spending extra minutes on caching the artifact. Therefore, caching is disabled for the `full` artifacts by default, corresponding to `cache: auto`. 74 | 75 | ### Clean-up 76 | 77 | On self-hosted runners, the SDK files persist after the workflow run is done. To remove these files, set the input parameter `cleanup` to `true`. 78 | 79 | ## Developing _this_ Action 80 | 81 | > First, you'll need to have a reasonably modern version of `node` handy, such as Node 12. 
82 | 83 | Install the dependencies 84 | 85 | ```bash 86 | $ npm install 87 | ``` 88 | 89 | Build the Action and package it for distribution 90 | 91 | ```bash 92 | $ npm run build && npm run package 93 | ``` 94 | 95 | Run the tests :heavy_check_mark: 96 | 97 | ```bash 98 | $ npm test 99 | 100 | > setup-git-for-windows-sdk@0.0.0 test C:\Users\me\setup-git-for-windows-sdk 101 | > jest 102 | 103 | PASS __tests__/main.test.ts (28.869 s) 104 | √ skipping tests requiring network access (224 ms) 105 | 106 | console.log 107 | If you want to run tests that access the network, set: 108 | export RUN_NETWORK_TESTS=true 109 | 110 | at __tests__/main.test.ts:26:13 111 | 112 | PASS __tests__/downloader.test.ts (29.889 s) 113 | √ can obtain build ID (9 ms) 114 | 115 | Test Suites: 2 passed, 2 total 116 | Tests: 2 passed, 2 total 117 | Snapshots: 0 total 118 | Time: 31.11 s 119 | Ran all test suites. 120 | ... 121 | ``` 122 | -------------------------------------------------------------------------------- /__tests__/main.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint no-console: "off" */ 2 | import * as child_process from 'child_process' 3 | import * as path from 'path' 4 | import * as process from 'process' 5 | import {statSync} from 'fs' 6 | 7 | async function runAction( 8 | options?: child_process.SpawnOptionsWithoutStdio 9 | ): Promise<number | null> { 10 | return new Promise((resolve, reject) => { 11 | const nodeExePath = process.execPath 12 | const scriptPath = path.join(__dirname, '..', 'lib', 'main.js') 13 | 14 | const child = child_process 15 | .spawn(nodeExePath, [scriptPath], options) 16 | .on('error', reject) 17 | .on('close', resolve) 18 | 19 | child.stderr.on('data', data => console.log(`${data}`)) 20 | child.stdout.on('data', data => console.log(`${data}`)) 21 | }) 22 | } 23 | 24 | if (process.env.RUN_NETWORK_TESTS !== 'true') { 25 | test('skipping tests requiring network access', async () => { 26 | console.log( 27 | `If you 
want to run tests that access the network, set:\nexport RUN_NETWORK_TESTS=true` 28 | ) 29 | }) 30 | } else { 31 | // shows how the runner will run a javascript action with env / stdout protocol 32 | test('cannot download 32-bit minimal SDK', async () => { 33 | expect( 34 | await runAction({ 35 | env: { 36 | INPUT_FLAVOR: 'minimal', 37 | INPUT_ARCHITECTURE: 'i686' 38 | } 39 | }) 40 | ).toEqual(1) 41 | }) 42 | 43 | jest.setTimeout(5 * 60 * 1000) // this can easily take a minute or five 44 | 45 | test('extract the 64-bit minimal SDK', async () => { 46 | const outputDirectory = `${__dirname}/../git-sdk-64-minimal` 47 | expect( 48 | await runAction({ 49 | env: { 50 | INPUT_FLAVOR: 'minimal', 51 | INPUT_ARCHITECTURE: 'x86_64', 52 | INPUT_PATH: outputDirectory, 53 | INPUT_VERBOSE: '250', 54 | INPUT_CACHE: 'true' 55 | } 56 | }) 57 | ).toEqual(0) 58 | expect( 59 | statSync.bind(null, `${outputDirectory}/mingw64/bin/gcc.exe`) 60 | ).not.toThrow() 61 | 62 | const hello = child_process.spawnSync( 63 | 'usr\\bin\\bash.exe', 64 | ['-lc', 'cat <(echo hello)'], 65 | { 66 | cwd: outputDirectory 67 | } 68 | ) 69 | expect(hello.stderr.toString()).toBe('') 70 | expect(hello.stdout.toString()).toBe('hello\n') 71 | }) 72 | } 73 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: 'Setup Git for Windows SDK' 2 | description: 'Set up an environment to develop Git for Windows' 3 | author: 'Johannes Schindelin' 4 | branding: 5 | icon: fast-forward 6 | color: blue 7 | inputs: 8 | flavor: 9 | required: false 10 | description: 'The subset (if any) of the SDK: minimal, makepkg-git, build-installers, or full' 11 | default: 'minimal' 12 | architecture: 13 | required: false 14 | description: 'The architecture of the SDK: x86_64, i686 or aarch64. Note that "aarch64" only supports the "full" flavor for now.' 
15 | default: 'x86_64' 16 | msys: 17 | required: false 18 | description: 'Whether to start in MSYS mode (defaults to false)' 19 | default: 'false' 20 | path: 21 | required: false 22 | description: 'Where to write the SDK files' 23 | default: '' 24 | cleanup: 25 | required: false 26 | description: 'Whether to clean up SDK files. This is only needed on self-hosted runners that are reused for multiple jobs.' 27 | default: 'false' 28 | verbose: 29 | required: false 30 | description: 'Whether to log files as they are extracted' 31 | default: '250' 32 | cache: 33 | required: false 34 | description: 'Use @actions/cache to accelerate this Action' 35 | default: 'auto' 36 | github-token: 37 | description: > 38 | Personal access token (PAT) used to call into GitHub's REST API. 39 | We recommend using a service account with the least permissions necessary. 40 | Also when generating a new PAT, select the least scopes necessary. 41 | [Learn more about creating and using encrypted secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets) 42 | default: ${{ github.token }} 43 | runs: 44 | using: 'node20' 45 | main: 'dist/index.js' 46 | post: 'dist/index.js' 47 | -------------------------------------------------------------------------------- /dist/licenses.txt: -------------------------------------------------------------------------------- 1 | @actions/core 2 | MIT 3 | The MIT License (MIT) 4 | 5 | Copyright 2019 GitHub 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 8 | 9 | The above copyright notice and this permission notice 
shall be included in all copies or substantial portions of the Software. 10 | 11 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 12 | -------------------------------------------------------------------------------- /dist/sourcemap-register.js: -------------------------------------------------------------------------------- 1 | (()=>{var e={296:e=>{var r=Object.prototype.toString;var n=typeof Buffer!=="undefined"&&typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},599:(e,r,n)=>{e=n.nmd(e);var t=n(927).SourceMapConsumer;var o=n(928);var i;try{i=n(896);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(296);function dynamicRequire(e,r){return e.require(r)}var u=false;var 
s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var d=[];var h=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function globalProcessVersion(){if(typeof process==="object"&&process!==null){return process.version}else{return""}}function globalProcessStderr(){if(typeof process==="object"&&process!==null){return process.stderr}}function globalProcessExit(e){if(typeof process==="object"&&process!==null&&typeof process.exit==="function"){return process.exit(e)}}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var 
a=i.test(globalProcessVersion())?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);var n=globalProcessStderr();if(n&&n._handle&&n._handle.setBlocking){n._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);globalProcessExit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=d.slice(0);var 
_=h.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){d.length=0}d.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){h.length=0}h.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){d.length=0;h.length=0;d=S.slice(0);h=_.slice(0);v=handlerExec(h);m=handlerExec(d)}},517:(e,r,n)=>{var t=n(297);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(158);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var 
c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},24:(e,r,n)=>{var t=n(297);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.P=MappingList},299:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(297);var i=n(197);var a=n(517).C;var u=n(818);var s=n(299).g;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return 
n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var 
r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: "+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return 
o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){h.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(h,o.compareByOriginalPositions);this.__originalMappings=h};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function 
SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(818);var o=n(297);var i=n(517).C;var a=n(24).P;function 
SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function 
SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. 
If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var d=0,h=g.length;d0){if(!o.compareByGeneratedPositionsInflated(c,g[d-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.x=SourceMapGenerator},565:(e,r,n)=>{var t;var o=n(163).x;var i=n(297);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function 
SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return 
urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},927:(e,r,n)=>{n(163).x;r.SourceMapConsumer=n(684).SourceMapConsumer;n(565)},896:e=>{"use strict";e.exports=require("fs")},928:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};__webpack_require__(599).install();module.exports=n})(); -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import typescriptEslint from "@typescript-eslint/eslint-plugin"; 2 | 
import stylistic from "@stylistic/eslint-plugin"; 3 | import antiTrojanSource from "eslint-plugin-anti-trojan-source"; 4 | import jest from "eslint-plugin-jest"; 5 | import globals from "globals"; 6 | import tsParser from "@typescript-eslint/parser"; 7 | import tseslint from 'typescript-eslint'; 8 | import github from 'eslint-plugin-github'; 9 | import importPlugin from 'eslint-plugin-import'; 10 | 11 | export default tseslint.config([ 12 | github.getFlatConfigs().recommended, 13 | ...github.getFlatConfigs().typescript, 14 | importPlugin.flatConfigs.typescript, 15 | { 16 | ignores: ["**/dist/", "**/lib/", "**/node_modules/", "**/jest.config.js"], 17 | plugins: { 18 | "@typescript-eslint": typescriptEslint, 19 | "@stylistic": stylistic, 20 | "anti-trojan-source": antiTrojanSource, 21 | jest, 22 | }, 23 | languageOptions: { 24 | globals: { 25 | ...globals.jest, 26 | ...jest.environments.globals.globals, 27 | ...globals.node, 28 | }, 29 | parser: tsParser, 30 | ecmaVersion: 9, 31 | sourceType: "module", 32 | parserOptions: { 33 | project: "./tsconfig.json", 34 | }, 35 | }, 36 | rules: { 37 | "@typescript-eslint/array-type": "error", 38 | "@typescript-eslint/await-thenable": "error", 39 | "@typescript-eslint/ban-ts-comment": "error", 40 | "@typescript-eslint/consistent-type-assertions": "error", 41 | "@typescript-eslint/explicit-function-return-type": ["error", { 42 | allowExpressions: true, 43 | }], 44 | "@typescript-eslint/explicit-member-accessibility": ["error", { 45 | accessibility: "no-public", 46 | }], 47 | "@stylistic/func-call-spacing": ["error", "never"], 48 | "@typescript-eslint/no-array-constructor": "error", 49 | "@typescript-eslint/no-empty-interface": "error", 50 | "@typescript-eslint/no-explicit-any": "error", 51 | "@typescript-eslint/no-extraneous-class": "error", 52 | "@typescript-eslint/no-for-in-array": "error", 53 | "@typescript-eslint/no-inferrable-types": "error", 54 | "@typescript-eslint/no-misused-new": "error", 55 | 
"@typescript-eslint/no-namespace": "error", 56 | "@typescript-eslint/no-non-null-assertion": "warn", 57 | "@typescript-eslint/no-require-imports": "error", 58 | "@typescript-eslint/no-unnecessary-qualifier": "error", 59 | "@typescript-eslint/no-unnecessary-type-assertion": "error", 60 | "@typescript-eslint/no-unused-vars": "off", 61 | "@typescript-eslint/no-useless-constructor": "error", 62 | "@typescript-eslint/no-var-requires": "error", 63 | "@typescript-eslint/prefer-for-of": "warn", 64 | "@typescript-eslint/prefer-function-type": "warn", 65 | "@typescript-eslint/prefer-includes": "error", 66 | "@typescript-eslint/prefer-string-starts-ends-with": "error", 67 | "@typescript-eslint/promise-function-async": "error", 68 | "@typescript-eslint/require-array-sort-compare": "error", 69 | "@typescript-eslint/restrict-plus-operands": "error", 70 | "@stylistic/semi": ["error", "never"], 71 | "@stylistic/type-annotation-spacing": "error", 72 | "@typescript-eslint/unbound-method": "error", 73 | "anti-trojan-source/no-bidi": "error", 74 | camelcase: "off", 75 | "eslint-comments/no-use": "off", 76 | "github/filenames-match-regex": ["error", "^[a-z_]+(\\.test|\\.d)?$"], 77 | "i18n-text/no-en": "off", 78 | "importPlugin/no-namespace": "off", 79 | "no-unused-vars": "off", 80 | semi: "off", 81 | }, 82 | } 83 | ]); 84 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | clearMocks: true, 3 | moduleFileExtensions: ['js', 'ts'], 4 | setupFiles: ["/.jest/set-env-vars.js"], 5 | testEnvironment: 'node', 6 | testMatch: ['**/*.test.ts'], 7 | testRunner: 'jest-circus/runner', 8 | transform: { 9 | '^.+\\.ts$': 'ts-jest' 10 | }, 11 | verbose: true 12 | } 13 | -------------------------------------------------------------------------------- /main.ts: -------------------------------------------------------------------------------- 1 | import 
* as core from '@actions/core' 2 | import {mkdirp} from './src/downloader' 3 | import {restoreCache, saveCache} from '@actions/cache' 4 | import process from 'process' 5 | import {spawnSync} from 'child_process' 6 | import { 7 |   getArtifactMetadata, 8 |   getViaGit, 9 |   gitForWindowsUsrBinPath 10 | } from './src/git' 11 | import {getViaCIArtifacts} from './src/ci_artifacts' 12 | import * as fs from 'fs' 13 | 14 | const flavor = core.getInput('flavor') 15 | const architecture = core.getInput('architecture') 16 | 17 | /** 18 |  * Some Azure VM types have a temporary disk which is local to the VM and therefore provides 19 |  * _much_ faster disk IO than the OS Disk (or any other attached disk). 20 |  * 21 |  * Hosted GitHub Actions runners also leverage this disk and do their work in D:/a/_work, so let's 22 |  * use it too if we can. It leads to a ~25% speed increase when doing heavy IO operations. 23 |  * 24 |  * https://learn.microsoft.com/en-us/azure/virtual-machines/managed-disks-overview#temporary-disk 25 |  */ 26 | function getDriveLetterPrefix(): string { 27 |   if (fs.existsSync('D:/')) { 28 |     core.info('Found a fast, temporary disk on this VM (D:/). Will use that.') 29 |     return 'D:/' 30 |   } 31 | 32 |   return 'C:/' 33 | } 34 | 35 | async function run(): Promise<void> { 36 |   try { 37 |     if (process.platform !== 'win32') { 38 |       core.warning( 39 |         `Skipping this Action because it only works on Windows, not on ${process.platform}` 40 |       ) 41 |       return 42 |     } 43 | 44 |     const githubToken = core.getInput('github-token') 45 |     const verbose = core.getInput('verbose') 46 |     const msysMode = core.getInput('msys') === 'true' 47 | 48 |     const {artifactName, download, id} = 49 |       flavor === 'minimal' 50 |         ? 
await getViaCIArtifacts(architecture, githubToken) 51 | : await getViaGit(flavor, architecture, githubToken) 52 | const outputDirectory = 53 | core.getInput('path') || `${getDriveLetterPrefix()}${artifactName}` 54 | core.setOutput('result', outputDirectory) 55 | 56 | let useCache: boolean 57 | switch (core.getInput('cache')) { 58 | case 'true': 59 | useCache = true 60 | break 61 | case 'auto': 62 | useCache = !['full', 'minimal'].includes(flavor) 63 | break 64 | default: 65 | useCache = false 66 | } 67 | 68 | let needToDownload = true 69 | try { 70 | if (useCache && (await restoreCache([outputDirectory], id))) { 71 | core.info(`Cached ${id} was successfully restored`) 72 | needToDownload = false 73 | } 74 | } catch (e) { 75 | core.warning(`Cannot use @actions/cache (${e})`) 76 | useCache = false 77 | } 78 | 79 | if (needToDownload) { 80 | core.info(`Downloading ${artifactName}`) 81 | await download( 82 | outputDirectory, 83 | verbose.match(/^\d+$/) ? parseInt(verbose) : verbose === 'true' 84 | ) 85 | 86 | try { 87 | if (useCache && !(await saveCache([outputDirectory], id))) { 88 | core.warning(`Failed to cache ${id}`) 89 | } 90 | } catch (e) { 91 | core.warning( 92 | `Failed to cache ${id}: ${e instanceof Error ? e.message : e}` 93 | ) 94 | } 95 | } 96 | 97 | const mingw = { 98 | i686: 'MINGW32', 99 | x86_64: 'MINGW64', 100 | aarch64: 'CLANGARM64' 101 | }[architecture] 102 | 103 | if (mingw === undefined) { 104 | core.setFailed(`Invalid architecture ${architecture} specified`) 105 | return 106 | } 107 | 108 | const msystem = msysMode ? 'MSYS' : mingw 109 | 110 | const binPaths = [ 111 | // Set up PATH so that Git for Windows' SDK's `bash.exe`, `prove` and `gcc` are found 112 | '/usr/bin/core_perl', 113 | '/usr/bin', 114 | `/${mingw.toLocaleLowerCase()}/bin` 115 | ] 116 | 117 | for (const binPath of msysMode ? 
binPaths.reverse() : binPaths) { 118 | core.addPath(`${outputDirectory}${binPath}`) 119 | } 120 | 121 | core.exportVariable('MSYSTEM', msystem) 122 | if ( 123 | !('LANG' in process.env) && 124 | !('LC_ALL' in process.env) && 125 | !('LC_CTYPE' in process.env) 126 | ) { 127 | core.exportVariable('LC_CTYPE', 'C.UTF-8') 128 | } 129 | 130 | // ensure that /dev/fd/*, /dev/mqueue and friends exist 131 | for (const path of ['/dev/mqueue', '/dev/shm']) { 132 | mkdirp(`${outputDirectory}${path}`) 133 | } 134 | 135 | const ln = (linkPath: string, target: string): void => { 136 | const child = spawnSync( 137 | flavor === 'minimal' ? 'ln.exe' : 'usr\\bin\\ln.exe', 138 | ['-s', target, linkPath], 139 | { 140 | cwd: outputDirectory, 141 | env: { 142 | MSYS: 'winsymlinks:sys' 143 | } 144 | } 145 | ) 146 | if (child.error) throw child.error 147 | } 148 | for (const [linkPath, target] of Object.entries({ 149 | fd: 'fd', 150 | stdin: 'fd/0', 151 | stdout: 'fd/1', 152 | stderr: 'fd/2' 153 | })) { 154 | ln(`/dev/${linkPath}`, `/proc/self/${target}`) 155 | } 156 | } catch (error) { 157 | core.setFailed(error instanceof Error ? error.message : `${error}`) 158 | } 159 | } 160 | 161 | function cleanup(): void { 162 | if (core.getInput('cleanup') !== 'true') { 163 | core.info( 164 | `Won't clean up SDK files as the 'cleanup' input was not provided or doesn't equal 'true'.` 165 | ) 166 | return 167 | } 168 | 169 | const outputDirectory = 170 | core.getInput('path') || 171 | `${getDriveLetterPrefix()}${ 172 | getArtifactMetadata(flavor, architecture).artifactName 173 | }` 174 | 175 | /** 176 | * Shelling out to `rm -rf` is more than twice as fast as Node's `fs.rmSync` method. 177 | * Let's use it if it's available, and otherwise fall back to `fs.rmSync`. 178 | */ 179 | const cleanupMethod = fs.existsSync(`${gitForWindowsUsrBinPath}/bash.exe`) 180 | ? 
'rm -rf' 181 |     : 'node' 182 | 183 |   core.info( 184 |     `Cleaning up ${outputDirectory} using the "${cleanupMethod}" method...` 185 |   ) 186 | 187 |   if (cleanupMethod === 'rm -rf') { 188 |     const child = spawnSync( 189 |       `${gitForWindowsUsrBinPath}/bash.exe`, 190 |       ['-c', `rm -rf "${outputDirectory}"`], 191 |       { 192 |         encoding: 'utf-8', 193 |         env: {PATH: '/usr/bin'} 194 |       } 195 |     ) 196 | 197 |     if (child.error) throw child.error 198 |     if (child.stderr) core.error(child.stderr) 199 |   } else { 200 |     fs.rmSync(outputDirectory, {recursive: true, force: true}) 201 |   } 202 | 203 |   core.info(`Finished cleaning up ${outputDirectory}.`) 204 | } 205 | 206 | /** 207 |  * Indicates whether the POST action is running 208 |  */ 209 | export const isPost = !!core.getState('isPost') 210 | 211 | if (!isPost) { 212 |   run() 213 |   /* 214 |    * Publish a variable so that when the POST action runs, it can determine it should run the cleanup logic. 215 |    * This is necessary since we don't have a separate entry point. 216 |    * Inspired by https://github.com/actions/checkout/blob/v3.1.0/src/state-helper.ts#L56-L60 217 |    */ 218 |   core.saveState('isPost', 'true') 219 | } else { 220 |   // If the POST action is running, we cleanup our artifacts 221 |   cleanup() 222 | } 223 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 |   "name": "setup-git-for-windows-sdk", 3 |   "version": "1.11.0", 4 |   "private": true, 5 |   "description": "Set up an environment to develop Git for Windows", 6 |   "main": "lib/main.js", 7 |   "scripts": { 8 |     "build": "tsc", 9 |     "format": "prettier --write **/*.ts", 10 |     "format-check": "prettier --check **/*.ts", 11 |     "lint": "eslint **/*.ts", 12 |     "package": "ncc build --source-map", 13 |     "test": "jest", 14 |     "all": "npm run build && npm run format && npm run lint && npm run package && npm test" 15 |   }, 16 |   "repository": { 17 |     
"type": "git", 18 | "url": "git+https://github.com/git-for-windows/setup-git-for-windows-sdk.git" 19 | }, 20 | "keywords": [ 21 | "actions", 22 | "node", 23 | "setup", 24 | "git", 25 | "git-for-windows" 26 | ], 27 | "author": "", 28 | "license": "MIT", 29 | "dependencies": { 30 | "@actions/cache": "^4.0.3", 31 | "@actions/core": "^1.11.1", 32 | "@adobe/node-fetch-retry": "^2.2.0", 33 | "@octokit/rest": "^20.1.2", 34 | "node-fetch": "^2.7.0", 35 | "unzipper": "^0.12.3" 36 | }, 37 | "devDependencies": { 38 | "@eslint/eslintrc": "^3.2.0", 39 | "@eslint/js": "^9.27.0", 40 | "@stylistic/eslint-plugin": "^4.4.0", 41 | "@types/jest": "^29.5.14", 42 | "@types/node": "^22.15.29", 43 | "@types/unzipper": "^0.10.11", 44 | "@typescript-eslint/eslint-plugin": "^8.17.0", 45 | "@typescript-eslint/parser": "^8.33.1", 46 | "@vercel/ncc": "^0.38.3", 47 | "eslint": "^9.28.0", 48 | "eslint-plugin-anti-trojan-source": "^1.1.1", 49 | "eslint-plugin-github": "^5.1.5", 50 | "eslint-plugin-jest": "^28.12.0", 51 | "eslint-plugin-prettier": "^5.4.1", 52 | "globals": "^16.2.0", 53 | "jest": "^29.7.0", 54 | "jest-circus": "^29.7.0", 55 | "js-yaml": "^4.1.0", 56 | "prettier": "3.5.3", 57 | "ts-jest": "^29.3.4", 58 | "typescript": "^5.8.3", 59 | "typescript-eslint": "^8.33.1" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /pre-push.hook: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # This pre-push hook ensures that we only push branches with up to date files in `dist/`. 4 | # 5 | # While at it, it also ensures that it is itself up to date with `pre-push.hook` 6 | 7 | die () { 8 | echo "$*" >&2 9 | exit 1 10 | } 11 | 12 | LF=' 13 | ' 14 | 15 | git diff --no-index --quiet pre-push.hook "$(git rev-parse --git-path hooks/pre-push)" || 16 | die 'The `pre-push` hook is not up to date with `pre-push.hook`. 
# Verify that any tagged version is reflected in its `package.json`
for tag in $(git for-each-ref --format='%(refname:short)' --points-at=HEAD 'refs/tags/v[0-9]*')
do
	# Refuse to push tags whose signature cannot be verified
	out="$(git tag --verify $tag 2>&1)" ||
	die "$out$LF${LF}Tag $tag is not signed/signature cannot be verified"

	test 0 = $(git rev-list --count ...main) || die 'HEAD is not up to date with `main`'
	test 0 = $(git rev-list --count ...origin/main) ||
	echo 'warning: HEAD is not up to date with `origin/main`' >&2

	# The expected `version` line in package.json, with the dots escaped
	regex="^  \"version\": \"$(echo "${tag#v}" | sed 's/\./\\./g')\",\$"
	# sed script: rewrite the `version` field on the line following the `name` field
	substitute="/\"name\": \"setup-git-for-windows-sdk\"/{N;s/\\(\"version\": \"\\).*\",\$/\\1${tag#v}\",/}"
	git grep -q "$regex" refs/tags/$tag -- package.json || {
		# Adjust both files (via a temporary file, as `sed -i` is not
		# portable), then abort the push so the fix can be committed.
		# Note: `sed` must read the *existing* file and write the `.new`
		# one; reading the nonexistent `.new` file would always fail.
		sed "$substitute" package.json >package.json.new &&
		mv -f package.json.new package.json
		sed "$substitute" package-lock.json >package-lock.json.new &&
		mv -f package-lock.json.new package-lock.json
		die "package.json did not reflect $tag; It was adjusted."
	}

	git grep -q "$regex" refs/tags/$tag -- package-lock.json || {
		sed "$substitute" package-lock.json >package-lock.json.new &&
		mv -f package-lock.json.new package-lock.json
		die "package-lock.json did not reflect $tag; It was adjusted."
	}
done
git diff --quiet dist/ 59 | then 60 | echo "Committing dist/ because it was not up to date" >&2 61 | git commit -sm "npm run build && npm run package" dist/ 62 | fi 63 | fi -------------------------------------------------------------------------------- /src/__tests__/git.test.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs' 2 | import * as git from '../git' 3 | import * as spawn from '../spawn' 4 | import * as core from '@actions/core' 5 | 6 | // We want to mock only the rmSync method on the fs module, and leave everything 7 | // else untouched. 8 | jest.mock('fs', () => ({ 9 | ...jest.requireActual('fs'), 10 | rmSync: jest.fn() 11 | })) 12 | 13 | describe('git', () => { 14 | // We don't want to _actually_ spawn external commands, so we mock the function 15 | let spawnSpy: jest.SpyInstance 16 | // Capture the startGroup calls 17 | let coreSpy: jest.SpyInstance 18 | // The script calls fs.rmSync, so let's mock it and verify it was called 19 | let rmSyncSpy: jest.SpyInstance 20 | 21 | beforeEach(() => { 22 | coreSpy = jest.spyOn(core, 'startGroup') 23 | spawnSpy = jest.spyOn(spawn, 'spawnAndWaitForExitCode').mockResolvedValue({ 24 | // 0 is the exit code for success 25 | exitCode: 0 26 | }) 27 | // We don't want to _actually_ clone the repo, so we mock the function 28 | jest.spyOn(git, 'clone').mockResolvedValue() 29 | rmSyncSpy = fs.rmSync as jest.Mocked['rmSync'] 30 | }) 31 | 32 | test('getViaGit build-installers x86_64', async () => { 33 | const flavor = 'build-installers' 34 | const architecture = 'x86_64' 35 | const outputDirectory = 'outputDirectory' 36 | const {artifactName, download} = await git.getViaGit(flavor, architecture) 37 | 38 | expect(artifactName).toEqual('git-sdk-64-build-installers') 39 | 40 | await download(outputDirectory, true) 41 | 42 | expect(coreSpy).toHaveBeenCalledWith(`Creating ${flavor} artifact`) 43 | expect(spawnSpy).toHaveBeenCalledWith( 44 | 
expect.stringContaining('/bash.exe'), 45 | expect.arrayContaining([ 46 | '.tmp/build-extra/please.sh', 47 | 'create-sdk-artifact', 48 | `--architecture=${architecture}`, 49 | `--out=${outputDirectory}` 50 | ]), 51 | expect.objectContaining({ 52 | env: expect.objectContaining({ 53 | // We want to ensure that the correct /bin folders are in the PATH, 54 | // so that please.sh can find git.exe 55 | // https://github.com/git-for-windows/setup-git-for-windows-sdk/issues/951 56 | PATH: 57 | expect.stringContaining('/clangarm64/bin') && 58 | expect.stringContaining('/mingw64/bin') 59 | }) 60 | }) 61 | ) 62 | expect(rmSyncSpy).toHaveBeenCalledWith('.tmp', {recursive: true}) 63 | }) 64 | 65 | test('getViaGit full x86_64', async () => { 66 | const flavor = 'full' 67 | const architecture = 'x86_64' 68 | const outputDirectory = 'outputDirectory' 69 | const {artifactName, download} = await git.getViaGit(flavor, architecture) 70 | 71 | expect(artifactName).toEqual('git-sdk-64-full') 72 | 73 | await download(outputDirectory, true) 74 | 75 | expect(coreSpy).toHaveBeenCalledWith(`Checking out git-sdk-64`) 76 | expect(spawnSpy).toHaveBeenCalledWith( 77 | expect.stringContaining('/git.exe'), 78 | expect.arrayContaining([ 79 | '--git-dir=.tmp', 80 | 'worktree', 81 | 'add', 82 | outputDirectory 83 | ]), 84 | expect.any(Object) 85 | ) 86 | expect(rmSyncSpy).toHaveBeenCalledWith('.tmp', {recursive: true}) 87 | }) 88 | }) 89 | -------------------------------------------------------------------------------- /src/ci_artifacts.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {Octokit} from '@octokit/rest' 3 | import {getArtifactMetadata} from './git' 4 | import {spawn} from 'child_process' 5 | import * as fs from 'fs' 6 | 7 | async function sleep(milliseconds: number): Promise { 8 | return new Promise((resolve, _reject) => { 9 | setTimeout(resolve, milliseconds) 10 | }) 11 | } 12 | 13 | export async 
function getViaCIArtifacts( 14 | architecture: string, 15 | githubToken?: string 16 | ): Promise<{ 17 | artifactName: string 18 | id: string 19 | download: ( 20 | outputDirectory: string, 21 | verbose?: number | boolean 22 | ) => Promise 23 | }> { 24 | const owner = 'git-for-windows' 25 | 26 | const {repo, artifactName} = getArtifactMetadata('minimal', architecture) 27 | 28 | const octokit = githubToken ? new Octokit({auth: githubToken}) : new Octokit() 29 | 30 | const { 31 | name, 32 | updated_at: updatedAt, 33 | browser_download_url: url 34 | } = await (async () => { 35 | let error: Error | undefined 36 | for (const seconds of [0, 5, 10, 15, 20, 40]) { 37 | if (seconds) await sleep(seconds) 38 | 39 | const ciArtifactsResponse = await octokit.repos.getReleaseByTag({ 40 | owner, 41 | repo, 42 | tag: 'ci-artifacts' 43 | }) 44 | 45 | if (ciArtifactsResponse.status !== 200) { 46 | error = new Error( 47 | `Failed to get ci-artifacts release from the ${owner}/${repo} repo: ${ciArtifactsResponse.status}` 48 | ) 49 | continue 50 | } 51 | 52 | core.info( 53 | `Found ci-artifacts release: ${ciArtifactsResponse.data.html_url}` 54 | ) 55 | const tarGzArtifact = ciArtifactsResponse.data.assets.find(asset => 56 | asset.name.endsWith('.tar.gz') 57 | ) 58 | 59 | if (!tarGzArtifact) { 60 | error = new Error( 61 | `Failed to find a .tar.gz artifact in the ci-artifacts release of the ${owner}/${repo} repo` 62 | ) 63 | continue 64 | } 65 | 66 | return tarGzArtifact 67 | } 68 | throw error 69 | })() 70 | core.info(`Found ${name} at ${url}`) 71 | 72 | return { 73 | artifactName, 74 | id: `ci-artifacts-${updatedAt}`, 75 | download: async ( 76 | outputDirectory: string, 77 | verbose: number | boolean = false 78 | ): Promise => { 79 | return new Promise((resolve, reject) => { 80 | const curl = spawn( 81 | `${process.env.SYSTEMROOT}/system32/curl.exe`, 82 | [ 83 | ...(githubToken 84 | ? 
['-H', `Authorization: Bearer ${githubToken}`] 85 | : []), 86 | '-H', 87 | 'Accept: application/octet-stream', 88 | `-${verbose === true ? '' : 's'}fL`, 89 | url 90 | ], 91 | { 92 | stdio: ['ignore', 'pipe', process.stderr] 93 | } 94 | ) 95 | curl.on('error', error => reject(error)) 96 | 97 | fs.mkdirSync(outputDirectory, {recursive: true}) 98 | 99 | const tar = spawn( 100 | `${process.env.SYSTEMROOT}/system32/tar.exe`, 101 | ['-C', outputDirectory, `-x${verbose === true ? 'v' : ''}f`, '-'], 102 | {stdio: ['pipe', process.stdout, process.stderr]} 103 | ) 104 | tar.on('error', error => reject(error)) 105 | tar.on('close', code => { 106 | if (code === 0) resolve() 107 | else reject(new Error(`tar exited with code ${code}`)) 108 | }) 109 | 110 | curl.stdout.pipe(tar.stdin) 111 | }) 112 | } 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /src/downloader.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | 3 | export function mkdirp(directoryPath: string): void { 4 | try { 5 | const stat = fs.statSync(directoryPath) 6 | if (stat.isDirectory()) { 7 | return 8 | } 9 | throw new Error(`${directoryPath} exists, but is not a directory`) 10 | } catch (e) { 11 | if (!(e instanceof Object) || (e as {code: string}).code !== 'ENOENT') { 12 | throw e 13 | } 14 | } 15 | fs.mkdirSync(directoryPath, {recursive: true}) 16 | } 17 | -------------------------------------------------------------------------------- /src/git.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@actions/core' 2 | import {spawnAndWaitForExitCode, SpawnReturnArgs} from './spawn' 3 | import {Octokit} from '@octokit/rest' 4 | import {delimiter} from 'path' 5 | import * as fs from 'fs' 6 | 7 | // If present, do prefer the build agent's copy of Git 8 | const externalsGitDir = `${process.env.AGENT_HOMEDIRECTORY}/externals/git` 9 | const 
gitForWindowsRoot = 'C:/Program Files/Git' 10 | const gitRoot = fs.existsSync(externalsGitDir) 11 | ? externalsGitDir 12 | : gitForWindowsRoot 13 | 14 | const gitForWindowsBinPaths = ['clangarm64', 'mingw64', 'mingw32', 'usr'].map( 15 | p => `${gitRoot}/${p}/bin` 16 | ) 17 | export const gitForWindowsUsrBinPath = 18 | gitForWindowsBinPaths[gitForWindowsBinPaths.length - 1] 19 | const gitExePath = `${gitRoot}/cmd/git.exe` 20 | 21 | /* 22 | * It looks a bit ridiculous to use 56 workers on a build agent that has only 23 | * a two-core CPU, yet manual testing revealed that 64 workers would be _even 24 | * better_. But at 92 workers, resources are starved so much that the checkout 25 | * is not only much faster, but also fails. 26 | * 27 | * Let's stick with 56, which should avoid running out of resources, but still 28 | * is much faster than, say, using only 2 workers. 29 | */ 30 | const GIT_CONFIG_PARAMETERS = `'checkout.workers=56'` 31 | 32 | export function getArtifactMetadata( 33 | flavor: string, 34 | architecture: string 35 | ): {repo: string; artifactName: string} { 36 | const repo = { 37 | i686: 'git-sdk-32', 38 | x86_64: 'git-sdk-64', 39 | aarch64: 'git-sdk-arm64' 40 | }[architecture] 41 | 42 | if (repo === undefined) { 43 | throw new Error(`Invalid architecture ${architecture} specified`) 44 | } 45 | 46 | const artifactName = `${repo}-${flavor}` 47 | 48 | return {repo, artifactName} 49 | } 50 | 51 | export async function clone( 52 | url: string, 53 | destination: string, 54 | verbose: number | boolean, 55 | cloneExtraOptions: string[] = [] 56 | ): Promise { 57 | if (verbose) core.info(`Cloning ${url} to ${destination}`) 58 | const child = await spawnAndWaitForExitCode( 59 | gitExePath, 60 | [ 61 | 'clone', 62 | '--depth=1', 63 | '--single-branch', 64 | '--branch=main', 65 | ...cloneExtraOptions, 66 | url, 67 | destination 68 | ], 69 | { 70 | env: { 71 | GIT_CONFIG_PARAMETERS 72 | } 73 | } 74 | ) 75 | if (child.exitCode !== 0) { 76 | throw new Error(`git 
clone: exited with code ${child.exitCode}`) 77 | } 78 | } 79 | 80 | async function updateHEAD( 81 | bareRepositoryPath: string, 82 | headSHA: string 83 | ): Promise { 84 | const child = await spawnAndWaitForExitCode( 85 | gitExePath, 86 | ['--git-dir', bareRepositoryPath, 'update-ref', 'HEAD', headSHA], 87 | { 88 | env: { 89 | GIT_CONFIG_PARAMETERS 90 | } 91 | } 92 | ) 93 | if (child.exitCode !== 0) { 94 | throw new Error(`git: exited with code ${child.exitCode}`) 95 | } 96 | } 97 | 98 | export async function getViaGit( 99 | flavor: string, 100 | architecture: string, 101 | githubToken?: string 102 | ): Promise<{ 103 | artifactName: string 104 | id: string 105 | download: ( 106 | outputDirectory: string, 107 | verbose?: number | boolean 108 | ) => Promise 109 | }> { 110 | const owner = 'git-for-windows' 111 | 112 | const {repo, artifactName} = getArtifactMetadata(flavor, architecture) 113 | 114 | const octokit = githubToken ? new Octokit({auth: githubToken}) : new Octokit() 115 | let head_sha: string 116 | if (flavor === 'minimal') { 117 | const info = await octokit.actions.listWorkflowRuns({ 118 | owner, 119 | repo, 120 | workflow_id: 938271, 121 | status: 'success', 122 | branch: 'main', 123 | event: 'push', 124 | per_page: 1 125 | }) 126 | head_sha = info.data.workflow_runs[0].head_sha 127 | /* 128 | * There was a GCC upgrade to v14.1 that broke the build with `DEVELOPER=1`, 129 | * and `ci-artifacts` was not updated to test-build with `DEVELOPER=1` (this 130 | * was fixed in https://github.com/git-for-windows/git-sdk-64/pull/83). 131 | * 132 | * Work around that by forcing the incorrectly-passing revision back to the 133 | * last one before that GCC upgrade. 
134 | */ 135 | if (head_sha === '5f6ba092f690c0bbf84c7201be97db59cdaeb891') { 136 | head_sha = 'e37e3f44c1934f0f263dabbf4ed50a3cfb6eaf71' 137 | } 138 | } else { 139 | const info = await octokit.repos.getBranch({ 140 | owner, 141 | repo, 142 | branch: 'main' 143 | }) 144 | head_sha = info.data.commit.sha 145 | } 146 | const id = `${artifactName}-${head_sha}${head_sha === 'e37e3f44c1934f0f263dabbf4ed50a3cfb6eaf71' ? '-2' : ''}` 147 | core.info(`Got commit ${head_sha} for ${repo}`) 148 | 149 | return { 150 | artifactName, 151 | id, 152 | download: async ( 153 | outputDirectory: string, 154 | verbose: number | boolean = false 155 | ): Promise => { 156 | core.startGroup(`Cloning ${repo}`) 157 | const partialCloneArg = flavor === 'full' ? [] : ['--filter=blob:none'] 158 | await clone(`https://github.com/${owner}/${repo}`, `.tmp`, verbose, [ 159 | '--bare', 160 | ...partialCloneArg 161 | ]) 162 | core.endGroup() 163 | 164 | let child: SpawnReturnArgs 165 | if (flavor === 'full') { 166 | core.startGroup(`Checking out ${repo}`) 167 | child = await spawnAndWaitForExitCode( 168 | gitExePath, 169 | [`--git-dir=.tmp`, 'worktree', 'add', outputDirectory, head_sha], 170 | { 171 | env: { 172 | GIT_CONFIG_PARAMETERS 173 | } 174 | } 175 | ) 176 | } else { 177 | await updateHEAD('.tmp', head_sha) 178 | core.startGroup('Cloning build-extra') 179 | await clone( 180 | `https://github.com/${owner}/build-extra`, 181 | '.tmp/build-extra', 182 | verbose 183 | ) 184 | core.endGroup() 185 | 186 | core.startGroup(`Creating ${flavor} artifact`) 187 | const traceArg = verbose ? 
['-x'] : [] 188 | child = await spawnAndWaitForExitCode( 189 | `${gitForWindowsUsrBinPath}/bash.exe`, 190 | [ 191 | ...traceArg, 192 | '.tmp/build-extra/please.sh', 193 | 'create-sdk-artifact', 194 | `--architecture=${architecture}`, 195 | `--out=${outputDirectory}`, 196 | '--sdk=.tmp', 197 | flavor 198 | ], 199 | { 200 | env: { 201 | GIT_CONFIG_PARAMETERS, 202 | COMSPEC: 203 | process.env.COMSPEC || 204 | `${process.env.WINDIR}\\system32\\cmd.exe`, 205 | LC_CTYPE: 'C.UTF-8', 206 | CHERE_INVOKING: '1', 207 | MSYSTEM: 'MINGW64', 208 | PATH: `${gitForWindowsBinPaths.join(delimiter)}${delimiter}${process.env.PATH}` 209 | } 210 | } 211 | ) 212 | } 213 | core.endGroup() 214 | if (child.exitCode === 0) { 215 | fs.rmSync('.tmp', {recursive: true}) 216 | } else { 217 | throw new Error(`process exited with code ${child.exitCode}`) 218 | } 219 | } 220 | } 221 | } 222 | -------------------------------------------------------------------------------- /src/spawn.ts: -------------------------------------------------------------------------------- 1 | import {spawn as SpawnInternal, SpawnOptions} from 'child_process' 2 | 3 | export type SpawnReturnArgs = { 4 | exitCode: number | null 5 | } 6 | 7 | /** 8 | * Simple wrapper around NodeJS's "child_process.spawn" function. 9 | * Since we only use the exit code, we only expose that. 
10 | */ 11 | export async function spawnAndWaitForExitCode( 12 | command: string, 13 | args: readonly string[], 14 | options: SpawnOptions 15 | ): Promise { 16 | const child = SpawnInternal(command, args, { 17 | ...options, 18 | // 'inherit' means that the child process will use the same stdio/stderr as the parent process 19 | stdio: [undefined, 'inherit', 'inherit'] 20 | }) 21 | 22 | return new Promise((resolve, reject) => { 23 | child.on('error', reject) 24 | child.on('close', code => { 25 | resolve({exitCode: code}) 26 | }) 27 | }) 28 | } 29 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ 4 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 5 | "outDir": "./lib", /* Redirect output structure to the directory. */ 6 | "rootDir": ".", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 7 | "strict": true, /* Enable all strict type-checking options. */ 8 | "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 9 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 10 | "noUnusedLocals": true, 11 | "noUnusedParameters": true 12 | }, 13 | "include": ["**/*.ts"], 14 | "exclude": ["node_modules"] 15 | } 16 | --------------------------------------------------------------------------------