├── .craft.yml
├── .dockerignore
├── .eslintrc.js
├── .github
└── workflows
│ ├── build.yml
│ ├── enforce-license-compliance.yml
│ ├── image.yml
│ ├── lint.yml
│ └── release.yml
├── .gitignore
├── .npmignore
├── .prettierignore
├── .prettierrc.yml
├── .vscode
├── extensions.json
└── settings.json
├── CHANGELOG.md
├── CONTRIBUTING.md
├── Dockerfile
├── Gemfile
├── Gemfile.lock
├── LICENSE
├── Makefile
├── README.md
├── docs
└── _site
│ ├── assets
│ ├── main.css
│ └── normalize.css
│ ├── favicon.ico
│ ├── images
│ └── sentry-glyph-black.png
│ └── index.html
├── img
├── icon.svg
└── logo.svg
├── jest.config.js
├── package.json
├── scripts
└── config-json-schema-to-ts.js
├── src
├── __mocks__
│ ├── @aws-sdk
│ │ └── client-lambda.ts
│ ├── fs.ts
│ └── logger.ts
├── __tests__
│ ├── config.test.ts
│ └── index.test.ts
├── artifact_providers
│ ├── __tests__
│ │ ├── base.test.ts
│ │ └── github.test.ts
│ ├── base.ts
│ ├── gcs.ts
│ ├── github.ts
│ └── none.ts
├── commands
│ ├── __tests__
│ │ ├── prepare.test.ts
│ │ └── publish.test.ts
│ ├── artifacts.ts
│ ├── artifacts_cmds
│ │ ├── download.ts
│ │ └── list.ts
│ ├── config.ts
│ ├── prepare.ts
│ ├── publish.ts
│ └── targets.ts
├── config.ts
├── index.ts
├── logger.ts
├── schemas
│ ├── projectConfig.schema.ts
│ └── project_config.ts
├── status_providers
│ ├── base.ts
│ └── github.ts
├── targets
│ ├── __tests__
│ │ ├── awsLambda.test.ts
│ │ ├── commitOnGitRepository.test.ts
│ │ ├── crates.test.ts
│ │ ├── github.test.ts
│ │ ├── index.test.ts
│ │ ├── maven.test.ts
│ │ ├── mavenDiskIo.test.ts
│ │ ├── npm.test.ts
│ │ ├── powershell.test.ts
│ │ ├── pubDev.test.ts
│ │ ├── pypi.test.ts
│ │ ├── registry.test.ts
│ │ ├── sentryPypi.test.ts
│ │ ├── symbolCollector.test.ts
│ │ └── upm.test.ts
│ ├── awsLambdaLayer.ts
│ ├── base.ts
│ ├── brew.ts
│ ├── cocoapods.ts
│ ├── commitOnGitRepository.ts
│ ├── crates.ts
│ ├── docker.ts
│ ├── gcs.ts
│ ├── gem.ts
│ ├── ghPages.ts
│ ├── github.ts
│ ├── hex.ts
│ ├── index.ts
│ ├── maven.ts
│ ├── npm.ts
│ ├── nuget.ts
│ ├── powershell.ts
│ ├── pubDev.ts
│ ├── pypi.ts
│ ├── registry.ts
│ ├── sentryPypi.ts
│ ├── symbolCollector.ts
│ └── upm.ts
├── types
│ ├── mustache.d.ts
│ ├── nvar.ts
│ └── split.d.ts
└── utils
│ ├── __fixtures__
│ ├── gcsApi.ts
│ ├── gcsFileObj.ts
│ └── listFiles
│ │ ├── a
│ │ ├── b
│ │ └── subdir
│ │ └── .empty
│ ├── __tests__
│ ├── async.test.ts
│ ├── awsLambdaLayerManager.test.ts
│ ├── changelog.test.ts
│ ├── env.test.ts
│ ├── files.test.ts
│ ├── filters.test.ts
│ ├── gcsAPI.test.ts
│ ├── githubApi.test.ts
│ ├── gpg.test.ts
│ ├── helpers.test.ts
│ ├── objects.test.ts
│ ├── packagePath.test.ts
│ ├── strings.test.ts
│ ├── symlink.test.ts
│ ├── system.test.ts
│ └── version.test.ts
│ ├── async.ts
│ ├── awsLambdaLayerManager.ts
│ ├── changelog.ts
│ ├── checksum.ts
│ ├── env.ts
│ ├── errors.ts
│ ├── files.ts
│ ├── filters.ts
│ ├── gcsApi.ts
│ ├── git.ts
│ ├── githubApi.ts
│ ├── gpg.ts
│ ├── helpers.ts
│ ├── objects.ts
│ ├── packagePath.ts
│ ├── registry.ts
│ ├── sentry.ts
│ ├── strings.ts
│ ├── symlink.ts
│ ├── system.ts
│ └── version.ts
├── tsconfig.build.json
├── tsconfig.json
└── yarn.lock
/.craft.yml:
--------------------------------------------------------------------------------
1 | minVersion: '0.30.0'
2 | changelogPolicy: auto
3 | preReleaseCommand: >-
4 | node -p "
5 | const {execSync} = require('child_process');
6 | execSync('npm --no-git-tag-version version ' + process.env.CRAFT_NEW_VERSION).toString();
7 | "
8 | postReleaseCommand: >-
9 | node -p "
10 | const {execSync} = require('child_process');
11 | execSync('npm --no-git-tag-version version preminor --preid=dev');
12 | execSync('git diff --quiet || git commit -anm \'meta: Bump new development version\\n\\n#skip-changelog\' && git pull --rebase && git push').toString();"
13 | requireNames:
14 | - /^sentry-craft.*\.tgz$/
15 | - /^craft$/
16 | targets:
17 | - name: npm
18 | - name: gcs
19 | includeNames: /^.*craft.*$/
20 | bucket: sentry-sdk-assets
21 | paths:
22 | - path: /craft/{{version}}/
23 | metadata:
24 | cacheControl: 'public, max-age=2592000'
25 | - path: /craft/latest/
26 | metadata:
27 | cacheControl: 'public, max-age=300'
28 | - name: registry
29 | apps:
30 | 'app:craft':
31 | urlTemplate: 'https://downloads.sentry-cdn.com/craft/{{version}}/{{file}}'
32 | checksums:
33 | - algorithm: sha256
34 | format: hex
35 | - id: release
36 | name: docker
37 | source: ghcr.io/getsentry/craft
38 | target: getsentry/craft
39 | - id: latest
40 | name: docker
41 | source: ghcr.io/getsentry/craft
42 | target: getsentry/craft
43 | targetFormat: '{{{target}}}:latest'
44 | - name: github
45 | - name: gh-pages
46 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Ignore everything
2 | *
3 |
4 | !/Gemfile
5 | !/Gemfile.lock
6 | !/LICENSE
7 | !/README.md
8 | !/package.json
9 | !/scripts
10 | !/src
11 | !/yarn.lock
12 |
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: {
4 | es2017: true,
5 | node: true,
6 | },
7 | parser: '@typescript-eslint/parser',
8 | plugins: ['@typescript-eslint'],
9 | extends: [
10 | 'prettier',
11 | 'eslint:recommended',
12 | 'plugin:@typescript-eslint/recommended',
13 | 'prettier/@typescript-eslint',
14 | ],
15 | rules: {
16 | '@typescript-eslint/no-explicit-any': 'off',
17 | 'no-constant-condition': ['error', { checkLoops: false }],
18 | // Make sure variables marked with _ are ignored (ex. _varName)
19 | '@typescript-eslint/no-unused-vars': ['warn', { argsIgnorePattern: '^_' }],
20 | '@typescript-eslint/ban-ts-comment': [
21 | 'error',
22 | {
23 | 'ts-ignore': 'allow-with-description',
24 | },
25 | ],
26 | },
27 | };
28 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: 'Build / Test / Artifacts'
2 | on:
3 | push:
4 | branches:
5 | - master
6 | - release/**
7 | pull_request:
8 |
9 | jobs:
10 | test:
11 | runs-on: ubuntu-latest
12 | strategy:
13 | matrix:
14 | node: ['20','22']
15 | name: Node ${{ matrix.node }}
16 | steps:
17 | - uses: actions/checkout@v3
18 | - uses: actions/setup-node@v3
19 | with:
20 | node-version: '${{ matrix.node }}'
21 | - uses: actions/cache@v3
22 | id: cache
23 | with:
24 | path: node_modules
25 | key: ${{ runner.os }}-${{ hashFiles('package.json', 'yarn.lock') }}
26 | - name: Install Dependencies
27 | if: steps.cache.outputs.cache-hit != 'true'
28 | run: yarn install --frozen-lockfile
29 | - name: Test
30 | run: yarn test
31 |
32 | artifacts:
33 | name: Artifacts Upload
34 | needs: test
35 | runs-on: ubuntu-latest
36 | steps:
37 | - uses: actions/checkout@v3
38 | - uses: actions/setup-node@v3
39 | with:
40 | node-version-file: package.json
41 | - uses: actions/cache@v3
42 | id: cache
43 | with:
44 | path: node_modules
45 | key: ${{ runner.os }}-${{ hashFiles('package.json', 'yarn.lock') }}
46 | - name: Install Dependencies
47 | if: steps.cache.outputs.cache-hit != 'true'
48 | run: yarn install --frozen-lockfile
49 | - name: Build
50 | run: yarn build --define:process.env.CRAFT_BUILD_SHA='"'${{ github.sha }}'"'
51 | - name: NPM Pack
52 | run: npm pack
53 | - name: Docs
54 | run: cd docs && zip -r ../gh-pages _site/
55 | - name: Archive Artifacts
56 | uses: actions/upload-artifact@v4
57 | with:
58 | name: ${{ github.sha }}
59 | path: |
60 | ${{ github.workspace }}/gh-pages.zip
61 | ${{ github.workspace }}/*.tgz
62 | ${{ github.workspace }}/dist/craft
63 |
--------------------------------------------------------------------------------
/.github/workflows/enforce-license-compliance.yml:
--------------------------------------------------------------------------------
1 | name: Enforce License Compliance
2 |
3 | on:
4 | push:
5 | branches: [master]
6 | pull_request:
7 | branches: [master]
8 |
9 | jobs:
10 | enforce-license-compliance:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: 'Enforce License Compliance'
14 | uses: getsentry/action-enforce-license-compliance@main
15 | with:
16 | fossa_api_key: ${{ secrets.FOSSA_API_KEY }}
17 |
--------------------------------------------------------------------------------
/.github/workflows/image.yml:
--------------------------------------------------------------------------------
1 | name: image
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [master, release/**, test-me-*]
7 |
8 | jobs:
9 | image:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v3
13 | - name: builder-image
14 | run: |
15 | set -euxo pipefail
16 |
17 | img=ghcr.io/getsentry/craft-builder:latest
18 | args=()
19 | if docker pull -q "$img"; then
20 | args+=(--cache-from "$img")
21 | fi
22 | docker buildx build \
23 | "${args[@]}" \
24 | --build-arg BUILDKIT_INLINE_CACHE=1 \
25 | --target builder \
26 | --tag "$img" \
27 | .
28 | - name: image
29 | run: |
30 | set -euxo pipefail
31 |
32 | img=ghcr.io/getsentry/craft:latest
33 | args=()
34 | if docker pull -q "$img"; then
35 | args+=(--cache-from "$img")
36 | fi
37 | docker buildx build \
38 | "${args[@]}" \
39 | --build-arg "SOURCE_COMMIT=$GITHUB_SHA" \
40 | --build-arg BUILDKIT_INLINE_CACHE=1 \
41 | --tag "$img" \
42 | .
43 | - name: docker login
44 | run: docker login --username "$DOCKER_USER" --password-stdin ghcr.io <<< "$DOCKER_PASS"
45 | env:
46 | DOCKER_USER: ${{ github.actor }}
47 | DOCKER_PASS: ${{ secrets.GITHUB_TOKEN }}
48 | if: github.event_name != 'pull_request'
49 | - name: docker push
50 | run: |
51 | set -euxo pipefail
52 |
53 | craft_builder=ghcr.io/getsentry/craft-builder:latest
54 | craft_latest=ghcr.io/getsentry/craft:latest
55 | craft_versioned="ghcr.io/getsentry/craft:${GITHUB_SHA}"
56 |
57 | docker push "$craft_builder"
58 |
59 | docker tag "$craft_latest" "$craft_versioned"
60 | docker push "$craft_versioned"
61 | docker push "$craft_latest"
62 | if: github.event_name != 'pull_request'
63 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: 'Lint'
2 | on:
3 | pull_request:
4 | branches: [master]
5 |
6 | jobs:
7 | lint:
8 | name: Lint fixes
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v3
12 | with:
13 | ref: ${{ github.event.pull_request.head.sha }}
14 | - uses: actions/setup-node@v3
15 | with:
16 | node-version-file: package.json
17 | - uses: actions/cache@v3
18 | id: cache
19 | with:
20 | path: |
21 | node_modules
22 | .eslintcache
23 | key: ${{ runner.os }}-${{ hashFiles('package.json', 'yarn.lock') }}
24 | - name: Install Dependencies
25 | if: steps.cache.outputs.cache-hit != 'true'
26 | run: yarn install --frozen-lockfile
27 | - name: Lint
28 | run: yarn lint -f github-annotations
29 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | version:
6 | description: Version to release
7 | required: false
8 | force:
9 | description: Force a release even when there are release-blockers (optional)
10 | required: false
11 | jobs:
12 | release:
13 | runs-on: ubuntu-latest
14 | name: 'Release a new version'
15 | steps:
16 | - name: Get auth token
17 | id: token
18 | uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0
19 | with:
20 | app-id: ${{ vars.SENTRY_RELEASE_BOT_CLIENT_ID }}
21 | private-key: ${{ secrets.SENTRY_RELEASE_BOT_PRIVATE_KEY }}
22 | - uses: actions/checkout@v3
23 | with:
24 | # Fetch all commits so we can determine previous version
25 | token: ${{ steps.token.outputs.token }}
26 | fetch-depth: 0
27 | - name: Prepare release
28 | uses: getsentry/action-prepare-release@v1
29 | env:
30 | GITHUB_TOKEN: ${{ steps.token.outputs.token }}
31 | with:
32 | version: ${{ github.event.inputs.version }}
33 | force: ${{ github.event.inputs.force }}
34 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | coverage/
2 | dist/
3 | node_modules/
4 |
5 | yarn-error.log
6 | npm-debug.log
7 |
8 | .DS_Store
9 | ._*
10 | .Spotlight-V100
11 | .Trashes
12 | *.env
13 | .eslintcache
14 | .idea
15 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | *
2 | !/dist/**/*
3 | README.md
4 | LICENSE
5 | package.json
6 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | coverage/
2 | dist/
3 | node_modules/
4 |
--------------------------------------------------------------------------------
/.prettierrc.yml:
--------------------------------------------------------------------------------
1 | singleQuote: true
2 | arrowParens: avoid
3 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | // See http://go.microsoft.com/fwlink/?LinkId=827846
3 | // for the documentation about the extensions.json format
4 | "recommendations": [
5 | "esbenp.prettier-vscode",
6 | "stkb.rewrap",
7 | "dbaeumer.vscode-eslint"
8 | ]
9 | }
10 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "editor.codeActionsOnSave": {
3 | "source.organizeImports": "never"
4 | },
5 | "editor.formatOnType": true,
6 | "editor.formatOnSave": true,
7 | "editor.rulers": [80],
8 | "editor.tabSize": 2,
9 | "files.trimTrailingWhitespace": true,
10 | "files.insertFinalNewline": true,
11 | "typescript.tsdk": "node_modules/typescript/lib"
12 | }
13 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | ## Setup
4 |
5 | Craft uses Yarn v1 for managing its dependencies. We also rely on
6 | [Volta](https://volta.sh/) to manage our Node and Yarn versions. We highly
7 | recommend installing Volta if you don't already have it.
8 |
9 | Then, to get started, install the dependencies and get an initial build:
10 |
11 | ```shell
12 | yarn install
13 | yarn build
14 | ```
15 |
16 | ## Logging Level
17 |
18 | Logging level for `craft` can be configured via setting the `CRAFT_LOG_LEVEL`
19 | environment variable or using the `--log-level` CLI flag.
20 |
21 | Accepted values are: `Fatal`, `Error`, `Warn`, `Log`, `Info`, `Success`,
22 | `Debug`, `Trace`, `Silent`, `Verbose`
23 |
24 | ## Dry-run Mode
25 |
26 | Dry-run mode can be enabled via setting the `CRAFT_DRY_RUN` environment variable
27 | to any truthy value (any value other than `undefined`, `null`, `""`, `0`,
28 | `false`, and `no`). One may also use the `--dry-run` CLI flag.
29 |
30 | In dry-run mode no destructive actions will be performed (creating remote
31 | branches, pushing tags, committing files, etc.)
32 |
33 | ## Sentry Support
34 |
35 | Errors you encounter while using Craft can be sent to Sentry. To use this
36 | feature, add the `CRAFT_SENTRY_DSN` variable to your environment (or "craft"
37 | configuration file) that contains a Sentry project's DSN.
38 |
39 | For example:
40 |
41 | ```shell
42 | export CRAFT_SENTRY_DSN='https://1234@sentry.io/2345'
43 | ```
44 |
45 | ## Releasing
46 |
47 | `craft` obviously uses itself for preparing and publishing new releases so
48 | [_did you mean recursion_](https://github.com/getsentry/craft/#craft-prepare-preparing-a-new-release)?
49 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:22-bookworm-slim AS builder
2 |
3 | WORKDIR /usr/local/lib
4 |
5 | COPY package.json yarn.lock ./
6 | RUN export YARN_CACHE_FOLDER="$(mktemp -d)" \
7 | && yarn install --frozen-lockfile --quiet \
8 | && rm -r "$YARN_CACHE_FOLDER"
9 |
10 | COPY . .
11 |
12 | RUN \
13 | NODE_ENV=production \
14 | NODE_PATH=/usr/local/lib/node_modules \
15 | PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/lib/node_modules/.bin" \
16 | yarn --modules-folder /usr/local/lib/node_modules build
17 |
18 | FROM node:22-bookworm
19 |
20 | ENV DEBIAN_FRONTEND=noninteractive \
21 | DOTNET_CLI_TELEMETRY_OPTOUT=1 \
22 | # See https://github.com/CocoaPods/CocoaPods/issues/6795
23 | COCOAPODS_ALLOW_ROOT=1 \
24 | CARGO_HOME=/root/.cargo \
25 | RUSTUP_HOME=/root/.rustup \
26 | PATH=${PATH}:/root/.cargo/bin:/opt/flutter/bin:/venv/bin
27 |
28 | RUN apt-get -qq update \
29 | && apt-get install -y --no-install-recommends \
30 | apt-transport-https \
31 | build-essential \
32 | curl \
33 | default-jdk-headless \
34 | dirmngr \
35 | gnupg \
36 | git \
37 | jq \
38 | python3-packaging \
39 | python3-venv \
40 | rsync \
41 | ruby-full \
42 | unzip \
43 | maven \
44 | && apt-get clean \
45 | && rm -rf /var/lib/apt/lists/*
46 |
47 | COPY Gemfile Gemfile.lock ./
48 |
49 | RUN python3 -m venv /venv && pip install twine==6.1.0 pkginfo==1.12.1.2 --no-cache
50 |
51 | RUN : \
52 | && . /etc/os-release \
53 | && curl -fsSL "https://packages.microsoft.com/config/debian/${VERSION_ID}/packages-microsoft-prod.deb" -o /tmp/packages-microsoft-prod.deb \
54 | && dpkg -i /tmp/packages-microsoft-prod.deb \
55 | && rm /tmp/packages-microsoft-prod.deb \
56 | && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
57 | && echo "deb [arch=amd64] https://download.docker.com/linux/debian ${VERSION_CODENAME} stable" >> /etc/apt/sources.list \
58 | # We are using RabbitMQ's Erlang repository to get the latest Erlang version
59 | # https://www.rabbitmq.com/docs/install-debian
60 | # https://github.com/rabbitmq/erlang-debian-package
61 | ## Team RabbitMQ's main signing key
62 | && curl -1sLf "https://keys.openpgp.org/vks/v1/by-fingerprint/0A9AF2115F4687BD29803A206B73A36E6026DFCA" | gpg --dearmor | tee /usr/share/keyrings/com.rabbitmq.team.gpg > /dev/null \
63 | ## Community mirror of Cloudsmith: modern Erlang repository
64 | && curl -1sLf https://github.com/rabbitmq/signing-keys/releases/download/3.0/cloudsmith.rabbitmq-erlang.E495BB49CC4BBE5B.key | gpg --dearmor | tee /usr/share/keyrings/rabbitmq.E495BB49CC4BBE5B.gpg > /dev/null \
65 | ## Add apt repositories maintained by Team RabbitMQ
66 | && echo "deb [arch=amd64 signed-by=/usr/share/keyrings/rabbitmq.E495BB49CC4BBE5B.gpg] https://ppa1.rabbitmq.com/rabbitmq/rabbitmq-erlang/deb/debian bookworm main" >> /etc/apt/sources.list.d/erlang.list \
67 | && echo "deb [arch=amd64 signed-by=/usr/share/keyrings/rabbitmq.E495BB49CC4BBE5B.gpg] https://ppa2.rabbitmq.com/rabbitmq/rabbitmq-erlang/deb/debian bookworm main" >> /etc/apt/sources.list.d/erlang.list \
68 | && apt-get update -qq \
69 | && apt-get install -y --no-install-recommends \
70 | dotnet-sdk-9.0 \
71 | dotnet-sdk-8.0 \
72 | docker-ce-cli \
73 | docker-buildx-plugin \
74 | erlang \
75 | elixir \
76 | && apt-get clean \
77 | && rm -rf /var/lib/apt/lists/* \
78 | && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --profile minimal -y \
79 | && cargo --version \
80 | && cargo install cargo-hack \
81 | && gem install -g --no-document \
82 | # Install https://github.com/getsentry/symbol-collector
83 | && symbol_collector_url=$(curl -s https://api.github.com/repos/getsentry/symbol-collector/releases/tags/1.17.0 | \
84 | jq -r '.assets[].browser_download_url | select(endswith("symbolcollector-console-linux-x64.zip"))') \
85 | && curl -sL $symbol_collector_url -o "/tmp/sym-collector.zip" \
86 | && unzip /tmp/sym-collector.zip -d /usr/local/bin/ \
87 | && rm /tmp/sym-collector.zip \
88 | && chmod +x /usr/local/bin/SymbolCollector.Console
89 |
90 | # https://docs.flutter.dev/get-started/install/linux#install-flutter-manually
91 | RUN curl -fsSL https://storage.googleapis.com/flutter_infra_release/releases/stable/linux/flutter_linux_3.22.0-stable.tar.xz -o /opt/flutter.tar.xz \
92 | && tar xf /opt/flutter.tar.xz -C /opt \
93 | && rm /opt/flutter.tar.xz
94 |
95 | # https://learn.microsoft.com/en-us/powershell/scripting/install/install-debian
96 | RUN curl -fsSL https://github.com/PowerShell/PowerShell/releases/download/v7.4.1/powershell_7.4.1-1.deb_amd64.deb -o /opt/powershell.deb \
97 | && dpkg -i /opt/powershell.deb \
98 | && apt-get install -f \
99 | && apt-get clean \
100 | && rm /opt/powershell.deb
101 |
102 | # craft does `git` things against mounted directories as root
103 | RUN git config --global --add safe.directory '*'
104 |
105 | COPY --from=builder /usr/local/lib/dist/craft /usr/local/bin/craft
106 | ARG SOURCE_COMMIT
107 | ENV CRAFT_BUILD_SHA=$SOURCE_COMMIT
108 |
109 | ENTRYPOINT ["craft"]
110 |
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
1 | # Gemfile
2 | # Pin CocoaPods Version to avoid that bugs in CocoaPods like
3 | # https://github.com/CocoaPods/CocoaPods/issues/12081 break our release
4 | # workflow.
5 | gem "cocoapods", "= 1.16.2"
6 |
--------------------------------------------------------------------------------
/Gemfile.lock:
--------------------------------------------------------------------------------
1 | GEM
2 | remote: https://rubygems.org/
3 | specs:
4 | CFPropertyList (3.0.7)
5 | base64
6 | nkf
7 | rexml
8 | activesupport (7.2.2.1)
9 | base64
10 | benchmark (>= 0.3)
11 | bigdecimal
12 | concurrent-ruby (~> 1.0, >= 1.3.1)
13 | connection_pool (>= 2.2.5)
14 | drb
15 | i18n (>= 1.6, < 2)
16 | logger (>= 1.4.2)
17 | minitest (>= 5.1)
18 | securerandom (>= 0.3)
19 | tzinfo (~> 2.0, >= 2.0.5)
20 | addressable (2.8.7)
21 | public_suffix (>= 2.0.2, < 7.0)
22 | algoliasearch (1.27.5)
23 | httpclient (~> 2.8, >= 2.8.3)
24 | json (>= 1.5.1)
25 | atomos (0.1.3)
26 | base64 (0.2.0)
27 | benchmark (0.4.0)
28 | bigdecimal (3.1.9)
29 | claide (1.1.0)
30 | cocoapods (1.16.2)
31 | addressable (~> 2.8)
32 | claide (>= 1.0.2, < 2.0)
33 | cocoapods-core (= 1.16.2)
34 | cocoapods-deintegrate (>= 1.0.3, < 2.0)
35 | cocoapods-downloader (>= 2.1, < 3.0)
36 | cocoapods-plugins (>= 1.0.0, < 2.0)
37 | cocoapods-search (>= 1.0.0, < 2.0)
38 | cocoapods-trunk (>= 1.6.0, < 2.0)
39 | cocoapods-try (>= 1.1.0, < 2.0)
40 | colored2 (~> 3.1)
41 | escape (~> 0.0.4)
42 | fourflusher (>= 2.3.0, < 3.0)
43 | gh_inspector (~> 1.0)
44 | molinillo (~> 0.8.0)
45 | nap (~> 1.0)
46 | ruby-macho (>= 2.3.0, < 3.0)
47 | xcodeproj (>= 1.27.0, < 2.0)
48 | cocoapods-core (1.16.2)
49 | activesupport (>= 5.0, < 8)
50 | addressable (~> 2.8)
51 | algoliasearch (~> 1.0)
52 | concurrent-ruby (~> 1.1)
53 | fuzzy_match (~> 2.0.4)
54 | nap (~> 1.0)
55 | netrc (~> 0.11)
56 | public_suffix (~> 4.0)
57 | typhoeus (~> 1.0)
58 | cocoapods-deintegrate (1.0.5)
59 | cocoapods-downloader (2.1)
60 | cocoapods-plugins (1.0.0)
61 | nap
62 | cocoapods-search (1.0.1)
63 | cocoapods-trunk (1.6.0)
64 | nap (>= 0.8, < 2.0)
65 | netrc (~> 0.11)
66 | cocoapods-try (1.2.0)
67 | colored2 (3.1.2)
68 | concurrent-ruby (1.3.5)
69 | connection_pool (2.5.0)
70 | drb (2.2.1)
71 | escape (0.0.4)
72 | ethon (0.16.0)
73 | ffi (>= 1.15.0)
74 | ffi (1.17.1)
75 | fourflusher (2.3.1)
76 | fuzzy_match (2.0.4)
77 | gh_inspector (1.1.3)
78 | httpclient (2.9.0)
79 | mutex_m
80 | i18n (1.14.7)
81 | concurrent-ruby (~> 1.0)
82 | json (2.10.2)
83 | logger (1.6.6)
84 | minitest (5.25.5)
85 | molinillo (0.8.0)
86 | mutex_m (0.3.0)
87 | nanaimo (0.4.0)
88 | nap (1.1.0)
89 | netrc (0.11.0)
90 | nkf (0.2.0)
91 | public_suffix (4.0.7)
92 | rexml (3.4.1)
93 | ruby-macho (2.5.1)
94 | securerandom (0.4.1)
95 | typhoeus (1.4.1)
96 | ethon (>= 0.9.0)
97 | tzinfo (2.0.6)
98 | concurrent-ruby (~> 1.0)
99 | xcodeproj (1.27.0)
100 | CFPropertyList (>= 2.3.3, < 4.0)
101 | atomos (~> 0.1.3)
102 | claide (>= 1.0.2, < 2.0)
103 | colored2 (~> 3.1)
104 | nanaimo (~> 0.4.0)
105 | rexml (>= 3.3.6, < 4.0)
106 |
107 | PLATFORMS
108 | ruby
109 |
110 | DEPENDENCIES
111 | cocoapods (= 1.16.2)
112 |
113 | BUNDLED WITH
114 | 2.4.20
115 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors.
2 | All rights reserved.
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy of
5 | this software and associated documentation files (the "Software"), to deal in
6 | the Software without restriction, including without limitation the rights to
7 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
8 | of the Software, and to permit persons to whom the Software is furnished to do
9 | so, subject to the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be included in all
12 | copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 | SOFTWARE.
21 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | test:
2 | yarn test
3 | .PHONY: test
4 |
5 | build:
6 | yarn build
7 | .PHONY: build
8 |
9 | lint: build
10 | yarn lint
11 | .PHONY: lint
12 |
13 | check: test lint
14 | .PHONY: check
15 |
--------------------------------------------------------------------------------
/docs/_site/assets/main.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: 'Oxygen', serif;
3 | color: #46433a;
4 | background-color: #fcfcfc;
5 | }
6 |
7 | header,
8 | main {
9 | padding: 0 20px;
10 | }
11 |
12 | /* ** wrapper div for both header and main ** */
13 | .wrapper {
14 | margin-top: 10%;
15 | }
16 |
17 | /* ** anchor tags ** */
18 | a:link,
19 | a:visited,
20 | a:hover,
21 | a:active {
22 | color: #ce534d;
23 | text-decoration: none;
24 | }
25 |
26 | a:hover {
27 | text-decoration: underline;
28 | }
29 |
30 | /* ** main content list ** */
31 | .main-list-item {
32 | font-weight: bold;
33 | font-size: 1.2em;
34 | margin: 0.8em 0;
35 | }
36 |
37 | /* override the left margin added by font awesome for the main content list,
38 | since it must be aligned with the content */
39 | .fa-ul.main-list {
40 | margin-left: 0;
41 | }
42 |
43 | /* list icons */
44 | .main-list-item-icon {
45 | width: 36px;
46 | color: #46433a;
47 | }
48 |
49 | /* ** logo ** */
50 | .logo-container {
51 | text-align: center;
52 | }
53 |
54 | .logo {
55 | width: 160px;
56 | height: 160px;
57 | display: inline-block;
58 | background-size: cover;
59 | border: 2px solid #fcfcfc;
60 | }
61 |
62 | /* ** author ** */
63 | .author-container h1 {
64 | font-size: 1.6em;
65 | margin-top: 0;
66 | margin-bottom: 0;
67 | text-align: center;
68 | }
69 |
70 | /* ** tagline ** */
71 | .tagline-container p {
72 | font-size: 1.3em;
73 | text-align: center;
74 | margin-bottom: 2em;
75 | }
76 |
77 | /* **** */
78 | hr {
79 | border: 0;
80 | height: 1px;
81 | background-image: -webkit-linear-gradient(
82 | left,
83 | rgba(0, 0, 0, 0),
84 | #46433a,
85 | rgba(0, 0, 0, 0)
86 | );
87 | background-image: -moz-linear-gradient(
88 | left,
89 | rgba(0, 0, 0, 0),
90 | #46433a,
91 | rgba(0, 0, 0, 0)
92 | );
93 | background-image: -ms-linear-gradient(
94 | left,
95 | rgba(0, 0, 0, 0),
96 | #46433a,
97 | rgba(0, 0, 0, 0)
98 | );
99 | background-image: -o-linear-gradient(
100 | left,
101 | rgba(0, 0, 0, 0),
102 | #46433a,
103 | rgba(0, 0, 0, 0)
104 | );
105 | }
106 |
107 | /* ** footer ** */
108 | footer {
109 | position: fixed;
110 | bottom: 0;
111 | right: 0;
112 | height: 20px;
113 | }
114 |
115 | .poweredby {
116 | font-family: 'Arial Narrow', Arial;
117 | font-size: 0.6em;
118 | line-height: 0.6em;
119 | padding: 0 5px;
120 | }
121 |
122 | /* ** media queries ** */
123 | /* X-Small devices (phones, 480px and up) */
124 | @media (min-width: 480px) {
125 |   /* wrapper stays 480px wide past 480px and is kept centered */
126 | .wrapper {
127 | width: 480px;
128 | margin: 10% auto 0 auto;
129 | }
130 | }
131 | /* All other devices (768px and up) */
132 | @media (min-width: 768px) {
133 | /* past 768px the layout is changed and the wrapper has a fixed width of 760px
134 |      to accommodate both the header column and the content column */
135 | .wrapper {
136 | width: 760px;
137 | }
138 |
139 | /* the header column stays left and has a dynamic width with all contents
140 | aligned right */
141 | header {
142 | float: left;
143 | width: 46%;
144 | text-align: right;
145 | }
146 |
147 | .author-container h1,
148 | .logo-container,
149 | .tagline-container p {
150 | text-align: right;
151 | }
152 |
153 | main {
154 | width: 46%;
155 | margin-left: 54%;
156 | padding: 0;
157 | }
158 | }
159 |
--------------------------------------------------------------------------------
/docs/_site/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getsentry/craft/f4949f4b275e495e949bfd88e50125f284680446/docs/_site/favicon.ico
--------------------------------------------------------------------------------
/docs/_site/images/sentry-glyph-black.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getsentry/craft/f4949f4b275e495e949bfd88e50125f284680446/docs/_site/images/sentry-glyph-black.png
--------------------------------------------------------------------------------
/docs/_site/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | • Sentry Craft
7 |
8 |
9 |
14 |
15 |
16 |
17 |
21 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
Sentry Craft
59 |
60 | "Craft" is a command line tool that helps to automate and pipeline
61 | package releases.
62 |
63 |
64 |
65 | -
66 |
67 | GitHub
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/img/icon.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/jest.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | preset: 'ts-jest',
3 | testEnvironment: 'node',
4 | testPathIgnorePatterns: ['/dist/', '/node_modules/'],
5 | modulePathIgnorePatterns: ['/dist/'],
6 | };
7 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@sentry/craft",
3 | "version": "2.8.0-dev.0",
4 | "description": "The universal sentry workflow CLI",
5 | "main": "dist/craft",
6 | "repository": "https://github.com/getsentry/craft",
7 | "author": "Sentry",
8 | "license": "MIT",
9 | "bin": {
10 | "craft": "dist/craft",
11 | "sentry-craft": "dist/craft"
12 | },
13 | "resolutions": {
14 | "**/set-value": ">=2.0.1",
15 | "**/https-proxy-agent": ">=2.2.3",
16 | "**/node-forge": ">=0.10.0",
17 | "**/dot-prop": ">=4.2.1",
18 | "**/kind-of": ">=6.0.3",
19 | "**/node-fetch": "^2.6.7",
20 | "**/yargs-parser": ">=18.1.2",
21 | "**/parse-url": ">=5.0.3",
22 | "**/ansi-regex": ">=5.0.1 < 6.0.0"
23 | },
24 | "devDependencies": {
25 | "@aws-sdk/client-lambda": "^3.2.0",
26 | "@google-cloud/storage": "^5.7.0",
27 | "@octokit/plugin-retry": "^3.0.9",
28 | "@octokit/request-error": "^2.1.0",
29 | "@octokit/rest": "^18.10.0",
30 | "@sentry/node": "4.6.3",
31 | "@sentry/typescript": "^5.17.0",
32 | "@types/async": "^3.0.1",
33 | "@types/aws4": "^1.5.1",
34 | "@types/cli-table": "^0.3.0",
35 | "@types/extract-zip": "^2.0.1",
36 | "@types/git-url-parse": "^9.0.0",
37 | "@types/is-ci": "^2.0.0",
38 | "@types/jest": "^29.5.2",
39 | "@types/js-yaml": "^4.0.5",
40 | "@types/mkdirp": "^1.0.0",
41 | "@types/node": "^22.10.1",
42 | "@types/node-fetch": "^2.5.10",
43 | "@types/ora": "^1.3.4",
44 | "@types/prompts": "^2.0.11",
45 | "@types/rimraf": "^2.0.2",
46 | "@types/shell-quote": "^1.6.0",
47 | "@types/tar": "^4.0.0",
48 | "@types/tmp": "^0.0.33",
49 | "@types/yargs": "^15.0.3",
50 | "@typescript-eslint/eslint-plugin": "^5.19.0",
51 | "@typescript-eslint/parser": "^5.19.0",
52 | "ajv": "6.12.6",
53 | "async": "3.2.2",
54 | "aws4": "^1.11.0",
55 | "chalk": "4.1.1",
56 | "cli-table": "0.3.1",
57 | "consola": "2.15.3",
58 | "esbuild": "^0.25.0",
59 | "eslint": "^7.2.0",
60 | "eslint-config-prettier": "^6.11.0",
61 | "eslint-formatter-github-annotations": "^0.1.0",
62 | "extract-zip": "^2.0.1",
63 | "fast-xml-parser": "^4.2.4",
64 | "git-url-parse": "^11.4.4",
65 | "is-ci": "^2.0.0",
66 | "jest": "^29.6.0",
67 | "js-yaml": "4.1.0",
68 | "json-schema-to-typescript": "5.7.0",
69 | "mkdirp": "^1.0.4",
70 | "mustache": "3.0.1",
71 | "nock": "^13.2.4",
72 | "node-fetch": "^2.6.1",
73 | "nvar": "1.3.1",
74 | "ora": "5.4.0",
75 | "prettier": "^2.2.1",
76 | "prompts": "2.4.1",
77 | "rimraf": "2.7.1",
78 | "shell-quote": "1.7.3",
79 | "simple-git": "^3.6.0",
80 | "source-map-support": "^0.5.20",
81 | "split": "1.0.1",
82 | "string-length": "3.1.0",
83 | "tar": "6.2.1",
84 | "tmp": "0.1.0",
85 | "ts-jest": "^29.1.1",
86 | "typescript": "^5.1.6",
87 | "yargs": "15.4.1"
88 | },
89 | "scripts": {
90 | "build:fat": "yarn run compile-config-schema && tsc -p tsconfig.build.json",
91 | "build:watch": "yarn run compile-config-schema && tsc -p tsconfig.build.json --watch",
92 | "build": "yarn compile-config-schema && esbuild src/index.ts --sourcemap --bundle --platform=node --target=node20 --outfile=dist/craft --minify",
93 | "precli": "yarn build",
94 | "cli": "node -r source-map-support/register dist/craft",
95 | "clean": "rimraf dist coverage",
96 | "lint": "eslint . --ext .ts,.tsx,.js --cache --cache-strategy content",
97 | "fix": "yarn lint --fix",
98 | "test": "jest",
99 | "test:watch": "jest --watch --notify",
100 | "compile-config-schema": "node ./scripts/config-json-schema-to-ts.js"
101 | },
102 | "volta": {
103 | "node": "22.12.0",
104 | "yarn": "1.22.19"
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/scripts/config-json-schema-to-ts.js:
--------------------------------------------------------------------------------
/* eslint-disable @typescript-eslint/no-var-requires */
/**
 * Convert JSON schema for project configuration to a set of TypeScript interfaces
 */
const fs = require('fs');
const json2ts = require('json-schema-to-typescript');

// Resolve the relative paths below against this script's directory so the
// script works regardless of the caller's current working directory.
process.chdir(__dirname);

const jsonInputPath = '../src/schemas/projectConfig.schema.ts';
const tsOutputPath = '../src/schemas/project_config.ts';

// FIXME Duplicates compilation options in config.test.ts
const compileOptions = { style: { singleQuote: true, trailingComma: 'es5' } };

const schema = require(jsonInputPath);
json2ts
  .compile(schema, '', compileOptions)
  .then(ts => fs.writeFileSync(tsOutputPath, ts))
  .catch(e => {
    // Fail the build: previously the error was only logged and the process
    // exited 0, so callers (e.g. the npm "build" script) carried on with a
    // stale project_config.ts.
    console.error(e);
    process.exitCode = 1;
  });
21 |
--------------------------------------------------------------------------------
/src/__mocks__/@aws-sdk/client-lambda.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable @typescript-eslint/explicit-module-boundary-types */
2 |
3 | const PUBLISHED_LAYER_TEST = {
4 | Version: 1,
5 | LayerVersionArn: 'test:layer:version:arn',
6 | };
7 |
8 | export class Lambda {
9 | public publishLayerVersion() {
10 | return PUBLISHED_LAYER_TEST;
11 | }
12 |
13 | public addLayerVersionPermission() {
14 | // Adding layer version permissions...
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/src/__mocks__/fs.ts:
--------------------------------------------------------------------------------
1 | const fs: any = jest.createMockFromModule('fs');
2 |
3 | function readFileSync(input: any) {
4 | return input;
5 | }
6 |
7 | fs.readFileSync = readFileSync;
8 |
9 | module.exports = fs;
10 |
--------------------------------------------------------------------------------
/src/__mocks__/logger.ts:
--------------------------------------------------------------------------------
1 | // eslint-disable-next-line @typescript-eslint/no-var-requires
2 | const consola = require('consola');
3 |
4 | const loggerModule: typeof consola = jest.genMockFromModule('../logger');
5 |
6 | loggerModule.logger.withScope = function (): any {
7 | return this;
8 | };
9 |
10 | module.exports = loggerModule;
11 |
--------------------------------------------------------------------------------
/src/__tests__/config.test.ts:
--------------------------------------------------------------------------------
/**
 * Tests of our ability to read craft config files. (This is NOT general test
 * configuration).
 */

import { readFileSync } from 'fs';
import { dirname, join } from 'path';

import { compile } from 'json-schema-to-typescript';

import { getProjectConfigSchema, validateConfiguration } from '../config';

// Directory holding both the JSON schema and its generated TS counterpart.
const configSchemaDir = join(dirname(__dirname), 'schemas');
const configGeneratedTypes = join(configSchemaDir, 'project_config.ts');

/**
 * We compile JSON schema to TypeScript interface as part of tests to compare
 * it with the existing file. This is done to be promptly notified about any
 * changes to the JSON schema that are not yet reflected in the TS interface.
 */
describe('compile-json-schema-to-typescript', () => {
  test('does not make any changes to the compiled interface', async () => {
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const projectConfig = require('../schemas/projectConfig.schema');
    const storedOutput = readFileSync(configGeneratedTypes, {
      encoding: 'utf8',
    });
    // NOTE: keep in sync with scripts/config-json-schema-to-ts.js, which uses
    // the same compile options when (re)generating project_config.ts.
    const compileOptions = {
      style: { singleQuote: true, trailingComma: 'es5' } as any,
    };
    const generatedOutput = await compile(projectConfig, '', compileOptions);

    expect(generatedOutput).toBeTruthy();
    expect(generatedOutput).toBe(storedOutput);
  });
});

describe('validateConfiguration', () => {
  test('parses minimal configuration', () => {
    // github.owner/repo is the minimal required craft configuration.
    const data = { github: { owner: 'getsentry', repo: 'craft' } };

    // Validation returns the (unchanged) input on success.
    expect(validateConfiguration(data)).toEqual(data);
  });

  test('fails with bad configuration', () => {
    expect(() => validateConfiguration({ zoom: 1 }))
      .toThrowErrorMatchingInlineSnapshot(`
      "Cannot parse configuration file:
      data should NOT have additional properties"
    `);
  });
});

describe('getProjectConfigSchema', () => {
  test('returns non-empty object', () => {
    const projectConfigSchema = getProjectConfigSchema();

    // Sanity-check the JSON-schema shape without pinning its full contents.
    expect(projectConfigSchema).toHaveProperty('title');
    expect(projectConfigSchema).toHaveProperty('properties');
  });
});
62 |
--------------------------------------------------------------------------------
/src/__tests__/index.test.ts:
--------------------------------------------------------------------------------
1 | test('it works', () => {
2 | expect(true).toBeTruthy();
3 | });
4 |
--------------------------------------------------------------------------------
/src/artifact_providers/__tests__/base.test.ts:
--------------------------------------------------------------------------------
1 | import { parseFilterOptions, RawFilterOptions } from '../base';
2 |
3 | describe('parseFilterOptions', () => {
4 | test('empty object', () => {
5 | const rawFilters: RawFilterOptions = {};
6 | const parsedFilters = parseFilterOptions(rawFilters);
7 | expect(parsedFilters).not.toHaveProperty('includeNames');
8 | expect(parsedFilters).not.toHaveProperty('excludeNames');
9 | });
10 |
11 | test.each([
12 | [undefined, undefined],
13 | [undefined, '/exclude/'],
14 | [undefined, /exclude/],
15 | ['/include/', undefined],
16 | [/include/, undefined],
17 | ['/include/', '/exclude/'],
18 | ['/include/', /exclude/],
19 | [/include/, '/exclude/'],
20 | [/include/, /exclude/],
21 | ])(
22 | 'undefined, string and regexp properties',
23 | (includeNames, excludeNames) => {
24 | const rawFilters: RawFilterOptions = {
25 | includeNames: includeNames,
26 | excludeNames: excludeNames,
27 | };
28 | const parsedFilters = parseFilterOptions(rawFilters);
29 |
30 | expect(parsedFilters.includeNames).toStrictEqual(
31 | includeNames && /include/
32 | );
33 |
34 | expect(parsedFilters.excludeNames).toStrictEqual(
35 | excludeNames && /exclude/
36 | );
37 | }
38 | );
39 | });
40 |
--------------------------------------------------------------------------------
/src/artifact_providers/gcs.ts:
--------------------------------------------------------------------------------
1 | import {
2 | BaseArtifactProvider,
3 | RemoteArtifact,
4 | ArtifactProviderConfig,
5 | } from '../artifact_providers/base';
6 | import { CraftGCSClient, getGCSCredsFromEnv } from '../utils/gcsApi';
7 | import { ConfigurationError } from '../utils/errors';
8 |
// TODO (kmclb) this type should be generated by the schema validator; once it
// is, can move to using it as the type for config passed to constructor
/** Necessary config for GCS artifact provider */
export interface GCSArtifactProviderConfig extends ArtifactProviderConfig {
  /** Bucket name */
  bucket: string;
}

/**
 * Google Cloud Storage artifact provider
 */
export class GCSArtifactProvider extends BaseArtifactProvider {
  /** Client for interacting with the GCS bucket */
  private readonly gcsClient: CraftGCSClient;

  /**
   * Reads GCS credentials from the environment and constructs the client.
   *
   * @param config Provider config; must include a `bucket` name.
   * @throws ConfigurationError If no bucket is configured.
   */
  public constructor(config: ArtifactProviderConfig) {
    super(config);
    // Credentials may be supplied inline (JSON) or via a key-file path.
    const creds = getGCSCredsFromEnv(
      {
        name: 'CRAFT_GCS_STORE_CREDS_JSON',
      },
      {
        name: 'CRAFT_GCS_STORE_CREDS_PATH',
      }
    );

    // TODO (kmclb) get rid of this check once config validation is working
    if (!config.bucket) {
      throw new ConfigurationError(
        'No GCS bucket provided in artifact provider config!'
      );
    }

    this.gcsClient = new CraftGCSClient({
      bucketName: config.bucket,
      // creds may be undefined when the environment supplies nothing;
      // presumably the client then falls back to default auth — TODO confirm.
      credentials: creds?.credentials,
      projectId: creds?.project_id,
    });
  }

  /**
   * @inheritDoc
   */
  // NOTE(review): return type "Promise" is missing its type argument —
  // appears stripped by extraction (likely Promise<string>); verify upstream.
  protected async doDownloadArtifact(
    artifact: RemoteArtifact,
    downloadDirectory: string
  ): Promise {
    return this.gcsClient.downloadArtifact(
      artifact.storedFile.downloadFilepath,
      downloadDirectory
    );
  }

  /**
   * @inheritDoc
   */
  protected async doListArtifactsForRevision(
    revision: string
  ): Promise {
    // Artifacts are looked up by repo owner/name plus revision.
    const { repoName, repoOwner } = this.config;
    return this.gcsClient.listArtifactsForRevision(
      repoOwner,
      repoName,
      revision
    );
  }
}
76 |
77 | // TODO (kmclb) add support for a directory structure on the artifact provider
78 | // (make desired path into filename when uploading to provider?)
79 |
--------------------------------------------------------------------------------
/src/artifact_providers/none.ts:
--------------------------------------------------------------------------------
1 | import {
2 | BaseArtifactProvider,
3 | RemoteArtifact,
4 | ArtifactProviderConfig,
5 | } from '../artifact_providers/base';
6 |
7 | /**
8 | * Empty artifact provider that does nothing.
9 | */
10 | export class NoneArtifactProvider extends BaseArtifactProvider {
11 | public constructor(
12 | config: ArtifactProviderConfig = {
13 | repoName: 'none',
14 | repoOwner: 'none',
15 | name: 'none',
16 | }
17 | ) {
18 | super(config);
19 | }
20 | /**
21 | * Empty provider cannot download any files.
22 | *
23 | * @returns A promise rejection with an error message
24 | */
25 | protected async doDownloadArtifact(
26 | _artifact: RemoteArtifact,
27 | _downloadDirectory: string
28 | ): Promise {
29 | return Promise.reject(
30 | new Error('NoneProvider does not suuport file downloads!')
31 | );
32 | }
33 |
34 | /**
35 | * Empty provider does not have any artifacts.
36 | *
37 | * @returns An empty array
38 | */
39 | protected async doListArtifactsForRevision(
40 | _revision: string
41 | ): Promise {
42 | return [];
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/commands/__tests__/prepare.test.ts:
--------------------------------------------------------------------------------
1 | import { join as pathJoin } from 'path';
2 | import { spawnProcess } from '../../utils/system';
3 | import { runPreReleaseCommand, checkVersionOrPart } from '../prepare';
4 |
5 | jest.mock('../../utils/system');
6 |
describe('runPreReleaseCommand', () => {
  const oldVersion = '2.3.3';
  const newVersion = '2.3.4';
  const mockedSpawnProcess = spawnProcess as jest.Mock;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  test('runs with default command', async () => {
    expect.assertions(1);

    await runPreReleaseCommand(oldVersion, newVersion);

    // Default: bash runs scripts/bump-version.sh with both versions passed
    // as positional arguments and exported in the environment.
    expect(mockedSpawnProcess).toBeCalledWith(
      '/bin/bash',
      [pathJoin('scripts', 'bump-version.sh'), oldVersion, newVersion],
      {
        env: {
          ...process.env,
          CRAFT_NEW_VERSION: newVersion,
          CRAFT_OLD_VERSION: oldVersion,
        },
      }
    );
  });

  test('runs with custom command', async () => {
    expect.assertions(1);

    await runPreReleaseCommand(
      oldVersion,
      newVersion,
      'python ./increase_version.py "argument 1"'
    );

    // Custom commands are shell-split (quoted args preserved); the versions
    // are appended after the user-provided arguments.
    expect(mockedSpawnProcess).toBeCalledWith(
      'python',
      ['./increase_version.py', 'argument 1', oldVersion, newVersion],
      {
        env: {
          ...process.env,
          CRAFT_NEW_VERSION: newVersion,
          CRAFT_OLD_VERSION: oldVersion,
        },
      }
    );
  });
});

describe('checkVersionOrPart', () => {
  test('return true for valid version', () => {
    const validVersions = [
      '2.3.3',
      '0.0.1',
    ];
    for (const v of validVersions) {
      expect(
        checkVersionOrPart({
          newVersion: v,
        }, null)
      ).toBe(true);
    }
  });

  test('throw an error for invalid version', () => {
    // Each case pairs an invalid version (v) with the expected error text (e).
    const invalidVersions = [
      { v: 'invalid-2.3.3', e: 'Invalid version or version part specified: "invalid-2.3.3"' },
      { v: 'v2.3.3', e: 'Invalid version or version part specified: "v2.3.3". Removing the "v" prefix will likely fix the issue' },
      { v: 'major', e: 'Version part is not supported yet' },
      { v: 'minor', e: 'Version part is not supported yet' },
      { v: 'patch', e: 'Version part is not supported yet' },
    ];
    for (const t of invalidVersions) {
      const fn = () => {
        checkVersionOrPart({
          newVersion: t.v,
        }, null);
      };
      expect(fn).toThrow(t.e);
    }
  });
});
90 |
--------------------------------------------------------------------------------
/src/commands/__tests__/publish.test.ts:
--------------------------------------------------------------------------------
1 | import { join as pathJoin } from 'path';
2 | import { spawnProcess, hasExecutable } from '../../utils/system';
3 | import { runPostReleaseCommand } from '../publish';
4 |
5 | jest.mock('../../utils/system');
6 |
describe('runPostReleaseCommand', () => {
  const newVersion = '2.3.4';
  const mockedSpawnProcess = spawnProcess as jest.Mock;
  const mockedHasExecutable = hasExecutable as jest.Mock;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  describe('default script', () => {
    test('runs when script exists', async () => {
      mockedHasExecutable.mockReturnValue(true);
      expect.assertions(1);

      await runPostReleaseCommand(newVersion);

      // Default: bash runs scripts/post-release.sh; no old version is known
      // here, hence the empty-string argument and env value.
      expect(mockedSpawnProcess).toBeCalledWith(
        '/bin/bash',
        [pathJoin('scripts', 'post-release.sh'), '', newVersion],
        {
          env: {
            ...process.env,
            CRAFT_NEW_VERSION: newVersion,
            CRAFT_OLD_VERSION: '',
          },
        }
      );
    });

    test('skips when script does not exist', async () => {
      mockedHasExecutable.mockReturnValue(false);
      expect.assertions(1);

      await runPostReleaseCommand(newVersion);

      // Missing default script is not an error — the step is skipped.
      expect(mockedSpawnProcess).not.toBeCalled();
    });
  });

  test('runs with custom command', async () => {
    expect.assertions(1);

    await runPostReleaseCommand(
      newVersion,
      'python ./increase_version.py "argument 1"'
    );

    // Custom commands are shell-split; old ('') and new versions are
    // appended as positional arguments.
    expect(mockedSpawnProcess).toBeCalledWith(
      'python',
      ['./increase_version.py', 'argument 1', '', newVersion],
      {
        env: {
          ...process.env,
          CRAFT_NEW_VERSION: newVersion,
          CRAFT_OLD_VERSION: '',
        },
      }
    );
  });
});
67 |
--------------------------------------------------------------------------------
/src/commands/artifacts.ts:
--------------------------------------------------------------------------------
1 | import { Argv, CommandBuilder } from 'yargs';
2 |
3 | import * as download from './artifacts_cmds/download';
4 | import * as list from './artifacts_cmds/list';
5 |
// NOTE(review): "['artifacts ']" looks truncated — a yargs positional
// placeholder (e.g. "<command>") may have been lost in extraction; verify
// against the upstream file.
export const command = ['artifacts '];
export const aliases = ['a', 'artifact'];
export const description = '📦 Manage artifacts';

/**
 * Common options for `artifacts` commands
 */
export interface ArtifactsOptions {
  /** Git revision whose artifacts are being operated on */
  rev: string;
}

// Registers the shared --rev option and the list/download subcommands.
export const builder: CommandBuilder = (yargs: Argv) =>
  yargs
    .option('rev', {
      alias: 'r',
      description: 'Revision',
      type: 'string',
    })
    .demandCommand()
    .demandOption('rev', 'Please specify the revision')
    .command(list)
    .command(download);

// This dummy function is to please TypeScript
export const handler = (): void => {
  /* pass */
};
33 |
--------------------------------------------------------------------------------
/src/commands/artifacts_cmds/download.ts:
--------------------------------------------------------------------------------
1 | import { logger } from '../../logger';
2 | import { ArtifactsOptions } from '../artifacts';
3 | import type { RemoteArtifact } from '../../artifact_providers/base';
4 | import { getArtifactProviderFromConfig } from '../../config';
5 | import { handleGlobalError, ConfigurationError } from '../../utils/errors';
6 | import { Argv, CommandBuilder } from 'yargs';
7 | import { resolve } from 'path';
8 | import { existsSync, lstatSync } from 'fs';
9 | import mkdirp = require('mkdirp');
10 | import { NoneArtifactProvider } from '../../artifact_providers/none';
11 |
export const command = ['download [NAME..]'];
export const aliases = ['d', 'get'];
export const description = 'Download artifacts';
export const builder: CommandBuilder = (yargs: Argv) =>
  yargs
    .positional('NAME', {
      alias: 'names',
      description: 'Artifact name to download',
      type: 'string',
    })
    .array('NAME')
    .option('all', {
      alias: 'a',
      default: false,
      description: 'Download all artifacts',
      type: 'boolean',
    })
    .option('directory', {
      alias: 'd',
      description: 'Target directory',
      type: 'string',
    });

/** Options for "download" command */
interface ArtifactsDownloadOptions extends ArtifactsOptions {
  /** Artifact names requested on the command line */
  names: string[];
  /** Directory to download into; defaults to the current working directory */
  directory?: string;
  /** When true, download every artifact for the revision */
  all?: boolean;
}

/**
 * Read/process output directory from command line arguments
 *
 * Creates the directory when it does not exist; throws if the path exists
 * but is not a directory.
 *
 * @param argv Full path to the target directory
 */
// NOTE(review): return type "Promise" is missing its type argument —
// appears stripped by extraction (likely Promise<string>); verify upstream.
async function prepareOutputDirectory(
  argv: ArtifactsDownloadOptions
): Promise {
  if (argv.directory) {
    const fullPath = resolve(argv.directory);
    if (existsSync(fullPath)) {
      if (lstatSync(fullPath).isDirectory()) {
        return fullPath;
      } else {
        throw new ConfigurationError(`Not a directory: ${fullPath}`);
      }
    } else {
      logger.debug(`Creating directory: ${fullPath}`);
      await mkdirp(fullPath);
      return fullPath;
    }
  } else {
    // No --directory given: download into the current working directory.
    return resolve(process.cwd());
  }
}

/**
 * Body of 'artifacts download' command
 */
async function handlerMain(argv: ArtifactsDownloadOptions): Promise {
  if (!argv.all && argv.names.length === 0) {
    throw new ConfigurationError('No names to download, exiting.');
  }

  const revision = argv.rev;

  const artifactProvider = await getArtifactProviderFromConfig();
  if (artifactProvider instanceof NoneArtifactProvider) {
    logger.warn(
      `Artifact provider is disabled in the configuration, nothing to do.`
    );
    return undefined;
  }

  const outputDirectory = await prepareOutputDirectory(argv);

  const artifacts = await artifactProvider.listArtifactsForRevision(revision);
  if (artifacts.length === 0) {
    logger.info(`No artifacts found for revision ${revision}`);
    return undefined;
  }

  // With --all, every artifact name for the revision is downloaded;
  // otherwise only the names given on the command line.
  const filesToDownload = argv.all
    ? artifacts.map(ar => ar.filename)
    : argv.names;
  // Index artifacts by filename for constant-time lookup below.
  const nameToArtifact = artifacts.reduce((dict, artifact) => {
    dict[artifact.filename] = artifact;
    return dict;
  }, {} as { [index: string]: RemoteArtifact });

  logger.info(`Fetching artifacts for revision: ${revision}`);
  for (const name of filesToDownload) {
    logger.info(`Artifact to fetch: "${name}"`);
    const filteredArtifact = nameToArtifact[name];
    if (!filteredArtifact) {
      // Unknown names are warned about and skipped, not fatal.
      logger.warn(`Artifact "${name}" was not found`);
      continue;
    }

    const artifactPath = await artifactProvider.downloadArtifact(
      filteredArtifact,
      outputDirectory
    );
    logger.info(`Saved artifact to: ${artifactPath}`);
  }
}

/**
 * Main command handler
 */
export const handler = async (args: {
  [argName: string]: any;
}): Promise => {
  try {
    return await handlerMain(args as ArtifactsDownloadOptions);
  } catch (e) {
    handleGlobalError(e);
  }
};
131 |
--------------------------------------------------------------------------------
/src/commands/artifacts_cmds/list.ts:
--------------------------------------------------------------------------------
1 | import { logger, formatTable } from '../../logger';
2 | import { ArtifactsOptions } from '../artifacts';
3 | import { getArtifactProviderFromConfig } from '../../config';
4 | import { handleGlobalError } from '../../utils/errors';
5 | import { formatSize } from '../../utils/strings';
6 | import { NoneArtifactProvider } from '../../artifact_providers/none';
7 |
export const command = ['list'];
export const aliases = ['l'];
export const description = 'List artifacts';

/**
 * Body of 'artifacts list' command
 *
 * Prints a table (name, size, last-updated) of all artifacts stored for the
 * requested revision.
 */
// NOTE(review): return type "Promise" is missing its type argument —
// appears stripped by extraction; verify against upstream.
async function handlerMain(argv: ArtifactsOptions): Promise {
  const revision = argv.rev;

  const artifactProvider = await getArtifactProviderFromConfig();
  if (artifactProvider instanceof NoneArtifactProvider) {
    logger.warn(
      `Artifact provider is disabled in the configuration, nothing to do.`
    );
    return undefined;
  }

  const artifacts = await artifactProvider.listArtifactsForRevision(revision);

  if (artifacts.length === 0) {
    logger.info(`No artifacts found for revision ${revision}`);
    return undefined;
  }

  // One table row per artifact: name, human-readable size, update timestamp.
  const artifactData = artifacts.map(ar => [
    ar.filename,
    formatSize(ar.storedFile.size),
    ar.storedFile.lastUpdated || '',
  ]);

  const table = formatTable(
    {
      head: ['File Name', 'Size', 'Updated'],
      style: { head: ['cyan'] },
    },
    artifactData
  );
  logger.info(
    `Available artifacts for revision ${revision}: \n${table.toString()}\n`
  );

  return argv.rev;
}

/** Main command handler */
export const handler = async (args: {
  [argName: string]: any;
}): Promise => {
  try {
    return await handlerMain(args as ArtifactsOptions);
  } catch (e) {
    handleGlobalError(e);
  }
};
62 | };
63 |
--------------------------------------------------------------------------------
/src/commands/config.ts:
--------------------------------------------------------------------------------
1 | import { getConfiguration, getGlobalGitHubConfig } from '../config';
2 | import { formatJson } from '../utils/strings';
3 |
export const command = ['config'];
export const description =
  'Print the parsed, processed, and validated Craft config for the current project in pretty-JSON.';

/**
 * Prints the effective configuration, with the resolved global GitHub
 * settings merged in, as pretty-printed JSON on stdout.
 */
// NOTE(review): return type "Promise" is missing its type argument (likely
// Promise<void>) — appears stripped by extraction; verify upstream.
export async function handler(): Promise {
  const github = await getGlobalGitHubConfig();
  const config = {
    ...getConfiguration(),
    github,
  };
  console.log(formatJson(config));
}
16 |
--------------------------------------------------------------------------------
/src/commands/targets.ts:
--------------------------------------------------------------------------------
1 | import { getConfiguration } from '../config';
2 | import { formatJson } from '../utils/strings';
3 | import { getAllTargetNames } from '../targets';
4 | import { BaseTarget } from '../targets/base';
5 |
6 | export const command = ['targets'];
7 | export const description = 'List defined targets as JSON array';
8 |
9 | export function handler(): any {
10 | const definedTargets = getConfiguration().targets || [];
11 | const possibleTargetNames = new Set(getAllTargetNames());
12 | const allowedTargetNames = definedTargets
13 | .filter(target => target.name && possibleTargetNames.has(target.name))
14 | .map(BaseTarget.getId);
15 |
16 | console.log(formatJson(allowedTargetNames));
17 | }
18 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import isCI from 'is-ci';
3 | import yargs from 'yargs';
4 |
5 | import { logger, LogLevel } from './logger';
6 | import { readEnvironmentConfig } from './utils/env';
7 | import { envToBool, setGlobals } from './utils/helpers';
8 | import { initSentrySdk } from './utils/sentry';
9 | import { getPackageVersion } from './utils/version';
10 |
11 | // Commands
12 | import * as prepare from './commands/prepare';
13 | import * as publish from './commands/publish';
14 | import * as targets from './commands/targets';
15 | import * as config from './commands/config';
16 | import * as artifacts from './commands/artifacts';
17 |
18 | function printVersion(): void {
19 | if (!process.argv.includes('-v') && !process.argv.includes('--version')) {
20 | // Print the current version
21 | logger.debug(`craft ${getPackageVersion()}`);
22 | }
23 | }
24 |
// Flags that behave like booleans but are declared loosely so they can also
// be set from environment variables and overridden with `--flag=no`; see
// fixGlobalBooleanFlags for the companion argv pre-processing.
const GLOBAL_BOOLEAN_FLAGS = {
  'no-input': {
    coerce: envToBool,
    // Default to non-interactive when running on CI.
    default: isCI,
    describe: 'Suppresses all user prompts',
    global: true,
  },
  'dry-run': {
    coerce: envToBool,
    // TODO(byk): Deprecate this in favor of CRAFT_DRY_RUN
    default: process.env.DRY_RUN,
    global: true,
    describe: 'Dry run mode: do not perform any real actions',
  },
};
40 |
41 | /**
42 | * This function is to pre-process and fix one of yargs' shortcomings:
43 | * We want to use some flags as booleans: just their existence on the CLI should
44 | * set them to true. That said since we also allow setting them through
45 | * environment variables, we need to parse many string values that would set
46 | * them to false. Moreover, we want to be able to override already-set env
47 | * variables with the `--flag=no` kind of notation (using the `=` symbol) but
48 | * not via the positional argument notation (`--flag no`). The only way to do
49 | * this is to define them as string arguments and then _inject_ a truthy string
50 | * if we notice the flag is passed standalone (ie `--flag`).
51 | * @param argv The raw process.argv array
52 | * @returns The processed, injected version of the argv array to pass to yargs
53 | */
54 | function fixGlobalBooleanFlags(argv: string[]): string[] {
55 | const result = [];
56 | for (const arg of argv) {
57 | result.push(arg);
58 | if (arg.slice(2) in GLOBAL_BOOLEAN_FLAGS) {
59 | result.push('1');
60 | }
61 | }
62 | return result;
63 | }
64 |
/**
 * Main entrypoint
 */
function main(): void {
  printVersion();

  // Load extra environment configuration before anything else reads it.
  readEnvironmentConfig();

  initSentrySdk();

  // Pre-process argv so standalone boolean-ish flags parse correctly.
  const argv = fixGlobalBooleanFlags(process.argv.slice(2));

  yargs
    .parserConfiguration({
      // "no-" prefixed negation is handled by our own flag machinery
      // (see GLOBAL_BOOLEAN_FLAGS), so disable yargs' built-in handling.
      'boolean-negation': false,
    })
    // Allow every option to be provided via CRAFT_* environment variables.
    .env('CRAFT')
    .command(prepare)
    .command(publish)
    .command(targets)
    .command(config)
    .command(artifacts)
    .demandCommand()
    .version(getPackageVersion())
    .alias('v', 'version')
    .help()
    .alias('h', 'help')
    .options(GLOBAL_BOOLEAN_FLAGS)
    .option('log-level', {
      default: 'Info',
      // Only the string names of the LogLevel enum (drop numeric keys).
      choices: Object.keys(LogLevel).filter(level => isNaN(Number(level))),
      // Accept any capitalization from the user (info, INFO, Info, ...).
      coerce: level => level[0].toUpperCase() + level.slice(1).toLowerCase(),
      describe: 'Logging level',
      global: true,
    })
    .strictCommands()
    .showHelpOnFail(true)
    .middleware(setGlobals)
    .parse(argv);
}

main();
107 |
--------------------------------------------------------------------------------
/src/logger.ts:
--------------------------------------------------------------------------------
1 | import { addBreadcrumb, Severity } from '@sentry/node';
2 | import Table from 'cli-table';
3 | import consola, {
4 | BasicReporter,
5 | Consola,
6 | ConsolaReporterLogObject,
7 | LogLevel,
8 | } from 'consola';
9 |
/**
 * Format a list as a table
 *
 * @param options Options that are passed to cli-table constructor
 * @param values A list (of lists) of values
 */
// NOTE(review): "Record" is missing its type arguments (likely
// Record<string, any>) — appears stripped by extraction; verify upstream.
export function formatTable(
  options: Record,
  values: any[]
): string {
  const table = new Table(options);
  table.push(...values);
  return table.toString();
}
24 |
/** Reporter that sends logs to Sentry */
class SentryBreadcrumbReporter extends BasicReporter {
  /** Forwards every consola log record to Sentry as a breadcrumb. */
  public log(logObj: ConsolaReporterLogObject) {
    const breadcrumb = {
      message: this.formatLogObj(logObj),
      // Consola log types are cast to Sentry severities by name — assumes
      // every consola type is a valid Severity value; TODO confirm.
      level: logObj.type as Severity,
    };
    addBreadcrumb(breadcrumb);
  }
}
35 |
export { LogLevel as LogLevel };
// Every logger created through createLogger/withScope is tracked here so
// setLevel can update and resume all of them at once.
const loggers: Consola[] = [];
/** Creates a tagged consola instance wired up for Sentry breadcrumbs. */
function createLogger(tag?: string) {
  const loggerInstance = consola.withDefaults({ tag });
  loggerInstance.addReporter(new SentryBreadcrumbReporter());
  // withScope produces a fresh tagged logger rather than a consola child.
  loggerInstance.withScope = createLogger;
  loggers.push(loggerInstance);
  return loggerInstance;
}

export const logger = createLogger();
// Pause until we set the logging level from helpers#setGlobals
// This allows us to enqueue debug logging even before we set the
// logging level. These are flushed as soon as we run `logger.resume()`.
logger.pause();
51 |
52 | export function setLevel(logLevel: LogLevel): void {
53 | consola.level = logLevel;
54 | for (const loggerInstance of loggers) {
55 | loggerInstance.level = logLevel;
56 | loggerInstance.resume();
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/src/schemas/projectConfig.schema.ts:
--------------------------------------------------------------------------------
/**
 * We store JSON-schema for project configuration in the TS file, so it is
 * properly seen and copied by TS-compiler.
 */

const projectConfigJsonSchema = {
  title: 'CraftProjectConfig',
  description: 'Craft project-specific configuration',
  type: 'object',
  properties: {
    github: {
      title: 'GitHubGlobalConfig',
      description: 'Global (non-target!) GitHub configuration for the project',
      type: 'object',
      properties: {
        owner: {
          type: 'string',
        },
        repo: {
          type: 'string',
        },
        // TODO(byk): This is now obsolete, only in-place to keep bw compat
        // deprecate and remove?
        projectPath: {
          type: 'string',
        },
      },
      additionalProperties: false,
      required: ['owner', 'repo'],
    },
    targets: {
      type: 'array',
      items: { $ref: '#/definitions/targetConfig' },
    },
    preReleaseCommand: { type: 'string' },
    postReleaseCommand: { type: 'string' },
    releaseBranchPrefix: { type: 'string' },
    changelog: { type: 'string' },
    changelogPolicy: {
      title: 'ChangelogPolicy',
      description: 'Different policies for changelog management',
      type: 'string',
      enum: ['auto', 'simple', 'none'],
      // `tsEnumNames` is a json-schema-to-typescript extension that names the
      // members of the generated TS enum (see src/schemas/project_config.ts).
      tsEnumNames: ['Auto', 'Simple', 'None'],
    },
    minVersion: {
      // Semver-like: major.minor.patch plus an arbitrary suffix.
      type: 'string',
      pattern: '^\\d+\\.\\d+\\.\\d+.*$',
    },
    requireNames: {
      type: 'array',
      items: { type: 'string' },
    },
    statusProvider: {
      title: 'BaseStatusProvider',
      description: 'Which service should be used for status checks',
      type: 'object',
      properties: {
        name: {
          title: 'StatusProviderName',
          description: 'Name of the status provider',
          type: 'string',
          enum: ['github'],
          tsEnumNames: ['GitHub'],
        },
        config: {
          type: 'object',
        },
      },
      additionalProperties: false,
      required: ['name'],
    },
    artifactProvider: {
      title: 'BaseArtifactProvider',
      description: 'Which service should be used for artifact storage',
      type: 'object',
      properties: {
        name: {
          title: 'ArtifactProviderName',
          description: 'Name of the artifact provider',
          type: 'string',
          enum: ['gcs', 'github', 'none'],
          tsEnumNames: ['GCS', 'GitHub', 'None'],
        },
        config: {
          type: 'object',
        },
      },
      additionalProperties: false,
      required: ['name'],
    },
  },
  additionalProperties: false,

  definitions: {
    targetConfig: {
      title: 'TargetConfig',
      description: 'Generic target configuration',
      type: 'object',
      properties: {
        name: {
          type: 'string',
        },
        id: {
          type: 'string',
        },
        includeNames: {
          type: 'string',
        },
        excludeNames: {
          type: 'string',
        },
      },
      required: ['name'],
    },

    /**
     * FIXME: these definitions are NOT used at the moment.
     * Reason: referencing (extending) targetConfig definition results into
     * duplicated TargetConfig interfaces in the TS file.
     *
     * e.g.
     *
     * interface GitHubTargetConfig extends TargetConfig {}
     *
     * and
     *
     * interface NpmTargetConfig extends TargetConfig1 {}
     *
     * ...where TargetConfig and TargetConfig1 have the same definition.
     *
     * Related GitHub tickets:
     * https://github.com/bcherny/json-schema-to-typescript/issues/142
     * https://github.com/bcherny/json-schema-to-typescript/issues/56
     * https://github.com/bcherny/json-schema-to-typescript/issues/132
     *
     */
    githubConfig: {
      title: 'GitHubTargetConfig',
      description: 'Configuration options for the GitHub target',
      extends: { $ref: '#/definitions/targetConfig' },
      properties: {
        changelog: {
          type: 'string',
        },
        name: { type: 'string', enum: ['github'] },
      },
      required: ['name'],
      additionalProperties: false,
    },
    npmConfig: {
      title: 'NpmTargetConfig',
      description: 'Configuration options for the NPM target',
      extends: { $ref: '#/definitions/targetConfig' },
      properties: {
        access: {
          type: 'string',
        },
      },
      additionalProperties: false,
    },
    cratesConfig: {
      title: 'CratesTargetConfig',
      description: 'Configuration options for the Crates target',
      extends: { $ref: '#/definitions/targetConfig' },
      properties: {
        noDevDeps: {
          type: 'boolean',
        },
      },
      additionalProperties: false,
    },
  },
};

// Exported via CommonJS so plain Node scripts can require() the schema.
module.exports = projectConfigJsonSchema;
177 |
--------------------------------------------------------------------------------
/src/schemas/project_config.ts:
--------------------------------------------------------------------------------
/**
 * This file was automatically generated by json-schema-to-typescript.
 * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file,
 * and run json-schema-to-typescript to regenerate this file.
 */

// NOTE(review): the source schema is src/schemas/projectConfig.schema.ts;
// make changes there and regenerate rather than editing these types.

/**
 * Craft project-specific configuration
 */
export interface CraftProjectConfig {
  github?: GitHubGlobalConfig;
  targets?: TargetConfig[];
  preReleaseCommand?: string;
  postReleaseCommand?: string;
  releaseBranchPrefix?: string;
  changelog?: string;
  changelogPolicy?: ChangelogPolicy;
  minVersion?: string;
  requireNames?: string[];
  statusProvider?: BaseStatusProvider;
  artifactProvider?: BaseArtifactProvider;
}
/**
 * Global (non-target!) GitHub configuration for the project
 */
export interface GitHubGlobalConfig {
  owner: string;
  repo: string;
  projectPath?: string;
}
/**
 * Generic target configuration
 */
export interface TargetConfig {
  name: string;
  id?: string;
  includeNames?: string;
  excludeNames?: string;
  [k: string]: any;
}
/**
 * Which service should be used for status checks
 */
export interface BaseStatusProvider {
  name: StatusProviderName;
  config?: {
    [k: string]: any;
  };
}
/**
 * Which service should be used for artifact storage
 */
export interface BaseArtifactProvider {
  name: ArtifactProviderName;
  config?: {
    [k: string]: any;
  };
}

// NOTE(review): `const enum` members are inlined at compile time and are not
// supported by isolatedModules/babel-only transpilation — confirm the build
// pipeline handles them before reusing these enums elsewhere.
/**
 * Different policies for changelog management
 */
export const enum ChangelogPolicy {
  Auto = 'auto',
  Simple = 'simple',
  None = 'none',
}
/**
 * Name of the status provider
 */
export const enum StatusProviderName {
  GitHub = 'github',
}
/**
 * Name of the artifact provider
 */
export const enum ArtifactProviderName {
  GCS = 'gcs',
  GitHub = 'github',
  None = 'none',
}
82 |
--------------------------------------------------------------------------------
/src/status_providers/base.ts:
--------------------------------------------------------------------------------
1 | import ora from 'ora';
2 |
3 | import { sleep } from '../utils/async';
4 |
5 | import { reportError } from '../utils/errors';
6 |
7 | import { logger as loggerRaw } from '../logger';
8 | import { GitHubGlobalConfig } from 'src/schemas/project_config';
9 |
/** Milliseconds per second; the intervals below are expressed in ms. */
const MILLISECONDS = 1000;
/** Max time (in milliseconds) to wait for the build to finish: one hour */
const BUILD_STATUS_POLLING_MAX = 60 * 60 * MILLISECONDS;

/** Interval (in milliseconds) between polling attempts: 30 seconds */
const BUILD_POLLING_INTERVAL = 30 * MILLISECONDS;
16 |
/**
 * Allowed commit statuses that status providers may report
 */
export enum CommitStatus {
  /** Commit is still being tested/checked/etc. */
  PENDING = 'pending',
  /** All required commit checks have passed successfully */
  SUCCESS = 'success',
  /** One or more commit checks failed */
  FAILURE = 'failure',
  /** Commit could not be found (e.g. not propagated to the provider yet) */
  NOT_FOUND = 'not_found',
}
30 |
/** Status provider configuration: a provider name plus free-form options */
export interface StatusProviderConfig {
  /** Provider name (e.g. "github"); also used to scope the provider logger */
  name: string;

  /** Other, provider-specific config options */
  [key: string]: any;
}
37 |
/** Repository information */
// Intentionally empty here: presumably concrete providers narrow this with
// their own fields — TODO confirm against the provider implementations.
// eslint-disable-next-line @typescript-eslint/no-empty-interface
export interface RepositoryInfo {}
41 |
42 | /**
43 | * Base class for commit status providers
44 | */
45 | export abstract class BaseStatusProvider {
46 | protected readonly logger: typeof loggerRaw;
47 |
48 | public constructor(
49 | public readonly config: StatusProviderConfig,
50 | public readonly githubConfig: GitHubGlobalConfig
51 | ) {
52 | this.logger = loggerRaw.withScope(`[status-provider/${config.name}]`);
53 | }
54 | /**
55 | * Gets a status for the given revision
56 | *
57 | * @param revision Revision SHA
58 | */
59 | public abstract getRevisionStatus(revision: string): Promise;
60 |
61 | /**
62 | * Gets repository information (as seen by the provider)
63 | */
64 | public abstract getRepositoryInfo(): Promise;
65 |
66 | /**
67 | * Waits for the builds to finish for the revision
68 | *
69 | * @param revision Git revision SHA
70 | */
71 | public async waitForTheBuildToSucceed(revision: string): Promise {
72 | // Status spinner
73 | const spinner = ora();
74 | const startTime = Date.now();
75 | let firstIteration = true;
76 |
77 | while (true) {
78 | const status = await this.getRevisionStatus(revision);
79 | this.logger.debug(`Got status "${status}" for revision ${revision}`);
80 |
81 | if (status === CommitStatus.SUCCESS) {
82 | if (spinner.isSpinning) {
83 | spinner.succeed();
84 | }
85 | this.logger.info(`Revision ${revision} has been built successfully.`);
86 | return;
87 | } else if (status === CommitStatus.FAILURE) {
88 | if (spinner.isSpinning) {
89 | spinner.fail();
90 | }
91 | reportError(
92 | `Build(s) for revision ${revision} have not succeeded. Please check the revision's status.`
93 | );
94 | return;
95 | } else if (firstIteration) {
96 | this.logger.info(
97 | status === CommitStatus.NOT_FOUND
98 | ? `Revision ${revision} has not been found, waiting for a bit.`
99 | : `Revision ${revision} has been found.`
100 | );
101 | }
102 |
103 | if (Date.now() - startTime > BUILD_STATUS_POLLING_MAX) {
104 | throw new Error(
105 | `Waited for more than ${BUILD_STATUS_POLLING_MAX} seconds for the build to finish. Aborting.`
106 | );
107 | }
108 |
109 | firstIteration = false;
110 |
111 | // Format as "YYYY-MM-DD hh:mm:ss"
112 | const timeString = new Date()
113 | .toISOString()
114 | .replace(/T/, ' ')
115 | .replace(/\..+/, '');
116 | // Update the spinner
117 | const waitMessage = `[${timeString}] Waiting for CI builds, next check in ${
118 | BUILD_POLLING_INTERVAL / 1000
119 | } seconds...`;
120 | spinner.text = waitMessage;
121 | if (!spinner.isSpinning) {
122 | spinner.start();
123 | }
124 | await sleep(BUILD_POLLING_INTERVAL);
125 | }
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/src/targets/__tests__/commitOnGitRepository.test.ts:
--------------------------------------------------------------------------------
1 | import { pushArchiveToGitRepository } from '../commitOnGitRepository';
2 | import childProcess from 'child_process';
3 |
// Spy on execSync so the tar extraction can be stubbed per test.
const execSyncSpy = jest.spyOn(childProcess, 'execSync');

// Per-operation simple-git mocks, asserted on by the tests below. The `mock`
// name prefix lets jest reference them inside the hoisted jest.mock factory.
const mockClone = jest.fn();
const mockCheckout = jest.fn();
const mockRaw = jest.fn();
const mockCommit = jest.fn();
const mockAddTag = jest.fn();

// Replace simple-git's factory with one returning the mocked operations.
jest.mock('simple-git', () => () => ({
  clone: mockClone,
  checkout: mockCheckout,
  raw: mockRaw,
  commit: mockCommit,
  addTag: mockAddTag,
}));
19 |
test('Basic commit-on-git-repository functionality', async () => {
  // Stub the tar extraction; the exact command string is asserted below.
  execSyncSpy.mockImplementationOnce(() => {
    return Buffer.from('noop');
  });

  await pushArchiveToGitRepository({
    archivePath: '/tmp/my-archive.tgz',
    branch: 'main',
    createTag: true,
    repositoryUrl: 'https://github.com/getsentry/sentry-deno',
    stripComponents: 1,
    version: '1.2.3',
  });

  // Expected flow: clone -> checkout branch -> wipe working tree -> unpack
  // archive -> add everything -> commit -> tag -> force-push + push tags.
  expect(mockClone).toHaveBeenCalledWith(
    'https://github.com/getsentry/sentry-deno',
    expect.any(String)
  );
  expect(mockCheckout).toHaveBeenCalledWith('main');
  expect(mockRaw).toHaveBeenCalledWith('rm', '-r', '.');
  expect(execSyncSpy).toHaveBeenCalledWith(
    'tar -zxvf /tmp/my-archive.tgz --strip-components 1',
    expect.objectContaining({ cwd: expect.any(String) })
  );
  expect(mockRaw).toHaveBeenCalledWith('add', '--all');
  expect(mockCommit).toHaveBeenCalledWith('release: 1.2.3');
  // createTag: true must produce a version tag and a --tags push.
  expect(mockAddTag).toHaveBeenCalledWith('1.2.3');
  expect(mockRaw).toHaveBeenCalledWith(
    'push',
    'https://github.com/getsentry/sentry-deno',
    '--force'
  );
  expect(mockRaw).toHaveBeenCalledWith(
    'push',
    'https://github.com/getsentry/sentry-deno',
    '--tags'
  );
});
58 |
59 | describe('With authentication', () => {
60 | let oldToken: string | undefined;
61 |
62 | beforeEach(() => {
63 | oldToken = process.env['GITHUB_API_TOKEN'];
64 | });
65 |
66 | afterEach(() => {
67 | process.env['GITHUB_API_TOKEN'] = oldToken;
68 | });
69 |
70 | test('adds GitHub pat to repository url', async () => {
71 | execSyncSpy.mockImplementationOnce(() => {
72 | return Buffer.from('noop');
73 | });
74 |
75 | process.env['GITHUB_API_TOKEN'] = 'test-token';
76 |
77 | await pushArchiveToGitRepository({
78 | archivePath: '/tmp/my-archive.tgz',
79 | branch: 'main',
80 | createTag: true,
81 | repositoryUrl: 'https://github.com/getsentry/sentry-deno',
82 | stripComponents: 1,
83 | version: '1.2.3',
84 | });
85 |
86 | expect(mockClone).toHaveBeenCalledWith(
87 | 'https://test-token@github.com/getsentry/sentry-deno',
88 | expect.any(String)
89 | );
90 |
91 | expect(mockRaw).toHaveBeenCalledWith(
92 | 'push',
93 | 'https://test-token@github.com/getsentry/sentry-deno',
94 | '--force'
95 | );
96 |
97 | expect(mockRaw).toHaveBeenCalledWith(
98 | 'push',
99 | 'https://test-token@github.com/getsentry/sentry-deno',
100 | '--tags'
101 | );
102 | });
103 | });
104 |
--------------------------------------------------------------------------------
/src/targets/__tests__/crates.test.ts:
--------------------------------------------------------------------------------
1 | import { CrateDependency, CratePackage, CratesTarget } from '../crates';
2 | import { NoneArtifactProvider } from '../../artifact_providers/none';
3 |
4 | jest.mock('../../utils/system');
5 |
6 | function cratePackageFactory(name: string): CratePackage {
7 | return {
8 | dependencies: [],
9 | id: name,
10 | manifest_path: '',
11 | name,
12 | version: '1.0.0',
13 | publish: null,
14 | };
15 | }
16 |
17 | function cratePackageToDependency(cratePackage: CratePackage): CrateDependency {
18 | return {
19 | name: cratePackage.name,
20 | req: '1.0.0',
21 | kind: null,
22 | };
23 | }
24 |
25 | function makeDev(dependency: CrateDependency): CrateDependency {
26 | return {
27 | ...dependency,
28 | kind: 'dev',
29 | };
30 | }
31 |
describe('getPublishOrder', () => {
  // The token is read when the target is constructed; set it first.
  process.env.CRATES_IO_TOKEN = 'xxx';
  const target = new CratesTarget(
    {
      name: 'crates',
      noDevDeps: true,
    },
    new NoneArtifactProvider(),
    { owner: 'getsentry', repo: 'craft' }
  );

  test('sorts crate packages properly', () => {
    // p1 depends on p2 and p3; p3 depends on p4 -> dependencies first.
    const packages = ['p1', 'p2', 'p3', 'p4'].map(cratePackageFactory);
    const [p1, p2, p3, p4] = packages;
    p1.dependencies = [p2, p3].map(cratePackageToDependency);
    p3.dependencies = [p4].map(cratePackageToDependency);
    const sortedPackages = [p2, p4, p3, p1];

    expect(target.getPublishOrder(packages)).toEqual(sortedPackages);
  });

  test('does not fail on a single package', () => {
    const packages = [cratePackageFactory('p1')];
    expect(target.getPublishOrder(packages)).toEqual(packages);
  });

  test('errors on circular dependencies', () => {
    const packages = ['p1', 'p2'].map(cratePackageFactory);
    const [p1, p2] = packages;

    // p1 <-> p2 cycle: no valid publish order exists.
    p1.dependencies = [cratePackageToDependency(p2)];
    p2.dependencies = [cratePackageToDependency(p1)];

    expect(() => target.getPublishOrder(packages)).toThrowError(Error);
  });

  test('excludes dev dependencies', () => {
    const packages = ['p1', 'p2'].map(cratePackageFactory);
    const [p1, p2] = packages;

    // The cycle is only via a dev dependency; with noDevDeps it is ignored.
    p1.dependencies = [cratePackageToDependency(p2)];
    p2.dependencies = [makeDev(cratePackageToDependency(p1))];

    const sortedPackages = [p2, p1];
    expect(target.getPublishOrder(packages)).toEqual(sortedPackages);
  });
});
79 |
--------------------------------------------------------------------------------
/src/targets/__tests__/github.test.ts:
--------------------------------------------------------------------------------
1 | import { isLatestRelease } from '../github';
2 |
describe('isLatestRelease', () => {
  it('works with missing latest release', () => {
    // No previous release recorded -> the new version counts as latest.
    const latestRelease = undefined;
    const version = '1.2.3';

    const actual = isLatestRelease(latestRelease, version);
    expect(actual).toBe(true);
  });

  it('works with unparseable latest release', () => {
    // A non-semver tag cannot be compared -> defaults to latest.
    const latestRelease = { tag_name: 'foo' };
    const version = '1.2.3';

    const actual = isLatestRelease(latestRelease, version);
    expect(actual).toBe(true);
  });

  it('works with unparseable new version', () => {
    // A non-semver new version cannot be compared -> defaults to latest.
    const latestRelease = { tag_name: 'v1.0.0' };
    const version = 'foo';

    const actual = isLatestRelease(latestRelease, version);
    expect(actual).toBe(true);
  });

  describe('with v-prefix', () => {
    it('detects larger new version', () => {
      const latestRelease = { tag_name: 'v1.1.0' };
      const version = '1.2.0';

      const actual = isLatestRelease(latestRelease, version);
      expect(actual).toBe(true);
    });

    it('detects smaller new version', () => {
      const latestRelease = { tag_name: 'v1.1.0' };
      const version = '1.0.1';

      const actual = isLatestRelease(latestRelease, version);
      expect(actual).toBe(false);
    });
  });

  describe('without v-prefix', () => {
    it('detects larger new version', () => {
      const latestRelease = { tag_name: '1.1.0' };
      const version = '1.2.0';

      const actual = isLatestRelease(latestRelease, version);
      expect(actual).toBe(true);
    });

    it('detects smaller new version', () => {
      const latestRelease = { tag_name: '1.1.0' };
      const version = '1.0.1';

      const actual = isLatestRelease(latestRelease, version);
      expect(actual).toBe(false);
    });
  });
});
64 |
--------------------------------------------------------------------------------
/src/targets/__tests__/index.test.ts:
--------------------------------------------------------------------------------
1 | import { getAllTargetNames, getTargetByName } from '..';
2 | import { GitHubTarget } from '../github';
3 |
describe('getTargetByName', () => {
  test('converts target name to class', () => {
    expect(getTargetByName('github')).toBe(GitHubTarget);
  });
});

describe('getAllTargetNames', () => {
  test('retrieves all target names', () => {
    // Spot-check a single well-known target name from the registry.
    expect(getAllTargetNames()).toContain('github');
  });
});
15 |
--------------------------------------------------------------------------------
/src/targets/__tests__/mavenDiskIo.test.ts:
--------------------------------------------------------------------------------
1 | import { NoneArtifactProvider } from '../../artifact_providers/none';
2 | import {
3 | MavenTarget
4 | } from '../maven';
5 | import { withTempDir } from '../../utils/files';
6 | import { promises as fsPromises } from 'fs';
7 | import { join } from 'path';
8 |
9 | jest.mock('../../utils/gpg');
10 | jest.mock('../../utils/system');
11 |
12 | const DEFAULT_OPTION_VALUE = 'my_default_value';
13 |
14 | function getRequiredTargetConfig(): any {
15 | return {
16 | GPG_PASSPHRASE: DEFAULT_OPTION_VALUE,
17 | OSSRH_USERNAME: DEFAULT_OPTION_VALUE,
18 | OSSRH_PASSWORD: DEFAULT_OPTION_VALUE,
19 | mavenCliPath: DEFAULT_OPTION_VALUE,
20 | mavenSettingsPath: DEFAULT_OPTION_VALUE,
21 | mavenRepoId: DEFAULT_OPTION_VALUE,
22 | mavenRepoUrl: DEFAULT_OPTION_VALUE,
23 | android: false,
24 | kmp: false,
25 | };
26 | }
27 |
28 | function createMavenTarget(
29 | targetConfig?: Record
30 | ): MavenTarget {
31 | process.env.GPG_PRIVATE_KEY = DEFAULT_OPTION_VALUE;
32 | process.env.GPG_PASSPHRASE = DEFAULT_OPTION_VALUE;
33 | process.env.OSSRH_USERNAME = DEFAULT_OPTION_VALUE;
34 | process.env.OSSRH_PASSWORD = DEFAULT_OPTION_VALUE;
35 |
36 | const finalConfig = targetConfig ? targetConfig : getRequiredTargetConfig();
37 | const mergedConfig = {
38 | name: 'maven',
39 | ...finalConfig,
40 | };
41 | return new MavenTarget(mergedConfig, new NoneArtifactProvider());
42 | }
43 |
44 | describe('maven disk io', () => {
45 | test('fileExists', async () => {
46 | await withTempDir(async (tmpDir): Promise => {
47 | const target = createMavenTarget();
48 |
49 | expect(await target.fileExists('a/random/path')).toBe(false);
50 |
51 | // a folder should return false
52 | expect(await target.fileExists(tmpDir)).toBe(false);
53 |
54 | const file = join(tmpDir, 'module.json');
55 |
56 | // when the file doesn't exist it should return false
57 | expect(await target.fileExists(file)).toBe(false);
58 | await fsPromises.writeFile(file, 'abc');
59 |
60 | // once the file is written, it should exist
61 | expect(await target.fileExists(file)).toBe(true);
62 |
63 | });
64 | });
65 |
66 | test('fixModuleFileName', async () => {
67 | await withTempDir(async (tmpDir): Promise => {
68 | const target = createMavenTarget();
69 |
70 | const file = join(tmpDir, 'module.json');
71 | await fsPromises.writeFile(file, 'abc');
72 |
73 | const moduleFile = join(tmpDir, 'sentry-java-1.0.0.module');
74 | // when fix module is called with proper file names
75 | await target.fixModuleFileName(tmpDir, moduleFile);
76 |
77 | // it should rename the file
78 | expect(await target.fileExists(file)).toBe(false);
79 | expect(await target.fileExists(moduleFile)).toBe(true);
80 |
81 | });
82 | });
83 |
84 | test('fixModuleFileName no-op', async () => {
85 | await withTempDir(async (tmpDir): Promise => {
86 | const target = createMavenTarget();
87 |
88 | const file = join(tmpDir, 'sentry-java-1.0.0.module');
89 | await fsPromises.writeFile(file, 'abc');
90 |
91 | // when fix module is called, but the proper file already exists
92 | await target.fixModuleFileName(tmpDir, file);
93 |
94 | // it should still exist after calling fixModuleFileName
95 | expect(await target.fileExists(file)).toBe(true);
96 |
97 | });
98 | });
99 |
100 | test('fixModuleFileName non-existant-files', async () => {
101 | await withTempDir(async (tmpDir): Promise => {
102 | const target = createMavenTarget();
103 |
104 | const file = join(tmpDir, 'sentry-java-1.0.0.module');
105 | await target.fixModuleFileName(tmpDir, file);
106 | });
107 | });
108 | });
109 |
--------------------------------------------------------------------------------
/src/targets/__tests__/npm.test.ts:
--------------------------------------------------------------------------------
1 | import { getPublishTag, getLatestVersion } from '../npm';
2 | import * as system from '../../utils/system';
3 |
// Baseline npm target options shared by the tests below (npm CLI, not yarn).
const defaultNpmConfig = {
  useYarn: false,
  token: 'xxx',
};
8 |
describe('getLatestVersion', () => {
  let spawnProcessMock: jest.SpyInstance;

  beforeEach(() => {
    // Default stub: `npm info <pkg> version` rejects, i.e. unknown package.
    spawnProcessMock = jest
      .spyOn(system, 'spawnProcess')
      .mockImplementation(() => Promise.reject('does not exist'));
  });

  afterEach(() => {
    spawnProcessMock.mockReset();
  });

  it('returns undefined if package name does not exist', async () => {
    const actual = await getLatestVersion(
      'sentry-xx-this-does-not-exist',
      defaultNpmConfig
    );
    expect(actual).toEqual(undefined);
    expect(spawnProcessMock).toBeCalledTimes(1);
    expect(spawnProcessMock).toBeCalledWith(
      'npm',
      ['info', 'sentry-xx-this-does-not-exist', 'version'],
      expect.objectContaining({})
    );
  });

  it('returns version for valid package name', async () => {
    // npm prints a trailing newline; the result must come back trimmed.
    spawnProcessMock = jest
      .spyOn(system, 'spawnProcess')
      .mockImplementation(() =>
        Promise.resolve(Buffer.from('7.20.0\n', 'utf-8'))
      );
    const actual = await getLatestVersion('@sentry/browser', defaultNpmConfig);
    expect(actual).toBe('7.20.0');
    expect(spawnProcessMock).toBeCalledTimes(1);
    expect(spawnProcessMock).toBeCalledWith(
      'npm',
      ['info', '@sentry/browser', 'version'],
      expect.objectContaining({})
    );
  });
});
52 |
describe('getPublishTag', () => {
  let spawnProcessMock: jest.SpyInstance;

  beforeEach(() => {
    // Default stub: the npm version lookup fails (package unknown).
    spawnProcessMock = jest
      .spyOn(system, 'spawnProcess')
      .mockImplementation(() => Promise.reject('does not exist'));
  });

  afterEach(() => {
    spawnProcessMock.mockReset();
  });

  it('returns undefined without a checkPackageName', async () => {
    const logger = {
      warn: jest.fn(),
    } as any;
    const actual = await getPublishTag(
      '1.0.0',
      undefined,
      defaultNpmConfig,
      logger
    );
    expect(actual).toEqual(undefined);
    expect(logger.warn).not.toHaveBeenCalled();
    // Without a package name to compare against, no npm lookup happens.
    expect(spawnProcessMock).not.toBeCalled();
  });

  it('returns undefined for unexisting package name', async () => {
    const logger = {
      warn: jest.fn(),
    } as any;
    const actual = await getPublishTag(
      '1.0.0',
      'sentry-xx-does-not-exist',
      defaultNpmConfig,
      logger
    );
    expect(actual).toEqual(undefined);
    expect(logger.warn).toHaveBeenCalledTimes(1);
    expect(logger.warn).toHaveBeenCalledWith(
      'Could not fetch current version for package sentry-xx-does-not-exist'
    );
    expect(spawnProcessMock).toBeCalledTimes(1);
  });

  it('returns undefined for invalid package version', async () => {
    // The lookup succeeds but returns something that is not semver.
    spawnProcessMock = jest
      .spyOn(system, 'spawnProcess')
      .mockImplementation(() =>
        Promise.resolve(Buffer.from('weird-version', 'utf-8'))
      );

    const logger = {
      warn: jest.fn(),
    } as any;
    const actual = await getPublishTag(
      '1.0.0',
      '@sentry/browser',
      defaultNpmConfig,
      logger
    );
    expect(actual).toEqual(undefined);
    expect(logger.warn).toHaveBeenCalledTimes(1);
    expect(logger.warn).toHaveBeenCalledWith(
      'Could not fetch current version for package @sentry/browser'
    );
    expect(spawnProcessMock).toBeCalledTimes(1);
  });

  it('returns next for prereleases', async () => {
    const logger = {
      warn: jest.fn(),
    } as any;
    const actual = await getPublishTag(
      '1.0.0-alpha.1',
      undefined,
      defaultNpmConfig,
      logger
    );
    expect(actual).toBe('next');
    expect(logger.warn).toHaveBeenCalledTimes(2);
    expect(logger.warn).toHaveBeenCalledWith(
      'Detected pre-release version for npm package!'
    );
    expect(logger.warn).toHaveBeenCalledWith(
      'Adding tag "next" to not make it "latest" in registry.'
    );
    expect(spawnProcessMock).not.toBeCalled();
  });

  it('returns old for older versions', async () => {
    // Registry already has 7.20.0; publishing 1.0.0 must not become latest.
    spawnProcessMock = jest
      .spyOn(system, 'spawnProcess')
      .mockImplementation(() =>
        Promise.resolve(Buffer.from('7.20.0\n', 'utf-8'))
      );

    const logger = {
      warn: jest.fn(),
    } as any;

    const actual = await getPublishTag(
      '1.0.0',
      '@sentry/browser',
      defaultNpmConfig,
      logger
    );
    expect(actual).toBe('old');
    expect(logger.warn).toHaveBeenCalledTimes(2);
    expect(logger.warn).toHaveBeenCalledWith(
      expect.stringMatching(
        /Detected older version than currently published version \(([\d.]+)\) for @sentry\/browser/
      )
    );
    expect(logger.warn).toHaveBeenCalledWith(
      'Adding tag "old" to not make it "latest" in registry.'
    );
    expect(spawnProcessMock).toBeCalledTimes(1);
  });
});
174 |
--------------------------------------------------------------------------------
/src/targets/__tests__/powershell.test.ts:
--------------------------------------------------------------------------------
1 | import { spawnProcess } from '../../utils/system';
2 | import { NoneArtifactProvider } from '../../artifact_providers/none';
3 | import { ConfigurationError } from '../../utils/errors';
4 | import { PowerShellTarget } from '../powershell';
5 |
6 | jest.mock('fs');
7 | jest.mock('../../utils/system');
8 |
/** Returns a new PowerShellTarget test instance. */
// NOTE(review): construction appears to read POWERSHELL_API_KEY from the
// environment (see the env-variable tests below) — confirm in the target.
function getPwshTarget(): PowerShellTarget {
  return new PowerShellTarget(
    {
      name: 'powershell',
      module: 'moduleName',
      repository: 'repositoryName',
    },
    new NoneArtifactProvider()
  );
}
20 |
21 | function setPwshEnvironmentVariables() {
22 | process.env.POWERSHELL_API_KEY = 'test access key';
23 | }
24 |
25 | describe('pwsh environment variables', () => {
26 | const oldEnvVariables = process.env;
27 |
28 | beforeEach(() => {
29 | jest.resetModules(); // Clear the cache.
30 | process.env = { ...oldEnvVariables }; // Restore environment
31 | });
32 |
33 | afterAll(() => {
34 | process.env = { ...oldEnvVariables }; // Restore environment
35 | });
36 |
37 | function deleteTargetOptionsFromEnvironment() {
38 | if ('POWERSHELL_API_KEY' in process.env) {
39 | delete process.env.POWERSHELL_API_KEY;
40 | }
41 | }
42 |
43 | test('errors on missing environment variables', () => {
44 | deleteTargetOptionsFromEnvironment();
45 | try {
46 | getPwshTarget();
47 | } catch (e) {
48 | expect(e instanceof ConfigurationError).toBe(true);
49 | }
50 | });
51 |
52 | test('success on environment variables', () => {
53 | deleteTargetOptionsFromEnvironment();
54 | setPwshEnvironmentVariables();
55 | getPwshTarget();
56 | });
57 | });
58 |
59 | describe('config', () => {
60 | function clearConfig(target: PowerShellTarget): void {
61 | target.psConfig.apiKey = '';
62 | target.psConfig.repository = '';
63 | target.psConfig.module = '';
64 | }
65 |
66 | test('fails with missing config parameters', async () => {
67 | const target = getPwshTarget();
68 | clearConfig(target);
69 | try {
70 | await target.publish('', '');
71 | } catch (error) {
72 | expect(error).toBeInstanceOf(ConfigurationError);
73 | expect(error.message).toBe(
74 | 'Missing project configuration parameter(s): apiKey,repository,module');
75 | }
76 | });
77 | });
78 |
79 | describe('publish', () => {
  // spawnProcess is module-mocked above; cast for per-call assertions.
  const mockedSpawnProcess = spawnProcess as jest.Mock;
  // Options the target is expected to pass to every spawned pwsh process.
  const spawnOptions = { enableInDryRunMode: true, showStdout: true }

  beforeEach(() => {
    setPwshEnvironmentVariables();
    jest.clearAllMocks();
  });
87 |
88 |
  test('error on missing artifact', async () => {
    const target = getPwshTarget();
    // Simulate an empty artifact list for the requested revision.
    target.getArtifactsForRevision = jest.fn()
      .mockImplementation(() => []).bind(PowerShellTarget);

    // `publish` should report an error. When it's not dry run, the error is
    // thrown; when it's on dry run, the error is logged and `undefined` is
    // returned. Thus, both alternatives have been considered.
    try {
      const noPackageFound = await target.publish('version', 'revision');
      expect(noPackageFound).toBe(undefined);
    } catch (error) {
      expect(error).toBeInstanceOf(Error);
      expect(error.message).toMatch(/there are no matching artifacts/);
    }
  });
105 |
106 | test('error on having too many artifacts', async () => {
107 | const target = getPwshTarget();
108 | target.getArtifactsForRevision = jest.fn()
109 | .mockImplementation(() => ['file1', 'file2']).bind(PowerShellTarget);
110 |
111 | // `publish` should report an error. When it's not dry run, the error is
112 | // thrown; when it's on dry run, the error is logged and `undefined` is
113 | // returned. Thus, both alternatives have been considered.
114 | try {
115 | await target.publish('1.0', 'sha');
116 | } catch (error) {
117 | expect(error).toBeInstanceOf(Error);
118 | expect(error.message).toMatch(/found multiple matching artifacts/);
119 | }
120 | });
121 |
122 | test('prints pwsh info', async () => {
123 | const target = getPwshTarget();
124 | try {
125 | await target.publish('1.0', 'sha');
126 | } catch (error) {
127 | expect(error).toBeInstanceOf(Error);
128 | expect(error.message).toMatch(/there are no matching artifact/);
129 | }
130 | expect(mockedSpawnProcess).toBeCalledWith('pwsh', ['--version'], {}, spawnOptions);
131 | expect(mockedSpawnProcess).toBeCalledWith('pwsh',
132 | [
133 | '-Command',
134 | `$ErrorActionPreference = 'Stop'
135 |
136 | $info = Get-Command -Name Publish-Module
137 | "Module name: $($info.ModuleName)"
138 | "Module version: $($info.Module.Version)"
139 | "Module path: $($info.Module.Path)"
140 | `
141 | ], {}, spawnOptions);
142 | });
143 |
144 | test('publish-module runs with expected args', async () => {
145 | const target = getPwshTarget();
146 | await target.publishModule('/path/to/module');
147 | expect(mockedSpawnProcess).toBeCalledWith('pwsh',
148 | [
149 | '-Command',
150 | `$ErrorActionPreference = 'Stop'
151 |
152 | Publish-Module -Path '/path/to/module' \`
153 | -Repository 'repositoryName' \`
154 | -NuGetApiKey 'test access key' \`
155 | -WhatIf:$false
156 | `
157 | ], {}, spawnOptions);
158 | });
159 | });
160 |
--------------------------------------------------------------------------------
/src/targets/__tests__/pypi.test.ts:
--------------------------------------------------------------------------------
1 | import { PypiTarget } from '../pypi';
2 | import { NoneArtifactProvider } from '../../artifact_providers/none';
3 | import { RemoteArtifact } from '../../artifact_providers/base';
4 |
// Mock only `checkExecutableIsPresent` so constructing PypiTarget does not
// require twine to actually be installed; keep the rest of the module real.
jest.mock('../../utils/system', () => ({
  ...jest.requireActual('../../utils/system'),
  checkExecutableIsPresent: jest.fn(),
}));
9 |
10 | describe('pypi', () => {
11 | const oldEnv = { ...process.env };
12 |
13 | beforeEach(() => {
14 | process.env.TWINE_USERNAME = '__token__';
15 | process.env.TWINE_PASSWORD = 'getsentry/craft:bogus';
16 | });
17 |
18 | afterAll(() => {
19 | process.env = { ...oldEnv };
20 | });
21 |
22 | test('it uploads all artifacts in a single twine call', async () => {
23 | const target = new PypiTarget({name: 'pypi'}, new NoneArtifactProvider());
24 | target.getArtifactsForRevision = jest
25 | .fn()
26 | .mockResolvedValueOnce([
27 | { filename: 'pkg-1-py3-none-macos_11_0_arm64.whl' },
28 | { filename: 'pkg-1-py3-none-manylinux_2_17_x86_64.whl' },
29 | { filename: 'pkg-1.tar.gz' },
30 | ]);
31 | target.artifactProvider.downloadArtifact = jest.fn(
32 | async (artifact: RemoteArtifact, _downloadDirectory?: string | undefined) => `downloaded/path/${artifact.filename}`
33 | );
34 | const upload = jest.fn();
35 | target.uploadAssets = upload;
36 |
37 | await target.publish('version', 'deadbeef');
38 |
39 | expect(upload.mock.lastCall[0]).toStrictEqual([
40 | 'downloaded/path/pkg-1-py3-none-macos_11_0_arm64.whl',
41 | 'downloaded/path/pkg-1-py3-none-manylinux_2_17_x86_64.whl',
42 | 'downloaded/path/pkg-1.tar.gz',
43 | ]);
44 | });
45 | });
46 |
--------------------------------------------------------------------------------
/src/targets/__tests__/registry.test.ts:
--------------------------------------------------------------------------------
// Auto-mock the GitHub API helper so no real client is ever constructed.
jest.mock('../../utils/githubApi.ts');
2 | import { getGitHubClient } from '../../utils/githubApi';
3 | import { RegistryConfig, RegistryTarget } from '../registry';
4 | import { NoneArtifactProvider } from '../../artifact_providers/none';
5 | import { RegistryPackageType } from '../../utils/registry';
6 |
7 | describe('getUpdatedManifest', () => {
8 | let mockClient: jest.Mock;
9 |
10 | beforeEach(() => {
11 | jest.resetAllMocks();
12 | mockClient = jest.fn();
13 | (getGitHubClient as jest.MockedFunction<
14 | typeof getGitHubClient
15 | // @ts-ignore we only need to mock a subset
16 | >).mockReturnValue({ graphql: mockClient });
17 | });
18 |
19 | const target = new RegistryTarget(
20 | { name: 'pypi' },
21 | new NoneArtifactProvider(),
22 | { owner: 'testSourceOwner', repo: 'testSourceRepo' }
23 | );
24 |
25 | it('check if created_at exists', async () => {
26 | const registryConfig: RegistryConfig = {
27 | type: RegistryPackageType.SDK,
28 | canonicalName: 'example-package',
29 | };
30 | const packageManifest = {
31 | canonical: 'example-package',
32 | };
33 | const canonical = 'example-package';
34 | const version = '1.2.3';
35 | const revision = 'abc123';
36 |
37 | const updatedManifest = await target.getUpdatedManifest(
38 | registryConfig,
39 | packageManifest,
40 | canonical,
41 | version,
42 | revision
43 | );
44 |
45 | // check if property created_at exists
46 | expect(updatedManifest).toHaveProperty('created_at');
47 | });
48 | });
49 |
--------------------------------------------------------------------------------
/src/targets/__tests__/sentryPypi.test.ts:
--------------------------------------------------------------------------------
1 | import { WHEEL_REGEX, uniquePackages } from '../sentryPypi';
2 |
3 | describe('WHEEL_REGEX', () => {
4 | it('matches a wheel filename', () => {
5 | const filename = 'pkg_name-1.2.3-py3-none-any.whl';
6 | const match = WHEEL_REGEX.exec(filename) as RegExpExecArray;
7 | expect(match[0]).toEqual(filename);
8 | expect(match[1]).toEqual('pkg_name');
9 | expect(match[2]).toEqual('1.2.3');
10 | });
11 |
12 | it('does not match an sdist', () => {
13 | expect(WHEEL_REGEX.exec('pkg-name-123.tar.gz')).toEqual(null);
14 | });
15 |
16 | it('does not match wheel build numbers', () => {
17 | expect(WHEEL_REGEX.exec('pkg_name-1.2.3-1-py3-none-any.whl')).toEqual(null);
18 | });
19 | });
20 |
21 | describe('uniquePackages', () => {
22 | it('reproduces the trivial list', () => {
23 | expect(uniquePackages([])).toEqual([]);
24 | });
25 |
26 | it('translates wheels to ==', () => {
27 | expect(uniquePackages(['pkg-1-py3-none-any.whl'])).toEqual(['pkg==1']);
28 | });
29 |
30 | it('dedupes packages', () => {
31 | const ret = uniquePackages([
32 | 'pkg-1-py3-none-any.whl',
33 | 'pkg-1-py2-none-any.whl',
34 | ]);
35 | expect(ret).toEqual(['pkg==1']);
36 | });
37 |
38 | it('sorts the output', () => {
39 | const ret = uniquePackages([
40 | 'b-1-py3-none-any.whl',
41 | 'a-2-py3-none-any.whl',
42 | ]);
43 | expect(ret).toEqual(['a==2', 'b==1']);
44 | });
45 | });
46 |
--------------------------------------------------------------------------------
/src/targets/__tests__/symbolCollector.test.ts:
--------------------------------------------------------------------------------
1 | import { withTempDir } from '../../utils/files';
2 | import { NoneArtifactProvider } from '../../artifact_providers/none';
3 | import { checkExecutableIsPresent, spawnProcess } from '../../utils/system';
4 | import { SymbolCollector, SYM_COLLECTOR_BIN_NAME } from '../symbolCollector';
5 |
// Automock the file/system helpers so no real processes or temp directories
// are created during these tests.
jest.mock('../../utils/files');
jest.mock('../../utils/system');
// Keep the real `fs` module but stub `fs.promises.mkdir` with a no-op
// (presumably called by the target while preparing directories — verify
// against symbolCollector.ts).
jest.mock('fs', () => {
  const original = jest.requireActual('fs');
  return {
    ...original,
    promises: {
      mkdir: jest.fn(() => {
        /** do nothing */
      }),
    },
  };
});
19 |
// Minimal valid target configuration shared by the suites below.
const customConfig = {
  batchType: 'batchType',
  bundleIdPrefix: 'bundleIdPrefix-',
};
24 |
25 | function getSymbolCollectorInstance(
26 | config: Record = { testKey: 'testVal' }
27 | ): SymbolCollector {
28 | return new SymbolCollector(
29 | {
30 | name: 'symbol-collector',
31 | ...config,
32 | },
33 | new NoneArtifactProvider()
34 | );
35 | }
36 |
describe('target config', () => {
  test('symbol collector not present in path', () => {
    // Make the executable check blow up once; construction must surface it.
    (checkExecutableIsPresent as jest.MockedFunction<
      typeof checkExecutableIsPresent
    >).mockImplementationOnce(() => {
      throw new Error('Checked for executable');
    });

    expect(getSymbolCollectorInstance).toThrowErrorMatchingInlineSnapshot(
      `"Checked for executable"`
    );
    expect(checkExecutableIsPresent).toHaveBeenCalledTimes(1);
    expect(checkExecutableIsPresent).toHaveBeenCalledWith(
      SYM_COLLECTOR_BIN_NAME
    );
  });

  test('config missing', () => {
    // NOTE(review): reassigning the imported binding through a cast swaps
    // the shared automock for a fresh jest.fn(); later tests see this one.
    (checkExecutableIsPresent as jest.MockedFunction<
      typeof checkExecutableIsPresent
    >) = jest.fn();

    // Default config lacks `batchType`, so construction must fail.
    expect(getSymbolCollectorInstance).toThrowErrorMatchingInlineSnapshot(
      '"The required `batchType` parameter is missing in the configuration file. ' +
      'See the documentation for more details."'
    );
  });

  test('symbol collector present and config ok', () => {
    (checkExecutableIsPresent as jest.MockedFunction<
      typeof checkExecutableIsPresent
    >) = jest.fn();

    const symCollector = getSymbolCollectorInstance(customConfig);
    const actualConfig = symCollector.symbolCollectorConfig;
    expect(checkExecutableIsPresent).toHaveBeenCalledTimes(1);
    expect(checkExecutableIsPresent).toHaveBeenLastCalledWith(
      SYM_COLLECTOR_BIN_NAME
    );
    // Defaults (serverEndpoint) plus the user-supplied keys must be present.
    expect(actualConfig).toHaveProperty('serverEndpoint');
    expect(actualConfig).toHaveProperty('batchType');
    expect(actualConfig).toHaveProperty('bundleIdPrefix');
  });
});
81 |
82 | describe('publish', () => {
83 | test('no artifacts found', () => {
84 | const symCollector = getSymbolCollectorInstance(customConfig);
85 | symCollector.getArtifactsForRevision = jest
86 | .fn()
87 | .mockReturnValueOnce(() => []);
88 | expect(spawnProcess).not.toHaveBeenCalled();
89 | });
90 |
91 | test('with artifacts', async () => {
92 | (withTempDir as jest.MockedFunction).mockImplementation(
93 | async cb => await cb('tmpDir')
94 | );
95 | (spawnProcess as jest.MockedFunction<
96 | typeof spawnProcess
97 | >).mockImplementation(() => Promise.resolve(undefined));
98 |
99 | const mockedArtifacts = ['artifact1', 'artifact2', 'artifact3'];
100 |
101 | const symCollector = getSymbolCollectorInstance(customConfig);
102 | symCollector.getArtifactsForRevision = jest
103 | .fn()
104 | .mockReturnValueOnce(mockedArtifacts);
105 | symCollector.artifactProvider.downloadArtifact = jest.fn();
106 |
107 | await symCollector.publish('version', 'revision');
108 |
109 | expect(symCollector.getArtifactsForRevision).toHaveBeenCalledTimes(1);
110 | expect(
111 | symCollector.artifactProvider.downloadArtifact
112 | ).toHaveBeenCalledTimes(mockedArtifacts.length);
113 |
114 | expect(spawnProcess).toHaveBeenCalledTimes(1);
115 | const [cmd, args] = (spawnProcess as jest.MockedFunction<
116 | typeof spawnProcess
117 | >).mock.calls[0] as string[];
118 | expect(cmd).toBe(SYM_COLLECTOR_BIN_NAME);
119 | expect(args).toMatchInlineSnapshot(`
120 | [
121 | "--upload",
122 | "directory",
123 | "--path",
124 | "tmpDir",
125 | "--batch-type",
126 | "batchType",
127 | "--bundle-id",
128 | "bundleIdPrefix-version",
129 | "--server-endpoint",
130 | "https://symbol-collector.services.sentry.io/",
131 | ]
132 | `);
133 | });
134 | });
135 |
--------------------------------------------------------------------------------
/src/targets/__tests__/upm.test.ts:
--------------------------------------------------------------------------------
1 | import { setGlobals } from '../../utils/helpers';
2 | import { NoneArtifactProvider } from '../../artifact_providers/none';
3 | import { ARTIFACT_NAME, UpmTarget } from '../upm';
4 |
5 | describe('UPM Target', () => {
6 | const cleanEnv = { ...process.env };
7 | let upmTarget: UpmTarget;
8 |
9 | beforeEach(() => {
10 | process.env = {
11 | ...cleanEnv,
12 | GITHUB_TOKEN: 'test github token',
13 | };
14 | setGlobals({ 'dry-run': false, 'log-level': 'Info', 'no-input': true });
15 | jest.resetAllMocks();
16 |
17 | upmTarget = new UpmTarget(
18 | {
19 | name: 'upm-test',
20 | releaseRepoOwner: 'getsentry-test',
21 | releaseRepoName: 'unity-test',
22 | },
23 | new NoneArtifactProvider(),
24 | { owner: 'testSourceOwner', repo: 'testSourceRepo' }
25 | );
26 | });
27 |
28 | describe('artifacts', () => {
29 | beforeEach(() => {
30 | setGlobals({ 'dry-run': false, 'log-level': 'Info', 'no-input': true });
31 | });
32 | test.each`
33 | artifacts | error
34 | ${[]} | ${'Cannot publish UPM: No release artifact found.'}
35 | ${['file1', 'file2']} | ${'Cannot publish UPM: Failed to find "' + ARTIFACT_NAME + '" in the artifacts.'}
36 | `(
37 | 'error with artifact count $artifacts.length',
38 | async ({ artifacts, error }) => {
39 | upmTarget.getArtifactsForRevision = jest
40 | .fn()
41 | .mockResolvedValueOnce(artifacts);
42 |
43 | await expect(upmTarget.fetchArtifact('revision')).rejects.toThrow(
44 | error
45 | );
46 | }
47 | );
48 | });
49 |
50 | // TODO(byk): Add more tests for this
51 | describe.skip('publish', () => {
52 | beforeEach(() => {
53 | upmTarget.fetchArtifact = jest
54 | .fn()
55 | .mockResolvedValueOnce({ filename: 'artifact.zip' });
56 | upmTarget.artifactProvider.downloadArtifact = jest
57 | .fn()
58 | .mockResolvedValueOnce('some/test/path');
59 | });
60 |
61 | test('publish', () => {
62 | return expect(
63 | upmTarget.publish('version', 'revision')
64 | ).resolves.not.toThrow();
65 | });
66 | });
67 | });
68 |
--------------------------------------------------------------------------------
/src/targets/base.ts:
--------------------------------------------------------------------------------
1 | import { logger as loggerRaw } from '../logger';
2 | import { GitHubGlobalConfig, TargetConfig } from '../schemas/project_config';
3 | import {
4 | parseFilterOptions,
5 | RawFilterOptions,
6 | ParsedFilterOptions,
7 | } from '../artifact_providers/base';
8 | import { stringToRegexp } from '../utils/filters';
9 | import {
10 | BaseArtifactProvider,
11 | RemoteArtifact,
12 | } from '../artifact_providers/base';
13 |
14 | /**
15 | * Base class for all remote targets
16 | */
17 | export class BaseTarget {
18 | public readonly id: string;
19 | protected readonly logger: typeof loggerRaw;
20 | /** Artifact provider */
21 | public readonly artifactProvider: BaseArtifactProvider;
22 | /** Unparsed target configuration */
23 | public readonly config: TargetConfig;
24 | /** Artifact filtering options for the target */
25 | public readonly filterOptions: ParsedFilterOptions;
26 | /** GitHub repo configuration */
27 | public readonly githubRepo?: GitHubGlobalConfig;
28 |
29 | public static getId(target: TargetConfig): string {
30 | return target.id
31 | ? `${target.name}[${target.id}]`
32 | : target.name || '__undefined__';
33 | }
34 |
35 | public constructor(
36 | config: TargetConfig,
37 | artifactProvider: BaseArtifactProvider,
38 | githubRepo?: GitHubGlobalConfig
39 | ) {
40 | this.logger = loggerRaw.withScope(`[target/${config.name}]`);
41 | this.artifactProvider = artifactProvider;
42 | this.config = config;
43 | this.id = BaseTarget.getId(config);
44 | this.githubRepo = githubRepo;
45 | this.filterOptions = {};
46 | if (this.config.includeNames) {
47 | this.filterOptions.includeNames = stringToRegexp(
48 | this.config.includeNames
49 | );
50 | }
51 | if (this.config.excludeNames) {
52 | this.filterOptions.excludeNames = stringToRegexp(
53 | this.config.excludeNames
54 | );
55 | }
56 | }
57 |
58 | /**
59 | * Publish artifacts for this target
60 | *
61 | * @param version New version to be released
62 | * @param revision Git commit SHA to be published
63 | */
64 | public async publish(
65 | _version: string,
66 |
67 | _revision: string
68 | ): Promise {
69 | throw new Error('Not implemented');
70 | return;
71 | }
72 |
73 | /**
74 | * A helper proxy function that takes passed include/exclude target regex
75 | * into account.
76 | *
77 | * @param revision Git commit SHA to be published
78 | * @param defaultFilterOptions Default filtering options
79 | * @returns A list of relevant artifacts
80 | */
81 | public async getArtifactsForRevision(
82 | revision: string,
83 | defaultFilterOptions: RawFilterOptions = {}
84 | ): Promise {
85 | const filterOptions = {
86 | ...parseFilterOptions(defaultFilterOptions),
87 | ...this.filterOptions,
88 | };
89 | this.logger.debug(
90 | `Getting artifact list for revision "${revision}", filtering options: {includeNames: ${String(
91 | filterOptions.includeNames
92 | )}, excludeNames:${String(filterOptions.excludeNames)}}`
93 | );
94 | return this.artifactProvider.filterArtifactsForRevision(
95 | revision,
96 | filterOptions
97 | );
98 | }
99 | }
100 |
--------------------------------------------------------------------------------
/src/targets/cocoapods.ts:
--------------------------------------------------------------------------------
1 | import { Octokit } from '@octokit/rest';
2 | import * as fs from 'fs';
3 | import { basename, join } from 'path';
4 | import { promisify } from 'util';
5 |
6 | import { GitHubGlobalConfig, TargetConfig } from '../schemas/project_config';
7 | import { ConfigurationError, reportError } from '../utils/errors';
8 | import { withTempDir } from '../utils/files';
9 | import { getFile, getGitHubClient } from '../utils/githubApi';
10 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
11 | import { BaseTarget } from './base';
12 | import { BaseArtifactProvider } from '../artifact_providers/base';
// Promise-returning wrapper around the callback-based fs.writeFile.
const writeFile = promisify(fs.writeFile);

const DEFAULT_COCOAPODS_BIN = 'pod';

/**
 * Command to launch cocoapods
 *
 * Can be overridden with the COCOAPODS_BIN environment variable.
 */
const COCOAPODS_BIN = process.env.COCOAPODS_BIN || DEFAULT_COCOAPODS_BIN;

/** Options for "cocoapods" target */
export interface CocoapodsTargetOptions {
  /** Path to the spec file inside the repo */
  specPath: string;
}
27 |
28 | /**
29 | * Target responsible for publishing to Cocoapods registry
30 | */
31 | export class CocoapodsTarget extends BaseTarget {
32 | /** Target name */
33 | public readonly name: string = 'cocoapods';
34 | /** Target options */
35 | public readonly cocoapodsConfig: CocoapodsTargetOptions;
36 | /** GitHub client */
37 | public readonly github: Octokit;
38 | /** GitHub repo configuration */
39 | public readonly githubRepo: GitHubGlobalConfig;
40 |
41 | public constructor(
42 | config: TargetConfig,
43 | artifactProvider: BaseArtifactProvider,
44 | githubRepo: GitHubGlobalConfig
45 | ) {
46 | super(config, artifactProvider, githubRepo);
47 | this.cocoapodsConfig = this.getCocoapodsConfig();
48 | this.github = getGitHubClient();
49 | this.githubRepo = githubRepo;
50 | checkExecutableIsPresent(COCOAPODS_BIN);
51 | }
52 |
53 | /**
54 | * Extracts Cocoapods target options from the environment
55 | */
56 | public getCocoapodsConfig(): CocoapodsTargetOptions {
57 | const specPath = this.config.specPath;
58 | if (!specPath) {
59 | throw new ConfigurationError('No podspec path provided!');
60 | }
61 |
62 | return {
63 | specPath,
64 | };
65 | }
66 |
67 | /**
68 | * Performs a release to Cocoapods
69 | *
70 | * @param version New version to be released
71 | * @param revision Git commit SHA to be published
72 | */
73 | public async publish(_version: string, revision: string): Promise {
74 | const { owner, repo } = this.githubRepo;
75 | const specPath = this.cocoapodsConfig.specPath;
76 |
77 | this.logger.info(`Loading podspec from ${owner}/${repo}:${specPath}`);
78 | const specContents = await getFile(
79 | this.github,
80 | owner,
81 | repo,
82 | specPath,
83 | revision
84 | );
85 |
86 | if (!specContents) {
87 | reportError(`Podspec not found at ${owner}/${repo}:${specPath}`);
88 | return undefined;
89 | }
90 |
91 | const fileName = basename(specPath);
92 |
93 | await withTempDir(
94 | async directory => {
95 | const filePath = join(directory, fileName);
96 | await writeFile(filePath, specContents, 'utf8');
97 |
98 | this.logger.info(`Pushing podspec "${fileName}" to cocoapods...`);
99 | await spawnProcess(COCOAPODS_BIN, ['setup']);
100 | await spawnProcess(
101 | COCOAPODS_BIN,
102 | ['trunk', 'push', fileName, '--allow-warnings', '--synchronous'],
103 | {
104 | cwd: directory,
105 | env: {
106 | ...process.env,
107 | },
108 | }
109 | );
110 | },
111 | true,
112 | 'craft-cocoapods-'
113 | );
114 |
115 | this.logger.info('Cocoapods release complete');
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/src/targets/docker.ts:
--------------------------------------------------------------------------------
1 | import { TargetConfig } from '../schemas/project_config';
2 | import { BaseArtifactProvider } from '../artifact_providers/base';
3 | import { ConfigurationError } from '../utils/errors';
4 | import { renderTemplateSafe } from '../utils/strings';
5 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
6 | import { BaseTarget } from './base';
7 |
const DEFAULT_DOCKER_BIN = 'docker';

/**
 * Command to launch docker
 *
 * Can be overridden with the DOCKER_BIN environment variable.
 */
const DOCKER_BIN = process.env.DOCKER_BIN || DEFAULT_DOCKER_BIN;

/** Options for "docker" target */
export interface DockerTargetOptions {
  /** Registry login, read from the DOCKER_USERNAME environment variable */
  username: string;
  /** Registry password, read from the DOCKER_PASSWORD environment variable */
  password: string;
  /** Source image path, like `us.gcr.io/sentryio/craft` */
  source: string;
  /** Full name template for the source image path, defaults to `{{{source}}}:{{{revision}}}` */
  sourceTemplate: string;
  /** Full name template for the target image path, defaults to `{{{target}}}:{{{version}}}` */
  targetTemplate: string;
  /** Target image path, like `getsentry/craft` */
  target: string;
}
28 |
29 | /**
30 | * Target responsible for publishing releases on Docker Hub (https://hub.docker.com)
31 | */
32 | export class DockerTarget extends BaseTarget {
33 | /** Target name */
34 | public readonly name: string = 'docker';
35 | /** Target options */
36 | public readonly dockerConfig: DockerTargetOptions;
37 |
38 | public constructor(
39 | config: TargetConfig,
40 | artifactProvider: BaseArtifactProvider
41 | ) {
42 | super(config, artifactProvider);
43 | this.dockerConfig = this.getDockerConfig();
44 | checkExecutableIsPresent(DOCKER_BIN);
45 | }
46 |
47 | /**
48 | * Extracts Docker target options from the environment
49 | */
50 | public getDockerConfig(): DockerTargetOptions {
51 | if (!process.env.DOCKER_USERNAME || !process.env.DOCKER_PASSWORD) {
52 | throw new ConfigurationError(
53 | `Cannot perform Docker release: missing credentials.
54 | Please use DOCKER_USERNAME and DOCKER_PASSWORD environment variables.`.replace(
55 | /^\s+/gm,
56 | ''
57 | )
58 | );
59 | }
60 |
61 | return {
62 | password: process.env.DOCKER_PASSWORD,
63 | source: this.config.source,
64 | target: this.config.target,
65 | sourceTemplate: this.config.sourceFormat || '{{{source}}}:{{{revision}}}',
66 | targetTemplate: this.config.targetFormat || '{{{target}}}:{{{version}}}',
67 | username: process.env.DOCKER_USERNAME,
68 | };
69 | }
70 |
71 | /**
72 | * Logs into docker client with the provided username and password in config
73 | *
74 | * NOTE: This may change the globally logged in Docker user on the system
75 | */
76 | public async login(): Promise {
77 | const { username, password } = this.dockerConfig;
78 | return spawnProcess(DOCKER_BIN, [
79 | 'login',
80 | `--username=${username}`,
81 | `--password=${password}`,
82 | ]);
83 | }
84 |
85 | /**
86 | * Copies an existing local or remote docker image to a new destination.
87 | *
88 | * Requires BuildKit / `docker buildx` to be installed.
89 | *
90 | * @param sourceRevision The tag/revision for the source image
91 | * @param version The release version for the target image
92 | */
93 | async copy(sourceRevision: string, version: string): Promise {
94 | const sourceImage = renderTemplateSafe(this.dockerConfig.sourceTemplate, {
95 | ...this.dockerConfig,
96 | revision: sourceRevision,
97 | });
98 | const targetImage = renderTemplateSafe(this.dockerConfig.targetTemplate, {
99 | ...this.dockerConfig,
100 | version,
101 | });
102 |
103 | this.logger.debug(`Copying image from ${sourceImage} to ${targetImage}...`);
104 | return spawnProcess(
105 | DOCKER_BIN,
106 | ['buildx', 'imagetools', 'create', '--tag', targetImage, sourceImage],
107 | {},
108 | { showStdout: true }
109 | );
110 | }
111 |
112 | /**
113 | * Pushes a source image to Docker Hub
114 | *
115 | * @param version The new version
116 | * @param revision The SHA revision of the new version
117 | */
118 | public async publish(version: string, revision: string): Promise {
119 | await this.login();
120 | await this.copy(revision, version);
121 |
122 | this.logger.info('Docker release complete');
123 | }
124 | }
125 |
--------------------------------------------------------------------------------
/src/targets/gem.ts:
--------------------------------------------------------------------------------
1 | import {
2 | BaseArtifactProvider,
3 | RemoteArtifact,
4 | } from '../artifact_providers/base';
5 | import { reportError } from '../utils/errors';
6 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
7 | import { BaseTarget } from './base';
8 | import { TargetConfig } from '../schemas/project_config';
9 |
const DEFAULT_GEM_BIN = 'gem';

/**
 * Command to launch gem
 *
 * Can be overridden with the GEM_BIN environment variable.
 */
const GEM_BIN = process.env.GEM_BIN || DEFAULT_GEM_BIN;

/**
 * RegExp for gems
 *
 * Matches any artifact filename ending in `.gem`.
 */
const DEFAULT_GEM_REGEX = /^.*(\.gem)$/;
21 |
22 | /**
23 | * Target responsible for publishing releases to Ruby Gems (https://rubygems.org)
24 | */
25 | export class GemTarget extends BaseTarget {
26 | /** Target name */
27 | public readonly name: string = 'gem';
28 |
29 | public constructor(
30 | config: TargetConfig,
31 | artifactProvider: BaseArtifactProvider
32 | ) {
33 | super(config, artifactProvider);
34 | checkExecutableIsPresent(GEM_BIN);
35 | }
36 |
37 | /**
38 | * Uploads a gem to rubygems
39 | *
40 | * @param path Absolute path to the archive to upload
41 | * @returns A promise that resolves when the gem pushed
42 | */
43 | public async pushGem(path: string): Promise {
44 | return spawnProcess(GEM_BIN, ['push', path]);
45 | }
46 |
47 | /**
48 | * Pushes a gem to rubygems.org
49 | *
50 | * @param version New version to be released
51 | * @param revision Git commit SHA to be published
52 | */
53 | public async publish(_version: string, revision: string): Promise {
54 | this.logger.debug('Fetching artifact list...');
55 | const packageFiles = await this.getArtifactsForRevision(revision, {
56 | includeNames: DEFAULT_GEM_REGEX,
57 | });
58 |
59 | if (!packageFiles.length) {
60 | reportError('Cannot push gem: no packages found');
61 | return undefined;
62 | }
63 |
64 | await Promise.all(
65 | packageFiles.map(async (file: RemoteArtifact) => {
66 | const path = await this.artifactProvider.downloadArtifact(file);
67 | this.logger.info(`Pushing gem "${file.filename}"`);
68 | return this.pushGem(path);
69 | })
70 | );
71 |
72 | this.logger.info('Successfully registered gem');
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/targets/hex.ts:
--------------------------------------------------------------------------------
1 | import simpleGit from 'simple-git';
2 |
3 | import { BaseTarget } from './base';
4 | import { withTempDir } from '../utils/files';
5 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
6 | import { GitHubGlobalConfig, TargetConfig } from '../schemas/project_config';
7 | import { BaseArtifactProvider } from '../artifact_providers/base';
8 | import { reportError } from '../utils/errors';
9 |
const DEFAULT_MIX_BIN = 'mix';

/**
 * Command to launch mix
 *
 * Can be overridden with the MIX_BIN environment variable.
 */
const MIX_BIN = process.env.MIX_BIN || DEFAULT_MIX_BIN;
16 |
17 | /**
18 | * Target responsible for publishing releases to Hex, the Elixir/Erlang package manager.
19 | * https://hex.pm
20 | */
21 | export class HexTarget extends BaseTarget {
22 | /** Target name */
23 | public readonly name: string = 'hex';
24 | /** GitHub repo configuration */
25 | public readonly githubRepo: GitHubGlobalConfig;
26 |
27 | public constructor(
28 | config: TargetConfig,
29 | artifactProvider: BaseArtifactProvider,
30 | githubRepo: GitHubGlobalConfig
31 | ) {
32 | super(config, artifactProvider, githubRepo);
33 | checkExecutableIsPresent(MIX_BIN);
34 | this.checkApiKey();
35 | this.githubRepo = githubRepo;
36 | }
37 |
38 | /**
39 | * Check that API key is set in env for publishing.
40 | */
41 | checkApiKey() {
42 | if (!process.env.HEX_API_KEY) {
43 | reportError(
44 | `Cannot publish to hex.pm: missing credentials.
45 | Please use HEX_API_KEY environment variable to pass the API token.`
46 | );
47 | }
48 | }
49 |
50 | /**
51 | * Clones a repository.
52 | *
53 | * @param config Git configuration specifying the repository to clone.
54 | * @param revision The commit SHA that should be checked out after the clone.
55 | * @param directory The directory to clone into.
56 | */
57 | async cloneRepository(
58 | config: GitHubGlobalConfig,
59 | revision: string,
60 | directory: string
61 | ): Promise {
62 | const { owner, repo } = config;
63 | const git = simpleGit(directory);
64 | const url = `https://github.com/${owner}/${repo}.git`;
65 |
66 | this.logger.info(`Cloning ${owner}/${repo} into ${directory}`);
67 | await git.clone(url, directory);
68 | await git.checkout(revision);
69 | }
70 |
71 | /**
72 | * Publishes package to hex.pm using mix hex.publish
73 | *
74 | * @param version New version to be released
75 | * @param revision Git commit SHA to be published
76 | */
77 | public async publish(_version: string, revision: string): Promise {
78 | await withTempDir(
79 | async directory => {
80 | await this.cloneRepository(this.githubRepo, revision, directory);
81 |
82 | const spawnOptions = { cwd: directory };
83 | const spawnProcessOptions = { showStdout: true };
84 | await spawnProcess(MIX_BIN, ['local.hex', '--force'], spawnOptions, spawnProcessOptions);
85 | await spawnProcess(MIX_BIN, ['local.rebar', '--force'], spawnOptions, spawnProcessOptions);
86 | await spawnProcess(MIX_BIN, ['deps.get'], spawnOptions, spawnProcessOptions);
87 | await spawnProcess(MIX_BIN, ['hex.publish', '--yes'], spawnOptions, spawnProcessOptions);
88 | },
89 | true,
90 | 'craft-hex-'
91 | );
92 |
93 | this.logger.info('Hex release complete');
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/src/targets/index.ts:
--------------------------------------------------------------------------------
1 | import { BaseTarget } from './base';
2 | import { BrewTarget } from './brew';
3 | import { CocoapodsTarget } from './cocoapods';
4 | import { CratesTarget } from './crates';
5 | import { DockerTarget } from './docker';
6 | import { GcsTarget } from './gcs';
7 | import { GemTarget } from './gem';
8 | import { GhPagesTarget } from './ghPages';
9 | import { GitHubTarget } from './github';
10 | import { NpmTarget } from './npm';
11 | import { NugetTarget } from './nuget';
12 | import { SentryPypiTarget } from './sentryPypi';
13 | import { PypiTarget } from './pypi';
14 | import { RegistryTarget } from './registry';
15 | import { AwsLambdaLayerTarget } from './awsLambdaLayer';
16 | import { UpmTarget } from './upm';
17 | import { MavenTarget } from './maven';
18 | import { SymbolCollector } from './symbolCollector';
19 | import { PubDevTarget } from './pubDev';
20 | import { HexTarget } from './hex';
21 | import { CommitOnGitRepositoryTarget } from './commitOnGitRepository';
22 | import { PowerShellTarget } from './powershell';
23 |
24 | export const TARGET_MAP: { [key: string]: typeof BaseTarget } = {
25 | brew: BrewTarget,
26 | cocoapods: CocoapodsTarget,
27 | crates: CratesTarget,
28 | docker: DockerTarget,
29 | gcs: GcsTarget,
30 | gem: GemTarget,
31 | 'gh-pages': GhPagesTarget,
32 | github: GitHubTarget,
33 | npm: NpmTarget,
34 | nuget: NugetTarget,
35 | pypi: PypiTarget,
36 | 'sentry-pypi': SentryPypiTarget,
37 | registry: RegistryTarget,
38 | 'aws-lambda-layer': AwsLambdaLayerTarget,
39 | upm: UpmTarget,
40 | maven: MavenTarget,
41 | 'symbol-collector': SymbolCollector,
42 | 'pub-dev': PubDevTarget,
43 | hex: HexTarget,
44 | 'commit-on-git-repository': CommitOnGitRepositoryTarget,
45 | powershell: PowerShellTarget,
46 | };
47 |
48 | /** Targets that are treated specially */
49 | export enum SpecialTarget {
50 | /** This targets does not do any publishing, only related workflow actions (e.g. merging the release branch) */
51 | None = 'none',
52 | /** This target is an alias for running all configured targets */
53 | All = 'all',
54 | }
55 |
56 | /**
57 | * Get a list of all available targets
58 | *
59 | * @returns List of targets
60 | */
61 | export function getAllTargetNames(): string[] {
62 | return Object.keys(TARGET_MAP);
63 | }
64 |
65 | /**
66 | * Convert target name to class object
67 | *
68 | * @param targetName Name of the target
69 | * @returns Corresponding target class or undefined
70 | */
71 | export function getTargetByName(
72 | targetName: string
73 | ): typeof BaseTarget | undefined {
74 | return TARGET_MAP[targetName];
75 | }
76 |
--------------------------------------------------------------------------------
/src/targets/nuget.ts:
--------------------------------------------------------------------------------
1 | import { TargetConfig } from '../schemas/project_config';
2 | import { ConfigurationError, reportError } from '../utils/errors';
3 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
4 | import { BaseTarget } from './base';
5 | import {
6 | BaseArtifactProvider,
7 | RemoteArtifact,
8 | } from '../artifact_providers/base';
9 |
10 | /** Command to launch dotnet tools */
11 | export const NUGET_DOTNET_BIN = process.env.NUGET_DOTNET_BIN || 'dotnet';
12 |
13 | /** Default Nuget registry URL */
14 | export const DEFAULT_NUGET_SERVER_URL = 'https://api.nuget.org/v3/index.json';
15 |
16 | /** A regular expression used to find the package tarball */
17 | const DEFAULT_NUGET_REGEX = /^.*\d\.\d.*\.nupkg$/;
18 | const SYMBOLS_NUGET_REGEX = /^.*\d\.\d.*\.snupkg$/;
19 |
20 | /** Nuget target configuration options */
21 | export interface NugetTargetOptions {
22 | /** Nuget API token */
23 | apiToken: string;
24 | /** Nuget server URL */
25 | serverUrl: string;
26 | }
27 |
28 | /**
29 | * Target responsible for publishing releases on Nuget
30 | */
31 | export class NugetTarget extends BaseTarget {
32 | /** Target name */
33 | public readonly name: string = 'nuget';
34 | /** Target options */
35 | public readonly nugetConfig: NugetTargetOptions;
36 |
37 | public constructor(
38 | config: TargetConfig,
39 | artifactProvider: BaseArtifactProvider
40 | ) {
41 | super(config, artifactProvider);
42 | this.nugetConfig = this.getNugetConfig();
43 | checkExecutableIsPresent(NUGET_DOTNET_BIN);
44 | }
45 |
46 | /**
47 | * Extracts Nuget target options from the raw configuration
48 | */
49 | protected getNugetConfig(): NugetTargetOptions {
50 | if (!process.env.NUGET_API_TOKEN) {
51 | throw new ConfigurationError(
52 | `Cannot perform Nuget release: missing credentials.
53 | Please use NUGET_API_TOKEN environment variable.`
54 | );
55 | }
56 | return {
57 | apiToken: process.env.NUGET_API_TOKEN,
58 | serverUrl: this.config.serverUrl || DEFAULT_NUGET_SERVER_URL,
59 | };
60 | }
61 |
62 | /**
63 | * Uploads an archive to Nuget using "dotnet nuget"
64 | *
65 | * @param path Absolute path to the archive to upload
66 | * @returns A promise that resolves when the upload has completed
67 | */
68 | public async uploadAsset(path: string): Promise {
69 | const args = [
70 | 'nuget',
71 | 'push',
72 | path,
73 | '--api-key',
74 | '${NUGET_API_TOKEN}',
75 | '--source',
76 | this.nugetConfig.serverUrl,
77 | ];
78 | // Run outside the repository folder to avoid global.json constraints
79 | // (we don't need specific dotnet/workload versions just to upload to nuget)
80 | const spawnOptions = { cwd: '/' };
81 | return spawnProcess(NUGET_DOTNET_BIN, args, spawnOptions);
82 | }
83 |
84 | /**
85 | * Publishes a package tarball to the Nuget registry
86 | *
87 | * @param version New version to be released
88 | * @param revision Git commit SHA to be published
89 | */
90 | public async publish(_version: string, revision: string): Promise {
91 | this.logger.debug('Fetching artifact list...');
92 | const packageFiles = await this.getArtifactsForRevision(revision, {
93 | includeNames: DEFAULT_NUGET_REGEX,
94 | });
95 | const symbolFiles = await this.getArtifactsForRevision(revision, {
96 | includeNames: SYMBOLS_NUGET_REGEX,
97 | });
98 |
99 | if (!packageFiles.length) {
100 | reportError(
101 | 'Cannot release to Nuget: there are no Nuget packages found!'
102 | );
103 | }
104 |
105 | // Emit the .NET version for informational purposes.
106 | this.logger.info('.NET Version:');
107 | await spawnProcess(NUGET_DOTNET_BIN, ['--version']);
108 |
109 | // Also emit the nuget version, which is informative and works around a bug.
110 | // See https://github.com/NuGet/Home/issues/12159#issuecomment-1278360511
111 | this.logger.info('Nuget Version:');
112 | await spawnProcess(NUGET_DOTNET_BIN, ['nuget', '--version']);
113 |
114 | await Promise.all(
115 | packageFiles.map(async (file: RemoteArtifact) => {
116 | const path = await this.artifactProvider.downloadArtifact(file);
117 |
118 | // If an artifact containing a .snupkg file exists with the same base
119 | // name as the .nupkg file, then download it to the same location.
120 | // It will be picked up automatically when pushing the .nupkg.
121 |
122 | // Note, this approach is required vs sending them separately, because
123 | // we need to send the .nupkg *first*, and it must succeed before the
124 | // .snupkg is sent.
125 |
126 | const symbolFileName = file.filename.replace('.nupkg', '.snupkg');
127 | const symbolFile = symbolFiles.find(f => f.filename === symbolFileName);
128 | if (symbolFile) {
129 | await this.artifactProvider.downloadArtifact(symbolFile);
130 | }
131 |
132 | this.logger.info(
133 | `Uploading file "${file.filename}" via "dotnet nuget"` +
134 | (symbolFile ? `, including symbol file "${symbolFile.filename}"` : '')
135 | );
136 | return this.uploadAsset(path);
137 | })
138 | );
139 |
140 | this.logger.info('Nuget release complete');
141 | }
142 | }
143 |
--------------------------------------------------------------------------------
/src/targets/pypi.ts:
--------------------------------------------------------------------------------
1 | import { TargetConfig } from '../schemas/project_config';
2 | import {
3 | BaseArtifactProvider,
4 | RemoteArtifact,
5 | } from '../artifact_providers/base';
6 | import { ConfigurationError, reportError } from '../utils/errors';
7 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
8 | import { BaseTarget } from './base';
9 |
10 | const DEFAULT_TWINE_BIN = 'twine';
11 |
12 | /**
13 | * Command to launch twine
14 | */
15 | const TWINE_BIN = process.env.TWINE_BIN || DEFAULT_TWINE_BIN;
16 |
17 | /**
18 | * RegExp for Python packages
19 | */
20 | const DEFAULT_PYPI_REGEX = /^.*\d\.\d.*(\.whl|\.gz|\.zip)$/;
21 |
22 | /** Options for "pypi" target */
23 | export interface PypiTargetOptions {
24 | /** Twine username */
25 | twineUsername: string;
26 | /** Twine password */
27 | twinePassword: string;
28 | }
29 |
30 | /**
31 | * Target responsible for publishing releases on PyPI (Python package index)
32 | */
33 | export class PypiTarget extends BaseTarget {
34 | /** Target name */
35 | public readonly name: string = 'pypi';
36 | /** Target options */
37 | public readonly pypiConfig: PypiTargetOptions;
38 |
39 | public constructor(
40 | config: TargetConfig,
41 | artifactProvider: BaseArtifactProvider
42 | ) {
43 | super(config, artifactProvider);
44 | this.pypiConfig = this.getPypiConfig();
45 | checkExecutableIsPresent(TWINE_BIN);
46 | }
47 |
48 | /**
49 | * Extracts PyPI target options from the environment
50 | */
51 | public getPypiConfig(): PypiTargetOptions {
52 | if (!process.env.TWINE_USERNAME || !process.env.TWINE_PASSWORD) {
53 | throw new ConfigurationError(
54 | `Cannot perform PyPI release: missing credentials.
55 | Please use TWINE_USERNAME and TWINE_PASSWORD environment variables.`.replace(
56 | /^\s+/gm,
57 | ''
58 | )
59 | );
60 | }
61 | return {
62 | twinePassword: process.env.TWINE_PASSWORD,
63 | twineUsername: process.env.TWINE_USERNAME,
64 | };
65 | }
66 |
67 | async uploadAssets(paths: string[]): Promise {
68 | // TODO: Sign the package with "--sign"
69 | return spawnProcess(TWINE_BIN, ['upload', ...paths]);
70 | }
71 |
72 | /**
73 | * Uploads all files to PyPI using Twine
74 | *
75 | * Requires twine to be configured in the environment (either beforehand or
76 | * via enviroment).
77 | *
78 | * @param version New version to be released
79 | * @param revision Git commit SHA to be published
80 | */
81 | public async publish(_version: string, revision: string): Promise {
82 | this.logger.debug('Fetching artifact list...');
83 | const packageFiles = await this.getArtifactsForRevision(revision, {
84 | includeNames: DEFAULT_PYPI_REGEX,
85 | });
86 |
87 | if (!packageFiles.length) {
88 | reportError('Cannot release to PyPI: no packages found');
89 | return undefined;
90 | }
91 |
92 | const paths = await Promise.all(
93 | packageFiles.map(async (file: RemoteArtifact) => {
94 | this.logger.info(`Uploading file "${file.filename}" via twine`);
95 | return this.artifactProvider.downloadArtifact(file);
96 | })
97 | );
98 | await this.uploadAssets(paths);
99 |
100 | this.logger.info('PyPI release complete');
101 | }
102 | }
103 |
--------------------------------------------------------------------------------
/src/targets/sentryPypi.ts:
--------------------------------------------------------------------------------
1 | import fs from 'fs';
2 | import path from 'path';
3 | import { Octokit } from '@octokit/rest';
4 | import { TargetConfig } from '../schemas/project_config';
5 | import { BaseArtifactProvider } from '../artifact_providers/base';
6 | import { withTempDir } from '../utils/files';
7 | import { ConfigurationError, reportError } from '../utils/errors';
8 | import { spawnProcess } from '../utils/system';
9 | import { BaseTarget } from './base';
10 | import { getGitHubClient } from '../utils/githubApi';
11 |
12 | /**
13 | * RegExp for Python packages
14 | */
15 | export const WHEEL_REGEX = /^([^-]+)-([^-]+)-[^-]+-[^-]+-[^-]+\.whl$/;
16 |
17 | export function uniquePackages(filenames: Array): Array {
18 | const versions = filenames.map(filename => {
19 | const match = WHEEL_REGEX.exec(filename) as RegExpExecArray;
20 | return `${match[1]}==${match[2]}`;
21 | });
22 | return [...new Set(versions)].sort();
23 | }
24 |
25 | /**
26 | * Target responsible for publishing internal packages on internal PyPI
27 | */
28 | export class SentryPypiTarget extends BaseTarget {
29 | /** Target name */
30 | public readonly name: string = 'sentry-pypi';
31 | /** GitHub client */
32 | private readonly github: Octokit;
33 |
34 | public constructor(
35 | config: TargetConfig,
36 | artifactProvider: BaseArtifactProvider
37 | ) {
38 | super(config, artifactProvider);
39 |
40 | if (!('internalPypiRepo' in this.config)) {
41 | throw new ConfigurationError(
42 | 'Missing project configuration parameter: internalPypiRepo'
43 | );
44 | }
45 |
46 | this.github = getGitHubClient();
47 | }
48 |
49 | /**
50 | * Creates a pull request in the target pypi repo
51 | *
52 | * @param version New version to be released
53 | * @param revision Git commit SHA to be published
54 | */
55 | public async publish(_version: string, revision: string): Promise {
56 | this.logger.debug('Fetching artifact list...');
57 | const packageFiles = await this.getArtifactsForRevision(revision, {
58 | includeNames: WHEEL_REGEX,
59 | });
60 |
61 | if (!packageFiles.length) {
62 | reportError('Cannot release to PyPI: no packages found');
63 | return undefined;
64 | }
65 |
66 | const versions = uniquePackages(packageFiles.map(f => f.filename));
67 |
68 | const [owner, repo] = this.config.internalPypiRepo.split('/');
69 |
70 | const [contents, tree, commit] = await withTempDir(async directory => {
71 | await spawnProcess(
72 | 'git',
73 | [
74 | 'clone',
75 | '--quiet',
76 | '--depth=1',
77 | `https://github.com/${this.config.internalPypiRepo}`,
78 | directory,
79 | ],
80 | {},
81 | { enableInDryRunMode: true }
82 | );
83 |
84 | await spawnProcess(
85 | 'python3',
86 | ['-m', 'add_pkg', '--skip-resolve', ...versions],
87 | { cwd: directory },
88 | { enableInDryRunMode: true }
89 | );
90 |
91 | const contents = fs.readFileSync(path.join(directory, 'packages.ini'), {
92 | encoding: 'utf-8',
93 | });
94 | const tree = ((await spawnProcess(
95 | 'git',
96 | ['-C', directory, 'rev-parse', 'HEAD:'],
97 | {},
98 | { enableInDryRunMode: true }
99 | )) as Buffer)
100 | .toString('utf-8')
101 | .trim();
102 | const commit = ((await spawnProcess(
103 | 'git',
104 | ['-C', directory, 'rev-parse', 'HEAD'],
105 | {},
106 | { enableInDryRunMode: true }
107 | )) as Buffer)
108 | .toString('utf-8')
109 | .trim();
110 | return [contents, tree, commit];
111 | });
112 |
113 | // making a commit involves:
114 |
115 | // 1. build a tree based on the previous tree
116 | const newTree = (
117 | await this.github.git.createTree({
118 | owner,
119 | repo,
120 | tree: [
121 | {
122 | path: 'packages.ini',
123 | mode: '100644',
124 | type: 'blob',
125 | content: contents,
126 | },
127 | ],
128 | base_tree: tree,
129 | })
130 | ).data.sha;
131 |
132 | // 2. make a commit
133 | const message = `update ${versions.join(' ')}`;
134 | const newCommit = (
135 | await this.github.git.createCommit({
136 | owner,
137 | repo,
138 | message,
139 | tree: newTree,
140 | parents: [commit],
141 | })
142 | ).data.sha;
143 |
144 | // 3. make a branch
145 | const branchName = `craft-release-${revision}`;
146 | await this.github.git.createRef({
147 | owner,
148 | repo,
149 | ref: `refs/heads/${branchName}`,
150 | sha: newCommit,
151 | });
152 |
153 | // 4. make a PR!
154 | await this.github.rest.pulls.create({
155 | owner,
156 | repo,
157 | head: branchName,
158 | base: 'main',
159 | title: message,
160 | });
161 |
162 | this.logger.info('internal PyPI release complete');
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/src/targets/symbolCollector.ts:
--------------------------------------------------------------------------------
1 | import { BaseArtifactProvider } from '../artifact_providers/base';
2 | import { TargetConfig } from '../schemas/project_config';
3 | import { ConfigurationError } from '../utils/errors';
4 | import { BaseTarget } from './base';
5 | import { withTempDir } from '../utils/files';
6 | import { promises as fsPromises } from 'fs';
7 | import { checkExecutableIsPresent, spawnProcess } from '../utils/system';
8 | import { join } from 'path';
9 |
10 | const DEFAULT_SYM_COLLECTOR_SERVER_ENDPOINT =
11 | 'https://symbol-collector.services.sentry.io/';
12 | /**
13 | * Name of the binary of the symbol collector.
14 | * Must be available in the path.
15 | */
16 | export const SYM_COLLECTOR_BIN_NAME = 'SymbolCollector.Console';
17 |
18 | /** Config options for the "symbol-collector" target. */
19 | interface SymbolCollectorTargetConfig {
20 | /** Server endpoint to upload symbols. */
21 | serverEndpoint: string;
22 | /** batch-type of the symbols to be uploaded. */
23 | batchType: string;
24 | /** Prefix of the bundle ID to be uploaded. */
25 | bundleIdPrefix: string;
26 | }
27 |
28 | export class SymbolCollector extends BaseTarget {
29 | /** Target name */
30 | public readonly name: string = 'symbol-collector';
31 | /** Target options */
32 | public readonly symbolCollectorConfig: SymbolCollectorTargetConfig;
33 |
34 | public constructor(
35 | config: TargetConfig,
36 | artifactProvider: BaseArtifactProvider
37 | ) {
38 | super(config, artifactProvider);
39 | this.symbolCollectorConfig = this.getSymbolCollectorConfig();
40 | }
41 |
42 | private getSymbolCollectorConfig(): SymbolCollectorTargetConfig {
43 | // The Symbol Collector should be available in the path
44 | checkExecutableIsPresent(SYM_COLLECTOR_BIN_NAME);
45 |
46 | if (!this.config.batchType) {
47 | throw new ConfigurationError(
48 | 'The required `batchType` parameter is missing in the configuration file. ' +
49 | 'See the documentation for more details.'
50 | );
51 | }
52 | if (!this.config.bundleIdPrefix) {
53 | throw new ConfigurationError(
54 | 'The required `bundleIdPrefix` parameter is missing in the configuration file. ' +
55 | 'See the documentation for more details.'
56 | );
57 | }
58 |
59 | return {
60 | serverEndpoint:
61 | this.config.serverEndpoint || DEFAULT_SYM_COLLECTOR_SERVER_ENDPOINT,
62 | batchType: this.config.batchType,
63 | bundleIdPrefix: this.config.bundleIdPrefix,
64 | };
65 | }
66 |
67 | public async publish(version: string, revision: string): Promise {
68 | const bundleId = this.symbolCollectorConfig.bundleIdPrefix + version;
69 | const artifacts = await this.getArtifactsForRevision(revision, {
70 | includeNames: this.config.includeNames,
71 | excludeNames: this.config.excludeNames,
72 | });
73 |
74 | if (artifacts.length === 0) {
75 | this.logger.warn(`Didn't found any artifacts after filtering`);
76 | return;
77 | }
78 |
79 | this.logger.debug(`Found ${artifacts.length} symbol artifacts.`);
80 |
81 | await withTempDir(async dir => {
82 | // Download all artifacts in the same parent directory, where the symbol
83 | // collector will recursively look for and deal with them.
84 | // Since there are files with the same name, download them in different
85 | // directories.
86 | this.logger.debug('Downloading artifacts...');
87 | await Promise.all(
88 | artifacts.map(async (artifact, index) => {
89 | const subdirPath = join(dir, String(index));
90 | await fsPromises.mkdir(subdirPath);
91 | await this.artifactProvider.downloadArtifact(artifact, subdirPath);
92 | })
93 | );
94 |
95 | const cmdOutput = await spawnProcess(SYM_COLLECTOR_BIN_NAME, [
96 | '--upload',
97 | 'directory',
98 | '--path',
99 | dir,
100 | '--batch-type',
101 | this.symbolCollectorConfig.batchType,
102 | '--bundle-id',
103 | bundleId,
104 | '--server-endpoint',
105 | this.symbolCollectorConfig.serverEndpoint,
106 | ]);
107 |
108 | if (cmdOutput) {
109 | if (cmdOutput.length === 0) {
110 | this.logger.info(`The command didn't have any output.`);
111 | } else {
112 | this.logger.info('Command output:\n', cmdOutput.toString());
113 | }
114 | }
115 | });
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/src/targets/upm.ts:
--------------------------------------------------------------------------------
1 | import { Octokit } from '@octokit/rest';
2 | import simpleGit from 'simple-git';
3 | import {
4 | getGitHubApiToken,
5 | getGitHubClient,
6 | GitHubRemote,
7 | } from '../utils/githubApi';
8 |
9 | import { GitHubTarget } from './github';
10 | import { GitHubGlobalConfig, TargetConfig } from '../schemas/project_config';
11 | import { BaseTarget } from './base';
12 | import {
13 | BaseArtifactProvider,
14 | RemoteArtifact,
15 | } from '../artifact_providers/base';
16 | import { reportError } from '../utils/errors';
17 | import { extractZipArchive } from '../utils/system';
18 | import { withTempDir } from '../utils/files';
19 | import { isDryRun } from '../utils/helpers';
20 | import { isPreviewRelease } from '../utils/version';
21 | import { NoneArtifactProvider } from '../artifact_providers/none';
22 |
23 | /** Name of the artifact that contains the UPM package */
24 | export const ARTIFACT_NAME = 'package-release.zip';
25 |
26 | /**
27 | * Target responsible for publishing to upm registry
28 | */
29 | export class UpmTarget extends BaseTarget {
30 | /** Target name */
31 | public readonly name: string = 'upm';
32 | /** GitHub client */
33 | public readonly github: Octokit;
34 | /** Internal GitHub Target */
35 | private readonly githubTarget: GitHubTarget;
36 |
37 | public constructor(
38 | config: TargetConfig,
39 | artifactProvider: BaseArtifactProvider,
40 | githubRepo: GitHubGlobalConfig
41 | ) {
42 | super(config, artifactProvider, githubRepo);
43 |
44 | this.github = getGitHubClient();
45 |
46 | const githubTargetConfig = {
47 | name: 'github',
48 | tagPrefix: config.tagPrefix,
49 | owner: config.releaseRepoOwner,
50 | repo: config.releaseRepoName,
51 | };
52 |
53 | this.githubTarget = new GitHubTarget(
54 | githubTargetConfig,
55 | new NoneArtifactProvider(),
56 | githubRepo
57 | );
58 | }
59 |
60 | /**
61 | * Fetches the artifact for the provided revision.
62 | *
63 | * @param revision Git commit SHA for the artifact to be published.
64 | * @returns The requested artifact. When no artifacts found or multiple
65 | * artifacts have been found, returns undefined in dry-run mode and
66 | * throws an exception in "normal" mode.
67 | */
68 | public async fetchArtifact(
69 | revision: string
70 | ): Promise {
71 | const packageFiles = await this.getArtifactsForRevision(revision);
72 | if (packageFiles.length === 0) {
73 | reportError('Cannot publish UPM: No release artifact found.');
74 | return;
75 | }
76 |
77 | const packageFile = packageFiles.find(
78 | ({ filename }) => filename === ARTIFACT_NAME
79 | );
80 | if (packageFile === undefined) {
81 | reportError(
82 | `Cannot publish UPM: Failed to find "${ARTIFACT_NAME}" in the artifacts.`
83 | );
84 | }
85 |
86 | return packageFile;
87 | }
88 |
89 | /**
90 | * Performs a release to upm
91 | *
92 | * @param version New version to be released
93 | * @param revision Git commit SHA to be published
94 | */
95 | public async publish(version: string, revision: string): Promise {
96 | this.logger.info('Fetching artifact...');
97 | const packageFile = await this.fetchArtifact(revision);
98 | if (!packageFile) {
99 | return;
100 | }
101 |
102 | this.logger.info(
103 | `Found artifact: "${packageFile.filename}", downloading...`
104 | );
105 | const artifactPath = await this.artifactProvider.downloadArtifact(
106 | packageFile
107 | );
108 |
109 | const remote = new GitHubRemote(
110 | this.config.releaseRepoOwner,
111 | this.config.releaseRepoName,
112 | getGitHubApiToken()
113 | );
114 | const remoteAddr = remote.getRemoteString();
115 | this.logger.debug(`Target release repository: ${remoteAddr}`);
116 |
117 | await withTempDir(
118 | async directory => {
119 | const git = simpleGit(directory);
120 | this.logger.info(`Cloning ${remoteAddr} to ${directory}...`);
121 | await git.clone(remote.getRemoteStringWithAuth(), directory);
122 |
123 | this.logger.info('Clearing the repository.');
124 | await git.rm(['-r', '-f', '.']);
125 |
126 | this.logger.info(`Extracting "${packageFile.filename}".`);
127 | await extractZipArchive(artifactPath, directory);
128 |
129 | this.logger.info('Adding files to repository.');
130 | await git.add(['.']);
131 | const commitResult = await git.commit(`release ${version}`);
132 | if (!commitResult.commit) {
133 | throw new Error(
134 | 'Commit on target repository failed. Maybe there were no changes at all?'
135 | );
136 | }
137 | const targetRevision = await git.revparse([commitResult.commit]);
138 |
139 | if (isDryRun()) {
140 | this.logger.info('[dry-run]: git push origin main');
141 | } else {
142 | await git.push(['origin', 'main']);
143 | const changes = await this.githubTarget.getChangelog(version);
144 | const isPrerelease = isPreviewRelease(version);
145 | const draftRelease = await this.githubTarget.createDraftRelease(
146 | version,
147 | targetRevision,
148 | changes
149 | );
150 | await this.githubTarget.publishRelease(draftRelease,
151 | { makeLatest: !isPrerelease });
152 | }
153 | },
154 | true,
155 | '_craft-release-upm-'
156 | );
157 |
158 | this.logger.info('UPM release complete');
159 | }
160 | }
161 |
--------------------------------------------------------------------------------
/src/types/nvar.ts:
--------------------------------------------------------------------------------
1 | declare module 'nvar';
2 |
--------------------------------------------------------------------------------
/src/types/split.d.ts:
--------------------------------------------------------------------------------
1 | declare module 'split';
2 |
--------------------------------------------------------------------------------
/src/utils/__fixtures__/gcsApi.ts:
--------------------------------------------------------------------------------
1 | import { RemoteArtifact } from '../../artifact_providers/base';
2 |
3 | export const dogsGHOrg = 'dogs-rule';
4 |
5 | export const squirrelRepo = 'squirrel-operations';
6 |
7 | export const squirrelBucket = 'squirrel-chasing';
8 |
9 | export const squirrelSimulatorCommit =
10 | '4d6169736579203c33203c3320436861726c6965';
11 |
12 | export const squirrelStatsCommit = '3c3320446f67732061726520677265617421203c33';
13 |
14 | export const gcsCredsJSON = JSON.stringify({
15 | project_id: 'o-u-t-s-i-d-e',
16 | private_key: 'DoGsArEgReAtSoMeSeCrEtStUfFhErE',
17 | client_email: 'might_huntress@dogs.com',
18 | other_stuff: 'can be anything',
19 | tail_wagging: 'true',
20 | barking: 'also VERY true',
21 | });
22 |
23 | export const squirrelStatsArtifact: RemoteArtifact = {
24 | filename: 'march-2020-stats.csv',
25 | mimeType: 'text/csv',
26 | storedFile: {
27 | downloadFilepath: 'captured-squirrels/march-2020-stats.csv',
28 | filename: 'march-2020-stats.csv',
29 | lastUpdated: '2020-03-30T19:14:44.694Z',
30 | size: 112112,
31 | },
32 | };
33 |
34 | export const squirrelStatsLocalPath = './temp/march-2020-stats.csv';
35 |
36 | export const squirrelStatsBucketPath = {
37 | path: 'stats/2020/',
38 | };
39 |
40 | export const squirrelSimulatorArtifact: RemoteArtifact = {
41 | filename: 'bundle.js',
42 | mimeType: 'application/json',
43 | storedFile: {
44 | downloadFilepath: 'squirrel-simulator/bundle.js',
45 | filename: 'bundle.js',
46 | lastUpdated: '2020-03-30T19:14:44.694Z',
47 | size: 123112,
48 | },
49 | };
50 |
51 | export const squirrelSimulatorLocalPath = './dist/bundle.js';
52 |
53 | export const squirrelSimulatorBucketPath = {
54 | path: '/simulator/v1.12.1/dist/',
55 | metadata: { cacheControl: `public, max-age=3600` },
56 | };
57 |
58 | export {
59 | squirrelSimulatorGCSFileObj,
60 | squirrelStatsGCSFileObj,
61 | } from './gcsFileObj';
62 |
63 | export const tempDownloadDirectory = './temp/';
64 |
--------------------------------------------------------------------------------
/src/utils/__fixtures__/gcsFileObj.ts:
--------------------------------------------------------------------------------
1 | // Note: All but the last two objects in this file (the ones which are exported)
2 | // exist purely because real-life GCS File objects are enormous, deeply nested,
3 | // and full of repetition, so breaking them up is the only way to have any idea
4 | // of what's in them.
5 |
6 | const acl = {
7 | owners: {},
8 | readers: {},
9 | writers: {},
10 | pathPrefix: '/acl',
11 | };
12 |
13 | const defaultAcl = {
14 | ...acl,
15 | pathPrefix: '/defaultObjectAcl',
16 | };
17 |
18 | const aclWithDefault = {
19 | ...acl,
20 | default: defaultAcl,
21 | };
22 |
23 | const aclRoles = {
24 | OWNER_ROLE: 'OWNER',
25 | READER_ROLE: 'READER',
26 | WRITER_ROLE: 'WRITER',
27 | };
28 |
29 | const scopes = [
30 | 'https://www.googleapis.com/auth/iam',
31 | 'https://www.googleapis.com/auth/cloud-platform',
32 | 'https://www.googleapis.com/auth/devstorage.full_control',
33 | ];
34 |
35 | const authClient = {
36 | jsonContent: {
37 | client_email: 'mighty_huntress@dogs.com',
38 | private_key: 'DoGsArEgReAtSoMeSeCrEtStUfFhErE',
39 | },
40 | cachedCredential: {
41 | domain: null,
42 | _events: {},
43 | _eventsCount: 0,
44 | transporter: {},
45 | credentials: {
46 | access_token: 'IaMaGoOdDoGpLeAsElEtMeIn',
47 | token_type: 'Bearer',
48 | expiry_date: 1585600265000,
49 | refresh_token: 'jwt-placeholder',
50 | },
51 | certificateExpiry: null,
52 | refreshTokenPromises: {},
53 | eagerRefreshThresholdMillis: 300000,
54 | email: 'mighty_huntress@dogs.com',
55 | key: 'DoGsArEgReAtSoMeSeCrEtStUfFhErE',
56 | scopes,
57 | gtoken: {
58 | token: 'IaMaGoOdDoGpLeAsElEtMeIn',
59 | expiresAt: 1585600265000,
60 | rawToken: {
61 | access_token: 'IaMaGoOdDoGpLeAsElEtMeIn',
62 | expires_in: 3599,
63 | token_type: 'Bearer',
64 | },
65 | tokenExpires: null,
66 | key: 'DoGsArEgReAtSoMeSeCrEtStUfFhErE',
67 | iss: 'mighty_huntress@dogs.com',
68 | scope:
69 | 'https://www.googleapis.com/auth/iam https://www.googleapis.com/auth/cloud-platform https://www.googleapis.com/auth/devstorage.full_control',
70 | },
71 | },
72 | _cachedProjectId: 'o-u-t-s-i-d-e',
73 | scopes,
74 | };
75 |
76 | const storage = {
77 | baseUrl: 'https://www.googleapis.com/storage/v1',
78 | globalInterceptors: [],
79 | interceptors: [],
80 | packageJson: '',
81 | projectId: 'o-u-t-s-i-d-e',
82 | projectIdRequired: false,
83 | authClient,
84 | acl: aclRoles,
85 | };
86 |
87 | const bucket = {
88 | domain: null,
89 | _events: {},
90 | _eventsCount: 0,
91 | metadata: {},
92 | baseUrl: '/b',
93 | parent: storage,
94 | id: 'squirrel-chasing',
95 | methods: {
96 | create: true,
97 | },
98 | interceptors: [],
99 | name: 'squirrel-chasing',
100 | storage,
101 | acl: aclWithDefault,
102 | iam: {
103 | resourceId_: 'buckets/[object Promise]',
104 | },
105 | };
106 |
107 | export const squirrelStatsGCSFileObj = {
108 | domain: null,
109 | _events: {},
110 | _eventsCount: 0,
111 | metadata: {
112 | kind: 'storage#object',
113 | id: 'squirrel-chasing/captured-squirrels/march-2020-stats.csv/12312012',
114 | selfLink:
115 | 'https://www.googleapis.com/storage/v1/b/squirrel-chasing/o/captured-squirrels%2Fmarch-2020-stats.csv',
116 | mediaLink:
117 | 'https://www.googleapis.com/download/storage/v1/b/squirrel-chasing/o/captured-squirrels%2Fmarch-2020-stats.csv?generation=12312012&alt=media',
118 | name: 'captured-squirrels/march-2020-stats.csv',
119 | bucket: 'squirrel-chasing',
120 | generation: '12312012',
121 | metageneration: '1',
122 | contentType: 'text/csv',
123 | storageClass: 'STANDARD',
124 | size: '112112',
125 | md5Hash: 'DOX0leRinotMTM7EGGXpjQ==',
126 | crc32c: 'fVcyCg==',
127 | etag: 'CI/UrJz0wugCEAE=',
128 | timeCreated: '2020-03-30T19:14:44.694Z',
129 | updated: '2020-03-30T19:14:44.694Z',
130 | timeStorageClassUpdated: '2020-03-30T19:14:44.694Z',
131 | },
132 | baseUrl: '/o',
133 | parent: bucket,
134 | id: 'captured-squirrels%2Fmarch-2020-stats.csv',
135 | methods: {},
136 | interceptors: [],
137 | bucket,
138 | storage,
139 | name: 'captured-squirrels/march-2020-stats.csv',
140 | acl,
141 | };
142 |
// Mock of a GCS `File` object for the squirrel-simulator JS bundle artifact;
// same structure as `squirrelStatsGCSFileObj` above, differing only in the
// object name, generation, size, and content type.
export const squirrelSimulatorGCSFileObj = {
  domain: null,
  _events: {},
  _eventsCount: 0,
  metadata: {
    kind: 'storage#object',
    id: 'squirrel-chasing/squirrel-simulator/bundle.js/11212012',
    selfLink:
      'https://www.googleapis.com/storage/v1/b/squirrel-chasing/o/squirrel-simulator%2Fbundle.js',
    mediaLink:
      'https://www.googleapis.com/download/storage/v1/b/squirrel-chasing/o/squirrel-simulator%2Fbundle.js?generation=11212012&alt=media',
    name: 'squirrel-simulator/bundle.js',
    bucket: 'squirrel-chasing',
    generation: '11212012',
    metageneration: '1',
    contentType: 'application/javascript',
    storageClass: 'STANDARD',
    size: '123112',
    md5Hash: 'DOX0leRinotMTM7EGGXpjQ==',
    crc32c: 'fVcyCg==',
    etag: 'CI/UrJz0wugCEAE=',
    timeCreated: '2020-03-30T19:14:44.694Z',
    updated: '2020-03-30T19:14:44.694Z',
    timeStorageClassUpdated: '2020-03-30T19:14:44.694Z',
  },
  baseUrl: '/o',
  parent: bucket,
  id: 'squirrel-simulator%2Fbundle.js',
  methods: {},
  interceptors: [],
  bucket,
  storage,
  name: 'squirrel-simulator/bundle.js',
  acl,
};
178 |
--------------------------------------------------------------------------------
/src/utils/__fixtures__/listFiles/a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getsentry/craft/f4949f4b275e495e949bfd88e50125f284680446/src/utils/__fixtures__/listFiles/a
--------------------------------------------------------------------------------
/src/utils/__fixtures__/listFiles/b:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getsentry/craft/f4949f4b275e495e949bfd88e50125f284680446/src/utils/__fixtures__/listFiles/b
--------------------------------------------------------------------------------
/src/utils/__fixtures__/listFiles/subdir/.empty:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/getsentry/craft/f4949f4b275e495e949bfd88e50125f284680446/src/utils/__fixtures__/listFiles/subdir/.empty
--------------------------------------------------------------------------------
/src/utils/__tests__/awsLambdaLayerManager.test.ts:
--------------------------------------------------------------------------------
1 | import * as awsManager from '../awsLambdaLayerManager';
2 |
// Separator expected between the segments of a layer's canonical name.
const CANONICAL_SEPARATOR = ':';

// Minimal runtime descriptor passed to AwsLambdaLayerManager in tests.
const COMPATIBLE_RUNTIME_DATA = {
  name: 'test runtime',
  versions: ['test version 1', 'test version 2'],
};
const AWS_TEST_REGIONS = ['test aws region 1', 'test aws region 2'];

/** Builds a manager with fixed test data and an empty zip buffer. */
function getTestAwsLambdaLayerManager(): awsManager.AwsLambdaLayerManager {
  return new awsManager.AwsLambdaLayerManager(
    COMPATIBLE_RUNTIME_DATA,
    'test layer name',
    'test license',
    Buffer.alloc(0),
    AWS_TEST_REGIONS
  );
}
20 |
21 | describe('canonical', () => {
22 | test('get canonical name', () => {
23 | const manager = getTestAwsLambdaLayerManager();
24 | const canonicalSuffix = manager
25 | .getCanonicalName()
26 | .split(CANONICAL_SEPARATOR)[1];
27 | expect(canonicalSuffix).toBe('test runtime');
28 | });
29 | });
30 |
31 | describe('utils', () => {
32 | test('account from arn', () => {
33 | const testAccount = 'ACCOUNT_NUMBER';
34 | const testArn =
35 | 'arn:aws:lambda:region:' + testAccount + ':layer:layerName:version';
36 | expect(awsManager.getAccountFromArn(testArn)).toBe(testAccount);
37 | });
38 | });
39 |
40 | describe('layer publishing', () => {
41 | test('publish to single region', async () => {
42 | const regionTest = 'region-test';
43 | const manager = getTestAwsLambdaLayerManager();
44 | const publishedLayer = await manager.publishLayerToRegion(regionTest);
45 | expect(publishedLayer.region).toStrictEqual(regionTest);
46 | });
47 |
48 | test('publish to all regions', async () => {
49 | const manager = getTestAwsLambdaLayerManager();
50 | const pubishedLayers = await manager.publishToAllRegions();
51 | const publishedRegions = pubishedLayers.map(layer => layer.region);
52 | expect(publishedRegions).toStrictEqual(AWS_TEST_REGIONS);
53 | });
54 | });
55 |
--------------------------------------------------------------------------------
/src/utils/__tests__/files.test.ts:
--------------------------------------------------------------------------------
1 | import { existsSync, rmdirSync } from 'fs';
2 | import { join, resolve } from 'path';
3 |
4 | import { listFiles, withTempDir } from '../files';
5 |
describe('listFiles', () => {
  // Fixture directory contains files `a`, `b`, and a `subdir` that must be
  // skipped (see src/utils/__fixtures__/listFiles).
  const testDir = resolve(__dirname, '../__fixtures__/listFiles');
  const testFiles = ['a', 'b'].map(f => join(testDir, f));

  test('returns only files', async () => {
    expect.assertions(1);
    const files = await listFiles(testDir);
    expect(files).toEqual(testFiles);
  });
});
16 |
17 | describe('withTempDir', () => {
18 | async function testDirectories(
19 | callback: (arg: any) => any,
20 | cleanupEnabled = true
21 | ): Promise {
22 | let directory = '';
23 | try {
24 | await withTempDir(dir => {
25 | directory = dir;
26 | expect(existsSync(directory)).toBeTruthy();
27 | return callback(directory);
28 | }, cleanupEnabled);
29 | } finally {
30 | if (cleanupEnabled) {
31 | // We intentionally do not block on the clean up operation
32 | // so wait ~100ms before checking
33 | await new Promise(resolve => setTimeout(resolve, 100));
34 | expect(existsSync(directory)).toBeFalsy();
35 | } else {
36 | expect(existsSync(directory)).toBeTruthy();
37 | }
38 | }
39 | }
40 |
41 | test('creates and removes synchronously', async () => {
42 | expect.assertions(2);
43 | await testDirectories(() => true);
44 | });
45 |
46 | test('creates and removes on error', async () => {
47 | try {
48 | expect.assertions(3);
49 | await testDirectories(() => {
50 | throw new Error('fail');
51 | });
52 | } catch (e) {
53 | expect(e.message).toBe('fail');
54 | }
55 | });
56 |
57 | test('creates and does not remove if cleanup flag is specified', async () => {
58 | expect.assertions(2);
59 | let tempDir = '';
60 | await testDirectories(arg => {
61 | tempDir = arg;
62 | }, false);
63 | // Cleanup
64 | rmdirSync(tempDir);
65 | });
66 |
67 | test('creates and removes on Promise resolution', async () => {
68 | expect.assertions(2);
69 | await testDirectories(() => Promise.resolve('success'));
70 | });
71 |
72 | test('creates and removes on Promise rejection', async () => {
73 | try {
74 | expect.assertions(3);
75 | await testDirectories(() => Promise.reject(new Error('fail')));
76 | } catch (e) {
77 | expect(e.message).toBe('fail');
78 | }
79 | });
80 |
81 | test('returns the callback return value synchronously', async () => {
82 | expect.assertions(1);
83 | const result = await withTempDir(() => 'result');
84 | expect(result).toBe('result');
85 | });
86 |
87 | test('returns the callback return value asynchronously', async () => {
88 | expect.assertions(1);
89 | const result = await withTempDir(() => Promise.resolve('result'));
90 | expect(result).toBe('result');
91 | });
92 | });
93 |
--------------------------------------------------------------------------------
/src/utils/__tests__/filters.test.ts:
--------------------------------------------------------------------------------
1 | import { stringToRegexp } from '../filters';
2 |
// Tests for stringToRegexp, which parses strings of the form "/pattern/flags"
// into RegExp objects (see src/utils/filters.ts).
describe('stringToRegexp', () => {
  test('converts string without special characters', () => {
    expect(stringToRegexp('/simple/')).toEqual(/simple/);
  });

  test('converts string with special characters', () => {
    expect(stringToRegexp('/sim.le\\d+/')).toEqual(/sim.le\d+/);
  });

  test('uses regexp modifiers', () => {
    expect(stringToRegexp('/[!?]{2}\\w+/gi')).toEqual(/[!?]{2}\w+/gi);
  });

  test('is not confused by multiple slashes', () => {
    // Only the first and last slashes delimit the pattern; inner ones belong
    // to the pattern itself.
    expect(stringToRegexp('/file1/file2/i')).toEqual(/file1\/file2/i);
  });

  test('is source of regex what we think', () => {
    expect(stringToRegexp('/none/').source).toEqual('none');
  });

  test('raises an error if the value is not surrounded by slashes', () => {
    // The assertions guard ensures the test fails if no error is thrown.
    expect.assertions(1);
    try {
      stringToRegexp('no-slashes');
    } catch (e) {
      expect(e.message).toMatch(/invalid regexp/i);
    }
  });
});
33 |
--------------------------------------------------------------------------------
/src/utils/__tests__/githubApi.test.ts:
--------------------------------------------------------------------------------
1 | import { Octokit } from '@octokit/rest';
2 |
3 | import { getFile } from '../githubApi';
4 |
5 | const mockRepos = {
6 | getContent: jest.fn(),
7 | };
8 |
9 | jest.mock('@octokit/rest', () => ({
10 | Octokit: jest.fn().mockImplementation(() => ({ repos: mockRepos })),
11 | }));
12 |
13 | describe('getFile', () => {
14 | const github = new Octokit();
15 | const owner = 'owner';
16 | const repo = 'repo';
17 |
18 | const getContent = (github.repos.getContent as unknown) as jest.Mock;
19 |
20 | test('loads and decodes the file', async () => {
21 | expect.assertions(2);
22 | const testContent = 'test content.';
23 |
24 | getContent.mockReturnValue({
25 | data: { content: Buffer.from(testContent).toString('base64') },
26 | });
27 |
28 | const content = await getFile(
29 | github,
30 | owner,
31 | repo,
32 | '/path/to/file',
33 | 'v1.0.0'
34 | );
35 | expect(getContent).toHaveBeenCalledWith({
36 | owner: 'owner',
37 | path: '/path/to/file',
38 | ref: 'v1.0.0',
39 | repo: 'repo',
40 | });
41 |
42 | expect(content).toBe(testContent);
43 | });
44 |
45 | test('returns null for missing files', async () => {
46 | expect.assertions(1);
47 |
48 | getContent.mockImplementation(() => {
49 | const e = new Error('file not found') as any;
50 | e.status = 404;
51 | throw e;
52 | });
53 |
54 | const content = await getFile(
55 | github,
56 | owner,
57 | repo,
58 | '/path/to/missing',
59 | 'v1.0.0'
60 | );
61 | expect(content).toBe(undefined);
62 | });
63 |
64 | test('rejects all other errors', async () => {
65 | expect.assertions(3);
66 |
67 | const errorText = 'internal server error';
68 | getContent.mockImplementation(() => {
69 | const e = new Error(errorText) as any;
70 | e.status = 500;
71 | throw e;
72 | });
73 |
74 | try {
75 | await getFile(github, owner, repo, '/path/to/missing', 'v1.0.0');
76 | } catch (e: any) {
77 | expect(e.message).toMatch(errorText);
78 | expect(e.status).toBe(500);
79 | expect(e.code).toBe(undefined);
80 | }
81 | });
82 | });
83 |
--------------------------------------------------------------------------------
/src/utils/__tests__/gpg.test.ts:
--------------------------------------------------------------------------------
1 | import { promises as fsPromises } from 'fs';
2 | import { importGPGKey } from '../gpg';
3 | import { spawnProcess } from '../system';
4 |
5 | jest.mock('../system');
6 |
7 | jest.mock('fs', () => ({
8 | ...jest.requireActual('fs'),
9 | promises: {
10 | writeFile: jest.fn(() => Promise.resolve()),
11 | unlink: jest.fn(),
12 | },
13 | }));
14 |
15 | describe('importGPGKey', () => {
16 | const KEY = 'very_private_key_like_for_real_really_private';
17 | const PRIVATE_KEY_FILE_MATCHER = expect.stringMatching(/private-key.asc$/);
18 |
19 | test('should write key to temp file', async () => {
20 | importGPGKey(KEY);
21 | expect(fsPromises.writeFile).toHaveBeenCalledWith(
22 | PRIVATE_KEY_FILE_MATCHER,
23 | KEY
24 | );
25 | });
26 |
27 | test('should remove file with the key afterwards', async () => {
28 | importGPGKey(KEY);
29 | expect(spawnProcess).toHaveBeenCalledWith('gpg', [
30 | '--batch',
31 | '--import',
32 | PRIVATE_KEY_FILE_MATCHER,
33 | ]);
34 | });
35 |
36 | test('should call gpg command to load the key', async () => {
37 | importGPGKey(KEY);
38 | expect(fsPromises.unlink).toHaveBeenCalledWith(PRIVATE_KEY_FILE_MATCHER);
39 | });
40 | });
41 |
--------------------------------------------------------------------------------
/src/utils/__tests__/helpers.test.ts:
--------------------------------------------------------------------------------
1 | import { envToBool } from '../helpers';
2 |
// Table-driven check of envToBool: each [input, expected] pair covers falsy
// spellings (undefined/null/false/'0'/'no'/empty string) and truthy ones
// (true/1/'yes' and arbitrary non-empty strings).
describe('envToBool', () =>
  test.each([
    [undefined, false],
    [null, false],
    [false, false],
    ['undefined', false],
    ['null', false],
    ['', false],
    ['0', false],
    ['no', false],
    [true, true],
    ['true', true],
    [1, true],
    ['1', true],
    ['yes', true],
    ['dogs are great!', true],
  ])('From %j we should get "%s"', (envVar, result) =>
    expect(envToBool(envVar)).toBe(result)
  ));
22 |
--------------------------------------------------------------------------------
/src/utils/__tests__/objects.test.ts:
--------------------------------------------------------------------------------
1 | import { clearObjectProperties } from '../objects';
2 |
3 | describe('clearObjectProperties', () => {
4 | test('clears enumerable properties', () => {
5 | const obj = { a: 1, test: 'hello', f: () => 0, o: { 1: 2 } };
6 |
7 | expect(clearObjectProperties(obj)).toEqual({});
8 | expect(obj).toEqual({});
9 | });
10 | });
11 |
--------------------------------------------------------------------------------
/src/utils/__tests__/packagePath.test.ts:
--------------------------------------------------------------------------------
1 | import { parseCanonical } from '../packagePath';
2 |
3 | describe('parseCanonical', () => {
4 | test('parses valid cases properly', async () => {
5 | expect(parseCanonical('pypi:sentry-sdk')).toEqual(['pypi', 'sentry-sdk']);
6 | expect(parseCanonical('npm:@sentry/browser')).toEqual([
7 | 'npm',
8 | '@sentry',
9 | 'browser',
10 | ]);
11 | expect(parseCanonical('test-registry:a.1/b.2/c.3')).toEqual([
12 | 'test-registry',
13 | 'a.1',
14 | 'b.2',
15 | 'c.3',
16 | ]);
17 | });
18 |
19 | test('allows colons as path separators', async () => {
20 | expect(parseCanonical('maven:io.sentry:sentry')).toEqual([
21 | 'maven',
22 | 'io.sentry',
23 | 'sentry',
24 | ]);
25 | });
26 |
27 | test('throws an error for invalid canonical names', async () => {
28 | function expectRaisesError(name: string): void {
29 | try {
30 | parseCanonical(name);
31 | throw new Error(`Should fail for canonical name: ${name}`);
32 | } catch (e) {
33 | expect(e.message).toMatch(/cannot parse/i);
34 | }
35 | }
36 |
37 | expectRaisesError('invalid');
38 | expectRaisesError('invalid:');
39 | expectRaisesError('a/b');
40 | expectRaisesError('registry:a/');
41 | });
42 | });
43 |
--------------------------------------------------------------------------------
/src/utils/__tests__/strings.test.ts:
--------------------------------------------------------------------------------
1 | import {
2 | renderTemplateSafe,
3 | sanitizeObject,
4 | formatSize,
5 | formatJson,
6 | } from '../strings';
7 |
8 | describe('sanitizeObject', () => {
9 | test('processes empty object', () => {
10 | expect(sanitizeObject({})).toEqual({});
11 | });
12 |
13 | test('throws an error if given non-object', () => {
14 | function expectRaisesError(value: any): void {
15 | try {
16 | sanitizeObject(value);
17 | throw new Error(`Should fail for canonical name: ${value}`);
18 | } catch (e) {
19 | expect(e.message).toMatch(/cannot normalize/i);
20 | }
21 | }
22 | expectRaisesError(123);
23 | expectRaisesError('a');
24 | expectRaisesError(null);
25 | });
26 |
27 | test('processes simple objects without changes', () => {
28 | expect(sanitizeObject({ 1: 2 })).toEqual({ 1: 2 });
29 | });
30 |
31 | test('processes nested objects without changes', () => {
32 | expect(sanitizeObject({ 1: { a: { 3: true } }, 2: 'b' })).toEqual({
33 | 1: { a: { 3: true } },
34 | 2: 'b',
35 | });
36 | });
37 |
38 | test('ignores function values', () => {
39 | expect(sanitizeObject({ f: () => true })).toEqual({});
40 | });
41 |
42 | test('replaces null with undefined', () => {
43 | expect(sanitizeObject({ 1: null })).toEqual({ 1: undefined });
44 | });
45 |
46 | test('normalizes keys with dots', () => {
47 | expect(sanitizeObject({ '1.2.3': 3 })).toEqual({
48 | '1.2.3': 3,
49 | '1__2__3': 3,
50 | });
51 | });
52 | });
53 |
describe('renderTemplateSafe', () => {
  test('renders basic template', () => {
    expect(renderTemplateSafe('x{{ var }}', { var: 123 })).toBe('x123');
  });

  test('renders nested values', () => {
    expect(renderTemplateSafe('x{{ var.d }}', { var: { d: 123 } })).toBe(
      'x123'
    );
  });

  test('renders nested values with dotted keys', () => {
    // Dotted keys are reachable via their `__`-normalized aliases
    // (see the sanitizeObject "normalizes keys with dots" test above).
    expect(renderTemplateSafe('x{{ var.d__1 }}', { var: { 'd.1': 123 } })).toBe(
      'x123'
    );
  });

  test('does not render globals', () => {
    // Globals such as `process` must not leak into templates.
    expect(renderTemplateSafe('{{ process }}', {})).toBe('');
  });
});

describe('formatSize', () => {
  test('formats byte sizes', () => {
    expect(formatSize(123)).toBe('123 B');
  });
  test('formats kilobyte sizes', () => {
    expect(formatSize(125952)).toBe('123.0 kB');
  });
  test('formats megabyte sizes', () => {
    expect(formatSize(1289748)).toBe('1.23 MB');
  });
});

describe('formatJson', () => {
  test('formats an integer', () => {
    expect(formatJson(123)).toBe('123');
  });
  test('formats an object', () => {
    // Expects 2-space pretty-printing.
    expect(formatJson({ int: 123, str: 'hello', array: [2, 3, 4] })).toBe(
      `{
  "int": 123,
  "str": "hello",
  "array": [
    2,
    3,
    4
  ]
}`
    );
  });
  test('serializes an error', () => {
    const errorStr = formatJson(Error('oops'));
    expect(errorStr).toContain('Error: oops');
    expect(errorStr).toContain('at Object');
  });
});
111 |
--------------------------------------------------------------------------------
/src/utils/checksum.ts:
--------------------------------------------------------------------------------
1 | import {
2 | BaseArtifactProvider,
3 | RemoteArtifact,
4 | } from '../artifact_providers/base';
5 | import { ConfigurationError } from './errors';
6 | import { HashAlgorithm, HashOutputFormat } from './system';
7 |
8 | /** Describes a checksum entry. */
9 | export interface ChecksumEntry {
10 | /** Checksum (hash) algorithm */
11 | algorithm: HashAlgorithm;
12 | /** Checksum format */
13 | format: HashOutputFormat;
14 | }
15 |
16 | /**
17 | * Checks the provided checksums configuration.
18 | *
19 | * TODO: this all has to be replaced with JSON schema.
20 | *
21 | * @param checksums Raw checksum configuration.
22 | */
23 | export function castChecksums(checksums: any[]): ChecksumEntry[] {
24 | if (!checksums) {
25 | return [];
26 | }
27 | if (!Array.isArray(checksums)) {
28 | throw new ConfigurationError(
29 | 'Invalid type of "checksums": should be an array'
30 | );
31 | }
32 | return checksums.map(
33 | (item: any): ChecksumEntry => {
34 | if (typeof item !== 'object' || !item.algorithm || !item.format) {
35 | throw new ConfigurationError(
36 | `Invalid checksum type: ${JSON.stringify(item)}`
37 | );
38 | }
39 | if (
40 | !Object.values(HashAlgorithm).includes(item.algorithm) ||
41 | !Object.values(HashOutputFormat).includes(item.format)
42 | ) {
43 | throw new ConfigurationError(
44 | `Invalid checksum type: ${JSON.stringify(item)}`
45 | );
46 | }
47 | return {
48 | algorithm: item.algorithm,
49 | format: item.format,
50 | };
51 | }
52 | );
53 | }
54 |
55 | /**
56 | * Retrieves a mapping from the provided checksums to the computed checksum.
57 | * @param checksums List of checksums to be calculated.
58 | * @param artifact The artifact to calculate the checksums of.
59 | * @param artifactProvider The artifact provider to get the checksum of.
60 | */
61 | export async function getArtifactChecksums(
62 | checksums: ChecksumEntry[],
63 | artifact: RemoteArtifact,
64 | artifactProvider: BaseArtifactProvider
65 | ): Promise<{
66 | [key: string]: string;
67 | }> {
68 | const fileChecksums: { [key: string]: string } = {};
69 | for (const checksumType of checksums) {
70 | const { algorithm, format } = checksumType;
71 | const currentChecksum = await artifactProvider.getChecksum(
72 | artifact,
73 | algorithm,
74 | format
75 | );
76 | fileChecksums[`${algorithm}-${format}`] = currentChecksum;
77 | }
78 | return fileChecksums;
79 | }
80 |
--------------------------------------------------------------------------------
/src/utils/errors.ts:
--------------------------------------------------------------------------------
1 | import { logger } from '../logger';
2 | import { isDryRun } from './helpers';
3 | import { captureException } from '@sentry/node';
4 |
/**
 * Custom error class that describes client configuration errors
 *
 * Treated as a user-fixable problem: `handleGlobalError` (below) skips
 * Sentry reporting for this error type.
 */
export class ConfigurationError extends Error {
  // We have to do the following because of: https://github.com/Microsoft/TypeScript/issues/13965
  // Otherwise we cannot use instanceof later to catch a given type
  /** Error prototype */
  public __proto__: Error;

  public constructor(message?: string) {
    // Capture the subclass prototype before `super()` potentially resets it,
    // then restore it so `instanceof ConfigurationError` works when the
    // compile target downlevels class inheritance.
    const trueProto = new.target.prototype;
    super(message);

    this.__proto__ = trueProto;
  }
}
21 |
22 | /**
23 | * Writes an error or message to "error" log if in dry-mode, throws an error
24 | * otherwise
25 | *
26 | * @param error Error object or error message
27 | * @param errorLogger Optional logger to use
28 | */
29 | export function reportError(
30 | error: unknown,
31 | errorLogger: {
32 | error: (...message: string[]) => void;
33 | [key: string]: any;
34 | } = logger
35 | ): void {
36 | if (!isDryRun()) {
37 | // wrap the error in an Error object if it isn't already one
38 | const errorObj = error instanceof Error ? error : new Error(String(error));
39 | throw errorObj;
40 | } else {
41 | // conversely, convert the error to a string if it isn't already one
42 | const errorStr = typeof error === 'string' ? error : String(error);
43 | errorLogger.error(`[dry-run] ${errorStr}`);
44 | }
45 | }
46 |
/**
 * Processes an uncaught exception on the global level
 *
 * Sends the error to Sentry if Sentry SDK is configured.
 * It is expected that the program is terminated soon after
 * this function is called.
 *
 * @param e Error (exception) object to handle
 */
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
export function handleGlobalError(e: any): void {
  // Configuration errors are user-fixable, so they are not reported to Sentry.
  if (!(e instanceof ConfigurationError)) {
    captureException(e);
  }
  logger.error(e);
  // Set the exit code instead of calling process.exit() so pending async work
  // (e.g. delivering the Sentry event) can finish before the process ends.
  process.exitCode = 1;
}
64 |
--------------------------------------------------------------------------------
/src/utils/files.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs';
2 | import * as os from 'os';
3 | import * as path from 'path';
4 | import rimraf from 'rimraf';
5 | import * as tmp from 'tmp';
6 | import * as util from 'util';
7 |
8 | import { filterAsync } from './async';
9 | import { logger } from '../logger';
10 |
// Promisified fs helpers (this file predates wide use of `fs.promises`).
const lstat = util.promisify(fs.lstat);
// NOTE(review): `readdirp` and `readdir` are both `util.promisify(fs.readdir)`
// — one of the two is redundant; confirm callers before consolidating.
const readdirp = util.promisify(fs.readdir);
const mkdtemp = util.promisify(fs.mkdtemp);
const readdir = util.promisify(fs.readdir);
15 |
16 | /**
17 | * Lists all files traversing through subfolders.
18 | *
19 | * The path should be given absolute. Relative paths are evaluated from the
20 | * current working directory. Throws if the path is missing. The resulting
21 | * file paths are joined with the path argument, and thus also absolute or
22 | * relative depending on the input parameter.
23 | *
24 | * @param directory The path to the directory
25 | * @returns A list of paths to files within the directory
26 | */
27 | export async function scan(
28 | directory: string,
29 | results: string[] = []
30 | ): Promise {
31 | const files = await readdirp(directory);
32 | for (const f of files) {
33 | const fullPath = path.join(directory, f);
34 | const stat = await lstat(fullPath);
35 | if (stat.isDirectory()) {
36 | await scan(fullPath, results);
37 | } else {
38 | results.push(fullPath);
39 | }
40 | }
41 | return results;
42 | }
43 |
44 | /**
45 | * Lists all direct files within the specified directory, skipping directories
46 | * and symlinks
47 | *
48 | * The path should be given absolute. Relative paths are evaluated from the
49 | * current working directory. Throws if the path is missing. The resulting
50 | * file paths are joined with the path argument, and thus also absolute or
51 | * relative depending on the input parameter.
52 | *
53 | * @param directory The path to the directory
54 | * @returns A list of paths to files within the directory
55 | */
56 | export async function listFiles(directory: string): Promise {
57 | const files = await readdir(directory);
58 | const paths = files.map(name => path.join(directory, name));
59 | return filterAsync(paths, async filePath => {
60 | const stats = await lstat(filePath);
61 | return stats.isFile();
62 | });
63 | }
64 |
65 | /**
66 | * Execute an asynchronous callback within a temp directory
67 | *
68 | * If "cleanup" flag is set to true, automatically removes the directory and
69 | * all contents when the callback finishes or throws.
70 | *
71 | * @param callback A callback that receives the directory path
72 | * @param prefix A prefix to put in front of the new directory
73 | * @param cleanup A flag that configures clean-up behavior
74 | * @returns The return value of the callback
75 | */
76 | export async function withTempDir(
77 | callback: (arg: string) => T | Promise,
78 | cleanup = true,
79 | prefix = 'craft-'
80 | ): Promise {
81 | const directory = await mkdtemp(path.join(os.tmpdir(), prefix));
82 | try {
83 | return await callback(directory);
84 | } finally {
85 | if (cleanup) {
86 | rimraf(directory, err => {
87 | // XXX(BYK): intentionally DO NOT await unlinking as we do not want
88 | // to block (both in terms of waiting for IO and the success of the
89 | // operation) finishing the task at hand. If unlinking fails, we honestly
90 | // don't care as this is already a temporary file and will be removed
91 | // eventually by the OS. And it doesn't make sense to wait until this op
92 | // finishes then as nothing relies on the removal of this file.
93 | if (err) {
94 | logger.trace(`Couldn't remove temp dir ${directory}:`, err);
95 | }
96 | });
97 | }
98 | }
99 | }
100 |
101 | /**
102 | * Execute an asynchronous callback with a temporary file
103 | *
104 | * If "cleanup" flag is set to true, automatically removes the file when the
105 | * callback finishes or throws.
106 | *
107 | * @param callback A callback that receives the file path
108 | * @param prefix A prefix to put in front of the new file
109 | * @param cleanup A flag that configures clean-up behavior
110 | * @returns The return value of the callback
111 | */
112 | export async function withTempFile(
113 | callback: (arg: string) => T | Promise,
114 | cleanup = true,
115 | prefix = 'craft-'
116 | ): Promise {
117 | tmp.setGracefulCleanup();
118 | const tmpFile = tmp.fileSync({ prefix });
119 | try {
120 | return await callback(tmpFile.name);
121 | } finally {
122 | if (cleanup) {
123 | tmpFile.removeCallback();
124 | }
125 | }
126 | }
127 |
128 | /**
129 | * Detect the content-type based on the file's extension.
130 | *
131 | * @param artifactName Name of the artifact to check
132 | * @returns A content-type string, or undefined if the artifact name doesn't
133 | * have a known extension
134 | */
135 | export function detectContentType(artifactName: string): string | undefined {
136 | const extensionToType: Array<[RegExp, string]> = [
137 | [/\.js$/, 'application/javascript; charset=utf-8'],
138 | [/\.js\.map$/, 'application/json; charset=utf-8'],
139 | ];
140 | for (const entry of extensionToType) {
141 | const [regex, contentType] = entry;
142 | if (artifactName.match(regex)) {
143 | return contentType;
144 | }
145 | }
146 | return undefined;
147 | }
148 |
--------------------------------------------------------------------------------
/src/utils/filters.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Converts the given string to a regular expression object
3 | *
4 | * The passed string should be in the form of a JS regular expression, i.e.
5 | * surrounded with slashes and with optional modifiers after the closing slash.
6 | * Examples:
7 | * "/\\w+[123]/"
8 | * "/hello/i"
9 | *
10 | * Invalid arguments:
11 | * "hithere" // No slashes
12 | * "/craft/xyz" // Invalid modifiers
13 | *
14 | * @param str String to convert
15 | * @returns Regular expression object that was created from the string
16 | */
17 | export function stringToRegexp(str: string): RegExp {
18 | const firstSlash = str.indexOf('/');
19 | const lastSlash = str.lastIndexOf('/');
20 | if (firstSlash !== 0 || lastSlash < 2) {
21 | throw new TypeError(`Invalid RegExp string specified: ${str}`);
22 | }
23 | const regexpString = str.slice(1, lastSlash);
24 | const regexpModifiers = str.slice(lastSlash + 1);
25 | return new RegExp(regexpString, regexpModifiers);
26 | }
27 |
--------------------------------------------------------------------------------
/src/utils/git.ts:
--------------------------------------------------------------------------------
1 | import simpleGit, { SimpleGit } from 'simple-git';
2 |
3 | import { getConfigFileDir } from '../config';
4 | import { ConfigurationError } from './errors';
5 | import { logger } from '../logger';
6 |
7 | export interface GitChange {
8 | hash: string;
9 | title: string;
10 | body: string;
11 | pr: string | null;
12 | }
13 |
14 | // This regex relies on the default GitHub behavior where it appends the PR
15 | // number to the end of the commit title as: `fix: Commit title (#123)`.
16 | // This makes it very cheap and quick to extract the associated PR number just
17 | // from the commit log locally.
18 | // If this fails at some future, we can always revert back to using the GitHub
19 | // API that gives you the PRs associated with a commit:
20 | // https://docs.github.com/en/rest/commits/commits#list-pull-requests-associated-with-a-commit
21 | export const PRExtractor = /(?<=\(#)\d+(?=\)$)/;
22 |
23 | export async function getDefaultBranch(
24 | git: SimpleGit,
25 | remoteName: string
26 | ): Promise {
27 | // This part is courtesy of https://stackoverflow.com/a/62397081/90297
28 | return stripRemoteName(
29 | await git
30 | .remote(['set-head', remoteName, '--auto'])
31 | .revparse(['--abbrev-ref', `${remoteName}/HEAD`]),
32 | remoteName
33 | );
34 | }
35 |
36 | export async function getLatestTag(git: SimpleGit): Promise {
37 | // This part is courtesy of https://stackoverflow.com/a/7261049/90297
38 | return (await git.raw('describe', '--tags', '--abbrev=0')).trim();
39 | }
40 |
41 | export async function getChangesSince(
42 | git: SimpleGit,
43 | rev: string
44 | ): Promise {
45 | const { all: commits } = await git.log({
46 | from: rev,
47 | to: 'HEAD',
48 | // The symmetric option defaults to true, giving us all the different commits
49 | // reachable from both `from` and `to` whereas what we are interested in is only the ones
50 | // reachable from `to` and _not_ from `from` so we get a "changelog" kind of list.
51 | // One is `A - B` and the other is more like `A XOR B`. We want `A - B`.
52 | // See https://github.com/steveukx/git-js#git-log and
53 | // https://git-scm.com/docs/gitrevisions#_dotted_range_notations for more
54 | symmetric: false,
55 | '--no-merges': null,
56 | // Limit changes to the CWD to better support monorepos
57 | // this should still return all commits for individual repos when run from
58 | // the repo root.
59 | file: '.',
60 | });
61 | return commits.map(commit => ({
62 | hash: commit.hash,
63 | title: commit.message,
64 | body: commit.body,
65 | pr: commit.message.match(PRExtractor)?.[0] || null,
66 | }));
67 | }
68 |
69 | export function stripRemoteName(
70 | branch: string | undefined,
71 | remoteName: string
72 | ): string {
73 | const branchName = branch || '';
74 | const remotePrefix = remoteName + '/';
75 | if (branchName.startsWith(remotePrefix)) {
76 | return branchName.slice(remotePrefix.length);
77 | }
78 | return branchName;
79 | }
80 |
81 | export async function getGitClient(): Promise {
82 | const configFileDir = getConfigFileDir() || '.';
83 | // Move to the directory where the config file is located
84 | process.chdir(configFileDir);
85 | logger.debug(`Working directory:`, process.cwd());
86 |
87 | const git = simpleGit(configFileDir);
88 | const isRepo = await git.checkIsRepo();
89 | if (!isRepo) {
90 | throw new ConfigurationError('Not in a git repository!');
91 | }
92 | return git;
93 | }
94 |
--------------------------------------------------------------------------------
/src/utils/githubApi.ts:
--------------------------------------------------------------------------------
1 | import { Octokit } from '@octokit/rest';
2 |
3 | import { LogLevel, logger } from '../logger';
4 |
5 | import { ConfigurationError } from './errors';
6 |
7 | /**
8 | * Abstraction for GitHub remotes
9 | */
10 | export class GitHubRemote {
11 | /** GitHub owner */
12 | public readonly owner: string;
13 | /** GitHub repository name */
14 | public readonly repo: string;
15 | /** GitHub username */
16 | protected username?: string;
17 | /** GitHub personal authentication token */
18 | protected apiToken?: string;
19 | /** GitHub hostname */
20 | protected readonly GITHUB_HOSTNAME: string = 'github.com';
21 | /** Protocol prefix */
22 | protected readonly PROTOCOL_PREFIX: string = 'https://';
23 | /** Url in the form of /OWNER/REPO/ */
24 | protected readonly url: string;
25 |
26 | public constructor(
27 | owner: string,
28 | repo: string,
29 | apiToken?: string
30 | ) {
31 | this.owner = owner;
32 | this.repo = repo;
33 | if (apiToken) {
34 | this.setAuth(apiToken);
35 | }
36 | this.url = `/${this.owner}/${this.repo}/`;
37 | }
38 |
39 | /**
40 | * Sets authentication arguments: username and personal API token
41 | *
42 | * @param username GitHub username
43 | * @param apiToken GitHub API token
44 | */
45 | public setAuth(apiToken: string): void {
46 | this.username = 'placeholderusername';
47 | this.apiToken = apiToken;
48 | }
49 |
50 | /**
51 | * Returns an HTTP-based git remote
52 | *
53 | * It is guaranteed not to contain any sensitive information (e.g. API tokens)
54 | */
55 | public getRemoteString(): string {
56 | return this.PROTOCOL_PREFIX + this.GITHUB_HOSTNAME + this.url;
57 | }
58 |
59 | /**
60 | * Returns an HTTP-based git remote with embedded HTTP basic auth
61 | *
62 | * It MAY contain sensitive information (e.g. API tokens)
63 | */
64 | public getRemoteStringWithAuth(): string {
65 | const authData =
66 | this.username && this.apiToken
67 | ? `${this.username}:${this.apiToken}@`
68 | : '';
69 | return this.PROTOCOL_PREFIX + authData + this.GITHUB_HOSTNAME + this.url;
70 | }
71 | }
72 |
73 | /**
74 | * Gets GitHub API token from environment
75 | *
76 | * @returns GitHub authentication token if found
77 | */
78 | export function getGitHubApiToken(): string {
79 | const githubApiToken =
80 | process.env.GITHUB_TOKEN || process.env.GITHUB_API_TOKEN;
81 | if (!githubApiToken) {
82 | throw new ConfigurationError(
83 | 'GitHub target: GITHUB_TOKEN not found in the environment'
84 | );
85 | }
86 | return githubApiToken;
87 | }
88 |
89 | const _GitHubClientCache: Record = {};
90 |
91 | /**
92 | * Gets an authenticated GitHub client object
93 | *
94 | * The authentication token is taken from the environment, if not provided.
95 | *
96 | * @param token GitHub authentication token
97 | * @returns GitHub client
98 | */
99 | export function getGitHubClient(token = ''): Octokit {
100 | const githubApiToken = token || getGitHubApiToken();
101 |
102 | if (!_GitHubClientCache[githubApiToken]) {
103 | const attrs: any = {
104 | auth: `token ${githubApiToken}`,
105 | };
106 |
107 | // Silence debug logs, as they do not provide any useful information
108 | // about the requests, yet they are very noisy and make it difficult
109 | // to track what's going on.
110 | if (logger.level >= LogLevel.Debug) {
111 | attrs.log = {
112 | info: (message: string) => logger.debug(message),
113 | };
114 | }
115 |
116 | // eslint-disable-next-line @typescript-eslint/no-var-requires
117 | const { retry } = require('@octokit/plugin-retry');
118 | const octokitWithRetries = Octokit.plugin(retry);
119 | _GitHubClientCache[githubApiToken] = new octokitWithRetries(attrs);
120 | }
121 |
122 | return _GitHubClientCache[githubApiToken];
123 | }
124 |
125 | /**
126 | * Loads a file from the context's repository
127 | *
128 | * @param github GitHub client
129 | * @param owner Repository owner
130 | * @param repo Repository name
131 | * @param path The path of the file in the repository
132 | * @param ref The string name of commit / branch / tag
133 | * @returns The decoded file contents
134 | */
135 | export async function getFile(
136 | github: Octokit,
137 | owner: string,
138 | repo: string,
139 | path: string,
140 | ref: string
141 | ): Promise {
142 | try {
143 | const response = await github.repos.getContent({
144 | owner,
145 | path,
146 | ref,
147 | repo,
148 | });
149 | // Response theoretically could be a list of files
150 | if (response.data instanceof Array || !('content' in response.data)) {
151 | return undefined;
152 | }
153 | return Buffer.from(response.data.content, 'base64').toString();
154 | } catch (e: any) {
155 | if (e.status === 404) {
156 | return undefined;
157 | }
158 | throw e;
159 | }
160 | }
161 |
--------------------------------------------------------------------------------
/src/utils/gpg.ts:
--------------------------------------------------------------------------------
1 | import { tmpdir } from 'os';
2 | import { promises as fsPromises } from 'fs';
3 | import * as path from 'path';
4 | import { spawnProcess } from './system';
5 |
6 | export async function importGPGKey(privateKey: string): Promise {
7 | const PRIVATE_KEY_FILE = path.join(tmpdir(), 'private-key.asc');
8 |
9 | await fsPromises.writeFile(PRIVATE_KEY_FILE, privateKey);
10 | await spawnProcess(`gpg`, ['--batch', '--import', PRIVATE_KEY_FILE]);
11 | await fsPromises.unlink(PRIVATE_KEY_FILE);
12 | }
13 |
--------------------------------------------------------------------------------
/src/utils/helpers.ts:
--------------------------------------------------------------------------------
1 | import prompts from 'prompts';
2 | import { logger, LogLevel, setLevel } from '../logger';
3 |
4 | const FALSY_ENV_VALUES = new Set(['', 'undefined', 'null', '0', 'false', 'no']);
5 | export function envToBool(envVar: unknown): boolean {
6 | const normalized = String(envVar).toLowerCase();
7 | return !FALSY_ENV_VALUES.has(normalized);
8 | }
9 |
/** Global command-line flags shared by all commands */
interface GlobalFlags {
  [flag: string]: any;
  // Read back via isDryRun()
  'dry-run': boolean;
  // Read back (negated) via hasInput()
  'no-input': boolean;
  // Logger verbosity name; applied via setLevel() in setGlobals()
  'log-level': keyof typeof LogLevel;
}

// Current values of the global flags; overwritten from argv by setGlobals()
const GLOBAL_FLAGS: GlobalFlags = {
  'dry-run': false,
  'no-input': false,
  'log-level': 'Info',
};
22 |
23 | export function setGlobals(argv: GlobalFlags): void {
24 | for (const globalFlag of Object.keys(GLOBAL_FLAGS)) {
25 | GLOBAL_FLAGS[globalFlag] = argv[globalFlag];
26 | }
27 | logger.trace('Global flags:', GLOBAL_FLAGS);
28 | setLevel(LogLevel[GLOBAL_FLAGS['log-level']]);
29 | logger.trace('Argv: ', argv);
30 | }
31 |
32 | export function isDryRun(): boolean {
33 | return GLOBAL_FLAGS['dry-run'];
34 | }
35 |
36 | /**
37 | * Prompt the user that everything is OK and we should proceed
38 | */
39 | export async function promptConfirmation(): Promise {
40 | if (hasInput()) {
41 | const { isReady } = await prompts({
42 | message: 'Is everything OK? Type "yes" to proceed:',
43 | name: 'isReady',
44 | type: 'text',
45 | // Force the user to type something that is not empty or one letter such
46 | // as y/n to make sure this is a concious choice.
47 | validate: (input: string) =>
48 | input.length >= 2 || 'Please type "yes" to proceed',
49 | });
50 | if (isReady.toLowerCase() !== 'yes') {
51 | logger.error('Oh, okay. Aborting.');
52 | process.exit(1);
53 | }
54 | } else {
55 | logger.debug('Skipping the confirmation prompt.');
56 | }
57 | }
58 |
59 | export function hasInput(): boolean {
60 | return !GLOBAL_FLAGS['no-input'];
61 | }
62 |
--------------------------------------------------------------------------------
/src/utils/objects.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Clears all enumerable properties from the object
3 | *
4 | * @param obj Random object
5 | * @returns The input object with deleted properties
6 | */
7 | export function clearObjectProperties(obj: Record): any {
8 | for (const prop of Object.keys(obj)) {
9 | delete obj[prop];
10 | }
11 | return obj;
12 | }
13 |
--------------------------------------------------------------------------------
/src/utils/packagePath.ts:
--------------------------------------------------------------------------------
1 | import * as path from 'path';
2 | import { RegistryPackageType } from '../utils/registry';
3 | import { ConfigurationError } from './errors';
4 |
5 | /**
6 | * Returns the path to the SDK, given its canonical name.
7 | *
8 | * @param canonical The SDK's canonical name.
9 | * @returns The SDK path.
10 | */
11 | function getSdkPackagePath(canonical: string): string {
12 | const packageDirs = parseCanonical(canonical);
13 | return path.join('packages', ...packageDirs);
14 | }
15 |
16 | /**
17 | * Returns the path to the app, given its canonical name.
18 | *
19 | * @param canonical The app's canonical name.
20 | * @returns The app path.
21 | */
22 | function getAppPackagePath(canonical: string): string {
23 | const packageDirs = parseCanonical(canonical);
24 | if (packageDirs[0] !== 'app') {
25 | throw new ConfigurationError(
26 | `Invalid canonical entry for an app: ${canonical}`
27 | );
28 | }
29 | return path.join('apps', ...packageDirs.slice(1));
30 | }
31 |
32 | /**
33 | * Returns the path to the package from its canonical name.
34 | *
35 | * @param packageType The type of the registry package.
36 | * @param canonical The app's canonical name.
37 | */
38 | export function getPackageDirPath(
39 | packageType: RegistryPackageType,
40 | canonical: string
41 | ): string {
42 | switch (packageType) {
43 | case RegistryPackageType.SDK:
44 | return getSdkPackagePath(canonical);
45 | case RegistryPackageType.APP:
46 | return getAppPackagePath(canonical);
47 | default:
48 | throw new ConfigurationError(
49 | `Unknown registry package type: ${packageType}`
50 | );
51 | }
52 | }
53 |
54 | /**
55 | * Parses registry canonical name to a list of registry directories
56 | *
57 | * Colons can be used as separators in addition to forward slashes.
58 | *
59 | * Examples:
60 | * "npm:@sentry/browser" -> ["npm", "@sentry", "browser"]
61 | * "maven:io.sentry:sentry" -> ["maven", "io.sentry", "sentry"]
62 | *
63 | * @param canonicalName Registry canonical name
64 | * @returns A list of directories
65 | */
66 | export function parseCanonical(canonicalName: string): string[] {
67 | const registrySepPosition = canonicalName.indexOf(':');
68 | if (registrySepPosition === -1) {
69 | throw new ConfigurationError(
70 | `Cannot parse canonical name for the package: ${canonicalName}`
71 | );
72 | }
73 | const registry = canonicalName.slice(0, registrySepPosition);
74 | const packageName = canonicalName.slice(registrySepPosition + 1);
75 |
76 | const packageDirs = packageName.split(/[:/]/);
77 | if (packageDirs.some(x => !x)) {
78 | throw new ConfigurationError(
79 | `Cannot parse canonical name for the package: ${canonicalName}`
80 | );
81 | }
82 | return [registry, ...packageDirs];
83 | }
84 |
--------------------------------------------------------------------------------
/src/utils/registry.ts:
--------------------------------------------------------------------------------
1 | import { promises as fsPromises, existsSync } from 'fs';
2 | import * as path from 'path';
3 |
4 | import { logger } from '../logger';
5 | import { createSymlinks } from './symlink';
6 | import { reportError } from './errors';
7 | import { GitHubRemote } from './githubApi';
8 | import { getPackageDirPath } from '../utils/packagePath';
9 |
/**
 * Type of the registry package
 *
 * Determines the directory layout used when resolving the package path
 * (see getPackageDirPath).
 */
export enum RegistryPackageType {
  /** App is a generic package type that doesn't belong to any specific registry */
  APP = 'app',
  /** SDK is a package hosted in one of public registries (PyPI, NPM, etc.) */
  SDK = 'sdk',
}
17 |
/**
 * Reads the current ("latest.json") package manifest and computes the path
 * for the new version file.
 *
 * @param baseDir Base directory for the registry clone
 * @param type The type of the registry package (SDK or app)
 * @param canonicalName The package's canonical name
 * @param version The new package version
 * @returns The path of the new version file and the parsed latest manifest
 */
export async function getPackageManifest(
  baseDir: string,
  type: RegistryPackageType,
  canonicalName: string,
  version: string
): Promise<{ versionFilePath: string; packageManifest: any }> {
  const packageDirPath = getPackageDirPath(type, canonicalName);
  const versionFilePath = path.join(baseDir, packageDirPath, `${version}.json`);
  // Refuse to overwrite an already-existing version file
  if (existsSync(versionFilePath)) {
    reportError(`Version file for "${version}" already exists. Aborting.`);
  }
  const packageManifestPath = path.join(baseDir, packageDirPath, 'latest.json');
  logger.debug('Reading the current configuration from', packageManifestPath);
  return {
    versionFilePath,
    packageManifest:
      JSON.parse(
        await fsPromises.readFile(packageManifestPath, { encoding: 'utf-8' })
      ) || {},
  };
}
46 |
47 | /**
48 | * Updates the manifest to the version in the path and creates the symlinks to
49 | * the new version.
50 | *
51 | * @param updatedManifest The updated manifest.
52 | * @param version The new version to be updated.
53 | * @param versionFilePath The path of the version file.
54 | * @param previousVersion The previous version.
55 | */
56 | export async function updateManifestSymlinks(
57 | updatedManifest: unknown,
58 | version: string,
59 | versionFilePath: string,
60 | previousVersion: string
61 | ): Promise {
62 | const manifestString = JSON.stringify(updatedManifest, undefined, 2) + '\n';
63 | logger.trace('Updated manifest', manifestString);
64 | logger.debug(`Writing updated manifest to "${versionFilePath}"...`);
65 | await fsPromises.writeFile(versionFilePath, manifestString);
66 | createSymlinks(versionFilePath, version, previousVersion);
67 | }
68 |
// Remote for the getsentry/sentry-release-registry repository, the default
// target of registry updates.
export const DEFAULT_REGISTRY_REMOTE = new GitHubRemote(
  'getsentry',
  'sentry-release-registry'
);
73 |
--------------------------------------------------------------------------------
/src/utils/sentry.ts:
--------------------------------------------------------------------------------
1 | import { arch, hostname, platform, release, userInfo } from 'os';
2 |
3 | import * as Sentry from '@sentry/node';
4 |
5 | import { logger } from '../logger';
6 | import { getPackageVersion } from './version';
7 |
/**
 * Initializes Sentry SDK if CRAFT_SENTRY_DSN is set
 *
 * The DSN is read from the CRAFT_SENTRY_DSN environment variable; any value
 * that does not start with "http" disables initialization.
 */
export function initSentrySdk(): void {
  const sentryDsn = (process.env.CRAFT_SENTRY_DSN || '').trim();
  if (!sentryDsn.startsWith('http')) {
    logger.debug(
      'Not initializing Sentry SDK - no valid DSN found in environment or ' +
        'config files'
    );
    return;
  }

  logger.debug('Sentry DSN found in the environment, initializing the SDK');
  Sentry.init({ dsn: sentryDsn });

  // Attach machine/user metadata to every reported event
  Sentry.configureScope(scope => {
    scope.setTag('os-username', userInfo().username);
    scope.setTag('os-hostname', hostname());
    scope.setTag('os-platform', platform());
    scope.setTag('os-arch', arch());
    scope.setTag('os-release', release());

    scope.setExtra('argv', process.argv);
    scope.setExtra('craft-version', getPackageVersion());
    scope.setExtra('working-directory', process.cwd());
  });
}
36 |
--------------------------------------------------------------------------------
/src/utils/strings.ts:
--------------------------------------------------------------------------------
1 | import * as mustache from 'mustache';
2 | import * as util from 'util';
3 |
4 | /**
5 | * Sanitizes object attributes
6 | *
7 | * Non-object and non-scalar values are recursively removed. Additionally,
8 | * keys that contain dots are duplicated, and dots are replaced with double
9 | * underscores.
10 | *
11 | * @param obj Object to normalize
12 | * @returns Normalized object
13 | */
14 | export function sanitizeObject(obj: Record): any {
15 | if (typeof obj !== 'object' || obj === null) {
16 | throw new Error(`Cannot normalize value: ${obj}`);
17 | }
18 |
19 | const result: { [_: string]: any } = {};
20 | for (const key of Object.keys(obj)) {
21 | const value = obj[key];
22 | const valueType = typeof value;
23 | let newValue;
24 |
25 | // Allowed value types
26 | if (['boolean', 'string', 'number', 'undefined'].indexOf(valueType) > -1) {
27 | newValue = value;
28 | } else if (value === null) {
29 | newValue = undefined;
30 | } else if (valueType === 'object') {
31 | newValue = sanitizeObject(value);
32 | } else {
33 | continue;
34 | }
35 | result[key] = newValue;
36 | const normalizedKey = key.replace(/\./g, '__');
37 | if (key !== normalizedKey) {
38 | result[normalizedKey] = newValue;
39 | }
40 | }
41 | return result;
42 | }
43 |
44 | /**
45 | * Renders the given template in a safe way
46 | *
47 | * No expressions or logic is allowed, only values and attribute access (via
48 | * dots) are allowed. Under the hood, Mustache templates are used.
49 | *
50 | * @param template Mustache template
51 | * @param context Template data
52 | * @returns Rendered template
53 | */
54 | export function renderTemplateSafe(
55 | template: string,
56 | context: Record
57 | ): string {
58 | return mustache.render(template, sanitizeObject(context));
59 | }
60 |
61 | /**
62 | * Formats file size as kilobytes/megabytes
63 | *
64 | * @param size Size to format
65 | */
66 | export function formatSize(size: number): string {
67 | if (size < 1024) {
68 | return `${size} B`;
69 | }
70 | const kilobytes = size / 1024.0;
71 | if (kilobytes < 1024) {
72 | return `${kilobytes.toFixed(1)} kB`;
73 | } else {
74 | const megabytes = kilobytes / 1024.0;
75 | return `${megabytes.toFixed(2)} MB`;
76 | }
77 | }
78 |
79 | /**
80 | * Serializes the given object in a readable way
81 | *
82 | * @param obj Object to print out
83 | */
84 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
85 | export function formatJson(obj: any): string {
86 | const result = JSON.stringify(obj, null, 4);
87 | if (obj instanceof Error && result === '{}') {
88 | // Error that doesn't implement toJSON()
89 | return util.format(obj);
90 | } else {
91 | return result;
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/src/utils/symlink.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs';
2 | import * as path from 'path';
3 |
4 | import { logger } from '../logger';
5 | import { ConfigurationError } from './errors';
6 | import {
7 | SemVer,
8 | parseVersion,
9 | semVerToString,
10 | versionGreaterOrEqualThan,
11 | } from './version';
12 |
13 | /**
14 | * Creates a symlink, overwriting the existing one
15 | *
16 | * @param target Target path
17 | * @param newFile Path to the new symlink
18 | */
19 | function forceSymlink(target: string, newFile: string): void {
20 | if (fs.existsSync(newFile)) {
21 | fs.unlinkSync(newFile);
22 | }
23 | fs.symlinkSync(target, newFile);
24 | }
25 |
26 | /**
27 | * Create symbolic links to the new version file
28 | *
29 | * "latest.json", "{major}.json" and "{minor}.json" links are respectively not
30 | * updated if the new version is "older" (e.g., it's a patch release for an
31 | * older major version) than the currently linked versions.
32 | *
33 | * @param versionFilePath Path to the new version file
34 | * @param newVersion The new version
35 | * @param oldVersion The previous latest version
36 | */
37 | export function createSymlinks(
38 | versionFilePath: string,
39 | newVersion: string,
40 | oldVersion?: string
41 | ): void {
42 | const parsedNewVersion = parseVersion(newVersion) || undefined;
43 | if (!parsedNewVersion) {
44 | throw new ConfigurationError(`Cannot parse version: "${parsedNewVersion}"`);
45 | }
46 | const parsedOldVersion =
47 | (oldVersion ? parseVersion(oldVersion) : undefined) || undefined;
48 |
49 | const baseVersionName = path.basename(versionFilePath);
50 | const packageDir = path.dirname(versionFilePath);
51 |
52 | if (
53 | !parsedOldVersion ||
54 | versionGreaterOrEqualThan(parsedNewVersion, parsedOldVersion)
55 | ) {
56 | logger.debug('Symlink "latest.json"', {
57 | before: oldVersion,
58 | after: newVersion,
59 | });
60 | forceSymlink(baseVersionName, path.join(packageDir, 'latest.json'));
61 | }
62 |
63 | // Read possibly existing symlinks for major and minor versions of the new version
64 | const existingLinkedMajorVersion = getExistingSymlinkedVersion(
65 | path.join(packageDir, `${parsedNewVersion.major}.json`)
66 | );
67 | const existingLinkedMinorVersion = getExistingSymlinkedVersion(
68 | path.join(
69 | packageDir,
70 | `${parsedNewVersion.major}.${parsedNewVersion.minor}.json`
71 | )
72 | );
73 |
74 | // link {major}.json if there's no link yet for that major
75 | // or if the new version is newer than the currently linked one
76 | if (
77 | !existingLinkedMajorVersion ||
78 | versionGreaterOrEqualThan(parsedNewVersion, existingLinkedMajorVersion)
79 | ) {
80 | const majorVersionLink = `${parsedNewVersion.major}.json`;
81 | logger.debug(`Symlink "${majorVersionLink}"`, {
82 | before:
83 | existingLinkedMajorVersion &&
84 | semVerToString(existingLinkedMajorVersion),
85 | after: newVersion,
86 | });
87 | forceSymlink(baseVersionName, path.join(packageDir, majorVersionLink));
88 | }
89 |
90 | // link {minor}.json if there's no link yet for that minor
91 | // or if the new version is newer than the currently linked one
92 | if (
93 | !existingLinkedMinorVersion ||
94 | versionGreaterOrEqualThan(parsedNewVersion, existingLinkedMinorVersion)
95 | ) {
96 | const minorVersionLink = `${parsedNewVersion.major}.${parsedNewVersion.minor}.json`;
97 | logger.debug(`Symlink "${minorVersionLink}"`, {
98 | before:
99 | existingLinkedMinorVersion &&
100 | semVerToString(existingLinkedMinorVersion),
101 | after: newVersion,
102 | });
103 | forceSymlink(baseVersionName, path.join(packageDir, minorVersionLink));
104 | }
105 | }
106 |
107 | function getExistingSymlinkedVersion(symlinkPath: string): SemVer | null {
108 | try {
109 | // using lstat instead of exists because broken symlinks return false for exists
110 | fs.lstatSync(symlinkPath);
111 | } catch {
112 | // this means the symlink doesn't exist
113 | return null;
114 | }
115 | const linkedFile = fs.readlinkSync(symlinkPath);
116 | return parseVersion(path.basename(linkedFile));
117 | }
118 |
--------------------------------------------------------------------------------
/src/utils/version.ts:
--------------------------------------------------------------------------------
1 | import { getGitTagPrefix } from '../config';
2 |
/**
 * Regular expression for matching semver versions.
 *
 * Implemented as a factory: the pattern carries the global ("g") flag, so a
 * shared RegExp instance would leak `lastIndex` state between callers; each
 * call returns a fresh instance instead.
 *
 * Modified to match version components
 * Copyright (c) Sindre Sorhus (sindresorhus.com)
 * @see https://github.com/sindresorhus/semver-regex
 */
const semverRegex = () =>
  /\bv?(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(?:-?([\da-z-]+(?:\.[\da-z-]+)*))?(?:\+([\da-z-]+(?:\.[\da-z-]+)*))?\b/gi;
12 |
13 | /**
14 | * Extracts a version number from the given text.
15 | *
16 | * In case the version contains a leading "v", it is stripped from the result.
17 | * All semantic versions are supported. See {@link http://semver.org/} for
18 | * more information.
19 | *
20 | * @param text Some text containing a version
21 | * @returns The extracted version or null
22 | */
23 | export function getVersion(text: string): string | null {
24 | const matches = semverRegex().exec(text);
25 | const version = matches && matches[0];
26 | return version && version[0].toLowerCase() === 'v'
27 | ? version.substr(1)
28 | : version;
29 | }
30 |
31 | /**
32 | * Checks if the provided text is a valid version string.
33 | *
34 | * @param text String to check
35 | * @returns true if the string is a valid semantic version, false otherwise
36 | */
37 | export function isValidVersion(text: string): boolean {
38 | return !!text && text === getVersion(text);
39 | }
40 |
/**
 * SemVer is a parsed semantic version.
 */
export interface SemVer {
  /** The major version number */
  major: number;
  /** The minor version number */
  minor: number;
  /** The patch version number */
  patch: number;
  /** Optional pre-release specifier (capture group after the "-" separator) */
  pre?: string;
  /** Optional build metadata (capture group after the "+" separator) */
  build?: string;
}
56 |
57 | /**
58 | * Parses a version number from the given text.
59 | *
60 | * @param text Some text containing a version
61 | * @returns The parsed version or null
62 | */
63 | export function parseVersion(text: string): SemVer | null {
64 | const matches = semverRegex().exec(text);
65 | return (
66 | matches && {
67 | build: matches[5],
68 | major: parseInt(matches[1], 10),
69 | minor: parseInt(matches[2], 10),
70 | patch: parseInt(matches[3], 10),
71 | pre: matches[4],
72 | }
73 | );
74 | }
75 |
76 | /**
77 | * Returns "true" if version v1 is greater than version v2
78 | *
79 | * Example: "1.2.3" is greater than "1.1.0"
80 | */
81 | export function versionGreaterOrEqualThan(v1: SemVer, v2: SemVer): boolean {
82 | if (v1.major !== v2.major) {
83 | return v1.major > v2.major;
84 | } else if (v1.minor !== v2.minor) {
85 | return v1.minor > v2.minor;
86 | } else if (v1.patch !== v2.patch) {
87 | return v1.patch > v2.patch;
88 | } else if (!v1.pre && v2.pre) {
89 | return true;
90 | } else if (v1.pre && !v2.pre) {
91 | return false;
92 | } else if (v1.pre && v2.pre && v1.pre !== v2.pre && /^\d+$/.test(v1.pre) && /^\d+$/.test(v2.pre)) {
93 | return v1.pre > v2.pre;
94 | } else if (v1.build || v2.build || v1.pre || v2.pre) {
95 | throw new Error(
96 | `Cannot compare the two versions: "${JSON.stringify(
97 | v1
98 | )}" and "${JSON.stringify(v2)}"`
99 | );
100 | }
101 | return true;
102 | }
103 |
/**
 * A regular expression to detect that a version is a pre-release version.
 *
 * Matches markers such as "preview", "pre", "rc", "dev", "alpha", "beta",
 * "unstable", "a" or "b" when delimited by non-letter characters
 * (e.g. "1.0.0-rc.1"), case-insensitively.
 */
export const PREVIEW_RELEASE_REGEX = /(?:[^a-z])(preview|pre|rc|dev|alpha|beta|unstable|a|b)(?:[^a-z]|$)/i;
108 |
109 | /**
110 | * Checks that the provided string is a pre-release version.
111 | *
112 | * @param text Version string to check
113 | * @returns True if the string looks like a pre-release version
114 | */
115 | export function isPreviewRelease(text: string): boolean {
116 | return isValidVersion(text) && !!text.match(PREVIEW_RELEASE_REGEX);
117 | }
118 |
119 | /**
120 | * Returns the Git version based on the provided version.
121 | *
122 | * If no tag prefix is provided, it is taken from the configuration.
123 | *
124 | * @param version Version we're releasing
125 | * @param tagPrefix Git tag prefix
126 | * @returns Git tag
127 | */
128 | export function versionToTag(version: string, tagPrefix?: string): string {
129 | const prefix = tagPrefix === undefined ? getGitTagPrefix() : tagPrefix;
130 | return `${prefix}${version}`;
131 | }
132 |
133 | /**
134 | * Reads "package.json" from project root and returns its contents.
135 | */
136 | export function getPackage(): any {
137 | const pkg = require('../../package.json') || {};
138 | // Sanity check
139 | if (Object.keys(pkg).length === 0) {
140 | throw new Error('Invalid package.json: the file is empty!');
141 | }
142 | return pkg;
143 | }
144 |
145 | /**
146 | * Reads the package's version from "package.json".
147 | */
148 | export function getPackageVersion(): string {
149 | const { version } = getPackage();
150 | // We set process.env.CRAFT_BUILD_SHA at build time
151 | const buildInfo = process.env.CRAFT_BUILD_SHA;
152 |
153 | return buildInfo ? `${version} (${buildInfo})` : version;
154 | }
155 |
156 | /**
157 | * Returns the stringified version of the passed SemVer object.
158 | */
159 | export function semVerToString(s: SemVer) {
160 | return `${s.major}.${s.minor}.${s.patch}${s.pre ? `-${s.pre}` : ''}${
161 | s.build ? `+${s.build}` : ''
162 | }`;
163 | }
164 |
--------------------------------------------------------------------------------
/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "@sentry/typescript/tsconfig.json",
3 | "compilerOptions": {
4 | "lib": ["es2020"],
5 | "target": "es2020",
6 | "skipLibCheck": true,
7 | "forceConsistentCasingInFileNames": true,
8 | "baseUrl": ".",
9 | "outDir": "dist",
10 | "rootDir": "src",
11 | "types": ["node"],
12 | "noImplicitThis": false,
13 | "esModuleInterop": true,
14 | "importHelpers": true,
15 | "noEmitHelpers": false,
16 | "useUnknownInCatchVariables": false
17 | },
18 | "include": ["src/**/*.ts"],
19 | "exclude": ["dist/**/*", "**/__mocks__/**", "**/__tests__/**"]
20 | }
21 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.build.json",
3 | "compilerOptions": {
4 | "types": ["node", "jest"],
5 | "plugins": []
6 | },
  "include": ["src/**/*.ts", "**/__mocks__/**/*.ts", "**/__tests__/**/*.ts"],
8 | "exclude": ["dist/**/*"]
9 | }
10 |
--------------------------------------------------------------------------------