├── .editorconfig ├── .env.example ├── .eslintrc ├── .gitignore ├── .prettierrc ├── .travis.yml ├── .vscode ├── extensions.json └── settings.json ├── LICENSE ├── README.md ├── app.json ├── index.js ├── jest.config.js ├── lib ├── __fixtures__ │ ├── created.tag.json │ └── listFiles │ │ ├── a │ │ ├── b │ │ └── subdir │ │ └── .empty ├── __mocks__ │ ├── context.js │ ├── github.js │ ├── logger.js │ ├── node-fetch.js │ ├── stores.js │ ├── targets.js │ └── try-require.js ├── __tests__ │ ├── __snapshots__ │ │ └── request.test.js.snap │ ├── changes.test.js │ ├── files.test.js │ ├── request.test.js │ ├── utils.test.js │ └── version.test.js ├── changes.js ├── defaults.js ├── files.js ├── index.js ├── request.js ├── stores │ ├── __tests__ │ │ └── index.test.js │ ├── index.js │ ├── s3.js │ └── zeus.js ├── targets │ ├── __mocks__ │ │ └── releaseContext.js │ ├── __tests__ │ │ └── index.test.js │ ├── brew.js │ ├── cargo.js │ ├── github.js │ ├── index.js │ ├── npm.js │ ├── pods.js │ └── pypi.js ├── utils.js └── version.js ├── package.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | trim_trailing_whitespace = true 7 | charset = utf-8 8 | 9 | [*.{js,json,jsx,css,scss,less,yml}] 10 | indent_style = space 11 | indent_size = 2 12 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Configuration of your GitHub App 2 | APP_ID= 3 | WEBHOOK_SECRET=development 4 | 5 | # Uncomment this to get verbose logging 6 | # LOG_LEVEL=trace # or `info` to show less 7 | 8 | # Subdomain to use for localtunnel server. Defaults to your local username. 
9 | # SUBDOMAIN= 10 | 11 | # Disables all side effects 12 | DRY_RUN=true 13 | 14 | # Start deployments immediately 15 | RELEASE_TIMEOUT=0 16 | 17 | # AWS S3 bucket and credentials 18 | S3_BUCKET= 19 | S3_ACCESS_KEY= 20 | S3_SECRET_KEY= 21 | 22 | # Twine configuration for wheel uploads 23 | TWINE_BIN=twine 24 | TWINE_USERNAME= 25 | TWINE_PASSWORD= 26 | 27 | # Cocoapods configuration 28 | COCOAPODS_BIN=pod 29 | COCOAPODS_TRUNK_TOKEN= 30 | 31 | # Zeus store credentials 32 | ZEUS_API_TOKEN= 33 | 34 | # Cargo / crates.io credentials 35 | CARGO_REGISTRY_TOKEN= 36 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["airbnb-base", "prettier"] 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | coverage 2 | node_modules 3 | npm-debug.log 4 | *.pem 5 | .env 6 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5", 3 | "singleQuote": true 4 | } 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | cache: yarn 3 | 4 | git: 5 | depth: 1 6 | 7 | node_js: 8 | - 8 9 | - 9 10 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // See http://go.microsoft.com/fwlink/?LinkId=827846 3 | // for the documentation about the extensions.json format 4 | "recommendations": [ 5 | "dbaeumer.vscode-eslint", 6 | "EditorConfig.editorconfig", 7 | "esbenp.prettier-vscode", 8 
| "Orta.vscode-jest" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnType": true, 3 | "editor.formatOnPaste": true, 4 | "editor.formatOnSave": true, 5 | "editor.rulers": [80], 6 | "files.autoSave": "onWindowChange", 7 | "files.trimTrailingWhitespace": true, 8 | "files.insertFinalNewline": true, 9 | 10 | // Plugin Settings 11 | "eslint.autoFixOnSave": true 12 | } 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2017, Sentry (https://sentry.io/) and individual contributors. 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Probot: Release 2 | 3 | > a GitHub App built with [probot](https://github.com/probot/probot) that 4 | > publishes build artifacts to various third party services. 5 | 6 | ![](https://user-images.githubusercontent.com/1433023/32178062-1a715f32-bd8c-11e7-9d4d-7e51593a8f28.png) 7 | 8 | ## Table of Contents 9 | 10 | * [Target Configuration](#target-configuration) 11 | * [GitHub](#github-github) 12 | * [NPM](#npm-npm) 13 | * [Python Package Index](#python-package-index-pypi) 14 | * [Cocoapods](#cocoapods-pods) 15 | * [Homebrew](#homebrew-brew) 16 | * [Cargo](#cargo-cargo) 17 | * [Store Configuration](#store-configuration) 18 | * [Amazon S3](#amazon-s3-s3) 19 | * [Zeus CI](#zeus-ci-zeus) 20 | * [Setup](#setup) 21 | * [Github App](#github-app) 22 | * [Development](#development) 23 | * [Testing](#testing) 24 | * [Deployment](#deployment) 25 | 26 | ## Configuration 27 | 28 | The bot will only be active in repositories that contain `.github/release.yml`. 29 | In these repositories it will listen for tags and start a release if all status 30 | checks associated to the tag's commit are successful. In case a commit has no 31 | status checks, the release is skipped. 
32 | 33 | This file specifies release targets, stores and more: 34 | 35 | | Option | Description | 36 | | --------------- | ------------------------------------------------------------------------------- | 37 | | `store` | **optional**. The store for release artifacts (see below). | 38 | | `targets` | **optional**. List of release targets (see below). | 39 | | `ignoredChecks` | **optional**. A list of ignored status checks. Can be prefixes or entire names. | 40 | 41 | ## Target Configuration 42 | 43 | The configuration specifies which release targets to run for the repository. To 44 | run more targets, list the target identifiers under the `target` key. If the 45 | target key is empty, the bot defaults to the `"github"` target: 46 | 47 | ```yaml 48 | targets: 49 | - name: github 50 | ``` 51 | 52 | ### GitHub (`github`) 53 | 54 | Create a release on Github. If a Markdown changelog is present in the 55 | repository, this target tries to read the release name and description from the 56 | changelog. Otherwise, defaults to the tag name and tag's commit message. 57 | 58 | **Environment** 59 | 60 | _none_ 61 | 62 | **Configuration** 63 | 64 | | Option | Description | 65 | | ----------- | -------------------------------------------------------------------- | 66 | | `changelog` | **optional**. Path to the changelog file. Defaults to `CHANGELOG.md` | 67 | 68 | **Example:** 69 | 70 | ```yaml 71 | targets: 72 | - name: github 73 | changelog: CHANGES 74 | ``` 75 | 76 | ### NPM (`npm`) 77 | 78 | Releases a NPM package to the public registry. This requires a package tarball 79 | generated by `npm pack` in the release assets. The file will be uploaded to the 80 | registry with `npm publish`. This requires NPM to be authenticated with 81 | sufficient permissions to publish the package. 82 | 83 | **Environment** 84 | 85 | The `npm` utility must be installed on the system. 
86 | 87 | | Name | Description | 88 | | --------- | ----------------------------------------------------------- | 89 | | `NPM_BIN` | **optional**. Path to the npm executable. Defaults to `npm` | 90 | 91 | **Configuration** 92 | 93 | | Option | Description | 94 | | -------- | ------------------------------------------------------------------- | 95 | | `access` | **optional**. Visibility for scoped packages. Defaults to `private` | 96 | 97 | **Example** 98 | 99 | ```yaml 100 | targets: 101 | - name: npm 102 | access: private 103 | ``` 104 | 105 | ### Python Package Index (`pypi`) 106 | 107 | Uploads source dists and wheels to the Python Package Index with twine. The 108 | source code bundle or wheels must be in the release assets. 109 | 110 | **Environment** 111 | 112 | The `twine` package must be installed on the system. 113 | 114 | | Name | Description | 115 | | ---------------- | ----------------------------------------------------- | 116 | | `TWINE_USERNAME` | User name for PyPI with access rights for the package | 117 | | `TWINE_PASSWORD` | Password for the PyPI user | 118 | | `TWINE_BIN` | **optional**. Path to twine. Defaults to `twine` | 119 | 120 | **Configuration** 121 | 122 | _none_ 123 | 124 | **Example** 125 | 126 | ```yaml 127 | targets: 128 | - pypi 129 | ``` 130 | 131 | ### Cocoapods (`pods`) 132 | 133 | Pushes a new podspec to the central cocoapods repository. The Podspec is fetched 134 | from the Github repository with the tag that is being released. No release 135 | assets are required for this target. 136 | 137 | **Environment** 138 | 139 | The `cocoapods` gem must be installed on the system. 140 | 141 | | Name | Description | 142 | | ----------------------- | ----------------------------------------- | 143 | | `COCOAPODS_TRUNK_TOKEN` | The access token to the cocoapods account | 144 | | `COCOAPODS_BIN` | **optional**. Path to cocoapods. 
| 145 | 146 | **Configuration** 147 | 148 | | Option | Description | 149 | | ------ | ------------------------------------------ | 150 | | `spec` | Path to the Podspec file in the repository | 151 | 152 | **Example** 153 | 154 | ```yaml 155 | targets: 156 | - name: pods 157 | spec: MyProject.podspec 158 | ``` 159 | 160 | ### Homebrew (`brew`) 161 | 162 | Pushes a new or updated homebrew formula to a brew tap repository. The formula 163 | is committed directly to the master branch of the tap on GitHub, therefore the 164 | bot needs rights to commit to `master` on that repository. Therefore, formulas 165 | on `homebrew/core` are not supported, yet. 166 | 167 | The tap is configured with the mandatory `tap` parameter in the same format as 168 | the `brew` utility. A tap `/` will expand to the GitHub repository 169 | `github.com:/homebrew-`. 170 | 171 | The formula contents are given as configuration value and can be interpolated 172 | with `${ variable }`. The interpolation context contains the following 173 | variables: 174 | 175 | * `ref`: The tag's reference name. Usually the version number 176 | * `sha`: The tag's commit SHA 177 | * `checksums`: A map containing sha256 checksums for every release asset. Use 178 | the full filename to access the sha, e.g. `checksums['MyProgram.exe']` 179 | 180 | **Environment** 181 | 182 | _none_ 183 | 184 | **Configuration** 185 | 186 | | Option | Description | 187 | | ---------- | ------------------------------------------------------------------ | 188 | | `tap` | The name of the homebrew tap used to access the GitHub repo | 189 | | `template` | The template for contents of the formula file (ruby code) | 190 | | `formula` | **optional**. Name of the formula. Defaults to the repository name | 191 | | `path` | **optional**. Path to store the formula in. 
Defaults to `Formula` | 192 | 193 | **Example** 194 | 195 | ```yaml 196 | targets: 197 | - name: brew 198 | tap: octocat/tools # Expands to github.com:octocat/homebrew-tools 199 | formula: myproject # Creates the file myproject.rb 200 | path: HomebrewFormula # Creates the file in HomebrewFormula/ 201 | template: > 202 | class MyProject < Formula 203 | desc "This is a test for homebrew formulae" 204 | homepage "https://github.com/octocat/my-project" 205 | url "https://github.com/octocat/my-project/releases/download/${ref}/binary-darwin" 206 | version "${ref}" 207 | sha256 "${checksums['binary-darwin']}" 208 | 209 | def install 210 | mv "binary-darwin", "myproject" 211 | bin.install "myproject" 212 | end 213 | end 214 | ``` 215 | 216 | ### Cargo (`cargo`) 217 | 218 | Publishes a single package or entire workspace on the public crate registry 219 | ([crates.io](https://crates.io)). If the workspace contains multiple crates, 220 | they are published in an order depending on their dependencies. 221 | 222 | **Environment** 223 | 224 | Rustup must be installed and configured on the system. 225 | 226 | | Name | Description | 227 | | ---------------------- | ----------------------------------------------------------- | 228 | | `CARGO_REGISTRY_TOKEN` | The access token to the crates.io account | 229 | | `CARGO_BIN` | **optional**. Path to cargo. Defaults to `cargo`. | 230 | | `CARGO_HOME` | **optional**. Path to the cargo installation. | 231 | | `RUSTUP_HOME` | **optional**. Path to the rustup (toolchains) installation. | 232 | 233 | **Configuration** 234 | 235 | _none_ 236 | 237 | **Example** 238 | 239 | ```yaml 240 | targets: 241 | - cargo 242 | ``` 243 | 244 | ## Store Configuration 245 | 246 | The app downloads release artifacts from a configured store provider. After 247 | building release assets on a CI server, they need to be uploaded to this store. 
248 | The default store is Amazon S3 and can be changed via the `store` parameter in 249 | the configuration file: 250 | 251 | ```yaml 252 | store: s3 253 | ``` 254 | 255 | ### Amazon S3 (`s3`) 256 | 257 | Download artifacts from an Amazon S3 bucket. To create a bucket, please refer to 258 | the 259 | [official instructions](http://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html). 260 | Inside this bucket, the bot will always look for a folder with the schema 261 | `//`. 262 | 263 | **Environment** 264 | 265 | | Name | Description | 266 | | --------------- | ------------------------------------------------ | 267 | | `S3_BUCKET` | The name of the S3 bucket to download files from | 268 | | `S3_ACCESS_KEY` | The public access key for the bucket | 269 | | `S3_SECRET_KEY` | The secret access key for the bucket | 270 | 271 | **Configuration** | 272 | 273 | ```yaml 274 | store: s3 275 | ``` 276 | 277 | ### Zeus CI (`zeus`) 278 | 279 | Download artifacts from [Zeus](https://zeus.ci). This requires an 280 | [API token](https://zeus.ci/settings/token) for a user with access to all 281 | repositories. Also, each repository must be activated and a hook configured in 282 | the repository settings. 283 | 284 | To upload releases, use the [zeus-ci](https://npmjs.org/package/zeus-ci) command 285 | line utility. 286 | 287 | **Environment** 288 | 289 | | Name | Description | 290 | | ----------------- | ------------------------------------------------------------------ | 291 | | `ZEUS_API_TOKEN` | API token to authorize with Zeus CI | 292 | | `ZEUS_SERVER_URL` | **optional**. The URL to reach Zeus. Defaults to `https://zeus.ci` | 293 | 294 | **Configuration** 295 | 296 | ```yaml 297 | store: zeus 298 | ``` 299 | 300 | ## Setup 301 | 302 | This Probot app requires authentication tokens and credentials for third party 303 | apps in environment variables. 
Depending on the release targets you wish to use 304 | in your installation, you may omit some of the environment variables below. 305 | 306 | The project contains a template for environment variables located at 307 | `.env.example`. Copy this file to `.env` in the project root and adjust all 308 | environment variables. 309 | 310 | ### Github App 311 | 312 | First, create a GitHub App by following the instructions 313 | [here](https://probot.github.io/docs/deployment/#create-the-github-app). Then, 314 | make sure to download the private key and place it in the root directory of this 315 | application or set it via the `PRIVATE_KEY` environment variable. Finally, set 316 | the following environment variables: 317 | 318 | | Name | Description | 319 | | ---------------- | ---------------------------------------------------- | 320 | | `APP_ID` | Unique ID of the GitHub App | 321 | | `WEBHOOK_SECRET` | Random webhook secret configured during app creation | 322 | 323 | ### Development 324 | 325 | To start the development server, make sure the following environment variables 326 | are set: 327 | 328 | | Name | Description | 329 | | ----------------- | ------------------------------------------------- | 330 | | `DRY_RUN` | Disables actual releases. Set to `true` | 331 | | `RELEASE_TIMEOUT` | Timeout before a release starts. Defaults to `60` | 332 | | `SUBDOMAIN` | Subdomain for localtunnel to receive webhooks | 333 | | `LOG_LEVEL` | Sets the loggers output verbosity. Set to `debug` | 334 | 335 | Then, install dependencies and run the bot with: 336 | 337 | ```sh 338 | # Install dependencies 339 | yarn 340 | 341 | # Run the bot 342 | yarn start 343 | 344 | # Run test watchers 345 | yarn test:watch 346 | ``` 347 | 348 | We highly recommend to use VSCode and install the recommended extensions. They 349 | will configure your IDE to match the coding style, invoke auto formatters every 350 | time you save and run tests in the background for you. 
No need to run the 351 | watchers manually. 352 | 353 | ### Testing 354 | 355 | The bot includes an automated test suite that includes unit tests, linting and 356 | formatting checks. Additionally, this command generates a coverage report in 357 | `coverage/`. You can run it with npm: 358 | 359 | ```sh 360 | yarn test 361 | ``` 362 | 363 | We use [prettier](https://prettier.io/) for auto-formatting and 364 | [eslint](https://eslint.org/) as a linter. Both tools can automatically fix most 365 | issues for you. To invoke them, simply run: 366 | 367 | ```sh 368 | yarn fix 369 | ``` 370 | 371 | ## Deployment 372 | 373 | If you would like to run your own instance of this app, see the 374 | [docs for deployment](https://probot.github.io/docs/deployment/). 375 | 376 | This app requires these **Permissions** for the GitHub App: 377 | 378 | * **Commit statuses**: Read-only 379 | * **Repository contents**: Read & write 380 | 381 | Also, the following **Events** need to be subscribed: 382 | 383 | * **Status**: Commit status updated from the API 384 | * **Create**: Branch or tag created 385 | * **Delete**: Branch or tag deleted 386 | 387 | Also, make sure all required environment variables are present in the production 388 | environment. 
389 | -------------------------------------------------------------------------------- /app.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "PRIVATE_KEY": { 4 | "description": 5 | "the private key you downloaded when creating the GitHub App" 6 | }, 7 | "APP_ID": { 8 | "description": "the ID of your GitHub App" 9 | }, 10 | "WEBHOOK_SECRET": { 11 | "description": "the secret configured for your GitHub App" 12 | }, 13 | "S3_BUCKET": { 14 | "description": "AWS S3 bucket containing build assets" 15 | }, 16 | "S3_ACCESS_KEY": { 17 | "description": "Access key for AWS S3" 18 | }, 19 | "S3_SECRET_KEY": { 20 | "description": "Access secret key for AWS S3" 21 | }, 22 | "TWINE_USERNAME": { 23 | "description": "Username to upload python packages to PyPI" 24 | }, 25 | "TWINE_PASSWORD": { 26 | "description": "Password to upload python packages to PyPI" 27 | }, 28 | "COCOAPODS_TRUNK_TOKEN": { 29 | "description": "Authentication token for cocoapods" 30 | }, 31 | "ZEUS_API_TOKEN": { 32 | "description": "API token to authenticate Zeus CI" 33 | }, 34 | "CARGO_REGISTRY_TOKEN": { 35 | "description": "Token to authenticate with crates.io" 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('./lib'); 2 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | collectCoverage: true, 3 | testEnvironment: 'node', 4 | }; 5 | -------------------------------------------------------------------------------- /lib/__fixtures__/created.tag.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref": "v0.4.1", 3 | "ref_type": "tag", 4 | "repository": { 5 | "name": "zeus-cli", 
6 | "owner": { 7 | "login": "getsentry" 8 | } 9 | }, 10 | "organization": { 11 | "login": "getsentry" 12 | }, 13 | "installation": { 14 | "id": 54758 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /lib/__fixtures__/listFiles/a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/probot-release/895ca933a33ac33850ee62728ce2321aec4860b6/lib/__fixtures__/listFiles/a -------------------------------------------------------------------------------- /lib/__fixtures__/listFiles/b: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/probot-release/895ca933a33ac33850ee62728ce2321aec4860b6/lib/__fixtures__/listFiles/b -------------------------------------------------------------------------------- /lib/__fixtures__/listFiles/subdir/.empty: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/getsentry/probot-release/895ca933a33ac33850ee62728ce2321aec4860b6/lib/__fixtures__/listFiles/subdir/.empty -------------------------------------------------------------------------------- /lib/__mocks__/context.js: -------------------------------------------------------------------------------- 1 | const Github = require('./github'); 2 | 3 | class Context { 4 | /** 5 | * Creates a new mock context. 6 | * 7 | * The payload is used to initialize this context. It should contain an owner 8 | * and repo field to emulate a Github repository. To simulate an issue, add 9 | * an additional issue field. 
10 | * 11 | * @param {object} payload The initial payload 12 | */ 13 | constructor(payload = {}) { 14 | const issue = { 15 | owner: Context.OWNER, 16 | repo: Context.REPO, 17 | issue: Context.ISSUE, 18 | }; 19 | 20 | this.payload = Object.assign(issue, payload); 21 | this.github = new Github(); 22 | } 23 | 24 | /** 25 | * Returns all properties identifying the repository of this context. 26 | * 27 | * If props are specified, they are merged into the result. Note that the 28 | * props will overwrite the payload, if the same keys are used. 29 | * 30 | * @param {object} props Additional props to include in the return value 31 | * @returns {object} The merged object including payload and props 32 | */ 33 | repo(props = {}) { 34 | const { owner, repo } = this.payload; 35 | return Object.assign({}, { owner, repo }, props); 36 | } 37 | 38 | /** 39 | * Returns all properties identifying the issue/PR of this context. 40 | * 41 | * If props are specified, they are merged into the result. Note that the 42 | * props will overwrite the payload, if the same keys are used. 43 | * 44 | * @param {object} props Additional props to include in the return value 45 | * @returns {object} The merged object including payload and props 46 | */ 47 | issue(props = {}) { 48 | const { owner, repo, issue } = this.payload; 49 | return Object.assign({}, { owner, repo, issue }, props); 50 | } 51 | } 52 | 53 | Context.OWNER = '__owner__'; 54 | Context.REPO = '__repo__'; 55 | Context.ISSUE = '__issue__'; 56 | 57 | module.exports = Context; 58 | -------------------------------------------------------------------------------- /lib/__mocks__/github.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | const defaultHelpers = { 4 | /** 5 | * Defines a response to the API request that will be returned for all 6 | * subsequent calls. 
7 | * 8 | * @param {object} data Any data that the server would have returned 9 | * @param {object} meta Optional response metadata (status and headers) 10 | * @returns {this} 11 | */ 12 | mockResponse(data, meta = {}) { 13 | return this.mockReturnValue({ data, meta }); 14 | }, 15 | 16 | /** 17 | * Defines a response to the API request that will be returned for the 18 | * next call only. 19 | * 20 | * @param {object} data Any data that the server would have returned 21 | * @param {object} meta Optional response metadata (status and headers) 22 | * @returns {this} 23 | */ 24 | mockResponseOnce(data) { 25 | return this.mockReturnValueOnce({ data }); 26 | }, 27 | 28 | /** 29 | * Defines an API error that will be thrown for all subsequent calls. 30 | * 31 | * @param {number} code The response status code 32 | * @param {string} message An optional message to include in the error 33 | * @returns {this} 34 | */ 35 | mockError(code, message) { 36 | return this.mockImplementation(() => { 37 | const e = new Error(message); 38 | e.code = code; 39 | throw e; 40 | }); 41 | }, 42 | 43 | /** 44 | * Defines an API error that will be thrown for the next call only. 
45 | * 46 | * @param {number} code The response status code 47 | * @param {string} message An optional message to include in the error 48 | * @returns {this} 49 | */ 50 | mockErrorOnce(code, message) { 51 | return this.mockImplementationOnce(() => { 52 | const e = new Error(message); 53 | e.code = code; 54 | throw e; 55 | }); 56 | }, 57 | }; 58 | 59 | module.exports = class Github { 60 | /** 61 | * Defines a new mock function with additional helpers 62 | * 63 | * - mockResponse / mockResponseOnce: set success response data 64 | * - mockError / mockErrorOnce: set error objectes 65 | * 66 | * @param {object} helpers Additional helpers to include in this endpoint 67 | * @returns {function} A jest mock function 68 | */ 69 | static fn(helpers = {}) { 70 | return Object.assign(jest.fn(), defaultHelpers, helpers); 71 | } 72 | 73 | constructor() { 74 | this.repos = { 75 | getContent: Github.fn({ 76 | /** 77 | * Mocks the requested file's contents for all subsequent calls. The 78 | * contents will be base64 encoded in the response. 79 | * 80 | * @param {string} content File contents to be returned by the API 81 | * @returns {this} 82 | */ 83 | mockContent(content) { 84 | return this.mockResponse({ 85 | content: Buffer.from(content).toString('base64'), 86 | }); 87 | }, 88 | /** 89 | * Mocks the requested file's contents for the next call. The contents 90 | * will be base64 encoded in the response. 
91 | * 92 | * @param {string} content File contents to be returned by the API 93 | * @returns {this} 94 | */ 95 | mockContentOnce(content) { 96 | this.mockResponseOnce({ 97 | content: Buffer.from(content).toString('base64'), 98 | }); 99 | }, 100 | }), 101 | }; 102 | } 103 | }; 104 | -------------------------------------------------------------------------------- /lib/__mocks__/logger.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | const logger = jest.fn(); 4 | logger.error = logger; 5 | logger.warn = logger; 6 | logger.info = logger; 7 | logger.debug = logger; 8 | module.exports = logger; 9 | -------------------------------------------------------------------------------- /lib/__mocks__/node-fetch.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | module.exports = jest.fn(); 3 | -------------------------------------------------------------------------------- /lib/__mocks__/stores.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | let lastStore = null; 4 | 5 | const createStore = jest.fn().mockReturnValue(() => { 6 | const store = { 7 | downloadFile: jest 8 | .fn() 9 | .mockImplementation(() => Promise.resolve(store.FILE_PATH)), 10 | downloadFiles: jest 11 | .fn() 12 | .mockImplementation(() => Promise.resolve(store.FILE_PATHS)), 13 | listFiles: jest 14 | .fn() 15 | .mockImplementation(() => Promise.resolve(store.FILE_LIST)), 16 | downloadAll: jest 17 | .fn() 18 | .mockImplementation(() => Promise.resolve(store.FILE_PATHS)), 19 | getCapabilities: jest.fn(), 20 | }; 21 | 22 | store.FILE = { name: 'file' }; 23 | store.FILE_LIST = [createStore.FILE]; 24 | store.FILE_PATH = '/path/to/some/file'; 25 | store.FILE_PATHS = [createStore.FILE_PATH]; 26 | store.CAPABILITIES = { TYPE: true }; 27 | 28 | lastStore = store; 29 | return store; 30 | }); 31 | 32 | 
createStore.getLastStore = () => lastStore; 33 | module.exports = createStore; 34 | -------------------------------------------------------------------------------- /lib/__mocks__/targets.js: -------------------------------------------------------------------------------- 1 | let success = true; 2 | 3 | function runTarget() { 4 | return success 5 | ? Promise.resolve() 6 | : Promise.reject(new Error('expected failure')); 7 | } 8 | 9 | runTarget.mockSuccess = () => { 10 | success = true; 11 | }; 12 | 13 | runTarget.mockFailure = () => { 14 | success = false; 15 | }; 16 | 17 | module.exports = runTarget; 18 | -------------------------------------------------------------------------------- /lib/__mocks__/try-require.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | module.exports = jest.fn(); 4 | -------------------------------------------------------------------------------- /lib/__tests__/__snapshots__/request.test.js.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`allows Accept header overrides 1`] = ` 4 | Array [ 5 | "http://example.org", 6 | Object { 7 | "headers": Object { 8 | "Accept": "text/html", 9 | }, 10 | }, 11 | ] 12 | `; 13 | 14 | exports[`passes all parameters to fetch 1`] = ` 15 | Array [ 16 | "http://example.org", 17 | Object { 18 | "headers": Object { 19 | "Accept": "application/json", 20 | "Authorization": "bearer yo!", 21 | }, 22 | "method": "POST", 23 | }, 24 | ] 25 | `; 26 | -------------------------------------------------------------------------------- /lib/__tests__/changes.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | const { findChangeset } = require('../changes'); 4 | 5 | test('extracts a single change', () => { 6 | const name = 'Version 1.0.0'; 7 | const body = 'this is a test'; 8 | const 
markdown = `# Changelog\n## ${name}\n${body}\n`; 9 | const changes = findChangeset(markdown, 'v1.0.0'); 10 | expect(changes).toEqual({ name, body }); 11 | }); 12 | 13 | test('extracts a change between headings', () => { 14 | const name = 'Version 1.0.0'; 15 | const body = 'this is a test'; 16 | 17 | const markdown = `# Changelog 18 | ## 1.0.1 19 | newer 20 | 21 | ## ${name} 22 | ${body} 23 | 24 | ## 0.9.0 25 | older 26 | `; 27 | 28 | const changes = findChangeset(markdown, 'v1.0.0'); 29 | expect(changes).toEqual({ name, body }); 30 | }); 31 | 32 | test('extracts changes from underlined headings', () => { 33 | const name = 'Version 1.0.0'; 34 | const body = 'this is a test'; 35 | const markdown = `Changelog\n====\n${name}\n----\n${body}\n`; 36 | const changes = findChangeset(markdown, 'v1.0.0'); 37 | expect(changes).toEqual({ name, body }); 38 | }); 39 | 40 | test('extracts changes from alternating headings', () => { 41 | const name = 'Version 1.0.0'; 42 | const body = 'this is a test'; 43 | 44 | const markdown = `# Changelog 45 | ## 1.0.1 46 | newer 47 | 48 | ${name} 49 | ------- 50 | ${body} 51 | 52 | ## 0.9.0 53 | older 54 | `; 55 | 56 | const changes = findChangeset(markdown, 'v1.0.0'); 57 | expect(changes).toEqual({ name, body }); 58 | }); 59 | 60 | test('returns null if the tag is no valid version', () => { 61 | const changes = findChangeset('', 'not a version'); 62 | expect(changes).toBe(null); 63 | }); 64 | 65 | test('returns null if no changeset is found', () => { 66 | const markdown = `# Changelog 67 | ## 1.0.1 68 | newer 69 | 70 | ## 0.9.0 71 | older 72 | `; 73 | 74 | const changes = findChangeset(markdown, 'v1.0.0'); 75 | expect(changes).toBe(null); 76 | }); 77 | -------------------------------------------------------------------------------- /lib/__tests__/files.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | /* eslint-disable global-require */ 3 | 4 | describe('listFiles', () => { 
5 | const { listFiles } = require('../files'); 6 | const { join, resolve } = require('path'); 7 | const testDir = resolve(__dirname, '../__fixtures__/listFiles'); 8 | const testFiles = ['a', 'b'].map(f => join(testDir, f)); 9 | 10 | test('returns only files', async () => { 11 | expect.assertions(1); 12 | const files = await listFiles(testDir); 13 | expect(files).toEqual(testFiles); 14 | }); 15 | }); 16 | 17 | describe('withTempDir', () => { 18 | const { existsSync } = require('fs'); 19 | const { withTempDir } = require('../files'); 20 | 21 | async function testDirectories(callback) { 22 | let directory = null; 23 | 24 | try { 25 | await withTempDir(dir => { 26 | directory = dir; 27 | expect(existsSync(directory)).toBeTruthy(); 28 | return callback(directory); 29 | }); 30 | } finally { 31 | expect(existsSync(directory)).toBeFalsy(); 32 | } 33 | } 34 | 35 | test('creates and removes synchronously', async () => { 36 | expect.assertions(2); 37 | await testDirectories(() => {}); 38 | }); 39 | 40 | test('creates and removes on error', async () => { 41 | try { 42 | expect.assertions(3); 43 | await testDirectories(() => { 44 | throw new Error('fail'); 45 | }); 46 | } catch (e) { 47 | expect(e.message).toBe('fail'); 48 | } 49 | }); 50 | 51 | test('creates and removes on Promise resolution', async () => { 52 | expect.assertions(2); 53 | await testDirectories(() => Promise.resolve('success')); 54 | }); 55 | 56 | test('creates and removes on Promise rejection', async () => { 57 | try { 58 | expect.assertions(3); 59 | await testDirectories(() => Promise.reject(new Error('fail'))); 60 | } catch (e) { 61 | expect(e.message).toBe('fail'); 62 | } 63 | }); 64 | 65 | test('returns the callback return value synchronously', async () => { 66 | expect.assertions(1); 67 | const result = await withTempDir(() => 'result'); 68 | expect(result).toBe('result'); 69 | }); 70 | 71 | test('returns the callback return value asynchronously', async () => { 72 | expect.assertions(1); 73 | const result 
/**
 * Creates a promise that settles asynchronously (on the next macrotask),
 * mimicking real network timing in tests.
 *
 * @param {*} value The value to resolve with
 * @param {*} reason If truthy, the promise rejects with this reason instead
 * @returns {Promise} The deferred promise
 */
const mockPromise = (value, reason) =>
  new Promise((resolve, reject) => {
    setImmediate(() => (reason ? reject(reason) : resolve(value)));
  });

/**
 * Configures the fetch mock to resolve with a Response-like object.
 *
 * @param {number} status The HTTP status code of the mocked response
 * @param {Function} json A function returning the parsed JSON body
 * @param {string} statusText The HTTP status text of the mocked response
 */
function mockFetch(status, json, statusText) {
  // Per the Fetch spec, `ok` covers 2xx only (200-299); the previous
  // `status <= 300` wrongly treated 300 (a redirect) as ok.
  const ok = status >= 200 && status < 300;
  fetch.mockReturnValue(mockPromise({ status, ok, json, statusText }));
}
}, 31 | }); 32 | 33 | expect(fetch.mock.calls[0]).toMatchSnapshot(); 34 | }); 35 | 36 | test('allows Accept header overrides', async () => { 37 | expect.assertions(1); 38 | mockFetch(200, () => mockPromise()); 39 | 40 | await request('http://example.org', { 41 | headers: { Accept: 'text/html' }, 42 | }); 43 | 44 | expect(fetch.mock.calls[0]).toMatchSnapshot(); 45 | }); 46 | 47 | test('resolves the parsed JSON result for status 200', async () => { 48 | expect.assertions(1); 49 | mockFetch(200, () => mockPromise({ foo: 'bar' })); 50 | const result = await request('http://example.org'); 51 | expect(result).toEqual({ foo: 'bar' }); 52 | }); 53 | 54 | test('resolves undefined for status 204', async () => { 55 | expect.assertions(1); 56 | mockFetch(204, () => { 57 | throw new Error('should not be called'); 58 | }); 59 | 60 | const result = await request('http://example.org'); 61 | expect(result).toBeUndefined(); 62 | }); 63 | 64 | test('throws an error containing the status text', async () => { 65 | expect.assertions(1); 66 | mockFetch(400, () => mockPromise(null, 'empty'), 'BAD REQUEST'); 67 | 68 | try { 69 | await request('http://example.org'); 70 | } catch (e) { 71 | expect(e.message).toBe('400 BAD REQUEST'); 72 | } 73 | }); 74 | 75 | test('throws an error containing the resolved error message', async () => { 76 | expect.assertions(1); 77 | const message = 'Error message'; 78 | mockFetch(400, () => mockPromise({ message })); 79 | 80 | try { 81 | await request('http://example.org'); 82 | } catch (e) { 83 | expect(e.message).toBe(message); 84 | } 85 | }); 86 | 87 | test('falls back to the status text when parsing errors', async () => { 88 | expect.assertions(1); 89 | mockFetch(400, () => mockPromise({}), 'BAD REQUEST'); 90 | 91 | try { 92 | await request('http://example.org'); 93 | } catch (e) { 94 | expect(e.message).toBe('400 BAD REQUEST'); 95 | } 96 | }); 97 | -------------------------------------------------------------------------------- 
/lib/__tests__/utils.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | /* eslint-disable global-require */ 3 | 4 | describe('filterAsync', () => { 5 | const { filterAsync } = require('../utils'); 6 | 7 | test('filters with sync predicate', async () => { 8 | expect.assertions(1); 9 | const filtered = await filterAsync([1, 2, 3, 4], i => i > 2); 10 | expect(filtered).toEqual([3, 4]); 11 | }); 12 | 13 | test('filters with async predicate', async () => { 14 | expect.assertions(1); 15 | 16 | const predicate = i => 17 | new Promise(resolve => setTimeout(() => resolve(i > 2), i * 100)); 18 | const filtered = await filterAsync([1, 2, 3, 4], predicate); 19 | expect(filtered).toEqual([3, 4]); 20 | }); 21 | 22 | test('passes filter arguments to the predicate', async () => { 23 | expect.assertions(1); 24 | 25 | const arr = [1]; 26 | const predicate = jest.fn(); 27 | 28 | await filterAsync(arr, predicate); 29 | expect(predicate).toHaveBeenCalledWith(1, 0, arr); 30 | }); 31 | 32 | test('passes this to the predicate', async () => { 33 | expect.assertions(1); 34 | 35 | const that = {}; 36 | await filterAsync( 37 | [1], 38 | function predicate() { 39 | expect(this).toBe(that); 40 | }, 41 | that 42 | ); 43 | }); 44 | }); 45 | 46 | describe('forEachChained', () => { 47 | const { forEachChained } = require('../utils'); 48 | 49 | test('invokes synchronous actions', async () => { 50 | expect.assertions(1); 51 | 52 | const fun = jest.fn(); 53 | const arr = ['a', 'b', 'c']; 54 | await forEachChained(arr, fun); 55 | 56 | expect(fun.mock.calls).toEqual([ 57 | ['a', 0, arr], 58 | ['b', 1, arr], 59 | ['c', 2, arr], 60 | ]); 61 | }); 62 | 63 | test('invokes asynchronous actions sequentially', async () => { 64 | expect.assertions(1); 65 | 66 | const fun = jest.fn(); 67 | const arr = [500, 300, 100]; 68 | 69 | fun.mockImplementation( 70 | timeout => new Promise(resolve => setTimeout(resolve, timeout)) 71 | ); 72 | 73 | await 
forEachChained(arr, fun); 74 | expect(fun.mock.calls).toEqual([ 75 | [500, 0, arr], 76 | [300, 1, arr], 77 | [100, 2, arr], 78 | ]); 79 | }); 80 | 81 | test('passes this to the action', async () => { 82 | expect.assertions(1); 83 | 84 | const that = {}; 85 | await forEachChained( 86 | [1], 87 | function action() { 88 | expect(this).toBe(that); 89 | }, 90 | that 91 | ); 92 | }); 93 | }); 94 | 95 | describe('promiseProps', () => { 96 | const { promiseProps } = require('../utils'); 97 | 98 | test('awaits an empty object', async () => { 99 | expect.assertions(1); 100 | const result = await promiseProps({}); 101 | expect(result).toEqual({}); 102 | }); 103 | 104 | test('awaits a plain object', async () => { 105 | expect.assertions(1); 106 | const result = await promiseProps({ foo: 'foo', bar: 42 }); 107 | expect(result).toEqual({ foo: 'foo', bar: 42 }); 108 | }); 109 | 110 | test('awaits an object with promises', async () => { 111 | expect.assertions(1); 112 | const result = await promiseProps({ 113 | foo: Promise.resolve('foo'), 114 | bar: Promise.resolve(42), 115 | }); 116 | expect(result).toEqual({ foo: 'foo', bar: 42 }); 117 | }); 118 | }); 119 | 120 | describe('getFile', () => { 121 | const { getFile } = require('../utils'); 122 | const Context = require('../__mocks__/context'); 123 | 124 | test('loads and decodes the file', async () => { 125 | expect.assertions(2); 126 | 127 | const context = new Context({ owner: 'owner', repo: 'repo' }); 128 | const { getContent } = context.github.repos; 129 | getContent.mockContent('test content.'); 130 | 131 | const content = await getFile(context, '/path/to/file', 'v1.0.0'); 132 | expect(getContent).toHaveBeenCalledWith({ 133 | owner: 'owner', 134 | repo: 'repo', 135 | path: '/path/to/file', 136 | ref: 'v1.0.0', 137 | }); 138 | 139 | expect(content).toBe('test content.'); 140 | }); 141 | 142 | test('returns null for missing files', async () => { 143 | expect.assertions(1); 144 | 145 | const context = new Context({ owner: 
'owner', repo: 'repo' }); 146 | const { getContent } = context.github.repos; 147 | getContent.mockError(404, 'file not found'); 148 | 149 | const content = await getFile(context, '/path/to/missing', 'v1.0.0'); 150 | expect(content).toBe(null); 151 | }); 152 | 153 | test('rejects all other errors', async () => { 154 | expect.assertions(1); 155 | 156 | const context = new Context({ owner: 'owner', repo: 'repo' }); 157 | const { getContent } = context.github.repos; 158 | getContent.mockError(500, 'internal server error'); 159 | 160 | try { 161 | await getFile(context, '/path/to/missing', 'v1.0.0'); 162 | } catch (e) { 163 | expect(e.message).toMatch(/internal server error/); 164 | } 165 | }); 166 | }); 167 | 168 | describe('isSorted', () => { 169 | const { isSorted } = require('../utils'); 170 | 171 | const MODES = [ 172 | { title: 'default', descending: undefined }, 173 | { title: 'ascending', descending: false }, 174 | { title: 'descending', descending: true }, 175 | ]; 176 | 177 | MODES.forEach(({ title, descending }) => 178 | describe(`sort: ${title}`, () => { 179 | function prepare(arr) { 180 | return arr && descending ? 
arr.reverse() : arr; 181 | } 182 | 183 | test('is true for empty arrays', () => { 184 | expect(isSorted([], descending)).toBe(true); 185 | }); 186 | 187 | test('is true for a single element', () => { 188 | expect(isSorted([1], descending)).toBe(true); 189 | }); 190 | 191 | test('verifies strict order', () => { 192 | const arr = prepare([1, 2, 3]); 193 | expect(isSorted(arr, descending)).toBe(true); 194 | }); 195 | 196 | test('verifies loose order', () => { 197 | const arr = prepare([1, 2, 2]); 198 | expect(isSorted(arr, descending)).toBe(true); 199 | }); 200 | 201 | test('detects disorder', () => { 202 | const arr = prepare([1, 2, 1]); 203 | expect(isSorted(arr, descending)).toBe(false); 204 | }); 205 | 206 | test('works with ASCII strings', () => { 207 | const arr = prepare(['1', 'A', 'a']); 208 | expect(isSorted(arr, descending)).toBe(true); 209 | }); 210 | 211 | test('returns false for elements with wrong types', () => { 212 | const arr = prepare([1, {}, 3]); 213 | expect(isSorted(arr, descending)).toBe(false); 214 | }); 215 | 216 | test('returns false for null elements', () => { 217 | const arr = prepare([1, null, 3]); 218 | expect(isSorted(arr, descending)).toBe(false); 219 | }); 220 | 221 | test('returns false for undefined elements', () => { 222 | const arr = prepare([1, undefined, 3]); 223 | expect(isSorted(arr, descending)).toBe(false); 224 | }); 225 | }) 226 | ); 227 | }); 228 | 229 | describe('cloneContext', () => { 230 | const { cloneContext } = require('../utils'); 231 | const Context = require('../__mocks__/context'); 232 | 233 | test('sets the correct constructor', () => { 234 | const clone = cloneContext(new Context()); 235 | expect(clone.constructor).toBe(Context); 236 | }); 237 | 238 | test('sets the correct prototype', () => { 239 | const clone = cloneContext(new Context({ owner: 'foo', repo: 'bar' })); 240 | expect(clone.repo()).toEqual({ owner: 'foo', repo: 'bar' }); 241 | }); 242 | 243 | test('creates a new instance', () => { 244 | const 
context = new Context(); 245 | const clone = cloneContext(context); 246 | expect(clone).not.toBe(context); 247 | }); 248 | 249 | test('clones default properties', () => { 250 | const context = new Context(); 251 | const clone = cloneContext(context); 252 | expect(clone).toEqual(context); 253 | }); 254 | 255 | test('clones additional properties', () => { 256 | const context = new Context(); 257 | context.additional = 'property'; 258 | const clone = cloneContext(context); 259 | expect(clone).toEqual(context); 260 | }); 261 | 262 | test('clones a context with additional properties', () => { 263 | const clone = cloneContext(new Context(), { foo: 'bar' }); 264 | expect(clone.constructor).toBe(Context); 265 | expect(clone.foo).toBe('bar'); 266 | }); 267 | 268 | test('overrides existing attributes', () => { 269 | const clone = cloneContext(new Context(), { payload: { a: 1 } }); 270 | expect(clone.constructor).toBe(Context); 271 | expect(clone.payload).toEqual({ a: 1 }); 272 | }); 273 | }); 274 | 275 | describe('spawn', () => { 276 | const { spawn } = require('../utils'); 277 | 278 | test('resolves on success', async () => { 279 | expect.assertions(1); 280 | await spawn('test', ['1']); 281 | expect(true).toBe(true); 282 | }); 283 | 284 | test('resolves to the standard output', async () => { 285 | expect.assertions(1); 286 | const output = await spawn('echo', ['foobar']); 287 | expect(output.toString()).toEqual('foobar\n'); 288 | }); 289 | 290 | test('rejects invalid arguments', async () => { 291 | try { 292 | expect.assertions(1); 293 | await spawn(); 294 | } catch (e) { 295 | expect(e.message).toMatch(/non-empty string/); 296 | } 297 | }); 298 | 299 | test('rejects on non-zero exit code', async () => { 300 | try { 301 | expect.assertions(2); 302 | await spawn('test', ['']); 303 | } catch (e) { 304 | expect(e.code).toBe(1); 305 | expect(e.message).toMatch(/code 1/); 306 | } 307 | }); 308 | 309 | test('rejects on error', async () => { 310 | try { 311 | expect.assertions(1); 
312 | await spawn('this_command_does_not_exist'); 313 | } catch (e) { 314 | expect(e.message).toMatch(/ENOENT/); 315 | } 316 | }); 317 | 318 | test('attaches args on error', async () => { 319 | try { 320 | expect.assertions(1); 321 | await spawn('test', ['x', 'y']); 322 | } catch (e) { 323 | expect(e.args).toEqual(['x', 'y']); 324 | } 325 | }); 326 | 327 | test('attaches options on error', async () => { 328 | try { 329 | expect.assertions(1); 330 | await spawn('test', [], { cwd: 'file://yo.js' }); 331 | } catch (e) { 332 | expect(e.options).toEqual({ cwd: 'file://yo.js' }); 333 | } 334 | }); 335 | 336 | test('strip env from options on error', async () => { 337 | try { 338 | expect.assertions(1); 339 | await spawn('test', [], { env: { x: 123, password: 456 } }); 340 | } catch (e) { 341 | expect(e.options.env).toEqual(['x', 'password']); 342 | } 343 | }); 344 | 345 | test('logs stdout to a logger', async () => { 346 | expect.assertions(1); 347 | const logger = { debug: jest.fn() }; 348 | await spawn('echo', ['foobar'], undefined, logger); 349 | expect(logger.debug).toHaveBeenCalledWith('echo: foobar'); 350 | }); 351 | 352 | test('logs stderr to a logger', async () => { 353 | expect.assertions(1); 354 | const logger = { debug: jest.fn() }; 355 | await spawn('sh', ['-c', 'echo foobar 1>&2'], undefined, logger); 356 | expect(logger.debug).toHaveBeenCalledWith('sh: foobar'); 357 | }); 358 | }); 359 | -------------------------------------------------------------------------------- /lib/__tests__/version.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | 3 | const { getVersion, parseVersion } = require('../version'); 4 | 5 | test('extracts a basic SemVer versions', () => { 6 | expect(getVersion('1.0.0')).toBe('1.0.0'); 7 | }); 8 | 9 | test('extracts a SemVer version with leading "v"', () => { 10 | expect(getVersion('v1.0.0')).toBe('1.0.0'); 11 | }); 12 | 13 | test('extracts a SemVer version from text', () 
=> { 14 | expect(getVersion('1.0.0 (foobar)')).toBe('1.0.0'); 15 | }); 16 | 17 | test('parses a full SemVer version', () => { 18 | expect(parseVersion('1.2.3')).toEqual({ 19 | major: 1, 20 | minor: 2, 21 | patch: 3, 22 | }); 23 | }); 24 | 25 | test('parses a SemVer with leading "v"', () => { 26 | expect(parseVersion('v1.2.3')).toEqual({ 27 | major: 1, 28 | minor: 2, 29 | patch: 3, 30 | }); 31 | }); 32 | 33 | test('parses a pre-release SemVer', () => { 34 | expect(parseVersion('v1.2.3-beta')).toEqual({ 35 | major: 1, 36 | minor: 2, 37 | patch: 3, 38 | pre: 'beta', 39 | }); 40 | }); 41 | 42 | test('parses a complicated pre-release SemVer', () => { 43 | expect(parseVersion('v1.2.3-beta.1')).toEqual({ 44 | major: 1, 45 | minor: 2, 46 | patch: 3, 47 | pre: 'beta.1', 48 | }); 49 | }); 50 | 51 | test('parses a SemVer with build metadata', () => { 52 | expect(parseVersion('v1.2.3+linux')).toEqual({ 53 | major: 1, 54 | minor: 2, 55 | patch: 3, 56 | build: 'linux', 57 | }); 58 | }); 59 | 60 | test('parses a pre-release SemVer with build metadata', () => { 61 | expect(parseVersion('v1.2.3-beta+linux')).toEqual({ 62 | major: 1, 63 | minor: 2, 64 | patch: 3, 65 | pre: 'beta', 66 | build: 'linux', 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /lib/changes.js: -------------------------------------------------------------------------------- 1 | const { getVersion } = require('./version'); 2 | 3 | /** 4 | * A single changeset with name and description 5 | * 6 | * @typedef {object} Changeset 7 | * @prop {string} name The name of this changeset 8 | * @prop {string} body The markdown body describing the changeset 9 | */ 10 | 11 | /** 12 | * Extracts a specific changeset from a markdown document 13 | * 14 | * The changes are bounded by a header preceding the changes and an optional 15 | * header at the end. If the latter is omitted, the markdown document will be 16 | * reat until its end. 
/**
 * Extracts a specific changeset from a markdown document
 *
 * The changeset is bounded by the given header and an optional following
 * header. If the latter is omitted, the document is read until its end.
 * The changeset's name is taken from the matched header.
 *
 * @param {string} markdown The full changelog markdown
 * @param {RegExpExecArray} header The header of the section to extract
 * @param {RegExpExecArray?} nextHeader An optional header of the next section
 * @returns {Changeset} The extracted changes
 */
function extractChangeset(markdown, header, nextHeader) {
  const bodyStart = header.index + header[0].length;
  const bodyEnd = nextHeader ? nextHeader.index : undefined;
  return {
    name: (header[1] || header[2]).trim(),
    body: markdown.substring(bodyStart, bodyEnd).trim(),
  };
}

/**
 * Searches for a changeset within the given markdown
 *
 * @param {string} markdown The markdown containing the changeset
 * @param {string} tag A git tag containing a version number
 * @returns {Changeset?} The changeset if found; otherwise null
 */
function findChangeset(markdown, tag) {
  const version = getVersion(tag);
  if (version == null) {
    return null;
  }

  // Matches either "## Title" ATX headings (captured in group 1) or
  // underlined "Title\n----" setext headings (captured in group 2).
  const regex = /^ *## *([^\n]+?) *#* *(?:\n+|$)|^([^\n]+)\n *(?:-){2,} *(?:\n+|$)/gm;
  let match = regex.exec(markdown);
  while (match != null) {
    if (getVersion(match[1] || match[2]) === version) {
      // The next heading match (if any) bounds the changeset body.
      return extractChangeset(markdown, match, regex.exec(markdown));
    }
    match = regex.exec(markdown);
  }

  return null;
}
/**
 * Lists all direct files within the specified directory, skipping
 * directories and symlinks.
 *
 * The path should be given absolute; relative paths are evaluated from the
 * current working directory. Throws if the path is missing. The resulting
 * file paths are joined with the directory argument, and thus also absolute
 * or relative depending on the input parameter.
 *
 * @param {string} directory The path to the directory
 * @returns {Promise} A list of paths to files within the directory
 * @async
 */
async function listFiles(directory) {
  const entries = await readdir(directory);
  const fullPaths = entries.map(entry => path.join(directory, entry));
  // lstat (not stat) so that symlinks are reported as links, not targets.
  const isRegularFile = async fullPath => (await lstat(fullPath)).isFile();
  return filterAsync(fullPaths, isRegularFile);
}

/**
 * Executes an asynchronous callback within a temporary directory.
 *
 * The directory and all of its contents are removed when the callback
 * finishes or throws.
 *
 * @param {Function} callback A callback that receives the directory path
 * @returns {Promise} The return value of the callback
 * @async
 */
async function withTempDir(callback) {
  const tempDir = await mkdtemp(path.join(os.tmpdir(), 'release-'));
  try {
    return await callback(tempDir);
  } finally {
    // Cleanup runs on both resolution and rejection of the callback.
    await rimraf(tempDir);
  }
}
*/ 31 | const RELEASE_TIMEOUT = 32 | process.env.RELEASE_TIMEOUT === '' ? 60 : process.env.RELEASE_TIMEOUT; 33 | 34 | /** 35 | * Holds timeouts for deferred releases 36 | */ 37 | const scheduledReleases = {}; 38 | 39 | /** 40 | * Internal cache for tags by repository 41 | */ 42 | const tagCache = {}; 43 | 44 | /** 45 | * Logger instance provided by probot 46 | */ 47 | let logger; 48 | 49 | /** 50 | * Retrieves the parsed configuration file from the context's repository, if any 51 | * 52 | * If the config is present in the repository, it is merged with defaults. 53 | * 54 | * @param {Context} context Github context 55 | * @returns {Promise} The configuration file as object or null 56 | * @async 57 | */ 58 | async function getConfig(context) { 59 | const config = await context.config(CONFIG_NAME); 60 | return config && { ...defaults, ...config }; 61 | } 62 | 63 | /** 64 | * Resolves a git tag and returns the object it points to, most likely a commit 65 | * 66 | * This is especially useful when resolving annotated tags, as passing the tag's sha 67 | * resolves the actual commit it points to. 68 | * 69 | * @param {Context} context Github context 70 | * @param {string} sha The SHA of the tag to resolve 71 | * @returns {Promise} The tag object containing a "type" and "sha" 72 | * @async 73 | */ 74 | async function getTagObject(context, sha) { 75 | const params = context.repo({ sha }); 76 | const response = await context.github.gitdata.getTag(params); 77 | return response.data.object; 78 | } 79 | 80 | /** 81 | * Resolves a git reference (e.g. branch or tag) and returns the object it points to 82 | * 83 | * If the ref points to an annotated tag, the tag is resolved and the inner object is 84 | * resolved instead. Be sure to pass the entire reference name including its type, 85 | * e.g.: "tags/v1.0.0". 
/**
 * Resolves a git reference (e.g. branch or tag) and returns the object it
 * points to.
 *
 * If the ref points to an annotated tag, the tag is dereferenced and the
 * inner object is returned instead. Be sure to pass the entire reference
 * name including its type, e.g.: "tags/v1.0.0".
 *
 * @param {Context} context Github context
 * @param {String} ref The git reference to resolve
 * @returns {Promise} The reference object containing a "type" and "sha"
 * @async
 */
async function getReference(context, ref) {
  const params = context.repo({ ref });
  const response = await context.github.gitdata.getReference(params);

  const { object } = response.data;
  // Annotated tags point to a tag object, which in turn points to the
  // actual commit; dereference it via getTagObject.
  if (object.type === 'tag') {
    return getTagObject(context, object.sha);
  }

  return object;
}

/**
 * Fetches all tags of the context's repository.
 *
 * The tag requests are cached infinitely by repository ("owner/repo" key).
 * To add or remove tags, use {@link addTag} and {@link removeTag}
 * respectively.
 *
 * @param {Context} context Github context
 * @returns {Promise} The list of tags, each containing a "ref" and "sha"
 * @async
 */
function getTags(context) {
  const { owner, repo } = context.repo();
  const key = `${owner}/${repo}`;

  const cached = tagCache[key];
  if (cached) {
    // Serve the cached promise
    return cached;
  }

  // Directly store the promise in the cache to allow for concurrent requests.
  // The caller has to resolve it anyway, even when loading from the cache.
  logger.info(`Loading all tags for ${owner}/${repo}`);
  tagCache[key] = context.github.paginate(
    context.github.repos.getTags({ owner, repo, per_page: 100 }),
    result => result.data.map(tag => ({ ref: tag.name, sha: tag.commit.sha }))
  );

  return tagCache[key];
}
/**
 * Tries to find a tag referring to the given commit SHA.
 *
 * @param {Context} context Github context
 * @param {String} sha A full commit SHA
 * @returns {Promise} The tag object containing "ref" and "sha", if
 *                    found; otherwise undefined
 * @async
 */
async function findTag(context, sha) {
  const cachedTags = await getTags(context);
  return cachedTags.find(candidate => candidate.sha === sha);
}

/**
 * Adds a new tag to the cached list of tags.
 *
 * This method can be called even if the cache is cold; all tags are loaded
 * first and the tag is then inserted, to avoid race conditions. If the tag
 * is already registered, it is removed and re-added so its sha stays fresh.
 *
 * @param {Context} context Github context
 * @param {String} ref The name of this tag (without "tags/")
 * @param {String} sha The commit SHA that this tag points to
 * @returns {Promise} The new tag object containing "ref" and "sha"
 * @async
 */
async function addTag(context, ref, sha) {
  const { owner, repo } = context.repo();
  logger.info(`Adding tag ${ref} to ${owner}/${repo}`);

  const cachedTags = await getTags(context);
  // Drop a stale entry for the same ref, if present (mutates the cache).
  const staleIndex = cachedTags.findIndex(candidate => candidate.ref === ref);
  if (staleIndex !== -1) {
    cachedTags.splice(staleIndex, 1);
  }

  const newTag = { ref, sha };
  cachedTags.push(newTag);
  return newTag;
}
/**
 * Removes a tag from the cache.
 *
 * @param {Context} context Github context
 * @param {String} ref The name of this tag (without "tags/")
 * @returns {Promise} True if a tag was found and removed; otherwise false
 * @async
 */
async function removeTag(context, ref) {
  const { owner, repo } = context.repo();
  logger.info(`Removing tag ${ref} from ${owner}/${repo}`);

  const tags = await getTags(context);
  const index = tags.findIndex(tag => tag.ref === ref);
  if (index >= 0) {
    // splice mutates the shared cached array in place
    tags.splice(index, 1);
    return true;
  }

  return false;
}

/**
 * Resolves all status checks for a given reference (e.g. a branch or tag).
 *
 * NOTE that the list might contain multiple versions of the same status
 * check, identified by the "context" property. Use
 * {@link filterLatestStatuses} to only retrieve the most recent status
 * checks (as shown by Github).
 *
 * @param {Context} context Github context
 * @param {String} ref A tag name
 * @returns {Promise} A list of status check objects
 * @async
 */
async function getStatuses(context, ref) {
  // NOTE we assume that there are not more than roughly 30 status checks to fit on one page
  // Try to get statuses in chronological order first, as always delivered by Github
  const response = await context.github.repos.getStatuses(
    context.repo({ ref })
  );
  if (isSorted(response.data.map(status => status.updated_at), true)) {
    return response.data;
  }

  // For some reason, the statuses on the first page were not sorted by "updated_at"
  // To be safe, continue to fetch all pages and sort manually
  // NOTE(fix): the warning previously said "created_at", but the code checks
  // and sorts by "updated_at" — the message now matches the logic.
  logger.warn(`Statuses of commit ${ref} were not sorted by updated_at`);
  const statuses = await context.github.paginate(
    response,
    result => result.data
  );
  return _.sortBy(statuses, status => status.updated_at);
}
 *
 * @param {object[]} statuses A list of status checks
 * @param {object} config Release configuration for the repository
 * @returns {object[]} The list of filtered status checks
 */
function filterLatestStatuses(statuses, config) {
  // Checks whose "context" starts with any configured prefix are dropped first
  const ignoredChecks = config.ignoredChecks || [];
  const filtered = statuses.filter(
    status => !ignoredChecks.some(check => status.context.startsWith(check))
  );

  // Group by "context" and keep only the most recently updated check of each
  // group — this mirrors what the Github UI displays
  const statusesByContext = _.groupBy(filtered, status => status.context);
  return _.values(statusesByContext).map(context =>
    _.maxBy(context, status => status.updated_at)
  );
}

/**
 * Releases build artifacts to all configured targets
 *
 * Creates a store object for the configured provider and passes it to all
 * configured targets, if any. The store is able to list all release artifacts
 * and download them to a local temp directory. This directory is removed after
 * the release has completed.
262 | * 263 | * @param {Context} context Github context 264 | * @param {object} tag A tag object containing "ref" and "sha" 265 | * @param {object} config Release configuration for the repository 266 | * @returns A promise that resolves when the release has completed 267 | * @async 268 | */ 269 | async function performRelease(context, tag, config) { 270 | const { owner, repo } = context.repo(); 271 | logger.info(`Starting scheduled release of ${owner}/${repo}:${tag.ref}`); 272 | 273 | await withTempDir(async downloadDirectory => { 274 | try { 275 | const store = createStore( 276 | config.store, 277 | context.repo({ ref: tag.ref, sha: tag.sha }), 278 | downloadDirectory, 279 | logger 280 | ); 281 | 282 | const runs = config.targets.map(target => 283 | runTarget(target, context, tag, store, logger).catch(e => 284 | logger.error(e) 285 | ) 286 | ); 287 | 288 | await Promise.all(runs); 289 | } catch (e) { 290 | logger.error(e); 291 | } 292 | }); 293 | } 294 | 295 | /** 296 | * Handles a newly created or updated Github tag 297 | * 298 | * If the tag has no status checks attached or some of them are still pending, 299 | * it is skipped. If at least one status check failed, an error is reported and 300 | * the tag is skipped. 301 | * 302 | * If a release for the same tag had been scheduled, it is cancelled. This 303 | * prevents repeated releases due to cascading or rapidly changing status 304 | * checks reported by third party services (e.g. code coverage or CI). 
 *
 * @param {Context} context Github context
 * @param {object} tag A tag object containing "ref" and "sha"
 * @param {object} config Configurations for this task
 * @returns A promise that resolves when the tag has been processed
 * @async
 */
async function processTag(context, tag, config) {
  if (config == null) {
    throw new Error('Missing release config');
  }

  const { owner, repo } = context.repo();
  const id = `${owner}/${repo}:${tag.ref}`;
  logger.info(`Processing tag ${id} (${tag.sha})`);

  const statuses = await getStatuses(context, tag.ref);
  const latestStatuses = filterLatestStatuses(statuses, config);

  // Prevent a previously scheduled release. In case this status update is
  // successful again, we will reschedule down below.
  const scheduled = scheduledReleases[id];
  if (scheduled != null) {
    clearTimeout(scheduled);
    delete scheduledReleases[id];
  }

  if (latestStatuses.length === 0) {
    // We assume that status checks have been configured but haven't started yet
    // This means, we'll come back here once status checks have been added
    logger.info(`Skipping release of ${id} as no status checks were found`);
    return;
  }

  if (latestStatuses.some(status => status.state === STATE_PENDING)) {
    // Checks are still running, so no reason to proceed
    logger.info(`Skipping release of ${id} as status checks are pending`);
    return;
  }

  if (latestStatuses.some(status => status.state !== STATE_SUCCESS)) {
    // Some checks have failed, skip this release
    logger.info(`Skipping release of ${id} as status checks have failed`);
    return;
  }

  if (config.targets.length === 0) {
    // Only proceed to download if we are actually releasing
    logger.info(`Skipping release of ${id} since no targets were configured`);
    return;
  }

  // All checks have cleared, we're ready to release now
  // Debounce: the timeout is cancelled and restarted by any later status
  // event for the same tag (see the clearTimeout above)
  logger.info(`Scheduling release of ${id} in ${RELEASE_TIMEOUT} seconds`);
  scheduledReleases[id] = setTimeout(() => {
    delete scheduledReleases[id];
    performRelease(context, tag, config).catch(logger.error);
  }, RELEASE_TIMEOUT * 1000); // RELEASE_TIMEOUT is in seconds
}

module.exports = robot => {
  // Bind the module-level logger to probot's logger
  logger = robot.log;

  // Add created tags to the cache and create a release, if applicable
  // see https://developer.github.com/v3/activity/events/types/#createevent
  robot.on('create', async context => {
    // Ignore everything except tags
    if (context.payload.ref_type !== REF_TYPE_TAG) {
      return;
    }

    // Ignore repos without config file
    const config = await getConfig(context);
    if (config == null) {
      return;
    }

    const { ref } = context.payload;
    const reference = await getReference(context, `tags/${ref}`);
    const tag = await addTag(context, ref, reference.sha);
    await processTag(context, tag, config);
  });

  // Remove deleted tags from the cache
  // see https://developer.github.com/v3/activity/events/types/#deleteevent
  robot.on('delete', async context => {
    // Ignore everything except tags
    if (context.payload.ref_type !== REF_TYPE_TAG) {
      return;
    }

    // Ignore repos without config file
    const config = await getConfig(context);
    if (config == null) {
      return;
    }

    await removeTag(context, context.payload.ref);
  });

  // Create a release for succeeded status checks of a tag
  // see https://developer.github.com/v3/activity/events/types/#statusevent
  robot.on('status', async context => {
    // Ignore repos without config file
    const config = await getConfig(context);
    if (config == null) {
      return;
    }

    const tag = await findTag(context,
context.payload.sha);
    if (tag == null) {
      // Ignore, we're only interested in status checks of tags
      return;
    }

    await processTag(context, tag, config);
  });
};
--------------------------------------------------------------------------------
/lib/request.js:
--------------------------------------------------------------------------------
const fetch = require('node-fetch');

/**
 * Parses an error message from the given response
 *
 * First, this function tries to parse an error message from the response body.
 * If this does not work, it falls back to the HTTP status text.
 *
 * @param {Response} response A fetch Response object
 * @returns {Promise} A promise that resolves to an Error instance (the error
 *   is returned, not thrown, so the caller decides whether to throw it)
 * @async
 */
async function parseError(response) {
  const statusMessage = `${response.status} ${response.statusText}`;

  try {
    // Prefer a "message" field from a JSON error body, if one exists
    const json = await response.json();
    return new Error((json && json.message) || statusMessage);
  } catch (e) {
    // Body was empty or not JSON — fall back to the HTTP status line
    return new Error(statusMessage);
  }
}

/**
 * Performs an AJAX request to the given url with the specified options using
 * fetch
 *
 * After the request has finished, the result is parsed and checked for errors.
 * In case of an error, the response message is thrown as an error. On success,
 * the parsed JSON is passed into the promise.
31 | * 32 | * @param {string} url The destination of the AJAX call 33 | * @param {object} options Options to the {@link fetch} call 34 | * @returns {Promise} A Promise to the parsed response body 35 | * @async 36 | */ 37 | async function request(url, options = {}) { 38 | const { headers = {} } = options; 39 | if (!headers.Accept) { 40 | headers.Accept = 'application/json'; 41 | } 42 | 43 | const response = await fetch(url, { ...options, headers }); 44 | if (!response.ok) { 45 | throw await parseError(response); 46 | } 47 | 48 | return response.status === 204 ? undefined : response.json(); 49 | } 50 | 51 | module.exports = request; 52 | -------------------------------------------------------------------------------- /lib/stores/__tests__/index.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env jest */ 2 | /* global fail */ 3 | 4 | const tryRequire = require('try-require'); 5 | const createStore = require('../index'); 6 | 7 | beforeEach(() => { 8 | jest.clearAllMocks(); 9 | }); 10 | 11 | test('requires a store of the same name', () => { 12 | const storeFn = jest.fn(); 13 | tryRequire.mockReturnValue(storeFn); 14 | 15 | createStore('type'); 16 | expect(tryRequire).toHaveBeenCalledWith('./type', expect.anything()); 17 | }); 18 | 19 | test('invokes the store function', () => { 20 | const storeFn = jest.fn(); 21 | tryRequire.mockReturnValue(storeFn); 22 | 23 | const commit = { owner: 'owner', repo: 'repo', sha: 'feedface' }; 24 | const logger = { debug: true }; 25 | createStore('type', commit, '/some/path', logger); 26 | 27 | expect(storeFn).toHaveBeenCalledWith(commit, '/some/path', logger); 28 | }); 29 | 30 | test('returns the created store', () => { 31 | const store = {}; 32 | const storeFn = jest.fn().mockReturnValue(store); 33 | tryRequire.mockReturnValue(storeFn); 34 | 35 | const result = createStore('type'); 36 | expect(result).toBe(store); 37 | }); 38 | 39 | test('throws for unknown stores', () => { 40 | 
  tryRequire.mockReturnValue(null);
  expect(() => createStore('type')).toThrow(/unknown store/i);
});

test('throws when store type is missing', () => {
  // fail() guards that createStore never reaches the require step
  tryRequire.mockImplementation(() => fail('Not allowed'));
  expect(() => createStore('')).toThrow(/invalid store type/i);
});

test('throws for a store called "index"', () => {
  tryRequire.mockImplementation(() => fail('Not allowed'));
  expect(() => createStore('index')).toThrow(/invalid store type/i);
});

test('defaults to the console as logger', () => {
  const storeFn = jest.fn();
  tryRequire.mockReturnValue(storeFn);

  const commit = { owner: 'owner', repo: 'repo', sha: 'feedface' };
  createStore('type', commit, '/some/path');

  expect(storeFn).toHaveBeenCalledWith(commit, '/some/path', console);
});
--------------------------------------------------------------------------------
/lib/stores/index.js:
--------------------------------------------------------------------------------
const tryRequire = require('try-require');

/**
 * Loads and initializes the specified store implementation.
 * If no store with the given type can be found, an error is thrown.
 *
 * @param {string} type Identifier of the store type (e.g.
"zeus") 8 | * @param {object} commit A repository and commit to lookup 9 | * @param {string} downloadDirectory Path to a local cache directory 10 | * @param {object} logger An optional logger 11 | * @returns {Store} A store implementation 12 | */ 13 | function createStore(type, commit, downloadDirectory, logger = console) { 14 | if (!type || type === 'index') { 15 | throw new Error(`Invalid store type "${type}"`); 16 | } 17 | 18 | const storeFn = tryRequire(`./${type}`, require); 19 | if (storeFn == null) { 20 | throw new Error(`Unknown store "${type}"`); 21 | } 22 | 23 | return storeFn(commit, downloadDirectory, logger); 24 | } 25 | 26 | module.exports = createStore; 27 | -------------------------------------------------------------------------------- /lib/stores/s3.js: -------------------------------------------------------------------------------- 1 | const { basename, join } = require('path'); 2 | const s3 = require('s3'); 3 | 4 | const { 5 | /** 6 | * Access key for AWS S3 7 | */ 8 | S3_ACCESS_KEY, 9 | 10 | /** 11 | * Access secret key for AWS S3 12 | */ 13 | S3_SECRET_KEY, 14 | 15 | /** 16 | * AWS S3 bucket containing build assets 17 | */ 18 | S3_BUCKET, 19 | } = process.env; 20 | 21 | /** 22 | * AWS S3 client used to retrieve assets 23 | */ 24 | const client = s3.createClient({ 25 | s3Options: { 26 | accessKeyId: S3_ACCESS_KEY, 27 | secretAccessKey: S3_SECRET_KEY, 28 | }, 29 | }); 30 | 31 | /** 32 | * A store implementation for Amazon S3 33 | * 34 | * @param {object} commit A repository and commit to lookup 35 | * @param {string} downloadDirectory Path to a local cache directory 36 | * @param {object} logger An optional logger 37 | * @returns {object} The store bound to the commit 38 | */ 39 | module.exports = (commit, downloadDirectory, logger = console) => { 40 | const downloadCache = {}; 41 | let fileCache = null; 42 | 43 | /** 44 | * Downloads a file from the store 45 | * 46 | * The file is placed in the download directory. 
It is only downloaded once
   * when invoked multiple times. If the file does not exist, an error is
   * thrown. Use {@link listFiles} to retrieve available files.
   *
   * @param {object} file A file object to download
   * @returns {Promise} Absolute path to the local copy of the file
   * @async
   */
  function downloadFile(file) {
    // NOTE(review): the promise is cached before it settles, so a failed
    // download also stays cached and later calls see the same rejection —
    // confirm this is intended
    const cached = downloadCache[file.key];
    if (cached) {
      return cached;
    }

    logger.debug(`Downloading S3 file ${file.key} to ${downloadDirectory}`);
    const localFile = join(downloadDirectory, file.name);
    const downloader = client.downloadFile({
      localFile,
      s3Params: {
        Bucket: S3_BUCKET,
        Key: file.key,
      },
    });

    const promise = new Promise((resolve, reject) => {
      // NOTE: The timeout is necessary to be able to list files immediately
      downloader.on('end', () => setTimeout(() => resolve(localFile), 100));
      downloader.on('error', reject);
    });

    downloadCache[file.key] = promise;
    return promise;
  }

  /**
   * Downloads a list of files from the store
   *
   * The files are placed in the download directory. Each file is only
   * downloaded once when invoked multiple times. If one of the files
   * does not exist, an error is thrown. Use {@link listFiles} to
   * retrieve available files.
 *
   * @param {object[]} files A list of files to download
   * @returns {Promise} Absolute paths to local copies of all files
   * @async
   */
  function downloadFiles(files) {
    // Downloads run in parallel; downloadFile deduplicates repeated keys
    return Promise.all(files.map(file => downloadFile(file)));
  }

  /**
   * Maps an Amazon S3 object to a file object
   *
   * @param {object} entry An Amazon object entry
   * @returns {object} A file object with "key" (full S3 key) and "name"
   *   (base name of the key)
   */
  function mapEntry(entry) {
    return {
      key: entry.Key,
      name: basename(entry.Key),
    };
  }

  /**
   * Retrieves a list of files stored for the commit
   *
   * Searches for all files stored in the configured S3 bucket in a folder
   * named after the repository and commit. If the folder is not found, an
   * error is thrown.
   *
   * The list is only loaded once if invoked multiple times.
   *
   * @returns {Promise} A list of file objects
   * @async
   */
  function listFiles() {
    if (fileCache != null) {
      return fileCache;
    }

    // Objects are stored under "<owner>/<repo>/<sha>/"
    const { owner, repo, sha } = commit;
    const s3Path = `${owner}/${repo}/${sha}/`;
    logger.debug(`Loading list of S3 files in ${s3Path}`);

    const job = client.listObjects({
      s3Params: {
        Bucket: S3_BUCKET,
        Prefix: s3Path,
      },
    });

    // The listing is streamed in pages via "data" events
    const entries = [];
    fileCache = new Promise((resolve, reject) => {
      job.on('data', data => entries.push(...data.Contents));
      job.on('end', () => resolve(entries.map(mapEntry)));
      job.on('error', reject);
    });

    return fileCache;
  }

  /**
   * Downloads all files stored for the commit
   *
   * Searches for all files stored in the configured S3 bucket and
   * downloads them to the download directory. Each file is only
   * downloaded once when invoked multiple times.
 *
   * @returns {Promise} Absolute paths to local copies of all files
   * @async
   */
  async function downloadAll() {
    const files = await listFiles();
    return downloadFiles(files);
  }

  /**
   * Returns all capabilities of this store provider.
   *
   * @returns {object} The capabilities object (TYPE is false for S3 — the
   *   file objects carry no artifact type information)
   */
  function getCapabilities() {
    return {
      TYPE: false,
    };
  }

  return {
    downloadAll,
    downloadFile,
    downloadFiles,
    listFiles,
    getCapabilities,
  };
};
--------------------------------------------------------------------------------
/lib/stores/zeus.js:
--------------------------------------------------------------------------------
const { createWriteStream } = require('fs');
const { join } = require('path');
const request = require('request');

/**
 * API token to access Zeus
 */
const { ZEUS_API_TOKEN } = process.env;

/**
 * Server URL for Zeus
 */
const ZEUS_SERVER_URL = process.env.ZEUS_SERVER_URL || 'https://zeus.ci';

/**
 * A store implementation for Zeus
 *
 * @param {object} commit A repository and commit to lookup
 * @param {string} downloadDirectory Path to a local cache directory
 * @param {object} logger An optional logger
 * @returns {object} The store bound to the commit
 */
module.exports = (commit, downloadDirectory, logger = console) => {
  const downloadCache = {};
  let fileCache = null;

  /**
   * Downloads a file from the store
   *
   * The file is placed in the download directory. It is only downloaded once
   * when invoked multiple times. If the file does not exist, an error is
   * thrown. Use {@link listFiles} to retrieve available files.
 *
   * @param {object} file A file object to download
   * @returns {Promise} Absolute path to the local copy of the file
   * @async
   */
  function downloadFile(file) {
    const cached = downloadCache[file.url];
    if (cached) {
      return cached;
    }

    const url = `${ZEUS_SERVER_URL}/${file.url}`;
    logger.debug(`Downloading Zeus file ${url} to ${downloadDirectory}`);

    const localFile = join(downloadDirectory, file.name);
    // NOTE(review): only the write stream's "error" event is handled here;
    // errors emitted by the HTTP request stream itself, and non-2xx responses,
    // appear to be written to the file unchecked — confirm and consider
    // handling them explicitly
    const stream = request
      .get(url, { auth: { bearer: ZEUS_API_TOKEN } })
      .pipe(createWriteStream(localFile));

    const promise = new Promise((resolve, reject) => {
      // NOTE: The timeout is necessary to be able to list files immediately
      stream.on('finish', () => setTimeout(() => resolve(localFile), 100));
      stream.on('error', reject);
    });

    downloadCache[file.url] = promise;
    return promise;
  }

  /**
   * Downloads a list of files from the store
   *
   * The files are placed in the download directory. Each file is only
   * downloaded once when invoked multiple times. If one of the files
   * does not exist, an error is thrown. Use {@link listFiles} to
   * retrieve available files.
   *
   * @param {object[]} files A list of files to download
   * @returns {Promise} Absolute paths to local copies of all files
   * @async
   */
  function downloadFiles(files) {
    return Promise.all(files.map(file => downloadFile(file)));
  }

  /**
   * Retrieves a list of files stored for the commit
   *
   * The list is only loaded once if invoked multiple times.
82 | * 83 | * @returns {Promise} A list of file objects 84 | * @async 85 | */ 86 | function listFiles() { 87 | if (fileCache != null) { 88 | return fileCache; 89 | } 90 | 91 | const { owner, repo, sha } = commit; 92 | const url = `${ZEUS_SERVER_URL}/api/repos/gh/${owner}/${repo}/revisions/${ 93 | sha 94 | }/artifacts`; 95 | const auth = { bearer: ZEUS_API_TOKEN }; 96 | logger.debug( 97 | `Loading list of Zeus artifacts for ${owner}/${repo}/revisions/${sha}` 98 | ); 99 | 100 | fileCache = new Promise((resolve, reject) => { 101 | request(url, { auth }, (error, response, body) => { 102 | if (error) { 103 | reject(error); 104 | return; 105 | } 106 | 107 | if (response.statusCode !== 200) { 108 | reject( 109 | new Error(`Request failed with status code ${response.statusCode}`) 110 | ); 111 | return; 112 | } 113 | 114 | const artifacts = JSON.parse(body).map(artifact => ({ 115 | name: artifact.name, 116 | type: artifact.type, 117 | url: artifact.download_url, 118 | })); 119 | 120 | resolve(artifacts); 121 | }); 122 | }); 123 | 124 | return fileCache; 125 | } 126 | 127 | /** 128 | * Downloads all files stored for the commit 129 | * 130 | * Retrieves the full list of artifacts from Zeus and stores them in the 131 | * download directory. Each file is only downloaded once when invoked 132 | * multiple times. 133 | * 134 | * @returns {Promise} Absolute paths to local copies of all files 135 | * @async 136 | */ 137 | async function downloadAll() { 138 | const files = await listFiles(); 139 | return downloadFiles(files); 140 | } 141 | 142 | /** 143 | * Returns all capabilities of this store provider. 
 *
   * @returns {object} The capabilities object (TYPE is true — Zeus artifacts
   *   carry a "type" field)
   */
  function getCapabilities() {
    return {
      TYPE: true,
    };
  }

  return {
    downloadAll,
    downloadFile,
    downloadFiles,
    listFiles,
    getCapabilities,
  };
};
--------------------------------------------------------------------------------
/lib/targets/__mocks__/releaseContext.js:
--------------------------------------------------------------------------------
/* eslint-env jest */

const Context = require('../../__mocks__/context');
const logger = require('../../__mocks__/logger');
const createStore = require('../../stores');

jest.mock('../../stores');

// Test double for the enriched context passed to release targets: a mock
// Context carrying config, logger, store and a fixed tag
class ReleaseContext extends Context {
  constructor(config = {}, issue = {}) {
    super(issue);

    this.config = config;
    this.logger = logger;
    this.store = createStore();
    this.tag = { ref: ReleaseContext.TAG_REF, sha: ReleaseContext.TAG_SHA };
  }
}

ReleaseContext.TAG_REF = 'v1.0.0';
ReleaseContext.TAG_SHA = '2720b2c9037c0258118517725d2ac0d6364ae585';

module.exports = ReleaseContext;
--------------------------------------------------------------------------------
/lib/targets/__tests__/index.test.js:
--------------------------------------------------------------------------------
/* eslint-env jest */
/* global fail */

const tryRequire = require('try-require');
const { cloneContext } = require('../../utils');
const runTarget = require('../index');

jest.mock('../../utils');

beforeEach(() => {
  jest.clearAllMocks();
});

test('requires a target of the same name', async () => {
  const targetFn = jest.fn();
  tryRequire.mockReturnValue(targetFn);

  expect.assertions(1);
  await runTarget('name');
  expect(tryRequire).toHaveBeenCalledWith('./name', expect.anything());
});

test('requires a
target of the same name from config', async () => {
  const targetFn = jest.fn();
  tryRequire.mockReturnValue(targetFn);

  expect.assertions(1);
  // The { name } object form must resolve the same module as a plain string
  await runTarget({ name: 'name' });
  expect(tryRequire).toHaveBeenCalledWith('./name', expect.anything());
});

test('rejects when target config is missing', () => {
  tryRequire.mockImplementation(() => fail('Not allowed'));

  const err = new Error('Missing target specification');
  return expect(runTarget(null)).rejects.toEqual(err);
});

test('rejects when target name is missing', () => {
  tryRequire.mockImplementation(() => fail('Not allowed'));

  const err = new Error('Missing target specification');
  return expect(runTarget({})).rejects.toEqual(err);
});

test('rejects empty target name', () => {
  tryRequire.mockImplementation(() => fail('Not allowed'));

  const err = new Error('Missing target specification');
  return expect(runTarget('')).rejects.toEqual(err);
});

test('rejects unknown targets', () => {
  tryRequire.mockReturnValue(null);
  const err = new Error('Unknown deploy target "name"');
  return expect(runTarget('name')).rejects.toEqual(err);
});

test('creates a release context', async () => {
  const targetFn = jest.fn();
  tryRequire.mockReturnValue(targetFn);

  const config = { name: 'name' };
  const context = { context: true };
  const tag = { tag: true };
  const store = { store: true };
  const logger = { logger: true };

  expect.assertions(1);
  await runTarget(config, context, tag, store, logger);
  expect(cloneContext).lastCalledWith(context, {
    config,
    tag,
    logger,
    store,
  });
});

test('invokes the target function', async () => {
  const targetFn = jest.fn();
  tryRequire.mockReturnValue(targetFn);

  const clonedContext = { context: 'cloned' };
cloneContext.mockReturnValue(clonedContext);

  expect.assertions(1);
  await runTarget('name');
  expect(targetFn).lastCalledWith(clonedContext);
});
--------------------------------------------------------------------------------
/lib/targets/brew.js:
--------------------------------------------------------------------------------
const crypto = require('crypto');
const { shouldPerform } = require('dryrun');
const { createReadStream } = require('fs');
const _ = require('lodash');
const { basename } = require('path');
const { promiseProps } = require('../utils');

/**
 * Regex used to parse homebrew taps (github repositories)
 */
const TAP_REGEX = /^([a-z\d](?:[a-z\d]|-(?=[a-z\d])){0,38})\/([-_.\w\d]+)$/i;

/**
 * Extracts repository information for a homebrew tap from the given context
 *
 * If no explicit tap is given, 'homebrew/core' is assumed. Otherwise, the
 * tap string "<owner>/<name>" is transformed to the repository
 * "<owner>/homebrew-<name>".
18 | * 19 | * @param {object} config Configuration for the brew target 20 | * @returns {object} The owner and repository of the tap 21 | */ 22 | function getTapRepo(config) { 23 | const { tap } = config; 24 | if (!tap) { 25 | return { 26 | owner: 'homebrew', 27 | repo: 'homebrew-core', 28 | }; 29 | } 30 | 31 | const match = TAP_REGEX.exec(tap); 32 | if (!match) { 33 | throw new Error(`Invalid tap name: ${tap}`); 34 | } 35 | 36 | return { 37 | owner: match[1], 38 | repo: `homebrew-${match[2]}`, 39 | }; 40 | } 41 | 42 | /** 43 | * Calculates the checksum of a file's contents 44 | * 45 | * @param {string} path The path to a file to process 46 | * @param {string} algorithm A crypto algorithm, defaults to "sha256" 47 | * @returns {Promise} The checksum as hex string 48 | * @async 49 | */ 50 | function calculateChecksum(path, algorithm = 'sha256') { 51 | const stream = createReadStream(path); 52 | const hash = crypto.createHash(algorithm); 53 | 54 | return new Promise((resolve, reject) => { 55 | stream.on('data', data => hash.update(data, 'utf8')); 56 | stream.on('end', () => resolve(hash.digest('hex'))); 57 | stream.on('error', err => reject(err)); 58 | }); 59 | } 60 | 61 | /** 62 | * Resolves the content sha of a formula at the specified location. If the 63 | * formula does not exist, `null` is returned. 
 *
 * @param {Context} context A Github context
 * @param {object} tap Owner and repository of the tap
 * @param {string} path The path to the formula
 * @returns {Promise} The SHA of the file, if it exists; otherwise null
 * @async
 */
async function getFormulaSha(context, tap, path) {
  const { logger, github } = context;

  try {
    logger.debug(`Loading SHA for ${tap.owner}/${tap.repo}:${path}`);
    const response = await github.repos.getContent({ ...tap, path });
    return response.data.sha;
  } catch (err) {
    // A missing formula is expected — treat 404 as "not found"
    // NOTE(review): this relies on the client exposing the HTTP status as
    // err.code; newer octokit versions use err.status — verify against the
    // installed @octokit version
    if (err.code === 404) {
      return null;
    }

    throw err;
  }
}

/**
 * Pushes a new formula to a homebrew tap
 *
 * @param {TargetContext} context Enriched Github context
 * @returns {Promise} A promise that resolves when the release has finished
 * @async
 */
module.exports = async context => {
  const { config, github, logger, store, tag } = context;
  const { formula, path, template } = config;
  const { owner, repo } = context.repo();
  const { ref, sha } = tag;

  if (!template) {
    throw new Error('Missing template parameter in "brew" configuration');
  }

  // Get default formula name and location from the config
  const formulaName = formula || repo;
  const formulaPath =
    path == null ?
`Formula/${formulaName}.rb` : `${path}/${formulaName}.rb`; 108 | 109 | // Format checksums and the tag version into the formula file 110 | const files = await store.downloadAll(); 111 | const fileMap = _.keyBy(files, file => basename(file)); 112 | const promises = _.mapValues(fileMap, file => calculateChecksum(file)); 113 | const checksums = await promiseProps(promises); 114 | const data = _.template(template)({ ref, sha, checksums }); 115 | logger.debug(`Homebrew formula for ${formulaName}:\n${data}`); 116 | 117 | // Try to find the repository to publish in 118 | const tapRepo = getTapRepo(config); 119 | if (tapRepo.owner !== owner) { 120 | // TODO: Create a PR if we have no push rights to this repo 121 | logger.warn('Skipping homebrew release: PRs not supported yet'); 122 | return; 123 | } 124 | 125 | const params = { 126 | owner: tapRepo.owner, 127 | repo: tapRepo.repo, 128 | path: formulaPath, 129 | message: `release: ${formulaName} ${ref}`, 130 | content: Buffer.from(data).toString('base64'), 131 | sha: await getFormulaSha(context, tapRepo, formulaPath), 132 | }; 133 | 134 | logger.info( 135 | `Releasing ${owner}/${repo} tag ${tag.ref} ` + 136 | `to homebrew tap ${tapRepo.owner}/${tapRepo.repo} ` + 137 | `formula ${formulaName}` 138 | ); 139 | 140 | if (params.sha == null) { 141 | logger.debug( 142 | `Creating new file ${params.owner}/${params.repo}:${params.path}` 143 | ); 144 | if (shouldPerform()) { 145 | github.repos.createFile(params); 146 | } 147 | } else { 148 | logger.debug( 149 | `Updating file ${params.owner}/${params.repo}:${params.path} (${ 150 | params.sha 151 | })` 152 | ); 153 | if (shouldPerform()) { 154 | github.repos.updateFile(params); 155 | } 156 | } 157 | 158 | logger.info('Homebrew release completed'); 159 | }; 160 | -------------------------------------------------------------------------------- /lib/targets/cargo.js: -------------------------------------------------------------------------------- 1 | const { shouldPerform } = 
require('dryrun');
const _ = require('lodash');
const fetch = require('node-fetch');
const tar = require('tar');
const { withTempDir } = require('../files');
const { forEachChained, spawn } = require('../utils');

/**
 * Command to launch Rustup's cargo
 */
const CARGO_BIN = process.env.CARGO_BIN || 'cargo';

/**
 * @typedef {object} Dependency A package dependency specification
 * @prop {string} name Unique name of the package
 * @prop {string} req The required version range
 */

/**
 * @typedef {object} Package A cargo package
 * @prop {string} id Unique identifier containing name, version and location
 * @prop {string} name The unique name of the cargo package
 * @prop {string} version The current version of this package
 * @prop {string} manifest_path Path to the manifest in the local workspace
 * @prop {Dependency[]} dependencies The full list of package dependencies
 */

/**
 * @typedef {object} MetaData Metadata on the current cargo workspace
 * @prop {Package[]} packages The full list of packages in this workspace
 * @prop {string[]} workspace_members IDs of the packages in this workspace
 */

/**
 * Downloads the entire repository contents of the tag
 *
 * The contents are compressed into a tarball and returned in a buffer that can
 * be streamed for extraction.
/**
 * Downloads the entire repository contents of the tag
 *
 * The contents are compressed into a tarball and returned in a buffer that can
 * be streamed for extraction.
 *
 * @param {Context} context Enriched Github context
 * @returns {Promise} The tarball data as stream
 * @async
 */
async function downloadSources(context) {
  const { owner, repo } = context.repo();
  const { ref, sha } = context.tag;

  context.logger.info(`Downloading sources for ${owner}/${repo}:${sha}`);
  const url = `https://github.com/${owner}/${repo}/archive/${ref}.tar.gz`;
  const response = await fetch(url);

  // An unknown tag or repository returns an error page rather than a tarball.
  // Fail early instead of handing garbage to the tar extractor.
  if (!response.ok) {
    throw new Error(`Cannot download sources: ${url} returned ${response.status}`);
  }

  return response.body;
}

/**
 * Extracts a source code tarball in the specified directory
 *
 * The tarball should contain a top level directory that contains all source
 * files. The contents of that directory are directly extracted into the `cwd`
 * location.
 *
 * @param {Stream} stream A stream containing the source tarball
 * @param {string} cwd Path to the directory to extract in
 * @returns {Promise} A promise that resolves when the tarball has been extracted
 * @async
 */
function extractSources(stream, cwd) {
  return new Promise((resolve, reject) => {
    stream
      // Without these handlers a download or extraction error would leave the
      // promise pending forever; reject explicitly on either stream.
      .on('error', reject)
      .pipe(tar.extract({ strip: 1, cwd }))
      .on('error', reject)
      // NOTE(review): the short delay presumably lets pending disk writes
      // flush after tar emits "finish" — confirm before removing.
      .on('finish', () => setTimeout(resolve, 100));
  });
}

/**
 * Downloads source code of the Github repository and puts it in the specified
 * directory
 *
 * @param {Context} context Enriched Github context
 * @param {string} directory A directory to extract to
 * @returns {Promise} A promise that resolves when the sources are ready
 * @async
 */
async function downloadAndExtract(context, directory) {
  const stream = await downloadSources(context);
  context.logger.info(`Extracting sources to ${directory}`);
  return extractSources(stream, directory);
}
/**
 * Resolves cargo metadata for the project located in the specified directory
 *
 * Cargo metadata comprises the name and version of the root package, as well
 * as a flat list of its local dependencies and their respective versions. The
 * full list of dependencies is not included in this metadata (`--no-deps`).
 *
 * @param {Context} context Enriched Github context
 * @param {string} directory Path to the root crate / package
 * @returns {Promise} An object containing cargo metadata
 * @async
 */
async function getCargoMetadata(context, directory) {
  const { logger } = context;
  const args = [
    'metadata',
    '--manifest-path',
    `${directory}/Cargo.toml`,
    '--no-deps',
    '--format-version=1',
  ];

  logger.info(`Loading workspace information from ${directory}/Cargo.toml`);
  const json = await spawn(CARGO_BIN, args, undefined, logger);
  return JSON.parse(json);
}

/**
 * Determines the topological order in which to publish crates
 *
 * The order is determined by the dependency graph. In order to publish a
 * crate, all its dependencies have to be available on the index first.
 * Therefore, this method performs a topological sort of the given packages.
 *
 * Note that the actual order of packages in the result is indeterministic.
 * However, the topological order will always be consistent.
 *
 * @param {Package[]} packages A list of cargo packages (i.e. crates)
 * @returns {Package[]} The sorted list of packages
 * @throws {Error} If the dependency graph contains a cycle
 */
function getPublishOrder(packages) {
  // Use a Map keyed by crate name: unlike a plain object it cannot collide
  // with Object.prototype keys (e.g. a crate named "constructor").
  const remaining = new Map(packages.map(p => [p.name, p]));
  const ordered = [];

  while (remaining.size > 0) {
    // All crates whose workspace dependencies have already been ordered
    const ready = [...remaining.values()].filter(p =>
      p.dependencies.every(dep => !remaining.has(dep.name))
    );

    // Cargo rejects cyclic dependency graphs, but guard against an endless
    // loop in case the metadata is ever inconsistent.
    if (ready.length === 0) {
      throw new Error('Cannot determine publish order: cyclic dependencies');
    }

    ready.forEach(next => {
      ordered.push(next);
      remaining.delete(next.name);
    });
  }

  return ordered;
}

/**
 * Publishes the given package on crates.io
 *
 * In dry-run mode the cargo invocation is skipped entirely.
 *
 * @param {Context} context Enriched Github context
 * @param {Package} crate A cargo package to publish
 * @returns {Promise} A promise that resolves when the package has been published
 * @async
 */
async function publishPackage(context, crate) {
  const { logger } = context;
  const args = [
    'publish',
    '--manifest-path',
    crate.manifest_path,
    '--no-verify',
  ];

  logger.info(`Releasing crate ${crate.name} version ${crate.version}`);
  return shouldPerform() && spawn(CARGO_BIN, args, undefined, logger);
}
176 | * 177 | * @param {Context} context Enriched Github context 178 | * @param {string} directory The path to the root package 179 | * @returns {Promise} A promise that resolves when the workspace has been published 180 | * @async 181 | */ 182 | async function publishWorkspace(context, directory) { 183 | const metadata = await getCargoMetadata(context, directory); 184 | const packages = metadata.packages.filter(p => 185 | metadata.workspace_members.includes(p.id) 186 | ); 187 | 188 | const crates = getPublishOrder(packages); 189 | return forEachChained(crates, crate => publishPackage(context, crate)); 190 | } 191 | 192 | /** 193 | * Pushes a cargo package or workspace on crates.io 194 | * 195 | * @param {Context} context Enriched Github context 196 | * @returns {Promise} A promise that resolves when the release has finished 197 | * @async 198 | */ 199 | module.exports = async context => { 200 | const { logger } = context; 201 | 202 | if (!process.env.CARGO_REGISTRY_TOKEN) { 203 | logger.warn('Skipping cargo release due to missing token'); 204 | return; 205 | } 206 | 207 | await withTempDir(async directory => { 208 | await downloadAndExtract(context, directory); 209 | await publishWorkspace(context, directory); 210 | logger.info(`Cargo release completed`); 211 | }); 212 | }; 213 | -------------------------------------------------------------------------------- /lib/targets/github.js: -------------------------------------------------------------------------------- 1 | const { shouldPerform } = require('dryrun'); 2 | const { basename } = require('path'); 3 | const { findChangeset } = require('../changes'); 4 | const { getFile } = require('../utils'); 5 | 6 | /** 7 | * Path to the changelog file in the target repository 8 | * TODO: Make this configurable 9 | */ 10 | const CHANGELOG_PATH = 'CHANGELOG.md'; 11 | 12 | /** 13 | * Gets an existing or creates a new release for the given tag 14 | * 15 | * The release name and description body is loaded from CHANGELOG.md in 
/**
 * Gets an existing or creates a new release for the given tag
 *
 * The release name and description body is loaded from CHANGELOG.md in the
 * respective tag, if present. Otherwise, the release name defaults to the
 * tag and the body to the commit it points to.
 *
 * @param {Context} context Github context
 * @param {string} tag Tag name for this release
 * @returns {Promise} The newly created release
 * @async
 */
async function getOrCreateRelease(context, tag) {
  const { config, github, logger } = context;

  // Reuse an existing release for this tag, if any
  let existing = null;
  try {
    existing = await github.repos.getReleaseByTag(context.repo({ tag }));
  } catch (err) {
    if (err.code !== 404) {
      throw err;
    }
    // 404 simply means no release exists yet; fall through and create one
  }

  if (existing != null) {
    return existing.data;
  }

  const changelogPath = config.changelog || CHANGELOG_PATH;
  const changelog = await getFile(context, changelogPath, tag);
  const changes = changelog && findChangeset(changelog, tag);

  const params = context.repo({
    tag_name: tag,
    draft: false,
    prerelease: false,
    ...changes,
  });

  if (changes) {
    logger.info(`Found changelog for ${params.owner}/${params.repo}:${tag}`);
  }

  logger.info(`Creating release ${params.owner}/${params.repo}:${tag}`);
  if (!shouldPerform()) {
    // Dry run: return a stub so the caller can continue its flow
    return { id: 42, tag_name: tag, html_url: '[no url during DRY_RUN]' };
  }

  const created = await github.repos.createRelease(params);
  return created.data;
}
71 | * 72 | * @param {Context} context Enriched Github context 73 | * @returns {Promise} A promise that resolves when the release has finished 74 | * @async 75 | */ 76 | module.exports = async context => { 77 | const { github, logger, store, tag } = context; 78 | const release = await getOrCreateRelease(context, tag.ref); 79 | 80 | const files = await store.listFiles(); 81 | await Promise.all( 82 | files.map(async file => { 83 | const path = await store.downloadFile(file); 84 | const name = basename(path); 85 | const params = context.repo({ 86 | id: release.id, 87 | filePath: path, 88 | name, 89 | }); 90 | 91 | logger.info( 92 | `Uploading asset "${name}" to ${params.owner}/${params.repo}:${ 93 | release.tag_name 94 | }` 95 | ); 96 | return shouldPerform() ? github.repos.uploadAsset(params) : null; 97 | }) 98 | ); 99 | 100 | logger.info(`Github release completed: ${release.html_url}`); 101 | }; 102 | -------------------------------------------------------------------------------- /lib/targets/index.js: -------------------------------------------------------------------------------- 1 | const tryRequire = require('try-require'); 2 | const { cloneContext } = require('../utils'); 3 | 4 | /** 5 | * Performs the release to a specific target 6 | * 7 | * The target can either be specified via a string containing its name ore an 8 | * object with the name as key. All further properties of the object will be 9 | * passed as context to the target. 10 | * 11 | * The target also receives the current logger instance and the tag via the 12 | * context, as well as the store to retrieve release artifacts. It can then 13 | * decide which artifacts will be included in the release. 
/**
 * Performs the release to a specific target
 *
 * The target can either be specified via a string containing its name or an
 * object with the name as key. All further properties of the object will be
 * passed as context to the target.
 *
 * The target also receives the current logger instance and the tag via the
 * context, as well as the store to retrieve release artifacts. It can then
 * decide which artifacts will be included in the release.
 *
 * @param {object | string} target Target name or configuration
 * @param {Context} context Github context
 * @param {object} tag A tag object containing "ref" and "sha"
 * @param {object} store A store bound to the commit
 * @param {object} logger A logger instance passed on to the target
 * @returns {Promise} A promise that resolves when the release has succeeded
 * @async
 */
async function runTarget(target, context, tag, store, logger) {
  // Allow the shorthand form: a plain string is a target name without options
  const config = typeof target === 'string' ? { name: target } : target;
  if (!config || !config.name) {
    throw new Error('Missing target specification');
  }

  // Target modules live next to this file; tryRequire yields null if absent
  const targetFn = tryRequire(`./${config.name}`, require);
  if (targetFn == null) {
    throw new Error(`Unknown deploy target "${config.name}"`);
  }

  // Clone the context so per-target config does not mutate the shared context
  const targetContext = cloneContext(context, {
    config,
    tag,
    logger,
    store,
  });

  await targetFn(targetContext);
}

module.exports = runTarget;

const { shouldPerform } = require('dryrun');
const { spawn } = require('../utils');

/**
 * Command to launch npm
 */
const NPM_BIN = process.env.NPM_BIN || 'npm';
/**
 * Parameter used to reset NPM to its default registry.
 * If launched from yarn, this parameter is overwritten.
 * @see https://github.com/lerna/lerna/issues/896#issuecomment-311894609
 */
const NPM_REGISTRY = '--registry=https://registry.npmjs.org/';

/**
 * A regular expression used to find the package tarball
 */
const PACKAGE_REGEX = /.*\.tgz$/;

/**
 * Publishes the tarball to the NPM registry
 *
 * @param {string} path Absolute path to the tarball to upload
 * @param {string} access Optional value for npm's "--access" flag; only
 *     passed when set (used for scoped packages)
 * @param {object} logger An optional logger to pipe stdout and stderr to
 * @returns {Promise} A promise that resolves when the upload has completed
 * @async
 */
function publishPackage(path, access, logger) {
  const args = ['publish', NPM_REGISTRY, path];

  if (access) {
    // This parameter is only necessary for scoped packages, otherwise
    // it can be left blank
    args.push(`--access=${access}`);
  }

  return spawn(NPM_BIN, args, undefined, logger);
}

/**
 * Publishes a package tarball on the NPM registry
 *
 * Uses the first *.tgz artifact found in the store; skips the release if
 * there is none. The actual upload is skipped in dry-run mode.
 *
 * @param {Context} context Enriched Github context
 * @returns {Promise} A promise that resolves when the release has finished
 * @async
 */
module.exports = async context => {
  const { config, logger, store } = context;

  const files = await store.listFiles();
  // Use the first tarball found among the build artifacts
  const packageFile = files.find(file => PACKAGE_REGEX.test(file.name));
  if (packageFile == null) {
    logger.info('Skipping NPM release since there is no package tarball');
    return;
  }

  const packagePath = await store.downloadFile(packageFile);
  logger.info(`Releasing ${packageFile.name} to NPM`);
  if (shouldPerform()) {
    await publishPackage(packagePath, config.access, logger);
  }

  logger.info('NPM release completed');
};
shouldPerform } = require('dryrun'); 2 | const fs = require('fs'); 3 | const { basename, join } = require('path'); 4 | const { promisify } = require('util'); 5 | const { withTempDir } = require('../files'); 6 | const { getFile, spawn } = require('../utils'); 7 | 8 | const writeFile = promisify(fs.writeFile); 9 | 10 | /** 11 | * Command to launch cocoapods 12 | */ 13 | const COCOAPODS_BIN = process.env.COCOAPODS_BIN || 'pod'; 14 | 15 | /** 16 | * Pushes a new Podspec to Cocoapods 17 | * 18 | * @param {Context} context Enriched Github context 19 | * @returns {Promise} A promise that resolves when the release has finished 20 | * @async 21 | */ 22 | module.exports = async context => { 23 | const { config, logger, tag } = context; 24 | const { owner, repo } = context.repo(); 25 | 26 | if (!process.env.COCOAPODS_TRUNK_TOKEN) { 27 | logger.warn('Skipping cocoapods release due to missing trunk token'); 28 | return; 29 | } 30 | 31 | if (config.spec == null) { 32 | logger.warn(`Missing podspec configuration for ${owner}/${repo}`); 33 | return; 34 | } 35 | 36 | logger.info(`Loading podspec from ${owner}/${repo}:${config.spec}`); 37 | const spec = await getFile(context, config.spec, tag.ref); 38 | if (spec == null) { 39 | logger.warn(`Podspec not found at ${owner}/${repo}:${config.spec}`); 40 | return; 41 | } 42 | 43 | await withTempDir(async directory => { 44 | const fileName = basename(config.spec); 45 | const filePath = join(directory, fileName); 46 | await writeFile(filePath, spec, 'utf8'); 47 | 48 | logger.info(`Pushing podspec ${fileName} to cocoapods`); 49 | if (shouldPerform()) { 50 | await spawn(COCOAPODS_BIN, ['setup'], undefined, logger); 51 | await spawn( 52 | COCOAPODS_BIN, 53 | ['trunk', 'push', fileName], 54 | { cwd: directory, env: process.env }, 55 | logger 56 | ); 57 | } 58 | 59 | logger.info(`Cocoapods release completed: ${fileName}`); 60 | }); 61 | }; 62 | -------------------------------------------------------------------------------- /lib/targets/pypi.js: 
const { shouldPerform } = require('dryrun');
const { extname } = require('path');
const { spawn } = require('../utils');

/**
 * Command to launch twine
 */
const TWINE_BIN = process.env.TWINE_BIN || 'twine';

/**
 * White list for file extensions uploaded to PyPI
 *
 * NOTE: Despite the name, this covers more than wheels: source distributions
 * (.gz) and zip archives are uploaded as well.
 */
const WHEEL_EXTENSIONS = ['.whl', '.gz', '.zip'];

/**
 * @typedef {object} TwineCredentials
 * @prop {string} TWINE_USERNAME
 * @prop {string} TWINE_PASSWORD
 */

/**
 * Uploads a wheel to PyPI using twine
 *
 * Twine reads its credentials from the environment (see TwineCredentials).
 *
 * @param {string} path Absolute path to the wheel to upload
 * @param {object} logger An optional logger to pipe stdout and stderr to
 * @returns {Promise} A promise that resolves when the upload has completed
 * @async
 */
function uploadAsset(path, logger) {
  // TODO: Sign the wheel with "--sign"
  return spawn(TWINE_BIN, ['upload', path], undefined, logger);
}
39 | * 40 | * @param {Context} context Enriched Github context 41 | * @returns {Promise} A promise that resolves when the release has finished 42 | * @async 43 | */ 44 | module.exports = async context => { 45 | const { logger, store, tag } = context; 46 | 47 | if (!process.env.TWINE_USERNAME || !process.env.TWINE_PASSWORD) { 48 | logger.warn('Skipping PyPI release due to missing credentials'); 49 | return; 50 | } 51 | 52 | const files = await store.listFiles(); 53 | const wheelFiles = files.filter(file => 54 | WHEEL_EXTENSIONS.includes(extname(file.name)) 55 | ); 56 | if (wheelFiles.length === 0) { 57 | logger.info('Skipping PyPI release since there are no wheels'); 58 | return; 59 | } 60 | 61 | const { owner, repo } = context.repo(); 62 | logger.info( 63 | `Releasing ${wheelFiles.length} wheels for ${owner}/${repo} tag ${ 64 | tag.ref 65 | } to PyPI` 66 | ); 67 | 68 | await Promise.all( 69 | wheelFiles.map(async file => { 70 | const path = await store.downloadFile(file); 71 | logger.info(`Uploading asset "${file.name}" via twine`); 72 | return shouldPerform() && uploadAsset(path, logger); 73 | }) 74 | ); 75 | 76 | logger.info('PyPI release completed'); 77 | }; 78 | -------------------------------------------------------------------------------- /lib/utils.js: -------------------------------------------------------------------------------- 1 | const child = require('child_process'); 2 | const _ = require('lodash'); 3 | const split = require('split'); 4 | 5 | /** 6 | * Asynchronously calls the predicate on every element of the array and filters 7 | * for all elements where the predicate resolves to true. 
/**
 * Asynchronously calls the predicate on every element of the array and filters
 * for all elements where the predicate resolves to true.
 *
 * All predicates run concurrently; the result preserves the original order.
 *
 * @param {Array} array An array to filter
 * @param {Function} predicate A predicate function that resolves to a boolean
 * @param {any} thisArg Optional argument passed as this to the predicate
 * @returns {Promise} The filtered array
 * @async
 */
async function filterAsync(array, predicate, thisArg) {
  const verdicts = await Promise.all(array.map(predicate, thisArg));
  return array.filter((element, index) => verdicts[index]);
}

/**
 * Asynchronously calls the iteratee on each element of the array one element
 * at a time. This results in a chain of asynchronous actions that resolves
 * once the last item action has completed. In contrast, `Promise.all`
 * executes each promise simultaneously.
 *
 * The iteratee is invoked as with `Array.forEach`: It receives the current
 * element, index and the array. This is bound to `thisArg` if present.
 *
 * @param {Array} array An array to iterate over
 * @param {Function} iteratee An action function that receives the element
 * @param {any} thisArg Optional argument passed as this to the action
 * @returns {Promise} Resolves when the last action has completed
 * @async
 */
async function forEachChained(array, iteratee, thisArg) {
  // NOTE: Array.prototype.reduce takes no thisArg parameter; the previous
  // implementation passed one as a (silently ignored) third argument.
  // `thisArg` is applied to the iteratee via `apply` below instead.
  return array.reduce(
    (prev, ...args) => prev.then(() => iteratee.apply(thisArg, args)),
    Promise.resolve()
  );
}
/**
 * Returns a promise that resolves when each value of the given object resolves.
 * Works just like `Promise.all`, just on objects.
 *
 * @param {object} object An object with one or more promised values
 * @returns {Promise} A promise that resolves with each value
 * @async
 */
async function promiseProps(object) {
  // Await all values concurrently, then rebuild the object. Implemented with
  // Object.entries + reduce (both available on the supported Node versions)
  // instead of lodash, as no lodash-specific behavior is needed here.
  const pairs = await Promise.all(
    Object.entries(object).map(async ([key, value]) => [key, await value])
  );

  return pairs.reduce((result, [key, value]) => {
    result[key] = value;
    return result;
  }, {});
}

/**
 * Loads a file from the context's repository
 *
 * @param {Context} context Github context
 * @param {string} path The path of the file in the repository
 * @param {string} ref The string name of commit / branch / tag
 * @returns {Promise} The decoded file contents, or null if the file is absent
 * @async
 */
async function getFile(context, path, ref) {
  const params = context.repo({ path, ref });
  try {
    const response = await context.github.repos.getContent(params);
    // The contents API returns the file base64-encoded
    return Buffer.from(response.data.content, 'base64').toString();
  } catch (err) {
    if (err.code === 404) {
      // A missing file is an expected condition, not an error
      return null;
    }

    throw err;
  }
}

/**
 * Checks whether the given array is sorted
 *
 * Empty and single-element arrays count as sorted. Equal neighbors are
 * accepted in both directions (non-strict ordering).
 *
 * @param {array} array An array containing potentially sorted elements
 * @param {bool} descending Whether to check for descending sort order
 * @returns {boolean} Whether the array is sorted
 */
function isSorted(array, descending = false) {
  const ordered = descending
    ? (prev, next) => prev >= next
    : (prev, next) => prev <= next;

  return array.every(
    (element, index) => index === 0 || ordered(array[index - 1], element)
  );
}

/**
 * Clones the given github context and attaches parameters to it
 *
 * The clone is created via the context's own constructor, so it keeps the
 * original prototype and methods.
 *
 * @param {Context} context Github context
 * @param {object} params Optional params to assign to the new context
 * @returns {Context} A cloned instance of the context with additional parameters
 */
function cloneContext(context, params = {}) {
  return Object.assign(new context.constructor(), context, params);
}

/**
 * Strips env values from the options object
 *
 * Replaces `options.env` with just its keys, so that environment values
 * (possibly secrets) do not end up on error objects or in logs.
 *
 * @param {object} options Optional options passed to spawn
 * @returns {object} A shallow copy with env values removed
 */
function stripEnv(options = {}) {
  const override = options.env && { env: Object.keys(options.env) };
  return Object.assign({}, options, override);
}

/**
 * Creates an error object and attaches the error code
 *
 * @param {number|string} code A non-zero error code
 * @param {string} command The command to run
 * @param {string[]} args Optional arguments to pass to the command
 * @param {object} options Optional options to pass to child_process.spawn
 * @returns {Error} The error with code, args and env-stripped options
 */
function processError(code, command, args, options) {
  const error = new Error(`Process "${command}" errored with code ${code}`);
  error.code = code;
  error.args = args;
  error.options = stripEnv(options);
  return error;
}
/**
 * Asynchronously spawns a child process
 *
 * Resolves to the accumulated standard output (a Buffer) when the child
 * process exits with code 0. Rejects with an error created by processError
 * (carrying code, args and env-stripped options) on a non-zero exit or a
 * spawn failure.
 *
 * @param {string} command The command to run
 * @param {string[]} args Optional arguments to pass to the command
 * @param {object} options Optional options to pass to child_process.spawn
 * @param {object} logger A logger to pipe stdout and stderr to
 * @returns {Promise} A promise that resolves to the standard output when
 *   the child process exits
 * @async
 */
function spawn(command, args, options, logger) {
  return new Promise((resolve, reject) => {
    // Raw stdout chunks, concatenated into the resolved Buffer
    const chunks = [];

    // NOTE: On Linux, stdout and stderr might flush immediately after the
    // process exits. By adding a 0 timeout, we can make sure that the promise
    // is not resolved before both pipes have finished.
    const succeed = () => setTimeout(() => resolve(Buffer.concat(chunks)), 0);
    const fail = e => reject(processError(e.code, command, args, options));

    try {
      const process = child
        .spawn(command, args, options)
        .on('exit', code => (code === 0 ? succeed() : fail({ code })))
        .on('error', error => fail(error));

      process.stdout.on('data', chunk => chunks.push(chunk));

      if (logger) {
        // Mirror both output streams to the logger, line by line
        process.stdout
          .pipe(split())
          .on('data', data => logger.debug(`${command}: ${data}`));
        process.stderr
          .pipe(split())
          .on('data', data => logger.debug(`${command}: ${data}`));
      }
    } catch (error) {
      // child_process.spawn can also throw synchronously (e.g. bad arguments)
      reject(error);
    }
  });
}

module.exports = {
  promiseProps,
  cloneContext,
  filterAsync,
  forEachChained,
  getFile,
  isSorted,
  spawn,
};

/**
 * Regular expression for matching semver versions
 *
 * Modified to match version components
 * Copyright (c) Sindre Sorhus (sindresorhus.com)
 * @see https://github.com/sindresorhus/semver-regex
 */
const semverRegex = () =>
  /\bv?(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(?:-([\da-z-]+(?:\.[\da-z-]+)*))?(?:\+([\da-z-]+(?:\.[\da-z-]+)*))?\b/gi;
| * In case the version contains a leading "v", it is stripped from the result. 15 | * All semantic versions are supported. See {@link http://semver.org/} for 16 | * more information. 17 | * 18 | * @param {string} text Some text containing a version 19 | * @returns {string} The extracted version or null 20 | */ 21 | function getVersion(text) { 22 | const matches = semverRegex().exec(text); 23 | const version = matches && matches[0]; 24 | return version && version[0].toLowerCase() === 'v' 25 | ? version.substr(1) 26 | : version; 27 | } 28 | 29 | /** 30 | * @typedef {object} SemVer Parsed semantic version 31 | * 32 | * @prop {number} major The major version number 33 | * @prop {number} minor The minor version number 34 | * @prop {number} patch The patch version number 35 | * @prop {string?} pre Optional pre-release specifier 36 | * @prop {string?} build Optional build metadata 37 | */ 38 | 39 | /** 40 | * Parses a version number from the given text 41 | * 42 | * @param {string} text Some text containing a version 43 | * @returns {SemVer?} The parsed version or null 44 | */ 45 | function parseVersion(text) { 46 | const matches = semverRegex().exec(text); 47 | return ( 48 | matches && { 49 | major: parseInt(matches[1], 10), 50 | minor: parseInt(matches[2], 10), 51 | patch: parseInt(matches[3], 10), 52 | pre: matches[4], 53 | build: matches[5], 54 | } 55 | ); 56 | } 57 | 58 | module.exports = { 59 | getVersion, 60 | parseVersion, 61 | }; 62 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "probot-release", 3 | "version": "0.8.9", 4 | "description": "Publishes build artifacts to GitHub releases", 5 | "author": "Jan Michael Auer ", 6 | "license": "BSD-3-Clause", 7 | "repository": "https://github.com/getsentry/probot-release.git", 8 | "scripts": { 9 | "fix:eslint": "eslint --fix lib", 10 | "fix:prettier": "prettier --write 
'lib/**/*.js'", 11 | "fix": "npm-run-all fix:eslint fix:prettier", 12 | "start": "probot run ./index.js", 13 | "test:jest": "jest", 14 | "test:eslint": "eslint lib", 15 | "test:prettier": "prettier-check 'lib/**/*.js'", 16 | "test": "npm-run-all test:jest test:eslint test:prettier", 17 | "test:watch": "jest --watch --notify" 18 | }, 19 | "dependencies": { 20 | "dryrun": "^1.0.0", 21 | "lodash": "^4.17.4", 22 | "node-fetch": "^1.7.3", 23 | "probot": "^0.11.0", 24 | "request": "^2.83.0", 25 | "rimraf": "^2.6.2", 26 | "s3": "^4.4.0", 27 | "semver-regex": "^1.0.0", 28 | "split": "^1.0.1", 29 | "tar": "^4.0.2", 30 | "try-require": "^1.2.1" 31 | }, 32 | "devDependencies": { 33 | "eslint": "^4.11.0", 34 | "eslint-config-airbnb-base": "^12.1.0", 35 | "eslint-config-prettier": "^2.8.0", 36 | "eslint-plugin-import": "^2.8.0", 37 | "jest": "^21.2.1", 38 | "localtunnel": "^1.8.2", 39 | "npm-run-all": "^4.1.2", 40 | "prettier": "^1.8.2", 41 | "prettier-check": "^2.0.0" 42 | }, 43 | "engines": { 44 | "node": ">=8" 45 | } 46 | } 47 | --------------------------------------------------------------------------------