├── .dockerignore ├── .eslintrc ├── .eslintignore ├── .gitignore ├── example.png ├── .vscode └── settings.json ├── bin ├── index.js └── up-to-code.js ├── .babelrc ├── src ├── bunyan-truncate-serializer.js ├── decorate-function-logger.js ├── promises.js ├── exec.js ├── logger.js ├── pkg.js ├── github.js ├── index.js └── gitlab.js ├── Dockerfile ├── test ├── promises.js ├── index.js └── gitlab.js ├── LICENSE ├── package.json ├── README.md └── newrelic.js /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | repos -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "porch" 3 | } -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | repos 3 | newrelic.js -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | repos 3 | .env 4 | newrelic_agent.log 5 | npm-debug.log -------------------------------------------------------------------------------- /example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/porchdotcom/up-to-code/HEAD/example.png -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "search.exclude": { 3 | "**/node_modules": true, 4 | "repos/**": true 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /bin/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 
3 | require('newrelic'); 4 | require('babel-polyfill'); 5 | require('babel-register'); 6 | require('q').longStackSupport = true; 7 | 8 | require('./up-to-code'); 9 | -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["es2015"], 3 | "plugins": [ 4 | "babel-plugin-transform-class-properties", 5 | "babel-plugin-transform-es2015-parameters", 6 | "transform-object-rest-spread" 7 | ] 8 | } -------------------------------------------------------------------------------- /src/bunyan-truncate-serializer.js: -------------------------------------------------------------------------------- 1 | import { isString, isPlainObject, truncate } from 'lodash'; 2 | 3 | export default body => { 4 | if (isString(body)) { 5 | return truncate(body); 6 | } else if (isPlainObject(body)) { 7 | return JSON.parse(JSON.stringify(body, (k, v) => isString(v) ? truncate(v) : v)); 8 | } 9 | return body; 10 | }; 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:4 2 | 3 | RUN git config --global user.name uptocode \ 4 | && git config --global user.email uptocode@porch.com \ 5 | && git config --global push.default simple 6 | 7 | RUN npm install -g npm@3.10.5 8 | 9 | WORKDIR /opt/build 10 | COPY package.json /opt/build/ 11 | RUN npm install --production 12 | 13 | COPY . 
/opt/build/ 14 | 15 | ENTRYPOINT ["node", "bin/index.js"] 16 | -------------------------------------------------------------------------------- /test/promises.js: -------------------------------------------------------------------------------- 1 | import { until } from '../src/promises'; 2 | import assert from 'assert'; 3 | 4 | describe.only('until', () => { 5 | it('keeps going until the condition is true', () => { 6 | let condition = false; 7 | let count = 0; 8 | setTimeout(() => { 9 | condition = true; 10 | }, 1000); 11 | 12 | return until(() => { 13 | count++; 14 | return condition; 15 | }, 100).then(() => { 16 | assert(count > 5, count); 17 | assert(count < 15, count); 18 | }); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | import { updateGitlabRepoDependency } from '../src'; 2 | 3 | describe.skip('index', () => { 4 | it('update gitlab repo with dependency', () => ( 5 | updateGitlabRepoDependency({ 6 | name: '', 7 | packageName: '', 8 | githubToken: process.env.GITHUB_TOKEN, 9 | githubOrg: process.env.GITHUB_ORG, 10 | gitlabHost: process.env.GITLAB_HOST, 11 | gitlabOrg: process.env.GITLAB_ORG, 12 | gitlabToken: process.env.GITLAB_TOKEN, 13 | gitlabUser: process.env.GITLAB_USER 14 | }) 15 | )); 16 | }); 17 | -------------------------------------------------------------------------------- /src/decorate-function-logger.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import { omit } from 'lodash'; 3 | 4 | const blacklist = [ 5 | 'githubOrg', 6 | 'githubToken', 7 | 'gitlabOrg', 8 | 'gitlabToken', 9 | 'gitlabHost', 10 | 'gitlabUser' 11 | ]; 12 | export default fn => ({ logger: parentLogger, ...rest }) => { 13 | assert(parentLogger, 'function expected logger argument'); 14 | const params = omit(rest, blacklist); 15 | const logger = 
parentLogger.child(params); 16 | return fn({ logger, ...rest }).catch(err => { 17 | logger.warn({ err }); 18 | throw err; 19 | }); 20 | }; 21 | -------------------------------------------------------------------------------- /src/promises.js: -------------------------------------------------------------------------------- 1 | import Q from 'q'; 2 | 3 | // promise version of filter...resolve to boolean 4 | export const filter = (arr, fn) => { 5 | const ret = []; 6 | return Q.all(arr.map(elem => { 7 | return Q.fcall(() => { 8 | return fn(elem); 9 | }).then(include => { 10 | if (include) { 11 | ret.push(elem); 12 | } 13 | }); 14 | })).thenResolve(ret); 15 | }; 16 | 17 | export const until = (fn, delay) => ( 18 | Q.fcall(() => ( 19 | fn() 20 | )).then(condition => ( 21 | condition || Q.delay(delay).then(() => until(fn, delay)) 22 | )) 23 | ); 24 | -------------------------------------------------------------------------------- /src/exec.js: -------------------------------------------------------------------------------- 1 | import Q from 'q'; 2 | import childProcess from 'child_process'; 3 | import VError from 'verror'; 4 | 5 | export default (cmd, { logger, ...options } = {}) => { 6 | const defer = Q.defer(); 7 | logger.trace(`exec ${cmd}`); 8 | childProcess.exec(cmd, { 9 | ...options 10 | }, (err, stdout, stderr) => { 11 | logger.trace({ stdout, stderr }); 12 | 13 | if (err) { 14 | defer.reject(new VError({ 15 | cause: err, 16 | info: { 17 | stdout, 18 | stderr 19 | } 20 | })); 21 | } else { 22 | defer.resolve(stdout); 23 | } 24 | }); 25 | return defer.promise; 26 | }; 27 | -------------------------------------------------------------------------------- /src/logger.js: -------------------------------------------------------------------------------- 1 | import bunyan from 'bunyan'; 2 | import PrettyStream from 'bunyan-prettystream'; 3 | import NewRelicStream from 'bunyan-newrelic-stream'; 4 | import errorSerializer from 'bunyan-error-serializer'; 5 | import 
truncateSerializer from './bunyan-truncate-serializer'; 6 | 7 | const prettyStdOut = new PrettyStream(); 8 | prettyStdOut.pipe(process.stdout); 9 | 10 | export default bunyan.createLogger({ 11 | name: 'up-to-code', 12 | streams: [{ 13 | level: 'trace', 14 | type: 'raw', 15 | stream: prettyStdOut 16 | }, { 17 | level: 'error', 18 | type: 'raw', 19 | stream: new NewRelicStream() 20 | }], 21 | src: true, 22 | serializers: { 23 | err: errorSerializer, 24 | body: truncateSerializer 25 | } 26 | }); 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2016 Porch.com 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /bin/up-to-code.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import parseArgs from 'minimist'; 3 | import uptocode from '../src'; 4 | import logger from '../src/logger'; 5 | 6 | const { 7 | 'package-name': packageName, 8 | 'github-org': githubOrg, 9 | 'github-token': githubToken, 10 | 'gitlab-org': gitlabOrg, 11 | 'gitlab-user': gitlabUser, 12 | 'gitlab-token': gitlabToken, 13 | 'gitlab-host': gitlabHost, 14 | metadata 15 | } = parseArgs(process.argv.slice(2)); 16 | 17 | assert(packageName, 'npm module required'); 18 | assert(githubOrg, 'github organization required'); 19 | assert(githubToken, 'github token required'); 20 | assert(gitlabOrg, 'gitlab organization required'); 21 | assert(gitlabToken, 'gitlab authentication token required'); 22 | assert(gitlabHost, 'gitlab host required'); 23 | assert(gitlabUser, 'gitlab user required'); 24 | 25 | uptocode({ 26 | packageName, 27 | githubOrg, 28 | githubToken, 29 | gitlabOrg, 30 | gitlabToken, 31 | gitlabHost, 32 | gitlabUser, 33 | metadata, 34 | logger 35 | }).then(() => { 36 | logger.info('success'); 37 | process.exit(0); 38 | }).catch(err => { 39 | logger.info(`err ${err.stack}`); 40 | process.exit(1); 41 | }); 42 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "up-to-code", 3 | "version": "1.0.0", 4 | "private": true, 5 | "scripts": { 6 | "clean": "rimraf repos", 7 | "lint": "eslint .", 8 | "start": "babel-node bin/index.js", 9 | "test": "mocha --require babel-core/register --require dotenv/config --timeout 60000" 10 | }, 11 | "bin": { 12 | "uptocode": "bin/index.js" 13 | }, 14 | "dependencies": { 15 | "babel-cli": "^6.6.5", 16 | "babel-plugin-transform-class-properties": "^6.19.0", 17 | 
"babel-plugin-transform-es2015-parameters": "^6.21.0", 18 | "babel-plugin-transform-object-rest-spread": "^6.20.2", 19 | "babel-polyfill": "^6.13.0", 20 | "babel-preset-es2015": "^6.18.0", 21 | "babel-register": "^6.11.6", 22 | "bunyan": "^1.8.5", 23 | "bunyan-error-serializer": "^1.0.4", 24 | "bunyan-newrelic-stream": "1.0.3", 25 | "bunyan-prettystream": "^0.1.3", 26 | "clean-stack": "^1.1.1", 27 | "git-url-parse": "^6.1.0", 28 | "github": "^0.2.4", 29 | "gitlab": "^1.7.1", 30 | "json-file-plus": "^3.3.0", 31 | "lodash": "^4.8.2", 32 | "minimist": "^1.2.0", 33 | "newrelic": "^4.8.1", 34 | "q": "^1.4.1", 35 | "request": "^2.74.0", 36 | "semver": "^5.3.0", 37 | "semver-regex": "^1.0.0", 38 | "verror": "^1.9.0" 39 | }, 40 | "devDependencies": { 41 | "babel-core": "^6.14.0", 42 | "babel-eslint": "^6.0.2", 43 | "dotenv": "^2.0.0", 44 | "eslint": "^2.7.0", 45 | "eslint-config-porch": "^5.0.0", 46 | "eslint-plugin-import": "^1.12.0", 47 | "eslint-plugin-mocha": "^2.1.0", 48 | "eslint-plugin-react": "^6.0.0", 49 | "mocha": "^3.0.2", 50 | "rimraf": "^2.5.2" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /test/gitlab.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import GitLab from '../src/gitlab'; 3 | import Q from 'q'; 4 | 5 | describe('gitlab', () => { 6 | const gitlab = new GitLab({ 7 | org: process.env.GITLAB_ORG, 8 | token: process.env.GITLAB_TOKEN, 9 | host: process.env.GITLAB_HOST 10 | }); 11 | 12 | it('fetches all repos', () => ( 13 | Q.all([ 14 | gitlab.fetchRepos(), 15 | gitlab.fetchRepos() 16 | ]).spread((repos1, repos2) => { 17 | assert.deepEqual(repos1, repos2); 18 | }) 19 | )); 20 | 21 | it('fetches fluxible-resolver repo', () => ( 22 | gitlab.fetchRepo({ 23 | repo: 'fluxible-resolver' 24 | }).tap(({ id, name }) => { 25 | assert.equal(id, 932); 26 | assert.equal(name, 'fluxible-resolver'); 27 | }) 28 | )); 29 | 30 | it('creates merge 
request markdown', () => ( 31 | gitlab.createPackageChangeMarkdown({ 32 | base: '593ddbc95d4c38130a38b73325282326110cec7f', 33 | head: '17b17563919b7915141d3c4a130916c2dd02a4ca', 34 | repo: 'fluxible-resolver' 35 | }).tap(markdown => { 36 | assert.equal(markdown, [ 37 | '### Diff', 38 | '', 39 | '[593ddbc95d4c38130a38b73325282326110cec7f...17b17563919b7915141d3c4a130916c2dd02a4ca](https://gitlab.porch.com/porchdotcom/fluxible-resolver/compare/593ddbc95d4c38130a38b73325282326110cec7f...17b17563919b7915141d3c4a130916c2dd02a4ca)', 40 | '', 41 | '### Commits', 42 | '', 43 | '- Patrick Williams- [recompose update](https://gitlab.porch.com/porchdotcom/fluxible-resolver/commit/17b17563919b7915141d3c4a130916c2dd02a4ca)' 44 | ].join('\n')); 45 | }) 46 | )); 47 | }); 48 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Up to Code 2 | =========== 3 | 4 | Up-to-code keeps your node apps up to date. Every time you publish a new 5 | library, Up-to-code will create a beautiful pull request for every node app in 6 | your organization that needs a version bump. 7 | 8 | # About 9 | 10 | Let's say you work at a company that has lots of node apps and lots of 11 | javascript libraries. When you make a change to one of those libraries, you 12 | would like your node apps to update to the latest version. Up-to-code 13 | automates this process by creating a pull request to update the version in 14 | package.json. The Up-to-code pull request will include links to easily see a 15 | diff of the updates, new version, and link to other similar pull requests. 16 | This gives the app owner a chance to review the updates and keep their project 17 | up to code. 
18 | 19 | ## Example 20 | An Up-to-code pull request contains lots of useful info: 21 | 22 | ![pull request example](example.png) 23 | 24 | 25 | # Usage 26 | 27 | ## Running with docker 28 | 29 | The easiest way to use Up-to-code is docker. After publishing a new version 30 | of awesome-js-library, you can make sure all apps are up to date by running a 31 | docker command like this: 32 | 33 | docker run \ 34 | -e npm_config_registry=http://npm.example.com/nexus/content/groups/npmall/ \ 35 | -e npm_config_email=npm@example.com \ 36 | -e npm_config_always_auth=true \ 37 | -e npm_config__auth=npmpassword \ 38 | porchdotcom/up-to-code:latest \ 39 | --package-name=awesome-js-library \ 40 | --github-org=myorg \ 41 | --github-token=abcdef12abcdef12abcdef12abcdef12abcdef12 \ 42 | --gitlab-user=uptocodeacct \ 43 | --gitlab-org=myorg \ 44 | --gitlab-token=abcd123 \ 45 | --gitlab-host=gitlab.example.com 46 | 47 | ### Docker hub 48 | 49 | The docker build [up-to-code can be found on dockerhub](https://hub.docker.com/r/porchdotcom/up-to-code/) 50 | 51 | ## Options 52 | 53 | ### --package-name 54 | 55 | The recently published library that your projects depend on. 56 | 57 | ### --github-org 58 | 59 | The name of the github organization containing your projects. 60 | 61 | ### --github-token 62 | 63 | Github token with permissions to read repos and create pull requests. 64 | 65 | ### --gitlab-user 66 | 67 | Gitlab user name that will read repos and create merge requests. 68 | 69 | ### --gitlab-token 70 | 71 | Gitlab token granting permissions to read repos and create merge requests. 72 | 73 | ### --gitlab-org 74 | 75 | The name of the gitlab organization containing your projects. 76 | 77 | ### --gitlab-host 78 | 79 | Domain name of self hosted gitlab instance. For example `gitlab.porch.com` 80 | 81 | ## Environment Variables 82 | 83 | [NPM configuration](https://docs.npmjs.com/misc/config) is passed to Up-to-code through environment variables. 
For 84 | example, if you have a private npm registry, you should configure it using `npm_config_registry=http://mynpm.example.com`. 85 | Anything required to access your npm host in your .npmrc file should be passed to Up-to-code through an environment variable. 86 | 87 | ### Commonly used npm config 88 | 89 | At a minimum, you likely need to define the following environment variables for 90 | a private npm registry: 91 | 92 | npm_config_registry 93 | npm_config__auth 94 | 95 | 96 | # Supported platforms 97 | 98 | Up-to-code can create pull requests on github and gitlab. It has been tested 99 | with sinopia and nexus npm registries. 100 | 101 | -------------------------------------------------------------------------------- /src/pkg.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import jsonFile from 'json-file-plus'; 3 | import Q from 'q'; 4 | import exec from './exec'; 5 | import semverRegex from 'semver-regex'; 6 | import { memoize } from 'lodash'; 7 | import { satisfies } from 'semver'; 8 | 9 | // update to exact versions 10 | // return whether the new version is not a major bump (ie, safe to merge without review) 11 | const exactVersion = version => { 12 | const match = version.match(semverRegex()); 13 | assert(match, `${version} must be valid semver`); 14 | return match[0]; 15 | }; 16 | const getPublishedVersion = memoize((packageName, logger) => ( 17 | exec(`npm view ${packageName} version`, { logger }).then(version => version.trim()) 18 | )); 19 | 20 | const getVersion = ({ path, packageName }) => ( 21 | Q.fcall(() => ( 22 | jsonFile(path) 23 | )).then(file => ( 24 | file.get() 25 | )).then(({ 26 | dependencies = {}, 27 | devDependencies = {}, 28 | peerDependencies = {} 29 | }) => { 30 | if (dependencies.hasOwnProperty(packageName)) { 31 | assert(!devDependencies.hasOwnProperty(packageName), `${packageName} found in both dependencies and devDependencies`); 32 | 
assert(!peerDependencies.hasOwnProperty(packageName), `${packageName} found in both dependencies and peerDependencies`); 33 | return exactVersion(dependencies[packageName]); 34 | } 35 | if (devDependencies.hasOwnProperty(packageName)) { 36 | return exactVersion(devDependencies[packageName]); 37 | } 38 | if (peerDependencies.hasOwnProperty(packageName)) { 39 | return exactVersion(peerDependencies[packageName]); 40 | } 41 | throw new Error(`${packageName} not found`); 42 | }) 43 | ); 44 | 45 | const updateVersion = ({ path, packageName, logger }) => ( 46 | Q.fcall(() => ( 47 | jsonFile(path) 48 | )).then(file => ( 49 | Q.all([ 50 | file.get(), 51 | getPublishedVersion(packageName, logger) 52 | ]).spread(({ 53 | dependencies = {}, 54 | devDependencies = {}, 55 | peerDependencies = {} 56 | }, version) => { 57 | if (dependencies.hasOwnProperty(packageName)) { 58 | assert(!satisfies(version, dependencies[packageName]), `${packageName} latest version ${version} matches existing dependency ${dependencies[packageName]}`); 59 | return file.set({ 60 | dependencies: { 61 | [packageName]: `^${version}` 62 | } 63 | }); 64 | } 65 | if (devDependencies.hasOwnProperty(packageName)) { 66 | assert(!satisfies(version, devDependencies[packageName]), `${packageName} latest version ${version} matches existing devDependency version ${devDependencies[packageName]}`); 67 | return file.set({ 68 | devDependencies: { 69 | [packageName]: `^${version}` 70 | } 71 | }); 72 | } 73 | if (peerDependencies.hasOwnProperty(packageName)) { 74 | assert(!satisfies(version, peerDependencies[packageName]), `${packageName} latest version ${version} matches existing peerDependency version ${peerDependencies[packageName]}`); 75 | return file.set({ 76 | peerDependencies: { 77 | [packageName]: `^${version}` 78 | } 79 | }); 80 | } 81 | throw new Error(`${packageName} not found`); 82 | }).then(() => ( 83 | file.save() 84 | )) 85 | )) 86 | ); 87 | 88 | export default ({ path, packageName, logger }) => ( 89 | 
Q.fcall(() => ( 90 | getVersion({ path, packageName, logger }) 91 | )).then(before => ( 92 | Q.fcall(() => ( 93 | updateVersion({ path, packageName, logger }) 94 | )).then(() => ( 95 | getVersion({ path, packageName, logger }) 96 | )).then(after => ( 97 | [before, after] 98 | )) 99 | )) 100 | ); 101 | -------------------------------------------------------------------------------- /src/github.js: -------------------------------------------------------------------------------- 1 | import Q from 'q'; 2 | import GitHubApi from 'github'; 3 | import { uniqBy } from 'lodash'; 4 | import assert from 'assert'; 5 | import { filter } from './promises'; 6 | 7 | const PAGE_LENGTH = 100; 8 | const HELPSCORE_SCM = 'helpscore-scm'; 9 | const MAX_CHANGELOG_COMMITS = 25; 10 | 11 | export default class GitHub { 12 | constructor({ token, org }) { 13 | this.api = new GitHubApi({ 14 | version: '3.0.0' 15 | }); 16 | this.api.authenticate({ 17 | type: 'token', 18 | token: token 19 | }); 20 | this.org = org; 21 | } 22 | 23 | fetchRepos({ logger }) { 24 | logger.trace('fetchRepos'); 25 | 26 | const getReposPage = page => { 27 | const defer = Q.defer(); 28 | this.api.repos.getFromOrg({ 29 | org: this.org, 30 | page: page, 31 | per_page: PAGE_LENGTH 32 | }, defer.makeNodeResolver()); 33 | return defer.promise.then(pageRepos => { 34 | if (pageRepos.length === PAGE_LENGTH) { 35 | return getReposPage(page + 1).then(nextPageRepos => [...pageRepos, ...nextPageRepos]); 36 | } 37 | return pageRepos; 38 | }); 39 | }; 40 | 41 | return Q.fcall(() => ( 42 | getReposPage(0) 43 | )).then(repos => ( 44 | uniqBy(repos, 'id') 45 | )).then(repos => ( 46 | repos.filter(({ permissions: { push }}) => !!push) 47 | )).tap(repos => ( 48 | logger.trace(`${repos.length} repos found`) 49 | )); 50 | } 51 | 52 | fetchDependantRepos({ packageName, logger }) { 53 | logger.trace(`fetchDependantRepos ${packageName}`); 54 | 55 | return Q.fcall(() => ( 56 | this.fetchRepos({ logger }) 57 | )).then(repos => ( 58 | 
repos.filter(({ language }) => /javascript/i.test(language)) 59 | )).then(repos => ( 60 | filter(repos, ({ name: repo }) => ( 61 | Q.fcall(() => { 62 | const defer = Q.defer(); 63 | this.api.repos.getContent({ 64 | user: this.org, 65 | repo, 66 | path: 'package.json' 67 | }, defer.makeNodeResolver()); 68 | return defer.promise.then(({ content, encoding }) => { 69 | return JSON.parse(new Buffer(content, encoding).toString()); 70 | }); 71 | }).then(({ dependencies = {}, devDependencies = {}, peerDependencies = {} }) => ( 72 | dependencies.hasOwnProperty(packageName) || 73 | devDependencies.hasOwnProperty(packageName) || 74 | peerDependencies.hasOwnProperty(packageName) 75 | )).catch(() => false) 76 | )) 77 | )); 78 | } 79 | 80 | createPullRequest({ body, title, head, repo, logger }) { 81 | logger.trace(`createPullRequest ${title}, ${head}, ${repo}`); 82 | 83 | return Q.fcall(() => { 84 | const defer = Q.defer(); 85 | this.api.pullRequests.getAll({ 86 | user: this.org, 87 | repo, 88 | state: 'open', 89 | head: `${this.org}:${head}` // https://mikedeboer.github.io/node-github/#api-pullRequests-getAll 90 | }, defer.makeNodeResolver()); 91 | return defer.promise; 92 | }).then(prs => { 93 | const defer = Q.defer(); 94 | if (!!prs.length) { 95 | assert.equal(prs.length, 1, `${head} not found`); 96 | 97 | const [{ number }] = prs; 98 | this.api.pullRequests.update({ 99 | user: this.org, 100 | repo, 101 | number, 102 | title, 103 | body 104 | }, defer.makeNodeResolver()); 105 | } else { 106 | this.api.pullRequests.create({ 107 | user: this.org, 108 | repo, 109 | title, 110 | base: 'master', 111 | head, 112 | body 113 | }, defer.makeNodeResolver()); 114 | } 115 | return defer.promise; 116 | }); 117 | } 118 | 119 | createPackageChangeMarkdown({ repo, head, base, logger }) { 120 | logger.trace(`createPackageChangeMarkdown ${base}, ${head}, ${repo}`); 121 | 122 | return Q.fcall(() => { 123 | const defer = Q.defer(); 124 | this.api.repos.compareCommits({ 125 | user: this.org, 126 
| repo, 127 | base, 128 | head 129 | }, defer.makeNodeResolver()); 130 | return defer.promise; 131 | }).then(({ commits }) => ([ 132 | '### Diff', 133 | `[${base}...${head}](http://github.com/${this.org}/${repo}/compare/${base}...${head})`, 134 | '### Commits', 135 | commits.map(({ 136 | commit: { 137 | author: { 138 | name 139 | } = {}, 140 | message 141 | }, 142 | html_url // eslint-disable-line camelcase 143 | }) => ( 144 | `${( 145 | name === HELPSCORE_SCM ? '' : `- __${name}__` 146 | )}- [${message.split('\n')[0]}](${html_url})` // eslint-disable-line camelcase 147 | )).reverse().slice(0, MAX_CHANGELOG_COMMITS).join('\n') 148 | ].join('\n\n'))); 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import newrelic from 'newrelic'; 2 | import Q from 'q'; 3 | import path from 'path'; 4 | import GitHub from './github'; 5 | import GitLab from './gitlab'; 6 | import assert from 'assert'; 7 | import exec from './exec'; 8 | import updateDependency from './pkg'; 9 | import { major } from 'semver'; 10 | import decorateFunctionLogger from './decorate-function-logger'; 11 | import parse from 'git-url-parse'; 12 | 13 | const GITHUB_HOSTNAME = 'github.com'; 14 | 15 | const getPackageBranchName = packageName => `up-to-code-${packageName}`; 16 | 17 | const getPackageChangeMarkdown = decorateFunctionLogger(({ base, head, packageName, gitlabHost, githubOrg, githubToken, gitlabOrg, gitlabToken, logger }) => ( 18 | Q.fcall(() => ( 19 | exec(`npm view ${packageName} repository.url`, { logger }) 20 | )).then(stdout => ( 21 | parse(stdout).resource 22 | )).then(hostname => { 23 | const isGithubHosted = hostname === GITHUB_HOSTNAME; 24 | const isGitlabHosted = hostname === gitlabHost; 25 | 26 | assert(isGithubHosted || isGitlabHosted, `git host could not be determined from package "${packageName}" repository url "${hostname}`); 27 | 
assert(!!isGithubHosted ^ !!isGitlabHosted, 'multiple git repos found'); 28 | 29 | if (isGithubHosted) { 30 | const github = new GitHub({ org: githubOrg, token: githubToken }); 31 | return github.createPackageChangeMarkdown({ repo: packageName, base, head, logger }); 32 | } 33 | if (isGitlabHosted) { 34 | const gitlab = new GitLab({ org: gitlabOrg, token: gitlabToken, host: gitlabHost }); 35 | return gitlab.createPackageChangeMarkdown({ repo: packageName, base, head, logger }); 36 | } 37 | throw new Error('git repo not found'); 38 | }).catch(err => { 39 | logger.error({ err }); 40 | throw err; 41 | }) 42 | )); 43 | 44 | const updateGithubRepoDependency = decorateFunctionLogger(({ 45 | repo, 46 | packageName, 47 | githubToken, 48 | githubOrg, 49 | gitlabHost, 50 | gitlabOrg, 51 | gitlabToken, 52 | metadata, 53 | logger 54 | }) => { 55 | logger.trace(`time to clone and update github repo ${repo}`); 56 | const cwd = `repos/github/${repo}`; 57 | return Q.fcall(() => ( 58 | logger.trace('clone') 59 | )).then(() => ( 60 | exec(`git clone --depth 1 https://${githubToken}@github.com/${githubOrg}/${repo}.git ${cwd}`, { logger }) 61 | )).then(() => ( 62 | logger.trace('checkout') 63 | )).then(() => ( 64 | exec(`git checkout -B ${getPackageBranchName(packageName)}`, { cwd, logger }) 65 | )).then(() => ( 66 | logger.trace('version bump') 67 | )).then(() => ( 68 | updateDependency({ 69 | path: path.resolve(cwd, 'package.json'), 70 | packageName, 71 | logger 72 | }) 73 | )).then(([before, after]) => ( 74 | getPackageChangeMarkdown({ 75 | packageName, 76 | base: `v${before}`, 77 | head: `v${after}`, 78 | gitlabHost, 79 | githubOrg, 80 | githubToken, 81 | gitlabOrg, 82 | gitlabToken, 83 | logger 84 | }) 85 | .catch(() => '') 86 | )).then(body => ( 87 | Q.fcall(() => { 88 | logger.trace('commit'); 89 | return exec(`git commit -a -m "Up-to-code bump of ${packageName}"`, { cwd, logger }); 90 | }).then(() => { 91 | logger.trace('push'); 92 | return exec('git push -fu origin HEAD', { 
cwd, logger }); 93 | }).then(() => { 94 | logger.trace('create pull request'); 95 | const github = new GitHub({ org: githubOrg, token: githubToken, logger }); 96 | return github.createPullRequest({ 97 | body: `${body}${metadata ? `\n\n> ${metadata}` : ''}`, 98 | title: `Up to code - ${packageName}`, 99 | head: getPackageBranchName(packageName), 100 | repo, 101 | logger 102 | }); 103 | }) 104 | )).catch(err => ( 105 | logger.error(err) 106 | )); 107 | }); 108 | 109 | export const updateGitlabRepoDependency = decorateFunctionLogger(({ 110 | repo, 111 | packageName, 112 | githubToken, 113 | githubOrg, 114 | gitlabHost, 115 | gitlabOrg, 116 | gitlabToken, 117 | gitlabUser, 118 | metadata, 119 | logger 120 | }) => { 121 | logger.trace(`time to clone and update gitlab repo ${repo}`); 122 | const cwd = `repos/gitlab/${repo}`; 123 | return Q.fcall(() => { 124 | logger.trace('clone'); 125 | return exec(`git clone --depth 1 https://${gitlabUser}:${gitlabToken}@${gitlabHost}/${gitlabOrg}/${repo}.git ${cwd}`, { logger }); 126 | }).then(() => { 127 | logger.trace('checkout'); 128 | return exec(`git checkout -B ${getPackageBranchName(packageName)}`, { cwd, logger }); 129 | }).then(() => { 130 | logger.trace('version bump'); 131 | return updateDependency({ 132 | path: path.resolve(cwd, 'package.json'), 133 | packageName, 134 | logger 135 | }); 136 | }).then(([before, after]) => { 137 | const breakingChange = major(before) !== major(after); 138 | return Q.fcall(() => ( 139 | getPackageChangeMarkdown({ 140 | packageName, 141 | base: `v${before}`, 142 | head: `v${after}`, 143 | gitlabHost, 144 | githubOrg, 145 | githubToken, 146 | gitlabOrg, 147 | gitlabToken, 148 | logger 149 | }) 150 | .catch(() => '') 151 | )).then(description => ( 152 | Q.fcall(() => { 153 | logger.trace('diff'); 154 | return exec('git diff', { cwd, logger }); 155 | }).then(() => { 156 | logger.trace('commit'); 157 | return exec(`git commit -a -m "Up to code bump of ${packageName}"`, { cwd, logger }); 158 | 
}).then(() => { 159 | logger.trace('push'); 160 | return exec(`git push -fu origin ${getPackageBranchName(packageName)}`, { cwd, logger }); 161 | }).then(() => { 162 | logger.trace('create merge request'); 163 | const gitlab = new GitLab({ org: gitlabOrg, token: gitlabToken, host: gitlabHost, logger }); 164 | return gitlab.createMergeRequest({ 165 | description: `${description}${metadata ? `\n\n> ${metadata}` : ''}`, 166 | title: `${breakingChange ? 'WIP: ' : '' }Up to code - ${packageName} v${after}`, 167 | head: getPackageBranchName(packageName), 168 | repo, 169 | accept: !breakingChange, 170 | logger 171 | }); 172 | }) 173 | )); 174 | }).catch(err => ( 175 | logger.error(err) 176 | )); 177 | }); 178 | 179 | const createNewRelicTransaction = fn => ( 180 | newrelic.createBackgroundTransaction('up-to-code', () => ( 181 | Q.fcall(() => ( 182 | fn() 183 | )).finally(() => ( 184 | newrelic.endTransaction() 185 | )) 186 | ))() 187 | ); 188 | 189 | 190 | export default decorateFunctionLogger(({ 191 | packageName, 192 | githubOrg, 193 | githubToken, 194 | gitlabOrg, 195 | gitlabToken, 196 | gitlabHost, 197 | gitlabUser, 198 | metadata, 199 | logger 200 | }) => { 201 | logger.trace(`${packageName}`); 202 | 203 | const github = new GitHub({ org: githubOrg, token: githubToken }); 204 | const gitlab = new GitLab({ org: gitlabOrg, token: gitlabToken, host: gitlabHost }); 205 | 206 | return Q.all([ 207 | Q.fcall(() => ( 208 | github.fetchDependantRepos({ packageName, logger }) 209 | )).then(githubRepos => Q.allSettled(githubRepos.map(({ name: repo }) => ( 210 | createNewRelicTransaction(() => ( 211 | updateGithubRepoDependency({ 212 | repo, 213 | packageName, 214 | githubToken, 215 | githubOrg, 216 | gitlabHost, 217 | gitlabOrg, 218 | gitlabToken, 219 | metadata, 220 | logger 221 | }) 222 | )) 223 | )))), 224 | Q.fcall(() => ( 225 | gitlab.fetchDependantRepos({ packageName, logger }) 226 | )).then(gitlabRepos => Q.allSettled(gitlabRepos.map(({ name: repo }) => ( 227 | 
createNewRelicTransaction(() => ( 228 | updateGitlabRepoDependency({ 229 | repo, 230 | packageName, 231 | githubToken, 232 | githubOrg, 233 | gitlabHost, 234 | gitlabOrg, 235 | gitlabToken, 236 | gitlabUser, 237 | metadata, 238 | logger 239 | }) 240 | )) 241 | )))) 242 | ]).finally(() => { 243 | const defer = Q.defer(); 244 | newrelic.shutdown({ collectPendingData: true }, defer.makeNodeResolver()); 245 | return defer.promise; 246 | }); 247 | }); 248 | -------------------------------------------------------------------------------- /src/gitlab.js: -------------------------------------------------------------------------------- 1 | import assert from 'assert'; 2 | import Q from 'q'; 3 | import request from 'request'; 4 | import url from 'url'; 5 | import { memoize, uniqBy } from 'lodash'; 6 | import { filter, until } from './promises'; 7 | import decorateFunctionLogger from './decorate-function-logger'; 8 | 9 | const INTERVAL = 60000; 10 | const PAGE_LENGTH = 100; 11 | const MAX_CHANGELOG_COMMITS = 25; 12 | 13 | const apiRaw = decorateFunctionLogger(({ logger, ...options }) => { 14 | const defer = Q.defer(); 15 | request(options, defer.makeNodeResolver()); 16 | return defer.promise.spread((res, body) => { 17 | logger.trace({ statusCode: res.statusCode }, 'api request'); 18 | assert(res.statusCode < 400, body); 19 | return body; 20 | }); 21 | }); 22 | 23 | const apiCached = memoize(apiRaw, ({ logger, ...options }) => JSON.stringify(options)); // eslint-disable-line no-unused-vars 24 | 25 | const api = ({ cached = false, ...options }) => cached ? 
apiCached(options) : apiRaw(options); 26 | 27 | export default class GitLab { 28 | constructor({ token, org, host }) { 29 | this.api = options => api({ 30 | json: true, 31 | baseUrl: url.format({ 32 | protocol: 'https:', 33 | host, 34 | pathname: '/api/v4' 35 | }), 36 | headers: { 37 | 'PRIVATE-TOKEN': token 38 | }, 39 | ...options 40 | }); 41 | 42 | this.paginate = ({ qs, ...options }) => { 43 | const getPage = page => ( 44 | Q.fcall(() => ( 45 | this.api({ 46 | ...options, 47 | qs: { 48 | ...qs, 49 | page, 50 | per_page: PAGE_LENGTH 51 | } 52 | }) 53 | )).then(res => { 54 | if (res.length === PAGE_LENGTH) { 55 | return getPage(page + 1).then(nextPageRes => uniqBy([...res, ...nextPageRes], 'id')); 56 | } 57 | return res; 58 | }) 59 | ); 60 | 61 | return getPage(0); 62 | }; 63 | this.host = host; 64 | this.org = org; 65 | } 66 | 67 | fetchRepo = decorateFunctionLogger(({ repo, logger }) => { 68 | logger.trace(`fetchRepo ${repo}`); 69 | 70 | return Q.fcall(() => ( 71 | this.fetchRepos({ logger }) 72 | )).then(repos => ( 73 | repos.find(({ name }) => name === repo) 74 | )).finally(() => { 75 | logger.trace(`fetchRepo ${repo} complete`); 76 | }); 77 | }); 78 | 79 | fetchRepos = decorateFunctionLogger(({ logger }) => { 80 | logger.trace('fetchRepos'); 81 | return Q.fcall(() => ( 82 | this.paginate({ 83 | logger, 84 | cached: true, 85 | uri: '/projects' 86 | }) 87 | )).then(repos => ( 88 | repos.filter(({ namespace: { name }}) => name === this.org) 89 | )).tap(repos => ( 90 | logger.trace(`${repos.length} repos found`) 91 | )); 92 | }); 93 | 94 | createPackageChangeMarkdown = decorateFunctionLogger(({ base, head, repo, logger }) => { 95 | logger.trace(`createPackageChangeMarkdown ${base} ${head} ${repo}`); 96 | 97 | return Q.fcall(() => ( 98 | this.fetchRepo({ repo, logger }) 99 | )).then(({ id }) => ( 100 | this.api({ 101 | logger, 102 | cached: true, 103 | uri: `/projects/${id}/repository/compare`, 104 | qs: { 105 | from: base, 106 | to: head 107 | } 108 | }) 109 | 
)).then(({ commits }) => ([ 110 | '### Diff', 111 | `[${base}...${head}](https://${this.host}/${this.org}/${repo}/compare/${base}...${head})`, 112 | '### Commits', 113 | commits.map(({ 114 | id, 115 | author_name: authorName, 116 | title 117 | }) => { 118 | const strippedTitle = title.replace(' [ci skip]', '').replace(' [skip ci]', ''); 119 | return `- ${authorName} - [${strippedTitle}](https://${this.host}/${this.org}/${repo}/commit/${id})`; // eslint-disable-line camelcase 120 | }).reverse().slice(0, MAX_CHANGELOG_COMMITS).join('\n') 121 | ].join('\n\n'))); 122 | }); 123 | 124 | fetchDependantRepos = decorateFunctionLogger(({ packageName, logger }) => { 125 | logger.trace(`fetchDependantRepos ${packageName}`); 126 | 127 | return Q.fcall(() => ( 128 | this.fetchRepos({ logger }) 129 | )).then(repos => ( 130 | filter(repos, ({ id }) => ( 131 | Q.fcall(() => ( 132 | this.api({ 133 | logger, 134 | cached: true, 135 | uri: `/projects/${id}/repository/files/package.json/raw`, 136 | qs: { 137 | ref: 'master' 138 | } 139 | }) 140 | )).then(({ dependencies = {}, devDependencies = {}, peerDependencies = {} }) => ( 141 | dependencies.hasOwnProperty(packageName) || 142 | devDependencies.hasOwnProperty(packageName) || 143 | peerDependencies.hasOwnProperty(packageName) 144 | )).catch(() => false) 145 | )) 146 | )); 147 | }); 148 | 149 | createMergeRequest = decorateFunctionLogger(({ description, title, head, repo, accept, logger }) => { 150 | logger.trace(`createMergeRequest ${title}, ${head}, ${repo}, ${accept}`); 151 | 152 | return Q.fcall(() => ( 153 | this.fetchRepo({ repo, logger }) 154 | )).then(({ id }) => ( 155 | Q.fcall(() => ( 156 | this.paginate({ 157 | logger, 158 | uri: `/projects/${id}/merge_requests`, 159 | qs: { 160 | state: 'opened' 161 | } 162 | }) 163 | )).then(mergeRequests => ( 164 | mergeRequests.filter(({ 165 | target_branch: targetBranch, 166 | source_branch: sourceBranch 167 | }) => ( 168 | targetBranch === 'master' && 169 | sourceBranch === head 170 | 
)) 171 | )).then(mrs => { 172 | if (!!mrs.length) { 173 | assert.equal(mrs.length, 1, `${head} not found`); 174 | 175 | const [mr] = mrs; 176 | return Q.fcall(() => ( 177 | this.api({ 178 | logger, 179 | method: 'PUT', 180 | uri: `/projects/${id}/merge_requests/${mr.id}`, 181 | body: { 182 | title, 183 | description 184 | } 185 | }) 186 | )).tap(({ 187 | title: updatedTitle, 188 | description: updatedDescription 189 | }) => { 190 | assert.equal(updatedTitle, mr.title, `title for mr ${mr.id} failed to update`); 191 | assert.equal(updatedDescription, mr.description, `description for mr ${mr.id} failed to update`); 192 | }); 193 | } 194 | return this.api({ 195 | logger, 196 | method: 'POST', 197 | uri: `/projects/${id}/merge_requests`, 198 | body: { 199 | source_branch: head, 200 | target_branch: 'master', 201 | title, 202 | description 203 | } 204 | }); 205 | }).then(mr => { 206 | if (accept) { 207 | logger.trace('accepting merge request'); 208 | 209 | const isIssueOpen = true; // https://gitlab.com/gitlab-org/gitlab-ce/issues/22740 210 | 211 | return Q.fcall(() => { 212 | return isIssueOpen && Q.delay(INTERVAL).then(() => ( 213 | this.paginate({ 214 | logger, 215 | uri: `/projects/${id}/pipelines` 216 | }) 217 | )).then(pipelines => ( 218 | pipelines.find(({ sha }) => sha === mr.sha) 219 | )).tap(pipeline => { 220 | assert(pipeline, `pipeline for ${mr.sha} required`); 221 | }).tap(() => ( 222 | logger.trace('waiting for pipeline') 223 | )).then(pipeline => ( 224 | Q.fcall(() => ( 225 | // wait for the pipeline to complete 226 | until(() => ( 227 | this.api({ 228 | logger, 229 | uri: `/projects/${id}/pipelines/${pipeline.id}` 230 | }).then(({ status }) => { 231 | logger.trace(`pipeline status ${status}`); 232 | return status !== 'running' && status !== 'pending' && status !== 'created'; 233 | }) 234 | ), INTERVAL) 235 | )).then(() => { 236 | logger.trace('pipeline no longer running'); 237 | // ensure the pipeline was successful 238 | return this.api({ 239 | logger, 
240 | uri: `/projects/${id}/pipelines/${pipeline.id}` 241 | }).then(({ status }) => { 242 | logger.trace(`pipeline status ${status}`); 243 | assert.equal(status, 'success', `pipeline ${pipeline.id} was unsuccessful with status "${status}"`); 244 | }); 245 | }).then(() => ( 246 | // ensure that the mr hasn't been updated 247 | this.api({ 248 | logger, 249 | uri: `/projects/${id}/merge_request/${mr.id}` 250 | }).then(current => { 251 | logger.trace(`merge request update check ${mr.sha} ${current.sha}`); 252 | assert.equal(current.sha, mr.sha, `pipeline for ${mr.sha} has completed, but mr is now for the git commit ${current.sha}`); 253 | }) 254 | )) 255 | )); 256 | }).then(() => ( 257 | this.api({ 258 | logger, 259 | method: 'PUT', 260 | uri: `/projects/${id}/merge_requests/${mr.id}/merge`, 261 | body: { 262 | should_remove_source_branch: true, 263 | merge_when_build_succeeds: true 264 | } 265 | }).then(() => { 266 | logger.trace(`merged merge request ${title} ${head} ${repo}`); 267 | }) 268 | )); 269 | } 270 | logger.trace('not auto merging merge request'); 271 | return Q.resolve(); 272 | }) 273 | )); 274 | }); 275 | } 276 | -------------------------------------------------------------------------------- /newrelic.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | /** 4 | * This file includes all of the configuration variables used by the Node.js 5 | * module. If there's a configurable element of the module and it's not 6 | * described in here, there's been a terrible mistake. 7 | */ 8 | exports.config = { 9 | /** 10 | * Array of application names. 11 | * 12 | * @env NEW_RELIC_APP_NAME 13 | */ 14 | app_name: [], 15 | /** 16 | * The user's license key. Must be set by per-app configuration file. 17 | * 18 | * @env NEW_RELIC_LICENSE_KEY 19 | */ 20 | license_key: '', 21 | /** 22 | * Hostname for the New Relic collector proxy. 23 | * 24 | * You shouldn't need to change this. 
25 | * 26 | * @env NEW_RELIC_HOST 27 | */ 28 | host: 'collector.newrelic.com', 29 | /** 30 | * The port on which the collector proxy will be listening. 31 | * 32 | * You shouldn't need to change this. 33 | * 34 | * @env NEW_RELIC_PORT 35 | */ 36 | port: 443, 37 | /** 38 | * Whether or not to use SSL to connect to New Relic's servers. 39 | * 40 | * @env NEW_RELIC_USE_SSL 41 | */ 42 | ssl: true, 43 | /** 44 | * Proxy url 45 | * 46 | * A proxy url can be used in place of setting 47 | * proxy_host, proxy_port, proxy_user, and proxy_pass. 48 | * 49 | * e.g. http://user:pass@host:port/ 50 | * 51 | * Setting proxy will override other proxy settings. 52 | * 53 | * @env NEW_RELIC_PROXY_URL 54 | */ 55 | proxy: '', 56 | /** 57 | * Proxy host to use to connect to the internet. 58 | * 59 | * @env NEW_RELIC_PROXY_HOST 60 | */ 61 | proxy_host: '', 62 | /** 63 | * Proxy port to use to connect to the internet. 64 | * 65 | * @env NEW_RELIC_PROXY_PORT 66 | */ 67 | proxy_port: '', 68 | /** 69 | * Proxy user name when required. 70 | * 71 | * @env NEW_RELIC_PROXY_USER 72 | */ 73 | proxy_user: '', 74 | /** 75 | * Proxy password when required. 76 | * 77 | * @env NEW_RELIC_PROXY_PASS 78 | */ 79 | proxy_pass: '', 80 | /** 81 | * Custom SSL certificates 82 | * 83 | * If your proxy uses a custom SSL certificate, you can add the CA text to 84 | * this array, one entry per certificate. 85 | * 86 | * The easiest way to do this is with `fs.readFileSync` e.g. 87 | * 88 | * certificates: [ 89 | * require('fs').readFileSync('custom.crt', 'utf8') // don't forget the utf8 90 | * ] 91 | * 92 | */ 93 | certificates: [], 94 | /** 95 | * You may want more control over how the module is configured and want to 96 | * disallow the use of New Relic's server-side configuration. To do so, set 97 | * this parameter to true. 
Some configuration information is required to make 98 | * the module work properly with the rest of New Relic, but settings such as 99 | * apdex_t and capture_params will not be override-able by New Relic with this 100 | * setting in effect. 101 | * 102 | * @env NEW_RELIC_IGNORE_SERVER_CONFIGURATION 103 | */ 104 | ignore_server_configuration: false, 105 | /** 106 | * Whether the module is enabled. 107 | * 108 | * @env NEW_RELIC_ENABLED 109 | */ 110 | agent_enabled: false, 111 | /** 112 | * The default Apdex tolerating / threshold value for applications, in 113 | * seconds. The default for Node is apdexT to 100 milliseconds, which is 114 | * lower than New Relic standard, but Node.js applications tend to be more 115 | * latency-sensitive than most. 116 | * 117 | * @env NEW_RELIC_APDEX 118 | */ 119 | apdex_t: 0.100, 120 | /** 121 | * Whether to capture parameters in the request URL in slow transaction 122 | * traces and error traces. Because this can pass sensitive data, it's 123 | * disabled by default. If there are specific parameters you want ignored, 124 | * use ignored_params. 125 | * 126 | * @env NEW_RELIC_CAPTURE_PARAMS 127 | */ 128 | capture_params: true, 129 | /** 130 | * Array of parameters you don't want captured off request URLs in slow 131 | * transaction traces and error traces. 132 | * 133 | * @env NEW_RELIC_IGNORED_PARAMS 134 | */ 135 | ignored_params: [], 136 | logging: { 137 | /** 138 | * Verbosity of the module's logging. This module uses bunyan 139 | * (https://github.com/trentm/node-bunyan) for its logging, and as such the 140 | * valid logging levels are 'fatal', 'error', 'warn', 'info', 'debug' and 141 | * 'trace'. Logging at levels 'info' and higher is very terse. For support 142 | * requests, attaching logs captured at 'trace' level are extremely helpful 143 | * in chasing down bugs. 
144 | * 145 | * @env NEW_RELIC_LOG_LEVEL 146 | */ 147 | level: 'info', 148 | /** 149 | * Where to put the log file -- by default just uses process.cwd + 150 | * 'newrelic_agent.log'. A special case is a filepath of 'stdout', 151 | * in which case all logging will go to stdout, or 'stderr', in which 152 | * case all logging will go to stderr. 153 | * 154 | * @env NEW_RELIC_LOG 155 | */ 156 | filepath: require('path').join(process.cwd(), 'newrelic_agent.log'), 157 | /** 158 | * Whether to write to a log file at all 159 | * 160 | * @env NEW_RELIC_LOG_ENABLED 161 | */ 162 | enabled: true, 163 | 164 | /** 165 | * Enables extra debugging at `warn` level. No need to enable except under 166 | * specific debugging conditions. 167 | */ 168 | diagnostics: false 169 | }, 170 | 171 | audit_log: { 172 | 173 | /** 174 | * Enables logging of out bound traffic from the Agent to the Collector. 175 | * This field is ignored if trace level logging is enabled. 176 | * With trace logging, all traffic is logged. 177 | * 178 | * @env NEW_RELIC_AUDIT_LOG_ENABLED 179 | */ 180 | enabled: false, 181 | 182 | /** 183 | * Specify which methods are logged. Used in conjuction with the audit_log flag 184 | * If audit_log is enabled and this property is empty, all methods will be logged 185 | * Otherwise, if the audit log is enabled, only the methods specified 186 | * in the filter will be logged 187 | * Methods include: error_data, metric_data, and analytic_event_data 188 | * 189 | * @env NEW_RELIC_AUDIT_LOG_ENDPOINTS 190 | */ 191 | endpoints: [] 192 | }, 193 | /** 194 | * Whether to collect & submit error traces to New Relic. 195 | * 196 | * @env NEW_RELIC_ERROR_COLLECTOR_ENABLED 197 | */ 198 | error_collector: { 199 | /** 200 | * Disabling the error tracer just means that errors aren't collected 201 | * and sent to New Relic -- it DOES NOT remove any instrumentation. 202 | */ 203 | enabled: true, 204 | /** 205 | * List of HTTP error status codes the error tracer should disregard. 
206 | * Ignoring a status code means that the transaction is not renamed to 207 | * match the code, and the request is not treated as an error by the error 208 | * collector. 209 | * 210 | * Defaults to 404 NOT FOUND. 211 | * 212 | * @env NEW_RELIC_ERROR_COLLECTOR_IGNORE_ERROR_CODES 213 | */ 214 | ignore_status_codes: [404], 215 | /** 216 | * Whether error events are collected. 217 | */ 218 | capture_events: true, 219 | /** 220 | * The agent will collect all error events up to this number per minute. 221 | * If there are more than that, a statistical sampling will be collected. 222 | * Currently this uses a reservoir sampling algorithm. 223 | * 224 | * By increasing this setting you are both increasing the memory 225 | * requirements of the agent as well as increasing the payload to the New 226 | * Relic servers. The memory concerns are something you should consider for 227 | * your own server's sake. The payload of events is compressed, but if it 228 | * grows too large the New Relic servers may reject it. 229 | */ 230 | max_event_samples_stored: 100 231 | }, 232 | /** 233 | * Options regarding collecting system information. Used for system 234 | * utilization based pricing scheme. 235 | */ 236 | utilization: { 237 | /** 238 | * This flag dictates whether the agent attempts to reach out to AWS 239 | * to get info about the vm the process is running on. 240 | * 241 | * @env NEW_RELIC_UTILIZATION_DETECT_AWS 242 | */ 243 | detect_aws: true, 244 | /** 245 | * This flag dictates whether the agent attempts to reach out to AWS 246 | * to get info about the container the process is running in. 247 | * 248 | * @env NEW_RELIC_UTILIZATION_DETECT_DOCKER 249 | */ 250 | detect_docker: true 251 | }, 252 | transaction_tracer: { 253 | /** 254 | * Whether to collect & submit slow transaction traces to New Relic. The 255 | * instrumentation is loaded regardless of this setting, as it's necessary 256 | * to gather metrics. 
Disable the agent to prevent the instrumentation from 257 | * loading. 258 | * 259 | * @env NEW_RELIC_TRACER_ENABLED 260 | */ 261 | enabled: true, 262 | /** 263 | * The duration at below which the slow transaction tracer should collect a 264 | * transaction trace. If set to 'apdex_f', the threshold will be set to 265 | * 4 * apdex_t, which with a default apdex_t value of 500 milliseconds will 266 | * be 2 seconds. 267 | * 268 | * If a time is provided, it is set in seconds. 269 | * 270 | * @env NEW_RELIC_TRACER_THRESHOLD 271 | */ 272 | transaction_threshold: 'apdex_f', 273 | /** 274 | * Increase this parameter to increase the diversity of the slow 275 | * transaction traces recorded by your application over time. Confused? 276 | * Read on. 277 | * 278 | * Transactions are named based on the request (see the README for the 279 | * details of how requests are mapped to transactions), and top_n refers to 280 | * the "top n slowest transactions" grouped by these names. The module will 281 | * only replace a recorded trace with a new trace if the new trace is 282 | * slower than the previous slowest trace of that name. The default value 283 | * for this setting is 20, as the transaction trace view page also defaults 284 | * to showing the 20 slowest transactions. 285 | * 286 | * If you want to record the absolute slowest transaction over the last 287 | * minute, set top_n to 0 or 1. This used to be the default, and has a 288 | * problem in that it will allow one very slow route to dominate your slow 289 | * transaction traces. 290 | * 291 | * The module will always record at least 5 different slow transactions in 292 | * the reporting periods after it starts up, and will reset its internal 293 | * slow trace aggregator if no slow transactions have been recorded for the 294 | * last 5 harvest cycles, restarting the aggregation process. 
295 | * 296 | * @env NEW_RELIC_TRACER_TOP_N 297 | */ 298 | top_n: 20, 299 | 300 | /** 301 | * This option affects both slow-queries and record_sql for transaction 302 | * traces. It can have one of 3 values: 'off', 'obfuscated' or 'raw' 303 | * When it is 'off' no slow queries will be captured, and backtraces 304 | * and sql will not be included in transaction traces. If it is 'raw' 305 | * or 'obfuscated' and other criteria (slow_sql.enabled etc) are met 306 | * for a query. The raw or obfuscated sql will be included in the 307 | * transaction trace and a slow query sample will be collected. 308 | */ 309 | record_sql: 'off', 310 | 311 | /** 312 | * This option affects both slow-queries and record_sql for transaction 313 | * traces. This is the minimum duration a query must take (in ms) for it 314 | * to be considered for for slow query and inclusion in transaction traces. 315 | */ 316 | explain_threshold: 500 317 | }, 318 | /** 319 | * Whether to enable internal supportability metrics and diagnostics. You're 320 | * welcome to turn these on, but they will probably be most useful to the 321 | * New Relic node engineering team. 322 | */ 323 | debug: { 324 | /** 325 | * Whether to collect and submit internal supportability metrics alongside 326 | * application performance metrics. 327 | * 328 | * @env NEW_RELIC_DEBUG_METRICS 329 | */ 330 | internal_metrics: false, 331 | /** 332 | * Traces the execution of the transaction tracer. Requires logging.level 333 | * to be set to 'trace' to provide any useful output. 334 | * 335 | * WARNING: The tracer tracing data is likely only to be intelligible to a 336 | * small number of people inside New Relic, so you should probably only 337 | * enable tracer tracing if asked to by New Relic, because it will affect 338 | * performance significantly. 339 | * 340 | * @env NEW_RELIC_DEBUG_TRACER 341 | */ 342 | tracer_tracing: false 343 | }, 344 | /** 345 | * Rules for naming or ignoring transactions. 
346 | */ 347 | rules: { 348 | /** 349 | * A list of rules of the format {pattern: 'pattern', name: 'name'} for 350 | * matching incoming request URLs and naming the associated New Relic 351 | * transactions. Both pattern and name are required. Additional attributes 352 | * are ignored. Patterns may have capture groups (following JavaScript 353 | * conventions), and names will use $1-style replacement strings. See 354 | * the documentation for addNamingRule for important caveats. 355 | * 356 | * @env NEW_RELIC_NAMING_RULES 357 | */ 358 | name: [], 359 | /** 360 | * A list of patterns for matching incoming request URLs to be ignored by 361 | * the agent. Patterns may be strings or regular expressions. 362 | * 363 | * By default, socket.io long-polling is ignored. 364 | * 365 | * @env NEW_RELIC_IGNORING_RULES 366 | */ 367 | ignore: [ 368 | '^\/socket\.io\/.*\/xhr-polling/' 369 | ] 370 | }, 371 | /** 372 | * By default, any transactions that are not affected by other bits of 373 | * naming logic (the API, rules, or metric normalization rules) will 374 | * have their names set to 'NormalizedUri/*'. Setting this value to 375 | * false will set them instead to Uri/path/to/resource. Don't change 376 | * this setting unless you understand the implications of New Relic's 377 | * metric grouping issues and are confident your application isn't going 378 | * to run afoul of them. Your application could end up getting black holed! 379 | * Nobody wants that. 380 | * 381 | * @env NEW_RELIC_ENFORCE_BACKSTOP 382 | */ 383 | enforce_backstop: true, 384 | /** 385 | * Browser Monitoring 386 | * 387 | * Browser monitoring lets you correlate transactions between the server and browser 388 | * giving you accurate data on how long a page request takes, from request, 389 | * through the server response, up until the actual page render completes. 390 | */ 391 | browser_monitoring: { 392 | 393 | /** 394 | * Enable browser monitoring header generation. 
395 | * 396 | * This does not auto-instrument, rather it enables the agent to generate headers. 397 | * The newrelic module can generate the appropriate header necessary for Browser Monitoring 411 | * This script must be manually injected into your templates, as high as possible 412 | * in the header, but _after_ any X-UA-COMPATIBLE HTTP-EQUIV meta tags. 413 | * Otherwise you may hurt IE! 414 | * 415 | * This method must be called _during_ a transaction, and must be called every 416 | * time you want to generate the headers. 417 | * 418 | * Do *not* reuse the headers between users, or even between requests. 419 | * 420 | * @env NEW_RELIC_BROWSER_MONITOR_ENABLE 421 | */ 422 | enable: true, 423 | 424 | /** 425 | * Request un-minified sources from the server. 426 | * 427 | * @env NEW_RELIC_BROWSER_MONITOR_DEBUG 428 | */ 429 | debug: false 430 | }, 431 | /** 432 | * Transaction Events 433 | * 434 | * Transaction events are sent to New Relic Insights. This event data 435 | * includes transaction timing, transaction name, and any custom parameters. 436 | * 437 | * Read more here: http://newrelic.com/insights 438 | */ 439 | transaction_events: { 440 | /** 441 | * If this is disabled, the agent does not collect, nor try to send, 442 | * analytic data. 443 | */ 444 | enabled: true, 445 | 446 | /** 447 | * The agent will collect all events up to this number per minute. If 448 | * there are more than that, a statistical sampling will be collected. 449 | */ 450 | max_samples_per_minute: 10000, 451 | 452 | /** 453 | * This is used if the agent is unable to send events to the collector. 454 | * The values from the previous harvest cycle will be merged into the next 455 | * one with this option as the limit. 456 | * 457 | * This should be *greater* than max_samples_per_minute or you'll see odd 458 | * behavior. You probably want at least double the value, but more is okay 459 | * as long as you can handle the memory overhead. 
460 | */ 461 | max_samples_stored: 20000 462 | }, 463 | 464 | /** 465 | * Custom Insights Events 466 | * 467 | * Custom insights events are JSON object that are sent to New Relic 468 | * Insights. You can tell the agent to send your custom events via the 469 | * `newrelic.recordCustomEvent()` API. These events are sampled once the max 470 | * reservoir size is reached. You can tune this setting below. 471 | * 472 | * Read more here: http://newrelic.com/insights 473 | */ 474 | custom_insights_events: { 475 | /** 476 | * If this is disabled, the agent does not collect, nor try to send, custom 477 | * event data. 478 | */ 479 | enabled: true, 480 | /** 481 | * The agent will collect all events up to this number per minute. If there 482 | * are more than that, a statistical sampling will be collected. Current 483 | * this uses a reservoir sampling algorithm. 484 | * 485 | * By increasing this setting you are both increasing the memory 486 | * requirements of the agent as well as increasing the payload to the New 487 | * Relic servers. The memory concerns are something you should consider for 488 | * your own server's sake. The payload of events is compressed, but if it 489 | * grows too large the New Relic servers may reject it. 490 | */ 491 | max_samples_stored: 1000 492 | }, 493 | /** 494 | * This is used to configure properties about the user's host name. 495 | */ 496 | process_host: { 497 | /** 498 | * Configurable display name for hosts 499 | * 500 | * @env NEW_RELIC_PROCESS_HOST_DISPLAY_NAME 501 | */ 502 | display_name: '', 503 | /** 504 | * ip address preference when creating hostnames 505 | * 506 | * @env NEW_RELIC_IPV_PREFERENCE 507 | */ 508 | ipv_preference: '4' 509 | }, 510 | 511 | 512 | /** 513 | * High Security 514 | * 515 | * High security mode (v2) is a setting which prevents any sensitive data from 516 | * being sent to New Relic. The local setting must match the server setting. 
517 | * If there is a mismatch the agent will log a message and act as if it is 518 | * disabled. 519 | * 520 | * Attributes of high security mode (when enabled): 521 | * * requires SSL 522 | * * does not allow capturing of http params 523 | * * does not allow custom params 524 | * 525 | * To read more see: https://docs.newrelic.com/docs/subscriptions/high-security 526 | */ 527 | high_security: false, 528 | 529 | /** 530 | * Labels 531 | * 532 | * An object of label names and values that will be applied to the data sent 533 | * from this agent. Both label names and label values have a maximum length of 534 | * 255 characters. This object should contain at most 64 labels. 535 | */ 536 | labels: {}, 537 | 538 | /** 539 | * These options control behavior for slow queries, but do not affect sql 540 | * nodes in transaction traces. 541 | * slow_sql.enabled enables and disables slow_sql recording 542 | * slow_sql.max_samples sets the maximum number of slow query samples that 543 | * will be collected in a single harvest cycle. 544 | */ 545 | slow_sql: { 546 | enabled: false, 547 | max_samples: 10 548 | }, 549 | 550 | /** 551 | * Controls behavior of datastore instance metrics. 552 | * 553 | * @property {bool} [instance_reporting.enabled=true] 554 | * Enables reporting the host and port/path/id of database servers. Default 555 | * is `true`. 556 | * 557 | * @property {bool} [database_name_reporting.enabled=true] 558 | * Enables reporting of database/schema names. Default is `true`. 559 | */ 560 | datastore_tracer: { 561 | instance_reporting: {enabled: true}, 562 | database_name_reporting: {enabled: true} 563 | } 564 | } --------------------------------------------------------------------------------