├── .editorconfig
├── .github
│   └── ISSUE_TEMPLATE.md
├── .gitignore
├── .npmignore
├── .travis.yml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Dockerfile
├── LICENSE
├── README.md
├── content
│   ├── diff-commits.js
│   ├── fail-issue.js
│   ├── initial-pr.js
│   ├── invalid-config-issue.js
│   ├── nodejs-deprecate-issue.js
│   ├── nodejs-release-issue.js
│   ├── payment-activated.js
│   ├── payment-required.js
│   ├── stale-initial-pr-reminder.js
│   ├── stripe-cancel-survey.js
│   ├── template.js
│   ├── timeout-issue.js
│   └── update-pr.js
├── couchdb
│   ├── config
│   │   └── .gitkeep
│   ├── installations
│   │   ├── .gitkeep
│   │   └── _design
│   │       └── by_login
│   │           └── views
│   │               └── by_login
│   │                   └── map.js
│   ├── monorepo
│   │   ├── .gitkeep
│   │   └── pouchdb.json
│   ├── npm
│   │   ├── .gitkeep
│   │   └── _design
│   │       └── monorepo-releases-by-time
│   │           └── views
│   │               └── monorepo-releases-by-time
│   │                   └── map.js
│   ├── payments
│   │   ├── .gitkeep
│   │   └── _design
│   │       └── by_stripe
│   │           └── views
│   │               └── by_stripe
│   │                   └── map.js
│   ├── repositories
│   │   ├── .gitkeep
│   │   └── _design
│   │       ├── billing
│   │       │   └── views
│   │       │       └── billing
│   │       │           ├── map.js
│   │       │           └── reduce
│   │       ├── branch_by_dependency
│   │       │   └── views
│   │       │       └── branch_by_dependency
│   │       │           └── map.js
│   │       ├── branch_by_group
│   │       │   └── views
│   │       │       └── branch_by_group
│   │       │           └── map.js
│   │       ├── branch_by_monorepo_release_group
│   │       │   └── views
│   │       │       └── branch_by_monorepo_release_group
│   │       │           └── map.js
│   │       ├── by_account
│   │       │   └── views
│   │       │       └── by_account
│   │       │           └── map.js
│   │       ├── by_branch_sha
│   │       │   └── views
│   │       │       └── by_branch_sha
│   │       │           └── map.js
│   │       ├── by_dependency
│   │       │   └── views
│   │       │       └── by_dependency
│   │       │           └── map.js
│   │       ├── by_full_name
│   │       │   └── views
│   │       │       └── by_full_name
│   │       │           └── map.js
│   │       ├── by_pr
│   │       │   └── views
│   │       │       └── by_pr
│   │       │           └── map.js
│   │       ├── initial_pr_payment
│   │       │   └── views
│   │       │       └── initial_pr_payment
│   │       │           └── map.js
│   │       ├── issue_open_by_dependency
│   │       │   └── views
│   │       │       └── issue_open_by_dependency
│   │       │           └── map.js
│   │       ├── open_initial_pr
│   │       │   └── views
│   │       │       └── open_initial_pr
│   │       │           └── map.js
│   │       ├── open_invalid_config_issue
│   │       │   └── views
│   │       │       └── open_invalid_config_issue
│   │       │           └── map.js
│   │       ├── pr_open_by_dependency
│   │       │   └── views
│   │       │       └── pr_open_by_dependency
│   │       │           └── map.js
│   │       ├── pr_open_by_dependency_and_group
│   │       │   └── views
│   │       │       └── pr_open_by_dependency_and_group
│   │       │           └── map.js
│   │       ├── private_by_account
│   │       │   └── views
│   │       │       └── private_by_account
│   │       │           └── map.js
│   │       └── repo-by-org
│   │           └── views
│   │               └── repo-by-org
│   │                   └── map.js
│   ├── token-audits
│   │   └── .gitkeep
│   └── tokens
│       └── .gitkeep
├── deploy
├── index.js
├── jest.setup.js
├── jobs
│   ├── cancel-stripe-subscription.js
│   ├── create-group-version-branch.js
│   ├── create-initial-branch.js
│   ├── create-initial-pr-comment.js
│   ├── create-initial-pr.js
│   ├── create-initial-subgroup-branch.js
│   ├── create-initial-subgroup-pr-comment.js
│   ├── create-initial-subgroup-pr.js
│   ├── create-version-branch.js
│   ├── deprecate-nodejs-version.js
│   ├── github-event.js
│   ├── github-event
│   │   ├── check_run
│   │   │   └── completed.js
│   │   ├── installation
│   │   │   ├── created.js
│   │   │   └── deleted.js
│   │   ├── installation_repositories
│   │   │   ├── added.js
│   │   │   └── removed.js
│   │   ├── issues
│   │   │   └── closed.js
│   │   ├── marketplace_purchase
│   │   │   ├── cancelled.js
│   │   │   ├── changed.js
│   │   │   └── purchased.js
│   │   ├── pull_request
│   │   │   ├── closed.js
│   │   │   └── opened.js
│   │   ├── push.js
│   │   ├── repository
│   │   │   ├── archived.js
│   │   │   └── privatized.js
│   │   └── status.js
│   ├── initial-timeout-pr.js
│   ├── invalid-config-file.js
│   ├── monorepo-supervisor.js
│   ├── payment-changed.js
│   ├── payment-required.js
│   ├── registry-change.js
│   ├── reset.js
│   ├── schedule-stale-initial-pr-reminders.js
│   ├── send-stale-initial-pr-reminder.js
│   ├── send-stripe-cancel-survey.js
│   ├── stripe-event.js
│   ├── sync-repos.js
│   ├── update-nodejs-version.js
│   └── update-payments.js
├── lib
│   ├── branches-to-delete.js
│   ├── comms.js
│   ├── create-branch.js
│   ├── dbs.js
│   ├── default-commit-messages.js
│   ├── default-pr-titles.js
│   ├── delete-branches.js
│   ├── diff-greenkeeper-json.js
│   ├── diff-package-json.js
│   ├── enterprise-setup.js
│   ├── env.js
│   ├── get-config.js
│   ├── get-diff-commits.js
│   ├── get-exec-tokens.js
│   ├── get-files.js
│   ├── get-infos.js
│   ├── get-message.js
│   ├── get-ranged-version.js
│   ├── get-release.js
│   ├── get-token.js
│   ├── github-queue.js
│   ├── github.js
│   ├── handle-branch-status.js
│   ├── invalid-config-file.js
│   ├── lockfile.js
│   ├── monorepo.js
│   ├── network-error-codes.js
│   ├── normalize-plan-name.js
│   ├── npm-registry-client.js
│   ├── on-branch-status.js
│   ├── open-issue.js
│   ├── payments.js
│   ├── repository-docs.js
│   ├── rollbar.js
│   ├── statsd.js
│   ├── updated-at.js
│   ├── upsert.js
│   ├── validate-greenkeeper-json.js
│   └── worker.js
├── package.json
├── start-couchdb
├── test
│   ├── content
│   │   ├── __snapshots__
│   │   │   └── fail-issue.js.snap
│   │   ├── fail-issue.js
│   │   ├── initial-pr.js
│   │   └── timeout-issue.js
│   ├── helpers
│   │   ├── enterprise-private-key.js
│   │   ├── module-cache-helpers.js
│   │   └── remove-if-exists.js
│   ├── jobs
│   │   ├── __snapshots__
│   │   │   ├── create-group-version-branch.js.snap
│   │   │   ├── create-initial-pr-comment.js.snap
│   │   │   ├── create-initial-pr.js.snap
│   │   │   └── create-version-branch.js.snap
│   │   ├── cancel-stripe-subscription.js
│   │   ├── create-group-version-branch.js
│   │   ├── create-initial-branch.js
│   │   ├── create-initial-pr-comment.js
│   │   ├── create-initial-pr.js
│   │   ├── create-initial-subgroup-branch.js
│   │   ├── create-initial-subgroup-pr.js
│   │   ├── create-version-branch.js
│   │   ├── deprecate-nodejs-version.js
│   │   ├── github-event.js
│   │   ├── github-event
│   │   │   ├── check_suite
│   │   │   │   └── completed.js
│   │   │   ├── installation
│   │   │   │   ├── created.js
│   │   │   │   └── deleted.js
│   │   │   ├── installation_repositories
│   │   │   │   ├── added.js
│   │   │   │   └── removed.js
│   │   │   ├── issues
│   │   │   │   └── closed.js
│   │   │   ├── marketplace_purchase
│   │   │   │   ├── cancelled.js
│   │   │   │   ├── changed.js
│   │   │   │   └── purchased.js
│   │   │   ├── pull_request
│   │   │   │   ├── closed.js
│   │   │   │   └── opened.js
│   │   │   ├── push.js
│   │   │   ├── repository
│   │   │   │   ├── archived.js
│   │   │   │   └── privatized.js
│   │   │   └── status.js
│   │   ├── initial-timeout-pr.js
│   │   ├── invalid-config-file.js
│   │   ├── monorepo-supervisor.js
│   │   ├── payment-required.js
│   │   ├── registry-change.js
│   │   ├── reset.js
│   │   ├── send-stale-initial-pr-reminder.js
│   │   ├── send-stripe-cancel-survey.js
│   │   ├── stripe-event.js
│   │   ├── update-nodejs-version.js
│   │   └── update-payments.js
│   ├── lib
│   │   ├── __snapshots__
│   │   │   └── lockfile.js.snap
│   │   ├── branches-to-delete.js
│   │   ├── create-branch
│   │   │   ├── __snapshots__
│   │   │   │   ├── create-branch.js.snap
│   │   │   │   └── yarn-workspaces.js.snap
│   │   │   ├── create-branch.js
│   │   │   └── yarn-workspaces.js
│   │   ├── dbs.js
│   │   ├── delete-branches.js
│   │   ├── diff-greenkeeper-json.js
│   │   ├── diff-package-json.js
│   │   ├── enterprise-setup.js
│   │   ├── get-config.js
│   │   ├── get-diff-commits.js
│   │   ├── get-files.js
│   │   ├── get-infos.js
│   │   ├── get-message.js
│   │   ├── get-ranged-version.js
│   │   ├── get-release.js
│   │   ├── get-token.js
│   │   ├── github.js
│   │   ├── handle-branch-status.js
│   │   ├── lockfile.js
│   │   ├── monorepo.js
│   │   ├── open-issue.js
│   │   ├── payments.js
│   │   ├── repository-docs.js
│   │   ├── updated-at.js
│   │   ├── validate-greenkeeper-json.js
│   │   └── worker.js
│   ├── readme.md
│   └── utils
│       ├── __snapshots__
│       │   └── initial-branch-utils.js.snap
│       ├── initial-branch-utils.js
│       └── utils.js
└── utils
    ├── initial-branch-utils.js
    ├── registry-change-utils.js
    └── utils.js
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 |
5 | end_of_line = lf
6 | insert_final_newline = true
7 | indent_style = space
8 | indent_size = 2
9 | trim_trailing_whitespace = true
10 |
11 | [*.md]
12 | trim_trailing_whitespace = false
13 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
9 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | .env
3 | .vscode
4 | package-lock.json
5 |
6 | # jest coverage output
7 | coverage/
8 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | *.log
2 | *.dump
3 | *.swp
4 | .DS_Store
5 | .env
6 | .travis.yml
7 | .editorconfig
8 | Dockerfile
9 | deploy
10 | test
11 | coverage
12 | start-couchdb
13 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | services:
3 | - docker
4 | cache:
5 | directories:
6 | - $HOME/.npm
7 | notifications:
8 | email: false
9 | node_js: 8
10 | before_install:
11 | - npm install -g npm@5.2.0
12 | install: npm install
13 | after_success: npm run deploy
14 |
15 | # Trigger a push build on master and greenkeeper branches + PRs build on every branches
16 | # Avoid double build on PRs (See https://github.com/travis-ci/travis-ci/issues/1147)
17 | branches:
18 | only:
19 | - master
20 | - /^greenkeeper.*$/
21 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6 |
7 | ## Our Standards
8 |
9 | Examples of behavior that contributes to creating a positive environment include:
10 |
11 | * Using welcoming and inclusive language
12 | * Being respectful of differing viewpoints and experiences
13 | * Gracefully accepting constructive criticism
14 | * Focusing on what is best for the community
15 | * Showing empathy towards other community members
16 |
17 | Examples of unacceptable behavior by participants include:
18 |
19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances
20 | * Trolling, insulting/derogatory comments, and personal or political attacks
21 | * Public or private harassment
22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission
23 | * Other conduct which could reasonably be considered inappropriate in a professional setting
24 |
25 | ## Our Responsibilities
26 |
27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28 |
29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30 |
31 | ## Scope
32 |
33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34 |
35 | ## Enforcement
36 |
37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at coc@greenkeeper.io. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38 |
39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40 |
41 | ## Attribution
42 |
43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44 |
45 | [homepage]: http://contributor-covenant.org
46 | [version]: http://contributor-covenant.org/version/1/4/
47 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mhart/alpine-node:8
2 |
3 | ARG PKG_VERSION
4 | ADD ./node_modules ./node_modules
5 | ADD greenkeeper-jobs-${PKG_VERSION}.tgz ./
6 | WORKDIR /package
7 |
8 | CMD ["npm", "start"]
9 |
--------------------------------------------------------------------------------
/content/diff-commits.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const md = require('./template')
3 |
4 | const getSha = (commit) => _.take(commit.sha, 7).join('')
5 |
6 | const commitListItem = (commit) => md`- ${md.link(md.code(getSha(commit)), commit.html_url)} ${commit.commit.message.split('\n')[0]}
`
7 |
8 | module.exports = (diff) => md`
9 | The new version differs by ${diff.total_commits} commits${diff.behind_by && ` ahead by ${diff.ahead_by}, behind by ${diff.behind_by}`}.
10 |
11 | ${_.take(diff.commits.reverse(), 15).map(commitListItem)}
12 |
13 | ${diff.commits.length > 15 && `There are ${diff.commits.length} commits in total.`}
14 |
15 | See the [full diff](${diff.html_url})
16 | `
17 |
--------------------------------------------------------------------------------
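A quick sketch of what the module above renders, using a made-up compare payload (the SHAs, URLs and commit messages are invented; `diff` mirrors the shape of GitHub's compare-commits response). Because `behind_by` is falsy here, the "ahead by …, behind by …" clause collapses to an empty string via the `md` template helper:

  const diffCommits = require('./content/diff-commits')

  // Invented payload in the shape of GitHub's "compare two commits" response
  const diff = {
    total_commits: 2,
    ahead_by: 2,
    behind_by: 0,
    html_url: 'https://github.com/owner/repo/compare/v1.0.0...v1.1.0',
    commits: [
      { sha: '1234567890abcdef', html_url: 'https://github.com/owner/repo/commit/1234567', commit: { message: 'fix: first change' } },
      { sha: 'abcdef1234567890', html_url: 'https://github.com/owner/repo/commit/abcdef1', commit: { message: 'feat: second change' } }
    ]
  }

  // Prints a markdown list of the newest commits (most recent first) plus a link to the full diff
  console.log(diffCommits(diff))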
/content/fail-issue.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const md = require('./template')
3 | const { generateGitHubCompareURL } = require('../utils/utils')
4 |
5 | const notDevDependency = ({ dependency }) => md`
6 | ${dependency} is a direct dependency of this project, and **it is very likely causing it to break**. If other packages depend on yours, this update is probably also breaking those in turn.
7 | `
8 |
9 | const devDependency = ({ dependency, dependencyType }) => md`
10 | ${dependency} is a ${dependencyType.replace(/ies$/, 'y')} of this project. It **might not break your production code or affect downstream projects**, but probably breaks your build or test tools, which may **prevent deploying or publishing**.
11 | `
12 |
13 | /*
14 | This formats both statuses and checks:
15 |
16 | status key -> check_run key
17 | state -> check_run.conclusion
18 | context -> check_run.name
19 | description -> check_run.output.summary
20 | target_url -> no equivalent, is included in summary
21 |
22 | */
23 | const individualStatusOrCheck = (status) => {
24 | let output = `- ${status.state === 'success' ? '✅' : '❌'} **${status.context}:** ${status.description} `
25 | if (status.target_url) {
26 | output += `([Details](${status.target_url})).`
27 | }
28 | return output
29 | }
30 |
31 | const headline = ({ monorepoGroupName, packageUpdateList, dependencyType, dependency, dependencyLink, oldVersionResolved, version }) => {
32 | if (monorepoGroupName) {
33 | if (packageUpdateList) {
34 | return `## There have been updates to the *${monorepoGroupName}* monorepo:\n\n + ${packageUpdateList}`
35 | } else {
36 | return `## There have been updates to the *${monorepoGroupName}* monorepo`
37 | }
38 | } else {
39 | return `## The ${dependencyType.replace('ies', 'y')} [${dependency}](${dependencyLink}) was updated from \`${oldVersionResolved}\` to \`${version}\`.`
40 | }
41 | }
42 | const ciStatuses = ({ statuses }) => md`
43 | <details>
44 | <summary>Status Details</summary>
45 |
46 | ${statuses.map(status => individualStatusOrCheck(status))}
47 | </details>
48 | `
49 |
50 | module.exports = ({ version, dependencyLink, owner, repo, base, head, dependency, oldVersionResolved, dependencyType, statuses, release, diffCommits, monorepoGroupName, packageUpdateList }) => {
51 | const compareURL = generateGitHubCompareURL(`${owner}/${repo}`, base, head)
52 | return md`
53 | ${headline({ monorepoGroupName, packageUpdateList, dependencyType, dependency, dependencyLink, oldVersionResolved, version })}
54 |
55 | 🚨 [View failing branch](${compareURL}).
56 |
57 | This version is **covered** by your **current version range** and after updating it in your project **the build failed**.
58 |
59 | ${monorepoGroupName && `This monorepo update includes releases of one or more dependencies which all belong to the [${monorepoGroupName} group definition](https://github.com/greenkeeperio/monorepo-definitions).`
60 | }
61 |
62 | ${
63 | dependencyType === 'dependencies'
64 | ? notDevDependency({ dependency })
65 | : devDependency({ dependency, dependencyType })
66 | }
67 |
68 | ${_.get(statuses, 'length') && ciStatuses({ statuses })}
69 |
70 | ---
71 |
72 | ${_.compact([release, diffCommits])}
73 |
74 |
75 | <details>
76 | <summary>FAQ and help</summary>
77 |
78 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
79 | </details>
80 |
81 | ---
82 |
83 |
84 | Your [Greenkeeper](https://greenkeeper.io) Bot :palm_tree:
85 | `
86 | }
87 |
--------------------------------------------------------------------------------
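The status/check mapping spelled out in the comment above `individualStatusOrCheck` can be restated as a tiny helper. This is only an illustration of that mapping, not code from the repo:

  // Hypothetical normalizer: reshape a GitHub check_run into the status-like object
  // that individualStatusOrCheck() consumes
  const checkRunToStatus = (checkRun) => ({
    state: checkRun.conclusion,                               // state       -> check_run.conclusion
    context: checkRun.name,                                   // context     -> check_run.name
    description: checkRun.output && checkRun.output.summary,  // description -> check_run.output.summary
    target_url: null                                          // no equivalent; the URL is expected inside the summary
  })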
/content/invalid-config-issue.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 |
3 | module.exports = (messages, isBlockingInitialPR) => {
4 | let messageList = messages.map((message, index) => {
5 | return `${index + 1}. ${message}`
6 | }).join('\n')
7 |
8 | return md`We have detected a problem with your Greenkeeper config file ${isBlockingInitialPR ? 'which is preventing Greenkeeper from opening its initial pull request' : ''} 🚨
9 |
10 | Greenkeeper currently can’t work with your \`greenkeeper.json\` config file because it is invalid. We found the following issue${messages.length === 1 ? '' : 's'}:
11 |
12 | ${messageList}
13 |
14 | Please correct ${messages.length === 1 ? 'this' : 'these'} and commit the fix to your default branch (usually master)${isBlockingInitialPR ? ' so Greenkeeper can run on this repository' : ''}. Greenkeeper will pick up your changes and try again. If in doubt, please consult the [config documentation](https://greenkeeper.io/docs.html#config).
15 |
16 | Here’s an example of a valid \`greenkeeper.json\`:
17 |
18 | \`\`\`javascript
19 | {
20 | "groups": {
21 | "frontend": {
22 | "packages": [
23 | "webapp/package.json",
24 | "cms/package.json",
25 | "analytics/package.json"
26 | ]
27 | },
28 | "build": {
29 | "packages": [
30 | "package.json"
31 | ]
32 | }
33 | },
34 | "ignore": [
35 | "standard",
36 | "eslint"
37 | ]
38 | }
39 | \`\`\`
40 |
41 | This file tells Greenkeeper to handle all dependency updates in two groups. All files in the \`frontend\` group will receive updates together, in one issue or PR, and the root-level \`package.json\` in the \`build\` group will be treated separately. In addition, Greenkeeper will never send updates for the \`standard\` and \`eslint\` packages.
42 |
43 | 🤖 🌴
44 |
45 | `
46 | }
47 |
--------------------------------------------------------------------------------
/content/nodejs-deprecate-issue.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 | const { generateGitHubCompareURL } = require('../utils/utils')
3 |
4 | // messages: { tooComplicated: 1, inRange: 1, updated: 1 }
5 | const showEngineTransformMessages = function (messages) {
6 | if (!messages) return
7 | let output = ''
8 | output += messages.updated > 0 ? `- The engines config in ${messages.updated} of your \`package.json\` files was updated to the new lowest actively supported Node.js version\n` : ''
9 | if (output === '') return
10 | return output
11 | }
12 |
13 | const showNVMRCMessage = function (nvmrcModified) {
14 | if (nvmrcModified) return '- Replaced the deprecated Node.js version in your `.nvmrc` with the new lowest actively supported one\n'
15 | }
16 |
17 | const showTravisMessage = function (travisModified) {
18 | if (travisModified) return '- Upgraded away from the deprecated version in your `.travis.yml`\n'
19 | }
20 |
21 | const showBlogpost = function (announcementURL) {
22 | if (announcementURL) return `\nYou can find out more about the deprecation and possible update strategies [in this Node.js foundation announcement](${announcementURL}).`
23 | }
24 |
25 | module.exports = ({ owner, repo, base, head, nodeVersion, codeName, newLowestVersion, newLowestCodeName, travisModified, nvmrcModified, engineTransformMessages, announcementURL }) => {
26 | const compareURL = generateGitHubCompareURL(`${owner}/${repo}`, base, head)
27 | return md`
28 | ## Version ${nodeVersion} of Node.js (code name ${codeName}) has been deprecated! 🚑
29 |
30 | This means that it is no longer maintained and will not receive any more security updates. Version ${newLowestVersion} (${newLowestCodeName}) is now the lowest actively maintained Node.js version.
31 | To see what effect this update from ${nodeVersion} to ${newLowestVersion} would have on your code, Greenkeeper has already created a branch with the following changes:
32 | ${showTravisMessage(travisModified)}${showNVMRCMessage(nvmrcModified)}${showEngineTransformMessages(engineTransformMessages)}
33 | If you’re interested in removing support for Node.js ${nodeVersion} from this repo, you can open a PR with these changes.
34 | ${showBlogpost(announcementURL)}
35 |
36 | <details>
37 | <summary>More information on this issue</summary>
38 |
39 | Greenkeeper has checked the \`engines\` key in any \`package.json\` file, the \`.nvmrc\` file, and the \`.travis.yml\` file, if present.
40 | - In \`engines\`, any occurrence of ${nodeVersion} was replaced with ${newLowestVersion}
41 | - \`.nvmrc\` was updated to Node.js ${newLowestVersion}
42 | - \`.travis.yml\` was only changed if there was a root-level \`node_js\` key that specified Node.js ${nodeVersion}. In this case, ${nodeVersion} was replaced with ${newLowestVersion}. We didn’t touch job or matrix configurations because these tend to be quite specific and complex, and it’s difficult to infer what the intentions were.
43 |
44 | For many simpler \`.travis.yml\` configurations, these changes should already suffice, but depending on what you’re doing it may require additional work or may not be applicable at all. We’re also aware that you may have good reasons to continue supporting Node.js ${nodeVersion}, which is why this was sent as an issue and not a pull request. Feel free to delete it without comment, I’m a humble robot and won’t feel rejected 🤖
45 |
46 | </details>
47 |
48 | ---
49 |
50 | <details>
51 | <summary>FAQ and help</summary>
52 |
53 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
54 | </details>
55 |
56 | ---
57 |
58 |
59 | Your [Greenkeeper](https://greenkeeper.io) Bot :palm_tree:
60 | `
61 | }
62 |
--------------------------------------------------------------------------------
/content/nodejs-release-issue.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 | const { generateGitHubCompareURL } = require('../utils/utils')
3 |
4 | // messages: { tooComplicated: 1, inRange: 1, updated: 1 }
5 | const showEngineTransformMessages = function (messages) {
6 | if (!messages) return
7 | let output = ''
8 | output += messages.updated > 0 ? `- The engines config in ${messages.updated} of your \`package.json\` files was updated to the new Node.js version\n` : ''
9 | output += messages.inRange > 0 ? `- The new Node.js version is in-range for the engines in ${messages.inRange} of your \`package.json\` files, so that was left alone\n` : ''
10 | output += messages.tooComplicated > 0 ? `- The engines config in ${messages.tooComplicated} of your \`package.json\` files was too ambiguous to be updated automatically\n` : ''
11 | if (output === '') return
12 | return output
13 | }
14 |
15 | const showNVMRCMessage = function (nvmrcModified) {
16 | if (nvmrcModified) return '- Replaced the old Node.js version in your `.nvmrc` with the new one\n'
17 | }
18 |
19 | const showTravisMessage = function (travisModified) {
20 | if (travisModified) return '- Added the new Node.js version to your `.travis.yml`\n'
21 | }
22 |
23 | module.exports = ({ owner, repo, base, head, nodeVersion, codeName, travisModified, nvmrcModified, engineTransformMessages }) => {
24 | const compareURL = generateGitHubCompareURL(`${owner}/${repo}`, base, head)
25 | return md`
26 | ## Version ${nodeVersion} of Node.js (code name ${codeName}) has been released! 🎊
27 |
28 | To see what happens to your code in Node.js ${nodeVersion}, Greenkeeper has created a branch with the following changes:
29 | ${showTravisMessage(travisModified)}${showNVMRCMessage(nvmrcModified)}${showEngineTransformMessages(engineTransformMessages)}
30 | If you’re interested in upgrading this repo to Node.js ${nodeVersion}, you can open a PR with these changes. Please note that this issue is just intended as a friendly reminder and the PR as a possible starting point for getting your code running on Node.js ${nodeVersion}.
31 |
32 | <details>
33 | <summary>More information on this issue</summary>
34 |
35 | Greenkeeper has checked the \`engines\` key in any \`package.json\` file, the \`.nvmrc\` file, and the \`.travis.yml\` file, if present.
36 | - \`engines\` was only updated if it defined a single version, not a range.
37 | - \`.nvmrc\` was updated to Node.js ${nodeVersion}
38 | - \`.travis.yml\` was only changed if there was a root-level \`node_js\` key that didn’t already include Node.js ${nodeVersion}, such as \`node\` or \`lts/*\`. In this case, the new version was appended to the list. We didn’t touch job or matrix configurations because these tend to be quite specific and complex, and it’s difficult to infer what the intentions were.
39 |
40 | For many simpler \`.travis.yml\` configurations, these changes should already suffice, but depending on what you’re doing it may require additional work or may not be applicable at all. We’re also aware that you may have good reasons to not update to Node.js ${nodeVersion}, which is why this was sent as an issue and not a pull request. Feel free to delete it without comment, I’m a humble robot and won’t feel rejected :robot:
41 |
42 | </details>
43 |
44 | ---
45 |
46 | <details>
47 | <summary>FAQ and help</summary>
48 |
49 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
50 | </details>
51 |
52 | ---
53 |
54 |
55 | Your [Greenkeeper](https://greenkeeper.io) Bot :palm_tree:
56 | `
57 | }
58 |
--------------------------------------------------------------------------------
/content/payment-activated.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 | const env = require('../lib/env')
3 |
4 | module.exports = ({ accountToken }) => {
5 | if (env.IS_ENTERPRISE) {
6 | return ''
7 | }
8 |
9 | return md`💸 Payment has been activated 💸
10 | Enabling Greenkeeper on this repository by merging this pull request might increase your monthly payment. If you’re unsure, please [check your billing status](https://account.greenkeeper.io/status?token=${accountToken})`
11 | }
12 |
--------------------------------------------------------------------------------
/content/payment-required.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 |
3 | module.exports = () =>
4 | md`🚨 You privatised your repo. 🚨
5 |
6 | Hello!
7 | You have set your repository to private. From now on Greenkeeper is no longer free. We have disabled your repo for now.
8 | Please enter your payment information at [account.greenkeeper.io](https://account.greenkeeper.io).
9 | For pricing information, please visit [https://greenkeeper.io/#pricing](https://greenkeeper.io/#pricing).
10 | `
11 |
--------------------------------------------------------------------------------
/content/stale-initial-pr-reminder.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 |
3 | module.exports = () => md`
4 | Hey there 👋,
5 |
6 | we noticed that this PR isn’t merged yet. Just to let you know: if you don't merge this PR, _Greenkeeper will not be enabled_ on this repo, and you won't receive updates for your dependencies.
7 |
8 | If you don't want to enable Greenkeeper here, just close or ignore this PR, we won't nag you again. ✌️
9 |
10 | Have a great day! 🌴
11 | `
12 |
--------------------------------------------------------------------------------
/content/stripe-cancel-survey.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 |
3 | module.exports = () => md`
4 | Hey there 👋,
5 |
6 | we noticed that you canceled your Greenkeeper subscription.
7 | To be able to improve our product and services in the future, we would be happy if you could answer a few quick questions from the cancellation survey below.
8 | http://bit.ly/2zs3ouq
9 |
10 | Have a great day! 🌴
11 | `
12 |
--------------------------------------------------------------------------------
/content/template.js:
--------------------------------------------------------------------------------
1 | const { flatten, zip } = require('lodash')
2 |
3 | function template (strings, ...keys) {
4 | keys = keys.map(key => {
5 | if (Array.isArray(key)) return key.join('\n')
6 | return key || ''
7 | })
8 | return flatten(zip(strings, keys)).join('')
9 | }
10 |
11 | template.link = (text, url) => `[${text}](${url})`
12 | template.code = (text) => '`' + text + '`'
13 |
14 | module.exports = template
15 |
--------------------------------------------------------------------------------
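A minimal usage sketch for the `md` tagged template defined above (all values invented). Arrays are joined with newlines and falsy interpolations collapse to empty strings, which is what lets the content templates write `${condition && '…'}` safely:

  const md = require('./content/template')

  const commits = ['- first commit', '- second commit']

  // Falsy interpolations disappear instead of printing "false" or "undefined"
  const line = md`${md.link('Greenkeeper', 'https://greenkeeper.io')} updated ${md.code('lodash')}${false && ' (never shown)'}`
  console.log(line) // [Greenkeeper](https://greenkeeper.io) updated `lodash`

  // Arrays are joined with newlines, handy for commit or status lists
  const list = md`${commits}`
  console.log(list) // '- first commit\n- second commit'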
/content/timeout-issue.js:
--------------------------------------------------------------------------------
1 | const md = require('./template')
2 | const env = require('../lib/env')
3 |
4 | module.exports = ({ fullName }) =>
5 | md`🚨 You need to enable Continuous Integration on Greenkeeper branches of this repository. 🚨
6 |
7 | To enable Greenkeeper, you need to make sure that a [commit status](https://help.github.com/articles/about-statuses/) is reported on all branches. This is required by Greenkeeper because it uses your CI build statuses to figure out when to notify you about breaking changes.
8 |
9 | Since we didn’t receive a CI status on the ${branchLink(fullName)} branch, it’s possible that you don’t have CI set up yet.
10 | We recommend using:
11 | - [CircleCI](https://circleci.com)
12 | - [Travis CI](https://travis-ci.org)
13 | - [Buildkite](https://buildkite.com/)
14 | - [CodeShip](https://codeship.com)
15 | - [Azure Pipelines](https://azure.microsoft.com/en-us/services/devops/pipelines)
16 | - [TeamCity](https://www.jetbrains.com/teamcity)
17 | - [Buddy](https://buddy.works)
18 | - [AppVeyor](https://www.appveyor.com)
19 | But Greenkeeper will work with every other CI service as well.
20 |
21 | If you _have_ already set up a CI for this repository, you might need to check how it’s configured. Make sure it is set to run on all new branches. If you don’t want it to run on absolutely every branch, you can whitelist branches starting with ${md.code('greenkeeper/')}.
22 |
23 | Once you have installed and configured CI on this repository correctly, you’ll need to re-trigger Greenkeeper’s initial pull request. To do this, please click the 'fix repo' button on [account.greenkeeper.io](https://account.greenkeeper.io).
24 | `
25 | function branchLink (fullName) {
26 | return md.link(
27 | md.code('greenkeeper/initial'),
28 | `${env.GITHUB_URL}/${fullName}/commits/greenkeeper/initial`
29 | )
30 | }
31 |
--------------------------------------------------------------------------------
/content/update-pr.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const md = require('./template')
3 |
4 | module.exports = ({
5 | version, dependencyLink, dependency, monorepoGroupName, release, diffCommits, oldVersionResolved, type, packageUpdateList, license, licenseHasChanged, previousLicense, publisher
6 | }) => {
7 | const hasReleaseInfo = release && diffCommits
8 | return md`
9 | ${monorepoGroupName
10 | ? `## There have been updates to the *${monorepoGroupName}* monorepo: \n\n${packageUpdateList}`
11 | : `## The ${type.replace('ies', 'y')} [${dependency}](${dependencyLink}) was updated from \`${oldVersionResolved}\` to \`${version}\`.`
12 | }
13 | ${monorepoGroupName ? 'These versions are' : 'This version is'} **not covered** by your **current version range**.
14 |
15 | If you don’t accept this pull request, your project will work just like it did before. However, you might be missing out on a bunch of new features, fixes and/or performance improvements from the dependency update.
16 | ${monorepoGroupName && `\nThis monorepo update includes releases of one or more dependencies which all belong to the [${monorepoGroupName} group definition](https://github.com/greenkeeperio/monorepo-definitions).\n`
17 | }
18 | ---
19 |
20 | ${publisher && `**Publisher:** [${publisher}](https://www.npmjs.com/~${publisher})`}
21 | ${license && `**License:** ${licenseHasChanged ? `This package’s license **has changed** from \`${previousLicense}\` to \`${license}\` in this release 🤔` : `${license}`}`}
22 |
23 | ${hasReleaseInfo
24 | ? _.compact([release, diffCommits])
25 | : `[Find out more about this release](${dependencyLink}).`
26 | }
27 |
28 | ---
29 |
30 | <details>
31 | <summary>FAQ and help</summary>
32 |
33 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
34 | </details>
35 |
36 | ---
37 |
38 |
39 | Your [Greenkeeper](https://greenkeeper.io) bot :palm_tree:
40 |
41 | `
42 | }
43 |
--------------------------------------------------------------------------------
/couchdb/config/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/config/.gitkeep
--------------------------------------------------------------------------------
/couchdb/installations/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/installations/.gitkeep
--------------------------------------------------------------------------------
/couchdb/installations/_design/by_login/views/by_login/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | emit(doc.login)
3 | }
4 |
--------------------------------------------------------------------------------
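As a rough usage sketch (not code from this repo), the view above is what makes it possible to look installation documents up by GitHub login through PouchDB/CouchDB; the login value is invented:

  const dbs = require('./lib/dbs')

  async function findInstallationsByLogin (login) {
    const { installations } = await dbs()
    // 'by_login' resolves to the _design/by_login view defined above
    const result = await installations.query('by_login', { key: login, include_docs: true })
    return result.rows.map(row => row.doc)
  }

  // findInstallationsByLogin('some-github-login').then(console.log)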
/couchdb/monorepo/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/monorepo/.gitkeep
--------------------------------------------------------------------------------
/couchdb/monorepo/pouchdb.json:
--------------------------------------------------------------------------------
1 | {
2 | "packages": [
3 | "pouchdb",
4 | "pouchdb-abstract-mapreduce",
5 | "pouchdb-adapter-fruitdown",
6 | "pouchdb-adapter-http",
7 | "pouchdb-adapter-idb",
8 | "pouchdb-adapter-indexeddb",
9 | "pouchdb-adapter-leveldb",
10 | "pouchdb-adapter-leveldb-core",
11 | "pouchdb-adapter-localstorage",
12 | "pouchdb-adapter-memory",
13 | "pouchdb-adapter-node-websql",
14 | "pouchdb-adapter-utils",
15 | "pouchdb-adapter-websql",
16 | "pouchdb-adapter-websql-core",
17 | "pouchdb-binary-utils",
18 | "pouchdb-browser",
19 | "pouchdb-changes-filter",
20 | "pouchdb-checkpointer",
21 | "pouchdb-collate",
22 | "pouchdb-collections",
23 | "pouchdb-core",
24 | "pouchdb-debug",
25 | "pouchdb-errors",
26 | "pouchdb-fetch",
27 | "pouchdb-find",
28 | "pouchdb-for-coverage",
29 | "pouchdb-generate-replication-id",
30 | "pouchdb-json",
31 | "pouchdb-mapreduce",
32 | "pouchdb-mapreduce-utils",
33 | "pouchdb-md5",
34 | "pouchdb-merge",
35 | "pouchdb-node",
36 | "pouchdb-replication",
37 | "pouchdb-selector-core",
38 | "pouchdb-utils",
39 | "sublevel-pouchdb"
40 | ]
41 | }
42 |
--------------------------------------------------------------------------------
/couchdb/npm/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/npm/.gitkeep
--------------------------------------------------------------------------------
/couchdb/npm/_design/monorepo-releases-by-time/views/monorepo-releases-by-time/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc._id.substr(0, 9) !== 'monorepo:') {
3 | return
4 | }
5 | emit(doc.updatedAt)
6 | }
7 |
--------------------------------------------------------------------------------
/couchdb/payments/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/payments/.gitkeep
--------------------------------------------------------------------------------
/couchdb/payments/_design/by_stripe/views/by_stripe/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | emit(doc.stripeSubscriptionId)
3 | }
4 |
--------------------------------------------------------------------------------
/couchdb/repositories/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/repositories/.gitkeep
--------------------------------------------------------------------------------
/couchdb/repositories/_design/billing/views/billing/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.private && doc.enabled) {
3 | emit(doc.accountId)
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/billing/views/billing/reduce:
--------------------------------------------------------------------------------
1 | _count
2 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/branch_by_dependency/views/branch_by_dependency/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'branch' || doc.referenceDeleted) return
3 | emit([doc.repositoryId, doc.dependency, doc.dependencyType])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/branch_by_group/views/branch_by_group/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'branch' || doc.referenceDeleted) return
3 | if (doc.head && typeof doc.head === 'string') {
4 | var branchName = doc.head.split('/')
5 | if (branchName[1]) {
6 | var initialGroup = branchName[1].split('initial-')
7 | if (initialGroup[1]) {
8 | emit([doc.repositoryId, initialGroup[1]])
9 | } else {
10 | emit([doc.repositoryId, branchName[1]])
11 | }
12 | }
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/branch_by_monorepo_release_group/views/branch_by_monorepo_release_group/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'branch' || doc.referenceDeleted || !doc.monorepoGroupName) return
3 | emit([doc.repositoryId, doc.monorepoGroupName])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/by_account/views/by_account/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.accountId) {
3 | emit(doc.accountId)
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/by_branch_sha/views/by_branch_sha/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'branch') return
3 | emit(doc.sha)
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/by_dependency/views/by_dependency/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'repository' || !doc.enabled || !doc.packages) return
3 | var types = ['dependencies', 'devDependencies', 'optionalDependencies', 'peerDependencies']
4 | for (var filename in doc.packages) {
5 | for (var i in types) {
6 | var type = types[i]
7 | if (!doc.packages[filename][type]) continue
8 | for (var dep in doc.packages[filename][type]) {
9 | emit(dep, {
10 | fullName: doc.fullName,
11 | accountId: doc.accountId,
12 | filename: filename,
13 | type: type,
14 | oldVersion: doc.packages[filename][type][dep]
15 | })
16 | }
17 | }
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
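To make the view above concrete: a repository document with `packages['package.json'].dependencies.lodash = '^4.17.0'` emits one row with key `'lodash'` and a value carrying `fullName`, `accountId`, `filename`, `type` and `oldVersion`. A sketch of querying it (the helper is illustrative, not repo code):

  const dbs = require('./lib/dbs')

  // Find every enabled repository that depends on a given package, in any dependency type
  async function reposDependingOn (dependency) {
    const { repositories } = await dbs()
    const result = await repositories.query('by_dependency', { key: dependency })
    return result.rows.map(row => row.value) // { fullName, accountId, filename, type, oldVersion }
  }

  // reposDependingOn('lodash').then(console.log)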
/couchdb/repositories/_design/by_full_name/views/by_full_name/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'repository') return
3 |
4 | emit(doc.fullName.toLowerCase())
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/by_pr/views/by_pr/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'pr') return
3 | emit([String(doc.repositoryId), doc.head])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/initial_pr_payment/views/initial_pr_payment/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type === 'pr' && doc.initial && doc.state === 'open') {
3 | emit(doc._id.split(':')[0])
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/issue_open_by_dependency/views/issue_open_by_dependency/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'issue' || doc.state !== 'open') return
3 | emit([doc.repositoryId, doc.dependency])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/open_initial_pr/views/open_initial_pr/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if(doc.type === 'pr' && doc.initial && doc.state === 'open' && !doc.staleInitialPRReminder) {
3 | emit(doc.createdAt);
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/open_invalid_config_issue/views/open_invalid_config_issue/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if(doc.type === 'issue' && doc.invalidConfig && doc.state === 'open') {
3 | emit(doc.repositoryId);
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/pr_open_by_dependency/views/pr_open_by_dependency/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'pr' || doc.initial || doc.state !== 'open') return
3 | emit([doc.repositoryId, doc.dependency])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/pr_open_by_dependency_and_group/views/pr_open_by_dependency_and_group/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type !== 'pr' || doc.initial || doc.state !== 'open') return
3 | emit([doc.repositoryId, doc.dependency, doc.group])
4 | }
5 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/private_by_account/views/private_by_account/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if (doc.type === 'repository' && doc.private) {
3 | emit(doc.accountId)
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/repositories/_design/repo-by-org/views/repo-by-org/map.js:
--------------------------------------------------------------------------------
1 | function (doc) {
2 | if(doc.type === "repository") {
3 | emit(doc.fullName.split("/")[0]);
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/couchdb/token-audits/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/token-audits/.gitkeep
--------------------------------------------------------------------------------
/couchdb/tokens/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/greenkeeperio/greenkeeper/4a850daea5910a1c91a663ba7be5fce7c7437bc1/couchdb/tokens/.gitkeep
--------------------------------------------------------------------------------
/deploy:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # immediately fail if an error occurs
4 | set -e
5 |
6 | export NAME=jobs
7 | export PUSH_TO_REPLICATED=true
8 | export ZDT_DEPLOYMENT=false
9 | export NO_VHOST=true
10 | export GITHUB_TOKEN=${GITHUB_TOKEN:-$GH_TOKEN}
11 |
12 | git clone https://${GITHUB_TOKEN}@github.com/neighbourhoodie/gk-deploy-scripts.git
13 |
14 | ./gk-deploy-scripts/deploy
15 |
--------------------------------------------------------------------------------
/jest.setup.js:
--------------------------------------------------------------------------------
1 | /*
2 | This module runs code to configure or set up the testing framework before each test.
3 | This script file runs immediately after the test framework has been installed in the environment.
4 | */
5 | jest.setTimeout(30000)
6 |
--------------------------------------------------------------------------------
/jobs/cancel-stripe-subscription.js:
--------------------------------------------------------------------------------
1 | const env = require('../lib/env')
2 | const stripe = require('stripe')(env.STRIPE_SECRET_KEY)
3 | const dbs = require('../lib/dbs')
4 | const upsert = require('../lib/upsert')
5 |
6 | module.exports = async function ({ accountId, stripeSubscriptionId }) {
7 | const { payments } = await dbs()
8 | await stripe.subscriptions.del(stripeSubscriptionId)
9 | await upsert(payments, accountId, {
10 | stripeSubscriptionId: null,
11 | stripeItemId: null
12 | })
13 | }
14 |
--------------------------------------------------------------------------------
/jobs/create-initial-pr-comment.js:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto')
2 | const Log = require('gk-log')
3 | const _ = require('lodash')
4 | const statsd = require('../lib/statsd')
5 | const dbs = require('../lib/dbs')
6 | const env = require('../lib/env')
7 | const githubQueue = require('../lib/github-queue')
8 | const upsert = require('../lib/upsert')
9 |
10 | const prContent = require('../content/initial-pr')
11 |
12 | module.exports = async function (
13 | { repository, branchDoc, combined, installationId, accountId, prDocId }
14 | ) {
15 | accountId = String(accountId)
16 | const { repositories } = await dbs()
17 | const logs = dbs.getLogsDb()
18 | const repositoryId = String(repository.id)
19 |
20 | const prDoc = await repositories.get(prDocId)
21 | if (prDoc.initialPrCommentSent) return
22 |
23 | const repodoc = await repositories.get(repositoryId)
24 | const log = Log({ logsDb: logs, accountId, repoSlug: repodoc.fullName, context: 'create-initial-pr-comment' })
25 | log.info('started')
26 |
27 | const [owner, repo] = repodoc.fullName.split('/')
28 | const {
29 | head,
30 | travisModified,
31 | depsUpdated,
32 | badgeAdded,
33 | badgeUrl
34 | } = branchDoc
35 |
36 | branchDoc = await upsert(repositories, branchDoc._id, {
37 | statuses: combined.statuses,
38 | processed: true,
39 | state: combined.state
40 | })
41 | log.info('branchDoc: updated to `processed: true`', { branchDoc })
42 |
43 | const ghqueue = githubQueue(installationId)
44 |
45 | const ghRepo = await ghqueue.read(github => github.repos.get({ owner, repo }))
46 | log.info('github: repository info', { repositoryInfo: ghRepo })
47 | const issue = await ghqueue.read(github => github.issues.get({
48 | owner,
49 | repo,
50 | number: prDoc.number
51 | }))
52 | log.info('github: pull request info', { pullRequestInfo: issue })
53 |
54 | if (issue.state !== 'open') {
55 | log.warn('exited: pr is closed')
56 | return
57 | }
58 |
59 | if (issue.locked) {
60 | log.warn('exited: pr is locked')
61 | return
62 | }
63 |
64 | const secret = repodoc.private &&
65 | crypto
66 | .createHmac('sha256', env.NPMHOOKS_SECRET)
67 | .update(String(installationId))
68 | .digest('hex')
69 |
70 | const accountTokenUrl = `https://account.greenkeeper.io/status?token=${repodoc.accountToken}`
71 |
72 | const files = _.get(repodoc, 'files', {})
73 |
74 | await ghqueue.write(github => github.issues.createComment({
75 | owner,
76 | repo,
77 | body: prContent({
78 | depsUpdated,
79 | ghRepo,
80 | newBranch: head,
81 | badgeUrl: badgeAdded && badgeUrl,
82 | travisModified,
83 | secret,
84 | installationId,
85 | success: combined.state === 'success',
86 | enabled: false,
87 | accountTokenUrl,
88 | files
89 | }),
90 | number: prDoc.number
91 | }))
92 | statsd.increment('initial_pullrequest_comments')
93 | log.success('success')
94 |
95 | await upsert(repositories, prDocId, {
96 | initialPrCommentSent: true
97 | })
98 | }
99 |
--------------------------------------------------------------------------------
/jobs/create-initial-subgroup-pr-comment.js:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto')
2 | const Log = require('gk-log')
3 | const _ = require('lodash')
4 | const dbs = require('../lib/dbs')
5 | const env = require('../lib/env')
6 | const githubQueue = require('../lib/github-queue')
7 | const upsert = require('../lib/upsert')
8 |
9 | const prContent = require('../content/initial-pr')
10 |
11 | module.exports = async function (
12 | { repository, branchDoc, combined, installationId, accountId, prDocId, groupName }
13 | ) {
14 | accountId = String(accountId)
15 | const { repositories } = await dbs()
16 | const logs = dbs.getLogsDb()
17 | const repositoryId = String(repository.id)
18 |
19 | const prDoc = await repositories.get(prDocId)
20 | if (prDoc.initialPrCommentSent) return
21 |
22 | const repodoc = await repositories.get(repositoryId)
23 | const log = Log({ logsDb: logs, accountId, repoSlug: repodoc.fullName, context: 'create-initial-subgroup-pr-comment' })
24 | log.info('started')
25 |
26 | const [owner, repo] = repodoc.fullName.split('/')
27 | const {
28 | head,
29 | travisModified,
30 | depsUpdated,
31 | badgeAdded,
32 | badgeUrl
33 | } = branchDoc
34 |
35 | branchDoc = await upsert(repositories, branchDoc._id, {
36 | statuses: combined.statuses,
37 | processed: true,
38 | state: combined.state
39 | })
40 | log.info('branchDoc: updated to `processed: true`', { branchDoc })
41 |
42 | const ghqueue = githubQueue(installationId)
43 |
44 | const ghRepo = await ghqueue.read(github => github.repos.get({ owner, repo }))
45 | log.info('github: repository info', { repositoryInfo: ghRepo })
46 | const issue = await ghqueue.read(github => github.issues.get({
47 | owner,
48 | repo,
49 | number: prDoc.number
50 | }))
51 | log.info('github: pull request info', { pullRequestInfo: issue })
52 |
53 | if (issue.state !== 'open') {
54 | log.warn('exited: pr is closed')
55 | return
56 | }
57 |
58 | if (issue.locked) {
59 | log.warn('exited: pr is locked')
60 | return
61 | }
62 |
63 | const secret = repodoc.private &&
64 | crypto
65 | .createHmac('sha256', env.NPMHOOKS_SECRET)
66 | .update(String(installationId))
67 | .digest('hex')
68 |
69 | const accountTokenUrl = `https://account.greenkeeper.io/status?token=${repodoc.accountToken}`
70 |
71 | const files = _.get(repodoc, 'files', {})
72 |
73 | await ghqueue.write(github => github.issues.createComment({
74 | owner,
75 | repo,
76 | body: prContent({
77 | depsUpdated,
78 | ghRepo,
79 | newBranch: head,
80 | badgeUrl: badgeAdded && badgeUrl,
81 | travisModified,
82 | secret,
83 | installationId,
84 | success: combined.state === 'success',
85 | enabled: false,
86 | accountTokenUrl,
87 | files,
88 | groupName
89 | }),
90 | number: prDoc.number
91 | }))
92 | log.success('success')
93 |
94 | await upsert(repositories, prDocId, {
95 | initialPrCommentSent: true
96 | })
97 | }
98 |
--------------------------------------------------------------------------------
/jobs/github-event.js:
--------------------------------------------------------------------------------
1 | const { resolve } = require('path')
2 |
3 | module.exports = function ({ type, action }) {
4 | const paths = [__dirname, 'github-event', type]
5 | if (action) paths.push(action)
6 | const requirePath = resolve(...paths)
7 | if (!requirePath.startsWith(__dirname)) {
8 | throw new Error('do not escape jobs folder')
9 | }
10 |
11 | try {
12 | var handler = require(requirePath)
13 | } catch (e) {
14 | if (e.code === 'MODULE_NOT_FOUND') return
15 |
16 | throw e
17 | }
18 |
19 | return handler(...arguments)
20 | }
21 |
--------------------------------------------------------------------------------
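A sketch of how this dispatcher resolves handlers; the payloads are abbreviated and only meant to show the `{ type, action }` lookup against the handler files under `jobs/github-event/`:

  const githubEvent = require('./jobs/github-event')

  async function examples () {
    // Resolves to jobs/github-event/issues/closed.js and calls it with the whole job payload
    await githubEvent({ type: 'issues', action: 'closed', issue: {}, repository: {}, installation: {} })

    // Events without an action (e.g. push) resolve to jobs/github-event/push.js
    await githubEvent({ type: 'push' })

    // Unhandled event types are ignored: the MODULE_NOT_FOUND error is swallowed and undefined is returned
    await githubEvent({ type: 'some-unhandled-event' })
  }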
/jobs/github-event/check_run/completed.js:
--------------------------------------------------------------------------------
1 | /*
2 |
3 | jobs/github-event/check_run/completed.js
4 |
5 | Receives webhook events for when a check run is completed.
6 | Docs: https://developer.github.com/v3/activity/events/types/#checkrunevent
7 |
8 | This is the handler for the `completed` action, which according to the docs doesn’t exist for this endpoint, but according to reality actually does.
9 | */
10 |
11 | const onBranchStatus = require('../../../lib/on-branch-status')
12 |
13 | module.exports = async function ({ check_run, repository, installation }) { // eslint-disable-line
14 | const { status, conclusion, head_sha } = check_run // eslint-disable-line
15 | // This shouldn’t be possible, since this is the completed event handler, but hey.
16 | if (status !== 'completed') return
17 | return onBranchStatus(repository, head_sha, installation)
18 | }
19 |
--------------------------------------------------------------------------------
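For reference, a skeletal version of the webhook payload this handler destructures, trimmed to the fields it actually reads (all values invented):

  // Invented check_run payload, reduced to what the handler uses
  const payload = {
    check_run: { status: 'completed', conclusion: 'success', head_sha: 'abc1234def' },
    repository: { id: 42, full_name: 'owner/repo' },
    installation: { id: 1 }
  }

  // require('./jobs/github-event/check_run/completed')(payload)
  // forwards (repository, head_sha, installation) to lib/on-branch-status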
/jobs/github-event/installation/created.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const Log = require('gk-log')
3 |
4 | const dbs = require('../../../lib/dbs')
5 | const getToken = require('../../../lib/get-token')
6 | const GitHub = require('../../../lib/github')
7 | const { createDocs } = require('../../../lib/repository-docs')
8 | const statsd = require('../../../lib/statsd')
9 | const upsert = require('../../../lib/upsert')
10 |
11 | module.exports = async function ({ installation }) {
12 | const { installations, repositories: reposDb } = await dbs()
13 | const logs = dbs.getLogsDb()
14 | const log = Log({
15 | logsDb: logs,
16 | accountId: installation.account.id,
17 | repoSlug: null,
18 | context: 'installation-created'
19 | })
20 |
21 | log.info('started')
22 | const docId = String(installation.account.id)
23 | const doc = await upsert(
24 | installations,
25 | docId,
26 | Object.assign(
27 | {
28 | installation: installation.id
29 | },
30 | _.pick(installation.account, ['login', 'type'])
31 | )
32 | )
33 | log.info('Installation Document created', { installation: doc })
34 |
35 | const { token } = await getToken(doc.installation)
36 | const github = GitHub({ auth: `token ${token}` })
37 |
38 | let repositories
39 | // getting installation repos from github
40 | try {
41 | // For some reason, the accept header is not part of this
42 | // Octokit API
43 | const options = github.apps.listRepos.endpoint.merge({
44 | headers: {
45 | accept: 'application/vnd.github.machine-man-preview+json'
46 | },
47 | per_page: 100
48 | })
49 | // Paginate does not actually flatten results into a single result array
50 | // as it should, according to the docs, possibly due to these:
51 | // https://github.com/octokit/rest.js/issues/1161
52 | // https://github.com/octokit/routes/issues/329
53 | const results = await github.paginate(options)
54 | // So we flatten them ourselves
55 | repositories = _.flatten(results.map((result) => result.repositories))
56 | } catch (error) {
57 | log.error('error: could not fetch repositories from GitHub', { error })
58 | }
59 |
60 |   if (!repositories || !repositories.length) {
61 | log.warn('exited: no repositories found')
62 | return
63 | }
64 |
65 | log.info(`github: fetched ${repositories.length} installation repositories`)
66 | statsd.increment('repositories', repositories.length)
67 |
68 | let repoDocs = []
69 | try {
70 | repoDocs = createDocs({
71 | repositories,
72 | accountId: doc._id
73 | })
74 | // saving installation repos to db
75 | log.info(`Preparing to write ${repoDocs.length} repoDocs to the DB`)
76 | await reposDb.bulkDocs(repoDocs)
77 | } catch (error) {
78 | log.error('error: could not write repoDocs', { error })
79 | }
80 | statsd.increment('installs')
81 | statsd.event('install')
82 |
83 | // scheduling create-initial-branch jobs
84 | log.success('starting create-initial-branch job', { repositories: repoDocs })
85 | return _(repoDocs)
86 | .map(repository => ({
87 | data: {
88 | name: 'create-initial-branch',
89 | repositoryId: repository._id,
90 | accountId: repository.accountId
91 | }
92 | }))
93 | .value()
94 | }
95 |
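Since the pagination workaround above is easy to misread, here is a minimal sketch of the shape it deals with; the result objects are invented, based on the comment in the code rather than a captured Octokit response:

    const _ = require('lodash')

    // github.paginate(options) reportedly yields one result object per page,
    // each carrying its own `repositories` array:
    const results = [
      { total_count: 3, repositories: [{ id: 1 }, { id: 2 }] },
      { total_count: 3, repositories: [{ id: 3 }] }
    ]

    // so the handler flattens the pages into a single list by hand
    const repositories = _.flatten(results.map((result) => result.repositories))
    // -> [{ id: 1 }, { id: 2 }, { id: 3 }]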
--------------------------------------------------------------------------------
/jobs/github-event/installation/deleted.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const Log = require('gk-log')
3 |
4 | const dbs = require('../../../lib/dbs')
5 | const statsd = require('../../../lib/statsd')
6 |
7 | module.exports = async function ({ installation }) {
8 | const { installations, repositories: reposDb } = await dbs()
9 | const logs = dbs.getLogsDb()
10 | const key = String(installation.account.id)
11 | const log = Log({
12 | logsDb: logs,
13 | accountId: installation.account.id,
14 | repoSlug: null,
15 | context: 'installation-deleted'
16 | })
17 | log.info('started')
18 | // deleting installation repos from db
19 | const repositories = await reposDb.query('by_account', {
20 | key,
21 | include_docs: true
22 | })
23 | log.info('database: gathering all repositories', { repositories })
24 | statsd.decrement('repositories', repositories.length)
25 | await reposDb.bulkDocs(
26 | repositories.rows.map(repo => _.assign(repo.doc, { _deleted: true }))
27 | )
28 | log.info(
29 | 'database: add `_deleted: true` to all repositories of that account'
30 | )
31 |
32 | // deleting installation from db
33 | await installations.remove(await installations.get(key))
34 | log.success('success')
35 |
36 | statsd.decrement('installs')
37 | statsd.event('uninstall')
38 | }
39 |
--------------------------------------------------------------------------------
/jobs/github-event/installation_repositories/added.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const Log = require('gk-log')
3 | const promiseRetry = require('promise-retry')
4 |
5 | const dbs = require('../../../lib/dbs')
6 | const GithubQueue = require('../../../lib/github-queue')
7 | const statsd = require('../../../lib/statsd')
8 |
9 | const { createDocs } = require('../../../lib/repository-docs')
10 |
11 | const max404Retries = 5
12 |
13 | module.exports = async function ({ installation, repositories_added }) { // eslint-disable-line
14 | const { repositories: reposDb } = await dbs()
15 | const logs = dbs.getLogsDb()
16 |
17 | const log = Log({
18 | logsDb: logs,
19 | accountId: installation.account.id,
20 | repoSlug: null,
21 | context: 'installation-repositories-added'
22 | })
23 | log.info('started', { repositories_added })
24 | if (!repositories_added.length) {
25 | log.warn('exited: no repositories selected')
26 | return
27 | }
28 | // spam :(
29 |   if (['23046691', '1623538'].includes(String(installation.account.id)) ||
30 |     (repositories_added[0] && repositories_added[0].full_name &&
31 |     (repositories_added[0].full_name.includes('dalavanmanphonsy') ||
32 |     repositories_added[0].full_name.includes('CNXTEoEorg')))) {
33 | log.warn('exited: spam')
34 | return
35 | }
36 |
37 | const repositories = await Promise.mapSeries(repositories_added, doc => {
38 | const [owner, repo] = doc.full_name.split('/')
39 | return GithubQueue(installation.id).read(github => {
40 | return promiseRetry((retry, number) => {
41 | /*
42 | if we get a 404 here, log, and try again a few times.
43 |         we’re doing the retry here so we can write job-specific logs
44 | and to keep the retry logic in lib/github.js simple
45 | */
46 | return github.repos.get({ owner, repo })
47 | .catch(error => {
48 | if (error.status === 404) {
49 | if (number === max404Retries) {
50 | // ignore and log failure here
51 |               log.warn(`repo not found on attempt #${number}: giving up`)
52 | } else {
53 | log.warn(`repo not found on attempt #${number}: retrying`)
54 | retry(error)
55 | }
56 | } else { // not a 404, throw normally
57 | throw error
58 | }
59 | })
60 | }, {
61 | retries: max404Retries,
62 | minTimeout: process.env.NODE_ENV === 'testing' ? 1 : 3000
63 | })
64 | })
65 | })
66 |
67 | log.info('added repositories', repositories)
68 |
69 | statsd.increment('repositories', repositories.length)
70 |
71 | const repoDocs = await createDocs({
72 | repositories,
73 | accountId: String(installation.account.id)
74 | })
75 |
76 | // saving installation repos to db
77 | await reposDb.bulkDocs(repoDocs)
78 |
79 | // scheduling create-initial-branch jobs
80 | log.success('starting create-initial-branch job')
81 | return _(repoDocs)
82 | .map(repository => ({
83 | data: {
84 | name: 'create-initial-branch',
85 | repositoryId: repository._id,
86 | accountId: repository.accountId
87 | }
88 | }))
89 | .value()
90 | }
91 |
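The retry loop above is the interesting part of this handler; a standalone sketch of the same pattern, with a made-up fetch function standing in for the Octokit call:

    const promiseRetry = require('promise-retry')

    const max404Retries = 5

    // fetchRepo is any function returning a Promise, e.g. () => github.repos.get(...)
    function getRepoWithRetry (fetchRepo, log = console.log) {
      return promiseRetry((retry, number) => {
        return fetchRepo().catch(error => {
          if (error.status === 404) {
            if (number === max404Retries) {
              // give up quietly: the overall promise resolves with undefined,
              // just like the handler above
              log(`repo not found on attempt #${number}: giving up`)
            } else {
              log(`repo not found on attempt #${number}: retrying`)
              retry(error)
            }
          } else {
            throw error // anything that is not a 404 fails immediately
          }
        })
      }, { retries: max404Retries, minTimeout: 3000 })
    }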
--------------------------------------------------------------------------------
/jobs/github-event/installation_repositories/removed.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const Log = require('gk-log')
3 |
4 | const dbs = require('../../../lib/dbs')
5 | const statsd = require('../../../lib/statsd')
6 | const env = require('../../../lib/env')
7 | const { maybeUpdatePaymentsJob } = require('../../../lib/payments')
8 |
9 | module.exports = async function ({ installation, repositories_removed }) { // eslint-disable-line
10 | const { repositories: reposDb } = await dbs()
11 | const logs = dbs.getLogsDb()
12 |
13 | const accountId = String(installation.account.id)
14 | const repoIds = _.map(repositories_removed, repo => String(repo.id))
15 | const log = Log({
16 | logsDb: logs,
17 | accountId: installation.account.id,
18 | repoSlug: null,
19 | context: 'installation-repositories-removed'
20 | })
21 | log.info('started', { repositories_removed })
22 | // branches and prs will only be deleted on a complete uninstall
23 | const repositories = _(
24 | (await reposDb.query('by_account', {
25 | key: accountId,
26 | include_docs: true
27 | })).rows
28 | )
29 | .map('doc')
30 | .filter(doc => repoIds.some(id => doc._id === id))
31 | .map(doc => _.assign(doc, { _deleted: true }))
32 | .value()
33 |
34 | log.info('database: add `_deleted: true` to selected repositories', {
35 | repositories
36 | })
37 | statsd.decrement('repositories', repositories.length)
38 |
39 | await reposDb.bulkDocs(repositories)
40 |
41 | if (env.IS_ENTERPRISE) {
42 | return
43 | }
44 |
45 | const hasPrivateRepos = repositories.some(repo => repo.private)
46 |
47 | log.success('starting maybeUpdatePaymentsJob', { hasPrivateRepos })
48 |
49 | return maybeUpdatePaymentsJob({ accountId, isPrivate: hasPrivateRepos })
50 | }
51 |
--------------------------------------------------------------------------------
/jobs/github-event/issues/closed.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../../../lib/dbs')
4 | const upsert = require('../../../lib/upsert')
5 |
6 | module.exports = async function ({ issue, repository }) {
7 | const { repositories } = await dbs()
8 | const logs = dbs.getLogsDb()
9 | const log = Log({ logsDb: logs, accountId: repository.owner.id, repoSlug: repository.full_name, context: 'issues-closed' })
10 | log.info('started', { issue })
11 | const issueDocId = `${repository.id}:issue:${issue.number}`
12 |
13 | try {
14 | await repositories.get(issueDocId)
15 | } catch (err) {
16 | if (err.status === 404) {
17 | log.warn('database: issue document was not found', { error: err })
18 | return
19 | }
20 | log.error('database: retrieving the issue document failed', { error: err })
21 | throw err
22 | }
23 |
24 | await upsert(repositories, issueDocId, { state: 'closed' })
25 | log.success('database: issue document successfully updated with `state: closed`')
26 | }
27 |
--------------------------------------------------------------------------------
/jobs/github-event/marketplace_purchase/cancelled.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../../../lib/dbs')
4 | const upsert = require('../../../lib/upsert')
5 |
6 | module.exports = async function ({ marketplace_purchase }) { // eslint-disable-line
7 | const { payments } = await dbs()
8 | const logs = dbs.getLogsDb()
9 | const log = Log({
10 | logsDb: logs,
11 | accountId: marketplace_purchase.account.id,
12 | repoSlug: null,
13 | context: 'marketplace-purchase-cancelled'
14 | })
15 | log.info('started', { marketplace_purchase })
16 | const accountId = String(marketplace_purchase.account.id)
17 |
18 | try {
19 | await upsert(payments, accountId, { plan: 'free' })
20 | log.success('database: paymentDoc was updated', { plan: 'free' })
21 | } catch (error) {
22 | log.error('database: could not update paymentDoc', { error })
23 | throw error
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/jobs/github-event/marketplace_purchase/changed.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../../../lib/dbs')
4 | const upsert = require('../../../lib/upsert')
5 | const normalizePlanName = require('../../../lib/normalize-plan-name')
6 |
7 | module.exports = async function ({ marketplace_purchase }) { // eslint-disable-line
8 | const { payments } = await dbs()
9 | const logs = dbs.getLogsDb()
10 | const log = Log({
11 | logsDb: logs,
12 | accountId: marketplace_purchase.account.id,
13 | repoSlug: null,
14 | context: 'marketplace-purchase-changed'
15 | })
16 | log.info('started', { marketplace_purchase })
17 | const accountId = String(marketplace_purchase.account.id)
18 |
19 | try {
20 | const plan = normalizePlanName(marketplace_purchase.plan.name)
21 | await upsert(payments, accountId, { plan })
22 | log.success('database: paymentDoc was updated', { plan })
23 | } catch (error) {
24 | log.error('database: could not update paymentDoc', { error })
25 | throw error
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/jobs/github-event/marketplace_purchase/purchased.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../../../lib/dbs')
4 | const upsert = require('../../../lib/upsert')
5 | const normalizePlanName = require('../../../lib/normalize-plan-name')
6 |
7 | module.exports = async function ({ marketplace_purchase }) { // eslint-disable-line
8 | const { payments } = await dbs()
9 | const logs = dbs.getLogsDb()
10 | const log = Log({
11 | logsDb: logs,
12 | accountId: marketplace_purchase.account.id,
13 | repoSlug: null,
14 | context: 'marketplace-purchase-created'
15 | })
16 | log.info('started', { marketplace_purchase })
17 | const accountId = String(marketplace_purchase.account.id)
18 | let paymentDoc
19 |
20 | await upsert(payments, accountId, {
21 | plan: normalizePlanName(marketplace_purchase.plan.name)
22 | })
23 |
24 | try {
25 | paymentDoc = await payments.get(String(accountId))
26 | log.success('database: paymentDoc created', { paymentDoc })
27 | } catch (error) {
28 | log.error('database: no paymentDoc created', { error })
29 | if (error.status !== 404) throw error
30 | }
31 |
32 | if (paymentDoc && paymentDoc.stripeSubscriptionId) {
33 | log.info('scheduled `cancel-stripe-subscription` job')
34 | return {
35 | data: {
36 | name: 'cancel-stripe-subscription',
37 | accountId: paymentDoc._id,
38 | stripeSubscriptionId: paymentDoc.stripeSubscriptionId
39 | }
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/jobs/github-event/pull_request/closed.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | const githubQueue = require('../../../lib/github-queue')
4 | const dbs = require('../../../lib/dbs')
5 | const upsert = require('../../../lib/upsert')
6 | const env = require('../../../lib/env')
7 | const { maybeUpdatePaymentsJob } = require('../../../lib/payments')
8 |
9 | module.exports = async function (data) {
10 | const { repositories } = await dbs()
11 | const { pull_request: pullRequest, repository, installation } = data
12 | const prDocId = `${repository.id}:pr:${pullRequest.id}`
13 |
14 | try {
15 | await repositories.get(prDocId)
16 | } catch (err) {
17 | if (err.status === 404) return
18 | throw err
19 | }
20 |
21 | const prdoc = await upsert(
22 | repositories,
23 | prDocId,
24 | _.pick(pullRequest, ['state', 'merged'])
25 | )
26 | if (!prdoc.merged || !prdoc.initial) return
27 |
28 | let repoDoc = await repositories.get(String(repository.id))
29 |
30 | const [owner, repo] = repository.full_name.split('/')
31 |
32 | repoDoc = await upsert(repositories, String(repository.id), {
33 | enabled: true
34 | })
35 | try {
36 | await githubQueue(installation.id).write(github => github.gitdata.deleteRef({
37 | owner,
38 | repo,
39 | ref: `heads/${prdoc.head}`
40 | }))
41 | } catch (e) {}
42 |
43 | if (!env.IS_ENTERPRISE) {
44 | return maybeUpdatePaymentsJob({ accountId: repoDoc.accountId, isPrivate: repoDoc.private })
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/jobs/github-event/pull_request/opened.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../lib/dbs')
2 | const env = require('../../../lib/env')
3 | const getConfig = require('../../../lib/get-config')
4 | const { getActiveBilling, getAccountNeedsMarketplaceUpgrade } = require('../../../lib/payments')
5 | const githubQueue = require('../../../lib/github-queue')
6 |
7 | module.exports = async function (data) {
8 | const { repositories } = await dbs()
9 | const { pull_request: pullRequest, repository, installation } = data
10 | const repositoryId = String(repository.id)
11 | const prDocId = `${repositoryId}:pr:${pullRequest.id}`
12 | const repoDoc = await repositories.get(repositoryId)
13 | const [owner, repo] = repository.full_name.split('/')
14 | const config = getConfig(repoDoc)
15 |
16 | const wasCreatedByGreenkeeper = pullRequest.user.type === 'Bot' && pullRequest.user.login.substr(0, 11) === 'greenkeeper'
17 | if (wasCreatedByGreenkeeper) return
18 |
19 | const isInitialGreenkeeperBranch = pullRequest.head.ref === `${config.branchPrefix}initial`
20 | if (!isInitialGreenkeeperBranch) return
21 |
22 | const accountId = repository.owner.id
23 |
24 | await repositories.put(
25 | {
26 | _id: prDocId,
27 | repositoryId,
28 | accountId: accountId,
29 | type: 'pr',
30 | initial: true,
31 | number: pullRequest.number,
32 | head: pullRequest.head.ref,
33 | state: pullRequest.state,
34 | merged: pullRequest.merged,
35 | createdAt: new Date().toJSON(),
36 | createdByUser: true
37 | }
38 | )
39 |
40 | if (!repoDoc.private || env.IS_ENTERPRISE) {
41 | return
42 | }
43 |
44 | const ghqueue = githubQueue(installation.id)
45 |
46 | const billingAccount = await getActiveBilling(accountId)
47 | const hasBillingAccount = !!billingAccount
48 | const accountNeedsMarketplaceUpgrade = await getAccountNeedsMarketplaceUpgrade(accountId)
49 |
50 | if (!hasBillingAccount || accountNeedsMarketplaceUpgrade) {
51 | const targetUrl = accountNeedsMarketplaceUpgrade ? 'https://github.com/marketplace/greenkeeper/' : 'https://account.greenkeeper.io/'
52 |
53 | await ghqueue.write(github => github.repos.createStatus({
54 | owner,
55 | repo,
56 | sha: pullRequest.head.sha,
57 | state: 'pending',
58 | context: 'greenkeeper/payment',
59 | description: 'Payment required, merging will have no effect',
60 | target_url: targetUrl
61 | }))
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/jobs/github-event/repository/archived.js:
--------------------------------------------------------------------------------
1 | /*
2 |
3 | jobs/github-event/repository/archived.js
4 |
5 | Hook receiver for the repository archived event (https://developer.github.com/v3/activity/events/types/#repositoryevent)
6 |
7 | When a repository is archived, we want to disable it so Greenkeeper will
8 | stop trying to act on it when one of its dependencies is updated.
9 |
10 | */
11 |
12 | const Log = require('gk-log')
13 |
14 | const env = require('../../../lib/env')
15 | const dbs = require('../../../lib/dbs')
16 | const { maybeUpdatePaymentsJob } = require('../../../lib/payments')
17 | const { updateDoc } = require('../../../lib/repository-docs')
18 |
19 | module.exports = async function ({ repository }) {
20 | const { repositories } = await dbs()
21 | const logs = dbs.getLogsDb()
22 | const log = Log({ logsDb: logs, accountId: repository.owner.id, repoSlug: repository.full_name, context: 'repo-archived' })
23 | log.info(`disabling ${repository.full_name}`)
24 |
25 | const repositoryId = String(repository.id)
26 | let repoDoc = await repositories.get(repositoryId)
27 | repoDoc.enabled = false
28 | repoDoc.archived = true
29 | await updateDoc(repositories, repository, repoDoc)
30 | if (!env.IS_ENTERPRISE) {
31 | return maybeUpdatePaymentsJob({ accountId: repoDoc.accountId, isPrivate: repoDoc.private })
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/jobs/github-event/repository/privatized.js:
--------------------------------------------------------------------------------
1 | /*
2 | jobs/github-event/repository/privatized.js
3 | Hook receiver for the repository privatized event (https://developer.github.com/v3/activity/events/types/#repositoryevent)
4 | */
5 |
6 | const Log = require('gk-log')
7 |
8 | const env = require('../../../lib/env')
9 | const dbs = require('../../../lib/dbs')
10 | const { maybeUpdatePaymentsJob } = require('../../../lib/payments')
11 | const { updateDoc } = require('../../../lib/repository-docs')
12 |
13 | module.exports = async function ({ repository }) {
14 | const { repositories } = await dbs()
15 | const logs = dbs.getLogsDb()
16 | const log = Log({ logsDb: logs, accountId: repository.owner.id, repoSlug: repository.full_name, context: 'repo-privatized' })
17 | log.info(`set ${repository.full_name} to private`)
18 |
19 | const repositoryId = String(repository.id)
20 | let repoDoc = await repositories.get(repositoryId)
21 | repoDoc.enabled = false
22 | repoDoc.private = true
23 | await updateDoc(repositories, repository, repoDoc)
24 |
25 | if (!env.IS_ENTERPRISE) {
26 | log.warn('payment required')
27 | return maybeUpdatePaymentsJob({ accountId: repoDoc.accountId, isPrivate: repoDoc.private, repositoryId })
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/jobs/github-event/status.js:
--------------------------------------------------------------------------------
1 | const onBranchStatus = require('../../lib/on-branch-status')
2 |
3 | module.exports = async function ({ state, sha, repository, installation }) {
4 | // not a success or failure state
5 | if (!['success', 'failure', 'error'].includes(state)) return
6 | return onBranchStatus(repository, sha, installation)
7 | }
8 |
--------------------------------------------------------------------------------
/jobs/initial-timeout-pr.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../lib/dbs')
4 | const statsd = require('../lib/statsd')
5 | const githubQueue = require('../lib/github-queue')
6 | const updatedAt = require('../lib/updated-at')
7 | const timeoutBody = require('../content/timeout-issue')
8 | const getConfig = require('../lib/get-config')
9 |
10 | module.exports = async function ({ repositoryId, accountId, repoSlug }) {
11 | const { installations, repositories } = await dbs()
12 | const installation = await installations.get(String(accountId))
13 | const installationId = installation.installation
14 | const logs = dbs.getLogsDb()
15 | const log = Log({ logsDb: logs, accountId, repoSlug, context: 'initial-timeout' })
16 | log.info(`Looking for initial PR doc for ${repoSlug}`)
17 | const initialPullRequests = await repositories.query('by_pr', {
18 | key: [String(repositoryId), 'greenkeeper/initial']
19 | })
20 | const prWasCreated = initialPullRequests.rows.length > 0
21 | if (prWasCreated) {
22 | log.success(`Found one or more initial PR docs for ${repoSlug}`, {
23 | initialPullRequests
24 | })
25 | return
26 | }
27 | log.warn(`No initial PR doc for ${repoSlug} found`)
28 |
29 | const repoDoc = await repositories.get(String(repositoryId))
30 | const { fullName } = repoDoc
31 | const [owner, repo] = fullName.split('/')
32 | const { label } = getConfig(repoDoc)
33 |
34 | const { number } = await githubQueue(installationId).write(github => github.issues.create({
35 | owner,
36 | repo,
37 | title: `Action required: Greenkeeper could not be activated 🚨`,
38 | body: timeoutBody({ fullName }),
39 | labels: [label]
40 | }))
41 | log.info(`Sent timeout issue #${number} for ${repoSlug}`, {
42 | issueDoc: `${repositoryId}:issue:${number}`,
43 | repositoryId
44 | })
45 | statsd.increment('initial_issues')
46 |
47 | await repositories.put(
48 | updatedAt({
49 | _id: `${repositoryId}:issue:${number}`,
50 | type: 'issue',
51 | initial: true,
52 | repositoryId,
53 | number,
54 | state: 'open'
55 | })
56 | )
57 | }
58 |
--------------------------------------------------------------------------------
/jobs/invalid-config-file.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | const dbs = require('../lib/dbs')
4 | const statsd = require('../lib/statsd')
5 | const githubQueue = require('../lib/github-queue')
6 | const updatedAt = require('../lib/updated-at')
7 | const invalidConfigBody = require('../content/invalid-config-issue')
8 | const getConfig = require('../lib/get-config')
9 |
10 | module.exports = async function ({ repositoryId, accountId, messages, isBlockingInitialPR }) {
11 | const { installations, repositories } = await dbs()
12 | const installation = await installations.get(String(accountId))
13 | const installationId = installation.installation
14 |
15 | const openIssues = _.get(
16 | await repositories.query('open_invalid_config_issue', {
17 | key: repositoryId,
18 | include_docs: true
19 | }),
20 | 'rows'
21 | )
22 | // don't send too many issues!
23 | if (openIssues && openIssues.length) return
24 |
25 | const repoDoc = await repositories.get(String(repositoryId))
26 | const { fullName } = repoDoc
27 | const [owner, repo] = fullName.split('/')
28 | const { label } = getConfig(repoDoc)
29 |
30 | const { number } = await githubQueue(installationId).write(github => github.issues.create({
31 | owner,
32 | repo,
33 | title: `Invalid Greenkeeper configuration file`,
34 | body: invalidConfigBody(messages, isBlockingInitialPR),
35 | labels: [label]
36 | }))
37 |
38 | statsd.increment('invalid_config_issues')
39 |
40 | await repositories.put(
41 | updatedAt({
42 | _id: `${repositoryId}:issue:${number}`,
43 | type: 'issue',
44 | initial: false,
45 | invalidConfig: true,
46 | repositoryId,
47 | number,
48 | state: 'open'
49 | })
50 | )
51 | }
52 |
--------------------------------------------------------------------------------
/jobs/monorepo-supervisor.js:
--------------------------------------------------------------------------------
1 | const Log = require('gk-log')
2 |
3 | const dbs = require('../lib/dbs')
4 | const { getMonorepoGroupNameForPackage } = require('../lib/monorepo')
5 | const { notifyAdmin } = require('../lib/comms')
6 |
7 | /*
8 |   This job runs every $interval (say, every 5 minutes).
9 |
10 | It checks npm/_design/monorepo-releases-by-time for any
11 | releases that should have gone out >= 30 minutes ago.
12 |
13 | If the view returns any results, start a registry-change job
14 |   for any of the sub-packages with the current version.
15 |
16 | registry-change will in turn call create-version-branch, which
17 | then cleans up release documents.
18 |
19 | Release documents have this structure:
20 | {
21 | _id: monorepo:monogroup:version,
22 | updatedAt: new Date().toJSON(),
23 | distTags: Array(),
24 | distTag: String()
25 | }
26 | */
27 |
28 | const { pendingMonorepoReleases } = require('../lib/monorepo')
29 |
30 | async function sendSlackNotification (dependency) {
31 | const groupName = await getMonorepoGroupNameForPackage(dependency)
32 | const message = `There has been an incomplete release of the monorepo \`${groupName}\`, not all modules listed in the monorepo definition have been released together. This _may_ mean that the release definition for this monorepo is out of date.`
33 | notifyAdmin(message)
34 | }
35 |
36 | module.exports = async function () {
37 | const logs = dbs.getLogsDb()
38 | const log = Log({ logsDb: logs, accountId: null, repoSlug: null, context: 'monorepo-supervisor' })
39 |
40 | const releases = await pendingMonorepoReleases()
41 |
42 | log.info(`starting ${releases.length} monorepo releases`, { releases })
43 |
44 | const jobs = releases.map((release) => {
45 |     // We don't want this for now:
46 |     // remove the if condition below to re-activate Slack notifications
47 | if (release.slack) sendSlackNotification(release.dependency)
48 |
49 | return {
50 | data: {
51 | name: 'registry-change',
52 | dependency: release.dependency,
53 | distTags: release.distTags,
54 | distTag: release.distTag,
55 | versions: release.versions,
56 | force: true
57 | }
58 | }
59 | })
60 |
61 | log.info(`created ${jobs.length} jobs`, { jobs })
62 |
63 | return jobs
64 | }
65 |
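To make the comment block at the top of this file more concrete, an invented pending release and the job it turns into; the exact document shape (dependency, versions, slack) is inferred from how the code reads it, not taken from a real database record:

    // an overdue monorepo release document, roughly as described above
    const release = {
      _id: 'monorepo:pouchdb:7.0.0',
      updatedAt: '2019-05-01T12:00:00.000Z',
      dependency: 'pouchdb-core',
      distTags: ['latest'],
      distTag: 'latest',
      versions: { '7.0.0': {} },
      slack: false
    }

    // the supervisor turns it into one forced registry-change job
    const job = {
      data: {
        name: 'registry-change',
        dependency: release.dependency,
        distTags: release.distTags,
        distTag: release.distTag,
        versions: release.versions,
        force: true
      }
    }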
--------------------------------------------------------------------------------
/jobs/payment-changed.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | const env = require('../lib/env')
4 | const dbs = require('../lib/dbs')
5 | const githubQueue = require('../lib/github-queue')
6 | const paymentActivatedText = require('../content/payment-activated')
7 |
8 | module.exports = async function ({ accountId }) {
9 | if (env.IS_ENTERPRISE) {
10 | // not sure what called me, but I should not run
11 | return
12 | }
13 |
14 | const { installations, repositories, payments } = await dbs()
15 | const installation = await installations.get(accountId)
16 | const ghqueue = githubQueue(installation.installation)
17 |
18 | let payment = {}
19 | try {
20 | payment = await payments.get(String(accountId))
21 | } catch (e) {}
22 |
23 | if (payment.stripeSubscriptionId) {
24 | await paymentAdded({ repositories, accountId, ghqueue })
25 | } else {
26 | throw new Error('No payment')
27 | }
28 | }
29 |
30 | async function paymentAdded ({ repositories, accountId, ghqueue }) {
31 | const dbResult = _.map(
32 | (await repositories.query('private_by_account', {
33 | key: accountId,
34 | include_docs: true
35 | })).rows,
36 | 'doc'
37 | )
38 | const allRepos = _.keyBy(dbResult, '_id')
39 | const initialPrs = _.get(
40 | await repositories.query('initial_pr_payment', {
41 | keys: _.keys(allRepos),
42 | include_docs: true
43 | }),
44 | 'rows'
45 | )
46 |
47 | for (let pr of initialPrs) {
48 | const { head, number, state } = pr.doc
49 | if (state !== 'open') continue
50 | const repoDoc = allRepos[pr.key]
51 | const accountToken = repoDoc.accountToken
52 | const [owner, repo] = repoDoc.fullName.split('/')
53 | const sha = _.get(
54 | await ghqueue.write(github => github.gitdata.getRef({ owner, repo, ref: `heads/${head}` })),
55 | 'object.sha'
56 | )
57 |
58 | if (!sha) throw new Error('Missing sha')
59 |
60 | await setSuccessStatus({ ghqueue, owner, repo, sha, accountToken })
61 | await commentPaymentWarning({ ghqueue, owner, repo, number, accountToken })
62 | }
63 | }
64 |
65 | async function setSuccessStatus ({ ghqueue, owner, repo, sha, accountToken }) {
66 | await ghqueue.write(github => github.repos.createStatus({
67 | owner,
68 | repo,
69 | sha,
70 | state: 'success',
71 | target_url: `https://account.greenkeeper.io?token=${accountToken}`,
72 | description: 'Payment has been activated',
73 | context: 'greenkeeper/payment'
74 | }))
75 | }
76 |
77 | async function commentPaymentWarning (
78 | { ghqueue, owner, repo, number, accountToken }
79 | ) {
80 | await ghqueue.write(github => github.issues.createComment({
81 | owner,
82 | repo,
83 | number,
84 | body: paymentActivatedText({ accountToken })
85 | }))
86 | }
87 |
--------------------------------------------------------------------------------
/jobs/payment-required.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../lib/dbs')
2 | const githubQueue = require('../lib/github-queue')
3 | const updatedAt = require('../lib/updated-at')
4 | const getConfig = require('../lib/get-config')
5 | const paymentRequiredBody = require('../content/payment-required')
6 |
7 | module.exports = async function ({ accountId, repositoryId }) {
8 | const { installations, repositories } = await dbs()
9 | const installation = await installations.get(String(accountId))
10 | const installationId = installation.installation
11 |
12 | const repoDoc = await repositories.get(String(repositoryId))
13 | const { fullName } = repoDoc
14 | const [owner, repo] = fullName.split('/')
15 | const { label } = getConfig(repoDoc)
16 |
17 | const { number } = await githubQueue(installationId).write(github => github.issues.create({
18 | owner,
19 | repo,
20 | title: 'Payment required',
21 | body: paymentRequiredBody(),
22 | labels: [label]
23 | }))
24 |
25 | await repositories.put(
26 | updatedAt({
27 | _id: `${repositoryId}:issue:${number}`,
28 | type: 'issue',
29 | initial: false,
30 | repositoryId,
31 | number,
32 | state: 'open'
33 | })
34 | )
35 | }
36 |
--------------------------------------------------------------------------------
/jobs/schedule-stale-initial-pr-reminders.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../lib/dbs')
2 |
3 | module.exports = async function () {
4 | const { repositories } = await dbs()
5 | const minAgeInDays = 7
6 | const maxAgeInDays = 14
7 | const startDate = new Date(Date.now() - maxAgeInDays * 24 * 60 * 60 * 1000).toJSON()
8 | const endDate = new Date(Date.now() - minAgeInDays * 24 * 60 * 60 * 1000).toJSON()
9 |
10 | const stalePRs = await repositories.query('open_initial_pr', {
11 | startkey: startDate,
12 | endkey: endDate,
13 | inclusive_end: true,
14 | include_docs: true
15 | })
16 | return stalePRs.rows.map(function (row) {
17 | return {
18 | data: {
19 | name: 'send-stale-initial-pr-reminder',
20 | prNumber: row.doc.number,
21 | repositoryId: row.doc.repositoryId,
22 | accountId: row.doc.accountId
23 | }
24 | }
25 | })
26 | }
27 |
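A worked example of the reminder window above, with an arbitrarily chosen "today"; the open_initial_pr view is presumably keyed by the PR's creation date:

    const DAY = 24 * 60 * 60 * 1000
    const now = new Date('2019-06-15T00:00:00.000Z').getTime()

    new Date(now - 14 * DAY).toJSON() // startkey: '2019-06-01T00:00:00.000Z'
    new Date(now - 7 * DAY).toJSON()  // endkey:   '2019-06-08T00:00:00.000Z'

    // only still-open initial PRs created between 7 and 14 days ago fall into
    // [startkey, endkey], so each of them gets a send-stale-initial-pr-reminder job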
--------------------------------------------------------------------------------
/jobs/send-stale-initial-pr-reminder.js:
--------------------------------------------------------------------------------
1 | const githubQueue = require('../lib/github-queue')
2 | const dbs = require('../lib/dbs')
3 | const Log = require('gk-log')
4 | const upsert = require('../lib/upsert')
5 | const statsd = require('../lib/statsd')
6 | const staleInitialPRReminderComment = require('../content/stale-initial-pr-reminder')
7 |
8 | module.exports = async function (
9 | { prNumber, repositoryId, accountId }
10 | ) {
11 | accountId = String(accountId)
12 | repositoryId = String(repositoryId)
13 |
14 | const { installations, repositories } = await dbs()
15 | const logs = dbs.getLogsDb()
16 | const installation = await installations.get(accountId)
17 | const repository = await repositories.get(repositoryId)
18 | const installationId = installation.installation
19 | const ghqueue = githubQueue(installationId)
20 |
21 | const log = Log({ logsDb: logs, accountId, repoSlug: repository.fullName, context: 'send-stale-initial-pr-reminder' })
22 |
23 | log.info('started')
24 |
25 | if (repository.enabled) {
26 | log.info('stopped: repository enabled')
27 | return
28 | }
29 |
30 | if (repository.staleInitialPRReminder) {
31 | log.info('stopped: stale PR reminder already sent')
32 | return
33 | }
34 |
35 | const [owner, repo] = repository.fullName.split('/')
36 |
37 | const issue = await ghqueue.read(github => github.issues.get({
38 | owner,
39 | repo,
40 | number: prNumber
41 | }))
42 |
43 | if (issue.state !== 'open' || issue.locked) {
44 | log.info('stopped: issue closed or locked')
45 | return
46 | }
47 |
48 | await ghqueue.write(github => github.issues.createComment({
49 | owner,
50 | repo,
51 | number: prNumber,
52 | body: staleInitialPRReminderComment()
53 | }))
54 |
55 | try {
56 | await upsert(repositories, repositoryId, {
57 | staleInitialPRReminder: true
58 | })
59 | } catch (e) {
60 | log.warn('db: upsert failed', { repositoryId })
61 | throw e
62 | }
63 |
64 | statsd.increment('stale-initial-pr-reminder')
65 | }
66 |
--------------------------------------------------------------------------------
/jobs/send-stripe-cancel-survey.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../lib/dbs')
2 | const env = require('../lib/env')
3 | const stripe = require('stripe')(env.STRIPE_SECRET_KEY)
4 | const nodemailer = require('nodemailer')
5 | const content = require('../content/stripe-cancel-survey')
6 | // get the paymentsDoc with the accountId
7 | // if the document still has a stripeSubscriptionId -> exit
8 |
9 | module.exports = async function ({ accountId, stripeSubscriptionId }) {
10 | const { payments } = await dbs()
11 | try {
12 | const paymentsDoc = await payments.get(accountId)
13 | if (paymentsDoc.stripeSubscriptionId) return
14 | } catch (e) {
15 | if (e.status === 404) return
16 | throw e
17 | }
18 |
19 | // get subscription from stripe
20 | const subscription = await stripe.subscriptions.retrieve(stripeSubscriptionId)
21 | if (subscription.canceled_at === null) return
22 | // with the subscription get the user from stripe
23 | const customer = await stripe.customers.retrieve(subscription.customer)
24 | if (!customer.email) return
25 | // send email
26 | const transporter = nodemailer.createTransport({
27 | host: env.EMAIL_HOST,
28 | port: env.EMAIL_PORT,
29 | auth: {
30 | user: env.EMAIL_USER,
31 | pass: env.EMAIL_PASSWORD
32 | }
33 | })
34 |
35 | const message = {
36 | to: customer.email,
37 | from: env.EMAIL_FROM,
38 | subject: 'Thank you for trying Greenkeeper',
39 | text: content()
40 | }
41 |
42 | await new Promise((resolve, reject) => {
43 | transporter.sendMail(message, (error, info) => {
44 | if (error) {
45 | reject(error)
46 | } else {
47 | resolve(info)
48 | }
49 | })
50 | })
51 | }
52 |
--------------------------------------------------------------------------------
/jobs/stripe-event.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../lib/dbs')
2 | const env = require('../lib/env')
3 | const stripe = require('stripe')(env.STRIPE_SECRET_KEY)
4 | const _ = require('lodash')
5 |
6 | const events = [
7 | 'customer.subscription.created',
8 | 'customer.subscription.deleted'
9 | ]
10 | module.exports = async function ({ id }) {
11 | const { payments } = await dbs()
12 | const { type, data } = await stripe.events.retrieve(id)
13 | if (!_.includes(events, type)) return
14 | const subscriptionId = data.object.id
15 | const paymentDoc = _.get(
16 | await payments.query('by_stripe', {
17 | key: subscriptionId,
18 | include_docs: true
19 | }),
20 | 'rows[0].doc'
21 | )
22 | // TODO: retry in case this is a race condition
23 | if (!paymentDoc) throw new Error('no payment in database')
24 |
25 | if (type === 'customer.subscription.created') {
26 | return {
27 | data: {
28 | name: 'payment-changed',
29 | accountId: paymentDoc._id
30 | }
31 | }
32 | }
33 | if (type === 'customer.subscription.deleted') {
34 | await payments.put(
35 | _.assign(paymentDoc, {
36 | stripeSubscriptionId: null,
37 | plan: 'free',
38 | repos: 1
39 | })
40 | )
41 |
42 | return {
43 | data: {
44 | name: 'send-stripe-cancel-survey',
45 | stripeSubscriptionId: subscriptionId,
46 | accountId: paymentDoc._id
47 | }
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/jobs/sync-repos.js:
--------------------------------------------------------------------------------
1 | const redis = require('redis')
2 | const { promisify } = require('util')
3 | const Log = require('gk-log')
4 | const { flatten } = require('lodash')
5 |
6 | const dbs = require('../lib/dbs')
7 | const { createDocs } = require('../lib/repository-docs')
8 | const GitHub = require('../lib/github')
9 | const getToken = require('../lib/get-token')
10 |
11 | module.exports = async function ({ accountId }) {
12 | const { installations, repositories } = await dbs()
13 | const logs = dbs.getLogsDb()
14 | const log = Log({ logsDb: logs, accountId: accountId, repoSlug: null, context: 'sync-repos' })
15 | log.info(`started`)
16 |
17 | const installation = await installations.get(String(accountId))
18 | const installationId = installation.installation
19 |
20 |   let dbRepos = []
21 |   let gitHubRepos = []
22 | let accountName
23 | try {
24 | const repoDoc = await installations.get(String(accountId))
25 | accountName = repoDoc.login
26 | const { rows: currentRepos } = await repositories.query('repo-by-org/repo-by-org', {
27 | key: accountName,
28 | reduce: false,
29 | include_docs: true
30 | })
31 | dbRepos = currentRepos.map(repo => repo.doc.fullName.split('/')[1])
32 | } catch (error) {
33 | log.error('Could not get repos from database', { error: error.message })
34 | }
35 |
36 | try {
37 | const { token } = await getToken(Number(installationId))
38 | const github = GitHub({ auth: `token ${token}` })
39 | const options = github.apps.listRepos.endpoint.merge({
40 | headers: {
41 | accept: 'application/vnd.github.machine-man-preview+json'
42 | },
43 | per_page: 100
44 | })
45 |
46 | // Paginate does not actually flatten results into a single result array
47 | // as it should, according to the docs, possibly due to these:
48 | // https://github.com/octokit/rest.js/issues/1161
49 | // https://github.com/octokit/routes/issues/329
50 | const results = await github.paginate(options)
51 | // So we flatten them ourselves
52 | gitHubRepos = flatten(results.map((result) => result.repositories))
53 | } catch (error) {
54 | log.error('Could not get repos from Github', { error: error.message })
55 | }
56 | log.info('There are more active repos on GitHub than in our database.')
57 |
58 | // create missing repositories
59 | let reposToCreate = []
60 | gitHubRepos.map(ghRepo => {
61 | if (!dbRepos.includes(ghRepo.name)) {
62 | reposToCreate.push(ghRepo)
63 | }
64 | })
65 |
66 | log.info('Starting to create missing repoDocs')
67 | try {
68 | const repoDocs = await createDocs({
69 | repositories: reposToCreate,
70 | accountId: String(accountId)
71 | })
72 | await repositories.bulkDocs(repoDocs)
73 | } catch (error) {
74 | log.warn('Could not create repository', { error: error.message })
75 | }
76 |
77 | const client = redis.createClient(process.env.REDIS_URL)
78 | const del = promisify(client.del.bind(client))
79 | await del(`sync_${accountName}`)
80 | }
81 |
--------------------------------------------------------------------------------
/jobs/update-payments.js:
--------------------------------------------------------------------------------
1 | const env = require('../lib/env')
2 | const { getActiveBilling, getAmountOfCurrentlyPrivateAndEnabledRepos } = require('../lib/payments')
3 | const stripe = require('stripe')(env.STRIPE_SECRET_KEY)
4 |
5 | module.exports = async ({ accountId }) => {
6 | const billingAccount = await getActiveBilling(accountId)
7 | // ignore non-stripe users
8 |   // checking for stripeSubscriptionId instead of stripeItemId because
9 |   // jobs/stripe-event.js L33-L40 only sets stripeSubscriptionId to null on cancellation
10 | if (!billingAccount || !billingAccount.stripeSubscriptionId) return
11 |
12 | const currentlyPrivateAndEnabledRepos = await getAmountOfCurrentlyPrivateAndEnabledRepos(accountId)
13 |
14 | // charge for new repo from Stripe
15 | const baseRepos = (['org', 'org_eur', 'org_year', 'org_year_eur'].includes(billingAccount.plan)) ? 10 : 0
16 | const newQuantity = Math.max(baseRepos, currentlyPrivateAndEnabledRepos)
17 | await stripe.subscriptionItems.update(billingAccount.stripeItemId, {
18 | quantity: newQuantity
19 | })
20 | }
21 |
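A small worked example of the quantity calculation above (numbers invented); the hard-coded 10 suggests that the base price of the org plans already covers ten private repos:

    const baseRepos = 10 // org, org_eur, org_year, org_year_eur

    Math.max(baseRepos, 7)  // -> 10: still inside the plan's included amount
    Math.max(baseRepos, 14) // -> 14: Stripe gets billed for 14 repos

    // all other plans have baseRepos = 0 and are billed per private, enabled repo
    Math.max(0, 3)          // -> 3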
--------------------------------------------------------------------------------
/lib/comms.js:
--------------------------------------------------------------------------------
1 | const env = require('../lib/env')
2 | const slackNotify = require('slack-notify')
3 |
4 | const notifyAdmin = (message) => {
5 | // TODO: send email notifications to us/Enterprise admins with env.ADMIN_EMAIL or similar
6 | const payload = {
7 | channel: '#gk-dev',
8 | username: 'Greenkeeper Admin Notification',
9 | icon_emoji: ':incoming_envelope:',
10 | attachments: [
11 | {
12 | fallback: message,
13 | color: 'warning',
14 | title: 'Incomplete monorepo release',
15 | text: message
16 | }
17 | ]
18 | }
19 | sendSlackMessage(payload)
20 | }
21 |
22 | const sendSlackMessage = (payload) => {
23 | if (!env.SLACK_HOOK) return
24 | const slackResponse = slackNotify(env.SLACK_HOOK)
25 | slackResponse.send(payload)
26 | }
27 |
28 | module.exports = {
29 | notifyAdmin
30 | }
31 |
--------------------------------------------------------------------------------
/lib/dbs.js:
--------------------------------------------------------------------------------
1 | const { resolve } = require('url')
2 |
3 | global.Promise = require('bluebird')
4 |
5 | const _ = require('lodash')
6 | const bootstrap = require('couchdb-bootstrap')
7 | const PouchDB = require('pouchdb-http')
8 | .plugin(require('pouchdb-mapreduce'))
9 | .plugin(require('pouchdb-upsert'))
10 | const { promisify } = require('bluebird')
11 | const promiseRetry = require('promise-retry')
12 | const errorCodes = require('../lib/network-error-codes')
13 |
14 | const env = require('./env')
15 |
16 | const retryMethods = [
17 | 'upsert',
18 | 'get',
19 | 'put',
20 | 'post',
21 | 'remove',
22 | 'bulkDocs',
23 | 'allDocs',
24 | 'query',
25 | 'bulkGet'
26 | ]
27 | const retryHandler = {
28 | get: (target, name) => function (...args) {
29 | const original = target[name]
30 |
31 | if (!retryMethods.includes(name)) return original.apply(target, args)
32 |
33 | return promiseRetry(
34 | retry => {
35 | return original.apply(target, args).catch(err => {
36 | const type = err.code || err.message
37 | if (!errorCodes.includes(type)) {
38 | throw err
39 | }
40 |
41 | retry(err)
42 | })
43 | },
44 | {
45 | retries: 5,
46 | minTimeout: 3000
47 | }
48 | )
49 | }
50 | }
51 |
52 | function getLogsDbName () {
53 | const date = new Date()
54 | return 'logs-' + date.toISOString().substr(0, 7) + (env.isProduction ? '' : '-staging')
55 | }
56 |
57 | let currentLogsDbName = getLogsDbName()
58 | let currentLogsDb
59 |
60 | function getLogsDb () {
61 | const logsDbName = getLogsDbName()
62 | if (!currentLogsDb || logsDbName !== currentLogsDbName) {
63 | // either we are just starting up and there is no db
64 |     // instance yet, or the month has rolled over, so
65 | // we need a new db instance
66 | const db = new PouchDB(resolve(env.COUCH_URL, logsDbName))
67 | currentLogsDb = new Proxy(db, retryHandler)
68 | currentLogsDbName = logsDbName
69 | }
70 | return currentLogsDb
71 | }
72 |
73 | async function getDb () {
74 | const result = await promisify(bootstrap)(env.COUCH_URL, 'couchdb', {
75 | mapDbName: dbname => dbname + (env.isProduction ? '' : (env.NODE_ENV === 'testing' ? '-testing-' + process.pid : '-staging')),
76 | concurrency: 1
77 | })
78 |
79 | return _(result.push)
80 | .mapValues((v, name) => new PouchDB(resolve(env.COUCH_URL, name)))
81 | .mapValues(db => new Proxy(db, retryHandler))
82 | .mapKeys((v, name) => {
83 | return name
84 | })
85 | .mapKeys((v, name) => name.replace('-staging', '').replace(/-testing-\d+/, ''))
86 | .value()
87 | }
88 |
89 | module.exports = _.memoize(getDb)
90 | module.exports.getLogsDb = getLogsDb
91 |
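A tiny sketch of the monthly logs-db naming done by getLogsDbName above (the date is invented); the surrounding Proxy then retries the listed PouchDB methods up to five times on known network error codes:

    const date = new Date('2019-04-23T10:00:00.000Z')

    'logs-' + date.toISOString().substr(0, 7) // -> 'logs-2019-04'
    // non-production environments get '-staging' appended: 'logs-2019-04-staging'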
--------------------------------------------------------------------------------
/lib/default-commit-messages.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-template-curly-in-string */
2 | const defaultCommitMessages = {
3 | addConfigFile: 'chore: add Greenkeeper config file',
4 | updateConfigFile: 'chore: update Greenkeeper config file',
5 | initialBadge: 'docs(readme): add Greenkeeper badge',
6 | initialDependencies: 'chore(package): update dependencies',
7 | initialBranches: 'chore(travis): whitelist greenkeeper branches',
8 | dependencyUpdate: 'fix(package): update ${dependency} to version ${version}',
9 | devDependencyUpdate: 'chore(package): update ${dependency} to version ${version}',
10 | lockfileUpdate: 'chore(package): update lockfile ${lockfilePath}',
11 | dependencyPin: 'fix: pin ${dependency} to ${oldVersion}',
12 | devDependencyPin: 'chore: pin ${dependency} to ${oldVersion}',
13 | // Conditionally appended to dependencyUpdate
14 | closes: '\n\nCloses #${number}'
15 | }
16 | /* eslint-enable no-template-curly-in-string */
17 |
18 | module.exports = { defaultCommitMessages }
19 |
--------------------------------------------------------------------------------
/lib/default-pr-titles.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-template-curly-in-string */
2 | const defaultPrTitles = {
3 | initialPR: 'Update dependencies to enable Greenkeeper 🌴',
4 | initialPrBadge: 'Add badge to enable Greenkeeper 🌴',
5 | initialPrBadgeOnly: 'Add Greenkeeper badge 🌴',
6 | initialSubgroupPR: 'Update dependencies for ${group} 🌴',
7 | basicPR: 'Update ${dependency} to the latest version 🚀',
8 | groupPR: 'Update ${dependency} in group ${group} to the latest version 🚀'
9 | }
10 | /* eslint-enable no-template-curly-in-string */
11 |
12 | module.exports = { defaultPrTitles }
13 |
--------------------------------------------------------------------------------
/lib/delete-branches.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../lib/dbs')
2 | const updatedAt = require('../lib/updated-at')
3 | const githubQueue = require('./github-queue')
4 |
5 | module.exports = async function (
6 | { installationId, fullName, repositoryId },
7 | branch
8 | ) {
9 | const { repositories } = await dbs()
10 | const [owner, repo] = fullName.split('/')
11 | if (!branch) return
12 | let referenceDeleted = false
13 | try {
14 | await githubQueue(installationId).write(github => github.gitdata.deleteRef({
15 | owner,
16 | repo,
17 | ref: `heads/${branch.head}`
18 | }))
19 | referenceDeleted = true
20 | } catch (e) {}
21 | updatedAt(Object.assign(branch, { referenceDeleted }))
22 |
23 | return repositories.bulkDocs([branch])
24 | }
25 |
--------------------------------------------------------------------------------
/lib/diff-greenkeeper-json.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | module.exports = function (oldFile, newFile) {
4 | const changes = { added: [], removed: [], modified: [] }
5 | if (!newFile || !oldFile) return changes
6 | // greenkeeper.json was deleted
7 | if (_.isEmpty(newFile) && oldFile.groups) {
8 | return _.set(changes, 'removed', Object.keys(oldFile.groups))
9 | }
10 | // new groups added
11 | _.set(changes, 'added', _.difference(_.keys(newFile.groups), _.keys(oldFile.groups)))
12 | // groups removed
13 | _.set(changes, 'removed', _.difference(_.keys(oldFile.groups), _.keys(newFile.groups)))
14 | // groups modified
15 | _.set(changes, 'modified', _.compact(_.map(oldFile.groups, (group, key) => {
16 | if (newFile.groups[key] && !_.isEqual(group.packages, newFile.groups[key].packages)) return key
17 | })))
18 |
19 | return changes
20 | }
21 |
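An invented before/after pair to illustrate the group diff above (the exported function is called diffGreenkeeperJson here purely for illustration):

    const oldFile = {
      groups: {
        frontend: { packages: ['packages/ui/package.json'] },
        backend: { packages: ['packages/api/package.json'] }
      }
    }
    const newFile = {
      groups: {
        frontend: { packages: ['packages/ui/package.json', 'packages/admin/package.json'] },
        docs: { packages: ['docs/package.json'] }
      }
    }

    // diffGreenkeeperJson(oldFile, newFile) returns:
    // { added: ['docs'], removed: ['backend'], modified: ['frontend'] }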
--------------------------------------------------------------------------------
/lib/diff-package-json.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | const types = [
4 | 'dependencies',
5 | 'devDependencies',
6 | 'optionalDependencies',
7 | 'peerDependencies'
8 | ]
9 |
10 | module.exports = function (a, b, groupName) {
11 | const changes = {}
12 | types.forEach(type => {
13 | _.keys(_.get(a, type)).forEach(dep => {
14 | const before = _.get(a, [type, dep])
15 | const after = _.get(b, [type, dep])
16 | if (_.get(a, [type, dep]) === _.get(b, [type, dep])) return
17 | let change = 'modified'
18 | if (after === undefined) change = 'removed'
19 | _.set(changes, [type, dep], {
20 | change,
21 | before,
22 | after,
23 | groupName
24 | })
25 | })
26 | _.keys(_.get(b, type)).forEach(dep => {
27 | if (_.has(changes, [type, dep])) return
28 | if (_.get(b, [type, dep]) === _.get(a, [type, dep])) return
29 | _.set(changes, [type, dep], {
30 | change: 'added',
31 | before: _.get(a, [type, dep]),
32 | after: _.get(b, [type, dep])
33 | })
34 | })
35 | })
36 | return changes
37 | }
38 |
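An invented example for the dependency diff above (the exported function is called diffPackageJson here for illustration; no groupName is passed):

    const before = { dependencies: { lodash: '^4.0.0', request: '^2.0.0' } }
    const after = { dependencies: { lodash: '^4.17.0', standard: '^12.0.0' } }

    // diffPackageJson(before, after) returns:
    // {
    //   dependencies: {
    //     lodash:   { change: 'modified', before: '^4.0.0', after: '^4.17.0', groupName: undefined },
    //     request:  { change: 'removed',  before: '^2.0.0', after: undefined, groupName: undefined },
    //     standard: { change: 'added',    before: undefined, after: '^12.0.0' }
    //   }
    // }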
--------------------------------------------------------------------------------
/lib/enterprise-setup.js:
--------------------------------------------------------------------------------
1 | const { resolve } = require('url')
2 | const PouchDB = require('pouchdb-http')
3 |
4 | const env = require('./env')
5 | const dbNames = ['_users']
6 |
7 | function createDB (host, dbName) {
8 | const dbUrl = resolve(host, dbName)
9 | const db = new PouchDB(dbUrl)
10 | return db.info()
11 | }
12 |
13 | function removeDB (host, dbName) {
14 | const dbUrl = resolve(host, dbName)
15 | const db = new PouchDB(dbUrl)
16 | return db.destroy()
17 | }
18 |
19 | function generateLogsName (date) {
20 | return `logs-${date.toISOString().substr(0, 7)}${env.isProduction ? '' : '-staging'}`
21 | }
22 |
23 | function getEnterpriseDBNames () {
24 | const today = new Date()
25 | const nextMonth = new Date(new Date().setMonth(today.getMonth() + 1))
26 |
27 |   // if we don’t have a logsDB for the current month, push it to the array
28 | const actualLogs = generateLogsName(today)
29 | if (!dbNames.includes(actualLogs)) dbNames.push(actualLogs)
30 |
31 | // if we don't have a logsDB for the next month, push it to the array
32 | const nextLogs = generateLogsName(nextMonth)
33 | if (!dbNames.includes(nextLogs)) dbNames.push(nextLogs)
34 |
35 | return dbNames
36 | }
37 |
38 | module.exports = async function () {
39 | const host = env.COUCH_URL
40 | const enterpriseDbNames = await getEnterpriseDBNames()
41 |
42 |   // if we have '_users' plus more than 12 monthly logs dbs, remove the oldest logsDB
43 | if (enterpriseDbNames.length > 13) {
44 | await removeDB(host, enterpriseDbNames[1])
45 | enterpriseDbNames.splice(1, 1)
46 | }
47 |
48 | // create /_users and logs if not exists
49 | return Promise.all(
50 | enterpriseDbNames.map(dbName => {
51 | return createDB(host, dbName)
52 | })
53 | )
54 | }
55 |
--------------------------------------------------------------------------------
/lib/env.js:
--------------------------------------------------------------------------------
1 | const envalid = require('envalid')
2 | const parseURL = require('url').parse
3 | const { bool, str, url, num, host } = envalid
4 | if (process.env.GITHUB_HOST) {
5 | try {
6 | const parsed = parseURL(process.env.GITHUB_HOST)
7 | process.env.GITHUB_URL = `${parsed.protocol}//${parsed.host}`
8 | } catch (e) {
9 | // fallback to default
10 | }
11 | }
12 |
13 | let environmentConfig = {
14 | PORT: num({ default: 5000 }),
15 | COUCH_URL: url({ devDefault: 'http://localhost:5984/' }),
16 | ISSUER_ID: str({ devDefault: '504', default: '505' }),
17 | PRIVATE_KEY: str({ devDefault: 'so-secret' }),
18 | AMQP_URL: url({ devDefault: 'amqp://localhost' }),
19 | EVENTS_QUEUE_NAME: str({ devDefault: 'events-dev' }),
20 | JOBS_QUEUE_NAME: str({ devDefault: 'jobs-dev' }),
21 | WORKER_SIZE: num({ default: require('os').cpus().length }),
22 | REDIS_URL: url({ default: 'redis://redis:6379', devDefault: 'redis://localhost:6379' }),
23 | NODE_ENV: str({
24 | choices: ['testing', 'development', 'staging', 'production'],
25 | devDefault: 'development'
26 | }),
27 | STATSD_HOST: str({ default: '172.17.0.1' }),
28 | NPM_REGISTRY: url({ default: 'https://registry.npmjs.org/' }),
29 | GITHUB_URL: url({ default: 'https://github.com' }),
30 | BADGES_SECRET: str({ devDefault: 'badges-secret' }),
31 | BADGES_HOST: str({ default: 'badges.greenkeeper.io' }),
32 | NPMHOOKS_SECRET: str({ devDefault: 'make-secrets-great-again' }),
33 | IS_ENTERPRISE: bool({ default: false }),
34 | HOOKS_HOST: str({ default: 'hooks.greenkeeper.io' }),
35 | GK_HOST: host({ default: 'greenkeeper.io' }),
36 | EXEC_SERVER_URL: url({ devDefault: 'http://localhost:1234' })
37 | }
38 |
39 | if (!process.env.IS_ENTERPRISE) {
40 | // These environment variables are only relevant for SaaS
41 | Object.assign(environmentConfig, {
42 | EMAIL_HOST: str({ devDefault: 'smtp.mandrillapp.com' }),
43 | EMAIL_PORT: num({ devDefault: 578 }),
44 | EMAIL_USER: str({ devDefault: 'email@dev.com' }),
45 | EMAIL_PASSWORD: str({ devDefault: 'emailpassword' }),
46 | EMAIL_FROM: str({ devDefault: 'support@greenkeeper.io' }),
47 | ROLLBAR_TOKEN_JOBS: str({ devDefault: '' }),
48 | STRIPE_SECRET_KEY: str({ devDefault: 'stripe-token' })
49 | })
50 | }
51 |
52 | module.exports = envalid.cleanEnv(process.env, environmentConfig)
53 |
--------------------------------------------------------------------------------
/lib/get-config.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const mergejson = require('mergejson')
3 | const { defaultCommitMessages } = require('./default-commit-messages')
4 | const { defaultPrTitles } = require('./default-pr-titles')
5 |
6 | module.exports = repository => {
7 | const greenkeeperConfig = _.get(repository, 'greenkeeper', {})
8 | const packageJSONConfig = _.get(repository, ['packages', 'package.json', 'greenkeeper'], {})
9 |
10 | const mergedConfig = mergejson(greenkeeperConfig, packageJSONConfig)
11 |
12 | // Make a copy instead of mutating the original repoDoc config!
13 | return _.defaultsDeep(JSON.parse(JSON.stringify(mergedConfig)), {
14 | label: 'greenkeeper',
15 | branchPrefix: 'greenkeeper/',
16 | ignore: [],
17 | commitMessages: defaultCommitMessages,
18 | prTitles: defaultPrTitles
19 | })
20 | }
21 |
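An invented repoDoc to show the merge and the defaults above; the two config sources use non-overlapping keys here, so the precedence of mergejson does not matter for this example:

    const getConfig = require('./lib/get-config') // path is illustrative

    const repoDoc = {
      greenkeeper: { groups: { default: { packages: ['package.json'] } } },
      packages: {
        'package.json': { greenkeeper: { label: 'dependencies', ignore: ['tap'] } }
      }
    }

    // getConfig(repoDoc) yields roughly:
    // {
    //   groups: { default: { packages: ['package.json'] } }, // from greenkeeper.json
    //   label: 'dependencies',                               // from package.json
    //   ignore: ['tap'],                                     // from package.json
    //   branchPrefix: 'greenkeeper/',                        // default
    //   commitMessages: { ...defaultCommitMessages },
    //   prTitles: { ...defaultPrTitles }
    // }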
--------------------------------------------------------------------------------
/lib/get-diff-commits.js:
--------------------------------------------------------------------------------
1 | const diffContent = require('../content/diff-commits')
2 | const githubQueue = require('./github-queue')
3 | const env = require('./env')
4 |
5 | module.exports = async function ({ installationId, owner, repo, base, head }) {
6 | if (!(base && head)) return null
7 |
8 | const ghqueue = githubQueue(installationId)
9 |
10 | try {
11 | const diff = await ghqueue.read(github => github.repos.compareCommits({ base, head, owner, repo }))
12 |
13 | if (!diff) return ''
14 |
15 | var body = await ghqueue.read(github => github.markdown.render({
16 | text: diffContent(diff),
17 | mode: 'gfm',
18 | context: `${owner}/${repo}`
19 | }))
20 | } catch (e) {}
21 |
22 | if (!body) return ''
23 |
24 | body = body.replace(
25 | /href="https?:\/\/github\.com\//gmi,
26 | `href="https://urls.${env.GK_HOST}/`
27 | )
28 |
29 | return `
30 | Commits
31 | ${body}
32 | `
33 | }
34 |
--------------------------------------------------------------------------------
/lib/get-exec-tokens.js:
--------------------------------------------------------------------------------
1 | async function getExecTokens ({
2 | installationId,
3 | repoDoc
4 | }, log) {
5 | try {
6 | const dbs = require('./dbs')
7 | const { tokens, 'token-audits': tokenAudits } = await dbs() // eslint-disable-line
8 |
9 | /*
10 | This is the structure of the tokens 'model'
11 |     _id: `${accountId}`
12 | tokens: {
13 | ${repoId}: {
14 | npm: ${token},
15 | github: ${token}
16 | }
17 | }
18 | */
19 | let execTokens = ''
20 | let repositoryTokens = ''
21 | try {
22 | repositoryTokens = await tokens.get(repoDoc.accountId)
23 | log.info('repository tokens received')
24 | } catch (error) {
25 | if (error.status === 404) {
26 | log.info(`No repository token set`, { error })
27 | } else {
28 | log.error(`Unable to get repository token`, { error })
29 | }
30 | }
31 |
32 | if (repositoryTokens && repositoryTokens.tokens[repoDoc._id]) {
33 | execTokens = JSON.stringify(repositoryTokens.tokens[repoDoc._id])
34 | const datetime = new Date().toISOString().substr(0, 19).replace(/[^0-9]/g, '')
35 |
36 | // write audit log entry to 'token-audits' db
37 | // log entry type: 'read'
38 | try {
39 | await tokenAudits.put({
40 | _id: `${installationId}:${repoDoc._id}:${datetime}:read`,
41 | keys: Object.keys(repositoryTokens.tokens[repoDoc._id])
42 | })
43 | } catch (error) {
44 | log.error(`Unable to store token audit log`, { installationId, repositoryId: repoDoc._id, error })
45 | }
46 | }
47 | return execTokens
48 | } catch (error) {
49 | log.error(`Error while fetching repo tokens`, { installationId, repositoryId: repoDoc._id, error })
50 | }
51 | }
52 |
53 | module.exports = {
54 | getExecTokens
55 | }
56 |
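For illustration, a document in the tokens database matching the structure described in the comment above could look like the sketch below; every id and token value is invented.

const exampleTokensDoc = {
  _id: '424242',                     // accountId
  tokens: {
    '111000111': {                   // repoDoc._id
      npm: 'npm-token-example',
      github: 'github-token-example'
    }
  }
}
// getExecTokens would return JSON.stringify(exampleTokensDoc.tokens['111000111'])
// and write a 'read' audit entry with an _id like '37:111000111:20190401103000:read'
// (installationId:repositoryId:datetime:read, values invented).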
--------------------------------------------------------------------------------
/lib/get-infos.js:
--------------------------------------------------------------------------------
1 | const url = require('url')
2 |
3 | const _ = require('lodash')
4 | const githubFromGit = require('github-url-from-git')
5 |
6 | const getRelease = require('./get-release')
7 | const getDiffCommits = require('./get-diff-commits')
8 | const statsd = require('../lib/statsd')
9 |
10 | // returns a url object if you pass in a GitHub repositoryURL,
11 | // returns a string with an npm URL if you just pass in a dependency name
12 | function getDependencyURL ({ repositoryURL, dependency }) {
13 | // githubURL is an object!
14 | const githubURL = url.parse(
15 | githubFromGit(repositoryURL) || ''
16 | )
17 | if (dependency && !githubURL.href) {
18 | return `https://www.npmjs.com/package/${dependency}`
19 | }
20 | return githubURL
21 | }
22 |
23 | // removes logins and hashes etc.
24 | function getFormattedDependencyURL ({ repositoryURL, dependency }) {
25 | return url.format(getDependencyURL({ repositoryURL, dependency }))
26 | }
27 |
28 | async function getInfos (
29 | { installationId, dependency, version, diffBase, versions }
30 | ) {
31 | const infos = {}
32 |
33 | const baseVersionData = versions[diffBase]
34 | const versionData = versions[version]
35 | if (!baseVersionData || !versionData) return infos
36 |
37 | const depGhUrl = getDependencyURL({ repositoryURL: _.get(versionData, 'repository.url') })
38 | // remove the leading slash to get the slug
39 | // depGhUrl.pathname: '/colors/monorepo'
40 | const slug = depGhUrl.pathname && depGhUrl.pathname.replace(/^\//, '')
41 | if (!slug) return infos
42 |
43 | const [owner, repo] = slug.split('/')
44 |
45 | infos.release = await getRelease({
46 | installationId,
47 | owner,
48 | repo,
49 | version,
50 | sha: versionData.gitHead
51 | })
52 |
53 | if (!baseVersionData.gitHead || !versionData.gitHead) return infos
54 |
55 | infos.diffCommits = await getDiffCommits({
56 | installationId,
57 | owner,
58 | repo,
59 | base: baseVersionData.gitHead,
60 | head: versionData.gitHead
61 | })
62 |
63 | return infos
64 | }
65 |
66 | function resolver ({ dependency, version, diffBase }) {
67 | return `${dependency}${version}${diffBase}`
68 | }
69 |
70 | const memoizedGetInfos = _.memoize(getInfos, resolver)
71 |
72 | if (process.env.NODE_ENV !== 'testing') {
73 | setInterval(() => {
74 | statsd.gauge('get_infos_cached', memoizedGetInfos.cache.size)
75 | }, 60000)
76 | }
77 |
78 | module.exports = {
79 | getInfos: memoizedGetInfos,
80 | getDependencyURL,
81 | getFormattedDependencyURL
82 | }
83 |
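Two illustrative calls to getFormattedDependencyURL, assuming the module is required from the repository root; the package names are arbitrary. A git repository URL should resolve to its GitHub page, while a bare dependency name falls back to its npm page.

const { getFormattedDependencyURL } = require('./lib/get-infos')

getFormattedDependencyURL({ repositoryURL: 'git+https://github.com/lodash/lodash.git' })
// => 'https://github.com/lodash/lodash'

getFormattedDependencyURL({ dependency: 'left-pad' })
// => 'https://www.npmjs.com/package/left-pad'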
--------------------------------------------------------------------------------
/lib/get-message.js:
--------------------------------------------------------------------------------
1 | const { defaultCommitMessages } = require('./default-commit-messages')
2 | const { defaultPrTitles } = require('./default-pr-titles')
3 |
4 | function replaceVariables (message, variables) {
5 | let replacedString = message
6 |
7 | Object.keys(variables).forEach((key) => {
8 | replacedString = replacedString.replace(`\${${key}}`, variables[key])
9 | })
10 |
11 | return replacedString
12 | }
13 |
14 | function hasInvalidVar (message) {
15 | return message.match(/\${.+?}/)
16 | }
17 |
18 | function getMessage (commitMessages, messageKey, values) {
19 | if (!Object.prototype.hasOwnProperty.call(commitMessages, messageKey)) {
20 | throw new Error(`Unknown message messageKey '${messageKey}'`)
21 | }
22 |
23 | // get rid of null and undefined
24 | const templateValues = Object.assign({}, values)
25 |
26 | let commitMessage = replaceVariables(commitMessages[messageKey], templateValues)
27 |
28 | // if someone replaced the variable name with something else,
29 | // return the default message for that messageKey
30 | if (hasInvalidVar(commitMessage)) {
31 | commitMessage = replaceVariables(defaultCommitMessages[messageKey], templateValues)
32 | }
33 |
34 | return commitMessage
35 | }
36 |
37 | function getPrTitle ({ version, dependency, group, prTitles }) {
38 | const variables = { dependency, group }
39 | if (!prTitles[version]) {
40 | throw new Error('exited: Unknown PR key')
41 | }
42 |
43 | let prTitle = replaceVariables(prTitles[version], variables)
44 |
45 | // if someone replaced the variable name with something else,
46 | // return the default pr title for that messageKey
47 | if (hasInvalidVar(prTitle)) {
48 | prTitle = replaceVariables(defaultPrTitles[version], variables)
49 | }
50 |
51 | return prTitle
52 | }
53 |
54 | module.exports = {
55 | getMessage,
56 | getPrTitle
57 | }
58 |
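A small sketch of the ${...} substitution and the error path, using a hand-rolled template map rather than the real defaults.

const { getMessage } = require('./lib/get-message')

const messages = { dependencyPin: 'fix: pin ${dependency} to ${oldVersion}' }

getMessage(messages, 'dependencyPin', { dependency: 'lodash', oldVersion: '4.17.11' })
// => 'fix: pin lodash to 4.17.11'

getMessage(messages, 'somethingElse', {})
// => throws: Unknown message messageKey 'somethingElse'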
--------------------------------------------------------------------------------
/lib/get-ranged-version.js:
--------------------------------------------------------------------------------
1 | const semver = require('semver')
2 |
3 | const caret = /^\d+(\.(x|\*)){0,2}$/i
4 | const tilde = /^\d+\.\d+(\.(x|\*))?$/i
5 | const wildcard = /^(x|\*)(\.(x|\*)){0,2}$/i
6 |
7 | function extractPrefix (rawVersion) {
8 | const version = rawVersion.trim()
9 | if (!version || wildcard.test(version)) return '>='
10 | if (version === 'latest' || version === 'next') return '^'
11 | if (semver.valid(version) || !semver.validRange(version)) return ''
12 |
13 | if (version[0] === '>') return '>='
14 | if (version[0] === '^' || caret.test(version)) return '^'
15 | if (version[0] === '~' || tilde.test(version)) return '~'
16 |
17 | return ''
18 | }
19 |
20 | module.exports = (version, oldVersion) => extractPrefix(oldVersion) + version
21 | module.exports.extractPrefix = extractPrefix
22 |
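Some example inputs and outputs, assuming the module is required from the repository root.

const getRangedVersion = require('./lib/get-ranged-version')

getRangedVersion('2.1.0', '^1.3.0')  // => '^2.1.0'   caret range is kept
getRangedVersion('2.1.0', '~1.3.0')  // => '~2.1.0'   tilde range is kept
getRangedVersion('2.1.0', '1.2.3')   // => '2.1.0'    exact pin stays exact
getRangedVersion('2.1.0', '*')       // => '>=2.1.0'  wildcards widen to >=
getRangedVersion('2.1.0', 'latest')  // => '^2.1.0'   dist-tags fall back to caret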
--------------------------------------------------------------------------------
/lib/get-release.js:
--------------------------------------------------------------------------------
1 | const githubQueue = require('./github-queue')
2 | const env = require('./env')
3 |
4 | module.exports = async function ({ installationId, owner, repo, version, sha }) {
5 | const headers = {
6 | accept: 'application/vnd.github.v3.html+json'
7 | }
8 | const ghqueue = githubQueue(installationId)
9 |
10 | let result
11 | try {
12 | result = await ghqueue.read(github => github.repos.getReleaseByTag({
13 | headers,
14 | owner,
15 | repo,
16 | tag: `v${version}`
17 | }))
18 | } catch (err) {
19 | try {
20 | result = await ghqueue.read(github => github.repos.getReleaseByTag({
21 | headers,
22 | owner,
23 | repo,
24 | tag: `${version}`
25 | }))
26 | } catch (err) {
27 | try {
28 | const { tag } = await ghqueue.read(github => github.gitdata.getTag({ owner, repo, tag_sha: sha }))
29 |
30 | result = await ghqueue.read(github => github.repos.getReleaseByTag({
31 | headers,
32 | owner,
33 | repo,
34 | tag
35 | }))
36 | } catch (err) {
37 | return ''
38 | }
39 | }
40 | }
41 |
42 | if (!result || !result.body_html) return ''
43 |
44 | // We need the redirect because GitHub would parse these as related issues otherwise.
45 | // If we did that for a react issue, GK would add 10000 related issues to the react issue :o
46 | const body = result.body_html.replace(
47 | /href="https?:\/\/github\.com\//gmi,
48 | `href="https://urls.${env.GK_HOST}/`
49 | )
50 |
51 | return `
52 | Release Notes for ${result.name || result.tag_name}
53 |
54 | ${body}
55 | \n`
56 | }
57 |
--------------------------------------------------------------------------------
/lib/get-token.js:
--------------------------------------------------------------------------------
1 | const zlib = require('zlib')
2 |
3 | const { Client, Policy } = require('catbox')
4 | const jwt = require('jsonwebtoken')
5 | const retry = require('retry-promise').default
6 | const { promisify } = require('bluebird')
7 |
8 | const env = require('./env')
9 | const Github = require('./github')
10 |
11 | let cert
12 | if (env.IS_ENTERPRISE) {
13 | // In GKE, we don’t zip the private key, so we don’t need to unzip it here.
14 | // This is because zipping is unnecessarily complicated in Replicated, and
15 | // just base64 encoding achieves our goal of getting the key into a single line
16 | // as well, it’s just a pretty long line.
17 | cert = Buffer.from(env.PRIVATE_KEY, 'base64')
18 | } else {
19 | cert = zlib.gunzipSync(Buffer.from(env.PRIVATE_KEY, 'base64'))
20 | }
21 |
22 | const client = new Client(require('catbox-memory'))
23 | const cache = new Policy(
24 | {
25 | expiresIn: env.NODE_ENV === 'testing' ? 10000 : 30 * 60 * 1000,
26 | generateTimeout: false,
27 | generateFunc: (id, next) => {
28 | getToken(Number(id)).then(token => next(null, token)).catch(next)
29 | }
30 | },
31 | client,
32 | 'installation-token'
33 | )
34 | const cacheStarted = promisify(client.start, { context: client })()
35 |
36 | module.exports = async id => {
37 | await cacheStarted
38 | return promisify(cache.get, { context: cache })(String(id))
39 | }
40 |
41 | async function getToken (iss) {
42 | const token = jwt.sign({}, cert, {
43 | algorithm: 'RS256',
44 | expiresIn: '1m',
45 | issuer: env.ISSUER_ID
46 | })
47 |
48 | let github = Github({ auth: `Bearer ${token}` })
49 |
50 | const result = (await github.apps.createInstallationToken({
51 | installation_id: parseInt(iss, 10)
52 | })).data
53 |
54 | // making sure this token is valid
55 | // GitHub sometimes gives us bad credential errors
56 |     // with completely fresh tokens
57 | github = Github({ auth: `token ${result.token}` })
58 |
59 | await retry(
60 | {
61 | max: 5,
62 | backoff: 300
63 | },
64 | async (num) => {
65 | try {
66 | return await github.rateLimit.get({})
67 | } catch (e) {
68 | // rate limit might be disabled on GitHub Enterprise
69 | if (!e.toString().match(/Rate limiting is not enabled/)) {
70 | throw e
71 | }
72 | return {}
73 | }
74 | }
75 | )
76 |
77 | return result
78 | }
79 |
--------------------------------------------------------------------------------
/lib/github.js:
--------------------------------------------------------------------------------
1 | const url = require('url')
2 |
3 | const _ = require('lodash')
4 | const promiseRetry = require('promise-retry')
5 | const errorCodes = require('../lib/network-error-codes')
6 | const env = require('../lib/env')
7 |
8 | const ghRetry = (octokit) => {
9 | octokit.hook.error('request', (error, options) => {
10 | const type = error.status || error.message
11 | if (!errorCodes.includes(type)) {
12 | throw error
13 | }
14 |
15 | return promiseRetry(retry => {
16 | return octokitRequest(options).catch(error => {
17 | const type = error.status || error.message
18 | if (!errorCodes.includes(type)) {
19 | throw error
20 | }
21 |
22 | retry(error)
23 | })
24 | }, {
25 | retries: 5,
26 | minTimeout: 3000
27 | })
28 | })
29 | }
30 |
31 | const Octokit = require('@octokit/rest').plugin(ghRetry)
32 | const octokitRequest = require('@octokit/request')
33 |
34 | const Github = function (options) {
35 | if (env.GITHUB_URL !== 'https://github.com') {
36 | options.baseUrl = url.resolve(env.GITHUB_URL, '/api/v3')
37 | }
38 | const octokit = new Octokit(options)
39 | return octokit
40 | }
41 |
42 | module.exports = options => new Github(
43 | _.defaultsDeep(options || {}, {
44 | userAgent: 'Greenkeeper'
45 | })
46 | )
47 |
--------------------------------------------------------------------------------
/lib/invalid-config-file.js:
--------------------------------------------------------------------------------
1 | const updatedAt = require('./updated-at')
2 | const _ = require('lodash')
3 |
4 | async function getInvalidConfigIssueNumber (repositories, repositoryId) {
5 | return _.get(
6 | await repositories.query('open_invalid_config_issue', {
7 | key: repositoryId,
8 | include_docs: true
9 | }),
10 | 'rows[0].doc.number'
11 | )
12 | }
13 | async function invalidConfigFile ({ repoDoc, config, repositories, repository, repositoryId, details, log, isBlockingInitialPR = false }) {
14 | log.warn('validation of greenkeeper.json failed', { error: details, greenkeeperJson: repoDoc.greenkeeper })
15 | // reset greenkeeper config in repoDoc to the previous working version and start an 'invalid-config-file' job
16 | _.set(repoDoc, ['greenkeeper'], config)
17 | await updateDoc(repositories, repository, repoDoc)
18 | // If the config file is invalid, open an issue with validation errors and don’t do anything else in this file:
19 | // - no initial branch should be created (?)
20 |   // - no initial subgroup branches should (or can) be created
21 | // - no branches need to be deleted (we can’t be sure the changes are valid)
22 |
23 | return {
24 | data: {
25 | name: 'invalid-config-file',
26 | messages: _.map(details, 'formattedMessage'),
27 | errors: details,
28 | repositoryId,
29 | accountId: repoDoc.accountId,
30 | isBlockingInitialPR
31 | }
32 | }
33 | }
34 |
35 | function updateDoc (repositories, repository, repoDoc) {
36 | // Danger: this function receives inconsistent inputs from create-initial-branch.js,
37 | // where a repoDoc is used in place of a repository object.
38 | // Problem: repository keys are snake_case, repoDoc are camelCase!
39 | // We handle this with the ||
40 | return repositories.put(
41 | updatedAt(
42 | Object.assign(repoDoc, {
43 | private: repository.private,
44 | fullName: repository.full_name || repository.fullName,
45 | fork: repository.fork,
46 | hasIssues: repository.has_issues || repository.hasIssues
47 | })
48 | )
49 | )
50 | }
51 |
52 | module.exports = {
53 | invalidConfigFile,
54 | getInvalidConfigIssueNumber
55 | }
56 |
--------------------------------------------------------------------------------
/lib/lockfile.js:
--------------------------------------------------------------------------------
1 | const request = require('request-promise')
2 | const promiseRetry = require('promise-retry')
3 | const Log = require('gk-log')
4 |
5 | const dbs = require('../lib/dbs')
6 | const errorCodes = require('../lib/network-error-codes')
7 | const env = require('./env')
8 |
9 | module.exports = {
10 | getNewLockfile
11 | }
12 | // # getNewLockfile
13 | // find next server
14 | // send data to server
15 | //    increase in-flight job count for server
16 | // if network error
17 | // -> try next server
18 | // else
19 | // -> return result
20 | //    decrease in-flight job count for server
21 | // # find next server
22 | // get doc from couchdb: config
23 | // sort list by least jobs in flight
24 | // return least busy server
25 | let jobCountByServer = {}
26 | async function findNextServer () {
27 | const { config } = await dbs()
28 | let servers
29 | try {
30 | // { servers: [....] }
31 | const doc = await config.get('exec-servers')
32 | servers = doc.servers
33 | } catch (e) {
34 | servers = [env.EXEC_SERVER_URL]
35 | }
36 | const sortedServers = servers.sort((a, b) => {
37 | const jobsA = jobCountByServer[a] || 0
38 | const jobsB = jobCountByServer[b] || 0
39 | return jobsA < jobsB ? -1 : 1
40 | })
41 | return sortedServers[0]
42 | }
43 | async function getNewLockfile ({
44 | packageJson,
45 | packages,
46 | workspaceRoot,
47 | lock,
48 | type,
49 | repositoryTokens }) {
50 | const logs = dbs.getLogsDb()
51 | const log = Log({
52 | logsDb: logs,
53 | accountId: 'lockfile',
54 | repoSlug: null,
55 | context: 'lockfile'
56 | })
57 | const nextServer = await findNextServer()
58 | jobCountByServer[nextServer] = jobCountByServer[nextServer] ? jobCountByServer[nextServer] + 1 : 1
59 | return promiseRetry((retry, number) => {
60 | return request({
61 | uri: nextServer,
62 | method: 'POST',
63 | json: true,
64 | body: {
65 | type,
66 | packageJson,
67 | packages,
68 | workspaceRoot,
69 | lock,
70 | repositoryTokens
71 | }
72 | })
73 | .then(result => {
74 | jobCountByServer[nextServer]--
75 | if ((result instanceof Error) || result.message) {
76 | // result is either `error`, `{ok: false}`, `{ok: false, error}` or `{ok: true, contents}`
77 | const error = result
78 | throw error
79 | } else {
80 | return result
81 | }
82 | })
83 | .catch(error => {
84 | if (number >= 3) {
85 | log.error(`could not get lockfile from ${nextServer}, attempt #${number}: giving up`)
86 | jobCountByServer[nextServer]--
87 | throw error
88 | }
89 | const type = error.statusCode ? error.statusCode : error.error.code
90 | if (errorCodes.includes(type)) {
91 | log.warn(`could not get lockfile, attempt #${number}: retrying`)
92 | jobCountByServer[nextServer]--
93 | retry(error)
94 | } else {
95 | log.warn(`could not get lockfile, attempt #${number}: stopping because of ${error}`)
96 | jobCountByServer[nextServer]--
97 | throw error
98 | }
99 | })
100 | }, {
101 | retries: 3,
102 | minTimeout: 3000
103 | })
104 | }
105 |
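The 'exec-servers' document read by findNextServer is expected to look roughly like the sketch below (URLs invented); when the document is missing, env.EXEC_SERVER_URL is used as the only server, and the in-memory jobCountByServer map picks whichever entry currently has the fewest jobs in flight.

const execServersDoc = {
  _id: 'exec-servers',
  servers: [
    'http://exec-server-1.internal:1234',
    'http://exec-server-2.internal:1234'
  ]
}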
--------------------------------------------------------------------------------
/lib/network-error-codes.js:
--------------------------------------------------------------------------------
1 | module.exports = ['ETIMEDOUT', 'ECONNREFUSED', 'ECONNRESET', 'ESOCKETTIMEDOUT', 'EAI_AGAIN', 'HPE_INVALID_CONSTANT']
2 |
--------------------------------------------------------------------------------
/lib/normalize-plan-name.js:
--------------------------------------------------------------------------------
1 | module.exports = function normalizePlanName (planName) {
2 | return planName.toLowerCase().replace(' ', '')
3 | }
4 |
--------------------------------------------------------------------------------
/lib/npm-registry-client.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const RegClient = require('npm-registry-client')
3 |
4 | module.exports = opts => new RegClient(
5 | _.defaults(opts, {
6 | log: _([
7 | 'error',
8 | 'warn',
9 | 'info',
10 | 'verbose',
11 | 'silly',
12 | 'http',
13 | 'pause',
14 | 'resume'
15 | ])
16 | .mapKeys(k => k)
17 | .mapValues(() => _.noop)
18 | .value()
19 | })
20 | )
21 |
--------------------------------------------------------------------------------
/lib/open-issue.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const jsonInPlace = require('json-in-place')
3 | const semver = require('semver')
4 |
5 | const dbs = require('./dbs')
6 | const createBranch = require('./create-branch')
7 | const updatedAt = require('./updated-at')
8 | const getConfig = require('./get-config')
9 | const { getMessage } = require('./get-message')
10 | const statsd = require('./statsd')
11 | const githubQueue = require('./github-queue')
12 |
13 | const issueContent = require('../content/fail-issue')
14 |
15 | module.exports = async function (
16 | {
17 | installationId,
18 | repositoryId,
19 | accountId,
20 | owner,
21 | repo,
22 | version,
23 | dependency,
24 | dependencyType,
25 | oldVersionResolved,
26 | base,
27 | head,
28 | dependencyLink,
29 | release,
30 | diffCommits,
31 | statuses,
32 | monorepoGroupName,
33 | packageUpdateList
34 | }
35 | ) {
36 | const { repositories } = await dbs()
37 | const repoDoc = await repositories.get(repositoryId)
38 | const { branchPrefix, label, commitMessages } = getConfig(repoDoc)
39 |
40 | const body = issueContent({
41 | dependencyLink,
42 | oldVersionResolved,
43 | owner,
44 | repo,
45 | head,
46 | base,
47 | version,
48 | dependency,
49 | dependencyType,
50 | release,
51 | diffCommits,
52 | statuses,
53 | monorepoGroupName,
54 | packageUpdateList
55 | })
56 | const { number } = await githubQueue(installationId).write(github => github.issues.create({
57 | owner,
58 | repo,
59 | title: `An in-range update of ${dependency} is breaking the build 🚨`,
60 | body,
61 | labels: [label]
62 | }))
63 |
64 | statsd.increment('update_issues')
65 |
66 | if (!semver.valid(version) || !semver.valid(oldVersionResolved)) return
67 |
68 | const newBranch = `${branchPrefix}${dependency}-pin-${oldVersionResolved}`
69 |
70 | function transform (content) {
71 | const parsed = jsonInPlace(content)
72 | parsed.set([dependencyType, dependency], oldVersionResolved)
73 | return parsed.toString()
74 | }
75 |
76 | const messageValues = { dependency, oldVersion: oldVersionResolved }
77 |   const messageKey = dependencyType === 'dependencies' ? 'dependencyPin' : 'devDependencyPin'
78 |
79 | const sha = await createBranch({
80 | installationId,
81 | owner,
82 | repoName: repo,
83 | branch: base,
84 | newBranch,
85 | path: 'package.json',
86 | transform,
87 | commitMessageTemplates: commitMessages,
88 | message: getMessage(commitMessages, messageKey, messageValues)
89 | })
90 |
91 | await repositories.bulkDocs(
92 | [
93 | {
94 | _id: `${repositoryId}:branch:${sha}`,
95 | type: 'branch',
96 | purpose: 'pin',
97 | sha,
98 | base,
99 | head: newBranch,
100 | dependency,
101 | dependencyType,
102 | version: oldVersionResolved,
103 | repositoryId,
104 | accountId
105 | },
106 | {
107 | _id: `${repositoryId}:issue:${number}`,
108 | type: 'issue',
109 | repositoryId,
110 | version,
111 | number,
112 | dependency,
113 | state: 'open'
114 | }
115 | ].map(_.ary(updatedAt, 1))
116 | )
117 | }
118 |
--------------------------------------------------------------------------------
/lib/payments.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const dbs = require('../lib/dbs')
3 |
4 | const validPaidPlanNames = [
5 | 'org', 'org_year', 'org_eur', 'org_year_eur',
6 | 'personal', 'personal_year', 'personal_eur', 'personal_year_eur',
7 | 'team', 'business'
8 | ]
9 |
10 | async function hasStripeBilling (accountId) {
11 | const activeBilling = await getActiveBilling(accountId)
12 | return !!activeBilling && !!activeBilling.stripeSubscriptionId
13 | }
14 |
15 | async function getActiveBilling (accountId) {
16 | if (!accountId) throw new Error('getActiveBilling requires accountId')
17 | const { payments } = await dbs()
18 | try {
19 | const doc = await payments.get(String(accountId))
20 | const { plan } = doc
21 | if (validPaidPlanNames.includes(plan)) return doc
22 | } catch (e) {
23 | if (e.status !== 404) throw e
24 | }
25 | return false
26 | }
27 |
28 | async function hasPaidAccount (accountId, log) {
29 | if (!accountId) return false
30 |
31 | const { payments } = await dbs()
32 | try {
33 | const doc = await payments.get(String(accountId))
34 | if (doc.plan === 'opensource' || doc.plan === 'free') return false
35 | } catch (error) {
36 | log.warn(`Could not get payment info for accountId ${accountId}`)
37 | if (error.status !== 404) return false
38 | }
39 | return true
40 | }
41 |
42 | async function maybeUpdatePaymentsJob ({ accountId, isPrivate, repositoryId }) {
43 | if (isPrivate && (await hasStripeBilling(accountId))) {
44 | return {
45 | data: {
46 | name: 'update-payments',
47 | accountId
48 | }
49 | }
50 | }
51 |
52 | // this is for users without stripe account, who made their public repo private
53 | if (isPrivate && repositoryId && (!await hasStripeBilling(accountId))) {
54 | // open an issue: we need payment information
55 | return {
56 | data: {
57 | name: 'payment-required',
58 | accountId,
59 | repositoryId
60 | }
61 | }
62 | }
63 | }
64 |
65 | async function getAmountOfCurrentlyPrivateAndEnabledRepos (accountId) {
66 | const { repositories } = await dbs()
67 |
68 | const billing = await repositories.query('billing', {
69 | key: accountId,
70 | group_level: 1,
71 | reduce: true
72 | })
73 | return _.get(billing, 'rows[0].value', 0)
74 | }
75 |
76 | async function getAccountNeedsMarketplaceUpgrade (accountId) {
77 | const { payments } = await dbs()
78 | try {
79 | const paymentDoc = await payments.get(String(accountId))
80 | if (!paymentDoc.plan) return false
81 | if (paymentDoc.plan === 'opensource') return true
82 | if (paymentDoc.plan === 'team') {
83 | if (await module.exports.getAmountOfCurrentlyPrivateAndEnabledRepos(accountId) > 15) {
84 | return true // team plan & repo limit reached
85 | }
86 | return false // team plan & repo limit *not* reached
87 | }
88 | } catch (error) {
89 | if (error.status !== 404) throw error
90 | }
91 |   return false // all other plans
92 | }
93 |
94 | module.exports = {
95 | hasStripeBilling,
96 | getActiveBilling,
97 | hasPaidAccount,
98 | maybeUpdatePaymentsJob,
99 | getAmountOfCurrentlyPrivateAndEnabledRepos,
100 | getAccountNeedsMarketplaceUpgrade
101 | }
102 |
--------------------------------------------------------------------------------
/lib/repository-docs.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const crypto = require('crypto')
3 |
4 | const updatedAt = require('./updated-at')
5 | const {
6 | getFiles,
7 | formatPackageJson,
8 | getGreenkeeperConfigFile,
9 | getPackagePathsFromConfigFile
10 | } = require('./get-files')
11 | const { validate } = require('./validate-greenkeeper-json')
12 |
13 | module.exports = {
14 | createDocs,
15 | updateRepoDoc,
16 | updateDoc
17 | }
18 |
19 | // trigger (several) initial-subgroup-pr(s):
20 | // - if a package.json is added/renamed/moved in the greenkeeper.json
21 | // - if a greenkeeper.json is added
22 |
23 | async function updateRepoDoc ({ installationId, doc, filePaths, log }) {
24 | const fullName = doc.fullName
25 | const oldGreenkeeperConfig = doc.greenkeeper
26 | // set a default empty config so the job can continue if the file get fails for some reason
27 | let greenkeeperConfigFile = {}
28 | try {
29 | greenkeeperConfigFile = await getGreenkeeperConfigFile(installationId, fullName, log)
30 | } catch (e) {
31 | throw e
32 | } finally {
33 | if (!_.isEmpty(greenkeeperConfigFile)) {
34 | log.info('UpdateRepoDoc: Fetched greenkeeper.json from GitHub', greenkeeperConfigFile)
35 | }
36 | _.set(doc, ['greenkeeper'], greenkeeperConfigFile)
37 | const defaultFiles = {
38 | 'package.json': [],
39 | 'package-lock.json': [],
40 | 'yarn.lock': [],
41 | 'npm-shrinkwrap.json': [],
42 | 'pnpm-lock.yaml': []
43 | }
44 | let filePathsFromConfig = []
45 |
46 | if (!_.isEmpty(greenkeeperConfigFile)) {
47 | if (validate(greenkeeperConfigFile).error) {
48 | log.info('UpdateRepoDoc: setting file paths to the ones from the old greenkeeper.json')
49 | filePathsFromConfig = getPackagePathsFromConfigFile(oldGreenkeeperConfig)
50 | } else {
51 | log.info('UpdateRepoDoc: setting file paths to the ones found via greenkeeper.json')
52 | filePathsFromConfig = getPackagePathsFromConfigFile(greenkeeperConfigFile)
53 | }
54 | }
55 |
56 | // try to get file paths from either the autodiscovered filePaths
57 | // or from the greenkeeper.json
58 | if (!_.isEmpty(filePaths)) {
59 | log.info('UpdateRepoDoc: setting file paths to the ones found per autodiscovery')
60 | filePathsFromConfig = getPackagePathsFromConfigFile({ groups: { default: { packages: filePaths } } })
61 | }
62 | log.info('UpdateRepoDoc: requesting files from GitHub', { files: filePathsFromConfig })
63 | const filesFromConfig = _.isEmpty(filePathsFromConfig)
64 | ? await getFiles({ installationId, fullName, sha: doc.headSha, log })
65 | : await getFiles({ installationId, fullName, files: filePathsFromConfig, sha: doc.headSha, log })
66 |
67 | const files = _.merge(filesFromConfig, defaultFiles)
68 | // handles multiple paths for files like this:
69 | // files: {
70 | // package.json: ['package.json', 'backend/package.json', 'frontend/package.json']
71 | // package-lock.json: ['package-lock.json', 'backend/package-lock.json']
72 | // npm-shrinkwrap.json: [],
73 | // yarn.lock: []
74 | // }
75 | doc.files = _.mapValues(files, fileType => fileType
76 | .filter(file => !!file.content)
77 | .map(file => file.path))
78 |
79 | // formats *all* the package.json files
80 | const pkg = formatPackageJson(files['package.json'])
81 |
82 | if (!pkg) {
83 | _.unset(doc, ['packages'])
84 | } else {
85 | _.set(doc, ['packages'], pkg)
86 | }
87 |
88 | log.info('UpdateRepoDoc: doc updated', { doc })
89 | }
90 | }
91 |
92 | function createDocs ({ repositories, accountId }) {
93 | return repositories.map(repo => updatedAt({
94 | _id: String(repo.id),
95 | type: 'repository',
96 | enabled: false,
97 | accountId,
98 | fullName: repo.full_name,
99 | private: repo.private,
100 | fork: repo.fork,
101 | hasIssues: repo.has_issues,
102 | accountToken: crypto.randomBytes(32).toString('hex'),
103 | packages: {}
104 | }))
105 | }
106 |
107 | function updateDoc (repositories, repository, repoDoc) {
108 | return repositories.put(
109 | updatedAt(
110 | Object.assign(repoDoc, {
111 | private: repository.private,
112 | fullName: repository.full_name,
113 | fork: repository.fork,
114 | hasIssues: repository.has_issues
115 | })
116 | )
117 | )
118 | }
119 |
--------------------------------------------------------------------------------
/lib/rollbar.js:
--------------------------------------------------------------------------------
1 | const { resolve } = require('path')
2 |
3 | const Rollbar = require('rollbar')
4 |
5 | const env = require('./env')
6 | const pkg = require('../package.json')
7 |
8 | const enabled = env.NODE_ENV !== 'development' && !env.IS_ENTERPRISE
9 |
10 | if (enabled) {
11 | module.exports = new Rollbar({
12 | accessToken: env.ROLLBAR_TOKEN_JOBS,
13 | environment: env.NODE_ENV,
14 | code_version: `v${pkg.version}`,
15 | root: resolve(__dirname, '../'),
16 | handleUncaughtExceptions: true,
17 | handleUnhandledRejections: true,
18 | exitOnUncaughtException: true
19 | })
20 | } else {
21 | module.exports = new Rollbar({ enabled: false })
22 | }
23 |
--------------------------------------------------------------------------------
/lib/statsd.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 | const StatsD = require('hot-shots')
3 |
4 | const env = require('./env')
5 | const rollbar = require('./rollbar')
6 |
7 | module.exports = new StatsD({
8 | host: env.STATSD_HOST,
9 | prefix: 'jobs.',
10 | globalTags: [env.NODE_ENV],
11 | mock: _.includes(['development', 'testing'], env.NODE_ENV),
12 | errorHandler: err => rollbar.error(err)
13 | })
14 |
--------------------------------------------------------------------------------
/lib/updated-at.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | module.exports = function (doc, event) {
4 | const now = new Date().toJSON()
5 | if (!doc.createdAt) doc.createdAt = now
6 | if (event) {
7 | doc.updatedAt = _([doc.updatedAt, { timestamp: now, event }])
8 | .flatten()
9 | .compact()
10 | .value()
11 | return doc
12 | }
13 | if (Array.isArray(doc.updatedAt)) {
14 | doc.updatedAt.push(now)
15 | return doc
16 | }
17 | doc.updatedAt = now
18 | return doc
19 | }
20 |
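A short sketch of how the timestamps evolve, with invented ids.

const updatedAt = require('./lib/updated-at')

const doc = updatedAt({ _id: 'repo:1' })
// => { _id: 'repo:1', createdAt: '2019-…', updatedAt: '2019-…' }

updatedAt(doc, 'push')
// updatedAt is now an array mixing the previous timestamp and event entries:
// [ '2019-…', { timestamp: '2019-…', event: 'push' } ]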
--------------------------------------------------------------------------------
/lib/upsert.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | const updatedAt = require('./updated-at')
4 |
5 | module.exports = async function (db, id, diff, keep) {
6 | let doc
7 | // upsert(docId, diffFun, cb, hasTimeout)
8 | await db.upsert(id, (old = {}) => {
9 | const keptOldValues = _.pick(old, keep)
10 | doc = updatedAt(Object.assign(old, diff, keptOldValues))
11 | return doc
12 | }, null, true)
13 |
14 | return doc
15 | }
16 |
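A usage sketch of the keep parameter, assuming a pouchdb-upsert enabled payments database like the one returned by lib/dbs; ids and values are invented.

const upsert = require('./lib/upsert')

async function example (payments) {
  // merge the new plan into doc '123', but never overwrite an existing
  // stripeSubscriptionId, even if the diff happened to contain one
  return upsert(payments, '123', { plan: 'team' }, ['stripeSubscriptionId'])
}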
--------------------------------------------------------------------------------
/lib/validate-greenkeeper-json.js:
--------------------------------------------------------------------------------
1 | const Joi = require('joi')
2 |
3 | // a package path is either the term 'package.json' or
4 | // a relative path that ends in package.json
5 | // alternative regex: ^([^/]|p).*ackage\.json$
6 | const packagePathSchema = Joi.string().regex(/^([.a-zA-Z0-9_@-]+\/[.a-zA-Z0-9_@/-]*)?package\.json$/)
7 |
8 | const schema = Joi.object().keys({
9 | groups: Joi.object().pattern(/^[a-zA-Z0-9_-]+$/,
10 | Joi.object().keys({
11 | packages: Joi.array().items(packagePathSchema).required(),
12 | ignore: Joi.array()
13 | }).optionalKeys(['ignore'])
14 | ),
15 | ignore: Joi.array()
16 | }).optionalKeys(['ignore'])
17 |
18 | function validate (file) {
19 | // Abort early still doesn’t handle multiple _nested_ errors (in the same branch of the JSON tree).
20 | let errors = Joi.validate(file, schema, {
21 | abortEarly: false
22 | })
23 | if (errors.error) {
24 | errors.error.details.map((e) => {
25 | // Fall back to the standard Joi message if we don’t have a better one
26 | e.formattedMessage = e.message
27 | if (e.type === 'object.base' && e.path.length === 0) {
28 | e.formattedMessage = 'It seems as if your `greenkeeper.json` is not valid JSON. You can check the validity of JSON files with [JSONLint](https://jsonlint.com/), for example.'
29 | }
30 | if (e.type === 'string.regex.base') {
31 | e.formattedMessage = `The package path \`${e.context.value}\` in the group \`${e.path[1]}\` is invalid. It must be a relative path to a \`package.json\` file. The path may not start with a slash, and it must end in \`package.json\`. Allowed characters for a path are alphanumeric, underscores, dashes, periods and the @ symbol (a-zA-Z_-.@).`
32 | }
33 | if (e.type === 'string.regex.base' && e.context.value.startsWith('/')) {
34 | e.formattedMessage = `The package path \`${e.context.value}\` in the group \`${e.path[1]}\` must be relative and not start with a slash.`
35 | }
36 | if (e.type === 'string.regex.base' && !e.context.value.endsWith('package.json')) {
37 | e.formattedMessage = `The package path \`${e.context.value}\` in the group \`${e.path[1]}\` must end with \`package.json\`.`
38 | }
39 | if (e.type === 'object.allowUnknown' && e.path[0] === 'groups') {
40 | e.formattedMessage = `The group name \`${e.context.child}\` is invalid. Group names may only contain alphanumeric characters, underscores and dashes (a-zA-Z_-).`
41 | }
42 | if (e.type === 'object.allowUnknown' && e.path[0] !== 'groups') {
43 | e.formattedMessage = `The root-level key \`${e.context.child}\` is invalid. If you meant to add a group named \`${e.context.child}\`, please put it in a root-level \`groups\` object. Valid root-level keys are \`groups\` and \`ignore\`.`
44 | }
45 | if (e.message === '"packages" is required') {
46 | e.formattedMessage = `The group \`${e.path[1]}\` must contain a \`packages\` key. This must contain an array of paths to the \`package.json\` files you want handled in this group, eg. \`packages: ['cli-tool/package.json', 'analytics/package.json']\`.`
47 | }
48 | })
49 | }
50 |
51 | return errors
52 | }
53 |
54 | module.exports = {
55 | validate
56 | }
57 |
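Two illustrative inputs, one valid and one invalid, with invented group and package names.

const { validate } = require('./lib/validate-greenkeeper-json')

// valid: groups with relative package.json paths, optional ignore lists
validate({
  groups: {
    frontend: { packages: ['frontend/package.json'] },
    backend: { packages: ['backend/package.json'], ignore: ['lodash'] }
  },
  ignore: ['eslint']
}).error // => null

// invalid: absolute package path plus an unknown root-level key
const result = validate({
  groups: { frontend: { packages: ['/frontend/package.json'] } },
  frontend: { packages: [] }
})
console.log(result.error.details.map(e => e.formattedMessage))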
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@greenkeeper/jobs",
3 | "version": "0.0.0-development",
4 | "dependencies": {
5 | "@octokit/rest": "16.19.0",
6 | "amqplib": "^0.5.0",
7 | "bluebird": "^3.4.6",
8 | "catbox": "^7.1.3",
9 | "catbox-memory": "^2.0.4",
10 | "couchdb-bootstrap": "14.1.1",
11 | "envalid": "^5.0.0",
12 | "escape-string-regexp": "^2.0.0",
13 | "github-url-from-git": "^1.4.0",
14 | "gk-log": "1.5.0",
15 | "greenkeeper-monorepo-definitions": "^1.19.1",
16 | "hot-shots": "^5.0.0",
17 | "joi": "^14.0.0",
18 | "js-yaml": "^3.7.0",
19 | "json-in-place": "^1.0.1",
20 | "jsonwebtoken": "^8.1.1",
21 | "lodash": "^4.17.10",
22 | "mergejson": "^1.0.30",
23 | "micromatch": "^4.0.2",
24 | "nodemailer": "^6.0.0",
25 | "npm-registry-client": "^8.3.0",
26 | "pouchdb-http": "^6.0.2",
27 | "pouchdb-mapreduce": "^6.0.5",
28 | "pouchdb-upsert": "greenkeeperio/upsert#add_update-timeout",
29 | "promise-queue": "^2.2.3",
30 | "promise-retry": "^1.1.1",
31 | "readme-badger": "^0.3.0",
32 | "redis": "^2.8.0",
33 | "request": "^2.75.0",
34 | "request-promise": "^4.1.1",
35 | "retry-promise": "^1.0.0",
36 | "rollbar": "^2.0.3",
37 | "semver": "^6.0.0",
38 | "slack-notify": "^0.1.6",
39 | "stripe": "^6.0.0",
40 | "yml-in-place": "^1.0.2"
41 | },
42 | "devDependencies": {
43 | "jest": "^22.4.2",
44 | "lolex": "^4.0.1",
45 | "nock": "^10.0.0",
46 |     "prettier-standard-formatter": "^0.2.3",
47 | "simple-mock": "^0.8.0",
48 | "standard": "^12.0.1",
49 | "weak": "^1.0.1"
50 | },
51 | "engines": {
52 | "node": "8"
53 | },
54 | "license": "Apache-2.0",
55 | "jest": {
56 | "testRegex": "/test/.*\\.js$",
57 | "testPathIgnorePatterns": [
58 | "/node_modules/",
59 | "/test/helpers/.*\\.js$"
60 | ],
61 | "collectCoverage": true,
62 | "unmockedModulePathPatterns": [
63 | "/node_modules/nock"
64 | ],
65 | "setupTestFrameworkScriptFile": "./jest.setup.js",
66 | "testEnvironment": "node"
67 | },
68 | "publishConfig": {
69 | "access": "restricted"
70 | },
71 | "repository": {
72 | "type": "git",
73 | "url": "git+https://github.com/greenkeeperio/greenkeeper.git"
74 | },
75 | "scripts": {
76 | "format": "prettier-standard-formatter jobs lib test",
77 | "db:start": "./start-couchdb",
78 | "deploy": "./deploy",
79 | "pretest": "standard && npm run db:start",
80 | "start": "node index.js",
81 | "test": "npm run test:chunked",
82 | "test:localdb": "COUCH_URL=http://localhost:5984 standard && npm run test:chunked",
83 | "test:chunked": "npm run test:lib && npm run test:jobs && npm run test:github && npm run test:rest",
84 | "test:sequential": "NODE_ENV=testing jest -i",
85 | "test:parallel": "NODE_ENV=testing jest",
86 | "test:lib": "NODE_ENV=testing jest lib --logHeapUsage -i",
87 | "test:jobs": "NODE_ENV=testing jest jobs/*.js --logHeapUsage -i",
88 | "test:github": "NODE_ENV=testing jest jobs/github-event --logHeapUsage -i",
89 | "test:rest": "NODE_ENV=testing jest content utils --logHeapUsage -i"
90 | },
91 | "standard": {
92 | "env": {
93 | "jest": true
94 | },
95 | "globals": [
96 | "jest",
97 | "expect",
98 | "describe",
99 | "test",
100 | "beforeAll",
101 | "beforeEach",
102 | "afterAll",
103 | "afterEach"
104 | ],
105 | "ignore": [
106 | "couchdb"
107 | ]
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/start-couchdb:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # immediately fail if an error occurs
4 | set -e
5 |
6 |
7 | if [[ -n "$GK_COUCHDB" ]]; then
8 | echo "Using existing CouchDB on :5984"
9 | curl -sX DELETE $GK_COUCHDB/{installations,npm,payments,repositories,monorepo}-staging 2>&1 > /dev/null
10 | else
11 | echo "Starting CouchDB via Docker"
12 | docker rm -fv gk-couchdb || true
13 | docker run -d -p 5984:5984 --name gk-couchdb apache/couchdb:2.3.1
14 | fi
15 |
16 | TIMEOUT=280
17 | TRIES=0
18 | printf 'Waiting for CouchDB to be available to us'
19 |
20 | until $(curl --output /dev/null --silent --head --fail http://localhost:5984); do
21 | printf '.'
22 |   TRIES=$((TRIES+1))
23 |
24 | if [[ $TRIES -eq $TIMEOUT ]]; then
25 | echo 'Timed out waiting for CouchDB to be available'
26 | exit 1
27 | fi
28 |
29 | sleep 1
30 | done
31 |
32 | # speed up couchdb operations
33 | curl -sX PUT http://localhost:5984/_node/_local/_config/couchdb/delayed_commits -d '"true"' > /dev/null 2>&1
34 |
--------------------------------------------------------------------------------
/test/content/__snapshots__/fail-issue.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`Fails issue content Displays monorepo issue correctly 1`] = `
4 | "
5 | ## There have been updates to the *babel7* monorepo
6 |
7 | 🚨 [View failing branch](https://github.com/gilliam/brazil/compare/master...gilliam:feat%2Fwings).
8 |
9 | This version is **covered** by your **current version range** and after updating it in your project **the build failed**.
10 |
11 | This monorepo update includes releases of one or more dependencies which all belong to the [babel7 group definition](https://github.com/greenkeeperio/monorepo-definitions).
12 |
13 |
14 | tuttle is a direct dependency of this project, and **it is very likely causing it to break**. If other packages depend on yours, this update is probably also breaking those in turn.
15 |
16 |
17 |
18 |
19 | Status Details
20 |
21 | - ✅ **continuous-integration/travis-ci/pr:** The build **passed**.
22 |
23 |
24 |
25 | ---
26 |
27 | 14.2.0
28 |
29 | Commits
30 | A list of commits
31 |
32 |
33 |
34 |
35 | FAQ and help
36 |
37 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
38 |
39 |
40 | ---
41 |
42 |
43 | Your [Greenkeeper](https://greenkeeper.io) Bot :palm_tree:
44 | "
45 | `;
46 |
47 | exports[`Fails issue content Displays statuses and checks correctly 1`] = `
48 | "
49 | ## The dependency [tuttle](http://lol.cat) was updated from \`14.0.0\` to \`14.2.0\`.
50 |
51 | 🚨 [View failing branch](https://github.com/gilliam/brazil/compare/master...gilliam:feat%2Fwings).
52 |
53 | This version is **covered** by your **current version range** and after updating it in your project **the build failed**.
54 |
55 |
56 |
57 |
58 | tuttle is a direct dependency of this project, and **it is very likely causing it to break**. If other packages depend on yours, this update is probably also breaking those in turn.
59 |
60 |
61 |
62 |
63 | Status Details
64 |
65 | - ✅ **continuous-integration/travis-ci/push:** The Travis CI build passed ([Details](http://lol.cat)).
66 | - ❌ **continuous-integration/travis-ci/pr:** The Travis CI build failed ([Details](http://lol.cat)).
67 | - ✅ **continuous-integration/travis-ci/pr:** The build **passed**.
68 |
69 |
70 |
71 | ---
72 |
73 | 14.2.0
74 |
75 | Commits
76 | A list of commits
77 |
78 |
79 |
80 |
81 | FAQ and help
82 |
83 | There is a collection of [frequently asked questions](https://greenkeeper.io/faq.html). If those don’t help, you can always [ask the humans behind Greenkeeper](https://github.com/greenkeeperio/greenkeeper/issues/new).
84 |
85 |
86 | ---
87 |
88 |
89 | Your [Greenkeeper](https://greenkeeper.io) Bot :palm_tree:
90 | "
91 | `;
92 |
--------------------------------------------------------------------------------
/test/content/fail-issue.js:
--------------------------------------------------------------------------------
1 | describe('Fails issue content', async () => {
2 | test('Displays statuses and checks correctly', async () => {
3 | const content = require('../../content/fail-issue')
4 |
5 | const issueContent = content({
6 | version: '14.2.0',
7 | dependencyLink: 'http://lol.cat',
8 | owner: 'gilliam',
9 | repo: 'brazil',
10 | base: 'master',
11 | head: 'feat/wings',
12 | dependency: 'tuttle',
13 | oldVersionResolved: '14.0.0',
14 | dependencyType: 'dependencies',
15 | statuses: [
16 | {
17 | state: 'success',
18 | context: 'continuous-integration/travis-ci/push',
19 | description: 'The Travis CI build passed',
20 | target_url: 'http://lol.cat'
21 | }, {
22 | state: 'failure',
23 | context: 'continuous-integration/travis-ci/pr',
24 | description: 'The Travis CI build failed',
25 | target_url: 'http://lol.cat'
26 | }, {
27 | state: 'success',
28 | context: 'continuous-integration/travis-ci/pr',
29 |         description: 'The build **passed**.'
30 | }
31 | ],
32 | release: '14.2.0',
33 | diffCommits: `
34 | Commits
35 | A list of commits
36 | `,
37 | monorepoGroupName: ''
38 | })
39 | expect(issueContent).toBeTruthy()
40 | expect(issueContent).toMatchSnapshot()
41 | })
42 |
43 | test('Displays monorepo issue correctly', async () => {
44 | const content = require('../../content/fail-issue')
45 |
46 | const issueContent = content({
47 | version: '14.2.0',
48 | dependencyLink: 'http://lol.cat',
49 | owner: 'gilliam',
50 | repo: 'brazil',
51 | base: 'master',
52 | head: 'feat/wings',
53 | dependency: 'tuttle',
54 | oldVersionResolved: '14.0.0',
55 | dependencyType: 'dependencies',
56 | statuses: [
57 | {
58 | state: 'success',
59 | context: 'continuous-integration/travis-ci/pr',
60 |           description: 'The build **passed**.'
61 | }
62 | ],
63 | release: '14.2.0',
64 | diffCommits: `
65 | Commits
66 | A list of commits
67 | `,
68 | monorepoGroupName: 'babel7',
69 | packageUpdateList: ''
70 | })
71 | expect(issueContent).toBeTruthy()
72 | expect(issueContent).toMatchSnapshot()
73 | })
74 | })
75 |
--------------------------------------------------------------------------------
/test/content/initial-pr.js:
--------------------------------------------------------------------------------
1 | const { cleanCache, requireFresh } = require('../helpers/module-cache-helpers')
2 |
3 | describe('initial pr content', async () => {
4 | beforeEach(() => {
5 | jest.resetModules()
6 | delete process.env.HOOKS_HOST
7 | cleanCache('../../lib/env')
8 | })
9 |
10 | test('includes set up guide for hooks if secret was provided', async () => {
11 | const content = requireFresh('../../content/initial-pr')
12 |
13 | const prContent = content({ ghRepo: 'finnp/abc', secret: 'S3CR3T' })
14 | expect(prContent).toMatch('https://hooks.greenkeeper.io/npm')
15 | })
16 |
17 | test('includes the link to the custom hooks host', async () => {
18 | process.env.HOOKS_HOST = 'custom-hooks-host.com'
19 | const content = requireFresh('../../content/initial-pr')
20 |
21 | const prContent = content({ ghRepo: 'finnp/abc', secret: 'S3CR3T' })
22 | expect(prContent).toMatch('custom-hooks-host.com')
23 | })
24 | })
25 |
--------------------------------------------------------------------------------
/test/content/timeout-issue.js:
--------------------------------------------------------------------------------
1 | const { cleanCache, requireFresh } = require('../helpers/module-cache-helpers')
2 |
3 | describe('timeout issue content', async () => {
4 | beforeEach(() => {
5 | jest.resetModules()
6 | delete process.env.GITHUB_HOST
7 | delete process.env.GITHUB_URL
8 | cleanCache('../../lib/env')
9 | })
10 |
11 | test('includes the link to the initial branch on the custom github host', async () => {
12 | process.env.GITHUB_HOST = 'https://enterprise.github/api/v3/'
13 | const content = requireFresh('../../content/timeout-issue')
14 |
15 | const issueContent = content({ fullName: 'finnp/abc' })
16 | // includes the link to the repo at the custom host
17 | expect(issueContent).toMatch(/enterprise\.github\/finnp\/abc/)
18 | })
19 |
20 | test('includes the link to the initial branch on the regular github host', async () => {
21 | const content = requireFresh('../../content/timeout-issue')
22 |
23 | const issueContent = content({ fullName: 'finnp/abc' })
24 | // includes the link to the repo at github.com
25 | expect(issueContent).toMatch(/github\.com\/finnp\/abc/)
26 | })
27 | })
28 |
--------------------------------------------------------------------------------
/test/helpers/enterprise-private-key.js:
--------------------------------------------------------------------------------
1 | // This key is only base64 encoded, not zipped, since this is what GKE3 does. Use this as process.env.PRIVATE_KEY in tests in an enterprise context
2 | module.exports = 'LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlCUEFJQkFBSkJBS0ZoM3pQclRrcUd4ODNUeTJVbGVzTzZ0L0psQXlKMC9vUitHMndQVVk3NndyT1lWYVJlCkp1T2lFTDh4YW5UeFdlTnpsKzlJakxEK0hJdUMvOGp1c0dzQ0F3RUFBUUpBTk80NzNmU0VkaThrbzE2ZTdHclIKakJiV3REcnJQMGJ1SDVpWTkrVkNVOGxuNlB3OVNPdkRvQ3I4Q2JMaFdNTHJoRzEwZVM4MVVWazdvYk1uWkQ3TQpDUUloQU5Ca04yV2llNjFrV3ZVaHQ4Q2NkcitreHplKzY5UUVSdHF3UmpRY2VFUnZBaUVBeGtCU3JhZ0FVRnlaCjZmU01ZcEh2THdkMXlGc1pJS240VlJSYjgwMzM2Y1VDSVFDb0xWaXZ3ek9BdVk3V2FjRzd4Z25ubS9uU3VWZmwKSVJaMWd2RUZPVHl1TFFJaEFJL0VlYkNoVW9qMGZsRmhIS1VtdTFOWmo1cWFKYURBWWV3ZzlZVjlsemtaQWlFQQpwOFoveUpIVzcwUHRCeDZ5RUdSU0ZhT1FxU01NTk84MmMwS2FjdnA1U24wPQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQo='
3 |
--------------------------------------------------------------------------------
/test/helpers/module-cache-helpers.js:
--------------------------------------------------------------------------------
1 | function cleanCache (module) {
2 | delete require.cache[require.resolve(module)]
3 | }
4 |
5 | function requireFresh (module) {
6 | cleanCache(module)
7 | return require(module)
8 | }
9 |
10 | module.exports = {
11 | cleanCache,
12 | requireFresh
13 | }
14 |
--------------------------------------------------------------------------------
/test/helpers/remove-if-exists.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Tries to delete a document in Couch, but does not fail if it doesn't exist
3 | *
4 | * @param {PouchDB} db - the PouchDB object of the database
5 | * @param {(string[]|...string)} ids - the ids of the documents to be deleted
6 | */
7 | module.exports = async function (db, ...ids) {
8 | const idsToDelete = Array.isArray(ids[0]) ? ids[0] : ids
9 | return Promise.all(
10 | idsToDelete.map(async (id) => {
11 | try {
12 | return await db.remove(await db.get(id))
13 | } catch (e) {
14 | if (e.status !== 404) {
15 | throw e
16 | }
17 | }
18 | })
19 | )
20 | }
21 |
--------------------------------------------------------------------------------
/test/jobs/cancel-stripe-subscription.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | const cancelStripeSubscription = require('../../jobs/cancel-stripe-subscription')
7 |
8 | nock.disableNetConnect()
9 | nock.enableNetConnect('localhost')
10 |
11 | afterAll(async () => {
12 | const { payments } = await dbs()
13 | await Promise.all([
14 | removeIfExists(payments, '123')
15 | ])
16 | })
17 |
18 | test('Cancel Stripe Subscription', async () => {
19 | const { payments } = await dbs()
20 | expect.assertions(3)
21 |
22 | nock('https://api.stripe.com/v1')
23 | .delete('/subscriptions/345')
24 | .reply(200, () => {
25 | // Stripe called
26 | expect(true).toBeTruthy()
27 | return {
28 | stripeSubscriptionId: '345'
29 | }
30 | })
31 |
32 | await payments.put({
33 | _id: '123',
34 | plan: 'team',
35 | stripeCustomerId: 'cus_abc',
36 | stripeItemId: 'si_xyz',
37 | stripeSubscriptionId: '345'
38 | })
39 |
40 | await cancelStripeSubscription({
41 | accountId: '123',
42 | stripeSubscriptionId: '345'
43 | })
44 |
45 | const payment = await payments.get('123')
46 | expect(payment.stripeItemId).toBeNull()
47 | expect(payment.stripeSubscriptionId).toBeNull()
48 | })
49 |
--------------------------------------------------------------------------------
/test/jobs/create-initial-subgroup-pr.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | describe('create-initial-subgroup-pr', async () => {
7 | beforeEach(() => {
8 | jest.resetModules()
9 | })
10 |
11 | test('create subgroup initial pr for monorepo', async () => {
12 | const createInitial = require('../../jobs/create-initial-subgroup-pr')
13 | const { repositories } = await dbs()
14 |
15 | await repositories.put({
16 | _id: 'mono',
17 | accountId: '123',
18 | fullName: 'petra/monorepo'
19 | })
20 |
21 | await repositories.put({
22 | _id: 'mono:branch:monorepo1',
23 | type: 'branch',
24 | initial: false,
25 | subgroupInitial: true,
26 | sha: 'monorepo1',
27 | base: 'master',
28 | head: 'greenkeeper/initial-frontend',
29 | processed: false,
30 | depsUpdated: true,
31 | badgeUrl: 'https://badges.greenkeeper.io/petra/monorepo.svg',
32 | createdAt: '2017-01-13T17:33:56.698Z',
33 | updatedAt: '2017-01-13T17:33:56.698Z'
34 | })
35 |
36 | expect.assertions(8)
37 |
38 | nock('https://api.github.com')
39 | .post('/app/installations/11/access_tokens')
40 | .optionally()
41 | .reply(200, {
42 | token: 'secret'
43 | })
44 | .get('/rate_limit')
45 | .optionally()
46 | .reply(200, {})
47 | .get('/repos/petra/monorepo')
48 | .reply(200, {
49 | default_branch: 'custom'
50 | })
51 | .post('/repos/petra/monorepo/statuses/monorepo1')
52 | .reply(201, () => {
53 | // verify status added
54 | expect(true).toBeTruthy()
55 | return {}
56 | })
57 | .post(
58 | '/repos/petra/monorepo/pulls',
59 | ({ head }) => head === 'greenkeeper/initial-frontend'
60 | )
61 | .reply(201, (uri, requestBody) => {
62 | // pull request created
63 | expect(true).toBeTruthy()
64 | const body = JSON.parse(requestBody).body
65 | expect(body).toMatch('This pull request **updates all your dependencies in the group `frontend` to their latest version**')
66 | expect(body).toMatch('How to ignore certain dependencies for this group')
67 | expect(body).not.toMatch('**Important: Greenkeeper will only start watching this repository’s dependency updates after you merge this initial pull request**.')
68 | expect(body).not.toMatch('greenkeeper.ignore')
69 | expect(body).not.toMatch('but only after **you merge this pull request**.')
70 | return {
71 | id: 333,
72 | number: 3
73 | }
74 | })
75 | .post(
76 | '/repos/petra/monorepo/issues/3/labels',
77 | body => body.labels[0] === 'greenkeeper'
78 | )
79 | .reply(201, () => {
80 | // label created
81 | expect(true).toBeTruthy()
82 | return {}
83 | })
84 |
85 | const branchDoc = await repositories.get('mono:branch:monorepo1')
86 | await createInitial({
87 | repository: { id: 'mono' },
88 | branchDoc: branchDoc,
89 | combined: {
90 | state: 'success',
91 | combined: []
92 | },
93 | installationId: 11,
94 | accountId: '123',
95 | groupName: 'frontend'
96 | })
97 | })
98 |
99 | afterAll(async () => {
100 | const { repositories } = await dbs()
101 |
102 | await Promise.all([
103 | removeIfExists(repositories, 'mono', 'mono:branch:monorepo1')
104 | ])
105 | })
106 | })
107 |
--------------------------------------------------------------------------------
/test/jobs/github-event.js:
--------------------------------------------------------------------------------
1 | describe('github-event index', () => {
2 | beforeEach(() => {
3 | jest.clearAllMocks()
4 | jest.resetModules()
5 | })
6 |
7 | test('calls the resolve function', () => {
8 | expect.assertions(1)
9 |
10 | jest.mock('path', () => {
11 | return {
12 | resolve: (dirname, eventType, type) => {
13 | // resolve is called with /foo
14 | expect(`${dirname}/${eventType}/${type}`).toEqual(`${dirname}/github-event/foo`)
15 | return dirname
16 | }
17 | }
18 | })
19 | const githubEvent = require('../../jobs/github-event.js')
20 |
21 | githubEvent({ type: 'foo' })
22 | })
23 |
24 | test('calls the resolve function with action', () => {
25 | expect.assertions(1)
26 |
27 | jest.mock('path', () => {
28 | return {
29 | resolve: (dirname, eventType, type, action) => {
30 | // resolve is called with /foo/bar
31 | expect(`${dirname}/${eventType}/${type}/${action}`).toEqual(`${dirname}/github-event/foo/bar`)
32 | return dirname
33 | }
34 | }
35 | })
36 | const githubEvent = require('../../jobs/github-event.js')
37 |
38 | githubEvent({ type: 'foo', action: 'bar' }, '456')
39 | })
40 | })
41 |
--------------------------------------------------------------------------------
/test/jobs/github-event/installation/created.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 | const _ = require('lodash')
3 |
4 | const dbs = require('../../../../lib/dbs')
5 | const removeIfExists = require('../../../helpers/remove-if-exists')
6 | const createInstallation = require('../../../../jobs/github-event/installation/created')
7 |
8 | afterAll(async () => {
9 | const { installations, repositories } = await dbs()
10 |
11 | await Promise.all([
12 | removeIfExists(installations, '2'),
13 | removeIfExists(repositories, '123', '234')
14 | ])
15 | require('../../../../lib/statsd').close()
16 | })
17 |
18 | test('github-event installation created', async () => {
19 | const { installations, repositories } = await dbs()
20 | nock('https://api.github.com')
21 | .post('/app/installations/1/access_tokens')
22 | .optionally()
23 | .reply(200, {
24 | token: 'secret'
25 | })
26 | .get('/rate_limit')
27 | .optionally()
28 | .reply(200, {})
29 | .get('/installation/repositories?per_page=100')
30 |     .reply(200, {
31 | repositories: [
32 | {
33 | id: 123,
34 | full_name: 'bar/repo',
35 | private: true
36 | }
37 | ] }, {
38 | Link: '; rel="next"'
39 | })
40 | .get('/installation/repositories?per_page=100&page=2')
41 |     .reply(200, {
42 | repositories: [
43 | {
44 | id: 234,
45 | full_name: 'bar/repo2',
46 | private: false
47 | }
48 | ] })
49 |
50 | const newJobs = await createInstallation({
51 | installation: {
52 | id: 1,
53 | account: {
54 | id: 2,
55 | login: 'bar',
56 | type: 'baz'
57 | },
58 | repositories_url: 'https://api.github.com/installation/repositories'
59 | }
60 | })
61 |
62 | expect(newJobs).toHaveLength(2)
63 |
64 | const repos = await Promise.all([
65 | repositories.get('123'),
66 | repositories.get('234')
67 | ])
68 |
69 | expect(_.uniq(_.map(newJobs, 'data.name'))).toContain('create-initial-branch')
70 |
71 | newJobs.forEach((job, i) => {
72 | expect(job.data.repositoryId).toEqual(repos[i]._id)
73 | expect(job.data.accountId).toEqual('2')
74 | })
75 |
76 | const [repo] = repos
77 | expect(repo._id).toEqual('123')
78 | expect(repo.enabled).toBeFalsy()
79 | expect(repo.accountId).toEqual('2')
80 | expect(repo.fullName).toEqual('bar/repo')
81 | expect(repo.private).toBeTruthy()
82 |
83 | const doc = await installations.get('2')
84 | expect(doc.installation).toBe(1)
85 | expect(doc.login).toBe('bar')
86 | expect(doc.type).toBe('baz')
87 | })
88 |
--------------------------------------------------------------------------------
/test/jobs/github-event/installation/deleted.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const githubEvent = require('../../../../jobs/github-event')
3 |
4 | test('github-event installation deleted', async () => {
5 | expect.assertions(3)
6 | const { installations, repositories } = await dbs()
7 |
8 | await Promise.all([
9 | installations.put({
10 | _id: '2',
11 | installation: 1
12 | }),
13 | repositories.put({
14 | _id: '4',
15 | accountId: '2'
16 | })
17 | ])
18 |
19 | const newJobs = await githubEvent({
20 | type: 'installation',
21 | action: 'deleted',
22 | installation: { account: { id: 2 } }
23 | })
24 |
25 | expect(newJobs).toBeFalsy()
26 |
27 | try {
28 | await installations.get('2')
29 | } catch (e) {
30 | // installation is deleted
31 | expect(e.status).toBe(404)
32 | }
33 |
34 | const repos = await repositories.query('by_account', {
35 | key: '2'
36 | })
37 |
38 | // repositories are deleted
39 | expect(repos.rows).toHaveLength(0)
40 | })
41 |
--------------------------------------------------------------------------------
/test/jobs/github-event/installation_repositories/removed.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const githubEvent = require('../../../../jobs/github-event')
3 |
4 | test('github-event installation_repositories removed', async () => {
5 | const { repositories } = await dbs()
6 |
7 | await repositories.bulkDocs([
8 | { _id: '22', accountId: '2' },
9 | { _id: '23', accountId: '2' },
10 | { _id: '24', accountId: '2' },
11 | { _id: '25', accountId: '2' },
12 | { _id: '26', accountId: '3' }
13 | ])
14 |
15 | const newJobs = await githubEvent({
16 | type: 'installation_repositories',
17 | action: 'removed',
18 | installation: { account: { id: 2 } },
19 | repositories_removed: [{ id: 22 }, { id: 25 }, { id: 26 }]
20 | })
21 | expect(newJobs).toBeFalsy()
22 |
23 | const repos = await repositories.query('by_account', {
24 | key: '2'
25 | })
26 | expect(repos.rows).toHaveLength(2)
27 | })
28 |
--------------------------------------------------------------------------------
/test/jobs/github-event/issues/closed.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const removeIfExists = require('../../../helpers/remove-if-exists')
3 | const closeIssue = require('../../../../jobs/github-event/issues/closed')
4 |
5 | afterAll(async () => {
6 | const { repositories } = await dbs()
7 | await removeIfExists(repositories, '42:issue:666')
8 | })
9 |
10 | test('github-event issues closed', async () => {
11 | const { repositories } = await dbs()
12 |
13 | await repositories.put({
14 | _id: '42:issue:666',
15 | dependency: '@finnpauls/dep',
16 | version: '2.2.2',
17 | repositoryId: '42'
18 | })
19 |
20 | const newJob = await closeIssue({
21 | issue: {
22 | number: 666
23 | },
24 | repository: {
25 | id: 42,
26 | full_name: 'test/test',
27 | owner: {
28 | id: 1234
29 | }
30 | }
31 | })
32 |
33 | expect(newJob).toBeFalsy()
34 | const issue = await repositories.get('42:issue:666')
35 |
36 | expect(issue.state).toEqual('closed')
37 | expect(issue.updatedAt).toBeTruthy()
38 | })
39 |
--------------------------------------------------------------------------------
/test/jobs/github-event/marketplace_purchase/cancelled.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const removeIfExists = require('../../../helpers/remove-if-exists.js')
3 | const cancelPurchase = require('../../../../jobs/github-event/marketplace_purchase/cancelled')
4 |
5 | describe('marketplace canceled', () => {
6 | afterAll(async () => {
7 | const { payments } = await dbs()
8 | await removeIfExists(payments, '444')
9 | })
10 |
11 | test('change entry in payments database to `free`', async () => {
12 | const { payments } = await dbs()
13 | await payments.put({
14 | _id: '444',
15 | plan: 'team'
16 | })
17 |
18 | const newJobs = await cancelPurchase({
19 | marketplace_purchase: {
20 | account: {
21 | type: 'Organization',
22 | id: 444,
23 | login: 'GitHub'
24 | },
25 | plan: {
26 | id: 9,
27 | name: 'Team',
28 | description: 'A really, super professional-grade CI solution',
29 | monthly_price_in_cents: 9999,
30 | yearly_price_in_cents: 11998,
31 | price_model: 'flat-rate',
32 | unit_name: null,
33 | bullets: [
34 | 'This is the first bullet of the plan',
35 | 'This is the second bullet of the plan'
36 | ]
37 | }
38 | }
39 | })
40 |
41 | expect(newJobs).toBeFalsy()
42 |
43 | const payment = await payments.get('444')
44 | expect(payment.plan).toEqual('free')
45 | })
46 | })
47 |
--------------------------------------------------------------------------------
/test/jobs/github-event/marketplace_purchase/changed.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const removeIfExists = require('../../../helpers/remove-if-exists.js')
3 | const changePurchase = require('../../../../jobs/github-event/marketplace_purchase/changed')
4 |
5 | describe('marketplace changed', () => {
6 | afterAll(async () => {
7 | const { payments } = await dbs()
8 | await removeIfExists(payments, '444')
9 | })
10 |
11 | test('change entry in payments database', async () => {
12 | const { payments } = await dbs()
13 | await payments.put({
14 | _id: '444',
15 | plan: 'team'
16 | })
17 |
18 | const newJobs = await changePurchase({
19 | marketplace_purchase: {
20 | account: {
21 | type: 'Organization',
22 | id: 444,
23 | login: 'GitHub'
24 | },
25 | plan: {
26 | id: 9,
27 | name: 'Open Source',
28 | description: 'A really, super professional-grade CI solution',
29 | monthly_price_in_cents: 9999,
30 | yearly_price_in_cents: 11998,
31 | price_model: 'flat-rate',
32 | unit_name: null,
33 | bullets: [
34 | 'This is the first bullet of the plan',
35 | 'This is the second bullet of the plan'
36 | ]
37 | }
38 | }
39 | })
40 |
41 | expect(newJobs).toBeFalsy()
42 |
43 | const payment = await payments.get('444')
44 | expect(payment.plan).toEqual('opensource')
45 | })
46 | })
47 |
--------------------------------------------------------------------------------
/test/jobs/github-event/repository/archived.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const removeIfExists = require('../../../helpers/remove-if-exists')
3 |
4 | beforeEach(() => {
5 | jest.resetModules()
6 | })
7 |
8 | afterAll(async () => {
9 | const { repositories } = await dbs()
10 | await removeIfExists(repositories, 'publicRepoToBeArchived', 'privateRepoToBeArchived')
11 | })
12 |
13 | test('github-event public repository archived', async () => {
14 | const repoArchived = require('../../../../jobs/github-event/repository/archived')
15 | const { repositories } = await dbs()
16 |
17 | await repositories.put({
18 | _id: 'publicRepoToBeArchived',
19 | enabled: true,
20 | private: false,
21 | accountId: 'greebles'
22 | })
23 |
24 | const newJob = await repoArchived({
25 | repository: {
26 | id: 'publicRepoToBeArchived',
27 | full_name: 'test/test',
28 | owner: {
29 | id: 1234
30 | },
31 | private: false
32 | }
33 | })
34 |
35 | expect(newJob).toBeFalsy()
36 | const repo = await repositories.get('publicRepoToBeArchived')
37 |
38 | expect(repo.enabled).toBeFalsy()
39 | expect(repo.archived).toBeTruthy()
40 | })
41 |
42 | test('github-event private repository archived', async () => {
43 | expect.assertions(4)
44 | jest.mock('../../../../lib/payments', () => {
45 | const payments = require.requireActual('../../../../lib/payments')
46 | payments.maybeUpdatePaymentsJob = async () => {
47 | // This must be called if the repo is private
48 | expect(true).toBeTruthy()
49 | // pretend this is a private repo with stripe payment
50 | return Promise.resolve({
51 | data: {
52 | name: 'update-payments',
53 | accountId: 'muppets'
54 | }
55 | })
56 | }
57 | return payments
58 | })
59 | const repoArchived = require('../../../../jobs/github-event/repository/archived')
60 | const { repositories } = await dbs()
61 |
62 | await repositories.put({
63 | _id: 'privateRepoToBeArchived',
64 | enabled: true,
65 | private: true,
66 | accountId: 'muppets'
67 | })
68 |
69 | const newJob = await repoArchived({
70 | repository: {
71 | id: 'privateRepoToBeArchived',
72 | full_name: 'test/test',
73 | owner: {
74 | id: 1234
75 | },
76 | private: true
77 | }
78 | })
79 |
80 | expect(newJob).toBeTruthy()
81 | const repo = await repositories.get('privateRepoToBeArchived')
82 |
83 | expect(repo.enabled).toBeFalsy()
84 | expect(repo.archived).toBeTruthy()
85 | })
86 |
--------------------------------------------------------------------------------
/test/jobs/github-event/repository/privatized.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../../../lib/dbs')
2 | const removeIfExists = require('../../../helpers/remove-if-exists')
3 |
4 | beforeEach(() => {
5 | jest.resetModules()
6 | })
7 |
8 | afterAll(async () => {
9 | const { repositories } = await dbs()
10 | await removeIfExists(repositories, 'publicRepoToBePrivatized')
11 | })
12 |
13 | test('github-event public repository privatized with stripe account', async () => {
14 | const repoPrivatized = require('../../../../jobs/github-event/repository/privatized')
15 | const { repositories } = await dbs()
16 |
17 | await repositories.put({
18 | _id: 'publicRepoToBePrivatized',
19 | enabled: true,
20 | private: false,
21 | accountId: 'mumble'
22 | })
23 |
24 | jest.mock('../../../../lib/payments', () => {
25 | const payments = require.requireActual('../../../../lib/payments')
26 | payments.maybeUpdatePaymentsJob = async () => {
27 | // pretend this is a private repo with stripe payment
28 | return Promise.resolve({
29 | data: {
30 | name: 'update-payments',
31 | accountId: 'mumble'
32 | }
33 | })
34 | }
35 | return payments
36 | })
37 | const newJob = await repoPrivatized({
38 | repository: {
39 | id: 'publicRepoToBePrivatized',
40 | full_name: 'mumble/bumble',
41 | owner: {
42 | id: 1234
43 | },
44 | private: true
45 | }
46 | })
47 |
48 | // update-payment job
49 | expect(newJob).toBeTruthy()
50 | const repo = await repositories.get('publicRepoToBePrivatized')
51 | expect(repo.enabled).toBeFalsy()
52 | expect(repo.private).toBeTruthy()
53 | })
54 |
55 | test('github-event public repository privatized without stripe account', async () => {
56 | const repoPrivatized = require('../../../../jobs/github-event/repository/privatized')
57 | const { repositories } = await dbs()
58 |
59 | await repositories.put({
60 | _id: 'publicRepoToBePrivatizedNoStripe',
61 | enabled: true,
62 | private: false,
63 | accountId: 'mumble'
64 | })
65 |
66 | jest.mock('../../../../lib/payments', () => {
67 | const payments = require.requireActual('../../../../lib/payments')
68 | payments.maybeUpdatePaymentsJob = async () => {
69 | return Promise.resolve({
70 | data: {
71 | name: 'payment-required',
72 | accountId: 'mumble',
73 | repositoryId: 'elbmum'
74 | }
75 | })
76 | }
77 | return payments
78 | })
79 | const newJob = await repoPrivatized({
80 | repository: {
81 | id: 'publicRepoToBePrivatizedNoStripe',
82 | full_name: 'mumble/bumble',
83 | owner: {
84 | id: 1234
85 | },
86 | private: true
87 | }
88 | })
89 |
90 | // update-payment job
91 | expect(newJob).toBeTruthy()
92 | const repo = await repositories.get('publicRepoToBePrivatizedNoStripe')
93 | expect(repo.enabled).toBeFalsy()
94 | expect(repo.private).toBeTruthy()
95 | })
96 |
--------------------------------------------------------------------------------
/test/jobs/initial-timeout-pr.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | const initTimeoutPr = require('../../jobs/initial-timeout-pr')
7 |
8 | describe('initial-timeout-pr', () => {
9 | beforeAll(async () => {
10 | const { installations, repositories } = await dbs()
11 | await installations.put({
12 | _id: '10101',
13 | installation: 37
14 | })
15 | await repositories.put({
16 | _id: '666',
17 | fullName: 'finnp/test'
18 | })
19 | })
20 |
21 | afterAll(async () => {
22 | const { repositories, installations } = await dbs()
23 | await Promise.all([
24 | removeIfExists(installations, '10101', '1338'),
25 |       removeIfExists(repositories, '666', '666:issue:10', '6666:pr:11')
26 | ])
27 | })
28 |
29 | test('create', async () => {
30 | const githubMock = nock('https://api.github.com')
31 | .post('/app/installations/37/access_tokens')
32 | .reply(200, {
33 | token: 'secret'
34 | })
35 | .get('/rate_limit')
36 | .reply(200, {})
37 | .post('/repos/finnp/test/issues', ({ title, body, labels }) => {
38 | expect(title).toBeTruthy()
39 | expect(body).toBeTruthy()
40 | expect(labels).toContain('greenkeeper')
41 | return true
42 | })
43 | .reply(201, () => {
44 | // issue created
45 | expect(true).toBeTruthy()
46 | return {
47 | number: 10
48 | }
49 | })
50 |
51 | const newJobs = await initTimeoutPr({
52 | repositoryId: 666,
53 | accountId: 10101,
54 | repoSlug: 'finnp/test'
55 | })
56 | expect(newJobs).toBeFalsy()
57 |
58 | const { repositories } = await dbs()
59 | const issue = await repositories.get('666:issue:10')
60 | expect(issue.initial).toBeTruthy()
61 | expect(issue.type).toEqual('issue')
62 | expect(issue.number).toBe(10)
63 | expect(issue.repositoryId).toBe(666)
64 | githubMock.done()
65 | })
66 |
67 | test('already exists', async () => {
68 | nock('https://api.github.com') // no request should be made
69 | expect.assertions(2)
70 |
71 | const { installations, repositories } = await dbs()
72 | await installations.put({
73 | _id: '1338',
74 | installation: 38
75 | })
76 | await repositories.put({
77 | _id: '6666:pr:11',
78 | type: 'pr',
79 | repositoryId: '6666',
80 | head: 'greenkeeper/initial',
81 | fullName: 'giraffe/spaceship'
82 | })
83 |
84 | const newJobs = await initTimeoutPr({
85 | repositoryId: 6666,
86 | accountId: 1338,
87 | repoSlug: 'giraffe/spaceship'
88 | })
89 |
90 | expect(newJobs).toBeFalsy()
91 |
92 | try {
93 | await repositories.get('6666:issue:10')
94 | } catch (e) {
95 | // throws
96 | expect(true).toBeTruthy()
97 | }
98 | })
99 | })
100 |
--------------------------------------------------------------------------------
/test/jobs/monorepo-supervisor.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../lib/dbs')
2 | const removeIfExists = require('../helpers/remove-if-exists')
3 |
4 | describe('monorepo supervisor', () => {
5 | beforeEach(() => {
6 | jest.resetModules()
7 | jest.clearAllMocks()
8 |     // Mock away sending admin notifications so we don't get spammed when tests run.
9 | jest.mock('../../lib/comms', () => {
10 | const lib = require.requireActual('../../lib/comms')
11 | lib.notifyAdmin = () => {}
12 | return lib
13 | })
14 | })
15 | afterAll(async () => {
16 | const { npm } = await dbs()
17 | await Promise.all([
18 | removeIfExists(npm, 'monorepo:wibbly', 'monorepo:wobbly')
19 | ])
20 | })
21 |
22 | test('start 2 jobs for 2 pending releases', async () => {
23 | jest.mock('../../lib/monorepo', () => {
24 | const lib = require.requireActual('../../lib/monorepo')
25 | lib.pendingMonorepoReleases = () => {
26 | return [{
27 | _id: 'monorepo:wobbly',
28 | distTags: {
29 | latest: '2.0.0'
30 | },
31 | versions: {
32 | '2.0.0': {
33 | gitHead: 'timey'
34 | },
35 | '1.0.0': {
36 | gitHead: 'wimey'
37 | }
38 | },
39 | dependency: 'wobbly'
40 | },
41 | {
42 | _id: 'monorepo:wibbly',
43 | distTags: {
44 | latest: '2.0.0'
45 | },
46 | versions: {
47 | '2.0.0': {
48 | gitHead: 'smurf'
49 | },
50 | '1.0.0': {
51 | gitHead: 'sky'
52 | }
53 | },
54 | dependency: 'wibbly'
55 | }]
56 | }
57 | lib.getMonorepoGroupNameForPackage = (dependencyName) => {
58 | return dependencyName === 'wobbly' ? 'timelord' : 'tardis'
59 | }
60 | return lib
61 | })
62 | const monorepoSupervisor = require('../../jobs/monorepo-supervisor')
63 | const newJob = await monorepoSupervisor()
64 | expect(newJob).toBeTruthy()
65 | expect(newJob[0].data.name).toEqual('registry-change')
66 | expect(newJob[0].data.dependency).toEqual('wobbly')
67 | expect(newJob[1].data.dependency).toEqual('wibbly')
68 | })
69 |
70 | test('no pending releases', async () => {
71 | jest.mock('../../lib/monorepo', () => {
72 | const lib = require.requireActual('../../lib/monorepo')
73 | lib.pendingMonorepoReleases = () => {
74 | return []
75 | }
76 | return lib
77 | })
78 | const monorepoSupervisor = require('../../jobs/monorepo-supervisor')
79 | const newJob = await monorepoSupervisor()
80 | expect(newJob).toHaveLength(0)
81 | })
82 | })
83 |
--------------------------------------------------------------------------------
/test/jobs/payment-required.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | describe('payment-required', () => {
7 | beforeAll(async () => {
8 | const { repositories, installations } = await dbs()
9 |
10 | await installations.put({
11 | _id: '111',
12 | installation: 11,
13 | plan: 'free'
14 | })
15 |
16 | await repositories.put({
17 | _id: '1_payment-required',
18 | accountId: '111',
19 | fullName: 'jacoba/private',
20 | enabled: true,
21 | private: true
22 | })
23 | })
24 |
25 | beforeEach(() => {
26 | jest.resetModules()
27 | jest.clearAllMocks()
28 | })
29 |
30 | afterAll(async () => {
31 | const { repositories, installations } = await dbs()
32 | await Promise.all([
33 |       removeIfExists(repositories, '1_payment-required', '1_payment-required:issue:10'),
34 | removeIfExists(installations, '111')
35 | ])
36 | })
37 |
38 | test('create payment-required issue', async () => {
39 | expect.assertions(9)
40 | const githubMock = nock('https://api.github.com')
41 | .post('/app/installations/11/access_tokens')
42 | .optionally()
43 | .reply(200, {
44 | token: 'secret'
45 | })
46 | .get('/rate_limit')
47 | .optionally()
48 | .reply(200)
49 | .post('/repos/jacoba/private/issues', ({ title, body, labels }) => {
50 | expect(title).toEqual('Payment required')
51 | expect(body).toMatch(/🚨 You privatised your repo. 🚨/)
52 | expect(body).toMatch(/Please enter your payment information at/)
53 | expect(labels[0]).toEqual('greenkeeper')
54 | return true
55 | })
56 | .reply(201, () => {
57 | return {
58 | number: 10
59 | }
60 | })
61 |
62 | const paymentRequired = require('../../jobs/payment-required')
63 | const newJob = await paymentRequired({ accountId: '111', repositoryId: '1_payment-required' })
64 | expect(newJob).toBeFalsy()
65 |
66 | const { repositories } = await dbs()
67 | const issue = await repositories.get('1_payment-required:issue:10')
68 | expect(issue.initial).toBeFalsy()
69 | expect(issue.type).toEqual('issue')
70 | expect(issue.number).toBe(10)
71 | expect(issue.repositoryId).toBe('1_payment-required')
72 | githubMock.done()
73 | })
74 | })
75 |
--------------------------------------------------------------------------------
/test/jobs/send-stripe-cancel-survey.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 | const dbs = require('../../lib/dbs')
3 |
4 | const timeToWaitAfterTests = 1000
5 | const waitFor = (milliseconds) => {
6 | return new Promise((resolve) => {
7 | setTimeout(resolve, milliseconds)
8 | })
9 | }
10 |
11 | nock.disableNetConnect()
12 | nock.enableNetConnect('localhost')
13 |
14 | describe('send-stripe-cancel-survey', () => {
15 | beforeEach(async () => {
16 | const { payments } = await dbs()
17 | await payments.put({
18 | _id: '1',
19 | stripeSubscriptionId: null
20 | })
21 | await payments.put({
22 | _id: '2',
23 | stripeSubscriptionId: 'hello'
24 | })
25 |
26 | jest.clearAllMocks()
27 | })
28 |
29 | afterEach(async () => {
30 | nock.cleanAll()
31 | const { payments } = await dbs()
32 | await payments.remove(await payments.get('1'))
33 | await payments.remove(await payments.get('2'))
34 | })
35 |
36 | jest.mock('nodemailer', () => {
37 | return {
38 | createTransport: () => {
39 | return {
40 | sendMail (message, callback) {
41 | callback(null, {})
42 | }
43 | }
44 | }
45 | }
46 | })
47 | const sendStripeCancelSurvey = require('../../jobs/send-stripe-cancel-survey')
48 |
49 | test('exit if the paymentsDoc has a stripeSubscriptionId', async () => {
50 | expect.assertions(1)
51 | nock('https://api.stripe.com')
52 | .get('/v1/subscriptions/oldSubscriptionId')
53 | .optionally()
54 | .reply(200, () => {
55 | // should not have contacted stripe
56 | expect(false).toBeFalsy()
57 | return {}
58 | })
59 |
60 | await sendStripeCancelSurvey({
61 | accountId: '2',
62 | stripeSubscriptionId: 'oldSubscriptionId'
63 | })
64 | expect(true).toBeTruthy()
65 | await waitFor(timeToWaitAfterTests)
66 | })
67 |
68 | test('exit if canceled_at in the stripe subscription is null', async () => {
69 | expect.assertions(1)
70 |
71 | nock('https://api.stripe.com')
72 | .get('/v1/subscriptions/oldSubscriptionId')
73 | .reply(200, {
74 | canceled_at: null,
75 | customer: 'julia'
76 | })
77 | .get('/v1/customers/julia')
78 | .optionally()
79 | .reply(200, () => {
80 | // should not have contacted stripe
81 | expect(false).toBeFalsy()
82 | return {}
83 | })
84 |
85 | await sendStripeCancelSurvey({
86 | accountId: '1',
87 | stripeSubscriptionId: 'oldSubscriptionId'
88 | })
89 | expect(true).toBeTruthy()
90 |
91 | await waitFor(timeToWaitAfterTests)
92 | })
93 |
94 | test('exit if stripe customer has no email', async () => {
95 | expect.assertions(1)
96 |
97 | nock('https://api.stripe.com')
98 | .get('/v1/subscriptions/oldSubscriptionId')
99 | .reply(200, {
100 | canceled_at: 'timestamp',
101 | customer: 'julia'
102 | })
103 | .get('/v1/customers/julia')
104 | .reply(200, {
105 | email: ''
106 | })
107 |
108 | await sendStripeCancelSurvey({
109 | accountId: '1',
110 | stripeSubscriptionId: 'oldSubscriptionId'
111 | })
112 | expect(true).toBeTruthy()
113 |
114 | await waitFor(timeToWaitAfterTests)
115 | })
116 |
117 | test('send email', async () => {
118 | expect.assertions(2)
119 |
120 | jest.resetModules()
121 | jest.mock('nodemailer', () => {
122 | return {
123 | createTransport: () => {
124 | return {
125 | sendMail (message, callback) {
126 | expect(true).toBeTruthy()
127 | callback(null, {})
128 | }
129 | }
130 | }
131 | }
132 | })
133 | const sendStripeCancelSurvey = require('../../jobs/send-stripe-cancel-survey')
134 |
135 | nock('https://api.stripe.com')
136 | .get('/v1/subscriptions/oldSubscriptionId')
137 | .reply(200, {
138 | canceled_at: 'timestamp',
139 | customer: 'julia'
140 | })
141 | .get('/v1/customers/julia')
142 | .reply(200, {
143 | email: 'julia@julia.com'
144 | })
145 |
146 | await sendStripeCancelSurvey({
147 | accountId: '1',
148 | stripeSubscriptionId: 'oldSubscriptionId'
149 | })
150 | expect(true).toBeTruthy()
151 |
152 | await waitFor(timeToWaitAfterTests)
153 | })
154 | })
155 |
--------------------------------------------------------------------------------
/test/jobs/stripe-event.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | nock.disableNetConnect()
7 | nock.enableNetConnect('localhost')
8 |
9 | afterAll(async () => {
10 | const { payments } = await dbs()
11 | await removeIfExists(payments, '1')
12 | })
13 |
14 | test('enqueue email job when receiving stripe cancel event', async () => {
15 | const { payments } = await dbs()
16 | const stripeEvent = require('../../jobs/stripe-event')
17 |
18 | // "api.stripe.com:443/v1/events/stripe_test_Id"
19 | nock('https://api.stripe.com')
20 | .get('/v1/events/stripe_test_Id')
21 | .reply(200, {
22 | type: 'customer.subscription.deleted',
23 | data: {
24 | object: {
25 | id: 'stripe_test_SubscriptionId'
26 | }
27 | }
28 |
29 | })
30 | await payments.put({
31 | _id: '1',
32 | stripeSubscriptionId: 'stripe_test_SubscriptionId'
33 | })
34 |
35 | expect.assertions(3)
36 | const job = await stripeEvent({
37 | id: 'stripe_test_Id'
38 | })
39 |
40 | expect(job.data.name).toEqual('send-stripe-cancel-survey')
41 | expect(job.data.stripeSubscriptionId).toEqual('stripe_test_SubscriptionId')
42 | expect(job.data.accountId).toEqual('1')
43 | })
44 |
--------------------------------------------------------------------------------
/test/jobs/update-payments.js:
--------------------------------------------------------------------------------
1 | const dbs = require('../../lib/dbs')
2 | const removeIfExists = require('../helpers/remove-if-exists')
3 |
4 | describe('update-payments', () => {
5 | beforeAll(async () => {
6 | const { repositories, installations } = await dbs()
7 |
8 | await installations.put({
9 | _id: '111',
10 | installation: 11,
11 | plan: 'free'
12 | })
13 |
14 | await repositories.put({
15 | _id: '1_update-payments',
16 | accountId: '111',
17 | fullName: 'finnp/private1',
18 | enabled: true,
19 | private: true
20 | })
21 | await repositories.put({
22 | _id: '2_update-payments',
23 | accountId: '111',
24 | fullName: 'finnp/private2',
25 | enabled: true,
26 | private: true
27 | })
28 | await repositories.put({
29 | _id: '3_update-payments',
30 | accountId: '111',
31 | fullName: 'finnp/public',
32 | enabled: true,
33 | private: false
34 | })
35 | await repositories.put({
36 | _id: '4',
37 | accountId: '11',
38 | fullName: 'other/private',
39 | enabled: true,
40 | private: true
41 | })
42 | })
43 |
44 | beforeEach(() => {
45 | jest.resetModules()
46 | jest.clearAllMocks()
47 | })
48 |
49 | afterAll(async () => {
50 | const { repositories, installations } = await dbs()
51 | await Promise.all([
52 | removeIfExists(repositories, '1_update-payments', '2_update-payments', '3_update-payments', '4'),
53 | removeIfExists(installations, '111')
54 | ])
55 | })
56 |
57 | test('update stripe', async () => {
58 | expect.assertions(3)
59 |
60 | // To mock only specific modules, use require.requireActual to restore the original modules,
61 | // then overwrite the one you want to mock
62 | jest.mock('../../lib/payments', () => {
63 | const payments = require.requireActual('../../lib/payments')
64 | payments.getActiveBilling = async () => {
65 | return {
66 | plan: 'personal',
67 | stripeSubscriptionId: 'stripe123',
68 | stripeItemId: 'si123'
69 | }
70 | }
71 | return payments
72 | })
73 |
74 |     jest.mock('stripe', () => key => {
75 | return {
76 | subscriptionItems: {
77 | update: (stripeItemId, { quantity }) => {
78 | expect(quantity).toBe(2)
79 | expect(stripeItemId).toEqual('si123')
80 | }
81 | }
82 | }
83 | })
84 | const updatePayments = require('../../jobs/update-payments')
85 |
86 | const newJob = await updatePayments({ accountId: '111' })
87 | expect(newJob).toBeFalsy()
88 | })
89 |
90 | test('ignore if stripeSubscriptionId is missing', async () => {
91 | expect.assertions(1)
92 |
93 | jest.mock('../../lib/payments', () => {
94 | const payments = require.requireActual('../../lib/payments')
95 | payments.getActiveBilling = async () => {
96 | return {
97 | plan: 'org'
98 | }
99 | }
100 | return payments
101 | })
102 |
103 |     jest.mock('stripe', () => key => {
104 | return {
105 | subscriptionItems: {
106 | update: (stripeItemId, { quantity }) => {
107 | console.log('fail: stripe was called')
108 | expect(false).toBeFalsy()
109 | }
110 | }
111 | }
112 | })
113 | const updatePayments = require('../../jobs/update-payments')
114 |
115 | const newJob = await updatePayments({ accountId: '111' })
116 | expect(newJob).toBeFalsy()
117 | })
118 | })
119 |
--------------------------------------------------------------------------------
/test/lib/__snapshots__/lockfile.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`getNewLockfile with changed package-lock.json 1`] = `
4 | Object {
5 | "lock": "{\\"name\\":\\"greenkeeper\\",\\"version\\":\\"1.0.0\\",\\"lockfileVersion\\":1,\\"requires\\":true,\\"dependencies\\":{\\"jest\\": {\\"version\\": \\"22.4.2\\"}}}",
6 | "packageJson": "{\\"name\\": \\"greenkeeper\\",\\"devDependencies\\": {\\"jest\\": \\"^22.4.3\\"}}",
7 | "type": "npm",
8 | }
9 | `;
10 |
11 | exports[`getNewLockfile with package-lock.json 1`] = `
12 | Object {
13 | "lock": "{\\"name\\":\\"greenkeeper\\",\\"version\\":\\"1.0.0\\",\\"lockfileVersion\\":1,\\"requires\\":true,\\"dependencies\\":{\\"jest\\": {\\"version\\": \\"22.4.2\\"}}}",
14 | "packageJson": "{\\"name\\": \\"greenkeeper\\",\\"devDependencies\\": {\\"jest\\": \\"^22.4.2\\"}}",
15 | "type": "npm",
16 | }
17 | `;
18 |
--------------------------------------------------------------------------------
/test/lib/create-branch/__snapshots__/create-branch.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`create branch with lockfiles change a package.json and generate its lockfile for pnpm 1`] = `
4 | Object {
5 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
6 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
7 | "repositoryTokens": "",
8 | "type": "pnpm",
9 | }
10 | `;
11 |
12 | exports[`create branch with lockfiles change one file (package.json) and generate its lockfile (yarn) 1`] = `
13 | Object {
14 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
15 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
16 | "repositoryTokens": "",
17 | "type": "yarn",
18 | }
19 | `;
20 |
21 | exports[`create branch with lockfiles change one file (package.json) and generate its lockfile 1`] = `
22 | Object {
23 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
24 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
25 | "repositoryTokens": "",
26 | "type": "npm",
27 | }
28 | `;
29 |
30 | exports[`create branch with lockfiles change one file (package.json) and generate its lockfile with tokens 1`] = `
31 | Object {
32 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
33 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
34 | "repositoryTokens": "{\\"npm\\":\\"12345\\",\\"github\\":\\"54321\\"}",
35 | "type": "npm",
36 | }
37 | `;
38 |
39 | exports[`create branch with lockfiles change two files (package.json, frontend/package.json) and generate their lockfiles 1`] = `
40 | Object {
41 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
42 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
43 | "repositoryTokens": "",
44 | "type": "npm",
45 | }
46 | `;
47 |
48 | exports[`create branch with lockfiles change two files (package.json, frontend/package.json) and generate their lockfiles 2`] = `
49 | Object {
50 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
51 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
52 | "repositoryTokens": "",
53 | "type": "npm",
54 | }
55 | `;
56 |
57 | exports[`create branch with lockfiles don’t generate the same lockfile multiple times 1`] = `
58 | Object {
59 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\",\\"west\\":\\"1.1.1\\"}}",
60 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\",\\"west\\":\\"1.5.0\\"}}",
61 | "repositoryTokens": "",
62 | "type": "npm",
63 | }
64 | `;
65 |
66 | exports[`create branch with lockfiles handle exec server 500 gracefully 1`] = `
67 | Object {
68 | "lock": "{\\"devDependencies\\":{\\"jest\\":\\"1.1.1\\"}}",
69 | "packageJson": "{\\"devDependencies\\":{\\"jest\\":\\"1.2.0\\"}}",
70 | "repositoryTokens": "",
71 | "type": "npm",
72 | }
73 | `;
74 |
--------------------------------------------------------------------------------
/test/lib/create-branch/__snapshots__/yarn-workspaces.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`create branch with yarn workspace lockfiles handle a complex yarn workspace 1`] = `
4 | Object {
5 | "lock": "{\\"very-excellent-lockfile\\":\\"nah\\"}",
6 | "packages": Object {
7 | "non-root/docs/package.json": Object {
8 | "dependencies": Object {
9 | "react": "2.0.0",
10 | },
11 | },
12 | "non-root/jobs/first-job/package.json": Object {
13 | "dependencies": Object {
14 | "react": "2.0.0",
15 | },
16 | },
17 | "non-root/jobs/second-job/package.json": Object {
18 | "dependencies": Object {
19 | "react": "2.0.0",
20 | },
21 | },
22 | "non-root/outside-workspace/package.json": Object {
23 | "dependencies": Object {
24 | "nothing": "1.0.0",
25 | },
26 | },
27 | "non-root/package.json": Object {
28 | "dependencies": Object {
29 | "nothing": "1.0.0",
30 | },
31 | "workspaces": Array [
32 | "jobs/*",
33 | "docs",
34 | ],
35 | },
36 | },
37 | "repositoryTokens": "",
38 | "type": "yarn",
39 | "workspaceRoot": "non-root/package.json",
40 | }
41 | `;
42 |
43 | exports[`create branch with yarn workspace lockfiles handle a simple yarn workspace 1`] = `
44 | Object {
45 | "lock": "{\\"very-excellent-lockfile\\":\\"nah\\"}",
46 | "packages": Object {
47 | "jobs/first-job/package.json": Object {
48 | "dependencies": Object {
49 | "react": "2.0.0",
50 | },
51 | },
52 | "package.json": Object {
53 | "dependencies": Object {
54 | "react": "2.0.0",
55 | },
56 | "workspaces": Array [
57 | "jobs/*",
58 | ],
59 | },
60 | },
61 | "repositoryTokens": "",
62 | "type": "yarn",
63 | "workspaceRoot": "package.json",
64 | }
65 | `;
66 |
--------------------------------------------------------------------------------
/test/lib/dbs.js:
--------------------------------------------------------------------------------
1 | const lolex = require('lolex')
2 | const dbs = require('../../lib/dbs')
3 |
4 | describe('dbs access', () => {
5 | test('getLogsDb memoization', async () => {
6 | const clock = lolex.install({ now: new Date() })
7 |
8 | const logsDb = dbs.getLogsDb()
9 |
10 | // validate db instance is equal
11 | const nextLogsDb = dbs.getLogsDb()
12 | // we should use nextLogsDb.name here, but a Node bug
13 |     // in Proxies prevents us from getting at the original
14 | // value here.
15 | expect(nextLogsDb).toBe(logsDb)
16 |
17 |     // jump 45 days to make sure we land in the next month
18 | clock.tick(60 * 60 * 24 * 45 * 1000)
19 | const nextMonthsLogsDb = dbs.getLogsDb()
20 | // we should use nextLogsDb.name here, but a Node bug
21 |     // in Proxies prevents us from getting at the original
22 | // value here.
23 | expect(nextMonthsLogsDb).not.toBe(logsDb)
24 |
25 | clock.uninstall()
26 | })
27 | })
28 |
--------------------------------------------------------------------------------
/test/lib/delete-branches.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const dbs = require('../../lib/dbs')
4 | const removeIfExists = require('../helpers/remove-if-exists')
5 |
6 | const deleteBranches = require('../../lib/delete-branches')
7 |
8 | nock.disableNetConnect()
9 | nock.enableNetConnect('localhost')
10 |
11 | afterAll(async () => {
12 | const { repositories } = await dbs()
13 | await Promise.all([
14 | removeIfExists(repositories, '6464:branch:deadbeef', '6465:branch:deadbeef')
15 | ])
16 | })
17 |
18 | test('deleteBranches', async () => {
19 | const { repositories } = await dbs()
20 |
21 | await repositories.put({
22 | _id: '6464:branch:deadbeef',
23 | type: 'branch',
24 | repositoryId: '6464',
25 | head: 'greenkeeper/standard-9.0.0',
26 | dependency: 'standard',
27 | version: '9.0.0',
28 | dependencyType: 'dependencies'
29 | })
30 |
31 | nock('https://api.github.com')
32 | .post('/app/installations/123/access_tokens')
33 | .optionally()
34 | .reply(200, {
35 | token: 'secret'
36 | })
37 | .get('/rate_limit')
38 | .optionally()
39 | .reply(200, {})
40 | .delete('/repos/brot/lecker/git/refs/heads/greenkeeper/standard-9.0.0')
41 | .reply(200)
42 |
43 | const branch = await repositories.get('6464:branch:deadbeef')
44 | await deleteBranches(
45 | { installationId: 123, fullName: 'brot/lecker', repositoryId: '6464' },
46 | branch
47 | )
48 | const branchAfterDelete = await repositories.get('6464:branch:deadbeef')
49 | expect(branchAfterDelete.referenceDeleted).toBeTruthy()
50 | })
51 |
52 | test('deleteBranches failed to delete', async () => {
53 | const { repositories } = await dbs()
54 |
55 | await repositories.put({
56 | _id: '6465:branch:deadbeef',
57 | type: 'branch',
58 | repositoryId: '6465',
59 | head: 'greenkeeper/standard-9.0.0',
60 | dependency: 'standard',
61 | version: '9.0.0',
62 | dependencyType: 'dependencies'
63 | })
64 |
65 | nock('https://api.github.com')
66 | .post('/app/installations/123/access_tokens')
67 | .optionally()
68 | .reply(200, {
69 | token: 'secret'
70 | })
71 | .get('/rate_limit')
72 | .optionally()
73 | .reply(200, {})
74 | .delete('/repos/brot/lecker/git/refs/heads/greenkeeper/standard-9.0.0')
75 | .reply(500)
76 |
77 | const branch = await repositories.get('6465:branch:deadbeef')
78 | await deleteBranches(
79 | { installationId: 123, fullName: 'brot/lecker', repositoryId: '6465' },
80 | branch
81 | )
82 | const branchAfterDelete = await repositories.get('6465:branch:deadbeef')
83 | expect(branchAfterDelete.referenceDeleted).toBeFalsy()
84 | })
85 |
--------------------------------------------------------------------------------
/test/lib/diff-package-json.js:
--------------------------------------------------------------------------------
1 | const diff = require('../../lib/diff-package-json')
2 |
3 | test('no change', () => {
4 | const a = {
5 | name: 'a',
6 | dependencies: {
7 | lodash: '^1.0.0'
8 | }
9 | }
10 | const b = {
11 | name: 'b',
12 | dependencies: {
13 | lodash: '^1.0.0'
14 | }
15 | }
16 | expect(diff(a, b)).toEqual({})
17 | })
18 |
19 | test('update dependency', () => {
20 | const a = {
21 | dependencies: {
22 | lodash: '^1.0.0'
23 | }
24 | }
25 | const b = {
26 | dependencies: {
27 | lodash: '^2.0.0'
28 | }
29 | }
30 |
31 | const expected = {
32 | dependencies: {
33 | lodash: {
34 | change: 'modified',
35 | before: '^1.0.0',
36 | after: '^2.0.0'
37 | }
38 | }
39 | }
40 | expect(diff(a, b)).toMatchObject(expected)
41 | })
42 |
43 | test('add dependency', () => {
44 | const a = {
45 | dependencies: {
46 | lodash: '^1.0.0'
47 | }
48 | }
49 | const b = {
50 | dependencies: {
51 | lodash: '^1.0.0',
52 | async: '^1.0.0'
53 | }
54 | }
55 | const expected = {
56 | dependencies: {
57 | async: {
58 | change: 'added',
59 | before: undefined,
60 | after: '^1.0.0'
61 | }
62 | }
63 | }
64 | expect(diff(a, b)).toMatchObject(expected)
65 | })
66 |
67 | test('remove dependency', () => {
68 | const a = {
69 | dependencies: {
70 | lodash: '^1.0.0',
71 | async: '^1.0.0'
72 | }
73 | }
74 | const b = {
75 | dependencies: {
76 | lodash: '^1.0.0'
77 | }
78 | }
79 | const expected = {
80 | dependencies: {
81 | async: {
82 | change: 'removed',
83 | before: '^1.0.0',
84 | after: undefined
85 | }
86 | }
87 | }
88 | expect(diff(a, b)).toMatchObject(expected)
89 | })
90 |
--------------------------------------------------------------------------------
/test/lib/enterprise-setup.js:
--------------------------------------------------------------------------------
1 | const lolex = require('lolex')
2 |
3 | const enterpriseSetup = require('../../lib/enterprise-setup')
4 |
5 | describe('enterprise setup', () => {
6 | test('creates LogsDb for actual and next month', async () => {
7 | const clock = lolex.install()
8 | const enterpriseDbs = await enterpriseSetup()
9 |
10 | expect(enterpriseDbs).toHaveLength(3)
11 | expect(enterpriseDbs[0].db_name).toEqual('_users')
12 | expect(enterpriseDbs[1].db_name).toEqual('logs-1970-01-staging')
13 | expect(enterpriseDbs[2].db_name).toEqual('logs-1970-02-staging')
14 |
15 | clock.uninstall()
16 | })
17 |
18 | test('removes LogsDbs if there are older than 12 month', async () => {
19 | const clock = lolex.install()
20 |
21 | let enterpriseDbs = await enterpriseSetup()
22 | expect(enterpriseDbs).toHaveLength(3)
23 | expect(enterpriseDbs[0].db_name).toEqual('_users')
24 | expect(enterpriseDbs[1].db_name).toEqual('logs-1970-01-staging')
25 | expect(enterpriseDbs[2].db_name).toEqual('logs-1970-02-staging')
26 |
27 |     // advance one month at a time through the rest of the year
28 | for (let i = 3; i <= 12; i++) {
29 | clock.tick(60 * 60 * 24 * 31 * 1000)
30 | enterpriseDbs = await enterpriseSetup()
31 | let dateI = i
32 | if (i <= 9) dateI = '0' + i
33 | expect(enterpriseDbs[i].db_name).toEqual(`logs-1970-${dateI}-staging`)
34 | }
35 |
36 | clock.tick(60 * 60 * 24 * 31 * 1000)
37 | enterpriseDbs = await enterpriseSetup()
38 | expect(enterpriseDbs[0].db_name).toEqual('_users')
39 | expect(enterpriseDbs[1].db_name).toEqual('logs-1970-02-staging')
40 | expect(enterpriseDbs[12].db_name).toEqual('logs-1971-01-staging')
41 |
42 | clock.uninstall()
43 | })
44 | })
45 |
--------------------------------------------------------------------------------
/test/lib/get-diff-commits.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const getDiffCommits = require('../../lib/get-diff-commits')
4 |
5 | test('get-diff-commits', async () => {
6 | expect.assertions(4)
7 |
8 | nock('https://api.github.com')
9 | .post('/app/installations/123/access_tokens')
10 | .optionally()
11 | .reply(200, {
12 | token: 'secret'
13 | })
14 | .get('/rate_limit')
15 | .optionally()
16 | .reply(200, {})
17 | .get('/repos/finnp/test/compare/dead...beef')
18 | .reply(200, () => {
19 | // GitHub endpoint called
20 | expect(true).toBeTruthy()
21 | return {
22 | total_commits: 1,
23 | behind_by: 0,
24 | html_url: '...',
25 | commits: [
26 | {
27 | sha: 'deadbeef',
28 | commit: {
29 | message: 'abccommitmessage'
30 | }
31 | }
32 | ]
33 | }
34 | })
35 | .post('/markdown', ({ text }) => {
36 | expect(text).toMatch(/abccommitmessage/)
37 | return true
38 | })
39 | .reply(200, 'body ', {
40 | 'content-type': 'text/html;charset=utf-8'
41 | })
42 |
43 | const diff = await getDiffCommits({
44 | installationId: '123',
45 | owner: 'finnp',
46 | repo: 'test',
47 | base: 'dead',
48 | head: 'beef'
49 | })
50 | expect(diff).toMatch(/Commits<\/summary>/)
51 | expect(diff).toMatch(/https:\/\/urls.greenkeeper.io/)
52 | })
53 |
--------------------------------------------------------------------------------
/test/lib/get-infos.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | describe('get-infos', () => {
4 | test('get changelog', async () => {
5 | jest.mock('../../lib/get-diff-commits', () => () => {
6 | return 'diff commits'
7 | })
8 | const { getInfos } = require('../../lib/get-infos')
9 |
10 | nock('https://api.github.com')
11 | .post('/app/installations/123/access_tokens')
12 | .optionally()
13 | .reply(200, {
14 | token: 'secret'
15 | })
16 | .get('/rate_limit')
17 | .optionally()
18 | .reply(200, {})
19 | .get('/repos/finnp/test/releases/tags/v2.2.2')
20 | .reply(200, {
21 | body_html: 'Cool new features! also fixed #1',
22 | name: 'thename',
23 | html_url: 'http://github.com/link/to/thing'
24 | })
25 |
26 | const infos = await getInfos({
27 | installationId: '123',
28 | dependency: '@finnpauls/dep',
29 | version: '2.2.2',
30 | diffBase: '1.0.0',
31 | relevantDependencies: [],
32 | versions: {
33 | '1.0.0': {
34 | gitHead: 'deadbeef100'
35 | },
36 | '2.2.2': {
37 | gitHead: 'deadbeef222',
38 | repository: {
39 | url: 'https://github.com/finnp/test'
40 | }
41 | }
42 | }
43 | })
44 |
45 | expect(infos.diffCommits).toEqual('diff commits')
46 | expect(infos.release).toMatch(/Cool new features/)
47 | expect(infos.release).toMatch(/thename/)
48 | expect(infos.release).toMatch(/https:\/\/urls.greenkeeper.io\/finnp\/test\/issues\/1/)
49 | })
50 |
51 | test('get changelog for monorepo dependency', async () => {
52 | jest.mock('../../lib/get-diff-commits', () => () => {
53 | return 'diff commits'
54 | })
55 | const { getInfos } = require('../../lib/get-infos')
56 |
57 | nock('https://api.github.com')
58 | .post('/app/installations/123/access_tokens')
59 | .optionally()
60 | .reply(200, {
61 | token: 'secret'
62 | })
63 | .get('/rate_limit')
64 | .optionally()
65 | .reply(200, {})
66 | .get('/repos/pouchdb/pouchdb/releases/tags/v2.2.2')
67 | .reply(200, {
68 | body_html: 'Cool new features! also fixed #1',
69 | name: 'thename',
70 | html_url: 'http://github.com/link/to/thing'
71 | })
72 |
73 | const infos = await getInfos({
74 | installationId: '123',
75 | dependency: 'pouchdb-core',
76 | version: '2.2.2',
77 | diffBase: '1.0.0',
78 | monorepoGroupName: 'pouchdb',
79 | versions: {
80 | '1.0.0': {
81 | gitHead: 'deadbeef100'
82 | },
83 | '2.2.2': {
84 | gitHead: 'deadbeef222',
85 | repository: {
86 | url: 'https://github.com/pouchdb/pouchdb'
87 | }
88 | }
89 | }
90 | })
91 |
92 | expect(infos.diffCommits).toEqual('diff commits')
93 | expect(infos.release).toMatch(/Cool new features/)
94 | expect(infos.release).toMatch(/thename/)
95 | expect(infos.release).toMatch(/https:\/\/urls.greenkeeper.io\/pouchdb\/pouchdb\/issues\/1/)
96 | })
97 | })
98 |
--------------------------------------------------------------------------------
/test/lib/get-ranged-version.js:
--------------------------------------------------------------------------------
1 | const { extractPrefix } = require('../../lib/get-ranged-version')
2 | const _ = require('lodash')
3 |
4 | test('get ranged version', () => {
5 | const assertions = {
6 | latest: '^',
7 | next: '^',
8 | 'no-semver': '',
9 | '1.0.0': '',
10 | '~1.0.0': '~',
11 | '^1.0.0': '^',
12 | '>1.0.0': '>=',
13 | '>=1.0.0': '>=',
14 | '*': '>=',
15 | '1.x.x': '^',
16 | '1.*.*': '^',
17 | '1.X.*': '^',
18 | '20.0.*': '~',
19 | '1.0.x': '~',
20 | '<1.0.0': '',
21 | 'x.X.x': '>=',
22 | '*.*.*': '>=',
23 | 'x.X.*': '>=',
24 | x: '>=',
25 | 'x.*': '>=',
26 | '*.*': '>=',
27 | '1.x': '^',
28 | '1': '^',
29 | '1.0': '~',
30 | '': '>='
31 | }
32 |
33 |   expect.assertions(Object.keys(assertions).length)
34 | _.each(assertions, (prefix, range) => expect(extractPrefix(range)).toEqual(prefix))
35 | })
36 |
--------------------------------------------------------------------------------
/test/lib/get-release.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | const getRelease = require('../../lib/get-release')
4 |
5 | test('get-release from tag with v prefix', async () => {
6 | nock('https://api.github.com')
7 | .post('/app/installations/123/access_tokens')
8 | .optionally()
9 | .reply(200, {
10 | token: 'secret'
11 | })
12 | .get('/rate_limit')
13 | .optionally()
14 | .reply(200, {})
15 | .get('/repos/finnp/test/releases/tags/v1.33.7')
16 | .reply(200, {
17 | body_html: 'body ',
18 | name: 'v1.33.7'
19 | })
20 |
21 | const notes = await getRelease({
22 | installationId: '123',
23 | owner: 'finnp',
24 | repo: 'test',
25 | version: '1.33.7'
26 | })
27 | expect(notes).toMatch(/Release Notes for v1.33.7<\/summary>/)
28 | expect(notes).toMatch(/https:\/\/urls.greenkeeper.io/)
29 | })
30 |
31 | test('get-release from tag with version as name', async () => {
32 | nock('https://api.github.com')
33 | .post('/app/installations/123/access_tokens')
34 | .optionally()
35 | .reply(200, {
36 | token: 'secret'
37 | })
38 | .get('/rate_limit')
39 | .optionally()
40 | .reply(200, {})
41 | .get('/repos/finnp/test/releases/tags/v1.33.7')
42 | .reply(404)
43 | .get('/repos/finnp/test/releases/tags/1.33.7')
44 | .reply(200, {
45 | body_html: 'body ',
46 | name: '1.33.7'
47 | })
48 |
49 | const notes = await getRelease({
50 | installationId: '123',
51 | owner: 'finnp',
52 | repo: 'test',
53 | version: '1.33.7'
54 | })
55 |
56 | expect(notes).toMatch(/Release Notes for 1.33.7<\/summary>/)
57 | expect(notes).toMatch(/https:\/\/urls.greenkeeper.io/)
58 | })
59 |
60 | test('get-release from tag at sha', async () => {
61 | nock('https://api.github.com')
62 | .post('/app/installations/123/access_tokens')
63 | .optionally()
64 | .reply(200, {
65 | token: 'secret'
66 | })
67 | .get('/rate_limit')
68 | .optionally()
69 | .reply(200, {})
70 | .get('/repos/finnp/test/releases/tags/v1.33.7')
71 | .reply(404)
72 | .get('/repos/finnp/test/releases/tags/1.33.7')
73 | .reply(404)
74 | .get('/repos/finnp/test/git/tags/deadbeef')
75 | .reply(200, {
76 | tag: 'lolwat'
77 | })
78 | .get('/repos/finnp/test/releases/tags/lolwat')
79 | .reply(200, {
80 | body_html: 'body ',
81 | tag_name: '1.33.7'
82 | })
83 |
84 | const notes = await getRelease({
85 | installationId: '123',
86 | owner: 'finnp',
87 | repo: 'test',
88 | version: '1.33.7',
89 | sha: 'deadbeef'
90 | })
91 |
92 | expect(notes).toMatch(/Release Notes for 1.33.7<\/summary>/)
93 | expect(notes).toMatch(/https:\/\/urls.greenkeeper.io/)
94 | })
95 |
--------------------------------------------------------------------------------
/test/lib/get-token.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 |
3 | nock.disableNetConnect()
4 | nock.enableNetConnect('localhost')
5 |
6 | test('get token', async () => {
7 | nock('https://api.github.com', {
8 | reqheaders: { Authorization: 'Bearer jwtToken' }
9 | })
10 | .post('/app/installations/1337/access_tokens')
11 | .reply(200, { token: 'the-token' })
12 |
13 | nock('https://api.github.com', {
14 | reqheaders: { Authorization: 'token the-token' }
15 | })
16 | .get('/rate_limit')
17 | .reply(401)
18 | .get('/rate_limit')
19 | .reply(200)
20 |
21 | jest.mock('zlib', () => {
22 | return {
23 | gunzipSync: () => 'cert'
24 | }
25 | }).mock('jsonwebtoken', () => {
26 | return {
27 | sign: (payload, cert) => {
28 | if (cert === 'cert') return 'jwtToken'
29 | }
30 | }
31 | })
32 | const getToken = require('../../lib/get-token')
33 |
34 | const token = (await getToken(1337)).token
35 | expect(token).toEqual('the-token') // uncached
36 | expect(token).toEqual('the-token') // cached
37 | })
38 |
--------------------------------------------------------------------------------
/test/lib/github.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 | const simple = require('simple-mock')
3 |
4 | nock.disableNetConnect()
5 | nock.enableNetConnect('localhost')
6 |
7 | test('parse github host', async () => {
8 | expect.assertions(1)
9 |
10 | nock('https://enterprise.github')
11 | .get('/api/v3/repos/greenkeeperio/greenkeeper')
12 | .reply(200, () => {
13 | expect(true).toBeTruthy()
14 | })
15 |
16 | simple.mock(process.env, 'GITHUB_HOST', 'https://enterprise.github')
17 |
18 | const Github = require('../../lib/github')
19 | const github = Github()
20 |
21 | try {
22 | await github.repos.get({ owner: 'greenkeeperio', repo: 'greenkeeper' })
23 | } catch (error) {
24 | expect(error).toBeFalsy()
25 | }
26 |
27 | simple.restore()
28 | })
29 |
--------------------------------------------------------------------------------
/test/lib/lockfile.js:
--------------------------------------------------------------------------------
1 | const nock = require('nock')
2 | const { getNewLockfile } = require('../../lib/lockfile')
3 |
4 | describe('getNewLockfile', () => {
5 | beforeEach(() => {
6 | jest.resetModules()
7 | })
8 |
9 | nock.disableNetConnect()
10 | nock.enableNetConnect('localhost:5984')
11 | const lock = '{"name":"greenkeeper","version":"1.0.0","lockfileVersion":1,"requires":true,"dependencies":{"jest": {"version": "22.4.2"}}}'
12 |
13 | test('with changed package-lock.json', async () => {
14 | const { getNewLockfile } = require('../../lib/lockfile')
15 | const packageJson = '{"name": "greenkeeper","devDependencies": {"jest": "^22.4.3"}}'
16 | const newLock = '{"name":"greenkeeper","version":"1.0.0","lockfileVersion":1,"requires":true,"dependencies":{"jest": {"version": "22.4.3"}}}'
17 |
18 | nock('http://localhost:1234')
19 | .post('/', (body) => {
20 | expect(typeof body.type).toBe('string')
21 | expect(typeof body.packageJson).toBe('string')
22 | expect(typeof body.lock).toBe('string')
23 | expect(body).toMatchSnapshot()
24 | return true
25 | })
26 | .reply(200, () => {
27 | return {
28 | ok: true,
29 | contents: newLock
30 | }
31 | })
32 |
33 | await getNewLockfile({ packageJson, lock, type: 'npm' })
34 | })
35 |
36 | test('with package-lock.json', async () => {
37 | const packageJson = '{"name": "greenkeeper","devDependencies": {"jest": "^22.4.2"}}'
38 |
39 | nock('http://localhost:1234')
40 | .post('/', (body) => {
41 | expect(typeof body.type).toBe('string')
42 | expect(typeof body.packageJson).toBe('string')
43 | expect(typeof body.lock).toBe('string')
44 | expect(body).toMatchSnapshot()
45 | return true
46 | })
47 | .reply(200, () => ({ ok: false }))
48 |
49 | await getNewLockfile({ packageJson, lock, type: 'npm' })
50 | })
51 |
52 | test('with package-lock.json with Network Error', async () => {
53 | const httpTraffic = nock('http://localhost:1234')
54 | .post('/', (body) => {
55 | return true
56 | })
57 | .replyWithError({ code: 'ETIMEDOUT' })
58 | .post('/', (body) => {
59 | return true
60 | })
61 | .reply(200, () => ({ ok: true }))
62 |
63 | const packageJson = '{"name": "greenkeeper","devDependencies": {"jest": "^22.4.2"}}'
64 | await getNewLockfile({ packageJson, lock, type: 'npm' })
65 | expect(httpTraffic.isDone()).toBeTruthy()
66 | expect(httpTraffic.pendingMocks().length).toEqual(0)
67 | })
68 | })
69 |
--------------------------------------------------------------------------------
/test/lib/updated-at.js:
--------------------------------------------------------------------------------
1 | const updatedAt = require('../../lib/updated-at')
2 |
3 | test('set updatedAt and createdAt to timestamp', () => {
4 | const doc = { _id: '1' }
5 | const updatedDoc = updatedAt(doc)
6 |
7 | expect(updatedDoc._id).toEqual('1')
8 | expect(updatedDoc.createdAt).toHaveLength(24)
9 | expect(updatedDoc.updatedAt).toHaveLength(24)
10 | })
11 |
12 | test('set updatedAt to timestamp', () => {
13 | const doc = {
14 | _id: '1',
15 | createdAt: 'now',
16 | updatedAt: 'now'
17 | }
18 | const updatedDoc = updatedAt(doc)
19 | expect(updatedDoc._id).toEqual('1')
20 | expect(updatedDoc.createdAt).toEqual('now')
21 | expect(updatedDoc.updatedAt).toHaveLength(24)
22 | })
23 |
24 | test('set updatedAt to timestamp array', () => {
25 | const doc = {
26 | _id: '1',
27 | createdAt: 'now',
28 | updatedAt: ['now']
29 | }
30 | const updatedDoc = updatedAt(doc)
31 | expect(updatedDoc._id).toEqual('1')
32 | expect(updatedDoc.createdAt).toEqual('now')
33 | expect(updatedDoc.updatedAt instanceof Array).toBeTruthy()
34 | expect(updatedDoc.updatedAt).toHaveLength(2)
35 | expect(updatedDoc.updatedAt[0]).toEqual('now')
36 | expect(updatedDoc.updatedAt[1]).toHaveLength(24)
37 | })
38 |
39 | test('set updatedAt to timestamp object array', () => {
40 | const doc = { _id: '1' }
41 | const updatedDoc = updatedAt(doc, 'update user')
42 | expect(updatedDoc._id).toEqual('1')
43 | expect(updatedDoc.createdAt).toHaveLength(24)
44 | expect(updatedDoc.updatedAt instanceof Array).toBeTruthy()
45 | expect(updatedDoc.updatedAt).toHaveLength(1)
46 | expect(updatedDoc.updatedAt[0].timestamp).toHaveLength(24)
47 | expect(updatedDoc.updatedAt[0].event).toEqual('update user')
48 | })
49 |
--------------------------------------------------------------------------------
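The assertions above are enough to reconstruct the behaviour of lib/updated-at: createdAt is only set when missing, and updatedAt is either a fresh 24-character ISO timestamp, appended to an existing history array, or wrapped as { timestamp, event } when an event label is passed. A hedged reconstruction, which the real module may differ from in detail:

    // Hypothetical reconstruction of lib/updated-at from the tests above.
    module.exports = function updatedAt (doc, event) {
      const timestamp = new Date().toJSON() // 24-character ISO string, e.g. 2018-01-01T00:00:00.000Z
      const entry = event ? { timestamp, event } : timestamp

      const nextUpdatedAt = Array.isArray(doc.updatedAt)
        ? [...doc.updatedAt, entry] // existing history array: append
        : event
          ? [entry] // an event label always produces an array
          : timestamp // plain timestamp otherwise

      return {
        ...doc,
        createdAt: doc.createdAt || timestamp, // only set on first write
        updatedAt: nextUpdatedAt
      }
    }
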
/test/utils/__snapshots__/initial-branch-utils.js.snap:
--------------------------------------------------------------------------------
1 | // Jest Snapshot v1, https://goo.gl/fbAQLP
2 |
3 | exports[`initial branch utils getDependenciesFromPackageFiles duplicate dependency across dep type 1`] = `
4 | Array [
5 | Object {
6 | "name": "@finnpauls/dep",
7 | "type": "devDependencies",
8 | "version": "1.0.0",
9 | },
10 | ]
11 | `;
12 |
13 | exports[`initial branch utils getDependenciesFromPackageFiles single devDependency 1`] = `
14 | Array [
15 | Object {
16 | "name": "@finnpauls/dep",
17 | "type": "devDependencies",
18 | "version": "1.0.0",
19 | },
20 | ]
21 | `;
22 |
23 | exports[`initial branch utils getDependenciesFromPackageFiles workspace 1`] = `
24 | Array [
25 | Object {
26 | "name": "@finnpauls/blup",
27 | "type": "devDependencies",
28 | "version": "1.0.0",
29 | },
30 | Object {
31 | "name": "florp",
32 | "type": "dependencies",
33 | "version": "1.2.3",
34 | },
35 | ]
36 | `;
37 |
--------------------------------------------------------------------------------
/utils/registry-change-utils.js:
--------------------------------------------------------------------------------
1 | const _ = require('lodash')
2 |
3 | async function getAllAccounts (installations, results) {
4 | const limit = 200
5 | let skip = 0
6 | let allAccounts = []
7 | const accountIds = _.compact(_.map(_.flattenDeep(results), 'value.accountId'))
8 |
9 | // send multiple smaller allDocs requests and paginate them.
10 | while (true) {
11 | const partialAccounts = await module.exports.getAllDocs(installations, skip, limit, accountIds)
12 | if (partialAccounts.length === 0) break
13 |
14 | skip += limit
15 | allAccounts = [...allAccounts, ...partialAccounts]
16 | }
17 |
18 | return _.keyBy(_.compact(_.map(allAccounts, 'doc')), '_id')
19 | }
20 |
21 | async function getAllDocs (db, skip, limit, accountIds) {
22 | return (await db.allDocs({
23 | keys: accountIds,
24 | limit,
25 | skip,
26 | include_docs: true
27 | })).rows
28 | }
29 |
30 | async function getAllMonorepoDocs (repositories, keysToFindMonorepoDocs) {
31 | const limit = 200
32 | let skip = 0
33 | let monorepoDocs = []
34 |
35 | // send multiple smaller query requests and paginate them.
36 | while (true) {
37 | const partialMonorepoDocs = await module.exports.queryDocs(repositories, skip, limit, keysToFindMonorepoDocs)
38 | if (partialMonorepoDocs.length === 0) break
39 |
40 | skip += limit
41 | monorepoDocs = [...monorepoDocs, ...partialMonorepoDocs]
42 | }
43 | return monorepoDocs
44 | }
45 |
46 | async function queryDocs (db, skip, limit, monorepoKeys) {
47 | return (await db.query('by_full_name', {
48 | keys: monorepoKeys,
49 | limit,
50 | skip,
51 | include_docs: true
52 | })).rows
53 | }
54 |
55 | module.exports = {
56 | getAllAccounts,
57 | getAllDocs,
58 | getAllMonorepoDocs,
59 | queryDocs
60 | }
61 |
--------------------------------------------------------------------------------
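Two notes on the helpers above. First, the loops go through module.exports.getAllDocs and module.exports.queryDocs rather than the local bindings, presumably so tests can stub the page-fetching step on the exported object. Second, both helpers page through CouchDB in fixed windows of 200 rows until an empty page comes back. A hedged usage sketch, assuming PouchDB handles pointing at the installations and repositories databases; the view rows are illustrative only:

    // Hypothetical usage sketch for utils/registry-change-utils.js.
    const PouchDB = require('pouchdb')
    const { getAllAccounts, getAllMonorepoDocs } = require('./registry-change-utils')

    async function example () {
      const installations = new PouchDB('http://localhost:5984/installations')
      const repositories = new PouchDB('http://localhost:5984/repositories')

      // Stand-in for rows from a repositories view whose values carry an accountId.
      const viewResults = [[{ value: { accountId: '123' } }], [{ value: { accountId: '456' } }]]
      const accounts = await getAllAccounts(installations, viewResults)
      // accounts is an object keyed by account _id, each value being the account doc.

      // Monorepo docs are looked up through the by_full_name view.
      const monorepoDocs = await getAllMonorepoDocs(repositories, ['owner/repo'])
      return { accounts, monorepoDocs }
    }
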