├── .tool-versions
├── test
├── mocha.opts
├── fixtures-good-archive.zip
├── fixtures-permissions-wildcard.json
├── fixtures-permissions-missing-path.json
├── fixtures-bad-scm-url-archive.zip
├── github-test.js
├── mocha.env
├── permissions-test.js
├── pipeline-test.js
└── index-test.js
├── .editorconfig
├── .github
├── renovate.json
├── release-drafter.yml
└── workflows
│ └── release-drafter.yml
├── Dockerfile
├── lib
├── config.js
├── pipeline.js
├── github.js
└── permissions.js
├── Makefile
├── .eslintrc.cjs
├── package.json
├── .dockerignore
├── Jenkinsfile
├── .eslintignore
├── .gitignore
├── README.adoc
├── index.js
└── IncrementalsPlugin.js
/.tool-versions:
--------------------------------------------------------------------------------
1 | nodejs 21.7.3
2 |
--------------------------------------------------------------------------------
/test/mocha.opts:
--------------------------------------------------------------------------------
1 | --require test/mocha.env
2 |
--------------------------------------------------------------------------------
/test/fixtures-good-archive.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenkins-infra/incrementals-publisher/main/test/fixtures-good-archive.zip
--------------------------------------------------------------------------------
/test/fixtures-permissions-wildcard.json:
--------------------------------------------------------------------------------
1 | {
2 | "jenkinsci/bom": [
3 | "io/jenkins/tools/bom/bom-*"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/test/fixtures-permissions-missing-path.json:
--------------------------------------------------------------------------------
1 | {
2 | "jenkinsci/bom": [
3 | "io/jenkins/tools/invalid-path"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/test/fixtures-bad-scm-url-archive.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jenkins-infra/incrementals-publisher/main/test/fixtures-bad-scm-url-archive.zip
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: http://EditorConfig.org
2 | root = true
3 |
4 | [*.{js,jsx,json}]
5 | end_of_line = lf
6 | insert_final_newline = true
7 | charset = utf-8
8 | indent_style = space
9 | indent_size = 2
10 |
--------------------------------------------------------------------------------
/.github/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:base",
5 | ":semanticCommitsDisabled",
6 | "schedule:monthly"
7 | ],
8 | "rebaseWhen": "conflicted"
9 | }
10 |
--------------------------------------------------------------------------------
/test/github-test.js:
--------------------------------------------------------------------------------
import assert from "assert";
import github from "../lib/github.js";

// Placeholder suite: currently only asserts that the public helper functions
// are exported. Real behavioral tests still need to be written (see FIXME).
describe("The GitHub helpers", function () {
  it("FIXME - make tests", function () {
    // Existence checks only — these do not exercise any GitHub API behavior.
    assert.ok(github.commitExists);
    assert.ok(github.createStatus);
  });
});
10 |
--------------------------------------------------------------------------------
/.github/release-drafter.yml:
--------------------------------------------------------------------------------
1 | # See https://github.com/jenkinsci/.github/blob/master/.github/release-drafter.adoc
2 | _extends: jenkinsci/.github
3 | # Required for https://github.com/jenkins-infra/pipeline-library/blob/master/vars/buildDockerAndPublishImage.groovy
4 | name-template: 'next'
5 | tag-template: 'next'
6 |
--------------------------------------------------------------------------------
/test/mocha.env:
--------------------------------------------------------------------------------
// Test-environment variables loaded by mocha (--require test/mocha.env);
// lib/config.js resolves its settings from process.env, so these fakes keep
// the tests from talking to real services.
Object.assign(process.env, {
  NODE_ENV: "test",
  JENKINS_HOST: "https://ci.jenkins.io/",
  INCREMENTAL_URL: "https://fake-repo.jenkins-ci.org/incrementals/",
  GITHUB_APP_ID: "fake-github-app-id",
  GITHUB_APP_PRIVATE_KEY: "fake-github-app-key",
  ARTIFACTORY_KEY: "invalid-key",
  JENKINS_AUTH: "fake-jenkins-auth",
});
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:21.7.3
2 |
3 | ENV NODE_ENV production
4 | ENV PORT 3000
5 | EXPOSE 3000
6 |
7 | # Create app directory
8 | WORKDIR /usr/src/app
9 |
10 | # Install app dependencies
11 | # A wildcard is used to ensure both package.json AND package-lock.json are copied
12 | # where available (npm@5+)
13 | COPY package*.json ./
14 |
15 | RUN npm install
16 |
17 | # Bundle app source
18 | COPY . .
19 |
20 | CMD [ "node", "index.js" ]
21 |
--------------------------------------------------------------------------------
/.github/workflows/release-drafter.yml:
--------------------------------------------------------------------------------
1 | name: Release Drafter
2 | on:
3 | workflow_dispatch:
4 | push:
5 | release:
6 | types: [released]
7 | # Only allow 1 release-drafter build at a time to avoid creating multiple "next" releases
8 | concurrency: "release-drafter"
9 | jobs:
10 | update_release_draft:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: release-drafter/release-drafter@3f0f87098bd6b5c5b9a36d49c41d998ea58f9348 # v6
14 | env:
15 | # This token is generated automatically by default in GitHub Actions: no need to create it manually
16 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
17 |
--------------------------------------------------------------------------------
/lib/config.js:
--------------------------------------------------------------------------------
// Default value for every supported setting. An empty string means the
// setting is optional and disabled unless provided via the environment.
const DEFAULTS = {
  GITHUB_APP_ID: "invalid-dummy-id",
  GITHUB_APP_PRIVATE_KEY: "invalid-dummy-secret",
  PERMISSIONS_URL: "https://ci.jenkins.io/job/Infra/job/repository-permissions-updater/job/master/lastSuccessfulBuild/artifact/json/github.index.json",
  JENKINS_HOST: "https://ci.jenkins.io/",
  INCREMENTAL_URL: "https://repo.jenkins-ci.org/incrementals/",
  ARTIFACTORY_KEY: "invalid-key",
  JENKINS_AUTH: "",
  PORT: "3000",
  BUILD_METADATA_URL: "",
  FOLDER_METADATA_URL: "",
  ARCHIVE_URL: "",
  PRESHARED_KEY: "",
};

const config = {};

for (const [key, fallback] of Object.entries(DEFAULTS)) {
  // Lazy getter: each read consults process.env, so variables set after
  // module load (e.g. by test setup) are still honored.
  Object.defineProperty(config, key, {
    get() {return process.env[key] || fallback},
    enumerable: true,
    configurable: false
  });
}

export default config;
25 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | #!make
2 | .DEFAULT_GOAL := build
3 |
4 | ifndef REGISTRY
5 | override REGISTRY = halkeye
6 | endif
7 | NAME := incrementals-publisher
8 | VERSION := latest
9 | NAME_VERSION := $(NAME):$(VERSION)
10 | TAGNAME := $(REGISTRY)/$(NAME_VERSION)
11 |
12 | .PHONY: build
13 | build: ## Build docker image
14 | docker build -t $(TAGNAME) .
15 |
.PHONY: push
push: ## push to docker hub
	docker push $(TAGNAME)

# FIX: was ".PHONY: push" (copy-paste); the phony declaration must name "kill"
.PHONY: kill
kill: ## kill the running process
	docker kill $(NAME)
23 |
24 | .SHELL := /bin/bash
25 | .PHONY: run
26 | run: ## run the docker hub
27 | docker run \
28 | -it \
29 | --rm \
30 | -p 3000:3000 \
31 | --name $(NAME) \
32 | $(TAGNAME)
33 |
34 | .PHONY: test
35 | test: ## run tests outside of docker
36 | [ -e node_modules ] || npm install
37 | npm run test
38 |
39 | .PHONY: help
40 | help:
41 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
42 |
--------------------------------------------------------------------------------
/.eslintrc.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | "env": {
3 | "es6": true,
4 | "node": true
5 | },
6 | "extends": [
7 | "eslint:recommended",
8 | "plugin:import/recommended",
9 | ],
10 | "globals": {
11 | "Atomics": "readonly",
12 | "SharedArrayBuffer": "readonly"
13 | },
14 | // needed for import.meta
15 | "parser": "@babel/eslint-parser",
16 | "parserOptions": {
17 | "ecmaVersion": 2022,
18 | "sourceType": "module",
19 | "requireConfigFile": false,
20 | "babelOptions": {
21 | "babelrc": false,
22 | "configFile": false,
23 | "presets": ["@babel/preset-env"],
24 | },
25 | },
26 | "overrides": [
27 | {
28 | files: [
29 | "test/**.js"
30 | ],
31 | env: {
32 | mocha: true
33 | },
34 | plugins: ["mocha"],
35 | extends: [
36 | "eslint:recommended",
37 | "plugin:import/recommended",
38 | "plugin:mocha/recommended"
39 | ],
40 | rules: {
41 | }
42 | },
43 | ],
44 | "rules": {
45 | "indent": ["error", 2],
46 | "quotes": ["error", "double"],
47 | "key-spacing": ["error", {"mode": "strict"}],
48 | "require-atomic-updates": "off",
49 | "import/extensions": ["error", "ignorePackages"],
50 | },
51 | };
52 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "incrementals-publisher",
3 | "version": "1.4.2",
4 | "description": "",
5 | "private": true,
6 | "type": "module",
7 | "main": "index.js",
8 | "scripts": {
9 | "lint": "eslint .",
10 | "dev": "nodemon index.js",
11 | "test": "mocha -R mocha-multi-reporters"
12 | },
13 | "repository": {
14 | "type": "git",
15 | "url": "git+https://github.com/jenkins-infra/community-functions.git"
16 | },
17 | "author": "R Tyler Croy",
18 | "license": "MIT",
19 | "bugs": {
20 | "url": "https://github.com/jenkins-infra/community-functions/issues"
21 | },
22 | "homepage": "https://github.com/jenkins-infra/community-functions#readme",
23 | "devDependencies": {
24 | "@babel/core": "^7.22.10",
25 | "@babel/eslint-parser": "^7.22.10",
26 | "eslint": "^8.48.0",
27 | "eslint-plugin-import": "^2.27.5",
28 | "eslint-plugin-mocha": "^10.1.0",
29 | "mocha": "^10.2.0",
30 | "mocha-multi-reporters": "^1.1.7",
31 | "nodemon": "^2.0.22",
32 | "simple-mock": "^0.8.0"
33 | },
34 | "dependencies": {
35 | "@babel/preset-env": "^7.23.2",
36 | "@octokit/auth-app": "^6.0.1",
37 | "@octokit/rest": "^20.0.1",
38 | "bcrypt": "^5.1.0",
39 | "body-parser": "^1.20.2",
40 | "express": "^4.18.2",
41 | "express-async-wrap": "^1.0.0",
42 | "express-winston": "^4.0.5",
43 | "helmet": "^7.0.0",
44 | "node-fetch": "^3.3.1",
45 | "node-stream-zip": "^1.11.3",
46 | "wildcard-match": "^5.1.0",
47 | "winston": "^3.11.0",
48 | "xml2js": "^0.6.2"
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/node
3 | # Edit at https://www.gitignore.io/?templates=node
4 |
5 | ### Node ###
6 | # Logs
7 | logs
8 | *.log
9 | npm-debug.log*
10 | yarn-debug.log*
11 | yarn-error.log*
12 | lerna-debug.log*
13 |
14 | # Diagnostic reports (https://nodejs.org/api/report.html)
15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
16 |
17 | # Runtime data
18 | pids
19 | *.pid
20 | *.seed
21 | *.pid.lock
22 |
23 | # Directory for instrumented libs generated by jscoverage/JSCover
24 | lib-cov
25 |
26 | # Coverage directory used by tools like istanbul
27 | coverage
28 | *.lcov
29 |
30 | # nyc test coverage
31 | .nyc_output
32 |
33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
34 | .grunt
35 |
36 | # Bower dependency directory (https://bower.io/)
37 | bower_components
38 |
39 | # node-waf configuration
40 | .lock-wscript
41 |
42 | # Compiled binary addons (https://nodejs.org/api/addons.html)
43 | build/Release
44 |
45 | # Dependency directories
46 | node_modules/
47 | jspm_packages/
48 |
49 | # TypeScript v1 declaration files
50 | typings/
51 |
52 | # TypeScript cache
53 | *.tsbuildinfo
54 |
55 | # Optional npm cache directory
56 | .npm
57 |
58 | # Optional eslint cache
59 | .eslintcache
60 |
61 | # Optional REPL history
62 | .node_repl_history
63 |
64 | # Output of 'npm pack'
65 | *.tgz
66 |
67 | # Yarn Integrity file
68 | .yarn-integrity
69 |
70 | # dotenv environment variables file
71 | .env
72 | .env.test
73 |
74 | # parcel-bundler cache (https://parceljs.org/)
75 | .cache
76 |
77 | # next.js build output
78 | .next
79 |
80 | # nuxt.js build output
81 | .nuxt
82 |
83 | # vuepress build output
84 | .vuepress/dist
85 |
86 | # Serverless directories
87 | .serverless/
88 |
89 | # FuseBox cache
90 | .fusebox/
91 |
92 | # DynamoDB Local files
93 | .dynamodb/
94 |
95 | # End of https://www.gitignore.io/api/node
96 | .git/
97 |
--------------------------------------------------------------------------------
/Jenkinsfile:
--------------------------------------------------------------------------------
1 | pipeline {
2 | options {
3 | timeout(time: 60, unit: 'MINUTES')
4 | ansiColor('xterm')
5 | disableConcurrentBuilds(abortPrevious: true)
6 | buildDiscarder logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '5')
7 | }
8 |
9 | agent {
10 | label 'linux-arm64-docker || arm64linux'
11 | }
12 |
13 | environment {
14 | NODE_ENV = 'production'
15 | TZ = "UTC"
16 | NETLIFY = "true"
17 | }
18 |
19 | stages {
20 | stage('Check for typos') {
21 | steps {
22 | sh '''typos --format sarif > typos.sarif || true'''
23 | }
24 | post {
25 | always {
26 | recordIssues(tools: [sarif(id: 'typos', name: 'Typos', pattern: 'typos.sarif')])
27 | }
28 | }
29 | }
30 |
31 | stage('Install Dependencies') {
32 | environment {
33 | NODE_ENV = 'development'
34 | }
35 | steps {
36 | sh 'asdf install'
37 | sh 'npm ci'
38 | }
39 | }
40 |
41 | stage('Lint') {
42 | steps {
43 | sh '''
44 | npx eslint --format checkstyle . > eslint-results.json
45 | '''
46 | }
47 | post {
48 | always {
49 | recordIssues(
50 | enabledForFailure: true,
51 | tools: [
52 | esLint(pattern: 'eslint-results.json'),
53 | ])
54 | }
55 | }
56 | }
57 |
58 | stage('Test') {
59 | steps {
60 | sh 'npm run test --if-present'
61 | }
62 | }
63 |
64 | stage('Build') {
65 | steps {
66 | sh 'npm run build --if-present'
67 | }
68 | }
69 |
70 | stage('Release') {
71 | steps {
72 | buildDockerAndPublishImage('incrementals-publisher', [
73 | publishToPrivateAzureRegistry: true,
74 | targetplatforms: 'linux/arm64',
75 | disablePublication: !infra.isInfra(),
76 | ])
77 | }
78 | }
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/.eslintignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/node
3 | # Edit at https://www.gitignore.io/?templates=node
4 |
5 | ### Node ###
6 | # Logs
7 | logs
8 | *.log
9 | npm-debug.log*
10 | yarn-debug.log*
11 | yarn-error.log*
12 | lerna-debug.log*
13 |
14 | # Diagnostic reports (https://nodejs.org/api/report.html)
15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
16 |
17 | # Runtime data
18 | pids
19 | *.pid
20 | *.seed
21 | *.pid.lock
22 |
23 | # Directory for instrumented libs generated by jscoverage/JSCover
24 | lib-cov
25 |
26 | # Coverage directory used by tools like istanbul
27 | coverage
28 | *.lcov
29 |
30 | # nyc test coverage
31 | .nyc_output
32 |
33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
34 | .grunt
35 |
36 | # Bower dependency directory (https://bower.io/)
37 | bower_components
38 |
39 | # node-waf configuration
40 | .lock-wscript
41 |
42 | # Compiled binary addons (https://nodejs.org/api/addons.html)
43 | build/Release
44 |
45 | # Dependency directories
46 | node_modules/
47 | jspm_packages/
48 |
49 | # TypeScript v1 declaration files
50 | typings/
51 |
52 | # TypeScript cache
53 | *.tsbuildinfo
54 |
55 | # Optional npm cache directory
56 | .npm
57 |
58 | # Optional eslint cache
59 | .eslintcache
60 |
61 | # Optional REPL history
62 | .node_repl_history
63 |
64 | # Output of 'npm pack'
65 | *.tgz
66 |
67 | # Yarn Integrity file
68 | .yarn-integrity
69 |
70 | # dotenv environment variables file
71 | .env
72 | .env.test
73 |
74 | # parcel-bundler cache (https://parceljs.org/)
75 | .cache
76 |
77 | # next.js build output
78 | .next
79 |
80 | # nuxt.js build output
81 | .nuxt
82 |
83 | # vuepress build output
84 | .vuepress/dist
85 |
86 | # Serverless directories
87 | .serverless/
88 |
89 | # FuseBox cache
90 | .fusebox/
91 |
92 | # DynamoDB Local files
93 | .dynamodb/
94 |
95 | # End of https://www.gitignore.io/api/node
96 | xunit.xml
97 |
98 | github-app.pem
99 | .config
100 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/node
3 | # Edit at https://www.gitignore.io/?templates=node
4 |
5 | ### Node ###
6 | # Logs
7 | logs
8 | *.log
9 | npm-debug.log*
10 | yarn-debug.log*
11 | yarn-error.log*
12 | lerna-debug.log*
13 |
14 | # Diagnostic reports (https://nodejs.org/api/report.html)
15 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
16 |
17 | # Runtime data
18 | pids
19 | *.pid
20 | *.seed
21 | *.pid.lock
22 |
23 | # Directory for instrumented libs generated by jscoverage/JSCover
24 | lib-cov
25 |
26 | # Coverage directory used by tools like istanbul
27 | coverage
28 | *.lcov
29 |
30 | # nyc test coverage
31 | .nyc_output
32 |
33 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
34 | .grunt
35 |
36 | # Bower dependency directory (https://bower.io/)
37 | bower_components
38 |
39 | # node-waf configuration
40 | .lock-wscript
41 |
42 | # Compiled binary addons (https://nodejs.org/api/addons.html)
43 | build/Release
44 |
45 | # Dependency directories
46 | node_modules/
47 | jspm_packages/
48 |
49 | # TypeScript v1 declaration files
50 | typings/
51 |
52 | # TypeScript cache
53 | *.tsbuildinfo
54 |
55 | # Optional npm cache directory
56 | .npm
57 |
58 | # Optional eslint cache
59 | .eslintcache
60 |
61 | # Optional REPL history
62 | .node_repl_history
63 |
64 | # Output of 'npm pack'
65 | *.tgz
66 |
67 | # Yarn Integrity file
68 | .yarn-integrity
69 |
70 | # dotenv environment variables file
71 | .env
72 | .env.test
73 |
74 | # parcel-bundler cache (https://parceljs.org/)
75 | .cache
76 |
77 | # next.js build output
78 | .next
79 |
80 | # nuxt.js build output
81 | .nuxt
82 |
83 | # vuepress build output
84 | .vuepress/dist
85 |
86 | # Serverless directories
87 | .serverless/
88 |
89 | # FuseBox cache
90 | .fusebox/
91 |
92 | # DynamoDB Local files
93 | .dynamodb/
94 |
95 | # End of https://www.gitignore.io/api/node
96 | xunit.xml
97 |
98 | github-app.pem
99 | .config
100 |
--------------------------------------------------------------------------------
/lib/pipeline.js:
--------------------------------------------------------------------------------
/*
 * This module has a few functions which help process Pipeline related metadata
 */
const pipeline = {
  /*
   * Takes in a JSON object expected to be returned by JSON API
   * requests to Jenkins such as:
   * https://ci.jenkins.io/job/structs-plugin/job/PR-36/3/api/json?tree=actions[revision[hash,pullHash]]
   *
   * @param metadata - parsed build metadata from the Jenkins JSON API
   * @return Object containing a `hash` property when a revision action was
   *         found; otherwise an empty object
   */
  processBuildMetadata: (metadata) => {
    const response = {};
    if (metadata.actions) {
      for (const action of metadata.actions) {
        if (action._class === "jenkins.scm.api.SCMRevisionAction" && action.revision) {
          // Branch builds carry `hash`; pull request builds carry `pullHash`
          response.hash = action.revision.hash || action.revision.pullHash;
          return response;
        }
        if (action._class === "hudson.plugins.git.util.BuildDetails" && action.build && action.build.revision) {
          response.hash = action.build.revision.SHA1;
          return response;
        }
      }
    }
    return response;
  },

  /*
   * Return a generated API URL for fetching specific commit information for
   * this Pipeline
   *
   * @param build_url - Jenkins build URL, expected to end with a slash
   * @return {string} JSON API URL for the build's revision actions
   */
  getBuildApiUrl: (build_url) => {
    return build_url + "api/json?tree=actions[revision[hash,pullHash]]";
  },

  /*
   * Takes in a JSON object expected to be returned by JSON API
   * requests to Jenkins such as:
   * https://ci.jenkins.io/job/structs-plugin/api/json?tree=sources[source[repoOwner,repository]]
   *
   * @param metadata - parsed folder metadata from the Jenkins JSON API
   * @return Object containing `owner` and `repo` properties (the last source
   *         wins if several are listed); empty object when none are present
   */
  processFolderMetadata: (metadata) => {
    const response = {};

    if (metadata.sources) {
      metadata.sources.forEach((source) => {
        response.owner = source.source.repoOwner;
        response.repo = source.source.repository;
      });
    }

    return response;
  },

  /*
   * Return a generated API URL for fetching repository information for
   * this Pipeline
   *
   * @param build_url - Jenkins build URL; the folder is three levels up
   * @return {string} JSON API URL for the owning multibranch folder
   */
  getFolderApiUrl: (build_url) => {
    return build_url + "../../../api/json?tree=sources[source[repoOwner,repository]]";
  },

  /*
   * Return the generated URL to the archive.zip generated by some incrementals
   * build tooling (consult JEP-305)
   *
   * @param build_url - Jenkins build URL
   * @param hash - full git commit hash; only its first 12 chars are used
   * @return {string} artifact glob URL matching the incrementals archive
   */
  getArchiveUrl: (build_url, hash) => {
    const shortHash = hash.substring(0, 12);
    // Insert a wildcard after every 'a'/'b' so timestamped versions still
    // match; see https://github.com/jenkinsci/incrementals-tools/pull/24
    const versionPattern = shortHash.replace(/[ab]/g, "$&*");
    return build_url + "artifact/**/*" + versionPattern + "*/*" + versionPattern + "*/*zip*/archive.zip";
  }
};

export default pipeline;
76 |
--------------------------------------------------------------------------------
/lib/github.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This module provides some helpers for working with GitHub for the
3 | * incrementals publishing
4 | */
5 |
6 | import {Octokit} from "@octokit/rest";
7 |
8 | import {createAppAuth} from "@octokit/auth-app";
9 |
// GitHub App credentials, read once at module load.
const APP_ID = process.env.GITHUB_APP_ID;
const PRIVATE_KEY = process.env.GITHUB_APP_PRIVATE_KEY;

// Hard-coded fallback installation id — NOTE(review): confirm 22187127 is the
// intended app installation. When the env var is set this is a string,
// otherwise a number; Octokit accepts either.
const INSTALLATION_ID = process.env.GITHUB_APP_INSTALLATION_ID || 22187127;

/*
 * Build an Octokit REST client authenticated as the GitHub App installation.
 *
 * @return {Promise<Octokit>} a client scoped to INSTALLATION_ID
 */
async function getRestClient() {
  return new Octokit({
    authStrategy: createAppAuth,
    auth: {
      appId: APP_ID,
      privateKey: PRIVATE_KEY,
      installationId: INSTALLATION_ID
    }
  });
}
25 |
/*
 * Build the markdown summary for the check run: a download-link list for
 * every deployed artifact, plus a Plugin Installation Manager snippet when
 * at least one artifact is a plugin (.hpi).
 *
 * @param entries - objects with artifactId, groupId, version, packaging, url
 * @return {string} markdown summary body
 */
function summary(entries) {
  const links = `#### Download link${entries.length > 1 ? "s" : ""}:\n${entries.map(entry => `- [${entry.artifactId}](${entry.url})`).join("\n")}`;

  const hpi = entries
    .filter(entry => entry.packaging === "hpi")
    .map(entry => `${entry.artifactId}:incrementals;${entry.groupId};${entry.version}`);

  // Output the input format section only if there is a plugin, not usable
  // with other types of artifact
  if (hpi.length === 0) {
    return links;
  }
  return `${links}\n\n#### Plugin Installation Manager input format: ([documentation](https://github.com/jenkinsci/plugin-installation-manager-tool/#plugin-input-format))\n\n${hpi.join("\n")}`;
}
36 |
/*
 * Render the deployed artifacts as Maven <dependency> XML snippets, one per
 * entry, separated by newlines.
 *
 * FIX: the join separator was a double-quoted string containing a raw
 * newline, which is a syntax error — use the "\n" escape instead. Also
 * dropped the redundant `${metadata}` template wrapper around a string.
 *
 * @param entries - objects with groupId, artifactId and version
 * @return {string} XML fragment listing every dependency
 */
function text(entries) {
  return entries
    .map(entry => `<dependency>
  <groupId>${entry.groupId}</groupId>
  <artifactId>${entry.artifactId}</artifactId>
  <version>${entry.version}</version>
</dependency>`)
    .join("\n");
}
44 |
export default {
  /*
   * Check that the given commit exists in the GitHub repository.
   *
   * @param owner - repository owner (organization or user)
   * @param repo - repository name
   * @param ref - commit SHA or other commit-ish ref
   * @return {Promise<boolean>} true when the commit was fetched
   *   NOTE(review): Octokit throws on a missing commit (HTTP 404), so a
   *   rejected promise — not `false` — is the usual "missing" signal here;
   *   the `!!commit` guard only covers an unexpectedly empty response.
   */
  commitExists: async (owner, repo, ref) => {
    const github = await getRestClient();
    /*
     * Ensure that the commit is actually present in our repository! No sense
     * doing any work with it if it's somehow not published.
     */
    const commit = await github.repos.getCommit({owner, repo, ref});
    // Here is where you could port https://github.com/jglick/incrementals-downstream-publisher/blob/10073f484d35edc3928f7808419c81a6eb48df62/src/main/java/io/jenkins/tools/incrementals_downstream_publisher/Main.java#L107-L111
    // so as to print information about commit signatures, or even enforce them.
    return !!commit;
  },

  /*
   * Create a successful "Incrementals" check run on the deployed commit,
   * summarizing the published artifacts.
   *
   * @param owner - repository owner
   * @param repo - repository name
   * @param head_sha - commit SHA the check run is attached to
   * @param entries - deployed artifacts; assumed non-empty (entries[0]
   *   supplies the details URL and reported version — TODO confirm callers
   *   never pass an empty array)
   * @return {Promise} the Octokit response from checks.create
   */
  createStatus: async (owner, repo, head_sha, entries) => {
    const github = await getRestClient();

    return github.rest.checks.create({
      owner,
      repo,
      name: "Incrementals",
      head_sha,
      status: "completed",
      conclusion: "success",
      details_url: entries[0].url,
      output: {
        title: `Deployed version ${entries[0].version} to Incrementals`,
        summary: summary(entries),
        text: text(entries)
      }
    });
  }
};
77 |
--------------------------------------------------------------------------------
/README.adoc:
--------------------------------------------------------------------------------
1 | = Incremental Build Publisher
2 |
3 |
4 | This service is responsible for uploading incrementally built (see
5 | link:https://github.com/jenkinsci/jep/tree/master/jep/305[JEP-305]) core and
6 | plugin artifacts into the incrementals Maven repository (see
7 | link:https://github.com/jenkins-infra/iep/tree/master/iep-009[IEP-9]).
8 |
9 |
10 | [[env]]
11 | == Environment Variables
12 |
13 | |===
14 | | Variable | Description
15 |
16 | | `GITHUB_APP_ID`
17 | | The App ID for a GitHub app
18 |
19 | | `GITHUB_APP_PRIVATE_KEY`
20 | | The private key for a GitHub app
21 |
22 | | `GITHUB_APP_INSTALLATION_ID`
23 | | The installation ID for the organisation / user account that the GitHub app has been installed to
24 |
25 | | `JENKINS_HOST`
26 | | A Jenkins instance (defaults to `https://ci.jenkins.io/`) to which URLs are
27 | expected to conform.
28 |
29 | | `INCREMENTAL_URL`
30 | | A Maven repository which should be treated as the incrementals destination,
31 | defaults to `https://repo.jenkins-ci.org/incrementals/`
32 |
33 | | `ARTIFACTORY_KEY`
34 | | An Artifactory user's API key (from the link:https://repo.jenkins-ci.org/webapp/#/profile[user profile in Artifactory])
35 |
36 | | `PERMISSIONS_URL`
37 | | A URL pointing to the generated repository-permissions-updater output,
38 | defaults to
39 | `https://ci.jenkins.io/job/Infra/job/repository-permissions-updater/job/master/lastSuccessfulBuild/artifact/json/github.index.json`
40 |
41 | | `JENKINS_AUTH`
42 | | A `username:password` or `username:apiToken` authentication to Jenkins. link:https://ci.jenkins.io/me/configure[API Token]
43 |
44 | | `PRESHARED_KEY`
45 | | A string that is required for Authorization: Bearer $token authentication to allow access
46 |
47 | |===
48 |
49 | === Optional/Development Variables
50 |
51 | |===
52 | | Variable | Description
53 |
54 | | `BUILD_METADATA_URL`
55 | | URL which will serve up JSON which represents metadata about a Pipeline Run (link:https://gist.github.com/rtyler/6b601864e676d0f0735c1399e291ddf4#file-gistfile1-txt[example])
56 |
57 | | `FOLDER_METADATA_URL`
58 | | Same but for metadata of the repository folder.
59 |
60 | | `ARCHIVE_URL`
61 | | URL to a `.zip` file which represents an example release zip generated by the incrementals release tooling.
62 | |===
63 |
64 | == Valid Requests
65 |
66 | This service only supports one kind of request, and is expected to only be
67 | called by the `buildPlugin()` step or other Jenkins Pipelines which are
68 | publishing incrementally built artifacts into Artifactory.
69 |
70 | [source,json]
71 | ----
72 | {
73 | "build_url" : "https://ci.jenkins.io/job/structs-plugin/job/PR-36/3/"
74 | }
75 | ----
76 |
77 | This URL is expected to be the value of a `BUILD_URL` environment variable from
78 | Jenkins.
79 |
80 | == Testing
81 |
Unit tests can simply be run by executing `make test` in this directory.
83 |
For acceptance testing, please set the appropriate <<env,environment variables>> in a terminal and
then execute `make run` in the repository root directory.
86 |
Once the service container has come online, requests can be sent to it,
for example:
89 |
90 | [source,bash]
91 | ----
92 | curl -H "Authorization: Bearer ${PRESHARED_KEY}" -H 'Content-Type: application/json' -i -d '{"build_url":"https://ci.jenkins.io/job/Plugins/job/jenkins-infra-test-plugin/job/master/52/"}' http://localhost:3000
93 | ----
94 |
--------------------------------------------------------------------------------
/lib/permissions.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This module just has some helpers to make checking permissions easier
3 | */
4 |
5 | import fetch from "node-fetch";
6 |
7 | import StreamZip from "node-stream-zip";
8 | import util from "util";
9 | import xml2js from "xml2js";
10 | import config from "./config.js";
11 | import wcmatch from "wildcard-match";
12 |
export default {
  /*
   * Fetch the generated repository-permissions-updater index, which maps a
   * GitHub "owner/repo" key to the Maven paths it is allowed to deploy to.
   *
   * @return {Promise<Response>} the raw fetch response (JSON body)
   */
  fetch: () => {
    return fetch(config.PERMISSIONS_URL);
  },

  /*
   * Verify every entry of the archive against the permissions granted to the
   * target repository, and validate each .pom's <scm> metadata against the
   * commit hash being deployed. Parsed GAV records are appended to `entries`
   * as a side effect.
   *
   * FIX: the three "Missing ... section" format strings had lost the XML tag
   * names (<scm>, <url>, <tag>), producing garbled messages such as
   * "Missing section in of %s".
   *
   * @param log - logger exposing info()
   * @param target - "owner/repo" key into the permissions index
   * @param archive - filesystem path to the downloaded archive.zip
   * @param entries - output array; one record per parsed .pom
   * @param permsResponse - fetch response whose JSON body is the index
   * @param hash - git commit hash this deployment claims to be built from
   * @return {Promise<boolean>} resolves true when every entry passes;
   *         rejects with a message on the first violation or ZIP error
   */
  verify: async (log, target, archive, entries, permsResponse, hash) => {
    const permissions = await permsResponse.json();
    return new Promise((resolve, reject) => {
      const applicable = permissions[target];

      if (!applicable) {
        reject(util.format("No applicable permissions for %s, check jenkins-infra/repository-permissions-updater has the right configuration,", target));
        return
      }

      const zip = new StreamZip({file: archive});

      zip.on("entry", async function (entry) {
        // Permitted when the entry matches a granted path (wildcards use "|"
        // as the separator so "/" in paths is matched literally) or is
        // nested under one.
        let ok = !!applicable.find(file => {
          const isMatch = wcmatch(file, { separator: "|" })
          return isMatch(entry.name) || entry.name.startsWith(file);
        });
        if (!ok) {
          this.emit("error", new Error(util.format("No permissions for %s", entry.name)));
          return
        }
        if (entry.name.endsWith(".pom")) {
          const pomXml = zip.entryDataSync(entry.name);
          // NOTE(review): `err` is ignored here — a parse failure would leave
          // `result` undefined and throw inside this callback; consider
          // checking `err` explicitly.
          xml2js.parseString(pomXml, (err, result) => {
            if (!result.project.scm) {
              this.emit("error", new Error(util.format("Missing <scm> section in %s", entry.name)));
              return
            }
            const scm = result.project.scm[0];
            if (!scm.url) {
              this.emit("error", new Error(util.format("Missing <url> section in <scm> of %s", entry.name)));
              return
            }
            const url = scm.url[0];
            if (!scm.tag) {
              this.emit("error", new Error(util.format("Missing <tag> section in <scm> of %s", entry.name)));
              return
            }
            const tag = scm.tag[0];
            const groupId = result.project.groupId[0];
            const artifactId = result.project.artifactId[0];
            const version = result.project.version[0];
            const packaging = result.project.packaging ? result.project.packaging[0] : ""
            entries.push({
              artifactId,
              groupId,
              version,
              packaging,
              path: entry.name
            });
            log.info(util.format("Parsed %s with url=%s tag=%s GAV=%s:%s:%s", entry.name, url, tag, groupId, artifactId, version));
            // The .pom must live exactly where its own GAV says it should.
            const expectedPath = groupId.replace(/[.]/g, "/") + "/" + artifactId + "/" + version + "/" + artifactId + "-" + version + ".pom";
            if (tag !== hash) {
              this.emit("error", new Error(`Wrong commit hash in /project/scm/tag, expected ${hash}, got ${tag}`));
              return
            } else if (!url.match("^https?://github[.]com/.+$")) {
              this.emit("error", new Error("Wrong URL in /project/scm/url"));
              return
            } else if (expectedPath !== entry.name) {
              this.emit("error", new Error(util.format("Wrong GAV: %s vs. %s", expectedPath, entry.name)));
              return
            }
          });
        }
      });

      // NOTE(review): "ready" fires when the central directory is read; the
      // per-entry validation above is synchronous, but confirm no entry event
      // can still be in flight when the zip is closed here.
      zip.on("ready", () => {
        zip.close();
        resolve(true);
      });

      zip.on("error", (err) => { reject(new Error("ZIP error: " + err)); });
    });
  },
};
93 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | import {readFile} from "fs/promises";
2 | import fetch from "node-fetch";
3 | import bcrypt from "bcrypt";
4 | import express from "express";
5 | import winston from "winston";
6 | import expressWinston from "express-winston";
7 | import bodyParser from "body-parser";
8 | import helmet from "helmet";
9 | import asyncWrap from "express-async-wrap";
10 | import config from "./lib/config.js";
11 | import {IncrementalsPlugin} from "./IncrementalsPlugin.js";
12 |
// Load package metadata (for the version reported by /liveness) using a
// top-level await; this file is an ES module, so that is permitted.
const packageJson = JSON.parse(await readFile(new URL("./package.json", import.meta.url)));

const app = express()
const port = config.PORT

// Winston logger, used directly and as the express-winston request logger below.
const logger = winston.createLogger({
  level: "debug",
  transports: [
    new winston.transports.Console({})
  ],
  format: winston.format.combine(
    winston.format.timestamp(),
    winston.format.align(),
    winston.format.splat(),
    winston.format.printf(info => `${info.timestamp} ${info.level}: ${info.message}`)
  ),
  exitOnError: false, // do not exit on handled exceptions
});
31 |
/*
 * Middleware registration — order matters: request logging first, then
 * security headers, then body parsers, all before the route handlers below.
 */
app.use(expressWinston.logger({
  winstonInstance: logger,
}));

// Standard security-related HTTP response headers.
app.use(helmet());

// parse application/x-www-form-urlencoded
app.use(bodyParser.urlencoded({extended: false}))

// parse application/json
app.use(bodyParser.json())
44 |
/*
 * Health checks run by the /readiness endpoint. Each check returns a small
 * JSON fragment on success and throws on failure.
 */
const healthchecks = {
  jenkins: async function () {
    const jenkinsOpts = {};
    if (!config.JENKINS_AUTH) {
      // No credentials configured: report that instead of probing Jenkins.
      return {jenkins: "no_auth"};
    }

    // Buffer.from is a factory function, not a constructor; the previous
    // `new Buffer.from(...)` only worked by accident.
    jenkinsOpts.headers = {"Authorization": "Basic " + Buffer.from(config.JENKINS_AUTH, "utf8").toString("base64")};
    const response = await fetch(config.JENKINS_HOST + "/whoAmI/api/json", jenkinsOpts)
    if (response.status !== 200) {
      throw new Error("Unable to talk to jenkins");
    }
    // Parse the body to confirm Jenkins answered with valid JSON, even
    // though the content itself is discarded.
    await response.json();
    return {jenkins: "ok"}
  }
}
61 |
// Readiness probe: runs every healthcheck; any failure flips the status to
// 500 and records the failing check's name in the `errors` array.
app.get("/readiness", asyncWrap(async (req, res) => {
  res.status(200);
  let responseJson = {errors: []};
  for (const key of Object.keys(healthchecks)) {
    try {
      responseJson = {...responseJson, ...(await healthchecks[key]())};
    } catch (e) {
      logger.error(`Healthcheck: ${e}`);
      responseJson.errors.push(key);
      res.status(500);
    }
  }
  res.json(responseJson);
}));

// Liveness probe: static OK plus the deployed package version.
app.get("/liveness", asyncWrap(async (_req, res) => {
  res.status(200).json({
    status: "OK",
    version: packageJson.version
  });
}));
83 |
// Hash the pre-shared key once at startup; bcrypt.compare below then keeps
// the check constant-time with respect to the attacker-supplied token.
const encodedPassword = bcrypt.hashSync(config.PRESHARED_KEY, 10);

// Main webhook endpoint: authenticate, then hand the payload to the plugin.
app.post("/", asyncWrap(async (req, res) => {
  // Accept the key either bare or as a "Bearer <token>" Authorization header.
  const authorization = (req.get("Authorization") || "").replace(/^Bearer /, "");
  // we bcrypt so nobody can learn from timing attacks
  // https://www.npmjs.com/package/bcrypt#a-note-on-timing-attacks
  const check = await bcrypt.compare(authorization, encodedPassword);
  if (!check) {
    res.status(403).send("Not authorized");
    return
  }

  const context = {log: logger};
  const obj = new IncrementalsPlugin(context, {body: req.body});
  res.send((await obj.main()).body);
}))
100 |
/* Error handler goes last; the four-argument signature marks this as
 * Express error-handling middleware. FailRequestError/SuccessRequestError
 * from IncrementalsPlugin carry a numeric `code` used as the HTTP status. */
app.use(function (err, req, res, next) {
  logger.error(err.stack)
  // NOTE(review): err.code can also be a non-numeric Node error code (e.g.
  // "ECONNREFUSED") for system errors — confirm res.status tolerates that.
  res.status(err.status || err.code || 400).send(err.message || "Unknown error");
  next()
})

// Handle ^C
process.on("SIGINT", shutdown);

// Do graceful shutdown
function shutdown() {
  logger.info("Got SIGINT");
  process.exit();
}

app.listen(port, () => {
  logger.info(`Incrementals listening at http://localhost:${port}`)
})
120 |
--------------------------------------------------------------------------------
/test/permissions-test.js:
--------------------------------------------------------------------------------
1 | import {readFileSync} from "fs";
2 | import path from "path";
3 | import assert from "assert";
4 | import Permissions from "../lib/permissions.js";
5 |
6 | const readJSON = (filename) => JSON.parse(readFileSync(new URL(filename, import.meta.url)));
7 |
describe("The Permissions helpers", function () {
  // NOTE: assert.rejects returns a Promise. It must be awaited — otherwise
  // the async test completes before the assertion settles, and a failed
  // rejection check surfaces only as an unhandled rejection instead of
  // failing the test.
  it("Fails with bad url error", async function () {
    const folderMetadataParsed = {
      owner: "jenkinsci",
      repo: "bom"
    }
    const buildMetadataParsed = {
      hash: "149af85f094da863ddc294e50b5d8caaab549f95"
    }

    const repoPath = path.join(folderMetadataParsed.owner, folderMetadataParsed.repo);
    const entries = [];
    let perms = {
      status: 200,
      json: () => readJSON("./fixtures-permissions.json")
    }
    await assert.rejects(
      () => Permissions.verify(
        {info: () => true},
        repoPath,
        path.resolve("./test/fixtures-bad-scm-url-archive.zip"),
        entries,
        perms,
        buildMetadataParsed.hash
      ),
      {
        name: "Error",
        message: "ZIP error: Error: Missing section in of io/jenkins/tools/bom/bom/2.176.1-rc41.149af85f094d/bom-2.176.1-rc41.149af85f094d.pom"
      }
    )
  })
  it("Fails with no permissions error", async function () {
    const folderMetadataParsed = {
      owner: "jenkinsci",
      repo: "bom"
    }
    const buildMetadataParsed = {
      hash: "5055257e4d28adea76fc34fdde4e025347405bae"
    }

    const repoPath = path.join(folderMetadataParsed.owner, folderMetadataParsed.repo);
    const entries = [];
    let perms = {
      status: 200,
      json: () => readJSON("./fixtures-permissions-missing-path.json")
    }
    await assert.rejects(
      () => Permissions.verify(
        {info: () => true},
        repoPath,
        path.resolve("./test/fixtures-good-archive.zip"),
        entries,
        perms,
        buildMetadataParsed.hash
      ),
      {
        name: "Error",
        message: "ZIP error: Error: No permissions for io/jenkins/tools/bom/bom-2.222.x/29-rc793.5055257e4d28/bom-2.222.x-29-rc793.5055257e4d28.pom"
      }
    )
  })
  it("Succeeds with good pom", async function () {
    const folderMetadataParsed = {
      owner: "jenkinsci",
      repo: "bom"
    }
    const buildMetadataParsed = {
      hash: "5055257e4d28adea76fc34fdde4e025347405bae"
    }

    const repoPath = path.join(folderMetadataParsed.owner, folderMetadataParsed.repo);
    const entries = [];
    let perms = {
      status: 200,
      json: () => readJSON("./fixtures-permissions.json")
    };
    const response = await Permissions.verify(
      {info: () => true},
      repoPath,
      path.resolve("./test/fixtures-good-archive.zip"),
      entries,
      perms,
      buildMetadataParsed.hash
    );
    assert.equal(response, true)
  })
  it("Succeeds with wildcard path", async function () {
    const folderMetadataParsed = {
      owner: "jenkinsci",
      repo: "bom"
    }
    const buildMetadataParsed = {
      hash: "5055257e4d28adea76fc34fdde4e025347405bae"
    }

    const repoPath = path.join(folderMetadataParsed.owner, folderMetadataParsed.repo);
    const entries = [];
    let perms = {
      status: 200,
      json: () => readJSON("./fixtures-permissions-wildcard.json")
    };
    const response = await Permissions.verify(
      {info: () => true},
      repoPath,
      path.resolve("./test/fixtures-good-archive.zip"),
      entries,
      perms,
      buildMetadataParsed.hash
    );
    assert.equal(response, true)
  })
});
120 |
--------------------------------------------------------------------------------
/test/pipeline-test.js:
--------------------------------------------------------------------------------
1 | import assert from "assert";
2 | import pipeline from "../lib/pipeline.js";
3 |
describe("The Pipeline helpers", function() {
  let build_url = "https://ci.jenkins.io/job/structs-plugin/job/PR-36/3/";

  describe("processBuildMetadata", function() {
    // Trimmed copy of a Jenkins build api/json payload whose
    // SCMRevisionAction carries a plain git hash.
    let metadata = {
      "_class": "org.jenkinsci.plugins.workflow.job.WorkflowRun",
      "actions": [
        {
          "_class": "hudson.model.CauseAction"
        },
        {
          "_class": "jenkins.scm.api.SCMRevisionAction",
          "revision": {
            "_class": "jenkins.plugins.git.AbstractGitSCMSource$SCMRevisionImpl",
            "hash": "abc131cc3bf56309a05b3fe8b086b265d14f2a61"
          }
        },
        {
          "_class": "hudson.plugins.git.util.BuildData"
        },
        {
          "_class": "hudson.plugins.git.GitTagAction"
        },
        {

        },
        {
          "_class": "org.jenkinsci.plugins.workflow.cps.EnvActionImpl"
        },
        {

        },
        {

        },
        {
          "_class": "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction"
        },
        {

        },
        {

        }
      ]
    };

    it("should return the right hash", function() {
      const value = pipeline.processBuildMetadata(metadata);
      assert.equal(value.hash, "abc131cc3bf56309a05b3fe8b086b265d14f2a61");
    });

    // Same shape, but the SCMRevisionAction revision has neither `hash` nor
    // `pullHash`, so no hash should be extracted.
    let metadata2 = { // https://ci.jenkins.io/job/Core/job/jenkins/job/master/888/api/json?tree=actions[revision[hash,pullHash]]&pretty
      "_class": "org.jenkinsci.plugins.workflow.job.WorkflowRun",
      "actions": [
        {
          "_class": "hudson.model.CauseAction"
        },
        {
          "_class": "jenkins.metrics.impl.TimeInQueueAction"
        },
        {

        },
        {
          "_class": "jenkins.scm.api.SCMRevisionAction",
          "revision": {
            "_class": "jenkins.plugins.git.AbstractGitSCMSource$SCMRevisionImpl"
          }
        },
        {

        },
        {
          "_class": "hudson.plugins.git.util.BuildData"
        },
        {
          "_class": "hudson.plugins.git.GitTagAction"
        },
        {

        },
        {
          "_class": "hudson.plugins.git.util.BuildData"
        },
        {
          "_class": "org.jenkinsci.plugins.workflow.cps.EnvActionImpl"
        },
        {
          "_class": "hudson.plugins.git.util.BuildData"
        },
        {

        },
        {

        },
        {
          "_class": "hudson.tasks.junit.TestResultAction"
        },
        {

        },
        {

        },
        {

        },
        {

        },
        {

        },
        {

        },
        {
          "_class": "org.jenkinsci.plugins.workflow.job.views.FlowGraphAction"
        },
        {

        },
        {

        }
      ]
    };

    it("should return no hash", function() {
      const value = pipeline.processBuildMetadata(metadata2);
      assert.equal(value.hash, null);
    });

    it("gracefully tolerates lack of any authentication", function() {
      const value = pipeline.processBuildMetadata({});
      assert.equal(value.hash, null);
    });
  });

  describe("getBuildApiUrl", function() {
    it("should generate an api/json URL", function() {
      const url = pipeline.getBuildApiUrl(build_url);
      assert.ok(url);
      assert.ok(url.match("api/json"));
    });
  });

  describe("getArchiveUrl", function() {
    // The hash is interleaved with glob wildcards to match the archive path.
    it("should generate an archive.zip URL", function() {
      let hash = "acbd4";
      const url = pipeline.getArchiveUrl(build_url, hash);
      assert.strictEqual(url, "https://ci.jenkins.io/job/structs-plugin/job/PR-36/3/artifact/**/*a*cb*d4*/*a*cb*d4*/*zip*/archive.zip");
    });
  });

});
162 |
--------------------------------------------------------------------------------
/test/index-test.js:
--------------------------------------------------------------------------------
1 | import {readFileSync} from "fs";
2 | import assert from "assert";
3 | import simple from "simple-mock";
4 | // eslint-disable-next-line
5 | import fetch from "node-fetch";
6 | import path from "path";
7 | import {IncrementalsPlugin} from "../IncrementalsPlugin.js";
8 | import permissions from "../lib/permissions.js";
9 |
// Parse a JSON fixture that sits next to this test file.
const readJSON = (filename) => JSON.parse(readFileSync(new URL(filename, import.meta.url)));

// Canned responses keyed by exact URL; the asyncFetch mock in the suite
// below serves these instead of hitting the network.
const urlResults = {
  "https://repo.jenkins-ci.org/incrementals/io/jenkins/tools/bom/bom-2.222.x/29-rc793.5055257e4d28/bom-2.222.x-29-rc793.5055257e4d28.pom": {
    status: 404
  },
  "https://ci.jenkins.io/job/Tools/job/bom/job/PR-22/5/api/json?tree=actions[revision[hash,pullHash]]": {
    status: 200,
    results: () => {
      return {
        actions: [{
          "_class": "jenkins.scm.api.SCMRevisionAction",
          "revision": {
            "_class": "org.jenkinsci.plugins.github_branch_source.PullRequestSCMRevision",
            "pullHash": "5055257e4d28adea76fc34fdde4e025347405bae"
          }
        }]
      }
    }
  },
  "https://ci.jenkins.io/job/Tools/job/bom/job/PR-22/5/../../../api/json?tree=sources[source[repoOwner,repository]]": {
    status: 200,
    results: () => {
      return {"_class": "org.jenkinsci.plugins.workflow.multibranch.WorkflowMultiBranchProject", "sources": [{"source": {"_class": "org.jenkinsci.plugins.github_branch_source.GitHubSCMSource", "repoOwner": "jenkinsci", "repository": "bom"}}]}
    }
  },
  "https://ci.jenkins.io/job/Infra/job/repository-permissions-updater/job/master/lastSuccessfulBuild/artifact/json/github.index.json": {
    status: 200,
    results: () => readJSON("./fixtures-permissions.json")
  },
  "https://fake-repo.jenkins-ci.org/incrementals/io/jenkins/tools/bom/bom-2.222.x/29-rc793.5055257e4d28/bom-2.222.x-29-rc793.5055257e4d28.pom": {
    status: 404,
    results: () => "Not found"
  }
}
45 |
describe("Handling incremental publisher webhook events", function () {
  let ctx = {};
  let data = {
    body: {}
  };
  // Invoke the plugin, normalizing both success and failure into ctx.res.
  let run = async () => {
    ctx.res = {};
    try {
      const obj = new IncrementalsPlugin(ctx, data);
      ctx.res = await obj.main();
    } catch (err) {
      ctx.res = {
        status: err.code || 400,
        body: err.message || "Unknown error"
      };
    }
  };
  // fetch mock: serve canned urlResults and fall through to the real
  // network (with a warning) for anything unexpected.
  let asyncFetch = async (url, opts) => {
    if (!url) {
      throw new Error("no url provided");
    }
    if (!urlResults[url]) {
      console.warn("Mock URL is not found, fetching real url", url);
      return fetch(url, opts);
    }
    return {
      status: urlResults[url].status,
      json: () => {const resultsFunc = urlResults[url].results; return resultsFunc()}
    };
  }

  beforeEach(function () {
    // Reset the shared request body so tests are order-independent:
    // previously a build_url set by one test leaked into the next, and the
    // "without parameters" case only passed because it ran first.
    data.body = {};
    ctx.log = simple.mock();
    //simple.mock(ctx.log, 'info', (...args) => console.log('[INFO]', ...args));
    //simple.mock(ctx.log, 'error', (...args) => console.log('[ERROR]', ...args));
    simple.mock(ctx.log, "info", () => true);
    simple.mock(ctx.log, "error", () => true);
    simple.mock(IncrementalsPlugin.prototype, "downloadFile", async () => path.resolve("./test/fixtures-good-archive.zip"));
    simple.mock(IncrementalsPlugin.prototype.github, "commitExists", async () => true);
    simple.mock(IncrementalsPlugin.prototype.github, "createStatus", async () => true);
    simple.mock(IncrementalsPlugin.prototype, "uploadToArtifactory", async () => {
      return {
        status: 200,
        statusText: "Success"
      };
    });
    simple.mock(IncrementalsPlugin.prototype, "fetch", asyncFetch);
    simple.mock(IncrementalsPlugin.prototype.permissions, "fetch", async () => {
      return {
        status: 200,
        json: async () => readJSON("./fixtures-permissions.json")
      }
    });
  });
  afterEach(function () {simple.restore()});

  describe("without parameters", function () {
    it("should require a parameter", async function () {
      await run();
      assert.equal(ctx.res.status, 400);
      assert.equal(ctx.res.body, "The incrementals-publisher invocation was missing the build_url attribute");
    });
  });

  describe("without a build_url matching JENKINS_HOST", function () {
    it("should return a 400", async function () {
      data.body.build_url = "https://example.com/foo/bar";
      await run();
      assert.equal(ctx.res.status, 400);
      assert.equal(ctx.res.body, "This build_url is not supported");
    });
  });

  describe("with a weird build_url", function () {
    it("should return a 400", async function () {
      data.body.build_url = "https://ci.jenkins.io/junk/";
      await run();
      assert.equal(ctx.res.status, 400);
      assert.equal(ctx.res.body, "This build_url is malformed");
    });
  });

  describe("with a bogus build_url", function () {
    for (let u of [
      "https://ci.jenkins.io/job/hack?y/123/",
      "https://ci.jenkins.io/job/hack#y/123/",
      // There may be legitimate use cases for, say, %20, but validation might be tricky and YAGNI.
      "https://ci.jenkins.io/job/hack%79/123/",
      "https://ci.jenkins.io/job/../123/",
      "https://ci.jenkins.io/job/./123/",
      "https://ci.jenkins.io/job/ok/123//",
    ]) {
      it(u + " should return a 400", async function () {
        data.body.build_url = u;
        await run();
        assert.equal(ctx.res.status, 400);
        assert.equal(ctx.res.body, "This build_url is malformed");
      });
    }
  });
  describe("error verifying permissions", function () {
    beforeEach(function () {
      simple.mock(permissions, "verify", () => {
        return new Promise(function (resolve, reject) {
          reject(new Error("This is my error"));
        });
      });
    });
    it("should output an error", async function () {
      data.body.build_url = "https://ci.jenkins.io/job/Tools/job/bom/job/PR-22/5/";
      await run();
      assert.equal(ctx.res.body, "Invalid archive retrieved from Jenkins, perhaps the plugin is not properly incrementalized?\nError: This is my error from https://ci.jenkins.io/job/Tools/job/bom/job/PR-22/5/artifact/**/*5055257e4d28*/*5055257e4d28*/*zip*/archive.zip");
      assert.equal(ctx.res.status, 400);
    });
  });
  describe("success", function () {
    it("should claim all is a success", async function () {
      data.body.build_url = "https://ci.jenkins.io/job/Tools/job/bom/job/PR-22/5/";
      await run();
      assert.equal(ctx.res.body, "Response from Artifactory: Success\n");
      assert.equal(ctx.res.status, 200);
    });
  });
});
170 |
--------------------------------------------------------------------------------
/IncrementalsPlugin.js:
--------------------------------------------------------------------------------
1 | /*
2 | * This Azure Function is responsible for processing information related to an
3 | * incrementals release and bouncing the artifacts into Artifactory
4 | */
5 |
6 | import fs from "fs";
7 |
8 | import fetch from "node-fetch";
9 | import os from "os";
10 | import path from "path";
11 | import util from "util";
12 | import url from "url";
13 | import config from "./lib/config.js";
14 | import github from "./lib/github.js";
15 | import pipeline from "./lib/pipeline.js";
16 | import permissions from "./lib/permissions.js";
17 |
18 | const TEMP_ARCHIVE_DIR = path.join(os.tmpdir(), "incrementals-");
19 | const mkdtemp = util.promisify(fs.mkdtemp);
20 |
21 | /*
22 | * Small helper function to make failing a request more concise
23 | */
/*
 * Base error class: fixes up `name` to the concrete subclass name and makes
 * sure a stack trace is present on every engine.
 */
class ExtendableError extends Error {
  constructor(message) {
    super(message);
    this.name = this.constructor.name;
    // Prefer V8's captureStackTrace (trims the constructor frame); fall back
    // to borrowing the stack of a freshly created Error elsewhere.
    const capture = Error.captureStackTrace;
    if (typeof capture === "function") {
      capture.call(Error, this, this.constructor);
    } else {
      this.stack = new Error(message).stack;
    }
  }
}
35 |
/*
 * Request-terminating error; `code` (default 400) is sent as the HTTP
 * response status by the Express error handler in index.js.
 */
class FailRequestError extends ExtendableError {
  constructor(message, code = 400) {
    super(message);
    this.code = code;
  }
}
42 |
class SuccessRequestError extends ExtendableError {
  // Ignorable error: carries a 2xx code (default 200) so skipping a
  // deployment does not fail the triggering build.
  constructor(message, code = 200) {
    super(message);
    this.code = code;
  }
}
50 |
51 |
class IncrementalsPlugin {
  /**
   * @param {object} context - invocation context; must provide `log` with
   *   winston-style info/error methods (printf formatting is used).
   * @param {object} data - request payload; `data.body.build_url` identifies
   *   the Jenkins build whose artifacts should be published.
   */
  constructor(context, data) {
    this.context = context;
    this.data = data;
  }

  // The getters below expose the lib modules through the prototype so tests
  // can replace them with mocks.
  get permissions() {
    return permissions;
  }

  get github() {
    return github
  }

  get pipeline() {
    return pipeline
  }

  // wrapper for easier mocking
  fetch(...args) {
    return fetch(...args);
  }

  /**
   * PUT the archive to Artifactory, asking it to explode the zip atomically
   * into the incrementals repository.
   * @returns the raw upload response (status/statusText reported to caller).
   */
  async uploadToArtifactory(archivePath, pomURL) {
    const upload = await this.fetch(util.format("%sarchive.zip", config.INCREMENTAL_URL),
      {
        headers: {
          "X-Explode-Archive": true,
          "X-Explode-Archive-Atomic": true,
          "X-JFrog-Art-Api": config.ARTIFACTORY_KEY,
        },
        method: "PUT",
        body: fs.createReadStream(archivePath)
      });
    this.context.log.info("Upload result for pom: %s, status: %s, full error: %s", pomURL, upload.status, await upload.text());
    return upload;
  }

  /**
   * Stream archiveUrl into a freshly created temp directory.
   * @returns {Promise<string>} filesystem path of the downloaded archive.zip.
   */
  async downloadFile(archiveUrl, fetchOpts) {
    const tmpDir = await mkdtemp(TEMP_ARCHIVE_DIR);
    this.context.log.info("Prepared a temp dir for the archive %s", tmpDir);
    const archivePath = path.join(tmpDir, "archive.zip");

    // Go through this.fetch (rather than the bare node-fetch import) for
    // consistency with the rest of the class, so tests can mock all network
    // access in one place.
    const res = await this.fetch(archiveUrl, fetchOpts);
    await new Promise((resolve, reject) => {
      const fileStream = fs.createWriteStream(archivePath);
      res.body.pipe(fileStream);
      res.body.on("error", (err) => {
        fileStream.close();
        reject(err);
      });
      fileStream.on("finish", function () {
        fileStream.close();
        resolve();
      });
    });

    return archivePath;
  }

  /**
   * Ensure buildUrl points at the configured JENKINS_HOST and looks like a
   * plain /job/.../<number>/ URL.
   * @throws {FailRequestError} when the URL is foreign or malformed.
   */
  isValidUrl(buildUrl) {
    // NOTE(review): url.parse is Node's legacy parser; migrating to the
    // WHATWG URL class would change `path` to `pathname` — confirm first.
    const parsedUrl = url.parse(buildUrl);
    const parsedJenkinsHost = url.parse(config.JENKINS_HOST);

    if (`${parsedUrl.protocol}//${parsedUrl.host}` != `${parsedJenkinsHost.protocol}//${parsedJenkinsHost.host}`) {
      throw new FailRequestError("This build_url is not supported");
    }
    // Only simple job path segments are accepted; also reject any traversal
    // sequences that the regex alone would let through.
    if (!parsedUrl.path.match("/(job/[a-zA-Z0-9._-]+/)+[0-9]+/$") || buildUrl.includes("/../") || buildUrl.includes("/./")) {
      throw new FailRequestError("This build_url is malformed");
    }
  }

  /**
   * Entry point: validate the webhook payload, gather build and folder
   * metadata from Jenkins, verify repository permissions against the archive
   * content, upload to Artifactory, and report a commit status to GitHub.
   * @returns {Promise<{status: number, body: string}>}
   * @throws {FailRequestError|SuccessRequestError}
   */
  async main() {
    const buildUrl = this.data.body.build_url;
    /* If we haven't received any valid data, just bail early */
    if (!buildUrl) {
      throw new FailRequestError("The incrementals-publisher invocation was missing the build_url attribute")
    }

    try {
      this.isValidUrl(buildUrl);
    } catch (buildUrlError) {
      this.context.log.error("Malformed", {buildUrl: buildUrl, JENKINS_HOST: config.JENKINS_HOST});
      throw buildUrlError;
    }

    // Starting some async operations early which we will need later
    let perms = this.permissions.fetch();

    const jenkinsOpts = {};
    if (config.JENKINS_AUTH) {
      // Buffer.from is a factory function, not a constructor; the previous
      // `new Buffer.from(...)` only worked by accident.
      jenkinsOpts.headers = {"Authorization": "Basic " + Buffer.from(config.JENKINS_AUTH, "utf8").toString("base64")};
    }

    /*
     * The first step is to take the buildUrl and fetch some metadata about this
     * specific Pipeline Run
     */
    let buildMetadataUrl = config.BUILD_METADATA_URL || this.pipeline.getBuildApiUrl(buildUrl);
    this.context.log.info("Retrieving metadata from %s", buildMetadataUrl)
    let buildMetadata = await this.fetch(buildMetadataUrl, jenkinsOpts);
    if (buildMetadata.status !== 200) {
      this.context.log.error("Failed to fetch Pipeline build metadata", buildMetadata);
    }
    let buildMetadataJSON = await buildMetadata.json();

    if (!buildMetadataJSON) {
      this.context.log.error("I was unable to parse any build JSON metadata", buildMetadata);
      throw new FailRequestError();
    }
    let buildMetadataParsed = this.pipeline.processBuildMetadata(buildMetadataJSON);

    if (!buildMetadataParsed.hash) {
      this.context.log.error("Unable to retrieve a hash or pullHash", buildMetadataJSON);
      throw new SuccessRequestError(`Did not find a Git commit hash associated with this build. Some plugins on ${config.JENKINS_HOST} may not yet have been updated with JENKINS-50777 REST API enhancements. Skipping deployment.\n`)
    }

    // The folder metadata tells us which GitHub owner/repo this job builds.
    let folderMetadata = await this.fetch(config.FOLDER_METADATA_URL || this.pipeline.getFolderApiUrl(buildUrl), jenkinsOpts);
    if (folderMetadata.status !== 200) {
      this.context.log.error("Failed to fetch Pipeline folder metadata", folderMetadata);
    }
    let folderMetadataJSON = await folderMetadata.json();
    if (!folderMetadataJSON) {
      this.context.log.error("I was unable to parse any folder JSON metadata", folderMetadata);
      throw new FailRequestError();
    }
    let folderMetadataParsed = this.pipeline.processFolderMetadata(folderMetadataJSON);
    if (!folderMetadataParsed.owner || !folderMetadataParsed.repo) {
      this.context.log.error("Unable to retrieve an owner or repo", folderMetadataJSON);
      throw new FailRequestError("Unable to retrieve an owner or repo");
    }

    if (!(await this.github.commitExists(folderMetadataParsed.owner, folderMetadataParsed.repo, buildMetadataParsed.hash))) {
      this.context.log.error("This request was using a commit which does not exist, or was ambiguous, on GitHub!", buildMetadataParsed.hash);
      throw new FailRequestError("Could not find commit (non-existent or ambiguous)");
    }
    this.context.log.info("Metadata loaded repo: %s/%s hash: %s", folderMetadataParsed.owner, folderMetadataParsed.repo, buildMetadataParsed.hash);

    /*
     * Once we have some data about the Pipeline, we can fetch the actual
     * `archive.zip` which has all the right data within it
     */
    let archiveUrl = config.ARCHIVE_URL || this.pipeline.getArchiveUrl(buildUrl, buildMetadataParsed.hash);

    const archivePath = await this.downloadFile(archiveUrl, jenkinsOpts)
    this.context.log.info("Downloaded archiveURL: %s to path: %s", archiveUrl, archivePath);


    /*
     * Once we have an archive.zip, we need to check our permissions based off of
     * the repository-permissions-updater results
     */
    perms = await perms;
    if (perms.status !== 200) {
      this.context.log.error("Failed to get our permissions %o", perms);
      throw new FailRequestError("Failed to retrieve permissions");
    }
    const repoPath = util.format("%s/%s", folderMetadataParsed.owner, folderMetadataParsed.repo);
    // `entries` is filled in-place by permissions.verify with the archive's
    // parsed POM coordinates.
    let entries = [];
    this.context.log.info("Downloaded file size %d", fs.statSync(archivePath).size);
    try {
      await this.permissions.verify(this.context.log, repoPath, archivePath, entries, perms, buildMetadataParsed.hash);
    } catch (err) {
      this.context.log.error("Invalid archive %o", err);
      throw new FailRequestError(`Invalid archive retrieved from Jenkins, perhaps the plugin is not properly incrementalized?\n${err} from ${archiveUrl}`);
    }

    if (entries.length === 0) {
      this.context.log.error("Empty archive");
      throw new SuccessRequestError(`Skipping deployment as no artifacts were found with the expected path, typically due to a PR merge build not up to date with its base branch: ${archiveUrl}\n`)
    }
    this.context.log.info("Archive entries %o", entries);

    // If the first POM is already in Artifactory, this build was deployed
    // before; bail out successfully instead of re-uploading.
    const pom = entries[0].path;
    this.context.log.info("Found a POM %s", pom);
    const pomURL = config.INCREMENTAL_URL + pom;
    const check = await this.fetch(pomURL);
    if (check.status === 200) {
      this.context.log.info("Already exists for pom: %s", pomURL);
      throw new SuccessRequestError(`Already deployed, not attempting to redeploy: ${pomURL}\n`)
    }

    /*
     * Finally, we can upload to Artifactory
     */
    const upload = await this.uploadToArtifactory(archivePath, pomURL);

    const entriesForDisplay = entries.map(entry => {
      return {
        artifactId: entry.artifactId,
        groupId: entry.groupId,
        version: entry.version,
        packaging: entry.packaging,
        url: config.INCREMENTAL_URL + entry.path.replace(/[^/]+$/, "")
      };
    })

    const result = await this.github.createStatus(folderMetadataParsed.owner, folderMetadataParsed.repo, buildMetadataParsed.hash, entriesForDisplay)
      // ignore any actual errors, just log it
      .catch(err => err);

    if (result.status >= 300) {
      this.context.log.error("Failed to create github status, code: %d for repo: %s/%s, check your GitHub credentials, err: %s", result.status, folderMetadataParsed.owner, folderMetadataParsed.repo, result);
    } else {
      this.context.log.info("Created github status for pom: %s", pom);
    }

    return {
      status: upload.status,
      body: "Response from Artifactory: " + upload.statusText + "\n"
    };
  }
}
266 |
267 | export {IncrementalsPlugin};
268 |
--------------------------------------------------------------------------------