├── services ├── seeders │ ├── .gitignore │ └── 20180424162454-null-update-level.js ├── initdb.d │ └── .gitignore ├── .dockerignore ├── .yardopts ├── public │ └── favicon.ico ├── .rspec ├── .yamllint ├── Dockerfile.migrations ├── .sequelizerc ├── cli-test │ ├── ingest.test.js │ ├── plugin-dependency.test.js │ ├── url-resolver.test.js │ └── plugin-manifest.test.js ├── .editorconfig ├── scripts │ ├── upload-update-level │ └── check-psql ├── src │ ├── hooks │ │ ├── dbtimestamp.js │ │ ├── internalonly.js │ │ ├── internalapi.js │ │ ├── logger.js │ │ └── ensureuuid.js │ ├── index.js │ ├── models │ │ ├── index.js │ │ ├── connection.js │ │ ├── registration.js │ │ ├── tainted.js │ │ ├── update.js │ │ ├── instance.js │ │ └── version.js │ ├── services │ │ ├── tainted │ │ │ ├── tainted.service.js │ │ │ └── tainted.hooks.js │ │ ├── index.js │ │ ├── errorTelemetry │ │ │ ├── errorTelemetry.service.js │ │ │ ├── errorTelemetry.class.js │ │ │ └── errorTelemetry.hooks.js │ │ ├── versions │ │ │ ├── versions.service.js │ │ │ └── versions.hooks.js │ │ ├── update │ │ │ ├── update.service.js │ │ │ └── update.hooks.js │ │ ├── status │ │ │ ├── status.service.js │ │ │ └── status.hooks.js │ │ └── registration │ │ │ ├── registration.hooks.js │ │ │ └── registration.service.js │ ├── app.hooks.js │ ├── homepage.js │ ├── middleware │ │ └── index.js │ ├── libs │ │ ├── auth-verifier.js │ │ └── sentry.js │ ├── sequelize.js │ ├── channels.js │ └── sequelize-swagger.js ├── test │ ├── services │ │ ├── update.test.js │ │ ├── versions.test.js │ │ ├── status.test.js │ │ ├── versions.hooks.test.js │ │ ├── errortelemetry.hooks.test.js │ │ ├── errortelemetry.test.js │ │ ├── registration.test.js │ │ └── update.hooks.test.js │ ├── app.hooks.test.js │ ├── libs │ │ ├── sentry.test.js │ │ └── auth-verifier.test.js │ ├── hooks │ │ ├── dbtimestamp.test.js │ │ ├── internalapi.test.js │ │ └── ensureuuid.test.js │ └── middleware.test.js ├── config │ ├── test.json │ ├── production.json │ ├── default.json │ └── database.js ├── acceptance │ ├── app.test.js │ ├── rand-patch.js │ ├── services │ │ ├── tainted.test.js │ │ └── registration.test.js │ └── helpers.js ├── migrations │ ├── 20181009204000-drop-uuid_checksum_uniq-key.js │ ├── 20180725143206-create-errorlog.js │ ├── 20180725143203-create-connections.js │ ├── 20180725143204-create-registrations.js │ ├── 20180725143201-create-updates.js │ ├── 20180913193732-create-tainted.js │ ├── 20180725143202-create-instances.js │ └── 20180725143205-create-versions.js ├── docker-compose.yml ├── cli │ ├── update-center.js │ ├── plugin-manifest.js │ ├── manifest.js │ ├── plugin-dependency.js │ ├── url-resolver.js │ └── ingest.js ├── Dockerfile ├── wait-for-postgres.sh ├── .eslintrc.json ├── .gitignore └── package.json ├── distribution ├── .gitignore ├── client │ ├── public │ │ ├── docs │ │ │ └── .gitignore │ │ ├── static │ │ │ ├── demon_256.png │ │ │ ├── magic_v8.gif │ │ │ └── magician_256.png │ │ └── 502.html │ ├── config │ │ ├── default.json │ │ └── test.json │ ├── src │ │ └── lib │ │ │ ├── request-options.ts │ │ │ ├── __mocks__ │ │ │ └── fs.js │ │ │ ├── rand-patch.ts │ │ │ ├── checksum.ts │ │ │ ├── periodic.ts │ │ │ ├── supervisord.ts │ │ │ ├── ui.ts │ │ │ ├── storage.ts │ │ │ └── error-telemetry.ts │ ├── .gitignore │ ├── test │ │ ├── ui.test.ts │ │ ├── checksum.test.ts │ │ ├── periodic.test.ts │ │ ├── snapshotter.test.ts │ │ ├── client.test.ts │ │ ├── error-telemetry.test.ts │ │ └── storage.test.ts │ ├── ui-test │ │ └── index.test.js │ ├── tsconfig.json │ ├── testlib │ │ └── helpers.ts │ ├── Makefile │ ├── 
webpack.config.js │ ├── scripts │ │ └── render-docs │ ├── .eslintrc.json │ ├── ui │ │ └── index.js │ ├── README.adoc │ ├── package.json │ └── patches │ │ └── promise-request-retry+1.0.1.patch ├── config │ ├── as-code │ │ ├── no-executor.yaml │ │ ├── core.yaml │ │ ├── metrics-healthchecking.yaml │ │ └── create-admin-user.yaml │ ├── logging.properties │ ├── README.adoc │ └── supervisord.conf ├── packaging-list.scripts.txt ├── .yamllint ├── tests │ ├── flavors │ │ ├── aws-ec2-cloud │ │ │ └── docker-compose.aws-ec2-cloud.yml │ │ ├── docker-cloud │ │ │ └── docker-compose.docker-cloud.yml │ │ └── java11-docker-cloud │ │ │ └── docker-compose.java11-docker-cloud.yml │ └── rollback-tests.sh ├── packaging-list.www.txt ├── packaging-list.config.txt ├── packaging-list.client.txt ├── flavors │ ├── Makefile │ ├── aws-ec2-cloud │ │ ├── config │ │ │ └── as-code │ │ │ │ ├── artifact-manager-s3.yaml │ │ │ │ └── ec2-cloud.yaml │ │ └── Dockerfile │ ├── docker-cloud │ │ ├── config │ │ │ ├── as-code │ │ │ │ └── docker-cloud.yaml │ │ │ └── supervisord.conf │ │ └── Dockerfile │ ├── java11-docker-cloud │ │ ├── config │ │ │ ├── as-code │ │ │ │ └── docker-cloud.yaml │ │ │ └── supervisord.conf │ │ └── Dockerfile │ └── build-or-push-flavors.sh ├── bin │ └── start-dev.sh ├── scripts │ ├── start-client.sh │ └── jenkins-evergreen.sh ├── docker-compose.squid-cache.yml └── docker-compose.yml ├── .gitignore ├── tools ├── npm ├── yamllint ├── jsonlint ├── jq ├── shellcheck ├── mvn ├── compose └── node ├── .dockerignore ├── activate ├── docs ├── developer │ ├── meetings │ │ ├── README.adoc │ │ ├── _YYYY-MM-DD-subject-of-meeting │ │ │ └── README.adoc │ │ ├── 2018-03-23-JENKINS-49852-pipeline-usage-telemetry │ │ │ └── README.adoc │ │ ├── 2018-05-07-existing-telemetry-setup-on-jenkins-io │ │ │ └── README.adoc │ │ ├── 2018-05-30-JENKINS-51299-developer-access-howto │ │ │ └── README.adoc │ │ └── 2018-03-18-JENKINS-49406-quality-bar │ │ │ └── README.adoc │ └── USE-CASES.adoc └── index.adoc ├── node.mk ├── LICENSE.txt ├── Makefile └── HACKING.adoc /services/seeders/.gitignore: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /distribution/.gitignore: -------------------------------------------------------------------------------- 1 | commit.txt 2 | -------------------------------------------------------------------------------- /services/initdb.d/.gitignore: -------------------------------------------------------------------------------- 1 | *.sql 2 | -------------------------------------------------------------------------------- /services/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | -------------------------------------------------------------------------------- /distribution/client/public/docs/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.sw* 2 | build/ 3 | shunit2 4 | .vscode/ 5 | -------------------------------------------------------------------------------- /tools/npm: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | exec $(dirname $0)/node npm $@ 4 | -------------------------------------------------------------------------------- /.dockerignore: 
-------------------------------------------------------------------------------- 1 | client/test 2 | services/ 3 | tools/ 4 | tests/ 5 | -------------------------------------------------------------------------------- /services/.yardopts: -------------------------------------------------------------------------------- 1 | --no-private --protected app/**/*.rb - README.adoc LICENSE 2 | -------------------------------------------------------------------------------- /activate: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | export PATH=./node_modules/.bin:${PWD}/tools:${PATH} 4 | -------------------------------------------------------------------------------- /distribution/config/as-code/no-executor.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | jenkins: 3 | numExecutors: 0 4 | -------------------------------------------------------------------------------- /distribution/config/as-code/core.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | jenkins: 3 | systemMessage: "Welcome to Jenkins Evergreen!" 4 | -------------------------------------------------------------------------------- /services/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jenkins-infra/evergreen/HEAD/services/public/favicon.ico -------------------------------------------------------------------------------- /distribution/packaging-list.scripts.txt: -------------------------------------------------------------------------------- 1 | jenkins-evergreen.sh 2 | jenkins-support 3 | jenkins.sh 4 | start-client.sh 5 | -------------------------------------------------------------------------------- /services/.rspec: -------------------------------------------------------------------------------- 1 | --color 2 | --order rand 3 | --warnings 4 | --format progress 5 | --format html 6 | --out reports/rspec.html 7 | -------------------------------------------------------------------------------- /distribution/client/config/default.json: -------------------------------------------------------------------------------- 1 | { 2 | "host" : "0.0.0.0", 3 | "port" : 8081, 4 | "public": "../public/" 5 | } 6 | -------------------------------------------------------------------------------- /distribution/client/config/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "host" : "0.0.0.0", 3 | "port" : 8081, 4 | "public": "../public/" 5 | } 6 | -------------------------------------------------------------------------------- /tools/yamllint: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | exec docker run --rm \ 4 | -w ${PWD} \ 5 | -v ${PWD}:${PWD} \ 6 | boiyaa/yamllint:1.8.1 $@ 7 | -------------------------------------------------------------------------------- /distribution/client/public/static/demon_256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jenkins-infra/evergreen/HEAD/distribution/client/public/static/demon_256.png -------------------------------------------------------------------------------- /distribution/client/public/static/magic_v8.gif: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jenkins-infra/evergreen/HEAD/distribution/client/public/static/magic_v8.gif -------------------------------------------------------------------------------- /distribution/client/public/static/magician_256.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jenkins-infra/evergreen/HEAD/distribution/client/public/static/magician_256.png -------------------------------------------------------------------------------- /distribution/client/src/lib/request-options.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface RequestOptions { 3 | retry?: number, 4 | delay?: number, 5 | factor?: number, 6 | }; 7 | -------------------------------------------------------------------------------- /distribution/.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | line-length: 4 | max: 100 5 | allow-non-breakable-words: true 6 | allow-non-breakable-inline-mappings: false 7 | -------------------------------------------------------------------------------- /services/.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | rules: 3 | line-length: 4 | max: 100 5 | allow-non-breakable-words: true 6 | allow-non-breakable-inline-mappings: false 7 | -------------------------------------------------------------------------------- /distribution/tests/flavors/aws-ec2-cloud/docker-compose.aws-ec2-cloud.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | 4 | instance: 5 | image: jenkins/evergreen:aws-ec2-cloud 6 | -------------------------------------------------------------------------------- /tools/jsonlint: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | exec docker run --rm \ 4 | -w "${PWD}" \ 5 | -v "${PWD}:${PWD}" \ 6 | -i \ 7 | sahsu/docker-jsonlint jsonlint $@ 8 | -------------------------------------------------------------------------------- /distribution/client/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | coverage/ 3 | public/main.js 4 | public/main.js.map 5 | # Docs is rsynced in for the build from the root directory 6 | docs/ 7 | -------------------------------------------------------------------------------- /tools/jq: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | exec docker run --rm \ 6 | -w "${PWD}" \ 7 | -v "${PWD}:${PWD}" \ 8 | -i \ 9 | realguess/jq:1.4 jq $@ 10 | -------------------------------------------------------------------------------- /distribution/client/test/ui.test.ts: -------------------------------------------------------------------------------- 1 | 2 | import UI from '../src/lib/ui'; 3 | 4 | describe('The UI module', () => { 5 | it('should be a singleton', () => { 6 | expect(UI).toBe(UI); 7 | }); 8 | }); 9 | -------------------------------------------------------------------------------- /services/Dockerfile.migrations: -------------------------------------------------------------------------------- 1 | FROM jenkinsciinfra/evergreen-backend:latest 2 | 3 | ENV PATH ./node_modules/.bin:$PATH 4 | 5 | COPY .sequelizerc . 
6 | 7 | CMD sh -c 'sequelize db:migrate && sequelize db:seed:all' 8 | -------------------------------------------------------------------------------- /tools/shellcheck: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | exec docker run --rm \ 4 | -w "${PWD}" \ 5 | -v "${PWD}:${PWD}" \ 6 | koalaman/shellcheck@sha256:6dfafef2730b851e7a8bceda7f2dbef93efb709932865924cb497423b60be582 $@ 7 | -------------------------------------------------------------------------------- /distribution/client/ui-test/index.test.js: -------------------------------------------------------------------------------- 1 | 2 | const index = require('../ui/index'); 3 | 4 | describe('Application root', () => { 5 | it('should be an object', () => { 6 | expect(index).toBeTruthy(); 7 | }); 8 | }); 9 | -------------------------------------------------------------------------------- /distribution/packaging-list.www.txt: -------------------------------------------------------------------------------- 1 | # Include static error pages for nginx to show before the app boots 2 | [[:digit:]]*.html 3 | # Include our static assets just to be sure nginx can show them if it needs to 4 | static*** 5 | -------------------------------------------------------------------------------- /services/.sequelizerc: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | 3 | module.exports = { 4 | 'config': path.resolve('config', 'database.js'), 5 | 'models-path': path.resolve('src', 'models') 6 | } 7 | 8 | // vim: ft=javascript 9 | -------------------------------------------------------------------------------- /distribution/tests/flavors/docker-cloud/docker-compose.docker-cloud.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | 4 | instance: 5 | image: jenkins/evergreen:docker-cloud 6 | volumes: 7 | - /var/run/docker.sock:/var/run/docker.sock 8 | -------------------------------------------------------------------------------- /distribution/packaging-list.config.txt: -------------------------------------------------------------------------------- 1 | # Avoid including our development mode squid conf 2 | - squid.conf 3 | # Include our background process configurations 4 | *.conf 5 | logging.properties 6 | # Include our configuration as code 7 | as-code*** 8 | -------------------------------------------------------------------------------- /distribution/tests/flavors/java11-docker-cloud/docker-compose.java11-docker-cloud.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | 4 | instance: 5 | image: jenkins/evergreen:java11-docker-cloud 6 | volumes: 7 | - /var/run/docker.sock:/var/run/docker.sock 8 | -------------------------------------------------------------------------------- /services/cli-test/ingest.test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Ingest = require('../cli/ingest'); 4 | 5 | describe('Ingest', () => { 6 | it('should be constructable', () => { 7 | expect(new Ingest()).toBeInstanceOf(Ingest); 8 | }); 9 | }); 10 | -------------------------------------------------------------------------------- /services/.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 2 7 | 
end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | -------------------------------------------------------------------------------- /services/scripts/upload-update-level: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | exec curl --data-raw "{\"commit\":\"container-tests\",\"manifest\":$(cat ingest.json)}" \ 4 | -H 'Authorization: the API calls are coming from inside the house' \ 5 | -H 'Content-Type: application/json' \ 6 | http://localhost:3030/update 7 | -------------------------------------------------------------------------------- /services/src/hooks/dbtimestamp.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This hook adds the necessary database timestamps for any request coming into 3 | * the application 4 | */ 5 | 6 | module.exports = function (fieldName) { 7 | return async context => { 8 | context.data[fieldName] = new Date(); 9 | return context; 10 | }; 11 | }; 12 | -------------------------------------------------------------------------------- /services/test/services/update.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const app = require('../../src/app'); 3 | 4 | describe('\'update\' service', () => { 5 | it('registered the service', () => { 6 | const service = app.service('update'); 7 | 8 | assert.ok(service, 'Registered the service'); 9 | }); 10 | }); 11 | -------------------------------------------------------------------------------- /docs/developer/meetings/README.adoc: -------------------------------------------------------------------------------- 1 | = Meetings recap 2 | 3 | 4 | .Meetings 5 | |=== 6 | | Date | Subject 7 | | 2018-03-23 | Initial Pipeline Usage Telemetry Discussion 8 | | 2018-03-18 | How to test the data snapshotting system (JENKINS-49406) 9 | | 2018-05-07 | Existing telemetry setup on jenkins.io 10 | |=== 11 | -------------------------------------------------------------------------------- /services/test/services/versions.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const app = require('../../src/app'); 3 | 4 | describe('\'versions\' service', () => { 5 | it('registered the service', () => { 6 | const service = app.service('versions'); 7 | 8 | assert.ok(service, 'Registered the service'); 9 | }); 10 | }); 11 | -------------------------------------------------------------------------------- /distribution/client/src/lib/__mocks__/fs.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // eslint-disable-next-line no-console 4 | console.log('Using a memfs filesystem...'); 5 | 6 | const memfs = require('memfs'); 7 | const vol = new memfs.Volume(); 8 | const fs = memfs.createFsFromVolume(vol); 9 | 10 | module.exports = fs; 11 | module.exports.volume = vol; 12 | -------------------------------------------------------------------------------- /tools/mvn: -------------------------------------------------------------------------------- 1 | #!/bin/sh -e 2 | 3 | MAVEN_REPO=~/.m2/ 4 | mkdir -p "$MAVEN_REPO/repository" 5 | 6 | exec docker run --rm \ 7 | -v "$(pwd)":/usr/src/mymaven \ 8 | -w /usr/src/mymaven \ 9 | -v "$MAVEN_REPO:/var/maven/.m2" \ 10 | --user $( id -u ) -e MAVEN_CONFIG=/var/maven/.m2 \ 11 | 
maven:3.5.3-jdk-8 mvn -Duser.home=/var/maven $@ 12 | -------------------------------------------------------------------------------- /services/test/app.hooks.test.js: -------------------------------------------------------------------------------- 1 | const hooks = require('../src/app.hooks'); 2 | 3 | describe('global application hooks', () => { 4 | it('should have before/after/error properties', () => { 5 | expect(hooks).toHaveProperty('before'); 6 | expect(hooks).toHaveProperty('after'); 7 | expect(hooks).toHaveProperty('error'); 8 | }); 9 | }); 10 | -------------------------------------------------------------------------------- /distribution/config/as-code/metrics-healthchecking.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | unclassified: 3 | metricsaccesskey: 4 | accessKeys: 5 | - key: "evergreen" 6 | description: "Key for evergreen health-check" 7 | canHealthCheck: true 8 | canPing: false 9 | canThreadDump: false 10 | canMetrics: false 11 | origins: "*" 12 | -------------------------------------------------------------------------------- /distribution/packaging-list.client.txt: -------------------------------------------------------------------------------- 1 | # Include everything relevant from the source directory 2 | ./client/package.json 3 | ./client/package-lock.json 4 | ./client/config/ 5 | ./client/build/ 6 | ./client/build/lib/ 7 | ./client/patches/ 8 | # Include the public site information 9 | ./client/public/ 10 | ./client/public/docs/ 11 | ./client/public/static/ 12 | -------------------------------------------------------------------------------- /services/config/test.json: -------------------------------------------------------------------------------- 1 | { 2 | "host": "127.0.0.1", 3 | "port": 3030, 4 | "public": "../public/", 5 | "paginate": { 6 | "default": 10, 7 | "max": 50 8 | }, 9 | "postgres": "postgres://postgres:grassisevergreener@localhost:5432/evergreen_test", 10 | "internalAPI" : { 11 | "secret" : "this is really a poor secret" 12 | } 13 | } 14 | 15 | -------------------------------------------------------------------------------- /distribution/flavors/Makefile: -------------------------------------------------------------------------------- 1 | container: 2 | ./build-or-push-flavors.sh build 3 | 4 | publish: container 5 | ./build-or-push-flavors.sh push 6 | 7 | lint: 8 | ../../tools/shellcheck *.sh 9 | ../../tools/jsonlint --quiet ./aws-ec2-cloud/CloudFormation/cloudformation-template.json 10 | ../../tools/yamllint --strict */config/as-code/*.yaml 11 | 12 | .PHONY: container publish lint 13 | -------------------------------------------------------------------------------- /distribution/flavors/aws-ec2-cloud/config/as-code/artifact-manager-s3.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | unclassified: 3 | artifactManager: 4 | artifactManagerFactories: 5 | - jclouds: 6 | provider: s3 7 | aws: 8 | awsCredentials: 9 | region: "${REGION}" 10 | s3: 11 | # TODO: create through CloudFormation 12 | container: "${ARTIFACT_MANAGER_S3_BUCKET_NAME}" 13 | prefix: "jenkins_data/" 14 | -------------------------------------------------------------------------------- /services/acceptance/app.test.js: -------------------------------------------------------------------------------- 1 | const rp = require('request-promise'); 2 | const h = require('./helpers'); 3 | 4 | describe('Feathers application tests', () => { 5 | beforeAll(done => h.startApp(done)); 6 | afterAll(done => 
h.stopApp(done)); 7 | 8 | it('starts and shows the index page', () => { 9 | return rp(h.getUrl()).then(body => { 10 | expect(body).toEqual(expect.stringContaining('')); 11 | }); 12 | }); 13 | }); 14 | -------------------------------------------------------------------------------- /distribution/client/src/lib/rand-patch.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * This is to patch stupid behavior by brorand 3 | * (https://github.com/indutny/brorand) 4 | * which is used underneath elliptic for generating keys. 5 | * 6 | * It seems to not believe that we're in nodejs when we really are 7 | */ 8 | 9 | import crypto from 'crypto'; 10 | import rand from 'brorand'; 11 | 12 | rand.Rand.prototype._rand = function _rand(n) { 13 | return crypto.randomBytes(n); 14 | }; 15 | -------------------------------------------------------------------------------- /distribution/client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "allowJs": true, 4 | "alwaysStrict" : true, 5 | "outDir": "./build", 6 | "skipLibCheck": true, 7 | "lib" : ["es2018"], 8 | "module": "commonjs", 9 | "moduleResolution": "node", 10 | "esModuleInterop": true, 11 | "importHelpers" : true, 12 | "target": "es2018", 13 | "sourceMap": true 14 | }, 15 | "include": [ 16 | "./src/**/*" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /services/acceptance/rand-patch.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This is to patch stupid behavior by brorand 3 | * (https://github.com/indutny/brorand) 4 | * which is used underneath elliptic for generating keys. 5 | * 6 | * It seems to not believe that we're in nodejs when we really are 7 | */ 8 | 9 | const crypto = require('crypto'); 10 | const rand = require('brorand'); 11 | 12 | rand.Rand.prototype._rand = function _rand(n) { 13 | return crypto.randomBytes(n); 14 | }; 15 | -------------------------------------------------------------------------------- /services/src/index.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | const logger = require('winston'); 3 | const app = require('./app'); 4 | const port = app.get('port'); 5 | const server = app.listen(port); 6 | 7 | process.on('unhandledRejection', (reason, p) => 8 | logger.error('Unhandled Rejection at: Promise ', p, reason) 9 | ); 10 | 11 | server.on('listening', () => 12 | logger.info('Feathers application started on http://%s:%d', app.get('host'), port) 13 | ); 14 | -------------------------------------------------------------------------------- /distribution/client/testlib/helpers.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * This module contains functions which are helpful for running across multiple 3 | * tests 4 | */ 5 | import fs from 'fs'; 6 | 7 | export class Helpers { 8 | constructor () { 9 | } 10 | 11 | checkFileExists(filename) { 12 | return fs.existsSync(filename); 13 | } 14 | 15 | touchFile(filename) { 16 | return fs.closeSync(fs.openSync(filename, 'w')); 17 | } 18 | } 19 | 20 | export default new Helpers(); 21 | -------------------------------------------------------------------------------- /services/migrations/20181009204000-drop-uuid_checksum_uniq-key.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 
module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.removeConstraint('versions', 'uuid_checksum_uniq'); 5 | }, 6 | down: (queryInterface, Sequelize) => { 7 | return queryInterface.addConstraint('versions', 8 | ['uuid', 'checksum'], 9 | { 10 | type: 'UNIQUE', 11 | name: 'uuid_checksum_uniq', 12 | } 13 | ); 14 | } 15 | }; 16 | -------------------------------------------------------------------------------- /services/config/production.json: -------------------------------------------------------------------------------- 1 | { 2 | "host": "0.0.0.0", 3 | "port": 3030, 4 | "public": "../public/", 5 | "paginate": { 6 | "default": 10, 7 | "max": 50 8 | }, 9 | "postgres": "set to DB_CONNECTION_STRING in the environment", 10 | "jwt": { 11 | "name": "evergreen-jwt", 12 | "secret": "set to EVERGREEN_JWT_SECRET in the environment", 13 | "expiresIn": "14d" 14 | }, 15 | "internalAPI" : { 16 | "secret" : "set to EVERGREEN_INTERNAL_API_SECRET in the environment" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /services/src/models/index.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const glob = require('glob'); 3 | 4 | /* 5 | * Populate module.exports with a constructed model using the given +app+ 6 | */ 7 | module.exports = function(app) { 8 | let models = {}; 9 | glob.sync(path.join(__dirname, '*.js')).forEach((file) => { 10 | const name = path.basename(file, '.js'); 11 | if (name != 'index') { 12 | models[name] = require(path.resolve(file))(app); 13 | } 14 | }); 15 | app.set('models', models); 16 | return models; 17 | }; 18 | -------------------------------------------------------------------------------- /distribution/bin/start-dev.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | { 4 | make run 5 | } & 6 | { 7 | sleep 20 8 | until $(curl --output /dev/null --silent --head --fail http://localhost:3030); do 9 | sleep 3 10 | done 11 | } 12 | 13 | echo "*** backend is up - executing curl command ***" 14 | curl --data-raw "{\"commit\":\"container-tests\",\"manifest\":$(cat ../services/ingest.json)}" \ 15 | -H 'Authorization: the API calls are coming from inside the house' \ 16 | -H 'Content-Type: application/json' \ 17 | http://localhost:3030/update 18 | 19 | wait 20 | -------------------------------------------------------------------------------- /distribution/flavors/docker-cloud/config/as-code/docker-cloud.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | jenkins: 3 | clouds: 4 | - docker: 5 | name: "docker" 6 | dockerApi: 7 | dockerHost: 8 | uri: "tcp://localhost:2375/" 9 | templates: 10 | - labelString: "agent" 11 | dockerTemplateBase: 12 | image: 'batmat/jenkins-ssh-agent' 13 | remoteFs: "/home/jenkins" 14 | connector: 15 | attach: 16 | user: "jenkins" 17 | instanceCapStr: "10" 18 | -------------------------------------------------------------------------------- /distribution/config/as-code/create-admin-user.yaml: -------------------------------------------------------------------------------- 1 | jenkins: 2 | # Agent to master security 3 | remotingSecurity: 4 | enabled: true 5 | securityRealm: 6 | local: 7 | allowsSignup: false 8 | users: 9 | - id: "admin" 10 | password: ${JENKINS_ADMIN_PASSWORD} 11 | authorizationStrategy: 12 | loggedInUsersCanDoAnything: 13 | allowAnonymousRead: false 14 | 15 | #CSRF issuer 16 | 
crumbIssuer: 17 | standard: 18 | excludeClientIPFromCrumb: false 19 | security: 20 | remotingCLI: 21 | enabled: false 22 | -------------------------------------------------------------------------------- /distribution/flavors/java11-docker-cloud/config/as-code/docker-cloud.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | jenkins: 3 | clouds: 4 | - docker: 5 | name: "docker" 6 | dockerApi: 7 | dockerHost: 8 | uri: "tcp://localhost:2375/" 9 | templates: 10 | - labelString: "agent" 11 | dockerTemplateBase: 12 | image: 'batmat/jenkins-ssh-agent' 13 | remoteFs: "/home/jenkins" 14 | connector: 15 | attach: 16 | user: "jenkins" 17 | instanceCapStr: "10" 18 | -------------------------------------------------------------------------------- /services/src/services/tainted/tainted.service.js: -------------------------------------------------------------------------------- 1 | const createService = require('feathers-sequelize'); 2 | const createModel = require('../../models/tainted'); 3 | const hooks = require('./tainted.hooks'); 4 | 5 | module.exports = function (app) { 6 | const options = { 7 | name: 'tainted', 8 | Model: createModel(app) 9 | }; 10 | 11 | let service = createService(options); 12 | service.docs = { 13 | description: 'Mark an update level tainted for a specific instance', 14 | }; 15 | app.use('/update/tainted', service); 16 | app.service('/update/tainted').hooks(hooks.getHooks()); 17 | }; 18 | -------------------------------------------------------------------------------- /services/config/default.json: -------------------------------------------------------------------------------- 1 | { 2 | "host": "0.0.0.0", 3 | "port": 3030, 4 | "public": "../public/", 5 | "paginate": { 6 | "default": 10, 7 | "max": 50 8 | }, 9 | "postgres": "postgres://postgres:grassisevergreener@localhost:5432/evergreen_development", 10 | "jwt": { 11 | "name": "evergreen-jwt", 12 | "secret": "an evergreen is a plant that has leaves throughout the year, always green", 13 | "expiresIn": "14d" 14 | }, 15 | "internalAPI" : { 16 | "secret" : "the API calls are coming from inside the house" 17 | }, 18 | "sentry": { 19 | "url": "https://sentry-dsn@example.com/project-id" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /services/src/app.hooks.js: -------------------------------------------------------------------------------- 1 | // Application hooks that run for every service 2 | const logger = require('./hooks/logger'); 3 | 4 | module.exports = { 5 | before: { 6 | all: [ logger() ], 7 | find: [], 8 | get: [], 9 | create: [], 10 | update: [], 11 | patch: [], 12 | remove: [] 13 | }, 14 | 15 | after: { 16 | all: [ logger() ], 17 | find: [], 18 | get: [], 19 | create: [], 20 | update: [], 21 | patch: [], 22 | remove: [] 23 | }, 24 | 25 | error: { 26 | all: [ logger() ], 27 | find: [], 28 | get: [], 29 | create: [], 30 | update: [], 31 | patch: [], 32 | remove: [] 33 | } 34 | }; 35 | -------------------------------------------------------------------------------- /services/seeders/20180424162454-null-update-level.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* 4 | * This seed just creates a null Update Level which is a special case to 5 | * indicate that the registered instance has no updates whatsoever yet 6 | */ 7 | 8 | module.exports = { 9 | up: (queryInterface, Sequelize) => { 10 | return queryInterface.bulkInsert('updates', [ 11 | { 12 | commit: '', 13 | 
channel: 'general', 14 | manifest: '{}', 15 | tainted: false, 16 | createdAt: Sequelize.fn('NOW'), 17 | updatedAt: Sequelize.fn('NOW') 18 | } 19 | ]); 20 | }, 21 | 22 | down: (queryInterface, Sequelize) => { 23 | } 24 | }; 25 | -------------------------------------------------------------------------------- /distribution/flavors/build-or-push-flavors.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | IMAGE_NAME=jenkins/evergreen 5 | 6 | set -xe 7 | 8 | if [[ "$1" == "build" ]]; then 9 | for dir in *-cloud 10 | do 11 | echo "Building $dir directory" 12 | # Changing up a directory to ensure we have the full build context 13 | (cd ../ && \ 14 | docker build --build-arg FLAVOR="$dir" -t "$IMAGE_NAME:$dir" -f "flavors/$dir/Dockerfile" .) 15 | done 16 | elif [[ "$1" == "push" ]]; then 17 | for dir in *-cloud 18 | do 19 | echo "Push $dir image" 20 | docker push "$IMAGE_NAME:$dir" 21 | done 22 | else 23 | echo "Unknown parameter, failing." 24 | exit 1 25 | fi 26 | -------------------------------------------------------------------------------- /services/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | db: 4 | image: postgres:alpine 5 | environment: 6 | - 'POSTGRES_PASSWORD=grassisevergreener' 7 | # Used for psql non-interactive scripting 8 | - 'PGPASSWORD=grassisevergreener' 9 | - 'POSTGRES_DB=evergreen_development' 10 | ports: 11 | - '5432:5432' 12 | 13 | node: 14 | image: node:9-alpine 15 | command: 'npm run start' 16 | working_dir: $PWD 17 | environment: 18 | - 'PATH=$PWD/node_modules/.bin:$PATH' 19 | - 'DB_TRACING=$DB_TRACING' 20 | - 'DEBUG=$DEBUG' 21 | volumes: 22 | - $PWD:$PWD 23 | ports: 24 | - '3030:3030' 25 | depends_on: 26 | - db 27 | -------------------------------------------------------------------------------- /docs/developer/meetings/_YYYY-MM-DD-subject-of-meeting/README.adoc: -------------------------------------------------------------------------------- 1 | = YYYY-MM-DD - Subject 2 | :toc: 3 | 4 | == Notes 5 | 6 | === Context 7 | 8 | //// 9 | Reminder: we follow the _Four Open_ approach. 10 | So we must try to give the necessary context to a newcomer here. 11 | Roughly, 5 lines or so to make the content here reasonably approachable by an external contributor. 12 | 13 | //// 14 | 15 | === Attendees 16 | 17 | * Jenkins Butler 18 | * ... 19 | 20 | == Summary 21 | 22 | /// Summarize here 23 | 24 | == Actions 25 | 26 | //// 27 | Summarize the actions that ought to be done. 28 | Explain why none are listed if none. 
29 | //// 30 | 31 | |=== 32 | | ACTION | Person 33 | | Build quicker | Butler 34 | |=== 35 | -------------------------------------------------------------------------------- /services/src/services/index.js: -------------------------------------------------------------------------------- 1 | const status = require('./status/status.service'); 2 | const registration = require('./registration/registration.service'); 3 | const tainted = require('./tainted/tainted.service'); 4 | 5 | const versions = require('./versions/versions.service.js'); 6 | 7 | const update = require('./update/update.service.js'); 8 | 9 | const errorTelemetry = require('./errorTelemetry/errorTelemetry.service.js'); 10 | 11 | // eslint-disable-next-line no-unused-vars 12 | module.exports = function (app) { 13 | app.configure(status); 14 | app.configure(registration); 15 | app.configure(versions); 16 | app.configure(update); 17 | app.configure(errorTelemetry); 18 | app.configure(tainted); 19 | }; 20 | -------------------------------------------------------------------------------- /distribution/client/Makefile: -------------------------------------------------------------------------------- 1 | include ../../node.mk 2 | 3 | SHELL := /bin/bash 4 | PATH := ./node_modules/.bin:../../tools:$(PATH) 5 | 6 | all:: build 7 | 8 | lint:: compile 9 | 10 | check:: compile 11 | 12 | unit:: compile 13 | 14 | compile: depends 15 | tsc 16 | 17 | docs: 18 | rsync -avz --exclude=developer ../../docs . 19 | node ./scripts/render-docs 20 | 21 | build: depends docs compile 22 | webpack-cli 23 | 24 | watch-compile: 25 | tsc -w 26 | 27 | run: depends build 28 | EVERGREEN_HOME=/tmp/ \ 29 | EVERGREEN_DATA=/tmp/ \ 30 | EVERGREEN_DISABLE_SNAPSHOT=true \ 31 | EVERGREEN_ENDPOINT=http://127.0.0.1:3030 \ 32 | FLAVOR=docker-cloud \ 33 | npm run start 34 | 35 | .PHONY: run build docs compile 36 | -------------------------------------------------------------------------------- /services/cli/update-center.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require('fs'); 4 | 5 | class UpdateCenter { 6 | constructor() { 7 | } 8 | 9 | static fromFile(fileName) { 10 | const data = JSON.parse(fs.readFileSync(fileName)); 11 | 12 | /* 13 | * process the 'gav' to make the components independently useful 14 | */ 15 | Object.values(data.plugins).forEach((plugin) => { 16 | // eslint-disable-next-line no-unused-vars 17 | const [group, artifact, version] = plugin.gav.split(':'); 18 | plugin.groupId = group; 19 | plugin.artifactId = artifact; 20 | }); 21 | let updates = new UpdateCenter(); 22 | return Object.assign(updates, data); 23 | } 24 | } 25 | 26 | module.exports = UpdateCenter; 27 | -------------------------------------------------------------------------------- /services/migrations/20180725143206-create-errorlog.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('errorLogs', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | log: { 12 | type: Sequelize.STRING 13 | }, 14 | createdAt: { 15 | allowNull: false, 16 | type: Sequelize.DATE 17 | }, 18 | updatedAt: { 19 | allowNull: false, 20 | type: Sequelize.DATE 21 | } 22 | }); 23 | }, 24 | down: (queryInterface, Sequelize) => { 25 | return queryInterface.dropTable('errorLogs'); 26 | } 27 | }; 28 | 
-------------------------------------------------------------------------------- /services/scripts/check-psql: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # This script is largely intended to ensure that the database is up for services 4 | # before continuing. 5 | # 6 | # If we cannot find some basic tables we expect in a few tries, exit non-zero 7 | 8 | 9 | REPO_ROOT=$(dirname "${BASH_SOURCE[0]}")/../.. 10 | 11 | for i in $(seq 1 5); do 12 | ${REPO_ROOT}/tools/compose run --rm db psql -h db -U postgres \ 13 | -d evergreen_development \ 14 | -c "\\d+" > /dev/null 15 | 16 | if [ $? -eq 0 ]; then 17 | exit 0; 18 | fi; 19 | 20 | echo ">> Waiting for the database to sync.." 21 | 22 | sleep $i 23 | done; 24 | 25 | echo ">> Could not find an \`instances\` table, assuming the database is unprepared" 26 | exit 1 27 | -------------------------------------------------------------------------------- /tools/compose: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # This file is a simple shim script to ensure that wherever we are building, we 4 | # have a docker-compose that we can use 5 | 6 | VERSION="1.20.0" 7 | REPO_ROOT=$(dirname "${BASH_SOURCE[0]}")/../ 8 | BUILD_DIR="${REPO_ROOT}/build" 9 | LOCAL_COMPOSE=${REPO_ROOT}/build/docker-compose 10 | 11 | mkdir -p ${BUILD_DIR} 12 | 13 | if [ ! -f "${LOCAL_COMPOSE}" ]; then 14 | echo ">> No local docker-compose, downloading ${VERSION}" 15 | OS=$(uname -s) 16 | ARCH=$(uname -m) 17 | curl -SL \ 18 | https://github.com/docker/compose/releases/download/${VERSION}/docker-compose-${OS}-${ARCH} \ 19 | -o ${LOCAL_COMPOSE} 20 | chmod +x ${LOCAL_COMPOSE} 21 | fi; 22 | 23 | exec ${LOCAL_COMPOSE} "${@}" 24 | -------------------------------------------------------------------------------- /services/src/services/errorTelemetry/errorTelemetry.service.js: -------------------------------------------------------------------------------- 1 | /* 2 | * The error telemetry service is responsible for receiving error log 3 | * payloads submitted by evergreen-client instances. 4 | * 5 | * Each submission is passed along to the backend so that client-side errors 6 | * can be reviewed later and correlated with the instance which submitted 7 | * them. 
8 | */ 9 | 10 | const hooks = require('./errorTelemetry.hooks'); 11 | const ErrorTelemetryService = require('./errorTelemetry.class'); 12 | 13 | module.exports = function (app) { 14 | app.use('/telemetry/error', new ErrorTelemetryService(app)); 15 | app.service('telemetry/error').hooks(hooks.getHooks()); 16 | }; 17 | -------------------------------------------------------------------------------- /distribution/flavors/aws-ec2-cloud/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jenkins/evergreen:base 2 | 3 | ARG FLAVOR=null 4 | ENV FLAVOR ${FLAVOR} 5 | 6 | # Prepare the flavor specific parts of the distribution 7 | # https://github.com/moby/moby/issues/35018, cannot use $user below 8 | COPY --chown=jenkins:jenkins build/evergreen-${FLAVOR}.zip / 9 | RUN cd / && unzip -q evergreen-${FLAVOR}.zip && chown -R jenkins:jenkins /evergreen 10 | RUN rm -f /evergreen-${FLAVOR}.zip 11 | 12 | # Jenkins directory is a volume, so configuration and build history 13 | # can be persisted and survive image upgrades 14 | # Important: this must be done *after* all file system changes have been made 15 | # by the Dockerfile 16 | VOLUME ${EVERGREEN_HOME} 17 | CMD /usr/bin/supervisord -c $EVERGREEN_HOME/config/supervisord.conf 18 | -------------------------------------------------------------------------------- /distribution/flavors/docker-cloud/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jenkins/evergreen:base 2 | 3 | ARG FLAVOR=null 4 | ENV FLAVOR ${FLAVOR} 5 | 6 | # Prepare the flavor specific parts of the distribution 7 | # https://github.com/moby/moby/issues/35018, cannot use $user below 8 | COPY --chown=jenkins:jenkins build/evergreen-${FLAVOR}.zip / 9 | RUN cd / && unzip -q evergreen-${FLAVOR}.zip && chown -R jenkins:jenkins /evergreen 10 | RUN rm -f /evergreen-${FLAVOR}.zip 11 | 12 | # Jenkins directory is a volume, so configuration and build history 13 | # can be persisted and survive image upgrades 14 | # Important: this must be done *after* all file system changes have been made 15 | # by the Dockerfile 16 | VOLUME ${EVERGREEN_HOME} 17 | CMD /usr/bin/supervisord -c $EVERGREEN_HOME/config/supervisord.conf 18 | -------------------------------------------------------------------------------- /distribution/client/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | entry: { 5 | main: './ui/index.js', 6 | }, 7 | devServer: { 8 | contentBase: path.join(__dirname, 'public'), 9 | }, 10 | devtool: 'source-map', 11 | module: { 12 | rules: [ 13 | // All output '.js' files will have any sourcemaps re-processed by 'source-map-loader'. 
14 | { enforce: "pre", test: /\.js$/, loader: "source-map-loader" }, 15 | 16 | { 17 | test: /\.css$/, 18 | use: ['style-loader', 'css-loader'] 19 | }, 20 | ] 21 | }, 22 | plugins: [ 23 | ], 24 | resolve: { 25 | extensions: ['.tsx', '.ts', '.js', '.jsx', '.json'] 26 | }, 27 | output: { 28 | path: path.resolve(__dirname, 'public'), 29 | } 30 | }; 31 | 32 | -------------------------------------------------------------------------------- /services/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:10 as builder 2 | 3 | ARG APP_DIR=/srv/evergreen 4 | WORKDIR ${APP_DIR} 5 | 6 | ADD package*json ${APP_DIR}/ 7 | 8 | RUN npm ci 9 | 10 | # Doing a multi-stage build to reset some stuff for a smaller image 11 | FROM node:9-alpine 12 | 13 | ARG APP_DIR=/srv/evergreen 14 | WORKDIR ${APP_DIR} 15 | 16 | COPY --from=builder ${APP_DIR} . 17 | 18 | COPY src ${APP_DIR}/src 19 | COPY migrations ${APP_DIR}/migrations 20 | COPY config ${APP_DIR}/config 21 | COPY assets ${APP_DIR}/assets 22 | COPY public ${APP_DIR}/public 23 | COPY views ${APP_DIR}/views 24 | COPY commit.txt ${APP_DIR}/ 25 | 26 | EXPOSE 3030 27 | 28 | COPY wait-for-postgres.sh /wait-for-postgres.sh 29 | RUN apk add --update-cache postgresql-client && chmod a+x /wait-for-postgres.sh 30 | CMD npm run start 31 | -------------------------------------------------------------------------------- /distribution/config/logging.properties: -------------------------------------------------------------------------------- 1 | .level=INFO 2 | handlers=java.util.logging.ConsoleHandler,java.util.logging.FileHandler 3 | 4 | java.util.logging.SimpleFormatter.format=[%4$s][%1$tF %1$tT] %5$s (from %2$s)%6$s%n 5 | 6 | java.util.logging.ConsoleHandler.level=INFO 7 | java.util.logging.ConsoleHandler.formatter=java.util.logging.SimpleFormatter 8 | 9 | java.util.logging.FileHandler.level=INFO 10 | java.util.logging.FileHandler.formatter=java.util.logging.SimpleFormatter 11 | java.util.logging.FileHandler.encoding=UTF-8 12 | java.util.logging.FileHandler.limit=10000000 13 | java.util.logging.FileHandler.count=5 14 | # FIXME: Ideally, we should find a way to use $JENKINS_VAR below instead of the hardcoded value 15 | java.util.logging.FileHandler.pattern=/evergreen/data/jenkins/var/logs/jenkins.log.%g 16 | -------------------------------------------------------------------------------- /distribution/client/scripts/render-docs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const fs = require('fs'); 4 | const path = require('path'); 5 | const logger = require('winston'); 6 | const Asciidoctor = require('asciidoctor.js'); 7 | 8 | logger.LEVEL = 'debug'; 9 | const docsDir = path.join(__dirname, '../docs'); 10 | const outputDir = path.join(__dirname, '../public/docs'); 11 | 12 | const doctor = new Asciidoctor(); 13 | 14 | fs.readdirSync(docsDir).filter(f => f.endsWith('.adoc')).forEach((fileName) => { 15 | logger.info('Converting', fileName); 16 | doctor.convertFile(path.join(docsDir, fileName)); 17 | }); 18 | 19 | fs.readdirSync(docsDir).filter(f => f.endsWith('.html')).forEach((fileName) => { 20 | fs.renameSync(path.join(docsDir, fileName), path.join(outputDir, fileName)); 21 | }); 22 | -------------------------------------------------------------------------------- /services/src/models/connection.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // See 
http://docs.sequelizejs.com/en/latest/docs/models-definition/ 4 | // for more of what you can do here. 5 | const Sequelize = require('sequelize'); 6 | const DataTypes = Sequelize.DataTypes; 7 | 8 | module.exports = function (app) { 9 | const sequelizeClient = app.get('sequelizeClient'); 10 | const connection = sequelizeClient.define('connections', { 11 | uuid: { 12 | allowNull: false, 13 | type: DataTypes.UUID, 14 | description: 'An evergreen-client\'s generated from registration UUID', 15 | }, 16 | lastConnectedAt: { 17 | type: DataTypes.DATE 18 | }, 19 | }); 20 | 21 | // eslint-disable-next-line no-unused-vars 22 | connection.associate = function (models) { 23 | }; 24 | 25 | return connection; 26 | }; 27 | -------------------------------------------------------------------------------- /services/wait-for-postgres.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # wait-for-postgres.sh 3 | 4 | # Shim to avoid the service starting before the DB is really available: 5 | # https://docs.docker.com/compose/startup-order/ 6 | # FIXME: write reconnection logic (?) -- maybe not, since this is supposed to be a local-dev only issue I guess 7 | set -euo pipefail 8 | 9 | host="$1" 10 | shift 11 | port="$1" 12 | shift 13 | 14 | cmd="$@" 15 | 16 | maxAttempts=10 17 | 18 | until psql -h "$host" -p "$port" -U "postgres" -c '\q'; do 19 | maxAttempts=$(( $maxAttempts - 1 )) 20 | if [ "$maxAttempts" -le 0 ]; then 21 | >&2 echo "Maximum number of attempts reached: exiting" 22 | exit 1 23 | fi 24 | >&2 echo "Postgres is unavailable - sleeping" 25 | sleep 1 26 | done 27 | 28 | >&2 echo "Postgres is up - executing command" 29 | exec $cmd 30 | -------------------------------------------------------------------------------- /services/src/hooks/internalonly.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This hook only allows internal service calls to the specified service 3 | * method 4 | */ 5 | const errors = require('@feathersjs/errors'); 6 | 7 | module.exports = function(context) { 8 | if (context.type !== 'before') { 9 | throw new Error('The `internalOnly` hook should only be used as a `before` hook.'); 10 | } 11 | /* This is an internal call and should be allowed */ 12 | if (!context.params.provider) { 13 | return context; 14 | } 15 | 16 | throw new errors.MethodNotAllowed(`The ${context.method} is not allowed on this service`); 17 | }; 18 | 19 | module.exports.swagger = { 20 | description: 'This method is guarded and only callable from within the Evergreen services application itself', 21 | summary: 'Only available for internal service calls', 22 | }; 23 | -------------------------------------------------------------------------------- /services/src/hooks/internalapi.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This hook restricts the given API to our "internal API" calls which are 3 | * intended for back office type calls from our own internal automation. 
4 | * 5 | * These APIs are not intended to be used by the evergreen-client or any other 6 | * callers 7 | */ 8 | 9 | const errors = require('@feathersjs/errors'); 10 | 11 | module.exports = function(context) { 12 | let authorization = context.params.headers.authorization; 13 | /* 14 | * Override the internalAPI secret if it has been provided by the environment 15 | */ 16 | let secret = process.env.EVERGREEN_INTERNAL_API_SECRET || context.app.get('internalAPI').secret; 17 | 18 | if (authorization != secret) { 19 | throw new errors.NotAuthenticated('This API is unavailable'); 20 | } 21 | 22 | return context; 23 | }; 24 | -------------------------------------------------------------------------------- /distribution/client/test/checksum.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | jest.mock('fs'); 4 | 5 | const fs = require('fs'); 6 | 7 | import Checksum from '../src/lib/checksum'; 8 | 9 | describe('Checksum', () => { 10 | beforeEach(() => { 11 | /* Make sure memfs is flushed every time */ 12 | fs.volume.reset(); 13 | }); 14 | describe('signatureFromFile', () => { 15 | const validFile = '/jest-test'; 16 | 17 | beforeEach(() => { 18 | fs.writeFileSync(validFile, 'hello world'); 19 | }); 20 | 21 | it('should return a string', () => { 22 | expect(Checksum.signatureFromFile(validFile)).toBeTruthy(); 23 | }); 24 | 25 | it('should return null for a non-existent file', () => { 26 | expect(Checksum.signatureFromFile('/tmp/no-way-this-file.ever.exists/i-hope')).toBeFalsy(); 27 | }); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /distribution/flavors/java11-docker-cloud/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM jenkins/evergreen:docker-cloud 2 | 3 | ARG FLAVOR=null 4 | ENV FLAVOR ${FLAVOR} 5 | 6 | # Prepare the flavor specific parts of the distribution 7 | # https://github.com/moby/moby/issues/35018, cannot use $user below 8 | COPY --chown=jenkins:jenkins build/evergreen-${FLAVOR}.zip / 9 | RUN cd / && unzip -qo evergreen-${FLAVOR}.zip && chown -R jenkins:jenkins /evergreen 10 | RUN rm -f /evergreen-${FLAVOR}.zip 11 | 12 | RUN curl -L --show-error https://download.java.net/java/GA/jdk11/13/GPL/openjdk-11.0.1_linux-x64_bin.tar.gz --output openjdk.tar.gz && \ 13 | echo "7a6bb980b9c91c478421f865087ad2d69086a0583aeeb9e69204785e8e97dcfd openjdk.tar.gz" | sha256sum -c && \ 14 | tar xvzf openjdk.tar.gz && \ 15 | mv jdk-11.0.1/ /usr/java && \ 16 | rm openjdk.tar.gz 17 | ENV PATH=/usr/java/bin:$PATH 18 | -------------------------------------------------------------------------------- /distribution/client/src/lib/checksum.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import crypto from 'crypto'; 4 | import fs from 'fs'; 5 | import * as logger from 'winston'; 6 | 7 | export default class Checksum { 8 | /* 9 | * Generate a SHA-256 checksum signature from the provided relative or 10 | * absolute file path 11 | * 12 | * @param {string} Properly formed path to file 13 | * @return {string} hex-encoded sha256 signature 14 | */ 15 | static signatureFromFile(filePath) { 16 | try { 17 | return crypto.createHash('sha256') 18 | .update(fs.readFileSync(filePath)) 19 | .digest('hex'); 20 | } catch (err) { 21 | if (err.code == 'ENOENT') { 22 | logger.error('The file path does not exist and cannot provide a signature', filePath); 23 | return null; 24 | } 25 | throw err; 26 | } 27 
| } 28 | } 29 | -------------------------------------------------------------------------------- /services/migrations/20180725143203-create-connections.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('connections', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | uuid: { 12 | allowNull: false, 13 | type: Sequelize.UUID 14 | }, 15 | lastConnectedAt: { 16 | type: Sequelize.DATE 17 | }, 18 | createdAt: { 19 | allowNull: false, 20 | type: Sequelize.DATE 21 | }, 22 | updatedAt: { 23 | allowNull: false, 24 | type: Sequelize.DATE 25 | } 26 | }); 27 | }, 28 | down: (queryInterface, Sequelize) => { 29 | return queryInterface.dropTable('connections'); 30 | } 31 | }; 32 | -------------------------------------------------------------------------------- /services/config/database.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This module exists to dynamically construct the right configuration for the 3 | * sequelize command line tool for migrations. 4 | */ 5 | 6 | const fs = require('fs'); 7 | const path = require('path'); 8 | 9 | const nodeEnv = process.env.NODE_ENV || 'development'; 10 | 11 | let connectorConfig = null; 12 | let connectorConfigFile = path.join(__dirname, `${nodeEnv}.json`); 13 | 14 | if (fs.existsSync(connectorConfigFile)) { 15 | connectorConfig = JSON.parse(fs.readFileSync(connectorConfigFile)); 16 | } 17 | else { 18 | connectorConfig = JSON.parse(fs.readFileSync(path.join(__dirname, 'default.json'))); 19 | } 20 | 21 | module.exports = {}; 22 | module.exports[nodeEnv] = { 23 | url : process.env.DB_CONNECTION_STRING || connectorConfig['postgres'], 24 | dialect : 'postgresql', 25 | seederStorage: 'sequelize', 26 | }; 27 | -------------------------------------------------------------------------------- /services/test/services/status.test.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This file contains integration tests for the 'status' service which is 3 | * primarily involved in managing `Instance` data 4 | */ 5 | 6 | const assert = require('assert'); 7 | 8 | const app = require('../../src/app'); 9 | 10 | describe('\'status\' service', () => { 11 | beforeEach(async () => { 12 | /* Need to forcefully await to ensure that we don't execute any other 13 | * tests 14 | */ 15 | await app.service('status').remove(null, { query: { $limit: 1000 } }); 16 | }); 17 | 18 | it('registered the service', () => { 19 | const service = app.service('status'); 20 | 21 | assert.ok(service, 'Registered the service'); 22 | }); 23 | 24 | it('has no status by default', async () => { 25 | const items = await app.service('status').find(); 26 | assert.equal(items.length, 0); 27 | }); 28 | }); 29 | -------------------------------------------------------------------------------- /services/src/models/registration.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const Sequelize = require('sequelize'); 3 | const DataTypes = Sequelize.DataTypes; 4 | 5 | module.exports = function (app) { 6 | const sequelizeClient = app.get('sequelizeClient'); 7 | const reg = sequelizeClient.define('registrations', { 8 | uuid: { 9 | allowNull: false, 10 | type: DataTypes.UUID, 11 | }, 12 | pubKey: { 13 | type: DataTypes.STRING, 14 | description: 
'Client-side generated ECDH hex encoded public key', 15 | }, 16 | curve: { 17 | type: DataTypes.STRING, 18 | description: 'ECC curve associated with the ECDSA keypair (only `secp256k1` supported)', 19 | }, 20 | createdAt: { 21 | type: DataTypes.DATE 22 | }, 23 | }); 24 | 25 | // eslint-disable-next-line no-unused-vars 26 | reg.associate = function (models) { 27 | }; 28 | 29 | return reg; 30 | }; 31 | -------------------------------------------------------------------------------- /distribution/config/README.adoc: -------------------------------------------------------------------------------- 1 | = Jenkins Evergreen instance configuration 2 | 3 | This folder contains various configuration files used to build the _Jenkins Evergreen_ distribution. 4 | 5 | 6 | == `supervisord.conf` 7 | 8 | Configures the container's entrypoint process: 9 | link:http://supervisord.org/[supervisord] 10 | to properly run the services necessary in the container for Jenkins Evergreen 11 | including `evergreen-client` and Jenkins itself. 12 | 13 | == `logging.properties` 14 | 15 | A Log4J configuration file to customize the logging output from Jenkins in 16 | conjunction with the 17 | link:https://github.com/jenkinsci/evergreen-plugin[Evergreen plugin]. 18 | 19 | == `jenkins-configuration.yaml` 20 | 21 | Root-level static configuration for use by the Jenkins instance, utilizing the 22 | link:https://github.com/jenkinsci/configuration-as-code-plugin[Configuration as Code plugin] 23 | 24 | -------------------------------------------------------------------------------- /distribution/scripts/start-client.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Wrapper to force the client to wait for the backend to become available 4 | # Should only be useful during development 5 | set -euo pipefail 6 | 7 | if [ ! 
-z "${DEVELOPMENT:-}" ]; then 8 | sleepTime=8 9 | echo "DEVELOPMENT MODE: client will wait $sleepTime before starting, to give time to the backend to start and receive a first UL" 10 | sleep $sleepTime 11 | 12 | maxAttempts=30 13 | until curl -s "$EVERGREEN_ENDPOINT" --output /dev/null ; do 14 | maxAttempts=$(( maxAttempts - 1 )) 15 | if [[ $maxAttempts -le 0 ]]; then 16 | >&2 echo "Maximum number of attempts reached: exiting" 17 | exit 1 18 | fi 19 | >&2 echo "Backend is unavailable - sleeping for some more time" 20 | sleep 1 21 | done 22 | 23 | else 24 | echo "Client is starting up" 25 | fi 26 | 27 | export PATH=/usr/bin:/usr/local/bin:$PATH 28 | exec npm run start 29 | -------------------------------------------------------------------------------- /services/migrations/20180725143204-create-registrations.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('registrations', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | uuid: { 12 | allowNull: false, 13 | type: Sequelize.UUID 14 | }, 15 | pubKey: { 16 | type: Sequelize.STRING 17 | }, 18 | createdAt: { 19 | allowNull: false, 20 | type: Sequelize.DATE 21 | }, 22 | updatedAt: { 23 | allowNull: false, 24 | type: Sequelize.DATE 25 | }, 26 | curve: { 27 | allowNull: false, 28 | type: Sequelize.STRING 29 | } 30 | }); 31 | }, 32 | down: (queryInterface, Sequelize) => { 33 | return queryInterface.dropTable('registrations'); 34 | } 35 | }; 36 | -------------------------------------------------------------------------------- /services/src/models/tainted.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Sequelize = require('sequelize'); 4 | const DataTypes = Sequelize.DataTypes; 5 | 6 | module.exports = function (app) { 7 | const sequelizeClient = app.get('sequelizeClient'); 8 | const tainted = sequelizeClient.define('tainteds', { 9 | uuid: { 10 | allowNull: false, 11 | description: 'An evergreen-client\'s generated from registration UUID', 12 | type: DataTypes.UUID, 13 | }, 14 | updateId: { 15 | allowNull: false, 16 | type: DataTypes.INTEGER, 17 | description: 'Current `updates` level for the instance', 18 | }, 19 | createdAt: { 20 | type: DataTypes.DATE, 21 | }, 22 | updatedAt: { 23 | type: DataTypes.DATE 24 | }, 25 | }); 26 | 27 | // eslint-disable-next-line no-unused-vars 28 | tainted.associate = function (models) { 29 | tainted.belongsTo(models.updates); 30 | }; 31 | 32 | return tainted; 33 | }; 34 | -------------------------------------------------------------------------------- /distribution/client/test/periodic.test.ts: -------------------------------------------------------------------------------- 1 | import Periodic from '../src/lib/periodic'; 2 | 3 | describe('The periodic module', () => { 4 | /* Just a simple fake app for unit test 5 | */ 6 | describe('runHourly()', () => { 7 | it('allow registration of an hourly callback', () => { 8 | const p = new Periodic(); 9 | expect(p.runHourly('jest-fun', () => {})).toBeTruthy(); 10 | }); 11 | }); 12 | 13 | describe('runDaily()', () => { 14 | it('allows registration of a daily callback', () => { 15 | const p = new Periodic(); 16 | expect(p.runDaily('jest-fun', () => {})).toBeTruthy(); 17 | }); 18 | }); 19 | 20 | describe('computeOffset()', () => { 21 | const p = new Periodic(); 22 | 23 | it('should return a 
number between 0-59', () => { 24 | const offset = p.computeOffset(); 25 | expect(offset).toBeGreaterThanOrEqual(0); 26 | expect(offset).toBeLessThanOrEqual(59); 27 | }); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /services/test/libs/sentry.test.js: -------------------------------------------------------------------------------- 1 | const Sentry = require('../../src/libs/sentry'); 2 | 3 | describe('Sentry lib', () => { 4 | beforeEach(() => { 5 | this.sentry = new Sentry(); 6 | }); 7 | 8 | it('does nothing with empty data', () => { 9 | this.sentry.sendOutput(null); 10 | }); 11 | 12 | it('maps JUL levels correctly', () => { 13 | expect(this.sentry.mapJavaLogLevel(null)).toBe('info'); 14 | expect(this.sentry.mapJavaLogLevel('severe')).toBe('error'); 15 | expect(this.sentry.mapJavaLogLevel('warning')).toBe('warning'); 16 | expect(this.sentry.mapJavaLogLevel('config')).toBe('info'); 17 | expect(this.sentry.mapJavaLogLevel('info')).toBe('info'); 18 | expect(this.sentry.mapJavaLogLevel('fine')).toBe('debug'); 19 | expect(this.sentry.mapJavaLogLevel('finer')).toBe('debug'); 20 | expect(this.sentry.mapJavaLogLevel('finest')).toBe('debug'); 21 | expect(this.sentry.mapJavaLogLevel('foobar')).toBe('info'); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /services/src/models/update.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const Sequelize = require('sequelize'); 3 | const DataTypes = Sequelize.DataTypes; 4 | 5 | module.exports = function (app) { 6 | const sequelizeClient = app.get('sequelizeClient'); 7 | const update = sequelizeClient.define('updates', { 8 | commit: { 9 | type: DataTypes.STRING, 10 | description: 'Commit SHA1 of the source file for the Update Level', 11 | }, 12 | channel: { 13 | type: DataTypes.STRING, 14 | }, 15 | manifest: { 16 | type: DataTypes.JSON, 17 | description: 'JSON serialized format of an `ingest.yaml`', 18 | }, 19 | tainted: { 20 | type: DataTypes.BOOLEAN, 21 | }, 22 | createdAt: { 23 | type: DataTypes.DATE, 24 | }, 25 | updatedAt: { 26 | type: DataTypes.DATE 27 | }, 28 | }); 29 | 30 | // eslint-disable-next-line no-unused-vars 31 | update.associate = function (models) { 32 | }; 33 | 34 | return update; 35 | }; 36 | -------------------------------------------------------------------------------- /services/src/hooks/logger.js: -------------------------------------------------------------------------------- 1 | // A hook that logs service method before, after and error 2 | // See https://github.com/winstonjs/winston for documentation 3 | // about the logger. 
4 | const logger = require('winston'); 5 | 6 | // To see more detailed messages, uncomment the following line 7 | // logger.level = 'debug'; 8 | 9 | module.exports = function () { 10 | return context => { 11 | // This debugs the service call and a stringified version of the hook context 12 | // You can customize the message (and logger) to your needs 13 | logger.debug(`${context.type} app.service('${context.path}').${context.method}()`); 14 | 15 | if (typeof context.toJSON === 'function') { 16 | logger.debug('Hook Context', JSON.stringify(context, null, ' ')); 17 | } 18 | 19 | if (context.error) { 20 | logger.error(`Error during call: ${context.error}`); 21 | logger.silly('Full context ***', context.error, '*** End full error context'); 22 | } 23 | }; 24 | }; 25 | -------------------------------------------------------------------------------- /node.mk: -------------------------------------------------------------------------------- 1 | # Clever: https://stackoverflow.com/a/324782 2 | NODE:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))/tools/node 3 | 4 | all:: check 5 | 6 | lint:: depends 7 | $(NODE) npm run eslint 8 | 9 | fix-formatting: depends 10 | $(NODE) npm run eslint -- --fix 11 | 12 | check:: lint 13 | $(MAKE) unit 14 | 15 | unit:: depends 16 | if [ -z "$${SKIP_TESTS}" ]; then $(NODE) npm run test; \ 17 | else echo "Tests are skipped!"; fi; 18 | 19 | debug-unit: depends 20 | $(NODE) node --inspect-brk=0.0.0.0:9229 node_modules/.bin/jest --runInBand --bail --forceExit test/ 21 | 22 | depends: node_modules 23 | 24 | node_modules: package-lock.json package.json 25 | # Checking to see if the directory exists because npm install updates the 26 | # directory every time it runs, busting the GNU/Make cache causing rebuilds 27 | if [ ! -d node_modules ]; then $(NODE) npm ci; fi; 28 | 29 | clean:: 30 | rm -rf vendor node_modules build 31 | 32 | .PHONY: all check clean depends run unit lint 33 | -------------------------------------------------------------------------------- /services/src/services/versions/versions.service.js: -------------------------------------------------------------------------------- 1 | /* 2 | * The `versions` service is responsible for handling the "audit trail" of 3 | * Jenkins instance version information. 4 | * 5 | * The `version` information for a given instance should be considered append 6 | * only to the backend data store, and retrieving the "current" version simply 7 | * means taking the last of the version records associated with the instance.
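 *
 * As a purely illustrative sketch (the query shape below assumes the standard
 * FeathersJS common query syntax and a placeholder `clientUuid`; it is not
 * code taken from this service), fetching that "current" record could look like:
 *
 *   app.service('versions').find({
 *     query: { uuid: clientUuid, $sort: { createdAt: -1 }, $limit: 1 }
 *   });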
8 | */ 9 | 10 | const createService = require('feathers-sequelize'); 11 | const createModel = require('../../models/version'); 12 | const hooks = require('./versions.hooks'); 13 | 14 | module.exports = function (app) { 15 | const options = { 16 | name: 'versions', 17 | Model: createModel(app) 18 | }; 19 | 20 | let service = createService(options); 21 | service.docs = { 22 | description: 'Store a given instance\'s core and plugin version information', 23 | }; 24 | app.use('/versions', service); 25 | app.service('versions').hooks(hooks.getHooks()); 26 | }; 27 | -------------------------------------------------------------------------------- /services/test/services/versions.hooks.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const hooks = require('../../src/services/versions/versions.hooks'); 3 | 4 | describe('versions service hooks', () => { 5 | describe('computeManifestChecksum()', () => { 6 | let context = { 7 | data: { 8 | uuid: 'some-uuid', 9 | manifest: { 10 | first: 'alpha', 11 | second: 'bravo', 12 | third: 'charlie', 13 | } 14 | } 15 | }; 16 | 17 | it('should return a hash based off the manifest', () => { 18 | hooks.computeManifestChecksum(context); 19 | assert.equal(typeof context.data.checksum, 'string'); 20 | }); 21 | 22 | it('should return consistent hashes', () => { 23 | let second = JSON.parse(JSON.stringify(context)); 24 | hooks.computeManifestChecksum(context); 25 | hooks.computeManifestChecksum(second); 26 | assert.equal(context.data.checksum, 27 | second.data.checksum); 28 | }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /services/acceptance/services/tainted.test.js: -------------------------------------------------------------------------------- 1 | const request = require('request-promise'); 2 | const h = require('../helpers'); 3 | 4 | describe('Tainted service acceptance tests', () => { 5 | beforeAll((done) => h.startApp(done)); 6 | afterAll(done => h.stopApp(done)); 7 | 8 | beforeEach(async () => { 9 | let { token, uuid } = await h.registerAndAuthenticate(); 10 | this.token = token; 11 | this.uuid = uuid; 12 | }); 13 | 14 | describe('POST /update/tainted', () => { 15 | it('should allow marking a level', () => { 16 | return request({ 17 | url: h.getUrl('/update/tainted'), 18 | method: 'POST', 19 | json: true, 20 | resolveWithFullResponse: true, 21 | headers: { 'Authorization': this.token }, 22 | body: { 23 | uuid: this.uuid, 24 | level: 1, 25 | }, 26 | }) 27 | .then(res => expect(res.statusCode).toEqual(201)) 28 | .catch(err => expect(err).toBeFalsy()); 29 | }); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /services/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "node": true, 5 | "jest": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 2017 9 | }, 10 | "extends": "eslint:recommended", 11 | "rules": { 12 | "indent": [ 13 | "error", 14 | 2 15 | ], 16 | "linebreak-style": [ 17 | "error", 18 | "unix" 19 | ], 20 | "quotes": [ 21 | "error", 22 | "single" 23 | ], 24 | "semi": [ 25 | "error", 26 | "always" 27 | ], 28 | "prefer-arrow-callback": [ 29 | "error", 30 | { "allowNamedFunctions": true } 31 | ], 32 | "no-var": "error", 33 | "keyword-spacing": "error", 34 | "block-spacing": "error", 35 | "space-before-blocks": "error", 36 | "spaced-comment": "error", 37 | "space-infix-ops": 
"error", 38 | "semi-spacing": "error", 39 | "template-tag-spacing": "error", 40 | "curly": "error", 41 | "brace-style": "error", 42 | "no-trailing-spaces": "error", 43 | "prefer-template": "error" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /services/migrations/20180725143201-create-updates.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('updates', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | commit: { 12 | type: Sequelize.STRING 13 | }, 14 | manifest: { 15 | type: Sequelize.JSON 16 | }, 17 | createdAt: { 18 | allowNull: false, 19 | type: Sequelize.DATE 20 | }, 21 | updatedAt: { 22 | allowNull: false, 23 | type: Sequelize.DATE 24 | }, 25 | tainted: { 26 | defaultValue: false, 27 | type: Sequelize.BOOLEAN 28 | }, 29 | channel: { 30 | defaultValue: 'general', 31 | allowNull: false, 32 | type: Sequelize.STRING 33 | } 34 | }); 35 | }, 36 | down: (queryInterface, Sequelize) => { 37 | return queryInterface.dropTable('updates'); 38 | } 39 | }; 40 | -------------------------------------------------------------------------------- /services/test/hooks/dbtimestamp.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const feathers = require('@feathersjs/feathers'); 3 | const dbtimestamp = require('../../src/hooks/dbtimestamp'); 4 | 5 | describe('\'dbtimestamp\' hook', () => { 6 | let app; 7 | 8 | beforeEach(() => { 9 | app = feathers(); 10 | 11 | app.use('/dummy', { 12 | async get(id) { 13 | return { id }; 14 | }, 15 | async create(id) { 16 | return id; 17 | } 18 | }); 19 | 20 | app.service('dummy').hooks({ 21 | before: { 22 | create: dbtimestamp('createdAt') 23 | } 24 | }); 25 | }); 26 | 27 | it('does not run the hook on get()', async () => { 28 | const result = await app.service('dummy').get('test'); 29 | 30 | assert.deepEqual(result, { id: 'test' }); 31 | }); 32 | 33 | it('runs the hook on create', async () => { 34 | const result = await app.service('dummy').create({ id: 'test' }); 35 | 36 | assert.ok(result.createdAt, 'Should have a createdAt timestamp'); 37 | }); 38 | }); 39 | -------------------------------------------------------------------------------- /services/test/libs/auth-verifier.test.js: -------------------------------------------------------------------------------- 1 | const AuthVerifier = require('../../src/libs/auth-verifier'); 2 | const { Verifier } = require('@feathersjs/authentication-local'); 3 | 4 | describe('Auth Verifier', () => { 5 | const options = { 6 | // Make server a no-op to allow construction 7 | service: true, 8 | }; 9 | it('should be insance of a verifier', () => { 10 | expect((new AuthVerifier(null, options))).toBeInstanceOf(Verifier); 11 | }); 12 | 13 | describe('_comparePassword()', () => { 14 | const entity = { 15 | uuid: 'jest-uuid', 16 | curve: 'secp256k1', 17 | pubkey: 'bogus', 18 | }; 19 | 20 | beforeEach(() => { 21 | this.verifier = new AuthVerifier(null, options); 22 | }); 23 | 24 | it('should reject if the password (signed uuid) is bad', () => { 25 | const signature = JSON.stringify({ 26 | r: 'bad', 27 | s: 'signature', 28 | }); 29 | return expect(this.verifier._comparePassword(entity, signature)).rejects.toBeInstanceOf(Error); 30 | }); 31 | }); 32 | }); 33 | 
-------------------------------------------------------------------------------- /distribution/client/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "node": true, 5 | "jest": true 6 | }, 7 | "parserOptions": { 8 | "ecmaVersion": 2017 9 | }, 10 | "extends": "eslint:recommended", 11 | "rules": { 12 | "indent": [ 13 | "error", 14 | 2 15 | ], 16 | "linebreak-style": [ 17 | "error", 18 | "unix" 19 | ], 20 | "quotes": [ 21 | "error", 22 | "single" 23 | ], 24 | "semi": [ 25 | "error", 26 | "always" 27 | ], 28 | "prefer-arrow-callback": [ 29 | "error", 30 | { "allowNamedFunctions": true } 31 | ], 32 | "no-var": "error", 33 | "keyword-spacing": "error", 34 | "block-spacing": "error", 35 | "space-before-blocks": "error", 36 | "spaced-comment": "error", 37 | "space-infix-ops": "error", 38 | "semi-spacing": "error", 39 | "template-tag-spacing": "error", 40 | "curly": "error", 41 | "brace-style": "error", 42 | "no-trailing-spaces": "error", 43 | "prefer-template": "error" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /services/src/services/update/update.service.js: -------------------------------------------------------------------------------- 1 | const createService = require('./update.class'); 2 | const hooks = require('./update.hooks'); 3 | const createModel = require('../../models/update'); 4 | 5 | const internalOnly = require('../../hooks/internalonly'); 6 | 7 | module.exports = function (app) { 8 | const Model = createModel(app); 9 | 10 | const options = { 11 | app: app, 12 | name: 'update', 13 | Model, 14 | }; 15 | 16 | // Initialize our service with any options it requires 17 | let service = createService(options); 18 | service.docs = { 19 | description: 'Manage and retrieve Update Levels for Evergreen clients', 20 | create: { 21 | description: 'Create a new Update Level based off an ingest.yaml', 22 | }, 23 | get: { 24 | description: 'Retrieve the computed Update Manifest for the given evergreen-client', 25 | }, 26 | find: internalOnly.swagger, 27 | remove: internalOnly.swagger, 28 | }; 29 | app.use('/update', service); 30 | app.service('update').hooks(hooks.getHooks()); 31 | }; 32 | -------------------------------------------------------------------------------- /distribution/docker-compose.squid-cache.yml: -------------------------------------------------------------------------------- 1 | # This file is a docker-compose override file, cf. 2 | # https://docs.docker.com/compose/extends/#adding-and-overriding-configuration 3 | # It is used in conjunction with the core docker-compose.yml file to add a specific configuration 4 | # to set up a Squid proxy to accelerate plugin downloads during Evergreen development. 5 | # This file is automatically used by the tests/*tests.sh files.
6 | # 7 | # To disable it, define the "DISABLE_PROXY_CACHE" environment variable as follows: 8 | # export DISABLE_PROXY_CACHE=true 9 | version: '3' 10 | services: 11 | 12 | instance: 13 | environment: 14 | - 'HTTP_PROXY=http://cache:3128' 15 | - 'http_proxy=http://cache:3128' 16 | - 'NO_PROXY=localhost,127.0.0.1' 17 | - 'no_proxy=localhost,127.0.0.1' 18 | 19 | cache: 20 | image: sameersbn/squid:3.3.8-23 21 | ports: 22 | - '3128:3128' 23 | volumes: 24 | - squid-cache:/var/spool/squid3 25 | - ./config/squid.conf:/etc/squid3/squid.conf:ro 26 | volumes: 27 | squid-cache: 28 | external: true 29 | -------------------------------------------------------------------------------- /distribution/config/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | nodaemon=true 3 | 4 | [inet_http_server] 5 | port=:9001 6 | 7 | [rpcinterface:supervisor] 8 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 9 | 10 | [program:evergreen-client] 11 | environment=HOME=%(ENV_EVERGREEN_HOME)s # Needed for Git or Node JENKINS-53856 12 | command=/evergreen/scripts/start-client.sh 13 | directory=%(ENV_EVERGREEN_HOME)s/client 14 | stdout_logfile=/dev/stdout 15 | stdout_logfile_maxbytes=0 16 | redirect_stderr=true 17 | startsecs=2 18 | startretries=20 19 | user=jenkins 20 | 21 | [program:jenkins] 22 | command=/evergreen/scripts/jenkins-evergreen.sh 23 | directory=%(ENV_JENKINS_HOME)s 24 | stdout_logfile=/dev/stdout 25 | stdout_logfile_maxbytes=0 26 | redirect_stderr=true 27 | startsecs=10 28 | startretries=0 29 | user=jenkins 30 | 31 | [program:nginx] 32 | command=/usr/sbin/nginx -c /evergreen/config/nginx.conf -g "daemon off;" 33 | stdout_logfile=/dev/stdout 34 | stdout_logfile_maxbytes=0 35 | redirect_stderr=true 36 | startsecs=10 37 | startretries=0 38 | 39 | 40 | # vim: ft=ini 41 | -------------------------------------------------------------------------------- /services/src/homepage.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Simple express handler for generating a dynamic home page 3 | */ 4 | const fs = require('fs'); 5 | 6 | module.exports = (app) => { 7 | return async (req, res) => { 8 | const sequelize = app.get('sequelizeClient'); 9 | const Instance = app.get('models').instance; 10 | const instances = await Instance.findAll({ 11 | attributes: [ 12 | 'updateId', 13 | [sequelize.fn('COUNT', sequelize.col('id')), 'num_instances'], 14 | ], 15 | group: ['updateId'] 16 | }); 17 | 18 | let levels = {}; 19 | instances.map(r => levels[r.get('updateId')] = r.get('num_instances')); 20 | 21 | app.service('update').find({ 22 | query: { 23 | $limit: 5, 24 | $sort: { 25 | createdAt: -1, 26 | } 27 | }, 28 | }).then((updates) => { 29 | res.render('index', { 30 | updates: updates, 31 | levels: levels, 32 | instances: instances, 33 | connections: app.channel('authenticated').length, 34 | commit: fs.readFileSync('./commit.txt'), 35 | }); 36 | }); 37 | }; 38 | }; 39 | -------------------------------------------------------------------------------- /distribution/flavors/aws-ec2-cloud/config/as-code/ec2-cloud.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | jenkins: 3 | clouds: 4 | - amazonEC2: 5 | cloudName: "ec2" 6 | instanceCapStr: 20 7 | # this shouldn't be needed, since without explicit creds this should 8 | # already be used but let's be explicit to avoid issues. 
9 | useInstanceProfileForCredentials: true 10 | privateKey: "${PRIVATE_KEY}" 11 | region: "${REGION}" 12 | templates: 13 | - description: "EC2 Agent" 14 | ami: "${AGENT_AMI}" 15 | labelString: "agent" 16 | type: "T2Xlarge" 17 | securityGroups: "${AGENT_SECURITY_GROUP}" 18 | subnetId: "${AGENT_SUBNET}" 19 | remoteFS: "/home/ec2-user" 20 | remoteAdmin: "ec2-user" 21 | initScript: > 22 | sudo yum update -y; 23 | sudo yum remove -y java-1.7.0-openjdk ; 24 | sudo yum install -y docker java-1.8.0-openjdk-devel; 25 | sudo service docker start; 26 | sudo usermod -a -G docker ec2-user; 27 | sudo docker info; 28 | -------------------------------------------------------------------------------- /services/src/middleware/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This module contains some simple and basic express middleware for running 3 | * the Evergreen backend service layer. 4 | * 5 | * If any of these become more than a few lines, they should be moved into 6 | * their own modules and properly unit tested 7 | */ 8 | 9 | module.exports = function (app) { 10 | /* 11 | * Add headers onto all our request objects for use by feathers hooks 12 | * 13 | * `context.params.headers` 14 | * 15 | * This seems to be required in order to make bearer tokens with 16 | * @feathersjs/authentication-jwt work 17 | */ 18 | app.all('*', (request, response, next) => { 19 | if (request.headers) { 20 | request.feathers.headers = request.headers; 21 | } 22 | next(); 23 | }); 24 | 25 | /* 26 | * Remove redundant slashes in the URL for properly routing 27 | * 28 | * For example: //authentication -> /authentication which ensures that the 29 | * request is routed correctly 30 | */ 31 | app.all('*', (request, response, next) => { 32 | request.url = request.url.replace(/\/+/, '/'); 33 | next(); 34 | }); 35 | }; 36 | -------------------------------------------------------------------------------- /tools/node: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # If we're executing from a normal shell, allow input. If we're in a 4 | # subprocess, like under Jenkins Pipeline, don't allow it 5 | tty -s 6 | if [ $? 
-eq 0 ]; then 7 | TTY_ARGS="-ti" 8 | fi; 9 | 10 | COMMAND="$@" 11 | 12 | # The caller is passing in some custom paraneters to the `node` binary, so we 13 | # need to override the entrypoint to explicitly call node 14 | if [[ "${1}" =~ "--" ]]; then 15 | COMMAND="node $@"; 16 | fi; 17 | 18 | 19 | exec docker run --net host --rm ${TTY_ARGS} \ 20 | -u $(id -u):$(id -g) \ 21 | -w "${PWD}" \ 22 | --mount type=tmpfs,destination=/.npm \ 23 | --mount type=tmpfs,destination=/.config \ 24 | -v "${PWD}:${PWD}" \ 25 | -e "PATH=$PWD/node_modules/.bin:/usr/local/bin:$PATH" \ 26 | -e LANG=C.UTF-8 \ 27 | -e "DB_TRACING=$DB_TRACING" \ 28 | -e "LOG_LEVEL=$LOG_LEVEL" \ 29 | -e "DEBUG=$DEBUG" \ 30 | -e "FLAVOR=$FLAVOR" \ 31 | -e "SENTRY_DSN=$SENTRY_DSN" \ 32 | $(printenv | grep -i \^evergreen | awk '{ print "-e", $1 }') \ 33 | $(printenv | grep -i \^node | awk '{ print "-e", $1 }') \ 34 | node:10 \ 35 | ${COMMAND} 36 | -------------------------------------------------------------------------------- /services/src/models/instance.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const Sequelize = require('sequelize'); 3 | const DataTypes = Sequelize.DataTypes; 4 | 5 | module.exports = function (app) { 6 | const sequelizeClient = app.get('sequelizeClient'); 7 | const instance = sequelizeClient.define('instances', { 8 | uuid: { 9 | allowNull: false, 10 | type: DataTypes.UUID, 11 | description: 'An evergreen-client\'s generated from registration UUID', 12 | }, 13 | timezone: { 14 | type: DataTypes.STRING, 15 | description: 'Timezone for the evergreen-client, e.g. America/Los_Angeles', 16 | }, 17 | flavor: { 18 | type: DataTypes.STRING, 19 | description: 'Flavor describing the instance, e.g. `docker-cloud`', 20 | }, 21 | updateId: { 22 | type: DataTypes.BIGINT, 23 | description: 'Current `updates` level for the instance', 24 | }, 25 | createdAt: { 26 | type: DataTypes.DATE, 27 | }, 28 | updatedAt: { 29 | type: DataTypes.DATE 30 | }, 31 | }); 32 | 33 | instance.associate = function (models) { 34 | instance.belongsTo(models.updates); 35 | }; 36 | 37 | return instance; 38 | }; 39 | -------------------------------------------------------------------------------- /services/src/services/versions/versions.hooks.js: -------------------------------------------------------------------------------- 1 | const authentication = require('@feathersjs/authentication'); 2 | const dbtimestamp = require('../../hooks/dbtimestamp'); 3 | const ensureMatchingUUID = require('../../hooks/ensureuuid'); 4 | const hash = require('object-hash'); 5 | 6 | class VersionsHooks { 7 | constructor() { 8 | } 9 | 10 | getHooks() { 11 | return { 12 | before: { 13 | all: [ 14 | authentication.hooks.authenticate(['jwt']) 15 | ], 16 | find: [], 17 | get: [], 18 | create: [ 19 | ensureMatchingUUID, 20 | dbtimestamp('createdAt'), 21 | this.computeManifestChecksum, 22 | ], 23 | update: [], 24 | patch: [], 25 | remove: [] 26 | }, 27 | after: {}, 28 | error: {}, 29 | }; 30 | } 31 | 32 | 33 | /* 34 | * This function will compute the checksum of the manifest sent to the 35 | * backend 36 | */ 37 | computeManifestChecksum(context) { 38 | context.data.checksum = hash.MD5(context.data.manifest); 39 | return context; 40 | } 41 | } 42 | 43 | module.exports = new VersionsHooks(); 44 | -------------------------------------------------------------------------------- /services/src/libs/auth-verifier.js: -------------------------------------------------------------------------------- 1 | const ecc = 
require('elliptic'); 2 | const logger = require('winston'); 3 | const { Verifier } = require('@feathersjs/authentication-local'); 4 | 5 | /* 6 | * AuthVerifier is a custom Verifier class for the FeathersJS authentication 7 | * support 8 | * 9 | * Since Feathers already has the machinery to look up entities in our 10 | * registration, all this Verifier must do is validate the signature matching 11 | * the entityt 12 | */ 13 | class AuthVerifier extends Verifier { 14 | /* 15 | * For compatibility, the 'signature' parameter is expected to be a JSON 16 | * encoded signature object 17 | */ 18 | _comparePassword(entity, signature) { 19 | const ec = new ecc.ec(entity.curve); 20 | const key = ec.keyFromPublic(entity.pubKey, 'hex'); 21 | 22 | try { 23 | if (!key.verify(entity.uuid, JSON.parse(signature))) { 24 | return Promise.reject(false); 25 | } 26 | } catch (err) { 27 | logger.error('Improperly formed signature sent', err.message); 28 | return Promise.reject(err); 29 | } 30 | return Promise.resolve(entity); 31 | } 32 | } 33 | 34 | module.exports = AuthVerifier; 35 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2018 CloudBees, Inc, and a number of other contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /services/src/models/version.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | const Sequelize = require('sequelize'); 3 | const DataTypes = Sequelize.DataTypes; 4 | 5 | module.exports = function (app) { 6 | const sequelizeClient = app.get('sequelizeClient'); 7 | const version = sequelizeClient.define('versions', { 8 | uuid: { 9 | allowNull: false, 10 | type: DataTypes.UUID, 11 | description: 'An evergreen-client\'s generated from registration UUID', 12 | }, 13 | manifest: { 14 | type: DataTypes.JSON, 15 | description: 'Version manifest in the format described in JEP-307', 16 | }, 17 | manifestSchemaVersion: { 18 | type: DataTypes.INTEGER, 19 | description: 'Schema version for the "Version Manifest" (e.g. 
1)', 20 | }, 21 | checksum: { 22 | type: DataTypes.STRING, 23 | description: 'MD5 checksum of the version manifest for easy sorting and comparison', 24 | }, 25 | createdAt: { 26 | type: DataTypes.DATE, 27 | }, 28 | updatedAt: { 29 | type: DataTypes.DATE, 30 | }, 31 | }); 32 | 33 | // eslint-disable-next-line no-unused-vars 34 | version.associate = function(models) { 35 | }; 36 | 37 | return version; 38 | }; 39 | -------------------------------------------------------------------------------- /distribution/scripts/jenkins-evergreen.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | 5 | passwordFileLocation="$JENKINS_HOME/secrets/initialAdminPassword" 6 | passwordFileLocationDirectory="$( dirname "$passwordFileLocation" )" 7 | 8 | generateNewAdminPassword() { 9 | echo -n "Creating $passwordFileLocationDirectory... " 10 | mkdir -p "$passwordFileLocationDirectory" 11 | echo "Done." 12 | 13 | echo -n "Generating admin password... " 14 | echo $RANDOM | md5sum | cut -d ' ' -f 1 > "$passwordFileLocation" 15 | echo "Done. Password value stored in $passwordFileLocation file." 16 | } 17 | 18 | if [[ -f $passwordFileLocation ]]; then 19 | echo "Password file already exists, not generating a new one." 20 | else 21 | generateNewAdminPassword 22 | fi 23 | 24 | JENKINS_ADMIN_PASSWORD="$( cat "$passwordFileLocation" )" 25 | 26 | # Intended for ease of development. By default, password is obviously *not* put in logs. 27 | if [[ "${INSECURE_SHOW_ADMIN_PASSWORD:-false}" == "true" ]]; then 28 | echo "[WARNING] INSECURE_SHOW_ADMIN_PASSWORD defined, it should only ever be done for testing." 29 | echo "[admin password] $JENKINS_ADMIN_PASSWORD" 30 | fi 31 | 32 | export JENKINS_ADMIN_PASSWORD 33 | exec "${EVERGREEN_HOME}"/scripts/jenkins.sh 34 | -------------------------------------------------------------------------------- /services/cli/plugin-manifest.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const PluginDependency = require('./plugin-dependency'); 4 | 5 | /* 6 | * Representation of a plugin's MANIFEST.MF 7 | */ 8 | class PluginManifest { 9 | constructor(data) { 10 | this.data = data; 11 | this.dependencies = []; 12 | } 13 | 14 | static load(data) { 15 | return new PluginManifest(data); 16 | } 17 | 18 | parse() { 19 | let dependencies = []; 20 | // Set to true if the next line is awrapped set of dependencies 21 | let depWrap = false; 22 | this.data.split('\n').forEach((line) => { 23 | if ((depWrap) && (!line.startsWith(' '))) { 24 | depWrap = false; 25 | } 26 | 27 | const matches = line.match(/^Plugin-Dependencies: (.*)?/); 28 | 29 | if (matches) { 30 | dependencies.push(matches[1]); 31 | depWrap = true; 32 | } else if (depWrap) { 33 | dependencies.push(line.trim()); 34 | } 35 | }); 36 | 37 | dependencies = dependencies.join('').split(','); 38 | 39 | this.dependencies = dependencies 40 | .map(entry => PluginDependency.fromEntry(entry)) 41 | .filter(d => d); 42 | return this; 43 | } 44 | } 45 | 46 | module.exports = PluginManifest; 47 | 48 | 49 | -------------------------------------------------------------------------------- /services/migrations/20180913193732-create-tainted.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('tainteds', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: 
true, 9 | type: Sequelize.INTEGER 10 | }, 11 | uuid: { 12 | type: Sequelize.UUID 13 | }, 14 | updateId: { 15 | type: Sequelize.INTEGER, 16 | references: { 17 | model: 'updates', 18 | key: 'id' 19 | }, 20 | }, 21 | createdAt: { 22 | allowNull: false, 23 | defaultValue: Sequelize.literal('NOW()'), 24 | type: Sequelize.DATE 25 | }, 26 | updatedAt: { 27 | allowNull: false, 28 | defaultValue: Sequelize.literal('NOW()'), 29 | type: Sequelize.DATE 30 | } 31 | }).then(() => { 32 | return queryInterface.addConstraint('tainteds', 33 | ['uuid', 'updateId'], 34 | { 35 | type: 'UNIQUE', 36 | name: 'uuid_updateid_uniq', 37 | }, 38 | ); 39 | }); 40 | }, 41 | down: (queryInterface, Sequelize) => { 42 | return queryInterface.dropTable('tainteds'); 43 | } 44 | }; 45 | -------------------------------------------------------------------------------- /services/src/services/tainted/tainted.hooks.js: -------------------------------------------------------------------------------- 1 | const authentication = require('@feathersjs/authentication'); 2 | const ensureMatchingUUID = require('../../hooks/ensureuuid'); 3 | const internalOnly = require('../../hooks/internalonly'); 4 | 5 | class TaintedHooks { 6 | constructor() { 7 | } 8 | 9 | getHooks() { 10 | return { 11 | before: { 12 | all: [ 13 | authentication.hooks.authenticate(['jwt']) 14 | ], 15 | find: [ 16 | internalOnly 17 | ], 18 | get: [ 19 | internalOnly 20 | ], 21 | create: [ 22 | ensureMatchingUUID, 23 | /* 24 | * For API consistency we want clients to just send their level, 25 | * which is actually just an updateId :) 26 | */ 27 | (context) => { 28 | context.data.updateId = context.data.level; 29 | }, 30 | ], 31 | update: [ 32 | internalOnly 33 | ], 34 | patch: [ 35 | internalOnly 36 | ], 37 | remove: [ 38 | internalOnly 39 | ], 40 | }, 41 | after: {}, 42 | error: {}, 43 | }; 44 | } 45 | } 46 | 47 | module.exports = new TaintedHooks(); 48 | -------------------------------------------------------------------------------- /services/migrations/20180725143202-create-instances.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('instances', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | uuid: { 12 | allowNull: false, 13 | unique: true, 14 | type: Sequelize.UUID 15 | }, 16 | timezone: { 17 | type: Sequelize.STRING 18 | }, 19 | updateId: { 20 | type: Sequelize.BIGINT, 21 | references: { 22 | model: 'updates', 23 | key: 'id' 24 | }, 25 | onDelete: 'no action', 26 | onUpdate: 'no action' 27 | }, 28 | updatedAt: { 29 | allowNull: false, 30 | type: Sequelize.DATE 31 | }, 32 | createdAt: { 33 | allowNull: false, 34 | type: Sequelize.DATE 35 | }, 36 | flavor: { 37 | defaultValue: 'docker-cloud', 38 | allowNull: false, 39 | type: Sequelize.STRING 40 | } 41 | }); 42 | }, 43 | down: (queryInterface, Sequelize) => { 44 | return queryInterface.dropTable('instances'); 45 | } 46 | }; 47 | -------------------------------------------------------------------------------- /services/migrations/20180725143205-create-versions.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | module.exports = { 3 | up: (queryInterface, Sequelize) => { 4 | return queryInterface.createTable('versions', { 5 | id: { 6 | allowNull: false, 7 | autoIncrement: true, 8 | primaryKey: true, 9 | type: Sequelize.INTEGER 10 | }, 11 | 
uuid: { 12 | allowNull: false, 13 | type: Sequelize.STRING 14 | }, 15 | manifest: { 16 | type: Sequelize.JSON 17 | }, 18 | manifestSchemaVersion: { 19 | type: Sequelize.INTEGER 20 | }, 21 | createdAt: { 22 | allowNull: false, 23 | type: Sequelize.DATE 24 | }, 25 | updatedAt: { 26 | allowNull: false, 27 | type: Sequelize.DATE 28 | }, 29 | checksum: { 30 | allowNull: false, 31 | type: Sequelize.STRING 32 | } 33 | }).then(() => { 34 | return queryInterface.addConstraint('versions', 35 | ['uuid', 'checksum'], 36 | { 37 | type: 'UNIQUE', 38 | name: 'uuid_checksum_uniq', 39 | } 40 | ); 41 | }); 42 | }, 43 | down: (queryInterface, Sequelize) => { 44 | return queryInterface.dropTable('versions'); 45 | } 46 | }; 47 | -------------------------------------------------------------------------------- /distribution/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | db: 4 | image: postgres:alpine 5 | environment: 6 | - 'POSTGRES_PASSWORD=grassisevergreener' 7 | # Used for psql non-interactive scripting 8 | - 'PGPASSWORD=grassisevergreener' 9 | - 'POSTGRES_DB=evergreen_development' 10 | ports: 11 | - '5432:5432' 12 | volumes: 13 | - ../services/initdb.d:/docker-entrypoint-initdb.d 14 | 15 | backend: 16 | image: jenkinsciinfra/evergreen-backend 17 | build: ../services 18 | environment: 19 | - 'DB_TRACING=1' 20 | - 'PGPASSWORD=grassisevergreener' 21 | - 'DB_CONNECTION_STRING=postgres://postgres:grassisevergreener@db:5432/evergreen_development' 22 | ports: 23 | - '3030:3030' 24 | depends_on: 25 | - db 26 | command: ['/wait-for-postgres.sh', "db", "5432", "/usr/local/bin/npm", "run", "start"] 27 | 28 | instance: 29 | image: jenkins/evergreen 30 | build: ./ 31 | environment: 32 | - 'EVERGREEN_ENDPOINT=http://backend:3030' 33 | - 'LOG_LEVEL=debug' 34 | - 'INSECURE_SHOW_ADMIN_PASSWORD=true' 35 | - 'DEVELOPMENT=true' 36 | - 'PROCESS_RETRY_OVERRIDE=10' 37 | ports: 38 | - '8080:80' 39 | depends_on: 40 | - backend 41 | -------------------------------------------------------------------------------- /services/src/services/status/status.service.js: -------------------------------------------------------------------------------- 1 | // Initializes the `status` service on path `/status` 2 | const createService = require('feathers-sequelize'); 3 | const hooks = require('./status.hooks'); 4 | 5 | module.exports = function (app) { 6 | const options = { 7 | id: 'uuid', 8 | /* We need to set raw to false here, otherwise feathers-sequelize assumes 9 | * that raw should be turned to true, which changes the output of the 10 | * associations from nested JSON objects to association.value= 11 | * attributes on the root of the JSON object 12 | */ 13 | raw: false, 14 | Model: app.get('models').instance 15 | }; 16 | 17 | let service = createService(options); 18 | service.events = ['ping']; 19 | service.docs = { 20 | description: 'Manage and retrieve Update Levels for Evergreen clients', 21 | create: { 22 | description: 'Create a new Update Level based off an ingest.yaml', 23 | }, 24 | }; 25 | app.use('/status', service); 26 | 27 | /* Since status.hooks is putting more than just before/after/error onto 28 | * module.exports, we need to make sure that we're not pushing things which 29 | * feathersjs doesn't consider hooks into the hooks registration 30 | */ 31 | 32 | app.service('status').hooks(hooks.getHooks()); 33 | }; 34 | -------------------------------------------------------------------------------- /services/cli/manifest.js:
-------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require('fs'); 4 | const yaml = require('js-yaml'); 5 | 6 | const DEFAULT_FILENAME = './essentials.yaml'; 7 | 8 | /* 9 | * Wrapper for the essentials.yaml 10 | */ 11 | class Manifest { 12 | constructor(data, fileName) { 13 | this.data = data; 14 | this.fileName = fileName; 15 | } 16 | 17 | /* 18 | * Read an essentials.yaml and build a Manifest object 19 | * 20 | * @param {string} optional path to an essentials.yaml file 21 | * @return {Manifest} 22 | */ 23 | static loadFile(fileName) { 24 | if (!fileName) { 25 | fileName = DEFAULT_FILENAME; 26 | } 27 | return new Manifest( 28 | yaml.safeLoad(fs.readFileSync(fileName)), 29 | fileName 30 | ); 31 | } 32 | 33 | saveSync() { 34 | return fs.writeFileSync(this.fileName, yaml.safeDump(this.data)); 35 | } 36 | 37 | getPlugins() { 38 | return this.data.spec.plugins; 39 | } 40 | 41 | getActualPlugins() { 42 | return this.data.status.plugins; 43 | } 44 | 45 | getCore() { 46 | return this.data.spec.core; 47 | } 48 | 49 | setStatus(status) { 50 | this.data.status = status; 51 | } 52 | 53 | getEnvironments() { 54 | return this.data.spec.environments; 55 | } 56 | } 57 | 58 | module.exports = Manifest; 59 | -------------------------------------------------------------------------------- /docs/developer/USE-CASES.adoc: -------------------------------------------------------------------------------- 1 | = _Evergreen_ Use Cases 2 | :toc: 3 | :sectnums: 4 | 5 | This document summarizes the Priority 1 use cases for _Jenkins Evergreen_. 6 | 7 | NOTE: This is still early work. 8 | To help the thought process, the definition and analysis of the cases was done by thinking about what a demonstration would need to go through. 9 | 10 | == Priority 1 Cases: Risk analysis 11 | 12 | === I can start an instance from scratch and it's available 13 | 14 | * Registration & authentication services need to be available: 15 | ** An Internet connection is required 16 | ** Bandwidth needs to be enough to download things in a _reasonable_ time. 17 | (It can probably be more than 5 minutes, if there is an issue.) 18 | * Binaries repository must be available. 19 | 20 | === Jenkins is constantly upgraded safely 21 | 22 | * If the backend becomes unavailable: 23 | ** Check that the evergreen-client reconnects automatically when it is back. 24 | * If the unavailability happens just after a failed upgrade, can we roll back offline? 25 | 26 | === Starting in a given cloud environment, Evergreen is auto-configured and ready to build with it 27 | 28 | Concrete examples: 29 | 30 | * auto-configuring the docker-plugin if the Docker socket is available, 31 | * auto-configuring the ec2-plugin if running on AWS.
32 | 33 | == Priority 2 Cases 34 | 35 | === First startup should be done in less than 5 clicks and 5 minutes 36 | -------------------------------------------------------------------------------- /services/test/hooks/internalapi.test.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Validate the internalapi hook properly guards against unauthenticated 3 | * requests 4 | */ 5 | const errors = require('@feathersjs/errors'); 6 | const hook = require('../../src/hooks/internalapi'); 7 | 8 | describe('the `internalApi` hook', () => { 9 | let context = { 10 | params: { 11 | headers: {}, 12 | }, 13 | app: { 14 | get: () => { 15 | // Stubbed to only return the `internalAPI` key 16 | return { 17 | secret: 'a secret', 18 | }; 19 | }, 20 | }, 21 | }; 22 | 23 | describe('without an Authorization header', () => { 24 | it('should throw NotAuthorized', () => { 25 | expect(() => { 26 | hook(context); 27 | }).toThrow(errors.NotAuthenticated); 28 | }); 29 | }); 30 | 31 | describe('with an Authorization header', () => { 32 | beforeEach(() => { 33 | context.params.headers = { 34 | authorization: 'a secret', 35 | }; 36 | }); 37 | 38 | it('should pass-through if the header is valid', () => { 39 | expect(hook(context)).toBe(context); 40 | }); 41 | 42 | it('should throw NotAuthorized if the header is invalid', () => { 43 | context.params.headers.authorization = 'the wrong word!'; 44 | expect(() => { 45 | hook(context); 46 | }).toThrow(errors.NotAuthenticated); 47 | }); 48 | }); 49 | }); 50 | -------------------------------------------------------------------------------- /services/src/services/errorTelemetry/errorTelemetry.class.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const logger = require('winston'); 3 | const path = require('path'); 4 | const mkdirp = require('mkdirp'); 5 | 6 | class ErrorTelemetryService { 7 | constructor(app) { 8 | this.app = app; 9 | if (process.env.NODE_ENV == 'production') { 10 | logger.info('production mode: no file used for error telemetry logging'); 11 | } else { 12 | this.loggingFile = '/tmp/error-telemetry-testing.log'; 13 | const baseDirectory = path.dirname(this.loggingFile); 14 | if (!fs.existsSync(baseDirectory)) { 15 | logger.warn(`${baseDirectory} does not exist, trying to create it.`); 16 | mkdirp(baseDirectory); 17 | } 18 | logger.warn(`Testing mode: ${baseDirectory} will push received logs to ${this.loggingFile} file`); 19 | } 20 | } 21 | create(data) { 22 | // Should be impossible because it passed the hooks step 23 | if (!data) { 24 | return Promise.reject({status:'KO'}); 25 | } 26 | 27 | // Only for testing, file logging of error telemetry is disabled in production 28 | if (this.loggingFile) { 29 | const toWrite = `${new Date()} => ${JSON.stringify(data)}\n\n`; 30 | fs.appendFileSync(this.loggingFile, toWrite); 31 | } 32 | this.app.get('sentry').sendOutput(data); 33 | 34 | return Promise.resolve({status:'OK'}); 35 | } 36 | } 37 | 38 | module.exports = ErrorTelemetryService; 39 | -------------------------------------------------------------------------------- /services/test/middleware.test.js: -------------------------------------------------------------------------------- 1 | const registerMiddleware = require('../src/middleware'); 2 | 3 | describe('express middleware', () => { 4 | // Stub function for the middleware 5 | let next = () => { }; 6 | let callbacks = []; 7 | let app = { 8 | all: (route, fn) => callbacks.push(fn), 9 | }; 10 | let 
applyMiddleware = (req, res, n) => { 11 | callbacks.forEach(fn => fn(req, res, n)); 12 | }; 13 | 14 | beforeEach(() => { 15 | callbacks = []; 16 | registerMiddleware(app); 17 | }); 18 | 19 | describe('removing redundant slashes', () => { 20 | it('should not affect basic URLs', () => { 21 | let request = { 22 | url: '/chat/', 23 | }; 24 | 25 | let before = request.url; 26 | applyMiddleware(request, undefined, next); 27 | expect(request.url).toBe(before); 28 | }); 29 | 30 | it('should trim slashes on other URLs', () => { 31 | let request = { 32 | url: '//chat/', 33 | }; 34 | applyMiddleware(request, undefined, next); 35 | expect(request.url).toBe('/chat/'); 36 | }); 37 | }); 38 | 39 | it('add headers to the feathers object', () => { 40 | let request = { 41 | url: '/', 42 | feathers: {}, 43 | headers: { 44 | 'Content-Type' : 'application/json', 45 | }, 46 | }; 47 | applyMiddleware(request, undefined, next); 48 | 49 | expect(request).toHaveProperty('feathers.headers', request.headers); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /services/src/services/registration/registration.hooks.js: -------------------------------------------------------------------------------- 1 | const errors = require('@feathersjs/errors'); 2 | const uuid = require('uuid/v4'); 3 | const logger = require('winston'); 4 | 5 | const dbtimestamp = require('../../hooks/dbtimestamp'); 6 | const internalOnly = require('../../hooks/internalonly'); 7 | 8 | module.exports = { 9 | before: { 10 | all: [ 11 | ], 12 | find: [ 13 | internalOnly 14 | ], 15 | get: [ 16 | internalOnly 17 | ], 18 | create: [ 19 | /* We must have a curve in order to handle the public key 20 | */ 21 | (hook) => { 22 | if (!hook.data.curve) { 23 | throw new errors.BadRequest('Client must provide a curve with the request'); 24 | } 25 | }, 26 | 27 | dbtimestamp('createdAt'), 28 | 29 | (hook) => { 30 | hook.data.uuid = uuid(); 31 | logger.debug('Generating uuid for registration.create', hook.data); 32 | return hook; 33 | } 34 | ], 35 | update: [ 36 | internalOnly 37 | ], 38 | patch: [ 39 | internalOnly 40 | ], 41 | remove: [ 42 | internalOnly 43 | ], 44 | }, 45 | 46 | after: { 47 | all: [], 48 | find: [], 49 | get: [], 50 | create: [], 51 | update: [], 52 | patch: [], 53 | remove: [] 54 | }, 55 | 56 | error: { 57 | all: [], 58 | find: [], 59 | get: [], 60 | create: [], 61 | update: [], 62 | patch: [], 63 | remove: [] 64 | } 65 | }; 66 | -------------------------------------------------------------------------------- /services/src/services/registration/registration.service.js: -------------------------------------------------------------------------------- 1 | // Initializes the `registration` service on path `/registration` 2 | const hooks = require('./registration.hooks'); 3 | const createService = require('feathers-sequelize'); 4 | const createModel = require('../../models/registration'); 5 | 6 | const internalOnly = require('../../hooks/internalonly'); 7 | 8 | module.exports = function (app) { 9 | const paginate = app.get('paginate'); 10 | const Model = createModel(app); 11 | 12 | const options = { 13 | name: 'registration', 14 | Model, 15 | paginate 16 | }; 17 | 18 | let service = createService(options); 19 | service.docs = { 20 | description: 'Registration for initial evergreen-client calls', 21 | create: { 22 | summary: 'Register a new evergreen-client', 23 | description: `Generate and store a new UUID for the given client. 
24 | 25 | The service expects keys to be generated in a manner similar to those generated by the [elliptic](https://www.npmjs.com/package/elliptic) Node module. 26 | `, 27 | externalDocs: { 28 | description: 'Defined as part of JEP-303', 29 | url: 'https://github.com/jenkinsci/jep/blob/master/jep/303', 30 | }, 31 | }, 32 | find: internalOnly.swagger, 33 | get: internalOnly.swagger, 34 | update: internalOnly.swagger, 35 | patch: internalOnly.swagger, 36 | remove: internalOnly.swagger, 37 | }; 38 | 39 | app.use('/registration', service); 40 | app.service('registration').hooks(hooks); 41 | }; 42 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ROOT:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) 2 | PATH:=$(ROOT)/tools/:$(PATH) 3 | 4 | clean: 5 | $(MAKE) -C distribution $@ 6 | $(MAKE) -C services $@ 7 | 8 | lint: 9 | $(MAKE) -C distribution $@ 10 | $(MAKE) -C services $@ 11 | 12 | check: 13 | $(MAKE) -C distribution $@ 14 | $(MAKE) -C services $@ 15 | 16 | fix-formatting: 17 | $(MAKE) -C distribution $@ 18 | $(MAKE) -C services $@ 19 | 20 | publish: 21 | $(MAKE) -C distribution $@ 22 | $(MAKE) -C services $@ 23 | 24 | update-package-locks: 25 | # Sigh, the sed for forcing https below is recommended by the npm registry team itself... 26 | # https://npm.community/t/some-packages-have-dist-tarball-as-http-and-not-https/285/13 27 | rm -rf services/node_modules/ services/package-lock.json && \ 28 | rm -rf distribution/client/node_modules/ distribution/client/package-lock.json && \ 29 | cd services/ && npm install && \ 30 | sed -i 's/"resolved": "http:/"resolved": "https:/g' package-lock.json && \ 31 | cd ../distribution/client && npm install && \ 32 | sed -i 's/"resolved": "http:/"resolved": "https:/g' package-lock.json 33 | 34 | npm-audit-fix: 35 | # Sigh, the sed for forcing https below is recommended by the npm registry team itself... 
36 | # https://npm.community/t/some-packages-have-dist-tarball-as-http-and-not-https/285/13 37 | cd services/ && npm audit fix && \ 38 | sed -i 's/"resolved": "http:/"resolved": "https:/g' package-lock.json && \ 39 | cd ../distribution/client && npm audit fix && \ 40 | sed -i 's/"resolved": "http:/"resolved": "https:/g' package-lock.json 41 | -------------------------------------------------------------------------------- /distribution/client/src/lib/periodic.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * The Periodic module is responsible for holding onto periodic tasks which 3 | * must be executed regularly 4 | * 5 | */ 6 | 7 | import * as logger from 'winston' 8 | import cron from 'cron'; 9 | 10 | export default class Periodic { 11 | protected readonly jobs : any; 12 | protected readonly offset : number; 13 | 14 | /* 15 | * Requires the feathersjs app instance on initialization 16 | */ 17 | constructor() { 18 | this.jobs = {}; 19 | this.offset = this.computeOffset(); 20 | logger.info('Periodic using minute offset of', this.offset); 21 | } 22 | 23 | runHourly(name, callback) { 24 | logger.info(`Registering periodic hourly task: ${name}`); 25 | let schedule = `${this.offset} * * * *`; 26 | return this.runOnSchedule(name, schedule, callback); 27 | } 28 | 29 | runDaily(name, callback) { 30 | logger.info(`Registering periodic dailytask: ${name}`); 31 | let schedule = `${this.offset} 3 * * *`; 32 | return this.runOnSchedule(name, schedule, callback); 33 | } 34 | 35 | runOnSchedule(name, schedule, callback) { 36 | let job = new cron.CronJob(schedule, callback); 37 | this.jobs[name] = job; 38 | job.start(); 39 | return !!(job); 40 | } 41 | 42 | /* 43 | * Compute an instance specific minute offset for running hourly tasks in a 44 | * way that doesn't cause every client to check in at the same time 45 | * 46 | * @return Number between 0-59 47 | */ 48 | computeOffset() { 49 | return Math.floor(Math.random() * 59); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /services/src/services/errorTelemetry/errorTelemetry.hooks.js: -------------------------------------------------------------------------------- 1 | const errors = require('@feathersjs/errors'); 2 | const logger = require('winston'); 3 | const authentication = require('@feathersjs/authentication'); 4 | const ensureMatchingUUID = require('../../hooks/ensureuuid'); 5 | 6 | const errorTelemetryApiRequiredFields = [ 7 | 'version', 8 | 'timestamp', 9 | 'name', 10 | 'level', 11 | 'message' 12 | ]; 13 | 14 | class ErrorTelemetryHooks { 15 | constructor() { 16 | } 17 | 18 | checkLogFormat(hook) { 19 | if (!hook) { 20 | throw new errors.BadRequest('No hook at all?'); 21 | } 22 | logger.debug('HOOK DATA => ', hook.data); 23 | if (!(hook.data)) { 24 | throw new errors.BadRequest('Missing data'); 25 | } 26 | if (!hook.data.log) { 27 | throw new errors.BadRequest('Missing log field'); 28 | } 29 | errorTelemetryApiRequiredFields.forEach( field => { 30 | if (!hook.data.log[field]) { 31 | throw new errors.BadRequest(`Missing required field '${field}'`); 32 | } 33 | }); 34 | } 35 | 36 | getHooks() { 37 | return { 38 | before: { 39 | all: [ 40 | authentication.hooks.authenticate(['jwt']) 41 | ], 42 | find: [], 43 | get: [], 44 | create: [ 45 | ensureMatchingUUID, 46 | this.checkLogFormat 47 | ], 48 | update: [], 49 | patch: [], 50 | remove: [] 51 | }, 52 | after: {}, 53 | error: {}, 54 | }; 55 | } 56 | } 57 | 58 | module.exports = new ErrorTelemetryHooks(); 
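checkLogFormat() above only validates the shape of the `log` object. As an illustration, a create() payload that would satisfy those checks might look like the sketch below; the service path and field values are placeholders for this example (the real mount point is defined in errorTelemetry.service.js, which is not shown in this excerpt), `app` is assumed to be the configured Feathers application, and the `uuid` is validated separately against the authenticated client by the ensureuuid hook:

app.service('telemetry/error').create({
  uuid: 'registered-client-uuid',
  log: {
    // every field listed in errorTelemetryApiRequiredFields must be present
    version: 1,
    timestamp: 1545321600000,
    name: 'hudson.PluginManager',
    level: 'SEVERE',
    message: 'Failed Loading plugin'
  }
});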
59 | -------------------------------------------------------------------------------- /distribution/client/test/snapshotter.test.ts: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | 3 | import tmp from 'tmp'; 4 | import Snapshotter from '../src/lib/snapshotter'; 5 | 6 | describe('The snapshotting module', () => { 7 | let tmpDir = null; 8 | beforeEach(() => { 9 | tmpDir = tmp.dirSync({unsafeCleanup: true}); 10 | }); 11 | 12 | afterEach(() => { 13 | tmpDir.removeCallback(); 14 | }); 15 | 16 | describe('init()', () => { 17 | it('should init a repo', () => { 18 | const snapshotter = new Snapshotter(); 19 | snapshotter.init(tmpDir.name); 20 | 21 | // Called twice does not crash: 22 | // important since this is what will happen 23 | // on container/client restart 24 | snapshotter.init(tmpDir.name); 25 | }); 26 | }); 27 | describe('snapshot()', () => { 28 | it('should create a commit()', () => { 29 | const snapshotter = new Snapshotter(); 30 | snapshotter.init(tmpDir.name); 31 | fs.writeFileSync(`${tmpDir.name}/blah.file`, 'something'); 32 | snapshotter.snapshot('yay test message'); 33 | 34 | const gitIgnore = fs.readFileSync(`${tmpDir.name}/.gitignore`,'utf-8'); 35 | expect(gitIgnore).toContain('/plugins/'); 36 | expect(gitIgnore).toContain('secrets/master.key'); 37 | }); 38 | it('should create a commit even without file()', () => { 39 | const snapshotter = new Snapshotter(); 40 | snapshotter.init(tmpDir.name); 41 | 42 | snapshotter.snapshot('yay test message'); 43 | }); 44 | 45 | // TODO: test ./plugins is "gitignored" 46 | }); 47 | 48 | }); 49 | -------------------------------------------------------------------------------- /distribution/client/test/client.test.ts: -------------------------------------------------------------------------------- 1 | const mkdirp = require('mkdirp'); 2 | 3 | import tmp from 'tmp'; 4 | import Client from '../src/client'; 5 | import Storage from '../src/lib/storage'; 6 | 7 | describe('The base client module', () => { 8 | it('should interpret properly', () => { 9 | expect(Client).toBeTruthy(); 10 | }); 11 | 12 | describe('flavorCheck', () => { 13 | beforeEach( () => { 14 | const evergreenHome = tmp.dirSync({unsafeCleanup: true}).name; 15 | Storage.homeDirectory = (() => evergreenHome ); 16 | mkdirp.sync(Storage.jenkinsHome()); 17 | }); 18 | 19 | it('should throw an error with no flavor defined', () => { 20 | expect(() => { 21 | delete process.env.FLAVOR; 22 | new Client(); 23 | }).toThrow(); 24 | }); 25 | }); 26 | 27 | describe('isOffline()', () => { 28 | let client = null; 29 | 30 | beforeEach( () => { 31 | const evergreenHome = tmp.dirSync({unsafeCleanup: true}).name; 32 | Storage.homeDirectory = (() => evergreenHome ); 33 | mkdirp.sync(Storage.jenkinsHome()); 34 | process.env.FLAVOR = 'docker-cloud'; 35 | client = new Client(); 36 | }); 37 | 38 | 39 | it('should default to false', () => { 40 | expect(client.isOffline()).toBeFalsy(); 41 | }); 42 | 43 | describe('when EVERGREEN_OFFLINE is set', () => { 44 | beforeEach(() => { 45 | jest.resetModules(); 46 | process.env.EVERGREEN_OFFLINE = '1'; 47 | }); 48 | 49 | afterEach(() => { 50 | jest.resetModules(); 51 | }); 52 | 53 | it('should be true', () => { 54 | expect(client.isOffline()).toBeTruthy(); 55 | }); 56 | }); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /services/cli-test/plugin-dependency.test.js: -------------------------------------------------------------------------------- 1 | 
const PluginDependency = require('../cli/plugin-dependency'); 2 | 3 | describe('PluginDependency', () => { 4 | it('should be constructable', () => { 5 | expect(new PluginDependency()).toBeInstanceOf(PluginDependency); 6 | }); 7 | 8 | 9 | describe('fromRecord()', () => { 10 | it('should create a proper object', () => { 11 | let record = { 12 | groupId: 'io.jenkins', 13 | artifactId: 'jest', 14 | version: '0.1', 15 | }; 16 | let dep = PluginDependency.fromRecord(record); 17 | 18 | expect(dep).toBeInstanceOf(PluginDependency); 19 | expect(dep.version).toEqual(record.version); 20 | expect(dep.artifactId).toEqual(record.artifactId); 21 | }); 22 | }); 23 | 24 | describe('fromEntry', () => { 25 | it('should return null on an empty entry', () => { 26 | expect(PluginDependency.fromEntry('')).toBeNull(); 27 | }); 28 | 29 | it('should handle an entry', () => { 30 | let entry = 'structs:1.9'; 31 | let dep = PluginDependency.fromEntry(entry); 32 | expect(dep).toBeInstanceOf(PluginDependency); 33 | expect(dep.version).toEqual('1.9'); 34 | expect(dep.artifactId).toEqual('structs'); 35 | expect(dep.isOptional()).toBeFalsy(); 36 | }); 37 | 38 | it('should handle an optional entry', () => { 39 | let entry = 'credentials:2.1.16;resolution:=optional'; 40 | let dep = PluginDependency.fromEntry(entry); 41 | expect(dep).toBeInstanceOf(PluginDependency); 42 | expect(dep.version).toEqual('2.1.16'); 43 | expect(dep.artifactId).toEqual('credentials'); 44 | expect(dep.isOptional()).toBeTruthy(); 45 | }); 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /services/cli/plugin-dependency.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | /* 4 | * Representation of a plugin dependency defined by a plugin's manifest 5 | */ 6 | class PluginDependency { 7 | constructor() { 8 | this.artifactId = null; 9 | this.version = null; 10 | this.optional = false; 11 | this.dependencies = []; 12 | } 13 | 14 | isOptional() { 15 | return this.optional; 16 | } 17 | 18 | /* 19 | * Return the object based on an entry in MANIFEST.MF's Plugin-Dependencies 20 | * metadata 21 | * 22 | * @param {string} Entry from the comma separated list 23 | * @return {PluginDependency} 24 | * @return {null} if there is no dependency 25 | */ 26 | static fromEntry(line) { 27 | if (!line) { 28 | return null; 29 | } 30 | 31 | let dependency = new PluginDependency(); 32 | dependency.optional = !! 
line.match(/=optional/); 33 | 34 | // credentials:2.1.16;resolution:=optional 35 | // eslint-disable-next-line no-unused-vars 36 | const [spec, unused] = line.split(';'); 37 | const [artifactId, version] = spec.split(':'); 38 | dependency.artifactId = artifactId; 39 | dependency.version = version; 40 | return dependency; 41 | } 42 | 43 | /* 44 | * Return an object based on the record from essentials.yaml 45 | * 46 | * @param {object} plugin record from the essentials.yaml format 47 | * @return {PluginDependency} 48 | */ 49 | static fromRecord(record) { 50 | let dependency = new PluginDependency(); 51 | dependency.version = record.version; 52 | dependency.groupId = record.groupId; 53 | dependency.artifactId = record.artifactId; 54 | 55 | return dependency; 56 | } 57 | } 58 | 59 | module.exports = PluginDependency; 60 | -------------------------------------------------------------------------------- /services/test/services/errortelemetry.hooks.test.js: -------------------------------------------------------------------------------- 1 | const errors = require('@feathersjs/errors'); 2 | 3 | const checkLogFormat = require('../../src/services/errorTelemetry/errorTelemetry.hooks').checkLogFormat; 4 | 5 | describe('Error Telemetry Hooks', () => { 6 | it('should fail with empty parameters', () => { 7 | expect(() => { 8 | checkLogFormat(); 9 | }).toThrow(/No hook at all/); 10 | }); 11 | 12 | it('missing data should be rejected', async() => { 13 | expect(() => { 14 | checkLogFormat({}); 15 | }).toThrow(/Missing data/); 16 | }); 17 | 18 | it('missing log field should be rejected', async() => { 19 | expect(() => { 20 | checkLogFormat({data: {}}); 21 | }).toThrow(/Missing log field/); 22 | }); 23 | 24 | it('missing fields should be rejected', async () => { 25 | const badQueries = [ 26 | {data: {'log':{}}}, 27 | {data: {'log':{'version': 1 }}}, 28 | {data: {'log':{'version': 1, 'timestamp': 1526387942 }}}, 29 | {data: {'log':{'version': 1, 'timestamp': 1526387942 }}}, 30 | {data: {'log':{'version': 1, 'timestamp': 1526387942 }, 'name': 'name'}}, 31 | {data: {'log':{'version': 1, 'timestamp': 1526387942 }, 'name': 'name', 'level': 'info'}}, 32 | ]; 33 | for (let i = 0; i < badQueries.length; i++) { 34 | expect(() => { 35 | checkLogFormat(badQueries[i]); 36 | }).toThrow(/Missing required field/); 37 | } 38 | }); 39 | 40 | it('should pass with valid data', async() => { 41 | let hook = { data : {'log':{'timestamp': 1526387942, 'version': 1, 'name': 'name', 'level': 'info', 'message': 'message' }} }; 42 | expect(() => { 43 | checkLogFormat(hook); 44 | }).not.toThrow(errors.BadRequest); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /services/src/hooks/ensureuuid.js: -------------------------------------------------------------------------------- 1 | const errors = require('@feathersjs/errors'); 2 | const logger = require('winston'); 3 | 4 | /* 5 | * Ensure that the given UUID matches the UUID inside of the JWT 6 | * 7 | * DOES NOT APPLY TO INTERNAL CALLS 8 | */ 9 | module.exports = function(context) { 10 | /* This is an internal call and should be allowed */ 11 | if (!context.params.provider) { 12 | return context; 13 | } 14 | 15 | /* 16 | * If we have no UUID provided by the JWT, bail early 17 | */ 18 | if (!context.params.user) { 19 | throw new errors.BadRequest('Missing token with request'); 20 | } 21 | 22 | if (context.method == 'get') { 23 | if (context.id != context.params.user.uuid) { 24 | throw new errors.NotAuthenticated('Invalid UUID'); 25 | } 
26 | return context; 27 | } 28 | 29 | if (context.method == 'find') { 30 | if (!context.params.query.uuid) { 31 | throw new errors.BadRequest('Invalid UUID in query parameters'); 32 | } 33 | if (context.params.query.uuid != context.params.user.uuid) { 34 | throw new errors.NotAuthenticated('Invalid UUID'); 35 | } 36 | return context; 37 | } 38 | 39 | 40 | if (!context.data.uuid) { 41 | throw new errors.BadRequest('Invalid UUID in data body'); 42 | } 43 | 44 | if (context.data.uuid != context.params.user.uuid) { 45 | logger.error('Receiving a request with a UUID not matching the token (%s/%s)', 46 | context.data.uuid, 47 | context.params.user.uuid); 48 | throw new errors.NotAuthenticated('Invalid UUID'); 49 | } 50 | 51 | return context; 52 | }; 53 | 54 | /* 55 | * Describe the parameter requirements for this hook 56 | */ 57 | module.exports.swagger = { 58 | query: { 59 | name: 'uuid', 60 | description: 'The client\'s UUID', 61 | required: true, 62 | schema: { 63 | type: 'string', 64 | }, 65 | }, 66 | }; 67 | -------------------------------------------------------------------------------- /services/cli/url-resolver.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const path = require('path'); 4 | 5 | const INCREMENTALS = 'https://repo.jenkins-ci.org/incrementals/'; 6 | const RELEASES = 'https://repo.jenkins-ci.org/releases/'; 7 | const WAR_MIRROR = 'http://mirrors.jenkins.io/war/'; 8 | 9 | 10 | /* 11 | * The URL Resolver will take a given plugin and return the URLs necessary for 12 | * a plugin 13 | */ 14 | class UrlResolver { 15 | /* 16 | * Compute the mirrored or Artifactory URL for the given core record 17 | * @param {object} corerecord from essentials.yaml 18 | * @return {string} URL to Mirrors/Artifactory 19 | */ 20 | static artifactForCore(core) { 21 | if (this.isIncremental(core)) { 22 | return `${INCREMENTALS}org/jenkins-ci/main/jenkins-war/${core.version}/jenkins-war-${core.version}.war`; 23 | } 24 | return `${WAR_MIRROR}${core.version}/jenkins.war`; 25 | } 26 | /* 27 | * Compute the Artifactory URL for the given plugin record 28 | * 29 | * @param {object} plugin record from essentials.yaml 30 | * @return {string} URL to Artifactory 31 | */ 32 | static artifactForPlugin(plugin) { 33 | const pluginFilename = path.join(plugin.artifactId, plugin.version, `${plugin.artifactId}-${plugin.version}.hpi`); 34 | const groupPath = plugin.groupId.replace(/\./g, '/'); 35 | let url = `${RELEASES}${groupPath}/`; 36 | 37 | if (this.isIncremental(plugin)) { 38 | url = `${INCREMENTALS}${groupPath}/`; 39 | } 40 | return url + pluginFilename; 41 | } 42 | 43 | /* 44 | * Determine whether the given plugin record represents an incremental plugin 45 | * or not 46 | * 47 | * @param {object} plugin record from the essentials.yaml 48 | * @return {boolean} 49 | */ 50 | static isIncremental(plugin) { 51 | return !! 
plugin.version.match(/(.*?)-rc(\d+)\.(.*)?/); 52 | } 53 | } 54 | 55 | module.exports = UrlResolver; 56 | -------------------------------------------------------------------------------- /services/src/sequelize.js: -------------------------------------------------------------------------------- 1 | const Sequelize = require('sequelize'); 2 | const { Op } = Sequelize; 3 | const operatorsAliases = { 4 | $eq: Op.eq, 5 | $ne: Op.ne, 6 | $gte: Op.gte, 7 | $gt: Op.gt, 8 | $lte: Op.lte, 9 | $lt: Op.lt, 10 | $not: Op.not, 11 | $in: Op.in, 12 | $notIn: Op.notIn, 13 | $is: Op.is, 14 | $like: Op.like, 15 | $notLike: Op.notLike, 16 | $iLike: Op.iLike, 17 | $notILike: Op.notILike, 18 | $regexp: Op.regexp, 19 | $notRegexp: Op.notRegexp, 20 | $iRegexp: Op.iRegexp, 21 | $notIRegexp: Op.notIRegexp, 22 | $between: Op.between, 23 | $notBetween: Op.notBetween, 24 | $overlap: Op.overlap, 25 | $contains: Op.contains, 26 | $contained: Op.contained, 27 | $adjacent: Op.adjacent, 28 | $strictLeft: Op.strictLeft, 29 | $strictRight: Op.strictRight, 30 | $noExtendRight: Op.noExtendRight, 31 | $noExtendLeft: Op.noExtendLeft, 32 | $and: Op.and, 33 | $or: Op.or, 34 | $any: Op.any, 35 | $all: Op.all, 36 | $values: Op.values, 37 | $col: Op.col 38 | }; 39 | 40 | module.exports = function (app) { 41 | const connectionString = process.env.DB_CONNECTION_STRING || app.get('postgres'); 42 | const sequelize = new Sequelize(connectionString, { 43 | dialect: 'postgres', 44 | logging: !!process.env.DB_TRACING, 45 | operatorsAliases, 46 | define: { 47 | freezeTableName: true 48 | } 49 | }); 50 | const oldSetup = app.setup; 51 | 52 | app.set('sequelizeClient', sequelize); 53 | 54 | app.setup = function (...args) { 55 | const result = oldSetup.apply(this, args); 56 | 57 | // Set up data relationships 58 | const models = sequelize.models; 59 | Object.keys(models).forEach(name => { 60 | if ('associate' in models[name]) { 61 | models[name].associate(models); 62 | } 63 | }); 64 | 65 | // Sync to the database 66 | sequelize.sync(); 67 | 68 | return result; 69 | }; 70 | }; 71 | -------------------------------------------------------------------------------- /distribution/client/ui/index.js: -------------------------------------------------------------------------------- 1 | console.info('Loading Evergreen UI'); 2 | 3 | const feathers = require('@feathersjs/feathers'); 4 | const socketio = require('@feathersjs/socketio-client'); 5 | const io = require('socket.io-client'); 6 | 7 | const socket = io('', { 8 | reconnection: true, 9 | reconnectionDelay: 1000, 10 | reconnectionDelayMax : 5000, 11 | reconnectionAttempts: Infinity 12 | }); 13 | 14 | const app = feathers(); 15 | app.configure(socketio(socket)); 16 | 17 | socket.on('connect', () => { 18 | console.info('Connected to the Evergreen socket.io channel'); 19 | for (let el of document.getElementsByClassName('status-indicator')) { 20 | el.setAttribute('class', 'status-indicator connected'); 21 | } 22 | 23 | socket.emit('find', 'messages', {}, (error, data) => { 24 | console.log('Found messages', data); 25 | data.forEach(m => window.addEvergreenMessage(m)); 26 | }); 27 | }); 28 | 29 | socket.on('disconnect', () => { 30 | for (let el of document.getElementsByClassName('status-indicator')) { 31 | el.setAttribute('class', 'status-indicator disconnected'); 32 | } 33 | }); 34 | socket.on('reconnect', () => { 35 | console.info('Reconnecting the socket.io channel'); 36 | }); 37 | 38 | app.service('messages').on('created', (data) => { 39 | console.log('Received message from the backend:', data); 40 | 
window.addEvergreenMessage(data); 41 | }); 42 | 43 | /* 44 | * Really crappy manual HTML construction, this should clearly be improved in 45 | * the future 46 | */ 47 | window.addEvergreenMessage = (data) => { 48 | const containers = document.getElementsByClassName('messages'); 49 | 50 | const el = document.createElement('div'); 51 | el.setAttribute('class', 'message'); 52 | 53 | const m = document.createElement('pre'); 54 | m.innerHTML = data.message; 55 | 56 | el.appendChild(m); 57 | 58 | for (let item of containers) { 59 | item.prepend(el); 60 | } 61 | }; 62 | -------------------------------------------------------------------------------- /distribution/client/README.adoc: -------------------------------------------------------------------------------- 1 | = evergreen-client 2 | 3 | This directory contains the `evergreen-client` application which provides the 4 | client-side functionality necessary for the Evergreen distribution system. 5 | 6 | The purpose of `evergreen-client` is to facilitate the following: 7 | 8 | * Maintain <<status>> with the Evergreen hosted services layer. 9 | * Orchestrate <<upgrades>> of the `jenkins.war` and plugin `.jpi` files when 10 | instructed by the Evergreen hosted services layer. 11 | * Collect and deliver telemetry as necessary to the Evergreen hosted services 12 | layer. 13 | * Additional capabilities as the needs of Jenkins Evergreen evolve. 14 | 15 | 16 | == Design 17 | 18 | Generally speaking, `evergreen-client` is designed to act as a lightweight 19 | "sidecar" process, running alongside `jenkins.war`, and should conceptually 20 | implement various "commands" to be sent from the Evergreen hosted services 21 | layer. Commands such as `ping`, `flags`, `logs`, etc. are intended to be 22 | delivered by a long-lived Server-sent Events channel initiated by 23 | `evergreen-client` to the "Status" service. 24 | 25 | While `evergreen-client` can and may be required to run other executables in 26 | the system, most of the "core" functionality should be implemented in 27 | JavaScript and incorporated into `evergreen-client`. 28 | 29 | `evergreen-client` will also be responsible for managing the process lifecycle of 30 | Jenkins via communication with `supervisord`. 31 | 32 | 33 | [[status]] 34 | === Status 35 | 36 | NOTE: This is still a work in progress 37 | 38 | [[upgrades]] 39 | === Upgrading 40 | 41 | NOTE: This is still a work in progress 42 | 43 | Types of upgrades `evergreen-client` should be responsible for: 44 | 45 | * An evergreen manifest (i.e. Jenkins core and plugins), including adding and 46 | removing plugins as the needs of Jenkins Evergreen change 47 | * Configuration as Code to manage the "automatic sane defaults" aspect of 48 | Jenkins Evergreen. 49 | * `evergreen-client` releases itself. 50 | -------------------------------------------------------------------------------- /docs/index.adoc: -------------------------------------------------------------------------------- 1 | = Evergreen User Guide 2 | 3 | :toc: 4 | :sectanchors: 5 | :description: Jenkins Evergreen built-in User Guide 6 | :author: R Tyler Croy 7 | 8 | **Thank you for installing Jenkins Evergreen!** 9 | 10 | Jenkins Evergreen is an automatically updating rolling distribution system for 11 | Jenkins. It consists of server-side and client-side components to support a 12 | Chrome-like upgrade experience for Jenkins users.
13 | 14 | == Introduction 15 | 16 | [NOTE] 17 | ==== 18 | This built-in documentation is still a work in progress and can be improved by 19 | contributing to the link:https://github.com/jenkins-infra/evergreen[Evergreen 20 | GitHub repository] or by suggesting edits to 21 | link:https://github.com/jenkins-infra/evergreen/edit/master/distribution/client/docs/index.adoc[this 22 | document]. 23 | ==== 24 | 25 | 26 | == Features 27 | 28 | 29 | == For Existing Jenkins Users 30 | 31 | [[managing-plugins]] 32 | === Managing Plugins 33 | 34 | Evergreen is a curated distribution of a large collection of useful features 35 | provided by Evergreen, Jenkins core, and plugins. This entire distribution is tested and deployed together as a cohesive artifact. 36 | 37 | To ensure the stability of the instance, Evergreen intentionally disables the 38 | ability to install or remove additional plugins on the instance. 39 | 40 | [[managing-tools]] 41 | === Configuring Tools 42 | 43 | Jenkins Evergreen is designed for 44 | link:https://jenkins.io/doc/book/pipeline[Jenkins Pipeline] 45 | first and foremost, including the 46 | link:https://jenkins.io/doc/book/pipeline/docker/[built-in Docker support] 47 | which makes using custom tools for a Pipeline much easier, for example: 48 | 49 | [source,groovy] 50 | ---- 51 | pipeline { 52 | agent { 53 | docker { image 'node:7-alpine' } 54 | } 55 | stages { 56 | stage('Test') { 57 | steps { 58 | sh 'node --version' 59 | } 60 | } 61 | } 62 | } 63 | ---- 64 | 65 | Accordingly, configuring legacy "Tools" in Jenkins has been disabled. 66 | -------------------------------------------------------------------------------- /services/test/services/errortelemetry.test.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs'); 2 | const assert = require('assert'); 3 | const errors = require('@feathersjs/errors'); 4 | 5 | 6 | const app = require('../../src/app'); 7 | 8 | const errorTelemetryService = 'telemetry/error'; 9 | 10 | describe('\'ErrorTelemetry\' service', () => { 11 | it('registered the service', () => { 12 | const service = app.service(errorTelemetryService); 13 | 14 | assert.ok(service, 'Registered the service'); 15 | }); 16 | }); 17 | 18 | describe('Error Telemetry', () => { 19 | it('should fail with empty parameters', () => { 20 | const service = app.service(errorTelemetryService); 21 | return service.create() 22 | .then(() => assert.fail('Should have failed to create()')) 23 | .catch((err) => assert.ok(err.message.match('^A data object must be provided'))); 24 | }); 25 | 26 | it('missing fields should be rejected', async () => { 27 | const service = app.service(errorTelemetryService); 28 | 29 | const badQueries = [ 30 | {}, 31 | {'log':{'version': 1 }}, 32 | {'log':{'timestamp': 1526387942 }} 33 | ]; 34 | for (let i = 0; i < badQueries.length; i++) { 35 | try { 36 | await service.create(badQueries[i]); 37 | assert.fail('Should have failed above'); 38 | } catch (err) { 39 | // expected 40 | assert.ok( err instanceof errors.BadRequest ); 41 | } 42 | } 43 | 44 | }); 45 | 46 | it('should create a log', async () => { 47 | const service = app.service(errorTelemetryService); 48 | const response = await service.create({ 49 | log: { 50 | version: 1, 51 | timestamp: 1522840762769, 52 | name: 'io.jenkins.plugins.SomeTypicalClass', 53 | level: 'WARNING', 54 | message: 'the message\nand another line', 55 | } 56 | }); 57 | 58 | assert.ok(response, 'A log should have been stored'); 59 | assert.equal(response.status, 'OK', 'The
log should have been stored'); 60 | 61 | const fileContent = fs.readFileSync('/tmp/error-telemetry-testing.log'); 62 | assert.notEqual(fileContent, '', 'Log file should not be empty'); 63 | 64 | }); 65 | 66 | }); 67 | -------------------------------------------------------------------------------- /distribution/flavors/docker-cloud/config/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | nodaemon=true 3 | 4 | [inet_http_server] 5 | port=:9001 6 | 7 | [rpcinterface:supervisor] 8 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 9 | 10 | [program:evergreen-client] 11 | environment=HOME=%(ENV_EVERGREEN_HOME)s # Needed for Git or Node JENKINS-53856 12 | command=/evergreen/scripts/start-client.sh 13 | directory=%(ENV_EVERGREEN_HOME)s/client 14 | stdout_logfile=/dev/stdout 15 | stdout_logfile_maxbytes=0 16 | redirect_stderr=true 17 | startsecs=2 18 | startretries=20 19 | user=jenkins 20 | 21 | [program:jenkins] 22 | command=/evergreen/scripts/jenkins-evergreen.sh 23 | directory=%(ENV_JENKINS_HOME)s 24 | stdout_logfile=/dev/stdout 25 | stdout_logfile_maxbytes=0 26 | redirect_stderr=true 27 | startsecs=10 28 | startretries=0 29 | user=jenkins 30 | 31 | [program:nginx] 32 | command=/usr/sbin/nginx -c /evergreen/config/nginx.conf -g "daemon off;" 33 | stdout_logfile=/dev/stdout 34 | stdout_logfile_maxbytes=0 35 | redirect_stderr=true 36 | startsecs=10 37 | startretries=0 38 | 39 | # The configuration and usage of socat below requires an explanation. 40 | # The intent is to provide an out-of-the-box Evergreen instance able to run builds using Docker. 41 | # The chosen path for this is to require that the Docker socket be bind-mounted. 42 | # 43 | # As the Jenkins instance user is *not* root, it can not use /var/run/docker.sock by default 44 | # in a typical setup. Because when mounted with `-v /var/run/docker.sock:/var/run/docker.sock` the 45 | # socket will not be writable to the world, and depending on the setup not even readable. 46 | # 47 | # So, there are two paths forward: 48 | # * Either we require that the administrators on the host run `chmod a+rw /var/run/docker.sock`, or 49 | # * we just expose that file as a full TCP socket, listening on the usual 2375 port. 50 | # 51 | # We chose the latter because it seemed less work for the Evergreen users, and hence more in line 52 | # with the overall easiness Jenkins Evergreen aims at providing. 53 | [program:socat] 54 | command=socat -d -d TCP4-LISTEN:2375,fork UNIX-CONNECT:/var/run/docker.sock 55 | startsecs=0 56 | stdout_logfile=/dev/stdout 57 | 58 | # vim: ft=ini 59 | -------------------------------------------------------------------------------- /services/src/channels.js: -------------------------------------------------------------------------------- 1 | const logger = require('winston'); 2 | 3 | module.exports = function(app) { 4 | if (typeof app.channel !== 'function') { 5 | // If no real-time functionality has been configured just return 6 | return; 7 | } 8 | 9 | app.on('connection', connection => { 10 | logger.info('socket.io connection', connection); 11 | // On a new real-time connection, add it to the anonymous channel 12 | app.channel('anonymous').join(connection); 13 | }); 14 | 15 | app.on('login', (authResult, { connection }) => { 16 | logger.info('AUthentication on socket', authResult, connection); 17 | // connection can be undefined if there is no 18 | // real-time connection, e.g. 
when logging in via REST 19 | if (connection) { 20 | // Obtain the logged in user from the connection 21 | // const user = connection.user; 22 | 23 | // The connection is no longer anonymous, remove it 24 | app.channel('anonymous').leave(connection); 25 | 26 | // Add it to the authenticated user channel 27 | app.channel('authenticated').join(connection); 28 | 29 | // Channels can be named anything and joined on any condition 30 | 31 | // E.g. to send real-time events only to admins use 32 | // if(user.isAdmin) { app.channel('admins').join(connection); } 33 | 34 | // If the user has joined e.g. chat rooms 35 | // if(Array.isArray(user.rooms)) user.rooms.forEach(room => app.channel(`rooms/${room.id}`).join(channel)); 36 | 37 | // Easily organize users by email and userid for things like messaging 38 | // app.channel(`emails/${user.email}`).join(channel); 39 | // app.channel(`userIds/$(user.id}`).join(channel); 40 | } 41 | }); 42 | 43 | // eslint-disable-next-line no-unused-vars 44 | app.publish((data, hook) => { 45 | return app.channel('authenticated'); 46 | }); 47 | 48 | /* 49 | * Expose update/created events to all clients regardless of authentication 50 | * status to ensure that they get passed updates properly 51 | */ 52 | app.service('update').publish('created', () => app.channel('anonymous', 'authenticated')); 53 | app.service('status').publish('ping', () => app.channel('anonymous', 'authenticated')); 54 | }; 55 | -------------------------------------------------------------------------------- /distribution/flavors/java11-docker-cloud/config/supervisord.conf: -------------------------------------------------------------------------------- 1 | [supervisord] 2 | nodaemon=true 3 | 4 | [inet_http_server] 5 | port=:9001 6 | 7 | [rpcinterface:supervisor] 8 | supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface 9 | 10 | [program:evergreen-client] 11 | environment=HOME=%(ENV_EVERGREEN_HOME)s # Needed for Git or Node JENKINS-53856 12 | command=/evergreen/scripts/start-client.sh 13 | directory=%(ENV_EVERGREEN_HOME)s/client 14 | stdout_logfile=/dev/stdout 15 | stdout_logfile_maxbytes=0 16 | redirect_stderr=true 17 | startsecs=2 18 | startretries=20 19 | user=jenkins 20 | 21 | [program:jenkins] 22 | command=/evergreen/scripts/jenkins-evergreen.sh 23 | directory=%(ENV_JENKINS_HOME)s 24 | stdout_logfile=/dev/stdout 25 | stdout_logfile_maxbytes=0 26 | redirect_stderr=true 27 | startsecs=10 28 | startretries=0 29 | user=jenkins 30 | 31 | [program:nginx] 32 | command=/usr/sbin/nginx -c /evergreen/config/nginx.conf -g "daemon off;" 33 | stdout_logfile=/dev/stdout 34 | stdout_logfile_maxbytes=0 35 | redirect_stderr=true 36 | startsecs=10 37 | startretries=0 38 | 39 | # The configuration and usage of socat below requires an explanation. 40 | # The intent is to provide an out-of-the-box Evergreen instance able to run builds using Docker. 41 | # The chosen path for this is to require that the Docker socket be bind-mounted. 42 | # 43 | # As the Jenkins instance user is *not* root, it can not use /var/run/docker.sock by default 44 | # in a typical setup. Because when mounted with `-v /var/run/docker.sock:/var/run/docker.sock` the 45 | # socket will not be writable to the world, and depending on the setup not even readable. 46 | # 47 | # So, there are two paths forward: 48 | # * Either we require that the administrators on the host run `chmod a+rw /var/run/docker.sock`, or 49 | # * we just expose that file as a full TCP socket, listening on the usual 2375 port. 
50 | # 51 | # We chose the latter because it seemed less work for the Evergreen users, and hence more in line 52 | # with the overall easiness Jenkins Evergreen aims at providing. 53 | [program:socat] 54 | command=socat -d -d TCP4-LISTEN:2375,fork UNIX-CONNECT:/var/run/docker.sock 55 | startsecs=0 56 | stdout_logfile=/dev/stdout 57 | 58 | # vim: ft=ini 59 | -------------------------------------------------------------------------------- /services/src/sequelize-swagger.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This module was originally created by @Nvveen: 3 | * 4 | * and is licensed under the MIT license. 5 | * 6 | * All this does is helps map Models from sequelize into Swagger documentation 7 | * properly. 8 | */ 9 | 10 | const _ = require('lodash'); 11 | 12 | module.exports = function() { 13 | const app = this; 14 | // Check for swagger 15 | if (_.isNil(app.docs)) { 16 | throw new Error('no swagger defined'); 17 | } 18 | // Iterate over the doc paths, find the service 19 | _(app.docs.paths) 20 | .keys() 21 | .map(path => ({ path, service: app.service(path) })) 22 | .filter(({ service }) => _.isObject(service) && !_.isNil(service.Model)) 23 | .reduce((docs, { path, service }) => { 24 | const name = path.split('/')[1]; 25 | docs.tags = _.map(docs.tags, t => { 26 | if (t.name === name && _.has(service, 'Model.options.description')) { 27 | t.description = service.Model.options.description; 28 | } 29 | return t; 30 | }); 31 | docs.definitions[name] = _(service.Model.rawAttributes) 32 | .filter(a => !_.isNil(a.description)) 33 | .reduce( 34 | (attrs, a) => 35 | _.merge(attrs, { 36 | properties: { 37 | [a.fieldName]: { 38 | type: getType(a.type.key), 39 | description: a.description 40 | } 41 | } 42 | }), 43 | app.docs.definitions[name] 44 | ); 45 | return docs; 46 | }, app.docs); 47 | }; 48 | 49 | function getType(type) { 50 | switch (type) { 51 | case 'STRING': 52 | case 'CHAR': 53 | case 'TEXT': 54 | case 'BLOB': 55 | case 'DATE': 56 | case 'DATEONLY': 57 | case 'TIME': 58 | case 'NOW': 59 | case 'UUID': 60 | return 'string'; 61 | case 'INTEGER': 62 | case 'BIGINT': 63 | return 'integer'; 64 | case 'FLOAT': 65 | case 'DOUBLE': 66 | case 'DECIMAL': 67 | return 'number'; 68 | case 'BOOLEAN': 69 | return 'boolean'; 70 | case 'ARRAY': 71 | return 'array'; 72 | case 'JSON': 73 | return 'object'; 74 | default: 75 | return 'object'; 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /distribution/client/test/error-telemetry.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock('fs'); 2 | 3 | const assert = require('assert'); 4 | const fs = require('fs'); 5 | const path = require('path'); 6 | const mkdirp = require('mkdirp'); 7 | 8 | import ErrorTelemetry from '../src/lib/error-telemetry'; 9 | 10 | describe('Error Telemetry Logging', () => { 11 | beforeEach(() => { 12 | /* Make sure memfs is flushed every time */ 13 | fs.volume.reset(); 14 | }); 15 | 16 | describe('authenticate()', () => { 17 | it('should store values', () => { 18 | const telemetry = new ErrorTelemetry(null, null, null).authenticate('you-you-i-Dee', 'toe-ken-that-guy'); 19 | assert.equal(telemetry.uuid, 'you-you-i-Dee'); 20 | }); 21 | }); 22 | 23 | describe('setup() call', () => { 24 | const errorTelemetryService = new ErrorTelemetry(null, null, null); 25 | 26 | let logsDir = '/evergreen/jenkins/war/logs'; 27 | let logFile = path.join(logsDir, 'evergreen.log.0'); 28 | 29 | 
beforeEach(() => { 30 | // Set up the directories needed 31 | mkdirp.sync(logsDir); 32 | // Seed our log file with one message to start 33 | fs.writeFileSync(logFile, '{"timestamp":1523451065975,"level":"SEVERE","message":"WAT"}\n'); 34 | }); 35 | 36 | // FIXME: only hackish, the end goal is definitely not to forward to another file 37 | it('writing to evergreen logging file should forward to another', done => { 38 | const forwardedLines = []; 39 | 40 | errorTelemetryService.callErrorTelemetryService = (app,jsonObject) => { 41 | forwardedLines.push(jsonObject.message); 42 | }; 43 | 44 | const response = errorTelemetryService.setup(logFile); 45 | expect(response).not.toBe(Promise); 46 | expect(forwardedLines.length).toEqual(0); 47 | 48 | // when: we write to the file 49 | fs.appendFileSync(logFile, '{"timestamp":1523451065975,"level":"SEVERE","message":"WAT2"}\n'); 50 | 51 | // then: the output function is called, and the mocked file contains what we expect 52 | setTimeout( () => { 53 | expect(forwardedLines.length).toEqual(2); 54 | expect(forwardedLines[1]).toEqual('WAT2'); 55 | done(); 56 | }, 2000); 57 | }); 58 | }); 59 | }); 60 | -------------------------------------------------------------------------------- /docs/developer/meetings/2018-03-23-JENKINS-49852-pipeline-usage-telemetry/README.adoc: -------------------------------------------------------------------------------- 1 | = 2018-03-23 - Initial Pipeline Usage Telemetry Discussion 2 | 3 | The purpose of this meeting was to discuss some of the initial feedback on 4 | link:https://issues.jenkins-ci.org/browse/JENKINS-49852[JENKINS-49852] 5 | in link:https://groups.google.com/d/msg/jenkinsci-dev/F7XjrFx_kC8/L4huCc9TAwAJ[this mailing list thread]. 6 | 7 | 8 | 9 | * Concerns about privacy of the telemetry and metrics 10 | ** Need some review before publicly exposing stats 11 | * Related development work for error telemetry: https://issues.jenkins-ci.org/browse/JENKINS-49805 12 | * Ideal situation (from Sam): 13 | ** Setting up plugin which depends on Pipeline itself and the hooks for metrics 14 | ** "Pipeline Metrics plugin" 15 | * GraphListener and RunListener hooks already exist 16 | * New hooks will be necessary 17 | * "Probably more design and thinking work, than actual coding to get those stats out" -- Sam 18 | ** Should there then be a JEP? 19 | ** Point: the Data Science (the analysis) is in some ways harder than adding metrics collections -- problem that we face with Metrics in general is that it reports a bajillion numbers, maybe a couple dozen of which are generally meaningful/useful 20 | * Sam's concern is that collection has some filtering embedding in it so the "collection data science" and "design" should be thought about of more ahead of time 21 | * Jesse's previous suggestions on hooking loggers is not really valid for numerical metrics/events we would want to collect 22 | * Vivek: It should also be interesting for declarative to collect how many of those use 'script' block 23 | ** Andrew: Since script {} is actually a step, we get that for free with any step invocation per job or aggregate data 24 | * Jesse: two different kinds of telemetry 25 | ** Metrics plugin type data, oriented towards performance over time 26 | ** Unrelated stuff, closer to what we collect in support bundles. Summaries of features being used by the system. Number of configured libraries, etc. Measurements of the feature use and complexity of the system. 
Not things you want to sample every 5s for example 27 | * One of the hardest things, wip even in hydra, is to find a way to make judging the state of a number of running Pipelines automated, not needing human like it does now -- Sam 28 | -------------------------------------------------------------------------------- /services/acceptance/helpers.js: -------------------------------------------------------------------------------- 1 | /* 2 | * This module contains functions which are helpful for running all acceptance 3 | * tests 4 | */ 5 | const ecc = require('elliptic'); 6 | const request = require('request-promise'); 7 | const url = require('url'); 8 | const app = require('../src/app'); 9 | 10 | require('./rand-patch'); 11 | 12 | class Helpers { 13 | constructor () { 14 | this.curve = 'secp256k1'; 15 | } 16 | 17 | startApp(callback) { 18 | this.server = app.listen(this.port()); 19 | this.server.once('listening', () => callback()); 20 | } 21 | 22 | stopApp(callback) { 23 | this.server.close(callback); 24 | } 25 | 26 | port() { 27 | /* offset a bit so `make run` and `make acceptance` can be run at the same 28 | * time 29 | */ 30 | return (app.get('port') || 3030) + 24; 31 | } 32 | 33 | getUrl(pathname) { 34 | return url.format({ 35 | hostname: app.get('host') || 'localhost', 36 | protocol: 'http', 37 | port: this.port(), 38 | pathname 39 | }); 40 | } 41 | 42 | /* 43 | * Generate a simple elliptic ECDSA keypair for testing 44 | * 45 | * Typically only used when performing mock client operations 46 | */ 47 | generateKeys() { 48 | let ec = new ecc.ec(this.curve); 49 | return ec.genKeyPair(); 50 | } 51 | 52 | /* 53 | * Execute a registration for the given set of keys 54 | * 55 | * @return a registration response object 56 | */ 57 | async register(keys) { 58 | return await request({ 59 | url: this.getUrl('/registration'), 60 | method: 'POST', 61 | json: true, 62 | body: { 63 | pubKey: keys.getPublic('hex'), 64 | curve: this.curve 65 | } 66 | }); 67 | } 68 | 69 | async registerAndAuthenticate() { 70 | this.keys = this.generateKeys(); 71 | this.reg = await this.register(this.keys); 72 | 73 | const signature = this.keys.sign(this.reg.uuid); 74 | const response = await request({ 75 | url: this.getUrl('/authentication'), 76 | method: 'POST', 77 | json: true, 78 | body: { 79 | strategy: 'local', 80 | uuid: this.reg.uuid, 81 | signature: signature 82 | } 83 | }); 84 | this.token = response.accessToken; 85 | 86 | return { token: this.token, uuid: this.reg.uuid }; 87 | } 88 | } 89 | 90 | module.exports = new Helpers(); 91 | -------------------------------------------------------------------------------- /distribution/client/src/lib/supervisord.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * The supervisor module is responsible for interactions with the supervisord 3 | * XML-RPC API: http://supervisord.org/api.html 4 | */ 5 | 6 | import xmlrpc from 'xmlrpc'; 7 | import * as logger from 'winston' 8 | 9 | const client = xmlrpc.createClient('http://localhost:9001/RPC2'); 10 | 11 | export default class Supervisord { 12 | protected readonly client : xmlrpc.Client; 13 | 14 | constructor() { 15 | } 16 | 17 | static isRunning() { 18 | return new Promise((resolve, reject) => { 19 | client.methodCall('supervisor.getState', null, (e, v) => { 20 | if (e) { 21 | return reject(e); 22 | } 23 | return resolve((v.statename == 'RUNNING')); 24 | }); 25 | }); 26 | } 27 | 28 | static printState(name) { 29 | return new Promise((resolve, reject) => { 30 | 
client.methodCall('supervisor.getProcessInfo', [name], (e, value) => { 31 | if (e) { 32 | return reject(e); 33 | } 34 | return resolve(value.statename == 'RUNNING'); 35 | }); 36 | }); 37 | } 38 | 39 | static isProcessRunning(name) { 40 | return new Promise((resolve, reject) => { 41 | client.methodCall('supervisor.getProcessInfo', [name], (e, value) => { 42 | if (e) { 43 | return reject(e); 44 | } 45 | return resolve((value.statename == 'RUNNING')); 46 | }); 47 | }); 48 | } 49 | 50 | static startProcess(name) { 51 | logger.info(`[supervisord] Starting ${name} process`); 52 | return new Promise((resolve, reject) => { 53 | client.methodCall('supervisor.startProcess', [name,true], (e, value) => { 54 | if (e) { 55 | return reject(e); 56 | } 57 | return resolve(value); 58 | }); 59 | }); 60 | } 61 | 62 | static stopProcess(name) { 63 | logger.info(`[supervisord] Stopping ${name} process`); 64 | return new Promise((resolve, reject) => { 65 | client.methodCall('supervisor.stopProcess', [name,true], (e, value) => { 66 | if (e) { 67 | return reject(e); 68 | } 69 | return resolve(value); 70 | }); 71 | }); 72 | } 73 | 74 | static async restartProcess(name) { 75 | logger.info(`[supervisord] Restarting ${name} process`); 76 | if (await this.isProcessRunning(name)) { 77 | await this.stopProcess(name); 78 | } 79 | 80 | return this.startProcess(name); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /docs/developer/meetings/2018-05-07-existing-telemetry-setup-on-jenkins-io/README.adoc: -------------------------------------------------------------------------------- 1 | = 2018-05-07 - Insights from Olivier about Logging in the K8S Jenkins Infrastructure 2 | :toc: 3 | 4 | == Notes 5 | 6 | === Context 7 | 8 | Now we have link:https://issues.jenkins-ci.org/browse/JENKINS-49811[prototyped the error logging for Jenkins Essentials], we are reaching a point where we need to think what we will put on the server side, and how this will integrate with the existing services inside Jenkins Infrastructure. 9 | Baptiste met Olivier, who was heavily involved in setting up the Kubernetes cluster used behind Jenkins Infrastructure, to discuss this. 10 | 11 | 12 | === Attendees 13 | 14 | * Olivier Vernin 15 | * Baptiste Mathus 16 | 17 | == Summary 18 | 19 | ==== Existing setup from 10k feet 20 | 21 | The existing infrastructure uses FluentD to handle logs events (which are then sent to Azure Log Analytics). 22 | There are currently two types of logs, stream or archive. 23 | The stream ones are sent to Log Analytics and typically have a retention of 2 weeks. 24 | The archive ones are stored using Blob Storage for longer. 25 | The logs for applications running in the Kubernetes cluster are sent using FluentD, for now only from a file source. 26 | 27 | ==== Envisioned setup for Essentials Error Logging 28 | 29 | With our setup, we probably want to send logs from the NodeJS ErrorTelemetry service to the FluentD socket. 30 | Though right now FluentD only has a _file_ input, Olivier says this shouldn't be an issue if we want/need to enable a socket input. 31 | 32 | We will for instance create the right new labels to differentiate between current and new log types. 33 | 34 | Olivier recommends that we are very careful with the logging format, to avoid changing it too often. 35 | He thinks using JSON (like we did) is a sensible choice. 
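For illustration only, a rough sketch of what forwarding one JSON log event from the NodeJS service to a FluentD socket input could look like; the host, port and `in_tcp`-style source are assumptions rather than anything decided in this meeting, and the event shape follows the JEP-304 logging format:

[source,javascript]
----
const net = require('net');

// Hypothetical forwarder: write one newline-delimited JSON event to a local
// FluentD TCP source. Host and port are placeholders for illustration.
function forwardToFluentd(event, host = 'localhost', port = 5170) {
  return new Promise((resolve, reject) => {
    const socket = net.createConnection({ host, port }, () => {
      socket.end(JSON.stringify(event) + '\n', resolve);
    });
    socket.on('error', reject);
  });
}

forwardToFluentd({
  version: 1,
  timestamp: Date.now(),
  name: 'io.jenkins.plugins.SomeTypicalClass',
  level: 'SEVERE',
  message: 'an example error line',
});
----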
36 | 37 | === More references 38 | 39 | * link:https://github.com/jenkins-infra/iep/tree/master/iep-004#logging[IEP-4: Kubernetes for hosting project applications: 40 | Logging] 41 | * link:https://github.com/olblak/fluentd-k8s-azure[Sample FluentD configs for Azure by Olivier] 42 | * link:https://github.com/jenkinsci/jep/tree/master/jep/304#logging-format[The Essentials Error Logging format, as per JEP 304] 43 | 44 | == Actions 45 | 46 | |=== 47 | | ACTION | Person 48 | | Prototype sending logs to a FluentD socket from the ErrorTelemetry service (link:https://issues.jenkins-ci.org/browse/JENKINS-51175[tracked as JENKINS-51175])| Baptiste Mathus 49 | |=== 50 | -------------------------------------------------------------------------------- /services/cli-test/url-resolver.test.js: -------------------------------------------------------------------------------- 1 | const UrlResolver = require('../cli/url-resolver'); 2 | 3 | describe('UrlResolver', () => { 4 | describe('artifactForCore()', () => { 5 | it('should return a mirrored URL for normal releases', () => { 6 | const core = { 7 | version: '2.135', 8 | }; 9 | const expectedUrl = 'http://mirrors.jenkins.io/war/2.135/jenkins.war'; 10 | expect(UrlResolver.artifactForCore(core)).toEqual(expectedUrl); 11 | }); 12 | 13 | it('should return an incremental URL for an incremental version', () => { 14 | const core = { 15 | version: '2.137-rc15096.84158a22fc46', 16 | }; 17 | const expectedUrl = 'https://repo.jenkins-ci.org/incrementals/org/jenkins-ci/main/jenkins-war/2.137-rc15096.84158a22fc46/jenkins-war-2.137-rc15096.84158a22fc46.war'; 18 | expect(UrlResolver.artifactForCore(core)).toEqual(expectedUrl); 19 | }); 20 | }); 21 | describe('artifactForPlugin()', () => { 22 | it('should handle an incremental version properly', () => { 23 | const plugin = { 24 | artifactId: 'configuration-as-code', 25 | groupId: 'io.jenkins', 26 | version: '0.11-alpha-rc362.942711740b07', 27 | }; 28 | const expectedUrl = 'https://repo.jenkins-ci.org/incrementals/io/jenkins/configuration-as-code/0.11-alpha-rc362.942711740b07/configuration-as-code-0.11-alpha-rc362.942711740b07.hpi'; 29 | expect( 30 | UrlResolver.artifactForPlugin(plugin) 31 | ).toEqual(expectedUrl); 32 | }); 33 | 34 | it('should handle a normal relaese properly', () => { 35 | const plugin = { 36 | artifactId: 'blueocean', 37 | groupId: 'io.jenkins.blueocean', 38 | version: '1.7.2', 39 | }; 40 | const expectedUrl = 'https://repo.jenkins-ci.org/releases/io/jenkins/blueocean/blueocean/1.7.2/blueocean-1.7.2.hpi'; 41 | expect( 42 | UrlResolver.artifactForPlugin(plugin) 43 | ).toEqual(expectedUrl); 44 | }); 45 | }); 46 | 47 | describe('isIncremental()', () => { 48 | it('should return false on a standard semvar', () => { 49 | const plugin = { version: '1.7.2' }; 50 | expect(UrlResolver.isIncremental(plugin)).toBeFalsy(); 51 | }); 52 | 53 | it('should return true for an incremental version', () => { 54 | const plugin = { 55 | version: '0.11-alpha-rc362.942711740b07', 56 | }; 57 | expect(UrlResolver.isIncremental(plugin)).toBeTruthy(); 58 | }); 59 | }); 60 | }); 61 | -------------------------------------------------------------------------------- /docs/developer/meetings/2018-05-30-JENKINS-51299-developer-access-howto/README.adoc: -------------------------------------------------------------------------------- 1 | = 2018-05-30 -- How to provide access to Essentials logs to some Jenkins developers 2 | :toc: 3 | 4 | == Notes 5 | 6 | === Context 7 | 8 | Now 
link:https://issues.jenkins-ci.org/browse/JENKINS-49811[JENKINS-49811] (and the associated link:https://github.com/jenkinsci/jep/tree/master/jep/308[JEP 308]) are done, we need to move forward and see how we will actually provide access to the generated logs to a given subset of plugin developers. 9 | 10 | That work is tracked as https://issues.jenkins-ci.org/browse/JENKINS-51299[JENKINS-51299]. 11 | 12 | === Attendees 13 | 14 | * Olivier Vernin 15 | * Baptiste Mathus 16 | 17 | == Summary 18 | 19 | Azure Log Analytics is the service in use for logging in the new Kubernetes cluster used for the Jenkins infrastructure. 20 | 21 | * Olivier says it is not integrated, and cannot be, with the existing Jenkins LDAP. 22 | To use roles/users in Azure, it has to use the existing Active Directory setup there. 23 | So, a possibility could be to just run a job, each hour for instance, to sync the external (OpenLDAP) LDAP data, to the _Active Directory_ service in the Azure account for the Jenkins Project. 24 | The master data would stay on the external LDAP, and the Active Directory side would be in read-only and reset regularly from the master one. 25 | 26 | * DataDog was quickly discussed, but ditched because there does not seem anyway to be a way to segregate the things/logs people would have access to. 27 | 28 | * Another possibility, failing an easy path above, would be to set up a dedicated ELK cluster for those logs. 29 | But we want to avoid it because it would be time consuming to set up and operate exclusively for Jenkins Essentials needs. 30 | 31 | 32 | So, we ended up agreeing that Olivier would check what is feasible with regard to data visibility/segregation on Azure Logs Analytics side. 33 | 34 | This work is tracked through link:https://issues.jenkins-ci.org/browse/INFRA-1643[INFRA-1643] 35 | 36 | Relevant documentation for managing accounts and users in Azure: 37 | https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-manage-access#manage-accounts-and-users 38 | 39 | == Actions 40 | 41 | //// 42 | Summarize the actions that ought to be done. 43 | Explain why none are listed if none. 
44 | //// 45 | 46 | |=== 47 | | ACTION | Person 48 | | Work on link:https://issues.jenkins-ci.org/browse/INFRA-1643[INFRA-1643] to have more insights of what is doable.| Olivier 49 | |=== 50 | -------------------------------------------------------------------------------- /services/.gitignore: -------------------------------------------------------------------------------- 1 | ingest.yaml 2 | 3 | # Logs 4 | logs 5 | *.log 6 | 7 | # Runtime data 8 | pids 9 | *.pid 10 | *.seed 11 | 12 | # Directory for instrumented libs generated by jscoverage/JSCover 13 | lib-cov 14 | 15 | # Coverage directory used by tools like istanbul 16 | coverage 17 | 18 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 19 | .grunt 20 | 21 | # Compiled binary addons (http://nodejs.org/api/addons.html) 22 | build/Release 23 | 24 | # Dependency directory 25 | # Commenting this out is preferred by some people, see 26 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git- 27 | node_modules 28 | 29 | # Users Environment Variables 30 | .lock-wscript 31 | 32 | # IDEs and editors (shamelessly copied from @angular/cli's .gitignore) 33 | /.idea 34 | .project 35 | .classpath 36 | .c9/ 37 | *.launch 38 | .settings/ 39 | *.sublime-workspace 40 | 41 | # IDE - VSCode 42 | .vscode/* 43 | !.vscode/settings.json 44 | !.vscode/tasks.json 45 | !.vscode/launch.json 46 | !.vscode/extensions.json 47 | 48 | ### Linux ### 49 | *~ 50 | 51 | # temporary files which can be created if a process still has a handle open of a deleted file 52 | .fuse_hidden* 53 | 54 | # KDE directory preferences 55 | .directory 56 | 57 | # Linux trash folder which might appear on any partition or disk 58 | .Trash-* 59 | 60 | # .nfs files are created when an open file is removed but is still being accessed 61 | .nfs* 62 | 63 | ### OSX ### 64 | *.DS_Store 65 | .AppleDouble 66 | .LSOverride 67 | 68 | # Icon must end with two \r 69 | Icon 70 | 71 | 72 | # Thumbnails 73 | ._* 74 | 75 | # Files that might appear in the root of a volume 76 | .DocumentRevisions-V100 77 | .fseventsd 78 | .Spotlight-V100 79 | .TemporaryItems 80 | .Trashes 81 | .VolumeIcon.icns 82 | .com.apple.timemachine.donotpresent 83 | 84 | # Directories potentially created on remote AFP share 85 | .AppleDB 86 | .AppleDesktop 87 | Network Trash Folder 88 | Temporary Items 89 | .apdisk 90 | 91 | ### Windows ### 92 | # Windows thumbnail cache files 93 | Thumbs.db 94 | ehthumbs.db 95 | ehthumbs_vista.db 96 | 97 | # Folder config file 98 | Desktop.ini 99 | 100 | # Recycle Bin used on file shares 101 | $RECYCLE.BIN/ 102 | 103 | # Windows Installer files 104 | *.cab 105 | *.msi 106 | *.msm 107 | *.msp 108 | 109 | # Windows shortcuts 110 | *.lnk 111 | 112 | # Others 113 | lib/ 114 | data/ 115 | ingest.json 116 | update-center.json 117 | update-center-experimental.json 118 | commit.txt 119 | -------------------------------------------------------------------------------- /services/src/libs/sentry.js: -------------------------------------------------------------------------------- 1 | 2 | const logger = require('winston'); 3 | const Raven = require('raven'); 4 | 5 | 6 | /** 7 | * The sentry module is responsible for passing messages along to 8 | * the sentry.io server. 9 | */ 10 | class Sentry { 11 | /** 12 | * Initializes the Sentry library with the URL to connect to. This must be called at App startup. 
13 | * 14 | * @param {string} sentryUrl 15 | */ 16 | constructor(sentryUrl) { 17 | if (!sentryUrl) { 18 | logger.error('No sentry url defined.'); 19 | return; 20 | } 21 | this.raven = new Raven.Client(); 22 | this.raven.config(sentryUrl); 23 | } 24 | 25 | /** 26 | * Map java.util.logging log levels to Sentry.io log levels. 27 | * If the level is missing or invalid, the fallback is "info". 28 | * @param {string} level 29 | * @see {@link https://docs.oracle.com/javase/8/docs/api/java/util/logging/Level.html|java.util.logging.Level} 30 | * @see {@link https://docs.sentry.io/clients/node/usage/#raven-node-additional-data|Sentry.io - Raven - additional data} 31 | */ 32 | mapJavaLogLevel(level) { 33 | if (!level) { 34 | logger.error('Missing log level.'); 35 | return 'info'; 36 | } 37 | 38 | switch (level.toUpperCase()) { 39 | case 'SEVERE': 40 | return 'error'; 41 | case 'WARNING': 42 | return 'warning'; 43 | case 'INFO': 44 | case 'CONFIG': 45 | return 'info'; 46 | case 'FINE': 47 | case 'FINER': 48 | case 'FINEST': 49 | return 'debug'; 50 | default: 51 | logger.warn(`Unknown log level "${level}", using "info"`); 52 | return 'info'; 53 | } 54 | } 55 | 56 | /** 57 | * Send the JSON output to Sentry.io. JSON format is from the Error Telemetry API. 58 | * @param {json} data 59 | */ 60 | sendOutput(data) { 61 | if (!data) { 62 | logger.error('Missing data.'); 63 | return; 64 | } 65 | 66 | const errorData = { 67 | level: this.mapJavaLogLevel(data.log.level), 68 | logger: data.log.name, 69 | user: { 70 | /* 71 | * Different docs on sentry.io suggest different fields here, so why 72 | * not both? 73 | */ 74 | id: data.uuid, 75 | name: data.uuid, 76 | }, 77 | extra: { 78 | id: data.uuid, 79 | uuid: data.uuid, 80 | source: data.log, 81 | flavor: data.flavor, 82 | updateLevel: data.updateLevel 83 | }, 84 | }; 85 | 86 | this.raven.captureMessage(data.log.message, errorData); 87 | } 88 | } 89 | 90 | module.exports = Sentry; 91 | -------------------------------------------------------------------------------- /distribution/client/src/lib/ui.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * The UI module is responsible for preparing data to be served by the 3 | * client front-end, which is expected to be a single page web application with 4 | * a socket.io connection to this module 5 | */ 6 | 7 | const feathers = require('@feathersjs/feathers'); 8 | const express = require('@feathersjs/express'); 9 | const socketio = require('@feathersjs/socketio'); 10 | const configuration = require('@feathersjs/configuration'); 11 | const logger = require('winston'); 12 | 13 | /* 14 | * Simple shim feathers service just to enable events 15 | */ 16 | class MessageService { 17 | protected readonly app : any; 18 | protected recent : Array<any>; 19 | 20 | constructor(app) { 21 | this.app = app; 22 | this.recent = []; 23 | } 24 | 25 | async find() { 26 | return this.recent; 27 | } 28 | 29 | async create(data, params) { 30 | if ((params) && (params.log)) { 31 | logger[params.log](data, params.error); 32 | } else { 33 | logger.debug(`Publishing to the UI: message='${JSON.stringify(data)}', params='${JSON.stringify(params)}'`); 34 | } 35 | this.recent.push(data); 36 | // Only keep the last 100 items 37 | this.recent = this.recent.slice(-100); 38 | return Promise.resolve(data); 39 | } 40 | } 41 | 42 | class UI { 43 | protected readonly app : any; 44 | protected server : any; 45 | 46 | constructor() { 47 | const app = express(feathers()); 48 | this.app = app; 49 | 50 |
app.configure(configuration()); 51 | app.configure(express.rest()); 52 | app.configure(socketio()); 53 | app.use('messages', new MessageService(app)); 54 | 55 | /* 56 | * Set up the socket.io channel 57 | */ 58 | app.on('connection', conn => this.app.channel('anonymous').join(conn)); 59 | // Publish all events into the anonymous channel 60 | app.publish(() => app.channel('anonymous')); 61 | } 62 | 63 | /* 64 | * Publish is an explicit method rather than hooking behavior in the winston 65 | * logger because not everything that should be in the logs should 66 | * necessarily be presented to the user. 67 | * 68 | */ 69 | publish(message, params?: any) { 70 | return this.app.service('messages').create({ 71 | message: message, 72 | timestamp: Date.now(), 73 | }, params); 74 | } 75 | 76 | serve() { 77 | this.app.use('/', express.static(this.app.get('public'))); 78 | this.app.use(express.notFound()); 79 | this.server = this.app.listen(this.app.get('port')); 80 | return this; 81 | } 82 | } 83 | 84 | export default new UI(); 85 | -------------------------------------------------------------------------------- /services/src/services/update/update.hooks.js: -------------------------------------------------------------------------------- 1 | const authentication = require('@feathersjs/authentication'); 2 | const errors = require('@feathersjs/errors'); 3 | const SKIP = require('@feathersjs/feathers').SKIP; 4 | 5 | const dbtimestamp = require('../../hooks/dbtimestamp'); 6 | const ensureMatchingUUID = require('../../hooks/ensureuuid'); 7 | const internalOnly = require('../../hooks/internalonly'); 8 | const internalApi = require('../../hooks/internalapi'); 9 | 10 | const updateApiRequiredFields = [ 11 | 'commit', 12 | 'manifest' 13 | ]; 14 | 15 | class UpdateHooks { 16 | constructor() { 17 | } 18 | 19 | /* 20 | * For create() methods, add the default `channel` to the data which will be 21 | * "general" until richer channel management is added 22 | */ 23 | defaultChannel(context) { 24 | context.data.channel = 'general'; 25 | return context; 26 | } 27 | 28 | checkUpdateFormat(hook) { 29 | if (!(hook.data) || !Object.keys(hook.data).length) { 30 | throw new errors.BadRequest('Missing data'); 31 | } 32 | updateApiRequiredFields.forEach( field => { 33 | if (!hook.data[field]) { 34 | throw new errors.BadRequest(`Missing required field '${field}'`); 35 | } 36 | }); 37 | } 38 | 39 | preventRedundantCommits(context) { 40 | return context.app.service('update').find({ 41 | query: { 42 | channel: context.data.channel, 43 | commit: context.data.commit, 44 | }, 45 | }) 46 | .then((records) => { 47 | if (records.length > 0) { 48 | context.statusCode = 304; 49 | context.result = { 50 | error: 'Politely declining to create a redudant Update for this commit', 51 | }; 52 | return SKIP; 53 | } 54 | return context; 55 | }); 56 | } 57 | 58 | getHooks() { 59 | return { 60 | before: { 61 | all: [ 62 | ], 63 | find: [ 64 | internalOnly, 65 | ], 66 | get: [ 67 | authentication.hooks.authenticate(['jwt']), 68 | ensureMatchingUUID, 69 | ], 70 | create: [ 71 | internalApi, 72 | this.checkUpdateFormat, 73 | dbtimestamp('createdAt'), 74 | this.defaultChannel, 75 | this.preventRedundantCommits, 76 | ], 77 | update: [ 78 | ], 79 | patch: [ 80 | internalApi, 81 | dbtimestamp('updatedAt'), 82 | ], 83 | remove: [ 84 | internalOnly, 85 | ], 86 | }, 87 | 88 | after: { 89 | find: [ 90 | ], 91 | }, 92 | error: {} 93 | }; 94 | } 95 | } 96 | 97 | module.exports = new UpdateHooks(); 98 | 
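/*
 * For illustration (the actual wiring lives in update.service.js, which is
 * not shown here): a Feathers service applies a hooks object such as this
 * one at registration time, roughly along the lines of:
 *
 *   const updateHooks = require('./update.hooks');
 *   app.service('update').hooks(updateHooks.getHooks());
 *
 * With that in place, an external create() runs internalApi,
 * checkUpdateFormat, dbtimestamp('createdAt'), defaultChannel and
 * preventRedundantCommits, in that order, before the record is persisted.
 */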
-------------------------------------------------------------------------------- /services/acceptance/services/registration.test.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const request = require('request-promise'); 3 | const h = require('../helpers'); 4 | 5 | describe('Registration service acceptance tests', () => { 6 | beforeAll(done => h.startApp(done)); 7 | afterAll(done => h.stopApp(done)); 8 | 9 | describe('create()', () => { 10 | it('should create a UUID', async () => { 11 | return request({ 12 | url: h.getUrl('/registration'), 13 | method: 'POST', 14 | json: true, 15 | resolveWithFullResponse: true, 16 | body: { 17 | pubKey: 'pretend-pubkey', 18 | curve: 'secp256k1' 19 | } 20 | }) 21 | .then(res => { 22 | expect(res.statusCode).toEqual(201); 23 | expect(res.body.uuid).toBeTruthy(); 24 | }) 25 | .catch((err) => assert.fail(err)); 26 | }); 27 | 28 | it('should fail when there is no curve', async () => { 29 | return request({ 30 | url: h.getUrl('/registration'), 31 | method: 'POST', 32 | json: true, 33 | body: { 34 | pubKey: 'pretend-pubkey' 35 | } 36 | }) 37 | .then(() => assert.fail('Should have failed')) 38 | .catch(err => expect(err.statusCode).toEqual(400)); 39 | }); 40 | }); 41 | 42 | it('should not support lookups', () => { 43 | return request({ 44 | url: h.getUrl('/registration/some-uuid'), 45 | }) 46 | .then(() => assert.fail('Got a 200 response')) 47 | .catch(err => expect(err.statusCode).toEqual(405)); 48 | }); 49 | 50 | it('should not support updates', () => { 51 | return request({ 52 | url: h.getUrl('/registration/some-uuid'), 53 | method: 'PUT', 54 | }) 55 | .then(() => assert.fail('Got a 200 response')) 56 | .catch(err => expect(err.statusCode).toEqual(405)); 57 | }); 58 | 59 | it('should not support patches', () => { 60 | return request({ 61 | url: h.getUrl('/registration/some-uuid'), 62 | method: 'PATCH', 63 | }) 64 | .then(() => assert.fail('Got a 200 response')) 65 | .catch(err => expect(err.statusCode).toEqual(405)); 66 | }); 67 | 68 | it('should not support deletes', () => { 69 | return request({ 70 | url: h.getUrl('/registration/some-uuid'), 71 | method: 'DELETE', 72 | }) 73 | .then(() => assert.fail('Got a 200 response')) 74 | .catch(err => expect(err.statusCode).toEqual(405)); 75 | }); 76 | 77 | it('should not support finds', () => { 78 | return request({ 79 | url: h.getUrl('/registration'), 80 | }) 81 | .then(() => assert.fail('Got a 200 response')) 82 | .catch(err => expect(err.statusCode).toEqual(405)); 83 | }); 84 | }); 85 | -------------------------------------------------------------------------------- /services/test/services/registration.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-unused-vars */ 2 | 3 | const assert = require('assert'); 4 | const logger = require('winston'); 5 | const app = require('../../src/app'); 6 | 7 | describe('\'registration\' service', () => { 8 | it('registered the service', () => { 9 | const service = app.service('registration'); 10 | 11 | assert.ok(service, 'Registered the service'); 12 | }); 13 | 14 | describe('registration of an instance', () => { 15 | it('should fail with empty parameters', () => { 16 | const service = app.service('registration'); 17 | return service.create() 18 | .then(() => assert.fail('Should have failed to create()')) 19 | .catch((err) => assert.ok(err.message.match('^A data object must be provided'))); 20 | }); 21 | 22 | it('should fail without a 
curve', () => { 23 | const service = app.service('registration'); 24 | return service.create({ 25 | pubKey: 'ImagineThisIsAnECDHPublicKeyHex' 26 | }) 27 | .then(() => assert.fail('Should have failed')) 28 | .catch((err) => assert.ok(err.message.match('^Client must provide a curve'))); 29 | }); 30 | 31 | it('should return a uuid for successful registration', async () => { 32 | const service = app.service('registration'); 33 | const reg = await service.create({ 34 | pubKey: 'ImagineThisIsAnECDHPublicKeyHex', 35 | curve: 'secp256k1' 36 | }); 37 | 38 | assert.ok(reg.uuid, 'Expected a uuid to be generated on registration'); 39 | }); 40 | 41 | it('should persist a uuid and pubKey on registration', async () => { 42 | const service = app.service('registration'); 43 | const reg = await service.create({ 44 | pubKey: 'ImagineThisIsAnECDHPublicKeyHex', 45 | curve: 'secp256k1' 46 | }); 47 | 48 | assert.ok(reg); 49 | assert.ok(reg.curve); 50 | assert.ok(reg.createdAt); 51 | }); 52 | }); 53 | 54 | describe('looking up a registration', () => { 55 | beforeEach(async () => { 56 | this.reg = await app.service('registration').create({ 57 | pubKey: 'a-hex-key', 58 | curve: 'secp256k1' 59 | }); 60 | }); 61 | 62 | it('should be able to look up by uuid', async () => { 63 | assert.ok(this.reg.uuid, 'Setup did not create the registration properly'); 64 | const service = app.service('registration'); 65 | 66 | const rows = await service.find({ query: { uuid: this.reg.uuid }}); 67 | assert.equal(rows.total, 1, 'Should only have one record per uuid'); 68 | const record = rows.data[0]; 69 | 70 | assert.equal(record.uuid, this.reg.uuid); 71 | assert.equal(record.curve, this.reg.curve); 72 | }); 73 | }); 74 | }); 75 | -------------------------------------------------------------------------------- /distribution/client/public/502.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Uh oh! 7 | 8 | 42 | 43 | 44 |
45 | … 79 | [502.html markup was stripped during extraction; the surviving page text reads: "Jenkins Evergreen isn't too magical!", "Jenkins Evergreen experienced an error!", "Something went wrong and Jenkins does not appear to be online!", and "You may view the debug information using the Evergreen Status page."]
80 | 81 | 82 | -------------------------------------------------------------------------------- /distribution/tests/rollback-tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Note: would have used set -euo pipefail, but ./shunit2 unfortunately fails hard with this :-(. 3 | 4 | current_directory=$(dirname "$0") 5 | # TODO: use the $( cd blah ; ... ) trick to un-relativize path below 6 | export PATH="$current_directory/../../tools:$PATH" 7 | 8 | echo "Debugging: PATH=***$PATH***" 9 | 10 | # shellcheck source=tests/utilities 11 | . "$current_directory/utilities" 12 | 13 | oneTimeSetUp() { 14 | setup_container_under_test 15 | 16 | echo "PWD=$( pwd )" 17 | 18 | upload_update_level "./tests/rollback/1-ingest-ok.json" 19 | 20 | wait_for_jenkins 21 | } 22 | 23 | test_rollback() { 24 | 25 | # Check UL is the correct one (UL 1 or UL 2?!) 26 | # shellcheck disable=SC2016 27 | docker exec "$container_under_test" bash -c 'ls $EVERGREEN_DATA' 28 | # extract 2 from `"level":2` from the updates.json file 29 | # shellcheck disable=SC2016 30 | correctUL=$( docker exec "$container_under_test" bash -c 'cat $EVERGREEN_DATA/updates.json' | \ 31 | grep --only-matching '"level":.' | \ 32 | cut -d : -f 2 ) 33 | assertEquals "Command should have succeeded" 0 "$?" 34 | assertEquals "Should be UL 2" 2 "$correctUL" 35 | 36 | # FIXME: un-harcode the sleep below. We need to wait for the full startup from above, healthcheck included. 37 | # what can happen here is that we'll reach the upload_update_level call below *before* the healthcheck finished 38 | # which will make the new pushed update for UL3 to be ignored because an "update is already running" when calling Update.applyUpdates() 39 | sleep 10 40 | 41 | # upload borked update level to backend 42 | echo "UPLOADING BROKEN UPDATE LEVEL (MISSING CREDENTIALS PLUGIN)" 43 | upload_update_level "./tests/rollback/2-ingest-borked.json" 44 | 45 | # wait enough until upgrade happens, then rollback: check UL is the same as before 46 | now=$( date --iso-8601=seconds ) 47 | echo "Waiting for Jenkins to restart a first time to broken UL3, then back to UL2 (using logs --since=$now)" 48 | wait_for_jenkins "$now" 49 | 50 | # let's now check the upload and upgrade attempt to borked UL3 *actually* happened 51 | # because if this didn't, then we'd still on UL2, but not testing there was a rollback somewhere 52 | # shellcheck disable=SC2016 53 | beforeLastUpdate=$( docker exec "$container_under_test" bash -c 'cat $EVERGREEN_DATA/updates.auditlog' | tail -2 | head -1 | jq -r .updateLevel ) 54 | # shellcheck disable=SC2016 55 | lastUpdate=$( docker exec "$container_under_test" bash -c 'cat $EVERGREEN_DATA/updates.auditlog' | tail -1 | jq -r .updateLevel ) 56 | 57 | assertEquals "Previous UL should be 3, the one expected to be rolled back" 3 "$beforeLastUpdate" 58 | assertEquals "UL should be 2 (gone back to 2 from borked 3)" 2 "$lastUpdate" 59 | 60 | } 61 | 62 | . 
./shunit2/shunit2 63 | -------------------------------------------------------------------------------- /services/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "evergreen-services", 3 | "description": "Evergreen hosted service layer", 4 | "version": "0.0.0", 5 | "homepage": "", 6 | "main": "src", 7 | "keywords": [ 8 | "feathers" 9 | ], 10 | "author": {}, 11 | "contributors": [], 12 | "bugs": {}, 13 | "directories": { 14 | "lib": "src", 15 | "test": "test/" 16 | }, 17 | "engines": { 18 | "node": "^9.0.0", 19 | "npm": ">= 3.0.0" 20 | }, 21 | "scripts": { 22 | "test": "npm run eslint && npm run jest", 23 | "eslint": "eslint src/. test/. acceptance/. cli-test/. cli/. --config .eslintrc.json", 24 | "start": "node src/", 25 | "jest": "jest --bail --forceExit ./test", 26 | "acceptance": "jest -i --bail --coverageDirectory=coverage/acceptance --forceExit ./acceptance" 27 | }, 28 | "dependencies": { 29 | "@feathersjs/authentication": "^2.1.11", 30 | "@feathersjs/authentication-jwt": "^2.0.5", 31 | "@feathersjs/authentication-local": "^1.2.5", 32 | "@feathersjs/cli": "^3.8.0", 33 | "@feathersjs/configuration": "^1.0.2", 34 | "@feathersjs/errors": "^3.3.4", 35 | "@feathersjs/express": "^1.2.7", 36 | "@feathersjs/feathers": "^3.2.3", 37 | "@feathersjs/socketio": "^3.2.6", 38 | "compression": "^1.7.3", 39 | "continuation-local-storage": "^3.2.1", 40 | "cores": "^0.8.5", 41 | "cors": "^2.8.4", 42 | "ejs": "^2.6.1", 43 | "elliptic": "^6.4.1", 44 | "event-stream": "^3.3.4", 45 | "feathers-memory": "^2.1.3", 46 | "feathers-sequelize": "^3.1.2", 47 | "feathers-swagger": "^0.7.2", 48 | "helmet": "^3.13.0", 49 | "jsonwebtoken": "^8.3.0", 50 | "object-hash": "^1.3.0", 51 | "pg": "^7.4.3", 52 | "raven": "^2.6.4", 53 | "sequelize": "^4.38.1", 54 | "sequelize-cli": "^4.0.0", 55 | "serve-favicon": "^2.5.0", 56 | "url-parse": "^1.4.3", 57 | "uuid": "^3.3.2", 58 | "winston": "^2.4.4" 59 | }, 60 | "devDependencies": { 61 | "@feathersjs/socketio-client": "^1.1.0", 62 | "cheerio": "^1.0.0-rc.2", 63 | "node-version-compare": "^1.0.1", 64 | "eslint": "^4.19.1", 65 | "hoek": "^5.0.4", 66 | "jest": "^23.6.0", 67 | "js-yaml": "^3.12.0", 68 | "nodemon": "^1.18.4", 69 | "request": "^2.88.0", 70 | "request-promise": "^4.2.2", 71 | "socket.io-client": "^2.1.1", 72 | "yargs": "^12.0.1" 73 | }, 74 | "jest": { 75 | "collectCoverage": true, 76 | "coveragePathIgnorePatterns": [ 77 | "/src/sequelize-swagger.js", 78 | "/src/sequelize.js", 79 | "/src/app.js", 80 | "/src/homepage.js" 81 | ], 82 | "coverageReporters": [ 83 | "json", 84 | "lcov", 85 | "text-summary" 86 | ], 87 | "coverageThreshold": { 88 | "global": { 89 | "statements": 85, 90 | "branches": 63, 91 | "functions": 70, 92 | "lines": 86 93 | } 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /services/cli/ingest.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const fs = require('fs'); 4 | const logger = require('winston'); 5 | const request = require('request-promise'); 6 | 7 | const UrlResolver = require('./url-resolver'); 8 | 9 | /* 10 | * Class responsible for computing the ingest.json based off of a given 11 | * realized essentials.yaml file 12 | */ 13 | class Ingest { 14 | constructor(manifest) { 15 | this.manifest = manifest; 16 | this.ingest = { 17 | timestamp: Date.now(), 18 | core: {}, 19 | plugins: [], 20 | environments: {}, 21 | }; 22 | } 23 | 24 | /* 25 | * Use the manifest 
passed in to compute all the URLs and fetch the checksums 26 | * for all the references 27 | * 28 | * @return {Promise} 29 | */ 30 | resolveReferences() { 31 | let tasks = []; 32 | 33 | const coreUrl = UrlResolver.artifactForCore(this.manifest.data.status.core); 34 | 35 | this.ingest.core = { 36 | url: coreUrl, 37 | checksum: {}, 38 | }; 39 | 40 | tasks.push( 41 | request(`${coreUrl}.sha256`) 42 | .then((res) => { 43 | Object.assign(this.ingest.core.checksum, { 44 | type: 'sha256', 45 | signature: res.split(' ')[0] 46 | }); 47 | }) 48 | ); 49 | 50 | this.manifest.data.status.plugins 51 | .forEach((plugin) => { 52 | this.ingest.plugins.push(this.fetchDataForPlugin(tasks, plugin)); 53 | }); 54 | 55 | const environments = this.manifest.data.status.environments; 56 | environments.forEach((environment) => { 57 | let env = { 58 | plugins: [], 59 | }; 60 | this.ingest.environments[environment.name] = env; 61 | 62 | if (environment.plugins) { 63 | environment.plugins.forEach((plugin) => { 64 | env.plugins.push(this.fetchDataForPlugin(tasks, plugin)); 65 | }); 66 | } 67 | }); 68 | 69 | return Promise.all(tasks); 70 | } 71 | 72 | fetchDataForPlugin(tasks, plugin) { 73 | const url = UrlResolver.artifactForPlugin(plugin); 74 | let record = Object.assign(plugin, { 75 | url: url, 76 | checksum: {}, 77 | }); 78 | 79 | tasks.push( 80 | this.fetchHeadersFor(url).then((res) => { 81 | Object.assign(record.checksum, { 82 | type: 'sha256', 83 | signature: res.headers['x-checksum-sha256'], 84 | }); 85 | }) 86 | ); 87 | return record; 88 | } 89 | 90 | fetchHeadersFor(url) { 91 | return request({ 92 | method: 'HEAD', 93 | uri: url, 94 | resolveWithFullResponse: true, 95 | }); 96 | } 97 | 98 | saveSync() { 99 | const fileName = 'ingest.json'; 100 | logger.info(`Writing out ingest to ${fileName}`); 101 | return fs.writeFileSync(fileName, 102 | JSON.stringify(this.ingest, undefined, 2)); 103 | } 104 | } 105 | module.exports = Ingest; 106 | -------------------------------------------------------------------------------- /docs/developer/meetings/2018-03-18-JENKINS-49406-quality-bar/README.adoc: -------------------------------------------------------------------------------- 1 | = 2018-03-18 - How to test the data snapshotting system (JENKINS-49406) 2 | 3 | We wanted to discuss testing link:https://issues.jenkins-ci.org/browse/JENKINS-49406[https://issues.jenkins-ci.org/browse/JENKINS-49406] itself, but also possibly the overall quality bar footnote:[the different quality requirements we make sure to test, generally defining them *before* the software is developed, and met before it goes to production :-)] for Jenkins Essentials. 4 | 5 | 6 | == Attendees 7 | 8 | * Raul Arabaolaza 9 | * Baptiste Mathus 10 | 11 | == Notes 12 | 13 | RAUL: This is intended for development time, not deployment like 14 | synthetic transactions 15 | 16 | Idea is Try an upgrade, test all works properly perform a rollback and 17 | test again all is working 18 | 19 | BAPTISTE: We are likely to be able to reuse the “health check” logic 20 | that will have to be developed for evergreen-client itself in 21 | production, to check if Jenkins is running fine. 22 | 23 | RAUL: To read and check the jep part related to testing, also to tell 24 | Isa about it 25 | 26 | RAUL: critical: we need to test the health check :) 27 | 28 | QUESTION: Should we try to implement synthetic transactions as they were 29 | discussed in seville meeting for this or go with ATH which already 30 | exists? 
31 | 32 | PROPOSALS for Rollback testing: 33 | 34 | * Make sure there is enough coverage that all possible rollback paths are covered 35 | ** Create a quality bar for rollbacks 36 | *** Make sure you are including some failing scenarios in the quality bar 37 | **** Not only test the happy path, for example: 38 | ***** Made a failed upgrade, test that we are able to detect the upgrade as 39 | a failure, rollback and test that the instance is working perfectly 40 | *****  Made a failed upgrade, test that we are able to detect the upgrade as 41 | a failure, made a failed rollback and test that we are able to detect 42 | the rollback failed 43 | ***** Make sure that in case of different chained rollback strategies we 44 | test each and every one of them 45 | 46 | * Create a healthcheck url to be invoked via CURL for example 47 | 48 | ** We can create a plugin that provides that healthcheck url and 49 | integrate with ST 50 | ** Maybe some work from metrics plugin can be reused 51 | 52 | * Some possible testing flows: 53 | 54 | ** Upgrade run health check (ST), rollback, ST again ¿and ATH? 55 | 56 | *** No work yet on ST that I am aware of, but ST can be later reused for 57 | deployment testing 58 | 59 | ** Run ATH, rollback, ATH again 60 | 61 | *** Some work already done, but ATH is maybe too heavy and coverage is 62 | pretty poor and based on individual plugins not in coherent sets of them 63 | 64 | * This should be done in the “pre canary, staging, or whatever is named” 65 | instances because we want to catch any possible degradation or problems 66 | in long running instances 67 | 68 | SIDE NOTE: Make sure that jenkins is not going to run any job when 69 | upgrading or rolling back to minimize the number of files to worry about 70 | -------------------------------------------------------------------------------- /services/test/hooks/ensureuuid.test.js: -------------------------------------------------------------------------------- 1 | const errors = require('@feathersjs/errors'); 2 | const ensureMatchingUUID = require('../../src/hooks/ensureuuid'); 3 | 4 | describe('ensureuuid hook', () => { 5 | beforeEach(() => { 6 | this.context = { 7 | params: { 8 | provider: 'rest', 9 | user: {}, 10 | query: {}, 11 | }, 12 | data: {}, 13 | }; 14 | }); 15 | 16 | it('should fail if the request does not include a UUID', () => { 17 | expect(() => { 18 | ensureMatchingUUID(this.context); 19 | }).toThrow(errors.BadRequest); 20 | }); 21 | 22 | it('should fail if the JWT uuid and the given UUID are identical', () => { 23 | this.context.data.uuid = 'who i want to be'; 24 | this.context.params.user = { uuid: 'who i be' }; 25 | expect(() => { 26 | ensureMatchingUUID(this.context); 27 | }).toThrow(errors.NotAuthenticated); 28 | }); 29 | 30 | describe('for internal service calls', () => { 31 | beforeEach(() => { 32 | delete this.context.params.provider; 33 | }); 34 | 35 | it('should return successfully', () => { 36 | expect(ensureMatchingUUID(this.context)); 37 | }); 38 | }); 39 | 40 | describe('for find methods which use query parameters', () => { 41 | beforeEach(() => { 42 | this.context.method = 'find'; 43 | }); 44 | 45 | it('should fail with an omitted query parameter', () => { 46 | expect(() => { 47 | ensureMatchingUUID(this.context); 48 | }).toThrow(errors.BadRequest); 49 | }); 50 | 51 | it('should allow the request with a matching `uuid` query param', () => { 52 | let uuid = 'jest-uuid'; 53 | /* This is the property name that JWT would extract to */ 54 | this.context.params.user.uuid = uuid; 55 | 
this.context.params.query = { uuid: uuid }; 56 | 57 | expect(ensureMatchingUUID(this.context)); 58 | }); 59 | 60 | it('should fail without matching token and query param `uuid`s', () => { 61 | let uuid = 'jest-uuid'; 62 | /* This is the property name that JWT would extract to */ 63 | this.context.params.user.uuid = uuid; 64 | this.context.params.query = { uuid: 'pickles', }; 65 | 66 | expect(() => { 67 | expect(ensureMatchingUUID(this.context)); 68 | }).toThrow(errors.NotAuthenticated); 69 | }); 70 | }); 71 | 72 | describe('for get methods which use the uuid as an id', () => { 73 | let uuid = 'jest-uuid'; 74 | 75 | beforeEach(() => { 76 | this.context.method = 'get'; 77 | /* This is the property name that JWT would extract to */ 78 | this.context.params.user.uuid = uuid; 79 | }); 80 | 81 | it('should be NotAuthenticated when the `id` doesn\'t match the JWT', () => { 82 | this.context.id = 'pickles'; 83 | 84 | expect(() => { 85 | expect(ensureMatchingUUID(this.context)); 86 | }).toThrow(errors.NotAuthenticated); 87 | }); 88 | 89 | it('should allow the request when the `id` matches', () => { 90 | this.context.id = uuid; 91 | expect(ensureMatchingUUID(this.context)).toBe(this.context); 92 | }); 93 | }); 94 | }); 95 | -------------------------------------------------------------------------------- /distribution/client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "evergreen-client", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "src/client.js", 6 | "scripts": { 7 | "start": "node build/client.js", 8 | "pretest": "eslint src/. test/. --config .eslintrc.json", 9 | "eslint": "npm run pretest", 10 | "test": "jest", 11 | "postinstall": "patch-package" 12 | }, 13 | "author": "R Tyler Croy", 14 | "license": "GPL-3.0", 15 | "devDependencies": { 16 | "@types/feathersjs__feathers": "^3.0.5", 17 | "@types/jest": "^23.3.3", 18 | "@types/node": "^10.11.4", 19 | "@types/tmp": "0.0.33", 20 | "asciidoctor.js": "^1.5.7-rc.1", 21 | "css-loader": "^1.0.0", 22 | "eslint": "^4.19.1", 23 | "hoek": "^5.0.4", 24 | "html-webpack-plugin": "^3.2.0", 25 | "jest": "^23.6.0", 26 | "jest-html-reporter": "^2.4.2", 27 | "memfs": "^2.9.4", 28 | "source-map-loader": "^0.2.4", 29 | "style-loader": "^0.23.0", 30 | "ts-jest": "^23.10.3", 31 | "typescript": "^3.1.1", 32 | "webpack": "^4.17.1", 33 | "webpack-cli": "^3.1.0" 34 | }, 35 | "dependencies": { 36 | "@feathersjs/authentication-client": "^1.0.4", 37 | "@feathersjs/configuration": "^2.0.0", 38 | "@feathersjs/express": "^1.2.3", 39 | "@feathersjs/feathers": "^3.1.7", 40 | "@feathersjs/rest-client": "^1.4.1", 41 | "@feathersjs/socketio": "^3.2.2", 42 | "@feathersjs/socketio-client": "^1.1.0", 43 | "cron": "^1.3.0", 44 | "elliptic": "^6.4.1", 45 | "eventsource": "^1.0.5", 46 | "mkdirp": "^0.5.1", 47 | "node-stream-zip": "^1.7.0", 48 | "patch-package": "^5.1.1", 49 | "promise-request-retry": "^1.0.1", 50 | "request-promise": "^4.2.2", 51 | "socket.io-client": "^2.1.1", 52 | "tail": "^1.3.0", 53 | "tslib": "^1.9.3", 54 | "unzip": "^0.1.11", 55 | "url-parse": "^1.4.3", 56 | "uuid": "^3.3.2", 57 | "winston": "^2.4.3", 58 | "xmlrpc": "^1.3.2" 59 | }, 60 | "jest": { 61 | "collectCoverage": true, 62 | "coveragePathIgnorePatterns": [ 63 | "/ui/index.js", 64 | "/src/lib/ui.js" 65 | ], 66 | "transform": { 67 | "^.+\\.tsx?$": "ts-jest" 68 | }, 69 | "testRegex": "(/test/.*|(\\.|/)(test|spec))\\.(jsx?|tsx?)$", 70 | "coverageReporters": [ 71 | "json", 72 | "lcov", 73 | "text-summary" 74 | ], 75 | 
"moduleFileExtensions": [ 76 | "ts", 77 | "tsx", 78 | "js", 79 | "jsx", 80 | "json", 81 | "node" 82 | ], 83 | "coverageThreshold": { 84 | "global": { 85 | "statements": 54, 86 | "branches": 41, 87 | "functions": 45, 88 | "lines": 54 89 | } 90 | }, 91 | "reporters": [ 92 | "default", 93 | [ 94 | "./node_modules/jest-html-reporter", 95 | { 96 | "pageTitle": "Distribution Client Test Report", 97 | "outputPath": "build/test-report.html", 98 | "includeFailureMsg": true, 99 | "includeConsoleLog": true 100 | } 101 | ] 102 | ] 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /distribution/client/test/storage.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock('fs'); 2 | 3 | const fs = require('fs'); 4 | const mkdirp = require('mkdirp'); 5 | const path = require('path'); 6 | 7 | import h from '../testlib/helpers'; 8 | import Storage from '../src/lib/storage'; 9 | 10 | describe('The storage module', () => { 11 | let dir = '/tmp'; 12 | beforeEach(() => { 13 | process.env.EVERGREEN_HOME = dir; 14 | /* Make sure memfs is flushed every time */ 15 | fs.volume.reset(); 16 | mkdirp.sync(dir); 17 | }); 18 | 19 | describe('homeDirectory()', () => { 20 | it('should return a path', () => { 21 | const home = Storage.homeDirectory(); 22 | // this should look like a path 23 | expect(path.basename(home)).toBeTruthy(); 24 | }); 25 | }); 26 | 27 | describe('jenkinsHome()', () => { 28 | it('should return a path', () => { 29 | const home = Storage.jenkinsHome(); 30 | 31 | // this should look like a path 32 | expect(path.basename(home)).toBeTruthy(); 33 | expect(home.startsWith(Storage.homeDirectory())).toBeTruthy(); 34 | }); 35 | }); 36 | 37 | describe('pluginsDirectory()', () => { 38 | it('should return a path', () => { 39 | const plugins = Storage.pluginsDirectory(); 40 | 41 | // this should look like a path 42 | expect(path.basename(plugins)).toBeTruthy(); 43 | expect(plugins.startsWith(Storage.jenkinsHome())).toBeTruthy(); 44 | }); 45 | }); 46 | 47 | describe('removePlugins()', () => { 48 | it('should return cleanly on empty plugins', async () => { 49 | expect(() => { 50 | Storage.removePlugins(); 51 | }).not.toThrow(); 52 | }); 53 | it('should not error if file not found', async () => { 54 | expect(() => { 55 | Storage.removePlugins(['not-found']); 56 | }).not.toThrow(); 57 | }); 58 | it('should remove all files in a list', async () => { 59 | const filenames = ['first', 'second', 'third', 'fourth']; 60 | const pluginPath = Storage.pluginsDirectory(); 61 | mkdirp.sync(pluginPath); 62 | filenames.forEach((filename) => { 63 | fs.mkdirSync(`${pluginPath}/${filename}`) 64 | h.touchFile(`${pluginPath}/${filename}/testfile`); // make sure we remove a directory with files 65 | h.touchFile(`${pluginPath}/${filename}.hpi`); 66 | h.touchFile(`${pluginPath}/${filename}.jpi`); 67 | expect(h.checkFileExists(`${pluginPath}/${filename}`)).toBeTruthy(); 68 | expect(h.checkFileExists(`${pluginPath}/${filename}/testfile`)).toBeTruthy(); 69 | expect(h.checkFileExists(`${pluginPath}/${filename}.hpi`)).toBeTruthy(); 70 | expect(h.checkFileExists(`${pluginPath}/${filename}.jpi`)).toBeTruthy(); 71 | }); 72 | await Storage.removePlugins(filenames); 73 | filenames.forEach((filename) => { 74 | expect(h.checkFileExists(`${pluginPath}/${filename}/testfile`)).toBeFalsy(); 75 | expect(h.checkFileExists(`${pluginPath}/${filename}`)).toBeFalsy(); 76 | expect(h.checkFileExists(`${pluginPath}/${filename}.hpi`)).toBeFalsy(); 77 | 
expect(h.checkFileExists(`${pluginPath}/${filename}.jpi`)).toBeFalsy(); 78 | }); 79 | }); 80 | }); 81 | }); 82 | -------------------------------------------------------------------------------- /services/cli-test/plugin-manifest.test.js: -------------------------------------------------------------------------------- 1 | const PluginManifest = require('../cli/plugin-manifest'); 2 | 3 | describe('PluginManifest', () => { 4 | it('should be constructable', () => { 5 | expect(new PluginManifest()).toBeInstanceOf(PluginManifest); 6 | }); 7 | 8 | describe('load()', () => { 9 | it('should return an instance', () => { 10 | expect(PluginManifest.load({})).toBeInstanceOf(PluginManifest); 11 | }); 12 | }); 13 | 14 | describe('parse()', () => { 15 | let manifest = null; 16 | 17 | describe('with empty data', () => { 18 | beforeEach(() => { 19 | manifest = PluginManifest.load(''); 20 | }); 21 | it('should have no dependencies', () => { 22 | let parsed = manifest.parse(); 23 | expect(parsed).toBe(manifest); 24 | expect(parsed.dependencies).toHaveLength(0); 25 | }); 26 | }); 27 | 28 | describe('with a manifest containing no dependencies', () => { 29 | beforeEach(() => { 30 | manifest = PluginManifest.load(` 31 | Manifest-Version: 1.0 32 | Archiver-Version: Plexus Archiver 33 | Created-By: Apache Maven 34 | Built-By: magnayn 35 | Build-Jdk: 1.6.0_22 36 | Extension-Name: AdaptivePlugin 37 | Specification-Title: This (experimental) plug-in exposes the jenkins b 38 | uild extension points (SCM, Build, Publish) to a groovy scripting env 39 | ironment that has some DSL-style extensions for ease of develo 40 | pment. 41 | Implementation-Title: AdaptivePlugin 42 | Implementation-Version: 0.1 43 | Group-Id: jenkins 44 | Short-Name: AdaptivePlugin 45 | Long-Name: Jenkins Adaptive DSL Plugin 46 | Url: http://wiki.jenkins-ci.org/display/JENKINS/Jenkins+Adaptive+Plugin 47 | Plugin-Version: 0.1 48 | Hudson-Version: null 49 | Plugin-Developers: Nigel Magnay:magnayn:nigel.magnay@gmail.com 50 | `); 51 | }); 52 | 53 | it('should have no dependencies', () => { 54 | let parsed = manifest.parse(); 55 | expect(parsed).toBe(manifest); 56 | expect(parsed.dependencies).toHaveLength(0); 57 | }); 58 | }); 59 | 60 | describe('with a manifest containing dependencies', () => { 61 | beforeEach(() => { 62 | manifest = PluginManifest.load(` 63 | Manifest-Version: 1.0 64 | Archiver-Version: Plexus Archiver 65 | Created-By: Apache Maven 66 | Built-By: mwaite 67 | Build-Jdk: 1.8.0_181 68 | Extension-Name: git-client 69 | Specification-Title: Utility plugin for Git support in Jenkins 70 | Implementation-Title: git-client 71 | Implementation-Version: 2.7.3 72 | Group-Id: org.jenkins-ci.plugins 73 | Short-Name: git-client 74 | Long-Name: Jenkins Git client plugin 75 | Url: https://wiki.jenkins.io/display/JENKINS/Git+Client+Plugin 76 | Plugin-Version: 2.7.3 77 | Hudson-Version: 1.625.3 78 | Jenkins-Version: 1.625.3 79 | Plugin-Dependencies: apache-httpcomponents-client-4-api:4.5.3-2.0,cred 80 | entials:2.1.13,jsch:0.1.54.1,ssh-credentials:1.13,structs:1.9 81 | Plugin-Developers: Mark Waite:markewaite:mark.earl.waite@gmail.com 82 | `); 83 | }); 84 | 85 | it('should have dependencies', () => { 86 | let parsed = manifest.parse(); 87 | expect(parsed).toBe(manifest); 88 | expect(parsed.dependencies).toHaveLength(5); 89 | }); 90 | 91 | }); 92 | }); 93 | }); 94 | -------------------------------------------------------------------------------- /services/test/services/update.hooks.test.js: 
-------------------------------------------------------------------------------- 1 | const hooks = require('../../src/services/update/update.hooks'); 2 | const errors = require('@feathersjs/errors'); 3 | 4 | const SKIP = require('@feathersjs/feathers').SKIP; 5 | 6 | describe('update service hooks', () => { 7 | describe('defaultChannel()', () => { 8 | let context = { 9 | params: {}, 10 | data: {}, 11 | }; 12 | 13 | it('should add the default `channel` to the context.data', () => { 14 | expect(hooks.defaultChannel(context)).toBe(context); 15 | expect(context.data.channel).toBe('general'); 16 | }); 17 | }); 18 | 19 | describe('getHooks()', () => { 20 | it('should have before/after/error properties', () => { 21 | const result = hooks.getHooks(); 22 | expect(result).toHaveProperty('before'); 23 | expect(result).toHaveProperty('after'); 24 | expect(result).toHaveProperty('error'); 25 | }); 26 | }); 27 | 28 | describe('preventRedundantCommits()', () => { 29 | let context = { 30 | app: { 31 | service: {} 32 | }, 33 | data: { 34 | channel: 'general', 35 | commit: '0x0', 36 | } 37 | }; 38 | 39 | it('should not skip on zero records from find()', async () => { 40 | context.app.service = () => { 41 | return { 42 | find: () => { 43 | return new Promise((resolve) => { 44 | resolve([]); 45 | }); 46 | }, 47 | }; 48 | }; 49 | const result = await hooks.preventRedundantCommits(context); 50 | expect(result).not.toBe(SKIP); 51 | }); 52 | 53 | it('skip on records from the find()', async () => { 54 | context.app.service = () => { 55 | return { 56 | find: () => { 57 | return new Promise((resolve) => { 58 | resolve([1, 2]); 59 | }); 60 | }, 61 | }; 62 | }; 63 | await hooks.preventRedundantCommits(context); 64 | expect(context.statusCode).toEqual(304); 65 | }); 66 | }); 67 | 68 | describe('checkUpdateFormat()', () => { 69 | it('should throw a BadRequest if there is no data', () => { 70 | expect(() => { 71 | hooks.checkUpdateFormat({}); 72 | }).toThrow(errors.BadRequest); 73 | }); 74 | 75 | it('should throw a BadRequest if the data is empty', () => { 76 | expect(() => { 77 | hooks.checkUpdateFormat({ 78 | data: {}, 79 | }); 80 | }).toThrow(errors.BadRequest); 81 | }); 82 | 83 | it('should throw a BadRequest if the commit field is missing or empty', () => { 84 | expect(() => { 85 | hooks.checkUpdateFormat({ 86 | data: { 87 | manifest: 'manifest', 88 | } 89 | }); 90 | }).toThrow(errors.BadRequest); 91 | }); 92 | 93 | it('should throw a BadRequest if the manifest field is missing or empty', () => { 94 | expect(() => { 95 | hooks.checkUpdateFormat({ 96 | data: { 97 | commit: 'commit', 98 | } 99 | }); 100 | }).toThrow(errors.BadRequest); 101 | }); 102 | }); 103 | }); 104 | -------------------------------------------------------------------------------- /services/src/services/status/status.hooks.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Status service hooks 3 | */ 4 | 5 | const authentication = require('@feathersjs/authentication'); 6 | const errors = require('@feathersjs/errors'); 7 | const dbtimestamp = require('../../hooks/dbtimestamp'); 8 | const internalOnly = require('../../hooks/internalonly'); 9 | const ensureMatchingUUID = require('../../hooks/ensureuuid'); 10 | 11 | const instanceRequiredFields = [ 12 | 'flavor' 13 | ]; 14 | 15 | /* 16 | * StatusHooks are all the hooks necessary to run the status service properly 17 | */ 18 | class StatusHooks { 19 | constructor () { 20 | } 21 | 22 | validateRequiredFields(hook) { 23 | if (!(hook.data)) { 24 | throw new 
errors.BadRequest('Missing data'); 25 | } 26 | instanceRequiredFields.forEach( field => { 27 | if (!hook.data[field]) { 28 | throw new errors.BadRequest(`Missing required field '${field}'`); 29 | } 30 | }); 31 | } 32 | 33 | getHooks() { 34 | return { 35 | before: { 36 | all: [ 37 | authentication.hooks.authenticate(['jwt']) 38 | ], 39 | get: [ 40 | this.includeAssociations, 41 | ], 42 | create: [ 43 | ensureMatchingUUID, 44 | this.validateRequiredFields, 45 | dbtimestamp('createdAt'), 46 | module.exports.defaultUpdateLevel, 47 | module.exports.pruneQueryParams, 48 | ], 49 | 50 | update: [ 51 | ensureMatchingUUID, 52 | dbtimestamp('updatedAt'), 53 | ], 54 | patch: [ 55 | ensureMatchingUUID, 56 | dbtimestamp('updatedAt'), 57 | ], 58 | remove: [ 59 | internalOnly, 60 | ] 61 | }, 62 | after: {}, 63 | error: {} 64 | }; 65 | } 66 | 67 | /* 68 | * Include the model's associations in the output from the hook 69 | */ 70 | includeAssociations(context) { 71 | if (!context.params.sequelize) { 72 | context.params.sequelize = {}; 73 | } 74 | Object.assign(context.params.sequelize, { 75 | include: [ context.app.get('models').update ] 76 | }); 77 | return context; 78 | } 79 | 80 | /* 81 | * delete extra parameters included in the query string 82 | */ 83 | pruneQueryParams(context) { 84 | if (context.params.query) { 85 | delete context.params.query.include; 86 | } 87 | return context; 88 | } 89 | 90 | /* 91 | * Default new instances into the latest update record in the `general` channel. 92 | */ 93 | async defaultUpdateLevel(context) { 94 | const updates = context.app.service('update'); 95 | const records = await updates.find({ 96 | query: { 97 | $limit: 1, 98 | $sort: { 99 | createdAt: -1, 100 | }, 101 | }, 102 | }); 103 | 104 | if (records.length === 0) { 105 | throw new Error('Failed to find the latest `general` updates for instance creation'); 106 | } 107 | 108 | context.data.updateId = records[0].id; 109 | return context; 110 | } 111 | } 112 | 113 | /* 114 | * To make things easier to unit test, these hook functions are being exported 115 | */ 116 | module.exports = new StatusHooks(); 117 | -------------------------------------------------------------------------------- /distribution/client/src/lib/storage.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import fs from 'fs-extra'; 4 | import path from 'path'; 5 | 6 | import * as logger from 'winston'; 7 | import UI from './ui'; 8 | 9 | import util from 'util'; 10 | 11 | const remove = util.promisify(fs.remove); 12 | const unlink = util.promisify(fs.unlink); 13 | 14 | /* 15 | * The Storage module simply contains common functions necessary for the 16 | * evergreen-client to store its own data. 
17 | */ 18 | export default class Storage { 19 | /* 20 | * Returns the default home directory or the value of EVERGREEN_HOME 21 | * 22 | * @return {string} 23 | */ 24 | static homeDirectory() { 25 | /* The default home directory is /evergreen, see the Dockerfile in the root 26 | * directory of th repository 27 | */ 28 | if (!process.env.EVERGREEN_DATA) { 29 | return '/evergreen/data'; 30 | } 31 | return process.env.EVERGREEN_DATA; 32 | } 33 | 34 | /* 35 | * Returns the JENKINS_HOME used by the instance 36 | * 37 | * @return {string} 38 | */ 39 | static jenkinsHome() { 40 | return path.join( 41 | Storage.homeDirectory(), 42 | 'jenkins', 43 | 'home'); 44 | } 45 | 46 | static jenkinsVar() { 47 | return path.join( 48 | Storage.homeDirectory(), 49 | 'jenkins', 50 | 'var'); 51 | } 52 | 53 | /* 54 | * Returns the directory used for storing plugins 55 | * 56 | * @return {string} 57 | */ 58 | static pluginsDirectory() { 59 | return path.join(Storage.jenkinsHome(), 60 | 'plugins'); 61 | } 62 | 63 | static getBootingFlagFile() { 64 | return path.join(Storage.homeDirectory(), 'booting.txt'); 65 | } 66 | 67 | static setBootingFlag() { 68 | return fs.writeFileSync(Storage.getBootingFlagFile(), Date.now().toString()); 69 | } 70 | 71 | static removeBootingFlag() { 72 | const filePath = Storage.getBootingFlagFile(); 73 | if (fs.existsSync(filePath)) { 74 | fs.unlinkSync(filePath); 75 | } 76 | } 77 | 78 | static removePlugins(plugins?: Array) { 79 | if (!plugins) { 80 | return; 81 | } 82 | const pluginPath = this.pluginsDirectory(); 83 | const retArray = []; 84 | plugins.forEach((plugin) => { 85 | retArray.push( 86 | remove(`${pluginPath}/${plugin}`) 87 | .then(() => { 88 | logger.info(`${pluginPath}/${plugin} was deleted.`); 89 | return true; 90 | }).catch(() => logger.debug(`${pluginPath}/${plugin} was not found.`)) 91 | ); 92 | retArray.push( 93 | unlink(`${pluginPath}/${plugin}.hpi`) 94 | .then(() => { 95 | logger.info(`${pluginPath}/${plugin}.hpi was deleted.`); 96 | UI.publish(`Deleted ${plugin}.hpi`); 97 | return true; 98 | }).catch(() => logger.info(`${pluginPath}/${plugin}.hpi was not found.`)) 99 | ); 100 | retArray.push( 101 | unlink(`${pluginPath}/${plugin}.jpi`) 102 | .then(() => { 103 | logger.info(`${pluginPath}/${plugin}.jpi was deleted.`); 104 | UI.publish(`Deleted ${plugin}.jpi`); 105 | return true; 106 | }).catch(() => logger.debug(`${pluginPath}/${plugin}.jpi was not found.`)) 107 | ); 108 | }); 109 | return Promise.all(retArray); 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /HACKING.adoc: -------------------------------------------------------------------------------- 1 | = How to hack on this project 2 | 3 | [CAUTION] 4 | ==== 5 | The project is under active development. 6 | 7 | So though we have automated tests to keep things working, do never hesitate to 8 | come to us on our 9 | link:https://gitter.im/jenkins-infra/evergreen[Gitter channel] 10 | if you have any questions. 11 | 12 | We welcome people willing to contribute! :fireworks: 13 | 14 | ==== 15 | 16 | == Getting started 17 | 18 | This repository _heavily_ depends on link:https://docker.io[Docker] and 19 | GNU/Make. These tools should be installed and be found via your shell's 20 | `$PATH` environment variable. 21 | 22 | Much of this project is developed and testing using Linux, users of another 23 | platform may not find everything works "out of the box." Please feel free to 24 | submit pull requests to improve support for other development environments. 
25 | 26 | 27 | The `Makefile` at the project root should help you getting started. 28 | 29 | `make check` or the more specific `cd distribution && make container-check` is likely where you 30 | want to start to understand how things work, and be able to modify them and see 31 | the impact. 32 | 33 | === Running the whole stack locally 34 | 35 | To develop on a developer box and quite easily set up both the backend services and the Jenkins Evergreen instance, 36 | you need to run the `start-dev.sh` script located in `distribution/bin/start-dev.sh`. 37 | 38 | [source,shell, title=from the repo root] 39 | $ distribution/bin/start-dev.sh 40 | 41 | At some point, you should see logs showing Evergreen has started to download plugins and so on. 42 | 43 | Then open your browser on http://localhost:8080 and you should see your development version for _Jenkins Evergreen_ coming up. 44 | 45 | TIP: Expect some delay for the first time, because you will need to download many dozens of MB. 46 | However, an `squid` proxy cache is set up while using this `make run`, hence these downloads should be very fast the next times. 47 | 48 | === Code style 49 | 50 | We have defined a set of ESLint rules to keep the code style consistent. 51 | To fix most, if not all, of the formatting issues, use `make fix-formatting`. 52 | 53 | === Tests 54 | 55 | Automated testing is exceptionally important for the Jenkins Evergreen effort. 56 | Code should not be introduced without some form of automated testing to ensure 57 | that it it works as specified, and continues to do so. 58 | 59 | We use link:https://ci.jenkins.io/blue[Jenkins] to execute our continuous 60 | integration and continuous delivery processes, which are defined in the 61 | `Jenkinsfile` in the root of this repository. 62 | 63 | ==== Client 64 | 65 | The `evergreen-client` is written in JavaScript and have tests written primarily in 66 | link:https://mochajs.org/[Mocha], which can be run via `make check`. 67 | 68 | ==== Container 69 | 70 | We use link:https://github.com/kward/shunit2[shUnit2] for tests. 71 | 72 | See the `tests/tests.sh` file to see how this works, and if you want to check additional things. 73 | 74 | === Backend 75 | 76 | The backend services are written in JavaScript and have tests written primarily 77 | in link:https://facebook.github.io/jest/[Jest], which can be run via `make check`. 78 | 79 | === How to list available plugins updates 80 | 81 | [source,bash] 82 | ---- 83 | (cd services && make propose-updates) 84 | ---- 85 | 86 | This will tell you which plugins can be upgraded in the 87 | `services/essentials.yaml` file under the `status` section. 88 | -------------------------------------------------------------------------------- /distribution/client/src/lib/error-telemetry.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * The error telemetry module handles sending error logging to the backend. 3 | * 4 | * NOTE: we _might_ want in the future to have only one Telemetry class and 5 | * handle both error and "metrics" telemetry, but let's start with a smaller 6 | * scope at least for now. 
7 | */ 8 | import { Tail } from 'tail'; 9 | import fs from 'fs'; 10 | import * as logger from 'winston' 11 | import path from 'path'; 12 | 13 | import Storage from './storage'; 14 | import Update from './update'; 15 | 16 | export interface ErrorTelemetryOptions { 17 | flavor?: string, 18 | }; 19 | 20 | export default class ErrorTelemetry { 21 | protected readonly app : any; 22 | protected readonly update : Update; 23 | protected readonly options : ErrorTelemetryOptions; 24 | 25 | public uuid : string; 26 | public token : string; 27 | 28 | constructor(app, update, options) { 29 | this.app = app; 30 | this.update = update; 31 | this.options = options; 32 | } 33 | 34 | authenticate(uuid, token) { 35 | this.uuid = uuid; 36 | this.token = token; 37 | return this; 38 | } 39 | 40 | /** 41 | * (Private) default behaviour for the output where to send data to when the watched logging file 42 | * has a modification detected. 43 | */ 44 | callErrorTelemetryService(app, logDataObject) { 45 | const api = app.service('telemetry/error'); 46 | 47 | const payload = { 48 | log: logDataObject, 49 | uuid: this.uuid, 50 | flavor: this.options.flavor, 51 | updateLevel: this.update.getCurrentLevel() 52 | }; 53 | 54 | return api.create(payload) 55 | .then(res => logger.debug(`Pushed error log (message='${logDataObject.message}'). Backend response:`, res)) 56 | .catch(err => logger.error('Failed to push log', err)); 57 | } 58 | 59 | /** 60 | * monitoredFile: path to the log file to watch 61 | * outputFunction(app,line): the function that will be called on each new line detected 62 | */ 63 | setup(monitoredFile?: string) { 64 | logger.info('Setting up error logging...'); 65 | let loggingFile = monitoredFile || this.fileToWatch(); 66 | 67 | if (!fs.existsSync(loggingFile)) { 68 | logger.warn(`Logging file ${loggingFile} not found. Still watching the path in case the file gets created later. Can be normal when starting up.`); 69 | } else { 70 | logger.info(`Watching ${loggingFile}`); 71 | } 72 | 73 | const tail = new Tail(loggingFile, { 74 | follow: true, 75 | fromBeginning: true 76 | }); 77 | 78 | tail.on('line', data => { 79 | logger.silly('Reading line:', data); 80 | 81 | try { 82 | this.callErrorTelemetryService(this.app, JSON.parse(data)); 83 | } catch (err) { 84 | logger.error(`Unable to parse as JSON, corrupt log line? 
***${data}***`, err); 85 | } 86 | }); 87 | 88 | tail.on('error', error => { 89 | logger.error('Error while setting up file watching:', error); 90 | }); 91 | 92 | logger.info('Error Telemetry Logging file watching configured'); 93 | } 94 | 95 | fileToWatch() { 96 | if (!process.env.ESSENTIALS_LOG_FILE) { 97 | logger.debug('Defaulting to evergreen.log.0'); 98 | return path.join(Storage.jenkinsVar(), 'logs', 'evergreen.log.0'); 99 | } 100 | return process.env.ESSENTIALS_LOG_FILE; 101 | } 102 | } 103 | 104 | module.exports = ErrorTelemetry; 105 | -------------------------------------------------------------------------------- /distribution/client/patches/promise-request-retry+1.0.1.patch: -------------------------------------------------------------------------------- 1 | patch-package 2 | --- a/node_modules/promise-request-retry/index.js 3 | +++ b/node_modules/promise-request-retry/index.js 4 | @@ -1,20 +1,29 @@ 5 | 'use strict'; 6 | const requestPromise = require('request-promise'); 7 | const Promise = require('bluebird'); 8 | -const logger = require('./modules/logger')('request-promise-retry'); 9 | +const logger = require('winston'); 10 | 11 | class rpRetry { 12 | static _rpRetry(options) { 13 | - if(options.verbose_logging) { 14 | - logger.info(`calling ${options.uri} with retry ${options.retry}`); 15 | - } 16 | + 17 | const tries = options.retry || 1; 18 | delete options.retry; 19 | - const fetchDataWithRetry = tryCount => { 20 | + 21 | + const delay = options.delay || 100; // default ms delay between retries 22 | + delete options.delay; 23 | + 24 | + const factor = options.factor || 1; // If absent, delay will always be the same. 25 | + delete options.factor; 26 | + 27 | + if (options.verbose_logging) { 28 | + logger.info(`calling ${options.uri} with retry ${tries}, initial delay=${delay}, factor=${factor}`); 29 | + } 30 | + 31 | + const fetchDataWithRetry = (tryCount, delay) => { 32 | return requestPromise(options) 33 | .then(result => { 34 | - if(options.verbose_logging) { 35 | - logger.info(`Result obtained for ${options.method} request to ${options.uri}`); 36 | + if (options.verbose_logging) { 37 | + logger.info(`Result obtained for ${options.method} request to ${options.uri}`); 38 | } 39 | return Promise.resolve(result); 40 | }) 41 | @@ -22,22 +31,27 @@ class rpRetry { 42 | logger.info(`Encountered error ${err.message} for ${options.method} request to ${options.uri}, retry count ${tryCount}`); 43 | tryCount -= 1; 44 | if (tryCount) { 45 | - return fetchDataWithRetry(tryCount); 46 | + return new Promise((resolve, reject) => { 47 | + setTimeout(() => { 48 | + logger.debug(`waiting for ${delay} ms before next retry for ${options.uri}. 
Next wait ${delay * factor}`); 49 | + resolve(fetchDataWithRetry(tryCount, delay * factor)); 50 | + }, delay); 51 | + }); 52 | } 53 | return Promise.reject(err); 54 | }); 55 | }; 56 | - return fetchDataWithRetry(tries); 57 | + return fetchDataWithRetry(tries, delay); 58 | } 59 | 60 | static _rp(options) { 61 | - if(options.verbose_logging) { 62 | - logger.info(`calling ${options.uri} without retries`); 63 | + if (options.verbose_logging) { 64 | + logger.info(`calling ${options.uri} without retries`); 65 | } 66 | return requestPromise(options) 67 | .then(result => { 68 | - if(options.verbose_logging) { 69 | - logger.info(`Result obtained for ${options.method} request to ${options.uri}`); 70 | + if (options.verbose_logging) { 71 | + logger.info(`Result obtained for ${options.method} request to ${options.uri}`); 72 | } 73 | return Promise.resolve(result); 74 | }) 75 | --------------------------------------------------------------------------------
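For context on the patch above: it swaps the package's bundled logger for winston and adds two options, `delay` and `factor`, so that retries back off with a growing wait instead of firing immediately. A hedged usage sketch follows; the patch does not show the module's exported entry point, so the request-promise-style call below is an assumption, as is the endpoint URL.

// Sketch under assumptions: promise-request-retry is invoked like request-promise,
// with the extra retry/delay/factor options introduced by the patch.
const request = require('promise-request-retry');

request({
  uri: 'https://example.invalid/update-center.json', // hypothetical endpoint
  method: 'GET',
  json: true,
  retry: 5,             // number of attempts before the promise rejects
  delay: 1000,          // initial wait between attempts in milliseconds (added by the patch)
  factor: 2,            // multiplier applied to the delay after each failure (added by the patch)
  verbose_logging: true,
})
  .then((body) => console.log('fetched after retries', body))
  .catch((err) => console.error('all attempts failed', err));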