├── .gitattributes ├── test ├── fixtures │ ├── es6 │ │ ├── sum.js │ │ ├── package.json │ │ └── index.jsx │ ├── npm │ │ ├── sum.js │ │ ├── index.jsx │ │ ├── package.json │ │ └── webpack.config.js │ ├── other │ │ ├── sum.js │ │ ├── editor.jsx │ │ ├── preview.jsx │ │ ├── publish.jsx │ │ ├── package.json │ │ └── webpack.config.js │ ├── webpack │ │ ├── sum.js │ │ ├── index.jsx │ │ ├── package.json │ │ └── webpack.config.js │ ├── browserify │ │ ├── sum.js │ │ ├── index.jsx │ │ └── package.json │ ├── packages.json │ ├── map.json │ ├── wrhs.toml │ ├── heads.js │ ├── payload-0.0.0.json │ └── v2-payload-0.0.0.json ├── mocks.js ├── lib │ ├── construct │ │ ├── bffs.test.js │ │ ├── progress.test.js │ │ ├── status-writer.test.js │ │ └── index.test.js │ ├── preboots │ │ └── scheduler.test.js │ └── routes │ │ └── index.test.js └── config.json ├── .eslintrc ├── .github ├── ISSUE_TEMPLATE.md ├── ISSUE_TEMPLATE │ ├── documentation.md │ ├── bug_report.md │ └── regression.md ├── LICENSE └── PULL_REQUEST_TEMPLATE.md ├── bin └── server ├── .travis.yml ├── lib ├── preboots │ ├── feedsme.js │ ├── terminate.js │ ├── cdnup.js │ ├── database.js │ ├── nsq.js │ ├── index.js │ ├── npm.js │ └── scheduler.js ├── middlewares │ ├── 404.js │ └── index.js ├── index.js ├── construct │ ├── bffs.js │ ├── cleaner.js │ ├── fitting.js │ ├── rmrf.js │ ├── npm │ │ └── index.js │ ├── packer.js │ ├── status-writer.js │ ├── progress.js │ ├── builder.js │ └── index.js ├── app.js ├── build-query.js └── routes │ └── index.js ├── SECURITY.md ├── .gitignore ├── LICENSE ├── CHANGELOG.md ├── config.example.json ├── package.json ├── CONTRIBUTING.md └── README.md /.gitattributes: -------------------------------------------------------------------------------- 1 | package-lock.json binary 2 | -------------------------------------------------------------------------------- /test/fixtures/es6/sum.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.sum = function sum(a, b) { 4 | return a + b; 5 | } -------------------------------------------------------------------------------- /test/fixtures/npm/sum.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.sum = function sum(a, b) { 4 | return a + b; 5 | } -------------------------------------------------------------------------------- /test/fixtures/other/sum.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.sum = function sum(a, b) { 4 | return a + b; 5 | } -------------------------------------------------------------------------------- /test/fixtures/webpack/sum.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.sum = function sum(a, b) { 4 | return a + b; 5 | } -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "godaddy", 3 | "rules": { 4 | "strict": 0, 5 | "no-shadow": 0 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /test/fixtures/browserify/sum.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | exports.sum = function sum(a, b) { 4 | return a + b; 5 | } -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: 
-------------------------------------------------------------------------------- 1 | 👉 Please follow one of these issue templates https://github.com/godaddy/carpenterd/issues/new/choose 2 | -------------------------------------------------------------------------------- /test/fixtures/es6/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "version": "0.0.0", 4 | "description": "ES6 React Test module", 5 | "main": "./index.jsx", 6 | "dependencies": { 7 | "react": "~0.13.3" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /test/fixtures/es6/index.jsx: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import { React } from 'react'; 4 | import { sum } from './sum'; 5 | 6 | export default class Test extends React.Component { 7 | render() { 8 | return
Build an ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/npm/index.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return
NPM Build an ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/other/editor.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return
Editor ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/other/preview.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return
Preview ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/other/publish.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return
Publish ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/webpack/index.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return
Webpack an ES6 React component.
;
 9 |   }
10 | }
--------------------------------------------------------------------------------
/test/fixtures/browserify/index.jsx:
--------------------------------------------------------------------------------
 1 | 'use strict';
 2 | 
 3 | import { React } from 'react';
 4 | import { sum } from './sum';
 5 | 
 6 | export default class Test extends React.Component {
 7 |   render() {
 8 |     return (
 9 |
Browserify an ES6 React component.
10 | ); 11 | } 12 | } -------------------------------------------------------------------------------- /test/fixtures/packages.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "name": "my-client-side-package", 3 | "version": "6.7.0", 4 | "extended": { 5 | "build": "webpack" 6 | } 7 | }, { 8 | "name": "my-other-client-side-package", 9 | "version": "3.4.5", 10 | "extended": { 11 | "build": "webpack" 12 | } 13 | }] 14 | -------------------------------------------------------------------------------- /test/mocks.js: -------------------------------------------------------------------------------- 1 | exports.Writer = Writer; 2 | 3 | function Writer() {} 4 | 5 | Writer.prototype.publish = function (topic, payload, fn) { 6 | setImmediate(fn); 7 | }; 8 | 9 | class Progress { 10 | start() {} 11 | fail() {} 12 | write() {} 13 | done() {} 14 | } 15 | 16 | exports.Progress = Progress 17 | 18 | -------------------------------------------------------------------------------- /bin/server: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 'use strict'; 3 | 4 | require('../lib/').start(function listen(error, app) { 5 | if (error) return app.log.error(error.message); 6 | 7 | const config = app.config.get('https') || app.config.get('http') || {}; 8 | app.log.verbose('Carpenter started on %s:%d', config.hostname, config.port); 9 | }); -------------------------------------------------------------------------------- /test/fixtures/map.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 3, 3 | "file": "script.js.map", 4 | "sources": [ 5 | "app.js", 6 | "content.js", 7 | "widget.js" 8 | ], 9 | "sourceRoot": "/", 10 | "names": [ 11 | "slideUp", 12 | "slideDown", 13 | "save" 14 | ], 15 | "mappings": "AAA0B,kBAAhBA,QAAOC,SACjBD,OAAOC,OAAO" 16 | } 17 | -------------------------------------------------------------------------------- /test/fixtures/webpack/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "version": "0.0.0", 4 | "description": "ES6 React Test module", 5 | "main": "./index.jsx", 6 | "dependencies": { 7 | "react": "~0.13.3" 8 | }, 9 | "devDependencies": { 10 | "babel-core": "~5.8.22", 11 | "babel-loader": "~5.3.2", 12 | "webpack": "~1.11.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /test/fixtures/browserify/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "version": "0.0.0", 4 | "description": "ES6 React Test module", 5 | "main": "./index.jsx", 6 | "browserify": { 7 | "exclude": [ 8 | "react" 9 | ], 10 | "transform": [ 11 | "babelify" 12 | ] 13 | }, 14 | "dependencies": { 15 | "babelify": "~6.3.0", 16 | "react": "~0.13.3" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 📃 Documentation Bug 3 | about: You want to report something that is wrong or missing from the documentation. 
4 | labels: "Type: Docs" 5 | --- 6 | 7 | ## 📃 Summary 8 | 11 | 12 | ## Expected documentation 13 | 16 | -------------------------------------------------------------------------------- /test/fixtures/npm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "version": "0.0.0", 4 | "description": "ES6 React Test module", 5 | "main": "./index.jsx", 6 | "scripts": { 7 | "build": "webpack" 8 | }, 9 | "dependencies": { 10 | "react": "~0.13.3" 11 | }, 12 | "devDependencies": { 13 | "babel-core": "~5.8.22", 14 | "babel-loader": "~5.3.2", 15 | "webpack": "~1.11.0" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /test/fixtures/npm/webpack.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | context: __dirname, 3 | entry: './index.jsx', 4 | externals: { 5 | 'react': 'React' 6 | }, 7 | module: { 8 | loaders: [{ 9 | test: /\.jsx?$/, 10 | exclude: /(node_modules|bower_components)/, 11 | loader: 'babel' 12 | }] 13 | }, 14 | output: { 15 | path: __dirname + '/dist', 16 | filename: 'bundle.js' 17 | } 18 | }; 19 | -------------------------------------------------------------------------------- /test/fixtures/webpack/webpack.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | context: __dirname, 3 | entry: './index.jsx', 4 | externals: { 5 | 'react': 'React' 6 | }, 7 | module: { 8 | loaders: [{ 9 | test: /\.jsx?$/, 10 | exclude: /(node_modules|bower_components)/, 11 | loader: 'babel' 12 | }] 13 | }, 14 | output: { 15 | path: __dirname + '/dist', 16 | filename: 'bundle.js' 17 | } 18 | }; 19 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: required 2 | language: node_js 3 | 4 | node_js: 5 | - "10" 6 | - "12" 7 | 8 | matrix: 9 | allow_failures: 10 | - node_js: "12" 11 | 12 | services: 13 | - docker 14 | 15 | before_install: 16 | - docker pull localstack/localstack 17 | - docker run -d -p 127.0.0.1:4569:4569 --name localstack localstack/localstack 18 | 19 | install: npm install 20 | 21 | env: 22 | - AWS_ACCESS_KEY_ID=foobar AWS_SECRET_ACCESS_KEY=foobar 23 | -------------------------------------------------------------------------------- /.github/LICENSE: -------------------------------------------------------------------------------- 1 | `ISSUE_TEMPLATE.md` and markdown files under the `ISSUE_TEMPLATE` directory are adapted from `react-native` under MIT. 
2 | 3 | https://github.com/facebook/react-native/blob/master/.github/ISSUE_TEMPLATE.md 4 | https://github.com/facebook/react-native/blob/37bf2ce/.github/ISSUE_TEMPLATE.md 5 | 6 | https://github.com/facebook/react-native/tree/master/.github/ISSUE_TEMPLATE 7 | https://github.com/facebook/react-native/tree/37bf2ce/.github/ISSUE_TEMPLATE 8 | -------------------------------------------------------------------------------- /lib/preboots/feedsme.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Feedsme = require('feedsme-api-client'); 4 | const url = require('url'); 5 | 6 | /* 7 | * Simply setup the feedsme api client 8 | */ 9 | module.exports = function feedme(app, options, callback) { 10 | const uri = app.config.get('feedsme'); 11 | const proto = url.parse(uri).protocol; 12 | 13 | app.feedsme = new Feedsme({ 14 | agent: app.agents[proto], 15 | uri: uri 16 | }); 17 | 18 | callback(); 19 | }; 20 | -------------------------------------------------------------------------------- /lib/preboots/terminate.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = function preboot(app, options, done) { 4 | /** 5 | * Log the error and end the current request. 6 | * 7 | * @param {HTTPResponse} res Repsonse. 8 | * @param {Error} error Critical error. 9 | * @api public 10 | */ 11 | app.terminate = function terminate(res, error) { 12 | error = error || {}; 13 | 14 | app.log.error(error.message); 15 | res.status(error.code || 500).end(error.message); 16 | }; 17 | 18 | done(); 19 | }; 20 | -------------------------------------------------------------------------------- /test/fixtures/other/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "test", 3 | "version": "0.0.0", 4 | "description": "ES6 React Test module", 5 | "main": "./index.jsx", 6 | "dependencies": { 7 | "react": "~0.13.3" 8 | }, 9 | "devDependencies": { 10 | "babel-core": "~5.8.22", 11 | "babel-loader": "~5.3.2", 12 | "webpack": "~1.11.0" 13 | }, 14 | "config": { 15 | "modes": { 16 | "preview": "preview.jsx", 17 | "editor": "editor.jsx", 18 | "publish": "publish.jsx" 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /lib/middlewares/404.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // 4 | // Return 404 middleware. 5 | // 6 | module.exports = function getFourofour(app) { 7 | /** 8 | * Handle unknown routes. 9 | * 10 | * @param {HTTPRequest} req Incoming HTTP request. 11 | * @param {HTTPResponse} res HTTP Response stream. 12 | * @param {Function} next Completion callback. 13 | * @public 14 | */ 15 | return function fourofour(req, res) { 16 | app.contextLog.error('Not found: %s - %s', req.method, req.url); 17 | 18 | res.status(404).end(); 19 | }; 20 | }; 21 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const App = exports.App = require('./app'); 4 | const path = require('path'); 5 | 6 | /* 7 | * Create a new application and start it. 8 | * 9 | * @param {Object} options Optional configuration. 10 | * @param {Function} done Completion callback. 
11 | * @api public 12 | */ 13 | exports.start = function start(options, done) { 14 | if (!done && typeof options === 'function') { 15 | done = options; 16 | options = {}; 17 | } 18 | 19 | const app = new App(path.join(__dirname, '..'), options); 20 | 21 | app.start(function started(error) { 22 | done(error, app); 23 | }); 24 | }; 25 | -------------------------------------------------------------------------------- /lib/construct/bffs.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const BFFS = require('bffs'); 4 | 5 | module.exports = function preboot(app, options, done) { 6 | // 7 | // Load the store configuration, make sure this module is not required before 8 | // the app.config is initialized. 9 | // 10 | 11 | // 12 | // Instantiate the Builds Files Finder Service and expose it as a singleton 13 | // so the connection is reused. 14 | // 15 | app.bffs = new BFFS({ 16 | prefix: app.config.get('bffs:prefix'), 17 | db: app.database, 18 | models: app.models, 19 | cdn: app.config.get('bffs:cdn') 20 | }); 21 | 22 | return void done(); 23 | }; 24 | -------------------------------------------------------------------------------- /lib/construct/cleaner.js: -------------------------------------------------------------------------------- 1 | const rmrf = require('./rmrf').async; 2 | 3 | class Cleaner { 4 | constructor(options) { 5 | this.log = options.log; 6 | } 7 | 8 | /** 9 | * Clean up a given set of paths 10 | * @param {Array[]} paths set of paths to be cleared 11 | * @returns {Promise} Completion handler 12 | */ 13 | async cleanup(paths) { 14 | paths = Array.isArray(paths) ? paths : [paths]; 15 | const tasks = paths.map(async path => { 16 | this.log.info('Cleanup path: %s', path); 17 | await rmrf(path); 18 | }); 19 | try { 20 | await Promise.all(tasks); 21 | } catch (e) { 22 | return e; 23 | } 24 | 25 | return null; 26 | } 27 | } 28 | 29 | module.exports = Cleaner; 30 | -------------------------------------------------------------------------------- /test/fixtures/wrhs.toml: -------------------------------------------------------------------------------- 1 | [minify] 2 | [minify.compress] 3 | unsafe = true 4 | dead_code = true 5 | collapse_vars = true 6 | drop_console = true 7 | conditionals = true 8 | booleans = true 9 | unused = true 10 | if_return = true 11 | join_vars = true 12 | 13 | [minify.mangle] 14 | toplevel = true 15 | 16 | [minify.mangleProperties] 17 | regex = '^(interopRequireDefault|possibleConstructorReturn|phasedRegistrationNames|exports|classCallCheck|createClass|debugTool|displayName|inherits|captured|getNodeFromInstance|bubbled|ReactEventListener|getNativeProps|getReactMountReady|trapBubbledEvent|getName|getInstanceFromNode|enumerable|onNativeOperation|extends|onBeginLifeCycleTimer|registrationNameModules|shouldIntlComponentUpdate|onEndLifeCycleTimer|injection|canUseDOM|_{1}[^_].+)' 18 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 5 | 6 | ## Summary 7 | 8 | 11 | 12 | ## Changelog 13 | 14 | 18 | 19 | ## Test Plan 20 | 21 | 24 | -------------------------------------------------------------------------------- /lib/preboots/cdnup.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const Cdnup = require('cdnup'); 4 | 5 | /** 6 | * 7 | * Preboot to setuo an optional instance of cdnup for uploading fully 
built npm 8 | * package tarballs to an `s3` compatible store. 9 | * @param {slay.App} app App instance 10 | * @param {Object} options Configurable options 11 | * @param {Function} done Continuation function when finished 12 | * 13 | * @returns {undefined} nothing special 14 | * 15 | */ 16 | module.exports = function (app, options, done) { 17 | // 18 | // Remark: (jcrugzz) do we have a more meaningful config name? 19 | // 20 | const config = app.config.get('cdnup') || options.cdnup; 21 | if (!config) return done(); 22 | 23 | app.cdnup = new Cdnup(config.bucket, config); 24 | done(); 25 | }; 26 | -------------------------------------------------------------------------------- /test/fixtures/other/webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path'); 2 | var webpack = require('webpack'); 3 | var CommonsChunkPlugin = webpack.optimize.CommonsChunkPlugin; 4 | 5 | module.exports = { 6 | context: __dirname, 7 | entry: { 8 | preview: './preview.jsx', 9 | editor: './editor.jsx', 10 | publish: './publish.jsx' 11 | }, 12 | externals: { 13 | 'react': 'React' 14 | }, 15 | module: { 16 | loaders: [{ 17 | test: /\.jsx?$/, 18 | exclude: /(node_modules|bower_components)/, 19 | loader: 'babel' 20 | }] 21 | }, 22 | output: { 23 | path: path.join(__dirname, 'dist'), 24 | filename: '[name].bundle.js', 25 | chunkFilename: '[id].chunk.js' 26 | }, 27 | plugins: [ 28 | new CommonsChunkPlugin('commons.js') 29 | ] 30 | }; 31 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Reporting Security Issues 2 | 3 | We take security very seriously at GoDaddy. We appreciate your efforts to 4 | responsibly disclose your findings, and will make every effort to acknowledge 5 | your contributions. 6 | 7 | ## Where should I report security issues? 8 | 9 | In order to give the community time to respond and upgrade, we strongly urge you 10 | report all security issues privately. 11 | 12 | To report a security issue in one of our Open Source projects email us directly 13 | at **oss@godaddy.com** and include the word "SECURITY" in the subject line. 14 | 15 | This mail is delivered to our Open Source Security team. 16 | 17 | After the initial reply to your report, the team will keep you informed of the 18 | progress being made towards a fix and announcement, and may ask for additional 19 | information or guidance. 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐛 Bug Report 3 | about: You want to report a reproducible bug in Carpenterd. 
4 | labels: "Type: Bug Report" 5 | --- 6 | 7 | ## 🐛 Bug Report 8 | 12 | 13 | ## To Reproduce 14 | 17 | 18 | ## Expected Behavior 19 | 22 | 23 | ## Code Example 24 | 30 | 31 | ## Environment 32 | 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log.* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul/nyc 15 | coverage 16 | .nyc_output 17 | 18 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 19 | .grunt 20 | 21 | # node-waf configuration 22 | .lock-wscript 23 | 24 | # Compiled binary addons (http://nodejs.org/api/addons.html) 25 | build/Release 26 | 27 | # Dependency directory 28 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git 29 | node_modules 30 | 31 | .DS_Store 32 | dist 33 | test/fixtures/webpack/package/webpack/ 34 | test/fixtures/webpack/package/overwritten/ 35 | 36 | # special secret that cant go in git 37 | deploy/warehouse.prod.key.yml 38 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/regression.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 💥 Regression Report 3 | about: You want to report unexpected behavior that worked in previous releases. 4 | labels: "Type: Bug Report", "Impact: Regression" 5 | --- 6 | 7 | ## 💥 Regression Report 8 | 11 | 12 | ## Last working version 13 | 14 | Worked up to version: 15 | 16 | Stopped working in version: 17 | 18 | ## To Reproduce 19 | 20 | 23 | 24 | ## Expected Behavior 25 | 26 | 29 | 30 | ## Code Example 31 | 37 | 38 | ## Environment 39 | 42 | -------------------------------------------------------------------------------- /lib/preboots/database.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const { DynamoDB } = require('aws-sdk'); 4 | const dynamo = require('dynamodb-x'); 5 | const AwsLiveness = require('aws-liveness'); 6 | const wrhs = require('warehouse-models'); 7 | 8 | module.exports = function (app, options, done) { 9 | const ensure = app.config.get('ensure') || options.ensure; 10 | 11 | const region = app.config.get('DATABASE_REGION') 12 | || app.config.get('AWS_REGION') 13 | || app.config.get('database:region') 14 | || (app.get('database') || {}).region; 15 | // Used mainly for localstack usage 16 | const endpoint = app.config.get('DYNAMO_ENDPOINT') 17 | || app.config.get('database:endpoint') 18 | || (app.get('database') || {}).endpoint; 19 | 20 | const dynamoDriver = new DynamoDB({ region, endpoint }); 21 | 22 | dynamo.dynamoDriver(dynamoDriver); 23 | app.models = wrhs(dynamo); 24 | app.database = dynamo; 25 | 26 | new AwsLiveness().waitForServices({ 27 | clients: [dynamoDriver], 28 | waitSeconds: 60 29 | }).then(function () { 30 | if (!ensure) return done(); 31 | app.models.ensure(done); 32 | }).catch(done); 33 | }; 34 | -------------------------------------------------------------------------------- /lib/app.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const slay = require('slay'); 4 | const util = require('util'); 5 | 6 | /** 7 | * @constructor App 8 | * @param {string} root - Root 
directory of app 9 | * @param {Object} opts - configuration options 10 | * @returns {undefined} 11 | */ 12 | const App = module.exports = function App(root, opts) { 13 | slay.App.call(this, root, opts); 14 | 15 | this.env = process.env.NODE_ENV || 'development'; // eslint-disable-line no-process-env 16 | // 17 | // Load configuration and merge with provided options. 18 | // 19 | this.agents = {}; 20 | 21 | this.after('close', this._onClose.bind(this)); 22 | }; 23 | 24 | util.inherits(App, slay.App); 25 | 26 | 27 | /** 28 | * Close connections when the app closes. 29 | * 30 | * @param {Slay} app Application. 31 | * @param {Object} options Optional configuration. 32 | * @param {Function} fn Completion callback. 33 | * @api private 34 | */ 35 | App.prototype._onClose = function onClose(app, options, fn) { 36 | Object.keys(app.agents).forEach(key => { 37 | app.agents[key].destroy(); 38 | }); 39 | 40 | if (app.redis) app.redis.disconnect(); 41 | setImmediate(fn); 42 | }; 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2019 GoDaddy Operating Company, LLC. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /lib/construct/fitting.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | // 4 | // Extract the type of build we are doing 5 | // 6 | module.exports = function fitting(data, options) { 7 | options = options || {}; 8 | 9 | // 10 | // Allow additional rules to be defined and merge against the default. 11 | // 12 | const classy = options.classification; 13 | const keyword = options.keyword || 'check'; 14 | let match = ''; 15 | 16 | // 17 | // The classification can also be read directly from the data. 18 | // Allow opt-in for a `keyword`. This defaults to the `check` property. 19 | // 20 | if (data[keyword] in classy) return data[keyword]; 21 | 22 | // 23 | // Check if there are keywords in the package.json that gives some intel on 24 | // which project/team created these packages. 
25 | // 26 | if (!Array.isArray(data.keywords)) data.keywords = []; 27 | 28 | Object.keys(classy).some(function each(project) { 29 | const keywords = classy[project]; 30 | 31 | if (keywords.some(function some(keyword) { 32 | return !!~data.keywords.indexOf(keyword); 33 | })) return !!(match = project); 34 | 35 | return false; 36 | }); 37 | 38 | return match; 39 | }; 40 | -------------------------------------------------------------------------------- /lib/middlewares/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const bodyParser = require('body-parser'); 4 | const pid = process.pid; 5 | let rid = -1; 6 | 7 | const healthcheck = /healthcheck/; 8 | 9 | // 10 | // Add middlewares. 11 | // 12 | module.exports = function middleware(app, options, done) { 13 | app.perform('middleware', function performAfter(next) { 14 | app.use(bodyParser.urlencoded(app.config.get('json'))); 15 | app.use(bodyParser.json(app.config.get('json'))); 16 | 17 | app.use(function httpLogger(req, res, next) { 18 | rid++; 19 | // reduce # of healthcheck logs which can get a bit out of hand 20 | if (healthcheck.test(req.url) && rid % 50 !== 0) return next(); 21 | app.withBreadcrumb({ 22 | pid: pid, 23 | request: rid 24 | }, app.contextLog.info, () => { 25 | app.contextLog.info('%s request - %s', req.method, req.url); 26 | next(); 27 | }); 28 | }); 29 | 30 | app.after('actions', function postRouting(next) { 31 | app.contextLog.verbose('Adding post-routing middleware'); 32 | 33 | app.use(require('./404')(app)); 34 | 35 | next(); 36 | }); 37 | 38 | next(); 39 | }, done); 40 | }; 41 | -------------------------------------------------------------------------------- /test/fixtures/heads.js: -------------------------------------------------------------------------------- 1 | exports.missing = { 2 | 'my-client-side-package': [{ 3 | name: 'my-client-side-package', 4 | version: '6.6.8', 5 | env: 'test', 6 | locale: 'it' 7 | }, { 8 | name: 'my-client-side-package', 9 | version: '6.7.0', 10 | env: 'test', 11 | locale: 'en-US' 12 | }], 13 | 'my-other-client-side-package': [{ 14 | name: 'my-other-client-side-package', 15 | version: '3.4.4', 16 | env: 'test', 17 | locale: 'it' 18 | }, { 19 | name: 'my-other-client-side-package', 20 | version: '3.4.5', 21 | env: 'test', 22 | locale: 'en-US' 23 | }] 24 | }; 25 | 26 | exports.same = { 27 | 'my-client-side-package': [{ 28 | name: 'my-client-side-package', 29 | version: '6.7.0', 30 | env: 'test', 31 | locale: 'it' 32 | }, { 33 | name: 'my-client-side-package', 34 | version: '6.7.0', 35 | env: 'test', 36 | locale: 'en-US' 37 | }], 38 | 'my-other-client-side-package': [{ 39 | name: 'my-other-client-side-package', 40 | version: '3.4.5', 41 | env: 'test', 42 | locale: 'it' 43 | }, { 44 | name: 'my-other-client-side-package', 45 | version: '3.4.5', 46 | env: 'test', 47 | locale: 'en-US' 48 | }] 49 | }; 50 | -------------------------------------------------------------------------------- /lib/construct/rmrf.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const spawn = require('child_process').spawn; 4 | const once = require('one-time'); 5 | const bl = require('bl'); 6 | 7 | function rmrf(filePath, next) { 8 | const done = once(function onError(err) { 9 | if (err && err.message.includes('No such file or directory')) { 10 | return next(); 11 | } 12 | if (err) return next(err); 13 | return next(); 14 | }); 15 | 16 | let errorLogs = ''; 17 | 18 | const child = spawn('rm', 
['-rf', filePath], { 19 | env: process.env // eslint-disable-line no-process-env 20 | }); 21 | 22 | child.on('error', done); 23 | 24 | child.stderr.pipe(bl((err, buff) => { 25 | /* eslint consistent-return: 0 */ 26 | if (err) return done(err); 27 | errorLogs = buff.toString(); 28 | })); 29 | 30 | child.on('close', (code) => { 31 | if (code !== 0) { 32 | return done(new Error(`rm -rf exited with code ${code} ${errorLogs}`)); 33 | } 34 | 35 | return done(); 36 | }); 37 | } 38 | 39 | module.exports = rmrf; 40 | module.exports.async = function (filePath) { 41 | return new Promise((resolve, reject) => { 42 | rmrf(filePath, (err) => { 43 | err ? reject(err) : resolve(); 44 | }); 45 | }); 46 | }; 47 | -------------------------------------------------------------------------------- /test/lib/construct/bffs.test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | describe('BFFS', function () { 4 | const assume = require('assume'); 5 | const path = require('path'); 6 | const BFFS = require('bffs'); 7 | 8 | let app = require('../../../lib'); 9 | let bffs; 10 | 11 | this.timeout(3E4) // eslint-disable-line 12 | 13 | before(function (done) { 14 | app.start({ 15 | logger: { 16 | level: 'critical' 17 | }, 18 | ensure: true, 19 | config: { 20 | file: path.join(__dirname, '..', '..', 'config.json'), 21 | overrides: { 22 | http: 0 23 | } 24 | } 25 | }, function (error, application) { 26 | app = application; 27 | bffs = app.bffs; 28 | 29 | done(error); 30 | }); 31 | }); 32 | 33 | after(function (done) { 34 | app.close(done); 35 | }); 36 | 37 | it('is exposed as singleton instance', function () { 38 | assume(app.bffs).is.an('object'); 39 | assume(app.bffs).to.equal(bffs); 40 | assume(app.bffs).to.be.instanceof(BFFS); 41 | }); 42 | 43 | it('provides an interface to the build files finder service', function () { 44 | assume(bffs.build).to.be.a('function'); 45 | assume(bffs.search).to.be.a('function'); 46 | assume(bffs.publish).to.be.a('function'); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /lib/preboots/nsq.js: -------------------------------------------------------------------------------- 1 | const nsq = require('nsq.js-k8'); 2 | 3 | module.exports = function nsqboot(app, options, callback) { 4 | // 5 | // So we will need a good way to get the IP addresses for the nslookupd 6 | // 7 | const config = app.config.get('nsq'); 8 | if (!config) return callback(); 9 | let finished = false; 10 | // 11 | // NSQLOOKUPD doesnt quite get it right when fetching hosts. 12 | // We manually add the full DNS extension so the given hostname works in 13 | // every namespace. 
14 | // 15 | config.addrModify = function (addr) { 16 | if (!config.nsqdHostExt) return addr; 17 | let [host, port] = addr.split(':'); // eslint-disable-line prefer-const 18 | host = `${host}.${config.nsqdHostExt}`; 19 | return [host, port].join(':'); 20 | }; 21 | 22 | app.nsq = {}; 23 | const writer = app.nsq.writer = nsq.writer(config); 24 | 25 | writer.on('error response', function (err) { 26 | app.log.error('nsq error response: %s', err.message); 27 | }); 28 | 29 | writer.on('error', function (err) { 30 | if (finished) return app.log.error('nsq error: %s', err.message); 31 | finished = true; 32 | callback(err); 33 | }); 34 | 35 | writer.on('ready', function () { 36 | if (finished) return app.log.info('nsq ready called after preboot'); 37 | finished = true; 38 | callback(); 39 | }); 40 | 41 | }; 42 | -------------------------------------------------------------------------------- /test/fixtures/payload-0.0.0.json: -------------------------------------------------------------------------------- 1 | {"_id":"test","name":"test","description":"A builder test","main":"index.jsx","dist-tags":{"latest":"0.0.0"},"versions":{"0.0.0":{"name":"test","version":"0.0.0","description":"A builder test","main":"index.jsx","keywords":["test","carpenter","webpack"],"author":{"name":"GoDaddy.com, LLC"},"dependencies":{"react":"^0.14.7"},"readme":"ERROR: No README data found!","_id":"test@0.0.0","scripts":{},"_shasum":"db6c3e5f256a7dacc3561f7c8cef711a7d1bd17b","_from":".","_npmVersion":"2.6.1","_nodeVersion":"0.12.7","_npmUser":{"name":"swaagie","email":"info@martijnswaagman.nl"},"maintainers":[{"name":"swaagie","email":"info@martijnswaagman.nl"}],"dist":{"shasum":"db6c3e5f256a7dacc3561f7c8cef711a7d1bd17b","tarball":"http://localhost:8080/test/-/test-0.0.0.tgz"}}},"readme":"ERROR: No README data found!","maintainers":[{"name":"swaagie","email":"info@martijnswaagman.nl"}],"_attachments":{"test-0.0.0.tgz":{"content_type":"application/octet-stream","data":"H4sIAAAAAAAAA+1WwW6cMBDdM18x4rKJsvLasIDUTXJI1R9oe6t6MGAiGjDINilVRL+9Y8yyVVUph2SzisRDwvLMeGYMfjNuefbA78W2dSP5oRu5emVQSuPdDuyYxNE40sDNLXZhAisWRHEQU8ZouKIspMzavXYi/0OnDVeYykv9uL3APL4TPHkAvuS18D+Ab4Q2/sZKHoXSZSOtkBJ8nDQXOlNlaybNpy8xfBY8M/AVF0Ld5F0lnGXNy9GEbEuZix6PVe8UqWp+aqHK4heqbXCUiT6rutxm8G0UoEhZt/44+75xVkZxqYtG1X/bpTwVlXXmTPE9TJm2AgPLrBT6GGi2xsx+x4SRwJ+cu3hWTAkLSWj9Dd7gnfv3nBwT77e6q/EnnSbGM/xHyk/8j5Iwjijyn8VJvPD/LbDutABtVJmZ9d7zRN82ymiCxwFuoOhkZskOOL3gG0gvRyYpYTolgcMVpHtvOPcWFrwAB/7PZfoEMZ7hf8josf8nUYL8D8KELfx/C/zD/7K2/Ienqa8PUKimhvXYHlE/q219OCiJbR7H4gG5KHhXGcgqrrW7GojeYDvWziv52KAbKaSZqgkePXVxOTXpqbhct7d381UBuITjZSM7LCfX2/Z27zr1ub/jggULFrw3/AHWPxhWABAAAA","length":665}}} 2 | -------------------------------------------------------------------------------- /lib/preboots/index.js: -------------------------------------------------------------------------------- 1 | /* eslint no-sync: 0 */ 2 | 'use strict'; 3 | 4 | const HttpsAgent = require('https').Agent; 5 | const HttpAgent = require('http').Agent; 6 | const { format, transports } = require('winston'); 7 | 8 | const agentDefaults = { 9 | keepAlive: true 10 | }; 11 | 12 | class Agents { 13 | constructor(opts) { 14 | const http = new HttpAgent(opts); 15 | const https = new HttpsAgent(opts); 16 | this.http = http; 17 | this.https = https; 18 | this['https:'] = https; 19 | this['http:'] = http; 20 | } 21 | } 22 | 23 | function agents(app, options) { 24 | const opts = app.config.get('agent') || options.agent || agentDefaults; 25 | return new Agents(opts); 26 | } 27 | 28 | module.exports = function preboot(app, 
options, next) { 29 | // 30 | // Setup the child ochestration and other helpers. 31 | // 32 | app.agents = agents(app, options); 33 | 34 | app.preboot(require('slay-config')()); 35 | 36 | app.preboot(require('slay-log')({ 37 | format: format.combine( 38 | format.timestamp(), 39 | format.splat(), 40 | format.json() 41 | ), 42 | transports: [ 43 | new (transports.Console)() 44 | ] 45 | })); 46 | 47 | app.preboot(require('slay-contextlog')); 48 | app.preboot(require('./database')); 49 | app.preboot(require('./cdnup')); 50 | app.preboot(require('./nsq.js')); 51 | app.preboot(require('./npm')); 52 | app.preboot(require('../construct/bffs')); 53 | app.preboot(require('./scheduler')); 54 | app.preboot(require('../construct')); 55 | app.preboot(require('./terminate')); 56 | app.preboot(require('./feedsme')); 57 | 58 | next(); 59 | }; 60 | -------------------------------------------------------------------------------- /test/fixtures/v2-payload-0.0.0.json: -------------------------------------------------------------------------------- 1 | {"promote":false,"data":{"_id":"test","name":"test","description":"A builder test","main":"index.jsx","dist-tags":{"latest":"0.0.0"},"versions":{"0.0.0":{"name":"test","version":"0.0.0","description":"A builder test","main":"index.jsx","keywords":["test","carpenter","webpack"],"author":{"name":"GoDaddy.com, LLC"},"dependencies":{"react":"^0.14.7"},"readme":"ERROR: No README data found!","_id":"test@0.0.0","scripts":{},"_shasum":"db6c3e5f256a7dacc3561f7c8cef711a7d1bd17b","_from":".","_npmVersion":"2.6.1","_nodeVersion":"0.12.7","_npmUser":{"name":"swaagie","email":"info@martijnswaagman.nl"},"maintainers":[{"name":"swaagie","email":"info@martijnswaagman.nl"}],"dist":{"shasum":"db6c3e5f256a7dacc3561f7c8cef711a7d1bd17b","tarball":"http://localhost:8080/test/-/test-0.0.0.tgz"}}},"readme":"ERROR: No README data found!","maintainers":[{"name":"swaagie","email":"info@martijnswaagman.nl"}],"_attachments":{"test-0.0.0.tgz":{"content_type":"application/octet-stream","data":"H4sIAAAAAAAAA+1WwW6cMBDdM18x4rKJsvLasIDUTXJI1R9oe6t6MGAiGjDINilVRL+9Y8yyVVUph2SzisRDwvLMeGYMfjNuefbA78W2dSP5oRu5emVQSuPdDuyYxNE40sDNLXZhAisWRHEQU8ZouKIspMzavXYi/0OnDVeYykv9uL3APL4TPHkAvuS18D+Ab4Q2/sZKHoXSZSOtkBJ8nDQXOlNlaybNpy8xfBY8M/AVF0Ld5F0lnGXNy9GEbEuZix6PVe8UqWp+aqHK4heqbXCUiT6rutxm8G0UoEhZt/44+75xVkZxqYtG1X/bpTwVlXXmTPE9TJm2AgPLrBT6GGi2xsx+x4SRwJ+cu3hWTAkLSWj9Dd7gnfv3nBwT77e6q/EnnSbGM/xHyk/8j5Iwjijyn8VJvPD/LbDutABtVJmZ9d7zRN82ymiCxwFuoOhkZskOOL3gG0gvRyYpYTolgcMVpHtvOPcWFrwAB/7PZfoEMZ7hf8josf8nUYL8D8KELfx/C/zD/7K2/Ienqa8PUKimhvXYHlE/q219OCiJbR7H4gG5KHhXGcgqrrW7GojeYDvWziv52KAbKaSZqgkePXVxOTXpqbhct7d381UBuITjZSM7LCfX2/Z27zr1ub/jggULFrw3/AHWPxhWABAAAA","length":665}}}} 2 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | ### 4 | 5 | - Enable database config as env variables 6 | 7 | ### 6.0.2 8 | 9 | - Fix `packer.js` log entry so that the `spec.name` field doesn't end up spread in the log entry 10 | 11 | ### 6.0.1 12 | 13 | - [#61] Fix winston formatting broken with `winston@3` upgrade 14 | 15 | ### 6.0.0 16 | 17 | - [#60] Use DynamoDB based models. 
18 | 19 | ### 5.1.0 20 | 21 | - Use updated `nsq.js-k8` to get built in retries on publish 22 | 23 | ### 5.0.0 24 | 25 | - Make retryable write-stream for writing with status-writer 26 | 27 | - [#47] Update `README.md` 28 | - Provide links to relevant projects and modules 29 | - Add badges 30 | - Update minor and patch versions of dependencies to resolve security warnings 31 | - [#46] Default documenation 32 | - Add: `CONTRUBUTING.md`, `SECURITY.md` 33 | - update `LICENSE` year 34 | - add `.github` templates 35 | - Give credits for Github templates 36 | 37 | ### 4.0.0 38 | 39 | - [#42] Extract config 40 | - `async/await` conversion 41 | - Preparing for `@wrhs/extract-config`'s eager use of config 42 | 43 | - [#40] Modernize files 44 | - `prototype` over `class` 45 | - Use arrow functions 46 | 47 | ### 2.6.0 48 | 49 | - Add timing information to status nsq messages 50 | - Add additional status nsq messages for unpack, pack, upload 51 | - Allow for `npm install` retries in nsq status messages 52 | 53 | ### 2.3.1 54 | 55 | - [#33] Default `this.target` and use it as the default for `this.rootDir`. 56 | 57 | [#46]: https://github.com/godaddy/carpenterd/pull/46 58 | [#47]: https://github.com/godaddy/carpenterd/pull/47 59 | [#60]: https://github.com/godaddy/carpenterd/pull/60 60 | [#61]: https://github.com/godaddy/carpenterd/pull/61 61 | 62 | -------------------------------------------------------------------------------- /config.example.json: -------------------------------------------------------------------------------- 1 | { 2 | "throttle": 3, 3 | "http": { 4 | "timeout": 1200000, 5 | "hostname": "127.0.0.1", 6 | "port": 1337 7 | }, 8 | "logger": { "silent": true }, 9 | "npm": { 10 | "registry": "https://registry.npmjs.org", 11 | "loglevel": "http" 12 | }, 13 | "json": { 14 | "limit": "100mb", 15 | "extended": false 16 | }, 17 | "builder": { 18 | "retries": 2, 19 | "timeout": 900000, 20 | "target": "/tmp", 21 | "source": "/tmp" 22 | }, 23 | "feedsme": "http://127.0.0.1:1212", 24 | "redis": { 25 | "uri": "redis://127.0.0.1:6379" 26 | }, 27 | "bffs": { 28 | "prefix": "wrhs", 29 | "cdn": { 30 | "prod": { 31 | "acl": "public-read", 32 | "pkgcloud": { 33 | "provider": "amazon", 34 | "endpoint": "aws.amazon.net", 35 | "keyId": "fake", 36 | "key": "fake", 37 | "forcePathBucket": true 38 | }, 39 | "url": "https://mycloudfrontURL.com" 40 | }, 41 | "test": { 42 | "acl": "public-read", 43 | "pkgcloud": { 44 | "provider": "amazon", 45 | "endpoint": "aws.amazon.net", 46 | "keyId": "fake", 47 | "key": "fake", 48 | "forcePathBucket": true 49 | }, 50 | "url": "https://mycloudfrontURL.com" 51 | }, 52 | "dev": { 53 | "acl": "public-read", 54 | "pkgcloud": { 55 | "provider": "amazon", 56 | "endpoint": "aws.amazon.net", 57 | "keyId": "fake", 58 | "key": "fake", 59 | "forcePathBucket": true 60 | }, 61 | "url": "https://mycloudfrontURL.com/" 62 | } 63 | } 64 | }, 65 | "database": { 66 | "endpoint": "aws.amazon.net", 67 | "region": "us-east-1" 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /test/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "throttle": 3, 3 | "http": { 4 | "timeout": 1200000, 5 | "hostname": "127.0.0.1", 6 | "port": 1337 7 | }, 8 | "logger": { "silent": true }, 9 | "npm": { 10 | "registry": "https://registry.npmjs.org", 11 | "loglevel": "http" 12 | }, 13 | "json": { 14 | "limit": "100mb", 15 | "extended": false 16 | }, 17 | "builder": { 18 | "retries": 2, 19 | "timeout": 900000, 20 | 
"target": "/tmp", 21 | "source": "/tmp" 22 | }, 23 | "feedsme": "http://127.0.0.1:1212", 24 | "redis": { 25 | "uri": "redis://127.0.0.1:6379" 26 | }, 27 | "bffs": { 28 | "prefix": "wrhs", 29 | "cdn": { 30 | "prod": { 31 | "acl": "public-read", 32 | "pkgcloud": { 33 | "accessKeyId": "fakeId", 34 | "secretAccessKey": "fakeKey", 35 | "provider": "amazon", 36 | "endpoint": "http://localhost:4572", 37 | "forcePathBucket": true 38 | }, 39 | "url": "http://localhost:4572" 40 | }, 41 | "test": { 42 | "acl": "public-read", 43 | "pkgcloud": { 44 | "accessKeyId": "fakeId", 45 | "secretAccessKey": "fakeKey", 46 | "provider": "amazon", 47 | "endpoint": "http://localhost:4572", 48 | "forcePathBucket": true 49 | }, 50 | "url": "http://localhost:4572" 51 | }, 52 | "dev": { 53 | "acl": "public-read", 54 | "pkgcloud": { 55 | "accessKeyId": "fakeId", 56 | "secretAccessKey": "fakeKey", 57 | "provider": "amazon", 58 | "endpoint": "http://localhost:4572", 59 | "forcePathBucket": true 60 | }, 61 | "url": "http://localhost:4572" 62 | } 63 | } 64 | }, 65 | "database": { 66 | "endpoint": "http://localhost:4569", 67 | "region": "us-east-1" 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /lib/preboots/npm.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const fs = require('fs'); 3 | const url = require('url'); 4 | const os = require('os'); 5 | 6 | const assign = Object.assign; 7 | 8 | /** 9 | * Make a global npmrc to use as the userconfig for npm installs 10 | * 11 | * @param {slay.App} app Global app object 12 | * @param {Object} options Configuration 13 | * @param {Function} next Completion object 14 | */ 15 | module.exports = function npmboot(app, options, next) { 16 | const opts = assign({ base: os.tmpdir() }, 17 | options, 18 | app.config.get('npm')); 19 | 20 | createNpmrc(opts, (err, npmrc) => { 21 | if (err) return next(err); 22 | app.npmrc = npmrc; 23 | app.after('close', remove(npmrc)); 24 | next(); 25 | }); 26 | }; 27 | 28 | /** 29 | * Remove the given npmrc path from the filesystem 30 | * @function remove 31 | * @param {String} npmrc - Path to npmrc 32 | * @returns {Function} to handle deleting the file within an understudy after 33 | */ 34 | function remove(npmrc) { 35 | return (app, options, done) => { 36 | fs.unlink(npmrc, err => { 37 | if (err && err.code === 'ENOENT') return done(); 38 | done(err); 39 | }); 40 | }; 41 | } 42 | 43 | module.exports.createNpmrc = createNpmrc; 44 | 45 | /** 46 | * Write an npmrc file with a given file 47 | * @function createNpmrc 48 | * @param {Object} opts - options for creating the npmrc 49 | * @param {Function} callback - Continuation function when completed 50 | */ 51 | function createNpmrc(opts, callback) { 52 | const parsed = url.parse(opts.registry || ''); 53 | 54 | let auth = '_auth='; 55 | let hasAuth = false; 56 | if (parsed.auth || opts.auth) { 57 | auth += Buffer.from(parsed.auth || opts.auth, 'utf8').toString('base64'); 58 | hasAuth = true; 59 | } 60 | parsed.auth = null; 61 | 62 | const npmrc = ` 63 | registry=${parsed.format()} 64 | ${hasAuth ? 
auth : ''} 65 | loglevel=${opts.loglevel}`; 66 | const npmrcPath = path.join(opts.base, '.npmrc-for-life'); 67 | 68 | fs.writeFile(npmrcPath, npmrc, (err) => { 69 | if (err) return callback(err); 70 | callback(null, npmrcPath); 71 | }); 72 | } 73 | -------------------------------------------------------------------------------- /lib/build-query.js: -------------------------------------------------------------------------------- 1 | /* eslint max-params: 0*/ 2 | const semver = require('semver'); 3 | 4 | /** 5 | * BuildQuery Object for parsing build heads 6 | * @public 7 | * @constructor 8 | * @param {Object} pkg - package object 9 | * @param {Object[]} heads - build heads for a given package in the fetched env 10 | */ 11 | function BuildQuery(pkg, heads) { 12 | this.pkg = pkg; 13 | this.heads = heads; 14 | this.latest = this.order(heads); 15 | } 16 | 17 | /** 18 | * Compute and order the heads to return the latest version 19 | * @private 20 | * @function order 21 | * @param {Object[]} heads - Array of head objects 22 | * @returns {String} latest version of the set 23 | */ 24 | BuildQuery.prototype.order = function order(heads) { 25 | return heads.filter(Boolean) 26 | .sort((a, b) => semver.compare(a.version, b.version)) 27 | .reduce((latest, v) => { 28 | return semver.lt(latest, v.version) ? v.version : latest; 29 | }, '0.0.0'); 30 | }; 31 | 32 | /** 33 | * Compute objects to be sent over nsq for missing builds 34 | * @public 35 | * @function missing 36 | * @returns {Object[]} of spec objects to be sent to nsq 37 | */ 38 | BuildQuery.prototype.missing = function () { 39 | return this.heads.map(head => { 40 | return semver.lt(head.version, this.latest) && this.specify(head); 41 | }).filter(Boolean); 42 | }; 43 | 44 | /** 45 | * Create a spec object given a head object and internal data 46 | * @private 47 | * @function specify 48 | * @param {Object} head - A given head object from a missing build 49 | * @returns {Spec} object to be sent to nsq 50 | */ 51 | BuildQuery.prototype.specify = function specify(head) { 52 | return new Spec( 53 | head.name, 54 | head.env, 55 | this.latest, 56 | head.locale, 57 | this.pkg.extended.build 58 | ); 59 | }; 60 | 61 | /** 62 | * Spec Object 63 | * @public 64 | * @constructor 65 | * @param {String} name - name of package 66 | * @param {String} env - env of package 67 | * @param {String} version - version of package 68 | * @param {String} locale - locale of package 69 | * @param {String} type - build type 70 | */ 71 | function Spec(name, env, version, locale, type) { 72 | this.name = name; 73 | this.env = env; 74 | this.version = version; 75 | this.locale = locale; 76 | this.type = type; 77 | } 78 | 79 | module.exports = BuildQuery; 80 | module.exports.Spec = Spec; 81 | -------------------------------------------------------------------------------- /lib/construct/npm/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const path = require('path'); 4 | const fs = require('fs'); 5 | const spawn = require('child_process').spawn; 6 | const once = require('one-time'); 7 | const npm = path.join(require.resolve('npm'), '..', '..', 'bin', 'npm-cli.js'); 8 | 9 | const assign = Object.assign; 10 | const statusKey = 'npm install'; 11 | 12 | exports.install = function (opts, callback) { 13 | const { log, spec, installPath, pkgDir, userconfig, statusWriter } = opts; 14 | const done = once(callback); 15 | const env = assign({}, process.env); // eslint-disable-line no-process-env 16 | 17 | env.NODE_ENV = 
~['prod', 'latest'].indexOf(spec.env) 18 | ? 'production' 19 | : 'development'; 20 | 21 | const logs = { 22 | stdout: path.join(installPath, 'stdout.log'), 23 | stderr: path.join(installPath, 'stderr.log') 24 | }; 25 | 26 | log.info('npm logs available for spec: %s@%s', spec.name, spec.version, logs); 27 | statusWriter.writeStart(statusKey, `'npm install' attempt starting`); 28 | 29 | // Danger zone - spawn the child process running ./npm.js 30 | const child = spawn(process.execPath, ['--max_old_space_size=8192', npm] 31 | .concat(['install', `--userconfig=${userconfig}`]), { 32 | env: env, 33 | cwd: pkgDir 34 | }); 35 | 36 | function onFileError(type, path) { 37 | return (err) => { 38 | log.error(`npm install ${type} filestream error for ${path}`, { 39 | message: err.message, 40 | stack: err.stack, 41 | code: err.code 42 | }); 43 | statusWriter.write(null, `ERROR: 'npm install' failed.\nmessage: ${err.message}\nstack: ${err.stack}\ncode: ${err.code}`); 44 | }; 45 | } 46 | 47 | child.on('error', done); 48 | child.stderr.pipe(fs.createWriteStream(logs.stderr)) 49 | .on('error', onFileError('stderr', logs.stderr)); 50 | child.stdout.pipe(fs.createWriteStream(logs.stdout)) 51 | .on('error', onFileError('stdout', logs.stdout)); 52 | 53 | child.on('close', (code) => { 54 | if (code !== 0) { 55 | return fs.readFile(logs.stderr, 'utf8', function (err, text) { 56 | const msg = text || (err && err.message) || `Could not read ${logs.stderr}`; 57 | // Intentionally not an Error event here as it'll get retried externally, 58 | // only if all attempts fail should it be a status error 59 | statusWriter.write(statusKey, { 60 | message: `ERROR: 'npm install' attempt exited with code: ${code}.`, 61 | details: msg 62 | }); 63 | done(new Error(`npm exited with code ${code} ${msg}`)); 64 | }); 65 | } 66 | 67 | statusWriter.write(statusKey, `'npm install' attempt completed successfully`); 68 | return done(); 69 | }); 70 | 71 | }; 72 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "carpenterd", 3 | "version": "6.1.0", 4 | "description": "Build and compile npm packages", 5 | "main": "./lib/index.js", 6 | "bin": { 7 | "carpenterd": "bin/server" 8 | }, 9 | "scripts": { 10 | "start": "bin/server", 11 | "report": "nyc report --reporter=lcov", 12 | "test": "nyc npm run test:mocha", 13 | "test:mocha": "mocha 'test/**/*.test.js'", 14 | "watch": "mocha --watch --reporter spec $(find test -path test/fixtures -prune -o -name '*.test.js')", 15 | "localstack": "docker run -it -p 4569:4569 --rm localstack/localstack", 16 | "eslint": "eslint-godaddy -c .eslintrc lib/ test/lib/", 17 | "posttest": "npm run eslint" 18 | }, 19 | "repository": { 20 | "type": "git", 21 | "url": "git@github.com/godaddy/carpenterd.git" 22 | }, 23 | "keywords": [ 24 | "build", 25 | "compile", 26 | "carpenter", 27 | "webpack", 28 | "builder", 29 | "widget", 30 | "component" 31 | ], 32 | "author": "GoDaddy.com Operating Company LLC", 33 | "license": "MIT", 34 | "dependencies": { 35 | "async": "~3.1.0", 36 | "aws-liveness": "^1.1.0", 37 | "aws-sdk": "^2.569.0", 38 | "bffs": "^7.0.0", 39 | "bl": "^4.0.0", 40 | "body-parser": "^1.19.0", 41 | "cdnup": "^4.0.0", 42 | "clean-css": "~4.2.1", 43 | "create-servers": "^3.1.0", 44 | "diagnostics": "~2.0.2", 45 | "dynamodb-x": "^1.2.3", 46 | "emits": "~3.0.0", 47 | "errs": "~0.3.2", 48 | "feedsme-api-client": "^3.0.0", 49 | "fingerprinting": "~1.0.0", 50 | "from2": 
"^2.3.0", 51 | "joi": "^14.3.1", 52 | "json-try-parse": "~1.0.0", 53 | "lodash.intersection": "~4.4.0", 54 | "lodash.omit": "^4.5.0", 55 | "minimist": "~1.2.0", 56 | "mkdirp": "^0.5.1", 57 | "node-uuid": "~1.4.3", 58 | "npm": "^6.9.0", 59 | "nsq-stream": "^2.1.0", 60 | "nsq.js-k8": "^1.2.1", 61 | "object-assign": "~4.1.1", 62 | "one-time": "~1.0.0", 63 | "p-limit": "^2.2.1", 64 | "repair": "~0.1.0", 65 | "resolve": "^1.12.0", 66 | "retryme": "^1.1.0", 67 | "safe-regex": "^2.1.1", 68 | "slay": "^3.1.1", 69 | "slay-config": "^2.3.0", 70 | "slay-contextlog": "~2.0.0", 71 | "slay-log": "^2.3.0", 72 | "tar-fs": "^2.0.0", 73 | "through2": "~3.0.1", 74 | "toml": "^3.0.0", 75 | "uglify-js": "^3.6.0", 76 | "walk": "~2.3.14", 77 | "warehouse-models": "^6.0.0", 78 | "winston": "^3.2.0" 79 | }, 80 | "devDependencies": { 81 | "assume": "^2.2.0", 82 | "assume-sinon": "^1.0.1", 83 | "babel-eslint": "^10.0.1", 84 | "clone": "^2.1.2", 85 | "eslint": "^6.1.0", 86 | "eslint-config-godaddy": "^4.0.0", 87 | "eslint-plugin-json": "^2.0.1", 88 | "eslint-plugin-mocha": "^6.0.0", 89 | "hyperquest": "^2.1.3", 90 | "mocha": "^6.2.0", 91 | "nock": "^11.7.0", 92 | "nyc": "^14.1.0", 93 | "rip-out": "^1.0.0", 94 | "sinon": "^7.3.2" 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /test/lib/preboots/scheduler.test.js: -------------------------------------------------------------------------------- 1 | /* eslint max-nested-callbacks: 0 */ 2 | /* eslint no-invalid-this: 0 */ 3 | 4 | const Writer = require('../../mocks').Writer; 5 | const clone = require('clone'); 6 | const async = require('async'); 7 | const assume = require('assume'); 8 | const sinon = require('sinon'); 9 | const packages = require('../../fixtures/packages.json'); 10 | const heads = require('../../fixtures/heads'); 11 | const path = require('path'); 12 | 13 | describe('Scheduler', function () { 14 | this.timeout(5E4); 15 | let app = require('../../../lib'); 16 | let scheduler, Package; 17 | 18 | before(function (done) { 19 | app.start({ 20 | logger: { 21 | level: 'critical' 22 | }, 23 | ensure: true, 24 | config: { 25 | file: path.join(__dirname, '..', '..', 'config.json'), 26 | overrides: { 27 | http: 0, 28 | scheduler: { 29 | topic: 'testing' 30 | } 31 | } 32 | } 33 | }, function (err, application) { 34 | if (err) return done(err); 35 | app = application; 36 | app.nsq = app.nsq || {}; 37 | app.nsq.writer = app.nsq.writer || new Writer(); 38 | app.scheduler.nsq = app.nsq; 39 | scheduler = app.scheduler; 40 | Package = scheduler.models.Package; 41 | 42 | async.parallel(packages.map(pkg => Package.create.bind(Package, pkg)), done); 43 | }); 44 | }); 45 | 46 | after(function (done) { 47 | async.parallel(packages.map(pkg => Package.remove.bind(Package, pkg)), function (error) { 48 | if (error) return done(error); 49 | app.close(done); 50 | }); 51 | }); 52 | 53 | afterEach(function () { 54 | sinon.restore(); 55 | }); 56 | 57 | it('setInterval should schedule a job that completes', function (done) { 58 | const headStub = sinon.stub(scheduler.models.BuildHead, 'findAll'); 59 | headStub.onCall(0).yieldsAsync(null, clone(heads.same['my-client-side-package'])); 60 | headStub.onCall(1).yieldsAsync(null, clone(heads.same['my-other-client-side-package'])); 61 | 62 | scheduler.once('scheduled', (err, counts) => { 63 | assume(err).is.falsey(); 64 | const keys = Object.keys(counts); 65 | assume(keys).length(2); 66 | keys.forEach(k => { 67 | assume(counts[k]).equals(0); 68 | }); 69 | scheduler.clear('test'); 70 | done(); 
71 | }); 72 | scheduler.setInterval('test', 1000); 73 | }); 74 | 75 | it('clear should clear any setIntervals that have been called', function () { 76 | scheduler.setInterval('test'); 77 | scheduler.setInterval('prod'); 78 | scheduler.setInterval('dev'); 79 | assume(Array.from(scheduler.intervals.keys())).length(3); 80 | scheduler.clear(); 81 | assume(Array.from(scheduler.intervals.keys())).length(0); 82 | }); 83 | 84 | it('should trigger a single build for each package when running schedule', function (done) { 85 | const headStub = sinon.stub(scheduler.models.BuildHead, 'findAll'); 86 | headStub.onCall(0).yieldsAsync(null, clone(heads.missing['my-client-side-package'])); 87 | headStub.onCall(1).yieldsAsync(null, clone(heads.missing['my-other-client-side-package'])); 88 | 89 | scheduler.schedule('test', function (err, counts) { 90 | assume(err).is.falsey(); 91 | const keys = Object.keys(counts); 92 | assume(keys).length(2); 93 | keys.forEach(k => { 94 | assume(counts[k]).equals(1); 95 | }); 96 | done(); 97 | }); 98 | }); 99 | 100 | it('should trigger zero builds when there are no behind packages', function (done) { 101 | const headStub = sinon.stub(scheduler.models.BuildHead, 'findAll'); 102 | headStub.onCall(0).yieldsAsync(null, clone(heads.same['my-client-side-package'])); 103 | headStub.onCall(1).yieldsAsync(null, clone(heads.same['my-other-client-side-package'])); 104 | 105 | scheduler.schedule('test', function (err, counts) { 106 | assume(err).is.falsey(); 107 | const keys = Object.keys(counts); 108 | assume(keys).length(2); 109 | keys.forEach(k => { 110 | assume(counts[k]).equals(0); 111 | }); 112 | done(); 113 | }); 114 | }); 115 | }); 116 | -------------------------------------------------------------------------------- /lib/routes/index.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const joi = require('joi'); 4 | const errs = require('errs'); 5 | 6 | const buildSchema = joi.object().keys({ 7 | 'name': joi.string().required(), 8 | 'dist-tags': joi.object().required(), 9 | '_attachments': joi.object().required() 10 | }).unknown(true); 11 | 12 | const buildV2Schema = joi.object().keys({ 13 | promote: joi.boolean(), 14 | data: joi.object().keys({ 15 | 'name': joi.string().required(), 16 | 'dist-tags': joi.object().required(), 17 | '_attachments': joi.object().required() 18 | }).unknown(true) 19 | }); 20 | 21 | // 22 | // Define routes. 
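// (Orientation: per the joi schemas above, /build expects `npm publish`-style JSON
// with `name`, `dist-tags` and `_attachments`, while /v2/build wraps that same
// document as `{ promote, data }`.)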
23 | // 24 | module.exports = function routes(app, options, done) { 25 | app.perform('actions', function performRoutes(next) { 26 | app.contextLog.verbose('Adding application routes'); 27 | 28 | // 29 | // ### /healthcheck 30 | // Simple healthcheck 31 | // 32 | app.routes.get('/healthcheck(.html)?', function (req, res) { 33 | res.end('ok'); 34 | }); 35 | 36 | app.routes.post('/catchup', function catchup(req, res) { 37 | const env = req.body.env; 38 | if (!env) { 39 | return app.terminate(res, errs.create('No env specified', { 40 | code: 400 41 | })); 42 | } 43 | 44 | // MAKE ASYNC/AWAIT 45 | app.scheduler.schedule(env, (err, counts) => { 46 | if (err) return app.terminate(res, err); 47 | 48 | app.contextLog.info('Scheduled catch up builds', counts); 49 | return res.status(201).json(counts); 50 | }); 51 | }); 52 | 53 | // 54 | // TODO: Add some retry to the API client underneath 55 | // 56 | function change({ data, promote }) { 57 | // A specific indicator so that we know it was a publish that didn't come from feedsme 58 | if (!data.env) data.__published = true; 59 | 60 | // MAKE ASYNC/AWAIT 61 | app.feedsme.change(data.env || 'dev', { data: { data, promote } }, function posted(error) { 62 | return error 63 | ? app.contextLog.error('Failed to process changes', error) 64 | : app.contextLog.info('Changes processed'); 65 | }); 66 | } 67 | 68 | // 69 | // ### /v2/build 70 | // Trigger a build and optionally promote it. This route assume `npm publish` 71 | // like JSON that contains the package.json and the package's content as binary 72 | // blob in `attachments`. 73 | // 74 | app.routes.post('/v2/build', function buildV2(req, res) { 75 | // 76 | // Check some basic properties that always should be present. 77 | // 78 | joi.validate(req.body || {}, buildV2Schema, function validated(error, buildOpts) { 79 | if (error) { 80 | return void app.terminate(res, error); 81 | } 82 | 83 | // MAKE ASYNC/AWAIT 84 | return app.construct.build(buildOpts, function building(err) { 85 | if (err) { 86 | return void app.contextLog.error('Failed to build', err); 87 | } 88 | 89 | app.contextLog.info('Build finished, sending change for %s', buildOpts.data.name); 90 | return void change(buildOpts); 91 | }).pipe(res); 92 | }); 93 | }); 94 | 95 | // 96 | // ### /build 97 | // Trigger a build. This route assume `npm publish` like JSON that contains 98 | // the package.json and the package's content as binary blob in `attachments`. 99 | // 100 | app.routes.post('/build', function build(req, res) { 101 | // 102 | // Check some basic properties that always should be present. 103 | // 104 | joi.validate(req.body || {}, buildSchema, function validated(error, data) { 105 | const promote = true; // always promote for v1 106 | 107 | if (error) { 108 | return void app.terminate(res, error); 109 | } 110 | 111 | // MAKE ASYNC/AWAIT 112 | return app.construct.build({ data, promote }, function building(err) { 113 | if (err) { 114 | return void app.contextLog.error('Failed to build', err); 115 | } 116 | 117 | app.contextLog.info('Build finished, sending change for %s', data.name); 118 | return void change({ data, promote }); 119 | }).pipe(res); 120 | }); 121 | }); 122 | 123 | next(); 124 | }, done); 125 | }; 126 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Everyone is welcome to contribute to GoDaddy's Open Source Software. 
Contributing doesn’t just mean 4 | submitting pull requests. To get involved, you can report or triage bugs and participate in 5 | discussions on the evolution of each project. 6 | 7 | No matter how you want to get involved, we ask that you first learn what’s expected of anyone who 8 | participates in the project by reading the Contribution Guidelines. 9 | 10 | ## Answering Questions 11 | 12 | One of the most important and immediate ways you can support this project is to answer questions on 13 | [Github][issues]. Whether you’re helping a newcomer understand a feature or troubleshooting an edge case with a 14 | seasoned developer, your knowledge and experience with JS can go a long way to help others. 15 | 16 | ## Reporting Bugs 17 | 18 | **Do not report potential security vulnerabilities here. Refer to [SECURITY.md](./SECURITY.md) for more 19 | details about the process of reporting security vulnerabilities.** 20 | 21 | Before submitting a ticket, please be sure to have a simple replication of the behavior. 22 | If the issue is isolated to one of the dependencies of this project, please create a Github issue in that project. 23 | All dependencies are open source software and can be easily found through [npm]. 24 | 25 | Submit a ticket for your issue, assuming one does not already exist: 26 | - Create it on our [Issue Tracker][issues] 27 | - Clearly describe the issue by following the template layout 28 | - Make sure to include steps to reproduce the bug. 29 | - A reproducible (unit) test could be helpful in solving the bug. 30 | - Describe the environment that (re)produced the problem. 31 | 32 | > For a bug to be actionable, it needs to be reproducible. If you or contributors can’t reproduce the bug, 33 | > try to figure out why. Please take care to stay involved in discussions around solving the problem. 34 | 35 | ## Triaging bugs or contributing code 36 | 37 | If you're triaging a bug, try to reduce it. Once a bug can be reproduced, reduce it to the smallest amount of 38 | code possible. Reasoning about a sample or unit test that reproduces a bug in just a few lines of code is 39 | easier than reasoning about a longer sample. 40 | 41 | From a practical perspective, contributions are as simple as: 42 | - Forking the repository on GitHub. 43 | - Making changes to your forked repository. 44 | - When committing, reference your issue (if present) and include a note about the fix. 45 | - If possible, and if applicable, please also add/update unit tests for your changes. 46 | - Push the changes to your fork and submit a pull request to the 'master' branch of the project's repository. 47 | 48 | If you are interested in making a large change and feel unsure about its overall effect, 49 | please make sure to first discuss the change and reach a consensus with core contributors. 50 | Then ask about the best way to go about making the change. 51 | 52 | ## Code Review 53 | 54 | Any open source project relies heavily on code review to improve software quality: 55 | 56 | > All significant changes, by all developers, must be reviewed before they are committed to the repository. 57 | > Code reviews are conducted on GitHub through comments on pull requests or commits. 58 | > The developer responsible for a code change is also responsible for making all necessary review-related changes. 59 | 60 | Sometimes code reviews will take longer than you would hope for, especially for larger features. 61 | Here are some accepted ways to speed up review times for your patches: 62 | 63 | - Review other people’s changes.
If you help out, everybody will be more willing to do the same for you. 64 | Goodwill is our currency. 65 | - Split your change into multiple smaller changes. The smaller your change, the higher the probability that 66 | somebody will take a quick look at it. 67 | - Remember that you’re asking for valuable time from other professional developers. 68 | 69 | **Note that anyone is welcome to review and give feedback on a change, but only people with commit access 70 | to the repository can approve it.** 71 | 72 | ## Attribution of Changes 73 | 74 | When contributors submit a change to this project, after that change is approved, 75 | other developers with commit access may commit it for the author. When doing so, 76 | it is important to retain correct attribution of the contribution. Generally speaking, 77 | Git handles attribution automatically. 78 | 79 | ## Code Documentation 80 | 81 | Ensure that every function in `carpenterd` is documented and follows the standards set by [JSDoc]. Finally, 82 | please stick to the code style as defined by the [Godaddy JS styleguide][style]. 83 | 84 | # Additional Resources 85 | 86 | - [General GitHub Documentation](https://help.github.com/) 87 | - [GitHub Pull Request documentation](https://help.github.com/send-pull-requests/) 88 | - [JSDoc] 89 | 90 | [issues]: https://github.com/godaddy/carpenterd/issues 91 | [JSDoc]: http://usejsdoc.org/ 92 | [npm]: http://npmjs.org/ 93 | [style]: https://github.com/godaddy/javascript/#godaddy-style 94 | -------------------------------------------------------------------------------- /lib/preboots/scheduler.js: -------------------------------------------------------------------------------- 1 | const EE = require('events'); 2 | const async = require('async'); 3 | const once = require('one-time'); 4 | const BuildQuery = require('../build-query'); 5 | const nsqStream = require('nsq-stream'); 6 | const from = require('from2'); 7 | 8 | const assign = Object.assign; 9 | 10 | /** 11 | * Scheduler 12 | * @public 13 | * @constructor 14 | * @param {Object} options - Configuration object 15 | * @param {Object} options.log - Logger 16 | * @param {Object} options.nsq - NSQ object 17 | * @param {String} options.topic - NSQ topic to dispatch onto 18 | * @param {Object} options.models - Data models 19 | * @param {Number} [options.concurrency=5] - Number of jobs to run concurrently 20 | * @param {Number} [options.interval=3600000] - Default interval between catch-up jobs in ms 21 | */ 22 | class Scheduler extends EE { 23 | constructor(options) { 24 | super(); 25 | EE.call(this); 26 | this.log = options.log; 27 | this.nsq = options.nsq; 28 | this.topic = options.topic; 29 | this.models = options.models; 30 | this.conc = options.concurrency || 5; 31 | this.defaultInterval = options.interval || 60 * 60 * 1000; 32 | 33 | this.intervals = new Map(); 34 | } 35 | 36 | /** 37 | * Set an interval to schedule catch up jobs for the given env 38 | * @public 39 | * @function setInterval 40 | * @param {String} env - Environment 41 | * @param {Number} [time] - milliseconds of interval 42 | */ 43 | setInterval(env, time) { 44 | time = time || this.defaultInterval; 45 | const intervalId = setInterval(() => { 46 | this.emit('schedule'); 47 | this.schedule(env, (err, counts) => { 48 | this.emit('scheduled', err, counts); 49 | if (err) return this.log.error('Failed to schedule jobs for %s', env, { 50 | error: err.message, 51 | stack: err.stack 52 | }); 53 | this.log.info('Successfully scheduled catch up jobs for %s', env, counts); 54 | }); 55 | }, time); 
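    // Keep the timer handle per environment so that `clear(env)` can cancel
    // the catch-up schedule for just that environment later.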
56 | 57 | this.intervals.set(env, intervalId); 58 | } 59 | 60 | /** 61 | * Clear the given setIntervals for the given env or all of them 62 | * @public 63 | * @function clear 64 | * @param {String} [env] - Optional env to pass 65 | * @returns {undefined} 66 | */ 67 | clear(env) { 68 | if (env) return this._clear(env); 69 | for (const key of this.intervals.keys()) { 70 | this._clear(key); 71 | } 72 | } 73 | 74 | /** 75 | * Core clearing of setIntervals that cleans up map value as well 76 | * @private 77 | * @function _clear 78 | * @param {String} env - Environment 79 | */ 80 | _clear(env) { 81 | if (this.intervals.has(env)) clearInterval(this.intervals.get(env)); 82 | this.intervals.delete(env); 83 | } 84 | 85 | /** 86 | * Schedule catch up jobs for the given environment over nsq 87 | * @public 88 | * @function schedule 89 | * @param {String} env - Environment to schedule for 90 | * @param {Function} callback - Continuation to call when completed 91 | */ 92 | schedule(env, callback) { 93 | const counts = {}; 94 | function done(err) { 95 | if (err) return callback(err); 96 | callback(null, counts); 97 | } 98 | // 99 | // Scheduling algorithm 100 | // 101 | // 1. Fetch all packages. 102 | // 2. Fetch all build-heads for each package for a given environment 103 | // 3. If a build head version is less than any of its peer build heads, 104 | // trigger a build for that given locale 105 | // 106 | this.packages((err, packages) => { 107 | if (err) return callback(err); 108 | async.eachLimit(packages, this.conc, (pkg, next) => { 109 | const cb = once(next); 110 | const name = pkg.name; 111 | this.models.BuildHead.findAll({ name, env }, (err, heads) => { 112 | if (err) return cb(err); 113 | if (!heads || !heads.length) return cb(); 114 | 115 | const query = new BuildQuery(pkg, heads); 116 | const writer = nsqStream.createWriteStream(this.nsq.writer, this.topic); 117 | 118 | const missing = query.missing(); 119 | const len = missing.length; 120 | this.log.info('%d missing builds. 
triggering new builds for %s', len, name); 121 | counts[name] = len; 122 | 123 | from.obj(missing) 124 | .pipe(writer) 125 | .on('error', cb) 126 | .once('finish', cb); 127 | }); 128 | }, done); 129 | }); 130 | } 131 | 132 | /** 133 | * Fetch all packages we currently have stored using our special cache table 134 | * for fast fetches 135 | * @public 136 | * @function packages 137 | * @param {Function} callback - Continuation function to call when completed 138 | */ 139 | packages(callback) { 140 | this.models.Package.findAll({}, callback); 141 | } 142 | } 143 | 144 | /** 145 | * scheduler preboot 146 | * @param {slay.App} app Slay application 147 | * @param {Object} options Additional configuration 148 | * @param {Function} callback Continuation function 149 | */ 150 | module.exports = function schedboot(app, options, callback) { 151 | app.scheduler = new Scheduler(assign({ 152 | models: app.models, 153 | nsq: app.nsq, 154 | log: app.log, 155 | topic: app.config.get('nsq:topic') 156 | }, options, app.config.get('scheduler'))); 157 | 158 | callback(); 159 | }; 160 | 161 | module.exports.Scheduler = Scheduler; 162 | -------------------------------------------------------------------------------- /lib/construct/packer.js: -------------------------------------------------------------------------------- 1 | const through = require('through2'); 2 | const once = require('one-time'); 3 | const retry = require('retryme'); 4 | const zlib = require('zlib'); 5 | const npm = require('./npm'); 6 | const tar = require('tar-fs'); 7 | const path = require('path'); 8 | const fs = require('fs'); 9 | 10 | class Packer { 11 | constructor(options) { 12 | this.retry = options.retry; 13 | this.log = options.log; 14 | this.cdnup = options.cdnup; 15 | this.npmrc = options.npmrc; 16 | } 17 | 18 | /** 19 | * Unpack the base64 string content into a proper directory of code 20 | * @param {Object} options options for the process 21 | * @param {String} options.content TBD 22 | * @param {String} options.installPath TBD 23 | * @param {Object} options.statusWriter TBD 24 | * @returns {Promise} completion handler 25 | */ 26 | unpack({ content, installPath, statusWriter }) { 27 | const stream = through(); 28 | const statusKey = 'unpacking'; 29 | statusWriter.writeStart(statusKey); 30 | 31 | return new Promise((resolve, reject) => { 32 | const succeed = once(statusWriter.writeWrap(statusKey, resolve)); 33 | const fail = once(statusWriter.writeWrap(statusKey, reject)); 34 | 35 | stream 36 | .pipe(zlib.Unzip()) // eslint-disable-line new-cap 37 | .once('error', fail) 38 | .pipe(tar.extract(installPath)) 39 | .once('error', fail) 40 | .once('finish', succeed); 41 | 42 | stream.end(Buffer.from(content, 'base64')); 43 | }); 44 | } 45 | 46 | /** 47 | * Install the dependencies of the package with npm. 48 | * Uses the provided environment. 49 | * 50 | * @param {Object} options configuration 51 | * @param {Object} options.spec Spec 52 | * @param {String} options.installPath os.tmpdir base path to run the install in. 
53 | * @param {StatusWriter} options.statusWriter The writer for the status-api 54 | * 55 | * @returns {Promise} completion handler 56 | */ 57 | install({ spec, installPath, statusWriter }) { 58 | const pkgDir = path.join(installPath, 'package'); 59 | const statusKey = 'npm install-all'; 60 | 61 | statusWriter.writeStart(statusKey); 62 | const op = retry.op(this.retry); 63 | return new Promise((resolve, reject) => { 64 | op.attempt(next => { 65 | npm.install({ 66 | log: this.log, 67 | userconfig: this.npmrc, 68 | installPath, 69 | pkgDir, 70 | spec, 71 | statusWriter 72 | }, next); 73 | }, statusWriter.writeWrap(statusKey, (err) => { 74 | err ? reject(err) : resolve(); 75 | })); 76 | }); 77 | } 78 | 79 | /** 80 | * Upload the given file to our configured endpoint 81 | * 82 | * @param {Object} options configuration 83 | * @param {Object} options.spec Defines this package 84 | * @param {String} options.tarball Path to tarball 85 | * @param {StatusWriter} options.statusWriter The writer for the status-api 86 | * 87 | * @returns {Promise} completion handler 88 | */ 89 | upload({ spec, tarball, statusWriter }) { 90 | if (!this.cdnup) return Promise.resolve(); 91 | const statusKey = 'uploading'; 92 | statusWriter.writeStart(statusKey); 93 | const filePath = `${encodeURIComponent(spec.name)}-${spec.version}.tgz`; 94 | 95 | const logOpts = { tarball, ...spec }; 96 | return new Promise((resolve) => { 97 | this.cdnup.upload(tarball, filePath, (err, url) => { 98 | statusWriter.writeMaybeError(statusKey, err); 99 | 100 | if (err) { 101 | return this.log.error( 102 | 'Failed to upload tarball for package', 103 | { error: err.message, ...logOpts } 104 | ); 105 | } 106 | 107 | this.log.info('Uploaded tarball for package', { url, ...logOpts }); 108 | resolve(); 109 | }); 110 | }); 111 | } 112 | 113 | /** 114 | * Take the given source directory and create a tarball at the target directory 115 | * 116 | * @param {String} pkgDir Source directory 117 | * @param {String} tarball Target directory 118 | * @param {StatusWriter} statusWriter The writer for the status-api 119 | * 120 | * @returns {Promise} completion handler 121 | * @api public 122 | */ 123 | pack({ pkgDir, tarball, statusWriter }) { 124 | return new Promise((resolve, reject) => { 125 | const statusKey = 'packing'; 126 | const succeed = once(statusWriter.writeWrap(statusKey, resolve)); 127 | const fail = once(statusWriter.writeWrap(statusKey, reject)); 128 | statusWriter.writeStart(statusKey); 129 | 130 | tar.pack(pkgDir) 131 | .once('error', fail) 132 | .pipe(zlib.Gzip()) // eslint-disable-line new-cap 133 | .once('error', fail) 134 | .pipe(fs.createWriteStream(tarball)) 135 | .once('error', fail) 136 | .once('finish', succeed); 137 | }); 138 | } 139 | 140 | /** 141 | * Performs a full npm install & repack operation: 142 | * 1. Unpack the npm publish payload tarball 143 | * 2. Run `npm install` in that directory 144 | * 3. Create a new tarball from that directory (this includes node_modules) 145 | * 4. 
Upload that re-packed tarball to S3-compatible CDN 146 | * 147 | * @param {Object} spec - specification for build 148 | * @param {String} content – base64 encoded tarball content 149 | * @param {Object} paths - paths object 150 | * @param {StatusWriter} statusWriter - The writer for the status-api 151 | */ 152 | async repack(spec, content, paths, statusWriter) { 153 | const { tarball, installPath } = paths; 154 | const pkgDir = path.join(installPath, 'package'); 155 | 156 | this.log.info('Begin npm install & tarball repack', spec); 157 | 158 | await this.unpack({ content, installPath, statusWriter }); 159 | await this.install({ spec, installPath, statusWriter }); 160 | await this.pack({ pkgDir, tarball, statusWriter }); 161 | await this.upload({ spec, tarball, statusWriter }); 162 | 163 | return { install: installPath, tarball }; 164 | } 165 | } 166 | 167 | module.exports = Packer; 168 | -------------------------------------------------------------------------------- /lib/construct/status-writer.js: -------------------------------------------------------------------------------- 1 | const { performance } = require('perf_hooks'); 2 | const nsqStream = require('nsq-stream'); 3 | 4 | const eventTypes = { 5 | error: 'error', 6 | default: 'event' 7 | }; 8 | 9 | /** 10 | * Writes to the status-api NSQ topic 11 | * 12 | * @class StatusWriter 13 | */ 14 | class StatusWriter { 15 | /** 16 | * Constructor for StatusWriter 17 | * 18 | * @param {Object} opts Options 19 | * @param {Object} [opts.nsq] The nsq options 20 | * @param {Object} [opts.nsq.writer] The optional NSQ writer to also write 21 | * @param {string} [opts.nsq.topic] The topic used to write the NSQ message 22 | * @param {Object} [opts.metadata] Additional metadata 23 | * @constructor 24 | */ 25 | constructor(opts) { 26 | const { nsq = {} } = opts || {}; 27 | this.log = opts.log || { info: function () {}, error: function () {} }; 28 | this.nsqStream = nsq.writer && nsq.topic && nsqStream.createWriteStream(nsq.writer, nsq.topic); 29 | if (this.nsqStream) this.nsqStream.on('error', this._connectionError.bind(this)); 30 | this.metadata = opts && opts.metadata || {}; 31 | this.buildsCompleted = 0; 32 | this.timings = new Map(); 33 | } 34 | 35 | /** 36 | * Error handler for nsqStream. This should really never get called but its 37 | * for the just in case 38 | * 39 | * @param {Error} error Error object from the stream 40 | * @private 41 | */ 42 | _connectionError(error) { 43 | this.log.error('Error from nsq-stream writing status after retries', { message: error.message, stack: error.stack }); 44 | } 45 | 46 | /** 47 | * Writes a message to the status API nsq stream, also starts a performance 48 | * timer to track duration for some event 49 | * 50 | * @param {String} key The key to use to start a new performance timer 51 | * @param {Object|String?} data Data for the event to write, an object or a 52 | * string to be used for the message, if none is provided, one will be constructed based off the key 53 | * @public 54 | */ 55 | writeStart(key, data) { 56 | this.timings.set(key, performance.now()); 57 | this.write(null, data || `'${key}' starting`); 58 | } 59 | 60 | /** 61 | * Writes a message to the status API nsq stream, stops the performance 62 | * timer and adds the timing information to the status message. The 63 | * status message will follow a standard format. The type of status 64 | * message written will be based on whether an error was passed. 
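   *
   * @example
   * // Usage sketch (the 'packing' key and doPack() are illustrative only):
   * writer.writeStart('packing');
   * doPack((err) => writer.writeMaybeError('packing', err));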
65 | * 66 | * @param {String} key The key used to when starting the performance timer 67 | * @param {Error?} err The error (if any). Passing an error will cause 68 | * this to be written as an error event. 69 | * @public 70 | */ 71 | writeMaybeError(key, err) { 72 | if (err) { 73 | this.write(key, { 74 | message: `ERROR: '${key}' exited with code: ${err}.`, 75 | details: err, 76 | event: eventTypes.error 77 | }); 78 | return; 79 | } 80 | 81 | this.write(key); 82 | } 83 | 84 | /** 85 | * Wraps a callback function with a status writer that will complete a performance timer 86 | * 87 | * @param {String} key The key used when starting the performance timer 88 | * @param {Function} done The function to call after writing the event 89 | * @returns {Function} A new callback function that wraps the given callback. 90 | * @public 91 | */ 92 | writeWrap(key, done) { 93 | return (err, data) => { 94 | this.writeMaybeError(key, err); 95 | done(err, data); 96 | }; 97 | } 98 | 99 | /** 100 | * Stops the performance timer and returns the timing information. 101 | * 102 | * @param {String} key The key used when starting the performance timer 103 | * @returns {Object} The performance timing 104 | * @private 105 | */ 106 | getTiming(key) { 107 | const timing = performance.now() - this.timings.get(key); 108 | this.timings.delete(key); 109 | return timing; 110 | } 111 | 112 | /** 113 | * Writes an event to the status API nsq stream. 114 | * 115 | * @param {String?} key The key used when starting the performance timer. 116 | * If providing, timing information will be added to the status message 117 | * @param {Object|String} data Data for the event to write, an object or a string to be used for the message 118 | * @param {String} [data.locale] The locale for the build 119 | * @param {String} [data.message] The human-readable message being written 120 | * @param {Number} [data.progress] The calculated progress for this build 121 | * @returns {undefined} Nothing whatsoever 122 | * @public 123 | */ 124 | write(key, data) { 125 | if (!this._isWriteable()) { 126 | return; 127 | } 128 | 129 | if (key && !data) { 130 | data = { message: `'${key}' completed successfully` }; 131 | } 132 | 133 | if (!data) { 134 | return; 135 | } 136 | 137 | if (typeof data === 'string') { 138 | data = { message: data }; 139 | } 140 | 141 | const payload = { 142 | eventType: data.event === 'error' ? eventTypes.error : eventTypes.default, 143 | message: data.message, 144 | locale: data.locale, 145 | details: data.details 146 | }; 147 | 148 | if (key) { 149 | payload.timing = this.getTiming(key); 150 | } 151 | 152 | this.nsqStream.write(this._makeSpec(payload)); 153 | } 154 | 155 | /** 156 | * Writes the end status to the status API nsq stream. 157 | * 158 | * @param {String} type The event type (ignored, error, queued) 159 | * @param {Error} [err] The error object that caused the end to occur 160 | * @public 161 | */ 162 | end(type, err) { 163 | if (!this._isWriteable()) { 164 | return; 165 | } 166 | 167 | this.nsqStream.end(this._makeSpec({ 168 | eventType: type, 169 | total: this.buildsCompleted, 170 | message: err ? 
err.message : 'Builds Queued' 171 | })); 172 | } 173 | 174 | /** 175 | * Test to see if writes should be allowed 176 | * 177 | * @returns {Boolean} True if writes are allowed 178 | * @private 179 | */ 180 | _isWriteable() { 181 | return this.nsqStream && !this.nsqStream._writableState.ended; 182 | } 183 | 184 | /** 185 | * Creates a spec to be written to the nsq stream 186 | * 187 | * @param {Object} [otherFields={}] Additional fields of the spec to add, overwrites defaults 188 | * @returns {Object} The constructed spec 189 | * @private 190 | */ 191 | _makeSpec(otherFields = {}) { 192 | const { name, env, version, type: buildType } = this.metadata || {}; 193 | return { 194 | eventType: eventTypes.default, 195 | name, 196 | env, 197 | version, 198 | buildType, 199 | ...otherFields 200 | }; 201 | } 202 | } 203 | 204 | module.exports = StatusWriter; 205 | 206 | -------------------------------------------------------------------------------- /lib/construct/progress.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const through = require('through2'); 4 | const once = require('one-time'); 5 | const StatusWriter = require('./status-writer'); 6 | 7 | /** 8 | * Stream the build progress which can be streamed to chunk-encoding responses. 9 | * 10 | * @Constructor 11 | * @param {Object} opts Options 12 | * @param {Object} [opts.nsq] The nsq options 13 | * @param {Object} [opts.nsq.writer] The optional NSQ writer to also write 14 | * @param {string} [opts.nsq.topic] The topic used to write the NSQ message 15 | * @param {Object} [opts.metadata] Additional metadata 16 | * @param {Function} fn Completion callback. 17 | * @api private 18 | */ 19 | class Progress { 20 | constructor(opts = {}, fn) { 21 | this.fn = once(fn || function nope() {}); 22 | this.statusWriter = new StatusWriter(opts); 23 | 24 | // 25 | // Prepare counters for progress reports. 26 | // 27 | this.map = Object.create(null); 28 | 29 | // 30 | // Create a readable and writable stream. 31 | // 32 | this.stream = through(); 33 | } 34 | 35 | /** 36 | * We actually have a single stream that tracks progress for `n` builds so we 37 | * need to handle that. 38 | * 39 | * @param {String} id Unique v4 id. 40 | * @api pubic 41 | */ 42 | init(id) { 43 | this.map[id] = { 44 | n: 0, 45 | total: 0 46 | }; 47 | } 48 | 49 | /** 50 | * Write the error and end the stream. 51 | * 52 | * @param {Error} error Failure in the build process. 53 | * @param {String} id Optional unique id v4. 54 | * @param {Object} [opts] Options object to be merged into the write 55 | * @param {String} [opts.locale] Locale of the build being started 56 | * @returns {Progress} fluent interface 57 | * @api public 58 | */ 59 | fail(error, id, opts = {}) { 60 | // 61 | // Could be an object or string 62 | // TODO: Use something like `errs` 63 | // 64 | if (!(error instanceof Error)) { 65 | error = typeof error !== 'string' 66 | ? error.message || error.code + ': ' + error.path 67 | : error; 68 | 69 | error = new Error(error); 70 | } 71 | 72 | return this.write({ 73 | ...opts, 74 | message: error.message, 75 | event: 'error' 76 | }, id); 77 | } 78 | 79 | /** 80 | * Start progress with some default data. 81 | * 82 | * @param {String} id Optional unique id v4. 
83 | * @param {Number} n Optional number of steps to configure the progress instance 84 | * @param {Object} [opts] Options object to be merged into the write 85 | * @param {String} [opts.locale] Locale of the build being started 86 | * @returns {Progress} fluent interface 87 | * @api public 88 | */ 89 | start(id, n, opts = {}) { 90 | this.init(id); 91 | 92 | if (n) this.steps(id, n); 93 | 94 | return this.write({ 95 | ...opts, 96 | event: 'task', 97 | message: 'start', 98 | progress: 0 99 | }, id); 100 | } 101 | 102 | /** 103 | * Increment total with n steps for progress indication. 104 | * 105 | * @param {String} id Optional unique id v4. 106 | * @param {Number} n Steps to add. 107 | * @returns {Progress} fluent interface 108 | * @api public 109 | */ 110 | steps(id, n) { 111 | if (!this.map[id]) this.init(id); 112 | this.map[id].total += n; 113 | 114 | return this; 115 | } 116 | 117 | /** 118 | * End progress with some default data and end the stream. 119 | * 120 | * @param {String} id Optional unique id v4. 121 | * @param {Object} [opts] Options object to be merged into the write 122 | * @param {String} [opts.locale] Locale of the build being started 123 | * @returns {Progress} fluent interface 124 | * @api public 125 | */ 126 | done(id, opts = {}) { 127 | this.statusWriter.buildsCompleted++; 128 | this.cleanup(id); 129 | return this.write({ 130 | ...opts, 131 | event: 'task', 132 | message: 'Successfully queued build', 133 | progress: 100 134 | }, id); 135 | } 136 | 137 | /** 138 | * Notify the build was not executed but ignored. 139 | * 140 | * @returns {Progress} fluent interface 141 | * @api public 142 | */ 143 | ignore() { 144 | this.fn(); 145 | this.cleanup(); 146 | this.statusWriter.end('ignored'); 147 | return this.write({ 148 | event: 'task', 149 | message: 'ignored', 150 | progress: -1 151 | }).end(); 152 | } 153 | 154 | /** 155 | * Write the JSON progress data to the stream. 156 | * 157 | * @param {Object} data JSON data to send to the user. If a string is sent it's assumed that is the message being written. 158 | * @param {String} id Optional unique id v4. 159 | * @param {Object} [options] Options object 160 | * @param {Boolean} [options.skipNsq] True if no NSQ event should be written, typically used for end-states 161 | * @returns {Progress} fluent interface 162 | * @api public 163 | */ 164 | write(data, id, options = {}) { 165 | if (typeof data === 'string') { 166 | data = { message: data }; 167 | } else if (typeof data !== 'object') { 168 | data = {}; 169 | } 170 | id = data.id || id; 171 | 172 | if (data.progress && this.map[id]) { 173 | this.map[id].n++; 174 | data.progress = this.state(id); 175 | } 176 | 177 | data.id = data.id || id || 'generic'; 178 | data.timestamp = Date.now(); 179 | 180 | if (!this.stream._writableState.ended) { 181 | this.stream.write( 182 | Buffer.from(JSON.stringify(data) + '\n', 'utf-8') 183 | ); 184 | } 185 | 186 | if ((!options || !options.skipNsq) 187 | && data.message !== 'start') this.statusWriter.write(null, data); 188 | 189 | return this; 190 | } 191 | 192 | /** 193 | * Calculate the progress for the provided build. 194 | * 195 | * @param {String} id Unique id v4. 196 | * @returns {Number} Progress. 197 | * @api public 198 | */ 199 | state(id) { 200 | if (!this.map[id]) return 0; 201 | return Math.round((this.map[id].n / this.map[id].total) * 100); 202 | } 203 | 204 | /** 205 | * Close the stream. 
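   * The terminal status is also pushed to the status-api stream: 'error' when an
   * err is supplied, 'queued' otherwise (via statusWriter.end below).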
206 | * 207 | * @param {Error} err Optional error 208 | * @returns {Progress} fluent interface 209 | * @api public 210 | */ 211 | end(err) { 212 | this.fn(err); 213 | 214 | if (err) this.write({ 215 | message: err.message, 216 | type: 'error' 217 | }, null, { skipNsq: true }); 218 | 219 | this.stream.end(); 220 | this.statusWriter.end(err ? 'error' : 'queued', err); 221 | 222 | return this; 223 | } 224 | 225 | /** 226 | * Delete all build progress per id. 227 | * 228 | * @param {String} id Optional unique id v4. 229 | * @api public 230 | */ 231 | cleanup(id) { 232 | if (id) delete this.map[id]; 233 | else { Object.keys(this.map).forEach(key => delete this.map[key]); } 234 | } 235 | 236 | /** 237 | * Wrap the pipe method to ensure the stream is resumed. 238 | * 239 | * @param {Stream} destination Writable stream. 240 | * @returns {Stream} provided destination. 241 | * @api public 242 | */ 243 | pipe(destination) { 244 | return this.stream.pipe(destination); 245 | } 246 | } 247 | 248 | // 249 | // Export the Progress constructor. 250 | // 251 | module.exports = Progress; 252 | -------------------------------------------------------------------------------- /lib/construct/builder.js: -------------------------------------------------------------------------------- 1 | const uuid = require('node-uuid'); 2 | const assign = require('object-assign'); 3 | const omit = require('lodash.omit'); 4 | const limiter = require('p-limit'); 5 | 6 | class Builder { 7 | constructor(context) { 8 | this.context = context; 9 | } 10 | 11 | /** 12 | * 13 | * Handle logging and resetting of state for a given build 14 | * @function _buildError 15 | * @param {Error} err - Error that occurred 16 | * @param {Object} spec - Build spec 17 | * @api private 18 | */ 19 | _buildError(err, spec) { 20 | const { app } = this.context; 21 | app.contextLog.error('Build error occurred, someone should know %s', err.message, { 22 | name: spec.name, 23 | env: spec.env, 24 | version: spec.version 25 | }); 26 | const key = this.context._key(spec); 27 | 28 | delete this.context.failures[key]; 29 | // We could send a notification of some sort here 30 | } 31 | 32 | /** 33 | * Initiate a new build process as child through Gjallarhorn. 34 | * @param {Object} object build options 35 | * @param {Boolean} options.promote Should the build be promoted? 36 | * @param {Object} options.data package data 37 | * @param {Progress} options.progress Expose created progress instance 38 | * @returns {Promise} completion handler 39 | * @api public 40 | */ 41 | async build({ promote, data, progress }) { 42 | // will likely need to this.unpack here 43 | // which means we need to create paths earlier 44 | let spec; 45 | try { 46 | spec = await this.context.specs(data); 47 | } catch (error) { 48 | return void progress.fail(error); 49 | } 50 | 51 | const { statusWriter } = progress; 52 | spec.promote = promote; 53 | statusWriter.metadata = spec; 54 | 55 | /** 56 | * No-build flag was added to the package.json to indicate there are no 57 | * build requirements for this package. This should however trigger 58 | * dependent builds so return early without error. 59 | */ 60 | if (!spec.type || spec.build === false) { 61 | this.context.app.contextLog.info('ignoring build, does not meet criteria', spec); 62 | return progress.ignore(); 63 | } 64 | 65 | /** 66 | * Supply additional configuration. Data will be handed off to the spawne 67 | * child processes and should not be used elsewhere to prevent contamination 68 | * of data. 
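     * The base64-encoded tarball handed to prepare() is pulled out of
     * `data._attachments` by content() below.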
69 | */ 70 | const content = this.content(data, spec); 71 | spec.source = this.context.source; 72 | spec.target = this.context.target; 73 | 74 | const paths = await this.context.prepare(spec, content, statusWriter); 75 | 76 | // 77 | // Give the child process the path to the tarball to extract which 78 | // contains the `npm.install` 79 | // 80 | this.context.app.contextLog.info('building %s with spec', spec.name, spec); 81 | const statusKey = 'Queueing all builds'; 82 | 83 | statusWriter.writeStart(statusKey); 84 | 85 | const limit = limiter(this.context.throttle); 86 | 87 | const tasks = spec.locales.map(locale => { 88 | return limit(() => this.buildPerLocale({ 89 | progress, 90 | locale, 91 | spec 92 | })); 93 | }); 94 | 95 | try { 96 | await Promise.all(tasks); 97 | statusWriter.write(statusKey); 98 | } catch (err) { 99 | this._buildError(err, spec); 100 | this.context.app.contextLog.info('Clean up build artifacts for %s', spec.name, spec); 101 | } 102 | 103 | // When we are all said and done, end the progress stream 104 | const buildErr = await this.context.cleaner.cleanup(Object.keys(paths).map(key => paths[key])); 105 | progress.end(buildErr); 106 | } 107 | 108 | /** 109 | * Extract package content from the JSON body. 110 | * 111 | * @param {Object} data Package data. 112 | * @param {Object} spec Descriptive package information. 113 | * @returns {String} base64 encoded string. 114 | * @api private 115 | */ 116 | content(data, spec) { 117 | const name = spec.name + '-' + spec.version + '.tgz'; 118 | 119 | data = data || {}; 120 | data._attachments = data._attachments || {}; 121 | data._attachments[name] = data._attachments[name] || {}; 122 | 123 | return data._attachments[name].data || ''; 124 | } 125 | 126 | /** 127 | * Downloads the package tarball based on the given `spec`, builds that `spec` 128 | * given the written tarball and reports back via a progress stream 129 | * @param {Object} opts Options for the locale-specific build. 130 | * @param {String} opts.locale BCP-47 locale name (e.g. en-US, fr, etc). 131 | * @param {Object} opts.spec Specification object for the given build. 132 | * @param {Stream} opts.progress Progress "pseudo-stream" to report build progress on. 133 | * @returns {Promise} completion handler 134 | * @api private 135 | */ 136 | buildPerLocale({ progress, spec, locale }) { 137 | const { app, topic } = this.context; 138 | const id = uuid.v4(); 139 | 140 | /** 141 | * There are 3 events per ID. This is a stub of progress before we 142 | * remove it in the next pass of the refactor as progress will need to 143 | * exist in an external service. We use 2 here so that the `finished` 144 | * event is the only 100 which gets sent when done is called 145 | */ 146 | progress.start(id, 2, { locale }); 147 | const current = assign({ locale, id }, omit(spec, 'locales')); 148 | 149 | app.contextLog.info('Start build for locale %s', locale, { 150 | locale, 151 | name: spec.name, 152 | version: spec.version, 153 | env: spec.env, 154 | promote: spec.promote, 155 | id 156 | }); 157 | 158 | /** 159 | * Launch the build process with the specifications and attach 160 | * a supervisor to communicate all events back to the developer. 
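     * The NSQ job published below (`freshSpec`) carries only what a build worker
     * needs: name, env, version, locale, type and promote.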
161 | */ 162 | progress.write({ 163 | locale, 164 | progress: true, 165 | message: `Queuing ${current.type} build for ${current.name}`, 166 | id 167 | }); 168 | 169 | const freshSpec = { 170 | name: spec.name, 171 | env: spec.env, 172 | version: spec.version, 173 | locale: locale, 174 | type: spec.type, 175 | promote: spec.promote 176 | }; 177 | 178 | this.context.emit('queue', topic, freshSpec); 179 | return new Promise((resolve, reject) => { 180 | // retries are built in here now 181 | this.context.nsq.writer.publish(topic, freshSpec, err => { 182 | if (err) { 183 | app.contextLog.error('Build queue %s for %s env: %s failed %j', current.id, current.name, current.env); 184 | progress.fail(err, id, { locale }); 185 | app.contextLog.error('Error in writing job to nsq %s for %s: %s', err.stack, spec.name, err.message, { 186 | locale, 187 | name: spec.name, 188 | version: spec.version, 189 | env: spec.env, 190 | promote: spec.promote, 191 | id 192 | }); 193 | return reject(err); 194 | } 195 | 196 | this.context.emit('queued', topic, freshSpec); 197 | app.contextLog.info('Finished queuing locale %s', locale, { 198 | locale: locale, 199 | env: spec.env, 200 | version: spec.version, 201 | name: spec.name, 202 | id: id 203 | }); 204 | 205 | progress.done(id, { locale }); 206 | resolve(); 207 | }); 208 | }); 209 | } 210 | } 211 | 212 | module.exports = Builder; 213 | -------------------------------------------------------------------------------- /test/lib/construct/progress.test.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | /* eslint max-statements: 0 */ 3 | 4 | describe('Progress', function () { 5 | const Progress = require('../../../lib/construct/progress'); 6 | const uuid = '87e29af5-094f-48fd-bafa-42e59f88c472'; 7 | const Stream = require('stream'); 8 | const rip = require('rip-out'); 9 | const assume = require('assume'); 10 | const sinon = require('sinon'); 11 | assume.use(require('assume-sinon')); 12 | 13 | const nsqTopic = 'not-the-real-status-topic'; 14 | let progress; 15 | let nsqProgress; 16 | let writerSpy; 17 | const nsqMetadata = { 18 | name: 'somePkg', 19 | env: 'lol', 20 | version: '1.2.3.4.5', 21 | type: 'tessellate' 22 | }; 23 | const nsqExpected = { 24 | ...rip(nsqMetadata, 'type'), 25 | buildType: nsqMetadata.type, 26 | locale: 'lol-CAT' 27 | }; 28 | 29 | function extend(options, streamId, next) { 30 | progress.write = function (data, id) { 31 | /* eslint callback-return: 0 */ 32 | assume(id).equals(streamId); 33 | assume(data).to.be.an('object'); 34 | 35 | for (const key of Object.keys(options)) { 36 | assume(data).to.have.property(key, options[key]); 37 | } 38 | 39 | if (typeof next === 'function') next(); 40 | 41 | return progress; 42 | }; 43 | } 44 | 45 | beforeEach(function () { 46 | progress = new Progress(); 47 | const writer = { publish: function () {} }; 48 | nsqProgress = new Progress({ 49 | nsq: { 50 | topic: nsqTopic, 51 | writer: writer 52 | }, 53 | metadata: nsqMetadata 54 | }); 55 | writerSpy = sinon.spy(writer, 'publish'); 56 | }); 57 | 58 | afterEach(function () { 59 | progress = null; 60 | nsqProgress = null; 61 | writerSpy = null; 62 | }); 63 | 64 | it('is exposed as constructor', function () { 65 | assume(Progress).is.an('function'); 66 | assume(progress).to.be.instanceof(Progress); 67 | }); 68 | 69 | describe('#fail', function () { 70 | it('is a function', function () { 71 | assume(progress.fail).to.be.a('function'); 72 | assume(progress.fail).to.have.length(2); 73 | }); 74 | 75 | it('writes the 
the error to stream', function (done) { 76 | const msg = 'testing message'; 77 | 78 | extend({ 79 | event: 'error', 80 | message: msg 81 | }, uuid, done); 82 | 83 | progress.fail(new Error(msg), uuid); 84 | }); 85 | 86 | it('converts strings to errors', function (done) { 87 | const msg = 'no error will be converted'; 88 | 89 | extend({ 90 | event: 'error', 91 | message: msg 92 | }, uuid, done); 93 | 94 | progress.fail(msg, uuid); 95 | }); 96 | 97 | it('can handle native node errors', function (done) { 98 | const err = { 99 | code: 'ENOENT', 100 | path: '/some/test/file' 101 | }; 102 | 103 | extend({ 104 | event: 'error', 105 | message: err.code + ': ' + err.path 106 | }, uuid, done); 107 | 108 | progress.fail(err, uuid); 109 | }); 110 | }); 111 | 112 | describe('#start', function () { 113 | it('is a function', function () { 114 | assume(progress.start).to.be.a('function'); 115 | assume(progress.start).to.have.length(2); 116 | }); 117 | 118 | it('writes default start object data to stream', function (done) { 119 | extend({ 120 | event: 'task', 121 | message: 'start', 122 | progress: 0 123 | }, uuid, done); 124 | 125 | progress.start(uuid); 126 | }); 127 | 128 | it('ignores start event when writing to NSQ', function () { 129 | nsqProgress.start(uuid, 2, { locale: nsqExpected.locale }); 130 | assume(writerSpy).is.not.calledWithMatch(nsqTopic, { 131 | eventType: 'event', 132 | message: sinon.match(`start\nProgress: 0`), 133 | ...nsqExpected 134 | }); 135 | }); 136 | }); 137 | 138 | describe('#done', function () { 139 | it('is a function', function () { 140 | assume(progress.done).to.be.a('function'); 141 | assume(progress.done).to.have.length(1); 142 | }); 143 | 144 | it('writes default start object data to stream', function (done) { 145 | extend({ 146 | event: 'task', 147 | message: 'Successfully queued build', 148 | progress: 100 149 | }, uuid, done); 150 | 151 | progress.done(uuid); 152 | }); 153 | 154 | it('writes done event to nsq stream', function () { 155 | nsqProgress.done(uuid, { locale: nsqExpected.locale }); 156 | assume(writerSpy).is.calledWithMatch(nsqTopic, { 157 | eventType: 'event', 158 | message: sinon.match(`Successfully queued build`), 159 | ...nsqExpected 160 | }); 161 | }); 162 | }); 163 | 164 | describe('#ignore', function () { 165 | it('is a function', function () { 166 | assume(progress.ignore).to.be.a('function'); 167 | assume(progress.ignore).to.have.length(0); 168 | }); 169 | 170 | it('writes ignore data to stream', function (done) { 171 | /* eslint no-undefined: 0 */ 172 | extend({ 173 | event: 'task', 174 | message: 'ignored', 175 | progress: -1 176 | }, undefined, done); 177 | 178 | progress.ignore(); 179 | }); 180 | 181 | it('writes ignore event to nsq stream', function () { 182 | nsqProgress.buildsCompleted = 0; 183 | nsqProgress.ignore(); 184 | 185 | assume(writerSpy).is.calledWithMatch(nsqTopic, { 186 | eventType: 'ignored', 187 | total: 0, 188 | message: 'Builds Queued', 189 | ...rip(nsqExpected, 'locale') 190 | }); 191 | }); 192 | }); 193 | 194 | describe('#steps', function () { 195 | it('is a function', function () { 196 | assume(progress.steps).to.be.a('function'); 197 | assume(progress.steps).to.have.length(2); 198 | }); 199 | 200 | it('increments the total counter and one', function () { 201 | progress.start(uuid); 202 | assume(progress.map[uuid].total).to.equal(0); 203 | 204 | progress.steps(uuid, 5); 205 | assume(progress.map[uuid].total).to.equal(5); 206 | 207 | progress.steps(uuid, 3); 208 | assume(progress.map[uuid].total).to.equal(8); 209 | }); 210 
| }); 211 | 212 | describe('#end', function () { 213 | it('is a function', function () { 214 | assume(progress.end).to.be.a('function'); 215 | assume(progress.end).to.have.length(1); 216 | }); 217 | 218 | it('ends the stream', function () { 219 | assume(progress.stream._writableState.ended).to.equal(false); 220 | assume(progress.stream._readableState.ended).to.equal(false); 221 | progress.end(); 222 | 223 | assume(progress.stream._writableState.ended).to.equal(true); 224 | assume(progress.stream._readableState.ended).to.equal(true); 225 | }); 226 | 227 | it('writes end event to nsq stream', function () { 228 | nsqProgress.statusWriter.buildsCompleted = 7; 229 | nsqProgress.end(); 230 | 231 | assume(writerSpy).is.calledWithMatch(nsqTopic, { 232 | eventType: 'queued', 233 | total: 7, 234 | message: 'Builds Queued', 235 | ...rip(nsqExpected, 'locale') 236 | }); 237 | }); 238 | 239 | it('writes end event with error to nsq stream', function () { 240 | nsqProgress.statusWriter.buildsCompleted = 1; 241 | const errorMessage = 'printer on fire'; 242 | nsqProgress.end(new Error(errorMessage)); 243 | 244 | assume(writerSpy).is.calledWithMatch(nsqTopic, { 245 | eventType: 'error', 246 | total: 1, 247 | message: errorMessage, 248 | ...rip(nsqExpected, 'locale') 249 | }); 250 | }); 251 | }); 252 | 253 | describe('#state', function () { 254 | it('is a function', function () { 255 | assume(progress.state).to.be.a('function'); 256 | assume(progress.state).to.have.length(1); 257 | }); 258 | 259 | it('returns the total progress for found ids', function () { 260 | progress.map.first = {}; 261 | progress.map.first.n = 3; 262 | progress.map.first.total = 10; 263 | 264 | assume(progress.state('first')).to.equal(30); 265 | assume(progress.state('unknown')).to.equal(0); 266 | }); 267 | }); 268 | 269 | describe('#pipe', function () { 270 | it('is a function', function () { 271 | assume(progress.pipe).to.be.a('function'); 272 | assume(progress.pipe).to.have.length(1); 273 | }); 274 | 275 | it('will pipe to the destination and can start the stream', function () { 276 | const s = new Stream(); 277 | 278 | progress.stream.pipe = function (destination) { 279 | assume(destination).to.equal(s); 280 | return destination; 281 | }; 282 | 283 | const result = progress.pipe(s); 284 | assume(result).to.equal(s); 285 | assume(result).to.be.instanceof(Stream); 286 | 287 | progress.start(); 288 | const start = JSON.parse(progress.stream._readableState.buffer.head.data.toString()); 289 | assume(start).to.have.property('message', 'start'); 290 | assume(start).to.have.property('event', 'task'); 291 | assume(start).to.have.property('progress', 0); 292 | }); 293 | }); 294 | }); 295 | -------------------------------------------------------------------------------- /test/lib/construct/status-writer.test.js: -------------------------------------------------------------------------------- 1 | /* eslint max-nested-callbacks: 0 */ 2 | const StatusWriter = require('../../../lib/construct/status-writer'); 3 | const nsqStream = require('nsq-stream'); 4 | const assume = require('assume'); 5 | const sinon = require('sinon'); 6 | const { performance } = require('perf_hooks'); 7 | assume.use(require('assume-sinon')); 8 | 9 | describe('StatusWriter', function () { 10 | let writer; 11 | let mockNsqWriter, mockWriteStream; 12 | const metadata = { 13 | name: 'SomeName', 14 | env: 'test', 15 | version: '1.2.3-4', 16 | type: 'webpack' 17 | }; 18 | const defaultMessage = { 19 | eventType: 'event', 20 | locale: sinon.match.falsy, 21 | details: 
sinon.match.falsy, 22 | name: metadata.name, 23 | env: metadata.env, 24 | version: metadata.version, 25 | buildType: metadata.type 26 | }; 27 | const defaultTopic = 'SomeTopic'; 28 | 29 | beforeEach(function () { 30 | mockNsqWriter = { publish: sinon.stub() }; // Not an accurate stub, just a placeholder 31 | 32 | mockWriteStream = { 33 | write: sinon.stub(), 34 | end: sinon.stub(), 35 | on: sinon.stub(), 36 | _writableState: {} 37 | }; 38 | 39 | sinon.stub(nsqStream, 'createWriteStream').returns(mockWriteStream); 40 | 41 | writer = new StatusWriter({ 42 | nsq: { 43 | writer: mockNsqWriter, 44 | topic: defaultTopic 45 | }, 46 | metadata 47 | }); 48 | }); 49 | 50 | afterEach(function () { 51 | sinon.restore(); 52 | }); 53 | 54 | 55 | describe('constructor', function () { 56 | it('sets up writer with topic', function () { 57 | assume(writer).exists(); 58 | assume(nsqStream.createWriteStream).calledWith(mockNsqWriter, defaultTopic); 59 | }); 60 | 61 | it('doesn\'t create stream if no writer', function () { 62 | sinon.reset(); 63 | writer = new StatusWriter({ 64 | nsq: { 65 | topic: defaultTopic 66 | }, 67 | metadata 68 | }); 69 | assume(nsqStream.createWriteStream).was.not.called(); 70 | }); 71 | 72 | it('doesn\'t create stream if no topic', function () { 73 | sinon.reset(); 74 | writer = new StatusWriter({ 75 | nsq: { 76 | writer: mockNsqWriter 77 | }, 78 | metadata 79 | }); 80 | assume(nsqStream.createWriteStream).was.not.called(); 81 | }); 82 | 83 | it('sets up metadata', function () { 84 | assume(writer).exists(); 85 | assume(writer.metadata).to.be.equal(metadata); 86 | }); 87 | 88 | it('doesn\'t need metadata', function () { 89 | writer = new StatusWriter({ 90 | nsq: { 91 | writer: mockNsqWriter, 92 | topic: defaultTopic 93 | } 94 | }); 95 | assume(writer.metadata).to.deep.equal({}); 96 | }); 97 | }); 98 | 99 | describe('.write', function () { 100 | it('noops without a writeStream', function () { 101 | writer.nsqStream = null; 102 | 103 | writer.write(null, 'foo'); 104 | 105 | assume(mockWriteStream.write).was.not.called(); 106 | }); 107 | 108 | it('noops without key and data', function () { 109 | writer.write(null, null); 110 | 111 | assume(mockWriteStream.write).was.not.called(); 112 | }); 113 | 114 | it('can write simple strings as messages', function () { 115 | writer.write(null, 'some string'); 116 | 117 | assume(mockWriteStream.write).was.calledWithMatch({ 118 | ...defaultMessage, 119 | message: 'some string' 120 | }); 121 | }); 122 | 123 | it('can write objects', function () { 124 | writer.write(null, { 125 | message: 'much message', 126 | locale: 'do-GE', 127 | details: 'very detail' 128 | }); 129 | 130 | assume(mockWriteStream.write).was.calledWithMatch({ 131 | ...defaultMessage, 132 | message: 'much message', 133 | locale: 'do-GE', 134 | details: 'very detail' 135 | }); 136 | }); 137 | 138 | it('writes timing information if given a key', function () { 139 | writer.timings.set('theNotTooDistantFuture', performance.now() - 20); 140 | writer.write('theNotTooDistantFuture', 'There was a guy named Joel'); 141 | 142 | assume(mockWriteStream.write).was.calledWithMatch({ 143 | ...defaultMessage, 144 | message: 'There was a guy named Joel', 145 | timing: sinon.match.number 146 | }); 147 | }); 148 | 149 | it('writes a complete event when just a key is provided', function () { 150 | writer.timings.set('scienceFacts', performance.now() - 20); 151 | writer.write('scienceFacts'); 152 | 153 | assume(mockWriteStream.write).was.calledWithMatch({ 154 | ...defaultMessage, 155 | message: 
`'scienceFacts' completed successfully`, 156 | timing: sinon.match.number 157 | }); 158 | }); 159 | 160 | it('clears timer for the given key', function () { 161 | writer.timings.set('eats', performance.now() - 50); 162 | writer.timings.set('breathes', performance.now() - 40); 163 | writer.timings.set('scienceFacts', performance.now() - 20); 164 | writer.write('scienceFacts'); 165 | 166 | assume(writer.timings.has('scienceFacts')).is.false(); 167 | assume(writer.timings.has('eats')).is.true(); 168 | assume(writer.timings.has('breathes')).is.true(); 169 | }); 170 | 171 | it('can write Errors', function () { 172 | writer.write(null, { 173 | message: 'How does he eat and breathe?', 174 | event: 'error' 175 | }); 176 | 177 | assume(mockWriteStream.write).was.calledWithMatch({ 178 | ...defaultMessage, 179 | message: 'How does he eat and breathe?', 180 | eventType: 'error' 181 | }); 182 | }); 183 | }); 184 | 185 | describe('end', function () { 186 | it('noops if no stream', function () { 187 | writer.nsqStream = null; 188 | 189 | writer.end('foo'); 190 | 191 | assume(mockWriteStream.end).was.not.called(); 192 | }); 193 | 194 | it('writes an end', function () { 195 | writer.buildsCompleted = 8675309; 196 | writer.end('foo'); 197 | 198 | assume(mockWriteStream.end).was.calledWithMatch({ 199 | ...defaultMessage, 200 | eventType: 'foo', 201 | total: 8675309, 202 | message: 'Builds Queued' 203 | }); 204 | }); 205 | 206 | it('can end on an error', function () { 207 | writer.buildsCompleted = 8675309; 208 | writer.end('error', new Error('Jenny don\'t change your number')); 209 | 210 | assume(mockWriteStream.end).was.calledWithMatch({ 211 | ...defaultMessage, 212 | eventType: 'error', 213 | total: 8675309, 214 | message: 'Jenny don\'t change your number' 215 | }); 216 | }); 217 | }); 218 | 219 | describe('writeStart', function () { 220 | it('writes a status message and starts a timer', function () { 221 | assume(writer.timings.has('myKey')).to.be.false(); 222 | writer.writeStart('myKey', 'some string'); 223 | 224 | assume(mockWriteStream.write).was.calledWithMatch({ 225 | ...defaultMessage, 226 | message: 'some string' 227 | }); 228 | 229 | assume(writer.timings.has('myKey')).to.be.true(); 230 | assume(writer.timings.get('myKey')).is.a('number'); 231 | }); 232 | 233 | it('writes a default status message', function () { 234 | assume(writer.timings.has('myKey')).to.be.false(); 235 | writer.writeStart('myKey'); 236 | 237 | assume(mockWriteStream.write).was.calledWithMatch({ 238 | ...defaultMessage, 239 | message: `'myKey' starting` 240 | }); 241 | 242 | assume(writer.timings.has('myKey')).to.be.true(); 243 | assume(writer.timings.get('myKey')).is.a('number'); 244 | }); 245 | }); 246 | 247 | describe('writeMaybeError', function () { 248 | it('writes a default event if no error', function () { 249 | writer.timings.set('scienceFacts', performance.now() - 20); 250 | writer.writeMaybeError('scienceFacts'); 251 | 252 | assume(mockWriteStream.write).was.calledWithMatch({ 253 | ...defaultMessage, 254 | message: `'scienceFacts' completed successfully`, 255 | timing: sinon.match.number 256 | }); 257 | }); 258 | 259 | it('writes an error event if error passed', function () { 260 | writer.timings.set('scienceFacts', performance.now() - 20); 261 | const error = new Error('How does he eat and breathe?'); 262 | writer.writeMaybeError('scienceFacts', error); 263 | 264 | assume(mockWriteStream.write).was.calledWithMatch({ 265 | ...defaultMessage, 266 | eventType: 'error', 267 | message: `ERROR: 'scienceFacts' exited with 
code: Error: How does he eat and breathe?.`, 268 | details: sinon.match(details => details && details.message === error.message), 269 | timing: sinon.match.number 270 | }); 271 | }); 272 | }); 273 | 274 | describe('writeWrap', function () { 275 | it('returns a callback-wrapped function that writes a default event if no error', function (done) { 276 | writer.timings.set('scienceFacts', performance.now() - 20); 277 | const newCallback = writer.writeWrap('scienceFacts', function (err, data) { 278 | assume(err).is.falsey(); 279 | assume(data).to.equal('la la la'); 280 | 281 | assume(mockWriteStream.write).was.calledWithMatch({ 282 | ...defaultMessage, 283 | message: `'scienceFacts' completed successfully`, 284 | timing: sinon.match.number 285 | }); 286 | 287 | done(); 288 | }); 289 | 290 | assume(newCallback).is.a('function'); 291 | newCallback(null, 'la la la'); 292 | }); 293 | 294 | it('returns a callback-wrapped function that writes an error event if error passed', function (done) { 295 | writer.timings.set('scienceFacts', performance.now() - 20); 296 | const error = new Error('How does he eat and breathe?'); 297 | const newCallback = writer.writeWrap('scienceFacts', function (err, data) { 298 | assume(err).is.truthy(); 299 | assume(err.message).to.equal(error.message); 300 | assume(data).to.equal('la la la'); 301 | 302 | assume(mockWriteStream.write).was.calledWithMatch({ 303 | ...defaultMessage, 304 | eventType: 'error', 305 | message: `ERROR: 'scienceFacts' exited with code: Error: How does he eat and breathe?.`, 306 | details: sinon.match(details => details && details.message === error.message), 307 | timing: sinon.match.number 308 | }); 309 | 310 | done(); 311 | }); 312 | 313 | assume(newCallback).is.a('function'); 314 | newCallback(error, 'la la la'); 315 | }); 316 | }); 317 | }); 318 | -------------------------------------------------------------------------------- /test/lib/routes/index.test.js: -------------------------------------------------------------------------------- 1 | /* eslint max-nested-callbacks: 0 */ 2 | 'use strict'; 3 | 4 | const hyperquest = require('hyperquest'); 5 | const assume = require('assume'); 6 | const path = require('path'); 7 | const nock = require('nock'); 8 | const url = require('url'); 9 | const fs = require('fs'); 10 | const sinon = require('sinon'); 11 | const Writer = require('../../mocks').Writer; 12 | 13 | const Agent = require('http').Agent; 14 | 15 | const agent = new Agent({ keepAlive: true }); 16 | 17 | const application = require('../../../lib/'); 18 | 19 | assume.use(require('assume-sinon')); 20 | 21 | afterEach(function () { 22 | sinon.restore(); 23 | }); 24 | 25 | describe('Application routes', function () { 26 | this.timeout(5E5); // eslint-disable-line 27 | let app; 28 | 29 | const payload = path.join(__dirname, '..', '..', 'fixtures', 'payload-0.0.0.json'); 30 | const v2payload = path.join(__dirname, '..', '..', 'fixtures', 'v2-payload-0.0.0.json'); 31 | const configFile = path.join(__dirname, '..', '..', 'config.json'); 32 | function getPayload(filepath) { 33 | return JSON.parse(fs.readFileSync(filepath)); // eslint-disable-line 34 | } 35 | 36 | function nockFeedme() { 37 | nock(app.config.get('feedsme')) 38 | .post('/v2/change') 39 | .reply(200, function reply(uri, body) { 40 | const pkgjson = getPayload(payload); 41 | 42 | assume(body).is.a('object'); 43 | assume(body.name).equals(pkgjson.name); 44 | assume(body.version).equals(pkgjson.version); 45 | assume(body.dependencies).deep.equals(pkgjson.dependencies); 46 | 47 | 
nock.cleanAll(); 48 | return { ok: true }; 49 | }); 50 | } 51 | 52 | before(function (done) { 53 | application.start({ 54 | logger: { 55 | level: 'critical' 56 | }, 57 | ensure: true, 58 | config: { 59 | file: configFile, 60 | overrides: { 61 | http: { 62 | hostname: '127.0.0.1', 63 | port: 0, 64 | timeout: 12000000 65 | }, 66 | builder: { 67 | topic: 'build' 68 | } 69 | } 70 | } 71 | }, function (error, appInstance) { 72 | if (error) return done(error); 73 | app = appInstance; 74 | app.nsq = app.nsq || {}; 75 | app.nsq.writer = app.nsq.writer || new Writer(); 76 | app.construct.nsq = app.nsq; 77 | done(error); 78 | }); 79 | }); 80 | 81 | after(function (done) { 82 | agent.destroy(); 83 | app.close(done); 84 | }); 85 | 86 | function createRequest(method, pathname, next) { 87 | const socket = app.servers.http.address(); 88 | const target = {}; 89 | target.port = socket.port; 90 | target.hostname = '127.0.0.1'; 91 | 92 | method = method || 'get'; 93 | target.protocol = 'http:'; 94 | target.pathname = pathname; 95 | 96 | return hyperquest[method](url.format(target), { 97 | agent: agent, 98 | headers: { 99 | 'Content-Type': 'application/json' 100 | } 101 | }, next); 102 | } 103 | 104 | function validateMessages(data) { 105 | data = JSON.parse(data); 106 | 107 | assume(data.task).to.not.equal('ignored'); 108 | assume(data).to.have.property('progress'); 109 | assume(data).to.have.property('message'); 110 | assume(data.progress).to.be.a('number'); 111 | assume(data.progress).to.not.equal(-1); 112 | assume(data.timestamp).to.be.a('number'); 113 | assume(data.id).to.be.a('string'); 114 | } 115 | 116 | describe('/v2/build', function () { 117 | it('accepts npm publish JSON payloads and returns finished task messages', function (done) { 118 | nockFeedme(); 119 | 120 | fs.createReadStream(v2payload) 121 | .pipe(createRequest('post', 'v2/build')) 122 | .on('error', done) 123 | .on('end', done) 124 | .on('data', validateMessages); 125 | }); 126 | 127 | it('returns an error if payload expectations are not satisfied', function (done) { 128 | const postData = getPayload(v2payload); 129 | 130 | delete postData.data._attachments; 131 | 132 | const post = createRequest('post', 'v2/build').on('error', done).on('data', function (resData) { 133 | assume(resData).to.be.an('buffer'); 134 | assume(resData.toString()).to.include('"_attachments" is required'); 135 | 136 | done(); 137 | }); 138 | 139 | post.end(Buffer.from(JSON.stringify(postData))); 140 | }); 141 | 142 | it('can create minified builds', function (done) { 143 | const postData = getPayload(v2payload); 144 | const spy = sinon.spy(app.construct.nsq.writer, 'publish'); 145 | nockFeedme(); 146 | postData.data.env = 'prod'; 147 | 148 | const post = createRequest('post', 'v2/build') 149 | .on('error', done) 150 | .on('data', validateMessages) 151 | .on('end', done); 152 | 153 | post.end(Buffer.from(JSON.stringify(postData))); 154 | 155 | app.construct.once('queued', function (topic, spec) { 156 | assume(topic).equals('build'); 157 | assume(spy.called).true(); 158 | assume(spec.name).equals(postData.data.name); 159 | assume(spec.env).equals(postData.data.env); 160 | assume(spec.type).is.a('string'); 161 | assume(spec.version).is.a('string'); 162 | assume(spec.promote).equals(postData.promote); 163 | }); 164 | }); 165 | 166 | it('can run multiple builds for different locales', function (done) { 167 | const postData = getPayload(v2payload); 168 | const cache = {}; 169 | 170 | let calledOnce = true; 171 | nockFeedme(); 172 | 173 | 
postData.data.versions['0.0.0'].locales = ['en-US', 'en-GB']; 174 | const post = createRequest('post', 'v2/build') 175 | .on('error', done) 176 | .on('data', function (resData) { 177 | resData = JSON.parse(resData); 178 | assume(resData).to.have.property('id'); 179 | assume(app.construct.valid(resData.id)).to.equal(true); 180 | if (!cache[resData.id]) cache[resData.id] = 0; 181 | cache[resData.id]++; 182 | }) 183 | .on('end', function () { 184 | assume(calledOnce).to.equal(true); 185 | assume(Object.keys(cache)).to.have.length(2); 186 | 187 | for (const id of Object.keys(cache)) { 188 | assume(cache[id]).to.equal(3); 189 | } 190 | 191 | calledOnce = false; 192 | done(); 193 | }); 194 | 195 | post.end(Buffer.from(JSON.stringify(postData))); 196 | }); 197 | 198 | it('sends the payload to the feedsme service after a successful build', function (next) { 199 | const feedStub = sinon.stub(app.feedsme, 'change').yieldsAsync(null, null); 200 | nockFeedme(); 201 | 202 | fs.createReadStream(v2payload).pipe(createRequest('post', 'v2/build')) 203 | .on('data', validateMessages) 204 | .on('error', next) 205 | .on('end', () => { 206 | assume(feedStub).is.calledWithMatch('dev', sinon.match({ 207 | data: { 208 | data: { __published: true }, 209 | promote: false 210 | } 211 | }), sinon.match.func); 212 | next(); 213 | }); 214 | }); 215 | }); 216 | 217 | describe('/build', function () { 218 | it('accepts npm publish JSON payloads and returns finished task messages', function (done) { 219 | nockFeedme(); 220 | 221 | fs.createReadStream(payload) 222 | .pipe(createRequest('post', 'build')) 223 | .on('error', done) 224 | .on('end', done) 225 | .on('data', validateMessages); 226 | }); 227 | 228 | it('returns an error if payload expectations are not satisfied', function (done) { 229 | const data = getPayload(payload); 230 | 231 | delete data._attachments; 232 | 233 | const post = createRequest('post', 'build').on('error', done).on('data', function (resData) { 234 | assume(resData).to.be.an('buffer'); 235 | assume(resData.toString()).to.include('"_attachments" is required'); 236 | 237 | done(); 238 | }); 239 | 240 | post.end(Buffer.from(JSON.stringify(data))); 241 | }); 242 | 243 | it('can create minified builds', function (done) { 244 | const data = getPayload(payload); 245 | const spy = sinon.spy(app.construct.nsq.writer, 'publish'); 246 | nockFeedme(); 247 | data.env = 'prod'; 248 | 249 | const post = createRequest('post', 'build') 250 | .on('error', done) 251 | .on('data', validateMessages) 252 | .on('end', done); 253 | 254 | post.end(Buffer.from(JSON.stringify(data))); 255 | 256 | app.construct.once('queued', function (topic, spec) { 257 | assume(topic).equals('build'); 258 | assume(spy.called); 259 | assume(spec.name).equals(data.name); 260 | assume(spec.env).equals(data.env); 261 | assume(spec.type); 262 | assume(spec.version); 263 | }); 264 | }); 265 | 266 | it('can run multiple builds for different locales', function (done) { 267 | const data = getPayload(payload); 268 | const cache = {}; 269 | 270 | let calledOnce = true; 271 | nockFeedme(); 272 | 273 | data.versions['0.0.0'].locales = ['en-US', 'en-GB']; 274 | const post = createRequest('post', 'build') 275 | .on('error', done) 276 | .on('data', function (resData) { 277 | resData = JSON.parse(resData); 278 | assume(resData).to.have.property('id'); 279 | assume(app.construct.valid(resData.id)).to.equal(true); 280 | if (!cache[resData.id]) cache[resData.id] = 0; 281 | cache[resData.id]++; 282 | }) 283 | .on('end', function () { 284 | 
assume(calledOnce).to.equal(true); 285 | assume(Object.keys(cache)).to.have.length(2); 286 | 287 | for (const id of Object.keys(cache)) { 288 | assume(cache[id]).to.equal(3); 289 | } 290 | 291 | calledOnce = false; 292 | done(); 293 | }); 294 | 295 | post.end(Buffer.from(JSON.stringify(data))); 296 | }); 297 | 298 | it('sends the payload to the feedsme service after a successful build', function (next) { 299 | const feedStub = sinon.stub(app.feedsme, 'change').yieldsAsync(null, null); 300 | nockFeedme(); 301 | 302 | fs.createReadStream(payload).pipe(createRequest('post', 'build')) 303 | .on('data', validateMessages) 304 | .on('error', next) 305 | .on('end', () => { 306 | assume(feedStub).is.calledWithMatch('dev', sinon.match({ 307 | data: { 308 | data: { __published: true }, 309 | promote: true 310 | } 311 | }), sinon.match.func); 312 | next(); 313 | }); 314 | }); 315 | }); 316 | 317 | }); 318 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `carpenterd` 2 | 3 | > ⚠️ **DEPRECATED**: This package is no longer maintained and has been deprecated. Please use an alternative solution or contact the maintainers for more information. 4 | 5 | [![Version npm](https://img.shields.io/npm/v/carpenterd.svg?style=flat-square)](https://www.npmjs.com/package/carpenterd) 6 | [![License](https://img.shields.io/npm/l/carpenterd.svg?style=flat-square)](https://github.com/godaddy/carpenterd/blob/master/LICENSE) 7 | [![npm Downloads](https://img.shields.io/npm/dm/carpenterd.svg?style=flat-square)](https://npmcharts.com/compare/carpenterd?minimal=true) 8 | [![Build Status](https://travis-ci.org/godaddy/carpenterd.svg?branch=master)](https://travis-ci.org/godaddy/carpenterd) 9 | [![Dependencies](https://img.shields.io/david/godaddy/carpenterd.svg?style=flat-square)](https://github.com/godaddy/carpenterd/blob/master/package.json) 10 | 11 | Build and compile npm packages to run in the browser. This API is capable of 12 | building modules through different build systems. The aim is to have full 13 | cross-build-system API that serves a single file to be used in the browser. 14 | Note that this API should only be hit from [`warehouse.ai`][warehouse.ai]. 15 | 16 | ## Install 17 | 18 | ``` 19 | git clone git@github.com/godaddy/carpenterd.git 20 | npm install 21 | ``` 22 | 23 | ## Usage 24 | 25 | Make sure [BFFS] is configured against a running NoSQL database. Development, 26 | staging and test configurations assume this instance is available on the 27 | localhost. Without a database builds will not be stored. 28 | 29 | ```bash 30 | npm start 31 | ``` 32 | 33 | ## API 34 | 35 | The API consists of two methods. Running this as an API allows the entire 36 | build process to run independantly as a microservice. `POST` routes only 37 | accept `application/json`. 38 | 39 | ### POST /build 40 | 41 | Trigger a new build for the package specified in the payload. Configuration 42 | properties are merged in with the provided specification. For example the 43 | *registry* that is used to install the package will be merged in. This route 44 | expects a POST payload that is similar to `npm publish`. 45 | 46 | **Payload:** 47 | 48 | ```js 49 | { 50 | "_id": "test", 51 | "name": "test", // Used as key for storage. 52 | "description": "A builder test", 53 | "main": "index.jsx", // Entry file if not defined in build system. 54 | "dist-tags": { 55 | "latest": "0.0.0" // Used to extract the version. 
56 | },
57 | "build": "webpack", // Override the build system type.
58 | "main": "index.jsx",
59 | "_attachments":{
60 | "test-0.0.0.tgz": {
61 | "data": "...", // base64 encoded tarball of npm pack.
62 | "length": 665
63 | }
64 | }
65 | }
66 | ```
67 | 
68 | The route will stream newline delimited JSON as the response. The `id` is the
69 | unique *v4* id generated that can also be used to cancel the build.
70 | 
71 | **Example:**
72 | 
73 | ```
74 | curl -vX POST -H "Content-Type: application/json" -d @payload-0.0.0.json http://localhost:1337/build
75 | 
76 | Accept: application/json
77 | Accept-Encoding: gzip, deflate
78 | Content-Type: application/json; charset=utf-8
79 | Host: localhost:6064
80 | 
81 | {"event":"task","message":"start","progress":0,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958247119}
82 | {"event":"task","message":"init","progress":14,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958247120}
83 | {"event":"task","message":"unpack","progress":29,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958247120}
84 | {"event":"task","message":"exists","progress":43,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958248603}
85 | {"event":"task","message":"read","progress":57,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958248605}
86 | {"event":"task","message":"install","progress":72,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958249945}
87 | {"event":"task","message":"assemble","progress":86,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958250210}
88 | {"event":"task","message":"finished","progress":100,"id":"95cf09e6-3a4b-42b2-a3ef-d52b8a3e9ae0","timestamp":1438958250226}
89 | ```
90 | 
91 | ## Build systems
92 | 
93 | `carpenterd` will orchestrate builds as specified in the package. Builds are
94 | distributed to [NSQ]. [`carpenterd-worker` instances][carpenterd-worker]
95 | subscribe to NSQ and perform the actual builds. To maximize the developer
96 | experience, it will use the same configuration you use locally. In any case
97 | the result should equal the local build output, with the exception of
98 | additional minification, etc. Minification will only be performed if the
99 | `env` is set to `prod`, e.g. for `npm dist-tags 'package@version' prod`.
100 | The following build systems are currently available:
101 | 
102 | * _Browserify:_ will read the `main` file as determined from the
103 | `package.json` and bundle all modules that are imported/required.
104 | Configuration is usually part of any dependent `package.json`. This build
105 | has no explicit configuration; it will simply execute [browserify]. The
106 | complete output file with the CommonJS require wrapper is exposed to [BFFS].
107 | 
108 | * _Webpack:_ will read the default [webpack] configuration
109 | (`webpack.config.js`). There are no imposed limitations on the configuration.
110 | However, the output directory will have to be `./dist` by our convention.
111 | All files in the output directory will be published to [BFFS].
112 | 
113 | ### Identification of build system type
114 | 
115 | Specify a build system in `package.json` with the `build` keyword or use
116 | any of the following terms in the keywords:
117 | 
118 | 1. Webpack: `webpack`
119 | 2. Browserify: `browserify`
120 | 
121 | Alternatively, specifying the build system name on the `package.json` with
122 | the relative path to the configuration file will also classify the build
123 | system, for example: `webpack: '/path/to/config.js'`.
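As an illustration, the sketch below (for a hypothetical package called `my-ui`) shows the three ways a package can be classified as a webpack build: an explicit `build` property, a matching keyword, or the build system name holding a relative path to its configuration file.

```js
{
  "name": "my-ui",                     // hypothetical package name
  "version": "1.0.0",
  "main": "index.jsx",
  "build": "webpack"                   // 1. explicit `build` property
  // "keywords": ["webpack"]           // 2. or a matching keyword
  // "webpack": "./webpack.config.js"  // 3. or the build system name with a
                                       //    relative path to its config file
}
```

Note that only a string value is picked up as the build entry; if the property holds a configuration object instead of a path it is ignored (see `specs()` in `lib/construct/index.js`).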
124 | 
125 | ### Forcefully ignore builds
126 | 
127 | If a published package should not run any builds at all, provide a
128 | `build: false` flag in the package.json.
129 | 
130 | ```json
131 | {
132 |   "name": "package",
133 |   "version": "1.0.0",
134 |   "build": false,
135 |   ...
136 | }
137 | ```
138 | 
139 | Note: the module/package can also be published directly to a module registry.
140 | However, if you want to ensure dependents are built whenever your module is
141 | published, this flag can be useful.
142 | 
143 | ### Configuration
144 | 
145 | Each environment specifies a different set of default options for the builder,
146 | for instance which registry to run `npm install` against. Each build instance
147 | has a maximum runtime of `15` minutes. This value can be changed in the
148 | configuration.
149 | 
150 | #### Secure setup
151 | 
152 | By default `carpenterd` runs as a service over `http` and has no
153 | authentication in place. Set up the configuration to have [Slay] use `https`
154 | and use authentication middleware, for example [authboot].
155 | Store API keys and tokens in an encrypted config with [whisper.json][whisper].
156 | 
157 | #### Per build specifications
158 | 
159 | Variables and specifications required for a build are discerned from a
160 | combination of `package.json`, build system configuration files and
161 | defaults from Carpenterd's configuration.
162 | 
163 | **type:** can be supplied as the `build` property on the package.json or is
164 | extracted from the keywords. Defaults to Webpack.
165 | 
166 | **target:** writes the package and its dependencies to a temporary folder
167 | named after `build.id`, a unique v4 `id`. After building, this folder is
168 | removed from the file system to save disk space.
169 | 
170 | **version:** read from the package.json `dist-tags.latest`. Has no default.
171 | 
172 | **name:** Defaults to the package.json `name` property, i.e. the module's name.
173 | 
174 | **locale:** Uses the locales specified on the `package.json`. Each unique
175 | locale triggers a new build. Each build will have the environment variables
176 | `LANG` and `LOCALE` set. These values default to `en-US`.
177 | 
178 | #### wrhs.toml
179 | 
180 | The files listed here need to be relative to the project root so that they can
181 | be properly read from disk. This gives you more fine-grained control over what
182 | files get published to the CDN in any given environment.
183 | 
184 | 
185 | ```toml
186 | [files]
187 | prod = ['dist/js/app.min.js', 'dist/css/app.min.css']
188 | test = ['dist/js/app.js', 'dist/css/app.css']
189 | dev = ['dist/js/app.js', 'dist/css/app.css']
190 | ```
191 | 
192 | ## Status-Api
193 | 
194 | Carpenterd supports posting messages to the [warehouse.ai] status-api via [NSQ].
195 | It will post messages to the NSQ topic configured as follows:
196 | 
197 | ```js
198 | {
199 |   // ...other configuration
200 |   "nsq": {
201 |     "statusTopic": "an-nsq-topic", // topic that you choose for the status-api to consume
202 |     // ...other nsq setup
203 |   },
204 |   // ...other configuration
205 | }
206 | ```
207 | 
208 | The [NSQ] payloads will be objects that take the form:
209 | 
210 | ```js
211 | {
212 |   eventType: "event|queued|error|ignored", // The type of status event that occurred
213 |   name: "package-name",
214 |   env: "dev", // The environment that is being built
215 |   version: "1.2.3", // The version of the build
216 |   locale: "en-US", // (Optional) The locale that is being built
217 |   buildType: "webpack", // The type of the build (typically just webpack)
218 |   total: 5, // (Optional) The number of builds that were queued
219 |   message: "Description of what happened"
220 | }
221 | ```
222 | 
223 | ### Event Types
224 | 
225 | In the status-api NSQ payload there is a field called `eventType`.
226 | The possible values that carpenterd will send are:
227 | 
228 | - `event` - Used for interim statuses that a user might care about,
229 | but that don't affect/progress the overall build status
230 | - `queued` - Used to indicate how many builds were queued with
231 | `carpenterd-worker`
232 | - `error` - Used to indicate that `carpenterd` encountered an error and wasn't
233 | able to queue all the builds
234 | - `ignored` - Used to indicate that the build was ignored and no builds were
235 | queued. Typically this is because the package was not configured to have a
236 | build or was set to not build.
237 | 
238 | ## Tests
239 | 
240 | Run a local AWS cloud stack by pulling the `latest` [localstack] image.
241 | This requires `docker` [to be set up][docker].
242 | 
243 | ```sh
244 | docker pull localstack/localstack:latest
245 | npm run localstack
246 | ```
247 | 
248 | Run tests in a separate terminal.
249 | 250 | ```sh 251 | npm test 252 | ``` 253 | 254 | [warehouse.ai]: https://github.com/godaddy/warehouse.ai 255 | [NSQ]: https://github.com/nsqio/nsq 256 | [BFFS]: https://github.com/warehouseai/bffs 257 | [webpack]: https://webpack.js.org/ 258 | [carpenterd-worker]: https://github.com/godaddy/carpenterd-worker 259 | [Slay]: https://github.com/godaddy/slay 260 | [authboot]: https://github.com/warehouseai/authboot 261 | [whisper]: https://github.com/jcrugzz/whisper.json 262 | [browserify]: http://browserify.org/ 263 | [Babel]: https://babeljs.io/ 264 | [docker]: https://docs.docker.com/get-started/ 265 | -------------------------------------------------------------------------------- /test/lib/construct/index.test.js: -------------------------------------------------------------------------------- 1 | /* eslint max-nested-callbacks: 0 */ 2 | /* eslint no-invalid-this: 0 */ 3 | /* eslint max-statements: 0 */ 4 | 'use strict'; 5 | 6 | const Writer = require('../../mocks').Writer; 7 | const MockProgress = require('../../mocks').Progress; 8 | const sinon = require('sinon'); 9 | 10 | describe('Construct', function () { 11 | this.timeout(3E4); 12 | 13 | const Progress = require('../../../lib/construct/progress'); 14 | const assume = require('assume'); 15 | const path = require('path'); 16 | const fs = require('fs'); 17 | const rip = require('rip-out'); 18 | const uuid = '87e29af5-094f-48fd-bafa-42e59f88c472'; 19 | assume.use(require('assume-sinon')); 20 | 21 | const statusTopic = 'some-status-topic'; 22 | const queueingTopic = 'queue-the-build'; 23 | let app = require('../../../lib'); 24 | let construct; 25 | 26 | before(function (done) { 27 | app.start({ 28 | logger: { 29 | level: 'critical' 30 | }, 31 | ensure: true, 32 | config: { 33 | file: path.join(__dirname, '..', '..', 'config.json'), 34 | overrides: { 35 | http: 0 36 | } 37 | } 38 | }, function (error, application) { 39 | app = application; 40 | app.construct.nsq = app.construct.nsq || {}; 41 | app.construct.nsq.writer = app.construct.nsq.writer || new Writer(); 42 | app.construct.statusTopic = statusTopic; 43 | app.construct.topic = queueingTopic; 44 | construct = app.construct; 45 | 46 | done(error); 47 | }); 48 | }); 49 | 50 | after(function (done) { 51 | app.close(done); 52 | }); 53 | afterEach(function () { 54 | sinon.restore(); 55 | }); 56 | 57 | function assertNsqLocaleProgress(writerSpy, locale, buildType, promote = false) { 58 | const commonPayload = { 59 | name: 'test', 60 | env: 'dev', 61 | buildType, 62 | locale 63 | }; 64 | 65 | assume(writerSpy).is.calledWithMatch(statusTopic, { 66 | ...commonPayload, 67 | eventType: 'event', 68 | message: sinon.match(`Queuing ${buildType} build for test`) 69 | }); 70 | assume(writerSpy).is.calledWithMatch(statusTopic, { 71 | ...commonPayload, 72 | eventType: 'event', 73 | message: sinon.match('Successfully queued build') 74 | }); 75 | 76 | assume(writerSpy).is.calledWithMatch(queueingTopic, { 77 | ...rip(commonPayload, 'buildType'), 78 | type: commonPayload.buildType, 79 | promote 80 | }); 81 | } 82 | 83 | it('is exposed as singleton instance and wraps gjallarhorn child orchestration', function () { 84 | assume(construct).is.an('object'); 85 | }); 86 | 87 | it('has warehouse models reference', function () { 88 | assume(construct.models).to.be.an('object'); 89 | assume(construct.models).to.have.property('Package'); 90 | }); 91 | 92 | describe('#valid', function () { 93 | it('is a function', function () { 94 | assume(construct.valid).to.be.a('function'); 95 | 
assume(construct.valid).to.have.length(1); 96 | }); 97 | 98 | it('checks the validatity of an uuid v4', function () { 99 | assume(construct.valid('87e29af5-094f-48fd-bafa-42e59f88c472')).to.equal(true); 100 | assume(construct.valid('1')).to.equal(false); 101 | }); 102 | }); 103 | 104 | describe('#specs', function () { 105 | let local; 106 | 107 | function data() { 108 | return { 109 | 'name': 'test', 110 | 'type': 'something silly', 111 | 'dist-tags': { latest: '1.0.0' }, 112 | 'env': 'staging', 113 | 'versions': { 114 | '1.0.0': { 115 | name: 'test', 116 | build: 'browserify', 117 | webpack: '/path/to/config.js', 118 | version: '1.0.0' 119 | } 120 | } 121 | }; 122 | } 123 | 124 | it('is a function', function () { 125 | assume(construct.specs).to.be.a('asyncfunction'); 126 | assume(construct.specs).to.have.length(1); 127 | }); 128 | 129 | it('returns build specifications bffs understands', async function () { 130 | const result = await construct.specs(data()); 131 | assume(result).to.be.a('object'); 132 | assume(result).to.have.property('type', 'browserify'); 133 | assume(result).to.have.property('version', '1.0.0'); 134 | assume(result).to.have.property('env', 'test'); 135 | assume(result).to.have.property('name', 'test'); 136 | assume(result).to.have.property('entry'); 137 | }); 138 | 139 | it('will not default to any build specifications', async function () { 140 | local = data(); 141 | local.build = 'unknown'; 142 | delete local.versions; 143 | 144 | const result = await construct.specs(local); 145 | assume(result).to.have.property('type'); 146 | assume(result).to.have.property('entry'); 147 | }); 148 | 149 | it('can also use keywords to identify the build type', async function () { 150 | local = data(); 151 | 152 | local.versions['1.0.0'].keywords = ['webpack']; 153 | delete local.versions['1.0.0'].build; 154 | 155 | const result = await construct.specs(local); 156 | assume(result).to.have.property('type', 'webpack'); 157 | }); 158 | 159 | it('will check properties on package.json', async function () { 160 | local = data(); 161 | 162 | delete local.versions['1.0.0'].build; 163 | const result = await construct.specs(local); 164 | assume(result).to.be.a('object'); 165 | assume(result).to.have.property('type', 'webpack'); 166 | assume(result).to.have.property('version', '1.0.0'); 167 | assume(result).to.have.property('env', 'test'); 168 | assume(result).to.have.property('name', 'test'); 169 | assume(result).to.have.property('entry', '/path/to/config.js'); 170 | }); 171 | 172 | it('will only supply paths to data.entry if the property matches a builder', async function () { 173 | local = data(); 174 | 175 | delete local.versions['1.0.0'].build; 176 | local.versions['1.0.0'].webpack = { 177 | some: 'unsave object' 178 | }; 179 | 180 | const result = await construct.specs(local); 181 | 182 | assume(result).to.be.a('object'); 183 | assume(result).to.have.property('version', '1.0.0'); 184 | assume(result).to.have.property('env', 'test'); 185 | assume(result).to.have.property('name', 'test'); 186 | assume(result).to.have.property('entry'); 187 | 188 | }); 189 | }); 190 | 191 | describe('#_buildError', function () { 192 | it('should execte _buildError and succeed', function () { 193 | const spec = { 194 | name: 'test', 195 | version: '1.0.0', 196 | env: 'dev', 197 | type: 'webpack' 198 | }; 199 | 200 | construct.builder._buildError(new Error('whatever'), spec); 201 | }); 202 | }); 203 | 204 | describe('#getLocales', function () { 205 | const localeData = { 206 | 'dist-tags': { latest: '1.0.0' }, 
207 | 'versions': { 208 | '1.0.0': { 209 | locales: ['en-GB', 'nl-NL', 'de-DE'] 210 | } 211 | } 212 | }; 213 | 214 | it('is a function', function () { 215 | assume(construct.getLocales).to.be.a('function'); 216 | assume(construct.getLocales).to.have.length(1); 217 | }); 218 | 219 | it('extracts the package.json from the payload', async function () { 220 | const locales = await construct.getLocales(localeData); 221 | assume(locales).to.be.an('array'); 222 | assume(locales).to.include('nl-NL'); 223 | }); 224 | 225 | it('defaults to en-US if no locale is specified', async function () { 226 | delete localeData.versions['1.0.0'].locales; 227 | 228 | const locales = await construct.getLocales(localeData); 229 | assume(locales).to.be.an('array'); 230 | assume(locales).to.include('en-US'); 231 | }); 232 | 233 | it('generates an intersection of locales for all dependencies', async function () { 234 | const packages = { 235 | myPackage: { 236 | name: 'myPackage', 237 | locales: ['en-GB', 'nl-NL', 'de-DE'], 238 | dependencies: { 239 | subsub: '1.0.0' 240 | } 241 | }, 242 | subsub: { 243 | name: 'subsub', 244 | locales: ['en-GB', 'nl-NL', 'en-US'] 245 | } 246 | }; 247 | 248 | sinon 249 | .stub(construct.models.Package, 'get') 250 | .onFirstCall().callsArgWith(1, null, packages.myPackage) 251 | .onSecondCall().callsArgWith(1, null, packages.react) 252 | .onThirdCall().callsArgWith(1, null, packages.subsub); 253 | 254 | localeData.versions['1.0.0'].dependencies = { 255 | myPackage: 'some latest version', 256 | react: '0.13.3' 257 | }; 258 | 259 | const locales = await construct.getLocales(localeData); 260 | assume(locales).to.be.an('array'); 261 | assume(locales).to.include('en-GB'); 262 | assume(locales).to.include('nl-NL'); 263 | assume(locales).to.not.include('de-DE'); 264 | }); 265 | }); 266 | 267 | describe('#extractPackage', function () { 268 | it('is a function', function () { 269 | assume(construct.extractPackage).to.be.a('function'); 270 | assume(construct.extractPackage).to.have.length(1); 271 | }); 272 | 273 | it('extracts the package.json from the payload', function () { 274 | const pkg = construct.extractPackage({ 275 | 'dist-tags': { latest: '1.0.0' }, 276 | 'versions': { 277 | '1.0.0': { 278 | name: 'test' 279 | } 280 | } 281 | }); 282 | 283 | assume(pkg).to.be.an('object'); 284 | assume(pkg).to.have.property('name', 'test'); 285 | }); 286 | }); 287 | 288 | describe('#purge', function () { 289 | it('is a function', function () { 290 | assume(construct.purge).to.be.a('function'); 291 | assume(construct.purge).to.have.length(0); 292 | }); 293 | 294 | it('will remove folders that have exceeded timeout duration * retries', function (done) { 295 | const config = construct.app.config.get('builder'); 296 | const timeout = config.timeout; 297 | 298 | fs.mkdir(path.join(config.target, uuid), function (error) { 299 | if (error) return done(error); 300 | 301 | construct.timeout = -1; 302 | construct.purge(); 303 | 304 | return construct.once('purge', function (err, n) { 305 | assume(err).to.be.falsey(); 306 | assume(n).to.equal(1); 307 | 308 | fs.readdir(config.target, function (e, files) { 309 | assume(e).to.be.falsey(); 310 | assume(files.filter(construct.valid)).to.have.length(0); 311 | construct.timeout = timeout; 312 | 313 | done(); 314 | }); 315 | }); 316 | }); 317 | }); 318 | }); 319 | 320 | describe('#build', function () { 321 | it('is a function', function () { 322 | assume(construct.build).to.be.a('function'); 323 | assume(construct.build).to.have.length(2); 324 | }); 325 | 326 | 
it('launches a build process and returns a progress stream', function (done) { 327 | const prepareStub = sinon.stub(Object.getPrototypeOf(construct), 'prepare').resolves({}); 328 | const progress = construct.build({ 329 | promote: false, 330 | data: { 331 | 'name': 'test', 332 | 'versions': { 333 | '1.0.0': { 334 | name: 'test', 335 | keywords: [ 336 | 'webpack' 337 | ] 338 | } 339 | }, 340 | 'dist-tags': { 341 | latest: '1.0.0' 342 | } 343 | } 344 | }, function (error) { 345 | assume(error).to.be.falsey(); 346 | assume(prepareStub).is.called(1); 347 | done(); 348 | }); 349 | 350 | assume(progress).to.be.instanceof(Progress); 351 | }); 352 | 353 | it('returns early if the package.json has a build flag that is set to false', function (done) { 354 | const progress = construct.build({ 355 | promote: false, 356 | data: { 357 | 'name': 'test', 358 | 'dist-tags': { 359 | latest: '1.0.0' 360 | }, 361 | 'versions': { 362 | '1.0.0': { 363 | build: false 364 | } 365 | } 366 | } 367 | }, function (error) { 368 | assume(error).to.be.falsey(); 369 | done(); 370 | }); 371 | 372 | progress.stream.once('data', function (data) { 373 | data = JSON.parse(data); 374 | 375 | assume(data).to.have.property('progress', -1); 376 | assume(data).to.have.property('message', 'ignored'); 377 | assume(data).to.have.property('event', 'task'); 378 | }); 379 | }); 380 | 381 | it('writes out the expected nsq messages', function (done) { 382 | const writerSpy = sinon.spy(construct.nsq.writer, 'publish'); 383 | const constructProto = Object.getPrototypeOf(construct); 384 | const prepareStub = sinon.stub(constructProto, 'prepare').resolves({}); 385 | const getLocalesStub = sinon.stub(constructProto, 'getLocales').resolves(['en-LOL', 'not-REAL']); 386 | 387 | const progress = construct.build({ 388 | promote: false, 389 | data: { 390 | 'name': 'test', 391 | 'versions': { 392 | '1.0.0': { 393 | name: 'test', 394 | keywords: [ 395 | 'webpack' 396 | ] 397 | } 398 | }, 399 | 'dist-tags': { 400 | latest: '1.0.0' 401 | } 402 | } 403 | }, function (error) { 404 | assume(error).to.be.falsey(); 405 | assume(prepareStub).is.called(1); 406 | assume(getLocalesStub).is.called(1); 407 | // We end the work as soon as everything is queued, even though we may still end up doing a bit more 408 | setTimeout(() => { 409 | // start, progress, finished, and actual queueing per locale (en-LOL, not-REAL) and progress start/end 410 | assume(writerSpy).is.called(9); 411 | 412 | assertNsqLocaleProgress(writerSpy, 'en-LOL', 'webpack'); 413 | assertNsqLocaleProgress(writerSpy, 'not-REAL', 'webpack'); 414 | 415 | assume(writerSpy).is.calledWithMatch(statusTopic, { 416 | eventType: 'queued', 417 | name: 'test', 418 | env: 'dev', 419 | buildType: 'webpack', 420 | total: 2, 421 | message: 'Builds Queued' 422 | }); 423 | 424 | done(); 425 | }, 100); 426 | }); 427 | 428 | assume(progress).to.be.instanceof(Progress); 429 | }); 430 | }); 431 | 432 | describe('#construct.builder', function () { 433 | 434 | it('should publish to nsq only once for a single locale if there is no error on publish', async function () { 435 | const spec = { 436 | name: 'my-package', 437 | version: '7.0.0', 438 | env: 'dev', 439 | promote: true, 440 | type: 'webpack' 441 | }; 442 | const progress = new MockProgress(); 443 | const locale = 'en-US'; 444 | const writerStub = sinon.stub(construct.nsq.writer, 'publish'); 445 | writerStub.yieldsAsync(null, 'woooo'); 446 | 447 | await construct.builder.buildPerLocale({ spec, progress, locale }); 448 | assume(writerStub).is.called(1); 449 | }); 
450 | }); 451 | }); 452 | -------------------------------------------------------------------------------- /lib/construct/index.js: -------------------------------------------------------------------------------- 1 | /* eslint max-params: 0 */ 2 | /* eslint max-statements: 0 */ 3 | 'use strict'; 4 | 5 | const EventEmitter = require('events').EventEmitter; 6 | const intersect = require('lodash.intersection'); 7 | const Progress = require('./progress'); 8 | const { promisify } = require('util'); 9 | const Cleaner = require('./cleaner'); 10 | const Builder = require('./builder'); 11 | const Packer = require('./packer'); 12 | const crypto = require('crypto'); 13 | const mkdirp = require('mkdirp'); 14 | const mkdirpAsync = promisify(mkdirp); 15 | const async = require('async'); 16 | const errs = require('errs'); 17 | const fitting = require('./fitting'); 18 | const rmrf = require('./rmrf'); 19 | const path = require('path'); 20 | const fs = require('fs'); 21 | const os = require('os'); 22 | const emits = require('emits'); 23 | 24 | // 25 | // Available build systems. 26 | // 27 | const available = [ 28 | 'browserify', 29 | 'webpack', 30 | 'npm' 31 | ]; 32 | 33 | // 34 | // Map environment values from natives to bffs. 35 | // 36 | const envs = { 37 | production: 'prod', 38 | development: 'dev', 39 | staging: 'test', 40 | latest: 'prod', 41 | test: 'test', 42 | prod: 'prod', 43 | dev: 'dev' 44 | }; 45 | 46 | /** 47 | * Construct the builder orchestrator, provide options and a factory loader. 48 | * 49 | * @Constructor 50 | * @param {Slay} app Slay/Express instance. 51 | * @param {Object} options Optional. 52 | * @api public 53 | */ 54 | class Constructor extends EventEmitter { 55 | constructor(app, options) { 56 | super(options); 57 | 58 | this.app = app; 59 | this.cdnup = app.cdnup; 60 | this.models = app.models; 61 | this.throttle = options.throttle || app.config.get('throttle') || 10; 62 | // Default to max 2s backoff and 1 retry attempt 63 | this.retry = options.retry || app.config.get('build:retry') || { retries: 1, max: 2000 }; 64 | this.maxFailures = options.maxFailures || app.config.get('maxFailures') || 2; 65 | this.failures = {}; 66 | 67 | this.timeout = options.timeout || 15 * 6E4; 68 | this.purgeRetries = (options.retries || 0) + 5; 69 | 70 | this.source = options.source; 71 | this.target = options.target || os.tmpdir(); 72 | this.rootDir = options.target || this.target; 73 | this.installRoot = options.install || path.join(this.rootDir, 'install'); 74 | this.tarRoot = options.tarballs || path.join(this.rootDir, 'tarballs'); 75 | 76 | this.nsq = app.nsq; 77 | this.topic = options.topic || app.config.get('nsq:topic'); 78 | this.statusTopic = options.statusTopic || app.config.get('nsq:statusTopic'); 79 | // 80 | // Clean the configured temporary build folder, use a 1:1 mapping with the build 81 | // timeout. Do not run the cleaning if running in development. Default to 15 82 | // minutes to prevent a interval of 0. 83 | // 84 | if (app.config.get('env') !== 'development') setInterval( 85 | this.purge.bind(this), 86 | this.timeout 87 | ); 88 | this.emits = emits; 89 | this.packer = new Packer({ 90 | retry: this.retry, 91 | log: app.contextLog, 92 | cdnup: this.cdnup, 93 | npmrc: app.npmrc 94 | }); 95 | this.cleaner = new Cleaner({ 96 | log: app.contextLog 97 | }); 98 | this.builder = new Builder(this); 99 | } 100 | 101 | /** 102 | * Initiate a new build process as child through Gjallarhorn. 103 | * 104 | * @param {Object} opts Build options. 105 | * @param {Object} opts.data Package data. 
106 | * @param {Boolean} opts.promote Should the build be promoted? 107 | * @param {Function} done Completion callback. 108 | * @returns {Progress} Expose created progress instance. 109 | * @api public 110 | */ 111 | build({ promote, data }, done) { 112 | const { app } = this; 113 | const progress = new Progress({ 114 | nsq: { 115 | writer: this.nsq.writer, 116 | topic: this.statusTopic, 117 | log: app.contextLog 118 | } 119 | }, done); 120 | 121 | // 122 | // Compile build specifications from the data. 123 | // 124 | app.contextLog.info('compile build spec for %s', data.name, { 125 | name: data.name 126 | }); 127 | 128 | this.builder.build({ promote, data, progress }) 129 | .then(() => {/* we don't care about the output */}) 130 | .catch(err => done(err)); 131 | 132 | return progress; 133 | } 134 | 135 | 136 | /** 137 | * 138 | * Create a key based on the spec that is unique to the set of builds 139 | * @function _key 140 | * @param {Object} spec - Build spec 141 | * @returns {String} unique key for given build 142 | */ 143 | _key(spec) { 144 | return [spec.name, spec.env, spec.version].join('!'); 145 | } 146 | 147 | /** 148 | * Prepare the build by unpacking, running npm install and then creating 149 | * a tarball to be used for extraction when running the actual build. 150 | * 151 | * @param {Object} spec Specification object for the given build 152 | * @param {String} content Base64 string representing the package content 153 | * @param {StatusWriter} statusWriter The writer for the status-api 154 | * @api public 155 | */ 156 | async prepare(spec, content, statusWriter) { 157 | const { app } = this; 158 | app.contextLog.info('Prepare build for all locales: %s', spec.name, spec); 159 | 160 | const paths = await this._createPaths(spec); 161 | 162 | // 163 | // First see if this package has already been built for a different env, we 164 | // should only build a single version once 165 | // 166 | try { 167 | await this.checkAndDownload(spec, paths); 168 | } catch (err) { 169 | if (err.install) { 170 | await this.packer.repack(spec, content, paths, statusWriter); 171 | } else { 172 | throw err; 173 | } 174 | } 175 | 176 | return paths; 177 | } 178 | 179 | /** 180 | * Check to see if this package version combo exists and download and use that 181 | * tarball instead if it does 182 | * 183 | * @function checkAndDownload 184 | * @param {Object} spec - specification for build 185 | * @param {Object} paths - paths object 186 | * @returns {Promise} completion handler 187 | * @api private 188 | */ 189 | checkAndDownload(spec, paths) { 190 | return new Promise((resolve, reject) => { 191 | const { app } = this; 192 | 193 | if (!this.cdnup) { 194 | app.contextLog.info('%s: cdnup not configured. 
Skip download attempt.', spec.name); 195 | return reject(errs.create({ 196 | message: 'cdnup is not configured', 197 | install: true 198 | })); 199 | } 200 | 201 | const pkgcloud = this.cdnup.client; 202 | const filename = `${encodeURIComponent(spec.name)}-${spec.version}.tgz`; 203 | 204 | pkgcloud.getFile(this.cdnup.bucket, filename, (err, file) => { 205 | if (err || !file) { 206 | app.contextLog.info('%s: tarball %s not found in remote storage', spec.name, filename); 207 | return reject(errs.create({ 208 | message: 'Tarball not found', 209 | install: true 210 | })); 211 | } 212 | 213 | function onError(err) { 214 | app.contextLog.error('Tarball download error for %s: %s', spec.name, err.message); 215 | reject(err); 216 | } 217 | 218 | function onFinish() { 219 | app.contextLog.info('Tarball download ok for %s: %s', spec.name, paths.tarball); 220 | resolve(); 221 | } 222 | 223 | pkgcloud.download({ 224 | container: this.cdnup.bucket, 225 | remote: filename 226 | }).pipe(fs.createWriteStream(paths.tarball)) 227 | .once('error', onError) 228 | .on('finish', onFinish); 229 | }); 230 | }); 231 | } 232 | 233 | /** 234 | * Extract package content from the JSON body. 235 | * 236 | * @param {Object} data Package data. 237 | * @param {Object} spec Descriptive package information. 238 | * @returns {String} base64 encoded string. 239 | * @api private 240 | */ 241 | content(data, spec) { 242 | const name = spec.name + '-' + spec.version + '.tgz'; 243 | 244 | data = data || {}; 245 | data._attachments = data._attachments || {}; 246 | data._attachments[name] = data._attachments[name] || {}; 247 | 248 | return data._attachments[name].data || ''; 249 | } 250 | 251 | /** 252 | * Get the package.json content from the payload. 253 | * 254 | * @param {Object} data Payload content. 255 | * @returns {Object} Package.json 256 | * @api private 257 | */ 258 | extractPackage(data) { 259 | data = data || {}; 260 | 261 | // 262 | // JUST IN CASE we get a cassandra based data piece we check for distTags 263 | // first 264 | // 265 | const version = (data.distTags || data['dist-tags'] || {}).latest; 266 | return (data.versions || {})[version] || {}; 267 | } 268 | 269 | /** 270 | * Get allowed locales from the package.json. 271 | * 272 | * @param {Object} data Build specifications. 273 | * @returns {Promise} completion handler 274 | * @api private 275 | */ 276 | getLocales(data) { 277 | return new Promise((resolve) => { 278 | const { app } = this; 279 | const cache = []; 280 | let locales = []; 281 | 282 | /** 283 | * Setup the queue to get all dependencies per package and push the main 284 | * package.json as first task. 285 | */ 286 | app.contextLog.info('calculate locales for %s', data.name, { 287 | name: data.name 288 | }); 289 | 290 | /** 291 | * Fetch the package.json of each dependency, ignore already fetched dependencies 292 | * to prevent circular dependencies from filling up the queue eternally. 293 | * 294 | * @param {Object} pkg Package.json object 295 | * @param {Function} next Completion callback. 296 | * @api private 297 | */ 298 | const queue = async.queue((pkg, next) => { 299 | cache.push(pkg.name); 300 | 301 | // Push locales if they are defined as array. 302 | if (Array.isArray(pkg.locales)) { 303 | locales.push(pkg.locales.filter(Boolean)); 304 | } 305 | 306 | /** 307 | * Fetch the package.json of each dependency and add the processed 308 | * package.json to the queue. 
309 | */ 310 | async.each(Object.keys(pkg.dependencies || {}) 311 | .filter(name => !~cache.indexOf(name)), (name, fn) => { 312 | this.models.Package.get({ name }, (err, pkgData) => { 313 | if (err) return void fn(err); 314 | 315 | queue.push(pkgData || {}); 316 | return void fn(); 317 | }); 318 | }, next); 319 | }, 100); 320 | 321 | queue.push(this.extractPackage(data)); 322 | 323 | /** 324 | * The async queue completed, calculate the intersect of all locale lists. 325 | * Fallback to `en-US` if no intersecting locales remain. 326 | */ 327 | queue.drain(function complete() { 328 | locales = intersect.apply(intersect, locales); 329 | app.contextLog.info('calculated locales for %s', data.name, { 330 | locales: locales, 331 | name: data.name 332 | }); 333 | 334 | resolve(locales.length ? locales : ['en-US']); 335 | }); 336 | }); 337 | } 338 | 339 | /** 340 | * Extract descriptive package information from the JSON body. 341 | * 342 | * @param {Object} data Package data. 343 | * @returns {Promise} completion handler 344 | * @api private 345 | */ 346 | async specs(data) { 347 | const { app } = this; 348 | const classification = { 349 | browserify: ['browserify'], 350 | webpack: ['webpack'], 351 | npm: ['npm'] 352 | }; 353 | 354 | let entry; 355 | let type; 356 | 357 | const pkg = this.extractPackage(data); 358 | 359 | type = fitting(pkg, { 360 | keyword: 'build', 361 | classification: classification 362 | }); 363 | 364 | 365 | /** 366 | * Try to infer the build system from package.json properties if no build 367 | * system type has been found yet. 368 | */ 369 | if (!type) { 370 | const properties = Object.keys(pkg); 371 | type = Object.keys(classification).filter(function checkProperties(key) { 372 | return ~properties.indexOf(key) && typeof pkg[key] === 'string'; 373 | })[0]; 374 | } 375 | 376 | /** 377 | * Only use the build property as entry if it is a path or file location, 378 | * config objects should be ignored. 379 | */ 380 | if (typeof pkg[type] === 'string') { 381 | entry = pkg[type]; 382 | } 383 | 384 | 385 | // Read the dependency tree and each package.json to determine locales. 386 | let locales; 387 | try { 388 | locales = await this.getLocales(data); 389 | } catch (e) { 390 | app.contextLog.error('error constructing spec', e); 391 | throw e; 392 | } 393 | 394 | return { 395 | type: available[available.indexOf(type)], 396 | env: envs[data.env] || 'dev', 397 | build: pkg.build !== false, 398 | version: pkg.version, 399 | locales: locales, 400 | name: pkg.name, 401 | entry: entry 402 | }; 403 | } 404 | 405 | /** 406 | * Check the validity of the uuid. Allows defensive querying, 407 | * e.g. ignore invalid ids asap. 408 | * 409 | * @param {String} id Unique id v4. 410 | * @returns {Boolean} Valid uuid or not. 411 | * @api public 412 | */ 413 | valid(id) { 414 | return /^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$/i.test(id); 415 | } 416 | 417 | /** 418 | * Helper method that cleans the temporary build folder. 
419 | * @api private 420 | */ 421 | purge() { 422 | const { app, target } = this; 423 | const age = this.timeout * this.purgeRetries; 424 | 425 | fs.readdir(target, (error, files) => { 426 | if (error || !files) { 427 | app.contextLog.error('Error reading files to purge from %s: %s', target, error || 'No files found'); 428 | return; 429 | } 430 | 431 | files = files.filter(this.valid); 432 | // MAKE ASYNC/AWAIT 433 | async.reduce(files, 0, (i, file, next) => { 434 | file = path.join(target, file); 435 | 436 | fs.stat(file, function getAge(err, stat) { 437 | if (err) return void next(null, i); 438 | 439 | // 440 | // Be defensive and use modified time to determine if the folder is older 441 | // than age (max build time * the numer of attempts + 1). This should 442 | // prevent child content from being accidently removed at the last 443 | // millisecond, which could result in a failed build. 444 | // 445 | if (Date.now() - age <= new Date(stat.mtime).getTime()) { 446 | app.contextLog.info('Skip purge of file: %s', file); 447 | return next(null, i); 448 | } 449 | 450 | app.contextLog.info('Purge outdated file: %s', file); 451 | return void rmrf(file, function removed(rmError) { 452 | next(rmError, i + 1); 453 | }); 454 | }); 455 | }, (err, n) => { 456 | if (err) { 457 | app.contextLog.error(err); 458 | return void this.emit('purge', error, 0); 459 | } 460 | 461 | app.contextLog.info('Purged %s outdated files from temporary target location', n); 462 | return void this.emit('purge', null, n); 463 | }); 464 | }); 465 | } 466 | 467 | /** 468 | * Create the given paths for a tarball download or npm install 469 | * 470 | * @param {Object} spec Build specification 471 | * @returns {Promise} Completion handler 472 | * @api private 473 | */ 474 | async _createPaths(spec) { 475 | const { app, installRoot, tarRoot } = this; 476 | const uniq = `${encodeURIComponent(spec.name)}-${spec.version}-${spec.env}-${crypto.randomBytes(5).toString('hex')}`; 477 | const installPath = path.join(installRoot, uniq); 478 | const tarball = path.join(tarRoot, uniq + '.tgz'); 479 | const paths = { installPath, tarball }; 480 | 481 | app.contextLog.info('Create paths for %s spec: %s@%s', spec.env, spec.name, spec.version, paths); 482 | 483 | await Promise.all([ 484 | mkdirpAsync(installRoot), 485 | mkdirpAsync(tarRoot) 486 | ]); 487 | 488 | return paths; 489 | } 490 | } 491 | 492 | // 493 | // Expose the builder instance. 494 | // 495 | module.exports = function preboot(app, options, done) { 496 | const config = app.config.get('builder'); 497 | app.construct = new Constructor(app, config); 498 | done(); 499 | }; 500 | --------------------------------------------------------------------------------