├── .eslintignore ├── .eslintrc ├── .gitignore ├── appveyor.yml ├── lib ├── tmp-file.js ├── cache.js ├── worker.js └── uglifier.js ├── test ├── lib │ ├── tmp-file.js │ ├── worker.js │ ├── cache.js │ └── uglifer.js └── index.js ├── index.js ├── package.json ├── CHANGELOG.md └── README.md /.eslintignore: -------------------------------------------------------------------------------- 1 | coverage/ 2 | .nyc_output/ 3 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base" 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | npm-debug.log 3 | coverage 4 | .nyc_output 5 | package-lock.json 6 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | image: 2 | - Ubuntu 3 | 4 | environment: 5 | # Test against these versions of Node.js and io.js 6 | matrix: 7 | # node.js 8 | - nodejs_version: "10" 9 | - nodejs_version: "12" 10 | - nodejs_version: "14" 11 | 12 | # Install scripts. (runs after repo cloning) 13 | install: 14 | - nvm install $nodejs_version 15 | - npm install 16 | 17 | # Post-install test scripts. 18 | test_script: 19 | # Output useful info for debugging. 
20 | - node --version 21 | - npm --version 22 | # run tests 23 | - npm test 24 | 25 | after_test: 26 | - './node_modules/.bin/nyc report --reporter=text-lcov | ./node_modules/.bin/coveralls' -------------------------------------------------------------------------------- /lib/tmp-file.js: -------------------------------------------------------------------------------- 1 | const tmp = require('tmp'); 2 | const fs = require('fs'); 3 | 4 | // make sure that we clean up the temp files even if the process exits unexpectedly 5 | tmp.setGracefulCleanup(); 6 | 7 | function update(filename, newContent) { 8 | fs.writeFileSync(filename, newContent, 'utf8'); 9 | } 10 | 11 | module.exports = { 12 | create(content) { 13 | const filename = tmp.fileSync().name; 14 | update(filename, content); 15 | return filename; 16 | }, 17 | 18 | remove(filename) { 19 | fs.unlinkSync(filename); 20 | }, 21 | 22 | update, 23 | 24 | read(filename) { 25 | return fs.readFileSync(filename, 'utf8'); 26 | }, 27 | }; 28 | -------------------------------------------------------------------------------- /test/lib/tmp-file.js: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import fs from 'fs'; 3 | import tmpFile from '../../lib/tmp-file'; 4 | 5 | const myContent = 'somecontent'; 6 | 7 | test('tmpFile.create creates a real file and retuns its filename', (t) => { 8 | const fileName = tmpFile.create(myContent); 9 | t.truthy(fs.existsSync(fileName)); 10 | t.is(fs.readFileSync(fileName, 'utf8'), myContent); 11 | fs.unlinkSync(fileName); 12 | }); 13 | 14 | test('tmpFile.update updates the tmpFile\'s content synchronously', (t) => { 15 | const tmpFileName = tmpFile.create('randoContent'); 16 | tmpFile.update(tmpFileName, myContent); 17 | t.is(fs.readFileSync(tmpFileName, 'utf8'), myContent); 18 | fs.unlinkSync(tmpFileName); 19 | }); 20 | 21 | test('tmpFile.remove deletes the file from disk', (t) => { 22 | const tmpFileName = 
tmpFile.create(myContent); 23 | t.truthy(fs.existsSync(tmpFileName)); 24 | tmpFile.remove(tmpFileName); 25 | t.falsy(fs.existsSync(tmpFileName)); 26 | }); 27 | 28 | test('tmpFile.read functions reads tmpFile contents synchronously', (t) => { 29 | const tmpFileName = tmpFile.create(myContent); 30 | t.is(tmpFile.read(tmpFileName), fs.readFileSync(tmpFileName, 'utf8')); 31 | fs.unlinkSync(tmpFileName); 32 | }); 33 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const uglifier = require("./lib/uglifier"); 2 | 3 | function sourceMapError(lib) { 4 | return `You should not pass options.${lib}.sourceMap, did you mean options.sourceMap?`; 5 | } 6 | 7 | function ParallelUglifyPlugin(options) { 8 | if (options.uglifyJS && options.terser) { 9 | throw new TypeError( 10 | "You cannot use both uglifyJS and terser for the same plugin." 11 | ); 12 | } 13 | 14 | if (options.uglifyJS && options.uglifyJS.sourceMap) { 15 | throw new TypeError(sourceMapError("uglifyJS")); 16 | } 17 | 18 | if (options.terser && options.terser.sourceMap) { 19 | throw new TypeError(sourceMapError("terser")); 20 | } 21 | this.options = options; 22 | 23 | if (!(this.options.uglifyJS || this.options.terser)) { 24 | this.options.uglifyJS = {}; 25 | } 26 | } 27 | 28 | ParallelUglifyPlugin.prototype.apply = function apply(compiler) { 29 | compiler.hooks.compilation.tap("ParallelUglifyPlugin", (compilation) => { 30 | compilation.hooks.processAssets.tapAsync( 31 | { 32 | name: "ParallelUglifyPlugin", 33 | stage: compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_SIZE, 34 | }, 35 | (chunks, callback) => { 36 | uglifier.processAssets(compilation, this.options).then(() => { 37 | callback(); 38 | }); 39 | } 40 | ); 41 | }); 42 | 43 | compiler.hooks.done.tap("ParallelUglifyPlugin", () => { 44 | uglifier.pruneCache(this.options); 45 | }); 46 | }; 47 | 48 | module.exports = ParallelUglifyPlugin; 49 | 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "webpack-parallel-uglify-plugin", 3 | "version": "2.0.0", 4 | "description": "A webpack plugin to run uglifyjs in parallel.", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "nyc --reporter=html ava", 8 | "lint": "eslint ." 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "git://github.com/gdborton/webpack-parallel-uglify-plugin.git" 13 | }, 14 | "keywords": [ 15 | "compress", 16 | "compressor", 17 | "min", 18 | "minification", 19 | "minifier", 20 | "minify", 21 | "optimize", 22 | "optimizer", 23 | "terser", 24 | "uglify", 25 | "uglify-es", 26 | "uglify-js", 27 | "webpack", 28 | "webpack-plugin" 29 | ], 30 | "ava": { 31 | "require": [ 32 | "esm" 33 | ] 34 | }, 35 | "author": "Gary Borton ", 36 | "license": "ISC", 37 | "devDependencies": { 38 | "acorn": "^5.1.1", 39 | "ava": "^3.12.1", 40 | "coveralls": "^3.1.0", 41 | "escodegen": "^1.8.1", 42 | "eslint": "^7.8.1", 43 | "eslint-config-airbnb-base": "^14.2.0", 44 | "eslint-plugin-import": "^2.22.0", 45 | "eslint-plugin-jsx-a11y": "^6.3.1", 46 | "esm": "^3.2.25", 47 | "nyc": "^15.1.0", 48 | "sinon": "^1.17.4", 49 | "webpack": "^4.44.1" 50 | }, 51 | "dependencies": { 52 | "babel-code-frame": "^6.26.0", 53 | "glob": "^7.0.5", 54 | "mkdirp": "^0.5.1", 55 | "pify": "^3.0.0", 56 | "terser": "^5.3.5", 57 | "tmp": "0.0.29", 58 | "uglify-js": "^3.12.1", 59 | "webpack-sources": "^1.0.0", 60 | "worker-farm": "^1.3.1" 61 | }, 62 | "peerDependencies": { 63 | "webpack": "*" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /lib/cache.js: -------------------------------------------------------------------------------- 1 | const crypto = require('crypto'); 2 | const fs = require('fs'); 3 | const path = require('path'); 4 | const glob = require('glob'); 5 | const pkg = 
require('../package.json'); 6 | 7 | // Small helper function to quickly create a hash from any given string. 8 | function createHashFromContent(content) { 9 | const hash = crypto.createHash('sha256'); 10 | hash.update(content); 11 | return hash.digest('hex'); 12 | } 13 | 14 | /** 15 | * Create a cache key from both the source, and the options used to minify the file. 16 | */ 17 | function createCacheKey(source, options) { 18 | const content = `${source} ${JSON.stringify(options)} ${JSON.stringify(pkg)}`; 19 | return createHashFromContent(content); 20 | } 21 | 22 | /** 23 | * Attempt to read from cache. If the read fails, or cacheDir isn't defined return null. 24 | */ 25 | function retrieveFromCache(cacheKey, cacheDir) { 26 | if (cacheDir) { 27 | try { 28 | return fs.readFileSync(path.join(cacheDir, `${cacheKey}.js`), 'utf8'); 29 | } catch (e) { 30 | // this just means it is uncached. 31 | return null; 32 | } 33 | } 34 | return null; 35 | } 36 | 37 | /** 38 | * Remove unused files from the cache. This prevents the cache from growing indefinitely. 39 | */ 40 | function pruneCache(usedCacheKeys, allCacheKeys, cacheDir) { 41 | if (cacheDir) { 42 | const unusedKeys = allCacheKeys.filter((key) => usedCacheKeys.indexOf(key) === -1); 43 | unusedKeys.forEach((key) => { 44 | fs.unlinkSync(path.join(cacheDir, `${key}.js`)); 45 | }); 46 | } 47 | } 48 | 49 | function getCacheKeysFromDisk(cacheDir) { 50 | if (cacheDir) { 51 | return glob.sync(path.join(cacheDir, '*.js')).map((fileName) => path.basename(fileName, '.js')); 52 | } 53 | return []; 54 | } 55 | 56 | /** 57 | * Attempt to write the file to the cache. 
58 | */ 59 | function saveToCache(cacheKey, minifiedCode, cacheDir) { 60 | if (cacheDir) { 61 | fs.writeFileSync(path.join(cacheDir, `${cacheKey}.js`), minifiedCode); 62 | } 63 | } 64 | 65 | module.exports = { 66 | createHashFromContent, 67 | createCacheKey, 68 | retrieveFromCache, 69 | pruneCache, 70 | getCacheKeysFromDisk, 71 | saveToCache, 72 | }; 73 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 2.0.0 2 | - Breaking: Replaced `uglify-es` with `terser` ([#66](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/66)) 3 | - Breaking: Update to webpack v5 api, dropping support for older versions. ([#68](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/68)) 4 | - Chore: Updated dependencies to address identified vulnerabilities. ([#65](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/65)) 5 | 6 | ## 1.1.2 7 | - Enable source maps by default. ([#61](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/61)) 8 | 9 | ## 1.1.1 10 | - Updated uglify dependencies to include recent features/bug fixes. 11 | 12 | ## 1.1.0 13 | - Added babel-code-frame to pretty print SyntaxError messages, this should help in debugging failed minification. ([#45](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/45)) 14 | 15 | ## 1.0.2 16 | - Fix issue with uglifyES options not being applied. ([#42](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/42)) 17 | 18 | ## 1.0.1 19 | 20 | - Delay pruning of the cache until compilation is completely done. ([#41](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/41)) 21 | 22 | ## 1.0.0 23 | 24 | - Breaking: Updated to [uglify-js](https://github.com/mishoo/UglifyJS2)@3, syntax remains the same, but the options you provide uglifyJS may have changed. 
([#31](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/31)) 25 | - Breaking: Plugin now ignores your config's `devtool` option. Enable source maps by initializing the plugin with `sourceMap: true`. ([#23](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/23)) 26 | - New: Added [uglify-es](https://github.com/mishoo/UglifyJS2/tree/harmony) support. To use, provide the plugin with uglifyES options instead of uglifyJS. ([#35](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/35)) 27 | - Fix: `workerCount` option is no longer being ignored. ([#33](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/33)) 28 | 29 | ## 0.4.2 30 | 31 | - New: Added this change log. 32 | - New: Added support for test/include/exclude options. ([#26](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/26)) 33 | - Fix: Addressed issue with creating too many event listeners for child processes. ([#25](https://github.com/gdborton/webpack-parallel-uglify-plugin/pull/25)) 34 | -------------------------------------------------------------------------------- /test/lib/worker.js: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import uglify from 'uglify-js'; 3 | import sinon from 'sinon'; 4 | import { RawSource, OriginalSource } from 'webpack-sources'; 5 | import tmpFile from '../../lib/tmp-file'; 6 | import cache from '../../lib/cache'; 7 | 8 | const codeSource = 'function test () { void(0); }'; 9 | const rawSource = new RawSource(codeSource); 10 | const originalSource = new OriginalSource(codeSource); 11 | const sourceAndMap = rawSource.sourceAndMap(); 12 | const options = { 13 | uglifyJS: { }, 14 | }; 15 | const originalContent = JSON.stringify({ 16 | source: sourceAndMap.source, 17 | map: sourceAndMap.map, 18 | options, 19 | }); 20 | const minifiedContent = uglify.minify(codeSource, { }); 21 | 22 | // ava is multi process and uses process.on, 23 | // so we stub it to 
be sure it doesn't get in the way. 24 | const stubbedOn = sinon.stub(process, 'on'); 25 | const worker = require('../../lib/worker'); 26 | 27 | const { minify, processMessage } = worker; 28 | 29 | stubbedOn.restore(); 30 | 31 | let stubbedRead; 32 | let stubbedUpdate; 33 | test.beforeEach(() => { 34 | stubbedRead = sinon.stub(tmpFile, 'read', () => originalContent); 35 | stubbedUpdate = sinon.stub(tmpFile, 'update'); 36 | }); 37 | 38 | test.afterEach(() => { 39 | stubbedRead.restore(); 40 | stubbedUpdate.restore(); 41 | }); 42 | 43 | test.serial('minify should not return a map if called with a RawSource object', (t) => { 44 | const { map } = rawSource.sourceAndMap(); 45 | return minify(codeSource, map, undefined, uglify).then(result => { 46 | t.is(result.map, undefined); 47 | t.is(result.code, minifiedContent.code); // should produce the same minified content. 48 | }); 49 | }); 50 | 51 | test.serial( 52 | 'minify should return a valid source map if called with an OriginalSource object', 53 | (t) => { 54 | const { map } = originalSource.sourceAndMap(); 55 | return minify(codeSource, map, undefined, uglify).then(result => { 56 | t.truthy(result.map); 57 | t.is(result.code, minifiedContent.code); // should produce the same minified content. 
58 | }); 59 | }, 60 | ); 61 | 62 | test.serial.cb('processMessage should minify the file passed via a tmpFile message', (t) => { 63 | const tmpFileName = 'asdf'; 64 | 65 | processMessage(tmpFileName, (error) => { 66 | if (error) { t.end(error); } 67 | const cacheKey = cache.createCacheKey(codeSource + false, options); 68 | t.true(stubbedUpdate.calledWith(tmpFileName, JSON.stringify({ 69 | source: minifiedContent.code, 70 | map: minifiedContent.map, 71 | cacheKey, 72 | }))); 73 | 74 | t.end(); 75 | }); 76 | }); 77 | -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | /* eslint no-new:0 */ 2 | import fs from 'fs'; 3 | import glob from 'glob'; 4 | import sinon from 'sinon'; 5 | import test from 'ava'; 6 | import WebpackParallelUglifyPlugin from '../index'; 7 | 8 | let sandbox; 9 | 10 | test.beforeEach(() => { 11 | sandbox = sinon.sandbox.create(); 12 | }); 13 | 14 | test.afterEach(() => { 15 | sandbox.restore(); 16 | }); 17 | 18 | test('creating a WebpackParallelUglifyPlugin instance w/ both uglify options throws', (t) => { 19 | t.throws(() => { 20 | new WebpackParallelUglifyPlugin({ 21 | uglifyJS: {}, 22 | terser: {}, 23 | }); 24 | }); 25 | }); 26 | 27 | test('creating a WebpackParallelUglifyPlugin instance with uglify.sourceMap throws', (t) => { 28 | t.throws(() => { 29 | new WebpackParallelUglifyPlugin({ 30 | uglifyJS: { sourceMap: true }, 31 | }); 32 | }); 33 | 34 | t.throws(() => { 35 | new WebpackParallelUglifyPlugin({ 36 | terser: { sourceMap: true }, 37 | }); 38 | }); 39 | }); 40 | 41 | test('providing no uglify options defaults to uglifyJS: {}', (t) => { 42 | const plugin = new WebpackParallelUglifyPlugin({}); 43 | t.deepEqual(plugin.options, { uglifyJS: {} }); 44 | }); 45 | 46 | function FakeCompilation() { 47 | this.hooks = { 48 | processAssets: { 49 | tapAsync: () => { 50 | 51 | } 52 | } 53 | } 54 | } 55 | 56 | function 
FakeCompiler() { 57 | const callbacks = {}; 58 | const fakeCompilation = new FakeCompilation(); 59 | this.assets = []; 60 | this.hooks = { 61 | compilation: { 62 | tap: (pluginName, callback) => { 63 | callbacks['compilation'] = () => callback(fakeCompilation); 64 | } 65 | }, 66 | done: { 67 | tap: (pluginName, callback) => { 68 | callbacks['done'] = callback; 69 | } 70 | } 71 | } 72 | 73 | this.fireEvent = (event, ...args) => { 74 | callbacks[event].apply(this, args); 75 | }; 76 | } 77 | 78 | test('deleting unused cache files after all asset optimizations', (t) => { 79 | const originalRead = fs.readFileSync; 80 | sandbox.stub(fs, 'unlinkSync'); 81 | sandbox.stub(fs, 'writeFileSync'); 82 | sandbox.stub(fs, 'mkdirSync'); 83 | sandbox.stub(fs, 'readFileSync', (filePath, encoding) => ( 84 | filePath.match(/fake_cache_dir/) 85 | ? 'filecontents' 86 | : originalRead(filePath, encoding) 87 | )); 88 | 89 | sandbox.stub(glob, 'sync').returns( 90 | [ 91 | '/fake_cache_dir/file1.js', 92 | '/fake_cache_dir/file2.js', 93 | ], 94 | ); 95 | 96 | const uglifyPlugin = new WebpackParallelUglifyPlugin({ 97 | uglifyJS: {}, 98 | cacheDir: '/fake_cache_dir/', 99 | }); 100 | 101 | const compiler = new FakeCompiler(); 102 | uglifyPlugin.apply(compiler); 103 | compiler.fireEvent('compilation', compiler); 104 | t.is(fs.unlinkSync.callCount, 0, 'Cache should not be cleared by optimize-chunk-assets'); 105 | compiler.fireEvent('done'); 106 | t.deepEqual( 107 | fs.unlinkSync.args, 108 | [['/fake_cache_dir/file1.js'], ['/fake_cache_dir/file2.js']], 109 | 'Unused cache files should be removed after compilation', 110 | ); 111 | }); 112 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # webpack-parallel-uglify-plugin [![Build 
status](https://ci.appveyor.com/api/projects/status/v1xvpvx0xfumv9fh/branch/master?svg=true)](https://ci.appveyor.com/project/gdborton/webpack-parallel-uglify-plugin/branch/master) [![Coverage Status](https://coveralls.io/repos/github/gdborton/webpack-parallel-uglify-plugin/badge.svg?branch=master)](https://coveralls.io/github/gdborton/webpack-parallel-uglify-plugin?branch=master) 2 | 3 | This plugin serves to help projects with many entry points speed up their builds. The UglifyJS plugin provided with webpack runs sequentially on each of the output files. This plugin runs uglify in parallel with one thread for each of your available cpus. This can lead to significantly reduced build times as minification is very CPU intensive. 4 | 5 | ## Config 6 | 7 | Configuring is straightforward. 8 | 9 | ```javascript 10 | import ParallelUglifyPlugin from 'webpack-parallel-uglify-plugin'; 11 | 12 | module.exports = { 13 | plugins: [ 14 | new ParallelUglifyPlugin({ 15 | // Optional regex, or array of regex to match file against. Only matching files get minified. 16 | // Defaults to /.js$/, any file ending in .js. 17 | test, 18 | include, // Optional regex, or array of regex to include in minification. Only matching files get minified. 19 | exclude, // Optional regex, or array of regex to exclude from minification. Matching files are not minified. 20 | cacheDir, // Optional absolute path to use as a cache. If not provided, caching will not be used. 21 | workerCount, // Optional int. Number of workers to run uglify. Defaults to num of cpus - 1 or asset count (whichever is smaller) 22 | sourceMap, // Optional Boolean. This slows down the compilation. Defaults to false. 23 | uglifyJS: { 24 | // These pass straight through to uglify-js@3. 25 | // Cannot be used with terser. 26 | // Defaults to {} if not neither uglifyJS or terser are provided. 27 | // You should use this option if you need to ensure es5 support. 
uglify-js will produce an 28 | // error message if it comes across any es6 code that it can't parse. 29 | }, 30 | terser: { 31 | // These pass straight through to terser. 32 | // Cannot be used with uglifyJS. 33 | // terser is a fork of uglify-es, a version of uglify that supports ES6+ version of uglify 34 | // that understands newer es6 syntax. You should use this option if the files that you're 35 | // minifying do not need to run in older browsers/versions of node. 36 | } 37 | }), 38 | ], 39 | }; 40 | ``` 41 | 42 | ### Example Timings 43 | 44 | These times were found by running webpack on a very large build, producing 493 output files and totaling 144.24 MiB before minifying. All times are listed with fully cached babel-loader for consistency. 45 | 46 | **Note:** I no longer have access to the huge project that I was testing this on. 47 | 48 | ``` 49 | No minification: Webpack build complete in: 86890ms (1m 26s) 50 | Built in uglify plugin: Webpack build complete in: 2543548ms (42m 23s) 51 | With parallel plugin: Webpack build complete in: 208671ms (3m 28s) 52 | With parallel/cache: Webpack build complete in: 98524ms (1m 38s) 53 | ``` 54 | -------------------------------------------------------------------------------- /lib/worker.js: -------------------------------------------------------------------------------- 1 | const codeFrame = require('babel-code-frame'); 2 | const cache = require('./cache'); 3 | const tmpFile = require('./tmp-file'); 4 | 5 | const BOGUS_SOURCEMAP_STRING = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'; 6 | // This has to be broken apart or ava/nyc will try to use it when creating coverage reports. 7 | const URL_PREFIX = '//# sourceMappingURL='; 8 | const BOGUS_SOURCEMAP_URL = `\n${URL_PREFIX}${BOGUS_SOURCEMAP_STRING}`; 9 | 10 | function minify(source, map, uglifyOptions, uglifier) { 11 | // inSourceMap doesn't work without outSourceMap, and uglify adds this as a url to the resulting 12 | // code. 
We'd rather have the plugin respect our devtool setting, so we're going to provide a 13 | // bogus filename, then strip it out after. 14 | const opts = { ...uglifyOptions }; 15 | if (map) { 16 | Object.assign(opts, { 17 | sourceMap: { 18 | content: map, 19 | includeSources: true, 20 | url: BOGUS_SOURCEMAP_STRING, 21 | }, 22 | }); 23 | } 24 | 25 | const result = uglifier.minify(source, opts); 26 | const prom = result.then ? result : Promise.resolve(result); 27 | return prom.then(resolved => { 28 | if (resolved.error) { 29 | if (resolved.error.name === 'SyntaxError') { 30 | const frame = codeFrame(source, resolved.error.line, resolved.error.col); 31 | const errorMessage = `${resolved.error.name}: ${resolved.error.message}\n${frame}`; 32 | throw new SyntaxError(errorMessage); 33 | } 34 | 35 | throw resolved.error; 36 | } 37 | 38 | resolved.code = resolved.code.replace(new RegExp(BOGUS_SOURCEMAP_URL), ''); 39 | 40 | return result; 41 | }); 42 | } 43 | 44 | /** 45 | * Note: We're passing messages via tmpFiles as this is faster than passing through ipc. 46 | * In this function msgLocation is the tmp file's name, so that we know where to look 47 | * for our message. 48 | * 49 | * We expect the messages to have the following format: 50 | * { 51 | * assetName: 'someFileName.js', 52 | * source: 'function() {}', 53 | * map: 'a source map string if enabled', 54 | * cacheDir: 'location to cache results', 55 | * options: { 56 | * pluginOptions, 57 | * }, 58 | * } 59 | */ 60 | 61 | function processMessage(msgLocation, callback) { 62 | try { 63 | const messageContents = tmpFile.read(msgLocation); 64 | const message = JSON.parse(messageContents); 65 | const { source } = message; 66 | const { map } = message; 67 | 68 | const cacheKey = cache.createCacheKey(source + !!map, message.options); 69 | // We do not check the cache here because we already determined that this asset yields a cache 70 | // miss in the parent process. 
71 | const { terser } = message.options; 72 | const { uglifyJS } = message.options; 73 | // eslint-disable-next-line global-require 74 | const uglifier = terser ? require('terser') : require('uglify-js'); 75 | minify(source, map, terser || uglifyJS, uglifier) 76 | .then(minifiedContent => { 77 | cache.saveToCache(cacheKey, JSON.stringify({ 78 | source: minifiedContent.code, 79 | map: minifiedContent.map, 80 | }), message.cacheDir); 81 | 82 | tmpFile.update(msgLocation, JSON.stringify({ 83 | source: minifiedContent.code, 84 | map: minifiedContent.map, 85 | cacheKey, 86 | })); 87 | callback(null, msgLocation); 88 | }).catch(e => { 89 | callback(e.message, msgLocation); 90 | }); 91 | } catch (e) { 92 | callback(e.message, msgLocation); 93 | } 94 | } 95 | 96 | module.exports = { 97 | minify, 98 | processMessage, 99 | }; 100 | -------------------------------------------------------------------------------- /test/lib/cache.js: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import path from 'path'; 3 | import sinon from 'sinon'; 4 | import fs from 'fs'; 5 | import glob from 'glob'; 6 | import cache from '../../lib/cache'; 7 | 8 | const testFiles = ['throw', 'test']; 9 | 10 | let stubbedRead; 11 | let stubbedWrite; 12 | let stubbedDelete; 13 | let stubbedGlob; 14 | test.beforeEach(() => { 15 | const originalRead = fs.readFileSync; 16 | stubbedRead = sinon.stub(fs, 'readFileSync', (filePath, encoding) => { 17 | const fileName = path.basename(filePath, '.js'); 18 | if (testFiles.indexOf(fileName) === -1) { 19 | return originalRead(filePath, encoding); 20 | } 21 | if (fileName === 'throw') throw new Error('error'); 22 | return 'filecontents'; 23 | }); 24 | stubbedDelete = sinon.stub(fs, 'unlinkSync'); 25 | stubbedWrite = sinon.stub(fs, 'writeFileSync'); 26 | stubbedGlob = sinon.stub(glob, 'sync', () => ( 27 | [ 28 | '/abs/file/path1.js', 29 | '/abs/file/path2.js', 30 | ] 31 | )); 32 | }); 33 | 34 | 
test.afterEach(() => { 35 | stubbedRead.restore(); 36 | stubbedWrite.restore(); 37 | stubbedDelete.restore(); 38 | stubbedGlob.restore(); 39 | }); 40 | 41 | test.serial('cacheKeyGenerator should return a sha256 hash for a given file name', (t) => { 42 | const result = cache.createHashFromContent('asdf'); 43 | t.is(typeof result, 'string'); 44 | t.is(result.length, 64); 45 | }); 46 | 47 | test.serial('retrieveFromCache should return cached content', (t) => { 48 | const cacheDir = '/dev/null'; 49 | const cacheKey = 'test'; 50 | const result = cache.retrieveFromCache(cacheKey, cacheDir); 51 | const cachedFile = path.join(cacheDir, `${cacheKey}.js`); 52 | t.true(stubbedRead.calledWith(cachedFile)); 53 | t.is(result, 'filecontents'); 54 | }); 55 | 56 | test.serial('retrieveFromCache should return a falsy value if the cache file does not exist', (t) => { 57 | const cacheDir = '/dev/null'; 58 | const cacheKey = 'throw'; 59 | const result = cache.retrieveFromCache(cacheKey, cacheDir); 60 | t.falsy(result); 61 | }); 62 | 63 | test.serial('saveToCache should write results to a cached file', (t) => { 64 | const cacheDir = '/cacheDir'; 65 | const minifiedCode = 'minifiedCode;'; 66 | const cacheKey = 'mycachekey'; 67 | cache.saveToCache(cacheKey, minifiedCode, cacheDir); 68 | t.true(stubbedWrite.calledWith(path.join(cacheDir, `${cacheKey}.js`), minifiedCode)); 69 | }); 70 | 71 | test.serial('saveToCache should not write anything if no cacheDir is defined', (t) => { 72 | const minifiedCode = 'minifiedCode;'; 73 | const cacheKey = 'mycachekey'; 74 | cache.saveToCache(cacheKey, minifiedCode, undefined); 75 | t.false(stubbedWrite.called); 76 | }); 77 | 78 | test.serial('pruneCache is a noop if cacheDir is not provided', (t) => { 79 | t.notThrows(() => { 80 | cache.pruneCache('invalidbutunused', 'alsoinvalidbutunused', undefined); 81 | }); 82 | }); 83 | 84 | test.serial('pruneCache removes cache files that are unused', (t) => { 85 | cache.pruneCache(['usedKey'], ['usedKey', 
'unusedKey1', 'unusedKey2'], 'cacheDir'); 86 | t.true(stubbedDelete.calledWith(path.join('cacheDir', 'unusedKey1.js'))); 87 | t.true(stubbedDelete.calledWith(path.join('cacheDir', 'unusedKey2.js'))); 88 | t.false(stubbedDelete.calledWith(path.join('cacheDir', 'usedKey.js'))); 89 | }); 90 | 91 | test.serial('getCacheKeysFromDisk returns the filename of js files in the cacheDir', (t) => { 92 | const result = cache.getCacheKeysFromDisk('doesnotmatter'); 93 | t.is(result[0], 'path1'); 94 | t.is(result[1], 'path2'); 95 | }); 96 | 97 | test.serial('getCacheKeysFromDisk returns an emtpy array if a cacheDir is not provided', (t) => { 98 | const result = cache.getCacheKeysFromDisk(undefined); 99 | t.deepEqual(result, []); 100 | }); 101 | -------------------------------------------------------------------------------- /lib/uglifier.js: -------------------------------------------------------------------------------- 1 | // eslint-disable-line strict 2 | 3 | const os = require('os'); 4 | const mkdirp = require('mkdirp'); 5 | const webpackSources = require('webpack-sources'); 6 | const workerFarm = require('worker-farm'); 7 | const pify = require('pify'); 8 | const ModuleFilenameHelpers = require('webpack/lib/ModuleFilenameHelpers'); 9 | 10 | const { SourceMapSource } = webpackSources; 11 | const { RawSource } = webpackSources; 12 | const cache = require('./cache'); 13 | const tmpFile = require('./tmp-file'); 14 | /** 15 | * Determines how many workers to create. 16 | * Should be available cpus minus 1 or the number of assets to minify, whichever is smaller. 
17 | */ 18 | function workerCount(options, assetCount) { 19 | if (options.workerCount) { 20 | return options.workerCount; 21 | } 22 | return Math.min(assetCount, Math.max(1, os.cpus().length - 1)); 23 | } 24 | 25 | const usedCacheKeys = []; 26 | 27 | function processAssets(compilation, options) { 28 | const assetHash = compilation.assets; 29 | const useSourceMaps = options.sourceMap || false; 30 | if (options.cacheDir) { 31 | mkdirp.sync(options.cacheDir); 32 | } 33 | 34 | // Create a copy of the options object that omits the cacheDir field. This is necessary because 35 | // we include the options object when creating cache keys, and some cache directory paths may not 36 | // be stable across multiple runs. 37 | const optionsWithoutCacheDir = { ...options }; 38 | optionsWithoutCacheDir.cacheDir = undefined; 39 | 40 | // By default the `test` config should match every file ending in .js 41 | options.test = options.test || /\.js$/i; // eslint-disable-line no-param-reassign 42 | const assets = Object.keys(assetHash) 43 | .filter(ModuleFilenameHelpers.matchObject.bind(null, options)); 44 | 45 | // For assets that are cached, we read from the cache here rather than doing so in the worker. 46 | // This is a relatively fast operation, so this lets us avoid creating several workers in cases 47 | // when we have a near 100% cache hit rate. 48 | const cacheKeysOnDisk = new Set(cache.getCacheKeysFromDisk(options.cacheDir)); 49 | const uncachedAssets = []; 50 | 51 | assets.forEach((assetName) => { 52 | // sourceAndMap() is an expensive function, so we'll create the cache key from just source(), 53 | // and whether or not we should be using source maps. 54 | const source = assetHash[assetName].source(); 55 | const cacheKey = cache.createCacheKey(source + useSourceMaps, optionsWithoutCacheDir); 56 | usedCacheKeys.push(cacheKey); 57 | if (cacheKeysOnDisk.has(cacheKey)) { 58 | // Cache hit, so let's read from the disk and mark this cache key as used. 
59 | const content = JSON.parse(cache.retrieveFromCache(cacheKey, options.cacheDir)); 60 | if (content.map) { 61 | assetHash[assetName] = new SourceMapSource(content.source, assetName, content.map); 62 | } else { 63 | assetHash[assetName] = new RawSource(content.source); 64 | } 65 | } else { 66 | // Cache miss, so we'll need to minify this in a worker. 67 | uncachedAssets.push(assetName); 68 | } 69 | }); 70 | 71 | const farm = workerFarm( 72 | { 73 | autoStart: true, 74 | maxConcurrentCallsPerWorker: 1, 75 | maxConcurrentWorkers: workerCount(options, uncachedAssets.length), 76 | maxRetries: 2, // Allow for a couple of transient errors. 77 | }, 78 | require.resolve('./worker'), 79 | ['processMessage'], 80 | ); 81 | 82 | const minify = pify(farm.processMessage); 83 | 84 | const minificationPromises = uncachedAssets.map((assetName) => { 85 | const asset = assetHash[assetName]; 86 | const tmpFileName = tmpFile.create(JSON.stringify({ 87 | assetName, 88 | options: optionsWithoutCacheDir, 89 | source: asset.source(), 90 | map: useSourceMaps ? 
asset.map() : null, 91 | cacheDir: options.cacheDir, 92 | })); 93 | 94 | return minify(tmpFileName) 95 | .then(() => { 96 | const content = tmpFile.read(tmpFileName); 97 | const msg = JSON.parse(content); 98 | if (msg.map) { 99 | assetHash[assetName] = new SourceMapSource(msg.source, assetName, msg.map); // eslint-disable-line no-param-reassign, max-len 100 | } else { 101 | assetHash[assetName] = new RawSource(msg.source); // eslint-disable-line no-param-reassign, max-len 102 | } 103 | }) 104 | .catch((e) => { 105 | const builtError = new Error(`Encountered an error while minifying ${assetName}:\n${e}`); 106 | compilation.errors.push(builtError); 107 | }); 108 | }); 109 | 110 | function endWorkers() { 111 | workerFarm.end(farm); // at this point we're done w/ the farm, it can be killed 112 | } 113 | 114 | return Promise.all(minificationPromises) 115 | .then(endWorkers) 116 | .catch(endWorkers); 117 | } 118 | 119 | function pruneCache(options) { 120 | cache.pruneCache(usedCacheKeys, cache.getCacheKeysFromDisk(options.cacheDir), options.cacheDir); 121 | } 122 | 123 | module.exports = { 124 | processAssets, 125 | pruneCache, 126 | workerCount, 127 | }; 128 | -------------------------------------------------------------------------------- /test/lib/uglifer.js: -------------------------------------------------------------------------------- 1 | import test from 'ava'; 2 | import sinon from 'sinon'; 3 | import os from 'os'; 4 | import uglifyJS from 'uglify-js'; 5 | import escodegen from 'escodegen'; 6 | import { parse } from 'acorn'; 7 | import { SourceMapSource } from 'webpack-sources'; 8 | 9 | const { 10 | workerCount, 11 | processAssets, 12 | } = require('../../lib/uglifier'); 13 | 14 | const filename = 'somefile.js'; 15 | const testedFilename = 'testedFilename.js'; 16 | 17 | const badCode = 'func () {con.log("a)}'; 18 | const unminifedSource = '(function name(){console.log(0)})()'; 19 | 20 | // Generate a source map, this is setup to match as closely as possible the 
input source map that
// webpack provides.
const ast = parse(unminifedSource);
const unminifiedSourceMap = JSON.parse(escodegen.generate(ast, {
  file: 'x',
  sourceMap: true,
}));

const { map, code: minifiedSource } = uglifyJS.minify({ x: unminifedSource }, {
  sourceMap: {
    content: unminifiedSourceMap,
  },
});

const minifiedSourceMap = new SourceMapSource(minifiedSource, filename, map).map();

// Builds a minimal stand-in for a webpack compilation: two identical JS
// assets (one whose name matches /tested/), plus the `options` and `errors`
// fields that processAssets() reads and writes.
function createFakeCompilationObject() {
  return {
    assets: {
      [filename]: {
        source() {
          return unminifedSource;
        },
        map() {
          return unminifiedSourceMap;
        },
      },
      [testedFilename]: {
        source() {
          return unminifedSource;
        },
        map() {
          return unminifiedSourceMap;
        },
      },
    },
    options: {},
    errors: [],
  };
}

// Same shape as above, but the asset source uses ES6 syntax (arrow function,
// const) that uglify-js is expected to reject, while terser should accept it
// — see the last two tests below.
function createFakeES6CompilationObject() {
  return {
    assets: {
      'someFile.js': {
        source() {
          return 'const test = () => { function asdf(){console.log("a")} asdf()}; test();';
        },
        map() {
          return null;
        },
      },
    },
    options: {},
    errors: [],
  };
}

// Asserts that processAssets() recorded no errors on the fake compilation.
function assertNoError(compilationObject, t) {
  t.is(compilationObject.errors.length, 0);
}

test('assumptions', (t) => {
  // This is basically to ensure that our direct uglify.minify calls in this file output what we
  // expect. If it doesn't output what we expect, then we might have to rework the source map logic.
  const expectedMinifiedSource = 'console.log(0);';
  t.is(expectedMinifiedSource, minifiedSource);
});

// The workerCount tests stub os.cpus(), which is global state, so they run
// serially to avoid interfering with each other.
test.serial('workerCount should be cpus - 1 if assetCount is >= cpus', (t) => {
  const cpuStub = sinon.stub(os, 'cpus', () => ({ length: 8 }));
  const assetCount = 10;
  const options = {};
  t.is(workerCount(options, assetCount), 7);
  cpuStub.restore();
});

test.serial('workerCount should be assetCount if assetCount is < cpus', (t) => {
  const cpuStub = sinon.stub(os, 'cpus', () => ({ length: 8 }));
  const assetCount = 5;
  const options = {};
  t.is(workerCount(options, assetCount), 5);
  cpuStub.restore();
});

test('workerCount should follow options', (t) => {
  const assetCount = 5;
  const options = {
    workerCount: 2,
  };
  t.is(workerCount(options, assetCount), 2);
});

test.serial('workerCount should take options before checking assets or cpu', (t) => {
  const cpuStub = sinon.stub(os, 'cpus', () => ({ length: 2 }));
  const assetCount = 2;
  const options = {
    workerCount: 4,
  };
  t.is(workerCount(options, assetCount), 4);
  cpuStub.restore();
});

test('processAssets minifies each of the assets in the compilation object', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {}).then(() => {
    const minifiedResult = fakeCompilationObject.assets[filename].source();
    t.is(minifiedResult, minifiedSource);
    assertNoError(fakeCompilationObject, t);
  });
});

// Only the asset whose name matches the `test` regex should be minified.
test('processAssets respects test option', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {
    test: /tested/,
  }).then(() => {
    const unmatchedResult = fakeCompilationObject.assets[filename].source();
    const matchedResult = fakeCompilationObject.assets[testedFilename].source();
    assertNoError(fakeCompilationObject, t);
    t.is(unmatchedResult, unminifedSource);
    t.is(matchedResult, minifiedSource);
  });
});

test('processAssets respects include option', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {
    include: [/tested/],
  }).then(() => {
    const unmatchedResult = fakeCompilationObject.assets[filename].source();
    const matchedResult = fakeCompilationObject.assets[testedFilename].source();
    assertNoError(fakeCompilationObject, t);
    t.is(unmatchedResult, unminifedSource);
    t.is(matchedResult, minifiedSource);
  });
});

// `exclude` is the inverse of the previous test: the matching asset is left
// untouched and the non-matching one is minified.
test('processAssets respects exclude option', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {
    exclude: [/tested/],
  }).then(() => {
    const unmatchedResult = fakeCompilationObject.assets[filename].source();
    const matchedResult = fakeCompilationObject.assets[testedFilename].source();
    assertNoError(fakeCompilationObject, t);
    t.is(unmatchedResult, minifiedSource);
    t.is(matchedResult, unminifedSource);
  });
});

test('processAssets respects sourceMap:true', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {
    sourceMap: true,
    uglifyJS: {},
  }).then(() => {
    const assetSourceMap = fakeCompilationObject.assets[filename];
    assertNoError(fakeCompilationObject, t);
    t.deepEqual(assetSourceMap.map(), minifiedSourceMap);
  });
});

test('processAssets respects sourceMap:false', (t) => {
  const fakeCompilationObject = createFakeCompilationObject();
  return processAssets(fakeCompilationObject, {
    sourceMap: false,
    uglifyJS: {},
  }).then(() => {
    const assetSourceMap = fakeCompilationObject.assets[filename];
    t.is(assetSourceMap.source(), minifiedSource);
    assertNoError(fakeCompilationObject, t);
    t.is(assetSourceMap.map(), null);
  });
});

test('invalid JS should generate an error', (t) => {
  const errorCompilationObject = {
    assets: {
      'somefile.js': {
        source() {
          return badCode;
        },
      },
    },
    errors: [],
  };

  // Compute the error message uglify-js itself produces for badCode, so the
  // assertion below doesn't hard-code minifier-version-specific wording.
  let realErrorMessage;
  try {
    const result = uglifyJS.minify(badCode);
    if (result.error) throw result.error;
  } catch (e) {
    realErrorMessage = e.message;
  }

  return processAssets(errorCompilationObject, {
    sourceMap: false,
    uglifyJS: {},
  }).then(() => {
    t.truthy(errorCompilationObject.errors[0].message.includes(realErrorMessage));
  });
});

test('Passing uglifyJS options throws an error when minifying es6', (t) => {
  const es6CompilationObject = createFakeES6CompilationObject();
  return processAssets(es6CompilationObject, {
    sourceMap: false,
    uglifyJS: {},
  }).then(() => {
    t.is(es6CompilationObject.errors.length, 1);
  });
});

test('Passing terser options does not throw an error when minifying es6', (t) => {
  const es6CompilationObject = createFakeES6CompilationObject();
  return processAssets(es6CompilationObject, {
    sourceMap: false,
    terser: {},
  }).then(() => {
    assertNoError(es6CompilationObject, t);
  });
});
--------------------------------------------------------------------------------