├── .gitignore ├── test ├── util │ └── tmpDir.js ├── disable.js ├── gc.js ├── cache.js └── log.js ├── .travis.yml ├── LICENSE ├── package.json ├── CHANGELOG.md ├── stats.js ├── index.js └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .test/* 3 | npm-debug.log 4 | -------------------------------------------------------------------------------- /test/util/tmpDir.js: -------------------------------------------------------------------------------- 1 | const tmp = require('os-tmpdir') 2 | const rimraf = require('rimraf') 3 | const mkdirp = require('mkdirp') 4 | const path = require('path') 5 | const tmpDir = path.join(tmp(), 'browserify-persist-fs') 6 | 7 | rimraf.sync(tmpDir) 8 | mkdirp.sync(tmpDir) 9 | 10 | module.exports = tmpDir 11 | -------------------------------------------------------------------------------- /test/disable.js: -------------------------------------------------------------------------------- 1 | const test = require('tap').test 2 | const brfypersist = require('../') 3 | const tmpDir = require('./util/tmpDir') 4 | const path = require('path') 5 | 6 | test('disabling it returns in null', function (t) { 7 | t.equals(brfypersist('', {}, null, true), null) 8 | t.end() 9 | }) 10 | 11 | test('disabled with log should still give a logger', function (t) { 12 | const tmpFolder = path.join(tmpDir, 'disable_log') 13 | t.notEquals(brfypersist(tmpFolder, {}, function () {}, false), null) 14 | t.end() 15 | }) 16 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - stable 4 | - '6' 5 | - '5' 6 | - '4' 7 | sudo: false 8 | env: 9 | global: 10 | secure: IlYn41b3Pa58M2AX5A9Te1k3KVkbLWs5uHphub6iu5oOUAWTEqwYdzxdEJ6cyBut5uhCQ+Zdl6Uyr+ESPuj3C46kVhXeCBxh2iB4E3OlSAQpYQIQYOkIYnk2rLd1Vpx7rKFB2GAhLRIPI079LzZ0qqQiTpxfxKmBPx/09m1QDQ3gA9hzNlECCcNHQd1sp6dC/Hb35lq7ybVVLiyJwAq/KHLYAG/fSXZ+kZcVzJhFypwGJUnCF+iusA3E8r0bF9t4f0OSY6iNDy2xq0V5vBkwXwGf9gPTyZ+QcQvED7fmAZVuKskYjbhSH+3Cd2AWPG1Z5qBqdGlTYUeI6f8J4YdwAB5JGylOQiPbTjo44nXbImdUKkaO9n3gzvCthRYJuSSSIKwq/359RFGxbtOJcPni9vvJ9eLb//xORxk9kBlrGGBSKUGgJz3vgTf2b8v86G9FNA1gAakj0YnijULPwD9CSEOa0kIR1nVsjd54Sj5duxjtJAuZgYqxC85YsIDbW4WDckav9vdDfbs3gZaK60HCWQEh11sfDwkY/5OOAyQEBIY6vnilXRAPSH+b1wGlguy8wfi+/pMbQoMBN4yr2tz0JjdVeSEu+IxHCfPxgi9UfN3LCReqCWy4OYkDcM5DomvWD2w+ftlHe/jheefxjN5W1Lj8I7QwmflPzm7nfCOvw7Q= 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Martin Heidegger 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "browserify-persist-fs", 3 | "version": "1.2.2", 4 | "description": "Efficient and stable persistent cache for browserify", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "npm run lint && npm run coverage", 8 | "release": "./node_modules/.bin/standard-version", 9 | "lint": "./node_modules/.bin/standard", 10 | "unit": "./node_modules/.bin/tap -- test/*.js", 11 | "coverage": "./node_modules/.bin/tap --coverage -- test/*.js" 12 | }, 13 | "keywords": [ 14 | "browserify", 15 | "cache", 16 | "fs", 17 | "persistant", 18 | "fast" 19 | ], 20 | "author": "Martin Heidegger ", 21 | "license": "MIT", 22 | "dependencies": { 23 | "after": "^0.8.2", 24 | "mkdirp": "^0.5.1", 25 | "xtend": "^4.0.1" 26 | }, 27 | "devDependencies": { 28 | "bluebird": "^3.4.7", 29 | "module-deps": "^6.0.0", 30 | "os-tmpdir": "^1.0.2", 31 | "rimraf": "^2.5.4", 32 | "standard": "^10.0.3", 33 | "standard-version": "^4.3.0", 34 | "tap": "^11.1.0" 35 | }, 36 | "repository": { 37 | "type": "git", 38 | "url": "git+https://github.com/martinheidegger/browserify-persist-fs.git" 39 | }, 40 | "bugs": { 41 | "url": "https://github.com/martinheidegger/browserify-persist-fs/issues" 42 | }, 43 | "homepage": "https://github.com/martinheidegger/browserify-persist-fs#readme" 44 | } 45 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | 3 | All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 4 | 5 | 6 | ## [1.2.2](https://github.com/martinheidegger/browserify-persist-fs/compare/v1.2.1...v1.2.2) (2018-02-19) 7 | 8 | 9 | ### Bug Fixes 10 | 11 | * Preventing parallel file write calls to be executed. ([db747dc](https://github.com/martinheidegger/browserify-persist-fs/commit/db747dc)) 12 | 13 | 14 | 15 | 16 | ## [1.2.1](https://github.com/martinheidegger/browserify-persist-fs/compare/v1.2.0...v1.2.1) (2017-02-16) 17 | 18 | 19 | ### Bug Fixes 20 | 21 | * **cosmetic:** removed not-finished reference to parallel ([1f131e3](https://github.com/martinheidegger/browserify-persist-fs/commit/1f131e3)) 22 | 23 | 24 | 25 | 26 | # [1.2.0](https://github.com/martinheidegger/browserify-persist-fs/compare/v1.1.0...v1.2.0) (2017-02-16) 27 | 28 | 29 | ### Features 30 | 31 | * **stats:** Added statistics renderer ([2bc916b](https://github.com/martinheidegger/browserify-persist-fs/commit/2bc916b)) 32 | 33 | 34 | 35 | 36 | # [1.1.0](https://github.com/martinheidegger/browserify-persist-fs/compare/v1.0.0...v1.1.0) (2017-02-05) 37 | 38 | 39 | ### Features 40 | 41 | * **gc:** Added garbage collection support. 
([e28877d](https://github.com/martinheidegger/browserify-persist-fs/commit/e28877d)) 42 | -------------------------------------------------------------------------------- /stats.js: -------------------------------------------------------------------------------- 1 | function fmt (float) { 2 | if (float === undefined) { 3 | float = 0 4 | } 5 | return ((float * 100) | 0) / 100 // truncate to two decimal places 6 | } 7 | 8 | module.exports = function () { 9 | var stats = [] 10 | function process () { 11 | return stats.reduce(function (compiled, entry) { 12 | compiled.read += (entry.durations.cache || 0) + (entry.durations.read || 0) 13 | compiled.generate += (entry.durations.generate || 0) 14 | if (entry.err) { 15 | compiled.errorFiles += 1 16 | } else if (entry.cached) { 17 | compiled.cached += 1 18 | } else { 19 | compiled.built += 1 20 | } 21 | return compiled 22 | }, { 23 | read: 0, 24 | generate: 0, 25 | cached: 0, 26 | built: 0, 27 | errorFiles: 0, 28 | slowest: stats 29 | .sort(function (a, b) { 30 | return a.durations.total > b.durations.total ? -1 : 1 31 | }) 32 | .slice(0, 20) 33 | }) 34 | } 35 | return { 36 | update: function (entry) { 37 | stats.push(entry) 38 | }, 39 | process: process, 40 | render: function (deletionErr, deletedFiles) { 41 | var data = process() 42 | return '' + 43 | 'Avg. duration per file for reading: ' + fmt(data.read / stats.length) + 'ms\n' + 44 | 'Avg. duration per file for generating: ' + fmt(data.generate / stats.length) + 'ms\n' + 45 | 'Files built: ' + data.built + '\n' + 46 | 'Files with error: ' + data.errorFiles + '\n' + 47 | 'Files cached: ' + data.cached + '\n' + 48 | 'Garbage collected files: ' + (deletedFiles ? deletedFiles.length : deletionErr) + '\n' + 49 | 'Slowest files:\n' + data.slowest.map(function (entry) { 50 | return '- ' + entry.file + ' (total: ' + 51 | fmt(entry.durations.total) + 'ms, reading: ' + 52 | fmt(entry.durations.read) + 'ms, generating: ' + 53 | fmt(entry.durations.generate) + 'ms)' 54 | }).join('\n') 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /test/gc.js: -------------------------------------------------------------------------------- 1 | const test = require('tap').test 2 | const brfypersist = require('../') 3 | const tmpDir = require('./util/tmpDir') 4 | const Promise = require('bluebird') 5 | const path = require('path') 6 | const mkdirp = require('mkdirp') 7 | const fs = require('fs') 8 | 9 | function processFile (source, cb) { 10 | cb(null, { 11 | source: source + 'abcd', 12 | deps: ['fancy', 'button'], 13 | pkg: {} 14 | }) 15 | } 16 | 17 | function prepareGarbage (delayedBy, name, options) { 18 | const tmpTarget = path.join(tmpDir, 'gc_target', name) 19 | const tmpSource = path.join(tmpDir, 'gc_source', name) 20 | 21 | function createFile (name, data) { 22 | var file = path.join(tmpSource, name) 23 | fs.writeFileSync(file, data) 24 | return file 25 | } 26 | 27 | mkdirp.sync(tmpSource) 28 | 29 | var persist = Promise.promisify(brfypersist(tmpTarget, {})) 30 | 31 | const files = [ 32 | createFile('a.js', 'a'), 33 | createFile('b.js', 'bcde'), 34 | createFile('c.js', 'cdefghi') 35 | ] 36 | 37 | var delay = 0 38 | return Promise.all(files.map(function (file) { 39 | return new Promise(function (resolve, reject) { 40 | setTimeout(function () { 41 | resolve(persist(file, null, null, processFile)) 42 | }, (delay += 1) * delayedBy) 43 | }) 44 | })).then(function () { 45 | return Promise.promisify(persist.gc)(options) 46 | }).then(function (deleted) { 47 | return Promise.map(deleted, function
(deletedFile) { 48 | return new Promise(function (resolve, reject) { 49 | fs.access(deletedFile, function (err) { 50 | if (!err) { 51 | return reject(new Error('Expected `' + deletedFile + '` to be deleted.')) 52 | } 53 | resolve() 54 | }) 55 | }) 56 | }).then(function () { 57 | return deleted 58 | }) 59 | }) 60 | } 61 | 62 | test('garbage collect without options should do nothing', function (t) { 63 | return prepareGarbage(0, 'size', { 64 | }).then(function (data) { 65 | t.equals(data.length, 0) 66 | }) 67 | }) 68 | 69 | test('garbage collect by size', function (t) { 70 | return prepareGarbage(0, 'size', { 71 | maxSize: 100 72 | }).then(function (data) { 73 | t.equals(data.length, 2) 74 | }) 75 | }) 76 | 77 | test('garbage collect by age', function (t) { 78 | return prepareGarbage(2000, 'age', { 79 | maxAge: 3000 80 | }).then(function (data) { 81 | t.equals(data.length, 1) 82 | }) 83 | }) 84 | 85 | test('garbage collect by count', function (t) { 86 | return prepareGarbage(0, 'age', { 87 | maxCount: 2 88 | }).then(function (data) { 89 | t.equals(data.length, 1) 90 | }) 91 | }) 92 | -------------------------------------------------------------------------------- /test/cache.js: -------------------------------------------------------------------------------- 1 | const tap = require('tap') 2 | const Promise = require('bluebird') 3 | const test = tap.test 4 | const brfypersist = require('../') 5 | const fs = require('fs') 6 | const path = require('path') 7 | const tmpDir = require('./util/tmpDir') 8 | const mkdirp = require('mkdirp') 9 | const tmpSource = path.join(tmpDir, 'cache_source') 10 | const tmpTarget = path.join(tmpDir, 'cache_target') 11 | 12 | mkdirp.sync(tmpSource) 13 | 14 | function createFile (name, data) { 15 | var file = path.join(tmpSource, name) 16 | fs.writeFileSync(file, data) 17 | return file 18 | } 19 | 20 | function processFile (source, cb) { 21 | cb(null, { 22 | source: source + ' // docs', 23 | deps: ['foo', 'bar'], 24 | pkg: { 25 | name: 'baz' 26 | } 27 | }) 28 | } 29 | 30 | function wait (duration) { 31 | return function () { 32 | return new Promise(function (resolve) { 33 | setTimeout(resolve, duration) 34 | }) 35 | } 36 | } 37 | 38 | const fileA = createFile('a.js', 'var a = 1') 39 | const fileB = createFile('b.js', 'var b = 1') 40 | 41 | test('cache one file', function (t) { 42 | var persist = Promise.promisify(brfypersist(tmpTarget, {})) 43 | return persist(fileA, null, null, processFile) 44 | .then(function (resultA) { 45 | return persist(fileA, null, null, processFile) 46 | .then(function (resultB) { 47 | t.notEqual(resultA, resultB) 48 | t.same(resultA, resultB) 49 | t.same(resultA.source, 'var a = 1 // docs') 50 | t.same(resultA.deps, ['foo', 'bar']) 51 | t.same(resultA.pkg.name, 'baz') 52 | }) 53 | }) 54 | }) 55 | 56 | test('parallel caching only writes once', function (t) { 57 | var persist = Promise.promisify(brfypersist(tmpTarget, {})) 58 | var _firstFallback 59 | var fallback = function (source, cb) { 60 | if (!_firstFallback) { 61 | _firstFallback = cb 62 | return 63 | } 64 | setImmediate(function () { 65 | // Triggering a writing of the cache in the same tick 66 | _firstFallback(null, {first: true}) 67 | cb(null, {first: false}) 68 | }) 69 | } 70 | return Promise.all([ 71 | persist(fileB, null, null, fallback), 72 | persist(fileB, null, null, fallback) 73 | ]) 74 | .then(wait(100)) // The cache file may not have been written yet 75 | .then(function (results) { 76 | const cacheFilePath = path.join(tmpTarget, 
'bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f_d63c02b75372e4e64783538bff55c8f18ce4cf0c.json') 77 | t.ok(fs.existsSync(cacheFilePath), 'tmp created') 78 | t.same(JSON.parse(fs.readFileSync(cacheFilePath, 'utf-8')), {first: true}, 'Only the first write succeeded') 79 | }) 80 | }) 81 | -------------------------------------------------------------------------------- /test/log.js: -------------------------------------------------------------------------------- 1 | const tap = require('tap') 2 | const Promise = require('bluebird') 3 | const test = tap.test 4 | const brfypersist = require('../') 5 | const tmpDir = require('./util/tmpDir') 6 | const path = require('path') 7 | const fs = require('fs') 8 | const mkdirp = require('mkdirp') 9 | 10 | function throwIfCalled (errorMessage) { 11 | return function () { 12 | throw new Error(errorMessage) 13 | } 14 | } 15 | 16 | function processFile (source, cb) { 17 | cb(null, { 18 | source: source + 'abcd', 19 | deps: ['fancy', 'button'], 20 | pkg: {} 21 | }) 22 | } 23 | 24 | function processError (source, cb) { 25 | cb(new Error('some-error')) 26 | } 27 | 28 | function testLog (t, tmpSource, persist, log, isLogOnly) { 29 | function createFile (name, data) { 30 | var file = path.join(tmpSource, name) 31 | fs.writeFileSync(file, data) 32 | return file 33 | } 34 | 35 | mkdirp.sync(tmpSource) 36 | 37 | const nonExistent = path.join(tmpSource, 'non_existent.js') 38 | const fileA = createFile('a.js', 'Hello World') 39 | const fileB = createFile('b.js', 'Foo') 40 | const fileC = createFile('c.js', 'Error') 41 | 42 | return Promise.all([ 43 | persist(nonExistent, null, null, throwIfCalled('Shouldnt try to fallback on non-existent file')) 44 | .catch(function () { 45 | // not important for this test 46 | }), 47 | persist(fileA, null, null, processFile), 48 | persist(fileB, null, null, processFile), 49 | persist(fileC, null, null, processError) 50 | .catch(function () { 51 | // not important for this test 52 | }) 53 | ]).then(function () { 54 | return persist(fileA, null, null, isLogOnly ? processFile : throwIfCalled('It Should be already cached!')) 55 | }).then(function () { 56 | log = log.sort(function (a, b) { 57 | return a.file > b.file ? 1 : -1 58 | }) 59 | var files = log.reduce(function (files, entry) { 60 | var arr = files[entry.file] 61 | if (!arr) { 62 | arr = [] 63 | files[entry.file] = arr 64 | } 65 | arr.push(entry) 66 | return files 67 | }, {}) 68 | 69 | t.equals(files[nonExistent].length, 1) 70 | t.notSame(files[nonExistent][0].err, null, 'passing through of the error to the log') 71 | t.same(files[nonExistent][0].cacheFile, null, 'If reading of a file throws an error, the cacheFile can not be evaluated') 72 | t.same(files[nonExistent][0].durations.cache, null) 73 | t.same(files[nonExistent][0].durations.generate, null) 74 | t.type(files[nonExistent][0].durations.total, 'number') 75 | t.type(files[nonExistent][0].durations.read, 'number') 76 | 77 | t.equals(files[fileA].length, 2) 78 | t.equals(files[fileA][0].cached, false) 79 | t.type(files[fileA][0].durations.cache, isLogOnly ? 
'undefined' : 'number') 80 | t.type(files[fileA][0].durations.generate, 'number') 81 | t.equals(files[fileA][0].sizes.input, 11) 82 | t.equals(files[fileA][0].sizes.output, 15) 83 | 84 | if (isLogOnly) { 85 | t.equals(files[fileA][1].cached, false, 'When its cached a cached flag should be set') 86 | t.same(files[fileA][1].durations.cache, null, 'No caching means no cache number') 87 | t.type(files[fileA][1].durations.generate, 'number', 'Generate may not be called when it is cached') 88 | } else { 89 | t.equals(files[fileA][1].cached, true, 'When its cached a cached flag should be set') 90 | t.type(files[fileA][1].durations.cache, 'number', 'Cache needs to be recorded when loading from cache') 91 | t.same(files[fileA][1].durations.generate, null, 'Generate may not be called when it is cached') 92 | } 93 | 94 | t.equals(files[fileB].length, 1) 95 | t.equals(files[fileB][0].sizes.input, 3) 96 | t.equals(files[fileB][0].sizes.output, 7) 97 | 98 | t.equals(files[fileC].length, 1) 99 | t.same(files[fileC][0].err.message, 'some-error') 100 | t.end() 101 | }) 102 | } 103 | 104 | test('logging with cache enabled', function (t) { 105 | const tmpTarget = path.join(tmpDir, 'log_enabled_target') 106 | const tmpSource = path.join(tmpDir, 'log_enabled_source') 107 | var log = [] 108 | var persist = Promise.promisify(brfypersist(tmpTarget, {}, function (logInfo) { 109 | log.push(logInfo) 110 | }, false)) 111 | return testLog(t, tmpSource, persist, log, false) 112 | }) 113 | 114 | test('logging with cache disabled', function (t) { 115 | const tmpTarget = path.join(tmpDir, 'log_disabled_target') 116 | const tmpSource = path.join(tmpDir, 'log_disabled_source') 117 | var log = [] 118 | var persist = Promise.promisify(brfypersist(tmpTarget, {}, function (logInfo) { 119 | log.push(logInfo) 120 | }, true)) 121 | return testLog(t, tmpSource, persist, log, true) 122 | }) 123 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const readFile = fs.readFile 3 | const writeFile = fs.writeFile 4 | const crypto = require('crypto') 5 | const path = require('path') 6 | const hrtime = process.hrtime 7 | const after = require('after') 8 | const xtend = require('xtend') 9 | 10 | function createHash (input) { 11 | return crypto.createHash('sha1').update(input).digest('hex') 12 | } 13 | 14 | function msSince (time) { 15 | var diff = hrtime(time) 16 | return (diff[0] * 1e9 + diff[1]) * 0.000001 17 | } 18 | 19 | function disabledLog (log, file, id, pkg, fallback, cb) { 20 | var start = hrtime() 21 | var readTime 22 | var generateTime 23 | var inputSize 24 | var outputSize 25 | var doLog = function (err) { 26 | log({ 27 | file: file, 28 | cacheFile: undefined, 29 | err: err, 30 | cached: false, 31 | durations: { 32 | read: readTime, 33 | cache: undefined, 34 | generate: generateTime, 35 | total: msSince(start) 36 | }, 37 | sizes: { 38 | input: inputSize, 39 | output: outputSize 40 | } 41 | }) 42 | } 43 | readFile(file, 'utf8', function (err, fileData) { 44 | inputSize = fileData && fileData.length 45 | readTime = msSince(start) 46 | if (err) { 47 | doLog(err) 48 | return cb(err) 49 | } 50 | var generateStart = hrtime() 51 | fallback(fileData, function (err, data) { 52 | outputSize = data && data.source.length 53 | generateTime = msSince(generateStart) 54 | doLog(err) 55 | cb(err, data) 56 | }) 57 | }) 58 | } 59 | 60 | function asyncMap (input, processor, parallel, cb) { 61 | 
if (input.length === 0) { 62 | return cb(null, []) 63 | } 64 | var output = [] 65 | parallel = Math.min(parallel, input.length) 66 | var next = after(parallel, function (err) { 67 | err ? cb(err) : cb(null, output) 68 | }) 69 | for (var i = 0; i < parallel; ++i) processOne() 70 | function processOne () { 71 | if (input.length === 0) { 72 | return next() 73 | } 74 | processor(input.shift(), function (err, data) { 75 | if (err) { 76 | return next(err) 77 | } 78 | output.push(data) 79 | processOne() 80 | }) 81 | } 82 | } 83 | 84 | function getFileStats (folder, file, cb) { 85 | file = path.join(folder, file) 86 | fs.stat(file, function (err, stat) { 87 | err ? cb(err) : cb(null, { 88 | file: file, 89 | atime: stat.atime, 90 | size: stat.size 91 | }) 92 | }) 93 | } 94 | 95 | function getFolderStats (folder, parallel, cb) { 96 | fs.readdir(folder, function (err, files) { 97 | err ? cb(err) : asyncMap(files, getFileStats.bind(null, folder), parallel, cb) 98 | }) 99 | } 100 | 101 | function rmf (file, cb) { 102 | fs.unlink(file, function () { 103 | cb() // eat the error 104 | }) 105 | } 106 | 107 | function gc (folder, opts, cb) { 108 | opts = xtend({ 109 | maxCount: Number.MAX_SAFE_INTEGER, 110 | maxSize: Number.MAX_SAFE_INTEGER, 111 | maxAge: Number.MAX_SAFE_INTEGER, 112 | parallel: 5 113 | }, opts) 114 | getFolderStats(folder, opts.parallel, function (err, stats) { 115 | if (err) { 116 | return cb(err) 117 | } 118 | var totalSize = 0 119 | var filesToDelete = stats.sort(function (a, b) { 120 | return a.atime > b.atime ? 1 : -1 121 | }).filter(function (stat, nr) { 122 | totalSize += stat.size 123 | var age = Date.now() - stat.atime.getTime() 124 | return !( 125 | age < opts.maxAge && 126 | totalSize < opts.maxSize && 127 | nr < opts.maxCount 128 | ) 129 | }).map(function (stat) { 130 | return stat.file 131 | }) 132 | asyncMap(filesToDelete.concat(), rmf, opts.parallel, function (err) { 133 | err ? 
cb(err) : cb(null, filesToDelete) 134 | }) 135 | }) 136 | } 137 | 138 | module.exports = function (folder, hash, log, disable) { 139 | if (disable) { 140 | if (!log) { 141 | return null 142 | } 143 | return disabledLog.bind(null, log) 144 | } 145 | const cachePrefix = path.join(folder, createHash(JSON.stringify(hash))) 146 | require('mkdirp').sync(folder) 147 | var handler = function (file, id, pkg, fallback, cb) { 148 | var start = hrtime() 149 | var readTime 150 | var cacheReadTime 151 | var generateTime 152 | var cacheFile 153 | var fileSize 154 | var generatedSize 155 | var doLog 156 | if (log) { 157 | doLog = function (err, cached) { 158 | log({ 159 | file: file, 160 | cacheFile: cacheFile, 161 | err: err, 162 | cached: cached, 163 | sizes: { 164 | input: fileSize, 165 | output: generatedSize 166 | }, 167 | durations: { 168 | read: readTime || undefined, 169 | cache: cacheReadTime || undefined, 170 | generate: generateTime || undefined, 171 | total: msSince(start) 172 | } 173 | }) 174 | } 175 | } 176 | readFile(file, 'utf8', function (err, fileData) { 177 | if (doLog) { 178 | readTime = msSince(start) 179 | if (fileData) fileSize = fileData.length 180 | } 181 | if (err) { 182 | if (doLog) doLog(err, false) 183 | return cb(err) 184 | } 185 | var fileHash = createHash(fileData) 186 | var cacheStart = hrtime() 187 | cacheFile = path.join(cachePrefix + '_' + fileHash + '.json') 188 | return readFile(cacheFile, 'utf8', function (_err, rawCacheData) { 189 | // ignore error 190 | if (doLog) cacheReadTime = msSince(cacheStart) 191 | if (!rawCacheData) { 192 | var generateStart = hrtime() 193 | return fallback(fileData, function (err, data) { 194 | if (doLog) { 195 | generateTime = msSince(generateStart) 196 | if (data) { 197 | generatedSize = data.source.length 198 | } 199 | doLog(err, false) 200 | } 201 | if (err) { 202 | return cb(err) 203 | } 204 | writeFile(cacheFile, JSON.stringify(data, null, 2), {flag: 'wx'}, function () { 205 | // Don't wait, don't care 206 | }) 207 | cb(null, data) 208 | }) 209 | } 210 | var cacheData = JSON.parse(rawCacheData) 211 | generatedSize = cacheData.source.length 212 | doLog && doLog(null, true) 213 | cb(null, cacheData) 214 | }) 215 | }) 216 | } 217 | handler.gc = gc.bind(null, folder) 218 | return handler 219 | } 220 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🚀 A Browserify Cache for Maximum speed. 2 | 3 | [![Build Status](https://travis-ci.org/martinheidegger/browserify-persist-fs.svg?branch=master)](https://travis-ci.org/martinheidegger/browserify-persist-fs) 4 | [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/) 5 | [![Coverage Status](https://coveralls.io/repos/github/martinheidegger/browserify-persist-fs/badge.svg)](https://coveralls.io/github/martinheidegger/browserify-persist-fs) 6 | 7 | `browserify-persist-fs` stores the computation results for every file 8 | processed in a `cache` folder which makes recomputation of previous executions 9 | extremely fast _(particularily useful for CI!)_. 10 | 11 | **Oh**❗️ It also comes with a logging API that can help you figure out why 12 | your browserify execution is slow and which files cost most time! 
13 | 14 | > In our production we were able to reduce repeated requests from 40s → 6s 🎉 15 | 16 | ## Temporary disclaimer 17 | 18 | In order to use `browserify-persist-fs` you need to have a version of browserify 19 | that depends on [`module-deps`](https://github.com/substack/module-deps) with a 20 | version >= 4.1.0 installed. 21 | 22 | By installing a clean version of browserify v14.1.0 or newer, 23 | you will get the required `module-deps` version. 24 | 25 | ## Installation & Setup 26 | 27 | Specify `browserify-persist-fs` as the `persistentCache` option. 28 | 29 | ```javascript 30 | const browserify = require('@leichtgewicht/browserify') // for the time being... 31 | const browserifyPersistFs = require('browserify-persist-fs')( 32 | '.cache', // The folder where things should be stored 33 | {}, // "hashObject": An object that is used to figure out if the configuration has changed 34 | null, // Optional log handler (default: null) 35 | false // Pass in true to disable the cache (default: false) 36 | ) 37 | const bundle = browserify({ 38 | persistentCache: browserifyPersistFs 39 | }) 40 | ``` 41 | 42 | ## Identity of builds 43 | 44 | When you build something with browserify there are a lot of ways to modify 45 | the resulting output: `transforms`, `debug`, `sourcemap`, etc. Since it is 46 | impossible to figure out automatically what properties may exist, **you have to 47 | specify how the build is different**. 48 | 49 | The second parameter, the `hashObject`, should be used to make sure that different 50 | configurations of browserify don't share the same cache entries. 51 | 52 | Usually it contains a mixture of version specifications and config flags: 53 | 54 | ```javascript 55 | const browserifyPersistFs = require('browserify-persist-fs')('.cache', 56 | { 57 | debug: true, 58 | transforms: [ 59 | require('browserify/package.json').version, 60 | require('browserify-shim/package.json').version, 61 | require('uglifyify/package.json').version 62 | ] 63 | } 64 | ) 65 | ``` 66 | 67 | Make sure this object captures everything that changes the build output to ensure developer happiness ☀️ 🙆 68 | 69 | _A [PR](https://github.com/martinheidegger/browserify-persist-fs) to make this 70 | process better would be highly welcome._ 71 | 72 | ## Garbage Collection 73 | 74 | `browserify-persist-fs` does not automatically delete old cache files. You will 75 | run out of disk space if the old files are not regularly deleted. 76 | 77 | `browserify-persist-fs` offers an API that allows you to delete old files: 78 | 79 | ```javascript 80 | const browserifyPersistFs = require('browserify-persist-fs')('.cache', { /*...*/ }) 81 | browserifyPersistFs.gc({ 82 | maxAge: 100000, // Maximum age of a file in milliseconds (Default: Number.MAX_SAFE_INTEGER) 83 | maxCount: 10000, // Maximum count of files in the cache folder (Default: Number.MAX_SAFE_INTEGER) 84 | maxSize: 10000, // Maximum size in bytes that all files combined may have (Default: Number.MAX_SAFE_INTEGER) 85 | parallel: 10 // Maximum number of parallel file operations (Default: 5) 86 | }, function (err, deletedFiles) { 87 | // deletedFiles holds the path of all files that got deleted 88 | }) 89 | ``` 90 | 91 | You have to specify at least `maxAge`, `maxCount` or `maxSize`. Any combination 92 | is possible as well.
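Building on the setup example above, the following minimal sketch runs a combined cleanup once the bundle has been created; the one-week age limit and the 50 MB size limit are placeholder values to adapt to your project:

```javascript
// `bundle` and `browserifyPersistFs` are the instances created in the setup example above
bundle.bundle(function (bundleErr) {
  if (bundleErr) throw bundleErr
  // Combine an age limit with a size limit; exceeding either one marks files for deletion
  browserifyPersistFs.gc({
    maxAge: 7 * 24 * 60 * 60 * 1000, // one week in milliseconds
    maxSize: 50 * 1024 * 1024 // 50 MB in bytes
  }, function (err, deletedFiles) {
    if (err) return console.error('Garbage collection failed:', err)
    console.log('Garbage collected ' + deletedFiles.length + ' cache files')
  })
})
```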
93 | 94 | ## Logging 95 | 96 | ```javascript 97 | const browserifyPersistFs = require('browserify-persist-fs')('.cache', {}, 98 | log 99 | ) 100 | 101 | function log (entry) { 102 | entry.file // File that has been loaded 103 | entry.err // In case an error occurred 104 | entry.cacheFile // The cache file location that has been used 105 | entry.durations.total // Total time it took to process this entry 106 | entry.durations.read // Time it took to read the source file 107 | entry.durations.cache // Time it took to read the cached content 108 | entry.durations.generate // Time it took to generate the resulting file 109 | entry.sizes.input // Size of the input file 110 | entry.sizes.output // Size of the output file 111 | } 112 | ``` 113 | 114 | ## Statistics 115 | 116 | Using the logging capabilities mentioned above it is possible to generate 117 | statistics about the project that you are building. These statistics can 118 | clarify why builds are slow and whether everything worked correctly. 119 | 120 | `browserify-persist-fs` comes with a small statistics module that can generate 121 | a useful view: 122 | 123 | ```javascript 124 | const stats = require('browserify-persist-fs/stats')() 125 | const browserifyPersistFs = require('browserify-persist-fs')('.cache', {}, stats.update) 126 | 127 | // ... 128 | // After processing and gc: 129 | 130 | browserifyPersistFs.gc({ /*...*/ }, function (err, deletedFiles) { 131 | console.log(stats.render(err, deletedFiles)) 132 | }) 133 | ``` 134 | 135 | which should show something like: 136 | 137 | ``` 138 | Avg. duration per file for reading: 109.9ms 139 | Avg. duration per file for generating: 1ms 140 | Files built: 0 141 | Files with error: 0 142 | Files cached: 1155 143 | Garbage collected files: 0 144 | Slowest files: 145 | - /Users/martinheidegger/project/client/js/app/components/draw/DrawObjectText.js (total: 257ms, reading: 105.96ms, generating: 1ms) 146 | - /Users/martinheidegger/project/client/js/app/components/draw/DrawObjectCanvas.js (total: 255.98ms, reading: 106.21ms, generating: 1ms) 147 | - /Users/martinheidegger/project/node_modules/rgb2hex/index.js (total: 255.9ms, reading: 126.53ms, generating: 1ms) 148 | - /Users/martinheidegger/project/client/js/app/components/draw/DrawObjectPen.js (total: 255.73ms, reading: 106.07ms, generating: 1ms) 149 | - /Users/martinheidegger/project/client/js/app/components/draw/StampTool.js (total: 254.7ms, reading: 105.96ms, generating: 1ms) 150 | - /Users/martinheidegger/project/client/js/app/components/draw/ToolButtons.js (total: 254.68ms, reading: 106.23ms, generating: 1ms) 151 | - /Users/martinheidegger/project/node_modules/react-bootstrap/lib/utils/index.js (total: 252.14ms, reading: 129.16ms, generating: 1ms) 152 | - /Users/martinheidegger/project/node_modules/jsondiffpatch/src/main.js (total: 247.01ms, reading: 130.37ms, generating: 1ms) 153 | - /Users/martinheidegger/project/node_modules/react/lib/ReactPropTypeLocationNames.js (total: 244.29ms, reading: 101.02ms, generating: 1ms) 154 | - /Users/martinheidegger/project/node_modules/react/lib/checkReactTypeSpec.js (total: 244.31ms, reading: 99.56ms, generating: 1ms) 155 | - /Users/martinheidegger/project/node_modules/react/lib/canDefineProperty.js (total: 244.21ms, reading: 101.09ms, generating: 1ms) 156 | - /Users/martinheidegger/project/node_modules/react/lib/traverseAllChildren.js (total: 244.21ms, reading: 100.87ms, generating: 1ms) 157 | - /Users/martinheidegger/project/node_modules/react/lib/ReactPropTypesSecret.js (total: 243.18ms, reading:
99.58ms, generating: 1ms) 158 | - /Users/martinheidegger/project/node_modules/react/lib/reactProdInvariant.js (total: 242.94ms, reading: 101.01ms, generating: 1ms) 159 | - /Users/martinheidegger/project/node_modules/react/lib/PooledClass.js (total: 243.1ms, reading: 100.84ms, generating: 1ms) 160 | - /Users/martinheidegger/project/node_modules/react/lib/ReactComponentTreeHook.js (total: 243.02ms, reading: 99.56ms, generating: 1ms) 161 | - /Users/martinheidegger/project/node_modules/react/lib/ReactNoopUpdateQueue.js (total: 242.93ms, reading: 99.58ms, generating: 1ms) 162 | - /Users/martinheidegger/project/node_modules/react/lib/getIteratorFn.js (total: 241.89ms, reading: 99.83ms, generating: 1ms) 163 | - /Users/martinheidegger/project/node_modules/react/lib/ReactElementSymbol.js (total: 241.83ms, reading: 98.3ms, generating: 1ms) 164 | - /Users/martinheidegger/project/node_modules/react/lib/ReactCurrentOwner.js (total: 241.67ms, reading: 98.31ms, generating: 1ms) 165 | ``` 166 | 167 | ## License 168 | 169 | MIT 170 | 171 | ## Mentions 172 | 173 | I was able to work on this thanks to [Nota](https://notainc.com) that produces 174 | [Scrapbox](https://scrapbox.io) and [Gyazo](https://gyazo.com) since we 175 | needed this to make our build run on speed!🏃‍ 176 | --------------------------------------------------------------------------------