├── .babelrc
├── .browserslistrc
├── .clean-publish
├── .editorconfig
├── .eslintrc
├── .gitignore
├── .huskyrc
├── .lintstagedrc
├── .size-limit
├── .travis.yml
├── .trigenscriptsrc
├── CHANGELOG.md
├── LICENSE
├── README.md
├── jest.config.json
├── package.json
├── rollup.config.js
├── src
├── index.js
└── task-proxy.js
├── test
├── .eslintrc
└── plugin.spec.js
└── yarn.lock
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "exclude": "node_modules/**",
3 | "presets": [
4 | "babel-preset-trigen"
5 | ],
6 | "env": {
7 | "test": {
8 | "presets": [
9 | [
10 | "babel-preset-trigen",
11 | {
12 | "targets": {
13 | "node": "current"
14 | },
15 | "commonjs": true
16 | }
17 | ]
18 | ]
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/.browserslistrc:
--------------------------------------------------------------------------------
1 | extends browserslist-config-trigen/node
2 |
--------------------------------------------------------------------------------
/.clean-publish:
--------------------------------------------------------------------------------
1 | {
2 | "packageManager": "yarn"
3 | }
4 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | indent_style = tab
5 | charset = utf-8
6 | trim_trailing_whitespace = true
7 | insert_final_newline = true
8 |
9 | [{package.json,manifest.json,*.yml}]
10 | indent_style = space
11 | indent_size = 2
12 |
13 | [*.md]
14 | indent_style = space
15 | indent_size = 4
16 | trim_trailing_whitespace = false
17 |
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "trigen/base",
3 | "env": {
4 | "node": true
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 |
6 | # Runtime data
7 | pids
8 | *.pid
9 | *.seed
10 |
11 | # Directory for instrumented libs generated by jscoverage/JSCover
12 | lib-cov
13 |
14 | # Coverage directory used by tools like istanbul
15 | coverage
16 | .nyc_output
17 |
18 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
19 | .grunt
20 |
21 | # node-waf configuration
22 | .lock-wscript
23 |
24 | # Compiled binary addons (http://nodejs.org/api/addons.html)
25 | build/Release
26 |
27 | # Dependency directory
28 | node_modules
29 |
30 | # Optional npm cache directory
31 | .npm
32 |
33 | # Optional REPL history
34 | .node_repl_history
35 |
36 | # OS stuff
37 | ._*
38 | .DS_Store
39 |
40 | # Some caches
41 | .*cache
42 |
43 | # Compiled dist
44 | lib
45 | package
46 | build
47 |
48 | .env
49 |
--------------------------------------------------------------------------------
/.huskyrc:
--------------------------------------------------------------------------------
1 | {
2 | "hooks": {
3 | "pre-commit": "lint-staged",
4 | "pre-push": "npm test"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/.lintstagedrc:
--------------------------------------------------------------------------------
1 | {
2 | "src/**/*.{js,jsx}": [
3 | "trigen-scripts lint:js",
4 | "git add"
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/.size-limit:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "path": "lib/index.js",
4 | "limit": "20 KB",
5 | "webpack": false,
6 | "running": false
7 | }
8 | ]
9 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 | git:
3 | depth: 1
4 | branches:
5 | except: /^v\d/
6 | language: node_js
7 | node_js:
8 | - "lts/*"
9 | - "8"
10 | cache:
11 | directories:
12 | - node_modules
13 | after_success: npm run coverage
14 |
--------------------------------------------------------------------------------
/.trigenscriptsrc:
--------------------------------------------------------------------------------
1 | [
2 | "@trigen/scripts-plugin-babel",
3 | "@trigen/scripts-plugin-eslint",
4 | "@trigen/scripts-plugin-jest",
5 | "@trigen/scripts-plugin-rollup",
6 | "@trigen/scripts-preset-lib",
7 | "@trigen/scripts-plugin-size-limit"
8 | ]
9 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
6 | and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
7 |
8 |
26 |
27 | ## [1.1.3] - 2019-08-10
28 | ### Changed
29 | - `new Buffer` -> `Buffer.from`;
30 | - Docs.
31 |
32 | ## [1.1.2] - 2019-06-08
33 | ### Changed
34 | - `trigen-scripts` as dev tool;
35 | - Dependencies were updated.
36 |
37 | ## [1.1.1] - 2019-01-23
38 | ### Fixed
39 | - [#75](https://github.com/jgable/gulp-cache/issues/75)
40 |
41 | ## [1.1.0] - 2019-01-07
42 | ### Changed
43 | - Dependencies were updated;
44 | - README.md was updated.
45 |
46 | ## [1.0.2] - 2017-12-30
47 | ### Changed
48 | - `gulp-util` -> `plugin-error`
49 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2014 - present Jacob Gable
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of
6 | this software and associated documentation files (the "Software"), to deal in
7 | the Software without restriction, including without limitation the rights to
8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9 | the Software, and to permit persons to whom the Software is furnished to do so,
10 | subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # gulp-cache
2 |
3 | [![NPM version][npm]][npm-url]
4 | [![Node version][node]][node-url]
5 | [![Dependencies status][deps]][deps-url]
6 | [![Build status][build]][build-url]
7 | [![Coverage status][coverage]][coverage-url]
8 |
9 | [npm]: https://img.shields.io/npm/v/gulp-cache.svg
10 | [npm-url]: https://www.npmjs.com/package/gulp-cache
11 |
12 | [node]: https://img.shields.io/node/v/gulp-cache.svg
13 | [node-url]: https://nodejs.org
14 |
15 | [deps]: https://img.shields.io/david/jgable/gulp-cache.svg
16 | [deps-url]: https://david-dm.org/jgable/gulp-cache
17 |
18 | [build]: https://travis-ci.org/jgable/gulp-cache.svg?branch=master
19 | [build-url]: https://travis-ci.org/jgable/gulp-cache
20 |
21 | [coverage]: https://img.shields.io/coveralls/jgable/gulp-cache.svg
22 | [coverage-url]: https://coveralls.io/r/jgable/gulp-cache
23 |
24 | A temp file based caching proxy task for [gulp](http://gulpjs.com/).
25 |
26 | ## Install
27 |
28 | ```bash
29 | npm i -D gulp-cache
30 | # or
31 | yarn add -D gulp-cache
32 | ```
33 |
34 | ## Usage
35 |
36 | ```js
37 | import gulp from 'gulp';
38 | import favicons from 'gulp-favicons';
39 | import srcset from 'gulp-srcset';
40 | import cache from 'gulp-cache';
41 |
42 | gulp.task('favicon', () =>
43 | gulp.src('src/favicon.svg')
44 | .pipe(cache(
45 | // Target plugin, the output of which will be cached.
46 | favicons(faviconsConfig),
47 | // Options for `gulp-cache` plugin.
48 | {
49 | // Bucket to store favicons in cache.
50 | name: 'favicons'
51 | }
52 | ))
53 | .pipe(gulp.dest('./favicons'))
54 | );
55 |
56 | gulp.task('images', () =>
57 | gulp.src('src/**/*.{jpg,png,svg}')
58 | .pipe(cache(
59 | // Target plugin, the output of which will be cached.
60 | srcset(srcsetRules),
61 | // Options for `gulp-cache` plugin.
62 | {
63 | // Bucket to store images in cache.
64 | name: 'images'
65 | }
66 | ))
67 | .pipe(gulp.dest('./images'))
68 | );
69 | ```
70 |
71 |
72 | Complex usage example
73 |
74 | ```js
75 | import fs from 'fs';
76 | import gulp from 'gulp';
77 | import jshint from 'gulp-jshint';
78 | import cache from 'gulp-cache';
79 |
const jshintVersion = '2.4.1';
81 | const jshintOptions = fs.readFileSync('.jshintrc');
82 |
83 | function makeHashKey(file) {
84 | // Key off the file contents, jshint version and options
85 | return `${file.contents.toString('utf8')}${jshintVersion}${jshintOptions}`;
86 | }
87 |
88 | gulp.task('lint', () =>
89 | gulp.src('src/**/*.js')
90 | .pipe(cache(
91 | // Target plugin, the output of which will be cached.
92 | jshint('.jshintrc'),
93 | // Options for `gulp-cache` plugin.
94 | {
95 | key: makeHashKey,
96 | // What on the result indicates it was successful
97 | success(jshintedFile) {
98 | return jshintedFile.jshint.success;
99 | },
100 | // What to store as the result of the successful action
101 | value(jshintedFile) {
102 | // Will be extended onto the file object on a cache hit next time task is ran
103 | return {
104 | jshint: jshintedFile.jshint
105 | };
106 | }
107 | }
108 | ))
109 | .pipe(jshint.reporter('default'))
110 | });
111 | ```
112 |
113 |
114 |
115 | ## API
116 |
117 | ### `cache(pluginToCache [, options])`
118 |
119 | #### `pluginToCache`
120 |
121 | Target plugin, the output of which will be cached.
122 |
123 | #### `options`
124 |
125 | Options for `gulp-cache` plugin.
126 |
127 | ##### `options.fileCache`
128 |
129 | > [Optional] Where to store the cache objects
130 |
131 | - Defaults to `new Cache({ cacheDirName: 'gulp-cache' })`
132 |
133 | - Create your own with [`new cache.Cache({ cacheDirName: 'custom-cache' })`](https://github.com/jgable/cache-swap)
134 |
135 | ##### `options.name`
136 |
137 | > [Optional] The name of the bucket which stores the cached objects
138 |
139 | - Defaults to `default`
140 |
141 | ##### `options.key`
142 |
143 | > [Optional] What to use to determine the uniqueness of an input file for this task.
144 |
145 | - Can return a string or a `Promise` that resolves to a string.
146 |
147 | - The result of this method is converted to a unique MD5 hash automatically; no need to do this yourself.
148 |
149 | - Defaults to `file.contents` if a Buffer, or `undefined` if a Stream.
150 |
151 | ##### `options.success`
152 |
153 | > [Optional] How to determine if the resulting file was successful.
154 |
155 | - Must return a truthy value that is used to determine whether to cache the result of the task. `Promise` is supported.
156 |
157 | - Defaults to true, so any task results will be cached.
158 |
159 | ##### `options.value`
160 |
161 | > [Optional] What to store as the cached result of the task.
162 |
163 | - Can be a function that returns an Object or a `Promise` that resolves to an Object.
164 |
- Can also be set to a string that will be picked off the task result file.
166 |
167 | - The result of this method is run through `JSON.stringify` and stored in a temp file for later retrieval.
168 |
169 | - Defaults to `'contents'` which will grab the resulting file.contents and store them as a string.
170 |
171 | ## Clearing the cache
172 |
173 | If you find yourself needing to clear the cache, there is a handy dandy `cache.clearAll()` method:
174 |
175 | ```js
176 | import cache from 'gulp-cache';
177 |
178 | gulp.task('clear', () =>
179 | cache.clearAll()
180 | );
181 | ```
182 |
183 | You can then run it with `gulp clear`.
184 |
185 | ## One-to-many caching
186 |
To support one-to-many caching in your Gulp plugin, you should:
188 |
189 | * Use `clone` method, to save `_cachedKey` property:
190 | ```js
191 | const outputFile1 = inputFile.clone({ contents: false });
192 | const outputFile2 = inputFile.clone({ contents: false });
193 |
outputFile1.contents = Buffer.from(...);
outputFile2.contents = Buffer.from(...);
196 |
197 | const outputFiles = [
198 | outputFile1,
199 | outputFile2,
200 | ...
201 | ];
202 | ```
203 | * Or, do it manually:
204 | ```js
205 | const outputFiles = [
206 | new Vinyl({..., _cachedKey: inputFile._cachedKey}),
207 | new Vinyl({..., _cachedKey: inputFile._cachedKey}),
208 | ...
209 | ];
210 | ```
211 |
212 | ## License
213 |
214 | [The MIT License (MIT)](./LICENSE)
215 |
216 | Copyright (c) 2014 - present [Jacob Gable](http://jacobgable.com)
217 |
--------------------------------------------------------------------------------
/jest.config.json:
--------------------------------------------------------------------------------
1 | {
2 | "testEnvironment": "node",
3 | "testRegex": "/test/.*\\.spec\\.(jsx?|tsx?)$",
4 | "transform": {
5 | "^.+\\.(jsx?|tsx?)$": "babel-jest"
6 | },
7 | "collectCoverage": true,
8 | "collectCoverageFrom": [
9 | "src/**/*.{js,jsx,ts,tsx}",
10 | "!**/*.d.ts",
11 | "!**/node_modules/**"
12 | ],
13 | "coverageReporters": [
14 | "lcovonly",
15 | "text"
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gulp-cache",
3 | "version": "1.1.3",
4 | "description": "A cache proxy plugin for Gulp",
5 | "author": "Jacob Gable (http://jacobgable.com)",
6 | "contributors": [
7 | "Tyler Akins (https://github.com/fidian)",
8 | "Shinnosuke Watanabe (https://github.com/shinnn)",
9 | "Dan Green (https://trigen.pro)"
10 | ],
11 | "license": "MIT",
12 | "repository": {
13 | "type": "git",
14 | "url": "https://github.com/jgable/gulp-cache"
15 | },
16 | "bugs": {
17 | "url": "https://github.com/jgable/gulp-cache/issues"
18 | },
19 | "main": "lib/index.js",
20 | "engines": {
21 | "node": ">=8.0.0"
22 | },
23 | "scripts": {
24 | "lint": "trigen-scripts lint",
25 | "jest": "trigen-scripts jest",
26 | "checkSize": "trigen-scripts checkSize",
27 | "test": "trigen-scripts test",
28 | "start": "trigen-scripts start",
29 | "build": "trigen-scripts build",
30 | "cleanPublish": "trigen-scripts cleanPublish",
31 | "coverage": "cat ./coverage/lcov.info | coveralls"
32 | },
33 | "keywords": [
34 | "gulpplugin",
35 | "gulp",
36 | "cache"
37 | ],
38 | "dependencies": {
39 | "@babel/runtime": "^7.5.5",
40 | "cache-swap": "^0.3.0",
41 | "core-js": "3",
42 | "object.pick": "^1.3.0",
43 | "plugin-error": "^1.0.1",
44 | "through2": "3.0.1",
45 | "vinyl": "^2.2.0"
46 | },
47 | "devDependencies": {
48 | "@trigen/scripts": "2.6.3",
49 | "@trigen/scripts-plugin-babel": "2.6.3",
50 | "@trigen/scripts-plugin-eslint": "2.6.3",
51 | "@trigen/scripts-plugin-jest": "2.6.4",
52 | "@trigen/scripts-plugin-rollup": "2.4.0",
53 | "@trigen/scripts-plugin-size-limit": "2.6.3",
54 | "@trigen/scripts-preset-lib": "2.6.3",
55 | "eslint-plugin-jest": "^22.15.0",
56 | "rollup-plugin-eslint": "^6.0.0",
57 | "rollup-plugin-json": "^4.0.0",
58 | "sinon": "7.4.1"
59 | },
60 | "files": [
61 | "lib"
62 | ]
63 | }
64 |
--------------------------------------------------------------------------------
/rollup.config.js:
--------------------------------------------------------------------------------
1 | import {
2 | external
3 | } from '@trigen/scripts-plugin-rollup/helpers';
4 | import { eslint } from 'rollup-plugin-eslint';
5 | import json from 'rollup-plugin-json';
6 | import commonjs from 'rollup-plugin-commonjs';
7 | import babel from 'rollup-plugin-babel';
8 | import pkg from './package.json';
9 |
10 | const plugins = [
11 | eslint({
12 | exclude: ['**/*.json', 'node_modules/**'],
13 | throwOnError: true
14 | }),
15 | json({
16 | preferConst: true
17 | }),
18 | commonjs(),
19 | babel({
20 | runtimeHelpers: true
21 | })
22 | ];
23 |
24 | export default {
25 | input: 'src/index.js',
26 | plugins,
27 | external: external(pkg, true),
28 | output: {
29 | file: pkg.main,
30 | format: 'cjs',
31 | sourcemap: 'inline'
32 | }
33 | };
34 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | import PluginError from 'plugin-error';
2 | import through from 'through2';
3 | import Cache from 'cache-swap';
4 | import File from 'vinyl';
5 | import pick from 'object.pick';
6 | import { version as VERSION } from '../package.json';
7 | import TaskProxy from './task-proxy';
8 |
9 | const fileCache = new Cache({ cacheDirName: 'gulp-cache' });
10 |
/**
 * Default cache-key generator: plugin version + base64 file contents,
 * so the cache invalidates automatically when gulp-cache is upgraded.
 */
function defaultKey(file) {
	return VERSION + file.contents.toString('base64');
}
14 |
/**
 * Default `restore` implementation: rebuild a vinyl File from the plain
 * object that was JSON round-tripped through the cache.
 *
 * JSON serialization turns a Buffer into `{ type: 'Buffer', data: [...] }`,
 * a plain array, or a base64 string, so normalize `contents` back into a
 * Buffer before constructing the File.
 *
 * Uses a null-check rather than truthiness so that empty contents
 * ('' or []) are still restored as an empty Buffer instead of being left
 * as a string/array, which the vinyl File constructor would reject.
 */
function defaultRestore(restored) {

	if (restored.contents != null) {
		// Handle node 0.11 buffer to JSON as object with { type: 'buffer', data: [...] }
		if (Array.isArray(restored.contents.data)) {
			restored.contents = Buffer.from(restored.contents.data);
		} else
		if (Array.isArray(restored.contents)) {
			restored.contents = Buffer.from(restored.contents);
		} else
		if (typeof restored.contents === 'string') {
			restored.contents = Buffer.from(restored.contents, 'base64');
		}
	}

	const restoredFile = new File(restored);

	// Restore any properties that the original task put on the file;
	// but omit the normal properties of the file
	Object.keys(restored).forEach((key) => {

		if (File.isCustomProp(key)) {
			restoredFile[key] = restored[key];
		}
	});

	return restoredFile;
}
43 |
/**
 * Default `value` implementation: convert a vinyl File into a plain,
 * JSON-serializable object containing the standard vinyl properties plus
 * any custom properties the task attached to the file.
 */
function defaultValue(file) {

	const keysToKeep = [
		'cwd',
		'base',
		'contents',
		'stat',
		'history',
		'path',
		...Object.keys(file).filter(File.isCustomProp)
	];

	// Convert from a File object (from vinyl) into a plain object
	return pick(file, keysToKeep);
}
55 |
// Options used when the caller supplies none; any individual entry can be
// overridden via the plugin's `inputOptions` argument or `task.cacheable`.
const defaultOptions = {
	fileCache,       // shared cache-swap instance (bucket dir "gulp-cache")
	name: 'default', // default bucket name
	success: true,   // `true` means every task result is cached
	key: defaultKey,
	restore: defaultRestore,
	value: defaultValue
};

// Expose internals on the plugin function so users can build custom caches
// or tweak defaults. (`plugin` is a hoisted function declaration below, so
// assigning properties here is safe.)
plugin.Cache = Cache;
plugin.fileCache = fileCache;
plugin.defaultOptions = defaultOptions;
68 |
/**
 * Create a gulp-compatible transform stream that proxies `task`, serving
 * cached results when available and caching new output otherwise.
 *
 * @param {stream.Transform} task - the gulp plugin stream to cache.
 * @param {object} [inputOptions] - overrides for `plugin.defaultOptions`.
 * @returns {stream.Transform} object-mode through stream.
 * @throws {PluginError} when no task is given.
 */
export default function plugin(task, inputOptions = {}) {

	if (!task) {
		throw new PluginError('gulp-cache', 'Must pass a task to cache()');
	}

	// Precedence: caller options > task.cacheable hints > defaults.
	const options = {
		...plugin.defaultOptions,
		...(task.cacheable || {}),
		...inputOptions
	};
	const taskProxy = new TaskProxy(task, options);

	// Must stay a regular function: through2 binds `this` to the stream,
	// which `this.push` below relies on.
	function each(file, enc, next) {

		// Pass null files (e.g. directories) straight through.
		if (file.isNull()) {
			next(null, file);
			return;
		}

		// Streaming contents cannot be hashed/cached.
		if (file.isStream()) {
			next(new PluginError('gulp-cache', 'Cannot operate on stream sources'));
			return;
		}

		const signals = taskProxy.processFile(file);

		signals.on('error', (err) => {
			next(new PluginError('gulp-cache', err));
		});
		signals.on('file', (outputFile) => {
			this.push(outputFile);
		});
		signals.on('done', () => {
			next(null);
		});
	}

	// Persist queued cache entries when the stream ends.
	function flush(next) {
		taskProxy.flush(next);
	}

	return through.obj(each, flush);
}
115 |
plugin.clear =
/**
 * Create a stream that removes each passing file's cache entry
 * (without running any task).
 *
 * @param {object} [inputOptions] - overrides for `plugin.defaultOptions`.
 * @returns {stream.Transform} object-mode through stream.
 */
function clear(inputOptions) {

	const options = {
		...plugin.defaultOptions,
		...inputOptions
	};
	// No task: the proxy is used only for cache-key lookup and removal.
	const taskProxy = new TaskProxy(null, options);

	async function each(file, enc, next) {

		if (file.isNull()) {
			next(null, file);
			return;
		}

		if (file.isStream()) {
			next(new PluginError('gulp-cache', 'Cannot operate on stream sources'));
			return;
		}

		try {
			await taskProxy.removeCachedResult(file);
			next(null, file);
		} catch (err) {
			next(new PluginError('gulp-cache', err));
		}
	}

	return through.obj(each);
};
149 |
plugin.clearAll =
/**
 * Wipe the entire shared file cache.
 *
 * @returns {Promise<void>} rejects with a PluginError on failure.
 */
function clearAll() {
	return new Promise((resolve, reject) => {
		fileCache.clear(null, (err) => {

			if (err) {
				reject(new PluginError(
					'gulp-cache',
					`Problem clearing the cache: ${err.message}`
				));
			} else {
				resolve();
			}
		});
	});
};
167 |
--------------------------------------------------------------------------------
/src/task-proxy.js:
--------------------------------------------------------------------------------
1 | import EventEmitter from 'events';
2 | import crypto from 'crypto';
3 | import File from 'vinyl';
4 | import pick from 'object.pick';
5 |
// Indentation width passed to JSON.stringify when serializing cached results.
const whitespaces = 2;
// Number of listeners attached to the proxied task per processed file
// ('data', 'gulp-cache:transformed', 'error') — used to adjust maxListeners.
const eventListenersCount = 3;
8 |
/**
 * Reduce an arbitrary cache key to a short stable hex identifier.
 * MD5 here is a fast, stable digest for cache filenames — not a
 * security feature.
 */
function makeHash(key) {
	const md5 = crypto.createHash('md5');
	md5.update(key);
	return md5.digest('hex');
}
12 |
/**
 * TaskProxy pipes files through a wrapped gulp plugin ("task") while
 * consulting a file cache first: cache hits are emitted directly, misses
 * are run through the task and the output is queued for caching on flush.
 * Results are reported through a per-file EventEmitter ("signals") with
 * 'file', 'done', 'error' and internal 'cache' events.
 */
export default class TaskProxy {

	constructor(task, inputOptions) {

		this.task = task;
		this.options = inputOptions;
		// Map of cachedKey -> [{ file, meta }] awaiting storage on flush.
		this._cacheQueue = new Map();
		// Teardown callbacks that detach the listeners added per file.
		this._removeListeners = [];

		// `task` is null when the proxy is used only to remove cache
		// entries (see `plugin.clear` in index.js).
		if (task) {
			this.patchTask();
		}
	}

	/**
	 * Wrap the task's `_transform` so the task emits
	 * 'gulp-cache:transformed' after each chunk's callback has run — this
	 * is how `_runProxiedTask` knows the task finished producing output.
	 */
	patchTask() {

		const { task } = this;
		const { _transform } = task;

		task._transform = (chunk, encoding, next) => {

			Reflect.apply(_transform, task, [chunk, encoding, (...args) => {
				next(...args); // eslint-disable-line
				task.emit('gulp-cache:transformed');
			}]);
		};
	}

	/**
	 * Begin async processing of one input file. Returns `signals`
	 * synchronously so the caller can attach listeners before any events
	 * fire (the actual work starts on the next tick).
	 */
	processFile(inputFile, signals = new EventEmitter()) {

		process.nextTick(() => {
			this._processFileAsync(inputFile, signals);
		});

		return signals;
	}

	// NOTE(review): a rejection from `_checkForCachedValue` here is not
	// routed to `signals.emit('error', ...)` — it becomes an unhandled
	// rejection; confirm whether that is intentional.
	async _processFileAsync(inputFile, signals = new EventEmitter()) {

		const cached = await this._checkForCachedValue(inputFile);
		// If we found a cached value
		// The path of the cache key should also be identical to the original one when the file path changed inside the task
		const cachedValue = cached.value;
		const cachedValueIsEmpty = !Array.isArray(cachedValue) || !cachedValue.length;
		const cachedValuesWithNormalPaths = cachedValueIsEmpty ? [] : cachedValue.filter(
			file =>
				(!file.gulpCache$filePathChangedInsideTask || file.gulpCache$originalPath === inputFile.path)
				&& (!file.gulpCache$fileBaseChangedInsideTask || file.gulpCache$originalBase === inputFile.base)
		);

		if (cachedValuesWithNormalPaths.length) {

			cachedValuesWithNormalPaths.forEach((cachedFile) => {
				// Extend the cached value onto the file, but don't overwrite original path info
				const file = new File({
					// custom properties
					...cachedFile,
					// file info
					...pick(inputFile, ['cwd', 'base', 'stat', 'history', 'path']),
					// file contents
					contents: cachedFile.contents
				});

				// Restore the file path if it was set
				if (cachedFile.path && cachedFile.gulpCache$filePathChangedInsideTask) {
					file.path = cachedFile.path;
				}

				// Restore the file base if it was set
				if (cachedFile.base && cachedFile.gulpCache$fileBaseChangedInsideTask) {
					file.base = cachedFile.base;
				}

				// Strip the bookkeeping properties before emitting the file.
				Reflect.deleteProperty(file, 'gulpCache$filePathChangedInsideTask');
				Reflect.deleteProperty(file, 'gulpCache$fileBaseChangedInsideTask');
				Reflect.deleteProperty(file, 'gulpCache$originalPath');
				Reflect.deleteProperty(file, 'gulpCache$originalBase');

				signals.emit('file', file);
			});

			signals.emit('done');

			this._removeListeners.push(() => {
				// Remove all listeners from `signals`
				signals.removeAllListeners();
			});

			return;
		}

		// Cache miss: run the real task and queue its output for caching.
		this._runProxiedTaskAndQueueCache(inputFile, cached.key, signals);
	}

	/**
	 * Stream-flush hook: drain the wrapped task's `_flush` (if any), then
	 * persist all queued cache entries before calling `next`.
	 */
	async flush(next) {

		const { task } = this;

		try {

			if (typeof task._flush == 'function') {
				task._flush(async (...args) => {
					await this._flush();
					next(...args);
				});
			} else {
				await this._flush();
				next();
				return;
			}

		} catch (err) {
			next(err);
			return;
		}
	}

	// Detach per-file listeners, store everything queued, reset the queue.
	async _flush() {

		this._removeListeners.forEach((remove) => {
			remove();
		});

		this._removeListeners = [];

		await Promise.all(
			Array.from(this._cacheQueue).map(
				async ([cachedKey, files]) =>
					this._storeCachedResult(cachedKey, files)
			)
		);

		this._cacheQueue = new Map();
	}

	/**
	 * Remove the cache entry for `file` (keyed via `options.key`) from
	 * the bucket `options.name`.
	 */
	async removeCachedResult(file) {

		const cachedKey = await this._getFileKey(file);

		return this._removeCached(
			this.options.name,
			cachedKey
		);
	}

	// Resolve `options.key` (may be async) and hash the result; a falsy
	// key is passed through unchanged so caching can be skipped.
	async _getFileKey(file) {

		const { key: getKey } = this.options;
		const key = await getKey(file);

		return key ? makeHash(key) : key;
	}

	/**
	 * Look up `file` in the cache. Resolves to `{ key, value }` where
	 * `value` is an array of restored file-like objects, or null on miss.
	 */
	async _checkForCachedValue(file) {

		const key = await this._getFileKey(file);

		// If no key returned, bug out early
		if (!key) {
			return {
				value: null,
				key
			};
		}

		const { name: cacheName, restore } = this.options;
		const cached = await this._getCached(cacheName, key);

		if (!cached) {
			return {
				value: null,
				key
			};
		}

		let parsedContents = null;

		try {
			parsedContents = JSON.parse(cached.contents);
		} catch (err) {
			// Not JSON — wrap the raw cached contents instead.
			parsedContents = [{ cached: cached.contents }];
		}

		if (restore) {
			parsedContents = parsedContents.map(
				(parsedFile) => {

					const restoredFile = restore(parsedFile);

					// Force restore service properties
					restoredFile.gulpCache$filePathChangedInsideTask = parsedFile.gulpCache$filePathChangedInsideTask;
					restoredFile.gulpCache$fileBaseChangedInsideTask = parsedFile.gulpCache$fileBaseChangedInsideTask;
					restoredFile.gulpCache$originalPath = parsedFile.gulpCache$originalPath;
					restoredFile.gulpCache$originalBase = parsedFile.gulpCache$originalBase;

					return restoredFile;
				}
			);
		}

		return {
			value: parsedContents,
			key
		};
	}

	/**
	 * Apply `options.value` to a task result: a function is invoked, a
	 * string picks that single property, anything else is returned as-is.
	 */
	async _getValueFromResult(result) {

		const { value: getValue } = this.options;

		if (typeof getValue !== 'function') {

			if (typeof getValue === 'string') {
				return {
					[getValue]: result[getValue]
				};
			}

			return getValue;
		}

		return getValue(result);
	}

	/**
	 * Serialize queued results (filtered by `options.success`) and write
	 * them into the cache bucket under `key`.
	 */
	async _storeCachedResult(key, result) {

		// If we didn't have a cachedKey, skip caching result
		if (!key) {
			return result;
		}

		const { options } = this;
		const files = (await Promise.all(result.map(
			async ({ file, meta }) => {

				if (options.success !== true && !(await options.success(file))) {
					return null;
				}

				return Object.assign(
					await this._getValueFromResult(file),
					meta
				);
			}
		))).filter(Boolean);

		return this._addCached(
			this.options.name,
			key,
			JSON.stringify(files, null, whitespaces)
		);
	}

	// Queue one output file (with path-change metadata) for storage on flush.
	async _queueCache(file, cachedKey, originalBase, originalPath) {

		const { _cacheQueue } = this;
		const item = {
			file: file.clone({ contents: false }),
			meta: {
				// Check if the task changed the file path
				gulpCache$filePathChangedInsideTask: file.path !== originalPath,
				// Check if the task changed the base path
				gulpCache$fileBaseChangedInsideTask: file.base !== originalBase,
				// Keep track of the original path
				gulpCache$originalPath: originalPath,
				// Keep track of the original base
				gulpCache$originalBase: originalBase
			}
		};

		if (_cacheQueue.has(cachedKey)) {
			_cacheQueue.get(cachedKey).push(item);
		} else {
			_cacheQueue.set(cachedKey, [item]);
		}
	}

	// Run the task on `file`, queueing every emitted output for caching.
	_runProxiedTaskAndQueueCache(file, cachedKey, signals = new EventEmitter()) {

		const originalBase = file.base;
		const originalPath = file.path;

		signals.on('cache', (file) => {
			this._queueCache(file, cachedKey, originalBase, originalPath);
			signals.emit('file', file);
		});

		return this._runProxiedTask(file, cachedKey, signals);
	}

	/**
	 * Write `file` into the wrapped task and relay its output (matched by
	 * the `_cachedKey` tag) through `signals`: 'cache'/'file' per output,
	 * 'done' when transformed, 'error' on failure.
	 */
	_runProxiedTask(file, cachedKey, signals = new EventEmitter()) {

		const { task } = this;
		const hasCacheListener = Boolean(signals.listenerCount('cache'));

		function onError(err) {
			signals.emit('error', err);
		}

		function onData(datum) {

			// Only handle output that belongs to this proxied write.
			if (datum._cachedKey !== cachedKey) {
				return;
			}

			Reflect.deleteProperty(datum, '_cachedKey');

			if (hasCacheListener) {
				signals.emit('cache', datum);
			} else {
				signals.emit('file', datum);
			}
		}

		function onTransformed() {
			signals.emit('done');
		}

		this._removeListeners.push(() => {
			// Be good citizens and remove our listeners
			task.removeListener('error', onError);
			task.removeListener('gulp-cache:transformed', onTransformed);
			task.removeListener('data', onData);

			// Reduce the maxListeners back down
			// NOTE(review): `_maxListeners` is a private Node internal;
			// `task.getMaxListeners()` is the supported accessor — confirm.
			task.setMaxListeners(task._maxListeners - eventListenersCount);

			// Remove all listeners from `signals`
			signals.removeAllListeners();
		});

		// Bump up max listeners to prevent memory leak warnings
		const currMaxListeners = task._maxListeners || 0;

		task.setMaxListeners(currMaxListeners + eventListenersCount);

		task.on('data', onData);
		task.once('gulp-cache:transformed', onTransformed);
		task.once('error', onError);

		// Tag the file so `onData` can match the task's output to this run.
		file._cachedKey = cachedKey;

		// Run through the other task and grab output (or error)
		task.write(file);

		return signals;
	}

	/**
	 * Cache promise wrappers.
	 */

	// Promisified `fileCache.addCached(name, key, contents)`.
	_addCached(...args) {
		return new Promise((resolve, reject) => {
			this.options.fileCache.addCached(...args, (err, res) => {

				if (err) {
					reject(err);
					return;
				}

				resolve(res);
			});
		});
	}

	// Promisified `fileCache.getCached(name, key)`.
	_getCached(...args) {
		return new Promise((resolve, reject) => {
			this.options.fileCache.getCached(...args, (err, res) => {

				if (err) {
					reject(err);
					return;
				}

				resolve(res);
			});
		});
	}

	// Promisified `fileCache.removeCached(name, key)`.
	_removeCached(...args) {
		return new Promise((resolve, reject) => {
			this.options.fileCache.removeCached(...args, (err) => {

				if (err) {
					reject(err);
					return;
				}

				resolve();
			});
		});
	}
}
407 |
--------------------------------------------------------------------------------
/test/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "plugins": [
3 | "jest"
4 | ],
5 | "env": {
6 | "jest/globals": true
7 | },
8 | "rules": {
9 | "no-magic-numbers": "off",
10 | "max-nested-callbacks": "off"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/test/plugin.spec.js:
--------------------------------------------------------------------------------
1 | import crypto from 'crypto';
2 | import path from 'path';
3 | import File from 'vinyl';
4 | import through from 'through2';
5 | import sinon from 'sinon';
6 | import cache from '../src';
7 |
8 | describe('gulp-cache', () => {
9 |
10 | let sandbox = null;
11 | let fakeFileHandler = null;
12 | let fakeTask = null;
13 |
14 | beforeEach((done) => {
15 |
16 | sandbox = sinon.createSandbox();
17 |
18 | // Spy on the fakeFileHandler to check if it gets called later
19 | fakeFileHandler = sandbox.spy((file, enc, cb) => {
20 |
21 | file.ran = true;
22 |
23 | if (Buffer.isBuffer(file.contents)) {
24 | file.contents = new Buffer(`${String(file.contents)}-modified`);
25 | }
26 |
27 | cb(null, file);
28 | });
29 |
30 | fakeTask = through.obj(fakeFileHandler);
31 |
32 | cache.fileCache.clear('default', done);
33 | });
34 |
35 | afterEach(() => {
36 | sandbox.restore();
37 | });
38 |
39 | it('throws an error if no task is passed', () => {
40 | expect(() => cache()).toThrow();
41 | });
42 |
43 | it('exposes the Cache object for creating custom Caches', () => {
44 | expect(cache.Cache).toBeTruthy();
45 | });
46 |
47 | it('pass through the directories', (done) => {
48 |
49 | const directory = new File();
50 | const proxied = cache(fakeTask);
51 |
52 | proxied
53 | .on('data', (file) => {
54 | expect(file).toEqual(directory);
55 | expect(file.isNull()).toBe(true);
56 | done();
57 | })
58 | .end(new File());
59 | });
60 |
61 | describe('in streaming mode', () => {
62 | it('does not work', (done) => {
63 | // Create a proxied plugin stream
64 | const proxied = cache(fakeTask, {
65 | key(file, cb) {
66 | // For testing async key generation
67 | setTimeout(() => {
68 | cb(null, '123');
69 | }, 1);
70 | },
71 | value(file, cb) {
72 | // For testing async value generation
73 | setTimeout(() => {
74 | cb(null, {
75 | ran: file.ran,
76 | cached: true
77 | });
78 | }, 1);
79 | }
80 | });
81 |
82 | proxied
83 | .on('error', (err) => {
84 | expect(err.message).toBe('Cannot operate on stream sources');
85 | done();
86 | })
87 | .end(new File({ contents: through() }));
88 | });
89 | });
90 |
91 | describe('in buffered mode', () => {
92 | it('only caches successful tasks', (done) => {
93 | // Create a proxied plugin stream
94 | const valStub = sandbox.stub().returns({
95 | ran: true,
96 | cached: true
97 | });
98 |
99 | cache(fakeTask, {
100 | success() {
101 | return false;
102 | },
103 | value: valStub
104 | })
105 | .on('data', () => {
106 | expect(valStub.called).toBe(false);
107 | done();
108 | })
109 | .end(new File({
110 | contents: new Buffer('abufferwiththiscontent')
111 | }));
112 | });
113 |
114 | it('sets the content correctly on subsequently ran cached tasks', (done) => {
115 | // Create a proxied plugin stream
116 | const proxied = cache(fakeTask, {
117 | success() {
118 | return true;
119 | }
120 | });
121 |
122 | proxied.once('data', (file) => {
123 | expect(String(file.contents)).toBe('abufferwiththiscontent-modified');
124 |
125 | proxied.once('data', (file2) => {
126 | expect(file2.isBuffer()).toBe(true);
127 | expect(String(file2.contents)).toBe('abufferwiththiscontent-modified');
128 | });
129 |
130 | proxied.end(new File({
131 | contents: new Buffer('abufferwiththiscontent')
132 | }));
133 | });
134 |
135 | proxied.write(new File({
136 | contents: new Buffer('abufferwiththiscontent')
137 | }));
138 |
139 | proxied.on('end', done);
140 | });
141 |
142 | it('can proxy a task with specific options', (done) => {
143 | // create the fake file
144 | const fakeFile = new File({
145 | contents: new Buffer('abufferwiththiscontent')
146 | });
147 | const otherFile = new File({
148 | contents: new Buffer('abufferwiththiscontent')
149 | });
150 | const opts = {
151 | value(file) {
152 | return {
153 | ran: file.ran,
154 | cached: true,
155 | contents: file.contents || file._contents
156 | };
157 | }
158 | };
159 | // Create a proxied plugin stream
160 | let proxied = cache(fakeTask, opts);
161 |
162 | // write the fake file to it
163 | proxied.write(fakeFile);
164 |
165 | // wait for the file to come back out
166 | proxied.once('data', file => proxied._flush(() => {
167 | // make sure it came out the same way it went in
168 | expect(file.isBuffer()).toBe(true);
169 |
170 | // check the contents are same
171 | expect(String(file.contents)).toEqual('abufferwiththiscontent-modified');
172 | // Check it assigned the proxied task result
173 | expect(file.ran).toEqual(true);
174 | expect(file.cached).toBeFalsy();
175 |
176 | // Check the original task was called
177 | expect(fakeFileHandler.called).toEqual(true);
178 |
179 | // Reset for the second run through
180 | fakeFileHandler.resetHistory();
181 | // Refresh proxied
182 | proxied = cache(fakeTask, opts);
183 | // Write the same file again, should be cached result
184 | proxied.write(otherFile);
185 |
186 | proxied.once('data', (secondFile) => {
187 |
188 | expect(secondFile.isBuffer()).toEqual(true);
189 |
190 | expect(String(secondFile.contents)).toEqual('abufferwiththiscontent-modified');
191 |
192 | // Cached value should have been applied
193 | expect(secondFile.ran).toEqual(true);
194 | expect(secondFile.cached).toEqual(true);
195 |
196 | // Should not have called the original task
197 | expect(fakeFileHandler.called).toEqual(false);
198 |
199 | done();
200 | });
201 | }));
202 | });
203 |
204 | it('can proxy a task using task.cacheable', (done) => {
205 | // Let the task define the cacheable aspects.
206 | fakeTask.cacheable = {
207 | key: sandbox.spy(file => String(file.contents)),
208 | success: sandbox.stub().returns(true),
209 | value: sandbox.spy(file => ({
210 | ran: true,
211 | cached: true,
212 | contents: file.contents || file._contents
213 | }))
214 | };
215 |
216 | let proxied = cache(fakeTask);
217 |
218 | // write the fake file to it
219 | proxied.write(new File({ contents: new Buffer('abufferwiththiscontent') }));
220 |
221 | // wait for the file to come back out
222 | proxied.once('data', file => proxied._flush(() => {
223 | // make sure it came out the same way it went in
224 | expect(file.isBuffer()).toEqual(true);
225 |
226 | // check the contents are same
227 | expect(String(file.contents)).toEqual('abufferwiththiscontent-modified');
228 |
229 | // Verify the cacheable options were used.
230 | expect(fakeTask.cacheable.key.called).toEqual(true);
231 | expect(fakeTask.cacheable.success.called).toEqual(true);
232 | expect(fakeTask.cacheable.value.called).toEqual(true);
233 | // Reset for the second run through
234 | fakeTask.cacheable.key.resetHistory();
235 | fakeTask.cacheable.success.resetHistory();
236 | fakeTask.cacheable.value.resetHistory();
237 | fakeFileHandler.resetHistory();
238 | // Refresh proxied
239 | proxied = cache(fakeTask);
240 | // Write the same file again, should be cached result
241 | proxied.write(new File({ contents: new Buffer('abufferwiththiscontent') }));
242 |
243 | proxied.once('data', secondFile => proxied._flush(() => {
244 | expect(fakeTask.cacheable.key.called).toEqual(true);
245 | expect(fakeTask.cacheable.success.called).toEqual(false);
246 | expect(fakeTask.cacheable.value.called).toEqual(false);
247 | // Should not have called the original task
248 | expect(fakeFileHandler.called).toEqual(false);
249 | // Cached value should have been applied
250 | expect(secondFile.cached).toEqual(true);
251 | done();
252 | }));
253 | }));
254 | });
255 |
256 | it('can proxy a task using task.cacheable with user overrides', (done) => {
257 | // Let the task define the cacheable aspects.
258 | fakeTask.cacheable = {
259 | key: sandbox.spy(file => String(file.contents)),
260 | success: sandbox.stub().returns(true),
261 | value: sandbox.stub().returns({
262 | ran: true,
263 | cached: true
264 | })
265 | };
266 |
267 | const overriddenValue = sandbox.stub().returns({
268 | ran: true,
269 | cached: true,
270 | overridden: true
271 | });
272 | const opts = { value: overriddenValue };
273 | // write the fake file to it
274 | let proxied = cache(fakeTask, opts);
275 |
276 | proxied.write(new File({
277 | contents: new Buffer('abufferwiththiscontent')
278 | }));
279 |
280 | // wait for the file to come back out
281 | proxied.once('data', file => proxied._flush(() => {
282 | // make sure it came out the same way it went in
283 | expect(file.isBuffer()).toEqual(true);
284 |
285 | // check the contents are same
286 | expect(String(file.contents)).toEqual('abufferwiththiscontent-modified');
287 |
288 | // Verify the cacheable options were used.
289 | expect(fakeTask.cacheable.key.called).toEqual(true);
290 | expect(fakeTask.cacheable.success.called).toEqual(true);
291 | expect(fakeTask.cacheable.value.called).toEqual(false);
292 | expect(overriddenValue.called).toEqual(true);
293 |
294 | fakeTask.cacheable.key.resetHistory();
295 | fakeTask.cacheable.success.resetHistory();
296 | fakeTask.cacheable.value.resetHistory();
297 | overriddenValue.resetHistory();
298 | fakeFileHandler.resetHistory();
299 |
300 | // Refresh proxied
301 | proxied = cache(fakeTask, opts);
302 | // Write the same file again, should be cached result
303 | proxied.write(new File({
304 | contents: new Buffer('abufferwiththiscontent')
305 | }));
306 |
307 | proxied.once('data', secondFile => proxied._flush(() => {
308 | // Cached value should have been applied
309 | expect(secondFile.cached).toEqual(true);
310 | expect(secondFile.overridden).toEqual(true);
311 |
312 | expect(fakeTask.cacheable.key.called).toEqual(true);
313 | expect(fakeTask.cacheable.success.called).toEqual(false);
314 | expect(fakeTask.cacheable.value.called).toEqual(false);
315 | expect(overriddenValue.called).toEqual(false);
316 |
317 | // Should not have called the original task
318 | expect(fakeFileHandler.called).toEqual(false);
319 |
320 | done();
321 | }));
322 | }));
323 | });
324 |
325 | it('can be passed just a string for the value', (done) => {
326 |
327 | const opts = { value: 'ran' };
328 | // Create a proxied plugin stream
329 | let proxied = cache(fakeTask, opts);
330 |
331 | proxied.write(new File({
332 | contents: new Buffer('abufferwiththiscontent')
333 | }));
334 |
335 | proxied.once('data', file => proxied._flush(() => {
336 | // Check it assigned the proxied task result
337 | expect(file.ran).toEqual(true);
338 |
339 | // Refresh proxied
340 | proxied = cache(fakeTask, opts);
341 |
342 | // Write the same file again, should be cached result
343 | proxied.end(new File({
344 | contents: new Buffer('abufferwiththiscontent')
345 | }));
346 |
347 | proxied.once('data', secondFile => proxied._flush(() => {
348 | // Cached value should have been applied
349 | expect(secondFile.ran).toEqual(true);
350 | done();
351 | }));
352 | }));
353 | });
354 |
355 | it('can store changed contents of files', (done) => {
356 | const updatedFileHandler = sandbox.spy((file, enc, cb) => {
357 | file.contents = new Buffer('updatedcontent');
358 | cb(null, file);
359 | });
360 |
361 | fakeTask = through.obj(updatedFileHandler);
362 |
363 | // Create a proxied plugin stream
364 | let proxied = cache(fakeTask);
365 |
366 | // write the fake file to it
367 | proxied.write(new File({
368 | contents: new Buffer('abufferwiththiscontent')
369 | }));
370 |
371 | // wait for the file to come back out
372 | proxied.once('data', file => proxied._flush(() => {
373 | // Check for updated content
374 | expect(String(file.contents)).toEqual('updatedcontent');
375 |
376 | // Check original handler was called
377 | expect(updatedFileHandler.called).toEqual(true);
378 |
379 | updatedFileHandler.resetHistory();
380 |
381 | // Refresh proxied
382 | proxied = cache(fakeTask);
383 |
384 | proxied.once('data', () => proxied._flush(() => {
385 | expect(String(file.contents)).toEqual('updatedcontent');
386 |
387 | // Check original handler was not called.
388 | expect(updatedFileHandler.called).toEqual(false);
389 |
390 | done();
391 | }));
392 |
393 | // Write the same file again, should be cached result
394 | proxied.write(new File({
395 | contents: new Buffer('abufferwiththiscontent')
396 | }));
397 | }));
398 | });
399 |
400 | it('can store one-to-many cache', (done) => {
401 |
402 | const updatedFileHandler = sandbox.spy(function each(file, enc, cb) {
403 |
404 | const outputFile1 = file.clone({ contents: false });
405 | const outputFile2 = file.clone({ contents: false });
406 |
407 | outputFile1.contents = new Buffer(`${String(file.contents)}-1`);
408 | outputFile2.contents = new Buffer(`${String(file.contents)}-2`);
409 |
410 | this.push(outputFile1);
411 | this.push(outputFile2);
412 |
413 | cb(null);
414 | });
415 | const pushedFilesCount = 2;
416 | const targetFile = new File({
417 | contents: new Buffer('abufferwiththiscontent')
418 | });
419 |
420 | fakeTask = through.obj(updatedFileHandler);
421 |
422 | const opts = {
423 | value: sandbox.spy(cache.defaultOptions.value),
424 | restore: sandbox.spy(cache.defaultOptions.restore)
425 | };
426 | // Create a proxied plugin stream
427 | let proxied = cache(fakeTask, opts);
428 | let count = 0;
429 |
430 | cacheStep();
431 |
432 | function cacheStep() {
433 |
434 | proxied.on('data', (file) => {
435 |
436 | if (count == 0) {
437 | expect(String(file.contents)).toEqual('abufferwiththiscontent-1');
438 | } else {
439 | expect(String(file.contents)).toEqual('abufferwiththiscontent-2');
440 | }
441 |
442 | count++;
443 | });
444 |
445 | proxied.on('end', () => {
446 | expect(count).toEqual(pushedFilesCount);
447 | expect(opts.value.called).toEqual(true);
448 | expect(opts.restore.called).toEqual(false);
449 | fromCacheStep();
450 | });
451 |
452 | // write the fake file to it
453 | proxied.end(targetFile);
454 | }
455 |
456 | function fromCacheStep() {
457 |
458 | opts.value.resetHistory();
459 | opts.restore.resetHistory();
460 |
461 | proxied = cache(fakeTask, opts);
462 | count = 0;
463 |
464 | proxied.on('data', (file) => {
465 |
466 | if (count == 0) {
467 | expect(String(file.contents)).toEqual('abufferwiththiscontent-1');
468 | } else {
469 | expect(String(file.contents)).toEqual('abufferwiththiscontent-2');
470 | }
471 |
472 | count++;
473 | });
474 |
475 | proxied.on('end', () => {
476 | expect(count).toEqual(pushedFilesCount);
477 | expect(opts.value.called).toEqual(false);
478 | expect(opts.restore.called).toEqual(true);
479 | done();
480 | });
481 |
482 | // write the fake file to it
483 | proxied.end(targetFile);
484 | }
485 | });
486 |
487 | it('does not throw memory leak warning when proxying tasks', (done) => {
488 |
489 | const delay = 10;
490 | const filesCount = 30;
491 |
492 | fakeTask = through.obj((file, enc, cb) => {
493 | setTimeout(() => {
494 | file.contents = new Buffer(`${file.contents.toString()} updated`);
495 |
496 | cb(null, file);
497 | }, delay);
498 | });
499 |
500 | const proxied = cache(fakeTask);
501 | const origMaxListeners = fakeTask._maxListeners;
502 | const errSpy = sandbox.spy(console, 'error');
503 | let processedCount = 0;
504 |
505 | proxied
506 | .on('data', () => {
507 | processedCount += 1;
508 | })
509 | .on('end', () => {
510 | expect(processedCount).toEqual(filesCount);
511 | expect(errSpy.called).toEqual(false, 'Called console.error');
512 | expect(fakeTask._maxListeners).toEqual(origMaxListeners || 0);
513 |
514 | done();
515 | });
516 |
517 | Array.from({
518 | length: filesCount
519 | }).forEach((_, i) => {
520 | proxied.write(new File({
521 | contents: new Buffer(`Test File ${i}`)
522 | }));
523 | });
524 |
525 | proxied.end();
526 | });
527 |
528 | it('sets the cache based on file contents and path', (done) => {
529 | const filePath = path.join(process.cwd(), 'test', 'fixtures', 'in', 'file1.txt');
530 | const otherFilePath = path.join(process.cwd(), 'test', 'fixtures', 'in', 'file2.txt');
531 | const updatedFileHandler = sandbox.spy((file, enc, cb) => {
532 | file.contents = new Buffer('updatedcontent');
533 |
534 | cb(null, file);
535 | });
536 |
537 | fakeTask = through.obj(updatedFileHandler);
538 |
539 | // Create a proxied plugin stream
540 | let proxied = cache(fakeTask);
541 |
542 | // write the fake file to it
543 | proxied.write(new File({
544 | path: filePath,
545 | contents: new Buffer('abufferwiththiscontent')
546 | }));
547 |
548 | // wait for the file to come back out
549 | proxied.once('data', file => proxied._flush(() => {
550 | // Check original handler was called
551 | expect(updatedFileHandler.called).toEqual(true);
552 |
553 | // Check the path is on there
554 | expect(file.path).toEqual(filePath);
555 |
556 | updatedFileHandler.resetHistory();
557 |
558 | // Refresh proxied
559 | proxied = cache(fakeTask);
560 |
561 | // Write a file with same content but different path, should be cached result
562 | proxied.write(new File({
563 | path: otherFilePath,
564 | contents: new Buffer('abufferwiththiscontent')
565 | }));
566 |
567 | proxied.once('data', secondFile => proxied._flush(() => {
568 | // Check for different file path
569 | expect(secondFile.path).toBeTruthy();
570 | expect(secondFile.path).toEqual(otherFilePath);
571 |
572 | // Check original handler was not called
573 | expect(updatedFileHandler.called).toEqual(false);
574 |
575 | done();
576 | }));
577 | }));
578 | });
579 |
580 | it('sets the cache based on file contents and path and keeps track of file path changes within the task', (done) => {
581 | const filePath = path.join(process.cwd(), 'test', 'fixtures', 'in', 'file1.txt');
582 | const otherFilePath = path.join(process.cwd(), 'test', 'fixtures', 'in', 'file2.txt');
583 | const outputFilePath = targetPath => targetPath.replace(/^(.*)\.txt$/i, '$1.txt2');
584 | const updatedFileHandler = sandbox.spy((file, enc, cb) => {
585 | file.contents = new Buffer('updatedcontent');
586 | // Change file path
587 | file.path = outputFilePath(file.path);
588 | cb(null, file);
589 | });
590 |
591 | fakeTask = through.obj(updatedFileHandler);
592 |
593 | // Create a proxied plugin stream
594 | let proxied = cache(fakeTask);
595 |
596 | // write the fake file to it
597 | proxied.write(new File({
598 | path: filePath,
599 | contents: new Buffer('abufferwiththiscontent')
600 | }));
601 |
602 | // wait for the file to come back out
603 | proxied.once('data', file => proxied._flush(() => {
604 | // Check original handler was called
605 | expect(updatedFileHandler.called).toEqual(true);
606 |
607 | // Check it still has the changed output path
608 | expect(file.path).toEqual(outputFilePath(filePath));
609 |
610 | updatedFileHandler.resetHistory();
611 |
612 | // Refresh proxied
613 | proxied = cache(fakeTask);
614 |
615 | // Write another file with the same contents and validate cache result
616 | proxied.write(new File({
617 | path: otherFilePath,
618 | contents: new Buffer('abufferwiththiscontent')
619 | }));
620 |
621 | proxied.once('data', secondFile => proxied._flush(() => {
622 | // Check it still has the changed output path
623 | expect(secondFile.path).toEqual(outputFilePath(otherFilePath));
624 |
625 | // Check original handler was called
626 | expect(updatedFileHandler.called).toEqual(true);
627 |
628 | updatedFileHandler.resetHistory();
629 |
630 | // Refresh proxied
631 | proxied = cache(fakeTask);
632 |
633 | // Write same file again and validate cache result
634 | proxied.write(new File({
635 | path: otherFilePath,
636 | contents: new Buffer('abufferwiththiscontent')
637 | }));
638 |
639 | proxied.once('data', thirdFile => proxied._flush(() => {
640 | // Check it still has the changed output path
641 | expect(thirdFile.path).toEqual(outputFilePath(otherFilePath));
642 |
643 | // Check original handler was not called
644 | expect(updatedFileHandler.called).toEqual(false);
645 |
646 | done();
647 | }));
648 | }));
649 | }));
650 | });
651 |
652 | it('keeps track of file path changes within the task', (done) => {
653 | const filePath = path.join(process.cwd(), 'test', 'fixtures', 'in', 'file1.txt');
654 | const outputFilePath = filePath.replace(/^(.*)\.txt$/i, '$1.txt2');
655 | const updatedFileHandler = sandbox.spy((file, enc, cb) => {
656 | file.contents = new Buffer('updatedcontent');
657 | // Change file path
658 | file.path = outputFilePath;
659 | cb(null, file);
660 | });
661 |
662 | fakeTask = through.obj(updatedFileHandler);
663 |
664 | // Create a proxied plugin stream
665 | let proxied = cache(fakeTask);
666 |
667 | // write the fake file to it
668 | proxied.write(new File({
669 | path: filePath,
670 | contents: new Buffer('abufferwiththiscontent')
671 | }));
672 |
673 | // wait for the file to come back out
674 | proxied.once('data', file => proxied._flush(() => {
675 | // Check original handler was called
676 | expect(updatedFileHandler.called).toBe(true);
677 |
678 | // Check it still has the changed output path
679 | expect(file.path).toBe(outputFilePath);
680 |
681 | updatedFileHandler.resetHistory();
682 |
683 | // Refresh proxied
684 | proxied = cache(fakeTask);
685 |
686 | // Write same file again and validate cache result
687 | proxied.write(new File({
688 | path: filePath,
689 | contents: new Buffer('abufferwiththiscontent')
690 | }));
691 |
692 | proxied.once('data', secondFile => proxied._flush(() => {
693 | // Check it still has the changed output path
694 | expect(secondFile.path).toBe(outputFilePath);
695 |
696 | // Check original handler was not called
697 | expect(updatedFileHandler.called).toBe(false);
698 |
699 | done();
700 | }));
701 | }));
702 | });
703 | });
704 |
705 | it('does nothing when it tries to clear a directory', (done) => {
706 | cache.clear()
707 | .on('data', (file) => {
708 | expect(file.isNull()).toBe(true);
709 | done();
710 | })
711 | .end(new File());
712 | });
713 |
714 | it('cannot clear specific stream cache', (done) => {
715 | cache.clear()
716 | .on('error', (err) => {
717 | expect(err.message).toBe('Cannot operate on stream sources');
718 | done();
719 | })
720 | .end(new File({ contents: through() }));
721 | });
722 |
723 | it('can clear specific buffer cache', (done) => {
724 | const fakeFileCache = {
725 | removeCached: sandbox.spy((category, hash, cb) => cb())
726 | };
727 |
728 | cache.clear({
729 | name: 'somename',
730 | fileCache: fakeFileCache,
731 | key() {
732 | return 'somekey';
733 | }
734 | })
735 | .on('data', () => {
736 | const someKeyHash = crypto.createHash('md5').update('somekey').digest('hex');
737 |
738 | expect(fakeFileCache.removeCached.calledWith('somename', someKeyHash)).toBe(true);
739 | done();
740 | })
741 | .end(new File({ contents: new Buffer('something') }));
742 | });
743 |
744 | it('can clear all the cache', () => {
745 | cache.clearAll();
746 | });
747 |
748 | it('can clear all the cache with Promise', () =>
749 | cache.clearAll()
750 | );
751 | });
752 |
--------------------------------------------------------------------------------