├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── node.js.yml
├── .gitignore
├── .npmignore
├── .prettierrc
├── CHANGELOG.md
├── LICENSE.txt
├── README.md
├── bin
│   └── folder-hash
├── cli.js
├── examples
│   ├── async-example.js
│   ├── ignore-gitignore.js
│   ├── ignore-rootName.js
│   ├── readme-example1.js
│   ├── readme-with-callbacks.js
│   ├── readme-with-promises.js
│   └── sample.js
├── index.js
├── package-lock.json
├── package.json
└── test
    ├── _common.js
    ├── base.js
    ├── files.js
    ├── folders.js
    ├── issue-14.js
    ├── issue-146.js
    ├── parameters.js
    └── symbolic-links.js
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: npm
4 | directory: "/"
5 | schedule:
6 | interval: daily
7 | open-pull-requests-limit: 10
8 | ignore:
9 | - dependency-name: memfs
10 | versions:
11 | - 3.2.1
12 | - dependency-name: prettier
13 | versions:
14 | - 2.2.1
15 | - dependency-name: mocha
16 | versions:
17 | - 8.3.2
18 | - dependency-name: chai
19 | versions:
20 | - 4.3.1
21 | - 4.3.4
22 | - dependency-name: graceful-fs
23 | versions:
24 | - 4.2.5
25 |
--------------------------------------------------------------------------------
/.github/workflows/node.js.yml:
--------------------------------------------------------------------------------
1 | # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs
3 |
4 | name: Test suite
5 |
6 | on:
7 | push:
8 | branches: [ "main" ]
9 | pull_request:
10 | branches: [ "main", "ci" ]
11 |
12 | jobs:
13 | test-linux:
14 | runs-on: ubuntu-latest
15 |
16 | strategy:
17 | matrix:
18 | node-version: [ 18.x, 20.x, 22.x, lts/*, latest ]
19 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
20 |
21 | steps:
22 | - uses: actions/checkout@v4
23 | - name: Use Node.js ${{ matrix.node-version }}
24 | uses: actions/setup-node@v4
25 | with:
26 | node-version: ${{ matrix.node-version }}
27 | cache: 'npm'
28 | - run: npm ci
29 | - run: npm run build --if-present
30 | - run: npm test
31 | - run: npm run cover
32 |
33 | test-win:
34 | runs-on: windows-latest
35 |
36 | strategy:
37 | matrix:
38 | node-version: [ lts/*, latest ]
39 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
40 |
41 | steps:
42 | - uses: actions/checkout@v4
43 | - name: Use Node.js ${{ matrix.node-version }}
44 | uses: actions/setup-node@v4
45 | with:
46 | node-version: ${{ matrix.node-version }}
47 | cache: 'npm'
48 | - run: npm ci
49 | - run: npm run build --if-present
50 | - run: npm test
51 | - run: npm run cover
52 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | sample-folder
3 | doc/**
4 | test_coverage/**
5 | .nyc_output
6 | .npmrc
7 | .idea
8 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | yarn.lock
2 | .circleci
3 | test_coverage
4 | *.tgz
5 | .nyc_output
6 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "trailingComma": "all",
4 | "arrowParens": "avoid",
5 | "printWidth": 100
6 | }
7 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## 4.1.1 (2025-01-07)
4 | - Re-add support for node>=10 #234
5 |
6 | ## 4.1.0 (2025-01-02)
7 | - Pass options to crypto.createHash (#217)
8 | - Update dependencies
9 | - Known issue: Only works on node>=20 #234
10 |
11 | ## 4.0.4 (2023-01-11)
12 | - Fix #176 by removing `graceful-fs` from `cli.js`
13 |
14 | ## 4.0.3 (2023-01-10)
15 | - Fix #146 and remove `graceful-fs`
16 | - Update dependencies
17 |
18 | ## 4.0.2 (2022-02-15)
19 | - Update dependencies
20 | - Minor changes to README
21 |
22 | ## 4.0.1 (2021-03-13)
23 | - Fix sorting of files #90
24 |
25 | ## 4.0.0 (2020-10-12)
26 | - Breaking change: Allow combination of include and exclude rules #67
27 | - Breaking change: Drop support for node < 10.0
28 | - Add options to handle symbolic links, see comment on #41
29 |
30 | ## 3.3.0 (2019-10-07)
31 | - Add command line interface
32 |
33 | ## 3.2.0 (2019-07-28)
34 | - Allow setting `include` and `exclude` as functions in addition to arrays
35 |
36 | ## 3.1.0 (2019-06-13)
37 | - Add `ignoreBasename` option
38 |
39 | ## 3.0.0 (2018-04-12)
40 | - Breaking change: Folders and files are sorted before creating a hash, so the hashes on Linux and Windows 10 are now the same
41 |
42 | ## 2.0.0 (2018-04-10)
43 | - Breaking change: Drop support for node 0.10.x
44 | - Breaking change: Introduce include glob patterns to the options object
45 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 | Copyright (c) 2015 Marc Walter
3 |
4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
5 |
6 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
7 |
8 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Create a hash checksum over a folder or a file.
2 | The hashes are propagated upwards: the hash that is returned for a folder is generated over all the hashes of its children.
3 | The hashes are generated with the _sha1_ algorithm and returned in _base64_ encoding by default.
4 |
5 | Each file returns a name and a hash, and each folder additionally returns an array of children (file or folder elements).
6 |
7 | ## Usage
8 |
9 | First, install folder-hash with `npm install --save folder-hash` or `yarn add folder-hash`.
10 |
11 | ### Simple example
12 |
13 | To see differences from the last version of this package, I would create hashes over all _.js_ and _.json_ files, but ignore everything inside folders starting with a dot as well as the folders _node_modules_ and _test_coverage_. The structure of the options object is documented below.
14 | This example is also stored in [./examples/readme-example1.js](/examples/readme-example1.js).
15 |
16 | ```js
17 | const { hashElement } = require('folder-hash');
18 |
19 | const options = {
20 | folders: { exclude: ['.*', 'node_modules', 'test_coverage'] },
21 | files: { include: ['*.js', '*.json'] },
22 | };
23 |
24 | console.log('Creating a hash over the current folder:');
25 | hashElement('.', options)
26 | .then(hash => {
27 | console.log(hash.toString());
28 | })
29 | .catch(error => {
30 | return console.error('hashing failed:', error);
31 | });
32 | ```
33 |
34 | The returned information looks for example like this:
35 |
36 | ```
37 | Creating a hash over the current folder:
38 | { name: '.', hash: 'YZOrKDx9LCLd8X39PoFTflXGpRU=',
39 | children: [
40 | { name: 'examples', hash: 'aG8wg8np5SGddTnw1ex74PC9EnM=',
41 | children: [
42 | { name: 'readme-example1.js', hash: 'Xlw8S2iomJWbxOJmmDBnKcauyQ8=' }
43 | { name: 'readme-with-callbacks.js', hash: 'ybvTHLCQBvWHeKZtGYZK7+6VPUw=' }
44 | { name: 'readme-with-promises.js', hash: '43i9tE0kSFyJYd9J2O0nkKC+tmI=' }
45 | { name: 'sample.js', hash: 'PRTD9nsZw3l73O/w5B2FH2qniFk=' }
46 | ]}
47 | { name: 'index.js', hash: 'kQQWXdgKuGfBf7ND3rxjThTLVNA=' }
48 | { name: 'package.json', hash: 'w7F0S11l6VefDknvmIy8jmKx+Ng=' }
49 | { name: 'test', hash: 'H5x0JDoV7dEGxI65e8IsencDZ1A=',
50 | children: [
51 | { name: 'parameters.js', hash: '3gCEobqzHGzQiHmCDe5yX8weq7M=' }
52 | { name: 'test.js', hash: 'kg7p8lbaVf1CPtWLAIvkHkdu1oo=' }
53 | ]}
54 | ]}
55 | ```
56 |
57 | And the structure may be traversed, for example to create incremental backups.
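
A minimal sketch of such a traversal (the `visit` helper is hypothetical and not part of this package) could look like this:

```js
const { hashElement } = require('folder-hash');

// Hypothetical helper: walk the returned tree and collect { path, hash } pairs,
// e.g. to compare them against the hashes stored with a previous backup.
function visit(node, parentPath, result = []) {
  const currentPath = parentPath ? `${parentPath}/${node.name}` : node.name;
  result.push({ path: currentPath, hash: node.hash });
  (node.children || []).forEach(child => visit(child, currentPath, result));
  return result;
}

hashElement('.', { folders: { exclude: ['.*', 'node_modules'] } })
  .then(tree => console.table(visit(tree, '')))
  .catch(error => console.error('hashing failed:', error));
```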
58 |
59 | It is also possible to only match the full path and not the basename. The same configuration could look like this:
60 | _You should be aware that \*nix and Windows behave differently, so please use caution._
61 |
62 | ```js
63 | const options = {
64 | folders: {
65 | exclude: ['.*', '**.*', '**node_modules', '**test_coverage'],
66 | matchBasename: false,
67 | matchPath: true,
68 | },
69 | files: {
70 | //include: ['**.js', '**.json' ], // Windows
71 | include: ['*.js', '**/*.js', '*.json', '**/*.json'], // *nix
72 | matchBasename: false,
73 | matchPath: true,
74 | },
75 | };
76 | ```
77 |
78 | ### Parameters for the hashElement function
79 |
80 | | Name | Type | Attributes | Description |
81 | | --- | --- | --- | --- |
82 | | name | string |  | element name or an element's path |
83 | | dir | string | optional | directory that contains the element (generated from name if omitted) |
84 | | options | Object | optional | Options object (see below) |
85 | | callback | fn | optional | Error-first callback function |
86 |
134 | ## Options
135 |
136 | ### Default values
137 |
138 | ```js
139 | {
140 | algo: 'sha1', // see crypto.getHashes() for options in your node.js REPL
141 | algoOptions: {}, // Allowed for some algorithms, see https://nodejs.org/api/crypto.html#cryptocreatehashalgorithm-options
142 | encoding: 'base64', // 'base64', 'base64url', 'hex' or 'binary'
143 | files: {
144 | exclude: [],
145 | include: [],
146 | matchBasename: true,
147 | matchPath: false,
148 | ignoreBasename: false,
149 | ignoreRootName: false
150 | },
151 | folders: {
152 | exclude: [],
153 | include: [],
154 | matchBasename: true,
155 | matchPath: false,
156 | ignoreRootName: false
157 | },
158 | symbolicLinks: {
159 | include: true,
160 | ignoreBasename: false,
161 | ignoreTargetPath: true,
162 | ignoreTargetContent: false,
163 | ignoreTargetContentAfterError: false,
164 | }
165 | }
166 | ```
167 |
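Options you pass are merged with these defaults per section (`files`, `folders` and `symbolicLinks` are shallow-merged, while `algo`, `algoOptions` and `encoding` fall back individually), so a partial options object is enough. A minimal sketch, assuming `sha256` is available in your node build:

```js
const { hashElement } = require('folder-hash');

// Only the keys that are set here are overridden; everything else keeps its
// default, e.g. `files.include` stays [] and `encoding` stays 'base64'.
const options = {
  algo: 'sha256',
  folders: { exclude: ['.*', 'node_modules'] },
};

hashElement('.', options)
  .then(hash => console.log(hash.toString()))
  .catch(error => console.error('hashing failed:', error));
```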
168 |
169 | | Name | Type | Attributes | Default | Description |
170 | | --- | --- | --- | --- | --- |
171 | | algo | string | optional | 'sha1' | checksum algorithm, see options in crypto.getHashes() |
172 | | algoOptions | Object | optional | {} | checksum algorithm options, see https://nodejs.org/api/crypto.html#cryptocreatehashalgorithm-options |
173 | | encoding | string | optional | 'base64' | encoding of the resulting hash. One of 'base64', 'base64url', 'hex' or 'binary' |
174 | | files | Object | optional |  | Rules object (see below) |
175 | | folders | Object | optional |  | Rules object (see below) |
176 | | symbolicLinks | Object | optional |  | Symlink options (see below) |
177 |
257 | #### Rules object properties
258 |
259 | | Name | Type | Attributes | Default | Description |
260 | | --- | --- | --- | --- | --- |
261 | | exclude | Array of strings or Function | optional | [] | Array of optional exclude glob patterns, see the minimatch documentation. Can also be a function which returns true if the passed file is excluded. |
262 | | include | Array of strings or Function | optional | [] | Array of optional include glob patterns, see the minimatch documentation. Can also be a function which returns true if the passed file is included. |
263 | | matchBasename | bool | optional | true | Match the glob patterns to the file/folder name |
264 | | matchPath | bool | optional | false | Match the glob patterns to the file/folder path |
265 | | ignoreBasename | bool | optional | false | Set to true to calculate the hash without the basename element |
266 | | ignoreRootName | bool | optional | false | Set to true to calculate the hash without the basename of the root (first) element |
350 |
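Since `exclude` and `include` can also be functions, arbitrary rules are possible (see examples/ignore-gitignore.js in this repository). A minimal sketch with a made-up predicate:

```js
const { hashElement } = require('folder-hash');

// Hypothetical rule: skip anything that looks like build output.
// With the default matchBasename: true, the predicate receives the basename.
const isBuildArtifact = name => name === 'dist' || name.endsWith('.min.js');

hashElement('.', {
  files: { exclude: isBuildArtifact },
  folders: { exclude: isBuildArtifact },
})
  .then(hash => console.log(hash.toString()))
  .catch(error => console.error('hashing failed:', error));
```
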
351 | ### Symlink options
352 |
353 | Configure how symbolic links should be hashed.
354 | To understand how the options can be combined to create a specific behavior, look into [test/symbolic-links.js](https://github.com/marc136/node-folder-hash/blob/master/test/symbolic-links.js).
355 |
356 | | Name | Type | Default | Description |
357 | | --- | --- | --- | --- |
358 | | include | bool | true | If false, symbolic links are not handled at all. A folder with three symbolic links inside will have no children entries. |
359 | | ignoreBasename | bool | false | Set to true to calculate the hash without the basename element |
360 | | ignoreTargetPath | bool | true | If false, the resolved link target is added to the hash (uses fs.readlink) |
361 | | ignoreTargetContent | bool | false | If true, will only assess the basename and target path (as configured in the other options) |
362 | | ignoreTargetContentAfterError | bool | false | If true, will ignore all errors while trying to hash symbolic links and only assess the basename and target path (as configured in other options), e.g. a missing target (ENOENT) or access permissions (EPERM). |
418 |
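For instance, to make the hash change whenever a link is re-pointed at a different target, you could turn off `ignoreTargetPath`. A minimal sketch with otherwise default settings:

```js
const { hashElement } = require('folder-hash');

// With ignoreTargetPath: false, the resolved link target (fs.readlink) is
// hashed in addition to the link's basename and the target's content.
const options = {
  symbolicLinks: { include: true, ignoreTargetPath: false },
};

hashElement('.', options)
  .then(hash => console.log(hash.toString()))
  .catch(error => console.error('hashing failed:', error));
```
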
419 | ## Command line usage
420 |
421 | After installing it globally via
422 |
423 | ```
424 | $ npm install -g folder-hash
425 | ```
426 |
427 | You can use it like this:
428 |
429 | ```
430 | # local folder
431 | $ folder-hash -c config.json .
432 | # local folder
433 | $ folder-hash
434 | # global folder
435 | $ folder-hash /usr/bin
436 | ```
437 |
438 | It also allows you to pass an optional JSON configuration file with the `-c` or `--config` flag, which should contain the same configuration as when using the JavaScript API.
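
The configuration file is plain JSON and mirrors the options object documented above. A minimal sketch of such a `config.json` (the values are only an example):

```json
{
  "algo": "sha256",
  "encoding": "hex",
  "folders": { "exclude": [".*", "node_modules", "test_coverage"] },
  "files": { "include": ["*.js", "*.json"] }
}
```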
439 |
440 | You can also use a local version of folder-hash like this:
441 |
442 | ```
443 | $ npx folder-hash --help
444 | Use folder-hash on cli like this:
445 |   folder-hash [--config <json-file>] <file-or-folder>
446 | ```
447 |
448 | ## Examples
449 |
450 | ### Other examples using promises
451 |
452 | See file _./examples/readme-with-promises.js_
453 |
454 | ```js
455 | const path = require('path');
456 | const { hashElement } = require('folder-hash');
457 |
458 | // pass element name and folder path separately
459 | hashElement('test', path.join(__dirname, '..'))
460 | .then(hash => {
461 | console.log('Result for folder "../test":', hash.toString(), '\n');
462 | })
463 | .catch(error => {
464 | return console.error('hashing failed:', error);
465 | });
466 |
467 | // pass element path directly
468 | hashElement(__dirname)
469 | .then(hash => {
470 | console.log(`Result for folder "${__dirname}":`);
471 | console.log(hash.toString(), '\n');
472 | })
473 | .catch(error => {
474 | return console.error('hashing failed:', error);
475 | });
476 |
477 | // pass options (example: exclude dotFolders)
478 | const options = { encoding: 'hex', folders: { exclude: ['.*'] } };
479 | hashElement(__dirname, options)
480 | .then(hash => {
481 | console.log('Result for folder "' + __dirname + '" (with options):');
482 | console.log(hash.toString(), '\n');
483 | })
484 | .catch(error => {
485 | return console.error('hashing failed:', error);
486 | });
487 | ```
488 |
489 | ### Other examples using error-first callbacks
490 |
491 | See _./examples/readme-with-callbacks.js_
492 |
493 | ```js
494 | const path = require('path');
495 | const { hashElement } = require('folder-hash');
496 |
497 | // pass element name and folder path separately
498 | hashElement('test', path.join(__dirname, '..'), (error, hash) => {
499 | if (error) {
500 | return console.error('hashing failed:', error);
501 | } else {
502 | console.log('Result for folder "../test":', hash.toString(), '\n');
503 | }
504 | });
505 |
506 | // pass element path directly
507 | hashElement(__dirname, (error, hash) => {
508 | if (error) {
509 | return console.error('hashing failed:', error);
510 | } else {
511 | console.log('Result for folder "' + __dirname + '":');
512 | console.log(hash.toString(), '\n');
513 | }
514 | });
515 |
516 | // pass options (example: exclude dotFiles)
517 | const options = { algo: 'md5', files: { exclude: ['.*'], matchBasename: true } };
518 | hashElement(__dirname, options, (error, hash) => {
519 | if (error) {
520 | return console.error('hashing failed:', error);
521 | } else {
522 | console.log('Result for folder "' + __dirname + '":');
523 | console.log(hash.toString());
524 | }
525 | });
526 |
527 | // pass algoOptions (example: shake256)
528 | // see https://nodejs.org/api/crypto.html#cryptocreatehashalgorithm-options
529 | // only supported in node v12.8 and higher
530 | const options2 = { algo: 'shake256', algoOptions: { outputLength: 5 }, files: { exclude: ['.*'], matchBasename: true } };
531 | hashElement(__dirname, options2, (error, hash) => {
532 | if (error) {
533 | return console.error('hashing failed:', error);
534 | } else {
535 | console.log('Result for folder "' + __dirname + '":');
536 | console.log(hash.toString());
537 | }
538 | });
539 | ```
540 |
541 | ## Behavior
542 |
543 | The behavior is documented and verified in the unit tests. Execute `npm test` or `mocha test`, and have a look at the _test_ subfolder.
544 | You can also have a look at the [test suite report page](https://github.com/marc136/node-folder-hash/actions/workflows/node.js.yml).
545 |
546 | ### Creating hashes over files (with default options)
547 |
548 | **The hashes are the same if:**
549 |
550 | - A file is checked again
551 | - Two files have the same name and content (but exist in different folders)
552 |
553 | **The hashes are different if:**
554 |
555 | - A file was renamed or its content was changed
556 | - Two files have the same name but different content
557 | - Two files have the same content but different names
558 |
559 | ### Creating hashes over folders (with default options)
560 |
561 | In this case, content means a folder's children: both the files and the subfolders with their children.
562 |
563 | **The hashes are the same if:**
564 |
565 | - A folder is checked again
566 | - Two folders have the same name and content (but have different parent folders)
567 |
568 | **The hashes are different if:**
569 |
570 | - A file somewhere in the directory structure was renamed or its content was changed
571 | - Two folders have the same name but different content
572 | - Two folders have the same content but different names
573 |
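A minimal sketch that checks the folder rules above against an in-memory volume (it uses the exported `prep` helper together with the memfs dev dependency, the same way the unit tests do):

```js
const { Volume } = require('memfs');
const { prep } = require('folder-hash');

// Two folders with identical content but different names.
const hashElement = prep(
  Volume.fromJSON({
    'folder1/file1': 'content',
    'folder2/file1': 'content',
  }),
);

Promise.all([hashElement('folder1'), hashElement('folder2')]).then(([a, b]) => {
  console.log('folder hashes equal?', a.hash === b.hash); // false, the names differ
  console.log('child hashes equal?', a.children[0].hash === b.children[0].hash); // true
});
```
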
574 | ## License
575 |
576 | MIT, see LICENSE.txt
577 |
--------------------------------------------------------------------------------
/bin/folder-hash:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | require('../cli')(process.argv.slice(2));
4 |
--------------------------------------------------------------------------------
/cli.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const lib = require('./index');
3 |
4 | function program(cliArgs) {
5 | let args;
6 | try {
7 | args = parseArgs(cliArgs);
8 | } catch (ex) {
9 | error(ex);
10 | }
11 |
12 | if (args.help) {
13 | printHelp();
14 | process.exit(0);
15 | }
16 |
17 | let config;
18 | if (args.config) {
19 | try {
20 | config = JSON.parse(fs.readFileSync(args.config, { encoding: 'utf-8' }));
21 | } catch (err) {
22 | console.error('Could not parse configuration from file ' + args.config);
23 | console.error('Maybe try a JSON config like this instead?\n');
24 | console.error(JSON.stringify(lib.defaults, undefined, 2));
25 | process.exit(1);
26 | }
27 | }
28 |
29 | return lib
30 | .hashElement(args.src || process.cwd(), config)
31 | .then(result => console.log(result.toString()))
32 | .catch(error);
33 | }
34 |
35 | function parseArgs(args) {
36 | let help, config, src;
37 |
38 | for (let index = 0; index < args.length; index++) {
39 | switch (args[index]) {
40 | case '-h':
41 | case '--help':
42 | help = true;
43 | break;
44 |
45 | case '-c':
46 | case '--config':
47 | if (!args[++index]) {
48 | throw new Error(`Need to supply a JSON config file after "${args[index - 1]}"`);
49 | }
50 | config = args[index];
51 | break;
52 |
53 | default:
54 | if (!src) {
55 | src = args[index];
56 | } else {
57 | console.log(`Ignoring param "${args[index]}"`);
58 | }
59 | break;
60 | }
61 | }
62 |
63 | return { help, config, src };
64 | }
65 |
66 | function error(err) {
67 | console.error('ERROR:', err.message || err.name || err);
68 | process.exit(1);
69 | }
70 |
71 | function printHelp() {
72 | console.log('Use folder-hash on cli like this:');
73 | console.log('  folder-hash [--config <json-file>] <file-or-folder>');
74 | }
75 |
76 | module.exports = program;
77 |
--------------------------------------------------------------------------------
/examples/async-example.js:
--------------------------------------------------------------------------------
1 | const { Volume } = require('memfs'),
2 | prep = volume => require('../index').prep(volume, Promise);
3 |
4 | const hashElement = prep(
5 | Volume.fromJSON({
6 | 'abc.txt': 'awesome content',
7 | 'def/ghi.js': 'awesome content',
8 | }),
9 | );
10 |
11 | async function example() {
12 | const options = { files: { ignoreRootName: true } };
13 |
14 | const abc = await hashElement('abc.txt', options);
15 | const def = await hashElement('def/ghi.js', options);
16 |
17 | console.log(`abc.hash == def.hash ? ${abc.hash === def.hash}`);
18 | console.log(` abc.hash ${abc.hash}\n def.hash ${def.hash}`);
19 | }
20 |
21 | example();
22 |
--------------------------------------------------------------------------------
/examples/ignore-gitignore.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Shows how the exclude option can be used with a function.
3 | *
4 | * Real-life usage could be to exclude gitignored files
5 | */
6 |
7 | const { hashElement } = require('../index');
8 | const fs = require('fs');
9 | const ignore = require('ignore');
10 |
11 | const gitignoreContents = fs.readFileSync('../.gitignore').toString().split('\n');
12 | const ig = ignore().add(gitignoreContents);
13 |
14 | function shouldExclude(name) {
15 | return ig.ignores(name);
16 | }
17 |
18 | hashElement('../', {
19 | files: {
20 | exclude: shouldExclude,
21 | },
22 | folders: {
23 | exclude: shouldExclude,
24 | },
25 | }).then(hash => {
26 | console.log('hash of everything that is not gitignored: ', hash);
27 | });
28 |
--------------------------------------------------------------------------------
/examples/ignore-rootName.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Shows how the ignoreRootName option can be used.
3 | *
4 | * Real-life usage could be to compare
5 | * - two folders with the same content but different names (e.g. for backups)
6 | * - if two files have the same content
7 | */
8 |
9 | const { hashElement } = require('../index');
10 |
11 | async function folder(f) {
12 | await hashElement(f, { folders: { ignoreRootName: false } }).then(hash => {
13 | console.log(`hash of folder ${f} when name is not ignored:`, hash);
14 | });
15 |
16 | await hashElement(f, { folders: { ignoreRootName: true } }).then(hash => {
17 | console.log(`hash of folder ${f} when ignoring its name:`, hash);
18 | });
19 | }
20 |
21 | async function file() {
22 | const f = 'ignore-rootName.js';
23 | console.log('\n---\n');
24 |
25 | await hashElement(f, { files: { ignoreRootName: false } }).then(hash => {
26 | console.log(`default hash of file:`, hash);
27 | });
28 |
29 | await hashElement(f, { files: { ignoreRootName: true } }).then(hash => {
30 | console.log(`hash of file when ignoring its name:`, hash);
31 | });
32 | }
33 |
34 | folder('../test').then(file);
35 |
--------------------------------------------------------------------------------
/examples/readme-example1.js:
--------------------------------------------------------------------------------
1 | // execute from the base folder
2 | // node examples\readme-example1.js
3 |
4 | const { hashElement } = require('../index.js');
5 |
6 | const options = {
7 | folders: { exclude: ['.*', 'node_modules', 'test_coverage'] },
8 | files: { include: ['*.js', '*.json'] },
9 | };
10 |
11 | const options2 = {
12 | folders: {
13 | exclude: ['.*', '**.*', '**node_modules', '**test_coverage'],
14 | matchBasename: false,
15 | matchPath: true,
16 | },
17 | files: {
18 | //include: ['**.js', '**.json' ], // Windows
19 | include: ['*.js', '**/*.js', '*.json', '**/*.json'], // *nix
20 | matchBasename: false,
21 | matchPath: true,
22 | },
23 | };
24 |
25 | console.log('Creating a hash over the current folder:');
26 | hashElement('.', options)
27 | .then(hash => {
28 | console.log(hash.toString(), '\n');
29 | })
30 | .catch(error => {
31 | return console.error('hashing failed:', error);
32 | });
33 |
--------------------------------------------------------------------------------
/examples/readme-with-callbacks.js:
--------------------------------------------------------------------------------
1 | // execute from the base folder
2 | // node examples\readme-with-callbacks.js
3 |
4 | const path = require('path');
5 | const { hashElement } = require('../index.js');
6 |
7 | // pass element name and folder path separately
8 | hashElement('test', path.join(__dirname, '..'), (error, hash) => {
9 | if (error) {
10 | return console.error('hashing failed:', error);
11 | } else {
12 | console.log('Result for folder "../test":', hash.toString(), '\n');
13 | }
14 | });
15 |
16 | // pass element path directly
17 | hashElement(__dirname, (error, hash) => {
18 | if (error) {
19 | return console.error('hashing failed:', error);
20 | } else {
21 | console.log('Result for folder "' + __dirname + '":');
22 | console.log(hash.toString(), '\n');
23 | }
24 | });
25 |
26 | // pass options (example: exclude dotFiles)
27 | const options = { algo: 'md5', files: { exclude: ['.*'], matchBasename: true } };
28 | hashElement(__dirname, options, (error, hash) => {
29 | if (error) {
30 | return console.error('hashing failed:', error);
31 | } else {
32 | console.log('Result for folder "' + __dirname + '":');
33 | console.log(hash.toString());
34 | }
35 | });
36 |
--------------------------------------------------------------------------------
/examples/readme-with-promises.js:
--------------------------------------------------------------------------------
1 | // execute from the base folder
2 | // node examples\readme-with-promises.js
3 |
4 | const path = require('path');
5 | const { hashElement } = require('../index.js');
6 |
7 | // pass element name and folder path separately
8 | hashElement('test', path.join(__dirname, '..'))
9 | .then(hash => {
10 | console.log('Result for folder "../test":', hash.toString(), '\n');
11 | })
12 | .catch(error => {
13 | return console.error('hashing failed:', error);
14 | });
15 |
16 | // pass element path directly
17 | hashElement(__dirname)
18 | .then(hash => {
19 | console.log(`Result for folder "${__dirname}":`);
20 | console.log(hash.toString(), '\n');
21 | })
22 | .catch(error => {
23 | return console.error('hashing failed:', error);
24 | });
25 |
26 | // pass options (example: exclude dotFolders)
27 | const options = { encoding: 'hex', folders: { exclude: ['.*'] } };
28 | hashElement(__dirname, options)
29 | .then(hash => {
30 | console.log('Result for folder "' + __dirname + '" (with options):');
31 | console.log(hash.toString(), '\n');
32 | })
33 | .catch(error => {
34 | return console.error('hashing failed:', error);
35 | });
36 |
--------------------------------------------------------------------------------
/examples/sample.js:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto'),
2 | path = require('path');
3 |
4 | const hashFolder = require('../index.js');
5 |
6 | console.log(`Known hash algorithms:\n'${crypto.getHashes().join(`', '`)}'\n`);
7 |
8 | const dir = path.resolve(__dirname, '../');
9 |
10 | hashFolder
11 | .hashElement('README.md', dir)
12 | .then(result => {
13 | console.log('\nCreated a hash over a single file:');
14 | console.log(result.toString());
15 | })
16 | .catch(reason => {
17 | console.error(`\nPromise rejected due to:\n${reason}\n\n`);
18 | });
19 |
20 | hashFolder.hashElement(
21 | dir,
22 | {
23 | files: { exclude: ['.*'], matchBasename: true },
24 | folders: { include: ['examples', 'test'], matchBasename: true },
25 | },
26 | (err, result) => {
27 | if (err) {
28 | console.error(`\nFailed to create a hash due to:\n${err}`);
29 | } else {
30 | console.log('\nCreated a hash over a folder:');
31 | console.log(result.toString());
32 | }
33 | },
34 | );
35 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto'),
2 | debug = require('debug'),
3 | minimatch = require('minimatch'),
4 | path = require('path');
5 |
6 | const defaultOptions = {
7 | algo: 'sha1', // see crypto.getHashes() for options
8 | algoOptions: {},
9 | encoding: 'base64', // 'base64', 'base64url', 'hex' or 'binary'
10 | files: {
11 | exclude: [],
12 | include: [],
13 | matchBasename: true,
14 | matchPath: false,
15 | ignoreBasename: false,
16 | ignoreRootName: false,
17 | },
18 | folders: {
19 | exclude: [],
20 | include: [],
21 | matchBasename: true,
22 | matchPath: false,
23 | ignoreBasename: false,
24 | ignoreRootName: false,
25 | },
26 | symbolicLinks: {
27 | include: true,
28 | ignoreBasename: false,
29 | ignoreTargetPath: true,
30 | ignoreTargetContent: false,
31 | ignoreTargetContentAfterError: false,
32 | },
33 | };
34 |
35 | // Use the environment variable DEBUG to log output, e.g. `set DEBUG=fhash:*`
36 | const log = {
37 | match: debug('fhash:match'),
38 | params: params => {
39 | debug('fhash:parameters')(params);
40 | return params;
41 | },
42 | err: debug('fhash:err'),
43 | symlink: debug('fhash:symlink'),
44 | queue: debug('fhash:queue'),
45 | glob: debug('fhash:glob'),
46 | };
47 |
48 | function prep(fs) {
49 | let queue = [];
50 | let queueTimer = undefined;
51 |
52 | function hashElement(name, dir, options, callback) {
53 | callback = arguments[arguments.length - 1];
54 |
55 | return parseParameters(arguments)
56 | .then(({ basename, dir, options }) => {
57 | // this is only used for the root level
58 | options.skipMatching = true;
59 | return fs.promises
60 | .lstat(path.join(dir, basename))
61 | .then(stats => {
62 | stats.name = basename;
63 | return stats;
64 | })
65 | .then(stats => hashElementPromise(stats, dir, options, true));
66 | })
67 | .then(result => {
68 | if (isFunction(callback)) {
69 | return callback(undefined, result);
70 | } else {
71 | return result;
72 | }
73 | })
74 | .catch(reason => {
75 | log.err('Fatal error:', reason);
76 | if (isFunction(callback)) {
77 | return callback(reason);
78 | } else {
79 | throw reason;
80 | }
81 | });
82 | }
83 |
84 | /**
85 | * @param {fs.Stats} stats folder element, can also be of type fs.Dirent
86 | * @param {string} dirname
87 | * @param {Options} options
88 | * @param {boolean} isRootElement
89 | */
90 | function hashElementPromise(stats, dirname, options, isRootElement = false) {
91 | const name = stats.name;
92 | let promise = undefined;
93 | if (stats.isDirectory()) {
94 | promise = hashFolderPromise(name, dirname, options, isRootElement);
95 | } else if (stats.isFile()) {
96 | promise = hashFilePromise(name, dirname, options, isRootElement);
97 | } else if (stats.isSymbolicLink()) {
98 | promise = hashSymLinkPromise(name, dirname, options, isRootElement);
99 | } else {
100 | log.err('hashElementPromise cannot handle ', stats);
101 | return Promise.resolve({ name, hash: 'Error: unknown element type' });
102 | }
103 |
104 | return promise.catch(err => {
105 | if (err.code && (err.code === 'EMFILE' || err.code === 'ENFILE')) {
106 | log.queue(`queued ${dirname}/${name} because of ${err.code}`);
107 |
108 | const promise = new Promise((resolve, reject) => {
109 | queue.push(() => {
110 | log.queue(`Will process queued ${dirname}/${name}`);
111 | return hashElementPromise(stats, dirname, options, isRootElement)
112 | .then(ok => resolve(ok))
113 | .catch(err => reject(err));
114 | });
115 | });
116 |
117 | if (queueTimer === undefined) {
118 | queueTimer = setTimeout(processQueue, 0);
119 | }
120 | return promise;
121 | }
122 |
123 | throw err;
124 | });
125 | }
126 |
127 | function processQueue() {
128 | queueTimer = undefined;
129 | const runnables = queue;
130 | queue = [];
131 | runnables.forEach(run => run());
132 | }
133 |
134 | async function hashFolderPromise(name, dir, options, isRootElement = false) {
135 | const folderPath = path.join(dir, name);
136 | let ignoreBasenameOnce = options.ignoreBasenameOnce;
137 | delete options.ignoreBasenameOnce;
138 |
139 | if (options.skipMatching) {
140 | // this is currently only used for the root folder
141 | log.match(`skipped '${folderPath}'`);
142 | delete options.skipMatching;
143 | } else if (ignore(name, folderPath, options.folders)) {
144 | return undefined;
145 | }
146 |
147 | const files = await fs.promises.readdir(folderPath, { withFileTypes: true });
148 | const children = await Promise.all(
149 | files
150 | .sort((a, b) => a.name.localeCompare(b.name))
151 | .map(child => hashElementPromise(child, folderPath, options)),
152 | );
153 |
154 | if (ignoreBasenameOnce) options.ignoreBasenameOnce = true;
155 | const hash = new HashedFolder(name, children.filter(notUndefined), options, isRootElement);
156 | return hash;
157 | }
158 |
159 | function hashFilePromise(name, dir, options, isRootElement = false) {
160 | const filePath = path.join(dir, name);
161 |
162 | if (options.skipMatching) {
163 | // this is currently only used for the root folder
164 | log.match(`skipped '${filePath}'`);
165 | delete options.skipMatching;
166 | } else if (ignore(name, filePath, options.files)) {
167 | return Promise.resolve(undefined);
168 | }
169 |
170 | return new Promise((resolve, reject) => {
171 | try {
172 | const hash = crypto.createHash(options.algo, options.algoOptions);
173 | if (
174 | options.files.ignoreBasename ||
175 | options.ignoreBasenameOnce ||
176 | (isRootElement && options.files.ignoreRootName)
177 | ) {
178 | delete options.ignoreBasenameOnce;
179 | log.match(`omitted name of ${filePath} from hash`);
180 | } else {
181 | hash.update(name);
182 | }
183 |
184 | const f = fs.createReadStream(filePath);
185 | f.on('error', err => {
186 | reject(err);
187 | });
188 | f.pipe(hash, { end: false });
189 |
190 | f.on('end', () => {
191 | const hashedFile = new HashedFile(name, hash, options.encoding);
192 | return resolve(hashedFile);
193 | });
194 | } catch (ex) {
195 | return reject(ex);
196 | }
197 | });
198 | }
199 |
200 | async function hashSymLinkPromise(name, dir, options, isRootElement = false) {
201 | const target = await fs.promises.readlink(path.join(dir, name));
202 | log.symlink(`handling symbolic link ${name} -> ${target}`);
203 | if (options.symbolicLinks.include) {
204 | if (options.symbolicLinks.ignoreTargetContent) {
205 | return symLinkIgnoreTargetContent(name, target, options, isRootElement);
206 | } else {
207 | return symLinkResolve(name, dir, target, options, isRootElement);
208 | }
209 | } else {
210 | log.symlink('skipping symbolic link');
211 | return Promise.resolve(undefined);
212 | }
213 | }
214 |
215 | function symLinkIgnoreTargetContent(name, target, options, isRootElement) {
216 | delete options.skipMatching; // only used for the root level
217 | log.symlink('ignoring symbolic link target content');
218 | const hash = crypto.createHash(options.algo, options.algoOptions);
219 | if (!options.symbolicLinks.ignoreBasename && !(isRootElement && options.files.ignoreRootName)) {
220 | log.symlink('hash basename');
221 | hash.update(name);
222 | }
223 | if (!options.symbolicLinks.ignoreTargetPath) {
224 | log.symlink('hash targetpath');
225 | hash.update(target);
226 | }
227 | return Promise.resolve(new HashedFile(name, hash, options.encoding));
228 | }
229 |
230 | async function symLinkResolve(name, dir, target, options, isRootElement) {
231 | delete options.skipMatching; // only used for the root level
232 | if (options.symbolicLinks.ignoreBasename) {
233 | options.ignoreBasenameOnce = true;
234 | }
235 |
236 | try {
237 | const stats = await fs.promises.stat(path.join(dir, name));
238 | stats.name = name;
239 | const temp = await hashElementPromise(stats, dir, options, isRootElement);
240 |
241 | if (!options.symbolicLinks.ignoreTargetPath) {
242 | const hash = crypto.createHash(options.algo, options.algoOptions);
243 | hash.update(temp.hash);
244 | log.symlink('hash targetpath');
245 | hash.update(target);
246 | temp.hash = hash.digest(options.encoding);
247 | }
248 | return temp;
249 | } catch (err) {
250 | if (options.symbolicLinks.ignoreTargetContentAfterError) {
251 | log.symlink(`Ignoring error "${err.code}" when hashing symbolic link ${name}`, err);
252 | const hash = crypto.createHash(options.algo, options.algoOptions);
253 | if (
254 | !options.symbolicLinks.ignoreBasename &&
255 | !(isRootElement && options.files.ignoreRootName)
256 | ) {
257 | hash.update(name);
258 | }
259 | if (!options.symbolicLinks.ignoreTargetPath) {
260 | hash.update(target);
261 | }
262 | return new HashedFile(name, hash, options.encoding);
263 | } else {
264 | log.symlink(`Error "${err.code}": When hashing symbolic link ${name}`, err);
265 | throw err;
266 | }
267 | }
268 | }
269 |
270 | function ignore(name, path, rules) {
271 | if (rules.exclude) {
272 | if (rules.matchBasename && rules.exclude(name)) {
273 | log.match(`exclude basename '${name}'`);
274 | return true;
275 | } else if (rules.matchPath && rules.exclude(path)) {
276 | log.match(`exclude path '${path}'`);
277 | return true;
278 | }
279 | }
280 | if (rules.include) {
281 | if (rules.matchBasename && rules.include(name)) {
282 | log.match(`include basename '${name}'`);
283 | return false;
284 | } else if (rules.matchPath && rules.include(path)) {
285 | log.match(`include path '${path}'`);
286 | return false;
287 | } else {
288 | log.match(`include rule failed for path '${path}'`);
289 | return true;
290 | }
291 | }
292 |
293 | log.match(`Will not ignore unmatched '${path}'`);
294 | return false;
295 | }
296 |
297 | return hashElement;
298 | }
299 |
300 | function parseParameters(args) {
301 | let basename = args[0],
302 | dir = args[1],
303 | options_ = args[2];
304 |
305 | if (!isString(basename)) {
306 | return Promise.reject(new TypeError('First argument must be a string'));
307 | }
308 |
309 | if (!isString(dir)) {
310 | dir = path.dirname(basename);
311 | basename = path.basename(basename);
312 | options_ = args[1];
313 | }
314 |
315 | // parse options (fallback default options)
316 | if (!isObject(options_)) options_ = {};
317 | const options = {
318 | algo: options_.algo || defaultOptions.algo,
319 | algoOptions: options_.algoOptions || defaultOptions.algoOptions,
320 | encoding: options_.encoding || defaultOptions.encoding,
321 | files: Object.assign({}, defaultOptions.files, options_.files),
322 | folders: Object.assign({}, defaultOptions.folders, options_.folders),
323 | match: Object.assign({}, defaultOptions.match, options_.match),
324 | symbolicLinks: Object.assign({}, defaultOptions.symbolicLinks, options_.symbolicLinks),
325 | };
326 |
327 | // transform match globs to Regex
328 | options.files.exclude = reduceGlobPatterns(options.files.exclude, 'exclude files');
329 | options.files.include = reduceGlobPatterns(options.files.include, 'include files');
330 | options.folders.exclude = reduceGlobPatterns(options.folders.exclude, 'exclude folders');
331 | options.folders.include = reduceGlobPatterns(options.folders.include, 'include folders');
332 |
333 | return Promise.resolve(log.params({ basename, dir, options }));
334 | }
335 |
336 | const HashedFolder = function HashedFolder(name, children, options, isRootElement = false) {
337 | this.name = name;
338 | this.children = children;
339 |
340 | const hash = crypto.createHash(options.algo, options.algoOptions);
341 | if (
342 | options.folders.ignoreBasename ||
343 | options.ignoreBasenameOnce ||
344 | (isRootElement && options.folders.ignoreRootName)
345 | ) {
346 | delete options.ignoreBasenameOnce;
347 | log.match(`omitted name of folder ${name} from hash`);
348 | } else {
349 | hash.update(name);
350 | }
351 | children.forEach(child => {
352 | if (child.hash) {
353 | hash.update(child.hash);
354 | }
355 | });
356 |
357 | this.hash = hash.digest(options.encoding);
358 | };
359 |
360 | HashedFolder.prototype.toString = function (padding = '') {
361 | const first = `${padding}{ name: '${this.name}', hash: '${this.hash}',\n`;
362 | padding += ' ';
363 |
364 | return `${first}${padding}children: ${this.childrenToString(padding)}}`;
365 | };
366 |
367 | HashedFolder.prototype.childrenToString = function (padding = '') {
368 | if (this.children.length === 0) {
369 | return '[]';
370 | } else {
371 | const nextPadding = padding + ' ';
372 | const children = this.children.map(child => child.toString(nextPadding)).join('\n');
373 | return `[\n${children}\n${padding}]`;
374 | }
375 | };
376 |
377 | const HashedFile = function HashedFile(name, hash, encoding) {
378 | this.name = name;
379 | this.hash = hash.digest(encoding);
380 | };
381 |
382 | HashedFile.prototype.toString = function (padding = '') {
383 | return padding + "{ name: '" + this.name + "', hash: '" + this.hash + "' }";
384 | };
385 |
386 | function isFunction(any) {
387 | return typeof any === 'function';
388 | }
389 |
390 | function isString(str) {
391 | return typeof str === 'string' || str instanceof String;
392 | }
393 |
394 | function isObject(obj) {
395 | return obj !== null && typeof obj === 'object';
396 | }
397 |
398 | function notUndefined(obj) {
399 | return typeof obj !== 'undefined';
400 | }
401 |
402 | function reduceGlobPatterns(globs, name) {
403 | if (isFunction(globs)) {
404 | log.glob(`Using function to ${name}`);
405 | return globs;
406 | } else if (!globs || !Array.isArray(globs) || globs.length === 0) {
407 | return undefined;
408 | } else {
409 | // combine globs into one single RegEx
410 | const regex = new RegExp(
411 | globs
412 | .reduce((acc, exclude) => {
413 | return acc + '|' + minimatch.makeRe(exclude).source;
414 | }, '')
415 | .substr(1),
416 | );
417 | log.glob(`Reduced glob patterns to ${name}`, { from: globs, to: regex });
418 | return param => regex.test(param);
419 | }
420 | }
421 |
422 | module.exports = {
423 | defaults: defaultOptions,
424 | hashElement: prep(require('fs')),
425 | // exposed for testing
426 | prep,
427 | parseParameters,
428 | };
429 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "folder-hash",
3 | "version": "4.1.1",
4 | "description": "Create a hash checksum over a folder and its content - its children and their content",
5 | "main": "index.js",
6 | "bin": {
7 | "folder-hash": "bin/folder-hash"
8 | },
9 | "scripts": {
10 | "start": "node sample.js",
11 | "test": "mocha --reporter spec test",
12 | "cover": "nyc mocha test",
13 | "format": "prettier --write *.js examples/ test/",
14 | "doc": "./node_modules/.bin/jsdoc index.js -R README.md -d doc"
15 | },
16 | "author": {
17 | "name": "Marc Walter",
18 | "email": "walter.marc@outlook.com"
19 | },
20 | "license": "MIT",
21 | "files": [
22 | "cli.js",
23 | "index.js",
24 | "bin"
25 | ],
26 | "repository": {
27 | "type": "git",
28 | "url": "https://github.com/marc136/node-folder-hash.git"
29 | },
30 | "nyc": {
31 | "reporter": [
32 | "lcov",
33 | "text"
34 | ],
35 | "report-dir": "test_coverage"
36 | },
37 | "dependencies": {
38 | "debug": "4.4.0",
39 | "minimatch": "7.4.6"
40 | },
41 | "devDependencies": {
42 | "chai": "^4.3.6",
43 | "chai-as-promised": "^7.1.1",
44 | "clone": "^2.1.2",
45 | "ignore": "^5.2.0",
46 | "jsdoc": "4.0.2",
47 | "memfs": "^3.4.1",
48 | "mocha": "11.0.1",
49 | "nyc": "17.1.0",
50 | "prettier": "~2.8.2"
51 | },
52 | "engines": {
53 | "node": ">=10.10.0"
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/test/_common.js:
--------------------------------------------------------------------------------
1 | const clone = require('clone'),
2 | { Volume } = require('memfs'),
3 | assert = require('assert'),
4 | chai = require('chai'),
5 | chaiAsPromised = require('chai-as-promised'),
6 | should = chai.should(),
7 | inspect = obj => console.log(require('util').inspect(obj, false, null));
8 |
9 | chai.use(chaiAsPromised);
10 |
11 | const folderHash = require('../index'),
12 | prep = volume => folderHash.prep(volume);
13 |
14 | const defaultOptions = () => clone(folderHash.defaults);
15 |
16 | module.exports = {
17 | folderHash,
18 | prep,
19 | Volume,
20 | chai,
21 | should,
22 | inspect,
23 | defaultOptions,
24 | };
25 |
--------------------------------------------------------------------------------
/test/base.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const { prep, Volume, should, inspect } = require('./_common');
3 |
4 | describe('Should generate hashes', function () {
5 | const json = {};
6 | const dir = 'folder',
7 | basename = 'file1';
8 | json[path.join(dir, basename)] = 'file content';
9 | const hashElement = prep(Volume.fromJSON(json));
10 |
11 | const checkHash = result => {
12 | should.exist(result);
13 | should.exist(result.hash);
14 | result.hash.should.equal('11OqJSEmDW280Sst6dycitwlfCI=');
15 | };
16 |
17 | describe('when called as a promise', function () {
18 | it('with element and folder passed as two strings', function () {
19 | return hashElement(basename, dir).then(checkHash);
20 | });
21 |
22 | it('with element path passed as one string', function () {
23 | return hashElement(path.join(dir, basename)).then(checkHash);
24 | });
25 |
26 | it('with options passed', function () {
27 | var options = {
28 | algo: 'sha1',
29 | encoding: 'base64',
30 | excludes: [],
31 | match: {
32 | basename: false,
33 | path: false,
34 | },
35 | };
36 | return hashElement(basename, dir, options).then(checkHash);
37 | });
38 |
39 | it('with algoOptions passed', function () {
40 | const checkAlgoOptionHash = result => {
41 | should.exist(result);
42 | should.exist(result.hash);
43 | result.hash.should.equal('d89f885449');
44 | };
45 |
46 | var options = {
47 | algo: 'shake256',
48 | algoOptions: { outputLength: 5 },
49 | encoding: 'hex',
50 | excludes: [],
51 | match: {
52 | basename: false,
53 | path: false,
54 | },
55 | };
56 | return hashElement(basename, dir, options).then(checkAlgoOptionHash);
57 | });
58 | });
59 |
60 | describe('when executed with an error-first callback', function () {
61 | it('with element and folder passed as two strings', function () {
62 | return hashElement(basename, dir, function (err, result) {
63 | should.not.exist(err);
64 | checkHash(result);
65 | });
66 | });
67 |
68 | it('with element path passed as one string', function () {
69 | return hashElement(path.join(dir, basename), function (err, result) {
70 | should.not.exist(err);
71 | checkHash(result);
72 | });
73 | });
74 |
75 | it('with options passed', function () {
76 | var options = {
77 | algo: 'sha1',
78 | encoding: 'base64',
79 | excludes: [],
80 | match: {
81 | basename: false,
82 | path: false,
83 | },
84 | };
85 | return hashElement(path.join(dir, basename), options, function (err, result) {
86 | should.not.exist(err);
87 | checkHash(result);
88 | });
89 | });
90 | });
91 |
92 | describe('and', function () {
93 | it('should return a string representation', function () {
94 | const fs = Volume.fromJSON({ 'folder/file.txt': 'content' });
95 | fs.mkdirSync('folder/empty_folder');
96 |
97 | return prep(fs)('folder').then(hash => {
98 | should.exist(hash);
99 | const str = hash.toString();
100 | should.exist(str);
101 | should.equal(str.length > 10, true);
102 | });
103 | });
104 | });
105 | });
106 |
--------------------------------------------------------------------------------
/test/files.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const { prep, Volume, should, inspect } = require('./_common');
3 |
4 | describe('Generating hashes over files, it', function () {
5 | it('should return the same hash if a file was not changed', function () {
6 | const file = path.join('folder', 'file');
7 | const fs = Volume.fromJSON({ file: 'content' }, 'folder');
8 | const hash = prep(fs);
9 |
10 | return hash('file', 'folder').then(hash1 => {
11 | fs.writeFileSync(file, 'content');
12 | return hash('file', 'folder').then(result => {
13 | result.hash.should.equal(hash1.hash);
14 | });
15 | });
16 | });
17 |
18 | it('should return the same hash if a file has the same name and content, but exists in a different folder', function () {
19 | const json = {};
20 | json[path.join('folder one', 'file.txt')] = 'not empty';
21 | json[path.join('another folder', 'file.txt')] = 'not empty';
22 | const hash = prep(Volume.fromJSON(json));
23 |
24 | return Promise.all([
25 | hash(path.join('folder one', 'file.txt')),
26 | hash(path.join('another folder', 'file.txt')),
27 | ]).then(results => results[0].hash.should.equal(results[1].hash));
28 | });
29 |
30 | it('should return a different hash if the file has the same name but a different content', function () {
31 | const json = {};
32 | json[path.join('folder1', 'file.txt')] = '1st file';
33 | json[path.join('folder2', 'file.txt')] = '2nd file';
34 | const hash = prep(Volume.fromJSON(json));
35 |
36 | return Promise.all([hash('file.txt', 'folder1'), hash('file.txt', 'folder2')]).then(results =>
37 | results[0].hash.should.not.equal(results[1].hash),
38 | );
39 | });
40 |
41 | it('should return a different hash if the file has the same content but a different name', function () {
42 | const hash = prep(Volume.fromJSON({ one: 'content', two: 'content' }));
43 | return Promise.all([hash('one'), hash('two')]).then(results => {
44 | return results[0].hash.should.not.equal(results[1].hash);
45 | });
46 | });
47 |
48 | it('generates the same hash if only the name differs and ignoreRootName is set', function () {
49 | const hashElement = prep(
50 | Volume.fromJSON({
51 | 'abc.txt': 'awesome content',
52 | 'def/ghi.js': 'awesome content',
53 | }),
54 | );
55 | const options = { files: { ignoreRootName: true } };
56 |
57 | return Promise.all([hashElement('abc.txt', options), hashElement('def/ghi.js', options)]).then(
58 | function (hashes) {
59 | return hashes[0].hash.should.equal(hashes[1].hash);
60 | },
61 | );
62 | });
63 |
64 | it('generates the same hash if ignoreBasename is true and the files have the same content', function () {
65 | const hashElement = prep(
66 | Volume.fromJSON({
67 | abc: 'awesome content',
68 | 'def/ghi.js': 'awesome content',
69 | }),
70 | );
71 | const options = { files: { ignoreBasename: true } };
72 | return Promise.all([hashElement('abc', options), hashElement('def/ghi.js', options)]).then(
73 | function (hashes) {
74 | return hashes[0].hash.should.equal(hashes[1].hash);
75 | },
76 | );
77 | });
78 | });
79 |
--------------------------------------------------------------------------------
/test/folders.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 | const { prep, Volume, should, inspect } = require('./_common');
3 |
4 | describe('Generating a hash over a folder, it', function () {
5 | function recAssertHash(hash) {
6 | assert.ok(hash.hash);
7 | if (hash.children && hash.children.length > 0) {
8 | hash.children.forEach(recAssertHash);
9 | }
10 | }
11 |
12 | it('generates a hash over the folder name and over the combination hashes of all its children', function () {
13 | const hashElement = prep(
14 | Volume.fromJSON({
15 | 'abc/def': 'abc/def',
16 | 'abc/ghi/jkl/file.js': 'content',
17 | 'abc/ghi/jkl/file2.js': 'content',
18 | 'abc/ghi/folder/data.json': 'content',
19 | 'abc/ghi/folder/subfolder/today.log': 'content',
20 | }),
21 | );
22 |
23 | const checkChildren = current => {
24 | should.exist(current.hash);
25 | if (current.children && current.children.length > 0) {
26 | current.children.forEach(checkChildren);
27 | }
28 | };
29 |
30 | return hashElement('abc').then(checkChildren);
31 | });
32 |
33 | it('ignores things with an exclude function', function () {
34 | const hashElement = prep(
35 | Volume.fromJSON({
36 | 'abc/def': 'abc/def',
37 | 'abc/ghi/jkl/file.js': 'content',
38 | 'abc/ghi/jkl/file2.js': 'content',
39 | 'abc/ghi/folder/data.json': 'content',
40 | 'abc/ghi/folder/subfolder/today.log': 'content',
41 | }),
42 | );
43 |
44 | const options = {
45 | folders: {
46 | exclude: path => path.includes('ghi'),
47 | },
48 | };
49 |
50 | const checkChildren = current => {
51 | current.children.length.should.equal(1);
52 | current.children[0].name.should.equal('def');
53 | };
54 |
55 | return hashElement('abc', options).then(checkChildren);
56 | });
57 |
58 | it('also allows to hash `../`', async function () {
59 | const hashElement = prep(
60 | Volume.fromJSON({
61 | 'abc/def': 'abc/def',
62 | 'abc/ghi/jkl/file.js': 'content',
63 | 'abc/ghi/jkl/file2.js': 'content',
64 | 'abc/ghi/folder/data.json': 'content',
65 | 'abc/ghi/folder/subfolder/today.log': 'content',
66 | }),
67 | );
68 |
69 | const options = {
70 | folders: {
71 | exclude: ['jkl'],
72 | },
73 | };
74 |
75 | const checkChildren = current => {
76 | current.children.length.should.equal(1);
77 | current.children[0].name.should.equal('folder');
78 | };
79 |
80 | await hashElement('abc/ghi', options).then(checkChildren);
81 |
82 | await hashElement('../', 'abc/ghi/jkl', options).then(checkChildren);
83 | });
84 |
85 | it('generates different hashes if the folders have the same content but different names', function () {
86 | const hashElement = prep(
87 | Volume.fromJSON({
88 | 'folder1/file1': 'content',
89 | '2nd folder/file1': 'content',
90 | }),
91 | );
92 |
93 | return Promise.all([hashElement('folder1'), hashElement('2nd folder')]).then(
94 | ([first, second]) => {
95 | should.exist(first.hash);
96 | first.hash.should.not.equal(second.hash);
97 | should.exist(first.children[0].hash);
98 | first.children[0].hash.should.equal(second.children[0].hash);
99 | },
100 | );
101 | });
102 |
103 | it('generates different hashes if the folders have the same name but different content (one file content changed)', function () {
104 | const hashElement = prep(
105 | Volume.fromJSON({
106 | 'folder1/folder2/file1': 'content',
107 | '2nd folder/file1': 'content',
108 | }),
109 | );
110 |
111 | return Promise.all([hashElement('folder1'), hashElement('2nd folder')]).then(
112 | ([first, second]) => {
113 | should.exist(first.hash);
114 | first.hash.should.not.equal(second.hash);
115 | },
116 | );
117 | });
118 |
119 | it('generates the same hash if the folders have the same name and the same content', function () {
120 | const hashElement = prep(
121 | Volume.fromJSON({
122 | 'first/file1': 'content',
123 | 'first/folder/file2': 'abc',
124 | 'first/folder/file3': 'abcd',
125 | '2nd/folder/first/file1': 'content',
126 | '2nd/folder/first/folder/file2': 'abc',
127 | '2nd/folder/first/folder/file3': 'abcd',
128 | }),
129 | );
130 |
131 | return Promise.all([
132 | hashElement('first'),
133 | hashElement('first', path.join('2nd', 'folder')),
134 | ]).then(([first, second]) => {
135 | should.exist(first.hash);
136 | first.hash.should.equal(second.hash);
137 | });
138 | });
139 |
140 | it('generates the same hash if the folders have the same content but different file order', function () {
141 | const hashElement = prep(
142 | Volume.fromJSON({
143 | 'first/file1': 'content',
144 | 'first/folder/file2': 'abc',
145 | 'first/folder/file3': 'abcd',
146 | '2nd/folder/first/file1': 'content',
147 | '2nd/folder/first/folder/file3': 'abcd',
148 | '2nd/folder/first/folder/file2': 'abc',
149 | }),
150 | );
151 |
152 | return Promise.all([
153 | hashElement('first'),
154 | hashElement('first', path.join('2nd', 'folder')),
155 | ]).then(([first, second]) => {
156 | should.exist(first.hash);
157 | first.hash.should.equal(second.hash);
158 | });
159 | });
160 |
161 | it('generates the same hash if the only file with different content is ignored', function () {
162 | const hashElement = prep(
163 | Volume.fromJSON({
164 | 'base/file1': 'content',
165 | 'base/folder/file2': 'abc',
166 | 'base/folder/file3': 'abcd',
167 | '2nd/base/file1': 'content',
168 | '2nd/base/folder/file2': 'another content',
169 | '2nd/base/folder/file3': 'abcd',
170 | '3rd/base/file1': 'content',
171 | '3rd/base/dummy': '',
172 | '3rd/base/folder/file3': 'abcd',
173 | }),
174 | );
175 |
176 | return Promise.all([
177 | hashElement('base', {
178 | files: {
179 | exclude: ['**/file2', '**file2'],
180 | matchBasename: false,
181 | matchPath: true,
182 | },
183 | }),
184 | hashElement(path.join('2nd', 'base'), {
185 | files: {
186 | exclude: ['file2'],
187 | matchBasename: true,
188 | matchPath: false,
189 | },
190 | }),
191 | hashElement('base', '3rd', {
192 | files: {
193 | exclude: ['dummy'],
194 | matchBasename: true,
195 | matchPath: false,
196 | },
197 | }),
198 | ]).then(result => {
199 | should.exist(result[0].hash);
200 | result[0].hash.should.equal(result[1].hash);
201 | result[1].hash.should.equal(result[2].hash);
202 | });
203 | });
204 |
205 | it('generates the same hash if all differences are ignored', function () {
206 | const hashElement = prep(
207 | Volume.fromJSON({
208 | 'base/file1': 'content',
209 | 'base/.gitignore': 'empty',
210 | 'base/folder/file2': '2',
211 | '2nd/base/file1': 'content',
212 | '2nd/base/folder/file2': '2',
213 | '2nd/base/folder/.git/one': '1',
214 | '3rd/base/file1': 'content',
215 | '3rd/base/folder/file2': '2',
216 | '3rd/base/folder/.hidden': 'hidden',
217 | '3rd/base/.hidden/file': 'hidden',
218 | }),
219 | );
220 |
221 | return Promise.all([
222 | hashElement('base', {
223 | files: {
224 | exclude: ['**/.*', '**.*'],
225 | matchBasename: false,
226 | matchPath: true,
227 | },
228 | }),
229 | hashElement(path.join('2nd', 'base'), {
230 | folders: {
231 | exclude: ['**/.*', '**.*'],
232 | matchBasename: false,
233 | matchPath: true,
234 | },
235 | }),
236 | hashElement('base', '3rd', {
237 | files: { exclude: ['.*'] },
238 | folders: { exclude: ['.*'] },
239 | }),
240 | ]).then(result => {
241 | should.exist(result[0].hash);
242 | result[0].hash.should.equal(result[1].hash);
243 | result[1].hash.should.equal(result[2].hash);
244 | });
245 | });
246 |
247 | it('ignores a folder if it is both included and excluded', async function () {
248 | const hashElement = prep(
249 | Volume.fromJSON({
250 | 'base/file1': 'content',
251 | 'base/folder/file2': '2',
252 | 'base/folder2/file3': '3',
253 | }),
254 | );
255 |
256 | async function verify(options) {
257 | const result = await hashElement('base', options);
258 | should.exist(result.hash);
259 | should.exist(result.children);
260 | result.children.length.should.equal(2);
261 | result.children[0].name.should.equal('file1');
262 | result.children[1].name.should.equal('folder2');
263 | }
264 |
265 | const include1 = process.platform === 'win32' ? '*' : '**/*';
266 |
267 | await verify({
268 | folders: {
269 | exclude: [path.join('**', 'folder')],
270 | include: [include1],
271 | matchBasename: false,
272 | matchPath: true,
273 | },
274 | });
275 |
276 | await verify({
277 | folders: {
278 | exclude: ['folder'],
279 | include: ['*'],
280 | matchBasename: true,
281 | matchPath: false,
282 | },
283 | });
284 | });
285 |
286 | it('only includes the wanted folders', function () {
287 | const hashElement = prep(
288 | Volume.fromJSON({
289 | 'abc/file': 'content',
290 | 'def/file': 'content',
291 | 'abc2/file': 'content',
292 | 'abc3/file': 'content',
293 | }),
294 | );
295 |
296 | return Promise.all([
297 | hashElement('./', {
298 | folders: {
299 | include: ['abc*'],
300 | matchBasename: true,
301 | matchPath: false,
302 | },
303 | }),
304 | hashElement('./', {
305 | folders: {
306 | include: ['**abc*'],
307 | matchBasename: false,
308 | matchPath: true,
309 | },
310 | }),
311 | ]).then(result => {
312 | should.exist(result[0].children);
313 | result[0].children.length.should.equal(3);
314 | result[0].hash.should.equal(result[1].hash);
315 | });
316 | });
317 |
318 | it('only includes the wanted files', function () {
319 | const hashElement = prep(
320 | Volume.fromJSON({
321 | 'file1.js': 'file1',
322 | 'file1.abc.js': 'content',
323 | 'file1.js.ext': 'ignore',
324 | 'def/file1.js': 'content',
325 | 'def/file1.json': 'ignore',
326 | }),
327 | );
328 |
329 | return Promise.all([
330 | hashElement('./', {
331 | files: {
332 | include: ['*.js'],
333 | matchBasename: true,
334 | matchPath: false,
335 | },
336 | }),
337 | hashElement('./', {
338 | files: {
339 | include: ['**/*.js', '**.js'],
340 | matchBasename: false,
341 | matchPath: true,
342 | },
343 | }),
344 | ]).then(result => {
345 | //console.log(result.map(r => r.toString()).join('\n'));
346 | should.exist(result[0].children);
347 | result[0].children.length.should.equal(3);
348 | result[0].hash.should.equal(result[1].hash);
349 | });
350 | });
351 |
352 | it('generates the same hash if the folders only differ in name and ignoreRootName is set', function () {
353 | const hashElement = prep(
354 | Volume.fromJSON({
355 | 'abc/def/ghi': 'content of ghi',
356 | 'abc/file1.js': '//just a comment',
357 | 'def/def/ghi': 'content of ghi',
358 | 'def/file1.js': '//just a comment',
359 | 'def/def/.ignored': 'ignored',
360 | }),
361 | );
362 | const options = {
363 | folders: { ignoreRootName: true },
364 | files: { exclude: ['.*'] },
365 | };
366 |
367 | return Promise.all([hashElement('abc', options), hashElement('def', options)]).then(function (
368 | hashes,
369 | ) {
370 | return hashes[0].hash.should.equal(hashes[1].hash);
371 | });
372 | });
373 |
374 | it('generates the same hash if the folders only differ in name and ignoreBasename is set', function () {
375 | const hashElement = prep(
376 | Volume.fromJSON({
377 | 'abc/def/ghi': 'content of ghi',
378 | 'abc/file1.js': '//just a comment',
379 | 'def/def/ghi': 'content of ghi',
380 | 'def/file1.js': '//just a comment',
381 | 'def/def/.ignored': 'ignored',
382 | }),
383 | );
384 | const options = {
385 | folders: { ignoreBasename: true },
386 | files: { exclude: ['.*'] },
387 | };
388 |
389 | return Promise.all([hashElement('abc', options), hashElement('def', options)]).then(function (
390 | hashes,
391 | ) {
392 | return hashes[1].hash.should.equal(hashes[0].hash);
393 | });
394 | });
395 | });
396 |
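397 | // Pattern-matching cheat sheet for the include/exclude options exercised above
398 | // (informal summary inferred from these tests, not from the official docs):
399 | //   matchBasename: true -> globs are tested against the entry name only, e.g. 'file2'
400 | //   matchPath: true     -> globs are tested against the full path, hence the
401 | //                          '**/file2' and '**file2' variants
402 | //
403 | // Minimal usage sketch against the real module (the folder name is hypothetical):
404 | //
405 | //   const { hashElement } = require('../index');
406 | //   hashElement('some-folder', {
407 | //     files: { include: ['*.js'], matchBasename: true, matchPath: false },
408 | //   }).then(hash => console.log(hash.toString()));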
--------------------------------------------------------------------------------
/test/issue-14.js:
--------------------------------------------------------------------------------
1 | const { Volume, should, inspect } = require('./_common');
2 | const folderHash = require('../index');
3 |
4 | describe('Issue 14: Create hashes only over the file content (basename is ignored)', function () {
5 | const expected = 'BA8G/XdAkkeNRQd09bowxdp4rMg='; // base64(sha1('content')): only the file content is hashed
6 | const fs = Volume.fromJSON({
7 | 'folder/file.txt': 'content',
8 | 'folder/2ndfile.txt': 'content',
9 | });
10 | const hashElement = folderHash.prep(fs, Promise);
11 | const folder = 'folder';
12 |
13 | it('in folder-hash <= 3.0.0', function () {
14 | const options = { files: { ignoreRootName: true } };
15 |
16 | const files = fs.readdirSync(folder);
17 | return Promise.all(files.map(basename => hashElement(basename, folder, options))).then(
18 | hashes => {
19 | hashes[0].name.should.not.equal(hashes[1].name);
20 | hashes[0].hash.should.equal(hashes[1].hash);
21 | hashes[0].hash.should.equal(expected);
22 | },
23 | );
24 | });
25 |
26 | it('in folder-hash > 3.0.x', function () {
27 | const options = { files: { ignoreBasename: true } };
28 | return hashElement(folder, options).then(hash => {
29 | hash.children[0].name.should.not.equal(hash.children[1].name);
30 | hash.children[0].hash.should.equal(hash.children[1].hash);
31 | hash.children[1].hash.should.equal(expected);
32 | });
33 | });
34 | });
35 |
36 | /*
37 | it.only('generates the same hash if two files have the same content and ignoreBasename is set', function () {
38 | const hashElement = prep(Volume.fromJSON({
39 | 'file.txt': 'content',
40 | '3rdfile': 'content1',
41 | '2ndfile.txt': 'content'
42 | }));
43 | const options = {
44 | files: { ignoreBasename: true }
45 | };
46 | return hashElement('.', options).then(function (hash) {
47 | console.log('hash', hash)
48 | // files are alphabetically sorted
49 | hash.children[0].name.should.equal('2ndfile.txt');
50 | hash.children[1].name.should.equal('3rdfile');
51 | hash.children[0].hash.should.equal(hash.children[2].hash);
52 | hash.children[2].hash.should.not.equal(hash.children[1].hash);
53 | });
54 | });
55 | //*/
56 |
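57 | // Migration note (informal, based on the two tests above): hashing each file on its
58 | // own with `files: { ignoreRootName: true }` (folder-hash <= 3.0.0) and hashing the
59 | // parent folder with `files: { ignoreBasename: true }` (later versions) both yield
60 | // per-file hashes that cover the content only, e.g.:
61 | //
62 | //   const { hashElement } = require('../index');
63 | //   hashElement('folder', { files: { ignoreBasename: true } })
64 | //     .then(hash => console.log(hash.toString()));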
--------------------------------------------------------------------------------
/test/issue-146.js:
--------------------------------------------------------------------------------
1 | const { folderHash, should, inspect } = require('./_common');
2 | const { Volume } = require('memfs');
3 |
4 | it('Issue 146: Handle `EMFILE` and `ENFILE` errors gracefully', async function () {
5 | const expected = 'BA8G/XdAkkeNRQd09bowxdp4rMg=';
6 | const fs = Volume.fromJSON({
7 | 'folder/file.txt': 'content',
8 | 'folder/file1.txt': 'content',
9 | 'folder/file2.txt': 'content',
10 | 'folder/b/file1.txt': 'content',
11 | 'folder/b/file2.txt': 'content',
12 | 'folder/b/file3.txt': 'content',
13 | });
14 | let counter = 0;
15 | const readdir = fs.promises.readdir; // keep a reference to the real implementation
16 | fs.promises.readdir = (path, options) => { // fake readdir that fails the first few calls
17 | counter++;
18 | if (counter > 1 && counter < 5) { // calls 2-4: simulate "too many open files in process"
19 | throw { code: 'EMFILE', message: 'fake readdir error' };
20 | } else if (counter < 10) { // calls 1 and 5-9: simulate "too many open files in system"
21 | throw { code: 'ENFILE', message: 'fake readdir error' };
22 | } else {
23 | return readdir(path, options); // from the 10th call on, delegate to the real readdir
24 | }
25 | };
26 | const hashElement = folderHash.prep(fs, Promise);
27 | const folder = 'folder';
28 | const options = {};
29 | const result = await hashElement(folder, options);
30 | // ensure that the errors were raised
31 | counter.should.be.greaterThanOrEqual(7);
32 | result.hash.should.equal('rGaf5+7Q5VwsunfiBL9XobKDio4=');
33 | });
34 |
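35 | // The assertions above depend on folder-hash retrying `readdir` after transient
36 | // EMFILE/ENFILE failures. A minimal sketch of such a retry loop (hypothetical
37 | // helper for illustration, not the library's actual implementation):
38 | //
39 | //   async function readdirWithRetry(fs, dir, maxAttempts = 10) {
40 | //     for (let attempt = 1; ; attempt++) {
41 | //       try {
42 | //         return await fs.promises.readdir(dir);
43 | //       } catch (err) {
44 | //         const transient = err.code === 'EMFILE' || err.code === 'ENFILE';
45 | //         if (!transient || attempt >= maxAttempts) throw err;
46 | //         await new Promise(resolve => setTimeout(resolve, 10 * attempt)); // brief backoff
47 | //       }
48 | //     }
49 | //   }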
--------------------------------------------------------------------------------
/test/parameters.js:
--------------------------------------------------------------------------------
1 | /**
2 | * This file tests parameter validation and parsing.
3 | */
4 |
5 | const folderHash = require('../index'),
6 | assert = require('assert'),
7 | chai = require('chai'),
8 | chaiAsPromised = require('chai-as-promised');
9 |
10 | chai.use(chaiAsPromised);
11 | const should = chai.should();
12 |
13 | describe('Initialization', function () {
14 | function checkError(err) {
15 | err.name.should.equal('TypeError');
16 | err.message.should.equal('First argument must be a string');
17 | }
18 |
19 | it('should reject if no name was passed', function () {
20 | return folderHash
21 | .hashElement()
22 | .then(result => {
23 | throw new Error(result);
24 | })
25 | .catch(checkError);
26 | });
27 |
28 | it('should call an error callback if no name was passed', function () {
29 | return folderHash.hashElement(err => {
30 | should.exist(err);
31 | checkError(err);
32 | });
33 | });
34 | });
35 |
36 | describe('Parse parameters', function () {
37 | it('should not change the supplied options object', function () {
38 | const params = {
39 | algo: 'some',
40 | files: { exclude: ['abc', 'def'], include: [] },
41 | folders: { exclude: [], include: ['abc', 'def'] },
42 | match: { basename: false, path: 'true' },
43 | };
44 | const str = JSON.stringify(params);
45 |
46 | return folderHash
47 | .parseParameters('abc', params)
48 | .then(() => JSON.stringify(params).should.equal(str));
49 | });
50 |
51 | it('should parse an empty exclude array to undefined', function () {
52 | const params = {
53 | algo: 'some',
54 | files: { exclude: [] },
55 | match: { basename: false, path: 'true' },
56 | };
57 |
58 | return folderHash.parseParameters('abc', params).then(parsed => {
59 | should.exist(parsed.options.files);
60 | should.equal(parsed.options.files.exclude, undefined);
61 | });
62 | });
63 |
64 | it('should default excludes to undefined', function () {
65 | return folderHash.parseParameters('abc', { files: undefined }).then(parsed => {
66 | should.exist(parsed.options.folders);
67 | should.equal(parsed.options.folders.exclude, undefined);
68 | });
69 | });
70 | });
71 |
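72 | // Call shapes exercised across this test suite (informal reference; the folder
73 | // names below are placeholders):
74 | //
75 | //   const { hashElement, parseParameters } = require('../index');
76 | //   hashElement('some-folder');                    // name only; missing/non-string names reject with a TypeError
77 | //   hashElement('some-folder', { files: { exclude: ['*.log'] } });
78 | //   hashElement('some-folder', 'some/parent/dir', {});
79 | //   parseParameters('some-folder', {});            // resolves to an object whose `options` hold the normalized settings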
--------------------------------------------------------------------------------
/test/symbolic-links.js:
--------------------------------------------------------------------------------
1 | const { defaultOptions, prep, Volume } = require('./_common');
2 | const crypto = require('crypto'),
3 | path = require('path');
4 |
5 | describe('When hashing a symbolic link', function () {
6 | it('should follow a symbolic link follow=resolve (default)', function () {
7 | const fs = Volume.fromJSON({ file: 'content' }, 'folder');
8 | fs.symlinkSync('folder/file', 'soft-link');
9 | const hash = prep(fs);
10 |
11 | return hash('.', {}).then(result => {
12 | const symlink = result.children[1];
13 | // symlink.hash.should.equal(mkhash('soft-link', 'content'));
14 | symlink.hash.should.equal('BQv/kSJnDNedkXlw/tpcXpf+Mzc=');
15 | const target = result.children[0].children[0];
16 | const msg =
17 | 'The symlink name is part of the hash, so the symlink and its target must have different hashes';
18 | symlink.hash.should.not.equal(target.hash, msg);
19 | });
20 | });
21 |
22 | it('can skip symbolic links', function () {
23 | const fs = Volume.fromJSON({ file: 'a' }, 'folder');
24 | fs.symlinkSync('non-existing', 'l1');
25 | fs.symlinkSync('folder/file', 'l2');
26 | const hash = prep(fs);
27 |
28 | const options = { symbolicLinks: { include: false } };
29 | return Promise.all([
30 | hash('l1', options).should.eventually.be.undefined,
31 | hash('l2', options).should.eventually.be.undefined,
32 | hash('.', options)
33 | .then(result => result.children.length)
34 | .should.eventually.become(1),
35 | ]);
36 | });
37 |
38 | it('can ignore the target content', async function () {
39 | const fs = Volume.fromJSON({ file: 'a' }, 'folder');
40 | fs.symlinkSync('non-existing', 'l1');
41 | fs.symlinkSync('folder/file', 'l2');
42 | const hash = prep(fs);
43 |
44 | const options = { symbolicLinks: { ignoreTargetContent: true } };
45 |
46 | const expected = {
47 | l1: toHash(['l1']),
48 | l2: toHash(['l2']),
49 | };
50 |
51 | const l1 = await hash('l1', options);
52 | l1.hash.should.equal(expected.l1);
53 | const l2 = await hash('l2', options);
54 | l2.hash.should.equal(expected.l2);
55 | });
56 | });
57 |
58 | describe('Hashing a symlink to a folder and the folder itself returns the same hash when', function () {
59 | it('they have the same basename', async function () {
60 | const fs = Volume.fromJSON({ a1: 'a', b2: 'bb' }, 'folder');
61 | fs.mkdirSync('horst');
62 | fs.symlinkSync('folder', 'horst/folder');
63 | const hash = prep(fs);
64 |
65 | const options = {
66 | symbolicLinks: {
67 | include: true,
68 | ignoreTargetPath: true,
69 | },
70 | };
71 |
72 | const expected = await hash('folder', options);
73 | const actual = await hash('horst/folder', options);
74 | actual.should.deep.equal(expected);
75 | });
76 |
77 | it('the basename is ignored', async function () {
78 | const fs = Volume.fromJSON({ a1: 'a', b2: 'bb' }, 'folder');
79 | fs.symlinkSync('folder', 'folder-link');
80 | const hash = prep(fs);
81 |
82 | const options = {
83 | folders: { ignoreBasename: true },
84 | symbolicLinks: {
85 | ignoreTargetPath: true,
86 | ignoreBasename: true,
87 | },
88 | };
89 |
90 | const expected = await hash('folder', options);
91 | const actual = await hash('folder-link', options);
92 | // the names will be different
93 | delete expected.name;
94 | delete actual.name;
95 | actual.should.deep.equal(expected);
96 | });
97 | });
98 |
99 | describe('When symbolicLinks.ignoreTargetContent is true', function () {
100 | const fs = Volume.fromJSON({ file: 'a' }, 'folder');
101 | fs.symlinkSync('non-existing', 'l1');
102 | fs.symlinkSync('folder/file', 'l2');
103 | fs.symlinkSync('folder', 'l3');
104 | const hash = prep(fs);
105 |
106 | it('hashes the name and target path', async function () {
107 | const options = {
108 | symbolicLinks: {
109 | include: true,
110 | ignoreTargetContent: true,
111 | ignoreTargetPath: false,
112 | },
113 | };
114 | let result = await hash('l2', options);
115 | const expected = toHash(['l2', resolvePath('folder/file')]);
116 | return result.hash.should.equal(expected);
117 | });
118 |
119 | it('hashes the target path', async function () {
120 | const options = {
121 | symbolicLinks: {
122 | include: true,
123 | ignoreTargetContent: true,
124 | ignoreTargetPath: false,
125 | ignoreBasename: true,
126 | },
127 | };
128 | let result = await hash('l2', options);
129 | const expected = toHash([resolvePath('folder/file')]);
130 | return result.hash.should.equal(expected);
131 | });
132 |
133 | it('will not fail if the target is missing', async function () {
134 | const options = {
135 | symbolicLinks: {
136 | include: true,
137 | ignoreTargetContent: true,
138 | ignoreTargetPath: false,
139 | },
140 | };
141 | let result = await hash('l1', options);
142 | const expected = toHash(['l1', resolvePath('non-existing')]);
143 | return result.hash.should.equal(expected);
144 | });
145 | });
146 |
147 | describe('When symbolicLinks.include equals "resolve"', function () {
148 | const fs = Volume.fromJSON({ file: 'a' }, 'folder');
149 | fs.symlinkSync('non-existing', 'l1');
150 | fs.symlinkSync('folder/file', 'l2');
151 | fs.symlinkSync('folder', 'l3');
152 | const hash = prep(fs);
153 |
154 | function hashWithResolvedTargetPath(first, targetPath) {
155 | const withoutTargetPath = toHash(first);
156 | return toHash([withoutTargetPath, resolvePath(targetPath)]);
157 | }
158 |
159 | it('can create a hash over basename, file content, and target path', async function () {
160 | const options = {
161 | symbolicLinks: {
162 | include: true,
163 | ignoreTargetPath: false,
164 | ignoreBasename: false,
165 | },
166 | };
167 |
168 | const expected = hashWithResolvedTargetPath(['l2', 'a'], 'folder/file');
169 | let result = await hash('l2', options);
170 | return result.hash.should.equal(expected);
171 | });
172 |
173 | it('can create a hash over target path and file content', async function () {
174 | const options1 = {
175 | // this will ignore all file basenames
176 | files: { ignoreBasename: true },
177 | symbolicLinks: {
178 | include: true,
179 | ignoreTargetPath: false,
180 | },
181 | };
182 |
183 | const expected = hashWithResolvedTargetPath(['a'], 'folder/file');
184 | const result1 = await hash('l2', options1);
185 | result1.hash.should.equal(expected);
186 |
187 | const options2 = {
188 | // this will only ignore symbolic link basenames
189 | files: { ignoreBasename: false },
190 | symbolicLinks: {
191 | include: true,
192 | ignoreTargetPath: false,
193 | ignoreBasename: true,
194 | },
195 | };
196 | const result2 = await hash('l2', options2);
197 | return result2.hash.should.equal(result1.hash);
198 | });
199 |
200 | describe('Issue 41: Ignore missing symbolic link targets', function () {
201 | // Note: The different link types are only relevant on Windows
202 | ['file', 'dir', 'junction'].forEach(linkType);
203 | });
204 | });
205 |
206 | function linkType(type) {
207 | describe(`If a "${type}" symlink target does not exist`, function () {
208 | it('should throw an ENOENT error with default options', function () {
209 | const fs = Volume.fromJSON({ file: 'content' });
210 | fs.symlinkSync('non-existing-file', 'soft-link', type);
211 | const hash = prep(fs);
212 |
213 | const expected = /ENOENT/;
214 | return hash('.').should.eventually.be.rejectedWith(expected);
215 | });
216 |
217 | it('should hash only the name if ignoreTargetContentAfterError is true', function () {
218 | const fs = Volume.fromJSON({ file: 'content' });
219 | fs.symlinkSync('non-existing-file', 'soft-link', type);
220 | const hash = prep(fs);
221 | const options = { symbolicLinks: { ignoreTargetContentAfterError: true } };
222 |
223 | return hash('.', options).then(result => {
224 | result.children[1].hash.should.equal('2rAbS3Cr1VJjcXABKQhmBD2SS3s=');
225 | result.hash.should.equal('EYegpWpT309Zil1L80VZMTy6UZc=');
226 | });
227 | });
228 |
229 | it('should hash the name and target path if configured', function () {
230 | const fs = Volume.fromJSON({ file: 'content' });
231 | fs.symlinkSync('non-existing-file', 'soft-link', type);
232 | const hash = prep(fs);
233 | const options = {
234 | symbolicLinks: {
235 | ignoreTargetContentAfterError: true,
236 | ignoreTargetPath: false,
237 | },
238 | };
239 |
240 | return hash('soft-link', options).then(result => {
241 | const expected = toHash(['soft-link', resolvePath('non-existing-file')]);
242 | result.hash.should.equal(expected);
243 | });
244 | });
245 |
246 | it('should hash the name if all symlink errors are ignored', function () {
247 | const fs = Volume.fromJSON({ file: 'content' });
248 | fs.symlinkSync('non-existing-file', 'soft-link', type);
249 | const hash = prep(fs);
250 | const options = { symbolicLinks: { ignoreTargetContentAfterError: true } };
251 |
252 | return hash('soft-link', options).then(result => {
253 | const expected = toHash(['soft-link']);
254 | result.hash.should.equal(expected);
255 | });
256 | });
257 | });
258 | }
259 |
260 | /* helpers */
261 |
262 | function toHash(strings) {
263 | const hash = crypto.createHash(defaultOptions().algo);
264 | for (const str of strings) {
265 | hash.update(str);
266 | }
267 | return hash.digest(defaultOptions().encoding);
268 | }
269 |
270 | function resolvePath(string) {
271 | if (process.platform === 'win32') { // memfs volumes use POSIX-style paths, so mirror that on Windows
272 | return path.posix.resolve(string);
273 | } else {
274 | return path.resolve(string);
275 | }
276 | }
277 |
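278 | // Informal summary of the symbolicLinks options as exercised above (derived from
279 | // the expectations in this file, not from the documentation):
280 | //   include: false                      -> symlinks are skipped entirely
281 | //   ignoreTargetContent: true           -> hash covers the link name and, unless
282 | //                                          ignoreTargetPath is true, the resolved target path
283 | //   ignoreBasename: true                -> the link's own name is left out of the hash
284 | //   ignoreTargetContentAfterError: true -> a broken link falls back to hashing its name
285 | //                                          (plus the target path when ignoreTargetPath is false)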
--------------------------------------------------------------------------------