├── .gitignore ├── .npmrc ├── .travis.yml ├── LICENSE ├── README.md ├── index.js ├── package.json ├── scripts ├── generate-fixture-index.js └── unpack-fixtures.js └── test ├── _hash-fake-repo.js ├── async.js ├── fixtures ├── README.md ├── index.json └── packed │ ├── dirty-repo.tar │ ├── fake-repo-parent.tar │ ├── head-is-a-commit.tar │ ├── head-is-a-ref.tar │ ├── just-a-package.tar │ ├── not-a-package.tar │ ├── repo-with-packed-refs.tar │ └── repo-without-refs.tar └── sync.js /.gitignore: -------------------------------------------------------------------------------- 1 | .node-version 2 | .nyc_output 3 | node_modules 4 | coverage 5 | /test/fixtures/unpacked 6 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | package-lock=false 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | os: 3 | - windows 4 | - linux 5 | node_js: 6 | - 11 7 | - 10 8 | - 8 9 | script: npm run coverage 10 | after_success: npx codecov --file=./coverage/lcov.info 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | ISC License (ISC) 2 | Copyright (c) 2016-2017, Mark Wubben (novemberborn.net) 3 | 4 | Permission to use, copy, modify, and/or distribute this software for any purpose 5 | with or without fee is hereby granted, provided that the above copyright notice 6 | and this permission notice appear in all copies. 7 | 8 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH 9 | REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND 10 | FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, 11 | INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS 12 | OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER 13 | TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF 14 | THIS SOFTWARE. 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # package-hash 2 | 3 | Generates a hash for an installed npm package, useful for salting caches. 4 | [AVA](https://github.com/sindresorhus/ava), for example, caches precompiled test 5 | files. It generates a salt for its cache based on the various packages that are 6 | used when compiling the test files. 7 | 8 | `package-hash` can generate an appropriate hash based on the package location 9 | (on disk) and the `package.json` file. This hash is salted with a hash 10 | of `package-hash` itself. 11 | 12 | `package-hash` can detect when the package-to-be-hashed is a Git repository. In 13 | the AVA example, this is useful when you're debugging one of the packages used to 14 | compile the test files. You can clone it locally and use `npm link` so AVA can 15 | find the clone. The hash will include the HEAD (`.git/HEAD`) and its 16 | corresponding ref (e.g. `.git/refs/heads/master`), any packed refs 17 | (`.git/packed-refs`), as well as the diff (`git diff`) for any uncommitted 18 | changes. This makes it easy to test your changes without having to 19 | explicitly clear the cache in the parent project.
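For illustration, here is how that plays out through the public API. This is a minimal sketch; `some-dep` stands in for whatever package you cloned and linked, and only the fact that the hash changes matters, not its value:

```js
const packageHash = require('package-hash')

// 'some-dep' is a placeholder for an `npm link`ed clone. Because it is a
// Git repository, the hash reflects .git/HEAD, the checked-out ref, any
// packed refs and the output of `git diff HEAD`.
const before = await packageHash(require.resolve('some-dep/package.json'))

// ...commit or edit something in the clone, then rehash...
const after = await packageHash(require.resolve('some-dep/package.json'))
console.log(before !== after) // true whenever the Git state changed
```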
20 | 21 | ## Installation 22 | 23 | ```console 24 | $ npm install --save package-hash 25 | ``` 26 | 27 | ## Usage 28 | 29 | ```js 30 | const packageHash = require('package-hash') 31 | 32 | // Asynchronously: 33 | const hash = await packageHash(require.resolve('babel-core/package.json')) 34 | 35 | // Synchronously: 36 | const hash = packageHash.sync(require.resolve('babel-core/package.json')) 37 | ``` 38 | 39 | `packageHash()` / `packageHash.sync()` must be called with a file path for an 40 | existing `package.json` file. To get the path to an npm package, it's easiest to 41 | use `require.resolve('the-name/package.json')`. 42 | 43 | You can provide multiple paths: 44 | 45 | ```js 46 | const hash = await packageHash([ 47 | require.resolve('babel-core/package.json'), 48 | require.resolve('babel-preset-es2015/package.json') 49 | ]) 50 | ``` 51 | 52 | An optional salt value can also be provided: 53 | 54 | ```js 55 | const hash = await packageHash(require.resolve('babel-core/package.json'), 'salt value') 56 | ``` 57 | 58 | ## API 59 | 60 | ### `packageHash(paths, salt?)` 61 | 62 | `paths: string | string[]` ➜ can be a single file path or an array of paths. 63 | 64 | `salt: Array | Buffer | Object | string` ➜ optional. If an `Array` or `Object` (not `null`), it is first converted to a JSON string. 65 | 66 | Returns a promise for the hex-encoded hash string. 67 | 68 | ### `packageHash.sync(paths, salt?)` 69 | 70 | `paths: string | string[]` ➜ can be a single file path or an array of paths. 71 | 72 | `salt: Array | Buffer | Object | string` ➜ optional. If an `Array` or `Object` (not `null`), it is first converted to a JSON string. 73 | 74 | Returns a hex-encoded hash string. 75 | 76 | ## Compatibility 77 | 78 | `package-hash` has been tested with Node.js 8 and above, including Windows 79 | support.
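As noted in the API section above, `Array` and `Object` salts are first converted to a JSON string. A small sketch of what that implies, reusing `babel-core` from the earlier examples:

```js
const packageHash = require('package-hash')

const pkg = require.resolve('babel-core/package.json')

// An Object (or Array) salt is passed through JSON.stringify() before
// hashing, so stringifying it yourself produces the identical hash.
const fromObject = await packageHash(pkg, {env: 'test'})
const fromString = await packageHash(pkg, JSON.stringify({env: 'test'}))
console.log(fromObject === fromString) // true
```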
80 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const cp = require('child_process') // eslint-disable-line security/detect-child-process 4 | const path = require('path') 5 | const {promisify} = require('util') 6 | 7 | const fs = require('graceful-fs') 8 | const flattenDeep = require('lodash.flattendeep') 9 | const hasha = require('hasha') 10 | const releaseZalgo = require('release-zalgo') 11 | 12 | const PACKAGE_FILE = require.resolve('./package.json') 13 | const TEN_MEBIBYTE = 1024 * 1024 * 10 14 | const execFile = promisify(cp.execFile) 15 | 16 | const readFile = { 17 | async: promisify(fs.readFile), 18 | sync: fs.readFileSync 19 | } 20 | 21 | const tryReadFile = { 22 | async (file) { 23 | return readFile.async(file).catch(() => null) 24 | }, 25 | 26 | sync (file) { 27 | try { 28 | return fs.readFileSync(file) 29 | } catch (err) { 30 | return null 31 | } 32 | } 33 | } 34 | 35 | const tryExecFile = { 36 | async (file, args, options) { 37 | return execFile(file, args, options) 38 | .then(({stdout}) => stdout) 39 | .catch(() => null) 40 | }, 41 | 42 | sync (file, args, options) { 43 | try { 44 | return cp.execFileSync(file, args, options) 45 | } catch (err) { 46 | return null 47 | } 48 | } 49 | } 50 | 51 | const git = { 52 | tryGetRef (zalgo, dir, head) { 53 | const m = /^ref: (.+)$/.exec(head.toString('utf8').trim()) 54 | if (!m) return null 55 | 56 | return zalgo.run(tryReadFile, path.join(dir, '.git', m[1])) 57 | }, 58 | 59 | tryGetDiff (zalgo, dir) { 60 | return zalgo.run(tryExecFile, 61 | 'git', 62 | // Attempt to get consistent output no matter the platform. Diff both 63 | // staged and unstaged changes. 64 | ['--no-pager', 'diff', 'HEAD', '--no-color', '--no-ext-diff'], 65 | { 66 | cwd: dir, 67 | maxBuffer: TEN_MEBIBYTE, 68 | env: Object.assign({}, process.env, { 69 | // Force the GIT_DIR to prevent git from diffing a parent repository 70 | // in case the directory isn't actually a repository. 71 | GIT_DIR: path.join(dir, '.git') 72 | }), 73 | // Ignore stderr. 
74 | stdio: ['ignore', 'pipe', 'ignore'] 75 | }) 76 | } 77 | } 78 | 79 | function addPackageData (zalgo, pkgPath) { 80 | const dir = path.dirname(pkgPath) 81 | 82 | return zalgo.all([ 83 | dir, 84 | zalgo.run(readFile, pkgPath), 85 | zalgo.run(tryReadFile, path.join(dir, '.git', 'HEAD')) 86 | .then(head => { 87 | if (!head) return [] 88 | 89 | return zalgo.all([ 90 | zalgo.run(tryReadFile, path.join(dir, '.git', 'packed-refs')), 91 | git.tryGetRef(zalgo, dir, head), 92 | git.tryGetDiff(zalgo, dir) 93 | ]) 94 | .then(results => { 95 | return [head].concat(results.filter(Boolean)) 96 | }) 97 | }) 98 | ]) 99 | } 100 | 101 | function computeHash (zalgo, paths, pepper, salt) { 102 | const inputs = [] 103 | if (pepper) inputs.push(pepper) 104 | 105 | if (typeof salt !== 'undefined') { 106 | if (Buffer.isBuffer(salt) || typeof salt === 'string') { 107 | inputs.push(salt) 108 | } else if (typeof salt === 'object' && salt !== null) { 109 | inputs.push(JSON.stringify(salt)) 110 | } else { 111 | throw new TypeError('Salt must be an Array, Buffer, Object or string') 112 | } 113 | } 114 | 115 | // TODO: Replace flattenDeep with Array#flat(Infinity) after Node.js 10 is dropped 116 | return zalgo.all(paths.map(pkgPath => addPackageData(zalgo, pkgPath))) 117 | .then(furtherInputs => hasha(flattenDeep([inputs, furtherInputs]), {algorithm: 'sha256'})) 118 | } 119 | 120 | let ownHash = null 121 | let ownHashPromise = null 122 | function run (zalgo, paths, salt) { 123 | if (!ownHash) { 124 | return zalgo.run({ 125 | async () { 126 | if (!ownHashPromise) { 127 | ownHashPromise = computeHash(zalgo, [PACKAGE_FILE]) 128 | } 129 | return ownHashPromise 130 | }, 131 | sync () { 132 | return computeHash(zalgo, [PACKAGE_FILE]) 133 | } 134 | }) 135 | .then(hash => { 136 | ownHash = Buffer.from(hash, 'hex') 137 | ownHashPromise = null 138 | return run(zalgo, paths, salt) 139 | }) 140 | } 141 | 142 | if (paths === PACKAGE_FILE && typeof salt === 'undefined') { 143 | // Special case that allows the pepper value to be obtained. Mainly here for 144 | // testing purposes. 145 | return zalgo.returns(ownHash.toString('hex')) 146 | } 147 | 148 | paths = Array.isArray(paths) ?
paths : [paths] 149 | return computeHash(zalgo, paths, ownHash, salt) 150 | } 151 | 152 | module.exports = (paths, salt) => { 153 | try { 154 | return run(releaseZalgo.async(), paths, salt) 155 | } catch (err) { 156 | return Promise.reject(err) 157 | } 158 | } 159 | module.exports.sync = (paths, salt) => { 160 | const result = run(releaseZalgo.sync(), paths, salt) 161 | return releaseZalgo.unwrapSync(result) 162 | } 163 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "package-hash", 3 | "version": "4.0.0", 4 | "description": "Generates a hash for an installed npm package, useful for salting caches", 5 | "main": "index.js", 6 | "files": [ 7 | "index.js" 8 | ], 9 | "engines": { 10 | "node": ">=8" 11 | }, 12 | "scripts": { 13 | "lint": "as-i-preach", 14 | "unpack-fixtures": "node scripts/unpack-fixtures.js", 15 | "pregenerate-fixture-index": "npm run unpack-fixtures", 16 | "generate-fixture-index": "node scripts/generate-fixture-index.js", 17 | "pretest": "npm run unpack-fixtures", 18 | "test": "ava", 19 | "posttest": "npm run lint", 20 | "coverage": "nyc npm test", 21 | "watch:test": "npm run test -- --watch" 22 | }, 23 | "repository": { 24 | "type": "git", 25 | "url": "git+https://github.com/novemberborn/package-hash.git" 26 | }, 27 | "author": "Mark Wubben (https://novemberborn.net/)", 28 | "license": "ISC", 29 | "bugs": { 30 | "url": "https://github.com/novemberborn/package-hash/issues" 31 | }, 32 | "homepage": "https://github.com/novemberborn/package-hash#readme", 33 | "dependencies": { 34 | "graceful-fs": "^4.1.15", 35 | "hasha": "^5.0.0", 36 | "lodash.flattendeep": "^4.4.0", 37 | "release-zalgo": "^1.0.0" 38 | }, 39 | "devDependencies": { 40 | "@novemberborn/as-i-preach": "^11.0.0", 41 | "ava": "^2.4.0", 42 | "codecov": "^3.3.0", 43 | "nyc": "^15.0.0", 44 | "rimraf": "^3.0.0", 45 | "tar": "^5.0.5" 46 | }, 47 | "nyc": { 48 | "cache": true, 49 | "exclude": [ 50 | "scripts", 51 | "test" 52 | ], 53 | "reporter": [ 54 | "html", 55 | "lcov", 56 | "text" 57 | ] 58 | }, 59 | "standard-engine": "@novemberborn/as-i-preach" 60 | } 61 | -------------------------------------------------------------------------------- /scripts/generate-fixture-index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const execFileSync = require('child_process').execFileSync // eslint-disable-line security/detect-child-process 4 | const readFileSync = require('fs').readFileSync 5 | const writeFileSync = require('fs').writeFileSync 6 | const join = require('path').posix.join 7 | const resolve = require('path').posix.resolve 8 | 9 | const indexFile = resolve(__dirname, '..', 'test', 'fixtures', 'index.json') 10 | const unpackedDir = resolve(__dirname, '..', 'test', 'fixtures', 'unpacked') 11 | 12 | const files = { 13 | 'dirty-repo': [ 14 | 'package.json', 15 | '.git/HEAD', 16 | '.git/refs/heads/master' 17 | ], 18 | 'fake-repo-parent/fake-repo': [ 19 | 'package.json', 20 | '.git/HEAD' 21 | ], 22 | 'head-is-a-commit': [ 23 | 'package.json', 24 | '.git/HEAD' 25 | ], 26 | 'head-is-a-ref': [ 27 | 'package.json', 28 | '.git/HEAD', 29 | '.git/refs/heads/master' 30 | ], 31 | 'just-a-package': [ 32 | 'package.json' 33 | ], 34 | 'repo-with-packed-refs': [ 35 | 'package.json', 36 | '.git/HEAD', 37 | '.git/packed-refs' 38 | ], 39 | 'repo-without-refs': [ 40 | 'package.json', 41 | '.git/HEAD' 42 | ] 43 | } 44 | 45 | const index 
= { 46 | files: Object.keys(files).reduce((acc, fixture) => { 47 | acc[fixture] = files[fixture].reduce((read, file) => { 48 | read[file] = readFileSync(join(unpackedDir, fixture, file), 'base64') 49 | return read 50 | }, {}) 51 | return acc 52 | }, {}) 53 | } 54 | 55 | index.diffs = { 56 | 'dirty-repo': execFileSync('git', ['--no-pager', 'diff', 'HEAD', '--no-color', '--no-ext-diff'], { 57 | cwd: join(unpackedDir, 'dirty-repo'), 58 | env: Object.assign({}, process.env, { 59 | GIT_DIR: join(unpackedDir, 'dirty-repo/.git') 60 | }) 61 | }).toString('base64') 62 | } 63 | 64 | writeFileSync(indexFile, JSON.stringify(index, null, 2) + '\n') 65 | -------------------------------------------------------------------------------- /scripts/unpack-fixtures.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fs = require('fs') 4 | const path = require('path') 5 | const rimraf = require('rimraf') 6 | const tar = require('tar') 7 | 8 | const unpackedDir = path.resolve(__dirname, '..', 'test', 'fixtures', 'unpacked') 9 | const packedDir = path.resolve(__dirname, '..', 'test', 'fixtures', 'packed') 10 | 11 | rimraf.sync(unpackedDir) 12 | fs.mkdirSync(unpackedDir) 13 | 14 | fs.readdirSync(packedDir) 15 | .filter(name => /\.tar$/.test(name)) 16 | .map(name => path.join(packedDir, name)) 17 | .forEach(file => { 18 | tar.extract({cwd: unpackedDir, file, sync: true, strict: true}) 19 | }) 20 | -------------------------------------------------------------------------------- /test/_hash-fake-repo.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const resolve = require('path').resolve 4 | const sync = require('..').sync 5 | 6 | sync(resolve(__dirname, 'fixtures', 'unpacked', 'fake-repo-parent', 'fake-repo', 'package.json')) 7 | -------------------------------------------------------------------------------- /test/async.js: -------------------------------------------------------------------------------- 1 | // Need this until @novemberborn/as-i-preach allows t.throwsAsync 2 | /* eslint ava/use-t-well: "off" */ 3 | import {randomBytes} from 'crypto' 4 | import {join, resolve as resolvePath} from 'path' 5 | 6 | import test from 'ava' 7 | import hasha from 'hasha' 8 | 9 | import packageHash from '..' 
10 | import {files, diffs} from './fixtures/index.json' 11 | 12 | function resolveFixture (...args) { 13 | return resolvePath(__dirname, 'fixtures', ...args) 14 | } 15 | 16 | const projectDir = resolvePath(__dirname, '..') 17 | 18 | function bytes (base64) { 19 | if (typeof base64 === 'undefined') return null 20 | 21 | return Buffer.from(base64, 'base64') 22 | } 23 | 24 | const async = (...args) => packageHash(...args).then() 25 | 26 | let ownHash = null 27 | test.serial('hashes itself', async t => { 28 | const [result] = await Promise.all([ 29 | // Run in parallel to provide code coverage to ownHashPromise usage 30 | async(join(projectDir, 'package.json')), 31 | async(join(projectDir, 'package.json')) 32 | ]) 33 | t.true(typeof result === 'string') 34 | t.true(result.length > 0) 35 | ownHash = Buffer.from(result, 'hex') 36 | }) 37 | 38 | test('throws when called with a directory that is not an installed package', async t => { 39 | const err = await t.throwsAsync(async(resolveFixture('unpacked', 'not-a-package', 'package.json'))) 40 | t.is(err.code, 'ENOENT') 41 | }) 42 | 43 | test('throws when called with a non-existent path', async t => { 44 | const err = await t.throwsAsync(async(resolveFixture('does-not-exist', 'package.json'))) 45 | t.is(err.code, 'ENOENT') 46 | }) 47 | 48 | test('can be called with a file', async t => { 49 | const dir = resolveFixture('unpacked', 'just-a-package') 50 | const file = join(dir, 'package.json') 51 | const actual = await async(file) 52 | const expected = hasha([ 53 | ownHash, 54 | dir, 55 | bytes(files['just-a-package']['package.json']) 56 | ], {algorithm: 'sha256'}) 57 | 58 | t.true(actual === expected) 59 | }) 60 | 61 | ;[ 62 | ['null', null], 63 | ['a number', 42], 64 | ['a boolean', false], 65 | ['a function', () => {}] 66 | ].forEach(([label, salt]) => { 67 | test(`salt cannot be ${label}`, async t => { 68 | const err = await t.throwsAsync(async(projectDir, salt), TypeError) 69 | t.is(err.message, 'Salt must be an Array, Buffer, Object or string') 70 | }) 71 | }) 72 | 73 | ;[ 74 | ['can be a Buffer', randomBytes(16)], 75 | ['can be an Array', [{foo: 'bar'}, 'baz'], JSON.stringify([{foo: 'bar'}, 'baz'])], 76 | ['can be an Object', {foo: 'bar'}, JSON.stringify({foo: 'bar'})], 77 | ['can be a string', 'foobar'], 78 | ['is ignored when undefined', undefined, ''] 79 | ].forEach(([label, salt, stringifiedSalt = salt]) => { 80 | test(`salt ${label}`, async t => { 81 | const dir = resolveFixture('unpacked', 'just-a-package') 82 | const file = join(dir, 'package.json') 83 | const actual = await async(file, salt) 84 | const expected = hasha([ 85 | ownHash, 86 | stringifiedSalt, 87 | dir, 88 | bytes(files['just-a-package']['package.json']) 89 | ], {algorithm: 'sha256'}) 90 | 91 | t.true(actual === expected) 92 | }) 93 | }) 94 | 95 | test('can be called with a list of files', async t => { 96 | const salt = randomBytes(16) 97 | const dir = resolveFixture('unpacked', 'head-is-a-commit') 98 | const file = resolveFixture(dir, 'package.json') 99 | const dir2 = resolveFixture('unpacked', 'just-a-package') 100 | const file2 = join(dir2, 'package.json') 101 | 102 | const actual = await async([file, file2], salt) 103 | const expected = hasha([ 104 | ownHash, 105 | salt, 106 | dir, 107 | bytes(files['head-is-a-commit']['package.json']), 108 | bytes(files['head-is-a-commit']['.git/HEAD']), 109 | dir2, 110 | bytes(files['just-a-package']['package.json']) 111 | ], {algorithm: 'sha256'}) 112 | 113 | t.true(actual === expected) 114 | }) 115 | 116 | ;[ 117 | 'dirty-repo', 118 | 
'fake-repo-parent/fake-repo', 119 | 'head-is-a-commit', 120 | 'head-is-a-ref', 121 | 'repo-with-packed-refs', 122 | 'repo-without-refs' 123 | ].forEach(fixture => { 124 | test(`${fixture} is hashed correctly`, async t => { 125 | const dir = resolveFixture('unpacked', fixture) 126 | const actual = await async(join(dir, 'package.json')) 127 | const expected = hasha([ 128 | ownHash, 129 | dir, 130 | bytes(files[fixture]['package.json']), 131 | bytes(files[fixture]['.git/HEAD']), 132 | bytes(files[fixture]['.git/packed-refs']), 133 | bytes(files[fixture]['.git/refs/heads/master']), 134 | bytes(diffs[fixture]) 135 | ].filter(Boolean), {algorithm: 'sha256'}) 136 | 137 | t.true(actual === expected) 138 | }) 139 | }) 140 | -------------------------------------------------------------------------------- /test/fixtures/README.md: -------------------------------------------------------------------------------- 1 | The `packed` directory contains tarballs of the various fixtures. These can be 2 | unpacked using `npm run unpack-fixtures`. They'll be unpacked into the 3 | `unpacked` directory, which is ignored through `.gitignore`. 4 | 5 | Use `npm run generate-fixture-index` to generate an index of the fixtures. 6 | 7 | To edit fixtures, extract them elsewhere on disk, then create a new tarball and 8 | copy it to the `packed` directory. Use `npm test` to test with the new fixture. 9 | Avoid editing files inside the `unpacked` directory; it's erased every time you 10 | use `npm test`. 11 | -------------------------------------------------------------------------------- /test/fixtures/index.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": { 3 | "dirty-repo": { 4 | "package.json": "Y2hhbmdlcyEK", 5 | ".git/HEAD": "cmVmOiByZWZzL2hlYWRzL21hc3Rlcgo=", 6 | ".git/refs/heads/master": "ZGQ5ZTQ2MTMxOWIwOThiMzljOTVmOTNiNGFkZjhjNzM1ZDYwMThiNQo=" 7 | }, 8 | "fake-repo-parent/fake-repo": { 9 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K", 10 | ".git/HEAD": "cmVmOiByZWZzL2hlYWRzL21hc3Rlcgo=" 11 | }, 12 | "head-is-a-commit": { 13 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K", 14 | ".git/HEAD": "ZGQ5ZTQ2MTMxOWIwOThiMzljOTVmOTNiNGFkZjhjNzM1ZDYwMThiNQo=" 15 | }, 16 | "head-is-a-ref": { 17 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K", 18 | ".git/HEAD": "cmVmOiByZWZzL2hlYWRzL21hc3Rlcgo=", 19 | ".git/refs/heads/master": "ZGQ5ZTQ2MTMxOWIwOThiMzljOTVmOTNiNGFkZjhjNzM1ZDYwMThiNQo=" 20 | }, 21 | "just-a-package": { 22 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K" 23 | }, 24 | "repo-with-packed-refs": { 25 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K", 26 | ".git/HEAD": "cmVmOiByZWZzL2hlYWRzL21hc3Rlcgo=", 27 | ".git/packed-refs": "IyBwYWNrLXJlZnMgd2l0aDogcGVlbGVkIGZ1bGx5LXBlZWxlZCAKZGQ5ZTQ2MTMxOWIwOThiMzljOTVmOTNiNGFkZjhjNzM1ZDYwMThiNSByZWZzL2hlYWRzL21hc3Rlcgo=" 28 | }, 29 | "repo-without-refs": { 30 | "package.json": "ZG9lc24ndCBoYXZlIHRvIGJlIHZhbGlkIGpzb24K", 31 | ".git/HEAD": "ZGQ5ZTQ2MTMxOWIwOThiMzljOTVmOTNiNGFkZjhjNzM1ZDYwMThiNQo=" 32 | } 33 | }, 34 | "diffs": { 35 | "dirty-repo": "ZGlmZiAtLWdpdCBhL3BhY2thZ2UuanNvbiBiL3BhY2thZ2UuanNvbgppbmRleCA5NWMyYWE5Li5mNDYzYzM0IDEwMDY0NAotLS0gYS9wYWNrYWdlLmpzb24KKysrIGIvcGFja2FnZS5qc29uCkBAIC0xICsxIEBACi1kb2Vzbid0IGhhdmUgdG8gYmUgdmFsaWQganNvbgorY2hhbmdlcyEK" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /test/fixtures/packed/dirty-repo.tar:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/dirty-repo.tar -------------------------------------------------------------------------------- /test/fixtures/packed/fake-repo-parent.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/fake-repo-parent.tar -------------------------------------------------------------------------------- /test/fixtures/packed/head-is-a-commit.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/head-is-a-commit.tar -------------------------------------------------------------------------------- /test/fixtures/packed/head-is-a-ref.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/head-is-a-ref.tar -------------------------------------------------------------------------------- /test/fixtures/packed/just-a-package.tar: -------------------------------------------------------------------------------- 1 | just-a-package/000755 000765 000024 00000000000 12703714135 013752 5ustar00markstaff000000 000000 just-a-package/package.json000644 000765 000024 00000000036 12703714153 016237 0ustar00markstaff000000 000000 doesn't have to be valid json 2 | -------------------------------------------------------------------------------- /test/fixtures/packed/not-a-package.tar: -------------------------------------------------------------------------------- 1 | not-a-package/000755 000765 000024 00000000000 12703714116 013564 5ustar00markstaff000000 000000 not-a-package/empty.txt000644 000765 000024 00000000000 12703714116 015451 0ustar00markstaff000000 000000 -------------------------------------------------------------------------------- /test/fixtures/packed/repo-with-packed-refs.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/repo-with-packed-refs.tar -------------------------------------------------------------------------------- /test/fixtures/packed/repo-without-refs.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/novemberborn/package-hash/e8726a2aaed7606c2e787be2308e173f9f125f5e/test/fixtures/packed/repo-without-refs.tar -------------------------------------------------------------------------------- /test/sync.js: -------------------------------------------------------------------------------- 1 | import {spawnSync} from 'child_process' 2 | import {randomBytes} from 'crypto' 3 | import {join, resolve as resolvePath} from 'path' 4 | 5 | import test from 'ava' 6 | import hasha from 'hasha' 7 | 8 | import {sync} from '..' 
9 | import {files, diffs} from './fixtures/index.json' 10 | 11 | function resolveFixture (...args) { 12 | return resolvePath(__dirname, 'fixtures', ...args) 13 | } 14 | 15 | const projectDir = resolvePath(__dirname, '..') 16 | 17 | function bytes (base64) { 18 | if (typeof base64 === 'undefined') return null 19 | 20 | return Buffer.from(base64, 'base64') 21 | } 22 | 23 | let ownHash = null 24 | test.serial('hashes itself', t => { 25 | const result = sync(join(projectDir, 'package.json')) 26 | t.true(typeof result === 'string') 27 | t.true(result.length > 0) 28 | ownHash = Buffer.from(result, 'hex') 29 | }) 30 | 31 | test('throws when called with a directory that is not an installed package', t => { 32 | const err = t.throws(() => sync(resolveFixture('unpacked', 'not-a-package', 'package.json'))) 33 | t.is(err.code, 'ENOENT') 34 | }) 35 | 36 | test('throws when called with a non-existent path', t => { 37 | const err = t.throws(() => sync(resolveFixture('does-not-exist', 'package.json'))) 38 | t.is(err.code, 'ENOENT') 39 | }) 40 | 41 | test('can be called with a file', t => { 42 | const dir = resolveFixture('unpacked', 'just-a-package') 43 | const file = join(dir, 'package.json') 44 | const actual = sync(file) 45 | const expected = hasha([ 46 | ownHash, 47 | dir, 48 | bytes(files['just-a-package']['package.json']) 49 | ], {algorithm: 'sha256'}) 50 | 51 | t.true(actual === expected) 52 | }) 53 | 54 | ;[ 55 | ['null', null], 56 | ['a number', 42], 57 | ['a boolean', false], 58 | ['a function', () => {}] 59 | ].forEach(([label, salt]) => { 60 | test(`salt cannot be ${label}`, t => { 61 | const err = t.throws(() => sync(projectDir, salt), TypeError) 62 | t.is(err.message, 'Salt must be an Array, Buffer, Object or string') 63 | }) 64 | }) 65 | 66 | ;[ 67 | ['can be a Buffer', randomBytes(16)], 68 | ['can be an Array', [{foo: 'bar'}, 'baz'], JSON.stringify([{foo: 'bar'}, 'baz'])], 69 | ['can be an Object', {foo: 'bar'}, JSON.stringify({foo: 'bar'})], 70 | ['can be a string', 'foobar'], 71 | ['is ignored when undefined', undefined, ''] 72 | ].forEach(([label, salt, stringifiedSalt = salt]) => { 73 | test(`salt ${label}`, t => { 74 | const dir = resolveFixture('unpacked', 'just-a-package') 75 | const file = join(dir, 'package.json') 76 | const actual = sync(file, salt) 77 | const expected = hasha([ 78 | ownHash, 79 | stringifiedSalt, 80 | dir, 81 | bytes(files['just-a-package']['package.json']) 82 | ], {algorithm: 'sha256'}) 83 | 84 | t.true(actual === expected) 85 | }) 86 | }) 87 | 88 | test('can be called with a list of files', t => { 89 | const salt = randomBytes(16) 90 | const dir = resolveFixture('unpacked', 'head-is-a-commit') 91 | const file = join(dir, 'package.json') 92 | const dir2 = resolveFixture('unpacked', 'just-a-package') 93 | const file2 = join(dir2, 'package.json') 94 | 95 | const actual = sync([file, file2], salt) 96 | const expected = hasha([ 97 | ownHash, 98 | salt, 99 | dir, 100 | bytes(files['head-is-a-commit']['package.json']), 101 | bytes(files['head-is-a-commit']['.git/HEAD']), 102 | dir2, 103 | bytes(files['just-a-package']['package.json']) 104 | ], {algorithm: 'sha256'}) 105 | 106 | t.true(actual === expected) 107 | }) 108 | 109 | ;[ 110 | 'dirty-repo', 111 | 'fake-repo-parent/fake-repo', 112 | 'head-is-a-commit', 113 | 'head-is-a-ref', 114 | 'repo-with-packed-refs', 115 | 'repo-without-refs' 116 | ].forEach(fixture => { 117 | test(`${fixture} is hashed correctly`, t => { 118 | const dir = resolveFixture('unpacked', fixture) 119 | const actual = sync(join(dir, 
'package.json')) 120 | const expected = hasha([ 121 | ownHash, 122 | dir, 123 | bytes(files[fixture]['package.json']), 124 | bytes(files[fixture]['.git/HEAD']), 125 | bytes(files[fixture]['.git/packed-refs']), 126 | bytes(files[fixture]['.git/refs/heads/master']), 127 | bytes(diffs[fixture]) 128 | ].filter(Boolean), {algorithm: 'sha256'}) 129 | 130 | t.true(actual === expected) 131 | }) 132 | }) 133 | 134 | if (spawnSync) { 135 | test('diffing should not write to stderr', t => { 136 | const child = spawnSync(process.execPath, [resolvePath(__dirname, '_hash-fake-repo.js')]) 137 | t.true(child.stderr.toString('utf8') === '') 138 | }) 139 | } 140 | --------------------------------------------------------------------------------