├── .gitignore ├── .travis.yml ├── README.md ├── example.js ├── index.js ├── package.json └── test ├── fixture ├── a │ └── b │ │ └── c │ │ ├── d.txt │ │ └── e.txt └── ignore │ └── .dat │ ├── beep.txt │ └── hi └── index.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | node_js: 4 | - 4 5 | - 5 6 | - 6 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![deprecated](http://badges.github.io/stability-badges/dist/deprecated.svg)](https://dat-ecosystem.org/) 2 | 3 | More info on active projects and modules at [dat-ecosystem.org](https://dat-ecosystem.org/) 4 | 5 | --- 6 | 7 | # hyperdrive-import-files 8 | 9 | Import the contents of a folder into a [hyperdrive](https://github.com/mafintosh/hyperdrive), and optionally keep watching for changes. 10 | 11 | [![Build Status](https://travis-ci.org/juliangruber/hyperdrive-import-files.svg?branch=master)](https://travis-ci.org/juliangruber/hyperdrive-import-files) 12 | 13 | ## Example 14 | 15 | ```js 16 | const hyperdrive = require('hyperdrive') 17 | const memdb = require('memdb') 18 | const hyperImport = require('hyperdrive-import-files') 19 | 20 | const drive = hyperdrive(memdb()) 21 | const archive = drive.createArchive() 22 | 23 | hyperImport(archive, 'a/directory/', err => { 24 | // ... 25 | }) 26 | ``` 27 | 28 | ## Installation 29 | 30 | ```bash 31 | $ npm install hyperdrive-import-files 32 | ``` 33 | 34 | ## API 35 | 36 | ### hyperImport(archive, target, [, options][, cb]) 37 | 38 | Recursively import `target`, which is the path to a directory or file, into `archive` and call `cb` with the potential error. 
The import happens sequentially. Returns a `status` object. 39 | 40 | Options 41 | 42 | - `watch`: watch files for changes & import on change (archive must be live) 43 | - `overwrite`: allow files in the archive to be overwritten (defaults to true) 44 | - `compareFileContent`: compare import-candidates to archive's internal copy. If false, will only compare mtime and file-size, which is faster but may result in false positives. (defaults to false)
58 | }) 59 | // when you want to quit: 60 | status.close() 61 | ``` 62 | 63 | If you want to import into a subfolder, set `basePath`: 64 | 65 | ```js 66 | hyperImport(archive, target, { basePath: '/some/subdir' }, err => {...}) 67 | ``` 68 | 69 | ### status 70 | 71 | Events: 72 | 73 | - `error` (`err`) 74 | - `file imported` ({ `path`, `mode=updated|created` }) 75 | - `file skipped` ({ `path` }) 76 | - `file watch event` ({ `path`, `mode=updated|created` }) 77 | 78 | Properties: 79 | 80 | - `fileCount`: The count of currently known files 81 | - `totalSize`: Total file size in bytes 82 | - `bytesImported`: Amount of bytes imported so far 83 | 84 | ## License 85 | 86 | MIT 87 | -------------------------------------------------------------------------------- /example.js: -------------------------------------------------------------------------------- 1 | var hyperdrive = require('hyperdrive') 2 | var ram = require('random-access-memory') 3 | var hyperImport = require('.') 4 | 5 | var archive = hyperdrive(ram) 6 | 7 | var target = process.argv.slice(2)[0] 8 | 9 | var status = hyperImport(archive, target, { watch: true }, function (err) { 10 | if (err) throw err 11 | console.log('done') 12 | console.log('file count', status.fileCount) 13 | }) 14 | 15 | status.on('file imported', function (s) { 16 | console.log('file imported %s %s', s.path, s.mode) 17 | }) 18 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var pump = require('pump') 4 | var fs = require('fs') 5 | var join = require('path').join 6 | var relative = require('path').relative 7 | var basename = require('path').basename 8 | var EventEmitter = require('events').EventEmitter 9 | var chokidar = require('chokidar') 10 | var series = require('run-series') 11 | var match = require('anymatch') 12 | var through = require('through2') 13 | var isDuplicate = 
require('hyperdrive-duplicate') 14 | 15 | var noop = function () {} 16 | 17 | module.exports = function (archive, target, opts, done) { 18 | if (typeof opts === 'function') { 19 | done = opts 20 | opts = {} 21 | } 22 | opts = opts || {} 23 | var watch = opts.watch || opts.live 24 | 25 | var overwrite = opts.overwrite !== false 26 | var dryRun = opts.dryRun === true 27 | var compareFileContent = opts.compareFileContent === true 28 | function emitError (err) { 29 | if (err) status.emit('error', err) 30 | } 31 | done = done || emitError 32 | 33 | var basePath = (typeof opts.basePath === 'string') ? opts.basePath : '' 34 | var watcher 35 | var isWatching = false 36 | 37 | if (watch && archive.live) { 38 | watcher = chokidar.watch([target], { 39 | persistent: true, 40 | ignored: opts.ignore 41 | }) 42 | watcher.once('ready', function () { 43 | watcher.on('add', function (file, stat) { 44 | status.emit('file watch event', {path: file, mode: 'created'}) 45 | consume(file, stat) 46 | }) 47 | watcher.on('change', function (file, stat) { 48 | status.emit('file watch event', {path: file, mode: 'updated'}) 49 | consume(file, stat) 50 | }) 51 | watcher.on('unlink', noop) // TODO 52 | }) 53 | } 54 | 55 | var status = new EventEmitter() 56 | status.close = function () { watcher && watcher.close() } 57 | status.fileCount = 0 58 | status.totalSize = 0 59 | status.bytesImported = 0 60 | 61 | function consume (file, stat, cb) { 62 | cb = cb || emitError 63 | if (opts.ignore && match(opts.ignore, file)) return cb() 64 | if (stat) { 65 | onstat(stat) 66 | } else { 67 | fs.stat(file, function (err, stat) { 68 | if (err) return cb(err) 69 | onstat(stat) 70 | }) 71 | } 72 | 73 | function onstat (stat) { 74 | if (stat.isDirectory()) { 75 | consumeDir(file, stat, cb) 76 | } else { 77 | consumeFile(file, stat, cb) 78 | } 79 | } 80 | } 81 | 82 | function consumeFile (file, stat, cb) { 83 | cb = cb || emitError 84 | var hyperPath = file === target 85 | ? 
joinHyperPath(basePath, basename(file)) 86 | : joinHyperPath(basePath, relative(target, file)) 87 | 88 | archive.stat(hyperPath, function (err, st) { 89 | if (overwrite) return add(st) 90 | if (err && !st) return add() 91 | status.emit('file skipped', { path: file }) 92 | cb() 93 | }) 94 | 95 | function add (entry) { 96 | // update the stats according to whether this is the initial import 97 | if (!isWatching) { 98 | // initial import, just add 99 | status.fileCount++ 100 | status.totalSize += stat.size 101 | } else { 102 | if (entry) { 103 | // watch update to existing file, remove old and add new 104 | status.totalSize -= entry.size 105 | status.totalSize += stat.size 106 | } else { 107 | // watch addition, just add 108 | status.fileCount++ 109 | status.totalSize += stat.size 110 | } 111 | } 112 | if (!entry) { 113 | next('created') 114 | } else if (entry.size !== stat.size || entry.mtime !== stat.mtime.getTime()) { 115 | if (compareFileContent) { 116 | isDuplicate(archive, file, hyperPath, function (err, duplicate) { 117 | if (!err && duplicate) return skip() 118 | next('updated') 119 | }) 120 | } else { 121 | next('updated') 122 | } 123 | } else { 124 | skip() 125 | } 126 | 127 | function skip () { 128 | status.bytesImported += stat.size 129 | status.emit('file skipped', { path: file }) 130 | cb() 131 | } 132 | } 133 | 134 | function next (mode) { 135 | if (dryRun) { 136 | return pumpDone() 137 | } 138 | var rs = fs.createReadStream(file) 139 | var ws = archive.createWriteStream(hyperPath, {indexing: opts.indexing}) 140 | var increment = through(function (chunk, enc, cb) { 141 | status.bytesImported += chunk.length 142 | cb(null, chunk) 143 | }) 144 | 145 | pump(rs, increment, ws, pumpDone) 146 | function pumpDone (err) { 147 | if (err) return cb(err) 148 | status.emit('file imported', { 149 | path: file, 150 | mode: mode 151 | }) 152 | cb() 153 | } 154 | } 155 | } 156 | 157 | function consumeDir (file, stat, cb) { 158 | cb = cb || emitError 159 | var hyperPath 
= joinHyperPath(basePath, relative(target, file)) 160 | 161 | function next () { 162 | fs.readdir(file, function (err, _files) { 163 | if (err) return cb(err) 164 | series(_files.map(function (_file) { 165 | return function (cb2) { 166 | consume(join(file, _file), null, cb2) 167 | } 168 | }), cb) 169 | }) 170 | } 171 | 172 | if (dryRun) { 173 | next() 174 | } else { 175 | archive.stat(hyperPath, function (err, st) { 176 | if (!err && st) next() 177 | else archive.mkdir(hyperPath, next) 178 | }) 179 | } 180 | } 181 | 182 | consume(target, null, function (err) { 183 | isWatching = true 184 | done(err) 185 | }) 186 | 187 | return status 188 | } 189 | 190 | function normalizeEntryPath (path) { 191 | if (typeof path === 'string' && path.charAt(0) === '/') { 192 | return path.slice(1) 193 | } 194 | return path 195 | } 196 | 197 | function joinHyperPath (base, path) { 198 | path = join(base, path) 199 | if (path === '.') { 200 | // '.' is returned when base is '' and path is '': aka, the root directory 201 | // in hyperdrive, root should be '' or '/', so we replace it with this special case 202 | return '' 203 | } 204 | return normalizeEntryPath(path) 205 | } 206 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hyperdrive-import-files", 3 | "version": "4.0.0", 4 | "description": "Import some files and folders into a Hyperdrive", 5 | "license": "MIT", 6 | "repository": "juliangruber/hyperdrive-import-files", 7 | "devDependencies": { 8 | "hyperdrive": "^9.0.0", 9 | "random-access-file": "^1.3.0", 10 | "random-access-memory": "^2.3.0", 11 | "rimraf": "^2.6.1", 12 | "standard": "^10.0.1", 13 | "tape": "^4.6.0" 14 | }, 15 | "scripts": { 16 | "test": "tape test && standard" 17 | }, 18 | "dependencies": { 19 | "anymatch": "^1.3.0", 20 | "chokidar": "^1.6.0", 21 | "hyperdrive-duplicate": "^3.0.0", 22 | "pump": "^1.0.1", 23 | 
"run-series": "^1.1.4", 24 | "through2": "^2.0.3" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /test/fixture/a/b/c/d.txt: -------------------------------------------------------------------------------- 1 | foo 2 | -------------------------------------------------------------------------------- /test/fixture/a/b/c/e.txt: -------------------------------------------------------------------------------- 1 | beep 2 | -------------------------------------------------------------------------------- /test/fixture/ignore/.dat/beep.txt: -------------------------------------------------------------------------------- 1 | boop -------------------------------------------------------------------------------- /test/fixture/ignore/.dat/hi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dat-ecosystem-archive/hyperdrive-import-files/24297e34f5a8e920fd3d7d888b65d90813c81c89/test/fixture/ignore/.dat/hi -------------------------------------------------------------------------------- /test/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | var test = require('tape') 4 | var hyperdrive = require('hyperdrive') 5 | var hyperImport = require('..') 6 | var fs = require('fs') 7 | var path = require('path') 8 | // var raf = require('random-access-file') 9 | var os = require('os') 10 | var rimraf = require('rimraf') 11 | 12 | function tmpdir () { 13 | return fs.mkdtempSync(os.tmpdir() + path.sep + 'pauls-dat-api-test-') 14 | } 15 | 16 | function sort (entries) { 17 | return entries.sort(function (a, b) { 18 | return a.localeCompare(b) 19 | }) 20 | } 21 | 22 | test('cleanup', function (t) { 23 | var base = path.join(__dirname, '/fixture/a/b/c') 24 | fs.readdirSync(base) 25 | .filter(function (file) { 26 | return ['d.txt', 'e.txt'].indexOf(file) === -1 27 | }) 28 | .forEach(function (file) { 29 | 
rimraf.sync(path.join(base, file)) 30 | }) 31 | t.end() 32 | }) 33 | 34 | test('import directory', function (t) { 35 | t.plan(8) 36 | 37 | var archive = hyperdrive(tmpdir()) 38 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), function (err) { 39 | t.error(err) 40 | 41 | archive.readdir('/', function (err, entries) { 42 | t.error(err) 43 | entries = sort(entries) 44 | t.equal(entries.length, 2) 45 | t.equal(entries[0], 'd.txt') 46 | t.equal(entries[1], 'e.txt') 47 | t.equal(status.fileCount, 2) 48 | t.equal(status.totalSize, 9) 49 | t.equal(status.bytesImported, 9) 50 | }) 51 | }) 52 | }) 53 | 54 | test('import file', function (t) { 55 | t.plan(7) 56 | 57 | var archive = hyperdrive(tmpdir()) 58 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/d.txt'), function (err) { 59 | t.error(err) 60 | 61 | archive.readdir('/', function (err, entries) { 62 | t.error(err) 63 | entries = sort(entries) 64 | t.equal(entries.length, 1) 65 | t.equal(entries[0], 'd.txt') 66 | t.equal(status.fileCount, 1) 67 | t.equal(status.totalSize, 4) 68 | t.equal(status.bytesImported, 4) 69 | }) 70 | }) 71 | }) 72 | 73 | test('resume', function (t) { 74 | t.plan(15) 75 | 76 | var archive = hyperdrive(tmpdir()) 77 | archive.ready(function () { 78 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), function (err) { 79 | t.error(err) 80 | archive.createWriteStream('d.txt').on('finish', function () { 81 | status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), function (err) { 82 | t.error(err) 83 | t.equal(status.fileCount, 2) 84 | t.equal(status.totalSize, 9) 85 | t.equal(status.bytesImported, 9) 86 | }) 87 | status.on('file imported', function (file) { 88 | t.equal(file.mode, 'updated', 'updated') 89 | }) 90 | status.on('file skipped', function (file) { 91 | t.equal(file.path, path.join(__dirname, '/fixture/a/b/c/e.txt')) 92 | }) 93 | }).end('bleerg') 94 | }) 95 | 96 | var i = 0 97 | status.on('file 
imported', function (file) { 98 | t.equal(file.mode, 'created', 'created') 99 | if (!i++) { 100 | t.equal(status.fileCount, 1) 101 | t.equal(status.totalSize, 4) 102 | t.equal(status.bytesImported, 4) 103 | } else { 104 | t.equal(status.fileCount, 2) 105 | t.equal(status.totalSize, 9) 106 | t.equal(status.bytesImported, 9) 107 | } 108 | }) 109 | }) 110 | }) 111 | 112 | /* 113 | TODO - disabled until hyperdrive supports {latest: true} 114 | test('resume with raf', function (t) { 115 | t.plan(15) 116 | 117 | var dir = path.join(__dirname, '/fixture/a/b/c/') 118 | var archive = hyperdrive(dir) 119 | archive.ready(function() { 120 | var status = hyperImport(archive, dir, { 121 | resume: true 122 | }, function (err) { 123 | t.error(err) 124 | fs.writeFile(path.join(__dirname, '/fixture/a/b/c/d.txt'), 'foo\n', function () { 125 | status = hyperImport(archive, dir, { 126 | resume: true 127 | }, function (err) { 128 | t.error(err) 129 | t.equal(status.fileCount, 2) 130 | t.equal(status.totalSize, 9) 131 | t.equal(status.bytesImported, 9) 132 | }) 133 | status.on('file imported', function (file) { 134 | if (file.path !== path.join(__dirname, '/fixture/a/b/c/d.txt')) t.fail('wrong file') 135 | t.equal(file.mode, 'updated', 'updated') 136 | }) 137 | status.on('file skipped', function (file) { 138 | t.equal(file.path, path.join(__dirname, '/fixture/a/b/c/e.txt')) 139 | }) 140 | }) 141 | }) 142 | 143 | var i = 0 144 | status.on('file imported', function (file) { 145 | t.equal(file.mode, 'created', 'created') 146 | if (!i++) { 147 | t.equal(status.fileCount, 1) 148 | t.equal(status.totalSize, 4) 149 | t.equal(status.bytesImported, 4) 150 | } else { 151 | t.equal(status.fileCount, 2) 152 | t.equal(status.totalSize, 9) 153 | t.equal(status.bytesImported, 9) 154 | } 155 | }) 156 | }) 157 | }) */ 158 | 159 | if (!process.env.TRAVIS) { 160 | test('resume & live', function (t) { 161 | t.plan(13) 162 | 163 | var archive = hyperdrive(tmpdir()) 164 | archive.ready(function () { 165 | var 
status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), { 166 | live: true 167 | }, function (err) { 168 | t.error(err, 'initial import') 169 | var tmp = path.join(__dirname, '/fixture/a/b/c/', Math.random().toString(16).slice(2)) 170 | 171 | status.once('file imported', function (file) { 172 | t.equal(file.mode, 'created', 'created') 173 | t.equal(status.fileCount, 3, 'file count') 174 | t.equal(status.totalSize, 11, 'total size') 175 | t.equal(status.bytesImported, 11, 'bytes imported') 176 | 177 | status.once('file watch event', function (file) { 178 | t.equal(file.mode, 'updated', 'updated') 179 | }) 180 | 181 | status.once('file imported', function (file) { 182 | t.equal(file.mode, 'updated', 'updated') 183 | t.equal(status.fileCount, 3, 'file count') 184 | t.equal(status.totalSize, 12, 'total size') 185 | t.equal(status.bytesImported, 14, 'bytes imported') 186 | status.close() 187 | fs.unlink(tmp, function (err) { t.error(err, 'file removed') }) 188 | }) 189 | 190 | fs.writeFile(tmp, 'you', function (err) { t.error(err, 'file updated') }) 191 | }) 192 | fs.writeFile(tmp, 'yo', function (err) { t.error(err, 'file created') }) 193 | }) 194 | }) 195 | }) 196 | } 197 | 198 | test('optional callback', function (t) { 199 | t.plan(1) 200 | 201 | var archive = hyperdrive(tmpdir()) 202 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/')) 203 | status.once('file imported', function () { t.ok(true) }) 204 | }) 205 | 206 | test('ignore', function (t) { 207 | var archive = hyperdrive(tmpdir()) 208 | var status = hyperImport(archive, path.join(__dirname, '/fixture/ignore'), { 209 | ignore: /\/\.dat\//, 210 | live: true 211 | }, function (err) { 212 | t.error(err, 'no error importing') 213 | fs.writeFile(path.join(__dirname, '/fixture/ignore/.dat/beep.txt'), 'boop', function (err) { 214 | t.error(err, 'no error writing file') 215 | t.end() 216 | status.close() 217 | }) 218 | }) 219 | status.on('file imported', function () { t.ok(false) 
}) 220 | }) 221 | 222 | test('duplicate directory', function (t) { 223 | var archive = hyperdrive(tmpdir()) 224 | var directory = path.join(__dirname, '/fixture/a/b/c/') 225 | 226 | hyperImport(archive, directory, function (err) { 227 | t.error(err) 228 | hyperImport(archive, directory, { 229 | resume: true 230 | }, function (err) { 231 | t.error(err) 232 | archive.readdir('/', function (err, entries) { 233 | t.error(err) 234 | 235 | entries = sort(entries) 236 | t.equal(entries.length, 2) 237 | t.equal(entries[0], 'd.txt') 238 | t.equal(entries[1], 'e.txt') 239 | t.end() 240 | }) 241 | }) 242 | }) 243 | }) 244 | 245 | test('duplicate subdirectory', function (t) { 246 | var archive = hyperdrive(tmpdir()) 247 | var directory = path.join(__dirname, '/fixture/a/b/') 248 | 249 | hyperImport(archive, directory, function (err) { 250 | t.error(err) 251 | fs.utimes(path.join(directory, 'c'), 0, 0, function () { 252 | hyperImport(archive, directory, { 253 | resume: true 254 | }, function (err) { 255 | t.error(err) 256 | archive.readdir('/c', function (err, entries) { 257 | t.error(err) 258 | 259 | entries = sort(entries) 260 | t.equal(entries[0], 'd.txt') 261 | t.equal(entries[1], 'e.txt') 262 | t.end() 263 | }) 264 | }) 265 | }) 266 | }) 267 | }) 268 | 269 | test('import directory with basePath', function (t) { 270 | t.plan(8) 271 | 272 | var archive = hyperdrive(tmpdir()) 273 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), { basePath: 'foo/bar' }, function (err) { 274 | t.error(err) 275 | 276 | archive.readdir('/foo/bar', function (err, entries) { 277 | t.error(err) 278 | entries = sort(entries) 279 | t.equal(entries.length, 2) 280 | t.equal(entries[0], 'd.txt') 281 | t.equal(entries[1], 'e.txt') 282 | t.equal(status.fileCount, 2) 283 | t.equal(status.totalSize, 9) 284 | t.equal(status.bytesImported, 9) 285 | }) 286 | }) 287 | }) 288 | 289 | test('import file with basePath', function (t) { 290 | t.plan(7) 291 | 292 | var archive = 
hyperdrive(tmpdir()) 293 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/d.txt'), { basePath: 'foo/bar' }, function (err) { 294 | t.error(err) 295 | 296 | archive.readdir('/foo/bar', function (err, entries) { 297 | t.error(err) 298 | entries = sort(entries) 299 | t.equal(entries.length, 1) 300 | t.equal(entries[0], 'd.txt') 301 | t.equal(status.fileCount, 1) 302 | t.equal(status.totalSize, 4) 303 | t.equal(status.bytesImported, 4) 304 | }) 305 | }) 306 | }) 307 | 308 | test('dry run', function (t) { 309 | var archive = hyperdrive(tmpdir()) 310 | var filesAdded = [] 311 | var status = hyperImport(archive, path.join(__dirname, '/fixture/a/b/c/'), { dryRun: true }, function (err) { 312 | t.error(err) 313 | 314 | archive.readdir('/', function (err, entries) { 315 | t.error(err) 316 | t.equal(entries.length, 0) 317 | t.equal(filesAdded.length, 2) 318 | t.equal(status.fileCount, 2) 319 | t.equal(status.totalSize, 9) 320 | t.end() 321 | }) 322 | }) 323 | status.on('file imported', function (e) { 324 | filesAdded.push(e) 325 | }) 326 | }) 327 | 328 | test('compareFileContent', function (t) { 329 | t.plan(13) 330 | 331 | var dir = path.join(__dirname, '/fixture/a/b/c/') 332 | var archive = hyperdrive(tmpdir()) 333 | var status = hyperImport(archive, dir, function (err) { 334 | t.error(err) 335 | 336 | var dPath = path.join(__dirname, '/fixture/a/b/c/d.txt') 337 | fs.writeFileSync(dPath, fs.readFileSync(dPath)) 338 | status = hyperImport(archive, dir, { 339 | resume: true, 340 | compareFileContent: true 341 | }, function (err) { 342 | t.error(err) 343 | t.equal(status.fileCount, 2) 344 | t.equal(status.totalSize, 9) 345 | t.equal(status.bytesImported, 9) 346 | }) 347 | 348 | status.on('file imported', function (file) { 349 | t.fail('should not occur') 350 | }) 351 | }) 352 | 353 | var i = 0 354 | status.on('file imported', function (file) { 355 | t.equal(file.mode, 'created', 'created') 356 | if (!i++) { 357 | t.equal(status.fileCount, 1) 358 | 
t.equal(status.totalSize, 4) 359 | t.equal(status.bytesImported, 4) 360 | } else { 361 | t.equal(status.fileCount, 2) 362 | t.equal(status.totalSize, 9) 363 | t.equal(status.bytesImported, 9) 364 | } 365 | }) 366 | }) 367 | 368 | // NOTE: this test must be last 369 | test('chokidar bug', function (t) { 370 | // chokidar sometimes keeps the process open 371 | t.end() 372 | process.exit() 373 | }) 374 | --------------------------------------------------------------------------------