├── .gitignore
├── .npmignore
├── .travis.yml
├── README.md
├── ex.js
├── index.js
├── lib
│   ├── cached-value.js
│   ├── integrity.js
│   ├── tar.js
│   └── util.js
├── multi.js
├── package.json
├── single.js
└── test
    ├── append-multi.js
    ├── append.js
    ├── integrity
    │   ├── fixtures
    │   │   ├── good-no-index.tar
    │   │   ├── good.tar
    │   │   ├── no-trailer-no-index.tar
    │   │   ├── no-trailer.tar
    │   │   ├── partial-final-file-no-index.tar
    │   │   ├── partial-index-content.tar
    │   │   ├── partial-index-header.tar
    │   │   ├── partial-index-no-content.tar
    │   │   ├── partial-trailer-no-index.tar
    │   │   └── partial-trailer.tar
    │   └── repair.js
    ├── list-multi.js
    ├── list.js
    ├── pop-multi.js
    ├── pop.js
    ├── read-multi.js
    ├── read.js
    ├── userdata-multi.js
    ├── userdata.js
    └── util.js

/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | .travis.yml
2 | test/
3 | ex.js
4 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - '10.13.0'
4 | - '12.4.0'
5 | - '12.14.1'
6 | os:
7 | - windows
8 | - osx
9 | - linux
10 | notifications:
11 | email: false
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # indexed-tarball
2 |
3 | > a tarball with constant-time reads and modifications
4 |
5 | A small extension to the [tar archive format](https://en.wikipedia.org/wiki/Tar_%28computing%29) to support some additional features:
6 |
7 | 1. Constant time random access reads
8 | 2. Constant time writes (appends)
9 | 3. Constant time deletions (truncation)
10 | 4. Multi-file support
11 |
12 | This is done by generating a special "index file" that is always appended to the end of the tar archive, which maps file paths within the archive to byte offsets.
13 |
14 | ## Compatibility
15 |
16 | Tarballs created with this module are still plain old tar files, and will work with existing utilities.
17 |
18 | ## Usage
19 |
20 | ```js
21 | var Tarball = require('indexed-tarball')
22 | var through = require('through2')
23 |
24 | var tarball = new Tarball('file.tar')
25 |
26 | var t = through()
27 | var ws = tarball.append('hello.txt', done)
28 |
29 | t.pipe(ws)
30 | t.end('hello world')
31 |
32 | function done () {
33 |   tarball.list(function (err, files) {
34 |     console.log('files', files)
35 |
36 |     tarball.read('hello.txt')
37 |       .on('data', function (buf) {
38 |         console.log('data', buf.toString())
39 |       })
40 |   })
41 | }
42 | ```
43 |
44 | outputs
45 |
46 | ```
47 | files [ 'hello.txt' ]
48 | data hello world
49 | ```
50 |
51 | ## API
52 |
53 | ```js
54 | var Tarball = require('indexed-tarball')
55 | ```
56 |
57 | ## var tarball = new Tarball('/path/to/file.tar'[, opts])
58 |
59 | Creates or opens an indexed tarball. These are compatible with regular tarballs, so no special extension or archiving software is needed.
60 |
61 | If `opts.multifile` is set, further tarballs (`file.tar.1`, `file.tar.2`, and so on) will be searched for and opened as well. If `opts.maxFileSize` is also set, it is used to decide when to "overflow" into a new tarball; it defaults to 4 gigabytes. See the "Multi-file support" section below for more details.
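For example (the values here are illustrative, not taken from the original docs), opening a multi-file set that overflows at roughly 1 gigabyte per tarball might look like this:

```js
var Tarball = require('indexed-tarball')

var tarball = new Tarball('/path/to/file.tar', {
  multifile: true,                 // also open/create file.tar.1, file.tar.2, ...
  maxFileSize: 1024 * 1024 * 1024  // overflow threshold per tarball (default ~4 GB)
})
```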
62 |
63 | ## var ws = tarball.append(filepath[, size], cb)
64 |
65 | Returns a writable stream that will be appended to the end of the tarball.
66 |
67 | A `size` of the file may be included, if it is already known. This is used in
68 | the multi-tarball case to anticipate when the file will become too large for
69 | the filesystem and split it into a new tarball *before* writing. If omitted and
70 | the appended file goes over the maximum file size for the filesystem, the
71 | operation will fail and may result in corruption.
72 |
73 | `cb` is called when the write has been completely persisted to disk.
74 |
75 | ## var rs = tarball.read(filepath)
76 |
77 | Returns a readable stream of the data within the archive named by `filepath`. If
78 | the file doesn't exist in the archive, the stream `rs` will emit an error `err`
79 | with `err.notFound` set to `true`.
80 |
81 | ## tarball.pop([filepath, ]cb)
82 |
83 | Truncates the tarball such that the last file of the archive is dropped. `cb` is called once the change is persisted to disk.
84 |
85 | A `filepath` can optionally be passed in, which will cause an error to be returned if the to-be-popped file does not match `filepath`, as a sanity check.
86 |
91 | ## tarball.list(cb)
92 |
93 | Calls `cb` with a list of the paths and metadata (byte offsets) of the files within the archive.
94 |
95 | ## tarball.userdata([data, ]cb)
96 |
97 | Retrieves or sets the current userdata for the tarball.
98 |
99 | indexed-tarball already stores an index in the tarball itself, so you can store arbitrary user data here as well if you'd like.
100 |
101 | If `data` is given, the object is JSON encoded and stored in the tarball as well. If only `cb` is given, the current userdata will be retrieved.
102 |
103 | ## Install
104 |
105 | With [npm](https://npmjs.org/) installed, run
106 |
107 | ```
108 | $ npm install indexed-tarball
109 | ```
110 |
111 | ## Multi-file support
112 |
113 | ### How does it work?
114 |
115 | Once a file (e.g. `file.tar`) reaches `opts.maxFileSize` or 4 gigabytes (the default), the next file appended will be written to `file.tar.1`. Once that fills, `file.tar.2`, and so forth. Each tarball has its own index file; these indexes are unioned (think set theory) so that all files across all tarballs can be read and listed without any file scanning.
116 |
117 | ### Caveats?
118 |
119 | If there are multiple files with the same name across the multiple tarballs, the file that comes *latest* in the tarball set wins; the earlier one(s) are ignored. (e.g. if `foo.tar.3` and `foo.tar.7` both contain a file with path `bar/bax/quux.txt`, the one from `foo.tar.7` will always be returned & used.)
120 |
121 | Also, currently new appends are always made to the *final* tarball in the set. So if you wrote a lot of files and ended up with `file.tar` and `file.tar.1`, and then `pop`ped all of the files until none were left, future `append`s would go to `file.tar.1`, not `file.tar`. Fixing this [is a TODO](https://github.com/noffle/indexed-tarball/issues/1).
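As a rough end-to-end sketch of the overflow behaviour (the tiny `maxFileSize` and file names below are illustrative, chosen to mirror the test suite rather than any real configuration):

```js
var Tarball = require('indexed-tarball')

// A 3 KiB cap means the second append no longer fits in file.tar
// and is written to file.tar.1 instead.
var tarball = new Tarball('file.tar', { multifile: true, maxFileSize: 3072 })

tarball.append('a.txt', 5, function (err) {
  if (err) throw err
  tarball.append('b.txt', 5, function (err) {
    if (err) throw err
    // list() unions the indexes of file.tar and file.tar.1
    tarball.list(function (err, files) {
      if (err) throw err
      console.log(files) // e.g. [ 'a.txt', 'b.txt' ]
    })
  }).end('data2')
}).end('data1')
```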
122 | 123 | ## License 124 | 125 | MIT 126 | 127 | -------------------------------------------------------------------------------- /ex.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('.') 2 | var through = require('through2') 3 | var pump = require('pump') 4 | 5 | var tarball = new Tarball('file.tar') 6 | 7 | var t = through() 8 | t.end('hello world') 9 | 10 | pump(t, tarball.append('hello.txt', 11, function (err) { 11 | if (err) throw err 12 | tarball.list(function (err, files) { 13 | if (err) throw err 14 | console.log('files', files) 15 | 16 | tarball.read('hello.txt') 17 | .on('data', function (buf) { 18 | console.log('data', buf.toString()) 19 | }) 20 | }) 21 | })) 22 | 23 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | var SingleTarball = require('./single') 2 | var MultiTarball = require('./multi') 3 | 4 | module.exports = IndexedTarball 5 | 6 | function IndexedTarball (filepath, opts) { 7 | if (!(this instanceof IndexedTarball)) return new IndexedTarball(filepath, opts) 8 | opts = opts || {} 9 | 10 | var impl 11 | if (opts.multifile) impl = new MultiTarball(filepath, opts) 12 | else impl = new SingleTarball(filepath, opts) 13 | 14 | this.impl = impl 15 | } 16 | 17 | IndexedTarball.prototype.append = function (filepath, size, cb) { 18 | return this.impl.append(filepath, size, cb) 19 | } 20 | 21 | IndexedTarball.prototype.list = function (cb) { 22 | this.impl.list(cb) 23 | } 24 | 25 | IndexedTarball.prototype.read = function (filepath) { 26 | return this.impl.read(filepath) 27 | } 28 | 29 | IndexedTarball.prototype.pop = function (name, cb) { 30 | this.impl.pop(name, cb) 31 | } 32 | 33 | IndexedTarball.prototype.userdata = function (data, cb) { 34 | this.impl.userdata(data, cb) 35 | } 36 | -------------------------------------------------------------------------------- /lib/cached-value.js: -------------------------------------------------------------------------------- 1 | var rwlock = require('rwlock') 2 | 3 | module.exports = function (fetch) { 4 | var value = undefined 5 | var error = undefined 6 | var lock = false 7 | var waiting = [] 8 | 9 | var output = {} 10 | 11 | output.value = function (cb) { 12 | if (value) return process.nextTick(cb, error, value) 13 | if (lock) return waiting.push(cb) 14 | lock = true 15 | 16 | fetch(function (err, res) { 17 | if (err) { error = err; value = undefined } else { value = res; error = undefined } 18 | lock = false 19 | waiting.forEach(function (f) { f(error, value) }) 20 | waiting = [] 21 | cb(error, value) 22 | }) 23 | } 24 | 25 | output.invalidate = function () { 26 | value = undefined 27 | error = undefined 28 | } 29 | 30 | output.refresh = function (cb) { 31 | this.invalidate() 32 | this.value(cb || noop) 33 | } 34 | 35 | return output 36 | } 37 | 38 | function noop () {} 39 | -------------------------------------------------------------------------------- /lib/integrity.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var tar = require('./tar') 3 | var tarHeader = require('tar-stream/headers') 4 | 5 | var NUL_TRAILER = Buffer.alloc(1024).fill(0) 6 | 7 | module.exports = { 8 | repair: repairTarball 9 | } 10 | 11 | // scan backwards from the end of the tarball 12 | function repairTarball (filepath, cb) { 13 | var fileLen 14 | var fileLen512 15 | 16 | fs.stat(filepath, function 
(err, stat) { 17 | if (err) return cb(err) 18 | fileLen = stat.size 19 | fileLen512 = fileLen - (stat.size % 512) 20 | if (fileLen !== fileLen512) { 21 | fs.truncate(filepath, fileLen512, function (err) { 22 | if (err) return cb(err) 23 | checkFinalFile() 24 | }) 25 | } else { 26 | checkFinalFile() 27 | } 28 | }) 29 | 30 | function checkFinalFile () { 31 | fs.open(filepath, 'r+', function (err, fd) { 32 | if (err) return cb(err) 33 | tar.readFinalFile(fd, fileLen512 + 1024, function (err, buf, offset, name) { 34 | // Critical failures 35 | if (err && err instanceof Error) return endWithClose(fd, err) 36 | if (err && err.noFiles) return endWithClose(fd, err) 37 | 38 | // Last file is corrupted index 39 | if (err && err.malformed && name === '___index.json') { 40 | regen(fd, offset, function (err) { 41 | if (err) cb(err) 42 | else cb(null, {state: 'repaired', regenIndex: true, dataloss: false}) 43 | }) 44 | return 45 | } 46 | 47 | if (name !== '___index.json') { 48 | // Do full scan + regen index! 49 | fullScanRepair(fd, fileLen512, function (err, dataloss) { 50 | if (err) cb(err) 51 | else cb(null, {state: 'repaired', regenIndex: true, dataloss: dataloss}) 52 | }) 53 | return 54 | } 55 | 56 | // Check that full file + padding is present 57 | var endOfIndex = offset + buf.length + (512 - buf.length % 512) + 512 * 3 58 | if (endOfIndex > fileLen512) { 59 | // index is corrupt; regen 60 | regen(fd, offset, function (err) { 61 | if (err) return cb(err) 62 | else cb(null, {state: 'repaired', regenIndex: true, dataloss: false}) 63 | }) 64 | return 65 | } 66 | 67 | try { 68 | // TODO(noffle): check actual json content too! 69 | JSON.parse(buf.toString()) 70 | writeTrailer(fd, endOfIndex, function (err, wasNeeded) { 71 | if (err) return cb(err) 72 | if (!wasNeeded) cb(null, {state: 'good'}) 73 | else cb(null, {state: 'repaired', regenIndex: false, dataloss: false}) 74 | }) 75 | } catch (e) { 76 | regen(fd, offset, function (err) { 77 | if (err) cb(err) 78 | else cb(null, {state: 'repaired', regenIndex: true, dataloss: false}) 79 | }) 80 | } 81 | }) 82 | }) 83 | } 84 | 85 | function regen (fd, offset, cb) { 86 | fs.ftruncate(fd, offset, function (err) { 87 | if (err) return cb(err) 88 | buildIndex(fd, offset, function (err, safeLength, index) { 89 | if (err) return cb(err) 90 | writeIndex(fd, offset, index, function (err, len) { 91 | if (err) return cb(err) 92 | writeTrailer(fd, offset + len, cb) 93 | }) 94 | }) 95 | }) 96 | } 97 | 98 | function writeTrailer (fd, offset, cb) { 99 | // TODO(noffle): what if offset + 1024 < fileLen512? 100 | var trailer = Buffer.alloc(1024) 101 | fs.read(fd, trailer, 0, 1024, offset, function (err, bytesRead) { 102 | if (err) return cb(err) 103 | // what if we do a partial read but the rest of the bytes are zerod already!? 
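// A well-formed tar archive ends with two 512-byte blocks of NULs; if a full,
// already-zeroed trailer is present we leave the file alone, otherwise the
// trailer is (re)written at `offset` below.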
104 | if (bytesRead === 1024 && trailer.equals(NUL_TRAILER)) return cb() 105 | fs.write(fd, NUL_TRAILER, 0, 1024, offset, function (err, bytesWritten) { 106 | if (err) cb(err) 107 | else if (bytesWritten < 1024) cb(new Error('failed to fully write NUL trailer')) 108 | else cb() 109 | }) 110 | }) 111 | } 112 | 113 | function endWithClose (fd, err, res) { 114 | fs.close(fd, function () { 115 | cb(err, res) 116 | }) 117 | } 118 | } 119 | 120 | function fullScanRepair (fd, fileLen, cb) { 121 | buildIndex(fd, fileLen, function (err, safeLength, index) { 122 | if (err) return cb(err) 123 | if (safeLength < fileLen) { 124 | fs.ftruncate(fd, safeLength, function (err) { 125 | if (err) return cb(err) 126 | writeIndex(fd, safeLength, index, cb) 127 | }) 128 | } else { 129 | writeIndex(fd, safeLength, index, cb) 130 | } 131 | }) 132 | } 133 | 134 | function buildIndex (fd, fileLen, cb) { 135 | var index = {} 136 | 137 | ;(function next (pos) { 138 | checkFile(fd, pos, function (err, header) { 139 | if (err) return cb(err) 140 | if (header === null) return cb(null, fileLen, index) 141 | var leftover = (512 - header.size % 512) % 512 142 | var newPos = pos + header.size + leftover + 512 143 | 144 | // only add to the index if the file is long enough! (it might be truncated) 145 | if (newPos >= fileLen) return cb(null, pos, index) 146 | 147 | index[header.name] = { offset: pos, size: header.size } 148 | next(newPos) 149 | }) 150 | })(0) 151 | } 152 | 153 | function writeIndex (fd, offset, index, cb) { 154 | var meta = { index: index } 155 | try { 156 | var json = Buffer.from(JSON.stringify(meta), 'utf8') 157 | var header = tarHeader.encode({ 158 | name: '___index.json', 159 | type: 'file', 160 | mode: parseInt('644', 8), 161 | uid: 0, 162 | gid: 0, 163 | mtime: new Date(), 164 | size: json.length 165 | }) 166 | var leftover = json.length % 512 === 0 ? 0 : 512 - json.length % 512 167 | var padding = Buffer.alloc(leftover).fill(0) 168 | var finalBuf = Buffer.concat([header, json, padding]) 169 | fs.write(fd, finalBuf, 0, finalBuf.length, offset, function (err, bytesWritten) { 170 | if (err) cb(err) 171 | else if (bytesWritten < finalBuf.length) cb(new Error('failed to fully write index')) 172 | else cb(null, bytesWritten) 173 | }) 174 | } catch (e) { 175 | cb(e) 176 | } 177 | } 178 | 179 | function checkFile (fd, position, cb) { 180 | var sector = Buffer.alloc(512) 181 | fs.read(fd, sector, 0, 512, position, function (err, bytesRead) { 182 | if (err) return cb(err) 183 | if (bytesRead < 512) return cb(new Error('read < 512 bytes')) 184 | try { 185 | var header = tarHeader.decode(sector) 186 | cb(null, header) 187 | } catch (e) { 188 | cb(e) 189 | } 190 | }) 191 | } 192 | -------------------------------------------------------------------------------- /lib/tar.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var CustomError = require('custom-error-class') 3 | 4 | module.exports = { 5 | readFinalFile: readFinalFile 6 | } 7 | 8 | class NoFilesError extends CustomError { 9 | constructor (message) { 10 | super(message) 11 | this.noFiles = true 12 | } 13 | } 14 | 15 | class MalformedError extends CustomError { 16 | constructor (message) { 17 | super(message) 18 | this.malformed = true 19 | } 20 | } 21 | 22 | // Scans a tar archive from the end backwards until it finds the last entry. 23 | // Returns the raw buffer of the last file, and its byte offset where it begins. 
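// Calls back as cb(err, fileBuf, offset, fileName): err is null on success,
// has `noFiles` set when no entry is found, and has `malformed` set when the
// final entry's size field can't be parsed (offset and fileName still given).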
24 | function readFinalFile (fd, size, cb) { 25 | var header = Buffer.alloc(512) 26 | var ustarExpected = Buffer.from('7573746172', 'hex') // "ustar" 27 | 28 | next(size - 512 * 3) 29 | 30 | function next (offset) { 31 | if (offset < 0) return cb(new NoFilesError('No files found.')) 32 | 33 | // read file header 34 | fs.read(fd, header, 0, 512, offset, function (err, size, buf) { 35 | if (err) return cb(err) 36 | // look for 'ustar00' pattern at the expected offset 37 | if (ustarExpected.equals(buf.slice(257, 257 + 5))) { 38 | // get the final file's size (octal) 39 | var fileName = unNulTerminateString(buf.slice(0, 100)) 40 | var fileSize = parseInt(buf.slice(124, 124 + 12).toString(), 8) 41 | if (isNaN(fileSize)) return cb(new MalformedError('Malformed file'), null, offset, fileName) 42 | var fileBuf = Buffer.alloc(fileSize) 43 | fs.read(fd, fileBuf, 0, fileSize, offset + 512, function (err, readSize) { 44 | if (err) return cb(err) 45 | // if (fileSize !== readSize) console.error('WARNING: read size !== expected size (' + readSize + ' vs ' + fileSize + ')') 46 | cb(null, fileBuf, offset, fileName) 47 | }) 48 | } else { 49 | next(offset - 512) 50 | } 51 | }) 52 | } 53 | } 54 | 55 | function unNulTerminateString (buf) { 56 | for (var i = 0; i < buf.length; i++) { 57 | if (buf.readUInt8(i) === 0) return buf.slice(0, i).toString() 58 | } 59 | return buf.toString() 60 | } 61 | -------------------------------------------------------------------------------- /lib/util.js: -------------------------------------------------------------------------------- 1 | var Readable = require('stream').Readable 2 | 3 | function fromString (str) { 4 | var data = new Readable() 5 | data._read = function (size) { 6 | if (str.length <= 0) return this.push(null) 7 | var push = str.slice(0, size) 8 | if (this.push(push)) str = str.slice(size) 9 | } 10 | return data 11 | } 12 | 13 | function fromBuffer (buf) { 14 | var data = new Readable() 15 | data._read = function (size) { 16 | if (buf.length <= 0) return this.push(null) 17 | var push = buf.slice(0, size) 18 | if (this.push(push)) buf = buf.slice(size) 19 | } 20 | return data 21 | } 22 | 23 | module.exports = { 24 | fromString: fromString, 25 | fromBuffer: fromBuffer 26 | } 27 | -------------------------------------------------------------------------------- /multi.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var path = require('path') 3 | var pump = require('pump') 4 | var RWLock = require('rwlock') 5 | var through = require('through2') 6 | var readonly = require('read-only-stream') 7 | var IndexedTarball = require('./single') 8 | 9 | module.exports = MultiTarball 10 | 11 | function MultiTarball (filepath, opts) { 12 | opts = opts || {} 13 | 14 | this.filepath = filepath 15 | this.tarballs = [] 16 | this.maxFileSize = opts.maxFileSize || (Math.pow(2, 32) - 1) 17 | 18 | this.lock = new RWLock() 19 | 20 | // Find all of the tarballs belonging to the set. 21 | this._setupTarballs() 22 | } 23 | 24 | MultiTarball.prototype.append = function (filepath, size, cb) { 25 | if (!cb && typeof size === 'function') { 26 | cb = size 27 | size = null 28 | } 29 | var self = this 30 | 31 | var t = through() 32 | 33 | this.lock.writeLock(function (release) { 34 | function done (err) { 35 | release() 36 | cb(err) 37 | } 38 | 39 | // Find the last tarball in the set. 
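// (Each appended entry costs one 512-byte header plus its data rounded up to
// a 512-byte block, which is what the overflow estimate below accounts for.)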
40 | self._getLastTarball(function (err, tarball, index) { 41 | if (err) return done(err) 42 | 43 | // Check if the new file to be added will cause the tarball to exceed its 44 | // maximum size. 45 | tarball.archive.value(function (err, archive) { 46 | if (err) return done(err) 47 | var totalAddedSize = 512 + roundUp(size, 512) 48 | 49 | // Overflow into a brand new tarball 50 | if (archive.fileSize + totalAddedSize > self.maxFileSize) { 51 | var newFilepath = self.filepath + '.' + (index + 1) 52 | tarball = new IndexedTarball(newFilepath) 53 | self.tarballs.push(tarball) 54 | } 55 | 56 | var ws = tarball.append(filepath, done) 57 | t.pipe(ws) 58 | }) 59 | }) 60 | }) 61 | 62 | return t 63 | } 64 | 65 | MultiTarball.prototype.list = function (cb) { 66 | var self = this 67 | this.lock.readLock(function (release) { 68 | var error 69 | var pending = self.tarballs.length 70 | var res = {} 71 | 72 | self.tarballs.forEach(list) 73 | 74 | function done (err) { 75 | error = err || error 76 | pending-- 77 | if (!pending) { 78 | release() 79 | cb(error, error ? undefined : Object.keys(res)) 80 | } 81 | } 82 | 83 | function list (tarball) { 84 | tarball.list(function (err, files) { 85 | if (err) return done(err) 86 | for (var idx in files) { 87 | res[files[idx]] = true 88 | } 89 | done() 90 | }) 91 | } 92 | }) 93 | } 94 | 95 | MultiTarball.prototype.read = function (filepath) { 96 | var self = this 97 | var stream = through() 98 | 99 | this.lock.readLock(function (release) { 100 | self._getFullIndex(function (err, index) { 101 | if (err) stream.emit('error', err) 102 | else if (!index[filepath]) stream.emit('error', new Error('not found')) 103 | else { 104 | pump(index[filepath].tarball.read(filepath), stream, function (err) { 105 | if (err) stream.emit('error', err) 106 | }) 107 | } 108 | release() 109 | }) 110 | }) 111 | 112 | return readonly(stream) 113 | } 114 | 115 | MultiTarball.prototype.pop = function (cb) { 116 | var self = this 117 | 118 | this.lock.writeLock(function (release) { 119 | function done (err) { 120 | release() 121 | cb(err) 122 | } 123 | 124 | self._getLastPopulatedTarball(function (err, tarball) { 125 | if (err) return done(err) 126 | else if (!tarball) return done() 127 | tarball.pop(done) 128 | }) 129 | }) 130 | } 131 | 132 | MultiTarball.prototype.userdata = function (data, cb) { 133 | if (data && !cb && typeof data === 'function') { 134 | cb = data 135 | data = null 136 | } 137 | var self = this 138 | 139 | this.lock.writeLock(function (release) { 140 | function done (err, res) { 141 | release() 142 | cb(err, res) 143 | } 144 | 145 | self._getLastPopulatedTarball(function (err, tarball) { 146 | if (err) return done(err) 147 | else if (!tarball) return done() 148 | tarball.userdata(data, done) 149 | }) 150 | }) 151 | } 152 | 153 | MultiTarball.prototype._setupTarballs = function (cb) { 154 | var self = this 155 | cb = cb || noop 156 | 157 | this.lock.writeLock(function (release) { 158 | function done (err) { 159 | release() 160 | cb(err) 161 | } 162 | 163 | var dir = path.dirname(self.filepath) 164 | fs.readdir(dir, function (err, contents) { 165 | if (err) return done(err) 166 | // TODO: test that the sort function is working & these are in order 167 | self.tarballs = contents 168 | .filter(function (name) { return parseIndexFromFilename(name) !== null }) 169 | .map(function (name) { return new IndexedTarball(name) }) 170 | .sort(tarballCmp) 171 | done() 172 | }) 173 | }) 174 | } 175 | 176 | // Returns the final tarball in the set. 
A new one will be created if it doesn't exist. 177 | MultiTarball.prototype._getLastTarball = function (cb) { 178 | cb = cb || noop 179 | var tarball 180 | 181 | if (!this.tarballs.length) { 182 | tarball = new IndexedTarball(this.filepath) 183 | this.tarballs.push(tarball) 184 | cb(null, tarball, 0) 185 | } else { 186 | tarball = this.tarballs[this.tarballs.length - 1] 187 | var index = parseIndexFromFilename(tarball.filepath) 188 | cb(null, tarball, index) 189 | } 190 | } 191 | 192 | // Returns the final *populated* tarball in the set. Returns 'null' if none are 193 | // populated. 194 | MultiTarball.prototype._getLastPopulatedTarball = function (cb) { 195 | cb = cb || noop 196 | var self = this 197 | var tarball 198 | 199 | ;(function checkPrevious (idx) { 200 | if (idx < 0) return cb(null) // all empty tarballs! 201 | tarball = self.tarballs[idx] 202 | tarball.archive.value(function (err, meta) { 203 | if (err) return cb(err) 204 | if (meta && meta.index && Object.keys(meta.index).length > 0) { 205 | var index = parseIndexFromFilename(tarball.filepath) 206 | cb(null, tarball, index) 207 | } else { 208 | checkPrevious(idx - 1) 209 | } 210 | }) 211 | })(this.tarballs.length - 1) 212 | } 213 | 214 | // Read the index of *all* tarballs to build a full index. 215 | MultiTarball.prototype._getFullIndex = function (cb) { 216 | var self = this 217 | var index = {} 218 | 219 | // Process tarballs *in order*. This is necessary to avoid earlier duplicate 220 | // filenames overwriting newer ones. 221 | ;(function next (idx) { 222 | if (idx >= self.tarballs.length) return cb(null, index) 223 | 224 | var tarball = self.tarballs[idx] 225 | tarball.archive.value(function (err, _meta) { 226 | if (err) return cb(err) 227 | var _index = _meta.index 228 | for (var key in _index) { 229 | index[key] = Object.assign({}, _index[key]) 230 | index[key].tarball = tarball 231 | } 232 | next(idx + 1) 233 | }) 234 | })(0) 235 | } 236 | 237 | function noop () {} 238 | 239 | // Compares two IndexedTarball instances; sorting them so that the biggest indexed tarball filename comes last. 
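// e.g. ['file.tar', 'file.tar.1', 'file.tar.2'] is already in sorted order,
// since parseIndexFromFilename maps them to 0, 1 and 2 respectively.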
240 | function tarballCmp (a, b) { 241 | var an = parseIndexFromFilename(a.filepath) 242 | var bn = parseIndexFromFilename(b.filepath) 243 | if (an === null || bn === null) return 0 244 | if (an < bn) return -1 245 | else if (an > bn) return 1 246 | else return 0 247 | } 248 | 249 | // "foobar.tar.2" => 2 250 | // "foobar.tar.3" => 3 251 | // "foobar.tar" => 0 252 | // "foobar.tar.hi" => null 253 | function parseIndexFromFilename (filename) { 254 | if (/\.tar\.[0-9]+$/.test(filename)) { 255 | try { 256 | return parseInt(filename.match(/\.tar\.([0-9]+)$/)[1]) 257 | } catch (e) { 258 | return null 259 | } 260 | } else { 261 | return 0 262 | } 263 | } 264 | 265 | function roundUp (n, nearest) { 266 | var more = 512 - (n % nearest) 267 | return n + more 268 | } 269 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "indexed-tarball", 3 | "description": "a tarball with constant-time reads and modifications", 4 | "author": "Stephen Whitmore ", 5 | "version": "3.1.7", 6 | "repository": { 7 | "url": "git://github.com/noffle/indexed-tarball.git" 8 | }, 9 | "homepage": "https://github.com/noffle/indexed-tarball", 10 | "bugs": "https://github.com/noffle/indexed-tarball/issues", 11 | "main": "index.js", 12 | "scripts": { 13 | "test": "tape test/*.js test/integrity/*.js", 14 | "lint": "standard" 15 | }, 16 | "keywords": [], 17 | "dependencies": { 18 | "custom-error-class": "^1.0.0", 19 | "pump": "^3.0.0", 20 | "read-only-stream": "^2.0.0", 21 | "rwlock": "^5.0.0", 22 | "tar-stream": "^1.6.1", 23 | "through2": "^2.0.3", 24 | "tmp": "0.0.33" 25 | }, 26 | "devDependencies": { 27 | "collect-stream": "^1.2.1", 28 | "md5": "^2.2.1", 29 | "mkdirp": "^0.5.1", 30 | "ncp": "^2.0.0", 31 | "standard": "~10.0.0", 32 | "tape": "~4.6.2" 33 | }, 34 | "license": "MIT" 35 | } 36 | -------------------------------------------------------------------------------- /single.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var RWLock = require('rwlock') 3 | var through = require('through2') 4 | var readonly = require('read-only-stream') 5 | var pump = require('pump') 6 | var tarHeader = require('tar-stream/headers') 7 | var fromBuffer = require('./lib/util').fromBuffer 8 | var cached = require('./lib/cached-value') 9 | var tarUtil = require('./lib/tar') 10 | 11 | module.exports = SingleTarball 12 | 13 | function SingleTarball (filepath, opts) { 14 | this.filepath = filepath 15 | 16 | this.lock = new RWLock() 17 | 18 | if (!fs.existsSync(filepath)) fs.writeFileSync(filepath, '', 'utf8') // touch new file 19 | 20 | this.archive = cached(SingleTarball.prototype._lookupMeta.bind(this)) 21 | this.archive.refresh() 22 | } 23 | 24 | // Append a file and update the index entry. 25 | SingleTarball.prototype.append = function (filepath, size, cb) { 26 | if (!cb && typeof size === 'function') { 27 | cb = size 28 | size = null 29 | } 30 | size = 0 31 | 32 | var self = this 33 | cb = cb || noop 34 | 35 | var t = through(function (chunk, _, next) { 36 | size += chunk.length 37 | next(null, chunk) 38 | }) 39 | 40 | this.lock.writeLock(function (release) { 41 | function done (err) { 42 | release() 43 | cb(err) 44 | } 45 | 46 | // 1. Refresh the index & its byte offset. 47 | self.archive.value(function (err, archive) { 48 | if (err) return done(err) 49 | 50 | if (typeof archive.indexOffset === 'number') { 51 | // 2. 
Truncate the file to remove the old index. 52 | fs.truncate(self.filepath, archive.indexOffset, function (err) { 53 | if (err) return done(err) 54 | write(archive, archive.indexOffset) 55 | }) 56 | } else { 57 | write(archive, undefined) 58 | } 59 | }) 60 | 61 | function write (archive, start) { 62 | // 3. Prepare the tar archive for appending. 63 | var fsOpts = { 64 | flags: 'r+', 65 | start: start !== undefined ? start : 0 66 | } 67 | if (fsOpts.start < 0) fsOpts.start = 0 68 | var appendStream = fs.createWriteStream(self.filepath, fsOpts) 69 | appendStream.once('error', done) 70 | 71 | // 4. Write tar header, without size info (yet). 72 | var header = tarHeader.encode({ 73 | name: filepath, 74 | type: 'file', 75 | mode: parseInt('644', 8), 76 | uid: 0, 77 | gid: 0, 78 | mtime: new Date(), 79 | size: 0 80 | }) 81 | appendStream.write(header) 82 | 83 | // 5. Write data. 84 | pump(t, appendStream, function (err) { 85 | if (err) return done(err) 86 | 87 | // Detect ENOSPC. This comes up when only one stream write happened, 88 | // which will not trigger an ENOSPC error. 89 | // Tracked by https://github.com/nodejs/node/issues/31908 90 | if (size > appendStream.bytesWritten) { 91 | var err = new Error('insufficient disk space') 92 | err.code = 'ENOSPC' 93 | return done(err) 94 | } 95 | 96 | // 6. Pad the remaining bytes to fit a 512-byte block. 97 | var leftover = 512 - (size % 512) 98 | if (leftover === 512) leftover = 0 99 | 100 | fs.appendFile(self.filepath, Buffer.alloc(leftover), function (err) { 101 | // TODO: file left in a bad state! D: 102 | if (err) return done(err) 103 | 104 | // 7. Open file so we can update the header. 105 | withWritableFile(self.filepath, function (fd, done) { 106 | // TODO: file left in a bad state! D: 107 | if (err) return done(err) 108 | 109 | // 8. Read header. 110 | var header = Buffer.alloc(512) 111 | var headerStart = fsOpts.start || 0 112 | fs.read(fd, header, 0, 512, headerStart, function (err) { 113 | // TODO: file left in a bad state! D: 114 | if (err) return done(err) 115 | 116 | // 9. Update size field. 117 | var sizeStr = toPaddedOctal(size, 12) 118 | header.write(sizeStr, 124, 12, 'utf8') 119 | 120 | // 10. Update checksum field. 121 | var sum = cksum(header) 122 | var ck = toPaddedOctal(sum, 8) 123 | header.write(ck, 148, 8, 'utf8') 124 | 125 | // 11. Write new header. 126 | fs.write(fd, header, 0, 512, headerStart, function (err) { 127 | // TODO: file left in a bad state! D: 128 | if (err) return done(err) 129 | 130 | archive.index[filepath] = { offset: start, size: size } 131 | 132 | // 12. Write the new index to the end of the archive. 133 | appendMeta(fd, headerStart + 512 + size + leftover, archive.meta, done) 134 | }) 135 | }) 136 | }, function (err) { 137 | // TODO: file left in a bad state! D: 138 | if (err) return done(err) 139 | 140 | self.archive.refresh(done) 141 | }) 142 | }) 143 | }) 144 | } 145 | }) 146 | 147 | return t 148 | } 149 | 150 | SingleTarball.prototype.list = function (cb) { 151 | var self = this 152 | 153 | this.lock.readLock(function (release) { 154 | self.archive.value(function (err, archive) { 155 | release() 156 | cb(err, err ? 
null : Object.keys(archive.index)) 157 | }) 158 | }) 159 | } 160 | 161 | SingleTarball.prototype.read = function (filepath) { 162 | var self = this 163 | var t = through() 164 | 165 | this.lock.readLock(function (release) { 166 | self.archive.value(function (err, archive) { 167 | if (err) { 168 | release() 169 | t.emit('error', err) 170 | return 171 | } 172 | 173 | var entry = archive.index[filepath] 174 | if (!entry) { 175 | release() 176 | process.nextTick(function () { 177 | var err = new Error('that file does not exist in the archive') 178 | err.notFound = true 179 | t.emit('error', err) 180 | }) 181 | return 182 | } 183 | 184 | if (entry.size === 0) { 185 | process.nextTick(function () { 186 | t.end() 187 | release() 188 | }) 189 | return 190 | } 191 | 192 | pump( 193 | fs.createReadStream(self.filepath, { start: entry.offset + 512, end: entry.offset + 512 + entry.size - 1 }), 194 | t, 195 | function (err) { 196 | release() 197 | }) 198 | }) 199 | }) 200 | 201 | return readonly(t) 202 | } 203 | 204 | // TODO: might be nice if this also returned the final file, but we don't want 205 | // to buffer the entire contents, and can't really stream it if it's being 206 | // truncated from the archive file.. 207 | SingleTarball.prototype.pop = function (name, cb) { 208 | if (typeof name === 'function' && !cb) { 209 | cb = name 210 | name = null 211 | } 212 | var self = this 213 | 214 | this.lock.writeLock(function (release) { 215 | function done (err) { 216 | release() 217 | cb(err) 218 | } 219 | 220 | self.archive.value(function (err, archive) { 221 | if (err) return done(err) 222 | 223 | // Get the last file in the archive. 224 | var fname = getFileLargestOffset(archive.index) 225 | if (name && name !== fname) { 226 | return cb(null, new Error('the last file doesnt match the filename given')) 227 | } 228 | var offset = archive.index[fname].offset 229 | 230 | fs.truncate(self.filepath, offset, function (err) { 231 | if (err) return done(err) 232 | delete archive.index[fname] 233 | 234 | withWritableFile(self.filepath, function (fd, done) { 235 | appendMeta(fd, offset, archive.meta, done) 236 | }, function (err) { 237 | if (err) return done(err) 238 | self.archive.refresh(done) 239 | }) 240 | }) 241 | }) 242 | }) 243 | } 244 | 245 | SingleTarball.prototype.userdata = function (data, cb) { 246 | if (data && !cb && typeof data === 'function') { 247 | cb = data 248 | data = null 249 | } 250 | var self = this 251 | 252 | if (!data) { 253 | // get 254 | this.lock.readLock(function (release) { 255 | function done (err, res) { 256 | release() 257 | cb(err, res) 258 | } 259 | 260 | self.archive.value(function (err, archive) { 261 | if (err) return done(err) 262 | done(null, archive.meta.userdata || {}) 263 | }) 264 | }) 265 | } else { 266 | // set 267 | this.lock.writeLock(function (release) { 268 | function done (err) { 269 | release() 270 | cb(err) 271 | } 272 | 273 | self.archive.value(function (err, archive) { 274 | if (err) return done(err) 275 | 276 | var offset = archive.indexOffset 277 | fs.truncate(self.filepath, offset, function (err) { 278 | if (err) return done(err) 279 | 280 | withWritableFile(self.filepath, function (fd, done) { 281 | archive.meta.userdata = data 282 | appendMeta(fd, offset, archive.meta, done) 283 | }, function (err) { 284 | if (err) return done(err) 285 | self.archive.refresh(done) 286 | }) 287 | }) 288 | }) 289 | }) 290 | } 291 | } 292 | 293 | // Search the tar archive backwards for the index file. 
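// Calls back with { index, indexOffset, fileSize, meta }; an archive smaller
// than 1024 bytes (no entries yet) is treated as fresh and gets an empty index.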
294 | SingleTarball.prototype._lookupMeta = function (cb) { 295 | var self = this 296 | 297 | fs.stat(this.filepath, function (err, stat) { 298 | if (err) return cb(err) 299 | var size = stat.size 300 | 301 | // Archive is fresh & empty 302 | if (size < 1024) { 303 | var index = {} 304 | return cb(null, { index: index, indexOffset: 0, fileSize: size, meta: {index: index} }) 305 | } 306 | 307 | fs.open(self.filepath, 'r', function (err, fd) { 308 | if (err) return cb(err) 309 | 310 | tarUtil.readFinalFile(fd, size, function (err, buf, offset) { 311 | if (err) return cb(err) 312 | var meta 313 | try { 314 | meta = JSON.parse(buf.toString()) 315 | } catch (e) { 316 | return cb(e) 317 | } 318 | fs.close(fd, function (err) { 319 | if (err) return cb(err) 320 | // if in old format (2.x.x), upgrade to new format (3.x.x and [hopefully] later) 321 | if (!meta.index) { 322 | var newMeta = { index: meta } 323 | cb(null, { index: newMeta.index, indexOffset: offset, fileSize: size, meta: newMeta }) 324 | } else { 325 | cb(null, { index: meta.index, indexOffset: offset, fileSize: size, meta: meta }) 326 | } 327 | }) 328 | }) 329 | }) 330 | }) 331 | } 332 | 333 | // Returns the entry nearest the end of the index. 334 | function getFileLargestOffset (index) { 335 | var key 336 | for (var name in index) { 337 | var entry = index[name] 338 | if (!key || entry.offset > index[key].offset) key = name 339 | } 340 | return key 341 | } 342 | 343 | function noop () {} 344 | 345 | // tar checksum algorithm (from mafintosh/tar-stream) 346 | var cksum = function (block) { 347 | var sum = 8 * 32 348 | for (var i = 0; i < 148; i++) sum += block[i] 349 | for (var j = 156; j < 512; j++) sum += block[j] 350 | return sum 351 | } 352 | 353 | function toPaddedOctal (number, length) { 354 | var octal = number.toString(8) 355 | var leftover = length - octal.length 356 | var padding = new Array(leftover).fill('0').join('') 357 | return padding + octal 358 | } 359 | 360 | function appendMeta (fd, pos, meta, cb) { 361 | var data = Buffer.from(JSON.stringify(meta), 'utf8') 362 | 363 | var header = tarHeader.encode({ 364 | name: '___index.json', 365 | type: 'file', 366 | mode: parseInt('644', 8), 367 | uid: 0, 368 | gid: 0, 369 | mtime: new Date(), 370 | size: data.length 371 | }) 372 | 373 | // leftover bytes to reach 512 block boundary, plus another 512 * 2 = 1024 to mark the end-of-file 374 | var padding = Buffer.alloc(512 - (data.length % 512) + 512 + 512).fill(0) 375 | 376 | var buf = Buffer.concat([header, data, padding]) 377 | 378 | fs.write(fd, buf, 0, buf.length, pos, cb) 379 | } 380 | 381 | function withWritableFile (filepath, cb, done) { 382 | fs.open(filepath, 'r+', function (err, fd) { 383 | if (err) return cb(err) 384 | cb(fd, function () { 385 | fs.close(fd, done) 386 | }) 387 | }) 388 | } 389 | -------------------------------------------------------------------------------- /test/append-multi.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | var parseTarball = require('./util').parseTarball 7 | 8 | test('can append to a new file', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath, {multifile: true}) 14 | var data = 'greetings friend!' 
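// append() returns a writable stream: pipe the content in and wait for the
// callback before inspecting the resulting tarball on disk.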
15 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 16 | t.error(err, 'append ok') 17 | 18 | parseTarball(filepath, function (err, res, index) { 19 | t.error(err, 'parsed tarball ok') 20 | 21 | t.equals(res.length, 2, 'two entries') 22 | 23 | t.equals(res[0].name, 'hello.txt', 'contents match') 24 | t.equals(res[0].type, 'file', 'type matches') 25 | t.equals(res[0].data.toString(), 'greetings friend!') 26 | 27 | t.equals(res[1].name, '___index.json', 'contents match') 28 | t.equals(res[1].type, 'file', 'type matches') 29 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: data.length } }) 30 | 31 | cleanup() 32 | t.end() 33 | }) 34 | })) 35 | }) 36 | }) 37 | 38 | test('can append to an existing file', function (t) { 39 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 40 | t.error(err, 'tmpdir setup') 41 | 42 | var filepath = path.join(dir, 'file.tar') 43 | var tarball = new Tarball(filepath, {multifile: true}) 44 | var data = 'greetings friend!' 45 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 46 | t.error(err, 'append ok') 47 | 48 | data = '# beep boop' 49 | fromString(data).pipe(tarball.append('beep.md', data.length, function (err) { 50 | t.error(err, 'append ok') 51 | 52 | parseTarball(filepath, function (err, res, index) { 53 | t.error(err, 'parsed tarball ok') 54 | 55 | t.equals(res.length, 3, '3 entries') 56 | 57 | t.equals(res[0].name, 'hello.txt', 'name matches') 58 | t.equals(res[0].type, 'file', 'type matches') 59 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 60 | 61 | t.equals(res[1].name, 'beep.md', 'name matches') 62 | t.equals(res[1].type, 'file', 'type matches') 63 | t.equals(res[1].data.toString(), '# beep boop', 'content matches') 64 | 65 | t.equals(res[2].name, '___index.json', 'contents match') 66 | t.equals(res[2].type, 'file', 'type matches') 67 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 }, 'beep.md': { offset: 1024, size: 11 } }) 68 | 69 | cleanup() 70 | t.end() 71 | }) 72 | })) 73 | })) 74 | }) 75 | }) 76 | 77 | test('second append overflows into second tarball', function (t) { 78 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 79 | t.error(err, 'tmpdir setup') 80 | 81 | var filepath = path.join(dir, 'file.tar') 82 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 3072}) 83 | var data = 'greetings friend!' 
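// Header (512) + padded data (512) + index entry (1024) + NUL trailer (1024)
// = 3072 bytes, so with maxFileSize 3072 the next append overflows into file.tar.1.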
84 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 85 | t.error(err, 'append ok') 86 | 87 | data = '# beep boop' 88 | fromString(data).pipe(tarball.append('beep.md', data.length, function (err) { 89 | t.error(err, 'append ok') 90 | 91 | parseTarball(filepath, function (err, res, index) { 92 | t.error(err, 'parsed tarball ok') 93 | t.equals(res.length, 2, '2 entries') 94 | t.equals(res[0].name, 'hello.txt', 'name matches') 95 | t.equals(res[0].type, 'file', 'type matches') 96 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 97 | t.equals(res[1].name, '___index.json', 'contents match') 98 | t.equals(res[1].type, 'file', 'type matches') 99 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 } }) 100 | 101 | parseTarball(filepath + '.1', function (err, res, index) { 102 | t.error(err, 'parsed tarball ok') 103 | t.equals(res.length, 2, '2 entries') 104 | t.equals(res[0].name, 'beep.md', 'name matches') 105 | t.equals(res[0].type, 'file', 'type matches') 106 | t.equals(res[0].data.toString(), '# beep boop', 'content matches') 107 | t.equals(res[1].name, '___index.json', 'contents match') 108 | t.equals(res[1].type, 'file', 'type matches') 109 | t.deepEquals(index, { 'beep.md': { offset: 0, size: 11 } }) 110 | 111 | cleanup() 112 | t.end() 113 | }) 114 | }) 115 | })) 116 | })) 117 | }) 118 | }) 119 | 120 | test('two concurrent writes succeed as expected', function (t) { 121 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 122 | t.error(err, 'tmpdir setup') 123 | 124 | var filepath = path.join(dir, 'file.tar') 125 | var tarball = new Tarball(filepath, {multifile: true}) 126 | var pending = 2 127 | 128 | var data1 = 'greetings friend!' 129 | fromString(data1).pipe(tarball.append('hello.txt', data1.length, function (err) { 130 | t.error(err, 'append ok') 131 | if (!--pending) check() 132 | })) 133 | 134 | var data2 = '# beep boop' 135 | fromString(data2).pipe(tarball.append('beep.md', data2.length, function (err) { 136 | t.error(err, 'append ok') 137 | if (!--pending) check() 138 | })) 139 | 140 | function check () { 141 | parseTarball(filepath, function (err, res, index) { 142 | t.error(err, 'parsed tarball ok') 143 | 144 | t.equals(res.length, 3, '3 entries') 145 | 146 | t.equals(res[0].name, 'hello.txt', 'name matches') 147 | t.equals(res[0].type, 'file', 'type matches') 148 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 149 | 150 | t.equals(res[1].name, 'beep.md', 'name matches') 151 | t.equals(res[1].type, 'file', 'type matches') 152 | t.equals(res[1].data.toString(), '# beep boop', 'content matches') 153 | 154 | t.equals(res[2].name, '___index.json', 'contents match') 155 | t.equals(res[2].type, 'file', 'type matches') 156 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 }, 'beep.md': { offset: 1024, size: 11 } }) 157 | 158 | cleanup() 159 | t.end() 160 | }) 161 | } 162 | }) 163 | }) 164 | 165 | test('two concurrent writes causing overflow succeed as expected', function (t) { 166 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 167 | t.error(err, 'tmpdir setup') 168 | 169 | var filepath = path.join(dir, 'file.tar') 170 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 4096}) 171 | var pending = 3 172 | 173 | var data1 = Buffer.alloc(600).fill('t').toString() 174 | fromString(data1).pipe(tarball.append('hello.txt', data1.length, function (err) { 175 | t.error(err, 'append ok') 176 | if (!--pending) check() 177 | })) 178 | 179 | var data2 = '# 
beep boop' 180 | fromString(data2).pipe(tarball.append('beep.md', data2.length, function (err) { 181 | t.error(err, 'append ok') 182 | if (!--pending) check() 183 | })) 184 | 185 | var data3 = '# deep doop' 186 | fromString(data3).pipe(tarball.append('deep.md', data3.length, function (err) { 187 | t.error(err, 'append ok') 188 | if (!--pending) check() 189 | })) 190 | 191 | function check () { 192 | // 1st tarball 193 | parseTarball(filepath, function (err, res, index) { 194 | t.error(err, 'parsed 1st tarball ok') 195 | t.equals(res.length, 2, '2 entries') 196 | t.equals(res[0].name, 'hello.txt', 'name matches') 197 | t.equals(res[0].type, 'file', 'type matches') 198 | t.equals(res[0].data.toString(), data1, 'content matches') 199 | t.equals(res[1].name, '___index.json', 'contents match') 200 | t.equals(res[1].type, 'file', 'type matches') 201 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 600 } }) 202 | 203 | // 2nd tarball 204 | parseTarball(filepath + '.1', function (err, res, index) { 205 | t.error(err, 'parsed 2nd tarball ok') 206 | t.equals(res.length, 3, '3 entries') 207 | t.equals(res[0].name, 'beep.md', 'name matches') 208 | t.equals(res[0].type, 'file', 'type matches') 209 | t.equals(res[0].data.toString(), '# beep boop', 'content matches') 210 | t.equals(res[1].name, 'deep.md', 'name matches') 211 | t.equals(res[1].type, 'file', 'type matches') 212 | t.equals(res[1].data.toString(), '# deep doop', 'content matches') 213 | t.deepEquals(index, { 214 | 'beep.md': { offset: 0, size: 11 }, 215 | 'deep.md': { offset: 1024, size: 11 } 216 | }) 217 | 218 | cleanup() 219 | t.end() 220 | }) 221 | }) 222 | } 223 | }) 224 | }) 225 | 226 | test('can append to the 1st file of an empty two-file archive', function (t) { 227 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 228 | t.error(err, 'tmpdir setup') 229 | 230 | var filepath = path.join(dir, 'file.tar') 231 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 3072}) 232 | 233 | var data = 'greetings friend!' 
234 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 235 | t.error(err, 'append ok') 236 | })) 237 | 238 | var data2 = '# beep boop' 239 | fromString(data2).pipe(tarball.append('beep.md', data2.length, function (err) { 240 | t.error(err, 'append ok') 241 | })) 242 | 243 | tarball.pop(function (err) { 244 | t.error(err, 'pop ok') 245 | 246 | tarball.pop(function (err) { 247 | t.error(err, 'pop ok') 248 | 249 | fromString(data2).pipe(tarball.append('foo/bax.js', data2.length, function (err) { 250 | t.error(err, 'append ok') 251 | 252 | parseTarball(filepath, function (err, res, index) { 253 | t.error(err, 'parsed tarball ok') 254 | t.equals(res.length, 1, '1 entry') 255 | t.equals(res[0].name, '___index.json', 'contents match') 256 | t.equals(res[0].type, 'file', 'type matches') 257 | t.deepEquals(index, {}) 258 | 259 | parseTarball(filepath + '.1', function (err, res, index) { 260 | t.error(err, 'parsed tarball ok') 261 | t.equals(res.length, 2, '2 entries') 262 | t.equals(res[0].name, 'foo/bax.js', 'name matches') 263 | t.equals(res[0].type, 'file', 'type matches') 264 | t.equals(res[0].data.toString(), '# beep boop', 'content matches') 265 | t.equals(res[1].name, '___index.json', 'contents match') 266 | t.equals(res[1].type, 'file', 'type matches') 267 | t.deepEquals(index, { 'foo/bax.js': { offset: 0, size: 11 } }) 268 | 269 | cleanup() 270 | t.end() 271 | }) 272 | }) 273 | })) 274 | }) 275 | }) 276 | }) 277 | }) 278 | -------------------------------------------------------------------------------- /test/append.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fs = require('fs') 6 | var fromString = require('../lib/util').fromString 7 | var parseTarball = require('./util').parseTarball 8 | 9 | test('can append to a new file', function (t) { 10 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 11 | t.error(err, 'tmpdir setup') 12 | 13 | var filepath = path.join(dir, 'file.tar') 14 | var tarball = new Tarball(filepath) 15 | var data = 'greetings friend!' 16 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 17 | t.error(err, 'append ok') 18 | 19 | parseTarball(filepath, function (err, res, index) { 20 | t.error(err, 'parsed tarball ok') 21 | 22 | t.equals(res.length, 2, 'two entries') 23 | 24 | t.equals(res[0].name, 'hello.txt', 'contents match') 25 | t.equals(res[0].type, 'file', 'type matches') 26 | t.equals(res[0].data.toString(), 'greetings friend!') 27 | 28 | t.equals(res[1].name, '___index.json', 'contents match') 29 | t.equals(res[1].type, 'file', 'type matches') 30 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: data.length } }) 31 | 32 | cleanup() 33 | t.end() 34 | }) 35 | })) 36 | }) 37 | }) 38 | 39 | test('can append to an existing file', function (t) { 40 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 41 | t.error(err, 'tmpdir setup') 42 | 43 | var filepath = path.join(dir, 'file.tar') 44 | var tarball = new Tarball(filepath) 45 | var data = 'greetings friend!' 
46 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 47 | t.error(err, 'append ok') 48 | 49 | data = '# beep boop' 50 | fromString(data).pipe(tarball.append('beep.md', function (err) { 51 | t.error(err, 'append ok') 52 | 53 | parseTarball(filepath, function (err, res, index) { 54 | t.error(err, 'parsed tarball ok') 55 | 56 | t.equals(res.length, 3, '3 entries') 57 | 58 | t.equals(res[0].name, 'hello.txt', 'name matches') 59 | t.equals(res[0].type, 'file', 'type matches') 60 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 61 | 62 | t.equals(res[1].name, 'beep.md', 'name matches') 63 | t.equals(res[1].type, 'file', 'type matches') 64 | t.equals(res[1].data.toString(), '# beep boop', 'content matches') 65 | 66 | t.equals(res[2].name, '___index.json', 'contents match') 67 | t.equals(res[2].type, 'file', 'type matches') 68 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 }, 'beep.md': { offset: 1024, size: 11 } }) 69 | 70 | cleanup() 71 | t.end() 72 | }) 73 | })) 74 | })) 75 | }) 76 | }) 77 | 78 | test('two concurrent writes succeed as expected', function (t) { 79 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 80 | t.error(err, 'tmpdir setup') 81 | 82 | var filepath = path.join(dir, 'file.tar') 83 | var tarball = new Tarball(filepath) 84 | var pending = 2 85 | 86 | var data1 = 'greetings friend!' 87 | fromString(data1).pipe(tarball.append('hello.txt', function (err) { 88 | t.error(err, 'append ok') 89 | if (!--pending) check() 90 | })) 91 | 92 | var data2 = '# beep boop' 93 | fromString(data2).pipe(tarball.append('beep.md', function (err) { 94 | t.error(err, 'append ok') 95 | if (!--pending) check() 96 | })) 97 | 98 | function check () { 99 | parseTarball(filepath, function (err, res, index) { 100 | t.error(err, 'parsed tarball ok') 101 | 102 | t.equals(res.length, 3, '3 entries') 103 | 104 | t.equals(res[0].name, 'hello.txt', 'name matches') 105 | t.equals(res[0].type, 'file', 'type matches') 106 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 107 | 108 | t.equals(res[1].name, 'beep.md', 'name matches') 109 | t.equals(res[1].type, 'file', 'type matches') 110 | t.equals(res[1].data.toString(), '# beep boop', 'content matches') 111 | 112 | t.equals(res[2].name, '___index.json', 'contents match') 113 | t.equals(res[2].type, 'file', 'type matches') 114 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 }, 'beep.md': { offset: 1024, size: 11 } }) 115 | 116 | cleanup() 117 | t.end() 118 | }) 119 | } 120 | }) 121 | }) 122 | 123 | test('REGRESSION: check that writing a size=512 file doesn\'t add an extra NUL sector', function (t) { 124 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 125 | t.error(err, 'tmpdir setup') 126 | 127 | var filepath = path.join(dir, 'file.tar') 128 | var tarball = new Tarball(filepath) 129 | var data = Buffer.alloc(512).fill(32) 130 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 131 | t.error(err, 'append ok') 132 | 133 | var stat = fs.statSync(filepath) 134 | t.equals(stat.size, 512 * 6, 'tar archive is 6 sectors') 135 | cleanup() 136 | t.end() 137 | })) 138 | }) 139 | }) 140 | 141 | test('REGRESSION: check that writing a size=0 file doesn\'t fail', function (t) { 142 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 143 | t.error(err, 'tmpdir setup') 144 | 145 | var filepath = path.join(dir, 'file.tar') 146 | var tarball = new Tarball(filepath) 147 | var data = Buffer.alloc(0) 148 | 
fromString(data).pipe(tarball.append('hello.txt', function (err) { 149 | t.error(err, 'append ok') 150 | 151 | var stat = fs.statSync(filepath) 152 | t.equals(stat.size, 512 * 5, 'tar archive is 5 sectors') 153 | cleanup() 154 | t.end() 155 | })) 156 | }) 157 | }) 158 | -------------------------------------------------------------------------------- /test/integrity/fixtures/good-no-index.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/good-no-index.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/good.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/good.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/no-trailer-no-index.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/no-trailer-no-index.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/no-trailer.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/no-trailer.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/partial-final-file-no-index.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-final-file-no-index.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/partial-index-content.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-index-content.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/partial-index-header.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-index-header.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/partial-index-no-content.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-index-no-content.tar -------------------------------------------------------------------------------- /test/integrity/fixtures/partial-trailer-no-index.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-trailer-no-index.tar 
-------------------------------------------------------------------------------- /test/integrity/fixtures/partial-trailer.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/digidem/indexed-tarball/105560970dab22b85cc9e7c3e203e4f61e3ed476/test/integrity/fixtures/partial-trailer.tar -------------------------------------------------------------------------------- /test/integrity/repair.js: -------------------------------------------------------------------------------- 1 | var repair = require('../../lib/integrity').repair 2 | var path = require('path') 3 | var test = require('tape') 4 | var ncp = require('ncp') 5 | var mkdirp = require('mkdirp') 6 | var os = require('os') 7 | var md5 = require('md5') 8 | var parseTarball = require('../util.js').parseTarball 9 | 10 | var testdir = path.join(os.tmpdir(), 'test-indexed-tarball-' + Math.random().toString().substring(2)) 11 | mkdirp.sync(testdir) 12 | 13 | function testFixture (name, filepath, expected) { 14 | test(name, function (t) { 15 | var src = path.join(__dirname, 'fixtures', filepath) 16 | var dst = path.join(testdir, filepath) 17 | ncp(src, dst, function (err) { 18 | t.error(err, 'copy fixture') 19 | repair(dst, function (err, res) { 20 | t.error(err, 'repair tarball') 21 | parseTarball(dst, function (err, res) { 22 | t.error(err, 'parse tarball') 23 | t.equals(res.length, Object.keys(expected).length, 'same # of files') 24 | res.forEach(function (entry) { 25 | t.ok(expected[entry.name], 'filename as expected (' + entry.name + ')') 26 | t.equals(entry.data.length, expected[entry.name].size, 'size as expected') 27 | t.equals(md5(entry.data), expected[entry.name].md5, 'hash matches') 28 | }) 29 | t.end() 30 | }) 31 | }) 32 | }) 33 | }) 34 | } 35 | 36 | testFixture('good tarball', 'good.tar', { 37 | 'osm-p2p-db.tar': { 38 | size: 10240, 39 | md5: '4751d44c06370befaa629c791a34245c' 40 | }, 41 | '___index.json': { 42 | size: 99, 43 | md5: '9c5043fb568e4310839f0dddeefe007d' 44 | } 45 | }) 46 | 47 | testFixture('partial NUL trailer', 'partial-trailer.tar', { 48 | 'osm-p2p-db.tar': { 49 | size: 10240, 50 | md5: '4751d44c06370befaa629c791a34245c' 51 | }, 52 | '___index.json': { 53 | size: 54, 54 | md5: 'c18f94481449e80d580269bd159dea96' 55 | } 56 | }) 57 | 58 | testFixture('no NUL trailer', 'no-trailer.tar', { 59 | 'osm-p2p-db.tar': { 60 | size: 10240, 61 | md5: '4751d44c06370befaa629c791a34245c' 62 | }, 63 | '___index.json': { 64 | size: 54, 65 | md5: 'c18f94481449e80d580269bd159dea96' 66 | } 67 | }) 68 | 69 | testFixture('partial index (partial header)', 'partial-index-header.tar', { 70 | 'osm-p2p-db.tar': { 71 | size: 10240, 72 | md5: '4751d44c06370befaa629c791a34245c' 73 | }, 74 | '___index.json': { 75 | size: 54, 76 | md5: 'c18f94481449e80d580269bd159dea96' 77 | } 78 | }) 79 | 80 | testFixture('partial index (partial content)', 'partial-index-content.tar', { 81 | 'osm-p2p-db.tar': { 82 | size: 10240, 83 | md5: '4751d44c06370befaa629c791a34245c' 84 | }, 85 | '___index.json': { 86 | size: 54, 87 | md5: 'c18f94481449e80d580269bd159dea96' 88 | } 89 | }) 90 | 91 | testFixture('partial index (no content)', 'partial-index-no-content.tar', { 92 | 'osm-p2p-db.tar': { 93 | size: 10240, 94 | md5: '4751d44c06370befaa629c791a34245c' 95 | }, 96 | '___index.json': { 97 | size: 54, 98 | md5: 'c18f94481449e80d580269bd159dea96' 99 | } 100 | }) 101 | 102 | testFixture('good tarball (sans index)', 'good-no-index.tar', { 103 | 'osm-p2p-db.tar': { 104 | size: 10240, 105 | md5: 
'4751d44c06370befaa629c791a34245c' 106 | }, 107 | '___index.json': { 108 | size: 54, 109 | md5: 'c18f94481449e80d580269bd159dea96' 110 | } 111 | }) 112 | 113 | testFixture('partial NUL trailer (sans index)', 'partial-trailer-no-index.tar', { 114 | 'osm-p2p-db.tar': { 115 | size: 10240, 116 | md5: '4751d44c06370befaa629c791a34245c' 117 | }, 118 | '___index.json': { 119 | size: 54, 120 | md5: 'c18f94481449e80d580269bd159dea96' 121 | } 122 | }) 123 | 124 | testFixture('no NUL trailer (sans index)', 'no-trailer-no-index.tar', { 125 | 'osm-p2p-db.tar': { 126 | size: 10240, 127 | md5: '4751d44c06370befaa629c791a34245c' 128 | }, 129 | '___index.json': { 130 | size: 54, 131 | md5: 'c18f94481449e80d580269bd159dea96' 132 | } 133 | }) 134 | 135 | testFixture('truncated final file (sans index)', 'partial-final-file-no-index.tar', { 136 | 'osm-p2p-db.tar': { 137 | size: 10240, 138 | md5: '4751d44c06370befaa629c791a34245c' 139 | }, 140 | '___index.json': { 141 | size: 54, 142 | md5: 'c18f94481449e80d580269bd159dea96' 143 | } 144 | }) 145 | 146 | -------------------------------------------------------------------------------- /test/list-multi.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | 7 | test('can list an archive with one file', function (t) { 8 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 9 | t.error(err, 'tmpdir setup') 10 | 11 | var filepath = path.join(dir, 'file.tar') 12 | var tarball = new Tarball(filepath, {multifile: true}) 13 | append(tarball, 'hello.txt', 'greetings friend', function (err) { 14 | t.error(err, 'append ok') 15 | 16 | tarball.list(function (err, files) { 17 | t.error(err, 'list ok') 18 | t.deepEquals(files, ['hello.txt']) 19 | 20 | cleanup() 21 | t.end() 22 | }) 23 | }) 24 | }) 25 | }) 26 | 27 | test('can list files in three archives', function (t) { 28 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 29 | t.error(err, 'tmpdir setup') 30 | 31 | var filepath = path.join(dir, 'file.tar') 32 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 1024}) 33 | 34 | append(tarball, 'first.txt', '1st', function (err) { 35 | t.error(err, 'append ok') 36 | }) 37 | 38 | append(tarball, 'second.txt', '2nd', function (err) { 39 | t.error(err, 'append ok') 40 | }) 41 | 42 | append(tarball, 'third.txt', '3rd', function (err) { 43 | t.error(err, 'append ok') 44 | }) 45 | 46 | tarball.list(function (err, files) { 47 | t.error(err, 'list ok') 48 | t.deepEquals(files.sort(), ['first.txt', 'second.txt', 'third.txt']) 49 | 50 | cleanup() 51 | t.end() 52 | }) 53 | }) 54 | }) 55 | 56 | test('can list files in three archives with deduplication', function (t) { 57 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 58 | t.error(err, 'tmpdir setup') 59 | 60 | var filepath = path.join(dir, 'file.tar') 61 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 1024}) 62 | 63 | append(tarball, 'first.txt', '1st', function (err) { 64 | t.error(err, 'append ok') 65 | }) 66 | 67 | append(tarball, 'first.txt', '2nd', function (err) { 68 | t.error(err, 'append ok') 69 | }) 70 | 71 | append(tarball, 'first.txt', '3rd', function (err) { 72 | t.error(err, 'append ok') 73 | }) 74 | 75 | tarball.list(function (err, files) { 76 | t.error(err, 'list ok') 77 | t.deepEquals(files.sort(), ['first.txt']) 78 | 79 | cleanup() 80 | 
t.end() 81 | }) 82 | }) 83 | }) 84 | 85 | function append (tarball, filename, string, cb) { 86 | fromString(string).pipe(tarball.append(filename, string.length, cb)) 87 | } 88 | -------------------------------------------------------------------------------- /test/list.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | 7 | test('can list an archive with one file', function (t) { 8 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 9 | t.error(err, 'tmpdir setup') 10 | 11 | var filepath = path.join(dir, 'file.tar') 12 | var tarball = new Tarball(filepath) 13 | var data = 'greetings friend!' 14 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 15 | t.error(err, 'append ok') 16 | 17 | tarball.list(function (err, files) { 18 | t.error(err, 'list ok') 19 | t.deepEquals(files, ['hello.txt']) 20 | 21 | cleanup() 22 | t.end() 23 | }) 24 | })) 25 | }) 26 | }) 27 | 28 | test('can list an archive with one file (concurrent)', function (t) { 29 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 30 | t.error(err, 'tmpdir setup') 31 | 32 | var filepath = path.join(dir, 'file.tar') 33 | var tarball = new Tarball(filepath) 34 | var data = 'greetings friend!' 35 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 36 | t.error(err, 'append ok') 37 | })) 38 | 39 | tarball.list(function (err, files) { 40 | t.error(err, 'list ok') 41 | t.deepEquals(files, ['hello.txt']) 42 | 43 | cleanup() 44 | t.end() 45 | }) 46 | }) 47 | }) 48 | 49 | test('can list an archive with many files (concurrent)', function (t) { 50 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 51 | t.error(err, 'tmpdir setup') 52 | 53 | var filepath = path.join(dir, 'file.tar') 54 | var tarball = new Tarball(filepath) 55 | 56 | var n = 0 57 | for (var i = 0; i < 100; i++) { 58 | n++ 59 | var data = 'this is message #' + i 60 | fromString(data).pipe(tarball.append('hello_' + i + '.txt', function (err) { 61 | t.error(err, 'append ok') 62 | })) 63 | } 64 | 65 | tarball.list(function (err, files) { 66 | t.error(err, 'list ok') 67 | t.equals(files.length, n) 68 | 69 | cleanup() 70 | t.end() 71 | }) 72 | }) 73 | }) 74 | -------------------------------------------------------------------------------- /test/pop-multi.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | var parseTarball = require('./util').parseTarball 7 | 8 | test('can pop an archive with two files', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 512 * 100}) 14 | var data = 'greetings friend!' 
15 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 16 | t.error(err, 'append ok') 17 | })) 18 | 19 | data = '# beep boop' 20 | fromString(data).pipe(tarball.append('beep.md', data.length, function (err) { 21 | t.error(err, 'append ok') 22 | })) 23 | 24 | tarball.pop(function (err) { 25 | t.error(err, 'pop ok') 26 | 27 | parseTarball(filepath, function (err, res, index) { 28 | t.error(err, 'parsed tarball ok') 29 | 30 | t.equals(res.length, 2, '2 entries') 31 | 32 | t.equals(res[0].name, 'hello.txt', 'name matches') 33 | t.equals(res[0].type, 'file', 'type matches') 34 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 35 | 36 | t.equals(res[1].name, '___index.json', 'contents match') 37 | t.equals(res[1].type, 'file', 'type matches') 38 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 } }) 39 | 40 | cleanup() 41 | t.end() 42 | }) 43 | }) 44 | }) 45 | }) 46 | 47 | test('can pop an archive with one file', function (t) { 48 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 49 | t.error(err, 'tmpdir setup') 50 | 51 | var filepath = path.join(dir, 'file.tar') 52 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 512 * 10}) 53 | var data = 'greetings friend!' 54 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 55 | t.error(err, 'append ok') 56 | })) 57 | 58 | tarball.pop(function (err) { 59 | t.error(err, 'pop ok') 60 | 61 | parseTarball(filepath, function (err, res, index) { 62 | t.error(err, 'parsed tarball ok') 63 | 64 | t.equals(res.length, 1, '1 entry') 65 | 66 | t.equals(res[0].name, '___index.json', 'contents match') 67 | t.equals(res[0].type, 'file', 'type matches') 68 | t.deepEquals(index, {}) 69 | 70 | cleanup() 71 | t.end() 72 | }) 73 | }) 74 | }) 75 | }) 76 | 77 | test('can pop the last file in the 2nd archive of a multi-file archive', function (t) { 78 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 79 | t.error(err, 'tmpdir setup') 80 | 81 | var filepath = path.join(dir, 'file.tar') 82 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 3072}) 83 | var data = 'greetings friend!' 
84 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 85 | t.error(err, 'append ok') 86 | })) 87 | 88 | data = '# beep boop' 89 | fromString(data).pipe(tarball.append('beep.md', data.length, function (err) { 90 | t.error(err, 'append ok') 91 | })) 92 | 93 | tarball.pop(function (err) { 94 | t.error(err, 'pop ok') 95 | 96 | parseTarball(filepath, function (err, res, index) { 97 | t.error(err, 'parsed tarball ok') 98 | 99 | t.equals(res.length, 2, '2 entries') 100 | t.equals(res[0].name, 'hello.txt', 'name matches') 101 | t.equals(res[0].type, 'file', 'type matches') 102 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 103 | t.equals(res[1].name, '___index.json', 'contents match') 104 | t.equals(res[1].type, 'file', 'type matches') 105 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 } }) 106 | 107 | parseTarball(filepath + '.1', function (err, res, index) { 108 | t.error(err, 'parsed tarball ok') 109 | 110 | t.equals(res.length, 1, '1 entry') 111 | t.equals(res[0].name, '___index.json', 'contents match') 112 | t.equals(res[0].type, 'file', 'type matches') 113 | t.deepEquals(index, {}) 114 | 115 | cleanup() 116 | t.end() 117 | }) 118 | }) 119 | }) 120 | }) 121 | }) 122 | 123 | test('can pop the file in the 1st archive of a multi-file archive', function (t) { 124 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 125 | t.error(err, 'tmpdir setup') 126 | 127 | var filepath = path.join(dir, 'file.tar') 128 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 3072}) 129 | var data = 'greetings friend!' 130 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 131 | t.error(err, 'append ok') 132 | })) 133 | 134 | data = '# beep boop' 135 | fromString(data).pipe(tarball.append('beep.md', data.length, function (err) { 136 | t.error(err, 'append ok') 137 | })) 138 | 139 | tarball.pop(function (err) { 140 | t.error(err, 'pop ok') 141 | 142 | tarball.pop(function (err) { 143 | t.error(err, 'pop ok') 144 | 145 | parseTarball(filepath, function (err, res, index) { 146 | t.error(err, 'parsed tarball ok') 147 | t.equals(res.length, 1, '1 entry') 148 | t.equals(res[0].name, '___index.json', 'contents match') 149 | t.equals(res[0].type, 'file', 'type matches') 150 | t.deepEquals(index, {}) 151 | 152 | parseTarball(filepath + '.1', function (err, res, index) { 153 | t.error(err, 'parsed tarball ok') 154 | t.equals(res.length, 1, '1 entry') 155 | t.equals(res[0].name, '___index.json', 'contents match') 156 | t.equals(res[0].type, 'file', 'type matches') 157 | t.deepEquals(index, {}) 158 | 159 | cleanup() 160 | t.end() 161 | }) 162 | }) 163 | }) 164 | }) 165 | }) 166 | }) 167 | -------------------------------------------------------------------------------- /test/pop.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | var parseTarball = require('./util').parseTarball 7 | 8 | test('can pop an archive with two files', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath) 14 | var data = 'greetings friend!' 
15 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 16 | t.error(err, 'append ok') 17 | })) 18 | 19 | data = '# beep boop' 20 | fromString(data).pipe(tarball.append('beep.md', function (err) { 21 | t.error(err, 'append ok') 22 | })) 23 | 24 | tarball.pop(function (err) { 25 | t.error(err, 'pop ok') 26 | 27 | parseTarball(filepath, function (err, res, index) { 28 | t.error(err, 'parsed tarball ok') 29 | 30 | t.equals(res.length, 2, '2 entries') 31 | 32 | t.equals(res[0].name, 'hello.txt', 'name matches') 33 | t.equals(res[0].type, 'file', 'type matches') 34 | t.equals(res[0].data.toString(), 'greetings friend!', 'content matches') 35 | 36 | t.equals(res[1].name, '___index.json', 'contents match') 37 | t.equals(res[1].type, 'file', 'type matches') 38 | t.deepEquals(index, { 'hello.txt': { offset: 0, size: 17 } }) 39 | 40 | cleanup() 41 | t.end() 42 | }) 43 | }) 44 | }) 45 | }) 46 | 47 | test('can pop an archive with one file', function (t) { 48 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 49 | t.error(err, 'tmpdir setup') 50 | 51 | var filepath = path.join(dir, 'file.tar') 52 | var tarball = new Tarball(filepath) 53 | var data = 'greetings friend!' 54 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 55 | t.error(err, 'append ok') 56 | })) 57 | 58 | tarball.pop(function (err) { 59 | t.error(err, 'pop ok') 60 | 61 | parseTarball(filepath, function (err, res, index) { 62 | t.error(err, 'parsed tarball ok') 63 | 64 | t.equals(res.length, 1, '1 entry') 65 | 66 | t.equals(res[0].name, '___index.json', 'contents match') 67 | t.equals(res[0].type, 'file', 'type matches') 68 | t.deepEquals(index, {}) 69 | 70 | cleanup() 71 | t.end() 72 | }) 73 | }) 74 | }) 75 | }) 76 | -------------------------------------------------------------------------------- /test/read-multi.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var collect = require('collect-stream') 3 | var path = require('path') 4 | var tmp = require('tmp') 5 | var test = require('tape') 6 | var fromString = require('../lib/util').fromString 7 | 8 | test('can read an archive with one file', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath, {multifile: true}) 14 | var data = 'greetings friend!' 
15 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 16 | t.error(err, 'append ok') 17 | 18 | collect(tarball.read('hello.txt'), function (err, data) { 19 | t.error(err, 'read ok') 20 | t.deepEquals(data.toString(), 'greetings friend!') 21 | 22 | cleanup() 23 | t.end() 24 | }) 25 | })) 26 | }) 27 | }) 28 | 29 | test('cannot read nonexistant file', function (t) { 30 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 31 | t.error(err, 'tmpdir setup') 32 | 33 | var filepath = path.join(dir, 'file.tar') 34 | var tarball = new Tarball(filepath, {multifile: true}) 35 | collect(tarball.read('hello.txt'), function (err, data) { 36 | t.ok(err instanceof Error, 'read failed') 37 | 38 | cleanup() 39 | t.end() 40 | }) 41 | }) 42 | }) 43 | 44 | test('can two files across two archives', function (t) { 45 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 46 | t.error(err, 'tmpdir setup') 47 | 48 | var filepath = path.join(dir, 'file.tar') 49 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 1024}) 50 | 51 | var data = 'greetings friend!' 52 | fromString(data).pipe(tarball.append('hello.txt', data.length, function (err) { 53 | t.error(err, 'append ok') 54 | })) 55 | 56 | data = 'how about a nice game of chess' 57 | fromString(data).pipe(tarball.append('games/chess', data.length, function (err) { 58 | t.error(err, 'append ok') 59 | })) 60 | 61 | collect(tarball.read('hello.txt'), function (err, data) { 62 | t.error(err, 'read ok') 63 | t.deepEquals(data.toString(), 'greetings friend!') 64 | 65 | collect(tarball.read('games/chess'), function (err, data) { 66 | t.error(err, 'read ok') 67 | t.deepEquals(data.toString(), 'how about a nice game of chess') 68 | 69 | collect(tarball.read('foo/bar/baz'), function (err, data) { 70 | t.error(!err, 'read failed ok') 71 | 72 | cleanup() 73 | t.end() 74 | }) 75 | }) 76 | }) 77 | }) 78 | }) 79 | 80 | test('can read all files in an archive with many files', function (t) { 81 | t.plan(301) 82 | 83 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 84 | t.error(err, 'tmpdir setup') 85 | 86 | var filepath = path.join(dir, 'file.tar') 87 | var tarball = new Tarball(filepath, {multifile: true, maxFileSize: 3072}) 88 | 89 | var n = 0 90 | for (var i = 0; i < 100; i++) { 91 | n++ 92 | var data = 'this is message #' + i 93 | fromString(data).pipe(tarball.append('hello_' + i + '.txt', data.length, function (err) { 94 | t.error(err, 'append ok') 95 | })) 96 | } 97 | 98 | for (var i = 0; i < n; i++) { 99 | ;(function (x) { 100 | collect(tarball.read('hello_' + i + '.txt'), function (err, data) { 101 | t.error(err, 'read ok') 102 | t.equals(data.toString(), 'this is message #' + x) 103 | }) 104 | })(i) 105 | } 106 | }) 107 | }) 108 | -------------------------------------------------------------------------------- /test/read.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var collect = require('collect-stream') 3 | var path = require('path') 4 | var tmp = require('tmp') 5 | var test = require('tape') 6 | var fromString = require('../lib/util').fromString 7 | 8 | test('can read an archive with one file', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath) 14 | var data = 'greetings friend!' 
15 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 16 | t.error(err, 'append ok') 17 | 18 | collect(tarball.read('hello.txt'), function (err, data) { 19 | t.error(err, 'read ok') 20 | t.deepEquals(data.toString(), 'greetings friend!') 21 | 22 | cleanup() 23 | t.end() 24 | }) 25 | })) 26 | }) 27 | }) 28 | 29 | test('can read an archive with two files', function (t) { 30 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 31 | t.error(err, 'tmpdir setup') 32 | 33 | var filepath = path.join(dir, 'file.tar') 34 | var tarball = new Tarball(filepath) 35 | 36 | var data = 'greetings friend!' 37 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 38 | t.error(err, 'append ok') 39 | })) 40 | 41 | data = 'how about a nice game of chess' 42 | fromString(data).pipe(tarball.append('games/chess', function (err) { 43 | t.error(err, 'append ok') 44 | })) 45 | 46 | collect(tarball.read('hello.txt'), function (err, data) { 47 | t.error(err, 'read ok') 48 | t.deepEquals(data.toString(), 'greetings friend!') 49 | 50 | collect(tarball.read('games/chess'), function (err, data) { 51 | t.error(err, 'read ok') 52 | t.deepEquals(data.toString(), 'how about a nice game of chess') 53 | 54 | collect(tarball.read('foo/bar/baz'), function (err, data) { 55 | t.error(!err, 'read failed ok') 56 | 57 | cleanup() 58 | t.end() 59 | }) 60 | }) 61 | }) 62 | }) 63 | }) 64 | 65 | test('can read all files in an archive with many files', function (t) { 66 | t.plan(301) 67 | 68 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 69 | t.error(err, 'tmpdir setup') 70 | 71 | var filepath = path.join(dir, 'file.tar') 72 | var tarball = new Tarball(filepath) 73 | 74 | var n = 0 75 | for (var i = 0; i < 100; i++) { 76 | n++ 77 | var data = 'this is message #' + i 78 | fromString(data).pipe(tarball.append('hello_' + i + '.txt', function (err) { 79 | t.error(err, 'append ok') 80 | })) 81 | } 82 | 83 | for (var i = 0; i < n; i++) { 84 | ;(function (x) { 85 | collect(tarball.read('hello_' + i + '.txt'), function (err, data) { 86 | t.error(err, 'read ok') 87 | t.equals(data.toString(), 'this is message #' + x) 88 | }) 89 | })(i) 90 | } 91 | }) 92 | }) 93 | 94 | test('REGRESSION: can read a size=0 file', function (t) { 95 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 96 | t.error(err, 'tmpdir setup') 97 | 98 | var filepath = path.join(dir, 'file.tar') 99 | var tarball = new Tarball(filepath) 100 | var data = 'greetings friend!' 101 | tarball.append('hello.txt', function (err) { 102 | t.error(err, 'append ok') 103 | 104 | collect(tarball.read('hello.txt'), function (err, data) { 105 | t.error(err, 'read ok') 106 | t.deepEquals(data, []) 107 | 108 | cleanup() 109 | t.end() 110 | }) 111 | }).end() 112 | }) 113 | }) 114 | -------------------------------------------------------------------------------- /test/userdata-multi.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | var parseTarball = require('./util').parseTarball 7 | 8 | test('get empty userdata', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath, {multifile:true}) 14 | var data = 'greetings friend!' 
15 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 16 | t.error(err, 'append ok') 17 | 18 | tarball.userdata(function (err, userdata) { 19 | t.error(err, 'got userdata ok') 20 | t.deepEquals(userdata, {}) 21 | 22 | parseTarball(filepath, function (err, res, index, meta) { 23 | t.error(err, 'parsed tarball ok') 24 | 25 | t.equals(res.length, 2, 'two entries') 26 | 27 | t.equals(res[0].name, 'hello.txt', 'contents match') 28 | t.equals(res[0].type, 'file', 'type matches') 29 | t.equals(res[0].data.toString(), 'greetings friend!') 30 | 31 | t.equals(res[1].name, '___index.json', 'contents match') 32 | t.equals(res[1].type, 'file', 'type matches') 33 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } } }) 34 | 35 | cleanup() 36 | t.end() 37 | }) 38 | }) 39 | })) 40 | }) 41 | }) 42 | 43 | test('set + get userdata', function (t) { 44 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 45 | t.error(err, 'tmpdir setup') 46 | 47 | var filepath = path.join(dir, 'file.tar') 48 | var tarball = new Tarball(filepath, {multifile:true}) 49 | var data = 'greetings friend!' 50 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 51 | t.error(err, 'append ok') 52 | 53 | tarball.userdata('hello world', function (err) { 54 | t.error(err, 'set userdata ok') 55 | 56 | tarball.userdata(function (err, userdata) { 57 | t.error(err, 'got userdata ok') 58 | t.deepEquals(userdata, 'hello world') 59 | 60 | parseTarball(filepath, function (err, res, index, meta) { 61 | t.error(err, 'parsed tarball ok') 62 | 63 | t.equals(res.length, 2, 'two entries') 64 | 65 | t.equals(res[0].name, 'hello.txt', 'contents match') 66 | t.equals(res[0].type, 'file', 'type matches') 67 | t.equals(res[0].data.toString(), 'greetings friend!') 68 | 69 | t.equals(res[1].name, '___index.json', 'contents match') 70 | t.equals(res[1].type, 'file', 'type matches') 71 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } }, userdata: 'hello world' }) 72 | 73 | cleanup() 74 | t.end() 75 | }) 76 | }) 77 | }) 78 | })) 79 | }) 80 | }) 81 | 82 | test('set userdata, reopen + get', function (t) { 83 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 84 | t.error(err, 'tmpdir setup') 85 | 86 | var filepath = path.join(dir, 'file.tar') 87 | var tarball = new Tarball(filepath, {multifile:true}) 88 | var data = 'greetings friend!' 
89 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 90 | t.error(err, 'append ok') 91 | 92 | tarball.userdata('hello world', function (err) { 93 | t.error(err, 'set userdata ok') 94 | 95 | var tball = new Tarball(filepath) 96 | tball.userdata(function (err, userdata) { 97 | t.error(err, 'got userdata ok') 98 | t.deepEquals(userdata, 'hello world') 99 | 100 | parseTarball(filepath, function (err, res, index, meta) { 101 | t.error(err, 'parsed tarball ok') 102 | 103 | t.equals(res.length, 2, 'two entries') 104 | 105 | t.equals(res[0].name, 'hello.txt', 'contents match') 106 | t.equals(res[0].type, 'file', 'type matches') 107 | t.equals(res[0].data.toString(), 'greetings friend!') 108 | 109 | t.equals(res[1].name, '___index.json', 'contents match') 110 | t.equals(res[1].type, 'file', 'type matches') 111 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } }, userdata: 'hello world' }) 112 | 113 | cleanup() 114 | t.end() 115 | }) 116 | }) 117 | }) 118 | })) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /test/userdata.js: -------------------------------------------------------------------------------- 1 | var Tarball = require('..') 2 | var path = require('path') 3 | var tmp = require('tmp') 4 | var test = require('tape') 5 | var fromString = require('../lib/util').fromString 6 | var parseTarball = require('./util').parseTarball 7 | 8 | test('get empty userdata', function (t) { 9 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 10 | t.error(err, 'tmpdir setup') 11 | 12 | var filepath = path.join(dir, 'file.tar') 13 | var tarball = new Tarball(filepath) 14 | var data = 'greetings friend!' 15 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 16 | t.error(err, 'append ok') 17 | 18 | tarball.userdata(function (err, userdata) { 19 | t.error(err, 'got userdata ok') 20 | t.deepEquals(userdata, {}) 21 | 22 | parseTarball(filepath, function (err, res, index, meta) { 23 | t.error(err, 'parsed tarball ok') 24 | 25 | t.equals(res.length, 2, 'two entries') 26 | 27 | t.equals(res[0].name, 'hello.txt', 'contents match') 28 | t.equals(res[0].type, 'file', 'type matches') 29 | t.equals(res[0].data.toString(), 'greetings friend!') 30 | 31 | t.equals(res[1].name, '___index.json', 'contents match') 32 | t.equals(res[1].type, 'file', 'type matches') 33 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } } }) 34 | 35 | cleanup() 36 | t.end() 37 | }) 38 | }) 39 | })) 40 | }) 41 | }) 42 | 43 | test('set + get userdata', function (t) { 44 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 45 | t.error(err, 'tmpdir setup') 46 | 47 | var filepath = path.join(dir, 'file.tar') 48 | var tarball = new Tarball(filepath) 49 | var data = 'greetings friend!' 
50 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 51 | t.error(err, 'append ok') 52 | 53 | tarball.userdata('hello world', function (err) { 54 | t.error(err, 'set userdata ok') 55 | 56 | tarball.userdata(function (err, userdata) { 57 | t.error(err, 'got userdata ok') 58 | t.deepEquals(userdata, 'hello world') 59 | 60 | parseTarball(filepath, function (err, res, index, meta) { 61 | t.error(err, 'parsed tarball ok') 62 | 63 | t.equals(res.length, 2, 'two entries') 64 | 65 | t.equals(res[0].name, 'hello.txt', 'contents match') 66 | t.equals(res[0].type, 'file', 'type matches') 67 | t.equals(res[0].data.toString(), 'greetings friend!') 68 | 69 | t.equals(res[1].name, '___index.json', 'contents match') 70 | t.equals(res[1].type, 'file', 'type matches') 71 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } }, userdata: 'hello world' }) 72 | 73 | cleanup() 74 | t.end() 75 | }) 76 | }) 77 | }) 78 | })) 79 | }) 80 | }) 81 | 82 | test('set userdata, reopen + get', function (t) { 83 | tmp.dir({unsafeCleanup: true}, function (err, dir, cleanup) { 84 | t.error(err, 'tmpdir setup') 85 | 86 | var filepath = path.join(dir, 'file.tar') 87 | var tarball = new Tarball(filepath) 88 | var data = 'greetings friend!' 89 | fromString(data).pipe(tarball.append('hello.txt', function (err) { 90 | t.error(err, 'append ok') 91 | 92 | tarball.userdata('hello world', function (err) { 93 | t.error(err, 'set userdata ok') 94 | 95 | var tball = new Tarball(filepath) 96 | tball.userdata(function (err, userdata) { 97 | t.error(err, 'got userdata ok') 98 | t.deepEquals(userdata, 'hello world') 99 | 100 | parseTarball(filepath, function (err, res, index, meta) { 101 | t.error(err, 'parsed tarball ok') 102 | 103 | t.equals(res.length, 2, 'two entries') 104 | 105 | t.equals(res[0].name, 'hello.txt', 'contents match') 106 | t.equals(res[0].type, 'file', 'type matches') 107 | t.equals(res[0].data.toString(), 'greetings friend!') 108 | 109 | t.equals(res[1].name, '___index.json', 'contents match') 110 | t.equals(res[1].type, 'file', 'type matches') 111 | t.deepEquals(meta, { index: { 'hello.txt': { offset: 0, size: data.length } }, userdata: 'hello world' }) 112 | 113 | cleanup() 114 | t.end() 115 | }) 116 | }) 117 | }) 118 | })) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /test/util.js: -------------------------------------------------------------------------------- 1 | var tar = require('tar-stream') 2 | var fs = require('fs') 3 | var collect = require('collect-stream') 4 | 5 | module.exports = { 6 | parseTarball: parseTarball 7 | } 8 | 9 | function parseTarball (filepath, cb) { 10 | var res = [] 11 | var error 12 | 13 | var ex = tar.extract() 14 | fs.createReadStream(filepath).pipe(ex) 15 | 16 | ex.on('entry', function (header, stream, next) { 17 | var e = { 18 | name: header.name, 19 | type: header.type 20 | } 21 | res.push(e) 22 | collect(stream, function (err, data) { 23 | error = err || error 24 | e.data = data 25 | next() 26 | }) 27 | }) 28 | 29 | ex.once('finish', function () { 30 | try { 31 | var meta = JSON.parse(res[res.length - 1].data.toString()) 32 | cb(error, res, meta.index, meta) 33 | } catch (e) { 34 | cb(e) 35 | } 36 | }) 37 | } 38 | --------------------------------------------------------------------------------
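The tests above all lean on the `parseTarball` helper in `test/util.js`, which streams a tarball through `tar-stream`, collects every entry, and decodes the trailing `___index.json` into `index` (path to `{ offset, size }`) and `meta` (the index plus any userdata). Below is a minimal standalone sketch of that same callback shape; the `./file.tar` path and the relative require path are illustrative assumptions, not part of the test suite.

```js
// Sketch only: assumes './file.tar' was produced by indexed-tarball and that
// this script sits in the repo root, so the test helper resolves relatively.
var parseTarball = require('./test/util').parseTarball

parseTarball('./file.tar', function (err, entries, index, meta) {
  if (err) throw err
  // entries: every tar entry ({ name, type, data }), including ___index.json
  console.log(entries.map(function (e) { return e.name }))
  // index: maps archived paths to { offset, size } byte ranges
  console.log(index)
  // meta: the full decoded ___index.json (index plus optional userdata)
  console.log(meta)
})
```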