├── .gitignore ├── LICENSE ├── README.md ├── examples └── zip.js ├── index.js ├── package-lock.json ├── package.json └── test ├── bl-minimal.js └── test.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | coverage 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Josh Wolfe 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # yazl 2 | 3 | yet another zip library for node. For unzipping, see 4 | [yauzl](https://github.com/thejoshwolfe/yauzl). 5 | 6 | Design principles: 7 | 8 | * Don't block the JavaScript thread. 9 | Use and provide async APIs. 10 | * Keep memory usage under control. 11 | Don't attempt to buffer entire files in RAM at once. 12 | * Prefer to open input files one at a time than all at once. 13 | This is slightly suboptimal for time performance, 14 | but avoids OS-imposed limits on the number of simultaneously open file handles. 15 | 16 | ## Usage 17 | 18 | ```js 19 | var yazl = require("yazl"); 20 | 21 | var zipfile = new yazl.ZipFile(); 22 | zipfile.addFile("file1.txt", "file1.txt"); 23 | // (add only files, not directories) 24 | zipfile.addFile("path/to/file.txt", "path/in/zipfile.txt"); 25 | // pipe() can be called any time after the constructor 26 | zipfile.outputStream.pipe(fs.createWriteStream("output.zip")).on("close", function() { 27 | console.log("done"); 28 | }); 29 | // alternate apis for adding files: 30 | zipfile.addBuffer(Buffer.from("hello"), "hello.txt"); 31 | zipfile.addReadStreamLazy("stdin.txt", cb => cb(null, process.stdin)); 32 | // call end() after all the files have been added 33 | zipfile.end(); 34 | ``` 35 | 36 | ## API 37 | 38 | ### Class: ZipFile 39 | 40 | #### new ZipFile() 41 | 42 | No parameters. 43 | Nothing can go wrong. 44 | 45 | #### addFile(realPath, metadataPath, [options]) 46 | 47 | Adds a file from the file system at `realPath` into the zipfile as `metadataPath`. 48 | Typically `metadataPath` would be calculated as `path.relative(root, realPath)`. 49 | Unzip programs would extract the file from the zipfile as `metadataPath`. 50 | `realPath` is not stored in the zipfile. 
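For example, a minimal sketch (the `root` directory and `filePaths` list here are hypothetical) of adding a set of files while deriving each `metadataPath` with `path.relative()`:

```js
var path = require("path");

// Assumed inputs: `root` is a directory and `filePaths` lists files inside it.
filePaths.forEach(function(realPath) {
  // Backslashes from Windows paths are fine; yazl replaces them with "/".
  zipfile.addFile(realPath, path.relative(root, realPath));
});
```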
51 | 52 | A valid `metadataPath` must not be blank. 53 | If a `metadataPath` contains `"\\"` characters, they will be replaced by `"/"` characters. 54 | After this substitution, a valid `metadataPath` must not start with `"/"` or `/[A-Za-z]:\//`, 55 | and must not contain `".."` path segments. 56 | File paths must not end with `"/"`, but see `addEmptyDirectory()`. 57 | After UTF-8 encoding, `metadataPath` must be at most `0xffff` bytes in length. 58 | 59 | `options` may be omitted or null and has the following structure and default values: 60 | 61 | ```js 62 | { 63 | mtime: stats.mtime, 64 | mode: stats.mode, 65 | compress: true, 66 | compressionLevel: 6, 67 | forceZip64Format: false, 68 | forceDosTimestamp: false, 69 | fileComment: "", // or a UTF-8 Buffer 70 | } 71 | ``` 72 | 73 | Use `mtime` and/or `mode` to override the values 74 | that would normally be obtained from the `fs.Stats` for the `realPath`. 75 | The mode is the unix permission bits and file type. 76 | The mtime and mode are stored in the zip file in the fields "last mod file time", 77 | "last mod file date", and "external file attributes". 78 | yazl does not store group and user ids in the zip file. 79 | 80 | If `compress` is `true`, the file data will be deflated (compression method 8). 81 | If `compress` is `false`, the file data will be stored (compression method 0). 82 | If `compressionLevel` is specified, it will be passed to [`zlib`](https://nodejs.org/api/zlib.html#class-options). 83 | Specifying `compressionLevel: 0` is equivalent to `compress: false`. 84 | If both `compress` and `compressionLevel` are given, yazl asserts that they do not conflict, i.e. `!!compress === !!compressionLevel`. 85 | 86 | If `forceZip64Format` is `true`, yazl will use ZIP64 format in this entry's Data Descriptor 87 | and Central Directory Record even if not needed (this may be useful for testing). 88 | Otherwise, yazl will use ZIP64 format where necessary. 89 | 90 | Since yazl version 3.3.0, yazl includes the Info-ZIP "universal timestamp" extended field (`0x5455` aka `"UT"`) to encode the `mtime`. 91 | The Info-ZIP timestamp is a more modern encoding for the mtime and is generally recommended. 92 | Set `forceDosTimestamp` to `true` to revert to the pre-3.3.0 behavior, disabling this extended field. 93 | The DOS encoding is always included regardless of this option, because it is required in the fixed-size metadata of every archive entry. 94 | The benefits of the Info-ZIP encoding: the timezone is specified as always UTC, which is better for cloud environments and for teams working in multiple timezones; it can encode "time 0", the unix epoch in 1970, which is better for some package managers; and the precision is 1 second rather than rounding to the nearest even second. The disadvantage of including this field is that it adds an extra 9 bytes of metadata per entry in the archive. 95 | 96 | When attempting to encode an `mtime` outside the supported range for either format, such as the year 1970 in the DOS format or the year 2039 for the modern format, the time will be clamped to the closest supported time. 97 | 98 | If `fileComment` is a `string`, it will be encoded with UTF-8. 99 | If `fileComment` is a `Buffer`, it should be a UTF-8 encoded string. 100 | In UTF-8, `fileComment` must be at most `0xffff` bytes in length. 101 | This becomes the "file comment" field in this entry's central directory file header. 
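As an illustration, here is a hedged example of overriding some of these defaults (the file names and values are arbitrary):

```js
zipfile.addFile("path/to/report.pdf", "report.pdf", {
  mtime: new Date(0),   // the unix epoch; the Info-ZIP field stores it exactly, the DOS fields clamp to 1980
  mode: 0o100644,       // regular file, permissions rw-r--r--
  compressionLevel: 9,  // passed through to zlib; implies compression
  fileComment: "generated nightly",
});
```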
102 | 103 | Internally, `fs.stat()` is called immediately in the `addFile` function, 104 | and `fs.createReadStream()` is used later when the file data is actually required. 105 | Throughout adding and encoding `n` files with `addFile()`, 106 | the number of simultaneous open files is `O(1)`, probably just 1 at a time. 107 | 108 | #### addReadStream(readStream, metadataPath, [options]) 109 | 110 | Adds a file to the zip file whose content is read from `readStream`. 111 | This method is effectively implemented as `this.addReadStreamLazy(metadataPath, options, cb => cb(null, readStream))`. 112 | 113 | In general, it is recommended to use `addReadStreamLazy` instead of this method 114 | to avoid holding a large number of system resources open for a long time. 115 | This method is provided for backward compatibility, 116 | and for convenience in cases where the `readStream` doesn't require meaningful resources to hold open and waiting. 117 | 118 | #### addReadStreamLazy(metadataPath[, options], getReadStreamFunction) 119 | 120 | Adds a file to the zip file whose content is read from a read stream obtained by calling `getReadStreamFunction(cb)`. 121 | `getReadStreamFunction(cb)` is called with a single callback function. 122 | Your implementation of `getReadStreamFunction` should eventually call `cb(err, readStream)` 123 | and give the `readStream` that provides the contents of the file to add to the zip file. 124 | If `err` is given (if it is truthy), it will be emitted from this `ZipFile` object. 125 | The return value from `cb` is unspecified. 126 | 127 | See `addFile()` for the meaning of the `metadataPath` parameter. 128 | `typeof getReadStreamFunction` must be `'function'`, which is used to determine when `options` has been omitted. 129 | `options` may be omitted or null and has the following structure and default values: 130 | 131 | ```js 132 | { 133 | mtime: new Date(), 134 | mode: 0o100664, 135 | compress: true, 136 | compressionLevel: 6, 137 | forceZip64Format: false, 138 | forceDosTimestamp: false, 139 | fileComment: "", // or a UTF-8 Buffer 140 | size: 12345, // example value 141 | } 142 | ``` 143 | 144 | See `addFile()` for the meaning of `mtime`, `mode`, `compress`, `compressionLevel`, `forceZip64Format`, `forceDosTimestamp`, and `fileComment`. 145 | If `size` is given, it will be checked against the actual number of bytes in the `readStream`, 146 | and an error will be emitted if there is a mismatch. 147 | See the documentation on `calculatedTotalSizeCallback` for why the `size` option exists. 148 | 149 | Note that yazl will `.pipe()` data from `readStream`, so be careful using `.on('data')`. 150 | In certain versions of node, `.on('data')` makes `.pipe()` behave incorrectly. 151 | 152 | Here's an example call to this method to illustrate the function callbacks: 153 | 154 | ```js 155 | zipfile.addReadStreamLazy("path/in/archive.txt", function(cb) { 156 | var readStream = getTheReadStreamSomehow(); 157 | cb(null, readStream); 158 | }); 159 | ``` 160 | 161 | #### addBuffer(buffer, metadataPath, [options]) 162 | 163 | Adds a file to the zip file whose content is `buffer`. 164 | See below for info on the limitations on the size of `buffer`. 165 | See `addFile()` for info about the `metadataPath` parameter. 
166 | `options` may be omitted or null and has the following structure and default values: 167 | 168 | ```js 169 | { 170 | mtime: new Date(), 171 | mode: 0o100664, 172 | compress: true, 173 | compressionLevel: 6, 174 | forceZip64Format: false, 175 | forceDosTimestamp: false, 176 | fileComment: "", // or a UTF-8 Buffer 177 | } 178 | ``` 179 | 180 | See `addFile()` for the meaning of `mtime`, `mode`, `compress`, `compressionLevel`, `forceZip64Format`, `forceDosTimestamp`, and `fileComment`. 181 | 182 | This method has the unique property that General Purpose Bit `3` will not be used in the Local File Header. 183 | This doesn't matter for unzip implementations that conform to the Zip File Spec. 184 | However, 7-Zip 9.20 has a known bug where General Purpose Bit `3` is declared an unsupported compression method 185 | (note that it really has nothing to do with the compression method). 186 | See [issue #11](https://github.com/thejoshwolfe/yazl/issues/11). 187 | If you would like to create zip files that 7-Zip 9.20 can understand, 188 | you must use `addBuffer()` instead of `addFile()`, `addReadStream()`, or `addReadStreamLazy()` for all entries in the zip file 189 | (and `addEmptyDirectory()` is fine too). 190 | 191 | Note that even when yazl provides the file sizes in the Local File Header, 192 | yazl never uses ZIP64 format for Local File Headers due to the size limit on `buffer` (see below). 193 | 194 | ##### Size limitation on buffer 195 | 196 | In order to require the ZIP64 format for a local file header, 197 | the provided `buffer` parameter would need to exceed `0xfffffffe` in length. 198 | Alternatively, the `buffer` parameter might not exceed `0xfffffffe` in length, 199 | but zlib compression fails to compress the buffer and actually inflates the data to more than `0xfffffffe` in length. 200 | yazl does not allow either of these scenarios, and prevents them both by enforcing a size limit on the `buffer` parameter. 201 | 202 | According to [this zlib documentation](http://www.zlib.net/zlib_tech.html), 203 | the worst case compression results in "an expansion of at most 13.5%, plus eleven bytes". 204 | Furthermore, some configurations of Node.js impose a size limit of `0x3fffffff` on every `Buffer` object. 205 | Running this size through the worst case compression of zlib still produces a size less than `0xfffffffe` bytes. 206 | 207 | Therefore, yazl enforces that the provided `buffer` parameter must be at most `0x3fffffff` bytes long. 208 | 209 | #### addEmptyDirectory(metadataPath, [options]) 210 | 211 | Adds an entry to the zip file that indicates a directory should be created, 212 | even if no other items in the zip file are contained in the directory. 213 | This method is only required if the zip file is intended to contain an empty directory. 214 | 215 | See `addFile()` for info about the `metadataPath` parameter. 216 | If `metadataPath` does not end with a `"/"`, a `"/"` will be appended. 217 | 218 | `options` may be omitted or null and has the following structure and default values: 219 | 220 | ```js 221 | { 222 | mtime: new Date(), 223 | mode: 0o40775, 224 | forceDosTimestamp: false, 225 | } 226 | ``` 227 | 228 | See `addFile()` for the meaning of `mtime`, `mode`, and `forceDosTimestamp`. 229 | 230 | #### end([options], [calculatedTotalSizeCallback]) 231 | 232 | Indicates that no more files will be added via `addFile()`, `addReadStream()`, `addReadStreamLazy()`, `addBuffer()`, or `addEmptyDirectory()`, 233 | and causes the eventual close of `outputStream`. 
234 | 235 | `options` may be omitted or null and has the following structure and default values: 236 | 237 | ```js 238 | { 239 | forceZip64Format: false, 240 | comment: "", // or a CP437 Buffer 241 | } 242 | ``` 243 | 244 | If `forceZip64Format` is `true`, yazl will include the ZIP64 End of Central Directory Locator 245 | and ZIP64 End of Central Directory Record even if not needed (this may be useful for testing). 246 | Otherwise, yazl will include these structures if necessary. 247 | 248 | If `comment` is a `string`, it will be encoded with CP437. 249 | If `comment` is a `Buffer`, it should be a CP437 encoded string. 250 | `comment` must be at most `0xffff` bytes in length and must not include the byte sequence `[0x50,0x4b,0x05,0x06]`. 251 | This becomes the ".ZIP file comment" field in the end of central directory record. 252 | Note that in practice, most zipfile readers interpret this field in UTF-8 instead of CP437. 253 | If your string uses only codepoints in the range `0x20...0x7e` 254 | (printable ASCII, no whitespace except for single space `' '`), 255 | then UTF-8 and CP437 (and ASCII) encodings are all identical. 256 | This restriction is recommended for maximum compatibility. 257 | To use UTF-8 encoding at your own risk, pass a `Buffer` into this function; it will not be validated. 258 | 259 | If specified and non-null, `calculatedTotalSizeCallback` is called with the single parameter `(calculatedTotalSize)` 260 | sometime during or after the call to `end()`. 261 | `calculatedTotalSize` is of type `Number` and can either be `-1` 262 | or the guaranteed eventual size in bytes of the output data that can be read from `outputStream`. 263 | 264 | Note that `calculatedTotalSizeCallback` is usually called well before `outputStream` has piped all its data; 265 | this callback does not mean that the stream is done. 266 | 267 | (The `calculatedTotalSizeCallback` feature was added to this library to support the use case of a web server constructing a zip file dynamically 268 | and serving it without buffering the contents on disk or in RAM. 269 | `calculatedTotalSize` can become the `Content-Length` header before piping the `outputStream` as the response body. A sketch of this pattern is shown under `outputStream` below.) 270 | 271 | If `calculatedTotalSize` is `-1`, it means the total size is too hard to guess before processing the input file data. 272 | To ensure the final size is known, disable compression (set `compress: false` or `compressionLevel: 0`) 273 | in every call to `addFile()`, `addReadStream()`, `addReadStreamLazy()`, and `addBuffer()`, 274 | and additionally specify the optional `size` option in every call to `addReadStream()` and `addReadStreamLazy()`. 275 | 276 | The call to `calculatedTotalSizeCallback` might be delayed if yazl is still waiting for `fs.Stats` for an `addFile()` entry. 277 | If `addFile()` was never called, `calculatedTotalSizeCallback` will be called during the call to `end()`. 278 | It is not required to start piping data from `outputStream` before `calculatedTotalSizeCallback` is called. 279 | `calculatedTotalSizeCallback` will be called only once, and only if this is the first call to `end()`. 280 | 281 | #### outputStream 282 | 283 | A readable stream that will produce the contents of the zip file. 284 | It is typical to pipe this stream to a writable stream created from `fs.createWriteStream()`. 285 | 286 | Internally, large amounts of file data are piped to `outputStream` using `pipe()`, 287 | which means throttling happens appropriately when this stream is piped to a slow destination. 
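To illustrate the web-server use case mentioned under `end()` above, here is a minimal sketch using Node's built-in `http` module (the file names are hypothetical); compression is disabled so the total size is predictable, and the result becomes the `Content-Length` header before `outputStream` is piped as the response body:

```js
var http = require("http");
var yazl = require("yazl");

http.createServer(function(request, response) {
  var zipfile = new yazl.ZipFile();
  zipfile.addFile("report.csv", "report.csv", {compress: false});
  zipfile.addBuffer(Buffer.from("see report.csv"), "README.txt", {compress: false});
  zipfile.end(function(calculatedTotalSize) {
    // calculatedTotalSize cannot be -1 here, because compression is disabled for every entry.
    response.writeHead(200, {
      "Content-Type": "application/zip",
      "Content-Length": calculatedTotalSize,
    });
    zipfile.outputStream.pipe(response);
  });
}).listen(8080);
```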
288 | 289 | Data becomes available in this stream soon after calling one of `addFile()`, `addReadStream()`, `addReadStreamLazy()`, `addBuffer()`, or `addEmptyDirectory()`. 290 | Clients can call `pipe()` on this stream at any time, 291 | such as immediately after getting a new `ZipFile` instance, or long after calling `end()`. 292 | 293 | This stream will remain open while you add entries until you `end()` the zip file. 294 | 295 | As a reminder, be careful using both `.on('data')` and `.pipe()` with this stream. 296 | In certain versions of node, you cannot use both `.on('data')` and `.pipe()` successfully. 297 | 298 | ### dateToDosDateTime(jsDate) 299 | 300 | *Deprecated* since yazl 3.3.0. 301 | 302 | This function only remains exported in order to maintain compatibility with older versions of yazl. 303 | It will be removed in yazl 4.0.0 unless someone asks for it to remain supported. 304 | If you ever have a use case for calling this function directly please 305 | [open an issue against yazl](https://github.com/thejoshwolfe/yazl/issues/new) 306 | requesting that this function be properly supported again. 307 | 308 | ## Regarding ZIP64 Support 309 | 310 | yazl automatically uses ZIP64 format to support files and archives over `2^32 - 2` bytes (~4GB) in size 311 | and to support archives with more than `2^16 - 2` (65534) files. 312 | (See the `forceZip64Format` option in the API above for more control over this behavior.) 313 | ZIP64 format is necessary to exceed the limits inherent in the original zip file format. 314 | 315 | ZIP64 format is supported by most popular zipfile readers, but not by all of them. 316 | Notably, the Mac Archive Utility does not understand ZIP64 format (as of writing this), 317 | and will behave very strangely when presented with such an archive. 318 | 319 | ## Output Structure 320 | 321 | The Zip File Spec leaves a lot of flexibility up to the zip file creator. 322 | This section explains and justifies yazl's interpretation and decisions regarding this flexibility. 323 | 324 | This section is probably not useful to yazl clients, 325 | but may be interesting to unzip implementors and zip file enthusiasts. 326 | 327 | ### Disk Numbers 328 | 329 | All values related to disk numbers are `0`, 330 | because yazl has no multi-disk archive support. 331 | (The exception being the Total Number of Disks field in 332 | the ZIP64 End of Central Directory Locator, which is always `1`.) 333 | 334 | ### Version Made By 335 | 336 | Always `0x033f == (3 << 8) | 63`, which means UNIX (3) 337 | and made from the spec version 6.3 (63). 338 | 339 | Note that the "UNIX" has implications in the External File Attributes. 340 | 341 | ### Version Needed to Extract 342 | 343 | Usually `20`, meaning 2.0. This allows filenames and file comments to be UTF-8 encoded. 344 | 345 | When ZIP64 format is used, some of the Version Needed to Extract values will be `45`, meaning 4.5. 346 | When this happens, there may be a mix of `20` and `45` values throughout the zipfile. 347 | 348 | ### General Purpose Bit Flag 349 | 350 | Bit `11` is always set. 351 | Filenames (and file comments) are always encoded in UTF-8, even if the result is indistinguishable from ascii. 352 | 353 | Bit `3` is usually set in the Local File Header. 354 | To support both a streaming input and streaming output api, 355 | it is impossible to know the crc32 before processing the file data. 356 | When bit `3` is set, data Descriptors are given after each file data with this information, as per the spec. 
357 | But remember a complete metadata listing is still always available in the central directory record, 358 | so if unzip implementations are relying on that, like they should, 359 | none of this paragraph will matter. 360 | Even so, some popular unzip implementations do not follow the spec. 361 | The Mac Archive Utility requires Data Descriptors to include the optional signature, 362 | so yazl includes the optional data descriptor signature. 363 | When bit `3` is not used, the Mac Archive Utility requires there to be no data descriptor, so yazl skips it in that case. 364 | Additionally, 7-Zip 9.20 does not seem to support bit `3` at all 365 | (see [issue #11](https://github.com/thejoshwolfe/yazl/issues/11)). 366 | 367 | All other bits are unset. 368 | 369 | ### Internal File Attributes 370 | 371 | Always `0`. 372 | The "apparently an ASCII or text file" bit is always unset meaning "apparently binary". 373 | This kind of determination is outside the scope of yazl, 374 | and is probably not significant in any modern unzip implementation. 375 | 376 | ### External File Attributes 377 | 378 | Always `stats.mode << 16`. 379 | This is apparently the convention for "version made by" = `0x03xx` (UNIX). 380 | 381 | Note that for directory entries (see `addEmptyDirectory()`), 382 | it is conventional to use the lower 8 bits for the MS-DOS directory attribute byte. 383 | However, the spec says this is only required if the Version Made By is DOS, 384 | so this library does not do that. 385 | 386 | ### Directory Entries 387 | 388 | When adding a `metadataPath` such as `"parent/file.txt"`, yazl does not add a directory entry for `"parent/"`, 389 | because file entries imply the need for their parent directories. 390 | Unzip clients seem to respect this style of pathing, 391 | and the zip file spec does not specify what is standard in this regard. 392 | 393 | In order to create empty directories, use `addEmptyDirectory()`. 394 | 395 | ### Size of Local File and Central Directory Entry Metadata 396 | 397 | The spec recommends that "The combined length of any directory record and [the file name, 398 | extra field, and comment fields] should not generally exceed 65,535 bytes". 399 | yazl makes no attempt to respect this recommendation. 400 | Instead, each of the fields is limited to 65,535 bytes due to the length of each being encoded as an unsigned 16 bit integer. 401 | 402 | ## Change History 403 | 404 | * 3.3.1 (2024-Nov-23) 405 | * Fix bug in `addReadStreamLazy()` timing that could result in the given `getReadStreamFunction` being called too soon or being called more than once. [issue #87](https://github.com/thejoshwolfe/yazl/issues/87) 406 | * 3.3.0 (2024-Nov-08) 407 | * Add support for encoding timestamps in the more modern Info-ZIP "universal timestamp" extended field (`0x5455` aka `"UT"`): supports years as old as 1901 instead of only 1980, notably including 1970; encodes timestamp in UTC rather than an unspecified system-dependent local timezone. [pull #86](https://github.com/thejoshwolfe/yazl/pull/86) 408 | * Disable spending the extra 9 bytes of metadata per entry with `forceDosTimestamp:true`. 409 | * Out-of-bounds timestamps are now clamped rather than overflowing/underflowing and wrapping around. 410 | * Marked `dateToDosDateTime()` as deprecated. 411 | * 3.2.1 (2024-Nov-03) 412 | * Fix typo in `addBuffer()` where `compressionLevel` wasn't being passed to zlib. 
413 | * 3.2.0 (2024-Nov-02) 414 | * Add `compressionLevel` option to `addFile()`, `addBuffer()`, `addReadStream()`, `addReadStreamLazy()`. [issue #84](https://github.com/thejoshwolfe/yazl/issues/84) 415 | * Change wording around `calculatedTotalSizeCallback` again, and fix the documentation incorrectly claiming that `addEmptyDirectory()` was relevant in that context. 416 | * 3.1.0 (2024-Oct-19) 417 | * Add `addReadStreamLazy()` as a replacement for `addReadStream()`. The latter is maintained for compatibility and situational convenience. [issue #74](https://github.com/thejoshwolfe/yazl/issues/74) [pull #80](https://github.com/thejoshwolfe/yazl/pull/80) 418 | * The `add*()` methods will now throw an error if you call them after calling `end()`. The documented semantics have always regarded this as undefined behavior. 419 | * Some subtle and undocumented changes to error handling. Error handling isn't very well tested. 420 | * 3.0.1 (2024-Oct-19) 421 | * Change the documented name of `finalSizeCallback` to `calculatedTotalSizeCallback` to hopefully avoid confusion. [issue #71](https://github.com/thejoshwolfe/yazl/issues/71) 422 | * 3.0.0 (2024-Oct-18) 423 | * Upgrade dependencies. This indirectly drops support for older versions of node. 424 | * 2.5.1 (2018-Nov-30) 425 | * Fix support for old versions of Node and add official support for Node versions 0.10, 4, 6, 8, 10. [pull #49](https://github.com/thejoshwolfe/yazl/pull/49) 426 | * 2.5.0 (2018-Nov-15) 427 | * Add support for `comment` and `fileComment`. [pull #44](https://github.com/thejoshwolfe/yazl/pull/44) 428 | * Avoid `new Buffer()`. [pull #43](https://github.com/thejoshwolfe/yazl/pull/43) 429 | * 2.4.3 (2017-Oct-30) 430 | * Clarify readme. [pull #33](https://github.com/thejoshwolfe/yazl/pull/33) 431 | * 2.4.2 (2016-Oct-26) 432 | * Remove octal literals to make yazl compatible with strict mode. [pull #28](https://github.com/thejoshwolfe/yazl/pull/28) 433 | * 2.4.1 (2016-Jun-30) 434 | * Fix Mac Archive Utility compatibility issue. [issue #24](https://github.com/thejoshwolfe/yazl/issues/24) 435 | * 2.4.0 (2016-Jun-26) 436 | * Add ZIP64 support. [issue #6](https://github.com/thejoshwolfe/yazl/issues/6) 437 | * 2.3.1 (2016-May-09) 438 | * Remove `.npmignore` from npm package. [pull #22](https://github.com/thejoshwolfe/yazl/pull/22) 439 | * 2.3.0 (2015-Dec-23) 440 | * `metadataPath` can have `\` characters now; they will be replaced with `/`. [issue #18](https://github.com/thejoshwolfe/yazl/issues/18) 441 | * 2.2.2 (2015-May-07) 442 | * Fix 7-Zip compatibility issue. [pull request #17](https://github.com/thejoshwolfe/yazl/pull/17) 443 | * 2.2.1 (2015-Mar-27) 444 | * Fix Mac Archive Utility compatibility issue. [issue #14](https://github.com/thejoshwolfe/yazl/issues/14) 445 | * 2.2.0 (2015-Mar-21) 446 | * Avoid using general purpose bit 3 for `addBuffer()` calls. [issue #13](https://github.com/thejoshwolfe/yazl/issues/13) 447 | * 2.1.3 (2015-Mar-20) 448 | * Fix bug when only addBuffer() and end() are called. [issue #12](https://github.com/thejoshwolfe/yazl/issues/12) 449 | * 2.1.2 (2015-Mar-03) 450 | * Fixed typo in parameter validation. [pull request #10](https://github.com/thejoshwolfe/yazl/pull/10) 451 | * 2.1.1 (2015-Mar-01) 452 | * Fixed stack overflow when using addBuffer() in certain ways. [issue #9](https://github.com/thejoshwolfe/yazl/issues/9) 453 | * 2.1.0 (2015-Jan-16) 454 | * Added `addEmptyDirectory()`. 455 | * `options` is now optional for `addReadStream()` and `addBuffer()`. 
456 | * 2.0.0 (2014-Sep-24) 457 | * Initial release. 458 | -------------------------------------------------------------------------------- /examples/zip.js: -------------------------------------------------------------------------------- 1 | var usage = 2 | "node " + __filename.replace(/.*[\/\\]/, "") + " " + 3 | "[FILE | --[no-]compress | {--file|--buffer|--stream} | --[no-]zip64 | --[no-]verbose]... -o OUTPUT.zip" + "\n" + 4 | "\n" + 5 | "all arguments and switches are processed in order. for example:" + "\n" + 6 | " node zip.js --compress a.txt --no-compress b.txt -o out.zip" + "\n" + 7 | "would result in compression for a.txt, but not for b.txt."; 8 | var yazl = require("../"); 9 | var fs = require("fs"); 10 | 11 | var zipfile = new yazl.ZipFile(); 12 | var options = {compress: false, forceZip64Format: false}; 13 | var addStrategy = "addFile"; 14 | var verbose = false; 15 | 16 | var args = process.argv.slice(2); 17 | if (Math.max(args.indexOf("-h"), args.indexOf("--help")) !== -1) { 18 | console.log("usage: " + usage); 19 | process.exit(1); 20 | } 21 | // this one's important 22 | if (args.indexOf("-o") === -1) throw new Error("missing -o"); 23 | if (args.indexOf("-o") + 1 >= args.length) throw new Error("missing argument after -o"); 24 | 25 | var its_the_dash_o = false; 26 | args.forEach(function(arg) { 27 | if (its_the_dash_o) { 28 | its_the_dash_o = false; 29 | var stream = arg === "-" ? process.stdout : fs.createWriteStream(arg); 30 | zipfile.outputStream.pipe(stream); 31 | } else if (arg === "--compress") { 32 | options.compress = true; 33 | } else if (arg === "--no-compress") { 34 | options.compress = false; 35 | } else if (arg === "--file") { 36 | addStrategy = "addFile"; 37 | } else if (arg === "--buffer") { 38 | addStrategy = "addBuffer"; 39 | } else if (arg === "--stream") { 40 | addStrategy = "addReadStream"; 41 | } else if (arg === "--no-verbose") { 42 | verbose = false; 43 | } else if (arg === "--verbose") { 44 | verbose = true; 45 | } else if (arg === "--zip64") { 46 | options.forceZip64Format = true; 47 | } else if (arg === "--no-zip64") { 48 | options.forceZip64Format = false; 49 | } else if (arg === "-o") { 50 | its_the_dash_o = true; 51 | } else if (arg === "-") { 52 | zipfile.addReadStream(process.stdin, "stdin.txt", options); 53 | } else { 54 | // file thing 55 | var stats = fs.statSync(arg); 56 | if (stats.isFile()) { 57 | switch (addStrategy) { 58 | case "addFile": 59 | if (verbose) console.log("addFile(" + 60 | JSON.stringify(arg) + ", " + 61 | JSON.stringify(arg) + ", " + 62 | JSON.stringify(options) + ");"); 63 | zipfile.addFile(arg, arg, options); 64 | break; 65 | case "addBuffer": 66 | if (verbose) console.log("addBuffer(fs.readFileSync(" + 67 | JSON.stringify(arg) + "), " + 68 | JSON.stringify(arg) + ", " + 69 | JSON.stringify(options) + ");"); 70 | zipfile.addBuffer(fs.readFileSync(arg), arg, options); 71 | break; 72 | case "addReadStream": 73 | if (verbose) console.log("addReadStream(fs.createReadStream(" + 74 | JSON.stringify(arg) + "), " + 75 | JSON.stringify(arg) + ", " + 76 | JSON.stringify(options) + ");"); 77 | zipfile.addReadStream(fs.createReadStream(arg), arg, options); 78 | break; 79 | default: throw new Error(); 80 | } 81 | } else if (stats.isDirectory()) { 82 | if (verbose) console.log("addEmptyDirectory(" + 83 | JSON.stringify(arg) + ");"); 84 | zipfile.addEmptyDirectory(arg); 85 | } else { 86 | throw new Error("what is this: " + arg); 87 | } 88 | } 89 | }); 90 | zipfile.end({forceZip64Format: options.forceZip64Format}, function(calculatedTotalSize) { 91 | 
console.log("calculatedTotalSize prediction: " + calculatedTotalSize); 92 | }); 93 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | var Transform = require("stream").Transform; 3 | var PassThrough = require("stream").PassThrough; 4 | var zlib = require("zlib"); 5 | var util = require("util"); 6 | var EventEmitter = require("events").EventEmitter; 7 | var errorMonitor = require("events").errorMonitor; 8 | var crc32 = require("buffer-crc32"); 9 | 10 | exports.ZipFile = ZipFile; 11 | exports.dateToDosDateTime = dateToDosDateTime; 12 | 13 | util.inherits(ZipFile, EventEmitter); 14 | function ZipFile() { 15 | this.outputStream = new PassThrough(); 16 | this.entries = []; 17 | this.outputStreamCursor = 0; 18 | this.ended = false; // .end() sets this 19 | this.allDone = false; // set when we've written the last bytes 20 | this.forceZip64Eocd = false; // configurable in .end() 21 | this.errored = false; 22 | this.on(errorMonitor, function() { 23 | this.errored = true; 24 | }); 25 | } 26 | 27 | ZipFile.prototype.addFile = function(realPath, metadataPath, options) { 28 | var self = this; 29 | metadataPath = validateMetadataPath(metadataPath, false); 30 | if (options == null) options = {}; 31 | 32 | if (shouldIgnoreAdding(self)) return; 33 | var entry = new Entry(metadataPath, false, options); 34 | self.entries.push(entry); 35 | fs.stat(realPath, function(err, stats) { 36 | if (err) return self.emit("error", err); 37 | if (!stats.isFile()) return self.emit("error", new Error("not a file: " + realPath)); 38 | entry.uncompressedSize = stats.size; 39 | if (options.mtime == null) entry.setLastModDate(stats.mtime); 40 | if (options.mode == null) entry.setFileAttributesMode(stats.mode); 41 | entry.setFileDataPumpFunction(function() { 42 | var readStream = fs.createReadStream(realPath); 43 | entry.state = Entry.FILE_DATA_IN_PROGRESS; 44 | readStream.on("error", function(err) { 45 | self.emit("error", err); 46 | }); 47 | pumpFileDataReadStream(self, entry, readStream); 48 | }); 49 | pumpEntries(self); 50 | }); 51 | }; 52 | 53 | ZipFile.prototype.addReadStream = function(readStream, metadataPath, options) { 54 | this.addReadStreamLazy(metadataPath, options, function(cb) { 55 | cb(null, readStream); 56 | }); 57 | }; 58 | 59 | ZipFile.prototype.addReadStreamLazy = function(metadataPath, options, getReadStreamFunction) { 60 | var self = this; 61 | if (typeof options === "function") { 62 | getReadStreamFunction = options; 63 | options = null; 64 | } 65 | if (options == null) options = {}; 66 | metadataPath = validateMetadataPath(metadataPath, false); 67 | 68 | if (shouldIgnoreAdding(self)) return; 69 | var entry = new Entry(metadataPath, false, options); 70 | self.entries.push(entry); 71 | entry.setFileDataPumpFunction(function() { 72 | entry.state = Entry.FILE_DATA_IN_PROGRESS; 73 | getReadStreamFunction(function(err, readStream) { 74 | if (err) return self.emit("error", err); 75 | pumpFileDataReadStream(self, entry, readStream); 76 | }); 77 | }); 78 | pumpEntries(self); 79 | }; 80 | 81 | ZipFile.prototype.addBuffer = function(buffer, metadataPath, options) { 82 | var self = this; 83 | metadataPath = validateMetadataPath(metadataPath, false); 84 | if (buffer.length > 0x3fffffff) throw new Error("buffer too large: " + buffer.length + " > " + 0x3fffffff); 85 | if (options == null) options = {}; 86 | if (options.size != null) throw new Error("options.size not 
allowed"); 87 | 88 | if (shouldIgnoreAdding(self)) return; 89 | var entry = new Entry(metadataPath, false, options); 90 | entry.uncompressedSize = buffer.length; 91 | entry.crc32 = crc32.unsigned(buffer); 92 | entry.crcAndFileSizeKnown = true; 93 | self.entries.push(entry); 94 | if (entry.compressionLevel === 0) { 95 | setCompressedBuffer(buffer); 96 | } else { 97 | zlib.deflateRaw(buffer, {level:entry.compressionLevel}, function(err, compressedBuffer) { 98 | setCompressedBuffer(compressedBuffer); 99 | }); 100 | } 101 | function setCompressedBuffer(compressedBuffer) { 102 | entry.compressedSize = compressedBuffer.length; 103 | entry.setFileDataPumpFunction(function() { 104 | writeToOutputStream(self, compressedBuffer); 105 | writeToOutputStream(self, entry.getDataDescriptor()); 106 | entry.state = Entry.FILE_DATA_DONE; 107 | 108 | // don't call pumpEntries() recursively. 109 | // (also, don't call process.nextTick recursively.) 110 | setImmediate(function() { 111 | pumpEntries(self); 112 | }); 113 | }); 114 | pumpEntries(self); 115 | } 116 | }; 117 | 118 | ZipFile.prototype.addEmptyDirectory = function(metadataPath, options) { 119 | var self = this; 120 | metadataPath = validateMetadataPath(metadataPath, true); 121 | if (options == null) options = {}; 122 | if (options.size != null) throw new Error("options.size not allowed"); 123 | if (options.compress != null) throw new Error("options.compress not allowed"); 124 | if (options.compressionLevel != null) throw new Error("options.compressionLevel not allowed"); 125 | 126 | if (shouldIgnoreAdding(self)) return; 127 | var entry = new Entry(metadataPath, true, options); 128 | self.entries.push(entry); 129 | entry.setFileDataPumpFunction(function() { 130 | writeToOutputStream(self, entry.getDataDescriptor()); 131 | entry.state = Entry.FILE_DATA_DONE; 132 | pumpEntries(self); 133 | }); 134 | pumpEntries(self); 135 | }; 136 | 137 | var eocdrSignatureBuffer = bufferFrom([0x50, 0x4b, 0x05, 0x06]); 138 | 139 | ZipFile.prototype.end = function(options, calculatedTotalSizeCallback) { 140 | if (typeof options === "function") { 141 | calculatedTotalSizeCallback = options; 142 | options = null; 143 | } 144 | if (options == null) options = {}; 145 | if (this.ended) return; 146 | this.ended = true; 147 | if (this.errored) return; 148 | this.calculatedTotalSizeCallback = calculatedTotalSizeCallback; 149 | this.forceZip64Eocd = !!options.forceZip64Format; 150 | if (options.comment) { 151 | if (typeof options.comment === "string") { 152 | this.comment = encodeCp437(options.comment); 153 | } else { 154 | // It should be a Buffer 155 | this.comment = options.comment; 156 | } 157 | if (this.comment.length > 0xffff) throw new Error("comment is too large"); 158 | // gotta check for this, because the zipfile format is actually ambiguous. 159 | if (bufferIncludes(this.comment, eocdrSignatureBuffer)) throw new Error("comment contains end of central directory record signature"); 160 | } else { 161 | // no comment. 162 | this.comment = EMPTY_BUFFER; 163 | } 164 | pumpEntries(this); 165 | }; 166 | 167 | function writeToOutputStream(self, buffer) { 168 | self.outputStream.write(buffer); 169 | self.outputStreamCursor += buffer.length; 170 | } 171 | 172 | function pumpFileDataReadStream(self, entry, readStream) { 173 | var crc32Watcher = new Crc32Watcher(); 174 | var uncompressedSizeCounter = new ByteCounter(); 175 | var compressor = entry.compressionLevel !== 0 ? 
new zlib.DeflateRaw({level:entry.compressionLevel}) : new PassThrough(); 176 | var compressedSizeCounter = new ByteCounter(); 177 | readStream.pipe(crc32Watcher) 178 | .pipe(uncompressedSizeCounter) 179 | .pipe(compressor) 180 | .pipe(compressedSizeCounter) 181 | .pipe(self.outputStream, {end: false}); 182 | compressedSizeCounter.on("end", function() { 183 | entry.crc32 = crc32Watcher.crc32; 184 | if (entry.uncompressedSize == null) { 185 | entry.uncompressedSize = uncompressedSizeCounter.byteCount; 186 | } else { 187 | if (entry.uncompressedSize !== uncompressedSizeCounter.byteCount) return self.emit("error", new Error("file data stream has unexpected number of bytes")); 188 | } 189 | entry.compressedSize = compressedSizeCounter.byteCount; 190 | self.outputStreamCursor += entry.compressedSize; 191 | writeToOutputStream(self, entry.getDataDescriptor()); 192 | entry.state = Entry.FILE_DATA_DONE; 193 | pumpEntries(self); 194 | }); 195 | } 196 | 197 | function determineCompressionLevel(options) { 198 | if (options.compress != null && options.compressionLevel != null) { 199 | if (!!options.compress !== !!options.compressionLevel) throw new Error("conflicting settings for compress and compressionLevel"); 200 | } 201 | if (options.compressionLevel != null) return options.compressionLevel; 202 | if (options.compress === false) return 0; 203 | return 6; 204 | } 205 | 206 | function pumpEntries(self) { 207 | if (self.allDone || self.errored) return; 208 | // first check if calculatedTotalSize is finally known 209 | if (self.ended && self.calculatedTotalSizeCallback != null) { 210 | var calculatedTotalSize = calculateTotalSize(self); 211 | if (calculatedTotalSize != null) { 212 | // we have an answer 213 | self.calculatedTotalSizeCallback(calculatedTotalSize); 214 | self.calculatedTotalSizeCallback = null; 215 | } 216 | } 217 | 218 | // pump entries 219 | var entry = getFirstNotDoneEntry(); 220 | function getFirstNotDoneEntry() { 221 | for (var i = 0; i < self.entries.length; i++) { 222 | var entry = self.entries[i]; 223 | if (entry.state < Entry.FILE_DATA_DONE) return entry; 224 | } 225 | return null; 226 | } 227 | if (entry != null) { 228 | // this entry is not done yet 229 | if (entry.state < Entry.READY_TO_PUMP_FILE_DATA) return; // input file not open yet 230 | if (entry.state === Entry.FILE_DATA_IN_PROGRESS) return; // we'll get there 231 | // start with local file header 232 | entry.relativeOffsetOfLocalHeader = self.outputStreamCursor; 233 | var localFileHeader = entry.getLocalFileHeader(); 234 | writeToOutputStream(self, localFileHeader); 235 | entry.doFileDataPump(); 236 | } else { 237 | // all cought up on writing entries 238 | if (self.ended) { 239 | // head for the exit 240 | self.offsetOfStartOfCentralDirectory = self.outputStreamCursor; 241 | self.entries.forEach(function(entry) { 242 | var centralDirectoryRecord = entry.getCentralDirectoryRecord(); 243 | writeToOutputStream(self, centralDirectoryRecord); 244 | }); 245 | writeToOutputStream(self, getEndOfCentralDirectoryRecord(self)); 246 | self.outputStream.end(); 247 | self.allDone = true; 248 | } 249 | } 250 | } 251 | 252 | function calculateTotalSize(self) { 253 | var pretendOutputCursor = 0; 254 | var centralDirectorySize = 0; 255 | for (var i = 0; i < self.entries.length; i++) { 256 | var entry = self.entries[i]; 257 | // compression is too hard to predict 258 | if (entry.compressionLevel !== 0) return -1; 259 | if (entry.state >= Entry.READY_TO_PUMP_FILE_DATA) { 260 | // if addReadStream was called without providing the size, 
we can't predict the total size 261 | if (entry.uncompressedSize == null) return -1; 262 | } else { 263 | // if we're still waiting for fs.stat, we might learn the size someday 264 | if (entry.uncompressedSize == null) return null; 265 | } 266 | // we know this for sure, and this is important to know if we need ZIP64 format. 267 | entry.relativeOffsetOfLocalHeader = pretendOutputCursor; 268 | var useZip64Format = entry.useZip64Format(); 269 | 270 | pretendOutputCursor += LOCAL_FILE_HEADER_FIXED_SIZE + entry.utf8FileName.length; 271 | pretendOutputCursor += entry.uncompressedSize; 272 | if (!entry.crcAndFileSizeKnown) { 273 | // use a data descriptor 274 | if (useZip64Format) { 275 | pretendOutputCursor += ZIP64_DATA_DESCRIPTOR_SIZE; 276 | } else { 277 | pretendOutputCursor += DATA_DESCRIPTOR_SIZE; 278 | } 279 | } 280 | 281 | centralDirectorySize += CENTRAL_DIRECTORY_RECORD_FIXED_SIZE + entry.utf8FileName.length + entry.fileComment.length; 282 | if (!entry.forceDosTimestamp) { 283 | centralDirectorySize += INFO_ZIP_UNIVERSAL_TIMESTAMP_EXTRA_FIELD_SIZE; 284 | } 285 | if (useZip64Format) { 286 | centralDirectorySize += ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE; 287 | } 288 | } 289 | 290 | var endOfCentralDirectorySize = 0; 291 | if (self.forceZip64Eocd || 292 | self.entries.length >= 0xffff || 293 | centralDirectorySize >= 0xffff || 294 | pretendOutputCursor >= 0xffffffff) { 295 | // use zip64 end of central directory stuff 296 | endOfCentralDirectorySize += ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE; 297 | } 298 | endOfCentralDirectorySize += END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + self.comment.length; 299 | return pretendOutputCursor + centralDirectorySize + endOfCentralDirectorySize; 300 | } 301 | 302 | function shouldIgnoreAdding(self) { 303 | if (self.ended) throw new Error("cannot add entries after calling end()"); 304 | if (self.errored) return true; 305 | return false; 306 | } 307 | 308 | var ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE = 56; 309 | var ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE = 20; 310 | var END_OF_CENTRAL_DIRECTORY_RECORD_SIZE = 22; 311 | function getEndOfCentralDirectoryRecord(self, actuallyJustTellMeHowLongItWouldBe) { 312 | var needZip64Format = false; 313 | var normalEntriesLength = self.entries.length; 314 | if (self.forceZip64Eocd || self.entries.length >= 0xffff) { 315 | normalEntriesLength = 0xffff; 316 | needZip64Format = true; 317 | } 318 | var sizeOfCentralDirectory = self.outputStreamCursor - self.offsetOfStartOfCentralDirectory; 319 | var normalSizeOfCentralDirectory = sizeOfCentralDirectory; 320 | if (self.forceZip64Eocd || sizeOfCentralDirectory >= 0xffffffff) { 321 | normalSizeOfCentralDirectory = 0xffffffff; 322 | needZip64Format = true; 323 | } 324 | var normalOffsetOfStartOfCentralDirectory = self.offsetOfStartOfCentralDirectory; 325 | if (self.forceZip64Eocd || self.offsetOfStartOfCentralDirectory >= 0xffffffff) { 326 | normalOffsetOfStartOfCentralDirectory = 0xffffffff; 327 | needZip64Format = true; 328 | } 329 | if (actuallyJustTellMeHowLongItWouldBe) { 330 | if (needZip64Format) { 331 | return ( 332 | ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + 333 | ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE + 334 | END_OF_CENTRAL_DIRECTORY_RECORD_SIZE 335 | ); 336 | } else { 337 | return END_OF_CENTRAL_DIRECTORY_RECORD_SIZE; 338 | } 339 | } 340 | 341 | var eocdrBuffer = bufferAlloc(END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + self.comment.length); 342 | // end of central dir signature 4 bytes (0x06054b50) 343 | 
eocdrBuffer.writeUInt32LE(0x06054b50, 0); 344 | // number of this disk 2 bytes 345 | eocdrBuffer.writeUInt16LE(0, 4); 346 | // number of the disk with the start of the central directory 2 bytes 347 | eocdrBuffer.writeUInt16LE(0, 6); 348 | // total number of entries in the central directory on this disk 2 bytes 349 | eocdrBuffer.writeUInt16LE(normalEntriesLength, 8); 350 | // total number of entries in the central directory 2 bytes 351 | eocdrBuffer.writeUInt16LE(normalEntriesLength, 10); 352 | // size of the central directory 4 bytes 353 | eocdrBuffer.writeUInt32LE(normalSizeOfCentralDirectory, 12); 354 | // offset of start of central directory with respect to the starting disk number 4 bytes 355 | eocdrBuffer.writeUInt32LE(normalOffsetOfStartOfCentralDirectory, 16); 356 | // .ZIP file comment length 2 bytes 357 | eocdrBuffer.writeUInt16LE(self.comment.length, 20); 358 | // .ZIP file comment (variable size) 359 | self.comment.copy(eocdrBuffer, 22); 360 | 361 | if (!needZip64Format) return eocdrBuffer; 362 | 363 | // ZIP64 format 364 | // ZIP64 End of Central Directory Record 365 | var zip64EocdrBuffer = bufferAlloc(ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE); 366 | // zip64 end of central dir signature 4 bytes (0x06064b50) 367 | zip64EocdrBuffer.writeUInt32LE(0x06064b50, 0); 368 | // size of zip64 end of central directory record 8 bytes 369 | writeUInt64LE(zip64EocdrBuffer, ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE - 12, 4); 370 | // version made by 2 bytes 371 | zip64EocdrBuffer.writeUInt16LE(VERSION_MADE_BY, 12); 372 | // version needed to extract 2 bytes 373 | zip64EocdrBuffer.writeUInt16LE(VERSION_NEEDED_TO_EXTRACT_ZIP64, 14); 374 | // number of this disk 4 bytes 375 | zip64EocdrBuffer.writeUInt32LE(0, 16); 376 | // number of the disk with the start of the central directory 4 bytes 377 | zip64EocdrBuffer.writeUInt32LE(0, 20); 378 | // total number of entries in the central directory on this disk 8 bytes 379 | writeUInt64LE(zip64EocdrBuffer, self.entries.length, 24); 380 | // total number of entries in the central directory 8 bytes 381 | writeUInt64LE(zip64EocdrBuffer, self.entries.length, 32); 382 | // size of the central directory 8 bytes 383 | writeUInt64LE(zip64EocdrBuffer, sizeOfCentralDirectory, 40); 384 | // offset of start of central directory with respect to the starting disk number 8 bytes 385 | writeUInt64LE(zip64EocdrBuffer, self.offsetOfStartOfCentralDirectory, 48); 386 | // zip64 extensible data sector (variable size) 387 | // nothing in the zip64 extensible data sector 388 | 389 | 390 | // ZIP64 End of Central Directory Locator 391 | var zip64EocdlBuffer = bufferAlloc(ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE); 392 | // zip64 end of central dir locator signature 4 bytes (0x07064b50) 393 | zip64EocdlBuffer.writeUInt32LE(0x07064b50, 0); 394 | // number of the disk with the start of the zip64 end of central directory 4 bytes 395 | zip64EocdlBuffer.writeUInt32LE(0, 4); 396 | // relative offset of the zip64 end of central directory record 8 bytes 397 | writeUInt64LE(zip64EocdlBuffer, self.outputStreamCursor, 8); 398 | // total number of disks 4 bytes 399 | zip64EocdlBuffer.writeUInt32LE(1, 16); 400 | 401 | 402 | return Buffer.concat([ 403 | zip64EocdrBuffer, 404 | zip64EocdlBuffer, 405 | eocdrBuffer, 406 | ]); 407 | } 408 | 409 | function validateMetadataPath(metadataPath, isDirectory) { 410 | if (metadataPath === "") throw new Error("empty metadataPath"); 411 | metadataPath = metadataPath.replace(/\\/g, "/"); 412 | if (/^[a-zA-Z]:/.test(metadataPath) || 
/^\//.test(metadataPath)) throw new Error("absolute path: " + metadataPath); 413 | if (metadataPath.split("/").indexOf("..") !== -1) throw new Error("invalid relative path: " + metadataPath); 414 | var looksLikeDirectory = /\/$/.test(metadataPath); 415 | if (isDirectory) { 416 | // append a trailing '/' if necessary. 417 | if (!looksLikeDirectory) metadataPath += "/"; 418 | } else { 419 | if (looksLikeDirectory) throw new Error("file path cannot end with '/': " + metadataPath); 420 | } 421 | return metadataPath; 422 | } 423 | 424 | var EMPTY_BUFFER = bufferAlloc(0); 425 | 426 | // this class is not part of the public API 427 | function Entry(metadataPath, isDirectory, options) { 428 | this.utf8FileName = bufferFrom(metadataPath); 429 | if (this.utf8FileName.length > 0xffff) throw new Error("utf8 file name too long. " + utf8FileName.length + " > " + 0xffff); 430 | this.isDirectory = isDirectory; 431 | this.state = Entry.WAITING_FOR_METADATA; 432 | this.setLastModDate(options.mtime != null ? options.mtime : new Date()); 433 | this.forceDosTimestamp = !!options.forceDosTimestamp; 434 | if (options.mode != null) { 435 | this.setFileAttributesMode(options.mode); 436 | } else { 437 | this.setFileAttributesMode(isDirectory ? 0o40775 : 0o100664); 438 | } 439 | if (isDirectory) { 440 | this.crcAndFileSizeKnown = true; 441 | this.crc32 = 0; 442 | this.uncompressedSize = 0; 443 | this.compressedSize = 0; 444 | } else { 445 | // unknown so far 446 | this.crcAndFileSizeKnown = false; 447 | this.crc32 = null; 448 | this.uncompressedSize = null; 449 | this.compressedSize = null; 450 | if (options.size != null) this.uncompressedSize = options.size; 451 | } 452 | if (isDirectory) { 453 | this.compressionLevel = 0; 454 | } else { 455 | this.compressionLevel = determineCompressionLevel(options); 456 | } 457 | this.forceZip64Format = !!options.forceZip64Format; 458 | if (options.fileComment) { 459 | if (typeof options.fileComment === "string") { 460 | this.fileComment = bufferFrom(options.fileComment, "utf-8"); 461 | } else { 462 | // It should be a Buffer 463 | this.fileComment = options.fileComment; 464 | } 465 | if (this.fileComment.length > 0xffff) throw new Error("fileComment is too large"); 466 | } else { 467 | // no comment. 468 | this.fileComment = EMPTY_BUFFER; 469 | } 470 | } 471 | Entry.WAITING_FOR_METADATA = 0; 472 | Entry.READY_TO_PUMP_FILE_DATA = 1; 473 | Entry.FILE_DATA_IN_PROGRESS = 2; 474 | Entry.FILE_DATA_DONE = 3; 475 | Entry.prototype.setLastModDate = function(date) { 476 | this.mtime = date; 477 | var dosDateTime = dateToDosDateTime(date); 478 | this.lastModFileTime = dosDateTime.time; 479 | this.lastModFileDate = dosDateTime.date; 480 | }; 481 | Entry.prototype.setFileAttributesMode = function(mode) { 482 | if ((mode & 0xffff) !== mode) throw new Error("invalid mode. expected: 0 <= " + mode + " <= " + 0xffff); 483 | // http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute/14727#14727 484 | this.externalFileAttributes = (mode << 16) >>> 0; 485 | }; 486 | // doFileDataPump() should not call pumpEntries() directly. see issue #9. 
487 | Entry.prototype.setFileDataPumpFunction = function(doFileDataPump) { 488 | this.doFileDataPump = doFileDataPump; 489 | this.state = Entry.READY_TO_PUMP_FILE_DATA; 490 | }; 491 | Entry.prototype.useZip64Format = function() { 492 | return ( 493 | (this.forceZip64Format) || 494 | (this.uncompressedSize != null && this.uncompressedSize > 0xfffffffe) || 495 | (this.compressedSize != null && this.compressedSize > 0xfffffffe) || 496 | (this.relativeOffsetOfLocalHeader != null && this.relativeOffsetOfLocalHeader > 0xfffffffe) 497 | ); 498 | } 499 | var LOCAL_FILE_HEADER_FIXED_SIZE = 30; 500 | var VERSION_NEEDED_TO_EXTRACT_UTF8 = 20; 501 | var VERSION_NEEDED_TO_EXTRACT_ZIP64 = 45; 502 | // 3 = unix. 63 = spec version 6.3 503 | var VERSION_MADE_BY = (3 << 8) | 63; 504 | var FILE_NAME_IS_UTF8 = 1 << 11; 505 | var UNKNOWN_CRC32_AND_FILE_SIZES = 1 << 3; 506 | Entry.prototype.getLocalFileHeader = function() { 507 | var crc32 = 0; 508 | var compressedSize = 0; 509 | var uncompressedSize = 0; 510 | if (this.crcAndFileSizeKnown) { 511 | crc32 = this.crc32; 512 | compressedSize = this.compressedSize; 513 | uncompressedSize = this.uncompressedSize; 514 | } 515 | 516 | var fixedSizeStuff = bufferAlloc(LOCAL_FILE_HEADER_FIXED_SIZE); 517 | var generalPurposeBitFlag = FILE_NAME_IS_UTF8; 518 | if (!this.crcAndFileSizeKnown) generalPurposeBitFlag |= UNKNOWN_CRC32_AND_FILE_SIZES; 519 | 520 | // local file header signature 4 bytes (0x04034b50) 521 | fixedSizeStuff.writeUInt32LE(0x04034b50, 0); 522 | // version needed to extract 2 bytes 523 | fixedSizeStuff.writeUInt16LE(VERSION_NEEDED_TO_EXTRACT_UTF8, 4); 524 | // general purpose bit flag 2 bytes 525 | fixedSizeStuff.writeUInt16LE(generalPurposeBitFlag, 6); 526 | // compression method 2 bytes 527 | fixedSizeStuff.writeUInt16LE(this.getCompressionMethod(), 8); 528 | // last mod file time 2 bytes 529 | fixedSizeStuff.writeUInt16LE(this.lastModFileTime, 10); 530 | // last mod file date 2 bytes 531 | fixedSizeStuff.writeUInt16LE(this.lastModFileDate, 12); 532 | // crc-32 4 bytes 533 | fixedSizeStuff.writeUInt32LE(crc32, 14); 534 | // compressed size 4 bytes 535 | fixedSizeStuff.writeUInt32LE(compressedSize, 18); 536 | // uncompressed size 4 bytes 537 | fixedSizeStuff.writeUInt32LE(uncompressedSize, 22); 538 | // file name length 2 bytes 539 | fixedSizeStuff.writeUInt16LE(this.utf8FileName.length, 26); 540 | // extra field length 2 bytes 541 | fixedSizeStuff.writeUInt16LE(0, 28); 542 | return Buffer.concat([ 543 | fixedSizeStuff, 544 | // file name (variable size) 545 | this.utf8FileName, 546 | // extra field (variable size) 547 | // no extra fields 548 | ]); 549 | }; 550 | var DATA_DESCRIPTOR_SIZE = 16; 551 | var ZIP64_DATA_DESCRIPTOR_SIZE = 24; 552 | Entry.prototype.getDataDescriptor = function() { 553 | if (this.crcAndFileSizeKnown) { 554 | // the Mac Archive Utility requires this not be present unless we set general purpose bit 3 555 | return EMPTY_BUFFER; 556 | } 557 | if (!this.useZip64Format()) { 558 | var buffer = bufferAlloc(DATA_DESCRIPTOR_SIZE); 559 | // optional signature (required according to Archive Utility) 560 | buffer.writeUInt32LE(0x08074b50, 0); 561 | // crc-32 4 bytes 562 | buffer.writeUInt32LE(this.crc32, 4); 563 | // compressed size 4 bytes 564 | buffer.writeUInt32LE(this.compressedSize, 8); 565 | // uncompressed size 4 bytes 566 | buffer.writeUInt32LE(this.uncompressedSize, 12); 567 | return buffer; 568 | } else { 569 | // ZIP64 format 570 | var buffer = bufferAlloc(ZIP64_DATA_DESCRIPTOR_SIZE); 571 | // optional signature (unknown if anyone 
cares about this) 572 | buffer.writeUInt32LE(0x08074b50, 0); 573 | // crc-32 4 bytes 574 | buffer.writeUInt32LE(this.crc32, 4); 575 | // compressed size 8 bytes 576 | writeUInt64LE(buffer, this.compressedSize, 8); 577 | // uncompressed size 8 bytes 578 | writeUInt64LE(buffer, this.uncompressedSize, 16); 579 | return buffer; 580 | } 581 | }; 582 | var CENTRAL_DIRECTORY_RECORD_FIXED_SIZE = 46; 583 | var INFO_ZIP_UNIVERSAL_TIMESTAMP_EXTRA_FIELD_SIZE = 9; 584 | var ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE = 28; 585 | Entry.prototype.getCentralDirectoryRecord = function() { 586 | var fixedSizeStuff = bufferAlloc(CENTRAL_DIRECTORY_RECORD_FIXED_SIZE); 587 | var generalPurposeBitFlag = FILE_NAME_IS_UTF8; 588 | if (!this.crcAndFileSizeKnown) generalPurposeBitFlag |= UNKNOWN_CRC32_AND_FILE_SIZES; 589 | 590 | var izutefBuffer = EMPTY_BUFFER; 591 | if (!this.forceDosTimestamp) { 592 | // Here is one specification for this: https://commons.apache.org/proper/commons-compress/apidocs/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.html 593 | // See also the Info-ZIP source code unix/unix.c:set_extra_field() and zipfile.c:ef_scan_ut_time(). 594 | izutefBuffer = bufferAlloc(INFO_ZIP_UNIVERSAL_TIMESTAMP_EXTRA_FIELD_SIZE); 595 | // 0x5455 Short tag for this extra block type ("UT") 596 | izutefBuffer.writeUInt16LE(0x5455, 0); 597 | // TSize Short total data size for this block 598 | izutefBuffer.writeUInt16LE(INFO_ZIP_UNIVERSAL_TIMESTAMP_EXTRA_FIELD_SIZE - 4, 2); 599 | // See Info-ZIP source code zip.h for these constant values: 600 | var EB_UT_FL_MTIME = (1 << 0); 601 | var EB_UT_FL_ATIME = (1 << 1); 602 | // Note that we set the atime flag despite not providing the atime field. 603 | // The central directory version of this extra field is specified to never contain the atime field even when the flag is set. 604 | // We set it to match the Info-ZIP behavior in order to minimize incompatibility with zip file readers that may have rigid input expectations. 
605 | // Flags Byte info bits 606 | izutefBuffer.writeUInt8(EB_UT_FL_MTIME | EB_UT_FL_ATIME, 4); 607 | // (ModTime) Long time of last modification (UTC/GMT) 608 | var timestamp = Math.floor(this.mtime.getTime() / 1000); 609 | if (timestamp < -0x80000000) timestamp = -0x80000000; // 1901-12-13T20:45:52.000Z 610 | if (timestamp > 0x7fffffff) timestamp = 0x7fffffff; // 2038-01-19T03:14:07.000Z 611 | izutefBuffer.writeInt32LE(timestamp, 5); 612 | } 613 | 614 | var normalCompressedSize = this.compressedSize; 615 | var normalUncompressedSize = this.uncompressedSize; 616 | var normalRelativeOffsetOfLocalHeader = this.relativeOffsetOfLocalHeader; 617 | var versionNeededToExtract = VERSION_NEEDED_TO_EXTRACT_UTF8; 618 | var zeiefBuffer = EMPTY_BUFFER; 619 | if (this.useZip64Format()) { 620 | normalCompressedSize = 0xffffffff; 621 | normalUncompressedSize = 0xffffffff; 622 | normalRelativeOffsetOfLocalHeader = 0xffffffff; 623 | versionNeededToExtract = VERSION_NEEDED_TO_EXTRACT_ZIP64; 624 | 625 | // ZIP64 extended information extra field 626 | zeiefBuffer = bufferAlloc(ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE); 627 | // 0x0001 2 bytes Tag for this "extra" block type 628 | zeiefBuffer.writeUInt16LE(0x0001, 0); 629 | // Size 2 bytes Size of this "extra" block 630 | zeiefBuffer.writeUInt16LE(ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE - 4, 2); 631 | // Original Size 8 bytes Original uncompressed file size 632 | writeUInt64LE(zeiefBuffer, this.uncompressedSize, 4); 633 | // Compressed Size 8 bytes Size of compressed data 634 | writeUInt64LE(zeiefBuffer, this.compressedSize, 12); 635 | // Relative Header Offset 8 bytes Offset of local header record 636 | writeUInt64LE(zeiefBuffer, this.relativeOffsetOfLocalHeader, 20); 637 | // Disk Start Number 4 bytes Number of the disk on which this file starts 638 | // (omit) 639 | } 640 | 641 | // central file header signature 4 bytes (0x02014b50) 642 | fixedSizeStuff.writeUInt32LE(0x02014b50, 0); 643 | // version made by 2 bytes 644 | fixedSizeStuff.writeUInt16LE(VERSION_MADE_BY, 4); 645 | // version needed to extract 2 bytes 646 | fixedSizeStuff.writeUInt16LE(versionNeededToExtract, 6); 647 | // general purpose bit flag 2 bytes 648 | fixedSizeStuff.writeUInt16LE(generalPurposeBitFlag, 8); 649 | // compression method 2 bytes 650 | fixedSizeStuff.writeUInt16LE(this.getCompressionMethod(), 10); 651 | // last mod file time 2 bytes 652 | fixedSizeStuff.writeUInt16LE(this.lastModFileTime, 12); 653 | // last mod file date 2 bytes 654 | fixedSizeStuff.writeUInt16LE(this.lastModFileDate, 14); 655 | // crc-32 4 bytes 656 | fixedSizeStuff.writeUInt32LE(this.crc32, 16); 657 | // compressed size 4 bytes 658 | fixedSizeStuff.writeUInt32LE(normalCompressedSize, 20); 659 | // uncompressed size 4 bytes 660 | fixedSizeStuff.writeUInt32LE(normalUncompressedSize, 24); 661 | // file name length 2 bytes 662 | fixedSizeStuff.writeUInt16LE(this.utf8FileName.length, 28); 663 | // extra field length 2 bytes 664 | fixedSizeStuff.writeUInt16LE(izutefBuffer.length + zeiefBuffer.length, 30); 665 | // file comment length 2 bytes 666 | fixedSizeStuff.writeUInt16LE(this.fileComment.length, 32); 667 | // disk number start 2 bytes 668 | fixedSizeStuff.writeUInt16LE(0, 34); 669 | // internal file attributes 2 bytes 670 | fixedSizeStuff.writeUInt16LE(0, 36); 671 | // external file attributes 4 bytes 672 | fixedSizeStuff.writeUInt32LE(this.externalFileAttributes, 38); 673 | // relative offset of local header 4 bytes 674 | fixedSizeStuff.writeUInt32LE(normalRelativeOffsetOfLocalHeader, 42); 675 | 
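// The complete record below is CENTRAL_DIRECTORY_RECORD_FIXED_SIZE (46) bytes of fixed fields, followed by the file name, the optional 9-byte UT and 28-byte ZIP64 extra fields, and the file comment; the extra field length written at offset 30 covers exactly those two optional blocks.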
676 | return Buffer.concat([ 677 | fixedSizeStuff, 678 | // file name (variable size) 679 | this.utf8FileName, 680 | // extra field (variable size) 681 | izutefBuffer, 682 | zeiefBuffer, 683 | // file comment (variable size) 684 | this.fileComment, 685 | ]); 686 | }; 687 | Entry.prototype.getCompressionMethod = function() { 688 | var NO_COMPRESSION = 0; 689 | var DEFLATE_COMPRESSION = 8; 690 | return this.compressionLevel === 0 ? NO_COMPRESSION : DEFLATE_COMPRESSION; 691 | }; 692 | 693 | // These are intentionally computed in the current system timezone 694 | // to match how the DOS encoding operates in this library. 695 | var minDosDate = new Date(1980, 0, 1); 696 | var maxDosDate = new Date(2107, 11, 31, 23, 59, 58); 697 | function dateToDosDateTime(jsDate) { 698 | // Clamp out of bounds timestamps. 699 | if (jsDate < minDosDate) jsDate = minDosDate; 700 | else if (jsDate > maxDosDate) jsDate = maxDosDate; 701 | 702 | var date = 0; 703 | date |= jsDate.getDate() & 0x1f; // 1-31 704 | date |= ((jsDate.getMonth() + 1) & 0xf) << 5; // 0-11, 1-12 705 | date |= ((jsDate.getFullYear() - 1980) & 0x7f) << 9; // 0-128, 1980-2108 706 | 707 | var time = 0; 708 | time |= Math.floor(jsDate.getSeconds() / 2); // 0-59, 0-29 (lose odd numbers) 709 | time |= (jsDate.getMinutes() & 0x3f) << 5; // 0-59 710 | time |= (jsDate.getHours() & 0x1f) << 11; // 0-23 711 | 712 | return {date: date, time: time}; 713 | } 714 | 715 | function writeUInt64LE(buffer, n, offset) { 716 | // can't use bitshift here, because JavaScript only allows bitshifting on 32-bit integers. 717 | var high = Math.floor(n / 0x100000000); 718 | var low = n % 0x100000000; 719 | buffer.writeUInt32LE(low, offset); 720 | buffer.writeUInt32LE(high, offset + 4); 721 | } 722 | 723 | function defaultCallback(err) { 724 | if (err) throw err; 725 | } 726 | 727 | util.inherits(ByteCounter, Transform); 728 | function ByteCounter(options) { 729 | Transform.call(this, options); 730 | this.byteCount = 0; 731 | } 732 | ByteCounter.prototype._transform = function(chunk, encoding, cb) { 733 | this.byteCount += chunk.length; 734 | cb(null, chunk); 735 | }; 736 | 737 | util.inherits(Crc32Watcher, Transform); 738 | function Crc32Watcher(options) { 739 | Transform.call(this, options); 740 | this.crc32 = 0; 741 | } 742 | Crc32Watcher.prototype._transform = function(chunk, encoding, cb) { 743 | this.crc32 = crc32.unsigned(chunk, this.crc32); 744 | cb(null, chunk); 745 | }; 746 | 747 | var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ '; 748 | if (cp437.length !== 256) throw new Error("assertion failure"); 749 | var reverseCp437 = null; 750 | 751 | function encodeCp437(string) { 752 | if (/^[\x20-\x7e]*$/.test(string)) { 753 | // CP437, ASCII, and UTF-8 overlap in this range. 754 | return bufferFrom(string, "utf-8"); 755 | } 756 | 757 | // This is the slow path. 
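// Lazily build a reverse table mapping each CP437 character back to its byte value; any character outside the table cannot be encoded and is rejected below.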
758 | if (reverseCp437 == null) { 759 | // cache this once 760 | reverseCp437 = {}; 761 | for (var i = 0; i < cp437.length; i++) { 762 | reverseCp437[cp437[i]] = i; 763 | } 764 | } 765 | 766 | var result = bufferAlloc(string.length); 767 | for (var i = 0; i < string.length; i++) { 768 | var b = reverseCp437[string[i]]; 769 | if (b == null) throw new Error("character not encodable in CP437: " + JSON.stringify(string[i])); 770 | result[i] = b; 771 | } 772 | 773 | return result; 774 | } 775 | 776 | function bufferAlloc(size) { 777 | bufferAlloc = modern; 778 | try { 779 | return bufferAlloc(size); 780 | } catch (e) { 781 | bufferAlloc = legacy; 782 | return bufferAlloc(size); 783 | } 784 | function modern(size) { 785 | return Buffer.allocUnsafe(size); 786 | } 787 | function legacy(size) { 788 | return new Buffer(size); 789 | } 790 | } 791 | function bufferFrom(something, encoding) { 792 | bufferFrom = modern; 793 | try { 794 | return bufferFrom(something, encoding); 795 | } catch (e) { 796 | bufferFrom = legacy; 797 | return bufferFrom(something, encoding); 798 | } 799 | function modern(something, encoding) { 800 | return Buffer.from(something, encoding); 801 | } 802 | function legacy(something, encoding) { 803 | return new Buffer(something, encoding); 804 | } 805 | } 806 | function bufferIncludes(buffer, content) { 807 | bufferIncludes = modern; 808 | try { 809 | return bufferIncludes(buffer, content); 810 | } catch (e) { 811 | bufferIncludes = legacy; 812 | return bufferIncludes(buffer, content); 813 | } 814 | function modern(buffer, content) { 815 | return buffer.includes(content); 816 | } 817 | function legacy(buffer, content) { 818 | for (var i = 0; i <= buffer.length - content.length; i++) { 819 | for (var j = 0;; j++) { 820 | if (j === content.length) return true; 821 | if (buffer[i + j] !== content[j]) break; 822 | } 823 | } 824 | return false; 825 | } 826 | } 827 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "yazl", 3 | "version": "3.3.1", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "yazl", 9 | "version": "3.3.1", 10 | "license": "MIT", 11 | "dependencies": { 12 | "buffer-crc32": "^1.0.0" 13 | }, 14 | "devDependencies": { 15 | "yauzl": "^3.2.0" 16 | } 17 | }, 18 | "node_modules/buffer-crc32": { 19 | "version": "1.0.0", 20 | "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", 21 | "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", 22 | "engines": { 23 | "node": ">=8.0.0" 24 | } 25 | }, 26 | "node_modules/pend": { 27 | "version": "1.2.0", 28 | "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", 29 | "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", 30 | "dev": true 31 | }, 32 | "node_modules/yauzl": { 33 | "version": "3.2.0", 34 | "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.0.tgz", 35 | "integrity": "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w==", 36 | "dev": true, 37 | "dependencies": { 38 | "buffer-crc32": "~0.2.3", 39 | "pend": "~1.2.0" 40 | }, 41 | "engines": { 42 | "node": ">=12" 43 | } 44 | }, 45 | "node_modules/yauzl/node_modules/buffer-crc32": { 46 | "version": "0.2.13", 47 | "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", 48 
| "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", 49 | "dev": true, 50 | "engines": { 51 | "node": "*" 52 | } 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "yazl", 3 | "version": "3.3.1", 4 | "description": "yet another zip library for node", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "node test/test.js" 8 | }, 9 | "repository": { 10 | "type": "git", 11 | "url": "git+https://github.com/thejoshwolfe/yazl.git" 12 | }, 13 | "keywords": [ 14 | "zip", 15 | "stream", 16 | "archive", 17 | "file" 18 | ], 19 | "author": "Josh Wolfe ", 20 | "license": "MIT", 21 | "bugs": { 22 | "url": "https://github.com/thejoshwolfe/yazl/issues" 23 | }, 24 | "homepage": "https://github.com/thejoshwolfe/yazl", 25 | "dependencies": { 26 | "buffer-crc32": "^1.0.0" 27 | }, 28 | "devDependencies": { 29 | "yauzl": "^3.2.0" 30 | }, 31 | "files": [ 32 | "index.js" 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /test/bl-minimal.js: -------------------------------------------------------------------------------- 1 | // This module is inspired by bl by Rod Vagg. https://github.com/rvagg/bl 2 | 3 | const {Duplex} = require("node:stream"); 4 | class BufferList extends Duplex { 5 | constructor(endCallback) { 6 | super(); 7 | this.endCallback = endCallback; 8 | this.chunks = []; 9 | } 10 | 11 | append(chunk) { 12 | this.chunks.push(chunk); 13 | return this; 14 | } 15 | 16 | _write(chunk, encoding, callback) { 17 | this.chunks.push(chunk); 18 | callback(); 19 | } 20 | _final(callback) { 21 | if (this.endCallback != null) { 22 | // Collect all the buffered data and provide it as a single buffer. 23 | this.endCallback(null, Buffer.concat(this.chunks)); 24 | } 25 | callback(); 26 | } 27 | 28 | _read(size) { 29 | this.push(Buffer.concat(this.chunks)); 30 | this.push(null); 31 | } 32 | } 33 | 34 | module.exports = BufferList; 35 | -------------------------------------------------------------------------------- /test/test.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | var yazl = require("../"); 3 | var yauzl = require("yauzl"); 4 | var BufferList = require("./bl-minimal.js"); 5 | 6 | // Test: 7 | // * filename canonicalization. 8 | // * addFile, addReadStream, and addBuffer 9 | // * extracting the zip file (via yauzl) gives the correct contents. 10 | // * compress: false 11 | // * specifying mode and mtime options, but not checking them. 12 | // * verifying compression method defaults to true. 
13 | (function() { 14 | var fileMetadata = { 15 | mtime: new Date(), 16 | mode: 0o100664, 17 | }; 18 | var zipfile = new yazl.ZipFile(); 19 | zipfile.addFile(__filename, "unicōde.txt"); 20 | zipfile.addFile(__filename, "without-compression.txt", {compress: false}); 21 | zipfile.addReadStream(fs.createReadStream(__filename), "readStream.txt", fileMetadata); 22 | var expectedContents = fs.readFileSync(__filename); 23 | zipfile.addBuffer(expectedContents, "with/directories.txt", fileMetadata); 24 | zipfile.addBuffer(expectedContents, "with\\windows-paths.txt", fileMetadata); 25 | zipfile.end(function(calculatedTotalSize) { 26 | if (calculatedTotalSize !== -1) throw new Error("calculatedTotalSize is impossible to know before compression"); 27 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 28 | if (err) throw err; 29 | yauzl.fromBuffer(data, function(err, zipfile) { 30 | if (err) throw err; 31 | zipfile.on("entry", function(entry) { 32 | var expectedCompressionMethod = entry.fileName === "without-compression.txt" ? 0 : 8; 33 | if (entry.compressionMethod !== expectedCompressionMethod) throw new Error("expected " + entry.fileName + " compression method " + expectedCompressionMethod + ". found: " + entry.compressionMethod); 34 | zipfile.openReadStream(entry, function(err, readStream) { 35 | if (err) throw err; 36 | readStream.pipe(new BufferList(function(err, data) { 37 | if (err) throw err; 38 | if (!expectedContents.equals(data)) throw new Error("unexpected contents"); 39 | console.log(entry.fileName + ": pass"); 40 | })); 41 | }); 42 | }); 43 | }); 44 | })); 45 | }); 46 | })(); 47 | 48 | // Test: 49 | // * specifying compressionLevel varies the output size. 50 | // * specifying compressionLevel:0 disables compression. 51 | (function() { 52 | var options = { 53 | mtime: new Date(), 54 | mode: 0o100664, 55 | }; 56 | var zipfile = new yazl.ZipFile(); 57 | options.compressionLevel = 1; 58 | zipfile.addFile(__filename, "level1.txt", options); 59 | options.compressionLevel = 9; 60 | zipfile.addFile(__filename, "level9.txt", options); 61 | options.compressionLevel = 0; 62 | zipfile.addFile(__filename, "level0.txt", options); 63 | zipfile.end(function(calculatedTotalSize) { 64 | if (calculatedTotalSize !== -1) throw new Error("calculatedTotalSize is impossible to know before compression"); 65 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 66 | if (err) throw err; 67 | yauzl.fromBuffer(data, function(err, zipfile) { 68 | if (err) throw err; 69 | 70 | var fileNameToSize = {}; 71 | zipfile.on("entry", function(entry) { 72 | fileNameToSize[entry.fileName] = entry.compressedSize; 73 | var expectedCompressionMethod = entry.fileName === "level0.txt" ? 0 : 8; 74 | if (entry.compressionMethod !== expectedCompressionMethod) throw new Error("expected " + entry.fileName + " compression method " + expectedCompressionMethod + ". found: " + entry.compressionMethod); 75 | }); 76 | zipfile.on("end", function() { 77 | var size0 = fileNameToSize["level0.txt"]; 78 | var size1 = fileNameToSize["level1.txt"]; 79 | var size9 = fileNameToSize["level9.txt"]; 80 | // Note: undefined coerces to NaN which always results in the comparison evaluating to `false`. 81 | if (!(size0 >= size1)) throw new Error("Compression level 1 inflated size. expected: " + size0 + " >= " + size1); 82 | if (!(size1 >= size9)) throw new Error("Compression level 9 inflated size. 
expected: " + size1 + " >= " + size9); 83 | console.log("compressionLevel (" + size0 + " >= " + size1 + " >= " + size9 + "): pass"); 84 | }); 85 | }); 86 | })); 87 | }); 88 | })(); 89 | 90 | // Test: 91 | // * specifying mtime outside the bounds of dos format but in bounds for unix format. 92 | // * forceDosTimestamp, and verifying the lower clamping for dos format. 93 | // * specifying mtime after 2038, and verifying the clamping for unix format. 94 | (function() { 95 | var options = { 96 | mtime: new Date(0), // unix epoch 97 | mode: 0o100664, 98 | compress: false, 99 | }; 100 | var zipfile = new yazl.ZipFile(); 101 | zipfile.addFile(__filename, "modern-1970.txt", options); 102 | options.forceDosTimestamp = true; 103 | zipfile.addFile(__filename, "dos-1970.txt", options); 104 | options.forceDosTimestamp = false; 105 | options.mtime = new Date(2080, 1, 1); // year 2080 is beyond the unix range. 106 | zipfile.addFile(__filename, "2080.txt", options); 107 | zipfile.end(function(calculatedTotalSize) { 108 | if (calculatedTotalSize === -1) throw new Error("calculatedTotalSize should be known"); 109 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 110 | if (err) throw err; 111 | if (data.length !== calculatedTotalSize) throw new Error("calculatedTotalSize prediction is wrong. " + calculatedTotalSize + " !== " + data.length); 112 | yauzl.fromBuffer(data, function(err, zipfile) { 113 | if (err) throw err; 114 | zipfile.on("entry", function(entry) { 115 | switch (entry.fileName) { 116 | case "modern-1970.txt": 117 | if (entry.getLastModDate().getTime() !== 0) throw new Error("expected unix epoch to be encodable. found: " + entry.getLastModDate()); 118 | break; 119 | case "dos-1970.txt": 120 | var year = entry.getLastModDate().getFullYear(); 121 | if (!(1979 <= year && year <= 1981)) throw new Error("expected dos format year to be clamped to 1980ish. found: " + entry.getLastModDate()); 122 | break; 123 | case "2080.txt": 124 | if (entry.getLastModDate().getUTCFullYear() !== 2038) throw new Error("expected timestamp clamped down to year 2038. found: " + entry.getLastModDate()); 125 | break; 126 | default: throw new Error(entry.fileName); 127 | } 128 | }); 129 | zipfile.on("end", function() { 130 | console.log("timestamp encodings: pass"); 131 | }); 132 | }); 133 | })); 134 | }); 135 | })(); 136 | 137 | // Test: 138 | // * forceZip64Format for various subsets of entries. 139 | // * specifying size for addReadStream. 140 | // * calculatedTotalSize should always be known. 141 | // * calculatedTotalSize is correct. 
142 | (function() { 143 | var zip64Combinations = [ 144 | [0, 0, 0, 0, 0], 145 | [1, 1, 0, 0, 0], 146 | [0, 0, 1, 0, 0], 147 | [0, 0, 0, 1, 0], 148 | [0, 0, 0, 0, 1], 149 | [1, 1, 1, 1, 1], 150 | ]; 151 | zip64Combinations.forEach(function(zip64Config) { 152 | var options = { 153 | compress: false, 154 | size: null, 155 | forceZip64Format: false, 156 | }; 157 | var zipfile = new yazl.ZipFile(); 158 | options.forceZip64Format = !!zip64Config[0]; 159 | zipfile.addFile(__filename, "asdf.txt", options); 160 | options.forceZip64Format = !!zip64Config[1]; 161 | zipfile.addFile(__filename, "fdsa.txt", options); 162 | options.forceZip64Format = !!zip64Config[2]; 163 | zipfile.addBuffer(bufferFrom("buffer"), "buffer.txt", options); 164 | options.forceZip64Format = !!zip64Config[3]; 165 | 166 | var someBuffer = bufferFrom("stream"); 167 | options.size = someBuffer.length; 168 | zipfile.addReadStream(new BufferList().append(someBuffer), "stream.txt", options); 169 | options.size = null; 170 | 171 | zipfile.end({forceZip64Format:!!zip64Config[4]}, function(calculatedTotalSize) { 172 | if (calculatedTotalSize === -1) throw new Error("calculatedTotalSize should be known"); 173 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 174 | if (data.length !== calculatedTotalSize) throw new Error("calculatedTotalSize prediction is wrong. " + calculatedTotalSize + " !== " + data.length); 175 | console.log("calculatedTotalSize(" + zip64Config.join("") + "): pass"); 176 | })); 177 | }); 178 | }); 179 | })(); 180 | 181 | // Test adding empty directories and verifying their names in the resulting zipfile. 182 | (function() { 183 | var zipfile = new yazl.ZipFile(); 184 | zipfile.addFile(__filename, "a.txt"); 185 | zipfile.addBuffer(bufferFrom("buffer"), "b.txt"); 186 | zipfile.addReadStream(new BufferList().append(bufferFrom("stream")), "c.txt"); 187 | zipfile.addEmptyDirectory("d/"); 188 | zipfile.addEmptyDirectory("e"); 189 | zipfile.end(function(calculatedTotalSize) { 190 | if (calculatedTotalSize !== -1) throw new Error("calculatedTotalSize should be unknown"); 191 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 192 | if (err) throw err; 193 | yauzl.fromBuffer(data, function(err, zipfile) { 194 | if (err) throw err; 195 | var entryNames = ["a.txt", "b.txt", "c.txt", "d/", "e/"]; 196 | zipfile.on("entry", function(entry) { 197 | var expectedName = entryNames.shift(); 198 | if (entry.fileName !== expectedName) { 199 | throw new Error("unexpected entry fileName: " + entry.fileName + ", expected: " + expectedName); 200 | } 201 | }); 202 | zipfile.on("end", function() { 203 | if (entryNames.length === 0) console.log("optional parameters and directories: pass"); 204 | }); 205 | }); 206 | })); 207 | }); 208 | })(); 209 | 210 | // Test: 211 | // * just calling addBuffer() and no other add functions. 212 | // * calculatedTotalSize should be known and correct for addBuffer with compress:false. 213 | // * addBuffer with compress:false disables compression. 214 | (function() { 215 | var zipfile = new yazl.ZipFile(); 216 | zipfile.addBuffer(bufferFrom("hello"), "hello.txt", {compress: false}); 217 | zipfile.end(function(calculatedTotalSize) { 218 | if (calculatedTotalSize === -1) throw new Error("calculatedTotalSize should be known"); 219 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 220 | if (err) throw err; 221 | if (data.length !== calculatedTotalSize) throw new Error("calculatedTotalSize prediction is wrong. 
" + calculatedTotalSize + " !== " + data.length); 222 | yauzl.fromBuffer(data, function(err, zipfile) { 223 | if (err) throw err; 224 | var entryNames = ["hello.txt"]; 225 | zipfile.on("entry", function(entry) { 226 | var expectedName = entryNames.shift(); 227 | if (entry.fileName !== expectedName) { 228 | throw new Error("unexpected entry fileName: " + entry.fileName + ", expected: " + expectedName); 229 | } 230 | var expectedCompressionMethod = 0; 231 | if (entry.compressionMethod !== expectedCompressionMethod) throw new Error("expected " + entry.fileName + " compression method " + expectedCompressionMethod + ". found: " + entry.compressionMethod); 232 | }); 233 | zipfile.on("end", function() { 234 | if (entryNames.length === 0) console.log("justAddBuffer: pass"); 235 | }); 236 | }); 237 | })); 238 | }); 239 | })(); 240 | 241 | // Test: 242 | // * zipfile with no entries. 243 | // * comment can be string or Buffer. 244 | // * archive comment uses CP437 encoding for non-ASCII strings. (or rather that yazl and yauzl agree on the encoding.) 245 | var weirdChars = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ '; 246 | (function() { 247 | var testCases = [ 248 | ["Hello World", "Hello World"], 249 | [bufferFrom("Hello"), "Hello"], 250 | [weirdChars, weirdChars], 251 | ]; 252 | testCases.forEach(function(testCase, i) { 253 | var zipfile = new yazl.ZipFile(); 254 | zipfile.end({ 255 | comment: testCase[0], 256 | }, function(calculatedTotalSize) { 257 | if (calculatedTotalSize === -1) throw new Error("calculatedTotalSize should be known"); 258 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 259 | if (err) throw err; 260 | if (data.length !== calculatedTotalSize) throw new Error("calculatedTotalSize prediction is wrong. " + calculatedTotalSize + " !== " + data.length); 261 | yauzl.fromBuffer(data, function(err, zipfile) { 262 | if (err) throw err; 263 | if (zipfile.comment !== testCase[1]) { 264 | throw new Error("comment is wrong. " + JSON.stringify(zipfile.comment) + " !== " + JSON.stringify(testCase[1])); 265 | } 266 | console.log("comment(" + i + "): pass"); 267 | }); 268 | })); 269 | }); 270 | }); 271 | })(); 272 | 273 | // Test ensuring that archive comment cannot create an ambiguous zip file. 274 | (function() { 275 | var zipfile = new yazl.ZipFile(); 276 | try { 277 | zipfile.end({ 278 | comment: bufferFrom("01234567890123456789" + "\x50\x4b\x05\x06" + "01234567890123456789") 279 | }); 280 | } catch (e) { 281 | if (e.toString().indexOf("comment contains end of central directory record signature") !== -1) { 282 | console.log("block eocdr signature in comment: pass"); 283 | return; 284 | } 285 | } 286 | throw new Error("expected error for including eocdr signature in comment"); 287 | })(); 288 | 289 | // Test: 290 | // * specifying fileComment via addBuffer. 291 | // * fileComment can be string or Buffer. 292 | // * yauzl and yazl agree on the encoding. 293 | // * calculatedTotalSize is known and correct with compress:false. 
294 | (function() { 295 | var testCases = [ 296 | ["Hello World!", "Hello World!"], 297 | [bufferFrom("Hello!"), "Hello!"], 298 | [weirdChars, weirdChars], 299 | ]; 300 | testCases.forEach(function(testCase, i) { 301 | var zipfile = new yazl.ZipFile(); 302 | zipfile.addBuffer(bufferFrom("hello"), "hello.txt", {compress: false, fileComment: testCase[0]}); 303 | zipfile.end(function(calculatedTotalSize) { 304 | if (calculatedTotalSize === -1) throw new Error("calculatedTotalSize should be known"); 305 | zipfile.outputStream.pipe(new BufferList(function(err, data) { 306 | if (err) throw err; 307 | if (data.length !== calculatedTotalSize) throw new Error("calculatedTotalSize prediction is wrong. " + calculatedTotalSize + " !== " + data.length); 308 | yauzl.fromBuffer(data, function(err, zipfile) { 309 | if (err) throw err; 310 | var entryNames = ["hello.txt"]; 311 | zipfile.on("entry", function(entry) { 312 | var expectedName = entryNames.shift(); 313 | if (entry.fileComment !== testCase[1]) { 314 | throw new Error("fileComment is wrong. " + JSON.stringify(entry.fileComment) + " !== " + JSON.stringify(testCase[1])); 315 | } 316 | }); 317 | zipfile.on("end", function() { 318 | if (entryNames.length === 0) console.log("fileComment(" + i + "): pass"); 319 | }); 320 | }); 321 | })); 322 | }); 323 | }); 324 | })(); 325 | 326 | // Test: 327 | // * giving an error to the addReadStreamLazy callback emits the error on the zipfile. 328 | // * calling addReadStreamLazy with no options argument. 329 | // * trying to add beyond end() throws an error. 330 | (function() { 331 | var zipfile = new yazl.ZipFile(); 332 | zipfile.on("error", function(err) { 333 | if (err.message !== "error 1") throw new Error("expected only error 1, got: " + err.message); 334 | }); 335 | zipfile.addReadStreamLazy("hello.txt", function(cb) { 336 | cb(new Error("error 1")); 337 | }); 338 | zipfile.addReadStreamLazy("hello2.txt", function(cb) { 339 | cb(new Error("error 2")); 340 | }); 341 | zipfile.end(function() { 342 | throw new Error("should not call calculatedTotalSizeCallback in error conditions") 343 | }); 344 | var gotError = false; 345 | try { 346 | zipfile.addBuffer(bufferFrom("a"), "a"); 347 | } catch (err) { 348 | gotError = true; 349 | } 350 | if (!gotError) throw new Error("expected error for adding after calling end()"); 351 | })(); 352 | 353 | function bufferFrom(something, encoding) { 354 | bufferFrom = modern; 355 | try { 356 | return bufferFrom(something, encoding); 357 | } catch (e) { 358 | bufferFrom = legacy; 359 | return bufferFrom(something, encoding); 360 | } 361 | function modern(something, encoding) { 362 | return Buffer.from(something, encoding); 363 | } 364 | function legacy(something, encoding) { 365 | return new Buffer(something, encoding); 366 | } 367 | } 368 | --------------------------------------------------------------------------------