├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ └── test.yml ├── .gitignore ├── .vscode └── settings.json ├── CHANGES.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── README.md ├── demo ├── index.html ├── worker.html └── worker.js ├── index.d.ts ├── package-lock.json ├── package.json ├── src ├── crc32.ts ├── datetime.ts ├── index.ts ├── input.ts ├── metadata.ts ├── polyfills.ts ├── utils.ts ├── worker.ts └── zip.ts ├── terser.json ├── test ├── APPNOTE.TXT ├── crc32.test.ts ├── datetime.test.ts ├── integration.test.ts ├── metadata.test.ts ├── table.array └── zip.test.ts └── tsconfig.json /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: "[BUG] Dates on zipped files are off by decades" 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | (please remember that bugs in IE and any browser version that shipped earlier than 2015 are expected and **will not be fixed**) 32 | 33 | **Smartphone (please complete the following information):** 34 | - Device: [e.g. iPhone6] 35 | - OS: [e.g. iOS8.1] 36 | - Browser [e.g. stock browser, safari] 37 | - Version [e.g. 22] 38 | 39 | **Additional context** 40 | Add any other context about the problem here. 
41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: "[FEATURE] Create a disk image instead of a ZIP file" 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Why is your feature relevant to client-zip?** 14 | Consider that this library is just a function that takes inputs and provides a Response. Relevant features include, for example, all those listed in the roadmap. Out-of-scope features include unzipping, saving the Response somewhere, or instantiating a ServiceWorker to use client-zip in a certain way (the last two features would make interesting projects using client-zip as a dependency). 15 | 16 | **Describe the solution you'd like** 17 | A clear and concise description of what you want to happen. 18 | 19 | **Describe alternatives you've considered** 20 | A clear and concise description of any alternative solutions or features you've considered. 21 | 22 | **Additional context** 23 | Add any other context or screenshots about the feature request here. 
24 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test 2 | 3 | on: 4 | push: 5 | branches: [master, no-zip64] 6 | pull_request: 7 | branches: [master, no-zip64] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | 17 | - name: Install Deno 18 | uses: denoland/setup-deno@v1 19 | with: 20 | deno-version: v1.x 21 | 22 | - name: Run Tests 23 | run: deno test --allow-read test/ 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /index.js 3 | /worker.js 4 | .npmignore 5 | .vscode -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "deno.enable": true, 3 | "[typescript]": { 4 | "editor.defaultFormatter": "denoland.vscode-deno" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | ## 2.5.0: 2 | 3 | * new: `mode` field in input objects can override the default POSIX file mode. 4 | 5 | ## 2.4.6: 6 | 7 | * bugfix: the `filename` attribute in a Content-Disposition header is now parsed correctly 8 | - when it is not the last attribute in the header 9 | - and when it is the extended `filename*` attribute 10 | 11 | ## 2.4.5: 12 | 13 | * bugfix: the output stream can now be transferred without breaking the central repository. 
14 | 15 | ## 2.4.4: 16 | 17 | * *options* argument was missing in `makeZip` type declaration 18 | 19 | ## 2.4.3: 20 | 21 | * bugfix: cancelling the output stream will cause an error in the source iterator FOR REAL NOW 22 | 23 | ## 2.4.2: 24 | 25 | * new: filenames can be flagged as UTF-8 using the EFS bit in the file header flags 26 | - this is configurable with the new `buffersAreUTF8` option 27 | * and it should help the Windows Zip utility read non-ASCII filenames correctly 28 | 29 | ## 2.4.1: 30 | 31 | * bugfix: cancelling the output stream will now cause an error in the source iterator 32 | 33 | ## 2.4.0: 34 | 35 | * minor: JavaScript CRC32 is now faster than WebAssembly. 36 | 37 | ## 2.3.1: 38 | 39 | * new: added type and exports to package.json so client-zip can run in Node.js. 40 | 41 | ## 2.3.0: 42 | 43 | * fixed filename extraction from a Response when its URL has a trailing slash. 44 | * new: allow the creation of empty folders in the archive with `{ name, lastModified? }`. 45 | - remove trailing slashes from file names and add one to folder names if needed. 46 | * new: export the `makeZip` function which returns a ReadableStream without its Response wrapper. 47 | 48 | ## 2.2.2: 49 | 50 | * fixed typings (2.2.0 incorrectly excluded ArrayBuffers, Blobs and strings as valid `input` types) 51 | 52 | ## 2.2.1: 53 | 54 | * fixed content-length prediction when some files have a length of zero (previously, this threw an error). 55 | 56 | ## 2.2.0: 57 | 58 | * minor: added an *options* parameter to `downloadZip`. 59 | * new: Zip file size can now be predicted: 60 | - export the `predictLength` function to compute the size of a Zip file before creating it. 61 | - *options.length* can be set to include a "Content-Length" header in the Response. 62 | - *options.metadata* can instead be given the same argument as `predictLength`, as a shortcut to compute and set the Content-Length. 
63 | 64 | ## 2.1.0: 65 | 66 | * minor: used the `start` directive in WebAssembly to simplify (very slightly) the CRC32 module. 67 | 68 | ## 2.0.1: 69 | 70 | * Fixed a bug where small files requiring a 64-bit offset (but not size) were corrupted. 71 | * Never use 64-bit sizes in data descriptor for small files, even at large offsets. 72 | 73 | ## 2.0.0: 74 | 75 | * breaking: now targets ES2020 because we need BigInts, because… 76 | * breaking (a little): client-zip now generates ZIP64 archives! 77 | * minor: the worker IIFE version is now identical (accepts all the input types) ; might be useful with transferable blobs and streams. 78 | * Zip64 is only used when necessary. 79 | 80 | ## 1.3.1: 81 | * patch: the UNIX part of the external attributes was left at zero, causing some Linux setups to create files with no permissions at all ; now set to 664 (rw-rw-r) 82 | 83 | ## 1.3.0: 84 | * minor: added a `Content-Disposition: attachment` header to the returned Response ; particularly useful when combined with a `form` action intercepted by a Service Worker because forms don't have a `download` attribute like links 85 | 86 | ## 1.2.2: 87 | * patch: added a few missing TypeScript annotations to avoid "Member 'name' implicitly has an 'any' type." errors in strict TypeScript settings 88 | 89 | ## 1.2.1 90 | * patch: fixed typings that made TypeScript complain when you passed an array to downloadZip 91 | 92 | ## 1.2.0 93 | * minor: added a worker script alongside the ES module. It only accepts Responses as input. 
94 | * updated README with results from my "faster-crc" experiments 95 | 96 | ## 1.1.1 97 | * patch: fixed DOS date/time encoding 98 | * patch: fixed Invalid Date that was attributed to Responses with no Last-Modified header 99 | 100 | ## 1.1.0 101 | * minor: the WebAssembly Memory is now created inside the wasm module, resulting in a module that is easier to import and a small reduction in bundle size 102 | * typo: removed a little bit of duplicate code in normalizeInput's Response name handling 103 | 104 | ## 0.3.0 105 | * minor: `input.lastModification` is now optional in all cases 106 | * minor: when extracting the file name from a Request, consider first the "filename" option from the "Content-Disposition" header 107 | 108 | ## 0.2.4 More Fixed 109 | * patch (**critical**): fixed infinite loop in a function that was trying to chunk large files for crc32 110 | 111 | ## 0.2.3 Fixed! 112 | * patch (**critical**): computed file size was *NaN* when reading from a stream 113 | * patch: a little mangling and refactoring again 114 | 115 | ## 0.2.2 116 | * patch: update README with recent changes to property names 117 | 118 | ## 0.2.1 119 | * patch: a little refactoring and tweaking of Terser options 120 | * actually breaking but I didn't notice: renamed input property `modDate` to `lastModification` 121 | 122 | ## 0.2 more speed! 123 | * minor: now using WebAssembly for CRC32, yielding a ~10x speed improvement over the JavaScript version 124 | * minor: polyfill *Blob*.stream() instead of using a FileReader to process Blobs 125 | * patch: fix typo in README 126 | 127 | ## 0.1 First commit 128 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | _since it's trendy to have one and I do not mind stating the obvious once in a while_ 4 | 5 | Please use English for all on-topic conversation. 
As a linguist it hurts to say that, but I don't know a practical alternative. 6 | 7 | Hopefully if you're reading this, it's because you intend to contribute to the client-zip library, report some issue with it, or request a feature that you would like to have in it. If that's the case, I'm sure you are already motivated to be nice, helpful and constructive. But, just in case… (or if you're mainly interested in Codes of Conduct) 8 | 9 | Since I am, at this point, the main contributor, let me state that I am not easily offended (an understatement) 10 | and I prefer to avoid censorship but I do have a profound dislike for bullshit, 11 | so I'd rather you spend some thought on the relevance and accuracy of whatever you write than on whether 12 | it could possibly piss me off. 13 | 14 | I encourage people to contribute regardless of their… whatever, I don't care and neither should you (let me clarify that: I don't care about labels and groups ; I do care about individuals). But please [Read The Fuckin' Manual™](CONTRIBUTING.md) first. 15 | And I encourage you to also not take offense easily, be optimistic in your interpretation of others' words, 16 | choose your battles, and be fearlessly reasonable when you do pick one. 17 | 18 | Obviously, what English euphemistically refers to as "language" does not bother me and I don't think it should bother anyone. 19 | I am more interested in [pragmatics](https://en.wikipedia.org/wiki/Pragmatics): 20 | Why was something written, and why does it matter? 
21 | 22 | These attitudes are bad for a conversation, don't do them: 23 | 24 | * insults ("language" directed at other people) 25 | * logical fallacies ; including in particular, arguing for or against an idea based on who proposed or defends it 26 | * refusing to deconstruct one's expressed viewpoint or argument (make your unspoken assumptions explicit so they can be examined) 27 | * demanding that someone justify a viewpoint they have not expressed or an action they have not taken (instead, ask for clarification on something they have said or done, see where it leads) 28 | * making a factual claim you cannot substantiate (also, use appropriate qualifiers to indicate your confidence level) 29 | * getting attached to ideas (except a few that you choose carefully and certainly none related to software development) 30 | 31 | Also, since we're here to advance a piece of code, off-topic chatter had better be short and funny. 32 | 33 | That's it, I'm not going to list all possible stupid things to say and all possible places not to do it. 34 | Really, those rules apply everywhere (except the bit about off-topic chatter). 35 | 36 | Sincerely, 37 | 38 | David Junger a.k.a. "Touffy" -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Welcome ! 4 | 5 | This is a small and relatively simple piece of code (don't let the low-level buffer operations fool you) implementing a (currently) small subset of an old thing called the ZIP file format (the specification is actually [included as a test file](test/APPNOTE.TXT)). 6 | 7 | You can contribute by describing a feature or improvement you would like to have (including, but not limited to, the features listed in the roadmap) or reporting a bug. There are issue templates to help you with expressing those requests or reports on GitHub. 
8 | 9 | You can also propose an implementation of your feature or bugfix in the form of a Pull Request (write the issue first, so everyone will know someone is working on it). You can do that even if you feel that your coding skills are not quite up to it. I would be happy to help. Either way, this document contains some details on how to proceed. 10 | 11 | ## Prerequisites 12 | 13 | For writing a Pull Request, basic knowledge of git and GitHub is required, though it is easy to acquire if you've never used them before. You'll also need a decent understanding of memory buffers and — of course — JavaScript! 14 | 15 | [deno](https://deno.land/manual/getting_started/installation) must be installed on your machine to run the tests, and a code editor that understands TypeScript and the way deno does things like import paths. [The deno website](https://deno.land/manual/getting_started/setup_your_environment) is probably the best place to look. This repository already contains appropriate VSCode settings (just install the deno extension). 16 | 17 | The bundling script uses node and a few NPM modules, but you don't really need to do any of that unless you want to publish your own version, so you shouldn't even need to run `npm install`. 18 | 19 | ## Compatibility 20 | 21 | If possible, ensure your code runs in the two latest major versions of Chrome, Edge, Safari and Firefox on desktop, as well as Safari for iOS and Chrome for Android. 22 | 23 | An optional feature or performance improvement can ignore that requirement, in which case it must be dynamically loaded after successful feature detection (not browser detection). 24 | 25 | ## Test and document your change 26 | 27 | If it's not obvious, write a few lines in the Pull Request to explain what you're doing and why. 28 | When relevant, update the automated tests as well. Make sure the types remain accurate. 29 | 30 | Don't forget to update the README and any other documentation that needs updating. 
Add your name to the copyright notice in the license if you want to. 31 | 32 | You may also update the CHANGES file, but it's all right if you don't — I will update it when I accept the PR. 33 | Also leave the version bump 34 | (tagging master, updating the package.json, creating the release on GitHub and `npm publish`) 35 | to me. I promise I'll be quick about it. 36 | 37 | ## Start with version 1 (when relevant) 38 | 39 | When your feature or bugfix does not rely on Zip64 or BigInts or something else very modern, consider basing your PR onto the [`no-zip64` branch](../../tree/no-zip64) instead of `master`. 40 | 41 | ## Code style 42 | 43 | I am not a big fan of rigid code style standards and tools like Prettier, which is why there are none in this project (well, except the one built into deno, but I don't use it). I am open to adding and/or using one eventually, if the code base and number of contributors grow enough to justify it. 44 | 45 | I do appreciate linters for their capacity to detect errors, but TypeScript is more effective for that purpose. You are free to use a linter while working on your contributions, just don't include it in the Pull Request and avoid committing purely stylistic changes to existing code. 46 | 47 | Use this freedom to write expressive code your way. As far as I am concerned, there is no requirement that all the code here should look alike. 48 | 49 | ## Design guidelines 50 | 51 | That being said, your contribution should adhere to the library's design goals 52 | (these are guidelines, so they are negotiable of course): 53 | 54 | ### Keep it simple 55 | 56 | The library exposes a single pure function, `downloadZip`, that takes a variety of inputs and returns a `Response`. Stick to it. Don't export an object with methods and internal state. Don't create another archive format than ZIP (it's in the name of the library). 
57 | 58 | You may add options to `downloadZip` (an "options" object rather than an ordered list of many arguments), 59 | or new properties for the input objects. Try to stay backwards-compatible and future-proof. 60 | 61 | ### Keep it small 62 | 63 | Think twice before you add a runtime dependency. It's pretty nice not to have any. 64 | 65 | If you add an internal class or basic object interface, add its property names to the Terser option `mangle.properties.regex` and make sure there is no collision with native property names or the public API of `downloadZip`. 66 | 67 | I don't want the source code to become more convoluted just for the sake of better minification, but I am proud of the tiny bundle size and I hope you share the sentiment. 68 | 69 | ### Keep it fast 70 | 71 | client-zip makes some compromises to reduce memory usage and maintain a low time to first byte 72 | (in particular the `03` flag i.e. not announcing file size and CRC32 in the file header, and being unable to set a Content-Length header in advance). 73 | You could add an optional behavior that is more greedy, but not a default behavior. 74 | 75 | ### Use modern standards 76 | 77 | client-zip doesn't even try to work in IE and other old browsers. You can and should use recent Web standards 78 | as long as you comply with the [Compatibility](#compatibility) section. 
79 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright 2020 David Junger 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![Test](https://github.com/Touffy/client-zip/workflows/Test/badge.svg) 2 | ![Size](https://badgen.net/bundlephobia/minzip/client-zip) 3 | ![Dependencies](https://badgen.net/bundlephobia/dependency-count/client-zip) 4 | ![Types](https://badgen.net/npm/types/client-zip) 5 | 6 | # What is `client-zip` ? 7 | 8 | `client-zip` concatenates multiple files (e.g. from multiple HTTP requests) into a single ZIP, **in the browser**, so you can let your users download all the files in one click. It does *not* compress the files or unzip existing archives. 
9 | 10 | `client-zip` is lightweight (6.4 kB minified, 2.6 kB gzipped), dependency-free, and 40 times faster than the old JSZip. 11 | 12 | * [Quick Start](#Quick-Start) 13 | * [Compatibility](#Compatibility) 14 | * [Usage](#Usage) 15 | * [Benchmarks](#Benchmarks) 16 | * [Known Issues](#Known-Issues) 17 | * [Roadmap](#Roadmap) 18 | * [Notes and F.A.Q.](#Notes) 19 | 20 | # Quick Start 21 | 22 | ```sh 23 | npm i client-zip 24 | ``` 25 | 26 | (or just load the module from a CDN such as [UNPKG](https://unpkg.com/client-zip/index.js) or [jsDelivr](https://cdn.jsdelivr.net/npm/client-zip/index.js)) 27 | 28 | For direct usage with a ServiceWorker's `importScripts`, a [worker.js](https://unpkg.com/client-zip/worker.js) file is also available alongside the module. 29 | 30 | ```javascript 31 | import { downloadZip } from "https://cdn.jsdelivr.net/npm/client-zip/index.js" 32 | 33 | async function downloadTestZip() { 34 | // define what we want in the ZIP 35 | const code = await fetch("https://raw.githubusercontent.com/Touffy/client-zip/master/src/index.ts") 36 | const intro = { name: "intro.txt", lastModified: new Date(), input: "Hello. This is the client-zip library." } 37 | 38 | // get the ZIP stream in a Blob 39 | const blob = await downloadZip([intro, code]).blob() 40 | 41 | // make and click a temporary link to download the Blob 42 | const link = document.createElement("a") 43 | link.href = URL.createObjectURL(blob) 44 | link.download = "test.zip" 45 | link.click() 46 | link.remove() 47 | 48 | // in real life, don't forget to revoke your Blob URLs if you use them 49 | } 50 | ``` 51 | 52 | # Compatibility 53 | 54 | client-zip works in all modern browsers (and Deno) out of the box. If you bundle it with your app and try to transpile it down to lower than ES2020, it will break because it needs BigInts. [Version 1.x](https://www.npmjs.com/package/client-zip/v/nozip64) may be painfully transpiled down to as low as ES2015. 
55 | 56 | The default release of version 2 targets ES2020 and is a bare ES module + an IIFE version suitable for a ServiceWorker's `importScript`. Version 1 releases were built for ES2018. 57 | 58 | When necessary, client-zip version 2 will generate Zip64 archives. It will always specify "ZIP version 4.5 required to unzip", even when that's not really true. The resulting files are not readable by every ZIP reader out there. 59 | 60 | # Usage 61 | 62 | The module exports three functions: 63 | ```typescript 64 | function downloadZip(files: ForAwaitable, options?: Options): Response 65 | 66 | function makeZip(files: ForAwaitable, options?: Options): ReadableStream 67 | 68 | function predictLength(metadata: Iterable): bigint 69 | ``` 70 | 71 | `downloadZip` is obviously the main function and the only one exposed by the worker script. You give it an [(*async* or not) iterable a.k.a ForAwaitable](https://github.com/microsoft/TypeScript/issues/36153) list of inputs. Each input (`InputTypes`) can be: 72 | * a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) 73 | * a [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) 74 | * or an object with the properties: 75 | - `name`: the file name ; optional if your input is a File or a Response because they have relevant metadata 76 | - `lastModified`: last modification date of the file (defaults to `new Date()` unless the input is a File or Response with a valid "Last-Modified" header) 77 | - `input`: something that contains your data; it can be a `File`, a `Blob`, a `Response`, some kind of `ArrayView` or a raw `ArrayBuffer`, a `ReadableStream` (yes, only Uint8Arrays, but most APIs give you just that type anyway), an `AsyncIterable`, … or just a string. 78 | - `mode`: override the POSIX file mode (by default, it will be `0o664` for files and `0o775` for folders). Should be between `0` and `0o777` — disrespect that constraint at your own risk. 
79 | 80 | The *options* argument currently supports three properties, `length`, `metadata` (see [Content-Length prediction](#content-length-prediction)) and `buffersAreUTF8` (see [Filename encoding](#filename-encoding)). 81 | 82 | The function returns a `Response` immediately. You don't need to wait for the whole ZIP to be ready. It's up to you if you want to pipe the Response somewhere (e.g. if you are using `client-zip` inside a ServiceWorker) or let the browser buffer it all in a Blob. 83 | 84 | Unless your list of inputs is quite small, you should prefer generators (when zipping Files or other resources that are already available) and async generators (when zipping Responses so you can `fetch` them lazily, or other resources that are generated last-minute so you don't need to store them longer than necessary) to provide the inputs to `downloadZip`. 85 | 86 | `makeZip` is just like `downloadZip` except it returns the underlying `ReadableStream` directly, for use cases that do not involve actually downloading to the client filesystem. 87 | 88 | ## Content-Length prediction 89 | 90 | Because of client-zip's streaming design, it can't look ahead at all the files to determine how big the complete archive will be. The returned `Response` will therefore not have a "Content-Length" header, and that can be problematic. 91 | 92 | Starting with version 1.5, if you are able to gather all the relevant metadata (file sizes and names) before calling `downloadZip`, you can get it to predict the exact size of the archive and include it as a "Content-Length" header. 
The metadata must be a synchronous iterable, where each item (`MetadataTypes`) can be : 93 | 94 | * a [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response), either from the actual request you will use as input, or a HEAD request (either way, the response body will not be consumed at this point) 95 | * a [`File`](https://developer.mozilla.org/en-US/docs/Web/API/File) 96 | * or an object with the properties: 97 | - `name`: the file name ; optional if your input is a File or a Response because they have relevant metadata 98 | - `size`: the byte length of the file ; also optional if you provide a File or a Response with a Content-Length header 99 | - `input`: same as what you'd pass as the actual input, except this is optional here, and passing a Stream is completely useless 100 | 101 | If you already have Files (e.g. in a form input), it's alright to pass them as metadata too. However, if you would normally `fetch` each file from a server, or generate them dynamically, please try using a dedicated metadata endpoint or function, and transforming its response into an array of `{name, size}` objects, rather than doing all the requests or computations in advance just to get a Content-Length. 102 | 103 | An object with a *name* but no *input* and no *size* (not even zero) will be interpreted as an empty folder and renamed accordingly. To properly specify empty files without an *input*, set the *size* explicitly to zero (`0` or `0n`). 104 | 105 | This iterable of metadata can be passed as the `metadata` property of `downloadZip`'s *options*, or, if you want to display the predicted size without actually creating the Zip file, to the `predictLength` function (not exposed in the worker script). Naturally, the metadata and actual data must match, and be **provided in the same order!** Otherwise, there could be inaccuracies in Zip64 lengths. 
106 | 107 | In the case of `predictLength`, you can even save the return value and pass it later to `downloadZip` as the `length` option, instead of repeating the `metadata`. 108 | 109 | ## Filename encoding 110 | 111 | (tl;dr: set `buffersAreUTF8: true` in the *options* argument) 112 | 113 | In ZIP archives, the *language encoding flag* indicates that a filename is encoded in UTF-8. Some ZIP archive programs (e.g. built-in ZIP archive viewer in Windows) might not decode UTF-8 filenames correctly if this flag is off. 114 | 115 | `client-zip` always encodes **string** filenames (including filenames extracted from URLs) as UTF-8 and sets this flag for the related entries. However, `downloadZip`'s *options* include a `buffersAreUTF8` setting, affecting filenames that you supply as an **ArrayBuffer** (or ArrayView). 116 | 117 | By default (when `buffersAreUTF8` is not set or `undefined`), each ArrayBuffer filename will be tested, and flagged only if it is valid UTF-8. It is a safe default, but a little inefficient because UTF-8 is the only thing you can get in most contexts anyway. So you may tell client-zip to skip the test by setting `buffersAreUTF8: true` ; ArrayBuffers will *always* be flagged as UTF-8 without checking. 118 | 119 | If you happen to get your filenames from a dusty API reading from an antique filesystem with non-ASCII filenames encoded in some retro 8-bit encoding and you want to keep them that way in the ZIP archive, you may set `buffersAreUTF8: false` ; ArrayBuffer filenames will *never* be flagged as UTF-8. Please beware that the stored filenames will extract correctly only with a ZIP program using the same system encoding as the source. 120 | 121 | # Benchmarks 122 | 123 | *updated in may 2023* 124 | 125 | *updated again in may 2023 (experiment 3)* 126 | 127 | I started this project because I wasn't impressed with what — at the time — appeared to be the only other ZIP library for browsers, [JSZip](https://stuk.github.io/jszip/). 
I later found other libraries, which I've included in the new benchmarks, and JSZip has improved dramatically (version 3.6 was 40 times slower vs. currently only 40% slower). 128 | 129 | I requested Blob outputs from each lib, without compression. I measured the time until the blob was ready, on my M1 Pro. Sounds fair? 130 | 131 | **Experiment 1** consists of 4 files (total 539 MB) manually added to a file input from my local filesystem, so there is no latency and the ZIP format structural overhead is insignificant. 132 | 133 | **Experiment 2** is a set of 6214 small TGA files (total 119 MB). I tried to load them with a file input as before, but my browsers kept throwing errors while processing the large array of Files. So I had to switch to a different method, where the files are served over HTTP locally by nginx and *fetched* lazily. Unfortunately, that causes some atrocious latency across the board. 134 | 135 | **Experiment 3** is the same set of 6214 TGA files combined with very small PNG files for a total of 12 044 files (total 130 MB). This time, the files are *fetched* by a [DownloadStream](https://github.com/Touffy/dl-stream) to minimize latency. 
136 | 137 | | | | `client-zip`@2.4.3 | fflate@0.7.4 | zip.js@2.7.14 | conflux@4.0.3 | JSZip@3.10.1 | 138 | |:------------------|--------|-------------------:|---------------:|----------------:|----------------:|----------------:| 139 | | **experiment 1** | Safari | 1.647 (σ=21) s | 1.792 (σ=15) s | 1.912 (σ=80) s | 1.820 (σ=16) s | 2.122 (σ=60) s | 140 | | baseline: 1.653 s | Chrome | 2.480 (σ=41) s | 1.601 (σ=4) s | 4.251 (σ=53) s | 4.268 (σ=44) s | 3.921 (σ=15) s | 141 | | **experiment 2** | Safari | 2.173 (σ=11) s | 2.157 (σ=23) s | 3.158 (σ=17) s | 1.794 (σ=13) s | 2.631 (σ=27) s | 142 | | baseline: 0.615 s | Chrome | 3.567 (σ=77) s | 3.506 (σ=9) s | 5.689 (σ=17) s | 3.174 (σ=22) s | 4.602 (σ=50) s | 143 | | **experiment 3** | Safari | 1.768 (σ=12) s | 1.691 (σ=19) s | 3.149 (σ=45) s | 1.511 (σ=38) s | 2.703 (σ=79) s | 144 | | baseline: 0.892 s | Chrome | 4.604 (σ=79) s | 3.972 (σ=85) s | 7.507 (σ=261) s | 3.812 (σ=80) s | 6.297 (σ=35) s | 145 | 146 | The experiments were run 10 times (not counting a first run to let the JavaScript engine "warm up" and ensure the browser caches everything) for each lib and each dataset, *with the dev tools closed* (this is important, opening the dev tools has a noticeable impact on CPU and severe impact on HTTP latency). The numbers in the table are the mean time of the ten runs, with the standard deviation in parentheses. 147 | 148 | For the baseline, I timed the `zip -0` process in my UNIX shell. As advertised, fflate run just as fast — in Chrome, anyway, and when there is no overhead for HTTP (experiment 1). In the same test, client-zip beats everyone else in Safari. 149 | 150 | Conflux does particularly well in the second and third experiments thanks to its internal use of ReadableStreams, which seem to run faster than async generators. 151 | 152 | Zip.js workers were disabled because I didn't want to bother fixing the error I got from the library. 
Using workers on this task could only help by sacrificing lots of memory, anyway. But I suppose Zip.js really needs those workers to offset its disgraceful single-threaded performance. 153 | 154 | It's interesting that Chrome performs so much worse than Safari with client-zip and conflux, the two libraries that rely on WHATWG Streams and (in my case) async iterables, whereas it shows better (and extremely consistent) runtimes with fflate, which uses synchronous code with callbacks, in experiment 1. Zip.js and JSZip used to be faster in Chrome than Safari, but clearly things have changed. Experiments 2 and 3 are really taxing for Chrome. 155 | 156 | In a different experiment using Deno to avoid storing very large output files, memory usage for any amount of data remained constant or close enough. My tests maxed out at 36.1 MB of RAM while processing nearly 6 GB. 157 | 158 | Now, comparing bundle size is clearly unfair because the others do a bunch of things that my library doesn't. Here you go anyway (sizes are shown in decimal kilobytes): 159 | 160 | | | `client-zip`@2.5.0 | fflate@0.7.4 | zip.js@2.7.14 | conflux@4.0.3 | JSZip@3.10.1 | 161 | |--------------------|-------------------:|-------------:|--------------:|--------------:|--------------:| 162 | | minified | 6.4 kB | 29.8 kB | 163.2 kB | 198.8 kB | 94.9 kB | 163 | | minified + gzipped | 2.6 kB | 11 kB | 58 kB | 56.6 kB | 27.6 kB | 164 | 165 | The datasets I used in the new tests are not public domain, but nothing sensitive either ; I can send them if you ask. 166 | 167 | # Known Issues 168 | 169 | * MS Office documents must be stored using ZIP version 2.0 ; [use client-zip^1 to generate those](https://github.com/Touffy/client-zip/issues/59), you don't need client-zip^2 features for Office documents anyway. 170 | * client-zip cannot be bundled by SSR frameworks that expect it to run server-side too ([workaround](https://github.com/Touffy/client-zip/issues/28#issuecomment-1018033984)). 
171 | * Firefox may kill a Service Worker that is still feeding a download ([workaround](https://github.com/Touffy/client-zip/issues/46#issuecomment-1259223708)). 172 | * Safari could not download from a Service Worker until version 15.4 (released 4 march 2022). 173 | 174 | # Roadmap 175 | 176 | `client-zip` does not support compression, encryption, or any extra fields and attributes. It already meets the need that sparked its creation: combining many `fetch` responses into a one-click download for the end user. 177 | 178 | **New in version 2**: it now generates Zip64 archives, which increases the limit on file size to 4 Exabytes (because of JavaScript numbers) and total size to 18 Zettabytes. 179 | **New in version 2.2**: archive size can be predicted and used as the response's Content-Length. 180 | 181 | If you need a feature, you're very welcome to [open an issue](https://github.com/Touffy/client-zip/issues) or submit a pull request. 182 | 183 | ### extra fields 184 | 185 | Should be straightforward to implement if needed. Maybe `client-zip` should allow extending by third-party code so those extra fields can be plug-ins instead of built into the library. 186 | 187 | Configurable UNIX permissions in external attributes. The UNIX permissions are now configurable (since 1.7.0) via the `mode` field, set by default to 664 for files, 775 for folders. 188 | 189 | ### ZIP64 190 | 191 | Done. 192 | 193 | ### compression 194 | 195 | Limited use case. If the user is going to extract the archive just after downloading anyway, it's a waste of CPU. Implementation should be relatively easy with the new CompressionStream API. Incompatible with content-length prediction. 196 | 197 | ### encryption 198 | 199 | AES and RSA encryption could have been implemented with [WebCrypto](https://www.w3.org/TR/WebCryptoAPI/). However, only the proprietary PKWARE utility supports strong encryption of file contents **and metadata**. 
Well-supported Zip encryption methods (even using AES) do not hide metadata, giving you questionable privacy. Therefore, **this feature is no longer planned for client-zip**. 200 | 201 | ### performance improvements 202 | 203 | The current implementation does a fair bit of ArrayBuffer copying and allocation, much of which can be avoided with brand new (and sadly not widely supported yet) browser APIs like [`TextEncoder.encodeInto`](https://encoding.spec.whatwg.org/#dom-textencoder-encodeinto), [`TextEncoderStream`](https://encoding.spec.whatwg.org/#interface-textencoderstream), [BYOB Streams](https://streams.spec.whatwg.org/#byob-readers) and [`TransformStreams`](https://streams.spec.whatwg.org/#ts-model). 204 | 205 | CRC-32 computation is, and will certainly remain, by far the largest performance bottleneck in client-zip. Currently, it is implemented with a version of Sarwate's standard algorithm in JavaScript. My initial experiments have shown that a version of the slice-by-8 algorithm using SIMD instructions in WebAssembly can run a bit faster, but the previous (simpler) WASM implementation is now slower than pure JavaScript. 206 | 207 | # Notes 208 | 209 | ## A note about dates 210 | 211 | The old DOS date/time format used by ZIP files is an unspecified "local time". Therefore, to ensure the best results for the end user, `client-zip` will use the client's own timezone (not UTC or something decided by the author), resulting in a ZIP archive that varies across different clients. If you write integration tests that expect an exact binary content, make sure you set the machine running the tests to the same timezone as the one that generated the expected content. 212 | 213 | ## How can I include folders in the archive ? 214 | 215 | When the folder has contents, just include the folder hierarchy in its content's filenames (e.g. `{ name: "folder/file.ext", input }` will implicitly create "folder/" and place "file.ext" in it). 
Empty folders can be specified as `{ name: "folder/" }` (with **no size**, **no input**, and an optional lastModified property). Forward slashes even for Windows users ! 216 | 217 | Any input object that has no size and no input will be treated as a folder, and a trailing slash will be added to its filename when necessary. Conversely, any input object that has a size or input (even an empty string) will be treated as a file, and the trailing slash will be removed if present. 218 | 219 | Usage of `predictLength` or the `metadata` option must be consistent with the actual input. For example, if `{ name: "file" }` is passed as metadata, client-zip will think it's an empty folder named "file/". If you then pass `{ input: "", name: "file" }` in the same order to `downloadZip`, it will store the contents as an empty file with no trailing slash ; therefore, the predicted length will be off by at least one. 220 | -------------------------------------------------------------------------------- /demo/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Bundler.js test 6 | 11 | 12 | 13 | 14 | 15 |

(including two strings and a file fetched from GitHub)

16 | 17 | 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /demo/worker.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | ServiceWorker streaming demo 6 | 22 | 23 | 24 | 25 |

ServiceWorker streaming demo

26 |

This shows how you can leverage a ServiceWorker to stream without ever having to store 27 | a full Blob of the ZIP in the browser. 28 |

29 |

I'm doing it here by POSTing a simple form with an arbitrary number of URL fields 30 | (therefore no JavaScript is needed to send the list — no onsubmit — but only to register the ServiceWorker 31 | and clone the URL input). You could also use hidden fields to specify some extra URLs or headers, 32 | even specify all the URLs that way to make a non-interactive form with only a submit button visible. 33 |

34 |

The ServiceWorker (just 14 lines) intercepts the request, lazily fetches the URLs found in the form data 35 | (all the fields named "url") with a DownloadStream, 36 | feeds that to downloadZip and responds with downloadZip's return value (which is a Response). 37 | That's it. 38 | The process is only a bit more complicated than using a Blob (look at the code). 39 |

40 | 41 |
42 |

URLs of documents to include in the ZIP (for cross-origin URLs, the server must send CORS headers):

43 |
    44 |
  1. 45 |
46 | 47 | 48 | 49 |
50 | 51 | 57 | 58 | 76 | 77 | 78 | -------------------------------------------------------------------------------- /demo/worker.js: -------------------------------------------------------------------------------- 1 | importScripts('https://unpkg.com/client-zip/worker.js', 'https://unpkg.com/dl-stream/worker.js') 2 | 3 | self.addEventListener("fetch", (event) => { 4 | const url = new URL(event.request.url) 5 | // This will intercept all request with a URL starting in /downloadZip/ ; 6 | // you should use a meaningful URL for each download, for example /downloadZip/invoices.zip 7 | const [,name] = url.pathname.match(/\/downloadZip\/(.+)/i) || [,] 8 | if (url.origin === self.origin && name) 9 | event.respondWith(event.request.formData() 10 | .then(data => downloadZip(new DownloadStream(data.getAll('url')))) 11 | .catch(err => new Response(err.message, { status: 500 })) 12 | ) 13 | }) 14 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | type BufferLike = ArrayBuffer | string | ArrayBufferView | Blob 2 | type StreamLike = ReadableStream | AsyncIterable 3 | 4 | /** The file name, modification date and size will be read from the input; 5 | * extra arguments can be given to override the input’s metadata. */ 6 | type InputWithMeta = File | Response | { input: File | Response, name?: any, lastModified?: any, size?: number | bigint, mode?: number } 7 | 8 | /** Intrinsic size, but the file name must be provided and modification date can’t be guessed. */ 9 | type InputWithSizeMeta = { input: BufferLike, name: any, lastModified?: any, size?: number | bigint, mode?: number } 10 | 11 | /** The file name must be provided ; modification date and content length can’t be guessed. 
*/ 12 | type InputWithoutMeta = { input: StreamLike, name: any, lastModified?: any, size?: number | bigint, mode?: number } 13 | 14 | /** The folder name must be provided ; modification date can’t be guessed. */ 15 | type InputFolder = { name: any, lastModified?: any, input?: never, size?: never, mode?: number } 16 | 17 | /** Both filename and size must be provided ; input is not helpful here. */ 18 | type JustMeta = { input?: StreamLike | undefined, name: any, lastModified?: any, size: number | bigint, mode?: number } 19 | 20 | type ForAwaitable = AsyncIterable | Iterable 21 | 22 | type Options = { 23 | /** If provided, the returned Response will have its `Content-Length` header set to this value. 24 | * It can be computed accurately with the `predictLength` function. */ 25 | length?: number | bigint 26 | /** If provided, the returned Response will have its `Content-Length` header set to the result of 27 | * calling `predictLength` on that metadata. Overrides the `length` option. */ 28 | metadata?: Iterable 29 | /** The ZIP *language encoding flag* will always be set when a filename was given as a string, 30 | * but when it is given as an ArrayView or ArrayBuffer, it depends on this option : 31 | * - `true`: always on (ArrayBuffers will *always* be flagged as UTF-8) — recommended, 32 | * - `false`: always off (ArrayBuffers will *never* be flagged as UTF-8), 33 | * - `undefined`: each ArrayBuffer will be tested and flagged if it is valid UTF-8. */ 34 | buffersAreUTF8?: boolean 35 | } 36 | 37 | /** Given an iterable of file metadata (or equivalent), 38 | * @returns the exact byte length of the Zip file that would be generated by `downloadZip`. 
*/ 39 | export declare function predictLength(files: Iterable): bigint 40 | 41 | export declare function downloadZip(files: ForAwaitable, options?: Options): Response 42 | 43 | export declare function makeZip(files: ForAwaitable, options?: Options): ReadableStream 44 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "client-zip", 3 | "version": "2.5.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "client-zip", 9 | "version": "2.5.0", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "esbuild": "^0.25.1", 13 | "terser": "~5.39" 14 | } 15 | }, 16 | "node_modules/@esbuild/aix-ppc64": { 17 | "version": "0.25.1", 18 | "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.1.tgz", 19 | "integrity": "sha512-kfYGy8IdzTGy+z0vFGvExZtxkFlA4zAxgKEahG9KE1ScBjpQnFsNOX8KTU5ojNru5ed5CVoJYXFtoxaq5nFbjQ==", 20 | "cpu": [ 21 | "ppc64" 22 | ], 23 | "dev": true, 24 | "license": "MIT", 25 | "optional": true, 26 | "os": [ 27 | "aix" 28 | ], 29 | "engines": { 30 | "node": ">=18" 31 | } 32 | }, 33 | "node_modules/@esbuild/android-arm": { 34 | "version": "0.25.1", 35 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.1.tgz", 36 | "integrity": "sha512-dp+MshLYux6j/JjdqVLnMglQlFu+MuVeNrmT5nk6q07wNhCdSnB7QZj+7G8VMUGh1q+vj2Bq8kRsuyA00I/k+Q==", 37 | "cpu": [ 38 | "arm" 39 | ], 40 | "dev": true, 41 | "license": "MIT", 42 | "optional": true, 43 | "os": [ 44 | "android" 45 | ], 46 | "engines": { 47 | "node": ">=18" 48 | } 49 | }, 50 | "node_modules/@esbuild/android-arm64": { 51 | "version": "0.25.1", 52 | "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.1.tgz", 53 | "integrity": "sha512-50tM0zCJW5kGqgG7fQ7IHvQOcAn9TKiVRuQ/lN0xR+T2lzEFvAi1ZcS8DiksFcEpf1t/GYOeOfCAgDHFpkiSmA==", 54 | "cpu": [ 55 | "arm64" 56 | ], 57 | 
"dev": true, 58 | "license": "MIT", 59 | "optional": true, 60 | "os": [ 61 | "android" 62 | ], 63 | "engines": { 64 | "node": ">=18" 65 | } 66 | }, 67 | "node_modules/@esbuild/android-x64": { 68 | "version": "0.25.1", 69 | "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.1.tgz", 70 | "integrity": "sha512-GCj6WfUtNldqUzYkN/ITtlhwQqGWu9S45vUXs7EIYf+7rCiiqH9bCloatO9VhxsL0Pji+PF4Lz2XXCES+Q8hDw==", 71 | "cpu": [ 72 | "x64" 73 | ], 74 | "dev": true, 75 | "license": "MIT", 76 | "optional": true, 77 | "os": [ 78 | "android" 79 | ], 80 | "engines": { 81 | "node": ">=18" 82 | } 83 | }, 84 | "node_modules/@esbuild/darwin-arm64": { 85 | "version": "0.25.1", 86 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.1.tgz", 87 | "integrity": "sha512-5hEZKPf+nQjYoSr/elb62U19/l1mZDdqidGfmFutVUjjUZrOazAtwK+Kr+3y0C/oeJfLlxo9fXb1w7L+P7E4FQ==", 88 | "cpu": [ 89 | "arm64" 90 | ], 91 | "dev": true, 92 | "license": "MIT", 93 | "optional": true, 94 | "os": [ 95 | "darwin" 96 | ], 97 | "engines": { 98 | "node": ">=18" 99 | } 100 | }, 101 | "node_modules/@esbuild/darwin-x64": { 102 | "version": "0.25.1", 103 | "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.1.tgz", 104 | "integrity": "sha512-hxVnwL2Dqs3fM1IWq8Iezh0cX7ZGdVhbTfnOy5uURtao5OIVCEyj9xIzemDi7sRvKsuSdtCAhMKarxqtlyVyfA==", 105 | "cpu": [ 106 | "x64" 107 | ], 108 | "dev": true, 109 | "license": "MIT", 110 | "optional": true, 111 | "os": [ 112 | "darwin" 113 | ], 114 | "engines": { 115 | "node": ">=18" 116 | } 117 | }, 118 | "node_modules/@esbuild/freebsd-arm64": { 119 | "version": "0.25.1", 120 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.1.tgz", 121 | "integrity": "sha512-1MrCZs0fZa2g8E+FUo2ipw6jw5qqQiH+tERoS5fAfKnRx6NXH31tXBKI3VpmLijLH6yriMZsxJtaXUyFt/8Y4A==", 122 | "cpu": [ 123 | "arm64" 124 | ], 125 | "dev": true, 126 | "license": "MIT", 127 | "optional": true, 128 | "os": [ 129 | "freebsd" 130 | ], 
131 | "engines": { 132 | "node": ">=18" 133 | } 134 | }, 135 | "node_modules/@esbuild/freebsd-x64": { 136 | "version": "0.25.1", 137 | "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.1.tgz", 138 | "integrity": "sha512-0IZWLiTyz7nm0xuIs0q1Y3QWJC52R8aSXxe40VUxm6BB1RNmkODtW6LHvWRrGiICulcX7ZvyH6h5fqdLu4gkww==", 139 | "cpu": [ 140 | "x64" 141 | ], 142 | "dev": true, 143 | "license": "MIT", 144 | "optional": true, 145 | "os": [ 146 | "freebsd" 147 | ], 148 | "engines": { 149 | "node": ">=18" 150 | } 151 | }, 152 | "node_modules/@esbuild/linux-arm": { 153 | "version": "0.25.1", 154 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.1.tgz", 155 | "integrity": "sha512-NdKOhS4u7JhDKw9G3cY6sWqFcnLITn6SqivVArbzIaf3cemShqfLGHYMx8Xlm/lBit3/5d7kXvriTUGa5YViuQ==", 156 | "cpu": [ 157 | "arm" 158 | ], 159 | "dev": true, 160 | "license": "MIT", 161 | "optional": true, 162 | "os": [ 163 | "linux" 164 | ], 165 | "engines": { 166 | "node": ">=18" 167 | } 168 | }, 169 | "node_modules/@esbuild/linux-arm64": { 170 | "version": "0.25.1", 171 | "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.1.tgz", 172 | "integrity": "sha512-jaN3dHi0/DDPelk0nLcXRm1q7DNJpjXy7yWaWvbfkPvI+7XNSc/lDOnCLN7gzsyzgu6qSAmgSvP9oXAhP973uQ==", 173 | "cpu": [ 174 | "arm64" 175 | ], 176 | "dev": true, 177 | "license": "MIT", 178 | "optional": true, 179 | "os": [ 180 | "linux" 181 | ], 182 | "engines": { 183 | "node": ">=18" 184 | } 185 | }, 186 | "node_modules/@esbuild/linux-ia32": { 187 | "version": "0.25.1", 188 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.1.tgz", 189 | "integrity": "sha512-OJykPaF4v8JidKNGz8c/q1lBO44sQNUQtq1KktJXdBLn1hPod5rE/Hko5ugKKZd+D2+o1a9MFGUEIUwO2YfgkQ==", 190 | "cpu": [ 191 | "ia32" 192 | ], 193 | "dev": true, 194 | "license": "MIT", 195 | "optional": true, 196 | "os": [ 197 | "linux" 198 | ], 199 | "engines": { 200 | "node": ">=18" 201 | } 202 | }, 203 | 
"node_modules/@esbuild/linux-loong64": { 204 | "version": "0.25.1", 205 | "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.1.tgz", 206 | "integrity": "sha512-nGfornQj4dzcq5Vp835oM/o21UMlXzn79KobKlcs3Wz9smwiifknLy4xDCLUU0BWp7b/houtdrgUz7nOGnfIYg==", 207 | "cpu": [ 208 | "loong64" 209 | ], 210 | "dev": true, 211 | "license": "MIT", 212 | "optional": true, 213 | "os": [ 214 | "linux" 215 | ], 216 | "engines": { 217 | "node": ">=18" 218 | } 219 | }, 220 | "node_modules/@esbuild/linux-mips64el": { 221 | "version": "0.25.1", 222 | "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.1.tgz", 223 | "integrity": "sha512-1osBbPEFYwIE5IVB/0g2X6i1qInZa1aIoj1TdL4AaAb55xIIgbg8Doq6a5BzYWgr+tEcDzYH67XVnTmUzL+nXg==", 224 | "cpu": [ 225 | "mips64el" 226 | ], 227 | "dev": true, 228 | "license": "MIT", 229 | "optional": true, 230 | "os": [ 231 | "linux" 232 | ], 233 | "engines": { 234 | "node": ">=18" 235 | } 236 | }, 237 | "node_modules/@esbuild/linux-ppc64": { 238 | "version": "0.25.1", 239 | "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.1.tgz", 240 | "integrity": "sha512-/6VBJOwUf3TdTvJZ82qF3tbLuWsscd7/1w+D9LH0W/SqUgM5/JJD0lrJ1fVIfZsqB6RFmLCe0Xz3fmZc3WtyVg==", 241 | "cpu": [ 242 | "ppc64" 243 | ], 244 | "dev": true, 245 | "license": "MIT", 246 | "optional": true, 247 | "os": [ 248 | "linux" 249 | ], 250 | "engines": { 251 | "node": ">=18" 252 | } 253 | }, 254 | "node_modules/@esbuild/linux-riscv64": { 255 | "version": "0.25.1", 256 | "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.1.tgz", 257 | "integrity": "sha512-nSut/Mx5gnilhcq2yIMLMe3Wl4FK5wx/o0QuuCLMtmJn+WeWYoEGDN1ipcN72g1WHsnIbxGXd4i/MF0gTcuAjQ==", 258 | "cpu": [ 259 | "riscv64" 260 | ], 261 | "dev": true, 262 | "license": "MIT", 263 | "optional": true, 264 | "os": [ 265 | "linux" 266 | ], 267 | "engines": { 268 | "node": ">=18" 269 | } 270 | }, 271 | "node_modules/@esbuild/linux-s390x": { 
272 | "version": "0.25.1", 273 | "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.1.tgz", 274 | "integrity": "sha512-cEECeLlJNfT8kZHqLarDBQso9a27o2Zd2AQ8USAEoGtejOrCYHNtKP8XQhMDJMtthdF4GBmjR2au3x1udADQQQ==", 275 | "cpu": [ 276 | "s390x" 277 | ], 278 | "dev": true, 279 | "license": "MIT", 280 | "optional": true, 281 | "os": [ 282 | "linux" 283 | ], 284 | "engines": { 285 | "node": ">=18" 286 | } 287 | }, 288 | "node_modules/@esbuild/linux-x64": { 289 | "version": "0.25.1", 290 | "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.1.tgz", 291 | "integrity": "sha512-xbfUhu/gnvSEg+EGovRc+kjBAkrvtk38RlerAzQxvMzlB4fXpCFCeUAYzJvrnhFtdeyVCDANSjJvOvGYoeKzFA==", 292 | "cpu": [ 293 | "x64" 294 | ], 295 | "dev": true, 296 | "license": "MIT", 297 | "optional": true, 298 | "os": [ 299 | "linux" 300 | ], 301 | "engines": { 302 | "node": ">=18" 303 | } 304 | }, 305 | "node_modules/@esbuild/netbsd-arm64": { 306 | "version": "0.25.1", 307 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.1.tgz", 308 | "integrity": "sha512-O96poM2XGhLtpTh+s4+nP7YCCAfb4tJNRVZHfIE7dgmax+yMP2WgMd2OecBuaATHKTHsLWHQeuaxMRnCsH8+5g==", 309 | "cpu": [ 310 | "arm64" 311 | ], 312 | "dev": true, 313 | "license": "MIT", 314 | "optional": true, 315 | "os": [ 316 | "netbsd" 317 | ], 318 | "engines": { 319 | "node": ">=18" 320 | } 321 | }, 322 | "node_modules/@esbuild/netbsd-x64": { 323 | "version": "0.25.1", 324 | "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.1.tgz", 325 | "integrity": "sha512-X53z6uXip6KFXBQ+Krbx25XHV/NCbzryM6ehOAeAil7X7oa4XIq+394PWGnwaSQ2WRA0KI6PUO6hTO5zeF5ijA==", 326 | "cpu": [ 327 | "x64" 328 | ], 329 | "dev": true, 330 | "license": "MIT", 331 | "optional": true, 332 | "os": [ 333 | "netbsd" 334 | ], 335 | "engines": { 336 | "node": ">=18" 337 | } 338 | }, 339 | "node_modules/@esbuild/openbsd-arm64": { 340 | "version": "0.25.1", 341 | "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.1.tgz", 342 | "integrity": "sha512-Na9T3szbXezdzM/Kfs3GcRQNjHzM6GzFBeU1/6IV/npKP5ORtp9zbQjvkDJ47s6BCgaAZnnnu/cY1x342+MvZg==", 343 | "cpu": [ 344 | "arm64" 345 | ], 346 | "dev": true, 347 | "license": "MIT", 348 | "optional": true, 349 | "os": [ 350 | "openbsd" 351 | ], 352 | "engines": { 353 | "node": ">=18" 354 | } 355 | }, 356 | "node_modules/@esbuild/openbsd-x64": { 357 | "version": "0.25.1", 358 | "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.1.tgz", 359 | "integrity": "sha512-T3H78X2h1tszfRSf+txbt5aOp/e7TAz3ptVKu9Oyir3IAOFPGV6O9c2naym5TOriy1l0nNf6a4X5UXRZSGX/dw==", 360 | "cpu": [ 361 | "x64" 362 | ], 363 | "dev": true, 364 | "license": "MIT", 365 | "optional": true, 366 | "os": [ 367 | "openbsd" 368 | ], 369 | "engines": { 370 | "node": ">=18" 371 | } 372 | }, 373 | "node_modules/@esbuild/sunos-x64": { 374 | "version": "0.25.1", 375 | "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.1.tgz", 376 | "integrity": "sha512-2H3RUvcmULO7dIE5EWJH8eubZAI4xw54H1ilJnRNZdeo8dTADEZ21w6J22XBkXqGJbe0+wnNJtw3UXRoLJnFEg==", 377 | "cpu": [ 378 | "x64" 379 | ], 380 | "dev": true, 381 | "license": "MIT", 382 | "optional": true, 383 | "os": [ 384 | "sunos" 385 | ], 386 | "engines": { 387 | "node": ">=18" 388 | } 389 | }, 390 | "node_modules/@esbuild/win32-arm64": { 391 | "version": "0.25.1", 392 | "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.1.tgz", 393 | "integrity": "sha512-GE7XvrdOzrb+yVKB9KsRMq+7a2U/K5Cf/8grVFRAGJmfADr/e/ODQ134RK2/eeHqYV5eQRFxb1hY7Nr15fv1NQ==", 394 | "cpu": [ 395 | "arm64" 396 | ], 397 | "dev": true, 398 | "license": "MIT", 399 | "optional": true, 400 | "os": [ 401 | "win32" 402 | ], 403 | "engines": { 404 | "node": ">=18" 405 | } 406 | }, 407 | "node_modules/@esbuild/win32-ia32": { 408 | "version": "0.25.1", 409 | "resolved": 
"https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.1.tgz", 410 | "integrity": "sha512-uOxSJCIcavSiT6UnBhBzE8wy3n0hOkJsBOzy7HDAuTDE++1DJMRRVCPGisULScHL+a/ZwdXPpXD3IyFKjA7K8A==", 411 | "cpu": [ 412 | "ia32" 413 | ], 414 | "dev": true, 415 | "license": "MIT", 416 | "optional": true, 417 | "os": [ 418 | "win32" 419 | ], 420 | "engines": { 421 | "node": ">=18" 422 | } 423 | }, 424 | "node_modules/@esbuild/win32-x64": { 425 | "version": "0.25.1", 426 | "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.1.tgz", 427 | "integrity": "sha512-Y1EQdcfwMSeQN/ujR5VayLOJ1BHaK+ssyk0AEzPjC+t1lITgsnccPqFjb6V+LsTp/9Iov4ysfjxLaGJ9RPtkVg==", 428 | "cpu": [ 429 | "x64" 430 | ], 431 | "dev": true, 432 | "license": "MIT", 433 | "optional": true, 434 | "os": [ 435 | "win32" 436 | ], 437 | "engines": { 438 | "node": ">=18" 439 | } 440 | }, 441 | "node_modules/@jridgewell/gen-mapping": { 442 | "version": "0.3.8", 443 | "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz", 444 | "integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==", 445 | "dev": true, 446 | "license": "MIT", 447 | "dependencies": { 448 | "@jridgewell/set-array": "^1.2.1", 449 | "@jridgewell/sourcemap-codec": "^1.4.10", 450 | "@jridgewell/trace-mapping": "^0.3.24" 451 | }, 452 | "engines": { 453 | "node": ">=6.0.0" 454 | } 455 | }, 456 | "node_modules/@jridgewell/resolve-uri": { 457 | "version": "3.1.2", 458 | "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", 459 | "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", 460 | "dev": true, 461 | "license": "MIT", 462 | "engines": { 463 | "node": ">=6.0.0" 464 | } 465 | }, 466 | "node_modules/@jridgewell/set-array": { 467 | "version": "1.2.1", 468 | "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", 469 | "integrity": 
"sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", 470 | "dev": true, 471 | "license": "MIT", 472 | "engines": { 473 | "node": ">=6.0.0" 474 | } 475 | }, 476 | "node_modules/@jridgewell/source-map": { 477 | "version": "0.3.6", 478 | "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.6.tgz", 479 | "integrity": "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ==", 480 | "dev": true, 481 | "license": "MIT", 482 | "dependencies": { 483 | "@jridgewell/gen-mapping": "^0.3.5", 484 | "@jridgewell/trace-mapping": "^0.3.25" 485 | } 486 | }, 487 | "node_modules/@jridgewell/sourcemap-codec": { 488 | "version": "1.5.0", 489 | "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", 490 | "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", 491 | "dev": true, 492 | "license": "MIT" 493 | }, 494 | "node_modules/@jridgewell/trace-mapping": { 495 | "version": "0.3.25", 496 | "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", 497 | "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", 498 | "dev": true, 499 | "license": "MIT", 500 | "dependencies": { 501 | "@jridgewell/resolve-uri": "^3.1.0", 502 | "@jridgewell/sourcemap-codec": "^1.4.14" 503 | } 504 | }, 505 | "node_modules/acorn": { 506 | "version": "8.14.1", 507 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", 508 | "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", 509 | "dev": true, 510 | "license": "MIT", 511 | "bin": { 512 | "acorn": "bin/acorn" 513 | }, 514 | "engines": { 515 | "node": ">=0.4.0" 516 | } 517 | }, 518 | "node_modules/buffer-from": { 519 | "version": "1.1.2", 520 | "resolved": 
"https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", 521 | "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", 522 | "dev": true, 523 | "license": "MIT" 524 | }, 525 | "node_modules/commander": { 526 | "version": "2.20.3", 527 | "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", 528 | "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", 529 | "dev": true, 530 | "license": "MIT" 531 | }, 532 | "node_modules/esbuild": { 533 | "version": "0.25.1", 534 | "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", 535 | "integrity": "sha512-BGO5LtrGC7vxnqucAe/rmvKdJllfGaYWdyABvyMoXQlfYMb2bbRuReWR5tEGE//4LcNJj9XrkovTqNYRFZHAMQ==", 536 | "dev": true, 537 | "hasInstallScript": true, 538 | "license": "MIT", 539 | "bin": { 540 | "esbuild": "bin/esbuild" 541 | }, 542 | "engines": { 543 | "node": ">=18" 544 | }, 545 | "optionalDependencies": { 546 | "@esbuild/aix-ppc64": "0.25.1", 547 | "@esbuild/android-arm": "0.25.1", 548 | "@esbuild/android-arm64": "0.25.1", 549 | "@esbuild/android-x64": "0.25.1", 550 | "@esbuild/darwin-arm64": "0.25.1", 551 | "@esbuild/darwin-x64": "0.25.1", 552 | "@esbuild/freebsd-arm64": "0.25.1", 553 | "@esbuild/freebsd-x64": "0.25.1", 554 | "@esbuild/linux-arm": "0.25.1", 555 | "@esbuild/linux-arm64": "0.25.1", 556 | "@esbuild/linux-ia32": "0.25.1", 557 | "@esbuild/linux-loong64": "0.25.1", 558 | "@esbuild/linux-mips64el": "0.25.1", 559 | "@esbuild/linux-ppc64": "0.25.1", 560 | "@esbuild/linux-riscv64": "0.25.1", 561 | "@esbuild/linux-s390x": "0.25.1", 562 | "@esbuild/linux-x64": "0.25.1", 563 | "@esbuild/netbsd-arm64": "0.25.1", 564 | "@esbuild/netbsd-x64": "0.25.1", 565 | "@esbuild/openbsd-arm64": "0.25.1", 566 | "@esbuild/openbsd-x64": "0.25.1", 567 | "@esbuild/sunos-x64": "0.25.1", 568 | "@esbuild/win32-arm64": "0.25.1", 569 | "@esbuild/win32-ia32": "0.25.1", 570 | 
"@esbuild/win32-x64": "0.25.1" 571 | } 572 | }, 573 | "node_modules/source-map": { 574 | "version": "0.6.1", 575 | "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", 576 | "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", 577 | "dev": true, 578 | "license": "BSD-3-Clause", 579 | "engines": { 580 | "node": ">=0.10.0" 581 | } 582 | }, 583 | "node_modules/source-map-support": { 584 | "version": "0.5.21", 585 | "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", 586 | "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", 587 | "dev": true, 588 | "license": "MIT", 589 | "dependencies": { 590 | "buffer-from": "^1.0.0", 591 | "source-map": "^0.6.0" 592 | } 593 | }, 594 | "node_modules/terser": { 595 | "version": "5.39.0", 596 | "resolved": "https://registry.npmjs.org/terser/-/terser-5.39.0.tgz", 597 | "integrity": "sha512-LBAhFyLho16harJoWMg/nZsQYgTrg5jXOn2nCYjRUcZZEdE3qa2zb8QEDRUGVZBW4rlazf2fxkg8tztybTaqWw==", 598 | "dev": true, 599 | "license": "BSD-2-Clause", 600 | "dependencies": { 601 | "@jridgewell/source-map": "^0.3.3", 602 | "acorn": "^8.8.2", 603 | "commander": "^2.20.0", 604 | "source-map-support": "~0.5.20" 605 | }, 606 | "bin": { 607 | "terser": "bin/terser" 608 | }, 609 | "engines": { 610 | "node": ">=10" 611 | } 612 | } 613 | } 614 | } 615 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "client-zip", 3 | "version": "2.5.0", 4 | "description": "A tiny and fast client-side streaming ZIP generator", 5 | "browser": "index.js", 6 | "module": "index.js", 7 | "types": "index.d.ts", 8 | "type": "module", 9 | "exports": { 10 | ".": "./index.js" 11 | }, 12 | "scripts": { 13 | "start": "npm run build:module && npm run build:worker", 14 | 
"build:module": "esbuild --target=es2020 --bundle src/index.ts --format=esm | terser --config-file terser.json --module -o index.js", 15 | "build:worker": "esbuild --target=es2020 --bundle src/worker.ts --format=iife --global-name=downloadZip | terser --config-file terser.json -o worker.js", 16 | "test": "deno test --allow-read test/" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "git@github.com:Touffy/client-zip.git" 21 | }, 22 | "keywords": [ 23 | "zip", 24 | "stream", 25 | "browser", 26 | "zip64" 27 | ], 28 | "author": "David Junger ", 29 | "license": "MIT", 30 | "devDependencies": { 31 | "esbuild": "^0.25.1", 32 | "terser": "~5.39" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/crc32.ts: -------------------------------------------------------------------------------- 1 | export const CRC_TABLE = new Uint32Array(256); 2 | 3 | for (let i = 0; i < 256; ++i) { 4 | let crc = i; 5 | for (let j = 0; j < 8; ++j) { 6 | crc = (crc >>> 1) ^ (crc & 0x01 && 0xEDB88320); 7 | } 8 | CRC_TABLE[i] = crc; 9 | } 10 | 11 | export function crc32(data: Uint8Array, crc = 0): number { 12 | crc = crc ^ -1; 13 | for (var i=0, l=data.length; i>> 8 ^ CRC_TABLE[crc & 0xFF ^ data[i]]; 15 | } 16 | return (crc ^ -1) >>> 0; 17 | }; 18 | -------------------------------------------------------------------------------- /src/datetime.ts: -------------------------------------------------------------------------------- 1 | export function formatDOSDateTime(date: Date, into: DataView, offset = 0) { 2 | const dosTime = date.getSeconds() >> 1 3 | | date.getMinutes() << 5 4 | | date.getHours() << 11 5 | 6 | const dosDate = date.getDate() 7 | | (date.getMonth() + 1) << 5 8 | | (date.getFullYear() - 1980) << 9 9 | 10 | into.setUint16(offset, dosTime, true) 11 | into.setUint16(offset + 2, dosDate, true) 12 | } 13 | -------------------------------------------------------------------------------- /src/index.ts: 
-------------------------------------------------------------------------------- 1 | import "./polyfills.ts" 2 | import { BufferLike, StreamLike, normalizeInput, ReadableFromIterator } from "./input.ts" 3 | import { normalizeMetadata } from "./metadata.ts" 4 | import { loadFiles, contentLength, ForAwaitable } from "./zip.ts" 5 | 6 | /** The file name, modification date and size will be read from the input; 7 | * extra arguments can be given to override the input’s metadata. */ 8 | type InputWithMeta = File | Response | { input: File | Response, name?: any, lastModified?: any, size?: number | bigint, mode?: number } 9 | 10 | /** Intrinsic size, but the file name must be provided and modification date can’t be guessed. */ 11 | type InputWithSizeMeta = { input: BufferLike, name: any, lastModified?: any, size?: number | bigint, mode?: number } 12 | 13 | /** The file name must be provided ; modification date and content length can’t be guessed. */ 14 | type InputWithoutMeta = { input: StreamLike, name: any, lastModified?: any, size?: number | bigint, mode?: number } 15 | 16 | /** The folder name must be provided ; modification date can’t be guessed. */ 17 | type InputFolder = { name: any, lastModified?: any, input?: never, size?: never, mode?: number } 18 | 19 | /** Both filename and size must be provided ; input is not helpful here. */ 20 | type JustMeta = { input?: StreamLike | undefined, name: any, lastModified?: any, size: number | bigint, mode?: number } 21 | 22 | export type Options = { 23 | /** If provided, the returned Response will have its `Content-Length` header set to this value. 24 | * It can be computed accurately with the `predictLength` function. */ 25 | length?: number | bigint 26 | /** If provided, the returned Response will have its `Content-Length` header set to the result of 27 | * calling `predictLength` on that metadata. Overrides the `length` option. 
*/ 28 | metadata?: Iterable 29 | /** The ZIP *language encoding flag* will always be set when a filename was given as a string, 30 | * but when it is given as an ArrayView or ArrayBuffer, it depends on this option : 31 | * - `true`: always on (ArrayBuffers will *always* be flagged as UTF-8) — recommended, 32 | * - `false`: always off (ArrayBuffers will *never* be flagged as UTF-8), 33 | * - `undefined`: each ArrayBuffer will be tested and flagged if it is valid UTF-8. */ 34 | buffersAreUTF8?: boolean 35 | } 36 | 37 | function normalizeArgs(file: InputWithMeta | InputWithSizeMeta | InputWithoutMeta | InputFolder | JustMeta) { 38 | return file instanceof File || file instanceof Response 39 | ? [[file], [file]] as const 40 | : [[file.input, file.name, file.size], [file.input, file.lastModified, file.mode]] as const 41 | } 42 | 43 | function* mapMeta(files: Iterable) { 44 | // @ts-ignore type inference isn't good enough for this… yet… 45 | // but rewriting the code to be more explicit would make it longer 46 | for (const file of files) yield normalizeMetadata(...normalizeArgs(file)[0]) 47 | } 48 | 49 | function mapFiles(files: ForAwaitable) { 50 | // @ts-ignore TypeScript really needs to catch up 51 | const iterator = files[Symbol.iterator in files ? Symbol.iterator : Symbol.asyncIterator]() 52 | return { 53 | async next() { 54 | const res = await iterator.next() 55 | if (res.done) return res 56 | const [metaArgs, dataArgs] = normalizeArgs(res.value) 57 | // @ts-ignore type inference isn't good enough for this… yet… 58 | // but rewriting the code to be more explicit would make it longer 59 | return { done: false, value: Object.assign(normalizeInput(...dataArgs), normalizeMetadata(...metaArgs)) } 60 | }, 61 | throw: iterator.throw?.bind(iterator), 62 | [Symbol.asyncIterator]() { return this } 63 | } 64 | } 65 | 66 | /** Given an iterable of file metadata (or equivalent), 67 | * @returns the exact byte length of the Zip file that would be generated by `downloadZip`. 
*/ 68 | export const predictLength = (files: Iterable) => contentLength(mapMeta(files)) 69 | 70 | export function downloadZip(files: ForAwaitable, options: Options = {}) { 71 | const headers: Record = { "Content-Type": "application/zip", "Content-Disposition": "attachment" } 72 | if ((typeof options.length === "bigint" || Number.isInteger(options.length)) && options.length! > 0) headers["Content-Length"] = String(options.length) 73 | if (options.metadata) headers["Content-Length"] = String(predictLength(options.metadata)) 74 | return new Response(makeZip(files, options), { headers }) 75 | } 76 | 77 | export function makeZip(files: ForAwaitable, options: Options = {}) { 78 | const mapped = mapFiles(files) 79 | return ReadableFromIterator(loadFiles(mapped, options), mapped); 80 | } 81 | -------------------------------------------------------------------------------- /src/input.ts: -------------------------------------------------------------------------------- 1 | import { encodeString, makeUint8Array } from "./utils.ts" 2 | 3 | export type BufferLike = ArrayBuffer | string | ArrayBufferView | Blob 4 | export type StreamLike = ReadableStream | AsyncIterable 5 | export type ZipFileDescription = { 6 | modDate: Date 7 | bytes: ReadableStream | Uint8Array | Promise 8 | crc?: number // will be computed later 9 | mode: number // UNIX permissions, 0o664 by default 10 | isFile: true 11 | } 12 | export type ZipFolderDescription = { 13 | modDate: Date 14 | mode: number // UNIX permissions, 0o775 by default 15 | isFile: false 16 | } 17 | export type ZipEntryDescription = ZipFileDescription | ZipFolderDescription; 18 | 19 | /** The file name and modification date will be read from the input if it is a File or Response; 20 | * extra arguments can be given to override the input's metadata. 21 | * For other types of input, the `name` is required and `modDate` will default to *now*. 
22 | * @param modDate should be a Date or timestamp or anything else that works in `new Date()` 23 | */ 24 | export function normalizeInput(input: File | Response | BufferLike | StreamLike, modDate?: any, mode?: number): ZipFileDescription; 25 | export function normalizeInput(input: undefined, modDate?: any, mode?: number): ZipFolderDescription; 26 | export function normalizeInput(input?: File | Response | BufferLike | StreamLike, modDate?: any, mode?: number): ZipEntryDescription { 27 | if (modDate !== undefined && !(modDate instanceof Date)) modDate = new Date(modDate) 28 | 29 | const isFile = input !== undefined 30 | 31 | if(!mode) { 32 | mode = isFile ? 0o664 : 0o775 33 | } 34 | 35 | if (input instanceof File) return { 36 | isFile, 37 | modDate: modDate || new Date(input.lastModified), 38 | bytes: input.stream(), 39 | mode 40 | } 41 | if (input instanceof Response) return { 42 | isFile, 43 | modDate: modDate || new Date(input.headers.get("Last-Modified") || Date.now()), 44 | bytes: input.body!, 45 | mode 46 | } 47 | 48 | if (modDate === undefined) modDate = new Date() 49 | else if (isNaN(modDate)) throw new Error("Invalid modification date.") 50 | if (!isFile) return { isFile, modDate, mode } 51 | if (typeof input === "string") return { isFile, modDate, bytes: encodeString(input), mode } 52 | if (input instanceof Blob) return { isFile, modDate, bytes: input.stream(), mode } 53 | if (input instanceof Uint8Array || input instanceof ReadableStream) return { isFile, modDate, bytes: input, mode } 54 | if (input instanceof ArrayBuffer || ArrayBuffer.isView(input)) return { isFile, modDate, bytes: makeUint8Array(input), mode } 55 | if (Symbol.asyncIterator in input) return { isFile, modDate, bytes: ReadableFromIterator(input[Symbol.asyncIterator]()), mode } 56 | throw new TypeError("Unsupported input format.") 57 | } 58 | 59 | export function ReadableFromIterator(iter: AsyncIterator, upstream: AsyncIterator = iter) { 60 | return new ReadableStream({ 61 | async 
pull(controller) { 62 | let pushedSize = 0 63 | while (controller.desiredSize! > pushedSize) { 64 | const next = await iter.next() 65 | if (next.value) { 66 | const chunk = normalizeChunk(next.value) 67 | controller.enqueue(chunk) 68 | pushedSize += chunk.byteLength 69 | } 70 | else { 71 | controller.close() 72 | break 73 | } 74 | } 75 | }, 76 | cancel(err) { 77 | upstream.throw?.(err) 78 | } 79 | }) 80 | } 81 | 82 | export function normalizeChunk(chunk: BufferLike) { 83 | if (typeof chunk === "string") return encodeString(chunk) 84 | if (chunk instanceof Uint8Array) return chunk 85 | return makeUint8Array(chunk) 86 | } 87 | -------------------------------------------------------------------------------- /src/metadata.ts: -------------------------------------------------------------------------------- 1 | import { encodeString, makeUint8Array } from "./utils.ts" 2 | import type { BufferLike, StreamLike } from "./input.ts" 3 | 4 | export type Metadata = { 5 | encodedName: Uint8Array 6 | uncompressedSize?: bigint 7 | /** keep track of whether the filename was supplied as a Buffer-like object */ 8 | nameIsBuffer: boolean 9 | } 10 | 11 | /** The file name and modification date will be read from the input if it is a File or Response; 12 | * extra arguments can be given to override the input's metadata. 13 | * For other types of input, the `name` is required and `modDate` will default to *now*. 
14 | * @param name will be coerced, so… whatever, but Uint8Array or string would be nice 15 | */ 16 | export function normalizeMetadata(input?: File | Response | BufferLike | StreamLike, name?: unknown, size?: number | bigint): Metadata { 17 | let [encodedName, nameIsBuffer] = normalizeName(name) 18 | 19 | if (input instanceof File) return { 20 | encodedName: fixFilename(encodedName || encodeString(input.name)), uncompressedSize: BigInt(input.size), nameIsBuffer 21 | } 22 | if (input instanceof Response) { 23 | const contentDisposition = input.headers.get("content-disposition") 24 | const filename = contentDisposition && contentDisposition.match(/;\s*filename\*?\s*=\s*(?:UTF-\d+''|)["']?([^;"'\r\n]*)["']?(?:;|$)/i); 25 | const urlName = filename && filename[1] || input.url && new URL(input.url).pathname.split("/").findLast(Boolean) 26 | const decoded = urlName && decodeURIComponent(urlName) 27 | // @ts-ignore allow coercion from null to zero 28 | const length = size || +input.headers.get('content-length') 29 | return { encodedName: fixFilename(encodedName || encodeString(decoded)), uncompressedSize: BigInt(length), nameIsBuffer } 30 | } 31 | encodedName = fixFilename(encodedName, input !== undefined || size !== undefined) 32 | if (typeof input === "string") return { encodedName, uncompressedSize: BigInt(encodeString(input).length), nameIsBuffer } 33 | if (input instanceof Blob) return { encodedName, uncompressedSize: BigInt(input.size), nameIsBuffer } 34 | if (input instanceof ArrayBuffer || ArrayBuffer.isView(input)) return { encodedName, uncompressedSize: BigInt(input.byteLength), nameIsBuffer } 35 | // @ts-ignore 36 | return { encodedName, uncompressedSize: getUncompressedSize(input, size), nameIsBuffer } 37 | } 38 | 39 | function getUncompressedSize(input: any, size: number | bigint) { 40 | if (size > -1) { 41 | return BigInt(size); 42 | } 43 | return input ? 
undefined : 0n; 44 | } 45 | 46 | function normalizeName(name: unknown): [encodedName: Uint8Array | undefined, nameIsBuffer: boolean] { 47 | if (!name) return [undefined, false] 48 | if (name instanceof Uint8Array) return [name, true] 49 | if (ArrayBuffer.isView(name) || name instanceof ArrayBuffer) return [makeUint8Array(name), true] 50 | return [encodeString(name), false] 51 | } 52 | 53 | function fixFilename(encodedName: Uint8Array | undefined, isFile = true) { 54 | if (!encodedName || encodedName.every(c => c === 47)) throw new Error("The file must have a name.") 55 | // remove trailing slashes in files 56 | if (isFile) while (encodedName[encodedName.length-1] === 47) encodedName = encodedName.subarray(0, -1) 57 | else // add a trailing slash to folders 58 | if (encodedName[encodedName.length-1] !== 47) encodedName = new Uint8Array([...encodedName, 47]) 59 | return encodedName 60 | } 61 | -------------------------------------------------------------------------------- /src/polyfills.ts: -------------------------------------------------------------------------------- 1 | if (!("stream" in Blob.prototype)) Object.defineProperty(Blob.prototype, "stream", { 2 | value(this: Blob) { return new Response(this).body } 3 | }) 4 | 5 | if (!("setBigUint64" in DataView.prototype)) Object.defineProperty(DataView.prototype, "setBigUint64", { 6 | value(this: DataView, byteOffset: number, value: bigint, littleEndian?: boolean) { 7 | const lowWord = Number(value & 0xffffffffn) 8 | const highWord = Number(value >> 32n) 9 | this.setUint32(byteOffset + (littleEndian ? 0 : 4), lowWord, littleEndian) 10 | this.setUint32(byteOffset + (littleEndian ? 
4 : 0), highWord, littleEndian) 11 | } 12 | }) 13 | 14 | export {} 15 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | export const makeBuffer = (size: number) => new DataView(new ArrayBuffer(size)) 2 | export const makeUint8Array = (thing: any) => new Uint8Array(thing.buffer || thing) 3 | export const encodeString = (whatever: unknown) => new TextEncoder().encode(String(whatever)) 4 | export const clampInt32 = (n: bigint) => Math.min(0xffffffff, Number(n)) 5 | export const clampInt16 = (n: bigint) => Math.min(0xffff, Number(n)) 6 | -------------------------------------------------------------------------------- /src/worker.ts: -------------------------------------------------------------------------------- 1 | import { downloadZip } from "./index.ts" 2 | export default downloadZip 3 | -------------------------------------------------------------------------------- /src/zip.ts: -------------------------------------------------------------------------------- 1 | import { makeBuffer, makeUint8Array, clampInt16, clampInt32 } from "./utils.ts" 2 | import { crc32 } from "./crc32.ts" 3 | import { formatDOSDateTime } from "./datetime.ts" 4 | import type { ZipEntryDescription, ZipFileDescription } from "./input.ts" 5 | import { Metadata } from "./metadata.ts" 6 | import { Options } from "./index.ts" 7 | 8 | const fileHeaderSignature = 0x504b_0304, fileHeaderLength = 30 9 | const descriptorSignature = 0x504b_0708, descriptorLength = 16 10 | const centralHeaderSignature = 0x504b_0102, centralHeaderLength = 46 11 | const endSignature = 0x504b_0506, endLength = 22 12 | const zip64endRecordSignature = 0x504b_0606, zip64endRecordLength = 56 13 | const zip64endLocatorSignature = 0x504b_0607, zip64endLocatorLength = 20 14 | 15 | export type ForAwaitable = AsyncIterable | Iterable 16 | 17 | type Zip64FieldLength = 0 | 12 | 28 18 | 19 | export 
function contentLength(files: Iterable>) { 20 | let centralLength = BigInt(endLength) 21 | let offset = 0n 22 | let archiveNeedsZip64 = false 23 | for (const file of files) { 24 | if (!file.encodedName) throw new Error("Every file must have a non-empty name.") 25 | if (file.uncompressedSize === undefined) 26 | throw new Error(`Missing size for file "${new TextDecoder().decode(file.encodedName)}".`) 27 | const bigFile = file.uncompressedSize! >= 0xffffffffn 28 | const bigOffset = offset >= 0xffffffffn 29 | // @ts-ignore 30 | offset += BigInt(fileHeaderLength + descriptorLength + file.encodedName.length + (bigFile && 8)) + file.uncompressedSize 31 | // @ts-ignore 32 | centralLength += BigInt(file.encodedName.length + centralHeaderLength + (bigOffset * 12 | bigFile * 28)) 33 | archiveNeedsZip64 ||= bigFile 34 | } 35 | if (archiveNeedsZip64 || offset >= 0xffffffffn) 36 | centralLength += BigInt(zip64endRecordLength + zip64endLocatorLength) 37 | return centralLength + offset 38 | } 39 | 40 | export function flagNameUTF8({encodedName, nameIsBuffer}: Metadata, buffersAreUTF8?: boolean) { 41 | // @ts-ignore 42 | return (!nameIsBuffer || (buffersAreUTF8 ?? tryUTF8(encodedName))) * 0b1000 43 | } 44 | const UTF8Decoder = new TextDecoder('utf8', { fatal: true }) 45 | function tryUTF8(str: Uint8Array) { 46 | try { UTF8Decoder.decode(str) } 47 | catch { return false } 48 | return true 49 | } 50 | 51 | export async function* loadFiles(files: ForAwaitable, options: Options) { 52 | const centralRecord: Uint8Array[] = [] 53 | let offset = 0n 54 | let fileCount = 0n 55 | let archiveNeedsZip64 = false 56 | 57 | // write files 58 | for await (const file of files) { 59 | const flags = flagNameUTF8(file, options.buffersAreUTF8) 60 | yield fileHeader(file, flags) 61 | yield new Uint8Array(file.encodedName) 62 | if (file.isFile) { 63 | yield* fileData(file) 64 | } 65 | const bigFile = file.uncompressedSize! 
>= 0xffffffffn 66 | const bigOffset = offset >= 0xffffffffn 67 | // @ts-ignore 68 | const zip64HeaderLength = (bigOffset * 12 | bigFile * 28) as Zip64FieldLength 69 | yield dataDescriptor(file, bigFile) 70 | 71 | centralRecord.push(centralHeader(file, offset, flags, zip64HeaderLength)) 72 | centralRecord.push(file.encodedName) 73 | if (zip64HeaderLength) centralRecord.push(zip64ExtraField(file, offset, zip64HeaderLength)) 74 | if (bigFile) offset += 8n // because the data descriptor will have 64-bit sizes 75 | fileCount++ 76 | offset += BigInt(fileHeaderLength + descriptorLength + file.encodedName.length) + file.uncompressedSize! 77 | archiveNeedsZip64 ||= bigFile 78 | } 79 | 80 | // write central repository 81 | let centralSize = 0n 82 | for (const record of centralRecord) { 83 | yield record 84 | centralSize += BigInt(record.length) 85 | } 86 | 87 | if (archiveNeedsZip64 || offset >= 0xffffffffn) { 88 | const endZip64 = makeBuffer(zip64endRecordLength + zip64endLocatorLength) 89 | // 4.3.14 Zip64 end of central directory record 90 | endZip64.setUint32(0, zip64endRecordSignature) 91 | endZip64.setBigUint64(4, BigInt(zip64endRecordLength - 12), true) 92 | endZip64.setUint32(12, 0x2d03_2d_00) // UNIX app version 4.5 | ZIP version 4.5 93 | // leave 8 bytes at zero 94 | endZip64.setBigUint64(24, fileCount, true) 95 | endZip64.setBigUint64(32, fileCount, true) 96 | endZip64.setBigUint64(40, centralSize, true) 97 | endZip64.setBigUint64(48, offset, true) 98 | 99 | // 4.3.15 Zip64 end of central directory locator 100 | endZip64.setUint32(56, zip64endLocatorSignature) 101 | // leave 4 bytes at zero 102 | endZip64.setBigUint64(64, offset + centralSize, true) 103 | endZip64.setUint32(72, 1, true) 104 | yield makeUint8Array(endZip64) 105 | } 106 | 107 | const end = makeBuffer(endLength) 108 | end.setUint32(0, endSignature) 109 | // skip 4 useless bytes here 110 | end.setUint16(8, clampInt16(fileCount), true) 111 | end.setUint16(10, clampInt16(fileCount), true) 112 | 
end.setUint32(12, clampInt32(centralSize), true) 113 | end.setUint32(16, clampInt32(offset), true) 114 | // leave comment length = zero (2 bytes) 115 | yield makeUint8Array(end) 116 | } 117 | 118 | export function fileHeader(file: ZipEntryDescription & Metadata, flags = 0) { 119 | const header = makeBuffer(fileHeaderLength) 120 | header.setUint32(0, fileHeaderSignature) 121 | header.setUint32(4, 0x2d_00_0800 | flags) // ZIP version 4.5 | flags, bit 3 on = size and CRCs will be zero 122 | // leave compression = zero (2 bytes) until we implement compression 123 | formatDOSDateTime(file.modDate, header, 10) 124 | // leave CRC = zero (4 bytes) because we'll write it later, in the central repo 125 | // leave lengths = zero (2x4 bytes) because we'll write them later, in the central repo 126 | header.setUint16(26, file.encodedName.length, true) 127 | // leave extra field length = zero (2 bytes) 128 | return makeUint8Array(header) 129 | } 130 | 131 | export async function* fileData(file: ZipFileDescription & Metadata) { 132 | let { bytes } = file 133 | if ("then" in bytes) bytes = await bytes 134 | if (bytes instanceof Uint8Array) { 135 | yield bytes 136 | file.crc = crc32(bytes, 0) 137 | file.uncompressedSize = BigInt(bytes.length) 138 | } else { 139 | file.uncompressedSize = 0n 140 | const reader = bytes.getReader() 141 | while (true) { 142 | const { value, done } = await reader.read() 143 | if (done) break 144 | file.crc = crc32(value!, file.crc) 145 | file.uncompressedSize += BigInt(value!.length) 146 | yield value! 147 | } 148 | } 149 | } 150 | 151 | export function dataDescriptor(file: ZipEntryDescription & Metadata, needsZip64: boolean) { 152 | const header = makeBuffer(descriptorLength + (needsZip64 ? 8 : 0)) 153 | header.setUint32(0, descriptorSignature) 154 | header.setUint32(4, file.isFile ? file.crc! 
: 0, true) 155 | if (needsZip64) { 156 | header.setBigUint64(8, file.uncompressedSize!, true) 157 | header.setBigUint64(16, file.uncompressedSize!, true) 158 | } else { 159 | header.setUint32(8, clampInt32(file.uncompressedSize!), true) 160 | header.setUint32(12, clampInt32(file.uncompressedSize!), true) 161 | } 162 | return makeUint8Array(header) 163 | } 164 | 165 | export function centralHeader(file: ZipEntryDescription & Metadata, offset: bigint, flags = 0, zip64HeaderLength: Zip64FieldLength = 0) { 166 | const header = makeBuffer(centralHeaderLength) 167 | header.setUint32(0, centralHeaderSignature) 168 | header.setUint32(4, 0x2d03_2d_00) // UNIX app version 4.5 | ZIP version 4.5 169 | header.setUint16(8, 0x0800 | flags) // flags, bit 3 on 170 | // leave compression = zero (2 bytes) until we implement compression 171 | formatDOSDateTime(file.modDate, header, 12) 172 | header.setUint32(16, file.isFile ? file.crc! : 0, true) 173 | header.setUint32(20, clampInt32(file.uncompressedSize!), true) 174 | header.setUint32(24, clampInt32(file.uncompressedSize!), true) 175 | header.setUint16(28, file.encodedName.length, true) 176 | header.setUint16(30, zip64HeaderLength, true) 177 | // useless disk fields = zero (4 bytes) 178 | // useless attributes = zero (4 bytes) 179 | header.setUint16(40, file.mode | (file.isFile ? 
0o100000 : 0o040000), true) 180 | header.setUint32(42, clampInt32(offset), true) // offset 181 | return makeUint8Array(header) 182 | } 183 | 184 | export function zip64ExtraField(file: ZipEntryDescription & Metadata, offset: bigint, zip64HeaderLength: Exclude) { 185 | const header = makeBuffer(zip64HeaderLength) 186 | header.setUint16(0, 1, true) 187 | header.setUint16(2, zip64HeaderLength - 4, true) 188 | if (zip64HeaderLength & 16) { 189 | header.setBigUint64(4, file.uncompressedSize!, true) 190 | header.setBigUint64(12, file.uncompressedSize!, true) 191 | } 192 | header.setBigUint64(zip64HeaderLength - 8, offset, true) 193 | return makeUint8Array(header) 194 | } 195 | -------------------------------------------------------------------------------- /terser.json: -------------------------------------------------------------------------------- 1 | { 2 | "ecma": 2020, 3 | "compress": { "inline": 0, "unsafe_arrows": true, "booleans_as_integers": true }, 4 | "mangle": { 5 | "reserved": ["m", "c"], 6 | "properties": { 7 | "regex": "^crc$|^uncompressedSize$|^modDate$|^bytes$|^encodedName$|^nameIsBuffer$" 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /test/crc32.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" 2 | import { crc32, CRC_TABLE } from "../src/crc32.ts" 3 | 4 | const table = await Deno.readFile("./test/table.array") 5 | 6 | Deno.test("the CRC32 module precomputes CRCs for each byte using the polynomial 0xEDB88320", () => { 7 | const actual = new Uint8Array(CRC_TABLE.buffer) 8 | const expected = table.slice(0, 0x400) 9 | assertEquals(actual, expected) 10 | }) 11 | 12 | Deno.test("the CRC32 for an empty file", () => { 13 | assertEquals(crc32(new Uint8Array(0), 0), 0) 14 | }) 15 | 16 | Deno.test("the CRC32 for short files", () => { 17 | assertEquals(crc32(new 
TextEncoder().encode("Hello world!"), 0), 0x1b851995) 18 | assertEquals(crc32(new TextEncoder().encode("WebAssmebly is fun. Also 10x faster than JavaScript for this."), 0), 0x8a89a52a) 19 | assertEquals(crc32(new Uint8Array(table), 0), 0x1a76768f) 20 | }) 21 | 22 | Deno.test("the CRC32 for files larger than 64kB", () => { 23 | const zipSpec = Deno.readFileSync("./test/APPNOTE.TXT") 24 | assertEquals(crc32(new Uint8Array(zipSpec), 0), 0xbb3afe3f) 25 | }) 26 | -------------------------------------------------------------------------------- /test/datetime.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" 2 | import { formatDOSDateTime } from "../src/datetime.ts" 3 | import { makeBuffer } from "../src/utils.ts" 4 | 5 | Deno.test("the datetime encoding to local 32-bit DOS format", () => { 6 | const date = new Date("2020-02-15T11:24:18") 7 | const actual = makeBuffer(4) 8 | formatDOSDateTime(date, actual) 9 | const expected = 0x095b4f50 10 | assertEquals(actual.getUint32(0), expected) 11 | }) 12 | -------------------------------------------------------------------------------- /test/integration.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" 2 | import { downloadZip } from "../src/index.ts" 3 | 4 | const zipSpec = Deno.readFileSync("./test/APPNOTE.TXT") 5 | const specName = new TextEncoder().encode("APPNOTE.TXT") 6 | const specDate = new Date("2019-04-26T02:00") 7 | 8 | Deno.test("downloadZip propagates pulling and cancellation", async (t) => { 9 | const thrown: any[] = [] 10 | let pulled = 0 11 | const input: IterableIterator<{ input: Uint8Array, name: Uint8Array, lastModified: Date }> = { 12 | next() { 13 | if (pulled++) return { done: true, value: undefined } 14 | return { done: false, value: { input: zipSpec, name: 
specName, lastModified: specDate } } 15 | }, 16 | throw(err: any) { 17 | thrown.push(err) 18 | return { done: true, value: undefined } 19 | }, 20 | [Symbol.iterator]() { 21 | return this 22 | } 23 | } 24 | const response = downloadZip(input) 25 | const reader = response.body!.getReader() 26 | await t.step("it does not pull from its input until someone reads the output", () => { 27 | assertEquals(pulled, 0) 28 | }) 29 | await t.step("it pulls lazily from the input iterable", async () => { 30 | for (let i = 0; i < 2; i++) await reader.read() 31 | assertEquals(pulled, 1) 32 | for (let i = 0; i < 4; i++) await reader.read() 33 | assertEquals(pulled, 2) 34 | assertEquals(thrown.length, 0) 35 | }) 36 | await t.step("it cancels the input iterable when its output is cancelled", async () => { 37 | const error = new Error("I don't want to ZIP anymore !") 38 | await reader.cancel(error) 39 | assertEquals(thrown.length, 1) 40 | assertEquals(thrown[0], error) 41 | }) 42 | }) 43 | -------------------------------------------------------------------------------- /test/metadata.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals, assertThrows } from "https://deno.land/std@0.132.0/testing/asserts.ts" 2 | import { normalizeMetadata } from "../src/metadata.ts" 3 | 4 | const encodedName = new TextEncoder().encode("test.txt") 5 | const encodedFolderName = new TextEncoder().encode("root/folder/") 6 | 7 | /************************************** Responses **************************************/ 8 | 9 | Deno.test("normalizeMetadata needs a filename along Responses with insufficient metadata", () => { 10 | assertThrows(() => normalizeMetadata(new Response("four", { 11 | headers: { "content-disposition": "attachment" } 12 | })), Error, "The file must have a name.") 13 | }) 14 | 15 | Deno.test("normalizeMetadata guesses filename from Content-Disposition", () => { 16 | const metadata = normalizeMetadata(new Response("four", { 17 | 
headers: { "content-disposition": "attachment; filename=test.txt; size=0" } 18 | })) 19 | assertEquals(metadata, { uncompressedSize: 0n, encodedName, nameIsBuffer: false }) 20 | }) 21 | 22 | Deno.test("normalizeMetadata guesses filename from non latin Content-Disposition", () => { 23 | const metadata = normalizeMetadata(new Response("four", { 24 | headers: { "content-disposition": "attachment; filename* = UTF-8''%CF%8C%CE%BD%CE%BF%CE%BC%CE%B1%20%CE%B1%CF%81%CF%87%CE%B5%CE%AF%CE%BF%CF%85.txt" } 25 | })) 26 | assertEquals(metadata, { uncompressedSize: 0n,encodedName: new TextEncoder().encode("όνομα αρχείου.txt"), nameIsBuffer: false }) 27 | }) 28 | 29 | 30 | Deno.test("normalizeMetadata guesses filename from a Response URL", () => { 31 | const response = Object.create(Response.prototype, { 32 | url: { get() { return "https://example.com/path/test.txt" } }, 33 | headers: { get() { return new Headers() } } 34 | }) 35 | const metadata = normalizeMetadata(response) 36 | assertEquals(metadata, { uncompressedSize: 0n, encodedName, nameIsBuffer: false }) 37 | }) 38 | 39 | Deno.test("normalizeMetadata guesses filename from a Response URL with trailing slash", () => { 40 | const response = Object.create(Response.prototype, { 41 | url: { get() { return "https://example.com/path/test.txt/" } }, 42 | headers: { get() { return new Headers() } } 43 | }) 44 | const metadata = normalizeMetadata(response) 45 | assertEquals(metadata, { uncompressedSize: 0n, encodedName, nameIsBuffer: false }) 46 | }) 47 | 48 | /************************************** Files **************************************/ 49 | 50 | Deno.test("normalizeMetadata reads filename and size from a File", () => { 51 | const metadata = normalizeMetadata(new File(["four"], "test.txt")) 52 | assertEquals(metadata, { uncompressedSize: 4n, encodedName, nameIsBuffer: false }) 53 | }) 54 | 55 | /************************************** Folders **************************************/ 56 | 57 | Deno.test("normalizeMetadata fixes 
trailing slashes in folder names", () => { 58 | const metadata = normalizeMetadata(undefined, new TextEncoder().encode("root/folder")) 59 | assertEquals(metadata, { uncompressedSize: 0n, encodedName: encodedFolderName, nameIsBuffer: true }) 60 | }) 61 | 62 | Deno.test("normalizeMetadata fixes trailing slashes in file names", () => { 63 | const metadata = normalizeMetadata(undefined, encodedFolderName, 0n) 64 | assertEquals(metadata, { uncompressedSize: 0n, encodedName: new TextEncoder().encode("root/folder"), nameIsBuffer: true }) 65 | }) 66 | -------------------------------------------------------------------------------- /test/table.array: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Touffy/client-zip/7fcc01b4b72f8bf0c8e9e26b5d29e5c6b22faab0/test/table.array -------------------------------------------------------------------------------- /test/zip.test.ts: -------------------------------------------------------------------------------- 1 | import { assertEquals, assertStrictEquals } from "https://deno.land/std@0.132.0/testing/asserts.ts" 2 | import { Buffer } from "https://deno.land/std@0.132.0/io/buffer.ts" 3 | import { fileHeader, fileData, dataDescriptor, centralHeader, zip64ExtraField, contentLength, flagNameUTF8 } from "../src/zip.ts" 4 | import type { ZipFileDescription, ZipFolderDescription } from "../src/input.ts" 5 | import type { Metadata } from "../src/metadata.ts" 6 | 7 | const BufferFromHex = (hex: string) => new Uint8Array(Array.from(hex.matchAll(/.{2}/g), ([s]) => parseInt(s, 16))) 8 | 9 | const zipSpec = Deno.readFileSync("./test/APPNOTE.TXT") 10 | const specName = new TextEncoder().encode("APPNOTE.TXT") 11 | const specDate = new Date("2019-04-26T02:00") 12 | const invalidUTF8 = BufferFromHex("fe") 13 | 14 | const baseFile: ZipFileDescription & Metadata = Object.freeze( 15 | { isFile: true, bytes: new Uint8Array(zipSpec), encodedName: specName, nameIsBuffer: false, modDate: 
specDate, mode: 0o664 }) 16 | 17 | const baseFolder: ZipFolderDescription & Metadata = Object.freeze( 18 | { isFile: false, encodedName: new TextEncoder().encode("folder"), nameIsBuffer: false, modDate: specDate, mode: 0o775 }) 19 | 20 | Deno.test("the ZIP fileHeader function makes file headers", () => { 21 | const file = {...baseFile} 22 | const actual = fileHeader(file) 23 | const expected = BufferFromHex("504b03042d000800000000109a4e0000000000000000000000000b000000") 24 | assertEquals(actual, expected) 25 | }) 26 | 27 | Deno.test("the ZIP fileHeader function makes folder headers", () => { 28 | const folder = {...baseFolder} 29 | const actual = fileHeader(folder) 30 | const expected = BufferFromHex("504b03042d000800000000109a4e00000000000000000000000006000000") 31 | assertEquals(actual, expected) 32 | }) 33 | 34 | Deno.test("the ZIP fileHeader function merges extra flags", () => { 35 | const file = {...baseFile} 36 | const actual = fileHeader(file, 0x808) 37 | const expected = BufferFromHex("504b03042d000808000000109a4e0000000000000000000000000b000000") 38 | assertEquals(actual, expected) 39 | }) 40 | 41 | Deno.test("the ZIP fileData function yields all the file's data", async () => { 42 | const file = {...baseFile} 43 | const actual = new Buffer() 44 | for await (const chunk of fileData(file)) actual.writeSync(chunk) 45 | assertEquals(actual.bytes({copy: false}), zipSpec) 46 | }) 47 | 48 | Deno.test("the ZIP fileData function sets the file's size and CRC properties", async () => { 49 | const file = {...baseFile} 50 | assertStrictEquals(file.uncompressedSize, undefined) 51 | assertStrictEquals(file.crc, undefined) 52 | for await (const _ of fileData(file)); 53 | assertStrictEquals(file.uncompressedSize, BigInt(zipSpec.length)) 54 | assertStrictEquals(file.crc, 0xbb3afe3f) 55 | }) 56 | 57 | Deno.test("the ZIP dataDescriptor function makes data descriptors", () => { 58 | const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} 59 | const actual = 
dataDescriptor(file, false) 60 | const expected = BufferFromHex("504b0708785634124030201040302010") 61 | assertEquals(actual, expected) 62 | }) 63 | 64 | Deno.test("the ZIP dataDescriptor function makes ZIP64 data descriptors", () => { 65 | const file = {...baseFile, uncompressedSize: 0x110203040n, crc: 0x12345678} 66 | const actual = dataDescriptor(file, true) 67 | const expected = BufferFromHex("504b07087856341240302010010000004030201001000000") 68 | assertEquals(actual, expected) 69 | }) 70 | 71 | Deno.test("the ZIP dataDescriptor function makes folder data descriptors", () => { 72 | const actual = dataDescriptor(baseFolder, false) 73 | const expected = BufferFromHex("504b0708000000000000000000000000") 74 | assertEquals(actual, expected) 75 | }) 76 | 77 | Deno.test("the ZIP centralHeader function makes central record file headers", () => { 78 | const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} 79 | const offset = 0x01020304n 80 | const actual = centralHeader(file, offset, 0) 81 | const expected = BufferFromHex("504b01022d032d000800000000109a4e7856341240302010403020100b0000000000000000000000b48104030201") 82 | assertEquals(actual, expected) 83 | }) 84 | 85 | Deno.test("the ZIP centralHeader function merges extra flags", () => { 86 | const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} 87 | const offset = 0x01020304n 88 | const actual = centralHeader(file, offset, 0x808) 89 | const expected = BufferFromHex("504b01022d032d000808000000109a4e7856341240302010403020100b0000000000000000000000b48104030201") 90 | assertEquals(actual, expected) 91 | }) 92 | 93 | Deno.test("the ZIP centralHeader function makes ZIP64 central record file headers", () => { 94 | const file = {...baseFile, uncompressedSize: 0x110203040n, crc: 0x12345678} 95 | const offset = 0x101020304n 96 | const actual = centralHeader(file, offset, 0, 28) 97 | const expected = 
BufferFromHex("504b01022d032d000800000000109a4e78563412ffffffffffffffff0b001c000000000000000000b481ffffffff") 98 | assertEquals(actual, expected) 99 | }) 100 | 101 | Deno.test("the ZIP centralHeader function makes central record folder headers", () => { 102 | const offset = 0x01020304n 103 | const actual = centralHeader(baseFolder, offset, 0, 0) 104 | const expected = BufferFromHex("504b01022d032d000800000000109a4e000000000000000000000000060000000000000000000000fd4104030201") 105 | assertEquals(actual, expected) 106 | }) 107 | 108 | Deno.test("the ZIP zip64ExtraField function makes Zip64 extra fields", () => { 109 | const file = {...baseFile, uncompressedSize: 0x10203040n, crc: 0x12345678} 110 | const offset = 0x01020304n 111 | const actual = zip64ExtraField(file, offset, 28) 112 | const expected = BufferFromHex("01001800403020100000000040302010000000000403020100000000") 113 | assertEquals(actual, expected) 114 | }) 115 | 116 | Deno.test("the contentLength function accurately predicts the length of an archive", () => { 117 | const actual = contentLength([{uncompressedSize: BigInt(zipSpec.byteLength), encodedName: specName}]) 118 | const expected = 171462n 119 | assertEquals(actual, expected) 120 | }) 121 | 122 | Deno.test("the contentLength function does not throw on zero-length files", () => { 123 | const actual = contentLength([{uncompressedSize: 0n, encodedName: specName}]) 124 | const expected = 136n 125 | assertEquals(actual, expected) 126 | }) 127 | 128 | Deno.test("the contentLength function accurately predicts the length of a large archive", () => { 129 | const actual = contentLength([ 130 | {uncompressedSize: 0x110203040n, encodedName: specName}, 131 | {uncompressedSize: BigInt(zipSpec.byteLength), encodedName: specName}, 132 | ]) 133 | const expected = 4565683956n 134 | assertEquals(actual, expected) 135 | }) 136 | 137 | Deno.test("the flagNameUTF8 function always turns on bit 11 if the name was not a Buffer", () => { 138 | const actual = 
flagNameUTF8({encodedName: specName, nameIsBuffer: false}) 139 | assertEquals(actual, 0b1000) 140 | assertEquals(flagNameUTF8({encodedName: specName, nameIsBuffer: false}, false), 0b1000) 141 | assertEquals(flagNameUTF8({encodedName: specName, nameIsBuffer: false}, true), 0b1000) 142 | assertEquals(flagNameUTF8({encodedName: invalidUTF8, nameIsBuffer: false}, false), 0b1000) 143 | assertEquals(flagNameUTF8({encodedName: invalidUTF8, nameIsBuffer: false}, true), 0b1000) 144 | }) 145 | 146 | Deno.test("the flagNameUTF8 function turns on bit 11 if the name is valid UTF-8", () => { 147 | const actual = flagNameUTF8({encodedName: specName, nameIsBuffer: true}) 148 | assertEquals(actual, 0b1000) 149 | }) 150 | 151 | Deno.test("the flagNameUTF8 function turns off bit 11 if the name is invalid UTF-8", () => { 152 | const actual = flagNameUTF8({encodedName: invalidUTF8, nameIsBuffer: true}) 153 | assertEquals(actual, 0) 154 | }) 155 | 156 | Deno.test("the flagNameUTF8 function does whatever the option says about Buffers", () => { 157 | assertEquals(flagNameUTF8({encodedName: specName, nameIsBuffer: true}, false), 0) 158 | assertEquals(flagNameUTF8({encodedName: specName, nameIsBuffer: true}, true), 0b1000) 159 | assertEquals(flagNameUTF8({encodedName: invalidUTF8, nameIsBuffer: true}, false), 0) 160 | assertEquals(flagNameUTF8({encodedName: invalidUTF8, nameIsBuffer: true}, true), 0b1000) 161 | }) 162 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "baseUrl": "src", 4 | "moduleResolution": "node", 5 | "module": "ES2015", 6 | "target": "ES2020", 7 | "strictFunctionTypes": true, 8 | "noImplicitReturns": true, 9 | "lib": ["WebWorker"], 10 | "paths": { 11 | "*.ts": ["*"] 12 | } 13 | }, 14 | "include": ["src/*.ts"], 15 | } 16 | --------------------------------------------------------------------------------