├── .gitignore ├── index.d.mts ├── index.d.ts ├── LICENSE ├── package.json ├── index.html ├── crc32.ts ├── README.md ├── index.mjs ├── index.js ├── test.ts ├── index.ts ├── test.js └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | testfiles 4 | -------------------------------------------------------------------------------- /index.d.mts: -------------------------------------------------------------------------------- 1 | 2 | export interface UncompressedFile { 3 | path: string; 4 | data: string | Uint8Array | ArrayBuffer; 5 | lastModified?: Date; 6 | } 7 | 8 | /** 9 | * 10 | * @param inputFiles Array of input file objects, of the form `{ path: string; data: string | Uint8Array | ArrayBuffer; lastModified?: Date; }`. 11 | * @param compressWhenPossible If `false`, files are archived without compression. Default is `true`. 12 | */ 13 | export function createZip(inputFiles: UncompressedFile[], compressWhenPossible?: boolean): Promise<Uint8Array>; 14 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface UncompressedFile { 3 | path: string; 4 | data: string | Uint8Array | ArrayBuffer; 5 | lastModified?: Date; 6 | } 7 | 8 | /** 9 | * 10 | * @param inputFiles Array of input file objects, of the form `{ path: string; data: string | Uint8Array | ArrayBuffer; lastModified?: Date; }`. 11 | * @param compressWhenPossible If `false`, files are archived without compression. Default is `true`. 
12 | */ 13 | export function createZip(inputFiles: UncompressedFile[], compressWhenPossible?: boolean): Promise<Uint8Array>; 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (C) George MacKerron 2024 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 14 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "littlezipper", 3 | "version": "0.1.5", 4 | "description": "Extremely simple .zip file creation with no dependencies", 5 | "files": [ 6 | "index.js", 7 | "index.mjs", 8 | "index.d.ts", 9 | "index.d.mts", 10 | "README.md" 11 | ], 12 | "exports": { 13 | "require": "./index.js", 14 | "import": "./index.mjs" 15 | }, 16 | "scripts": { 17 | "buildCjs": "esbuild index.ts --bundle --format=cjs --minify --outfile=index.js", 18 | "buildEsm": "esbuild index.ts --bundle --format=esm --minify --outfile=index.mjs", 19 | "buildTest": "esbuild test.ts --bundle --platform=node --outfile=test.js", 20 | "build": "npm run buildCjs && npm run buildEsm && cp index.d.ts index.d.mts", 21 | "test": "tsc --noEmit && npm run buildTest && node test.js" 22 | }, 23 | "keywords": [ 24 | "zip", 25 | "compressionstream" 26 | ], 27 | "author": "George MacKerron", 28 | "license": "Apache-2.0", 29 | "devDependencies": { 30 | 
"@types/node": "^22.13.1", 31 | "esbuild": "^0.24.2", 32 | "typescript": "^5.3.3" 33 | }, 34 | "repository": { 35 | "type": "git", 36 | "url": "https://github.com/jawj/littlezipper" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | -------------------------------------------------------------------------------- /crc32.ts: -------------------------------------------------------------------------------- 1 | /* 2 | This implementation is adapted from https://github.com/SheetJS/js-crc32. 3 | 4 | Copyright (C) 2014-present SheetJS LLC 5 | 6 | Licensed under the Apache License, Version 2.0 (the "License"); 7 | you may not use this file except in compliance with the License. 8 | You may obtain a copy of the License at 9 | 10 | http://www.apache.org/licenses/LICENSE-2.0 11 | 12 | Unless required by applicable law or agreed to in writing, software 13 | distributed under the License is distributed on an "AS IS" BASIS, 14 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | See the License for the specific language governing permissions and 16 | limitations under the License. 17 | */ 18 | 19 | let T: Int32Array[]; 20 | 21 | const init = () => { 22 | const 23 | i32 = Int32Array, 24 | T0 = new i32(256), 25 | t = new i32(4096); 26 | 27 | let c: number, n: number, v: number; 28 | 29 | for (n = 0; n < 256; n++) { 30 | c = n; 31 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); // nice bit of loop-unrolling 32 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 33 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 34 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 35 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 36 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 37 | c = ((c & 1) ? (-306674912 ^ (c >>> 1)) : (c >>> 1)); 38 | t[n] = T0[n] = ((c & 1) ? 
(-306674912 ^ (c >>> 1)) : (c >>> 1)); 39 | } 40 | for (n = 0; n < 256; n++) { 41 | v = T0[n]; 42 | for (c = 256 + n; c < 4096; c += 256) v = t[c] = (v >>> 8) ^ T0[v & 255]; 43 | } 44 | T = [T0]; 45 | for (n = 1; n < 16; n++) T[n] = t.subarray(n * 256, (n + 1) * 256); // slice lookups are not faster 46 | } 47 | 48 | export const crc32 = (B: Uint8Array, seed = 0) => { 49 | if (!T) init(); 50 | const [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, Ta, Tb, Tc, Td, Te, Tf] = T; 51 | let 52 | crc = seed ^ -1, 53 | l = B.length - 15, 54 | i = 0; 55 | 56 | for (; i < l;) crc = 57 | Tf[B[i++] ^ (crc & 255)] ^ 58 | Te[B[i++] ^ ((crc >> 8) & 255)] ^ 59 | Td[B[i++] ^ ((crc >> 16) & 255)] ^ 60 | Tc[B[i++] ^ (crc >>> 24)] ^ 61 | Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ 62 | T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ 63 | T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; 64 | l += 15; 65 | while (i < l) crc = (crc >>> 8) ^ T0[(crc ^ B[i++]) & 255]; 66 | return ~crc; 67 | } 68 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # littlezipper 2 | 3 | This project uses the `CompressionStream` API — supported by all recent browsers, Node, Deno [and now Bun](https://bun.com/blog/bun-v1.3.3#compressionstream-and-decompressionstream) — to create `.zip` files. 4 | 5 | This is not wholly trivial since, a little frustratingly, `CompressionStream` can natively produce `.gz` format data but not `.zip`. Thus, we pick out both the deflated data and the CRC from the `.gz` stream, and write them into a `.zip` file instead. 6 | 7 | We don't actually have to implement any compression in JavaScript, so the library is fast and small. 8 | 9 | Where `CompressionStream` is not available, we fall back to producing uncompressed `.zip` files (and calculate the CRC in heavily-optimized JavaScript). 
This may be acceptable if you are creating a `.zip` that is actually something else, such as an `.xlsx`, `.apk` or `.xpi`. 10 | 11 | The library is currently suitable for small- and medium-sized files, since it briefly requires just over 2x the total uncompressed size of your files in memory. That's because you pass it an array of files data, and for the `.zip` output it allocates a `Uint8Array` backed by a worst-case `ArrayBuffer`, which is the size of all the uncompressed data plus a little more for headers. 12 | 13 | Potential future improvements could include implementing a `TransformStream` instead, which could enable smaller memory use and larger file sizes. However, the `.zip` format annoyingly puts the CRC and compressed data size _before_ the compressed data, which limits opportunities for memory saving. 14 | 15 | ## Installation 16 | 17 | ```bash 18 | npm install littlezipper 19 | ``` 20 | 21 | TypeScript types are included. 22 | 23 | ## Usage 24 | 25 | The library exposes a single function, `createZip`. 26 | 27 | ```typescript 28 | import { createZip } from 'littlezipper'; 29 | 30 | const zip = await createZip([ 31 | { path: 'test.txt', data: 'This is a test', lastModified: new Date('2020-01-01T00:00:00') }, 32 | { path: 'test.bin', data: new Uint8Array([1, 2, 3]) }, 33 | ]); 34 | ``` 35 | 36 | The first argument to `createZip` is an array of file entries. Each entry must have `path` (`string`) and `data` (`string`, `Uint8Array` or `ArrayBuffer`) keys, and may have a `lastModified` (`Date`) key, which otherwise defaults to the current date and time. 37 | 38 | The optional second argument defines whether we attempt to deflate the data (default: `true`). If `false`, the resulting `.zip` file will be as large as the input data plus a few bytes for headers. 39 | 40 | ## License 41 | 42 | [Apache License, Version 2.0](LICENSE). 
43 | -------------------------------------------------------------------------------- /index.mjs: -------------------------------------------------------------------------------- 1 | var P,Y=()=>{let f=Int32Array,l=new f(256),c=new f(4096),r,n,i;for(n=0;n<256;n++)r=n,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,c[n]=l[n]=r&1?-306674912^r>>>1:r>>>1;for(n=0;n<256;n++)for(i=l[n],r=256+n;r<4096;r+=256)i=c[r]=i>>>8^l[i&255];for(P=[l],n=1;n<16;n++)P[n]=c.subarray(n*256,(n+1)*256)},N=(f,l=0)=>{P||Y();let[c,r,n,i,S,U,C,k,g,I,M,t,e,p,s,d]=P,o=l^-1,m=f.length-15,a=0;for(;a>8&255]^p[f[a++]^o>>16&255]^e[f[a++]^o>>>24]^t[f[a++]]^M[f[a++]]^I[f[a++]]^g[f[a++]]^k[f[a++]]^C[f[a++]]^U[f[a++]]^S[f[a++]]^i[f[a++]]^n[f[a++]]^r[f[a++]]^c[f[a++]];for(m+=15;a>>8^c[(o^f[a++])&255];return~o};var j=typeof CompressionStream<"u",R=new TextEncoder,W=f=>f.reduce((l,c)=>l+c.length,0),Z=Uint8Array;function q(f){let l=new CompressionStream("gzip"),c=l.writable.getWriter(),r=l.readable.getReader();return c.write(f),c.close(),()=>r.read()}async function Q(f,l=!0,c=q){let r=[],n=j&&l,i=f.length,S=f.map(s=>R.encode(s.path)),U=f.map(({data:s})=>typeof s=="string"?R.encode(s):s instanceof ArrayBuffer?new Z(s):s),C=W(U),k=W(S),g=i*46+k,I=C+i*30+k+g+22,M=new Date,t=new Z(I),e=0;for(let s=0;s>8,t[e++]=H&255,t[e++]=H>>8;let x=e;if(e+=8,t[e++]=a&255,t[e++]=a>>8&255,t[e++]=a>>16&255,t[e++]=a>>24,t[e++]=o&255,t[e++]=o>>8,t[e++]=t[e++]=0,t.set(d,e),e+=o,n){let y=e,E=c(m),b,A=0,v=0;e:{for(;;){let u=await E();if(u.done)throw new Error("Bad gzip data");if(b=u.value,A=v,v=A+b.length,A<=3&&v>3&&b[3-A]&30){z=!0;break e}if(v>=10){b=b.subarray(10-A);break}}for(;;){let u=e-y,w=b.length;if(u+w>=a+8){z=!0;break e}t.set(b,e),e+=w;let O=await E();if(O.done)break;b=O.value}}if(z)for(;;){let u=b.length,w=8-u,O=e;e=y;for(let 
D=0;D<8;D++)t[e++]=D>8&255,t[x++]=y>>16&255,t[x++]=y>>24}t[x++]=T&255,t[x++]=T>>8&255,t[x++]=T>>16&255,t[x++]=T>>24}let p=e;for(let s=0;s>8&255,t[e++]=d>>16&255,t[e++]=d>>24,t.set(o,e),e+=m}return t[e++]=80,t[e++]=75,t[e++]=5,t[e++]=6,t[e++]=t[e++]=t[e++]=t[e++]=0,t[e++]=i&255,t[e++]=i>>8,t[e++]=i&255,t[e++]=i>>8,t[e++]=g&255,t[e++]=g>>8&255,t[e++]=g>>16&255,t[e++]=g>>24,t[e++]=p&255,t[e++]=p>>8&255,t[e++]=p>>16&255,t[e++]=p>>24,t[e++]=t[e++]=0,t.subarray(0,e)}export{Q as createZip}; 2 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict";var F=Object.defineProperty;var j=Object.getOwnPropertyDescriptor;var q=Object.getOwnPropertyNames;var J=Object.prototype.hasOwnProperty;var K=(f,s)=>{for(var i in s)F(f,i,{get:s[i],enumerable:!0})},Q=(f,s,i,r)=>{if(s&&typeof s=="object"||typeof s=="function")for(let a of q(s))!J.call(f,a)&&a!==i&&F(f,a,{get:()=>s[a],enumerable:!(r=j(s,a))||r.enumerable});return f};var V=f=>Q(F({},"__esModule",{value:!0}),f);var ee={};K(ee,{createZip:()=>B});module.exports=V(ee);var P,X=()=>{let f=Int32Array,s=new f(256),i=new f(4096),r,a,c;for(a=0;a<256;a++)r=a,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,r=r&1?-306674912^r>>>1:r>>>1,i[a]=s[a]=r&1?-306674912^r>>>1:r>>>1;for(a=0;a<256;a++)for(c=s[a],r=256+a;r<4096;r+=256)c=i[r]=c>>>8^s[c&255];for(P=[s],a=1;a<16;a++)P[a]=i.subarray(a*256,(a+1)*256)},R=(f,s=0)=>{P||X();let[i,r,a,c,S,U,C,k,g,I,M,t,e,p,o,d]=P,l=s^-1,m=f.length-15,n=0;for(;n>8&255]^p[f[n++]^l>>16&255]^e[f[n++]^l>>>24]^t[f[n++]]^M[f[n++]]^I[f[n++]]^g[f[n++]]^k[f[n++]]^C[f[n++]]^U[f[n++]]^S[f[n++]]^c[f[n++]]^a[f[n++]]^r[f[n++]]^i[f[n++]];for(m+=15;n>>8^i[(l^f[n++])&255];return~l};var _=typeof CompressionStream<"u",W=new 
TextEncoder,Z=f=>f.reduce((s,i)=>s+i.length,0),G=Uint8Array;function $(f){let s=new CompressionStream("gzip"),i=s.writable.getWriter(),r=s.readable.getReader();return i.write(f),i.close(),()=>r.read()}async function B(f,s=!0,i=$){let r=[],a=_&&s,c=f.length,S=f.map(o=>W.encode(o.path)),U=f.map(({data:o})=>typeof o=="string"?W.encode(o):o instanceof ArrayBuffer?new G(o):o),C=Z(U),k=Z(S),g=c*46+k,I=C+c*30+k+g+22,M=new Date,t=new G(I),e=0;for(let o=0;o>8,t[e++]=L&255,t[e++]=L>>8;let x=e;if(e+=8,t[e++]=n&255,t[e++]=n>>8&255,t[e++]=n>>16&255,t[e++]=n>>24,t[e++]=l&255,t[e++]=l>>8,t[e++]=t[e++]=0,t.set(d,e),e+=l,a){let y=e,E=i(m),b,A=0,v=0;e:{for(;;){let u=await E();if(u.done)throw new Error("Bad gzip data");if(b=u.value,A=v,v=A+b.length,A<=3&&v>3&&b[3-A]&30){z=!0;break e}if(v>=10){b=b.subarray(10-A);break}}for(;;){let u=e-y,w=b.length;if(u+w>=n+8){z=!0;break e}t.set(b,e),e+=w;let O=await E();if(O.done)break;b=O.value}}if(z)for(;;){let u=b.length,w=8-u,O=e;e=y;for(let D=0;D<8;D++)t[e++]=D>8&255,t[x++]=y>>16&255,t[x++]=y>>24}t[x++]=T&255,t[x++]=T>>8&255,t[x++]=T>>16&255,t[x++]=T>>24}let p=e;for(let o=0;o>8&255,t[e++]=d>>16&255,t[e++]=d>>24,t.set(l,e),e+=m}return t[e++]=80,t[e++]=75,t[e++]=5,t[e++]=6,t[e++]=t[e++]=t[e++]=t[e++]=0,t[e++]=c&255,t[e++]=c>>8,t[e++]=c&255,t[e++]=c>>8,t[e++]=g&255,t[e++]=g>>8&255,t[e++]=g>>16&255,t[e++]=g>>24,t[e++]=p&255,t[e++]=p>>8&255,t[e++]=p>>16&255,t[e++]=p>>24,t[e++]=t[e++]=0,t.subarray(0,e)} 2 | -------------------------------------------------------------------------------- /test.ts: -------------------------------------------------------------------------------- 1 | import { createZip } from '.'; 2 | import { writeFileSync } from 'fs'; 3 | import { execFileSync } from 'child_process'; 4 | import { webcrypto as crypto } from 'crypto'; // for older Node versions 5 | 6 | const testStr = "The quick brown fox jumps over the lazy dog.\n"; 7 | 8 | function makeTestData() { 9 | const rawFiles = []; 10 | let i = 0; 11 | do { 12 | i++; 13 | const 
maxDataLength = [16, 1024, 65536][Math.floor(Math.random() * 3)]; 14 | const dataLength = Math.floor(Math.random() * maxDataLength); 15 | let data; 16 | if (Math.random() < .5) { 17 | data = testStr.repeat(Math.ceil(dataLength / testStr.length)).slice(0, dataLength); 18 | } else { 19 | data = new Uint8Array(dataLength); 20 | crypto.getRandomValues(data as Uint8Array); 21 | } 22 | rawFiles.push({ 23 | path: `f_${i}.${typeof data === 'string' ? 'txt' : 'dat'}`, // .dat and not .bin, because Macs try to extract .bin files! 24 | data, 25 | }); 26 | } while (Math.random() < 0.667); 27 | return rawFiles; 28 | } 29 | 30 | function makeTestZip(compress: boolean, makeReadFn: undefined | typeof byteByByteReadFn) { 31 | return createZip(makeTestData(), compress, makeReadFn); 32 | } 33 | 34 | function byteByByteReadFn(dataIn: Uint8Array) { 35 | const 36 | cs = new CompressionStream('gzip'), 37 | writer = cs.writable.getWriter(), 38 | reader = cs.readable.getReader(); 39 | 40 | writer.write(dataIn); 41 | writer.close(); 42 | 43 | let 44 | buffer: Uint8Array | undefined, 45 | bufferIndex: number; 46 | 47 | return async () => { 48 | if (buffer !== undefined && bufferIndex < buffer.byteLength) { 49 | return { value: buffer.subarray(bufferIndex, ++bufferIndex), done: false }; 50 | } 51 | const { value, done } = await reader.read(); 52 | if (done) { 53 | return { value, done }; 54 | 55 | } else { 56 | buffer = value as Uint8Array; 57 | bufferIndex = 0; 58 | return { value: buffer.subarray(bufferIndex, ++bufferIndex), done: false }; 59 | } 60 | } 61 | } 62 | 63 | function singleChunkReadFn(dataIn: Uint8Array) { 64 | const 65 | cs = new CompressionStream('gzip'), 66 | writer = cs.writable.getWriter(), 67 | reader = cs.readable.getReader(); 68 | 69 | writer.write(dataIn); 70 | writer.close(); 71 | 72 | let 73 | buffer = new Uint8Array(), 74 | returned = false; 75 | 76 | return async () => { 77 | if (returned) { 78 | return { value: undefined as any, done: true }; 79 | } 80 | for (; ;) 
{ 81 | const { value, done } = await reader.read(); 82 | if (done) { 83 | returned = true; 84 | return { value: buffer, done: false }; 85 | } 86 | const newBuffer = new Uint8Array(buffer.byteLength + value.byteLength); 87 | newBuffer.set(buffer); 88 | newBuffer.set(value, buffer.byteLength); 89 | buffer = newBuffer; 90 | } 91 | } 92 | } 93 | 94 | async function test() { 95 | for (const compress of [false, true]) { 96 | console.log('compress:', compress); 97 | for (const makeReadFn of [byteByByteReadFn, singleChunkReadFn, undefined]) { 98 | console.log(' read function:', makeReadFn?.name); 99 | for (let i = 0; i < 1000; i++) { 100 | const zip = await makeTestZip(compress, makeReadFn); 101 | const file = `testfiles/z_${i}.zip`; 102 | writeFileSync(file, zip); 103 | execFileSync('/usr/bin/unzip', ['-t', file]); // throws error on non-zero exit 104 | } 105 | } 106 | } 107 | } 108 | 109 | test(); 110 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * littlezipper 3 | * Copyright (C) George MacKerron 2024 4 | * 5 | * Licensed under the Apache License, Version 2.0 (the "License"); 6 | * you may not use this file except in compliance with the License. 7 | * You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | // references: 19 | // https://users.cs.jmu.edu/buchhofp/forensics/formats/pkzip.html 20 | // https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT 21 | // https://www.rfc-editor.org/rfc/rfc1952 22 | 23 | import { crc32 } from './crc32'; 24 | 25 | export interface UncompressedFile { 26 | path: string; 27 | data: string | ArrayBuffer | Uint8Array; 28 | lastModified?: Date; 29 | } 30 | 31 | const 32 | hasCompressionStreams = typeof CompressionStream !== 'undefined', 33 | te = new TextEncoder(), 34 | lengthSum = (ns: Uint8Array[]) => ns.reduce((memo, n) => memo + n.length, 0), 35 | ui8 = Uint8Array; 36 | 37 | function makeGzipReadFn(dataIn: Uint8Array) { 38 | const 39 | cs = new CompressionStream('gzip'), 40 | writer = cs.writable.getWriter(), 41 | reader = cs.readable.getReader(); 42 | 43 | writer.write(dataIn); 44 | writer.close(); 45 | return () => reader.read(); 46 | } 47 | 48 | export async function createZip(inputFiles: UncompressedFile[], compressWhenPossible = true, gzipReadFn = makeGzipReadFn) { 49 | const 50 | localHeaderOffsets = [], 51 | attemptDeflate = hasCompressionStreams && compressWhenPossible, 52 | numFiles = inputFiles.length, 53 | filePaths = inputFiles.map(file => te.encode(file.path)), 54 | fileData = inputFiles.map(({ data }) => 55 | typeof data === 'string' ? te.encode(data) : 56 | data instanceof ArrayBuffer ? 
new ui8(data) : data), 57 | totalDataSize = lengthSum(fileData), 58 | totalFilePathsSize = lengthSum(filePaths), 59 | centralDirectorySize = numFiles * 46 + totalFilePathsSize, 60 | // if deflate expands the data, which can happen, we just stick it in uncompressed, so the uncompressed size is worst case 61 | maxZipSize = totalDataSize 62 | + numFiles * 30 + totalFilePathsSize // local headers 63 | + centralDirectorySize + 22, // 22 = central directory trailer 64 | now = new Date(), 65 | zip = new ui8(maxZipSize); 66 | 67 | let b = 0; // zip byte index 68 | 69 | // write local headers and compressed files 70 | for (let fileIndex = 0; fileIndex < numFiles; fileIndex++) { 71 | localHeaderOffsets[fileIndex] = b; 72 | 73 | const 74 | filePath = filePaths[fileIndex], 75 | filePathSize = filePath.length, 76 | uncompressed = fileData[fileIndex], 77 | uncompressedSize = uncompressed.length, 78 | lm = inputFiles[fileIndex].lastModified ?? now, 79 | mtime = ((lm.getSeconds() / 2) | 0) + (lm.getMinutes() << 5) + (lm.getHours() << 11), 80 | mdate = lm.getDate() + ((lm.getMonth() + 1) << 5) + ((lm.getFullYear() - 1980) << 9); 81 | 82 | let 83 | compressedSize = 0, 84 | abortDeflate = false; 85 | 86 | // signature 87 | zip[b++] = 0x50; // P 88 | zip[b++] = 0x4b; // K 89 | zip[b++] = 0x03; 90 | zip[b++] = 0x04; 91 | // version needed to extract 92 | zip[b++] = 20; // 2.0 93 | zip[b++] = 0; 94 | // general purpose flag 95 | zip[b++] = 0; 96 | zip[b++] = 0b1000; // bit 11 (indexed from 0) => UTF-8 file names 97 | // compression 98 | const bDeflate = b; 99 | zip[b++] = zip[b++] = 0; // we'll modify this compression flag later if we deflate the file 100 | // mtime, mdate 101 | zip[b++] = mtime & 0xff; 102 | zip[b++] = mtime >> 8; 103 | zip[b++] = mdate & 0xff; 104 | zip[b++] = mdate >> 8; 105 | // CRC (4 bytes) then compressed size (4 bytes) -- we'll write these later 106 | let bCrc = b; 107 | b += 8; 108 | // uncompressed size 109 | zip[b++] = uncompressedSize & 0xff; 110 | zip[b++] 
= (uncompressedSize >> 8) & 0xff; 111 | zip[b++] = (uncompressedSize >> 16) & 0xff; 112 | zip[b++] = (uncompressedSize >> 24); 113 | // file name length 114 | zip[b++] = filePathSize & 0xff; 115 | zip[b++] = filePathSize >> 8; 116 | // extra field length 117 | zip[b++] = zip[b++] = 0; 118 | // file name 119 | zip.set(filePath, b); 120 | b += filePathSize; 121 | 122 | // compressed data 123 | if (attemptDeflate) { 124 | const 125 | compressedStart = b, 126 | read = gzipReadFn(uncompressed); 127 | 128 | let 129 | bytes: Uint8Array, 130 | bytesStartOffset = 0, 131 | bytesEndOffset = 0; 132 | 133 | deflate: { 134 | // check and skip gzip header 135 | for (; ;) { 136 | const data = await read(); 137 | if (data.done) throw new Error('Bad gzip data'); 138 | 139 | bytes = data.value; 140 | bytesStartOffset = bytesEndOffset; 141 | bytesEndOffset = bytesStartOffset + bytes.length; 142 | 143 | // check flags value 144 | // note: we assume no optional fields; if there are any, we give up on compression 145 | if (bytesStartOffset <= 3 && bytesEndOffset > 3) { 146 | const flags = bytes[3 - bytesStartOffset]; 147 | if (flags & 0b11110) { 148 | abortDeflate = true; // assumptions on gzip flags were violated 149 | break deflate; 150 | } 151 | } 152 | 153 | // check end of header 154 | if (bytesEndOffset >= 10 /* gzip header bytes */) { 155 | bytes = bytes.subarray(10 - bytesStartOffset); // length could be zero 156 | break; 157 | } 158 | } 159 | 160 | // copy compressed data 161 | for (; ;) { 162 | const 163 | bytesAlreadyWritten = b - compressedStart, 164 | bytesLength = bytes.length; 165 | 166 | if (bytesAlreadyWritten + bytesLength >= uncompressedSize + 8) { // allow 8 bytes for CRC and length at end 167 | abortDeflate = true; 168 | break deflate; 169 | } 170 | 171 | zip.set(bytes, b); 172 | b += bytesLength; 173 | 174 | const data = await read(); 175 | if (data.done) break; 176 | 177 | bytes = data.value; 178 | } 179 | } 180 | 181 | if (abortDeflate) { 182 | // Either we got 
unexpected flags, or deflate made the data larger. 183 | // In either case, we give up on the compressed data, but hold on for the CRC. 184 | // We need the last 8 bytes of gzip data: the first 4 of these are the CRC. 185 | 186 | for (; ;) { 187 | const 188 | bytesLength = bytes.length, 189 | copyBytes = 8 - bytesLength, 190 | bPrev = b; 191 | 192 | b = compressedStart; 193 | for (let i = 0; i < 8; i++) { 194 | zip[b++] = i < copyBytes ? zip[bPrev - copyBytes + i] : bytes[bytesLength - 8 + i]; 195 | } 196 | 197 | const data = await read(); 198 | if (data.done) break; 199 | 200 | bytes = data.value; 201 | } 202 | } 203 | 204 | // backtrack and retrieve CRC 205 | b -= 8; 206 | zip[bCrc++] = zip[b++]; 207 | zip[bCrc++] = zip[b++]; 208 | zip[bCrc++] = zip[b++]; 209 | zip[bCrc++] = zip[b++]; 210 | b -= 4; 211 | 212 | if (!abortDeflate) { 213 | zip[bDeflate] = 8; // set compression flag to 8 = deflate 214 | compressedSize = b - compressedStart; 215 | } 216 | } 217 | 218 | if (!attemptDeflate || abortDeflate) { 219 | zip.set(uncompressed, b); 220 | b += uncompressedSize; 221 | compressedSize = uncompressedSize; 222 | } 223 | 224 | if (!attemptDeflate) { 225 | // calculate CRC ourselves 226 | const crc = crc32(uncompressed); 227 | zip[bCrc++] = crc & 0xff; 228 | zip[bCrc++] = (crc >> 8) & 0xff; 229 | zip[bCrc++] = (crc >> 16) & 0xff; 230 | zip[bCrc++] = (crc >> 24); 231 | } 232 | 233 | // return to compressed size 234 | zip[bCrc++] = compressedSize & 0xff; 235 | zip[bCrc++] = (compressedSize >> 8) & 0xff; 236 | zip[bCrc++] = (compressedSize >> 16) & 0xff; 237 | zip[bCrc++] = (compressedSize >> 24); 238 | } 239 | 240 | // write central directory 241 | const centralDirectoryOffset = b; 242 | for (let fileIndex = 0; fileIndex < numFiles; fileIndex++) { 243 | const 244 | localHeaderOffset = localHeaderOffsets[fileIndex], 245 | fileName = filePaths[fileIndex], 246 | fileNameSize = fileName.length; 247 | 248 | // signature 249 | zip[b++] = 0x50; // P 250 | zip[b++] = 0x4b; // K 
251 | zip[b++] = 0x01; 252 | zip[b++] = 0x02; 253 | // version created by 254 | zip[b++] = 20; // 2.0 255 | zip[b++] = 0; // -> platform (MS-DOS) 256 | // version needed to extract 257 | zip[b++] = 20; // 2.0 258 | zip[b++] = 0; 259 | // copy local header from [general purpose flag] to [extra field length] 260 | zip.set(zip.subarray(localHeaderOffset + 6, localHeaderOffset + 30), b); 261 | b += 24; 262 | // file comment length (2b), disk number (2b), internal attr (2b), external attr (4b) 263 | zip[b++] = zip[b++] = 264 | zip[b++] = zip[b++] = 265 | zip[b++] = zip[b++] = 266 | zip[b++] = zip[b++] = zip[b++] = zip[b++] = 0; 267 | // local header offset 268 | zip[b++] = localHeaderOffset & 0xff; 269 | zip[b++] = (localHeaderOffset >> 8) & 0xff; 270 | zip[b++] = (localHeaderOffset >> 16) & 0xff; 271 | zip[b++] = (localHeaderOffset >> 24); 272 | // file name 273 | zip.set(fileName, b); 274 | b += fileNameSize; 275 | } 276 | 277 | // write end-of-central-directory record 278 | // signature 279 | zip[b++] = 0x50; // P 280 | zip[b++] = 0x4b; // K 281 | zip[b++] = 0x05; 282 | zip[b++] = 0x06; 283 | // disk numbers x 2 284 | zip[b++] = zip[b++] = 285 | zip[b++] = zip[b++] = 0; 286 | // disk entries 287 | zip[b++] = numFiles & 0xff; 288 | zip[b++] = numFiles >> 8; 289 | // total entries 290 | zip[b++] = numFiles & 0xff; 291 | zip[b++] = numFiles >> 8; 292 | // central directory size 293 | zip[b++] = centralDirectorySize & 0xff; 294 | zip[b++] = (centralDirectorySize >> 8) & 0xff; 295 | zip[b++] = (centralDirectorySize >> 16) & 0xff; 296 | zip[b++] = (centralDirectorySize >> 24); 297 | // central directory offset 298 | zip[b++] = centralDirectoryOffset & 0xff; 299 | zip[b++] = (centralDirectoryOffset >> 8) & 0xff; 300 | zip[b++] = (centralDirectoryOffset >> 16) & 0xff; 301 | zip[b++] = (centralDirectoryOffset >> 24); 302 | // comment length 303 | zip[b++] = zip[b++] = 0; 304 | 305 | return zip.subarray(0, b); 306 | } 307 | 
-------------------------------------------------------------------------------- /test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // crc32.ts 4 | var T; 5 | var init = () => { 6 | const i32 = Int32Array, T0 = new i32(256), t = new i32(4096); 7 | let c, n, v; 8 | for (n = 0; n < 256; n++) { 9 | c = n; 10 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 11 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 12 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 13 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 14 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 15 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 16 | c = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 17 | t[n] = T0[n] = c & 1 ? -306674912 ^ c >>> 1 : c >>> 1; 18 | } 19 | for (n = 0; n < 256; n++) { 20 | v = T0[n]; 21 | for (c = 256 + n; c < 4096; c += 256) v = t[c] = v >>> 8 ^ T0[v & 255]; 22 | } 23 | T = [T0]; 24 | for (n = 1; n < 16; n++) T[n] = t.subarray(n * 256, (n + 1) * 256); 25 | }; 26 | var crc32 = (B, seed = 0) => { 27 | if (!T) init(); 28 | const [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, Ta, Tb, Tc, Td, Te, Tf] = T; 29 | let crc = seed ^ -1, l = B.length - 15, i = 0; 30 | for (; i < l; ) crc = Tf[B[i++] ^ crc & 255] ^ Te[B[i++] ^ crc >> 8 & 255] ^ Td[B[i++] ^ crc >> 16 & 255] ^ Tc[B[i++] ^ crc >>> 24] ^ Tb[B[i++]] ^ Ta[B[i++]] ^ T9[B[i++]] ^ T8[B[i++]] ^ T7[B[i++]] ^ T6[B[i++]] ^ T5[B[i++]] ^ T4[B[i++]] ^ T3[B[i++]] ^ T2[B[i++]] ^ T1[B[i++]] ^ T0[B[i++]]; 31 | l += 15; 32 | while (i < l) crc = crc >>> 8 ^ T0[(crc ^ B[i++]) & 255]; 33 | return ~crc; 34 | }; 35 | 36 | // index.ts 37 | var hasCompressionStreams = typeof CompressionStream !== "undefined"; 38 | var te = new TextEncoder(); 39 | var lengthSum = (ns) => ns.reduce((memo, n) => memo + n.length, 0); 40 | var ui8 = Uint8Array; 41 | function makeGzipReadFn(dataIn) { 42 | const cs = new CompressionStream("gzip"), writer = cs.writable.getWriter(), reader = cs.readable.getReader(); 43 | 
/* NOTE(review): bundled, minified esbuild output (cjs) embedded in a repo dump; the "NN |" markers are dump line numbers, not code. The first fragment below is the tail of makeGzipReadFn (its opening is outside this chunk); createZip then builds the entire .zip in a single preallocated Uint8Array (`zip`), with `b` as the write cursor. maxZipSize is the worst case: all data stored uncompressed + 30-byte local headers + 46-byte central entries + 22-byte EOCD. */
writer.write(dataIn); 44 | writer.close(); 45 | return () => reader.read(); 46 | } 47 | async function createZip(inputFiles, compressWhenPossible = true, gzipReadFn = makeGzipReadFn) { 48 | const localHeaderOffsets = [], attemptDeflate = hasCompressionStreams && compressWhenPossible, numFiles = inputFiles.length, filePaths = inputFiles.map((file) => te.encode(file.path)), fileData = inputFiles.map(({ data }) => typeof data === "string" ? te.encode(data) : data instanceof ArrayBuffer ? new ui8(data) : data), totalDataSize = lengthSum(fileData), totalFilePathsSize = lengthSum(filePaths), centralDirectorySize = numFiles * 46 + totalFilePathsSize, maxZipSize = totalDataSize + numFiles * 30 + totalFilePathsSize + centralDirectorySize + 22, now = /* @__PURE__ */ new Date(), zip = new ui8(maxZipSize); 49 | let b = 0; 50 | for (let fileIndex = 0; fileIndex < numFiles; fileIndex++) { 51 | localHeaderOffsets[fileIndex] = b; 52 | const filePath = filePaths[fileIndex], filePathSize = filePath.length, uncompressed = fileData[fileIndex], uncompressedSize = uncompressed.length, lm = inputFiles[fileIndex].lastModified ??
/* Per-file local header: bytes 80,75,3,4 are the "PK\x03\x04" signature; then version-needed 20,0; then flag bytes 0,8 (0x0800 — presumably the UTF-8 filename flag, consistent with te.encode()'d paths; confirm against the ZIP APPNOTE). mtime/mdate are packed MS-DOS date/time. bDeflate remembers the compression-method field (written 0 = stored, patched to 8 = deflate later if compression succeeds); bCrc remembers the CRC-32 + compressed-size slot, skipped with b += 8 and back-filled at the end of the iteration. */
now, mtime = (lm.getSeconds() / 2 | 0) + (lm.getMinutes() << 5) + (lm.getHours() << 11), mdate = lm.getDate() + (lm.getMonth() + 1 << 5) + (lm.getFullYear() - 1980 << 9); 53 | let compressedSize = 0, abortDeflate = false; 54 | zip[b++] = 80; 55 | zip[b++] = 75; 56 | zip[b++] = 3; 57 | zip[b++] = 4; 58 | zip[b++] = 20; 59 | zip[b++] = 0; 60 | zip[b++] = 0; 61 | zip[b++] = 8; 62 | const bDeflate = b; 63 | zip[b++] = zip[b++] = 0; 64 | zip[b++] = mtime & 255; 65 | zip[b++] = mtime >> 8; 66 | zip[b++] = mdate & 255; 67 | zip[b++] = mdate >> 8; 68 | let bCrc = b; 69 | b += 8; 70 | zip[b++] = uncompressedSize & 255; 71 | zip[b++] = uncompressedSize >> 8 & 255; 72 | zip[b++] = uncompressedSize >> 16 & 255; 73 | zip[b++] = uncompressedSize >> 24; 74 | zip[b++] = filePathSize & 255; 75 | zip[b++] = filePathSize >> 8; 76 | zip[b++] = zip[b++] = 0; 77 | zip.set(filePath, b); 78 | b += filePathSize; 79 | if (attemptDeflate) { 80 | const compressedStart = b, read = gzipReadFn(uncompressed); 81 | let bytes, bytesStartOffset = 0, bytesEndOffset = 0; 82 | deflate: { 83 | for (; ; ) { 84 | const data = await read(); 85 | if (data.done) throw new Error("Bad gzip data"); 86 | bytes = data.value; 87 | bytesStartOffset = bytesEndOffset; 88 | bytesEndOffset = bytesStartOffset + bytes.length; 89 | if (bytesStartOffset <= 3 && bytesEndOffset > 3) { 90 | const flags = bytes[3 - bytesStartOffset]; 91 | if (flags & 30) { 92 | abortDeflate = true; 93 | break deflate; 94 | } 95 | } 96 | if (bytesEndOffset >= 10) { 97 | bytes = bytes.subarray(10 - bytesStartOffset); 98 | break; 99 | } 100 | } 101 | for (; ; ) { 102 | const bytesAlreadyWritten = b - compressedStart, bytesLength = bytes.length; 103 | if (bytesAlreadyWritten + bytesLength >= uncompressedSize + 8) { 104 | abortDeflate = true; 105 | break deflate; 106 | } 107 | zip.set(bytes, b); 108 | b += bytesLength; 109 | const data = await read(); 110 | if (data.done) break; 111 | bytes = data.value; 112 | } 113 | } 114 | if (abortDeflate) {
/* The deflate attempt above consumes gzip output from read(): byte 3 of the stream is the gzip FLG byte — if any of bits 0x1E (extra/name/comment/header-CRC fields) are set the 10-byte-header assumption fails and deflate is abandoned; otherwise those 10 header bytes are skipped so only the raw deflate stream is copied. The second loop also aborts if the deflate output would reach uncompressedSize + 8 (i.e. not smaller than storing, allowing for the 8 trailer bytes). On abort, the loop below drains the rest of the gzip stream while keeping a rolling 8-byte window at compressedStart — after the stream ends that window holds the gzip trailer (CRC-32 then ISIZE). Either way, b -= 8 then points at the trailer's CRC-32, whose 4 bytes are copied into the header slot at bCrc; only on success is the method patched to 8 (deflate). If deflate was aborted or unavailable, the raw data is stored instead, and the CRC is computed directly via crc32(). */
115 | for (; ; ) { 116 | const bytesLength = bytes.length, copyBytes = 8 - bytesLength, bPrev = b; 117 | b = compressedStart; 118 | for (let i = 0; i < 8; i++) { 119 | zip[b++] = i < copyBytes ? zip[bPrev - copyBytes + i] : bytes[bytesLength - 8 + i]; 120 | } 121 | const data = await read(); 122 | if (data.done) break; 123 | bytes = data.value; 124 | } 125 | } 126 | b -= 8; 127 | zip[bCrc++] = zip[b++]; 128 | zip[bCrc++] = zip[b++]; 129 | zip[bCrc++] = zip[b++]; 130 | zip[bCrc++] = zip[b++]; 131 | b -= 4; 132 | if (!abortDeflate) { 133 | zip[bDeflate] = 8; 134 | compressedSize = b - compressedStart; 135 | } 136 | } 137 | if (!attemptDeflate || abortDeflate) { 138 | zip.set(uncompressed, b); 139 | b += uncompressedSize; 140 | compressedSize = uncompressedSize; 141 | } 142 | if (!attemptDeflate) { 143 | const crc = crc32(uncompressed); 144 | zip[bCrc++] = crc & 255; 145 | zip[bCrc++] = crc >> 8 & 255; 146 | zip[bCrc++] = crc >> 16 & 255; 147 | zip[bCrc++] = crc >> 16 & 255 && 0 || crc >> 16 & 255; 148 | } 149 | zip[bCrc++] = compressedSize & 255; 150 | zip[bCrc++] = compressedSize >> 8 & 255; 151 | zip[bCrc++] = compressedSize >> 16 & 255; 152 | zip[bCrc++] = compressedSize >> 24; 153 | } 154 | const centralDirectoryOffset = b; 155 | for (let fileIndex = 0; fileIndex < numFiles; fileIndex++) { 156 | const localHeaderOffset = localHeaderOffsets[fileIndex], fileName = filePaths[fileIndex], fileNameSize = fileName.length; 157 | zip[b++] = 80; 158 | zip[b++] = 75; 159 | zip[b++] = 1; 160 | zip[b++] = 2; 161 | zip[b++] = 20; 162 | zip[b++] = 0; 163 | zip[b++] = 20; 164 | zip[b++] = 0; 165 | zip.set(zip.subarray(localHeaderOffset + 6, localHeaderOffset + 30), b); 166 | b += 24; 167 | zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = zip[b++] = 0; 168 | zip[b++] = localHeaderOffset & 255; 169 | zip[b++] = localHeaderOffset >> 8 & 255; 170 | zip[b++] = localHeaderOffset >> 16 & 255; 171 | zip[b++] = localHeaderOffset >> 24; 172 | zip.set(fileName, b); 173
/* Central directory ("PK\x01\x02" entries): each entry reuses bytes 6..29 of the already-written local header (flags through extra-field length) via zip.subarray, writes 10 zero bytes (comment length, disk number, internal/external attributes), then the local-header offset and filename. Finally the EOCD record ("PK\x05\x06"): entry counts (written twice: this-disk and total), central-directory size and offset, and a zero-length comment; the result is trimmed to b with subarray. */
| b += fileNameSize; 174 | } 175 | zip[b++] = 80; 176 | zip[b++] = 75; 177 | zip[b++] = 5; 178 | zip[b++] = 6; 179 | zip[b++] = zip[b++] = zip[b++] = zip[b++] = 0; 180 | zip[b++] = numFiles & 255; 181 | zip[b++] = numFiles >> 8; 182 | zip[b++] = numFiles & 255; 183 | zip[b++] = numFiles >> 8; 184 | zip[b++] = centralDirectorySize & 255; 185 | zip[b++] = centralDirectorySize >> 8 & 255; 186 | zip[b++] = centralDirectorySize >> 16 & 255; 187 | zip[b++] = centralDirectorySize >> 24; 188 | zip[b++] = centralDirectoryOffset & 255; 189 | zip[b++] = centralDirectoryOffset >> 8 & 255; 190 | zip[b++] = centralDirectoryOffset >> 16 & 255; 191 | zip[b++] = centralDirectoryOffset >> 24; 192 | zip[b++] = zip[b++] = 0; 193 | return zip.subarray(0, b); 194 | } 195 | 196 | // test.ts 197 | var import_fs = require("fs"); 198 | var import_child_process = require("child_process"); 199 | var import_crypto = require("crypto"); 200 | var testStr = "The quick brown fox jumps over the lazy dog.\n"; 201 | function makeTestData() { 202 | const rawFiles = []; 203 | let i = 0; 204 | do { 205 | i++; 206 | const maxDataLength = [16, 1024, 65536][Math.floor(Math.random() * 3)]; 207 | const dataLength = Math.floor(Math.random() * maxDataLength); 208 | let data; 209 | if (Math.random() < 0.5) { 210 | data = testStr.repeat(Math.ceil(dataLength / testStr.length)).slice(0, dataLength); 211 | } else { 212 | data = new Uint8Array(dataLength); 213 | import_crypto.webcrypto.getRandomValues(data); 214 | } 215 | rawFiles.push({ 216 | path: `f_${i}.${typeof data === "string" ? "txt" : "dat"}`, 217 | // .dat and not .bin, because Macs try to extract .bin files!
/* NOTE(review): bundled test harness from test.ts (continuation of makeTestData, which starts on the previous dump line). makeTestData builds a random mix of text and random-byte files of up to 16/1024/65536 bytes, repeating with probability 0.667. The two read functions below are stress-test stand-ins for the library's gzip reader: byteByByteReadFn yields the CompressionStream output one byte at a time (exercises every chunk-boundary path in createZip's gzip parsing), while singleChunkReadFn concatenates the whole gzip stream and yields it as a single chunk. Note writer.write()/writer.close() promises are deliberately not awaited — reads drain the stream. */
218 | data 219 | }); 220 | } while (Math.random() < 0.667); 221 | return rawFiles; 222 | } 223 | function makeTestZip(compress, makeReadFn) { 224 | return createZip(makeTestData(), compress, makeReadFn); 225 | } 226 | function byteByByteReadFn(dataIn) { 227 | const cs = new CompressionStream("gzip"), writer = cs.writable.getWriter(), reader = cs.readable.getReader(); 228 | writer.write(dataIn); 229 | writer.close(); 230 | let buffer, bufferIndex; 231 | return async () => { 232 | if (buffer !== void 0 && bufferIndex < buffer.byteLength) { 233 | return { value: buffer.subarray(bufferIndex, ++bufferIndex), done: false }; 234 | } 235 | const { value, done } = await reader.read(); 236 | if (done) { 237 | return { value, done }; 238 | } else { 239 | buffer = value; 240 | bufferIndex = 0; 241 | return { value: buffer.subarray(bufferIndex, ++bufferIndex), done: false }; 242 | } 243 | }; 244 | } 245 | function singleChunkReadFn(dataIn) { 246 | const cs = new CompressionStream("gzip"), writer = cs.writable.getWriter(), reader = cs.readable.getReader(); 247 | writer.write(dataIn); 248 | writer.close(); 249 | let buffer = new Uint8Array(), returned = false; 250 | return async () => { 251 | if (returned) { 252 | return { value: void 0, done: true }; 253 | } 254 | for (; ; ) { 255 | const { value, done } = await reader.read(); 256 | if (done) { 257 | returned = true; 258 | return { value: buffer, done: false }; 259 | } 260 | const newBuffer = new Uint8Array(buffer.byteLength + value.byteLength); 261 | newBuffer.set(buffer); 262 | newBuffer.set(value, buffer.byteLength); 263 | buffer = newBuffer; 264 | } 265 | }; 266 | } 267 | async function test() { 268 | for (const compress of [false, true]) { 269 | console.log("compress:", compress); 270 | for (const makeReadFn of [byteByByteReadFn, singleChunkReadFn, void 0]) { 271 | console.log(" read function:", makeReadFn?.name); 272 | for (let i = 0; i < 1e3; i++) { 273 | const zip = await makeTestZip(compress, makeReadFn); 274 | const
/* test() runs 1000 iterations for each (compress × read-function) combination — `void 0` selects the library's default makeGzipReadFn — writing each archive to testfiles/ and validating it with `/usr/bin/unzip -t` (execFileSync throws on a non-zero exit, failing the run). NOTE(review): requires a pre-existing testfiles/ directory and a macOS/Linux unzip at that absolute path — not portable to Windows. The unhandled test() promise rejection is how failures surface. The tsconfig.json dump begins after the separator below. */
file = `testfiles/z_${i}.zip`; 275 | (0, import_fs.writeFileSync)(file, zip); 276 | (0, import_child_process.execFileSync)("/usr/bin/unzip", ["-t", file]); 277 | } 278 | } 279 | } 280 | } 281 | test(); 282 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2017", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'.
*/ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | // "rootDir": "./", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. 
*/ 39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ 40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ 41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ 42 | // "resolveJsonModule": true, /* Enable importing .json files. */ 43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */ 44 | // "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */ 45 | 46 | /* JavaScript Support */ 47 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 50 | 51 | /* Emit */ 52 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 55 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 58 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 59 | // "removeComments": true, /* Disable emitting comments. */ 60 | // "noEmit": true, /* Disable emitting files from a compilation.
*/ 61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 62 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 63 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 64 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 65 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 66 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 67 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 68 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 69 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 70 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 71 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 72 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 73 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 74 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 75 | 76 | /* Interop Constraints */ 77 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 78 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. 
*/ 79 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 80 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 81 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 82 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 83 | 84 | /* Type Checking */ 85 | "strict": true, /* Enable all strict type-checking options. */ 86 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 87 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 88 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 89 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 90 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 91 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 92 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 93 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 94 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 95 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 96 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 97 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. 
*/ 98 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 99 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 100 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 101 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 102 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 103 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 104 | 105 | /* Completeness */ 106 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 107 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 108 | } 109 | } 110 | --------------------------------------------------------------------------------