├── .eslintrc
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── ci-checks.yaml
├── .gitignore
├── .prettierrc
├── LICENSE
├── README.md
├── example-async.js
├── example-callbacks.js
├── node_stream_zip.d.ts
├── node_stream_zip.js
├── package-lock.json
├── package.json
├── release-notes.md
└── test
    ├── content
    │   ├── BSDmakefile
    │   ├── README.md
    │   ├── doc
    │   │   ├── api_assets
    │   │   │   ├── logo.svg
    │   │   │   └── sh.css
    │   │   ├── changelog-foot.html
    │   │   └── sh_javascript.min.js
    │   └── кириллица.html
    ├── err
    │   ├── bad_crc.zip
    │   ├── corrupt_entry.zip
    │   ├── deflate64.zip
    │   ├── enc_aes.zip
    │   ├── enc_zipcrypto.zip
    │   ├── evil.zip
    │   ├── lzma.zip
    │   └── rar.rar
    ├── ok
    │   ├── fast.zip
    │   ├── fastest.zip
    │   ├── maximum.zip
    │   ├── normal.zip
    │   ├── normal_comment.zip
    │   ├── osx.zip
    │   ├── sfx.exe
    │   ├── store.zip
    │   ├── ultra.zip
    │   ├── windows.zip
    │   └── withcomment.zip
    ├── special
    │   ├── tiny.zip
    │   ├── utf8.zip
    │   └── zip64.zip
    └── tests.js
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "eslint:recommended",
3 | "parserOptions": {
4 | "ecmaVersion": 2020
5 | },
6 | "env": {
7 | "node": true,
8 | "es6": true
9 | },
10 | "rules": {
11 | "no-var": "error",
12 | "prefer-const": "error",
13 | "curly": "error",
14 | "eqeqeq": "error",
15 | "prefer-arrow-callback": "error",
16 | "no-new-object": "error",
17 | "object-shorthand": "error",
18 | "no-array-constructor": "error",
19 | "array-callback-return": "error",
20 | "no-eval": "error",
21 | "no-new-func": "error",
22 | "prefer-rest-params": "error",
23 | "prefer-spread": "error",
24 | "no-useless-constructor": "error",
25 | "no-dupe-class-members": "error",
26 | "no-duplicate-imports": "error",
27 | "no-unneeded-ternary": "error",
28 | "no-console": "error"
29 | }
30 | }
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: antelle
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/.github/workflows/ci-checks.yaml:
--------------------------------------------------------------------------------
1 | name: CI Checks
2 | on:
3 | push:
4 | branches:
5 | - master
6 | pull_request:
7 | jobs:
8 | lint:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: actions/checkout@v2
12 | with:
13 | fetch-depth: 0
14 | - name: NPM install
15 | run: npm ci
16 | - name: Lint
17 | run: npm run lint
18 | - name: Check types
19 | run: npm run check-types
20 | - name: Run tests
21 | run: npm test
22 | - name: Setup Node.js 14
23 | uses: actions/setup-node@v2
24 | with:
25 | node-version: '14'
26 | - name: Run tests on Node.js 14
27 | run: npm test
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .DS_Store
3 | node_modules/
4 | build/
5 | resources/lib
6 | *.log
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "tabWidth": 4,
3 | "printWidth": 100,
4 | "singleQuote": true
5 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2021 Antelle https://github.com/antelle
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
21 |
22 | == dependency license: adm-zip ==
23 |
24 | Copyright (c) 2012 Another-D-Mention Software and other contributors,
25 | http://www.another-d-mention.ro/
26 |
27 | Permission is hereby granted, free of charge, to any person obtaining
28 | a copy of this software and associated documentation files (the
29 | "Software"), to deal in the Software without restriction, including
30 | without limitation the rights to use, copy, modify, merge, publish,
31 | distribute, sublicense, and/or sell copies of the Software, and to
32 | permit persons to whom the Software is furnished to do so, subject to
33 | the following conditions:
34 |
35 | The above copyright notice and this permission notice shall be
36 | included in all copies or substantial portions of the Software.
37 |
38 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
39 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
40 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
41 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
42 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
43 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
44 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # node-stream-zip
2 |
3 | node.js library for reading and extraction of ZIP archives.
4 | Features:
5 |
6 | - it never loads the entire archive into memory, everything is read in chunks
7 | - support for large archives
8 | - all operations are non-blocking, no sync i/o
9 | - fast initialization
10 | - no dependencies, no binary addons
11 | - decompression with the built-in zlib module
12 | - deflate, sfx, macosx/windows built-in archives
13 | - ZIP64 support
15 | ## Installation
16 |
17 | ```sh
18 | npm i node-stream-zip
19 | ```
20 |
21 | ## Usage
22 |
23 | There are two APIs provided:
24 | 1. [promise-based / async](#async-api)
25 | 2. [callbacks](#callback-api)
26 |
27 | It's recommended to use the new, promise-based API; however, the legacy callback API
28 | may be more flexible for certain operations.
29 |
30 | ### Async API
31 |
32 | Open a zip file
33 | ```javascript
34 | const StreamZip = require('node-stream-zip');
35 | const zip = new StreamZip.async({ file: 'archive.zip' });
36 | ```
37 |
38 | Stream one entry to stdout
39 | ```javascript
40 | const stm = await zip.stream('path/inside/zip.txt');
41 | stm.pipe(process.stdout);
42 | stm.on('end', () => zip.close());
43 | ```
44 |
45 | Read a file as buffer
46 | ```javascript
47 | const data = await zip.entryData('path/inside/zip.txt');
48 | await zip.close();
49 | ```
50 |
51 | Extract one file to disk
52 | ```javascript
53 | await zip.extract('path/inside/zip.txt', './extracted.txt');
54 | await zip.close();
55 | ```
56 |
57 | List entries
58 | ```javascript
59 | const entriesCount = await zip.entriesCount;
60 | console.log(`Entries read: ${entriesCount}`);
61 |
62 | const entries = await zip.entries();
63 | for (const entry of Object.values(entries)) {
64 | const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
65 | console.log(`Entry ${entry.name}: ${desc}`);
66 | }
67 |
68 | // Do not forget to close the file once you're done
69 | await zip.close();
70 | ```
71 |
72 | Extract a folder from archive to disk
73 | ```javascript
74 | fs.mkdirSync('extracted');
75 | await zip.extract('path/inside/zip/', './extracted');
76 | await zip.close();
77 | ```
78 |
79 | Extract everything
80 | ```javascript
81 | fs.mkdirSync('extracted');
82 | const count = await zip.extract(null, './extracted');
83 | console.log(`Extracted ${count} entries`);
84 | await zip.close();
85 | ```
86 |
87 | When extracting a folder, you can listen to the `extract` event
88 | ```javascript
89 | zip.on('extract', (entry, file) => {
90 | console.log(`Extracted ${entry.name} to ${file}`);
91 | });
92 | ```
93 |
94 | The `entry` event is generated for every entry during loading
95 | ```javascript
96 | zip.on('entry', entry => {
97 | // you can already stream this entry,
98 | // without waiting until all entry descriptions are read (suitable for very large archives)
99 | console.log(`Read entry ${entry.name}`);
100 | });
101 | ```
102 |
103 | ### Callback API
104 |
105 | Open a zip file
106 | ```javascript
107 | const StreamZip = require('node-stream-zip');
108 | const zip = new StreamZip({ file: 'archive.zip' });
109 |
110 | // Handle errors
111 | zip.on('error', err => { /*...*/ });
112 | ```
113 |
114 | List entries
115 | ```javascript
116 | zip.on('ready', () => {
117 | console.log('Entries read: ' + zip.entriesCount);
118 | for (const entry of Object.values(zip.entries())) {
119 | const desc = entry.isDirectory ? 'directory' : `${entry.size} bytes`;
120 | console.log(`Entry ${entry.name}: ${desc}`);
121 | }
122 | // Do not forget to close the file once you're done
123 | zip.close();
124 | });
125 | ```
126 |
127 | Stream one entry to stdout
128 | ```javascript
129 | zip.on('ready', () => {
130 | zip.stream('path/inside/zip.txt', (err, stm) => {
131 | stm.pipe(process.stdout);
132 | stm.on('end', () => zip.close());
133 | });
134 | });
135 | ```
136 |
137 | Extract one file to disk
138 | ```javascript
139 | zip.on('ready', () => {
140 | zip.extract('path/inside/zip.txt', './extracted.txt', err => {
141 | console.log(err ? 'Extract error' : 'Extracted');
142 | zip.close();
143 | });
144 | });
145 | ```
146 |
147 | Extract a folder from archive to disk
148 | ```javascript
149 | zip.on('ready', () => {
150 | fs.mkdirSync('extracted');
151 | zip.extract('path/inside/zip/', './extracted', err => {
152 | console.log(err ? 'Extract error' : 'Extracted');
153 | zip.close();
154 | });
155 | });
156 | ```
157 |
158 | Extract everything
159 | ```javascript
160 | zip.on('ready', () => {
161 | fs.mkdirSync('extracted');
162 | zip.extract(null, './extracted', (err, count) => {
163 | console.log(err ? 'Extract error' : `Extracted ${count} entries`);
164 | zip.close();
165 | });
166 | });
167 | ```
168 |
169 | Read a file as a buffer synchronously
170 | ```javascript
171 | zip.on('ready', () => {
172 | const data = zip.entryDataSync('path/inside/zip.txt');
173 | zip.close();
174 | });
175 | ```
176 |
177 | When extracting a folder, you can listen to the `extract` event
178 | ```javascript
179 | zip.on('extract', (entry, file) => {
180 | console.log(`Extracted ${entry.name} to ${file}`);
181 | });
182 | ```
183 |
184 | The `entry` event is generated for every entry during loading
185 | ```javascript
186 | zip.on('entry', entry => {
187 | // you can already stream this entry,
188 | // without waiting until all entry descriptions are read (suitable for very large archives)
189 | console.log(`Read entry ${entry.name}`);
190 | });
191 | ```
192 |
193 | ## Options
194 |
195 | You can pass these options to the constructor (a combined example follows the list):
196 | - `storeEntries: true` - you will be able to work with entries inside the zip archive; otherwise, the only way to access them is the `entry` event
197 | - `skipEntryNameValidation: true` - by default, entry names are checked for malicious characters, like `../` or `c:\123`; pass this flag to disable validation errors
198 | - `nameEncoding: 'utf8'` - encoding used to decode file names, UTF-8 by default
199 |
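For example, a minimal sketch combining these options (values shown are illustrative):

```javascript
const StreamZip = require('node-stream-zip');

const zip = new StreamZip({
    file: 'archive.zip',
    storeEntries: true, // keep entry descriptions so entries() and entry(name) work
    skipEntryNameValidation: false, // keep validation of names like `../` enabled
    nameEncoding: 'utf8' // the default; see "Known issues" for legacy encodings
});
```
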
200 | ## Methods
201 |
202 | - `zip.entries()` - get all entry descriptions
203 | - `zip.entry(name)` - get an entry description by name
204 | - `zip.stream(entry, function(err, stm) { })` - get an entry data reader stream
205 | - `zip.entryDataSync(entry)` - get entry data synchronously
206 | - `zip.close()` - cleanup after all entries have been read, streamed, extracted, and you don't need the archive
207 |
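These methods compose naturally; for instance, a small sketch that looks up a single entry before reading it (the entry path here is hypothetical):

```javascript
zip.on('ready', () => {
    const entry = zip.entry('path/inside/zip.txt');
    if (entry && entry.isFile) {
        console.log(`${entry.name}: ${entry.size} bytes`);
    }
    zip.close();
});
```
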
208 | ## Building
209 |
210 | The project doesn't require building. To run unit tests with [nodeunit](https://github.com/caolan/nodeunit):
211 | ```sh
212 | npm test
213 | ```
214 |
215 | ## Known issues
216 |
217 | - [utf8](https://github.com/rubyzip/rubyzip/wiki/Files-with-non-ascii-filenames) file names
218 |
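If you know which legacy code page an archiver used for the names, the `nameEncoding` option may help; a hedged sketch (the `cp866` label is only an example and must be supported by `TextDecoder` in your Node.js build):

```javascript
const StreamZip = require('node-stream-zip');

// decode file names as DOS-era Cyrillic instead of UTF-8
const zip = new StreamZip.async({ file: 'legacy.zip', nameEncoding: 'cp866' });
```
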
219 | ## Out of scope
220 |
221 | - AES encrypted files: the library will throw an error if you try to open them
222 |
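Encrypted entries can be detected up front via the `encrypted` flag, before any read is attempted; a minimal sketch:

```javascript
zip.on('ready', () => {
    for (const entry of Object.values(zip.entries())) {
        if (entry.encrypted) {
            console.log(`Skipping encrypted entry: ${entry.name}`);
        }
    }
});
```
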
223 | ## Contributors
224 |
225 | ZIP parsing code has been partially forked from [cthackers/adm-zip](https://github.com/cthackers/adm-zip) (MIT license).
226 |
--------------------------------------------------------------------------------
/example-async.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-console,no-unused-vars */
2 | const StreamZip = require('./');
3 |
4 | (async () => {
5 | console.log('Loading zip...');
6 |
7 | const zip = new StreamZip.async({ file: './test/ok/normal.zip' });
8 | const entriesCount = await zip.entriesCount;
9 |
10 | console.log(`Done in ${process.uptime()}s. Entries read: ${entriesCount}`);
11 |
12 | const entry = await zip.entry('README.md');
13 | console.log('Entry for README.md:', entry);
14 |
15 | const data = await zip.entryData('README.md');
16 | const firstLine = data.toString().split('\n')[0].trim();
17 | console.log(`First line of README.md: "${firstLine}"`);
18 |
19 | async function streamDataToStdOut() {
20 | const stm = await zip.stream('README.md');
21 | console.log('README.md contents streamed:\n');
22 | stm.pipe(process.stdout);
23 | }
24 |
25 | async function extractEntry() {
26 | await zip.extract('README.md', './tmp');
27 | }
28 |
29 | async function extractAll() {
30 | const extracted = await zip.extract(null, './tmp');
31 | console.log(`Extracted ${extracted} entries`);
32 | }
33 |
34 | await zip.close();
35 | })().catch(console.error);
36 |
--------------------------------------------------------------------------------
/example-callbacks.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-console,no-unused-vars */
2 | const StreamZip = require('./');
3 |
4 | const zip = new StreamZip({ file: './test/ok/normal.zip' });
5 | zip.on('error', (err) => {
6 | console.error('ERROR: ' + err);
7 | });
8 | zip.on('ready', () => {
9 | const entriesCount = zip.entriesCount;
10 | console.log(`Done in ${process.uptime()}s. Entries read: ${entriesCount}`);
11 |
12 | const entry = zip.entry('README.md');
13 | console.log('Entry for README.md:', entry);
14 |
15 | const data = zip.entryDataSync('README.md');
16 | const firstLine = data.toString().split('\n')[0].trim();
17 | console.log(`First line of README.md: "${firstLine}"`);
18 |
19 | zip.close();
20 |
21 | function streamDataToStdOut() {
22 | zip.stream('README.md', (err, stm) => {
23 | if (err) {
24 | return console.error(err);
25 | }
26 | console.log('README.md contents streamed:\n');
27 | stm.pipe(process.stdout);
28 | });
29 | }
30 |
31 | function extractEntry() {
32 | zip.extract('README.md', './tmp', (err) => {
33 | console.log(err ? err : 'Entry extracted');
34 | zip.close();
35 | });
36 | }
37 |
38 | function extractAll() {
39 | zip.extract(null, './tmp', (err, count) => {
40 | console.log(err ? err : `Extracted ${count} entries`);
41 | zip.close();
42 | });
43 | }
44 | });
45 | zip.on('extract', (entry, file) => {
46 | console.log('extract', entry.name, file);
47 | });
48 |
--------------------------------------------------------------------------------
/node_stream_zip.d.ts:
--------------------------------------------------------------------------------
1 | /// <reference types="node" />
2 |
3 | declare namespace StreamZip {
4 | interface StreamZipOptions {
5 | /**
6 | * File to read
7 | * @default undefined
8 | */
9 | file?: string;
10 |
11 | /**
12 | * Alternatively, you can pass fd here
13 | * @default undefined
14 | */
15 | fd?: number;
16 |
17 | /**
18 | * You will be able to work with entries inside zip archive,
19 | * otherwise the only way to access them is entry event
20 | * @default true
21 | */
22 | storeEntries?: boolean;
23 |
24 | /**
25 | * By default, entry name is checked for malicious characters, like ../ or c:\123,
26 | * pass this flag to disable validation error
27 | * @default false
28 | */
29 | skipEntryNameValidation?: boolean;
30 |
31 | /**
32 | * Filesystem read chunk size
33 | * @default automatic based on file size
34 | */
35 | chunkSize?: number;
36 |
37 | /**
38 | * Encoding used to decode file names
39 | * @default UTF8
40 | */
41 | nameEncoding?: string;
42 | }
43 |
44 | interface ZipEntry {
45 | /**
46 | * file name
47 | */
48 | name: string;
49 |
50 | /**
51 | * true if it's a directory entry
52 | */
53 | isDirectory: boolean;
54 |
55 | /**
56 | * true if it's a file entry, see also isDirectory
57 | */
58 | isFile: boolean;
59 |
60 | /**
61 | * file comment
62 | */
63 | comment: string;
64 |
65 | /**
66 | * if the file is encrypted
67 | */
68 | encrypted: boolean;
69 |
70 | /**
71 | * version made by
72 | */
73 | verMade: number;
74 |
75 | /**
76 | * version needed to extract
77 | */
78 | version: number;
79 |
80 | /**
81 | * encrypt, decrypt flags
82 | */
83 | flags: number;
84 |
85 | /**
86 | * compression method
87 | */
88 | method: number;
89 |
90 | /**
91 | * modification time
92 | */
93 | time: number;
94 |
95 | /**
96 | * uncompressed file crc-32 value
97 | */
98 | crc: number;
99 |
100 | /**
101 | * compressed size
102 | */
103 | compressedSize: number;
104 |
105 | /**
106 | * uncompressed size
107 | */
108 | size: number;
109 |
110 | /**
111 | * volume number start
112 | */
113 | diskStart: number;
114 |
115 | /**
116 | * internal file attributes
117 | */
118 | inattr: number;
119 |
120 | /**
121 | * external file attributes
122 | */
123 | attr: number;
124 |
125 | /**
126 | * LOC header offset
127 | */
128 | offset: number;
129 | }
130 |
131 | class StreamZipAsync {
132 | constructor(config: StreamZipOptions);
133 |
134 | entriesCount: Promise<number>;
135 | comment: Promise<string>;
136 |
137 | entry(name: string): Promise<ZipEntry | undefined>;
138 | entries(): Promise<{ [name: string]: ZipEntry }>;
139 | entryData(entry: string | ZipEntry): Promise<Buffer>;
140 | stream(entry: string | ZipEntry): Promise<NodeJS.ReadableStream>;
141 | extract(entry: string | ZipEntry | null, outPath: string): Promise<number | undefined>;
142 |
143 | on(event: 'entry', handler: (entry: ZipEntry) => void): void;
144 | on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
145 |
146 | close(): Promise<void>;
147 | }
148 | }
149 |
150 | type StreamZipOptions = StreamZip.StreamZipOptions;
151 | type ZipEntry = StreamZip.ZipEntry;
152 |
153 | declare class StreamZip {
154 | constructor(config: StreamZipOptions);
155 |
156 | /**
157 | * number of entries in the archive
158 | */
159 | entriesCount: number;
160 |
161 | /**
162 | * archive comment
163 | */
164 | comment: string;
165 |
166 | on(event: 'error', handler: (error: any) => void): void;
167 | on(event: 'entry', handler: (entry: ZipEntry) => void): void;
168 | on(event: 'ready', handler: () => void): void;
169 | on(event: 'extract', handler: (entry: ZipEntry, outPath: string) => void): void;
170 |
171 | entry(name: string): ZipEntry | undefined;
172 |
173 | entries(): { [name: string]: ZipEntry };
174 |
175 | stream(
176 | entry: string | ZipEntry,
177 | callback: (err: any | null, stream?: NodeJS.ReadableStream) => void
178 | ): void;
179 |
180 | entryDataSync(entry: string | ZipEntry): Buffer;
181 |
182 | openEntry(
183 | entry: string | ZipEntry,
184 | callback: (err: any | null, entry?: ZipEntry) => void,
185 | sync: boolean
186 | ): void;
187 |
188 | extract(
189 | entry: string | ZipEntry | null,
190 | outPath: string,
191 | callback: (err?: any, res?: number) => void
192 | ): void;
193 |
194 | close(callback?: (err?: any) => void): void;
195 |
196 | static async: typeof StreamZip.StreamZipAsync;
197 | }
198 |
199 | export = StreamZip;
200 |
--------------------------------------------------------------------------------
/node_stream_zip.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @license node-stream-zip | (c) 2020 Antelle | https://github.com/antelle/node-stream-zip/blob/master/LICENSE
3 | * Portions copyright https://github.com/cthackers/adm-zip | https://raw.githubusercontent.com/cthackers/adm-zip/master/LICENSE
4 | */
5 |
6 | let fs = require('fs');
7 | const util = require('util');
8 | const path = require('path');
9 | const events = require('events');
10 | const zlib = require('zlib');
11 | const stream = require('stream');
12 |
13 | const consts = {
14 | /* The local file header */
15 | LOCHDR: 30, // LOC header size
16 | LOCSIG: 0x04034b50, // "PK\003\004"
17 | LOCVER: 4, // version needed to extract
18 | LOCFLG: 6, // general purpose bit flag
19 | LOCHOW: 8, // compression method
20 | LOCTIM: 10, // modification time (2 bytes time, 2 bytes date)
21 | LOCCRC: 14, // uncompressed file crc-32 value
22 | LOCSIZ: 18, // compressed size
23 | LOCLEN: 22, // uncompressed size
24 | LOCNAM: 26, // filename length
25 | LOCEXT: 28, // extra field length
26 |
27 | /* The Data descriptor */
28 | EXTSIG: 0x08074b50, // "PK\007\008"
29 | EXTHDR: 16, // EXT header size
30 | EXTCRC: 4, // uncompressed file crc-32 value
31 | EXTSIZ: 8, // compressed size
32 | EXTLEN: 12, // uncompressed size
33 |
34 | /* The central directory file header */
35 | CENHDR: 46, // CEN header size
36 | CENSIG: 0x02014b50, // "PK\001\002"
37 | CENVEM: 4, // version made by
38 | CENVER: 6, // version needed to extract
39 | CENFLG: 8, // encrypt, decrypt flags
40 | CENHOW: 10, // compression method
41 | CENTIM: 12, // modification time (2 bytes time, 2 bytes date)
42 | CENCRC: 16, // uncompressed file crc-32 value
43 | CENSIZ: 20, // compressed size
44 | CENLEN: 24, // uncompressed size
45 | CENNAM: 28, // filename length
46 | CENEXT: 30, // extra field length
47 | CENCOM: 32, // file comment length
48 | CENDSK: 34, // volume number start
49 | CENATT: 36, // internal file attributes
50 | CENATX: 38, // external file attributes (host system dependent)
51 | CENOFF: 42, // LOC header offset
52 |
53 | /* The entries in the end of central directory */
54 | ENDHDR: 22, // END header size
55 | ENDSIG: 0x06054b50, // "PK\005\006"
56 | ENDSIGFIRST: 0x50,
57 | ENDSUB: 8, // number of entries on this disk
58 | ENDTOT: 10, // total number of entries
59 | ENDSIZ: 12, // central directory size in bytes
60 | ENDOFF: 16, // offset of first CEN header
61 | ENDCOM: 20, // zip file comment length
62 | MAXFILECOMMENT: 0xffff,
63 |
64 | /* The entries in the end of ZIP64 central directory locator */
65 | ENDL64HDR: 20, // ZIP64 end of central directory locator header size
66 | ENDL64SIG: 0x07064b50, // ZIP64 end of central directory locator signature
67 | ENDL64SIGFIRST: 0x50,
68 | ENDL64OFS: 8, // ZIP64 end of central directory offset
69 |
70 | /* The entries in the end of ZIP64 central directory */
71 | END64HDR: 56, // ZIP64 end of central directory header size
72 | END64SIG: 0x06064b50, // ZIP64 end of central directory signature
73 | END64SIGFIRST: 0x50,
74 | END64SUB: 24, // number of entries on this disk
75 | END64TOT: 32, // total number of entries
76 | END64SIZ: 40,
77 | END64OFF: 48,
78 |
79 | /* Compression methods */
80 | STORED: 0, // no compression
81 | SHRUNK: 1, // shrunk
82 | REDUCED1: 2, // reduced with compression factor 1
83 | REDUCED2: 3, // reduced with compression factor 2
84 | REDUCED3: 4, // reduced with compression factor 3
85 | REDUCED4: 5, // reduced with compression factor 4
86 | IMPLODED: 6, // imploded
87 | // 7 reserved
88 | DEFLATED: 8, // deflated
89 | ENHANCED_DEFLATED: 9, // deflate64
90 | PKWARE: 10, // PKWare DCL imploded
91 | // 11 reserved
92 | BZIP2: 12, // compressed using BZIP2
93 | // 13 reserved
94 | LZMA: 14, // LZMA
95 | // 15-17 reserved
96 | IBM_TERSE: 18, // compressed using IBM TERSE
97 | IBM_LZ77: 19, // IBM LZ77 z architecture (PFS)
98 |
99 | /* General purpose bit flag */
100 | FLG_ENC: 0, // encrypted file
101 | FLG_COMP1: 1, // compression option
102 | FLG_COMP2: 2, // compression option
103 | FLG_DESC: 4, // data descriptor
104 | FLG_ENH: 8, // enhanced deflation
105 | FLG_STR: 16, // strong encryption
106 | FLG_LNG: 1024, // language encoding
107 | FLG_MSK: 4096, // mask header values
108 | FLG_ENTRY_ENC: 1,
109 |
110 | /* 4.5 Extensible data fields */
111 | EF_ID: 0,
112 | EF_SIZE: 2,
113 |
114 | /* Header IDs */
115 | ID_ZIP64: 0x0001,
116 | ID_AVINFO: 0x0007,
117 | ID_PFS: 0x0008,
118 | ID_OS2: 0x0009,
119 | ID_NTFS: 0x000a,
120 | ID_OPENVMS: 0x000c,
121 | ID_UNIX: 0x000d,
122 | ID_FORK: 0x000e,
123 | ID_PATCH: 0x000f,
124 | ID_X509_PKCS7: 0x0014,
125 | ID_X509_CERTID_F: 0x0015,
126 | ID_X509_CERTID_C: 0x0016,
127 | ID_STRONGENC: 0x0017,
128 | ID_RECORD_MGT: 0x0018,
129 | ID_X509_PKCS7_RL: 0x0019,
130 | ID_IBM1: 0x0065,
131 | ID_IBM2: 0x0066,
132 | ID_POSZIP: 0x4690,
133 |
134 | EF_ZIP64_OR_32: 0xffffffff,
135 | EF_ZIP64_OR_16: 0xffff,
136 | };
137 |
138 | const StreamZip = function (config) {
139 | let fd, fileSize, chunkSize, op, centralDirectory, closed;
140 | const ready = false,
141 | that = this,
142 | entries = config.storeEntries !== false ? {} : null,
143 | fileName = config.file,
144 | textDecoder = config.nameEncoding ? new TextDecoder(config.nameEncoding) : null;
145 |
146 | open();
147 |
148 | function open() {
149 | if (config.fd) {
150 | fd = config.fd;
151 | readFile();
152 | } else {
153 | fs.open(fileName, 'r', (err, f) => {
154 | if (err) {
155 | return that.emit('error', err);
156 | }
157 | fd = f;
158 | readFile();
159 | });
160 | }
161 | }
162 |
163 | function readFile() {
164 | fs.fstat(fd, (err, stat) => {
165 | if (err) {
166 | return that.emit('error', err);
167 | }
168 | fileSize = stat.size;
169 | chunkSize = config.chunkSize || Math.round(fileSize / 1000);
170 | chunkSize = Math.max(
171 | Math.min(chunkSize, Math.min(128 * 1024, fileSize)),
172 | Math.min(1024, fileSize)
173 | );
174 | readCentralDirectory();
175 | });
176 | }
177 |
178 | function readUntilFoundCallback(err, bytesRead) {
179 | if (err || !bytesRead) {
180 | return that.emit('error', err || new Error('Archive read error'));
181 | }
182 | let pos = op.lastPos;
183 | let bufferPosition = pos - op.win.position;
184 | const buffer = op.win.buffer;
185 | const minPos = op.minPos;
186 | while (--pos >= minPos && --bufferPosition >= 0) {
187 | if (buffer.length - bufferPosition >= 4 && buffer[bufferPosition] === op.firstByte) {
188 | // quick check first signature byte
189 | if (buffer.readUInt32LE(bufferPosition) === op.sig) {
190 | op.lastBufferPosition = bufferPosition;
191 | op.lastBytesRead = bytesRead;
192 | op.complete();
193 | return;
194 | }
195 | }
196 | }
197 | if (pos === minPos) {
198 | return that.emit('error', new Error('Bad archive'));
199 | }
200 | op.lastPos = pos + 1;
201 | op.chunkSize *= 2;
202 | if (op.chunkSize > op.totalReadLength) {
203 | return that.emit('error', new Error('Bad archive'));
204 | }
205 | const expandLength = Math.min(op.chunkSize, pos - minPos);
206 | op.win.expandLeft(expandLength, readUntilFoundCallback);
207 | }
208 |
209 | function readCentralDirectory() {
210 | const totalReadLength = Math.min(consts.ENDHDR + consts.MAXFILECOMMENT, fileSize);
211 | op = {
212 | win: new FileWindowBuffer(fd),
213 | totalReadLength,
214 | minPos: fileSize - totalReadLength,
215 | lastPos: fileSize,
216 | chunkSize: Math.min(1024, chunkSize),
217 | firstByte: consts.ENDSIGFIRST,
218 | sig: consts.ENDSIG,
219 | complete: readCentralDirectoryComplete,
220 | };
221 | op.win.read(fileSize - op.chunkSize, op.chunkSize, readUntilFoundCallback);
222 | }
223 |
224 | function readCentralDirectoryComplete() {
225 | const buffer = op.win.buffer;
226 | const pos = op.lastBufferPosition;
227 | try {
228 | centralDirectory = new CentralDirectoryHeader();
229 | centralDirectory.read(buffer.slice(pos, pos + consts.ENDHDR));
230 | centralDirectory.headerOffset = op.win.position + pos;
231 | if (centralDirectory.commentLength) {
232 | that.comment = buffer
233 | .slice(
234 | pos + consts.ENDHDR,
235 | pos + consts.ENDHDR + centralDirectory.commentLength
236 | )
237 | .toString();
238 | } else {
239 | that.comment = null;
240 | }
241 | that.entriesCount = centralDirectory.volumeEntries;
242 | that.centralDirectory = centralDirectory;
243 | if (
244 | (centralDirectory.volumeEntries === consts.EF_ZIP64_OR_16 &&
245 | centralDirectory.totalEntries === consts.EF_ZIP64_OR_16) ||
246 | centralDirectory.size === consts.EF_ZIP64_OR_32 ||
247 | centralDirectory.offset === consts.EF_ZIP64_OR_32
248 | ) {
249 | readZip64CentralDirectoryLocator();
250 | } else {
251 | op = {};
252 | readEntries();
253 | }
254 | } catch (err) {
255 | that.emit('error', err);
256 | }
257 | }
258 |
259 | function readZip64CentralDirectoryLocator() {
260 | const length = consts.ENDL64HDR;
261 | if (op.lastBufferPosition > length) {
262 | op.lastBufferPosition -= length;
263 | readZip64CentralDirectoryLocatorComplete();
264 | } else {
265 | op = {
266 | win: op.win,
267 | totalReadLength: length,
268 | minPos: op.win.position - length,
269 | lastPos: op.win.position,
270 | chunkSize: op.chunkSize,
271 | firstByte: consts.ENDL64SIGFIRST,
272 | sig: consts.ENDL64SIG,
273 | complete: readZip64CentralDirectoryLocatorComplete,
274 | };
275 | op.win.read(op.lastPos - op.chunkSize, op.chunkSize, readUntilFoundCallback);
276 | }
277 | }
278 |
279 | function readZip64CentralDirectoryLocatorComplete() {
280 | const buffer = op.win.buffer;
281 | const locHeader = new CentralDirectoryLoc64Header();
282 | locHeader.read(
283 | buffer.slice(op.lastBufferPosition, op.lastBufferPosition + consts.ENDL64HDR)
284 | );
285 | const readLength = fileSize - locHeader.headerOffset;
286 | op = {
287 | win: op.win,
288 | totalReadLength: readLength,
289 | minPos: locHeader.headerOffset,
290 | lastPos: op.lastPos,
291 | chunkSize: op.chunkSize,
292 | firstByte: consts.END64SIGFIRST,
293 | sig: consts.END64SIG,
294 | complete: readZip64CentralDirectoryComplete,
295 | };
296 | op.win.read(fileSize - op.chunkSize, op.chunkSize, readUntilFoundCallback);
297 | }
298 |
299 | function readZip64CentralDirectoryComplete() {
300 | const buffer = op.win.buffer;
301 | const zip64cd = new CentralDirectoryZip64Header();
302 | zip64cd.read(buffer.slice(op.lastBufferPosition, op.lastBufferPosition + consts.END64HDR));
303 | that.centralDirectory.volumeEntries = zip64cd.volumeEntries;
304 | that.centralDirectory.totalEntries = zip64cd.totalEntries;
305 | that.centralDirectory.size = zip64cd.size;
306 | that.centralDirectory.offset = zip64cd.offset;
307 | that.entriesCount = zip64cd.volumeEntries;
308 | op = {};
309 | readEntries();
310 | }
311 |
312 | function readEntries() {
313 | op = {
314 | win: new FileWindowBuffer(fd),
315 | pos: centralDirectory.offset,
316 | chunkSize,
317 | entriesLeft: centralDirectory.volumeEntries,
318 | };
319 | op.win.read(op.pos, Math.min(chunkSize, fileSize - op.pos), readEntriesCallback);
320 | }
321 |
322 | function readEntriesCallback(err, bytesRead) {
323 | if (err || !bytesRead) {
324 | return that.emit('error', err || new Error('Entries read error'));
325 | }
326 | let bufferPos = op.pos - op.win.position;
327 | let entry = op.entry;
328 | const buffer = op.win.buffer;
329 | const bufferLength = buffer.length;
330 | try {
331 | while (op.entriesLeft > 0) {
332 | if (!entry) {
333 | entry = new ZipEntry();
334 | entry.readHeader(buffer, bufferPos);
335 | entry.headerOffset = op.win.position + bufferPos;
336 | op.entry = entry;
337 | op.pos += consts.CENHDR;
338 | bufferPos += consts.CENHDR;
339 | }
340 | const entryHeaderSize = entry.fnameLen + entry.extraLen + entry.comLen;
341 | const advanceBytes = entryHeaderSize + (op.entriesLeft > 1 ? consts.CENHDR : 0);
342 | if (bufferLength - bufferPos < advanceBytes) {
343 | op.win.moveRight(chunkSize, readEntriesCallback, bufferPos);
344 | op.move = true;
345 | return;
346 | }
347 | entry.read(buffer, bufferPos, textDecoder);
348 | if (!config.skipEntryNameValidation) {
349 | entry.validateName();
350 | }
351 | if (entries) {
352 | entries[entry.name] = entry;
353 | }
354 | that.emit('entry', entry);
355 | op.entry = entry = null;
356 | op.entriesLeft--;
357 | op.pos += entryHeaderSize;
358 | bufferPos += entryHeaderSize;
359 | }
360 | that.emit('ready');
361 | } catch (err) {
362 | that.emit('error', err);
363 | }
364 | }
365 |
366 | function checkEntriesExist() {
367 | if (!entries) {
368 | throw new Error('storeEntries disabled');
369 | }
370 | }
371 |
372 | Object.defineProperty(this, 'ready', {
373 | get() {
374 | return ready;
375 | },
376 | });
377 |
378 | this.entry = function (name) {
379 | checkEntriesExist();
380 | return entries[name];
381 | };
382 |
383 | this.entries = function () {
384 | checkEntriesExist();
385 | return entries;
386 | };
387 |
388 | this.stream = function (entry, callback) {
389 | return this.openEntry(
390 | entry,
391 | (err, entry) => {
392 | if (err) {
393 | return callback(err);
394 | }
395 | const offset = dataOffset(entry);
396 | let entryStream = new EntryDataReaderStream(fd, offset, entry.compressedSize);
397 | if (entry.method === consts.STORED) {
398 | // nothing to do
399 | } else if (entry.method === consts.DEFLATED) {
400 | entryStream = entryStream.pipe(zlib.createInflateRaw());
401 | } else {
402 | return callback(new Error('Unknown compression method: ' + entry.method));
403 | }
404 | if (canVerifyCrc(entry)) {
405 | entryStream = entryStream.pipe(
406 | new EntryVerifyStream(entryStream, entry.crc, entry.size)
407 | );
408 | }
409 | callback(null, entryStream);
410 | },
411 | false
412 | );
413 | };
414 |
415 | this.entryDataSync = function (entry) {
416 | let err = null;
417 | this.openEntry(
418 | entry,
419 | (e, en) => {
420 | err = e;
421 | entry = en;
422 | },
423 | true
424 | );
425 | if (err) {
426 | throw err;
427 | }
428 | let data = Buffer.alloc(entry.compressedSize);
429 | new FsRead(fd, data, 0, entry.compressedSize, dataOffset(entry), (e) => {
430 | err = e;
431 | }).read(true);
432 | if (err) {
433 | throw err;
434 | }
435 | if (entry.method === consts.STORED) {
436 | // nothing to do
437 | } else if (entry.method === consts.DEFLATED || entry.method === consts.ENHANCED_DEFLATED) {
438 | data = zlib.inflateRawSync(data);
439 | } else {
440 | throw new Error('Unknown compression method: ' + entry.method);
441 | }
442 | if (data.length !== entry.size) {
443 | throw new Error('Invalid size');
444 | }
445 | if (canVerifyCrc(entry)) {
446 | const verify = new CrcVerify(entry.crc, entry.size);
447 | verify.data(data);
448 | }
449 | return data;
450 | };
451 |
452 | this.openEntry = function (entry, callback, sync) {
453 | if (typeof entry === 'string') {
454 | checkEntriesExist();
455 | entry = entries[entry];
456 | if (!entry) {
457 | return callback(new Error('Entry not found'));
458 | }
459 | }
460 | if (!entry.isFile) {
461 | return callback(new Error('Entry is not file'));
462 | }
463 | if (!fd) {
464 | return callback(new Error('Archive closed'));
465 | }
466 | const buffer = Buffer.alloc(consts.LOCHDR);
467 | new FsRead(fd, buffer, 0, buffer.length, entry.offset, (err) => {
468 | if (err) {
469 | return callback(err);
470 | }
471 | let readEx;
472 | try {
473 | entry.readDataHeader(buffer);
474 | if (entry.encrypted) {
475 | readEx = new Error('Entry encrypted');
476 | }
477 | } catch (ex) {
478 | readEx = ex;
479 | }
480 | callback(readEx, entry);
481 | }).read(sync);
482 | };
483 |
484 | function dataOffset(entry) {
485 | return entry.offset + consts.LOCHDR + entry.fnameLen + entry.extraLen;
486 | }
487 |
488 | function canVerifyCrc(entry) {
489 | // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
490 | return (entry.flags & 0x8) !== 0x8;
491 | }
492 |
493 | function extract(entry, outPath, callback) {
494 | that.stream(entry, (err, stm) => {
495 | if (err) {
496 | callback(err);
497 | } else {
498 | let fsStm, errThrown;
499 | stm.on('error', (err) => {
500 | errThrown = err;
501 | if (fsStm) {
502 | stm.unpipe(fsStm);
503 | fsStm.close(() => {
504 | callback(err);
505 | });
506 | }
507 | });
508 | fs.open(outPath, 'w', (err, fdFile) => {
509 | if (err) {
510 | return callback(err);
511 | }
512 | if (errThrown) {
513 | fs.close(fdFile, () => {
514 | callback(errThrown);
515 | });
516 | return;
517 | }
518 | fsStm = fs.createWriteStream(outPath, { fd: fdFile });
519 | fsStm.on('finish', () => {
520 | that.emit('extract', entry, outPath);
521 | if (!errThrown) {
522 | callback();
523 | }
524 | });
525 | stm.pipe(fsStm);
526 | });
527 | }
528 | });
529 | }
530 |
531 | function createDirectories(baseDir, dirs, callback) {
532 | if (!dirs.length) {
533 | return callback();
534 | }
535 | let dir = dirs.shift();
536 | dir = path.join(baseDir, path.join(...dir));
537 | fs.mkdir(dir, { recursive: true }, (err) => {
538 | if (err && err.code !== 'EEXIST') {
539 | return callback(err);
540 | }
541 | createDirectories(baseDir, dirs, callback);
542 | });
543 | }
544 |
545 | function extractFiles(baseDir, baseRelPath, files, callback, extractedCount) {
546 | if (!files.length) {
547 | return callback(null, extractedCount);
548 | }
549 | const file = files.shift();
550 | const targetPath = path.join(baseDir, file.name.replace(baseRelPath, ''));
551 | extract(file, targetPath, (err) => {
552 | if (err) {
553 | return callback(err, extractedCount);
554 | }
555 | extractFiles(baseDir, baseRelPath, files, callback, extractedCount + 1);
556 | });
557 | }
558 |
559 | this.extract = function (entry, outPath, callback) {
560 | let entryName = entry || '';
561 | if (typeof entry === 'string') {
562 | entry = this.entry(entry);
563 | if (entry) {
564 | entryName = entry.name;
565 | } else {
566 | if (entryName.length && entryName[entryName.length - 1] !== '/') {
567 | entryName += '/';
568 | }
569 | }
570 | }
571 | if (!entry || entry.isDirectory) {
572 | const files = [],
573 | dirs = [],
574 | allDirs = {};
575 | for (const e in entries) {
576 | if (
577 | Object.prototype.hasOwnProperty.call(entries, e) &&
578 | e.lastIndexOf(entryName, 0) === 0
579 | ) {
580 | let relPath = e.replace(entryName, '');
581 | const childEntry = entries[e];
582 | if (childEntry.isFile) {
583 | files.push(childEntry);
584 | relPath = path.dirname(relPath);
585 | }
586 | if (relPath && !allDirs[relPath] && relPath !== '.') {
587 | allDirs[relPath] = true;
588 | let parts = relPath.split('/').filter((f) => {
589 | return f;
590 | });
591 | if (parts.length) {
592 | dirs.push(parts);
593 | }
594 | while (parts.length > 1) {
595 | parts = parts.slice(0, parts.length - 1);
596 | const partsPath = parts.join('/');
597 | if (allDirs[partsPath] || partsPath === '.') {
598 | break;
599 | }
600 | allDirs[partsPath] = true;
601 | dirs.push(parts);
602 | }
603 | }
604 | }
605 | }
606 | dirs.sort((x, y) => {
607 | return x.length - y.length;
608 | });
609 | if (dirs.length) {
610 | createDirectories(outPath, dirs, (err) => {
611 | if (err) {
612 | callback(err);
613 | } else {
614 | extractFiles(outPath, entryName, files, callback, 0);
615 | }
616 | });
617 | } else {
618 | extractFiles(outPath, entryName, files, callback, 0);
619 | }
620 | } else {
621 | fs.stat(outPath, (err, stat) => {
622 | if (stat && stat.isDirectory()) {
623 | extract(entry, path.join(outPath, path.basename(entry.name)), callback);
624 | } else {
625 | extract(entry, outPath, callback);
626 | }
627 | });
628 | }
629 | };
630 |
631 | this.close = function (callback) {
632 | if (closed || !fd) {
633 | closed = true;
634 | if (callback) {
635 | callback();
636 | }
637 | } else {
638 | closed = true;
639 | fs.close(fd, (err) => {
640 | fd = null;
641 | if (callback) {
642 | callback(err);
643 | }
644 | });
645 | }
646 | };
647 |
648 | const originalEmit = events.EventEmitter.prototype.emit;
649 | this.emit = function (...args) {
650 | if (!closed) {
651 | return originalEmit.call(this, ...args);
652 | }
653 | };
654 | };
655 |
656 | StreamZip.setFs = function (customFs) {
657 | fs = customFs;
658 | };
659 |
660 | StreamZip.debugLog = (...args) => {
661 | if (StreamZip.debug) {
662 | // eslint-disable-next-line no-console
663 | console.log(...args);
664 | }
665 | };
666 |
667 | util.inherits(StreamZip, events.EventEmitter);
668 |
669 | const propZip = Symbol('zip');
670 |
671 | StreamZip.async = class StreamZipAsync extends events.EventEmitter {
672 | constructor(config) {
673 | super();
674 |
675 | const zip = new StreamZip(config);
676 |
677 | zip.on('entry', (entry) => this.emit('entry', entry));
678 | zip.on('extract', (entry, outPath) => this.emit('extract', entry, outPath));
679 |
680 | this[propZip] = new Promise((resolve, reject) => {
681 | zip.on('ready', () => {
682 | zip.removeListener('error', reject);
683 | resolve(zip);
684 | });
685 | zip.on('error', reject);
686 | });
687 | }
688 |
689 | get entriesCount() {
690 | return this[propZip].then((zip) => zip.entriesCount);
691 | }
692 |
693 | get comment() {
694 | return this[propZip].then((zip) => zip.comment);
695 | }
696 |
697 | async entry(name) {
698 | const zip = await this[propZip];
699 | return zip.entry(name);
700 | }
701 |
702 | async entries() {
703 | const zip = await this[propZip];
704 | return zip.entries();
705 | }
706 |
707 | async stream(entry) {
708 | const zip = await this[propZip];
709 | return new Promise((resolve, reject) => {
710 | zip.stream(entry, (err, stm) => {
711 | if (err) {
712 | reject(err);
713 | } else {
714 | resolve(stm);
715 | }
716 | });
717 | });
718 | }
719 |
720 | async entryData(entry) {
721 | const stm = await this.stream(entry);
722 | return new Promise((resolve, reject) => {
723 | const data = [];
724 | stm.on('data', (chunk) => data.push(chunk));
725 | stm.on('end', () => {
726 | resolve(Buffer.concat(data));
727 | });
728 | stm.on('error', (err) => {
729 | stm.removeAllListeners('end');
730 | reject(err);
731 | });
732 | });
733 | }
734 |
735 | async extract(entry, outPath) {
736 | const zip = await this[propZip];
737 | return new Promise((resolve, reject) => {
738 | zip.extract(entry, outPath, (err, res) => {
739 | if (err) {
740 | reject(err);
741 | } else {
742 | resolve(res);
743 | }
744 | });
745 | });
746 | }
747 |
748 | async close() {
749 | const zip = await this[propZip];
750 | return new Promise((resolve, reject) => {
751 | zip.close((err) => {
752 | if (err) {
753 | reject(err);
754 | } else {
755 | resolve();
756 | }
757 | });
758 | });
759 | }
760 | };
761 |
762 | class CentralDirectoryHeader {
763 | read(data) {
764 | if (data.length !== consts.ENDHDR || data.readUInt32LE(0) !== consts.ENDSIG) {
765 | throw new Error('Invalid central directory');
766 | }
767 | // number of entries on this volume
768 | this.volumeEntries = data.readUInt16LE(consts.ENDSUB);
769 | // total number of entries
770 | this.totalEntries = data.readUInt16LE(consts.ENDTOT);
771 | // central directory size in bytes
772 | this.size = data.readUInt32LE(consts.ENDSIZ);
773 | // offset of first CEN header
774 | this.offset = data.readUInt32LE(consts.ENDOFF);
775 | // zip file comment length
776 | this.commentLength = data.readUInt16LE(consts.ENDCOM);
777 | }
778 | }
779 |
780 | class CentralDirectoryLoc64Header {
781 | read(data) {
782 | if (data.length !== consts.ENDL64HDR || data.readUInt32LE(0) !== consts.ENDL64SIG) {
783 | throw new Error('Invalid zip64 central directory locator');
784 | }
785 | // ZIP64 EOCD header offset
786 | this.headerOffset = readUInt64LE(data, consts.ENDL64OFS);
787 | }
788 | }
789 |
790 | class CentralDirectoryZip64Header {
791 | read(data) {
792 | if (data.length !== consts.END64HDR || data.readUInt32LE(0) !== consts.END64SIG) {
793 | throw new Error('Invalid central directory');
794 | }
795 | // number of entries on this volume
796 | this.volumeEntries = readUInt64LE(data, consts.END64SUB);
797 | // total number of entries
798 | this.totalEntries = readUInt64LE(data, consts.END64TOT);
799 | // central directory size in bytes
800 | this.size = readUInt64LE(data, consts.END64SIZ);
801 | // offset of first CEN header
802 | this.offset = readUInt64LE(data, consts.END64OFF);
803 | }
804 | }
805 |
806 | class ZipEntry {
807 | readHeader(data, offset) {
808 | // data should be 46 bytes and start with "PK 01 02"
809 | if (data.length < offset + consts.CENHDR || data.readUInt32LE(offset) !== consts.CENSIG) {
810 | throw new Error('Invalid entry header');
811 | }
812 | // version made by
813 | this.verMade = data.readUInt16LE(offset + consts.CENVEM);
814 | // version needed to extract
815 | this.version = data.readUInt16LE(offset + consts.CENVER);
816 | // encrypt, decrypt flags
817 | this.flags = data.readUInt16LE(offset + consts.CENFLG);
818 | // compression method
819 | this.method = data.readUInt16LE(offset + consts.CENHOW);
820 | // modification time (2 bytes time, 2 bytes date)
821 | const timebytes = data.readUInt16LE(offset + consts.CENTIM);
822 | const datebytes = data.readUInt16LE(offset + consts.CENTIM + 2);
823 | this.time = parseZipTime(timebytes, datebytes);
824 |
825 | // uncompressed file crc-32 value
826 | this.crc = data.readUInt32LE(offset + consts.CENCRC);
827 | // compressed size
828 | this.compressedSize = data.readUInt32LE(offset + consts.CENSIZ);
829 | // uncompressed size
830 | this.size = data.readUInt32LE(offset + consts.CENLEN);
831 | // filename length
832 | this.fnameLen = data.readUInt16LE(offset + consts.CENNAM);
833 | // extra field length
834 | this.extraLen = data.readUInt16LE(offset + consts.CENEXT);
835 | // file comment length
836 | this.comLen = data.readUInt16LE(offset + consts.CENCOM);
837 | // volume number start
838 | this.diskStart = data.readUInt16LE(offset + consts.CENDSK);
839 | // internal file attributes
840 | this.inattr = data.readUInt16LE(offset + consts.CENATT);
841 | // external file attributes
842 | this.attr = data.readUInt32LE(offset + consts.CENATX);
843 | // LOC header offset
844 | this.offset = data.readUInt32LE(offset + consts.CENOFF);
845 | }
846 |
847 | readDataHeader(data) {
848 | // 30 bytes and should start with "PK\003\004"
849 | if (data.readUInt32LE(0) !== consts.LOCSIG) {
850 | throw new Error('Invalid local header');
851 | }
852 | // version needed to extract
853 | this.version = data.readUInt16LE(consts.LOCVER);
854 | // general purpose bit flag
855 | this.flags = data.readUInt16LE(consts.LOCFLG);
856 | // compression method
857 | this.method = data.readUInt16LE(consts.LOCHOW);
858 | // modification time (2 bytes time ; 2 bytes date)
859 | const timebytes = data.readUInt16LE(consts.LOCTIM);
860 | const datebytes = data.readUInt16LE(consts.LOCTIM + 2);
861 | this.time = parseZipTime(timebytes, datebytes);
862 |
863 | // uncompressed file crc-32 value
864 | this.crc = data.readUInt32LE(consts.LOCCRC) || this.crc;
865 | // compressed size
866 | const compressedSize = data.readUInt32LE(consts.LOCSIZ);
867 | if (compressedSize && compressedSize !== consts.EF_ZIP64_OR_32) {
868 | this.compressedSize = compressedSize;
869 | }
870 | // uncompressed size
871 | const size = data.readUInt32LE(consts.LOCLEN);
872 | if (size && size !== consts.EF_ZIP64_OR_32) {
873 | this.size = size;
874 | }
875 | // filename length
876 | this.fnameLen = data.readUInt16LE(consts.LOCNAM);
877 | // extra field length
878 | this.extraLen = data.readUInt16LE(consts.LOCEXT);
879 | }
880 |
881 | read(data, offset, textDecoder) {
882 | const nameData = data.slice(offset, (offset += this.fnameLen));
883 | this.name = textDecoder
884 | ? textDecoder.decode(new Uint8Array(nameData))
885 | : nameData.toString('utf8');
886 | const lastChar = data[offset - 1];
887 | this.isDirectory = lastChar === 47 || lastChar === 92;
888 |
889 | if (this.extraLen) {
890 | this.readExtra(data, offset);
891 | offset += this.extraLen;
892 | }
893 | this.comment = this.comLen ? data.slice(offset, offset + this.comLen).toString() : null;
894 | }
895 |
896 | validateName() {
897 | if (/\\|^\w+:|^\/|(^|\/)\.\.(\/|$)/.test(this.name)) {
898 | throw new Error('Malicious entry: ' + this.name);
899 | }
900 | }
901 |
902 | readExtra(data, offset) {
903 | let signature, size;
904 | const maxPos = offset + this.extraLen;
905 | while (offset < maxPos) {
906 | signature = data.readUInt16LE(offset);
907 | offset += 2;
908 | size = data.readUInt16LE(offset);
909 | offset += 2;
910 | if (consts.ID_ZIP64 === signature) {
911 | this.parseZip64Extra(data, offset, size);
912 | }
913 | offset += size;
914 | }
915 | }
916 |
917 | parseZip64Extra(data, offset, length) {
918 | if (length >= 8 && this.size === consts.EF_ZIP64_OR_32) {
919 | this.size = readUInt64LE(data, offset);
920 | offset += 8;
921 | length -= 8;
922 | }
923 | if (length >= 8 && this.compressedSize === consts.EF_ZIP64_OR_32) {
924 | this.compressedSize = readUInt64LE(data, offset);
925 | offset += 8;
926 | length -= 8;
927 | }
928 | if (length >= 8 && this.offset === consts.EF_ZIP64_OR_32) {
929 | this.offset = readUInt64LE(data, offset);
930 | offset += 8;
931 | length -= 8;
932 | }
933 | if (length >= 4 && this.diskStart === consts.EF_ZIP64_OR_16) {
934 | this.diskStart = data.readUInt32LE(offset);
935 | // offset += 4; length -= 4;
936 | }
937 | }
938 |
939 | get encrypted() {
940 | return (this.flags & consts.FLG_ENTRY_ENC) === consts.FLG_ENTRY_ENC;
941 | }
942 |
943 | get isFile() {
944 | return !this.isDirectory;
945 | }
946 | }
947 |
948 | class FsRead {
949 | constructor(fd, buffer, offset, length, position, callback) {
950 | this.fd = fd;
951 | this.buffer = buffer;
952 | this.offset = offset;
953 | this.length = length;
954 | this.position = position;
955 | this.callback = callback;
956 | this.bytesRead = 0;
957 | this.waiting = false;
958 | }
959 |
960 | read(sync) {
961 | StreamZip.debugLog('read', this.position, this.bytesRead, this.length, this.offset);
962 | this.waiting = true;
963 | let err;
964 | if (sync) {
965 | let bytesRead = 0;
966 | try {
967 | bytesRead = fs.readSync(
968 | this.fd,
969 | this.buffer,
970 | this.offset + this.bytesRead,
971 | this.length - this.bytesRead,
972 | this.position + this.bytesRead
973 | );
974 | } catch (e) {
975 | err = e;
976 | }
977 | this.readCallback(sync, err, err ? null : bytesRead);
978 | } else {
979 | fs.read(
980 | this.fd,
981 | this.buffer,
982 | this.offset + this.bytesRead,
983 | this.length - this.bytesRead,
984 | this.position + this.bytesRead,
985 | this.readCallback.bind(this, sync)
986 | );
987 | }
988 | }
989 |
990 | readCallback(sync, err, bytesRead) {
991 | if (typeof bytesRead === 'number') {
992 | this.bytesRead += bytesRead;
993 | }
994 | if (err || !bytesRead || this.bytesRead === this.length) {
995 | this.waiting = false;
996 | return this.callback(err, this.bytesRead);
997 | } else {
998 | this.read(sync);
999 | }
1000 | }
1001 | }
1002 |
1003 | class FileWindowBuffer {
1004 | constructor(fd) {
1005 | this.position = 0;
1006 | this.buffer = Buffer.alloc(0);
1007 | this.fd = fd;
1008 | this.fsOp = null;
1009 | }
1010 |
1011 | checkOp() {
1012 | if (this.fsOp && this.fsOp.waiting) {
1013 | throw new Error('Operation in progress');
1014 | }
1015 | }
1016 |
1017 | read(pos, length, callback) {
1018 | this.checkOp();
1019 | if (this.buffer.length < length) {
1020 | this.buffer = Buffer.alloc(length);
1021 | }
1022 | this.position = pos;
1023 | this.fsOp = new FsRead(this.fd, this.buffer, 0, length, this.position, callback).read();
1024 | }
1025 |
1026 | expandLeft(length, callback) {
1027 | this.checkOp();
1028 | this.buffer = Buffer.concat([Buffer.alloc(length), this.buffer]);
1029 | this.position -= length;
1030 | if (this.position < 0) {
1031 | this.position = 0;
1032 | }
1033 | this.fsOp = new FsRead(this.fd, this.buffer, 0, length, this.position, callback).read();
1034 | }
1035 |
1036 | expandRight(length, callback) {
1037 | this.checkOp();
1038 | const offset = this.buffer.length;
1039 | this.buffer = Buffer.concat([this.buffer, Buffer.alloc(length)]);
1040 | this.fsOp = new FsRead(
1041 | this.fd,
1042 | this.buffer,
1043 | offset,
1044 | length,
1045 | this.position + offset,
1046 | callback
1047 | ).read();
1048 | }
1049 |
1050 | moveRight(length, callback, shift) {
1051 | this.checkOp();
1052 | if (shift) {
1053 | this.buffer.copy(this.buffer, 0, shift);
1054 | } else {
1055 | shift = 0;
1056 | }
1057 | this.position += shift;
1058 | this.fsOp = new FsRead(
1059 | this.fd,
1060 | this.buffer,
1061 | this.buffer.length - shift,
1062 | shift,
1063 | this.position + this.buffer.length - shift,
1064 | callback
1065 | ).read();
1066 | }
1067 | }
1068 |
1069 | class EntryDataReaderStream extends stream.Readable {
1070 | constructor(fd, offset, length) {
1071 | super();
1072 | this.fd = fd;
1073 | this.offset = offset;
1074 | this.length = length;
1075 | this.pos = 0;
1076 | this.readCallback = this.readCallback.bind(this);
1077 | }
1078 |
1079 | _read(n) {
1080 | const buffer = Buffer.alloc(Math.min(n, this.length - this.pos));
1081 | if (buffer.length) {
1082 | fs.read(this.fd, buffer, 0, buffer.length, this.offset + this.pos, this.readCallback);
1083 | } else {
1084 | this.push(null);
1085 | }
1086 | }
1087 |
1088 | readCallback(err, bytesRead, buffer) {
1089 | this.pos += bytesRead;
1090 | if (err) {
1091 | this.emit('error', err);
1092 | this.push(null);
1093 | } else if (!bytesRead) {
1094 | this.push(null);
1095 | } else {
1096 | if (bytesRead !== buffer.length) {
1097 | buffer = buffer.slice(0, bytesRead);
1098 | }
1099 | this.push(buffer);
1100 | }
1101 | }
1102 | }
1103 |
1104 | class EntryVerifyStream extends stream.Transform {
1105 | constructor(baseStm, crc, size) {
1106 | super();
1107 | this.verify = new CrcVerify(crc, size);
1108 | baseStm.on('error', (e) => {
1109 | this.emit('error', e);
1110 | });
1111 | }
1112 |
1113 | _transform(data, encoding, callback) {
1114 | let err;
1115 | try {
1116 | this.verify.data(data);
1117 | } catch (e) {
1118 | err = e;
1119 | }
1120 | callback(err, data);
1121 | }
1122 | }
1123 |
1124 | class CrcVerify {
1125 | constructor(crc, size) {
1126 | this.crc = crc;
1127 | this.size = size;
1128 | this.state = {
1129 | crc: ~0,
1130 | size: 0,
1131 | };
1132 | }
1133 |
1134 | data(data) {
1135 | const crcTable = CrcVerify.getCrcTable();
1136 | let crc = this.state.crc;
1137 | let off = 0;
1138 | let len = data.length;
1139 | while (--len >= 0) {
1140 | crc = crcTable[(crc ^ data[off++]) & 0xff] ^ (crc >>> 8);
1141 | }
1142 | this.state.crc = crc;
1143 | this.state.size += data.length;
1144 | if (this.state.size >= this.size) {
1145 | const buf = Buffer.alloc(4);
1146 | buf.writeInt32LE(~this.state.crc & 0xffffffff, 0);
1147 | crc = buf.readUInt32LE(0);
1148 | if (crc !== this.crc) {
1149 | throw new Error('Invalid CRC');
1150 | }
1151 | if (this.state.size !== this.size) {
1152 | throw new Error('Invalid size');
1153 | }
1154 | }
1155 | }
1156 |
1157 | static getCrcTable() {
1158 | let crcTable = CrcVerify.crcTable;
1159 | if (!crcTable) {
1160 | CrcVerify.crcTable = crcTable = [];
1161 | const b = Buffer.alloc(4);
1162 | for (let n = 0; n < 256; n++) {
1163 | let c = n;
1164 | for (let k = 8; --k >= 0; ) {
1165 | if ((c & 1) !== 0) {
1166 | c = 0xedb88320 ^ (c >>> 1);
1167 | } else {
1168 | c = c >>> 1;
1169 | }
1170 | }
1171 | if (c < 0) {
1172 | b.writeInt32LE(c, 0);
1173 | c = b.readUInt32LE(0);
1174 | }
1175 | crcTable[n] = c;
1176 | }
1177 | }
1178 | return crcTable;
1179 | }
1180 | }
1181 |
1182 | function parseZipTime(timebytes, datebytes) {
1183 | const timebits = toBits(timebytes, 16);
1184 | const datebits = toBits(datebytes, 16);
1185 |
1186 | const mt = {
1187 | h: parseInt(timebits.slice(0, 5).join(''), 2),
1188 | m: parseInt(timebits.slice(5, 11).join(''), 2),
1189 | s: parseInt(timebits.slice(11, 16).join(''), 2) * 2,
1190 | Y: parseInt(datebits.slice(0, 7).join(''), 2) + 1980,
1191 | M: parseInt(datebits.slice(7, 11).join(''), 2),
1192 | D: parseInt(datebits.slice(11, 16).join(''), 2),
1193 | };
1194 | const dt_str = [mt.Y, mt.M, mt.D].join('-') + ' ' + [mt.h, mt.m, mt.s].join(':') + ' GMT+0';
1195 | return new Date(dt_str).getTime();
1196 | }
1197 |
1198 | function toBits(dec, size) {
1199 | let b = (dec >>> 0).toString(2);
1200 | while (b.length < size) {
1201 | b = '0' + b;
1202 | }
1203 | return b.split('');
1204 | }
1205 |
1206 | function readUInt64LE(buffer, offset) {
1207 | return buffer.readUInt32LE(offset + 4) * 0x0000000100000000 + buffer.readUInt32LE(offset);
1208 | }
1209 |
1210 | module.exports = StreamZip;
1211 |
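A note on `parseZipTime` above: ZIP headers store modification times as two 16-bit MS-DOS words. The time word packs hours (5 bits), minutes (6 bits) and seconds÷2 (5 bits); the date word packs years-since-1980 (7 bits), month (4 bits) and day (5 bits). The string-splitting decode above is correct; for reference, here is an equivalent sketch using plain bit masks (a hypothetical helper, not part of the library):

```js
// Decode the 16-bit MS-DOS time/date words found in ZIP headers.
// Bit layout (high to low): time = hhhhh mmmmmm sssss (seconds stored / 2),
// date = yyyyyyy mmmm ddddd (year stored as an offset from 1980).
function parseZipTimeMasked(timeWord, dateWord) {
    const h = (timeWord >>> 11) & 0x1f;
    const m = (timeWord >>> 5) & 0x3f;
    const s = (timeWord & 0x1f) * 2;
    const Y = ((dateWord >>> 9) & 0x7f) + 1980;
    const M = (dateWord >>> 5) & 0x0f; // 1-based month
    const D = dateWord & 0x1f;
    // Date.UTC expects a 0-based month index.
    return Date.UTC(Y, M - 1, D, h, m, s);
}

// e.g. 0x645c encodes 12:34:56 and 0x5327 encodes 2021-09-07, so:
// parseZipTimeMasked(0x645c, 0x5327) === Date.UTC(2021, 8, 7, 12, 34, 56)
```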
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "node-stream-zip",
3 | "version": "1.15.0",
4 | "description": "node.js library for reading and extraction of ZIP archives",
5 | "keywords": [
6 | "zip",
7 | "archive",
8 | "unzip",
9 | "stream"
10 | ],
11 | "homepage": "https://github.com/antelle/node-stream-zip",
12 | "author": "Antelle (https://github.com/antelle)",
13 | "bugs": {
14 | "email": "antelle.net@gmail.com",
15 | "url": "https://github.com/antelle/node-stream-zip/issues"
16 | },
17 | "license": "MIT",
18 | "files": [
19 | "LICENSE",
20 | "node_stream_zip.js",
21 | "node_stream_zip.d.ts"
22 | ],
23 | "scripts": {
24 | "lint": "eslint node_stream_zip.js test/tests.js",
25 | "check-types": "tsc node_stream_zip.d.ts",
26 | "test": "nodeunit test/tests.js"
27 | },
28 | "main": "node_stream_zip.js",
29 | "types": "node_stream_zip.d.ts",
30 | "repository": {
31 | "type": "git",
32 | "url": "https://github.com/antelle/node-stream-zip.git"
33 | },
34 | "engines": {
35 | "node": ">=0.12.0"
36 | },
37 | "devDependencies": {
38 | "@types/node": "^14.14.6",
39 | "eslint": "^7.19.0",
40 | "nodeunit": "^0.11.3",
41 | "prettier": "^2.2.1"
42 | },
43 | "funding": {
44 | "type": "github",
45 | "url": "https://github.com/sponsors/antelle"
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/release-notes.md:
--------------------------------------------------------------------------------
1 | Release notes
2 | -------------
3 | ##### v1.15.0 (2021-09-07)
4 | `+` added `nameEncoding` parameter
5 |
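A minimal sketch of how this option might be used, assuming `nameEncoding` accepts a standard Node.js Buffer encoding for archives whose entry names are not UTF-8 (the file name is illustrative):

```js
const StreamZip = require('node-stream-zip');

// Decode entry names as latin1 instead of the default.
const zip = new StreamZip.async({
    file: 'legacy-archive.zip',
    nameEncoding: 'latin1',
});
```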
6 | ##### v1.14.0 (2021-07-27)
7 | `+` exported StreamZipAsync type
8 |
9 | ##### v1.13.6 (2021-06-23)
10 | `-` fixed compatibility with old node.js
11 |
12 | ##### v1.13.5 (2021-06-08)
13 | `-` fix #77: using recursive mkdir for deeply nested archives
14 |
15 | ##### v1.13.4 (2021-05-04)
16 | `+` funding link
17 |
18 | ##### v1.13.3 (2021-04-01)
19 | `-` fixed typings (entry)
20 |
21 | ##### v1.13.2 (2021-02-23)
22 | `-` fixed typings (entryData)
23 |
24 | ##### v1.13.1 (2021-02-14)
25 | `-` added missing `close` definition
26 |
27 | ##### v1.13.0 (2021-02-05)
28 | `+` added simple Promise api available as StreamZip.async
29 | `*` all methods now throw Error, not string
30 |
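Roughly, the Promise API introduced here reads like this (archive and entry names are illustrative):

```js
const StreamZip = require('node-stream-zip');

async function readOneEntry() {
    const zip = new StreamZip.async({ file: 'archive.zip' });
    try {
        const data = await zip.entryData('path/inside/zip.txt');
        console.log(data.toString('utf8'));
    } finally {
        await zip.close();
    }
}
```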
31 | ##### v1.12.0 (2020-11-15)
32 | `+` added an option to pass fd instead of file name
33 |
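A sketch of the file-descriptor option: you open the archive yourself and pass `fd` in place of `file` (path is illustrative):

```js
const fs = require('fs');
const StreamZip = require('node-stream-zip');

const fd = fs.openSync('archive.zip', 'r');
const zip = new StreamZip.async({ fd });
```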
34 | ##### v1.11.7 (2020-11-06)
35 | `-` fixed type definitions
36 |
37 | ##### v1.11.6 (2020-11-04)
38 | `-` fixed type definitions
39 |
40 | ##### v1.11.5 (2020-11-03)
41 | `-` improved type definitions
42 |
43 | ##### v1.11.4 (2020-10-31)
44 | `-` fixed type definitions
45 |
46 | ##### v1.11.3 (2020-08-14)
47 | `-` fixed parameter type definition
48 |
49 | ##### v1.11.2 (2020-06-02)
50 | `-` fixed some TypeScript definitions
51 |
52 | ##### v1.11.1 (2020-05-15)
53 | `-` fixed TypeScript interface definition
54 |
55 | ##### v1.11.0 (2020-05-13)
56 | `+` removed deflate64
57 |
58 | ##### v1.10.0 (2020-05-01)
59 | `+` missing file added
60 |
61 | ##### v1.10.0 (2020-05-01)
62 | `+` TypeScript interface
63 |
64 | ##### v1.9.2 (2020-04-28)
65 | `-` fixed close() when zip was not found
66 |
67 | ##### v1.9.1 (2020-01-14)
68 | `-` fixed callbacks in close()
69 |
70 | ##### v1.9.0 (2020-01-14)
71 | `*` closing a file cancels all pending events
72 |
73 | ##### v1.8.2 (2019-07-04)
74 | `+` upgraded modules
75 |
76 | ##### v1.8.1 (2019-07-04)
77 | `+` included the license
78 |
79 | ##### v1.8.0 (2019-01-30)
80 | `+` fixed deprecations
81 |
82 | ##### v1.7.0 (2018-04-20)
83 | `+` parsing time values
84 |
85 | ##### v1.6.0 (2018-03-22)
86 | `+` callback in `close` method
87 |
88 | ##### v1.5.0 (2018-02-28)
89 | `+` openEntry method
90 |
91 | ##### v1.4.2 (2017-12-02)
92 | `+` option to specify custom fs: `StreamZip.setFs`
93 |
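A sketch of `StreamZip.setFs`, assuming it accepts any object implementing the subset of the `fs` API the library calls (`open`, `read`, `close`, and so on) — here wrapping the real `fs` to log reads:

```js
const realFs = require('fs');
const StreamZip = require('node-stream-zip');

StreamZip.setFs({
    ...realFs,
    read(fd, buffer, offset, length, position, callback) {
        console.log('read at position', position);
        return realFs.read(fd, buffer, offset, length, position, callback);
    },
});
```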
94 | ##### v1.4.1 (2017-11-19)
95 | `-` fixed folder extraction
96 |
97 | ##### v1.4.0 (2017-10-28)
98 | Archives with malicious entries will throw an error
99 | `+` option to disable it: `skipEntryNameValidation`
100 |
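For the rare case where that check gets in the way (say, deliberately inspecting a known-malicious archive), the opt-out might look like this:

```js
const StreamZip = require('node-stream-zip');

// Not recommended: disables rejection of entry names that would
// escape the extraction directory (e.g. '../../evil').
const zip = new StreamZip.async({
    file: 'suspect.zip',
    skipEntryNameValidation: true,
});
```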
101 | ##### v1.3.8 (2017-10-27)
102 | Fix #20: throw errors
103 |
104 | ##### v1.3.7 (2017-01-16)
105 | Fixed compatibility with node.js v0.10
106 |
107 | ##### v1.3.6 (2017-01-03)
108 | Fix #14: error unpacking archives with a special comment
109 |
110 | ##### v1.3.5 (2016-11-26)
111 | Fix #12: descriptive error messages
112 |
113 | ##### v1.3.4 (2016-07-23)
114 | Fix #10: extraction of files larger than 4GB
115 |
116 | ##### v1.3.3 (2016-04-05)
117 | Fixed headerOffset bug
118 |
119 | ##### v1.3.2 (2016-03-20)
120 | Support 4GB+ ZIP64 archives
121 | `-` fix #5: correct parsing of ZIP64 headers
122 |
123 | ##### v1.3.1 (2015-12-19)
124 | ZIP64 unit test
125 |
126 | ##### v1.3.0 (2015-12-19)
127 | ZIP64 format support
128 |
129 | ##### v1.2.2 (2015-11-24)
130 | Tiny archives reading bugfix
131 | `-` fix #3: reading archives smaller than 1kb
132 |
133 | ##### v1.2.1 (2015-03-01)
134 | Exporting header offsets info
135 | `+` `headerOffset`, `centralDirectory`
136 |
137 | ##### v1.1.1 (2015-03-01)
138 | Bugfix
139 | `-` npm packaging bug fixed
140 |
141 | ##### v1.1.0 (2015-02-28)
142 | Sync read feature
143 | `+` `StreamZip.entryDataSync` method
144 |
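With the event-based API, `entryDataSync` reads an entry's content synchronously once the archive is ready — roughly (path is illustrative):

```js
const StreamZip = require('node-stream-zip');

const zip = new StreamZip({ file: 'archive.zip' });
zip.on('ready', () => {
    const data = zip.entryDataSync('README.md'); // returns a Buffer
    console.log(data.toString('utf8'));
    zip.close();
});
```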
145 | ##### v1.0.0 (2015-02-23)
146 | First stable release
147 |
--------------------------------------------------------------------------------
/test/content/BSDmakefile:
--------------------------------------------------------------------------------
1 | all:
2 | @echo "I need GNU make. Please run \`gmake\` instead."
3 |
--------------------------------------------------------------------------------
/test/content/README.md:
--------------------------------------------------------------------------------
1 | Evented I/O for V8 javascript.
2 | ===
3 |
4 | ### To build:
5 |
6 | Prerequisites (Unix only):
7 |
8 | * GCC 4.2 or newer
9 | * G++ 4.2 or newer
10 | * Python 2.6 or 2.7
11 | * GNU Make 3.81 or newer
12 | * libexecinfo (FreeBSD and OpenBSD only)
13 |
14 | Unix/Macintosh:
15 |
16 | ```sh
17 | ./configure
18 | make
19 | make install
20 | ```
21 |
22 | If your python binary is in a non-standard location or has a
23 | non-standard name, run the following instead:
24 |
25 | ```sh
26 | export PYTHON=/path/to/python
27 | $PYTHON ./configure
28 | make
29 | make install
30 | ```
31 |
32 | Prerequisites (Windows only):
33 |
34 | * Python 2.6 or 2.7
35 | * Visual Studio 2010 or 2012
36 |
37 | Windows:
38 |
39 | ```sh
40 | vcbuild nosign
41 | ```
42 |
43 | You can download pre-built binaries for various operating systems from
44 | [http://nodejs.org/download/](http://nodejs.org/download/). The Windows
45 | and OS X installers will prompt you for the location in which to install.
46 | The tarballs are self-contained; you can extract them to a local directory
47 | with:
48 |
49 | ```sh
50 | tar xzf /path/to/node-<version>-<platform>-<arch>.tar.gz
51 | ```
52 |
53 | Or system-wide with:
54 |
55 | ```sh
56 | cd /usr/local && tar --strip-components 1 -xzf \
57 |   /path/to/node-<version>-<platform>-<arch>.tar.gz
58 | ```
59 |
60 | ### To run the tests:
61 |
62 | Unix/Macintosh:
63 |
64 | ```sh
65 | make test
66 | ```
67 |
68 | Windows:
69 |
70 | ```sh
71 | vcbuild test
72 | ```
73 |
74 | ### To build the documentation:
75 |
76 | ```sh
77 | make doc
78 | ```
79 |
80 | ### To read the documentation:
81 |
82 | ```sh
83 | man doc/node.1
84 | ```
85 |
86 | ### `Intl` (ECMA-402) support:
87 |
88 | [Intl](https://github.com/joyent/node/wiki/Intl) support is not
89 | enabled by default.
90 |
91 | #### "small" (English only) support
92 |
93 | This option builds with "small" (English-only) ICU data while still
94 | exposing the full `Intl` (ECMA-402) APIs. With `--download=all` it will
95 | download the ICU library as needed.
96 |
97 | Unix/Macintosh:
98 |
99 | ```sh
100 | ./configure --with-intl=small-icu --download=all
101 | ```
102 |
103 | Windows:
104 |
105 | ```sh
106 | vcbuild small-icu download-all
107 | ```
108 |
109 | The `small-icu` mode builds
110 | with English-only data. You can add full data at runtime.
111 |
112 | *Note:* more docs are on
113 | [the wiki](https://github.com/joyent/node/wiki/Intl).
114 |
115 | #### Build with full ICU support (all locales supported by ICU):
116 |
117 | With `--download=all`, this may download ICU if you don't
118 | already have an ICU source tree in `deps/icu`.
119 |
120 | Unix/Macintosh:
121 |
122 | ```sh
123 | ./configure --with-intl=full-icu --download=all
124 | ```
125 |
126 | Windows:
127 |
128 | ```sh
129 | vcbuild full-icu download-all
130 | ```
131 |
132 | #### Build with no Intl support `:-(`
133 |
134 | The `Intl` object will not be available.
135 | This is the default at present, so this option is not normally needed.
136 |
137 | Unix/Macintosh:
138 |
139 | ```sh
140 | ./configure --with-intl=none
141 | ```
142 |
143 | Windows:
144 |
145 | ```sh
146 | vcbuild intl-none
147 | ```
148 |
149 | #### Use existing installed ICU (Unix/Macintosh only):
150 |
151 | ```sh
152 | pkg-config --modversion icu-i18n && ./configure --with-intl=system-icu
153 | ```
154 |
155 | #### Build with a specific ICU:
156 |
157 | You can find other ICU releases at
158 | [the ICU homepage](http://icu-project.org/download).
159 | Download the file named something like `icu4c-**##.#**-src.tgz` (or
160 | `.zip`).
161 |
162 | Unix/Macintosh: from an already-unpacked ICU
163 |
164 | ```sh
165 | ./configure --with-intl=[small-icu,full-icu] --with-icu-source=/path/to/icu
166 | ```
167 |
168 | Unix/Macintosh: from a local ICU tarball
169 |
170 | ```sh
171 | ./configure --with-intl=[small-icu,full-icu] --with-icu-source=/path/to/icu.tgz
172 | ```
173 |
174 | Unix/Macintosh: from a tarball URL
175 |
176 | ```sh
177 | ./configure --with-intl=full-icu --with-icu-source=http://url/to/icu.tgz
178 | ```
179 |
180 | Windows: first unpack the latest ICU, e.g.
181 | [icu4c-**##.#**-src.tgz](http://icu-project.org/download) (or `.zip`),
182 | as `deps/icu` (you'll then have `deps/icu/source/...`)
183 |
184 | ```sh
185 | vcbuild full-icu
186 | ```
187 |
188 | Resources for Newcomers
189 | ---
190 | - [The Wiki](https://github.com/joyent/node/wiki)
191 | - [nodejs.org](http://nodejs.org/)
192 | - [how to install node.js and npm (node package manager)](http://www.joyent.com/blog/installing-node-and-npm/)
193 | - [list of modules](https://github.com/joyent/node/wiki/modules)
194 | - [searching the npm registry](http://npmjs.org/)
195 | - [list of companies and projects using node](https://github.com/joyent/node/wiki/Projects,-Applications,-and-Companies-Using-Node)
196 | - [node.js mailing list](http://groups.google.com/group/nodejs)
197 | - irc chatroom, [#node.js on freenode.net](http://webchat.freenode.net?channels=node.js&uio=d4)
198 | - [community](https://github.com/joyent/node/wiki/Community)
199 | - [contributing](https://github.com/joyent/node/wiki/Contributing)
200 | - [big list of all the helpful wiki pages](https://github.com/joyent/node/wiki/_pages)
201 |
--------------------------------------------------------------------------------
/test/content/doc/api_assets/logo.svg:
--------------------------------------------------------------------------------
1 | <!-- Node.js logo: SVG markup omitted -->
--------------------------------------------------------------------------------
/test/content/doc/api_assets/sh.css:
--------------------------------------------------------------------------------
1 | .sh_sourceCode {
2 | font-weight: normal;
3 | font-style: normal;
4 | }
5 |
6 | .sh_sourceCode .sh_symbol,
7 | .sh_sourceCode .sh_cbracket {
8 | color: #333;
9 | }
10 |
11 | .sh_sourceCode .sh_keyword {
12 | color: #c96;
13 | }
14 |
15 | .sh_sourceCode .sh_string,
16 | .sh_sourceCode .sh_regexp,
17 | .sh_sourceCode .sh_number,
18 | .sh_sourceCode .sh_specialchar {
19 | color: #690;
20 | }
21 |
22 | .sh_sourceCode .sh_comment {
23 | color: #666;
24 | }
25 |
--------------------------------------------------------------------------------
/test/content/doc/changelog-foot.html:
--------------------------------------------------------------------------------
1 | <!-- changelog footer: HTML markup omitted -->