├── test ├── multibyte │ ├── じっぷ │ │ └── じっぷ.txt │ ├── じっぷ.zip │ └── test.js ├── issue_130 │ ├── nested │ │ ├── nested_file.txt │ │ └── deeper │ │ │ └── deeper_file.txt │ ├── test.zip │ └── issue_130.test.js ├── assets │ ├── fast.zip │ ├── normal.zip │ ├── store.zip │ ├── ultra.zip │ ├── fastest.zip │ ├── linux_arc.zip │ ├── maximum.zip │ ├── maximum3.zip │ ├── symlink.zip │ ├── stream-nozip64.zip │ ├── attributes_test.zip │ ├── issue-237-Twizzeld.zip │ ├── large_directory_size.zip │ └── issue-471-infozip-encrypted.zip ├── crc │ ├── bad_crc.zip │ ├── good_crc.zip │ ├── good_crc_trailing_data_descriptor.zip │ └── index.js ├── mbcs │ ├── chs_name.zip │ └── mbcs.test.js ├── large_directory_size │ └── large_directory_size.test.js ├── issue_471 │ └── infozip-password.test.js ├── utils.test.js ├── methods │ ├── zipcrypto.test.js │ └── methods.test.js ├── mocha.js └── header.js ├── .gitignore ├── .prettierignore ├── .gitattributes ├── headers ├── index.js ├── mainHeader.js └── entryHeader.js ├── methods ├── index.js ├── deflater.js ├── inflater.js └── zipcrypto.js ├── util ├── decoder.js ├── index.js ├── fattr.js ├── errors.js ├── constants.js └── utils.js ├── .mocharc.yml ├── .prettierrc.json ├── .editorconfig ├── .github └── workflows │ ├── windows.yml │ ├── ci.yml │ └── codeql.yml ├── LICENSE ├── package.json ├── README.md ├── history.md ├── zipEntry.js └── zipFile.js /test/multibyte/じっぷ/じっぷ.txt: -------------------------------------------------------------------------------- 1 | じっぷ 2 | -------------------------------------------------------------------------------- /test/issue_130/nested/nested_file.txt: -------------------------------------------------------------------------------- 1 | nested -------------------------------------------------------------------------------- /test/issue_130/nested/deeper/deeper_file.txt: -------------------------------------------------------------------------------- 1 | deeper -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | /test/issue_*/unzipped/ 3 | xxx 4 | .idea 5 | *.iml 6 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # keep current manual config 2 | util/constants.js 3 | util/errors.js 4 | -------------------------------------------------------------------------------- /test/assets/fast.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/fast.zip -------------------------------------------------------------------------------- /test/crc/bad_crc.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/crc/bad_crc.zip -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Enforce Unix newlines in repo 2 | * text=auto eol=lf 3 | 4 | *.zip binary 5 | -------------------------------------------------------------------------------- /test/assets/normal.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/normal.zip 
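A note on the multibyte fixtures above: `test/multibyte/test.js` (shown later in this listing) asserts a 9-byte zip comment for the string "じっぷ". A minimal sketch of the arithmetic behind that expectation: each of the three characters falls in the range U+0800 to U+FFFF and therefore encodes to 3 bytes in UTF-8. This snippet is illustrative only and not part of the repository:

```javascript
// "じっぷ" is 3 characters at 3 UTF-8 bytes each = 9 bytes,
// which matches the ENDCOM comment-length field asserted in the multibyte test.
console.log(Buffer.byteLength("じっぷ", "utf8")); // 9
console.log([..."じっぷ"].length); // 3
```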
-------------------------------------------------------------------------------- /test/assets/store.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/store.zip -------------------------------------------------------------------------------- /test/assets/ultra.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/ultra.zip -------------------------------------------------------------------------------- /test/crc/good_crc.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/crc/good_crc.zip -------------------------------------------------------------------------------- /test/mbcs/chs_name.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/mbcs/chs_name.zip -------------------------------------------------------------------------------- /test/multibyte/じっぷ.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/multibyte/じっぷ.zip -------------------------------------------------------------------------------- /test/assets/fastest.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/fastest.zip -------------------------------------------------------------------------------- /test/assets/linux_arc.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/linux_arc.zip -------------------------------------------------------------------------------- /test/assets/maximum.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/maximum.zip -------------------------------------------------------------------------------- /test/assets/maximum3.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/maximum3.zip -------------------------------------------------------------------------------- /test/assets/symlink.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/symlink.zip -------------------------------------------------------------------------------- /test/issue_130/test.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/issue_130/test.zip -------------------------------------------------------------------------------- /test/assets/stream-nozip64.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/stream-nozip64.zip -------------------------------------------------------------------------------- /headers/index.js: -------------------------------------------------------------------------------- 1 | exports.EntryHeader = require("./entryHeader"); 2 | exports.MainHeader = require("./mainHeader"); 3 | 
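`headers/index.js` above is a barrel module: it re-exports the two header factories so consumers can require the directory instead of the individual files. A short sketch of how the re-exports can be used, based on the `mainHeader.js` API shown later in this listing (the require path assumes the repository root; the snippet is illustrative and not part of the repository):

```javascript
const { MainHeader } = require("./headers");

// MainHeader is a factory function returning the header object.
const main = MainHeader();
main.totalEntries = 2; // the setter also updates diskEntries (see headers/mainHeader.js)
const buf = main.toBinary(); // END record: "PK\005\006" signature plus counts and offsets
console.log(buf.length); // 22 (Constants.ENDHDR) while commentLength is 0
```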
-------------------------------------------------------------------------------- /test/assets/attributes_test.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/attributes_test.zip -------------------------------------------------------------------------------- /test/assets/issue-237-Twizzeld.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/issue-237-Twizzeld.zip -------------------------------------------------------------------------------- /test/assets/large_directory_size.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/large_directory_size.zip -------------------------------------------------------------------------------- /test/assets/issue-471-infozip-encrypted.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/assets/issue-471-infozip-encrypted.zip -------------------------------------------------------------------------------- /test/crc/good_crc_trailing_data_descriptor.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cthackers/adm-zip/HEAD/test/crc/good_crc_trailing_data_descriptor.zip -------------------------------------------------------------------------------- /methods/index.js: -------------------------------------------------------------------------------- 1 | exports.Deflater = require("./deflater"); 2 | exports.Inflater = require("./inflater"); 3 | exports.ZipCrypto = require("./zipcrypto"); 4 | -------------------------------------------------------------------------------- /util/decoder.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | efs: true, 3 | encode: (data) => Buffer.from(data, "utf8"), 4 | decode: (data) => data.toString("utf8") 5 | }; 6 | -------------------------------------------------------------------------------- /util/index.js: -------------------------------------------------------------------------------- 1 | module.exports = require("./utils"); 2 | module.exports.Constants = require("./constants"); 3 | module.exports.Errors = require("./errors"); 4 | module.exports.FileAttr = require("./fattr"); 5 | module.exports.decoder = require("./decoder"); 6 | -------------------------------------------------------------------------------- /.mocharc.yml: -------------------------------------------------------------------------------- 1 | # add colors to reporters 2 | colors: true 3 | 4 | # auto exit if test hangs 5 | exit: true 6 | 7 | # default reporter 8 | #reporter: spec 9 | reporter: list 10 | 11 | # test files 12 | spec: 13 | - test/mocha.js 14 | - test/crc/index.js 15 | - test/multibyte/test.js 16 | - test/**/*.test.js 17 | - test/header.js 18 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": false, 3 | "tabWidth": 4, 4 | "trailingComma": "none", 5 | "printWidth": 180, 6 | "bracketSpacing": true, 7 | "endOfLine": "auto", 8 | "useTabs": false, 9 | "semi": true, 10 | "overrides": [ 11 | { 12 | "files": ["*.json", "*.yml"], 13 | "options": { 14 | "tabWidth": 2 15 | } 
16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # This file is for unifying the coding style for different editors and IDEs 2 | # editorconfig.org 3 | 4 | root = true 5 | 6 | # Set default charset 7 | [*.{js,json,txt,md}] 8 | charset = utf-8 9 | indent_style = space 10 | insert_final_newline = true 11 | 12 | [*.{js,md,json}] 13 | trim_trailing_whitespace = true 14 | 15 | [*.{js,md}] 16 | indent_size = 4 17 | 18 | [*.js] 19 | quote_type = double 20 | 21 | [*.{yml,json}] 22 | indent_size = 2 23 | -------------------------------------------------------------------------------- /.github/workflows/windows.yml: -------------------------------------------------------------------------------- 1 | name: Windows build 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | build: 9 | name: Node ${{ matrix.node-version }} 10 | runs-on: windows-latest 11 | 12 | strategy: 13 | matrix: 14 | # mocha no longer supports node 10 15 | node-version: [18.x, 20.x, 22.x] 16 | 17 | steps: 18 | - name: Clone repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Setup Node.js 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version: ${{ matrix.node-version }} 25 | 26 | - name: Install 27 | run: | 28 | npm ci 29 | 30 | - name: Run Tests 31 | run: | 32 | npm test 33 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Linux build 2 | 3 | on: 4 | push: 5 | pull_request: 6 | 7 | jobs: 8 | build: 9 | name: Node ${{ matrix.node-version }} 10 | runs-on: ubuntu-latest 11 | 12 | strategy: 13 | matrix: 14 | # mocha no longer supports node 10 15 | node-version: [12.x, 14.x, 16.x, 18.x, 20.x, 21.x] 16 | 17 | steps: 18 | - name: Clone repository 19 | uses: actions/checkout@v4 20 | 21 | - name: Setup Node.js 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version: ${{ matrix.node-version }} 25 | 26 | - name: Install 27 | run: | 28 | npm install 29 | 30 | - name: Run Tests 31 | run: | 32 | npm test 33 | -------------------------------------------------------------------------------- /test/large_directory_size/large_directory_size.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const assert = require("assert"); 4 | const path = require("path"); 5 | const Zip = require("../../adm-zip"); 6 | const Errors = require("../../util/errors"); 7 | 8 | describe("read zip file header with invalid large number of entries", () => { 9 | it("throws too large error", () => { 10 | // this zip file reports a disk entry count of 2147483648, which is impossible 11 | const zip = new Zip(path.join(__dirname, "../assets/large_directory_size.zip")); 12 | // assert that the following call throws an exception 13 | assert.throws(() => { 14 | zip.getEntries(); 15 | }, Errors.DISK_ENTRY_TOO_LARGE()); 16 | }); 17 | }); 18 | -------------------------------------------------------------------------------- /test/multibyte/test.js: -------------------------------------------------------------------------------- 1 | const { expect } = require("chai"); 2 | const Utils = require("../../util"); 3 | const AdmZip = require("../../adm-zip"); 4 | const path = require("path"); 5 | 6 | describe("adm-zip", () => { 7 | it("adds multibyte ZIP comment in UTF-8 with appropriate byte length", () => { 8 | 
const zip = new AdmZip(); 9 | zip.addLocalFile(path.join(__dirname, "./じっぷ/じっぷ.txt")); 10 | zip.addZipComment("じっぷ"); 11 | const willSend = zip.toBuffer(); 12 | const end = willSend.slice(willSend.lastIndexOf(Utils.Constants.ENDSIG)); 13 | const commentLength = end.readInt16LE(Utils.Constants.ENDCOM, 2); 14 | expect(commentLength).to.eq(9); 15 | const expected = Buffer.from("じっぷ"); 16 | const actual = end.slice(Utils.Constants.ENDCOM + 2); 17 | expect(actual).to.include(expected); 18 | expect(expected).to.include(actual); 19 | }); 20 | }); 21 | -------------------------------------------------------------------------------- /methods/deflater.js: -------------------------------------------------------------------------------- 1 | module.exports = function (/*Buffer*/ inbuf) { 2 | var zlib = require("zlib"); 3 | 4 | var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; 5 | 6 | return { 7 | deflate: function () { 8 | return zlib.deflateRawSync(inbuf, opts); 9 | }, 10 | 11 | deflateAsync: function (/*Function*/ callback) { 12 | var tmp = zlib.createDeflateRaw(opts), 13 | parts = [], 14 | total = 0; 15 | tmp.on("data", function (data) { 16 | parts.push(data); 17 | total += data.length; 18 | }); 19 | tmp.on("end", function () { 20 | var buf = Buffer.alloc(total), 21 | written = 0; 22 | buf.fill(0); 23 | for (var i = 0; i < parts.length; i++) { 24 | var part = parts[i]; 25 | part.copy(buf, written); 26 | written += part.length; 27 | } 28 | callback && callback(buf); 29 | }); 30 | tmp.end(inbuf); 31 | } 32 | }; 33 | }; 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2012 Another-D-Mention Software and other contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /methods/inflater.js: -------------------------------------------------------------------------------- 1 | const version = +(process.versions ? process.versions.node : "").split(".")[0] || 0; 2 | 3 | module.exports = function (/*Buffer*/ inbuf, /*number*/ expectedLength) { 4 | var zlib = require("zlib"); 5 | const option = version >= 15 && expectedLength > 0 ? 
{ maxOutputLength: expectedLength } : {}; 6 | 7 | return { 8 | inflate: function () { 9 | return zlib.inflateRawSync(inbuf, option); 10 | }, 11 | 12 | inflateAsync: function (/*Function*/ callback) { 13 | var tmp = zlib.createInflateRaw(option), 14 | parts = [], 15 | total = 0; 16 | tmp.on("data", function (data) { 17 | parts.push(data); 18 | total += data.length; 19 | }); 20 | tmp.on("end", function () { 21 | var buf = Buffer.alloc(total), 22 | written = 0; 23 | buf.fill(0); 24 | for (var i = 0; i < parts.length; i++) { 25 | var part = parts[i]; 26 | part.copy(buf, written); 27 | written += part.length; 28 | } 29 | callback && callback(buf); 30 | }); 31 | tmp.end(inbuf); 32 | } 33 | }; 34 | }; 35 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "adm-zip", 3 | "version": "0.5.16", 4 | "description": "Javascript implementation of zip for nodejs with support for electron original-fs. Allows user to create or extract zip files both in memory or to/from disk", 5 | "scripts": { 6 | "test": "mocha -R spec", 7 | "test:format": "npm run format:prettier:raw -- --check", 8 | "format": "npm run format:prettier", 9 | "format:prettier": "npm run format:prettier:raw -- --write", 10 | "format:prettier:raw": "prettier \"**/*.{js,yml,json}\"" 11 | }, 12 | "keywords": [ 13 | "zip", 14 | "methods", 15 | "archive", 16 | "unzip" 17 | ], 18 | "homepage": "https://github.com/cthackers/adm-zip", 19 | "author": "Nasca Iacob (https://github.com/cthackers)", 20 | "bugs": { 21 | "email": "sy@another-d-mention.ro", 22 | "url": "https://github.com/cthackers/adm-zip/issues" 23 | }, 24 | "license": "MIT", 25 | "files": [ 26 | "adm-zip.js", 27 | "headers", 28 | "methods", 29 | "util", 30 | "zipEntry.js", 31 | "zipFile.js", 32 | "LICENSE" 33 | ], 34 | "main": "adm-zip.js", 35 | "repository": { 36 | "type": "git", 37 | "url": "https://github.com/cthackers/adm-zip.git" 38 | }, 39 | "engines": { 40 | "node": ">=12.0" 41 | }, 42 | "devDependencies": { 43 | "chai": "^4.3.4", 44 | "iconv-lite": "^0.6.3", 45 | "mocha": "^10.2.0", 46 | "prettier": "^3.3.2", 47 | "rimraf": "^3.0.2" 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /test/issue_471/infozip-password.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // Tests for github issue 471: https://github.com/cthackers/adm-zip/issues/471 4 | 5 | const assert = require("assert"); 6 | const path = require("path"); 7 | const Zip = require("../../adm-zip"); 8 | 9 | describe("decryption with info-zip spec password check", () => { 10 | // test decryption with both password types 11 | it("test decrypted data with password", () => { 12 | // the issue-471-infozip-encrypted.zip file has been generated with Info-Zip Zip 2.32, but the Info-Zip 13 | // standard is used by other zip generators as well. 
14 | const infoZip = new Zip(path.join(__dirname, "../assets/issue-471-infozip-encrypted.zip")); 15 | const entries = infoZip.getEntries(); 16 | assert(entries.length === 1, "Good: Test archive contains exactly 1 file"); 17 | 18 | const testFile = entries.filter(function (entry) { 19 | return entry.entryName === "dummy.txt"; 20 | }); 21 | assert(testFile.length === 1, "Good: dummy.txt file exists as archive entry"); 22 | 23 | const readData = entries[0].getData("secret"); 24 | assert(readData.toString("utf8").startsWith("How much wood could a woodchuck chuck"), "Good: buffer matches expectations"); 25 | 26 | // assert that the following call throws an exception 27 | assert.throws(() => { 28 | const readDataBad = entries[0].getData("badpassword"); 29 | }, "Good: error thrown for bad password"); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /test/issue_130/issue_130.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const assert = require("assert"); 4 | const fs = require("fs"); 5 | const pth = require("path"); 6 | const Zip = require("../../adm-zip"); 7 | const rimraf = require("rimraf"); 8 | 9 | describe("ADM-ZIP - Issues", () => { 10 | const destination = pth.resolve("./test/xxx"); 11 | const unzipped = pth.join(destination, "unzipped"); 12 | 13 | // clean up folder content 14 | afterEach((done) => rimraf(destination, done)); 15 | 16 | it("Issue 130 - Created zip's under Windows are corrupt", () => { 17 | // init the final zip file 18 | const writeZip = new Zip(); 19 | 20 | // file in root folder 21 | writeZip.addFile("root_file.txt", "root"); 22 | 23 | // add folder 24 | writeZip.addFile("sub/", Buffer.alloc(0)); 25 | 26 | // file in sub folder 27 | writeZip.addFile("sub/sub_file.txt", "sub"); 28 | 29 | // files from local folder 30 | writeZip.addLocalFolder(pth.resolve("./test/issue_130", "nested"), "nested"); 31 | 32 | // write to disk 33 | writeZip.writeZip(pth.join(destination, "test.zip")); 34 | 35 | // read zip from disk 36 | const readZip = new Zip(pth.join(destination, "test.zip")); 37 | 38 | // unpack everything 39 | readZip.extractAllTo(unzipped, true); 40 | 41 | // assert the files 42 | const fileRoot = fs.readFileSync(pth.join(unzipped, "root_file.txt"), "utf8"); 43 | assert(fileRoot === "root", "root file not correct"); 44 | 45 | const fileSub = fs.readFileSync(pth.join(unzipped, "sub/sub_file.txt"), "utf8"); 46 | assert(fileSub === "sub", "sub file not correct"); 47 | 48 | const fileNested = fs.readFileSync(pth.join(unzipped, "nested/nested_file.txt"), "utf8"); 49 | assert(fileNested === "nested", "nested file not correct"); 50 | 51 | const fileDeeper = fs.readFileSync(pth.join(unzipped, "nested/deeper/deeper_file.txt"), "utf8"); 52 | assert(fileDeeper === "deeper", "deeper file not correct"); 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /util/fattr.js: -------------------------------------------------------------------------------- 1 | const pth = require("path"); 2 | 3 | module.exports = function (/*String*/ path, /*Utils object*/ { fs }) { 4 | var _path = path || "", 5 | _obj = newAttr(), 6 | _stat = null; 7 | 8 | function newAttr() { 9 | return { 10 | directory: false, 11 | readonly: false, 12 | hidden: false, 13 | executable: false, 14 | mtime: 0, 15 | atime: 0 16 | }; 17 | } 18 | 19 | if (_path && fs.existsSync(_path)) { 20 | _stat = fs.statSync(_path); 21 | _obj.directory = 
_stat.isDirectory(); 22 | _obj.mtime = _stat.mtime; 23 | _obj.atime = _stat.atime; 24 | _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable if anyone has execute rights, not just the owner 25 | _obj.readonly = (0o200 & _stat.mode) === 0; // read-only if the owner has no write permission 26 | _obj.hidden = pth.basename(_path)[0] === "."; 27 | } else { 28 | console.warn("Invalid path: " + _path); 29 | } 30 | 31 | return { 32 | get directory() { 33 | return _obj.directory; 34 | }, 35 | 36 | get readOnly() { 37 | return _obj.readonly; 38 | }, 39 | 40 | get hidden() { 41 | return _obj.hidden; 42 | }, 43 | 44 | get mtime() { 45 | return _obj.mtime; 46 | }, 47 | 48 | get atime() { 49 | return _obj.atime; 50 | }, 51 | 52 | get executable() { 53 | return _obj.executable; 54 | }, 55 | 56 | decodeAttributes: function () {}, 57 | 58 | encodeAttributes: function () {}, 59 | 60 | toJSON: function () { 61 | return { 62 | path: _path, 63 | isDirectory: _obj.directory, 64 | isReadOnly: _obj.readonly, 65 | isHidden: _obj.hidden, 66 | isExecutable: _obj.executable, 67 | mTime: _obj.mtime, 68 | aTime: _obj.atime 69 | }; 70 | }, 71 | 72 | toString: function () { 73 | return JSON.stringify(this.toJSON(), null, "\t"); 74 | } 75 | }; 76 | }; 77 | -------------------------------------------------------------------------------- /util/errors.js: -------------------------------------------------------------------------------- 1 | const errors = { 2 | /* Header error messages */ 3 | INVALID_LOC: "Invalid LOC header (bad signature)", 4 | INVALID_CEN: "Invalid CEN header (bad signature)", 5 | INVALID_END: "Invalid END header (bad signature)", 6 | 7 | /* Descriptor */ 8 | DESCRIPTOR_NOT_EXIST: "No descriptor present", 9 | DESCRIPTOR_UNKNOWN: "Unknown descriptor format", 10 | DESCRIPTOR_FAULTY: "Descriptor data is malformed", 11 | 12 | /* ZipEntry error messages */ 13 | NO_DATA: "Nothing to decompress", 14 | BAD_CRC: "CRC32 checksum failed {0}", 15 | FILE_IN_THE_WAY: "There is a file in the way: {0}", 16 | UNKNOWN_METHOD: "Invalid/unsupported compression method", 17 | 18 | /* Inflater error messages */ 19 | AVAIL_DATA: "inflate::Available inflate data did not terminate", 20 | INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", 21 | TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", 22 | INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", 23 | INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", 24 | INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", 25 | INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", 26 | INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", 27 | INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", 28 | INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", 29 | 30 | /* ADM-ZIP error messages */ 31 | CANT_EXTRACT_FILE: "Could not extract the file", 32 | CANT_OVERRIDE: "Target file already exists", 33 | DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large", 34 | NO_ZIP: "No zip file was loaded", 35 | NO_ENTRY: "Entry doesn't exist", 36 | DIRECTORY_CONTENT_ERROR: "A directory cannot have content", 37 | FILE_NOT_FOUND: 'File not found: "{0}"', 38 | NOT_IMPLEMENTED: "Not implemented", 39 | INVALID_FILENAME: "Invalid filename", 40 | INVALID_FORMAT: "Invalid or 
unsupported zip format. No END header found", 41 | INVALID_PASS_PARAM: "Incompatible password parameter", 42 | WRONG_PASSWORD: "Wrong Password", 43 | 44 | /* ADM-ZIP */ 45 | COMMENT_TOO_LONG: "Comment is too long", // Comment can be max 65535 bytes long (NOTE: some non-US characters may take more space) 46 | EXTRA_FIELD_PARSE_ERROR: "Extra field parsing error" 47 | }; 48 | 49 | // template 50 | function E(message) { 51 | return function (...args) { 52 | // Allow {0} .. {9} placeholders in the error message, filled in by argument number. 53 | // Build the result in a local variable so the message template is not mutated between calls. 54 | const msg = args.length ? message.replace(/\{(\d)\}/g, (_, n) => args[n] || "") : message; 55 | 56 | return new Error("ADM-ZIP: " + msg); 57 | }; 58 | } 59 | 60 | // Init errors with template 61 | for (const msg of Object.keys(errors)) { 62 | exports[msg] = E(errors[msg]); 63 | } 64 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ADM-ZIP for NodeJS 2 | 3 | ADM-ZIP is a pure JavaScript implementation of zip data compression for [NodeJS](https://nodejs.org/). 4 | 5 | 6 | 7 | 8 | 9 | # Installation 10 | 11 | With [npm](https://www.npmjs.com/) do: 12 | 13 | $ npm install adm-zip 14 | 15 | **Electron** file system support is described below. 16 | 17 | ## What is it good for? 18 | 19 | The library allows you to: 20 | 21 | - decompress zip files directly to disk or in memory buffers 22 | - compress files and store them to disk in .zip format or in compressed buffers 23 | - update content of/add new/delete files from an existing .zip 24 | 25 | # Dependencies 26 | 27 | ADM-ZIP does not depend on any other Node.js libraries. 28 | 29 | # Examples 30 | 31 | ## Basic usage 32 | 33 | ```javascript 34 | var AdmZip = require("adm-zip"); 35 | 36 | // reading archives 37 | var zip = new AdmZip("./my_file.zip"); 38 | var password = "1234567890"; 39 | var zipEntries = zip.getEntries(); // an array of ZipEntry records - add password parameter if entries are password protected 40 | 41 | zipEntries.forEach(function (zipEntry) { 42 | console.log(zipEntry.toString()); // outputs zip entries information 43 | if (zipEntry.entryName == "my_file.txt") { 44 | console.log(zipEntry.getData().toString("utf8")); 45 | } 46 | }); 47 | // outputs the content of some_folder/my_file.txt 48 | console.log(zip.readAsText("some_folder/my_file.txt")); 49 | // extracts the specified file to the specified location 50 | zip.extractEntryTo(/*entry name*/ "some_folder/my_file.txt", /*target path*/ "/home/me/tempfolder", /*maintainEntryPath*/ false, /*overwrite*/ true); 51 | // extracts everything 52 | zip.extractAllTo(/*target path*/ "/home/me/zipcontent/", /*overwrite*/ true); 53 | 54 | // creating archives 55 | var zip = new AdmZip(); 56 | 57 | // add file directly 58 | var content = "inner content of the file"; 59 | zip.addFile("test.txt", Buffer.from(content, "utf8"), "entry comment goes here"); 60 | // add local file 61 | zip.addLocalFile("/home/me/some_picture.png"); 62 | // get everything as a buffer 63 | var willSendthis = zip.toBuffer(); 64 | // or write everything to disk 65 | zip.writeZip(/*target file name*/ "/home/me/files.zip"); 66 | 67 | // ... more examples in the wiki 68 | ``` 69 | 70 | For more detailed information please check out the [wiki](https://github.com/cthackers/adm-zip/wiki). 
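## Reading password-protected archives

ZipCrypto-encrypted entries can be read by passing the password to `getData`. A short sketch based on the calls exercised in `test/issue_471/infozip-password.test.js`; the archive name and password below are placeholders:

```javascript
const AdmZip = require("adm-zip");

const zip = new AdmZip("./encrypted.zip"); // placeholder archive name
zip.getEntries().forEach((entry) => {
    // getData accepts the password for encrypted entries and throws an
    // "ADM-ZIP: Wrong Password" error (see util/errors.js) when it does not match.
    console.log(entry.entryName, entry.getData("secret").toString("utf8"));
});
```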
71 | 72 | ## Electron original-fs 73 | 74 | ADM-ZIP has supported Electron's **original-fs** for years without any user interaction, but this causes problems with bundlers such as Rollup. To keep using **original-fs** (or any other custom file system module), you can specify your module via the **fs** option in the ADM-ZIP constructor. 75 | 76 | Example: 77 | 78 | ```javascript 79 | const AdmZip = require("adm-zip"); 80 | const OriginalFs = require("original-fs"); 81 | 82 | // reading archives 83 | const zip = new AdmZip("./my_file.zip", { fs: OriginalFs }); 84 | . 85 | . 86 | . 87 | ``` 88 | -------------------------------------------------------------------------------- /test/crc/index.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert"); 2 | const path = require("path"); 3 | const Zip = require("../../adm-zip"); 4 | const rimraf = require("rimraf"); 5 | 6 | describe("crc", () => { 7 | const destination = __dirname + "/xxx"; 8 | 9 | beforeEach((done) => rimraf(destination, done)); 10 | 11 | it("Good CRC", (done) => { 12 | const goodZip = new Zip(path.join(__dirname, "good_crc.zip")); 13 | const entries = goodZip.getEntries(); 14 | assert(entries.length === 1, "Good CRC: Test archive contains exactly 1 file"); 15 | 16 | const testFile = entries.filter(function (entry) { 17 | return entry.entryName === "lorem_ipsum.txt"; 18 | }); 19 | assert(testFile.length === 1, "Good CRC: lorem_ipsum.txt file exists as archive entry"); 20 | 21 | const testFileEntryName = testFile[0].entryName; 22 | goodZip.readAsTextAsync(testFileEntryName, function (data, err) { 23 | assert(!err, "Good CRC: error object not present"); 24 | assert(data && data.length, "Good CRC: buffer not empty"); 25 | done(); 26 | }); 27 | }); 28 | 29 | it("Good CRC - trailing data descriptor", (done) => { 30 | const goodZip = new Zip(path.join(__dirname, "good_crc_trailing_data_descriptor.zip")); 31 | const entries = goodZip.getEntries(); 32 | assert(entries.length === 1, "Good CRC: Test archive contains exactly 1 file"); 33 | 34 | const testFile = entries.filter(function (entry) { 35 | return entry.entryName === "lorem_ipsum.txt"; 36 | }); 37 | assert(testFile.length === 1, "Good CRC: lorem_ipsum.txt file exists as archive entry"); 38 | 39 | const testFileEntryName = testFile[0].entryName; 40 | goodZip.readAsTextAsync(testFileEntryName, function (data, err) { 41 | assert(!err, "Good CRC: error object not present"); 42 | assert(data && data.length, "Good CRC: buffer not empty"); 43 | done(); 44 | }); 45 | }); 46 | 47 | it("Bad CRC - async method returns err string", (done) => { 48 | const badZip = new Zip(path.join(__dirname, "bad_crc.zip")); 49 | const entries = badZip.getEntries(); 50 | assert(entries.length === 1, "Bad CRC: Test archive contains exactly 1 file"); 51 | 52 | const testFile = entries.filter(function (entry) { 53 | return entry.entryName === "lorem_ipsum.txt"; 54 | }); 55 | assert(testFile.length === 1, "Bad CRC: lorem_ipsum.txt file exists as archive entry"); 56 | 57 | const testFileEntryName = testFile[0].entryName; 58 | badZip.readAsTextAsync(testFileEntryName, function (data, err) { 59 | assert(data && data.length, "Bad CRC: buffer not empty"); 60 | assert(err, "Bad CRC: error object present"); 61 | done(); 62 | }); 63 | }); 64 | 65 | it("Bad CRC - sync method throws an error object", (done) => { 66 | const badZip = new Zip(path.join(__dirname, "bad_crc.zip")); 67 | const entries = badZip.getEntries(); 68 | const testFile = 
entries.filter(function (entry) { 69 | return entry.entryName === "lorem_ipsum.txt"; 70 | }); 71 | const testFileEntryName = testFile[0].entryName; 72 | 73 | try { 74 | badZip.readAsText(testFileEntryName); 75 | } catch (e) { 76 | assert(e.stack, "Bad CRC: threw something other than an Error instance"); 77 | done(); 78 | return; 79 | } 80 | assert.fail("Bad CRC: did not throw exception"); 81 | }); 82 | 83 | it("CRC is not changed after re-created", () => { 84 | const goodZip = new Zip(path.join(__dirname, "good_crc.zip")); 85 | const original = goodZip.getEntries()[0].header.crc; 86 | assert.equal(original, 3528145192); 87 | const newZipPath = destination + "/good_crc_new.zip"; 88 | goodZip.writeZip(newZipPath); 89 | const newZip = new Zip(newZipPath); 90 | const actual = newZip.getEntries()[0].header.crc; 91 | assert.equal(actual, original); 92 | }); 93 | }); 94 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: ["master"] 17 | pull_request: 18 | branches: ["master"] 19 | schedule: 20 | - cron: "41 3 * * 5" 21 | 22 | jobs: 23 | analyze: 24 | name: Analyze (${{ matrix.language }}) 25 | # Runner size impacts CodeQL analysis time. To learn more, please see: 26 | # - https://gh.io/recommended-hardware-resources-for-running-codeql 27 | # - https://gh.io/supported-runners-and-hardware-resources 28 | # - https://gh.io/using-larger-runners (GitHub.com only) 29 | # Consider using larger runners or machines with greater resources for possible analysis time improvements. 30 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} 31 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} 32 | permissions: 33 | # required for all workflows 34 | security-events: write 35 | 36 | # required to fetch internal or private CodeQL packs 37 | packages: read 38 | 39 | # only required for workflows in private repositories 40 | actions: read 41 | contents: read 42 | 43 | strategy: 44 | fail-fast: false 45 | matrix: 46 | include: 47 | - language: javascript-typescript 48 | build-mode: none 49 | # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' 50 | # Use `c-cpp` to analyze code written in C, C++ or both 51 | # Use 'java-kotlin' to analyze code written in Java, Kotlin or both 52 | # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both 53 | # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, 54 | # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. 
55 | # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how 56 | # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages 57 | steps: 58 | - name: Checkout repository 59 | uses: actions/checkout@v4 60 | 61 | # Initializes the CodeQL tools for scanning. 62 | - name: Initialize CodeQL 63 | uses: github/codeql-action/init@v3 64 | with: 65 | languages: ${{ matrix.language }} 66 | build-mode: ${{ matrix.build-mode }} 67 | # If you wish to specify custom queries, you can do so here or in a config file. 68 | # By default, queries listed here will override any specified in a config file. 69 | # Prefix the list here with "+" to use these queries and those in the config file. 70 | 71 | # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 72 | # queries: security-extended,security-and-quality 73 | 74 | # If the analyze step fails for one of the languages you are analyzing with 75 | # "We were unable to automatically build your code", modify the matrix above 76 | # to set the build mode to "manual" for that language. Then modify this step 77 | # to build your code. 78 | # ℹ️ Command-line programs to run using the OS shell. 79 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 80 | - if: matrix.build-mode == 'manual' 81 | shell: bash 82 | run: | 83 | echo 'If you are using a "manual" build mode for one or more of the' \ 84 | 'languages you are analyzing, replace this with the commands to build' \ 85 | 'your code, for example:' 86 | echo ' make bootstrap' 87 | echo ' make release' 88 | exit 1 89 | 90 | - name: Perform CodeQL Analysis 91 | uses: github/codeql-action/analyze@v3 92 | with: 93 | category: "/language:${{matrix.language}}" 94 | -------------------------------------------------------------------------------- /headers/mainHeader.js: -------------------------------------------------------------------------------- 1 | var Utils = require("../util"), 2 | Constants = Utils.Constants; 3 | 4 | /* The entries in the end of central directory */ 5 | module.exports = function () { 6 | var _volumeEntries = 0, 7 | _totalEntries = 0, 8 | _size = 0, 9 | _offset = 0, 10 | _commentLength = 0; 11 | 12 | return { 13 | get diskEntries() { 14 | return _volumeEntries; 15 | }, 16 | set diskEntries(/*Number*/ val) { 17 | _volumeEntries = _totalEntries = val; 18 | }, 19 | 20 | get totalEntries() { 21 | return _totalEntries; 22 | }, 23 | set totalEntries(/*Number*/ val) { 24 | _totalEntries = _volumeEntries = val; 25 | }, 26 | 27 | get size() { 28 | return _size; 29 | }, 30 | set size(/*Number*/ val) { 31 | _size = val; 32 | }, 33 | 34 | get offset() { 35 | return _offset; 36 | }, 37 | set offset(/*Number*/ val) { 38 | _offset = val; 39 | }, 40 | 41 | get commentLength() { 42 | return _commentLength; 43 | }, 44 | set commentLength(/*Number*/ val) { 45 | _commentLength = val; 46 | }, 47 | 48 | get mainHeaderSize() { 49 | return Constants.ENDHDR + _commentLength; 50 | }, 51 | 52 | loadFromBinary: function (/*Buffer*/ data) { 53 | // data should be 22 bytes and start with "PK 05 06" 54 | // or be 56+ bytes and start with "PK 06 06" for Zip64 55 | if ( 56 | (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== 
Constants.ENDSIG) && 57 | (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) 58 | ) { 59 | throw Utils.Errors.INVALID_END(); 60 | } 61 | 62 | if (data.readUInt32LE(0) === Constants.ENDSIG) { 63 | // number of entries on this volume 64 | _volumeEntries = data.readUInt16LE(Constants.ENDSUB); 65 | // total number of entries 66 | _totalEntries = data.readUInt16LE(Constants.ENDTOT); 67 | // central directory size in bytes 68 | _size = data.readUInt32LE(Constants.ENDSIZ); 69 | // offset of first CEN header 70 | _offset = data.readUInt32LE(Constants.ENDOFF); 71 | // zip file comment length 72 | _commentLength = data.readUInt16LE(Constants.ENDCOM); 73 | } else { 74 | // number of entries on this volume 75 | _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); 76 | // total number of entries 77 | _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); 78 | // central directory size in bytes 79 | _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); 80 | // offset of first CEN header 81 | _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); 82 | 83 | _commentLength = 0; 84 | } 85 | }, 86 | 87 | toBinary: function () { 88 | var b = Buffer.alloc(Constants.ENDHDR + _commentLength); 89 | // "PK 05 06" signature 90 | b.writeUInt32LE(Constants.ENDSIG, 0); 91 | b.writeUInt32LE(0, 4); 92 | // number of entries on this volume 93 | b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); 94 | // total number of entries 95 | b.writeUInt16LE(_totalEntries, Constants.ENDTOT); 96 | // central directory size in bytes 97 | b.writeUInt32LE(_size, Constants.ENDSIZ); 98 | // offset of first CEN header 99 | b.writeUInt32LE(_offset, Constants.ENDOFF); 100 | // zip file comment length 101 | b.writeUInt16LE(_commentLength, Constants.ENDCOM); 102 | // fill comment memory with spaces so no garbage is left there 103 | b.fill(" ", Constants.ENDHDR); 104 | 105 | return b; 106 | }, 107 | 108 | toJSON: function () { 109 | // creates 0x0000 style output 110 | const offset = function (nr, len) { 111 | let offs = nr.toString(16).toUpperCase(); 112 | while (offs.length < len) offs = "0" + offs; 113 | return "0x" + offs; 114 | }; 115 | 116 | return { 117 | diskEntries: _volumeEntries, 118 | totalEntries: _totalEntries, 119 | size: _size + " bytes", 120 | offset: offset(_offset, 4), 121 | commentLength: _commentLength 122 | }; 123 | }, 124 | 125 | toString: function () { 126 | return JSON.stringify(this.toJSON(), null, "\t"); 127 | } 128 | }; 129 | }; 130 | -------------------------------------------------------------------------------- /test/utils.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | const { expect } = require("chai"); 3 | const { crc32, canonical, sanitize, zipnamefix } = require("../util/utils"); 4 | const pth = require("path"); 5 | 6 | describe("utils", () => { 7 | describe("crc32 function", () => { 8 | // tests how crc32 function handles strings as input 9 | it("handle strings", () => { 10 | const tests = [ 11 | // basic latin 12 | { crc: 0x00000000, data: "" }, 13 | { crc: 0xe8b7be43, data: "a" }, 14 | { crc: 0x352441c2, data: "abc" }, 15 | { crc: 0xcbf43926, data: "123456789" }, 16 | { crc: 0x20159d7f, data: "message digest" }, 17 | { crc: 0x4c2750bd, data: "abcdefghijklmnopqrstuvwxyz" }, 18 | { crc: 0x1fc2e6d2, data: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" }, 19 | { crc: 0xf8c05f58, data: "1234567890123456789012345678901234567890123456789" }, 20 | { 
crc: 0x1f61e4e0, data: "FFFFFFFFFFFFFFFFFFFFFFFFFFF" }, 21 | // Unicode 22 | { crc: 0x70b5f183, data: "ä" }, 23 | { crc: 0x414fa339, data: "The quick brown fox jumps over the lazy dog" }, 24 | // fox jump in russian 25 | { crc: 0x7d67cd7a, data: "Быстрая коричневая лиса прыгает через ленивую собаку" }, 26 | // fox jump in german 27 | { crc: 0x8c3db82b, data: "Der schnelle Braunfuchs springt über den faulen Hund" }, 28 | // fox jump in arabic 29 | { crc: 0x6d8c0241, data: "الثعلب البني السريع يقفز فوق الكلب الكسول" }, 30 | // fox jump in korean 31 | { crc: 0x13a25011, data: "빠른 갈색 여우가 게으른 개를 뛰어 넘습니다." } 32 | ]; 33 | 34 | for (let test of tests) { 35 | expect(crc32(test.data)).to.equal(test.crc); 36 | } 37 | }); 38 | }); 39 | 40 | describe("sanitizing functions :", () => { 41 | // tests how sanitize works 42 | it("function sanitize()", () => { 43 | const tests = [ 44 | // basic latin 45 | { prefix: "", file: "", result: "" }, 46 | { prefix: "folder", file: "file", result: "folder/file" }, 47 | { prefix: "folder", file: "../file", result: "folder/file" }, 48 | { prefix: "folder", file: "../../../file", result: "folder/file" }, 49 | { prefix: "folder", file: "./../file", result: "folder/file" }, 50 | { prefix: "test/folder/subfolder", file: "../../file", result: "test/folder/subfolder/file" }, 51 | { prefix: "test/folder/subfolder", file: "../../file1/../file2", result: "test/folder/subfolder/file2" }, 52 | // no prefixed (currently allows change folder) 53 | { prefix: "", file: "../../file1/../file2", result: "file2" }, 54 | { prefix: "", file: "../subfolder/file2", result: "subfolder/file2" }, 55 | { prefix: "", file: "../subfolder2/file2", result: "subfolder2/file2" }, 56 | { prefix: "", file: "../subfolder/file2", result: "subfolder/file2" }, 57 | { prefix: "", file: "../../subfolder2/file2", result: "subfolder2/file2" } 58 | ]; 59 | 60 | const curfolder = pth.resolve("."); 61 | // console.log("\n"); 62 | for (let test of tests) { 63 | // path.normalize in win32 will convert "/" to native "\" format 64 | 65 | const out = sanitize(pth.normalize(test.prefix || ""), test.file); 66 | const res = pth.join(curfolder, pth.normalize(test.result)); 67 | 68 | expect(out).to.equal(res); 69 | } 70 | }); 71 | 72 | it("function canonical()", () => { 73 | const tests = [ 74 | // no name 75 | { file: "", result: "" }, 76 | // file has name 77 | { file: "file", result: "file" }, 78 | { file: "../file", result: "file" }, 79 | { file: "../../../file", result: "file" }, 80 | { file: "./../file", result: "file" }, 81 | { file: "../../file", result: "file" }, 82 | { file: "../../file1/../file2", result: "file2" }, 83 | { file: "../subfolder/file2", result: pth.normalize("subfolder/file2") }, 84 | { file: "../subfolder2/file2", result: pth.normalize("subfolder2/file2") }, 85 | { file: "../subfolder/file2", result: pth.normalize("subfolder/file2") }, 86 | { file: "../../subfolder2/file2", result: pth.normalize("subfolder2/file2") } 87 | ]; 88 | 89 | for (const { file, result } of Array.from(tests)) { 90 | tests.push({ result, file: file.split("/").join("\\") }); 91 | } 92 | 93 | for (let test of tests) { 94 | expect(canonical(test.file)).to.equal(test.result); 95 | } 96 | }); 97 | it("function zipnamefix()", () => { 98 | const tests = [ 99 | // no name 100 | { file: "", result: "" }, 101 | // file has name 102 | { file: "file", result: "file" }, 103 | { file: "../file", result: "file" }, 104 | { file: "../../../file", result: "file" }, 105 | { file: "./../file", result: "file" }, 106 | { file: "../../file", 
result: "file" }, 107 | { file: "../../file1/../file2", result: "file2" }, 108 | { file: "../subfolder/file2", result: "subfolder/file2" }, 109 | { file: "../subfolder2/file2", result: "subfolder2/file2" }, 110 | { file: "../subfolder/file2", result: "subfolder/file2" }, 111 | { file: "../../subfolder2/file2", result: "subfolder2/file2" } 112 | ]; 113 | 114 | for (const { file, result } of Array.from(tests)) { 115 | tests.push({ result, file: file.split("/").join("\\") }); 116 | } 117 | 118 | for (let test of tests) { 119 | expect(zipnamefix(test.file)).to.equal(test.result); 120 | } 121 | }); 122 | }); 123 | }); 124 | -------------------------------------------------------------------------------- /test/methods/zipcrypto.test.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | const { expect } = require("chai"); 3 | const { decrypt, encrypt, _salter } = require("../../methods/zipcrypto"); 4 | const { crc32 } = require("../../util/utils"); 5 | 6 | // node crypto 7 | const { createHash } = require("crypto"); 8 | 9 | describe("method - zipcrypto", () => { 10 | describe("zipcrypto decrypt", () => { 11 | const source = { 12 | crc: 0xd87f7e0c, 13 | // 16 byte buffer as test source 14 | data: Buffer.from("D1Q5///EbpBY6rHIZXvd3A==", "base64"), 15 | // just data integrity check 16 | md5: "wYHjota6dQNazueWO9/uDg==", 17 | pwdok: "secret", 18 | pwdbad: "Secret", 19 | flagsencrypted: 0x01, 20 | flagsinfozipencrypted: 0x09, 21 | timeHighByte: 0xd8, 22 | // result 23 | result: Buffer.from("test", "ascii") 24 | }; 25 | 26 | // test invalid input data 27 | it("handles invalid data field values / types", () => { 28 | for (const data of [undefined, null, "str", true, false, 6, Buffer.alloc(4)]) { 29 | const result = decrypt(data, { crc: source.crc }, source.pwdok); 30 | expect(result).to.have.lengthOf(0); 31 | } 32 | }); 33 | 34 | // test is data intact 35 | it("is test data valid", () => { 36 | // source data 37 | const md5sum = createHash("md5"); 38 | md5sum.update(source.data); 39 | expect(md5sum.digest("base64")).to.equal(source.md5); 40 | // result data 41 | expect(crc32(source.result)).to.equal(source.crc); 42 | }); 43 | 44 | // is error thrown if invalid password was provided 45 | it("should throw if invalid password is provided", () => { 46 | expect(function badpassword() { 47 | decrypt(source.data, { crc: source.crc, flags: source.flagsencrypted }, source.pwdbad); 48 | }).to.throw(); 49 | 50 | expect(function okpassword() { 51 | decrypt(source.data, { crc: source.crc, flags: source.flagsencrypted }, source.pwdok); 52 | }).to.not.throw(); 53 | }); 54 | 55 | // is error thrown if invalid password was provided 56 | it("should throw if invalid password is provided for Info-Zip bit 3 flag", () => { 57 | expect(function badpassword() { 58 | decrypt(source.data, { crc: source.crc, flags: source.flagsinfozipencrypted, timeHighByte: source.timeHighByte }, source.pwdbad); 59 | }).to.throw(); 60 | 61 | expect(function okpassword() { 62 | decrypt(source.data, { crc: source.crc, flags: source.flagsinfozipencrypted, timeHighByte: source.timeHighByte }, source.pwdok); 63 | }).to.not.throw(); 64 | }); 65 | 66 | // test decryption with both password types 67 | it("test decrypted data with password", () => { 68 | // test password, string 69 | const result1 = decrypt(source.data, { crc: source.crc, flags: source.flagsencrypted }, source.pwdok); 70 | expect(result1.compare(source.result)).to.equal(0); 71 | 72 | // test password, buffer 73 | const result2 = 
decrypt(source.data, { crc: source.crc, flags: source.flagsencrypted }, Buffer.from(source.pwdok, "ascii")); 74 | expect(result2.compare(source.result)).to.equal(0); 75 | }); 76 | }); 77 | 78 | describe("zipcrypto encrypt", () => { 79 | const source = { 80 | crc: 0xd87f7e0c, 81 | // data 82 | data_str: "test", 83 | data_buffer: Buffer.from("test", "ascii"), 84 | salt: Buffer.from("xx+OYQ1Pkvo0ztPY", "base64"), 85 | // 16 byte buffer as test source 86 | data: Buffer.from("D1Q5///EbpBY6rHIZXvd3A==", "base64"), 87 | // just data integrity check 88 | pwdok: "secret", 89 | // result 90 | result: Buffer.from("D1Q5///EbpBY6rHIZXvd3A==", "base64") 91 | }; 92 | 93 | // test binary results with known salt 94 | it("test binary results with known salt", () => { 95 | const head = { crc: source.crc }; 96 | // inject known salt 97 | _salter(source.salt); 98 | const result = encrypt(source.data_str, head, source.pwdok, false); 99 | expect(result.compare(source.result)).to.equal(0); 100 | // restore salting 101 | _salter(); 102 | }); 103 | 104 | // round-trip encryption and decryption with node random salt 105 | it("test encryption and decryption with node random salt", () => { 106 | const head = { crc: source.crc }; 107 | _salter("node"); 108 | // test password, string 109 | const data_buf = Buffer.from(source.data_str); 110 | const result1 = encrypt(source.data_str, head, source.pwdok, false); 111 | const result2 = decrypt(result1, head, source.pwdok); 112 | expect(result2.compare(data_buf)).to.equal(0); 113 | _salter(); 114 | }); 115 | 116 | // round-trip encryption and decryption with known source data 117 | it("test encryption and decryption with known source data", () => { 118 | const head = { crc: source.crc }; 119 | // test password, string 120 | const data_buf = Buffer.from(source.data_str); 121 | const result1 = encrypt(source.data_str, head, source.pwdok, false); 122 | const result2 = decrypt(result1, head, source.pwdok); 123 | expect(result2.compare(data_buf)).to.equal(0); 124 | }); 125 | 126 | // test how encryption handles assorted JavaScript values 127 | it("test encrypting and decrypting with some javascript objects", () => { 128 | const tests = [true, null, false, undefined, {}, [], 747, new Date(), [{}]]; 129 | const head = {}; 130 | 131 | for (const test of tests) { 132 | const data_buf = test == null ? 
Buffer.alloc(0) : Buffer.from(test.toString()); 133 | head.crc = crc32(data_buf); 134 | 135 | const result1 = encrypt(test, head, source.pwdok, false); 136 | const result2 = decrypt(result1, head, source.pwdok); 137 | expect(result2.compare(data_buf)).to.equal(0); 138 | } 139 | }); 140 | }); 141 | }); 142 | -------------------------------------------------------------------------------- /methods/zipcrypto.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // node crypto, we use it to generate the salt 4 | // eslint-disable-next-line node/no-unsupported-features/node-builtins 5 | const { randomFillSync } = require("crypto"); 6 | const Errors = require("../util/errors"); 7 | 8 | // generate CRC32 lookup table 9 | const crctable = new Uint32Array(256).map((t, crc) => { 10 | for (let j = 0; j < 8; j++) { 11 | if (0 !== (crc & 1)) { 12 | crc = (crc >>> 1) ^ 0xedb88320; 13 | } else { 14 | crc >>>= 1; 15 | } 16 | } 17 | return crc >>> 0; 18 | }); 19 | 20 | // C-style uInt32 multiply (discards higher bits, whereas JS multiply discards lower bits) 21 | const uMul = (a, b) => Math.imul(a, b) >>> 0; 22 | 23 | // crc32 single byte update (the same logic is part of the utils.crc32 function :) ) 24 | const crc32update = (pCrc32, bval) => { 25 | return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); 26 | }; 27 | 28 | // function for generating the salt for the encryption header 29 | const genSalt = () => { 30 | if ("function" === typeof randomFillSync) { 31 | return randomFillSync(Buffer.alloc(12)); 32 | } else { 33 | // fallback if function is not defined 34 | return genSalt.node(); 35 | } 36 | }; 37 | 38 | // salt generation with node random function (mainly as fallback) 39 | genSalt.node = () => { 40 | const salt = Buffer.alloc(12); 41 | const len = salt.length; 42 | for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; 43 | return salt; 44 | }; 45 | 46 | // general config 47 | const config = { 48 | genSalt 49 | }; 50 | 51 | // Class Initkeys handles some basic operations with the keys 52 | function Initkeys(pw) { 53 | const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw); 54 | this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); 55 | for (let i = 0; i < pass.length; i++) { 56 | this.updateKeys(pass[i]); 57 | } 58 | } 59 | 60 | Initkeys.prototype.updateKeys = function (byteValue) { 61 | const keys = this.keys; 62 | keys[0] = crc32update(keys[0], byteValue); 63 | keys[1] += keys[0] & 0xff; 64 | keys[1] = uMul(keys[1], 134775813) + 1; 65 | keys[2] = crc32update(keys[2], keys[1] >>> 24); 66 | return byteValue; 67 | }; 68 | 69 | Initkeys.prototype.next = function () { 70 | const k = (this.keys[2] | 2) >>> 0; // key 71 | return (uMul(k, k ^ 1) >> 8) & 0xff; // decode 72 | }; 73 | 74 | function make_decrypter(/*Buffer*/ pwd) { 75 | // 1. First stage: initialize the keys 76 | const keys = new Initkeys(pwd); 77 | 78 | // return decrypter function 79 | return function (/*Buffer*/ data) { 80 | // result - we create new Buffer for results 81 | const result = Buffer.alloc(data.length); 82 | let pos = 0; 83 | // process input data 84 | for (let c of data) { 85 | //c ^= keys.next(); 86 | //result[pos++] = c; // decode & Save Value 87 | result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte 88 | } 89 | return result; 90 | }; 91 | } 92 | 93 | function make_encrypter(/*Buffer*/ pwd) { 94 | // 1. 
93 | function make_encrypter(/*Buffer*/ pwd) { 94 | // 1. Stage: initialize keys 95 | const keys = new Initkeys(pwd); 96 | 97 | // return encrypting function; result and pos live here so we don't have to merge buffers later 98 | return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { 99 | // we create a new Buffer for the results if none was provided 100 | if (!result) result = Buffer.alloc(data.length); 101 | // process input data 102 | for (let c of data) { 103 | const k = keys.next(); // save key byte 104 | result[pos++] = c ^ k; // save encrypted value 105 | keys.updateKeys(c); // update keys with the plaintext byte 106 | } 107 | return result; 108 | }; 109 | } 110 | 111 | function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { 112 | if (!data || !Buffer.isBuffer(data) || data.length < 12) { 113 | return Buffer.alloc(0); 114 | } 115 | 116 | // 1. initialize and generate the decrypting function 117 | const decrypter = make_decrypter(pwd); 118 | 119 | // 2. decrypt the salt, which is always 12 bytes and is part of the file content 120 | const salt = decrypter(data.slice(0, 12)); 121 | 122 | // if bit 3 (0x08) of the general-purpose flags field is set, check salt[11] against the high byte of the 123 | // 2-byte header time (as per Info-ZIP spec), otherwise against the high byte of the header CRC 124 | const verifyByte = (header.flags & 0x8) === 0x8 ? header.timeHighByte : header.crc >>> 24; 125 | 126 | // 3. does the password meet expectations? 127 | if (salt[11] !== verifyByte) { 128 | throw Errors.WRONG_PASSWORD(); 129 | } 130 | 131 | // 4. decode content 132 | return decrypter(data.slice(12)); 133 | } 134 | 135 | // let's add a way to populate the salt; NOT RECOMMENDED for production, but maybe useful for testing general functionality 136 | function _salter(data) { 137 | if (Buffer.isBuffer(data) && data.length >= 12) { 138 | // be aware - currently the salting buffer data is modified 139 | config.genSalt = function () { 140 | return data.slice(0, 12); 141 | }; 142 | } else if (data === "node") { 143 | // test salt generation with node random function 144 | config.genSalt = genSalt.node; 145 | } else { 146 | // if the value is not acceptable, the config gets reset 147 | config.genSalt = genSalt; 148 | } 149 | } 150 | 151 | function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { 152 | // 1. test data; null/undefined becomes an empty buffer 153 | if (data == null) data = Buffer.alloc(0); 154 | // if data is not a buffer, we make a buffer from it 155 | if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); 156 | 157 | // 2. initialize and generate the encrypting function 158 | const encrypter = make_encrypter(pwd); 159 | 160 | // 3. generate salt (12 bytes of random data) 161 | const salt = config.genSalt(); 162 | salt[11] = (header.crc >>> 24) & 0xff; 163 | 164 | // old implementations (before PKZip 2.04g) used a two-byte check 165 | if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; 166 | 167 | // 4. 
create output 168 | const result = Buffer.alloc(data.length + 12); 169 | encrypter(salt, result); 170 | 171 | // finally encode content 172 | return encrypter(data, result, 12); 173 | } 174 | 175 | module.exports = { decrypt, encrypt, _salter }; 176 | -------------------------------------------------------------------------------- /util/constants.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | /* The local file header */ 3 | LOCHDR : 30, // LOC header size 4 | LOCSIG : 0x04034b50, // "PK\003\004" 5 | LOCVER : 4, // version needed to extract 6 | LOCFLG : 6, // general purpose bit flag 7 | LOCHOW : 8, // compression method 8 | LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) 9 | LOCCRC : 14, // uncompressed file crc-32 value 10 | LOCSIZ : 18, // compressed size 11 | LOCLEN : 22, // uncompressed size 12 | LOCNAM : 26, // filename length 13 | LOCEXT : 28, // extra field length 14 | 15 | /* The Data descriptor */ 16 | EXTSIG : 0x08074b50, // "PK\007\008" 17 | EXTHDR : 16, // EXT header size 18 | EXTCRC : 4, // uncompressed file crc-32 value 19 | EXTSIZ : 8, // compressed size 20 | EXTLEN : 12, // uncompressed size 21 | 22 | /* The central directory file header */ 23 | CENHDR : 46, // CEN header size 24 | CENSIG : 0x02014b50, // "PK\001\002" 25 | CENVEM : 4, // version made by 26 | CENVER : 6, // version needed to extract 27 | CENFLG : 8, // encrypt, decrypt flags 28 | CENHOW : 10, // compression method 29 | CENTIM : 12, // modification time (2 bytes time, 2 bytes date) 30 | CENCRC : 16, // uncompressed file crc-32 value 31 | CENSIZ : 20, // compressed size 32 | CENLEN : 24, // uncompressed size 33 | CENNAM : 28, // filename length 34 | CENEXT : 30, // extra field length 35 | CENCOM : 32, // file comment length 36 | CENDSK : 34, // volume number start 37 | CENATT : 36, // internal file attributes 38 | CENATX : 38, // external file attributes (host system dependent) 39 | CENOFF : 42, // LOC header offset 40 | 41 | /* The entries in the end of central directory */ 42 | ENDHDR : 22, // END header size 43 | ENDSIG : 0x06054b50, // "PK\005\006" 44 | ENDSUB : 8, // number of entries on this disk 45 | ENDTOT : 10, // total number of entries 46 | ENDSIZ : 12, // central directory size in bytes 47 | ENDOFF : 16, // offset of first CEN header 48 | ENDCOM : 20, // zip file comment length 49 | 50 | END64HDR : 20, // zip64 END header size 51 | END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" 52 | END64START : 4, // number of the disk with the start of the zip64 53 | END64OFF : 8, // relative offset of the zip64 end of central directory 54 | END64NUMDISKS : 16, // total number of disks 55 | 56 | ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" 57 | ZIP64HDR : 56, // zip64 record minimum size 58 | ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE 59 | ZIP64SIZE : 4, // zip64 size of the central directory record 60 | ZIP64VEM : 12, // zip64 version made by 61 | ZIP64VER : 14, // zip64 version needed to extract 62 | ZIP64DSK : 16, // zip64 number of this disk 63 | ZIP64DSKDIR : 20, // number of the disk with the start of the record directory 64 | ZIP64SUB : 24, // number of entries on this disk 65 | ZIP64TOT : 32, // total number of entries 66 | ZIP64SIZB : 40, // zip64 central directory size in bytes 67 | ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number 68 | ZIP64EXTRA : 56, // extensible data sector 69 | 70 | 
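    // Note (illustrative, not part of the original file): the *SIG entries above are the
    // little-endian "PK.." magic numbers, and most other entries are byte offsets or
    // sizes within their records. A reader sketch for the END record:
    //   buf.readUInt32LE(0) === ENDSIG  // end-of-central-directory found
    //   buf.readUInt16LE(ENDTOT)        // total number of entries
    //   buf.readUInt32LE(ENDOFF)        // offset of the first CEN header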
/* Compression methods */ 71 | STORED : 0, // no compression 72 | SHRUNK : 1, // shrunk 73 | REDUCED1 : 2, // reduced with compression factor 1 74 | REDUCED2 : 3, // reduced with compression factor 2 75 | REDUCED3 : 4, // reduced with compression factor 3 76 | REDUCED4 : 5, // reduced with compression factor 4 77 | IMPLODED : 6, // imploded 78 | // 7 reserved for Tokenizing compression algorithm 79 | DEFLATED : 8, // deflated 80 | ENHANCED_DEFLATED: 9, // enhanced deflated 81 | PKWARE : 10, // PKWare DCL imploded 82 | // 11 reserved by PKWARE 83 | BZIP2 : 12, // compressed using BZIP2 84 | // 13 reserved by PKWARE 85 | LZMA : 14, // LZMA 86 | // 15-17 reserved by PKWARE 87 | IBM_TERSE : 18, // compressed using IBM TERSE 88 | IBM_LZ77 : 19, // IBM LZ77 z Architecture 89 | AES_ENCRYPT : 99, // WinZIP AES encryption method 90 | 91 | /* General purpose bit flag */ 92 | // values can be obtained with the expression 2**bitnr 93 | FLG_ENC : 1, // Bit 0: encrypted file 94 | FLG_COMP1 : 2, // Bit 1, compression option 95 | FLG_COMP2 : 4, // Bit 2, compression option 96 | FLG_DESC : 8, // Bit 3, data descriptor 97 | FLG_ENH : 16, // Bit 4, enhanced deflating 98 | FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. 99 | FLG_STR : 64, // Bit 6, strong encryption (patented) 100 | // Bits 7-10: Currently unused. 101 | FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) 102 | // Bit 12: Reserved by PKWARE for enhanced compression. 103 | // Bit 13: encrypted the Central Directory (patented). 104 | // Bits 14-15: Reserved by PKWARE. 105 | FLG_MSK : 4096, // mask header values 106 | 107 | /* Load type */ 108 | FILE : 2, 109 | BUFFER : 1, 110 | NONE : 0, 111 | 112 | /* 4.5 Extensible data fields */ 113 | EF_ID : 0, 114 | EF_SIZE : 2, 115 | 116 | /* Header IDs */ 117 | ID_ZIP64 : 0x0001, 118 | ID_AVINFO : 0x0007, 119 | ID_PFS : 0x0008, 120 | ID_OS2 : 0x0009, 121 | ID_NTFS : 0x000a, 122 | ID_OPENVMS : 0x000c, 123 | ID_UNIX : 0x000d, 124 | ID_FORK : 0x000e, 125 | ID_PATCH : 0x000f, 126 | ID_X509_PKCS7 : 0x0014, 127 | ID_X509_CERTID_F : 0x0015, 128 | ID_X509_CERTID_C : 0x0016, 129 | ID_STRONGENC : 0x0017, 130 | ID_RECORD_MGT : 0x0018, 131 | ID_X509_PKCS7_RL : 0x0019, 132 | ID_IBM1 : 0x0065, 133 | ID_IBM2 : 0x0066, 134 | ID_POSZIP : 0x4690, 135 | 136 | EF_ZIP64_OR_32 : 0xffffffff, 137 | EF_ZIP64_OR_16 : 0xffff, 138 | EF_ZIP64_SUNCOMP : 0, 139 | EF_ZIP64_SCOMP : 8, 140 | EF_ZIP64_RHO : 16, 141 | EF_ZIP64_DSN : 24 142 | }; 143 | -------------------------------------------------------------------------------- /test/mocha.js: -------------------------------------------------------------------------------- 1 | const { expect } = require("chai"); 2 | //const Attr = require("../util").FileAttr; 3 | const Zip = require("../adm-zip"); 4 | const pth = require("path"); 5 | const fs = require("fs"); 6 | const rimraf = require("rimraf"); 7 | 8 | describe("adm-zip", () => { 9 | const destination = "./test/xxx"; 10 | 11 | // clean up folder content 12 | afterEach((done) => rimraf(destination, done)); 13 | 14 | it("zip pathTraversal", () => { 15 | const target = pth.join(destination, "test"); 16 | const zip = new Zip(); 17 | zip.addFile("../../../test1.ext", "content"); 18 | zip.addFile("folder/../../test2.ext", "content"); 19 | zip.addFile("test3.ext", "content"); 20 | 21 | const extract = new Zip(zip.toBuffer()); 22 | zip.getEntries().forEach((e) => zip.extractEntryTo(e, destination, false, true)); 23 | 24 | extract.extractAllTo(target); 25 | const files = walk(target); 26 | 
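        // Note (explanatory, added): addFile() canonicalizes entry names, so the
        // "../../../" and "folder/../../" prefixes above are stripped and all three
        // files land inside the extraction target, which is what the sorted
        // comparison below verifies.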
expect(files.sort()).to.deep.equal([pth.normalize("./test/xxx/test/test1.ext"), pth.normalize("./test/xxx/test/test2.ext"), pth.normalize("./test/xxx/test/test3.ext")]); 27 | }); 28 | 29 | it("zip.addFile - add directory", () => { 30 | const zip1 = new Zip(); 31 | zip1.addFile("dir11/", null); 32 | zip1.addFile("dir12/", undefined); 33 | zip1.addFile("dir13/", ""); 34 | zip1.addFile("dir11/dir21/"); 35 | zip1.addFile("dir11/dir22/"); 36 | zip1.addFile("dir12/dir23/"); 37 | zip1.addFile("dir13/dir24/"); 38 | zip1.addFile("dir11/dir22/test.txt", "content"); 39 | const zip2 = new Zip(zip1.toBuffer()); 40 | const zip2Entries = zip2.getEntries().map((e) => e.entryName); 41 | 42 | expect(zip2Entries).to.deep.equal(["dir11/", "dir11/dir21/", "dir11/dir22/", "dir11/dir22/test.txt", "dir12/", "dir12/dir23/", "dir13/", "dir13/dir24/"]); 43 | }); 44 | 45 | it("passes issue-237-Twizzeld test case", () => { 46 | const zip = new Zip("./test/assets/issue-237-Twizzeld.zip"); 47 | const zipEntries = zip.getEntries(); 48 | zipEntries.forEach(function (zipEntry) { 49 | if (!zipEntry.isDirectory) { 50 | zip.extractEntryTo(zipEntry, "./", false, true); 51 | // This should create text.txt in the current working directory. 52 | // It will actually create two, but the first is overwritten by the second. 53 | } 54 | }); 55 | let text = fs.readFileSync("./text.txt").toString(); 56 | expect(text).to.equal("ride em cowboy!"); 57 | fs.unlinkSync("./text.txt"); 58 | }); 59 | 60 | it("passes issue-438-AddFile with windows path separator", () => { 61 | const zip = new Zip(); 62 | zip.addFile("foo\\bar.txt", "test", "test"); 63 | zip.extractAllTo(destination); 64 | 65 | const files = walk(destination); 66 | 67 | expect(files.sort()).to.deep.equal([pth.normalize("./test/xxx/foo/bar.txt")].sort()); 68 | }); 69 | 70 | it("testing noSort option", () => { 71 | const content = "test"; 72 | const comment = "comment"; 73 | 74 | // is sorting working - value "false" 75 | const zip1 = new Zip({ noSort: false }); 76 | zip1.addFile("a.txt", content, comment); 77 | zip1.addFile("c.txt", content, comment); 78 | zip1.addFile("b.txt", content, comment); 79 | zip1.addFile("a.txt", content, comment); 80 | zip1.toBuffer(); 81 | 82 | const zip1Entries = zip1.getEntries().map((e) => e.entryName); 83 | expect(zip1Entries).to.deep.equal(["a.txt", "b.txt", "c.txt"]); 84 | 85 | // skip sorting - value "true" 86 | const zip2 = new Zip({ noSort: true }); 87 | zip1.addFile("a.txt", content, comment); 88 | zip2.addFile("c.txt", content, comment); 89 | zip2.addFile("b.txt", content, comment); 90 | zip2.addFile("a.txt", content, comment); 91 | zip2.toBuffer(); 92 | 93 | const zip2Entries = zip2.getEntries().map((e) => e.entryName); 94 | expect(zip2Entries).to.deep.equal(["c.txt", "b.txt", "a.txt"]); 95 | }); 96 | 97 | it("windows style path with backslash should be converted to slashes", () => { 98 | const content = "test"; 99 | const comment = "comment"; 100 | 101 | // skip sorting - value "true" 102 | const zip1 = new Zip({ noSort: true }); 103 | // the next 3 lines have identical names after normalization, so only one file is added 104 | zip1.addFile("..\\..\\..\\windows\\system32\\drivers\\etc\\hosts.txt", content, comment); 105 | zip1.addFile("aa\\bb\\..\\cc\\..\\..\\windows\\system32\\drivers\\admin\\..\\etc\\hosts.txt", content, comment); 106 | zip1.addFile(".\\windows\\system32\\drivers\\etc\\hosts.txt", content, comment); 107 | // 3 other files 108 | zip1.addFile("system32\\drivers\\etc\\hosts.txt", content, comment); 109 | zip1.addFile("drivers\\etc\\hosts.txt", content, 
comment); 110 | zip1.addFile(".\\hosts.txt", content, comment); 111 | zip1.toBuffer(); 112 | 113 | const zip1Entries = zip1.getEntries().map((e) => e.entryName); 114 | expect(zip1Entries).to.deep.equal(["windows/system32/drivers/etc/hosts.txt", "system32/drivers/etc/hosts.txt", "drivers/etc/hosts.txt", "hosts.txt"]); 115 | }); 116 | 117 | // Issue 64 118 | it("zip.writeZip - multiple times", () => { 119 | const zip = new Zip("./test/assets/ultra.zip"); 120 | const fileName = pth.resolve(destination, "writezip"); 121 | 122 | for (let i = 0; i < 5; i++) zip.writeZip(`${fileName}.${i}.zip`); 123 | 124 | const expected_list = ["./test/xxx/writezip.0.zip", "./test/xxx/writezip.1.zip", "./test/xxx/writezip.2.zip", "./test/xxx/writezip.3.zip", "./test/xxx/writezip.4.zip"].map( 125 | pth.normalize 126 | ); 127 | 128 | const files = walk(destination); 129 | expect(files.sort()).to.deep.equal(expected_list); 130 | }); 131 | 132 | /* 133 | it("repro: symlink", () => { 134 | const zip = new Zip("./test/assets/symlink.zip"); 135 | zip.extractAllTo(destination); 136 | 137 | const linkPath = pth.join(destination, "link"); 138 | const linkStat = fs.lstatSync(linkPath); 139 | expect(linkStat.isSymbolicLink()).to.be.true; 140 | 141 | const linkTarget = fs.readlinkSync(linkPath); 142 | expect(linkTarget).to.equal("target"); 143 | 144 | const linkContent = fs.readFileSync(linkPath); 145 | expect(linkContent).to.equal("diddlydiddly doo, i'm a linkaroo"); 146 | }); 147 | */ 148 | }); 149 | 150 | function walk(dir) { 151 | let results = []; 152 | const list = fs.readdirSync(dir); 153 | list.forEach(function (file) { 154 | file = dir + "/" + file; 155 | const stat = fs.statSync(file); 156 | if (stat && stat.isDirectory()) { 157 | /* Recurse into a subdirectory */ 158 | results = results.concat(walk(file)); 159 | } else { 160 | /* Is a file */ 161 | results.push(pth.normalize(file)); 162 | } 163 | }); 164 | return results; 165 | } 166 | -------------------------------------------------------------------------------- /test/header.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | const { expect } = require("chai"); 3 | 4 | describe("headers", () => { 5 | describe("main-header", () => { 6 | const mainHeader = require("../headers/mainHeader"); 7 | // empty zip file 8 | const empty = Buffer.from("504b0506000000000000000000000000000000000000", "hex"); 9 | const readBuf = Buffer.from("504b050600000000cac0cefaed0b0000eeffc0000000", "hex"); 10 | 11 | // try read empty file 12 | it("read empty file", () => { 13 | const mainh = new mainHeader(); 14 | mainh.loadFromBinary(empty); 15 | 16 | expect(mainh.commentLength).to.equal(0); 17 | expect(mainh.diskEntries).to.equal(0); 18 | expect(mainh.mainHeaderSize).to.equal(22); 19 | expect(mainh.offset).to.equal(0); 20 | expect(mainh.size).to.equal(0); 21 | }); 22 | 23 | // write new empty file 24 | it("write empty file", () => { 25 | const mainh = new mainHeader(); 26 | const buf = mainh.toBinary(); 27 | 28 | expect(buf.length).to.equal(empty.length); 29 | expect(buf).to.eql(empty); 30 | }); 31 | 32 | // compare values 33 | it("compare correct read values", () => { 34 | const mainh = new mainHeader(); 35 | mainh.loadFromBinary(readBuf); 36 | 37 | expect(mainh.commentLength).to.equal(0); 38 | expect(mainh.mainHeaderSize).to.equal(22); 39 | expect(mainh.diskEntries).to.equal(0xc0ca); 40 | expect(mainh.totalEntries).to.equal(0xface); 41 | expect(mainh.offset).to.equal(0xc0ffee); 42 | expect(mainh.size).to.equal(0xbed); 43 | 44 | 
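            // Cross-check (illustrative, added): the readBuf fixture decodes as
            // little-endian fields at the documented END offsets from util/constants.js:
            //   readBuf.readUInt16LE(8)  -> 0xc0ca   (ENDSUB, diskEntries)
            //   readBuf.readUInt16LE(10) -> 0xface   (ENDTOT, totalEntries)
            //   readBuf.readUInt32LE(12) -> 0xbed    (ENDSIZ, size)
            //   readBuf.readUInt32LE(16) -> 0xc0ffee (ENDOFF, offset)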
// test toJSON function 45 | expect(mainh.toJSON()).to.eql({ 46 | diskEntries: 0xc0ca, 47 | totalEntries: 0xface, 48 | size: "3053 bytes", 49 | offset: "0xC0FFEE", 50 | commentLength: 0 51 | }); 52 | }); 53 | 54 | it("set comment length", () => { 55 | const mainh = new mainHeader(); 56 | mainh.commentLength = 5; 57 | 58 | expect(mainh.commentLength).to.equal(5); 59 | expect(mainh.mainHeaderSize).to.equal(22 + 5); 60 | }); 61 | 62 | // test JSON and string representations of an empty header 63 | it("test toString function", () => { 64 | const mainh = new mainHeader(); 65 | mainh.loadFromBinary(empty); 66 | 67 | // test toJSON function 68 | expect(mainh.toJSON()).to.eql({ 69 | totalEntries: 0, 70 | size: "0 bytes", 71 | offset: "0x0000", 72 | diskEntries: 0, 73 | commentLength: 0 74 | }); 75 | 76 | // test toString function (remove CR from CRLF) 77 | expect(mainh.toString().replace(/\r/g, "")).to.equal( 78 | '{\n\t"diskEntries": 0,\n\t"totalEntries": 0,\n\t"size": "0 bytes",\n\t"offset": "0x0000",\n\t"commentLength": 0\n}' 79 | ); 80 | }); 81 | }); 82 | 83 | describe("central-header", () => { 84 | const centralHeader = require("../headers/entryHeader"); 85 | const datestamp = [1981, 3, 1, 12, 10, 10]; 86 | const readBuf = Buffer.from("504b0102140014000008080045618102efbeadde0001000000020000000000000000000000000000000000000000", "hex"); 87 | 88 | // comparison values for readBuf 89 | const readBufValues = { 90 | attr: 0, 91 | inAttr: 0, 92 | offset: 0, 93 | flags: 0x800, 94 | made: 20, 95 | version: 20, 96 | 97 | method: 8, 98 | size: 0x200, 99 | compressedSize: 0x100, 100 | crc: 0xdeadbeef, 101 | 102 | diskNumStart: 0, 103 | commentLength: 0, 104 | extraLength: 0, 105 | fileNameLength: 0 106 | }; 107 | 108 | it("compare binary header values with some predetermined values", () => { 109 | const head = new centralHeader(); 110 | head.loadFromBinary(readBuf); 111 | 112 | for (const name in readBufValues) { 113 | expect(head[name]).to.equal(readBufValues[name]); 114 | head[name] = readBufValues[name]; 115 | } 116 | 117 | expect(head.centralHeaderSize).to.equal(46); 118 | 119 | // compare the date split into individual values, otherwise local time or timezone differences mess up our results 120 | expect([head.time.getFullYear(), head.time.getMonth(), head.time.getDate(), head.time.getHours(), head.time.getMinutes(), head.time.getSeconds()]).to.eql(datestamp); 121 | 122 | // test toJSON function 123 | const headerdata = { 124 | made: 20, 125 | version: 20, 126 | flags: 2048, 127 | method: "DEFLATED (8)", 128 | crc: "0xDEADBEEF", 129 | compressedSize: "256 bytes", 130 | size: "512 bytes", 131 | fileNameLength: "0 bytes", 132 | extraLength: "0 bytes", 133 | commentLength: "0 bytes", 134 | diskNumStart: 0, 135 | inAttr: 0, 136 | attr: 0, 137 | offset: 0, 138 | centralHeaderSize: "46 bytes" 139 | }; 140 | 141 | headerdata.time = head.time; 142 | expect(head.toJSON()).to.eql(headerdata); 143 | }); 144 | 145 | it("handles fileAttr when attr is above 0x80000000", () => { 146 | const attr = 0x81E80000; 147 | 148 | const head = new centralHeader(); 149 | head.loadFromBinary(readBuf); 150 | head.attr = attr; 151 | 152 | expect(head.fileAttr).to.equal(0x01E8); 153 | }); 154 | 155 | it("read binary and create new binary from it, they have to be equal", () => { 156 | const head = new centralHeader(); 157 | head.loadFromBinary(readBuf); 158 | const buf = head.centralHeaderToBinary(); 159 | 160 | expect(buf.length).to.equal(readBuf.length); 161 | expect(buf).to.eql(readBuf); 162 | }); 163 | 164 | it("construct header with values and compare, binaries have to be equal", () => { 165 | 
const head = new centralHeader(); 166 | 167 | // Set Values 168 | for (const name in readBufValues) { 169 | head[name] = readBufValues[name]; 170 | } 171 | 172 | // time from datestamp 173 | // header time is constructed with local time 174 | // if time is constructed by new Date() it is also in local zone and so it cancels possible timezone difference 175 | head.time = new Date(...datestamp); 176 | 177 | const buf = head.centralHeaderToBinary(); 178 | 179 | expect(buf.length).to.equal(readBuf.length); 180 | expect(buf).to.eql(readBuf); 181 | }); 182 | 183 | it("centralHeaderSize results if postdata is specified", () => { 184 | const head = new centralHeader(); 185 | 186 | head.fileNameLength = 100; 187 | head.commentLength = 200; 188 | head.extraLength = 100; 189 | 190 | expect(head.centralHeaderSize).to.equal(446); 191 | }); 192 | 193 | it("centralHeader date if date is specified", () => { 194 | const head = new centralHeader(); 195 | const times = [1978, 3, 1, 12, 10, 10]; 196 | 197 | head.time = new Date(...times); 198 | expect(head.timeval).to.equal(0); 199 | 200 | times[0] = 1979; 201 | head.time = new Date(...times); 202 | expect(head.timeval).to.equal(0); 203 | 204 | times[0] = 1980; 205 | head.time = new Date(...times); 206 | expect(head.timeval).to.equal(0x00816145); 207 | 208 | times[0] = 1981; 209 | head.time = new Date(...times); 210 | expect(head.timeval).to.equal(0x02816145); 211 | }); 212 | 213 | describe("local-header", () => { 214 | const localHeader = Buffer.from("504b030414000008080045618102efbeadde000100000002000000000000", "hex"); 215 | 216 | const localHeaderValues = { 217 | compressedSize: 0x100, 218 | crc: 0xdeadbeef, 219 | extraLen: 0, 220 | flags: 0x800, 221 | fnameLen: 0, 222 | method: 8, 223 | size: 0x200, 224 | version: 20 225 | }; 226 | 227 | it("compare binary header values with predetermined values", () => { 228 | const head = new centralHeader(); 229 | head.loadFromBinary(readBuf); 230 | head.loadLocalHeaderFromBinary(localHeader); 231 | 232 | for (const name in localHeaderValues) { 233 | expect(head.localHeader[name]).to.equal(localHeaderValues[name]); 234 | } 235 | }); 236 | 237 | it("read binary and create new binary from it, they have to be equal", () => { 238 | const head = new centralHeader(); 239 | head.loadFromBinary(readBuf); 240 | head.loadLocalHeaderFromBinary(localHeader); 241 | 242 | const buf = head.localHeaderToBinary(); 243 | 244 | expect(buf.length).to.equal(localHeader.length); 245 | expect(buf).to.eql(localHeader); 246 | }); 247 | 248 | it("construct header by values and compare binaries have to be equal", () => { 249 | const head = new centralHeader(); 250 | head.loadFromBinary(readBuf); 251 | 252 | // Set Values 253 | for (const name in readBufValues) { 254 | head[name] = readBufValues[name]; 255 | } 256 | 257 | // time from datestamp 258 | // header time is constructed with local time 259 | // if time is constructed by new Date() it is also in local zone and so it cancels possible timezone difference 260 | head.time = new Date(...datestamp); 261 | 262 | const buf = head.localHeaderToBinary(); 263 | 264 | expect(buf.length).to.equal(localHeader.length); 265 | expect(buf).to.eql(localHeader); 266 | }); 267 | }); 268 | }); 269 | }); 270 | -------------------------------------------------------------------------------- /util/utils.js: -------------------------------------------------------------------------------- 1 | const fsystem = require("fs"); 2 | const pth = require("path"); 3 | const Constants = require("./constants"); 4 | const 
Errors = require("./errors"); 5 | const isWin = typeof process === "object" && "win32" === process.platform; 6 | 7 | const is_Obj = (obj) => typeof obj === "object" && obj !== null; 8 | 9 | // generate CRC32 lookup table 10 | const crcTable = new Uint32Array(256).map((t, c) => { 11 | for (let k = 0; k < 8; k++) { 12 | if ((c & 1) !== 0) { 13 | c = 0xedb88320 ^ (c >>> 1); 14 | } else { 15 | c >>>= 1; 16 | } 17 | } 18 | return c >>> 0; 19 | }); 20 | 21 | // UTILS functions 22 | 23 | function Utils(opts) { 24 | this.sep = pth.sep; 25 | this.fs = fsystem; 26 | 27 | if (is_Obj(opts)) { 28 | // custom filesystem 29 | if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { 30 | this.fs = opts.fs; 31 | } 32 | } 33 | } 34 | 35 | module.exports = Utils; 36 | 37 | // INSTANTIABLE functions 38 | 39 | Utils.prototype.makeDir = function (/*String*/ folder) { 40 | const self = this; 41 | 42 | // Sync - make directories tree 43 | function mkdirSync(/*String*/ fpath) { 44 | let resolvedPath = fpath.split(self.sep)[0]; 45 | fpath.split(self.sep).forEach(function (name) { 46 | if (!name || name.substr(-1, 1) === ":") return; 47 | resolvedPath += self.sep + name; 48 | var stat; 49 | try { 50 | stat = self.fs.statSync(resolvedPath); 51 | } catch (e) { 52 | self.fs.mkdirSync(resolvedPath); 53 | } 54 | if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY(`"${resolvedPath}"`); 55 | }); 56 | } 57 | 58 | mkdirSync(folder); 59 | }; 60 | 61 | Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { 62 | const self = this; 63 | if (self.fs.existsSync(path)) { 64 | if (!overwrite) return false; // cannot overwrite 65 | 66 | var stat = self.fs.statSync(path); 67 | if (stat.isDirectory()) { 68 | return false; 69 | } 70 | } 71 | var folder = pth.dirname(path); 72 | if (!self.fs.existsSync(folder)) { 73 | self.makeDir(folder); 74 | } 75 | 76 | var fd; 77 | try { 78 | fd = self.fs.openSync(path, "w", 0o666); // 0666 79 | } catch (e) { 80 | self.fs.chmodSync(path, 0o666); 81 | fd = self.fs.openSync(path, "w", 0o666); 82 | } 83 | if (fd) { 84 | try { 85 | self.fs.writeSync(fd, content, 0, content.length, 0); 86 | } finally { 87 | self.fs.closeSync(fd); 88 | } 89 | } 90 | self.fs.chmodSync(path, attr || 0o666); 91 | return true; 92 | }; 93 | 94 | Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { 95 | if (typeof attr === "function") { 96 | callback = attr; 97 | attr = undefined; 98 | } 99 | 100 | const self = this; 101 | 102 | self.fs.exists(path, function (exist) { 103 | if (exist && !overwrite) return callback(false); 104 | 105 | self.fs.stat(path, function (err, stat) { 106 | if (exist && stat.isDirectory()) { 107 | return callback(false); 108 | } 109 | 110 | var folder = pth.dirname(path); 111 | self.fs.exists(folder, function (exists) { 112 | if (!exists) self.makeDir(folder); 113 | 114 | self.fs.open(path, "w", 0o666, function (err, fd) { 115 | if (err) { 116 | self.fs.chmod(path, 0o666, function () { 117 | self.fs.open(path, "w", 0o666, function (err, fd) { 118 | self.fs.write(fd, content, 0, content.length, 0, function () { 119 | self.fs.close(fd, function () { 120 | self.fs.chmod(path, attr || 0o666, function () { 121 | callback(true); 122 | }); 123 | }); 124 | }); 125 | }); 126 | }); 127 | } else if (fd) { 128 | self.fs.write(fd, content, 0, content.length, 0, function () { 129 | self.fs.close(fd, function () { 130 | self.fs.chmod(path, attr || 
0o666, function () { 131 | callback(true); 132 | }); 133 | }); 134 | }); 135 | } else { 136 | self.fs.chmod(path, attr || 0o666, function () { 137 | callback(true); 138 | }); 139 | } 140 | }); 141 | }); 142 | }); 143 | }); 144 | }; 145 | 146 | Utils.prototype.findFiles = function (/*String*/ path) { 147 | const self = this; 148 | 149 | function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { 150 | if (typeof pattern === "boolean") { 151 | recursive = pattern; 152 | pattern = undefined; 153 | } 154 | let files = []; 155 | self.fs.readdirSync(dir).forEach(function (file) { 156 | const path = pth.join(dir, file); 157 | const stat = self.fs.statSync(path); 158 | 159 | if (!pattern || pattern.test(path)) { 160 | files.push(pth.normalize(path) + (stat.isDirectory() ? self.sep : "")); 161 | } 162 | 163 | if (stat.isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); 164 | }); 165 | return files; 166 | } 167 | 168 | return findSync(path, undefined, true); 169 | }; 170 | 171 | /** 172 | * Callback delivering the list of found files. 173 | * 174 | * @callback filelistCallback 175 | * @param {Error} err - Error object 176 | * @param {string[]} list - the list of file paths that were found 177 | */ 178 | 179 | /** 180 | * 181 | * @param {string} dir 182 | * @param {filelistCallback} cb 183 | */ 184 | Utils.prototype.findFilesAsync = function (dir, cb) { 185 | const self = this; 186 | let results = []; 187 | self.fs.readdir(dir, function (err, list) { 188 | if (err) return cb(err); 189 | let list_length = list.length; 190 | if (!list_length) return cb(null, results); 191 | list.forEach(function (file) { 192 | file = pth.join(dir, file); 193 | self.fs.stat(file, function (err, stat) { 194 | if (err) return cb(err); 195 | if (stat) { 196 | results.push(pth.normalize(file) + (stat.isDirectory() ? self.sep : "")); 197 | if (stat.isDirectory()) { 198 | self.findFilesAsync(file, function (err, res) { 199 | if (err) return cb(err); 200 | results = results.concat(res); 201 | if (!--list_length) cb(null, results); 202 | }); 203 | } else { 204 | if (!--list_length) cb(null, results); 205 | } 206 | } 207 | }); 208 | }); 209 | }); 210 | }; 211 | 212 | Utils.prototype.getAttributes = function () {}; 213 | 214 | Utils.prototype.setAttributes = function () {}; 215 | 216 | // STATIC functions 217 | 218 | // crc32 single update (it is part of crc32) 219 | Utils.crc32update = function (crc, byte) { 220 | return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); 221 | }; 222 | 223 | Utils.crc32 = function (buf) { 224 | if (typeof buf === "string") { 225 | buf = Buffer.from(buf, "utf8"); 226 | } 227 | 228 | let len = buf.length; 229 | let crc = ~0; 230 | for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); 231 | // xor and cast as uint32 number 232 | return ~crc >>> 0; 233 | }; 234 | 235 | Utils.methodToString = function (/*Number*/ method) { 236 | switch (method) { 237 | case Constants.STORED: 238 | return "STORED (" + method + ")"; 239 | case Constants.DEFLATED: 240 | return "DEFLATED (" + method + ")"; 241 | default: 242 | return "UNSUPPORTED (" + method + ")"; 243 | } 244 | }; 245 | 246 | 
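// Illustrative (assumed) behavior of the path helpers below, on a POSIX host:
//   Utils.canonical("..\\..\\evil.txt")  -> "evil.txt"
//   Utils.canonical("a/./b/../c.txt")    -> "a/c.txt"
// Both strip ".." traversal by normalizing the name against a fake absolute root
// before re-joining it relative to ".".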
/** 247 | * removes ".." style path elements 248 | * @param {string} path - fixable path 249 | * @returns string - fixed filepath 250 | */ 251 | Utils.canonical = function (/*string*/ path) { 252 | if (!path) return ""; 253 | // trick normalize into thinking the path is absolute 254 | const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); 255 | return pth.join(".", safeSuffix); 256 | }; 257 | 258 | /** 259 | * fix file names in archive 260 | * @param {string} path - fixable path 261 | * @returns string - fixed filepath 262 | */ 263 | 264 | Utils.zipnamefix = function (path) { 265 | if (!path) return ""; 266 | // trick normalize into thinking the path is absolute 267 | const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); 268 | return pth.posix.join(".", safeSuffix); 269 | }; 270 | 271 | /** 272 | * returns the last element of the array for which the callback is truthy 273 | * @param {Array} arr 274 | * @param {function} callback 275 | * @returns the matching element, or undefined 276 | */ 277 | Utils.findLast = function (arr, callback) { 278 | if (!Array.isArray(arr)) throw new TypeError("arr is not array"); 279 | 280 | const len = arr.length >>> 0; 281 | for (let i = len - 1; i >= 0; i--) { 282 | if (callback(arr[i], i, arr)) { 283 | return arr[i]; 284 | } 285 | } 286 | return void 0; 287 | }; 288 | 289 | // make absolute paths, taking prefix as the root folder 290 | Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { 291 | prefix = pth.resolve(pth.normalize(prefix)); 292 | var parts = name.split("/"); 293 | for (var i = 0, l = parts.length; i < l; i++) { 294 | var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); 295 | if (path.indexOf(prefix) === 0) { 296 | return path; 297 | } 298 | } 299 | return pth.normalize(pth.join(prefix, pth.basename(name))); 300 | }; 301 | 302 | // converts Buffer, Uint8Array and string types to Buffer 303 | Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input, /* function */ encoder) { 304 | if (Buffer.isBuffer(input)) { 305 | return input; 306 | } else if (input instanceof Uint8Array) { 307 | return Buffer.from(input); 308 | } else { 309 | // expect a string; all other values are invalid and return an empty buffer 310 | return typeof input === "string" ? 
encoder(input) : Buffer.alloc(0); 311 | } 312 | }; 313 | 314 | Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { 315 | var slice = Buffer.from(buffer.slice(index, index + 8)); 316 | slice.swap64(); 317 | 318 | return parseInt(`0x${slice.toString("hex")}`); 319 | }; 320 | 321 | Utils.fromDOS2Date = function (val) { 322 | return new Date(((val >> 25) & 0x7f) + 1980, Math.max(((val >> 21) & 0x0f) - 1, 0), Math.max((val >> 16) & 0x1f, 1), (val >> 11) & 0x1f, (val >> 5) & 0x3f, (val & 0x1f) << 1); 323 | }; 324 | 325 | Utils.fromDate2DOS = function (val) { 326 | let date = 0; 327 | let time = 0; 328 | if (val.getFullYear() > 1979) { 329 | date = (((val.getFullYear() - 1980) & 0x7f) << 9) | ((val.getMonth() + 1) << 5) | val.getDate(); 330 | time = (val.getHours() << 11) | (val.getMinutes() << 5) | (val.getSeconds() >> 1); 331 | } 332 | return (date << 16) | time; 333 | }; 334 | 335 | Utils.isWin = isWin; // Do we have windows system 336 | Utils.crcTable = crcTable; 337 | -------------------------------------------------------------------------------- /history.md: -------------------------------------------------------------------------------- 1 | 0.5.4 / 2021-03-08 2 | ================== 3 | * Fixed relative paths 4 | * Added zipcrypto encryption 5 | * Lower verMade for macOS when generating zip file 6 | 7 | 0.5.3 / 2021-02-07 8 | ================== 9 | * Fixed filemode when unzipping 10 | 11 | 0.5.2 / 2021-01-27 12 | ================== 13 | * Fixed path traversal issue (GHSL-2020-198) 14 | 15 | 0.5.1 / 2020-11-27 16 | ================== 17 | 18 | * Incremented version (cthackers) 19 | * Fixed outFileName (cthackers) 20 | 21 | 0.5.0 / 2020-11-19 22 | ================== 23 | * Added extra parameter to extractEntryTo so target filename can be renamed (cthackers) 24 | * Updated dev dependency (cthackers) 25 | * modified addLocalFolder method (5saviahv) 26 | * modified addLocalFile method (5saviahv) 27 | * Deflate needs min V2.0 (5saviahv) 28 | * Node v6 (5saviahv) 29 | * Added ZipCrypto decrypting ability (5saviahv) 30 | * LICENSE filename in package.json (5saviahv) 31 | * add multibyte-encoded comment with byte length instead of character length (Kosuke Suzuki) 32 | * Bump lodash from 4.17.15 to 4.17.19 (dependabot[bot]) 33 | * now it works in browser (Emiliano Necciari) 34 | 35 | 0.4.16 / 2020-06-23 36 | =================== 37 | * Updated mocha version to fix vulnerability (cthackers) 38 | * Update project version (cthackers) 39 | * fix: throw real exception objects on error (Matthew Sainsbury) 40 | * Version number incremented (Saqib M) 41 | * Update zipFile.js (Saqib M) 42 | * Update README.md with the latest URLs (Takuya Noguchi) 43 | * Update Node.js version to use in CI tests (Takuya Noguchi) 44 | * process.versions is null when the library is used in browser (Emiliano Necciari) 45 | 46 | 0.4.14 / 2020-02-06 47 | =================== 48 | * Version increment for npm publish (cthackers) 49 | * Iterate over entries without storing their metadata (Pierre Lehnen) 50 | * Add partial support for zip64 (larger number of entries) (Pierre Lehnen) 51 | * Escape $ sign for regex in addLocalFolder() (William) 52 | * fix accent filename (mart_-) 53 | * Removed improperly raised error while decompressing empty file asynchronously. (Nicolas Leclerc) 54 | * fix: CRC is unexpectedly changed after zip is re-created (teppeis) 55 | 56 | 0.4.13 / 2018-10-18 57 | =================== 58 | * Add async version of addLocalFile Use open and readFile instead of existsSync and readFileSync. 
There are still some sync functions left in the Utils.findFiles call, but the impact is minimal compared to the readFileSync. (Maigret Aurelien) 59 | * Fix jsdoc typings for functions. (Leon Aves) 60 | * fixed Utils.FileSystem overwriting 'fs' module even when 'original-fs' is broken (Tom Wallroth) 61 | * fix race-condition crash when extracting data and extracted files are (re)moved (Tom Wallroth) 62 | * Fix: bad buffer.alloc for .toBuffer in async mode (Colin GILLE) 63 | * Add a full license text to the distribution (Honza Javorek) 64 | * Rename MIT-LICENSE.txt to LICENSE (Standa Opichal) 65 | * fix bug when filename or path contains multi-byte characters (warbaby) 66 | * bump version to 0.4.12 (Marsette Vona) 67 | * change default compression method for added files back to DEFLATED from STORED (revert #139) (Marsette Vona) 68 | * remove JSDeflater() and JSInflater() in favor of zlib.deflateRawSync() and zlib.inflateRawSync() respectively (Marsette Vona) 69 | * Fix (Mirko Tebaldi) 70 | * 0.4.12 - Created a test to check Twizzeld's issue on Issue #237. (was not able to replicate his issue) (cjacobs) 71 | * Fix Buffer.alloc bug #234 (keyesdav) 72 | * 0.4.12 - Fix additional issue with extractEntryTo improperly handling directory children. (cjacobs) 73 | * 0.4.12 - Fix #237, add tests, update travis node versions. (cjacobs) 74 | * 0.4.12 - Fix #237, add tests, update travis node versions. (cjacobs) 75 | * 0.4.12 - Fix #237, add tests, update travis node versions. (cjacobs) 76 | * 0.4.12 - Fix #237, add tests, update travis node versions. (cjacobs) 77 | * add tests for CRC fixes (Kevin Tjiam) 78 | * compare calculated CRC with loaded CRC (Kevin Tjiam) 79 | * handle errors in callback from getDataAsync (Kevin Tjiam) 80 | 81 | 0.4.11 / 2018-05-13 82 | =================== 83 | * Version bump (cthackers) 84 | * Fixed #176 (cthackers) 85 | * Fixed wrong date on files (issue #203) (cthackers) 86 | 87 | 0.4.10 / 2018-05-13 88 | =================== 89 | * Fixed bugs introduced with 0.4.9 (File Formats) 90 | * Fix issue #218 (Jean-Marc Collin) 91 | * Fix octal literals so they work in strict mode (Houssam Haidar) 92 | * To support strict mode use 0o prefix to octal numbers (Jon Tore Hafstad) 93 | * Updated entryHeaderToBinary. Fixed a typo that made the CRC be written as an Int32 instead of a UInt32. (Rafael Costa) 94 | 95 | 0.4.9 / 2018-04-25 96 | ================== 97 | * Update package.json (The Brain) 98 | * Update README.md (The Brain) 99 | * fix: resolve both target and entry path (Danny Grander) 100 | 101 | 0.4.8 / 2018-04-23 102 | ================== 103 | * Update package.json (The Brain) 104 | * Update package.json (The Brain) 105 | * Update package.json (The Brain) 106 | * fix: prevent extracting archived files outside of target path (Aviad Reich) 107 | * add try-catch around fs.writeSync (olya) 108 | * Fix data accessing example in README (Philipp Muens) 109 | * Remove buffers `noAssert` argument (Ruben Bridgewater) 110 | * Fix license expression to be compatible to SPDX. 
(Golo Roden) 111 | * Added travis ci support (Amila Welihinda) 112 | * add bug fix on special character in filename that are allowed in linux but not in windows (Ygal Bellaiche) 113 | * Change project name for publishing to npm (David Kadlecek) 114 | * Added support for electron original-fs (David Kadlecek) 115 | * fixed #130: ensure buffer (lloiser) 116 | * fix Issue: https://github.com/cthackers/adm-zip/issues/102 (mygoare) 117 | * Update license attribute (Peter deHaan) 118 | * lowcase for the function name (Changyu Geng) 119 | * Add a test function (Changyu Geng) 120 | * Under windows, the path should be normalize first, otherwise the localPath will still use back slash (Shauk Wu) 121 | * Update adm-zip.js (MikeNerevarin) 122 | * Fix adm-zip.addFile default attributes for files and directories (Pavel Strashkin) 123 | * Fixed CRC bug (The Brain) 124 | 125 | 0.4.7 / 2015-02-09 126 | ================== 127 | * Update zipEntry.js (The Brain) 128 | * Update package.json (The Brain) 129 | 130 | 0.4.5 / 2015-02-09 131 | ================== 132 | * Bumped library version for a a npm push (cthackers) 133 | * Merged pull request (cthackers) 134 | * Use `files` property in package.json (Kevin Martensson) 135 | * preserve attr in entry headers (João Moreno) 136 | * when overwrite flag is set, should check if the target exists, rather than the targetPath (Shauk Wu) 137 | * + manage cases where paths 'C:/' and 'c:/' are the same (IvanDimanov) 138 | * writeZip now calles provided callback if given (Adam Booth) 139 | * Fixed a bug where empty ZIP files were being treated as invalid. (Max Sorensen) 140 | * Add path.normalize to addLocalFolder (aomurbekov) 141 | * add an optional filter to addLocalFolder (Gregg Tavares) 142 | * Bail out after an error (XeonCore) 143 | * Fix indentation (XeonCore) 144 | * Add async versions of extractAllTo (XeonCore) 145 | * added decrypt support (Alexander Skovpen) 146 | * Fix false report of BAD_CRC on deflated entries in async mode (Julien Chaumond) 147 | * Added possibility to rename local file when adding it to archive (Taschenschieber) 148 | * Read ZIP64 extended information (Raphael Schweikert) 149 | * Add ZIP format specification for reference (Raphael Schweikert) 150 | * Ignore node_modules directory (Raphael Schweikert) 151 | * Revert "Start a new" (Iacob Nasca) 152 | * Revert "Added test files" (Iacob Nasca) 153 | * Revert "Added zipEntry class" (Iacob Nasca) 154 | * Revert "Incremented pkg version" (Iacob Nasca) 155 | * Revert "Waaa, i should never use gui to manage git repos" (Iacob Nasca) 156 | * Incremented pkg version (Iacob Nasca) 157 | * Waaa, i should never use gui to manage git repos (Iacob Nasca) 158 | * Added zipEntry class (Iacob Nasca) 159 | * Added test files (Iacob Nasca) 160 | * Start a new (Iacob Nasca) 161 | 162 | 0.4.4 / 2014-02-04 163 | ================== 164 | * Incremented version to 0.4.4 (The Brain) 165 | * Update README.md (The Brain) 166 | * Update README.md (yarsh) 167 | * Make strict mode compatible - don't use octals (yarsh) 168 | * Make strict mode compatible - don't use octals in addFile (yarsh) 169 | * Make strict mode compatible - don't create implicit global "Headers" (yarsh) 170 | * Make strict mode compatible - don't delete global (yarsh) 171 | * fix zipFile#deleteEntry. (brn) 172 | * Updated sample code for zip.extractEntryTo. Added maintainEntryPath parameter. (Third Santor) 173 | * Fixed issue where chunks of data were not being handled before sending to callback. Reused the code from inflater. 
(John Alfaro) 174 | * Update package.json (The Brain) 175 | * Add toAsyncBuffer code (Jack Lee) 176 | * Update zipEntry.js (The Brain) 177 | 178 | 0.4.3 / 2013-04-11 179 | ================== 180 | * fixed issue #26 (Iacob Nasca) 181 | * Updated deflater. Fixed more bugs and flows. Some memory improv (Iacob Nasca) 182 | * Fixed some compression bugs (Iacob Nasca) 183 | * Incremented project version. Removed some usless files (Iacob Nasca) 184 | * Fixed crc errors (Iacob Nasca) 185 | * - (Iacob Nasca) 186 | * fix isDirectory bug (percy) 187 | * support multibyte filename (blacktail) 188 | 189 | 0.2.1 / 2013-03-04 190 | ================== 191 | * Some typos and npm version bumbp (The Brain) 192 | * Update util/utils.js (Danny Trunk) 193 | * Fixed issue #15, #12, #13 (The Brain) 194 | * Fixed path.existSync (issue #27) (The Brain) 195 | * Fixed issue #29 (The Brain) 196 | * fixed the call to pth.existsSync to use fs.existsSync (Simon Horton) 197 | * fixed a "path.existsSync is now called fs.existsSync" warning (Simon Horton) 198 | 199 | 0.1.9 / 2012-11-05 200 | ================== 201 | * Incremented npm version (The Brain) 202 | * Merged pull request by sihorton (The Brain) 203 | 204 | 0.1.8 / 2012-10-11 205 | ================== 206 | * Version increment. NPM push (cthackers) 207 | * smartly fallback to path.existsSync. (Chris Talkington) 208 | 209 | 0.1.7 / 2012-09-29 210 | ================== 211 | * New npm push (cthackers) 212 | * Fix deprecation notice (Peter Rekdal) 213 | * :gem: Travis CI image/link in readme :gem: (travis4all) 214 | * :gem: Added travis.yml file :gem: (travis4all) 215 | * Added license information (The Brain) 216 | 217 | 0.1.5 / 2012-08-03 218 | ================== 219 | * Version bump for a npm release (The Brain) 220 | * Adding a class to support fs attributes and permissions (The Brain) 221 | * Starting a test suite (The Brain) 222 | * added possibility to unzip data from raw buffer. Just need to pass Buffer. Tested. (Anton Podviaznikov) 223 | * Fixed writeZip bug (The Brain) 224 | * Incremented version number for new npm push (The Brain) 225 | * Fixed async methods (The Brain) 226 | 227 | 0.1.3 / 2012-03-12 228 | ================== 229 | * Incremented npm build nr (The Brain) 230 | * Rewrit the Inflater method (The Brain) 231 | * Implemented Deflater class. Fixed Inflater bug. Some other refactorings (The Brain) 232 | * Changed nothing (The Brain) 233 | * Fixed a bug in the data headers (The Brain) 234 | * Partially implemented addLocalFolder method (The Brain) 235 | * Added methods documentation (The Brain) 236 | * Added asynconous decompression and public methods (The Brain) 237 | * Fixed some doc typos (The Brain) 238 | 239 | 0.1.2 / 2012-02-28 240 | ================== 241 | * Updated some documentation and version number for npm (The Brain) 242 | * Refactoring, refactoring, refactoring (The Brain) 243 | * Fixed zipEntry typo causing null data to be sent to the inflater (The Brain) 244 | * Fixed crc32 function.\nAdded END header support.\nMoved and renamed some files.\nOther refactoring (The Brain) 245 | * More refactoring (The Brain) 246 | * Major major refactoring (The Brain) 247 | * Added crc32 function (The Brain) 248 | 249 | 0.1.1 / 2012-02-23 250 | ================== 251 | * Changed md file with the newest api names. Implemented extract to disk methods (The Brain) 252 | * Fixed deflate bug. Some refactoring (The Brain) 253 | * Changed some docs (The Brain) 254 | * Changed some namings. 
Added new methods (The Brain) 255 | * More doc (The Brain) 256 | * More doc (The Brain) 257 | * More doc (The Brain) 258 | * Added a bit of documentation (The Brain) 259 | * Added support for INFLATE method (The Brain) 260 | * support reading zip files with STORE archive method (The Brain) 261 | * first commit (The Brain) 262 | -------------------------------------------------------------------------------- /test/mbcs/mbcs.test.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert"); 2 | const pth = require("path"); 3 | const Zip = require("../../adm-zip"); 4 | const rimraf = require("rimraf"); 5 | const iconv = require("iconv-lite"); 6 | 7 | describe("Multibyte Character Sets in Filename", () => { 8 | const destination = pth.resolve("./test/xxx"); 9 | const asset1 = pth.resolve("./test/mbcs/", "chs_name.zip"); 10 | 11 | // clean up folder content 12 | afterEach((done) => rimraf(destination, done)); 13 | 14 | // chinese 15 | it("ascii filename and chinese content", (done) => { 16 | const encoding = "ascii"; 17 | const decoder = { 18 | encode: (data) => iconv.encode(data, encoding), 19 | decode: (data) => iconv.decode(data, encoding) 20 | }; 21 | 22 | const content = "测试文本\ntest text"; 23 | 24 | const zip1 = new Zip({ decoder }); 25 | zip1.addFile("ascii.txt", content); 26 | zip1.addFile("test/ascii.txt", content); 27 | zip1.writeZip(pth.join(destination, "00-ascii.zip")); 28 | 29 | const zip2 = new Zip(pth.join(destination, "00-ascii.zip"), { decoder }); 30 | const text = zip2.readAsText("ascii.txt"); 31 | assert(text === content, text); 32 | done(); 33 | }); 34 | 35 | it("add files with chinese filename into new zip", (done) => { 36 | const encoding = "gbk"; 37 | const decoder = { 38 | encode: (data) => iconv.encode(data, encoding), 39 | decode: (data) => iconv.decode(data, encoding) 40 | }; 41 | 42 | const content = "文件内容"; 43 | const file = "中文路径.txt"; 44 | 45 | const zip1 = new Zip({ decoder }); 46 | zip1.addFile(file, content); 47 | zip1.addFile("test/" + file, content); 48 | zip1.writeZip(pth.join(destination, "01-chs_name.zip")); 49 | 50 | const zip2 = new Zip(pth.join(destination, "01-chs_name.zip"), { decoder }); 51 | const text = zip2.readAsText(file); 52 | assert(text === content, text); 53 | done(); 54 | }); 55 | 56 | it("fetch file with chinese filename (gbk) in existing zip", (done) => { 57 | const encoding = "gbk"; 58 | const decoder = { 59 | encode: (data) => iconv.encode(data, encoding), 60 | decode: (data) => iconv.decode(data, encoding) 61 | }; 62 | 63 | let tZip = new Zip(asset1, { decoder }); 64 | for (let entry of tZip.getEntries()) { 65 | if (entry.isDirectory) continue; 66 | const CNpath = entry.entryName; 67 | assert(CNpath === "中文路径.txt"); 68 | } 69 | done(); 70 | }); 71 | 72 | it("add file with chinese filename into existing zip", (done) => { 73 | const encoding = "gbk"; 74 | const decoder = { 75 | encode: (data) => iconv.encode(data, encoding), 76 | decode: (data) => iconv.decode(data, encoding) 77 | }; 78 | 79 | const content = "文件内容"; 80 | const file1 = "test/中文测试.txt"; 81 | const file2 = "中文路径.txt"; 82 | 83 | let zip1 = new Zip(asset1, { decoder }); 84 | zip1.addFile(file1, content); 85 | zip1.writeZip(pth.join(destination, "02-chs_name.zip")); 86 | 87 | const zip2 = new Zip(pth.join(destination, "02-chs_name.zip"), { decoder }); 88 | const text1 = zip2.readAsText(file1); 89 | assert(text1 === content, text1); 90 | 91 | const text2 = zip2.readAsText(file2); 92 | assert(text2 === content, 
text2); 93 | 94 | done(); 95 | }); 96 | 97 | it("read and keep entry.extra while write zip", () => { 98 | const encoding = "gbk"; 99 | const decoder = { 100 | encode: (data) => iconv.encode(data, encoding), 101 | decode: (data) => iconv.decode(data, encoding) 102 | }; 103 | 104 | let zip1 = new Zip(asset1, { decoder }); 105 | let entry1 = zip1.getEntry("中文路径.txt", "gbk"); 106 | zip1.writeZip(pth.join(destination, "03-chs_name_clone.zip")); 107 | 108 | let zip2 = new Zip(pth.join(destination, "03-chs_name_clone.zip"), { decoder }); 109 | let entry2 = zip2.getEntry("中文路径.txt"); 110 | assert(entry1.extra.equals(entry2.extra)); 111 | 112 | // "read EFSflag" 113 | assert(entry1.header.flags_efs === false); 114 | assert(entry2.header.flags_efs === false); 115 | }); 116 | 117 | it("add files with chinese filename (UTF-8) into new zip", (done) => { 118 | let zip1 = new Zip(); 119 | zip1.addFile("測試.txt", "測試"); 120 | zip1.addFile("test/測試.txt", "測試"); 121 | zip1.writeZip(pth.join(destination, "04-cht_name.zip")); 122 | 123 | let zip2 = new Zip(pth.join(destination, "04-cht_name.zip")); 124 | let entry = zip2.getEntry("測試.txt"); 125 | const text = zip2.readAsText(entry); 126 | assert(text === "測試", text); 127 | 128 | assert(entry.header.flags_efs); 129 | done(); 130 | }); 131 | 132 | it("add files with chinese filename (Big5) into new zip", (done) => { 133 | const encoding = "big5"; 134 | const decoder = { 135 | encode: (data) => iconv.encode(data, encoding), 136 | decode: (data) => iconv.decode(data, encoding) 137 | }; 138 | 139 | const content = iconv.encode("測試", encoding); // buffer 140 | 141 | let zip1 = new Zip({ decoder }); 142 | zip1.addFile("測試.txt", content); 143 | zip1.addFile("test/測試.txt", content); 144 | zip1.writeZip(pth.join(destination, "05-cht_name_big5.zip")); 145 | 146 | const zip2 = new Zip(pth.join(destination, "05-cht_name_big5.zip"), { decoder }); 147 | const entry = zip2.getEntry("測試.txt"); 148 | const bufdata = zip2.readFile(entry); 149 | //console.log(entry.toJSON()) 150 | assert(bufdata.equals(content)); 151 | 152 | assert(!entry.header.flags_efs); 153 | done(); 154 | }); 155 | 156 | // japanese 157 | it("add files with japanese filename (UTF-8) into new zip", (done) => { 158 | const file = "にほんご.txt"; 159 | const content = "にほんご"; 160 | 161 | const zip1 = new Zip(); 162 | zip1.addFile(file, content); 163 | zip1.addFile("test/" + file, content); 164 | zip1.writeZip(pth.join(destination, "06-jp_name.zip")); 165 | 166 | const zip2 = new Zip(pth.join(destination, "06-jp_name.zip")); 167 | const text1 = zip2.readAsText(file); 168 | assert(text1 === content, text1); 169 | const entry2 = zip2.getEntry("./test/" + file); 170 | const text2 = zip2.readAsText(entry2); 171 | assert(text2 === content, text2); 172 | assert(entry2.header.flags_efs); 173 | done(); 174 | }); 175 | 176 | it("add files with japanese filename (EUC-JP) into new zip", (done) => { 177 | const encoding = "EUC-JP"; 178 | const decoder = { 179 | encode: (data) => iconv.encode(data, encoding), 180 | decode: (data) => iconv.decode(data, encoding) 181 | }; 182 | 183 | const file = "にほんご.txt"; 184 | const content = iconv.encode("にほんご", encoding); // buffer 185 | 186 | const zip1 = new Zip({ decoder }); 187 | zip1.addFile(file, content); 188 | zip1.addFile("test/" + file, content); 189 | zip1.writeZip(pth.join(destination, "07-jp_name.zip")); 190 | 191 | const zip2 = new Zip(pth.join(destination, "07-jp_name.zip"), { decoder }); 192 | let entry1 = zip2.getEntry(file); 193 | let bufdata1 = zip2.readFile(entry1); 194 | 
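            // Note (added): with a non-UTF-8 decoder like EUC-JP the EFS flag must
            // stay cleared (asserted below), since EFS declares UTF-8 filenames.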
assert(bufdata1.equals(content)); 195 | let entry2 = zip2.getEntry("./test/" + file); 196 | let bufdata2 = zip2.readFile(entry2); 197 | assert(bufdata2.equals(content)); 198 | assert(entry1.header.flags_efs === false); 199 | assert(entry2.header.flags_efs === false); 200 | done(); 201 | }); 202 | 203 | it("add files with japanese filename (Shift_JIS) into new zip", (done) => { 204 | const encoding = "Shift_JIS"; 205 | const decoder = { 206 | encode: (data) => iconv.encode(data, encoding), 207 | decode: (data) => iconv.decode(data, encoding) 208 | }; 209 | 210 | const file = "にほんご.txt"; 211 | const content = "にほんご"; 212 | const bufdata = iconv.encode(content, "utf16le"); // buffer 213 | 214 | const zip1 = new Zip({ decoder }); 215 | zip1.addFile(file, bufdata); 216 | zip1.addFile("test/" + file, bufdata); 217 | zip1.writeZip(pth.join(destination, "08-jp_name.zip")); 218 | 219 | const zip2 = new Zip(pth.join(destination, "08-jp_name.zip"), { decoder }); 220 | let text1 = zip2.readAsText(file, "utf16le"); 221 | assert(text1 === content, text1); 222 | let text2 = zip2.readAsText("test/" + file, "utf16le"); 223 | assert(text2 === content, text2); 224 | done(); 225 | }); 226 | 227 | // hebrew (written right to left) 228 | it("add files with hebrew filename (UTF-8) into new zip", (done) => { 229 | const file = "שפה עברית.txt"; 230 | const content = "יונה לבנה קטנה עפה מעל אנגליה"; 231 | 232 | const zip1 = new Zip(); 233 | zip1.addFile(file, content); 234 | zip1.addFile("test/" + file, content); 235 | zip1.writeZip(pth.join(destination, "09-heb_name.zip")); 236 | 237 | const zip2 = new Zip(pth.join(destination, "09-heb_name.zip")); 238 | const text1 = zip2.readAsText(file); 239 | assert(text1 === content, text1); 240 | const entry2 = zip2.getEntry("./test/" + file); 241 | const text2 = zip2.readAsText(entry2); 242 | assert(text2 === content, text2); 243 | assert(entry2.header.flags_efs); 244 | done(); 245 | }); 246 | 247 | it("add files with hebrew filename (win1255) into new zip", (done) => { 248 | const encoding = "win1255"; 249 | const decoder = { 250 | encode: (data) => iconv.encode(data, encoding), 251 | decode: (data) => iconv.decode(data, encoding) 252 | }; 253 | 254 | const file = "שפה עברית.txt"; 255 | const content = "יונה לבנה קטנה עפה מעל אנגליה"; 256 | const bufdata = iconv.encode(content, "utf16le"); // buffer 257 | 258 | const zip1 = new Zip({ decoder }); 259 | zip1.addFile(file, bufdata); 260 | zip1.addFile("test/" + file, bufdata); 261 | zip1.writeZip(pth.join(destination, "10-heb_name.zip")); 262 | 263 | const zip2 = new Zip(pth.join(destination, "10-heb_name.zip"), { decoder }); 264 | let text1 = zip2.readAsText(file, "utf16le"); 265 | assert(text1 === content, text1); 266 | let text2 = zip2.readAsText("test/" + file, "utf16le"); 267 | assert(text2 === content, text2); 268 | done(); 269 | }); 270 | 271 | // Cyrillic 272 | it("add files with bulgarian filename (win1251) into new zip", (done) => { 273 | const encoding = "win1251"; 274 | const decoder = { 275 | encode: (data) => iconv.encode(data, encoding), 276 | decode: (data) => iconv.decode(data, encoding) 277 | }; 278 | 279 | const file = "Български.txt"; 280 | const content = "Приключенията на таралежа"; 281 | const bufdata = iconv.encode(content, "utf16le"); // buffer 282 | 283 | const zip1 = new Zip({ decoder }); 284 | zip1.addFile(file, bufdata); 285 | zip1.addFile("test/" + file, bufdata); 286 | zip1.writeZip(pth.join(destination, "11-bul_name.zip")); 287 | 288 | const zip2 = new Zip(pth.join(destination, 
"11-bul_name.zip"), { decoder }); 289 | let entry1 = zip2.getEntry(file); 290 | let text1 = zip2.readAsText(entry1, "utf16le"); 291 | assert(text1 === content, text1); 292 | let entry2 = zip2.getEntry("./test/" + file); 293 | let text2 = zip2.readAsText(entry2, "utf16le"); 294 | assert(text2 === content, text2); 295 | assert(entry1.header.flags_efs === false); 296 | assert(entry2.header.flags_efs === false); 297 | done(); 298 | }); 299 | 300 | // Unicode symbols 301 | it("add files with Unicode symbols filename (utf8) into new zip", (done) => { 302 | const file = "Symbols⌛🙈🙉.txt"; 303 | const content = "♜♞♝♛♚♝♞♜\n♟♟♟♟♟♟♟♟\n♙♙♙♙♙♙♙♙\n♖♘♗♕♔♗♘♖"; 304 | const bufdata = iconv.encode(content, "utf16le"); // buffer 305 | 306 | const zip1 = new Zip(); 307 | zip1.addFile(file, bufdata); 308 | zip1.addFile("test/" + file, bufdata); 309 | zip1.writeZip(pth.join(destination, "12-sym_name.zip")); 310 | 311 | const zip2 = new Zip(pth.join(destination, "12-sym_name.zip")); 312 | let entry1 = zip2.getEntry(file); 313 | let text1 = zip2.readAsText(entry1, "utf16le"); 314 | assert(text1 === content, text1); 315 | let entry2 = zip2.getEntry("./test/" + file); 316 | let text2 = zip2.readAsText(entry2, "utf16le"); 317 | assert(text2 === content, text2); 318 | assert(entry1.header.flags_efs); 319 | assert(entry2.header.flags_efs); 320 | done(); 321 | }); 322 | }); 323 | -------------------------------------------------------------------------------- /headers/entryHeader.js: -------------------------------------------------------------------------------- 1 | var Utils = require("../util"), 2 | Constants = Utils.Constants; 3 | 4 | /* The central directory file header */ 5 | module.exports = function () { 6 | var _verMade = 20, // v2.0 7 | _version = 10, // v1.0 8 | _flags = 0, 9 | _method = 0, 10 | _time = 0, 11 | _crc = 0, 12 | _compressedSize = 0, 13 | _size = 0, 14 | _fnameLen = 0, 15 | _extraLen = 0, 16 | _comLen = 0, 17 | _diskStart = 0, 18 | _inattr = 0, 19 | _attr = 0, 20 | _offset = 0; 21 | 22 | _verMade |= Utils.isWin ? 0x0a00 : 0x0300; 23 | 24 | // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. 
25 | // Without it, file names may be corrupted in other apps when they use unicode chars 26 | _flags |= Constants.FLG_EFS; 27 | 28 | const _localHeader = { 29 | extraLen: 0 30 | }; 31 | 32 | // casting 33 | const uint32 = (val) => Math.max(0, val) >>> 0; 34 | const uint16 = (val) => Math.max(0, val) & 0xffff; 35 | const uint8 = (val) => Math.max(0, val) & 0xff; 36 | 37 | _time = Utils.fromDate2DOS(new Date()); 38 | 39 | return { 40 | get made() { 41 | return _verMade; 42 | }, 43 | set made(val) { 44 | _verMade = val; 45 | }, 46 | 47 | get version() { 48 | return _version; 49 | }, 50 | set version(val) { 51 | _version = val; 52 | }, 53 | 54 | get flags() { 55 | return _flags; 56 | }, 57 | set flags(val) { 58 | _flags = val; 59 | }, 60 | 61 | get flags_efs() { 62 | return (_flags & Constants.FLG_EFS) > 0; 63 | }, 64 | set flags_efs(val) { 65 | if (val) { 66 | _flags |= Constants.FLG_EFS; 67 | } else { 68 | _flags &= ~Constants.FLG_EFS; 69 | } 70 | }, 71 | 72 | get flags_desc() { 73 | return (_flags & Constants.FLG_DESC) > 0; 74 | }, 75 | set flags_desc(val) { 76 | if (val) { 77 | _flags |= Constants.FLG_DESC; 78 | } else { 79 | _flags &= ~Constants.FLG_DESC; 80 | } 81 | }, 82 | 83 | get method() { 84 | return _method; 85 | }, 86 | set method(val) { 87 | switch (val) { 88 | case Constants.STORED: 89 | this.version = 10; break; 90 | case Constants.DEFLATED: 91 | default: 92 | this.version = 20; 93 | } 94 | _method = val; 95 | }, 96 | 97 | get time() { 98 | return Utils.fromDOS2Date(this.timeval); 99 | }, 100 | set time(val) { 101 | val = new Date(val); 102 | this.timeval = Utils.fromDate2DOS(val); 103 | }, 104 | 105 | get timeval() { 106 | return _time; 107 | }, 108 | set timeval(val) { 109 | _time = uint32(val); 110 | }, 111 | 112 | get timeHighByte() { 113 | return uint8(_time >>> 8); 114 | }, 115 | get crc() { 116 | return _crc; 117 | }, 118 | set crc(val) { 119 | _crc = uint32(val); 120 | }, 121 | 122 | get compressedSize() { 123 | return _compressedSize; 124 | }, 125 | set compressedSize(val) { 126 | _compressedSize = uint32(val); 127 | }, 128 | 129 | get size() { 130 | return _size; 131 | }, 132 | set size(val) { 133 | _size = uint32(val); 134 | }, 135 | 136 | get fileNameLength() { 137 | return _fnameLen; 138 | }, 139 | set fileNameLength(val) { 140 | _fnameLen = val; 141 | }, 142 | 143 | get extraLength() { 144 | return _extraLen; 145 | }, 146 | set extraLength(val) { 147 | _extraLen = val; 148 | }, 149 | 150 | get extraLocalLength() { 151 | return _localHeader.extraLen; 152 | }, 153 | set extraLocalLength(val) { 154 | _localHeader.extraLen = val; 155 | }, 156 | 157 | get commentLength() { 158 | return _comLen; 159 | }, 160 | set commentLength(val) { 161 | _comLen = val; 162 | }, 163 | 164 | get diskNumStart() { 165 | return _diskStart; 166 | }, 167 | set diskNumStart(val) { 168 | _diskStart = uint32(val); 169 | }, 170 | 171 | get inAttr() { 172 | return _inattr; 173 | }, 174 | set inAttr(val) { 175 | _inattr = uint32(val); 176 | }, 177 | 178 | get attr() { 179 | return _attr; 180 | }, 181 | set attr(val) { 182 | _attr = uint32(val); 183 | }, 184 | 185 | // get Unix file permissions 186 | get fileAttr() { 187 | return (_attr || 0) >> 16 & 0xfff; 188 | }, 189 | 190 | get offset() { 191 | return _offset; 192 | }, 193 | set offset(val) { 194 | _offset = uint32(val); 195 | }, 196 | 197 | get encrypted() { 198 | return (_flags & Constants.FLG_ENC) === Constants.FLG_ENC; 199 | }, 200 | 201 | get centralHeaderSize() { 202 | return Constants.CENHDR + _fnameLen + _extraLen + _comLen; 203 | },
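/*
 * A minimal usage sketch of the flags_efs accessor above; this is an
 * illustration, assuming Constants.FLG_EFS is the UTF-8 language-encoding
 * bit (0x0800) as defined in util/constants.js, and that the header is
 * constructed through the package's headers/index.js export:
 *
 *   const { EntryHeader } = require("./headers"); // from the package root
 *   const header = new EntryHeader();
 *   header.flags_efs;              // true - set by default (see top of file)
 *   header.flags_efs = false;      // clears the EFS bit
 *   (header.flags & 0x0800) === 0; // true
 */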
204 | 205 | get realDataOffset() { 206 | return _offset + Constants.LOCHDR + _localHeader.fnameLen + _localHeader.extraLen; 207 | }, 208 | 209 | get localHeader() { 210 | return _localHeader; 211 | }, 212 | 213 | loadLocalHeaderFromBinary: function (/*Buffer*/ input) { 214 | var data = input.slice(_offset, _offset + Constants.LOCHDR); 215 | // 30 bytes and should start with "PK\003\004" 216 | if (data.readUInt32LE(0) !== Constants.LOCSIG) { 217 | throw Utils.Errors.INVALID_LOC(); 218 | } 219 | 220 | // version needed to extract 221 | _localHeader.version = data.readUInt16LE(Constants.LOCVER); 222 | // general purpose bit flag 223 | _localHeader.flags = data.readUInt16LE(Constants.LOCFLG); 224 | // desc flag 225 | _localHeader.flags_desc = (_localHeader.flags & Constants.FLG_DESC) > 0; 226 | // compression method 227 | _localHeader.method = data.readUInt16LE(Constants.LOCHOW); 228 | // modification time (2 bytes time, 2 bytes date) 229 | _localHeader.time = data.readUInt32LE(Constants.LOCTIM); 230 | // uncompressed file crc-32 value 231 | _localHeader.crc = data.readUInt32LE(Constants.LOCCRC); 232 | // compressed size 233 | _localHeader.compressedSize = data.readUInt32LE(Constants.LOCSIZ); 234 | // uncompressed size 235 | _localHeader.size = data.readUInt32LE(Constants.LOCLEN); 236 | // filename length 237 | _localHeader.fnameLen = data.readUInt16LE(Constants.LOCNAM); 238 | // extra field length 239 | _localHeader.extraLen = data.readUInt16LE(Constants.LOCEXT); 240 | 241 | // read extra data 242 | const extraStart = _offset + Constants.LOCHDR + _localHeader.fnameLen; 243 | const extraEnd = extraStart + _localHeader.extraLen; 244 | return input.slice(extraStart, extraEnd); 245 | }, 246 | 247 | loadFromBinary: function (/*Buffer*/ data) { 248 | // data should be 46 bytes and start with "PK 01 02" 249 | if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { 250 | throw Utils.Errors.INVALID_CEN(); 251 | } 252 | // version made by 253 | _verMade = data.readUInt16LE(Constants.CENVEM); 254 | // version needed to extract 255 | _version = data.readUInt16LE(Constants.CENVER); 256 | // encrypt, decrypt flags 257 | _flags = data.readUInt16LE(Constants.CENFLG); 258 | // compression method 259 | _method = data.readUInt16LE(Constants.CENHOW); 260 | // modification time (2 bytes time, 2 bytes date) 261 | _time = data.readUInt32LE(Constants.CENTIM); 262 | // uncompressed file crc-32 value 263 | _crc = data.readUInt32LE(Constants.CENCRC); 264 | // compressed size 265 | _compressedSize = data.readUInt32LE(Constants.CENSIZ); 266 | // uncompressed size 267 | _size = data.readUInt32LE(Constants.CENLEN); 268 | // filename length 269 | _fnameLen = data.readUInt16LE(Constants.CENNAM); 270 | // extra field length 271 | _extraLen = data.readUInt16LE(Constants.CENEXT); 272 | // file comment length 273 | _comLen = data.readUInt16LE(Constants.CENCOM); 274 | // volume number start 275 | _diskStart = data.readUInt16LE(Constants.CENDSK); 276 | // internal file attributes 277 | _inattr = data.readUInt16LE(Constants.CENATT); 278 | // external file attributes 279 | _attr = data.readUInt32LE(Constants.CENATX); 280 | // LOC header offset 281 | _offset = data.readUInt32LE(Constants.CENOFF); 282 | }, 283 | 284 | localHeaderToBinary: function () { 285 | // LOC header size (30 bytes) 286 | var data = Buffer.alloc(Constants.LOCHDR); 287 | // "PK\003\004" 288 | data.writeUInt32LE(Constants.LOCSIG, 0); 289 | // version needed to extract 290 | data.writeUInt16LE(_version, Constants.LOCVER); 291 | // general
purpose bit flag 292 | data.writeUInt16LE(_flags, Constants.LOCFLG); 293 | // compression method 294 | data.writeUInt16LE(_method, Constants.LOCHOW); 295 | // modification time (2 bytes time, 2 bytes date) 296 | data.writeUInt32LE(_time, Constants.LOCTIM); 297 | // uncompressed file crc-32 value 298 | data.writeUInt32LE(_crc, Constants.LOCCRC); 299 | // compressed size 300 | data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); 301 | // uncompressed size 302 | data.writeUInt32LE(_size, Constants.LOCLEN); 303 | // filename length 304 | data.writeUInt16LE(_fnameLen, Constants.LOCNAM); 305 | // extra field length 306 | data.writeUInt16LE(_localHeader.extraLen, Constants.LOCEXT); 307 | return data; 308 | }, 309 | 310 | centralHeaderToBinary: function () { 311 | // CEN header size (46 bytes) 312 | var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); 313 | // "PK\001\002" 314 | data.writeUInt32LE(Constants.CENSIG, 0); 315 | // version made by 316 | data.writeUInt16LE(_verMade, Constants.CENVEM); 317 | // version needed to extract 318 | data.writeUInt16LE(_version, Constants.CENVER); 319 | // encrypt, decrypt flags 320 | data.writeUInt16LE(_flags, Constants.CENFLG); 321 | // compression method 322 | data.writeUInt16LE(_method, Constants.CENHOW); 323 | // modification time (2 bytes time, 2 bytes date) 324 | data.writeUInt32LE(_time, Constants.CENTIM); 325 | // uncompressed file crc-32 value 326 | data.writeUInt32LE(_crc, Constants.CENCRC); 327 | // compressed size 328 | data.writeUInt32LE(_compressedSize, Constants.CENSIZ); 329 | // uncompressed size 330 | data.writeUInt32LE(_size, Constants.CENLEN); 331 | // filename length 332 | data.writeUInt16LE(_fnameLen, Constants.CENNAM); 333 | // extra field length 334 | data.writeUInt16LE(_extraLen, Constants.CENEXT); 335 | // file comment length 336 | data.writeUInt16LE(_comLen, Constants.CENCOM); 337 | // volume number start 338 | data.writeUInt16LE(_diskStart, Constants.CENDSK); 339 | // internal file attributes 340 | data.writeUInt16LE(_inattr, Constants.CENATT); 341 | // external file attributes 342 | data.writeUInt32LE(_attr, Constants.CENATX); 343 | // LOC header offset 344 | data.writeUInt32LE(_offset, Constants.CENOFF); 345 | return data; 346 | }, 347 | 348 | toJSON: function () { 349 | const bytes = function (nr) { 350 | return nr + " bytes"; 351 | }; 352 | 353 | return { 354 | made: _verMade, 355 | version: _version, 356 | flags: _flags, 357 | method: Utils.methodToString(_method), 358 | time: this.time, 359 | crc: "0x" + _crc.toString(16).toUpperCase(), 360 | compressedSize: bytes(_compressedSize), 361 | size: bytes(_size), 362 | fileNameLength: bytes(_fnameLen), 363 | extraLength: bytes(_extraLen), 364 | commentLength: bytes(_comLen), 365 | diskNumStart: _diskStart, 366 | inAttr: _inattr, 367 | attr: _attr, 368 | offset: _offset, 369 | centralHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) 370 | }; 371 | }, 372 | 373 | toString: function () { 374 | return JSON.stringify(this.toJSON(), null, "\t"); 375 | } 376 | }; 377 | }; 378 | -------------------------------------------------------------------------------- /zipEntry.js: -------------------------------------------------------------------------------- 1 | var Utils = require("./util"), 2 | Headers = require("./headers"), 3 | Constants = Utils.Constants, 4 | Methods = require("./methods"); 5 | 6 | module.exports = function (/** object */ options, /*Buffer*/ input) { 7 | var _centralHeader = new Headers.EntryHeader(), 8 | _entryName = 
Buffer.alloc(0), 9 | _comment = Buffer.alloc(0), 10 | _isDirectory = false, 11 | uncompressedData = null, 12 | _extra = Buffer.alloc(0), 13 | _extralocal = Buffer.alloc(0), 14 | _efs = true; 15 | 16 | // assign options 17 | const opts = options; 18 | 19 | const decoder = typeof opts.decoder === "object" ? opts.decoder : Utils.decoder; 20 | _efs = decoder.hasOwnProperty("efs") ? decoder.efs : false; 21 | 22 | function getCompressedDataFromZip() { 23 | //if (!input || !Buffer.isBuffer(input)) { 24 | if (!input || !(input instanceof Uint8Array)) { 25 | return Buffer.alloc(0); 26 | } 27 | _extralocal = _centralHeader.loadLocalHeaderFromBinary(input); 28 | return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize); 29 | } 30 | 31 | function crc32OK(data) { 32 | // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the local header is written 33 | if (!_centralHeader.flags_desc && !_centralHeader.localHeader.flags_desc) { 34 | if (Utils.crc32(data) !== _centralHeader.localHeader.crc) { 35 | return false; 36 | } 37 | } else { 38 | const descriptor = {}; 39 | const dataEndOffset = _centralHeader.realDataOffset + _centralHeader.compressedSize; 40 | // no descriptor after compressed data, instead new local header 41 | if (input.readUInt32LE(dataEndOffset) == Constants.LOCSIG || input.readUInt32LE(dataEndOffset) == Constants.CENSIG) { 42 | throw Utils.Errors.DESCRIPTOR_NOT_EXIST(); 43 | } 44 | 45 | // get descriptor data 46 | if (input.readUInt32LE(dataEndOffset) == Constants.EXTSIG) { 47 | // descriptor with signature 48 | descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC); 49 | descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ); 50 | descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN); 51 | } else if (input.readUInt16LE(dataEndOffset + 12) === 0x4b50) { 52 | // descriptor without signature (we check whether a new header starts where we expect) 53 | descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC - 4); 54 | descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ - 4); 55 | descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN - 4); 56 | } else { 57 | throw Utils.Errors.DESCRIPTOR_UNKNOWN(); 58 | } 59 | 60 | // check data integrity 61 | if (descriptor.compressedSize !== _centralHeader.compressedSize || descriptor.size !== _centralHeader.size || descriptor.crc !== _centralHeader.crc) { 62 | throw Utils.Errors.DESCRIPTOR_FAULTY(); 63 | } 64 | if (Utils.crc32(data) !== descriptor.crc) { 65 | return false; 66 | } 67 | 68 | // @TODO: zip64 bit descriptor fields 69 | // if bit 3 is set and any value in the local header "zip64 Extended information" extra field is set to 0 (place holder) 70 | // then the 64-bit descriptor format is used instead of the 32-bit one 71 | // central header - "zip64 Extended information" extra field should store real values and not place holders 72 | } 73 | return true; 74 | } 75 | 76 | function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { 77 | if (typeof callback === "undefined" && typeof async === "string") { 78 | pass = async; 79 | async = void 0; 80 | } 81 | if (_isDirectory) { 82 | if (async && callback) { 83 | callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR()); //si added error.
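/* Directory entries carry no file data: the async path reports DIRECTORY_CONTENT_ERROR through the callback, and both paths fall through to return an empty buffer. */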
84 | } 85 | return Buffer.alloc(0); 86 | } 87 | 88 | var compressedData = getCompressedDataFromZip(); 89 | 90 | if (compressedData.length === 0) { 91 | // File is empty, nothing to decompress. 92 | if (async && callback) callback(compressedData); 93 | return compressedData; 94 | } 95 | 96 | if (_centralHeader.encrypted) { 97 | if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { 98 | throw Utils.Errors.INVALID_PASS_PARAM(); 99 | } 100 | compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass); 101 | } 102 | 103 | var data = Buffer.alloc(_centralHeader.size); 104 | 105 | switch (_centralHeader.method) { 106 | case Utils.Constants.STORED: 107 | compressedData.copy(data); 108 | if (!crc32OK(data)) { 109 | if (async && callback) callback(data, Utils.Errors.BAD_CRC()); //si added error 110 | throw Utils.Errors.BAD_CRC(); 111 | } else { 112 | //si added otherwise did not seem to return data. 113 | if (async && callback) callback(data); 114 | return data; 115 | } 116 | case Utils.Constants.DEFLATED: 117 | var inflater = new Methods.Inflater(compressedData, _centralHeader.size); 118 | if (!async) { 119 | const result = inflater.inflate(data); 120 | result.copy(data, 0); 121 | if (!crc32OK(data)) { 122 | throw Utils.Errors.BAD_CRC(`"${decoder.decode(_entryName)}"`); 123 | } 124 | return data; 125 | } else { 126 | inflater.inflateAsync(function (result) { 127 | result.copy(data, 0); 128 | if (callback) { 129 | if (!crc32OK(data)) { 130 | callback(data, Utils.Errors.BAD_CRC()); //si added error 131 | } else { 132 | callback(data); 133 | } 134 | } 135 | }); 136 | } 137 | break; 138 | default: 139 | if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD()); 140 | throw Utils.Errors.UNKNOWN_METHOD(); 141 | } 142 | } 143 | 144 | function compress(/*Boolean*/ async, /*Function*/ callback) { 145 | if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { 146 | // no data set or the data wasn't changed to require recompression 147 | if (async && callback) callback(getCompressedDataFromZip()); 148 | return getCompressedDataFromZip(); 149 | } 150 | 151 | if (uncompressedData.length && !_isDirectory) { 152 | var compressedData; 153 | // Local file header 154 | switch (_centralHeader.method) { 155 | case Utils.Constants.STORED: 156 | _centralHeader.compressedSize = _centralHeader.size; 157 | 158 | compressedData = Buffer.alloc(uncompressedData.length); 159 | uncompressedData.copy(compressedData); 160 | 161 | if (async && callback) callback(compressedData); 162 | return compressedData; 163 | default: 164 | case Utils.Constants.DEFLATED: 165 | var deflater = new Methods.Deflater(uncompressedData); 166 | if (!async) { 167 | var deflated = deflater.deflate(); 168 | _centralHeader.compressedSize = deflated.length; 169 | return deflated; 170 | } else { 171 | deflater.deflateAsync(function (data) { 172 | compressedData = Buffer.alloc(data.length); 173 | _centralHeader.compressedSize = data.length; 174 | data.copy(compressedData); 175 | callback && callback(compressedData); 176 | }); 177 | } 178 | deflater = null; 179 | break; 180 | } 181 | } else if (async && callback) { 182 | callback(Buffer.alloc(0)); 183 | } else { 184 | return Buffer.alloc(0); 185 | } 186 | } 187 | 188 | function readUInt64LE(buffer, offset) { 189 | return buffer.readUInt32LE(offset + 4) * 0x100000000 + buffer.readUInt32LE(offset); 190 | } 191 | 192 | function parseExtra(data) { 193 | try { 194 | var offset = 0; 195 | var signature, size, part; 196 | while (offset + 4 < 
data.length) { 197 | signature = data.readUInt16LE(offset); 198 | offset += 2; 199 | size = data.readUInt16LE(offset); 200 | offset += 2; 201 | part = data.slice(offset, offset + size); 202 | offset += size; 203 | if (Constants.ID_ZIP64 === signature) { 204 | parseZip64ExtendedInformation(part); 205 | } 206 | } 207 | } catch (error) { 208 | throw Utils.Errors.EXTRA_FIELD_PARSE_ERROR(); 209 | } 210 | } 211 | 212 | //Override header field values with values from the ZIP64 extra field 213 | function parseZip64ExtendedInformation(data) { 214 | var size, compressedSize, offset, diskNumStart; 215 | 216 | if (data.length >= Constants.EF_ZIP64_SCOMP) { 217 | size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); 218 | if (_centralHeader.size === Constants.EF_ZIP64_OR_32) { 219 | _centralHeader.size = size; 220 | } 221 | } 222 | if (data.length >= Constants.EF_ZIP64_RHO) { 223 | compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); 224 | if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) { 225 | _centralHeader.compressedSize = compressedSize; 226 | } 227 | } 228 | if (data.length >= Constants.EF_ZIP64_DSN) { 229 | offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); 230 | if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) { 231 | _centralHeader.offset = offset; 232 | } 233 | } 234 | if (data.length >= Constants.EF_ZIP64_DSN + 4) { 235 | diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); 236 | if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { 237 | _centralHeader.diskNumStart = diskNumStart; 238 | } 239 | } 240 | } 241 | 242 | return { 243 | get entryName() { 244 | return decoder.decode(_entryName); 245 | }, 246 | get rawEntryName() { 247 | return _entryName; 248 | }, 249 | set entryName(val) { 250 | _entryName = Utils.toBuffer(val, decoder.encode); 251 | var lastChar = _entryName[_entryName.length - 1]; 252 | _isDirectory = lastChar === 47 || lastChar === 92; 253 | _centralHeader.fileNameLength = _entryName.length; 254 | }, 255 | 256 | get efs() { 257 | if (typeof _efs === "function") { 258 | return _efs(this.entryName); 259 | } else { 260 | return _efs; 261 | } 262 | }, 263 | 264 | get extra() { 265 | return _extra; 266 | }, 267 | set extra(val) { 268 | _extra = val; 269 | _centralHeader.extraLength = val.length; 270 | parseExtra(val); 271 | }, 272 | 273 | get comment() { 274 | return decoder.decode(_comment); 275 | }, 276 | set comment(val) { 277 | _comment = Utils.toBuffer(val, decoder.encode); 278 | _centralHeader.commentLength = _comment.length; 279 | if (_comment.length > 0xffff) throw Utils.Errors.COMMENT_TOO_LONG(); 280 | }, 281 | 282 | get name() { 283 | var n = decoder.decode(_entryName); 284 | return _isDirectory 285 | ? 
n 286 | .substr(0, n.length - 1) 287 | .split("/") 288 | .pop() 289 | : n.split("/").pop(); 290 | }, 291 | get isDirectory() { 292 | return _isDirectory; 293 | }, 294 | 295 | getCompressedData: function () { 296 | return compress(false, null); 297 | }, 298 | 299 | getCompressedDataAsync: function (/*Function*/ callback) { 300 | compress(true, callback); 301 | }, 302 | 303 | setData: function (value) { 304 | uncompressedData = Utils.toBuffer(value, Utils.decoder.encode); 305 | if (!_isDirectory && uncompressedData.length) { 306 | _centralHeader.size = uncompressedData.length; 307 | _centralHeader.method = Utils.Constants.DEFLATED; 308 | _centralHeader.crc = Utils.crc32(value); 309 | _centralHeader.changed = true; 310 | } else { 311 | // folders and blank files should be stored 312 | _centralHeader.method = Utils.Constants.STORED; 313 | } 314 | }, 315 | 316 | getData: function (pass) { 317 | if (_centralHeader.changed) { 318 | return uncompressedData; 319 | } else { 320 | return decompress(false, null, pass); 321 | } 322 | }, 323 | 324 | getDataAsync: function (/*Function*/ callback, pass) { 325 | if (_centralHeader.changed) { 326 | callback(uncompressedData); 327 | } else { 328 | decompress(true, callback, pass); 329 | } 330 | }, 331 | 332 | set attr(attr) { 333 | _centralHeader.attr = attr; 334 | }, 335 | get attr() { 336 | return _centralHeader.attr; 337 | }, 338 | 339 | set header(/*Buffer*/ data) { 340 | _centralHeader.loadFromBinary(data); 341 | }, 342 | 343 | get header() { 344 | return _centralHeader; 345 | }, 346 | 347 | packCentralHeader: function () { 348 | _centralHeader.flags_efs = this.efs; 349 | _centralHeader.extraLength = _extra.length; 350 | // 1. create header (buffer) 351 | var header = _centralHeader.centralHeaderToBinary(); 352 | var addpos = Utils.Constants.CENHDR; 353 | // 2. add file name 354 | _entryName.copy(header, addpos); 355 | addpos += _entryName.length; 356 | // 3. add extra data 357 | _extra.copy(header, addpos); 358 | addpos += _centralHeader.extraLength; 359 | // 4. add file comment 360 | _comment.copy(header, addpos); 361 | return header; 362 | }, 363 | 364 | packLocalHeader: function () { 365 | let addpos = 0; 366 | _centralHeader.flags_efs = this.efs; 367 | _centralHeader.extraLocalLength = _extralocal.length; 368 | // 1. construct local header Buffer 369 | const localHeaderBuf = _centralHeader.localHeaderToBinary(); 370 | // 2. 
localHeader - create header buffer 371 | const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _centralHeader.extraLocalLength); 372 | // 2.1 add localheader 373 | localHeaderBuf.copy(localHeader, addpos); 374 | addpos += localHeaderBuf.length; 375 | // 2.2 add file name 376 | _entryName.copy(localHeader, addpos); 377 | addpos += _entryName.length; 378 | // 2.3 add extra field 379 | _extralocal.copy(localHeader, addpos); 380 | addpos += _extralocal.length; 381 | 382 | return localHeader; 383 | }, 384 | 385 | toJSON: function () { 386 | const bytes = function (nr) { 387 | return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; 388 | }; 389 | 390 | return { 391 | entryName: this.entryName, 392 | name: this.name, 393 | comment: this.comment, 394 | isDirectory: this.isDirectory, 395 | header: _centralHeader.toJSON(), 396 | compressedData: bytes(input), 397 | data: bytes(uncompressedData) 398 | }; 399 | }, 400 | 401 | toString: function () { 402 | return JSON.stringify(this.toJSON(), null, "\t"); 403 | } 404 | }; 405 | }; 406 | -------------------------------------------------------------------------------- /zipFile.js: -------------------------------------------------------------------------------- 1 | const ZipEntry = require("./zipEntry"); 2 | const Headers = require("./headers"); 3 | const Utils = require("./util"); 4 | 5 | module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { 6 | var entryList = [], 7 | entryTable = {}, 8 | _comment = Buffer.alloc(0), 9 | mainHeader = new Headers.MainHeader(), 10 | loadedEntries = false; 11 | var password = null; 12 | const temporary = new Set(); 13 | 14 | // assign options 15 | const opts = options; 16 | 17 | const { noSort, decoder } = opts; 18 | 19 | if (inBuffer) { 20 | // is a memory buffer 21 | readMainHeader(opts.readEntries); 22 | } else { 23 | // none;
this is a new file 24 | loadedEntries = true; 25 | } 26 | 27 | function makeTemporaryFolders() { 28 | const foldersList = new Set(); 29 | 30 | // Make list of all folders in file 31 | for (const elem of Object.keys(entryTable)) { 32 | const elements = elem.split("/"); 33 | elements.pop(); // filename 34 | if (!elements.length) continue; // no folders 35 | for (let i = 0; i < elements.length; i++) { 36 | const sub = elements.slice(0, i + 1).join("/") + "/"; 37 | foldersList.add(sub); 38 | } 39 | } 40 | 41 | // create missing folders as temporary 42 | for (const elem of foldersList) { 43 | if (!(elem in entryTable)) { 44 | const tempfolder = new ZipEntry(opts); 45 | tempfolder.entryName = elem; 46 | tempfolder.attr = 0x10; 47 | tempfolder.temporary = true; 48 | entryList.push(tempfolder); 49 | entryTable[tempfolder.entryName] = tempfolder; 50 | temporary.add(tempfolder); 51 | } 52 | } 53 | } 54 | 55 | function readEntries() { 56 | loadedEntries = true; 57 | entryTable = {}; 58 | if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) { 59 | throw Utils.Errors.DISK_ENTRY_TOO_LARGE(); 60 | } 61 | entryList = new Array(mainHeader.diskEntries); // total number of entries 62 | var index = mainHeader.offset; // offset of first CEN header 63 | for (var i = 0; i < entryList.length; i++) { 64 | var tmp = index, 65 | entry = new ZipEntry(opts, inBuffer); 66 | entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); 67 | 68 | entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); 69 | 70 | if (entry.header.extraLength) { 71 | entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); 72 | } 73 | 74 | if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); 75 | 76 | index += entry.header.centralHeaderSize; 77 | 78 | entryList[i] = entry; 79 | entryTable[entry.entryName] = entry; 80 | } 81 | temporary.clear(); 82 | makeTemporaryFolders(); 83 | } 84 | 85 | function readMainHeader(/*Boolean*/ readNow) { 86 | var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size 87 | max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length 88 | n = max, 89 | endStart = inBuffer.length, 90 | endOffset = -1, // Start offset of the END header 91 | commentEnd = 0; 92 | 93 | // option to search for the header in the entire file 94 | const trailingSpace = typeof opts.trailingSpace === "boolean" ? 
opts.trailingSpace : false; 95 | if (trailingSpace) max = 0; 96 | 97 | for (i; i >= n; i--) { 98 | if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' 99 | if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { 100 | // "PK\005\006" 101 | endOffset = i; 102 | commentEnd = i; 103 | endStart = i + Utils.Constants.ENDHDR; 104 | // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature 105 | n = i - Utils.Constants.END64HDR; 106 | continue; 107 | } 108 | 109 | if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { 110 | // Found a zip64 signature, let's continue reading the whole zip64 record 111 | n = max; 112 | continue; 113 | } 114 | 115 | if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { 116 | // Found the zip64 record, let's determine its size 117 | endOffset = i; 118 | endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; 119 | break; 120 | } 121 | } 122 | 123 | if (endOffset == -1) throw Utils.Errors.INVALID_FORMAT(); 124 | 125 | mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); 126 | if (mainHeader.commentLength) { 127 | _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); 128 | } 129 | if (readNow) readEntries(); 130 | } 131 | 132 | function sortEntries() { 133 | if (entryList.length > 1 && !noSort) { 134 | entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); 135 | } 136 | } 137 | 138 | return { 139 | /** 140 | * Returns an array of the ZipEntry objects in the currently opened archive 141 | * @return Array 142 | */ 143 | get entries() { 144 | if (!loadedEntries) { 145 | readEntries(); 146 | } 147 | return entryList.filter((e) => !temporary.has(e)); 148 | }, 149 | 150 | /** 151 | * Archive comment 152 | * @return {String} 153 | */ 154 | get comment() { 155 | return decoder.decode(_comment); 156 | }, 157 | set comment(val) { 158 | _comment = Utils.toBuffer(val, decoder.encode); 159 | mainHeader.commentLength = _comment.length; 160 | }, 161 | 162 | getEntryCount: function () { 163 | if (!loadedEntries) { 164 | return mainHeader.diskEntries; 165 | } 166 | 167 | return entryList.length; 168 | }, 169 | 170 | forEach: function (callback) { 171 | this.entries.forEach(callback); 172 | }, 173 | 174 | /** 175 | * Returns a reference to the entry with the given name, or null if no such entry exists 176 | * 177 | * @param entryName 178 | * @return ZipEntry 179 | */ 180 | getEntry: function (/*String*/ entryName) { 181 | if (!loadedEntries) { 182 | readEntries(); 183 | } 184 | return entryTable[entryName] || null; 185 | }, 186 | 187 | /** 188 | * Adds the given entry to the entry list 189 | * 190 | * @param entry 191 | */ 192 | setEntry: function (/*ZipEntry*/ entry) { 193 | if (!loadedEntries) { 194 | readEntries(); 195 | } 196 | entryList.push(entry); 197 | entryTable[entry.entryName] = entry; 198 | mainHeader.totalEntries = entryList.length; 199 | }, 200 | 201 | /** 202 | * Removes the file with the given name from the entry list. 
203 | * 204 | * If the entry is a directory, then all nested files and directories will be removed 205 | * @param entryName 206 | * @returns {void} 207 | */ 208 | deleteFile: function (/*String*/ entryName, withsubfolders = true) { 209 | if (!loadedEntries) { 210 | readEntries(); 211 | } 212 | const entry = entryTable[entryName]; 213 | const list = this.getEntryChildren(entry, withsubfolders).map((child) => child.entryName); 214 | 215 | list.forEach(this.deleteEntry); 216 | }, 217 | 218 | /** 219 | * Removes the entry with the given name from the entry list. 220 | * 221 | * @param {string} entryName 222 | * @returns {void} 223 | */ 224 | deleteEntry: function (/*String*/ entryName) { 225 | if (!loadedEntries) { 226 | readEntries(); 227 | } 228 | const entry = entryTable[entryName]; 229 | const index = entryList.indexOf(entry); 230 | if (index >= 0) { 231 | entryList.splice(index, 1); 232 | delete entryTable[entryName]; 233 | mainHeader.totalEntries = entryList.length; 234 | } 235 | }, 236 | 237 | /** 238 | * Iterates and returns all nested files and directories of the given entry 239 | * 240 | * @param entry 241 | * @return Array 242 | */ 243 | getEntryChildren: function (/*ZipEntry*/ entry, subfolders = true) { 244 | if (!loadedEntries) { 245 | readEntries(); 246 | } 247 | if (typeof entry === "object") { 248 | if (entry.isDirectory && subfolders) { 249 | const list = []; 250 | const name = entry.entryName; 251 | 252 | for (const zipEntry of entryList) { 253 | if (zipEntry.entryName.startsWith(name)) { 254 | list.push(zipEntry); 255 | } 256 | } 257 | return list; 258 | } else { 259 | return [entry]; 260 | } 261 | } 262 | return []; 263 | }, 264 | 265 | /** 266 | * How many child elements the entry has 267 | * 268 | * @param {ZipEntry} entry 269 | * @return {integer} 270 | */ 271 | getChildCount: function (entry) { 272 | if (entry && entry.isDirectory) { 273 | const list = this.getEntryChildren(entry); 274 | return list.includes(entry) ? list.length - 1 : list.length; 275 | } 276 | return 0; 277 | }, 278 | 279 | /** 280 | * Returns the zip file contents as a Buffer 281 | * 282 | * @return Buffer 283 | */ 284 | compressToBuffer: function () { 285 | if (!loadedEntries) { 286 | readEntries(); 287 | } 288 | sortEntries(); 289 | 290 | const dataBlock = []; 291 | const headerBlocks = []; 292 | let totalSize = 0; 293 | let dindex = 0; 294 | 295 | mainHeader.size = 0; 296 | mainHeader.offset = 0; 297 | let totalEntries = 0; 298 | 299 | for (const entry of this.entries) { 300 | // compress data and set local and entry header accordingly. This is why it is called first 301 | const compressedData = entry.getCompressedData(); 302 | entry.header.offset = dindex; 303 | 304 | // 1. construct local header 305 | const localHeader = entry.packLocalHeader(); 306 | 307 | // 2. offsets 308 | const dataLength = localHeader.length + compressedData.length; 309 | dindex += dataLength; 310 | 311 | // 3. store values in sequence 312 | dataBlock.push(localHeader); 313 | dataBlock.push(compressedData); 314 | 315 | // 4. construct central header 316 | const centralHeader = entry.packCentralHeader(); 317 | headerBlocks.push(centralHeader); 318 | // 5. 
update main header 319 | mainHeader.size += centralHeader.length; 320 | totalSize += dataLength + centralHeader.length; 321 | totalEntries++; 322 | } 323 | 324 | totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length 325 | // point to end of data and beginning of central directory first record 326 | mainHeader.offset = dindex; 327 | mainHeader.totalEntries = totalEntries; 328 | 329 | dindex = 0; 330 | const outBuffer = Buffer.alloc(totalSize); 331 | // write data blocks 332 | for (const content of dataBlock) { 333 | content.copy(outBuffer, dindex); 334 | dindex += content.length; 335 | } 336 | 337 | // write central directory entries 338 | for (const content of headerBlocks) { 339 | content.copy(outBuffer, dindex); 340 | dindex += content.length; 341 | } 342 | 343 | // write main header 344 | const mh = mainHeader.toBinary(); 345 | if (_comment) { 346 | _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment 347 | } 348 | mh.copy(outBuffer, dindex); 349 | 350 | // Since we update entry and main header offsets, 351 | // they are no longer valid and we have to reset content 352 | // (Issue 64) 353 | 354 | inBuffer = outBuffer; 355 | loadedEntries = false; 356 | 357 | return outBuffer; 358 | }, 359 | 360 | toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { 361 | try { 362 | if (!loadedEntries) { 363 | readEntries(); 364 | } 365 | sortEntries(); 366 | 367 | const dataBlock = []; 368 | const centralHeaders = []; 369 | let totalSize = 0; 370 | let dindex = 0; 371 | let totalEntries = 0; 372 | 373 | mainHeader.size = 0; 374 | mainHeader.offset = 0; 375 | 376 | const compress2Buffer = function (entryLists) { 377 | if (entryLists.length > 0) { 378 | const entry = entryLists.shift(); 379 | const name = entry.entryName + entry.extra.toString(); 380 | if (onItemStart) onItemStart(name); 381 | entry.getCompressedDataAsync(function (compressedData) { 382 | if (onItemEnd) onItemEnd(name); 383 | entry.header.offset = dindex; 384 | 385 | // 1. construct local header 386 | const localHeader = entry.packLocalHeader(); 387 | 388 | // 2. offsets 389 | const dataLength = localHeader.length + compressedData.length; 390 | dindex += dataLength; 391 | 392 | // 3. 
store values in sequence 393 | dataBlock.push(localHeader); 394 | dataBlock.push(compressedData); 395 | 396 | // central header 397 | const centralHeader = entry.packCentralHeader(); 398 | centralHeaders.push(centralHeader); 399 | mainHeader.size += centralHeader.length; 400 | totalSize += dataLength + centralHeader.length; 401 | totalEntries++; 402 | 403 | compress2Buffer(entryLists); 404 | }); 405 | } else { 406 | totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length 407 | // point to end of data and beginning of central directory first record 408 | mainHeader.offset = dindex; 409 | mainHeader.totalEntries = totalEntries; 410 | 411 | dindex = 0; 412 | const outBuffer = Buffer.alloc(totalSize); 413 | dataBlock.forEach(function (content) { 414 | content.copy(outBuffer, dindex); // write data blocks 415 | dindex += content.length; 416 | }); 417 | centralHeaders.forEach(function (content) { 418 | content.copy(outBuffer, dindex); // write central directory entries 419 | dindex += content.length; 420 | }); 421 | 422 | const mh = mainHeader.toBinary(); 423 | if (_comment) { 424 | _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment 425 | } 426 | 427 | mh.copy(outBuffer, dindex); // write main header 428 | 429 | // Since we update entry and main header offsets, they are no 430 | // longer valid and we have to reset content using our new buffer 431 | // (Issue 64) 432 | 433 | inBuffer = outBuffer; 434 | loadedEntries = false; 435 | 436 | onSuccess(outBuffer); 437 | } 438 | }; 439 | 440 | compress2Buffer(Array.from(this.entries)); 441 | } catch (e) { 442 | onFail(e); 443 | } 444 | } 445 | }; 446 | }; 447 | -------------------------------------------------------------------------------- /test/methods/methods.test.js: -------------------------------------------------------------------------------- 1 | const { expect } = require("chai"); 2 | //const Attr = require("../util").FileAttr; 3 | const Zip = require("../../adm-zip"); 4 | const pth = require("path"); 5 | const fs = require("fs"); 6 | const rimraf = require("rimraf"); 7 | const Utils = require("../../util/utils"); 8 | 9 | describe("adm-zip.js - methods handling local files", () => { 10 | const wrapList = (c) => pth.normalize(pth.join(destination, c)); 11 | const destination = "./test/xxx"; 12 | const testFileFolderList = [ 13 | { name: "subfolder1/subfolder2/zipEntry1.txt", content: "zipEntry1" }, 14 | { name: "subfolder1/subfolder2/subfolder3/zipEntry2.txt", content: "zipEntry2" }, 15 | { name: "subfolder1/subfolder2/subfolder3/zipEntry3.txt", content: "zipEntry3" }, 16 | { name: "subfolder1/subfolder2/subfolder3/subfolder4/" } 17 | ]; 18 | const testFileFileList = [ 19 | { name: "folder/zipEntry1.txt", content: "zipEntry1" }, 20 | { name: "folder/zipEntry2.txt", content: "zipEntry2" }, 21 | { name: "folder/zipEntry3.txt", content: "zipEntry3" }, 22 | { name: "folder/zipEntry4.txt", content: "zipEntry4" }, 23 | { name: "folder/subfolder1/" }, 24 | { name: "folder/subfolder2/" }, 25 | { name: "folder/subfolder3/" } 26 | ]; 27 | 28 | // clean up folder content 29 | afterEach((done) => rimraf(destination, done)); 30 | 31 | describe(".deleteFile()", () => { 32 | const ultrazip = [ 33 | "./attributes_test/asd/New Text Document.txt", 34 | "./attributes_test/blank file.txt", 35 | "./attributes_test/New folder/hidden.txt", 36 | "./attributes_test/New folder/hidden_readonly.txt", 37 | "./attributes_test/New folder/readonly.txt", 38 | "./utes_test/New folder/somefile.txt" 39 | ].map(wrapList); 40 | 41 | // Issue 523 
- deletes additional files 42 | it("zip.deleteFile() - delete folder with subfolders", () => { 43 | const content = "test"; 44 | const comment = "comment"; 45 | const zip1 = new Zip({ noSort: true }); 46 | zip1.addFile("test/"); 47 | zip1.addFile("test/path1/"); 48 | zip1.addFile("test/path1/file1.txt", content, comment); 49 | zip1.addFile("test/path1/folder1/"); 50 | zip1.addFile("test/path1/folder1/file2.txt", content, comment); 51 | zip1.addFile("test/path2/"); 52 | zip1.addFile("test/path2/file1.txt", content, comment); 53 | zip1.addFile("test/path2/folder1/"); 54 | zip1.addFile("test/path2/folder1/file2.txt", content, comment); 55 | 56 | zip1.deleteFile("test/path1/"); 57 | 58 | const zipEntries = zip1.getEntries().map((child) => child.entryName); 59 | 60 | expect(zipEntries).to.deep.equal(["test/", "test/path2/", "test/path2/file1.txt", "test/path2/folder1/", "test/path2/folder1/file2.txt"]); 61 | }); 62 | 63 | it("zip.deleteFile() - delete folder", () => { 64 | const content = "test"; 65 | const comment = "comment"; 66 | const zip1 = new Zip({ noSort: true }); 67 | zip1.addFile("test/"); 68 | zip1.addFile("test/path1/"); 69 | zip1.addFile("test/path1/file1.txt", content, comment); 70 | zip1.addFile("test/path1/folder1/"); 71 | zip1.addFile("test/path1/folder1/file2.txt", content, comment); 72 | zip1.addFile("test/path2/"); 73 | zip1.addFile("test/path2/file1.txt", content, comment); 74 | zip1.addFile("test/path2/folder1/"); 75 | zip1.addFile("test/path2/folder1/file2.txt", content, comment); 76 | 77 | zip1.deleteFile("test/path1/", false); 78 | 79 | const zipEntries = zip1.getEntries().map((child) => child.entryName); 80 | 81 | expect(zipEntries).to.deep.equal([ 82 | "test/", 83 | "test/path1/file1.txt", 84 | "test/path1/folder1/", 85 | "test/path1/folder1/file2.txt", 86 | "test/path2/", 87 | "test/path2/file1.txt", 88 | "test/path2/folder1/", 89 | "test/path2/folder1/file2.txt" 90 | ]); 91 | }); 92 | 93 | it("zip.deleteFile() - delete files", () => { 94 | const content = "test"; 95 | const comment = "comment"; 96 | const zip1 = new Zip({ noSort: true }); 97 | zip1.addFile("test/"); 98 | zip1.addFile("test/path1/"); 99 | zip1.addFile("test/path1/file1.txt", content, comment); 100 | zip1.addFile("test/path1/folder1/"); 101 | zip1.addFile("test/path1/folder1/file2.txt", content, comment); 102 | 103 | zip1.deleteFile("test/path1/file1.txt", false); 104 | zip1.deleteFile("test/path1/folder1/file2.txt", false); 105 | 106 | const zipEntries = zip1.getEntries().map((child) => child.entryName); 107 | 108 | expect(zipEntries).to.deep.equal(["test/", "test/path1/", "test/path1/folder1/"]); 109 | }); 110 | }); 111 | 112 | describe(".extractAllTo() - sync", () => { 113 | const ultrazip = [ 114 | "./attributes_test/asd/New Text Document.txt", 115 | "./attributes_test/blank file.txt", 116 | "./attributes_test/New folder/hidden.txt", 117 | "./attributes_test/New folder/hidden_readonly.txt", 118 | "./attributes_test/New folder/readonly.txt", 119 | "./utes_test/New folder/somefile.txt" 120 | ].map(wrapList); 121 | 122 | it("zip.extractAllTo(destination)", () => { 123 | const zip = new Zip("./test/assets/ultra.zip"); 124 | zip.extractAllTo(destination); 125 | const files = walk(destination); 126 | 127 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 128 | }); 129 | 130 | it("zip.extractAllTo(destination) - streamed file", () => { 131 | const zip = new Zip("./test/assets/stream-nozip64.zip"); 132 | zip.extractAllTo(destination); 133 | const files = walk(destination); 134 | 135 | 
expect(files.sort()).to.deep.equal([pth.normalize("./test/xxx/lorem.txt")]); 136 | }); 137 | }); 138 | 139 | describe(".extractAllToAsync - sync", () => { 140 | const ultrazip = [ 141 | "./attributes_test/asd/New Text Document.txt", 142 | "./attributes_test/blank file.txt", 143 | "./attributes_test/New folder/hidden.txt", 144 | "./attributes_test/New folder/hidden_readonly.txt", 145 | "./attributes_test/New folder/readonly.txt", 146 | "./utes_test/New folder/somefile.txt" 147 | ].map(wrapList); 148 | 149 | it("zip.extractAllToAsync(destination)", (done) => { 150 | const zip = new Zip("./test/assets/ultra.zip"); 151 | zip.extractAllToAsync(destination, (error) => { 152 | const files = walk(destination); 153 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 154 | done(); 155 | }); 156 | }); 157 | 158 | it("zip.extractAllToAsync(destination) [Promise]", function () { 159 | const zip = new Zip("./test/assets/ultra.zip"); 160 | // note the return 161 | return zip.extractAllToAsync(destination).then(function (data) { 162 | const files = walk(destination); 163 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 164 | }); // no catch, it'll figure it out since the promise is rejected 165 | }); 166 | 167 | it("zip.extractAllToAsync(destination, false, false, callback)", (done) => { 168 | const zip = new Zip("./test/assets/ultra.zip"); 169 | zip.extractAllToAsync(destination, false, false, (error) => { 170 | const files = walk(destination); 171 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 172 | done(); 173 | }); 174 | }); 175 | 176 | it("zip.extractAllToAsync(destination, false, false) [Promise]", function () { 177 | const zip = new Zip("./test/assets/ultra.zip"); 178 | // note the return 179 | return zip.extractAllToAsync(destination, false, false).then(function (data) { 180 | const files = walk(destination); 181 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 182 | }); // no catch, it'll figure it out since the promise is rejected 183 | }); 184 | 185 | it("zip.extractAllToAsync(destination, false, callback)", (done) => { 186 | const zip = new Zip("./test/assets/ultra.zip"); 187 | zip.extractAllToAsync(destination, false, (error) => { 188 | const files = walk(destination); 189 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 190 | done(); 191 | }); 192 | }); 193 | 194 | it("zip.extractAllToAsync(destination, false) [Promise]", () => { 195 | const zip = new Zip("./test/assets/ultra.zip"); 196 | // note the return 197 | return zip.extractAllToAsync(destination, false).then(function (data) { 198 | const files = walk(destination); 199 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 200 | }); // no catch, it'll figure it out since the promise is rejected 201 | }); 202 | }); 203 | 204 | describe(".extractEntryTo() - sync", () => { 205 | // each entry one by one 206 | it("zip.extractEntryTo(entry, destination, false, true)", () => { 207 | const zip = new Zip("./test/assets/ultra.zip"); 208 | var zipEntries = zip.getEntries(); 209 | zipEntries.forEach((e) => zip.extractEntryTo(e, destination, false, true)); 210 | 211 | const files = walk(destination); 212 | const ultrazip = ["blank file.txt", "hidden.txt", "hidden_readonly.txt", "New Text Document.txt", "readonly.txt", "somefile.txt"].map(wrapList); 213 | 214 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 215 | }); 216 | 217 | // each entry one by one 218 | it("zip.extractEntryTo(entry, destination, true, true)", () => { 219 | const zip = new Zip("./test/assets/ultra.zip"); 220 | var zipEntries = 
zip.getEntries(); 221 | zipEntries.forEach((e) => zip.extractEntryTo(e, destination, true, true)); 222 | 223 | const files = walk(destination); 224 | const ultrazip = [ 225 | "./attributes_test/asd/New Text Document.txt", 226 | "./attributes_test/blank file.txt", 227 | "./attributes_test/New folder/hidden.txt", 228 | "./attributes_test/New folder/hidden_readonly.txt", 229 | "./attributes_test/New folder/readonly.txt", 230 | "./utes_test/New folder/somefile.txt" 231 | ].map(wrapList); 232 | 233 | expect(files.sort()).to.deep.equal(ultrazip.sort()); 234 | }); 235 | 236 | it("zip.extractEntryTo(entry, destination, false, true) - [ extract folder from file where folders exists ]", () => { 237 | const zip = new Zip("./test/assets/maximum.zip"); 238 | 239 | zip.extractEntryTo("./attributes_test/New folder/", destination, false, true); 240 | 241 | const files = walk(destination); 242 | const maximumzip = ["hidden.txt", "hidden_readonly.txt", "readonly.txt", "somefile.txt"].map(wrapList); 243 | 244 | expect(files.sort()).to.deep.equal(maximumzip.sort()); 245 | }); 246 | 247 | it("zip.extractEntryTo(entry, destination, false, true) - [ extract folder from file where folders does not exists ]", () => { 248 | const zip = new Zip("./test/assets/maximum3.zip"); 249 | 250 | zip.extractEntryTo("./attributes_test/New folder/", destination, false, true); 251 | 252 | const files = walk(destination); 253 | const maximumzip = ["hidden.txt", "hidden_readonly.txt", "readonly.txt", "somefile.txt"].map(wrapList); 254 | 255 | expect(files.sort()).to.deep.equal(maximumzip.sort()); 256 | }); 257 | 258 | it("zip.extractEntryTo(entry, destination, true, true) - [ extract folder from file where folders exists ]", () => { 259 | const zip = new Zip("./test/assets/maximum.zip"); 260 | 261 | zip.extractEntryTo("./attributes_test/New folder/", destination, true, true); 262 | 263 | const files = walk(destination); 264 | const maximumzip = [ 265 | "./attributes_test/New folder/hidden.txt", 266 | "./attributes_test/New folder/hidden_readonly.txt", 267 | "./attributes_test/New folder/readonly.txt", 268 | "./attributes_test/New folder/somefile.txt" 269 | ].map(wrapList); 270 | expect(files.sort()).to.deep.equal(maximumzip.sort()); 271 | }); 272 | 273 | it("zip.extractEntryTo(entry, destination, true, true) - [ extract folder from file where folders does not exists ]", () => { 274 | const zip = new Zip("./test/assets/maximum3.zip"); 275 | 276 | zip.extractEntryTo("./attributes_test/New folder/", destination, true, true); 277 | 278 | const files = walk(destination); 279 | const maximumzip = [ 280 | "./attributes_test/New folder/hidden.txt", 281 | "./attributes_test/New folder/hidden_readonly.txt", 282 | "./attributes_test/New folder/readonly.txt", 283 | "./attributes_test/New folder/somefile.txt" 284 | ].map(wrapList); 285 | expect(files.sort()).to.deep.equal(maximumzip.sort()); 286 | }); 287 | }); 288 | 289 | describe(".addLocalFolder() - sync", () => { 290 | beforeEach(() => { 291 | genFiles(testFileFolderList, destination); 292 | }); 293 | 294 | it("zip.addLocalFolder(destination)", () => { 295 | const zip = new Zip({ noSort: true }); 296 | zip.addLocalFolder(destination); 297 | zip.toBuffer(); 298 | 299 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 300 | 301 | const expected = [ 302 | "subfolder1/", 303 | "subfolder1/subfolder2/", 304 | "subfolder1/subfolder2/subfolder3/", 305 | "subfolder1/subfolder2/subfolder3/subfolder4/", 306 | "subfolder1/subfolder2/subfolder3/zipEntry2.txt", 307 | 
"subfolder1/subfolder2/subfolder3/zipEntry3.txt", 308 | "subfolder1/subfolder2/zipEntry1.txt" 309 | ]; 310 | 311 | expect(zip1Entries).to.deep.equal(expected); 312 | }); 313 | 314 | it("zip.addLocalFolder(destination, zipPath)", () => { 315 | const zip = new Zip(); 316 | zip.addLocalFolder(destination, "parent"); 317 | zip.toBuffer(); 318 | 319 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 320 | 321 | const expected = [ 322 | "parent/subfolder1/", 323 | "parent/subfolder1/subfolder2/", 324 | "parent/subfolder1/subfolder2/subfolder3/", 325 | "parent/subfolder1/subfolder2/subfolder3/subfolder4/", 326 | "parent/subfolder1/subfolder2/subfolder3/zipEntry2.txt", 327 | "parent/subfolder1/subfolder2/subfolder3/zipEntry3.txt", 328 | "parent/subfolder1/subfolder2/zipEntry1.txt" 329 | ].sort(); 330 | 331 | expect(zip1Entries).to.deep.equal(expected); 332 | }); 333 | 334 | it("zip.addLocalFolder(destination, '', filter)", () => { 335 | const zip = new Zip(); 336 | const filter = /zipEntry[23]\.txt/; 337 | zip.addLocalFolder(destination, "", filter); 338 | zip.toBuffer(); 339 | 340 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 341 | 342 | const expected = ["subfolder1/subfolder2/subfolder3/zipEntry2.txt", "subfolder1/subfolder2/subfolder3/zipEntry3.txt"].sort(); 343 | 344 | expect(zip1Entries).to.deep.equal(expected); 345 | }); 346 | 347 | it("zip.addLocalFolder(destination, '', filter)", () => { 348 | const zip = new Zip(); 349 | const filter = function (str) { 350 | return str.slice(-1) === pth.sep; 351 | }; 352 | zip.addLocalFolder(destination, "", filter); 353 | zip.toBuffer(); 354 | 355 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 356 | 357 | const expected = ["subfolder1/", "subfolder1/subfolder2/", "subfolder1/subfolder2/subfolder3/", "subfolder1/subfolder2/subfolder3/subfolder4/"].sort(); 358 | 359 | expect(zip1Entries).to.deep.equal(expected); 360 | }); 361 | }); 362 | 363 | describe(".addLocalFileAsync() - async", () => { 364 | beforeEach(() => { 365 | genFiles(testFileFileList, destination); 366 | }); 367 | 368 | it("zip.addLocalFileAsync({ localPath, comment, zipPath }, callback)", (done) => { 369 | const zip = new Zip(); 370 | const zipPath = "folder"; 371 | const fileComment = "file Comment"; 372 | const list1 = testFileFileList.map((c) => c.name); 373 | list1.sort(); 374 | zip.addZipComment(fileComment); 375 | 376 | setImmediate( 377 | list1.reverse().reduce( 378 | function (next, file) { 379 | return function (err, done) { 380 | if (err) next(err, false); 381 | 382 | const localPath = pth.resolve(destination, file); 383 | const comment = pth.basename(file); 384 | 385 | zip.addLocalFileAsync({ localPath, comment, zipPath }, function (err, done) { 386 | if (err) next(err, false); 387 | 388 | setImmediate(next, undefined, true); 389 | }); 390 | }; 391 | }, 392 | function (err) { 393 | if (err) done(err); 394 | 395 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 396 | const zip1Comment = zip.getEntries().map((e) => e.comment); 397 | 398 | const expected1 = list1; 399 | const expected2 = list1.map((n) => pth.basename(n)); 400 | 401 | expect(zip1Entries.sort()).to.deep.equal(expected1.sort()); 402 | expect(zip1Comment.sort()).to.deep.equal(expected2.sort()); 403 | expect(zip.getZipComment()).to.equal(fileComment); 404 | 405 | done(); 406 | } 407 | ) 408 | ); 409 | }); 410 | }); 411 | 412 | describe(".addLocalFolderAsync2() - async", () => { 413 | beforeEach(() => { 414 | genFiles(testFileFolderList, destination); 415 | }); 
416 | 417 | it("zip.addLocalFolderAsync2(destination, callback)", (done) => { 418 | const zip = new Zip(); 419 | zip.addLocalFolderAsync2(destination, (error) => { 420 | if (error) done(false); 421 | 422 | zip.toBuffer(function () { 423 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 424 | 425 | const expected = [ 426 | "subfolder1/", 427 | "subfolder1/subfolder2/", 428 | "subfolder1/subfolder2/subfolder3/", 429 | "subfolder1/subfolder2/zipEntry1.txt", 430 | "subfolder1/subfolder2/subfolder3/subfolder4/", 431 | "subfolder1/subfolder2/subfolder3/zipEntry2.txt", 432 | "subfolder1/subfolder2/subfolder3/zipEntry3.txt" 433 | ]; 434 | 435 | expect(zip1Entries).to.deep.equal(expected.sort()); 436 | done(); 437 | }); 438 | }); 439 | }); 440 | 441 | it("zip.addLocalFolderAsync2({localPath}, callback)", (done) => { 442 | const zip = new Zip(); 443 | zip.addLocalFolderAsync2({ localPath: destination }, (error) => { 444 | if (error) done(false); 445 | 446 | zip.toBuffer(function () { 447 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 448 | 449 | const expected = [ 450 | "subfolder1/", 451 | "subfolder1/subfolder2/", 452 | "subfolder1/subfolder2/subfolder3/", 453 | "subfolder1/subfolder2/subfolder3/subfolder4/", 454 | "subfolder1/subfolder2/subfolder3/zipEntry2.txt", 455 | "subfolder1/subfolder2/subfolder3/zipEntry3.txt", 456 | "subfolder1/subfolder2/zipEntry1.txt" 457 | ].sort(); 458 | 459 | expect(zip1Entries).to.deep.equal(expected); 460 | done(); 461 | }); 462 | }); 463 | }); 464 | 465 | it("zip.addLocalFolderAsync2({localPath, namefix}, callback)", (done) => { 466 | const zip = new Zip(); 467 | const namefix = (str) => str.toLowerCase(); 468 | zip.addLocalFolderAsync2({ localPath: destination, namefix }, (error) => { 469 | if (error) done(false); 470 | 471 | zip.toBuffer(function () { 472 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 473 | 474 | const expected = [ 475 | "subfolder1/", 476 | "subfolder1/subfolder2/", 477 | "subfolder1/subfolder2/subfolder3/", 478 | "subfolder1/subfolder2/subfolder3/subfolder4/", 479 | "subfolder1/subfolder2/subfolder3/zipentry2.txt", 480 | "subfolder1/subfolder2/subfolder3/zipentry3.txt", 481 | "subfolder1/subfolder2/zipentry1.txt" 482 | ].sort(); 483 | 484 | expect(zip1Entries).to.deep.equal(expected); 485 | done(); 486 | }); 487 | }); 488 | }); 489 | 490 | it("zip.addLocalFolderAsync2({localPath, namefix}, callback)", (done) => { 491 | const zip = new Zip(); 492 | genFiles([{ name: "subfolder1/æble.txt", content: "apple" }], destination); 493 | 494 | zip.addLocalFolderAsync2({ localPath: destination, namefix: "latin1" }, (error) => { 495 | if (error) done(false); 496 | 497 | zip.toBuffer(function () { 498 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 499 | 500 | const expected = [ 501 | "subfolder1/", 502 | "subfolder1/ble.txt", 503 | "subfolder1/subfolder2/", 504 | "subfolder1/subfolder2/subfolder3/", 505 | "subfolder1/subfolder2/subfolder3/subfolder4/", 506 | "subfolder1/subfolder2/subfolder3/zipEntry2.txt", 507 | "subfolder1/subfolder2/subfolder3/zipEntry3.txt", 508 | "subfolder1/subfolder2/zipEntry1.txt" 509 | ].sort(); 510 | 511 | expect(zip1Entries).to.deep.equal(expected); 512 | done(); 513 | }); 514 | }); 515 | }); 516 | 517 | it("zip.addLocalFolderAsync2({localPath, zipPath}, callback)", (done) => { 518 | const zip = new Zip(); 519 | zip.addLocalFolderAsync2({ localPath: destination, zipPath: "parent" }, (error) => { 520 | if (error) done(false); 521 | 522 | zip.toBuffer(function () { 
523 |                     const zip1Entries = zip.getEntries().map((e) => e.entryName);
524 |
525 |                     const expected = [
526 |                         "parent/subfolder1/",
527 |                         "parent/subfolder1/subfolder2/",
528 |                         "parent/subfolder1/subfolder2/subfolder3/",
529 |                         "parent/subfolder1/subfolder2/subfolder3/subfolder4/",
530 |                         "parent/subfolder1/subfolder2/subfolder3/zipEntry2.txt",
531 |                         "parent/subfolder1/subfolder2/subfolder3/zipEntry3.txt",
532 |                         "parent/subfolder1/subfolder2/zipEntry1.txt"
533 |                     ].sort();
534 |
535 |                     expect(zip1Entries).to.deep.equal(expected);
536 |                     done();
537 |                 });
538 |             });
539 |         });
540 |
541 |         it("zip.addLocalFolderAsync2({localPath, filter}, callback) - regex filter", (done) => {
542 |             const zip = new Zip();
543 |             const filter = /zipEntry[23]\.txt/;
544 |             zip.addLocalFolderAsync2({ localPath: destination, filter }, (error) => {
545 |                 if (error) return done(error);
546 |
547 |                 zip.toBuffer(function () {
548 |                     const zip1Entries = zip.getEntries().map((e) => e.entryName);
549 |
550 |                     const expected = ["subfolder1/subfolder2/subfolder3/zipEntry2.txt", "subfolder1/subfolder2/subfolder3/zipEntry3.txt"].sort();
551 |
552 |                     expect(zip1Entries).to.deep.equal(expected);
553 |                     done();
554 |                 });
555 |             });
556 |         });
557 |
558 |         it("zip.addLocalFolderAsync2({localPath, filter}, callback) - function filter", (done) => {
559 |             const zip = new Zip();
560 |             const filter = function (str) {
561 |                 return str.slice(-1) === pth.sep;
562 |             };
563 |             zip.addLocalFolderAsync2({ localPath: destination, filter }, (error) => {
564 |                 if (error) return done(error);
565 |
566 |                 zip.toBuffer(function () {
567 |                     const zip1Entries = zip.getEntries().map((e) => e.entryName);
568 |
569 |                     const expected = ["subfolder1/", "subfolder1/subfolder2/", "subfolder1/subfolder2/subfolder3/", "subfolder1/subfolder2/subfolder3/subfolder4/"].sort();
570 |
571 |                     expect(zip1Entries).to.deep.equal(expected);
572 |                     done();
573 |                 });
574 |             });
575 |         });
576 |     });
577 |
578 |     describe(".addLocalFolderPromise() - promise", () => {
579 |         beforeEach(() => {
580 |             genFiles(testFileFolderList, destination);
581 |         });
582 |
583 |         it("zip.addLocalFolderPromise(destination)", async function () {
584 |             const zip = new Zip();
585 |             const zip1 = await zip.addLocalFolderPromise(destination);
586 |
587 |             zip1.toBuffer();
588 |             const zip1Entries = zip1.getEntries().map((e) => e.entryName);
589 |
590 |             const expected = [
591 |                 "subfolder1/",
592 |                 "subfolder1/subfolder2/",
593 |                 "subfolder1/subfolder2/subfolder3/",
594 |                 "subfolder1/subfolder2/zipEntry1.txt",
595 |                 "subfolder1/subfolder2/subfolder3/subfolder4/",
596 |                 "subfolder1/subfolder2/subfolder3/zipEntry2.txt",
597 |                 "subfolder1/subfolder2/subfolder3/zipEntry3.txt"
598 |             ].sort();
599 |
600 |             expect(zip1Entries).to.deep.equal(expected);
601 |         });
602 |
603 |         it("zip.addLocalFolderPromise(destination, {namefix})", async function () {
604 |             const zip = new Zip();
605 |             const namefix = (str) => str.toLowerCase();
606 |             const zip1 = await zip.addLocalFolderPromise(destination, { namefix });
607 |
608 |             zip1.toBuffer();
609 |             const zip1Entries = zip1.getEntries().map((e) => e.entryName);
610 |
611 |             const expected = [
612 |                 "subfolder1/",
613 |                 "subfolder1/subfolder2/",
614 |                 "subfolder1/subfolder2/subfolder3/",
615 |                 "subfolder1/subfolder2/subfolder3/subfolder4/",
616 |                 "subfolder1/subfolder2/subfolder3/zipentry2.txt",
617 |                 "subfolder1/subfolder2/subfolder3/zipentry3.txt",
618 |                 "subfolder1/subfolder2/zipentry1.txt"
619 |             ].sort();
620 |
621 |             expect(zip1Entries).to.deep.equal(expected);
622 |         });
623 |
624 |         it("zip.addLocalFolderPromise(destination, {zipPath})", async function () {
{zipPath})", async function () { 625 | const zip = new Zip(); 626 | await zip.addLocalFolderPromise(destination, { zipPath: "parent" }); 627 | const zip1Entries = zip.getEntries().map((e) => e.entryName); 628 | 629 | const expected = [ 630 | "parent/subfolder1/", 631 | "parent/subfolder1/subfolder2/", 632 | "parent/subfolder1/subfolder2/subfolder3/", 633 | "parent/subfolder1/subfolder2/zipEntry1.txt", 634 | "parent/subfolder1/subfolder2/subfolder3/subfolder4/", 635 | "parent/subfolder1/subfolder2/subfolder3/zipEntry2.txt", 636 | "parent/subfolder1/subfolder2/subfolder3/zipEntry3.txt" 637 | ]; 638 | 639 | expect(zip1Entries.sort()).to.deep.equal(expected.sort()); 640 | }); 641 | }); 642 | }); 643 | 644 | function walk(dir) { 645 | let results = []; 646 | const list = fs.readdirSync(dir); 647 | list.forEach(function (file) { 648 | file = dir + "/" + file; 649 | const stat = fs.statSync(file); 650 | if (stat && stat.isDirectory()) { 651 | /* Recurse into a subdirectory */ 652 | results = results.concat(walk(file)); 653 | } else { 654 | /* Is a file */ 655 | results.push(pth.normalize(file)); 656 | } 657 | }); 658 | return results; 659 | } 660 | 661 | function genFiles(list, location) { 662 | const utils = new Utils({ fs }); 663 | 664 | for (const el of list) { 665 | const path = pth.resolve(location, el.name); 666 | if (el.name.slice(-1) === "/") { 667 | utils.makeDir(path); 668 | } else { 669 | utils.makeDir(pth.dirname(path)); 670 | fs.writeFileSync(path, el.content, "utf8"); 671 | } 672 | } 673 | } 674 | --------------------------------------------------------------------------------