├── .github ├── labels.yml ├── release-drafter.yml └── workflows │ ├── labels.yaml │ ├── nodejs.yml │ ├── npmpublish.yml │ └── release-drafter.yml ├── .gitignore ├── .prettierignore ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── benchmark ├── common.js └── simple │ └── pack-zip.js ├── examples ├── express.js ├── fixtures │ ├── file1.txt │ ├── file2.txt │ └── somedir │ │ └── file3.txt ├── pack-tar.js ├── pack-tgz.js ├── pack-zip.js └── progress.js ├── index.js ├── lib ├── core.js ├── error.js ├── plugins │ ├── json.js │ ├── tar.js │ └── zip.js └── utils.js ├── netlify.toml ├── package-lock.json ├── package.json ├── renovate.json ├── test ├── archiver.js ├── fixtures │ ├── directory │ │ ├── .dotfile │ │ ├── ignore.txt │ │ ├── level0.txt │ │ └── subdir │ │ │ ├── level1.txt │ │ │ └── subsub │ │ │ └── level2.txt │ ├── empty.txt │ ├── executable.sh │ └── test.txt ├── helpers │ └── index.js └── plugins.js └── website ├── .gitignore ├── babel.config.js ├── docs ├── archive_formats.md ├── archiver_api.md └── quickstart.md ├── docusaurus.config.js ├── package.json ├── sidebars.js ├── src ├── css │ └── custom.css └── pages │ ├── index.js │ ├── styles.module.css │ └── zipstream.md └── static └── img ├── favicon.ico └── logo.svg /.github/labels.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "breaking-change" 3 | color: ee0701 4 | description: "A breaking change for existing users." 5 | - name: "bugfix" 6 | color: ee0701 7 | description: "Inconsistencies or issues which will cause a problem for users or implementors." 8 | - name: "documentation" 9 | color: 0052cc 10 | description: "Solely about the documentation of the project." 11 | - name: "enhancement" 12 | color: 1d76db 13 | description: "Enhancement of the code, not introducing new features." 14 | - name: "refactor" 15 | color: 1d76db 16 | description: "Improvement of existing code, not introducing new features." 
17 | - name: "performance" 18 | color: 1d76db 19 | description: "Improving performance, not introducing new features." 20 | - name: "new-feature" 21 | color: 0e8a16 22 | description: "New features or options." 23 | - name: "maintenance" 24 | color: 2af79e 25 | description: "Generic maintenance tasks." 26 | - name: "ci" 27 | color: 1d76db 28 | description: "Work that improves the continue integration." 29 | - name: "dependencies" 30 | color: 1d76db 31 | description: "Upgrade or downgrade of project dependencies." 32 | 33 | - name: "in-progress" 34 | color: fbca04 35 | description: "Issue is currently being resolved by a developer." 36 | - name: "stale" 37 | color: fef2c0 38 | description: "There has not been activity on this issue or PR for quite some time." 39 | - name: "no-stale" 40 | color: fef2c0 41 | description: "This issue or PR is exempted from the stable bot." 42 | 43 | - name: "security" 44 | color: ee0701 45 | description: "Marks a security issue that needs to be resolved asap." 46 | - name: "incomplete" 47 | color: fef2c0 48 | description: "Marks a PR or issue that is missing information." 49 | - name: "invalid" 50 | color: fef2c0 51 | description: "Marks a PR or issue that is missing information." 52 | 53 | - name: "beginner-friendly" 54 | color: 0e8a16 55 | description: "Good first issue for people wanting to contribute to the project." 56 | - name: "help-wanted" 57 | color: 0e8a16 58 | description: "We need some extra helping hands or expertise in order to resolve this." 59 | 60 | - name: "priority-critical" 61 | color: ee0701 62 | description: "This should be dealt with ASAP. Not fixing this issue would be a serious error." 63 | - name: "priority-high" 64 | color: b60205 65 | description: "After critical issues are fixed, these should be dealt with before any further issues." 66 | - name: "priority-medium" 67 | color: 0e8a16 68 | description: "This issue may be useful, and needs some attention." 
69 | - name: "priority-low" 70 | color: e4ea8a 71 | description: "Nice addition, maybe... someday..." 72 | 73 | - name: "major" 74 | color: b60205 75 | description: "This PR causes a major version bump in the version number." 76 | - name: "minor" 77 | color: 0e8a16 78 | description: "This PR causes a minor version bump in the version number." 79 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name-template: "$RESOLVED_VERSION" 3 | tag-template: "$RESOLVED_VERSION" 4 | change-template: "- $TITLE @$AUTHOR (#$NUMBER)" 5 | sort-direction: ascending 6 | 7 | categories: 8 | - title: "Breaking changes" 9 | labels: 10 | - "breaking-change" 11 | - title: "New features" 12 | labels: 13 | - "new-feature" 14 | - title: "Bug fixes" 15 | labels: 16 | - "bugfix" 17 | - title: "Enhancements" 18 | labels: 19 | - "enhancement" 20 | - "refactor" 21 | - "performance" 22 | - title: "Maintenance" 23 | labels: 24 | - "maintenance" 25 | - "ci" 26 | - title: "Documentation" 27 | labels: 28 | - "documentation" 29 | - title: "Dependency updates" 30 | labels: 31 | - "dependencies" 32 | 33 | version-resolver: 34 | major: 35 | labels: 36 | - "major" 37 | - "breaking-change" 38 | minor: 39 | labels: 40 | - "minor" 41 | - "new-feature" 42 | patch: 43 | labels: 44 | - "bugfix" 45 | - "chore" 46 | - "ci" 47 | - "dependencies" 48 | - "documentation" 49 | - "enhancement" 50 | - "performance" 51 | - "refactor" 52 | default: patch 53 | 54 | template: | 55 | ## What’s changed 56 | 57 | $CHANGES 58 | -------------------------------------------------------------------------------- /.github/workflows/labels.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Sync labels 3 | 4 | # yamllint disable-line rule:truthy 5 | on: 6 | push: 7 | branches: 8 | - main 9 | paths: 10 | - .github/labels.yml 11 | 
workflow_dispatch: 12 | 13 | jobs: 14 | labels: 15 | name: Sync labels 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Check out code from GitHub 19 | uses: actions/checkout@v4 20 | - name: Run Label Syncer 21 | uses: micnncim/action-label-syncer@v1.3.0 22 | env: 23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 24 | -------------------------------------------------------------------------------- /.github/workflows/nodejs.yml: -------------------------------------------------------------------------------- 1 | name: Node CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | strategy: 16 | matrix: 17 | node-version: [18.x, 20.x] 18 | 19 | steps: 20 | - uses: actions/checkout@v4.2.2 21 | - name: Use Node.js ${{ matrix.node-version }} 22 | uses: actions/setup-node@v4.2.0 23 | with: 24 | node-version: ${{ matrix.node-version }} 25 | - name: npm install and test 26 | run: | 27 | npm ci 28 | npm test 29 | env: 30 | CI: true 31 | -------------------------------------------------------------------------------- /.github/workflows/npmpublish.yml: -------------------------------------------------------------------------------- 1 | name: Node Publish Package 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4.2.2 12 | - uses: actions/setup-node@v4.2.0 13 | with: 14 | node-version: 20 15 | - run: npm ci 16 | - run: npm test 17 | 18 | publish-npm: 19 | needs: build 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v4.2.2 23 | - uses: actions/setup-node@v4.2.0 24 | with: 25 | node-version: 20 26 | registry-url: https://registry.npmjs.org/ 27 | - run: npm ci 28 | - run: npm publish 29 | env: 30 | NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} 31 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | name: Release Drafter 3 | 4 | # yamllint disable-line rule:truthy 5 | on: 6 | push: 7 | branches: 8 | - master 9 | workflow_dispatch: 10 | 11 | jobs: 12 | update_release_draft: 13 | name: Update Release Draft 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Run Release Drafter 17 | uses: release-drafter/release-drafter@v6.0.0 18 | env: 19 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | npm-debug.log 2 | node_modules/ 3 | tmp/ 4 | test/fixtures/bench 5 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | # Ignore artifacts: 2 | build 3 | coverage 4 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Changelog 2 | 3 | **8.0.0** - _October 17, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/7.0.1...8.0.0) 4 | 5 | **7.0.1** - _March 9, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/7.0.0...7.0.1) 6 | 7 | **7.0.0** - _February 28, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/6.0.2...7.0.0) 8 | 9 | **6.0.2** - _February 27, 2024_ — [Diff](https://github.com/archiverjs/node-archiver/compare/6.0.1...6.0.2) 10 | 11 | **6.0.1** - _September 3, 2023_ — [Diff](https://github.com/archiverjs/node-archiver/compare/6.0.0...6.0.1) 12 | 13 | **6.0.0** - _August 17, 2023_ — [Diff](https://github.com/archiverjs/node-archiver/compare/5.3.2...6.0.0) 14 | 15 | [Release Archive](https://github.com/archiverjs/node-archiver/releases) 16 | 
-------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Contributing 2 | 3 | #### Code Style Guide 4 | 5 | - code should be ran through `prettier` 6 | 7 | #### Tests 8 | 9 | - tests should be added in `test/` 10 | - tests can be run with `npm test` 11 | - see existing tests for guidance 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012-2014 Chris Talkington, contributors. 2 | 3 | Permission is hereby granted, free of charge, to any person 4 | obtaining a copy of this software and associated documentation 5 | files (the "Software"), to deal in the Software without 6 | restriction, including without limitation the rights to use, 7 | copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the 9 | Software is furnished to do so, subject to the following 10 | conditions: 11 | 12 | The above copyright notice and this permission notice shall be 13 | included in all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 17 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 18 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 19 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 20 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 22 | OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Archiver 2 | 3 | A streaming interface for archive generation 4 | 5 | Visit the [API documentation](https://www.archiverjs.com/) for a list of all methods available. 6 | 7 | ## Install 8 | 9 | ```bash 10 | npm install archiver --save 11 | ``` 12 | 13 | ## Quick Start 14 | 15 | ```js 16 | import fs from "fs"; 17 | import { ZipArchive } from "archiver"; 18 | 19 | // create a file to stream archive data to. 20 | const output = fs.createWriteStream(__dirname + "/example.zip"); 21 | const archive = new ZipArchive({ 22 | zlib: { level: 9 }, // Sets the compression level. 23 | }); 24 | 25 | // listen for all archive data to be written 26 | // 'close' event is fired only when a file descriptor is involved 27 | output.on("close", function () { 28 | console.log(archive.pointer() + " total bytes"); 29 | console.log( 30 | "archiver has been finalized and the output file descriptor has closed.", 31 | ); 32 | }); 33 | 34 | // This event is fired when the data source is drained no matter what was the data source. 35 | // It is not part of this library but rather from the NodeJS Stream API. 
36 | // @see: https://nodejs.org/api/stream.html#stream_event_end 37 | output.on("end", function () { 38 | console.log("Data has been drained"); 39 | }); 40 | 41 | // good practice to catch warnings (ie stat failures and other non-blocking errors) 42 | archive.on("warning", function (err) { 43 | if (err.code === "ENOENT") { 44 | // log warning 45 | } else { 46 | // throw error 47 | throw err; 48 | } 49 | }); 50 | 51 | // good practice to catch this error explicitly 52 | archive.on("error", function (err) { 53 | throw err; 54 | }); 55 | 56 | // pipe archive data to the file 57 | archive.pipe(output); 58 | 59 | // append a file from stream 60 | const file1 = __dirname + "/file1.txt"; 61 | archive.append(fs.createReadStream(file1), { name: "file1.txt" }); 62 | 63 | // append a file from string 64 | archive.append("string cheese!", { name: "file2.txt" }); 65 | 66 | // append a file from buffer 67 | const buffer3 = Buffer.from("buff it!"); 68 | archive.append(buffer3, { name: "file3.txt" }); 69 | 70 | // append a file 71 | archive.file("file1.txt", { name: "file4.txt" }); 72 | 73 | // append files from a sub-directory and naming it `new-subdir` within the archive 74 | archive.directory("subdir/", "new-subdir"); 75 | 76 | // append files from a sub-directory, putting its contents at the root of archive 77 | archive.directory("subdir/", false); 78 | 79 | // append files from a glob pattern 80 | archive.glob("file*.txt", { cwd: __dirname }); 81 | 82 | // finalize the archive (ie we are done appending files but streams have to finish yet) 83 | // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand 84 | archive.finalize(); 85 | ``` 86 | 87 | ## Formats 88 | 89 | Archiver ships with out of the box support for TAR and ZIP archives. 
90 | -------------------------------------------------------------------------------- /benchmark/common.js: -------------------------------------------------------------------------------- 1 | function binaryBuffer(n) { 2 | var buffer = Buffer.alloc(n); 3 | 4 | for (var i = 0; i < n; i++) { 5 | buffer.writeUInt8(i & 255, i); 6 | } 7 | 8 | return buffer; 9 | } 10 | 11 | module.exports.binaryBuffer = binaryBuffer; 12 | -------------------------------------------------------------------------------- /benchmark/simple/pack-zip.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | 3 | var mkdir = require("mkdirp"); 4 | var streamBench = require("stream-bench"); 5 | 6 | var archiver = require("../../"); 7 | var common = require("../common"); 8 | 9 | var binaryBuffer = common.binaryBuffer; 10 | 11 | var BITS_IN_BYTE = 1024; 12 | var BITS_IN_MBYTE = BITS_IN_BYTE * 1024; 13 | 14 | var file = false; 15 | var level = 1; 16 | 17 | if (process.argv[2]) { 18 | if (isNaN(parseInt(process.argv[2], 10))) { 19 | file = process.argv[2]; 20 | 21 | if (process.argv[3]) { 22 | level = parseInt(process.argv[3], 10); 23 | 24 | if (level > 9) { 25 | level = 1; 26 | } 27 | } 28 | } else { 29 | level = parseInt(process.argv[2], 10); 30 | } 31 | } 32 | 33 | var archive = archiver("zip", { 34 | zlib: { 35 | level: level, 36 | }, 37 | }); 38 | 39 | if (file === false) { 40 | mkdir.sync("tmp"); 41 | 42 | file = "tmp/20mb.dat"; 43 | fs.writeFileSync(file, binaryBuffer(BITS_IN_MBYTE * 20)); 44 | } 45 | 46 | console.log("zlib level: " + level); 47 | 48 | var bench = streamBench({ 49 | logReport: true, 50 | interval: 500, 51 | dump: true, 52 | }); 53 | 54 | archive.pipe(bench); 55 | 56 | archive.file(file, { name: "large file" }).finalize(); 57 | -------------------------------------------------------------------------------- /examples/express.js: -------------------------------------------------------------------------------- 1 | var 
app = require("express")(); 2 | var archiver = require("archiver"); 3 | var p = require("path"); 4 | 5 | app.get("/", function (req, res) { 6 | var archive = archiver("zip"); 7 | 8 | archive.on("error", function (err) { 9 | res.status(500).send({ error: err.message }); 10 | }); 11 | 12 | //on stream closed we can end the request 13 | archive.on("end", function () { 14 | console.log("Archive wrote %d bytes", archive.pointer()); 15 | }); 16 | 17 | //set the archive name 18 | res.attachment("archive-name.zip"); 19 | 20 | //this is the streaming magic 21 | archive.pipe(res); 22 | 23 | var files = [ 24 | __dirname + "/fixtures/file1.txt", 25 | __dirname + "/fixtures/file2.txt", 26 | ]; 27 | 28 | for (var i in files) { 29 | archive.file(files[i], { name: p.basename(files[i]) }); 30 | } 31 | 32 | var directories = [__dirname + "/fixtures/somedir"]; 33 | 34 | for (var i in directories) { 35 | archive.directory( 36 | directories[i], 37 | directories[i].replace(__dirname + "/fixtures", ""), 38 | ); 39 | } 40 | 41 | archive.finalize(); 42 | }); 43 | 44 | app.listen(3000); 45 | -------------------------------------------------------------------------------- /examples/fixtures/file1.txt: -------------------------------------------------------------------------------- 1 | Duis veniam commodo reprehenderit sint occaecat in sed anim veniam qui 2 | exercitation ut qui est cupidatat sint velit laboris ut eu enim consectetur amet 3 | tempor sit sed fugiat quis id exercitation reprehenderit eu voluptate laborum 4 | aliqua dolor irure magna aliqua amet commodo dolore aliquip commodo nostrud 5 | laborum Excepteur fugiat dolor exercitation nostrud laborum velit quis nisi 6 | Excepteur in minim sunt enim dolore proident sit do reprehenderit nulla id anim 7 | reprehenderit dolor ullamco elit sed cillum adipisicing reprehenderit aliquip 8 | ullamco Excepteur incididunt culpa aliquip velit consequat adipisicing eiusmod 9 | esse esse cillum commodo adipisicing ut qui quis dolor aliqua 
cupidatat 10 | Excepteur enim tempor in Ut reprehenderit deserunt voluptate in sed enim 11 | incididunt ad ex voluptate incididunt et laboris consequat Ut dolore incididunt 12 | velit ullamco minim nisi ut exercitation enim tempor occaecat anim nostrud 13 | veniam exercitation in Excepteur incididunt cupidatat ea irure mollit aliqua 14 | officia commodo incididunt ex consectetur nulla ullamco minim do magna pariatur 15 | esse laborum elit. 16 | -------------------------------------------------------------------------------- /examples/fixtures/file2.txt: -------------------------------------------------------------------------------- 1 | Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor 2 | incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis 3 | nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 4 | Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu 5 | fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in 6 | culpa qui officia deserunt mollit anim id est laborum. 
7 | -------------------------------------------------------------------------------- /examples/fixtures/somedir/file3.txt: -------------------------------------------------------------------------------- 1 | Duis veniam commodo reprehenderit sint occaecat in sed anim veniam qui 2 | exercitation ut qui est cupidatat sint velit laboris ut eu enim consectetur amet 3 | tempor sit sed fugiat quis id exercitation reprehenderit eu voluptate laborum 4 | aliqua dolor irure magna aliqua amet commodo dolore aliquip commodo nostrud 5 | laborum Excepteur fugiat dolor exercitation nostrud laborum velit quis nisi 6 | Excepteur in minim sunt enim dolore proident sit do reprehenderit nulla id anim 7 | reprehenderit dolor ullamco elit sed cillum adipisicing reprehenderit aliquip 8 | ullamco Excepteur incididunt culpa aliquip velit consequat adipisicing eiusmod 9 | esse esse cillum commodo adipisicing ut qui quis dolor aliqua cupidatat 10 | Excepteur enim tempor in Ut reprehenderit deserunt voluptate in sed enim 11 | incididunt ad ex voluptate incididunt et laboris consequat Ut dolore incididunt 12 | velit ullamco minim nisi ut exercitation enim tempor occaecat anim nostrud 13 | veniam exercitation in Excepteur incididunt cupidatat ea irure mollit aliqua 14 | officia commodo incididunt ex consectetur nulla ullamco minim do magna pariatur 15 | esse laborum elit. 
16 | -------------------------------------------------------------------------------- /examples/pack-tar.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | 3 | var archiver = require("archiver"); 4 | 5 | var output = fs.createWriteStream(__dirname + "/example-output.tar"); 6 | var archive = archiver("tar"); 7 | 8 | output.on("close", function () { 9 | console.log(archive.pointer() + " total bytes"); 10 | console.log( 11 | "archiver has been finalized and the output file descriptor has closed.", 12 | ); 13 | }); 14 | 15 | archive.on("error", function (err) { 16 | throw err; 17 | }); 18 | 19 | archive.pipe(output); 20 | 21 | var file1 = __dirname + "/fixtures/file1.txt"; 22 | var file2 = __dirname + "/fixtures/file2.txt"; 23 | 24 | archive 25 | .append(fs.createReadStream(file1), { name: "file1.txt" }) 26 | .append(fs.createReadStream(file2), { name: "file2.txt" }) 27 | .finalize(); 28 | -------------------------------------------------------------------------------- /examples/pack-tgz.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | var archiver = require("archiver"); 3 | 4 | var output = fs.createWriteStream(__dirname + "/example-output.tar.gz"); 5 | var archive = archiver("tar", { 6 | gzip: true, 7 | gzipOptions: { 8 | level: 1, 9 | }, 10 | }); 11 | 12 | output.on("close", function () { 13 | console.log(archive.pointer() + " total bytes"); 14 | console.log( 15 | "archiver has been finalized and the output file descriptor has closed.", 16 | ); 17 | }); 18 | 19 | archive.on("error", function (err) { 20 | throw err; 21 | }); 22 | 23 | archive.pipe(output); 24 | 25 | var file1 = __dirname + "/fixtures/file1.txt"; 26 | var file2 = __dirname + "/fixtures/file2.txt"; 27 | 28 | archive 29 | .append(fs.createReadStream(file1), { name: "file1.txt" }) 30 | .append(fs.createReadStream(file2), { name: "file2.txt" }) 31 | .finalize(); 32 | 
-------------------------------------------------------------------------------- /examples/pack-zip.js: -------------------------------------------------------------------------------- 1 | var fs = require("fs"); 2 | 3 | var archiver = require("archiver"); 4 | 5 | var output = fs.createWriteStream(__dirname + "/example-output.zip"); 6 | var archive = archiver("zip"); 7 | 8 | output.on("close", function () { 9 | console.log(archive.pointer() + " total bytes"); 10 | console.log( 11 | "archiver has been finalized and the output file descriptor has closed.", 12 | ); 13 | }); 14 | 15 | archive.on("error", function (err) { 16 | throw err; 17 | }); 18 | 19 | archive.pipe(output); 20 | 21 | var file1 = __dirname + "/fixtures/file1.txt"; 22 | var file2 = __dirname + "/fixtures/file2.txt"; 23 | 24 | archive 25 | .append(fs.createReadStream(file1), { name: "file1.txt" }) 26 | .append(fs.createReadStream(file2), { name: "file2.txt" }) 27 | .finalize(); 28 | -------------------------------------------------------------------------------- /examples/progress.js: -------------------------------------------------------------------------------- 1 | var archiver = require("../"); 2 | var tmp = require("os").tmpdir(); 3 | var async = require("async"); 4 | var fs = require("fs"); 5 | 6 | // You can change this by something bigger! 7 | var directory = __dirname + "/fixtures"; 8 | var destination = tmp + "/" + Date.now() + ".zip"; 9 | var destinationStream = fs.createWriteStream(destination); 10 | 11 | console.log("Zipping %s to %s", directory, destination); 12 | 13 | // To find out the progression, we may prefer to first calculate the size of the zip's future content 14 | // For this, we need to recursivly `readDir` and get the size from a `stat` call on every file. 
15 | // Note that Archiver is also computing the total size, but it's done asynchronously and may not be accurate 16 | directorySize(directory, function (err, totalSize) { 17 | var prettyTotalSize = bytesToSize(totalSize); 18 | var archive = archiver("zip"); 19 | 20 | archive.on("error", function (err) { 21 | console.error("Error while zipping", err); 22 | }); 23 | 24 | archive.on("progress", function (progress) { 25 | var percent = (progress.fs.processedBytes / totalSize) * 100; 26 | 27 | console.log( 28 | "%s / %s (%d %)", 29 | bytesToSize(progress.fs.processedBytes), 30 | prettyTotalSize, 31 | percent, 32 | ); 33 | }); 34 | 35 | //on stream closed we can end the request 36 | archive.on("end", function () { 37 | console.log("%s / %s (%d %)", prettyTotalSize, prettyTotalSize, 100); 38 | 39 | var archiveSize = archive.pointer(); 40 | 41 | console.log("Archiver wrote %s bytes", bytesToSize(archiveSize)); 42 | console.log("Compression ratio: %d:1", Math.round(totalSize / archiveSize)); 43 | console.log("Space savings: %d %", (1 - archiveSize / totalSize) * 100); 44 | }); 45 | 46 | archive.pipe(destinationStream); 47 | 48 | archive.directory(directory); 49 | 50 | archive.finalize(); 51 | }); 52 | 53 | /** 54 | * You can use a nodejs module to do this, this function is really straightforward and will fail on error 55 | * Note that when computing a directory size you may want to skip some errors (like ENOENT) 56 | * That said, this is for demonstration purpose and may not suit a production environnment 57 | */ 58 | function directorySize(path, cb, size) { 59 | if (size === undefined) { 60 | size = 0; 61 | } 62 | 63 | fs.stat(path, function (err, stat) { 64 | if (err) { 65 | cb(err); 66 | return; 67 | } 68 | 69 | size += stat.size; 70 | 71 | if (!stat.isDirectory()) { 72 | cb(null, size); 73 | return; 74 | } 75 | 76 | fs.readdir(path, function (err, paths) { 77 | if (err) { 78 | cb(err); 79 | return; 80 | } 81 | 82 | async.map( 83 | paths.map(function (p) { 84 | return 
path + "/" + p; 85 | }), 86 | directorySize, 87 | function (err, sizes) { 88 | size += sizes.reduce(function (a, b) { 89 | return a + b; 90 | }, 0); 91 | cb(err, size); 92 | }, 93 | ); 94 | }); 95 | }); 96 | } 97 | 98 | /** 99 | * https://stackoverflow.com/questions/15900485/correct-way-to-convert-size-in-bytes-to-kb-mb-gb-in-javascript#18650828 100 | */ 101 | function bytesToSize(bytes) { 102 | var sizes = ["Bytes", "KB", "MB", "GB", "TB"]; 103 | if (bytes == 0) return "0 Byte"; 104 | var i = parseInt(Math.floor(Math.log(bytes) / Math.log(1024))); 105 | return Math.round(bytes / Math.pow(1024, i), 2) + " " + sizes[i]; 106 | } 107 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | import Archiver from "./lib/core.js"; 2 | import Zip from "./lib/plugins/zip.js"; 3 | import Tar from "./lib/plugins/tar.js"; 4 | import Json from "./lib/plugins/json.js"; 5 | 6 | export { Archiver }; 7 | 8 | export class ZipArchive extends Archiver { 9 | constructor(options) { 10 | super(options); 11 | this._format = "zip"; 12 | this._module = new Zip(options); 13 | this._supportsDirectory = true; 14 | this._supportsSymlink = true; 15 | this._modulePipe(); 16 | } 17 | } 18 | 19 | export class TarArchive extends Archiver { 20 | constructor(options) { 21 | super(options); 22 | this._format = "tar"; 23 | this._module = new Tar(options); 24 | this._supportsDirectory = true; 25 | this._supportsSymlink = true; 26 | this._modulePipe(); 27 | } 28 | } 29 | 30 | export class JsonArchive extends Archiver { 31 | constructor(options) { 32 | super(options); 33 | this._format = "json"; 34 | this._module = new Json(options); 35 | this._supportsDirectory = true; 36 | this._supportsSymlink = true; 37 | this._modulePipe(); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /lib/core.js: 
-------------------------------------------------------------------------------- 1 | import { createReadStream, lstat, readlinkSync, Stats } from "fs"; 2 | import { isStream } from "is-stream"; 3 | import readdirGlob from "readdir-glob"; 4 | import { Readable } from "lazystream"; 5 | import { queue } from "async"; 6 | import { 7 | dirname, 8 | relative as relativePath, 9 | resolve as resolvePath, 10 | } from "path"; 11 | import { ArchiverError } from "./error.js"; 12 | import { Transform } from "readable-stream"; 13 | import { 14 | dateify, 15 | normalizeInputSource, 16 | sanitizePath, 17 | trailingSlashIt, 18 | } from "./utils.js"; 19 | const { ReaddirGlob } = readdirGlob; 20 | const win32 = process.platform === "win32"; 21 | 22 | export default class Archiver extends Transform { 23 | _supportsDirectory = false; 24 | _supportsSymlink = false; 25 | 26 | /** 27 | * @constructor 28 | * @param {String} format The archive format to use. 29 | * @param {(CoreOptions|TransformOptions)} options See also {@link ZipOptions} and {@link TarOptions}. 30 | */ 31 | constructor(options) { 32 | options = { 33 | highWaterMark: 1024 * 1024, 34 | statConcurrency: 4, 35 | ...options, 36 | }; 37 | super(options); 38 | this.options = options; 39 | this._format = false; 40 | this._module = false; 41 | this._pending = 0; 42 | this._pointer = 0; 43 | this._entriesCount = 0; 44 | this._entriesProcessedCount = 0; 45 | this._fsEntriesTotalBytes = 0; 46 | this._fsEntriesProcessedBytes = 0; 47 | this._queue = queue(this._onQueueTask.bind(this), 1); 48 | this._queue.drain(this._onQueueDrain.bind(this)); 49 | this._statQueue = queue( 50 | this._onStatQueueTask.bind(this), 51 | options.statConcurrency, 52 | ); 53 | this._statQueue.drain(this._onQueueDrain.bind(this)); 54 | this._state = { 55 | aborted: false, 56 | finalize: false, 57 | finalizing: false, 58 | finalized: false, 59 | modulePiped: false, 60 | }; 61 | this._streams = []; 62 | } 63 | 64 | /** 65 | * Internal logic for `abort`. 
66 | * 67 | * @private 68 | * @return void 69 | */ 70 | _abort() { 71 | this._state.aborted = true; 72 | this._queue.kill(); 73 | this._statQueue.kill(); 74 | if (this._queue.idle()) { 75 | this._shutdown(); 76 | } 77 | } 78 | /** 79 | * Internal helper for appending files. 80 | * 81 | * @private 82 | * @param {String} filepath The source filepath. 83 | * @param {EntryData} data The entry data. 84 | * @return void 85 | */ 86 | _append(filepath, data) { 87 | data = data || {}; 88 | let task = { 89 | source: null, 90 | filepath: filepath, 91 | }; 92 | if (!data.name) { 93 | data.name = filepath; 94 | } 95 | data.sourcePath = filepath; 96 | task.data = data; 97 | this._entriesCount++; 98 | if (data.stats && data.stats instanceof Stats) { 99 | task = this._updateQueueTaskWithStats(task, data.stats); 100 | if (task) { 101 | if (data.stats.size) { 102 | this._fsEntriesTotalBytes += data.stats.size; 103 | } 104 | this._queue.push(task); 105 | } 106 | } else { 107 | this._statQueue.push(task); 108 | } 109 | } 110 | /** 111 | * Internal logic for `finalize`. 112 | * 113 | * @private 114 | * @return void 115 | */ 116 | _finalize() { 117 | if ( 118 | this._state.finalizing || 119 | this._state.finalized || 120 | this._state.aborted 121 | ) { 122 | return; 123 | } 124 | this._state.finalizing = true; 125 | this._moduleFinalize(); 126 | this._state.finalizing = false; 127 | this._state.finalized = true; 128 | } 129 | /** 130 | * Checks the various state variables to determine if we can `finalize`. 131 | * 132 | * @private 133 | * @return {Boolean} 134 | */ 135 | _maybeFinalize() { 136 | if ( 137 | this._state.finalizing || 138 | this._state.finalized || 139 | this._state.aborted 140 | ) { 141 | return false; 142 | } 143 | if ( 144 | this._state.finalize && 145 | this._pending === 0 && 146 | this._queue.idle() && 147 | this._statQueue.idle() 148 | ) { 149 | this._finalize(); 150 | return true; 151 | } 152 | return false; 153 | } 154 | /** 155 | * Appends an entry to the module. 
156 | * 157 | * @private 158 | * @fires Archiver#entry 159 | * @param {(Buffer|Stream)} source 160 | * @param {EntryData} data 161 | * @param {Function} callback 162 | * @return void 163 | */ 164 | _moduleAppend(source, data, callback) { 165 | if (this._state.aborted) { 166 | callback(); 167 | return; 168 | } 169 | this._module.append( 170 | source, 171 | data, 172 | function (err) { 173 | this._task = null; 174 | if (this._state.aborted) { 175 | this._shutdown(); 176 | return; 177 | } 178 | if (err) { 179 | this.emit("error", err); 180 | setImmediate(callback); 181 | return; 182 | } 183 | /** 184 | * Fires when the entry's input has been processed and appended to the archive. 185 | * 186 | * @event Archiver#entry 187 | * @type {EntryData} 188 | */ 189 | this.emit("entry", data); 190 | this._entriesProcessedCount++; 191 | if (data.stats && data.stats.size) { 192 | this._fsEntriesProcessedBytes += data.stats.size; 193 | } 194 | /** 195 | * @event Archiver#progress 196 | * @type {ProgressData} 197 | */ 198 | this.emit("progress", { 199 | entries: { 200 | total: this._entriesCount, 201 | processed: this._entriesProcessedCount, 202 | }, 203 | fs: { 204 | totalBytes: this._fsEntriesTotalBytes, 205 | processedBytes: this._fsEntriesProcessedBytes, 206 | }, 207 | }); 208 | setImmediate(callback); 209 | }.bind(this), 210 | ); 211 | } 212 | /** 213 | * Finalizes the module. 214 | * 215 | * @private 216 | * @return void 217 | */ 218 | _moduleFinalize() { 219 | if (typeof this._module.finalize === "function") { 220 | this._module.finalize(); 221 | } else if (typeof this._module.end === "function") { 222 | this._module.end(); 223 | } else { 224 | this.emit("error", new ArchiverError("NOENDMETHOD")); 225 | } 226 | } 227 | /** 228 | * Pipes the module to our internal stream with error bubbling. 
229 | * 230 | * @private 231 | * @return void 232 | */ 233 | _modulePipe() { 234 | this._module.on("error", this._onModuleError.bind(this)); 235 | this._module.pipe(this); 236 | this._state.modulePiped = true; 237 | } 238 | /** 239 | * Unpipes the module from our internal stream. 240 | * 241 | * @private 242 | * @return void 243 | */ 244 | _moduleUnpipe() { 245 | this._module.unpipe(this); 246 | this._state.modulePiped = false; 247 | } 248 | /** 249 | * Normalizes entry data with fallbacks for key properties. 250 | * 251 | * @private 252 | * @param {Object} data 253 | * @param {fs.Stats} stats 254 | * @return {Object} 255 | */ 256 | _normalizeEntryData(data, stats) { 257 | data = { 258 | type: "file", 259 | name: null, 260 | date: null, 261 | mode: null, 262 | prefix: null, 263 | sourcePath: null, 264 | stats: false, 265 | ...data, 266 | }; 267 | if (stats && data.stats === false) { 268 | data.stats = stats; 269 | } 270 | let isDir = data.type === "directory"; 271 | if (data.name) { 272 | if (typeof data.prefix === "string" && "" !== data.prefix) { 273 | data.name = data.prefix + "/" + data.name; 274 | data.prefix = null; 275 | } 276 | data.name = sanitizePath(data.name); 277 | if (data.type !== "symlink" && data.name.slice(-1) === "/") { 278 | isDir = true; 279 | data.type = "directory"; 280 | } else if (isDir) { 281 | data.name += "/"; 282 | } 283 | } 284 | // 511 === 0777; 493 === 0755; 438 === 0666; 420 === 0644 285 | if (typeof data.mode === "number") { 286 | if (win32) { 287 | data.mode &= 511; 288 | } else { 289 | data.mode &= 4095; 290 | } 291 | } else if (data.stats && data.mode === null) { 292 | if (win32) { 293 | data.mode = data.stats.mode & 511; 294 | } else { 295 | data.mode = data.stats.mode & 4095; 296 | } 297 | // stat isn't reliable on windows; force 0755 for dir 298 | if (win32 && isDir) { 299 | data.mode = 493; 300 | } 301 | } else if (data.mode === null) { 302 | data.mode = isDir ? 
493 : 420; 303 | } 304 | if (data.stats && data.date === null) { 305 | data.date = data.stats.mtime; 306 | } else { 307 | data.date = dateify(data.date); 308 | } 309 | return data; 310 | } 311 | /** 312 | * Error listener that re-emits error on to our internal stream. 313 | * 314 | * @private 315 | * @param {Error} err 316 | * @return void 317 | */ 318 | _onModuleError(err) { 319 | /** 320 | * @event Archiver#error 321 | * @type {ErrorData} 322 | */ 323 | this.emit("error", err); 324 | } 325 | /** 326 | * Checks the various state variables after queue has drained to determine if 327 | * we need to `finalize`. 328 | * 329 | * @private 330 | * @return void 331 | */ 332 | _onQueueDrain() { 333 | if ( 334 | this._state.finalizing || 335 | this._state.finalized || 336 | this._state.aborted 337 | ) { 338 | return; 339 | } 340 | if ( 341 | this._state.finalize && 342 | this._pending === 0 && 343 | this._queue.idle() && 344 | this._statQueue.idle() 345 | ) { 346 | this._finalize(); 347 | } 348 | } 349 | /** 350 | * Appends each queue task to the module. 351 | * 352 | * @private 353 | * @param {Object} task 354 | * @param {Function} callback 355 | * @return void 356 | */ 357 | _onQueueTask(task, callback) { 358 | const fullCallback = () => { 359 | if (task.data.callback) { 360 | task.data.callback(); 361 | } 362 | callback(); 363 | }; 364 | if ( 365 | this._state.finalizing || 366 | this._state.finalized || 367 | this._state.aborted 368 | ) { 369 | fullCallback(); 370 | return; 371 | } 372 | this._task = task; 373 | this._moduleAppend(task.source, task.data, fullCallback); 374 | } 375 | /** 376 | * Performs a file stat and reinjects the task back into the queue. 
377 | * 378 | * @private 379 | * @param {Object} task 380 | * @param {Function} callback 381 | * @return void 382 | */ 383 | _onStatQueueTask(task, callback) { 384 | if ( 385 | this._state.finalizing || 386 | this._state.finalized || 387 | this._state.aborted 388 | ) { 389 | callback(); 390 | return; 391 | } 392 | lstat( 393 | task.filepath, 394 | function (err, stats) { 395 | if (this._state.aborted) { 396 | setImmediate(callback); 397 | return; 398 | } 399 | if (err) { 400 | this._entriesCount--; 401 | /** 402 | * @event Archiver#warning 403 | * @type {ErrorData} 404 | */ 405 | this.emit("warning", err); 406 | setImmediate(callback); 407 | return; 408 | } 409 | task = this._updateQueueTaskWithStats(task, stats); 410 | if (task) { 411 | if (stats.size) { 412 | this._fsEntriesTotalBytes += stats.size; 413 | } 414 | this._queue.push(task); 415 | } 416 | setImmediate(callback); 417 | }.bind(this), 418 | ); 419 | } 420 | /** 421 | * Unpipes the module and ends our internal stream. 422 | * 423 | * @private 424 | * @return void 425 | */ 426 | _shutdown() { 427 | this._moduleUnpipe(); 428 | this.end(); 429 | } 430 | /** 431 | * Tracks the bytes emitted by our internal stream. 432 | * 433 | * @private 434 | * @param {Buffer} chunk 435 | * @param {String} encoding 436 | * @param {Function} callback 437 | * @return void 438 | */ 439 | _transform(chunk, encoding, callback) { 440 | if (chunk) { 441 | this._pointer += chunk.length; 442 | } 443 | callback(null, chunk); 444 | } 445 | /** 446 | * Updates and normalizes a queue task using stats data. 
447 | * 448 | * @private 449 | * @param {Object} task 450 | * @param {Stats} stats 451 | * @return {Object} 452 | */ 453 | _updateQueueTaskWithStats(task, stats) { 454 | if (stats.isFile()) { 455 | task.data.type = "file"; 456 | task.data.sourceType = "stream"; 457 | task.source = new Readable(function () { 458 | return createReadStream(task.filepath); 459 | }); 460 | } else if (stats.isDirectory() && this._supportsDirectory) { 461 | task.data.name = trailingSlashIt(task.data.name); 462 | task.data.type = "directory"; 463 | task.data.sourcePath = trailingSlashIt(task.filepath); 464 | task.data.sourceType = "buffer"; 465 | task.source = Buffer.concat([]); 466 | } else if (stats.isSymbolicLink() && this._supportsSymlink) { 467 | const linkPath = readlinkSync(task.filepath); 468 | const dirName = dirname(task.filepath); 469 | task.data.type = "symlink"; 470 | task.data.linkname = relativePath( 471 | dirName, 472 | resolvePath(dirName, linkPath), 473 | ); 474 | task.data.sourceType = "buffer"; 475 | task.source = Buffer.concat([]); 476 | } else { 477 | if (stats.isDirectory()) { 478 | this.emit( 479 | "warning", 480 | new ArchiverError("DIRECTORYNOTSUPPORTED", task.data), 481 | ); 482 | } else if (stats.isSymbolicLink()) { 483 | this.emit( 484 | "warning", 485 | new ArchiverError("SYMLINKNOTSUPPORTED", task.data), 486 | ); 487 | } else { 488 | this.emit("warning", new ArchiverError("ENTRYNOTSUPPORTED", task.data)); 489 | } 490 | return null; 491 | } 492 | task.data = this._normalizeEntryData(task.data, stats); 493 | return task; 494 | } 495 | /** 496 | * Aborts the archiving process, taking a best-effort approach, by: 497 | * 498 | * - removing any pending queue tasks 499 | * - allowing any active queue workers to finish 500 | * - detaching internal module pipes 501 | * - ending both sides of the Transform stream 502 | * 503 | * It will NOT drain any remaining sources. 
504 | * 505 | * @return {this} 506 | */ 507 | abort() { 508 | if (this._state.aborted || this._state.finalized) { 509 | return this; 510 | } 511 | this._abort(); 512 | return this; 513 | } 514 | /** 515 | * Appends an input source (text string, buffer, or stream) to the instance. 516 | * 517 | * When the instance has received, processed, and emitted the input, the `entry` 518 | * event is fired. 519 | * 520 | * @fires Archiver#entry 521 | * @param {(Buffer|Stream|String)} source The input source. 522 | * @param {EntryData} data See also {@link ZipEntryData} and {@link TarEntryData}. 523 | * @return {this} 524 | */ 525 | append(source, data) { 526 | if (this._state.finalize || this._state.aborted) { 527 | this.emit("error", new ArchiverError("QUEUECLOSED")); 528 | return this; 529 | } 530 | data = this._normalizeEntryData(data); 531 | if (typeof data.name !== "string" || data.name.length === 0) { 532 | this.emit("error", new ArchiverError("ENTRYNAMEREQUIRED")); 533 | return this; 534 | } 535 | if (data.type === "directory" && !this._supportsDirectory) { 536 | this.emit( 537 | "error", 538 | new ArchiverError("DIRECTORYNOTSUPPORTED", { name: data.name }), 539 | ); 540 | return this; 541 | } 542 | source = normalizeInputSource(source); 543 | if (Buffer.isBuffer(source)) { 544 | data.sourceType = "buffer"; 545 | } else if (isStream(source)) { 546 | data.sourceType = "stream"; 547 | } else { 548 | this.emit( 549 | "error", 550 | new ArchiverError("INPUTSTEAMBUFFERREQUIRED", { name: data.name }), 551 | ); 552 | return this; 553 | } 554 | this._entriesCount++; 555 | this._queue.push({ 556 | data: data, 557 | source: source, 558 | }); 559 | return this; 560 | } 561 | /** 562 | * Appends a directory and its files, recursively, given its dirpath. 563 | * 564 | * @param {String} dirpath The source directory path. 565 | * @param {String} destpath The destination path within the archive. 
566 | * @param {(EntryData|Function)} data See also [ZipEntryData]{@link ZipEntryData} and 567 | * [TarEntryData]{@link TarEntryData}. 568 | * @return {this} 569 | */ 570 | directory(dirpath, destpath, data) { 571 | if (this._state.finalize || this._state.aborted) { 572 | this.emit("error", new ArchiverError("QUEUECLOSED")); 573 | return this; 574 | } 575 | if (typeof dirpath !== "string" || dirpath.length === 0) { 576 | this.emit("error", new ArchiverError("DIRECTORYDIRPATHREQUIRED")); 577 | return this; 578 | } 579 | this._pending++; 580 | if (destpath === false) { 581 | destpath = ""; 582 | } else if (typeof destpath !== "string") { 583 | destpath = dirpath; 584 | } 585 | var dataFunction = false; 586 | if (typeof data === "function") { 587 | dataFunction = data; 588 | data = {}; 589 | } else if (typeof data !== "object") { 590 | data = {}; 591 | } 592 | var globOptions = { 593 | stat: true, 594 | dot: true, 595 | }; 596 | function onGlobEnd() { 597 | this._pending--; 598 | this._maybeFinalize(); 599 | } 600 | function onGlobError(err) { 601 | this.emit("error", err); 602 | } 603 | function onGlobMatch(match) { 604 | globber.pause(); 605 | let ignoreMatch = false; 606 | let entryData = Object.assign({}, data); 607 | entryData.name = match.relative; 608 | entryData.prefix = destpath; 609 | entryData.stats = match.stat; 610 | entryData.callback = globber.resume.bind(globber); 611 | try { 612 | if (dataFunction) { 613 | entryData = dataFunction(entryData); 614 | if (entryData === false) { 615 | ignoreMatch = true; 616 | } else if (typeof entryData !== "object") { 617 | throw new ArchiverError("DIRECTORYFUNCTIONINVALIDDATA", { 618 | dirpath: dirpath, 619 | }); 620 | } 621 | } 622 | } catch (e) { 623 | this.emit("error", e); 624 | return; 625 | } 626 | if (ignoreMatch) { 627 | globber.resume(); 628 | return; 629 | } 630 | this._append(match.absolute, entryData); 631 | } 632 | const globber = readdirGlob(dirpath, globOptions); 633 | globber.on("error", 
onGlobError.bind(this)); 634 | globber.on("match", onGlobMatch.bind(this)); 635 | globber.on("end", onGlobEnd.bind(this)); 636 | return this; 637 | } 638 | /** 639 | * Appends a file given its filepath using a 640 | * [lazystream]{@link https://github.com/jpommerening/node-lazystream} wrapper to 641 | * prevent issues with open file limits. 642 | * 643 | * When the instance has received, processed, and emitted the file, the `entry` 644 | * event is fired. 645 | * 646 | * @param {String} filepath The source filepath. 647 | * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and 648 | * [TarEntryData]{@link TarEntryData}. 649 | * @return {this} 650 | */ 651 | file(filepath, data) { 652 | if (this._state.finalize || this._state.aborted) { 653 | this.emit("error", new ArchiverError("QUEUECLOSED")); 654 | return this; 655 | } 656 | if (typeof filepath !== "string" || filepath.length === 0) { 657 | this.emit("error", new ArchiverError("FILEFILEPATHREQUIRED")); 658 | return this; 659 | } 660 | this._append(filepath, data); 661 | return this; 662 | } 663 | /** 664 | * Appends multiple files that match a glob pattern. 665 | * 666 | * @param {String} pattern The [glob pattern]{@link https://github.com/isaacs/minimatch} to match. 667 | * @param {Object} options See [node-readdir-glob]{@link https://github.com/yqnn/node-readdir-glob#options}. 668 | * @param {EntryData} data See also [ZipEntryData]{@link ZipEntryData} and 669 | * [TarEntryData]{@link TarEntryData}. 
670 | * @return {this} 671 | */ 672 | glob(pattern, options, data) { 673 | this._pending++; 674 | options = { 675 | stat: true, 676 | pattern: pattern, 677 | ...options, 678 | }; 679 | function onGlobEnd() { 680 | this._pending--; 681 | this._maybeFinalize(); 682 | } 683 | function onGlobError(err) { 684 | this.emit("error", err); 685 | } 686 | function onGlobMatch(match) { 687 | globber.pause(); 688 | const entryData = Object.assign({}, data); 689 | entryData.callback = globber.resume.bind(globber); 690 | entryData.stats = match.stat; 691 | entryData.name = match.relative; 692 | this._append(match.absolute, entryData); 693 | } 694 | const globber = new ReaddirGlob(options.cwd || ".", options); 695 | globber.on("error", onGlobError.bind(this)); 696 | globber.on("match", onGlobMatch.bind(this)); 697 | globber.on("end", onGlobEnd.bind(this)); 698 | return this; 699 | } 700 | /** 701 | * Finalizes the instance and prevents further appending to the archive 702 | * structure (queue will continue til drained). 703 | * 704 | * The `end`, `close` or `finish` events on the destination stream may fire 705 | * right after calling this method so you should set listeners beforehand to 706 | * properly detect stream completion. 
707 | * 708 | * @return {Promise} 709 | */ 710 | finalize() { 711 | if (this._state.aborted) { 712 | var abortedError = new ArchiverError("ABORTED"); 713 | this.emit("error", abortedError); 714 | return Promise.reject(abortedError); 715 | } 716 | if (this._state.finalize) { 717 | var finalizingError = new ArchiverError("FINALIZING"); 718 | this.emit("error", finalizingError); 719 | return Promise.reject(finalizingError); 720 | } 721 | this._state.finalize = true; 722 | if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) { 723 | this._finalize(); 724 | } 725 | var self = this; 726 | return new Promise(function (resolve, reject) { 727 | var errored; 728 | self._module.on("end", function () { 729 | if (!errored) { 730 | resolve(); 731 | } 732 | }); 733 | self._module.on("error", function (err) { 734 | errored = true; 735 | reject(err); 736 | }); 737 | }); 738 | } 739 | /** 740 | * Appends a symlink to the instance. 741 | * 742 | * This does NOT interact with filesystem and is used for programmatically creating symlinks. 743 | * 744 | * @param {String} filepath The symlink path (within archive). 745 | * @param {String} target The target path (within archive). 746 | * @param {Number} mode Sets the entry permissions. 
747 | * @return {this} 748 | */ 749 | symlink(filepath, target, mode) { 750 | if (this._state.finalize || this._state.aborted) { 751 | this.emit("error", new ArchiverError("QUEUECLOSED")); 752 | return this; 753 | } 754 | if (typeof filepath !== "string" || filepath.length === 0) { 755 | this.emit("error", new ArchiverError("SYMLINKFILEPATHREQUIRED")); 756 | return this; 757 | } 758 | if (typeof target !== "string" || target.length === 0) { 759 | this.emit( 760 | "error", 761 | new ArchiverError("SYMLINKTARGETREQUIRED", { filepath: filepath }), 762 | ); 763 | return this; 764 | } 765 | if (!this._supportsSymlink) { 766 | this.emit( 767 | "error", 768 | new ArchiverError("SYMLINKNOTSUPPORTED", { filepath: filepath }), 769 | ); 770 | return this; 771 | } 772 | var data = {}; 773 | data.type = "symlink"; 774 | data.name = filepath.replace(/\\/g, "/"); 775 | data.linkname = target.replace(/\\/g, "/"); 776 | data.sourceType = "buffer"; 777 | if (typeof mode === "number") { 778 | data.mode = mode; 779 | } 780 | this._entriesCount++; 781 | this._queue.push({ 782 | data: data, 783 | source: Buffer.concat([]), 784 | }); 785 | return this; 786 | } 787 | /** 788 | * Returns the current length (in bytes) that has been emitted. 789 | * 790 | * @return {Number} 791 | */ 792 | pointer() { 793 | return this._pointer; 794 | } 795 | } 796 | 797 | /** 798 | * @typedef {Object} CoreOptions 799 | * @global 800 | * @property {Number} [statConcurrency=4] Sets the number of workers used to 801 | * process the internal fs stat queue. 802 | */ 803 | 804 | /** 805 | * @typedef {Object} TransformOptions 806 | * @property {Boolean} [allowHalfOpen=true] If set to false, then the stream 807 | * will automatically end the readable side when the writable side ends and vice 808 | * versa. 809 | * @property {Boolean} [readableObjectMode=false] Sets objectMode for readable 810 | * side of the stream. Has no effect if objectMode is true. 
811 | * @property {Boolean} [writableObjectMode=false] Sets objectMode for writable 812 | * side of the stream. Has no effect if objectMode is true. 813 | * @property {Boolean} [decodeStrings=true] Whether or not to decode strings 814 | * into Buffers before passing them to _write(). `Writable` 815 | * @property {String} [encoding=NULL] If specified, then buffers will be decoded 816 | * to strings using the specified encoding. `Readable` 817 | * @property {Number} [highWaterMark=16kb] The maximum number of bytes to store 818 | * in the internal buffer before ceasing to read from the underlying resource. 819 | * `Readable` `Writable` 820 | * @property {Boolean} [objectMode=false] Whether this stream should behave as a 821 | * stream of objects. Meaning that stream.read(n) returns a single value instead 822 | * of a Buffer of size n. `Readable` `Writable` 823 | */ 824 | 825 | /** 826 | * @typedef {Object} EntryData 827 | * @property {String} name Sets the entry name including internal path. 828 | * @property {(String|Date)} [date=NOW()] Sets the entry date. 829 | * @property {Number} [mode=D:0755/F:0644] Sets the entry permissions. 830 | * @property {String} [prefix] Sets a path prefix for the entry name. Useful 831 | * when working with methods like `directory` or `glob`. 832 | * @property {fs.Stats} [stats] Sets the fs stat data for this entry allowing 833 | * for reduction of fs stat calls when stat data is already known. 834 | */ 835 | 836 | /** 837 | * @typedef {Object} ErrorData 838 | * @property {String} message The message of the error. 839 | * @property {String} code The error code assigned to this error. 840 | * @property {String} data Additional data provided for reporting or debugging (where available). 841 | */ 842 | 843 | /** 844 | * @typedef {Object} ProgressData 845 | * @property {Object} entries 846 | * @property {Number} entries.total Number of entries that have been appended. 
 * @property {Number} fs.totalBytes Number of bytes that have been appended. Calculated asynchronously and might not be accurate: it grows while entries are added. (based on fs.Stats)
import util from "util";

// Map of known error codes to their human-readable messages.
const ERROR_CODES = {
  ABORTED: "archive was aborted",
  DIRECTORYDIRPATHREQUIRED:
    // fixed typo: "diretory" -> "directory"
    "directory dirpath argument must be a non-empty string value",
  DIRECTORYFUNCTIONINVALIDDATA:
    "invalid data returned by directory custom data function",
  ENTRYNAMEREQUIRED: "entry name must be a non-empty string value",
  FILEFILEPATHREQUIRED:
    "file filepath argument must be a non-empty string value",
  FINALIZING: "archive already finalizing",
  QUEUECLOSED: "queue closed",
  NOENDMETHOD: "no suitable finalize/end method defined by module",
  DIRECTORYNOTSUPPORTED: "support for directory entries not defined by module",
  FORMATSET: "archive format already set",
  INPUTSTEAMBUFFERREQUIRED:
    "input source must be valid Stream or Buffer instance",
  MODULESET: "module already set",
  SYMLINKNOTSUPPORTED: "support for symlink entries not defined by module",
  SYMLINKFILEPATHREQUIRED:
    "symlink filepath argument must be a non-empty string value",
  SYMLINKTARGETREQUIRED:
    "symlink target argument must be a non-empty string value",
  ENTRYNOTSUPPORTED: "entry not supported",
};

/**
 * Error subclass carrying an archiver error code and optional context data.
 *
 * @constructor
 * @param {String} code One of the ERROR_CODES keys; unknown codes are used
 * verbatim as the message.
 * @param {Object} [data] Additional context for reporting or debugging.
 */
function ArchiverError(code, data) {
  Error.captureStackTrace(this, this.constructor);
  //this.name = this.constructor.name;
  this.message = ERROR_CODES[code] || code;
  this.code = code;
  this.data = data;
}
util.inherits(ArchiverError, Error);

export { ArchiverError };
import { Transform } from "readable-stream";
import crc32 from "buffer-crc32";
import { collectStream } from "../utils.js";

/**
 * JSON Format Plugin
 *
 * Collects entry metadata (size, crc32) and writes it out as a single JSON
 * array on finalize. Useful for testing and introspection.
 *
 * @module plugins/json
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */
export default class Json extends Transform {
  /**
   * @constructor
   * @param {(JsonOptions|TransformOptions)} options
   */
  constructor(options) {
    super({ ...options });
    this.files = [];
  }
  /**
   * Pass-through transform; entry payloads are not emitted, only metadata.
   *
   * @private
   * @param {Buffer} chunk
   * @param {String} encoding
   * @param {Function} callback
   * @return void
   */
  _transform(chunk, encoding, callback) {
    callback(null, chunk);
  }
  /**
   * Serializes the collected entry metadata and writes it to the stream.
   *
   * @private
   * @return void
   */
  _writeStringified() {
    this.write(JSON.stringify(this.files));
  }
  /**
   * Records an entry's metadata (size and crc32 of its content).
   *
   * @param {(Buffer|Stream)} source
   * @param {EntryData} data
   * @param {Function} callback
   * @return void
   */
  append(source, data, callback) {
    data.crc32 = 0;
    const onend = (err, sourceBuffer) => {
      if (err) {
        callback(err);
        return;
      }
      data.size = sourceBuffer.length || 0;
      data.crc32 = crc32.unsigned(sourceBuffer);
      this.files.push(data);
      callback(null, data);
    };
    if (data.sourceType === "buffer") {
      onend(null, source);
    } else if (data.sourceType === "stream") {
      collectStream(source, onend);
    }
  }
  /**
   * Writes the JSON metadata and ends the stream.
   *
   * @return void
   */
  finalize() {
    this._writeStringified();
    this.end();
  }
}
import zlib from "zlib";
import engine from "tar-stream";
import { collectStream } from "../utils.js";

/**
 * TAR Format Plugin
 *
 * Wraps tar-stream's pack engine, optionally piping output through gzip.
 *
 * @module plugins/tar
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */
export default class Tar {
  /**
   * @constructor
   * @param {TarOptions} options
   */
  constructor(options) {
    options = this.options = {
      gzip: false,
      ...options,
    };
    if (typeof options.gzipOptions !== "object") {
      options.gzipOptions = {};
    }
    this.engine = engine.pack(options);
    this.compressor = false;
    if (options.gzip) {
      this.compressor = zlib.createGzip(options.gzipOptions);
      this.compressor.on("error", this._onCompressorError.bind(this));
    }
  }
  /**
   * Re-emits gzip errors on the tar engine so they bubble to the archiver.
   *
   * @private
   * @param {Error} err
   * @return void
   */
  _onCompressorError(err) {
    this.engine.emit("error", err);
  }
  /**
   * Appends an entry. Streams with known size are piped straight into the
   * engine; otherwise the source is buffered first (tar headers need a size).
   *
   * @param {(Buffer|Stream)} source
   * @param {TarEntryData} data
   * @param {Function} callback
   * @return void
   */
  append(source, data, callback) {
    data.mtime = data.date;
    const writeBuffered = (err, sourceBuffer) => {
      if (err) {
        callback(err);
        return;
      }
      this.engine.entry(data, sourceBuffer, (entryErr) => {
        callback(entryErr, data);
      });
    };
    if (data.sourceType === "buffer") {
      writeBuffered(null, source);
    } else if (data.sourceType === "stream" && data.stats) {
      data.size = data.stats.size;
      const entry = this.engine.entry(data, (err) => {
        callback(err, data);
      });
      source.pipe(entry);
    } else if (data.sourceType === "stream") {
      collectStream(source, writeBuffered);
    }
  }
  /**
   * Finalizes the tar engine.
   *
   * @return void
   */
  finalize() {
    this.engine.finalize();
  }
  /**
   * Delegates event registration to the engine.
   *
   * @return this.engine
   */
  on(...args) {
    return this.engine.on(...args);
  }
  /**
   * Pipes the engine (through the gzip compressor when enabled) to destination.
   *
   * @param {String} destination
   * @param {Object} options
   * @return this.engine
   */
  pipe(destination, options) {
    if (this.compressor) {
      return this.engine.pipe(this.compressor).pipe(destination, options);
    }
    return this.engine.pipe(destination, options);
  }
  /**
   * Unpipes from the compressor when enabled, otherwise from the engine.
   *
   * @return this.engine
   */
  unpipe(...args) {
    if (this.compressor) {
      return this.compressor.unpipe(...args);
    }
    return this.engine.unpipe(...args);
  }
}
import engine from "zip-stream";

/**
 * ZIP Format Plugin
 *
 * Thin delegation wrapper around zip-stream.
 *
 * @module plugins/zip
 * @license [MIT]{@link https://github.com/archiverjs/node-archiver/blob/master/LICENSE}
 * @copyright (c) 2012-2014 Chris Talkington, contributors.
 */
export default class Zip {
  /**
   * @constructor
   * @param {ZipOptions} [options]
   * @param {String} [options.comment] Sets the zip archive comment.
   * @param {Boolean} [options.forceLocalTime=false] Forces the archive to contain local file times instead of UTC.
   * @param {Boolean} [options.forceZip64=false] Forces the archive to contain ZIP64 headers.
   * @param {Boolean} [options.namePrependSlash=false] Prepends a forward slash to archive file paths.
   * @param {Boolean} [options.store=false] Sets the compression method to STORE.
   * @param {Object} [options.zlib] Passed to [zlib]{@link https://nodejs.org/api/zlib.html#zlib_class_options}
   */
  constructor(options) {
    options = this.options = {
      comment: "",
      forceUTC: false,
      namePrependSlash: false,
      store: false,
      ...options,
    };
    this.engine = new engine(options);
  }
  /**
   * Appends an entry to the zip engine.
   *
   * @param {(Buffer|Stream)} source
   * @param {ZipEntryData} data
   * @param {String} data.name Sets the entry name including internal path.
   * @param {(String|Date)} [data.date=NOW()] Sets the entry date.
   * @param {Number} [data.mode=D:0755/F:0644] Sets the entry permissions.
   * @param {String} [data.prefix] Sets a path prefix for the entry name. Useful
   * when working with methods like `directory` or `glob`.
   * @param {fs.Stats} [data.stats] Sets the fs stat data for this entry allowing
   * for reduction of fs stat calls when stat data is already known.
   * @param {Boolean} [data.store=ZipOptions.store] Sets the compression method to STORE.
   * @param {Function} callback
   * @return void
   */
  append(source, data, callback) {
    this.engine.entry(source, data, callback);
  }
  /**
   * Finalizes the zip engine.
   *
   * @return void
   */
  finalize() {
    this.engine.finalize();
  }
  /**
   * Delegates event registration to the engine.
   *
   * @return this.engine
   */
  on(...args) {
    return this.engine.on(...args);
  }
  /**
   * Delegates piping to the engine.
   *
   * @return this.engine
   */
  pipe(...args) {
    return this.engine.pipe(...args);
  }
  /**
   * Delegates unpiping to the engine.
   *
   * @return this.engine
   */
  unpipe(...args) {
    return this.engine.unpipe(...args);
  }
}
49 | // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, 50 | // since it will only be processed in a (distant) future iteration of the event loop, and will lose 51 | // data if already flowing now. 52 | return source.pipe(new PassThrough()); 53 | } 54 | 55 | return source; 56 | } 57 | 58 | export function sanitizePath(filepath) { 59 | return normalizePath(filepath, false) 60 | .replace(/^\w+:/, "") 61 | .replace(/^(\.\.\/|\/)+/, ""); 62 | } 63 | 64 | export function trailingSlashIt(str) { 65 | return str.slice(-1) !== "/" ? str + "/" : str; 66 | } 67 | -------------------------------------------------------------------------------- /netlify.toml: -------------------------------------------------------------------------------- 1 | [Settings] 2 | [build] 3 | command = "npm run build" 4 | base = "website" 5 | publish = "website/build" 6 | 7 | [[redirects]] 8 | from = "/docs/*" 9 | to = "/:splat" 10 | 11 | [[redirects]] 12 | from = "/zip-stream/*" 13 | to = "/zipstream:splat" 14 | status = 301 15 | 16 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "archiver", 3 | "version": "7.0.1", 4 | "description": "a streaming interface for archive generation", 5 | "homepage": "https://github.com/archiverjs/node-archiver", 6 | "author": { 7 | "name": "Chris Talkington", 8 | "url": "http://christalkington.com/" 9 | }, 10 | "repository": { 11 | "type": "git", 12 | "url": "https://github.com/archiverjs/node-archiver.git" 13 | }, 14 | "bugs": { 15 | "url": "https://github.com/archiverjs/node-archiver/issues" 16 | }, 17 | "license": "MIT", 18 | "type": "module", 19 | "exports": "./index.js", 20 | "files": [ 21 | "index.js", 22 | "lib" 23 | ], 24 | "engines": { 25 | "node": ">=18" 26 | }, 27 | "scripts": { 28 | "test": "mocha --reporter dot", 29 | "bench": "node benchmark/simple/pack-zip.js" 
30 | }, 31 | "dependencies": { 32 | "async": "^3.2.4", 33 | "buffer-crc32": "^1.0.0", 34 | "is-stream": "^4.0.0", 35 | "lazystream": "^1.0.0", 36 | "normalize-path": "^3.0.0", 37 | "readable-stream": "^4.0.0", 38 | "readdir-glob": "^1.1.3", 39 | "tar-stream": "^3.0.0", 40 | "zip-stream": "^7.0.2" 41 | }, 42 | "devDependencies": { 43 | "archiver-jsdoc-theme": "1.1.3", 44 | "chai": "4.5.0", 45 | "jsdoc": "4.0.4", 46 | "mkdirp": "3.0.1", 47 | "mocha": "10.8.2", 48 | "prettier": "3.3.3", 49 | "rimraf": "5.0.10", 50 | "stream-bench": "0.1.2", 51 | "tar": "6.2.1", 52 | "yauzl": "3.1.3" 53 | }, 54 | "keywords": [ 55 | "archive", 56 | "archiver", 57 | "stream", 58 | "zip", 59 | "tar" 60 | ], 61 | "publishConfig": { 62 | "registry": "https://registry.npmjs.org/" 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": [ 4 | "config:base", 5 | "schedule:daily", 6 | ":maintainLockFilesMonthly", 7 | ":pinDevDependencies", 8 | ":semanticCommitsDisabled" 9 | ], 10 | "timezone": "America/Chicago", 11 | "labels": ["dependencies"], 12 | "packageRules": [ 13 | { 14 | "matchManagers": ["npm", "nvm"], 15 | "separateMultipleMajor": true 16 | }, 17 | { 18 | "matchManagers": ["npm", "nvm"], 19 | "matchUpdateTypes": ["minor", "patch"], 20 | "automerge": true 21 | }, 22 | { 23 | "matchManagers": ["github-actions"], 24 | "addLabels": ["ci", "github-actions"], 25 | "rangeStrategy": "pin" 26 | }, 27 | { 28 | "matchManagers": ["github-actions"], 29 | "matchUpdateTypes": ["minor", "patch"], 30 | "automerge": true 31 | } 32 | ], 33 | "constraints": { 34 | "npm": "<9" 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /test/archiver.js: -------------------------------------------------------------------------------- 1 | 
import { 2 | WriteStream, 3 | chmodSync, 4 | createReadStream, 5 | createWriteStream, 6 | statSync, 7 | symlinkSync, 8 | unlinkSync, 9 | writeFileSync, 10 | } from "fs"; 11 | import { PassThrough } from "readable-stream"; 12 | import { Readable } from "readable-stream"; 13 | import { assert } from "chai"; 14 | import { mkdirp } from "mkdirp"; 15 | import { 16 | binaryBuffer, 17 | readJSON, 18 | UnBufferedStream, 19 | WriteHashStream, 20 | } from "./helpers/index.js"; 21 | import { JsonArchive } from "../index.js"; 22 | 23 | var testBuffer = binaryBuffer(1024 * 16); 24 | var testDate = new Date("Jan 03 2013 14:26:38 GMT"); 25 | var testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); 26 | var win32 = process.platform === "win32"; 27 | 28 | describe("archiver", function () { 29 | before(function () { 30 | mkdirp.sync("tmp"); 31 | if (!win32) { 32 | chmodSync("test/fixtures/executable.sh", 511); // 0777 33 | chmodSync("test/fixtures/directory/subdir/", 493); // 0755 34 | symlinkSync( 35 | "test/fixtures/directory/level0.txt", 36 | "test/fixtures/directory/subdir/level0link.txt", 37 | ); 38 | symlinkSync( 39 | "test/fixtures/directory/subdir/subsub/", 40 | "test/fixtures/directory/subdir/subsublink", 41 | ); 42 | } else { 43 | writeFileSync( 44 | "test/fixtures/directory/subdir/level0link.txt", 45 | "../level0.txt", 46 | ); 47 | writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); 48 | } 49 | }); 50 | after(function () { 51 | unlinkSync("test/fixtures/directory/subdir/level0link.txt"); 52 | unlinkSync("test/fixtures/directory/subdir/subsublink"); 53 | }); 54 | describe("core", function () { 55 | var archive = new JsonArchive(); 56 | describe("#_normalizeEntryData", function () { 57 | it("should support prefix of the entry name", function () { 58 | var prefix1 = archive._normalizeEntryData({ 59 | name: "entry.txt", 60 | prefix: "prefix/", 61 | }); 62 | assert.propertyVal(prefix1, "name", "prefix/entry.txt"); 63 | var prefix2 = archive._normalizeEntryData({ 
64 | name: "entry.txt", 65 | prefix: "", 66 | }); 67 | assert.propertyVal(prefix2, "name", "entry.txt"); 68 | }); 69 | it("should support special bits on unix", function () { 70 | if (!win32) { 71 | var mode = archive._normalizeEntryData({ 72 | name: "executable.sh", 73 | mode: statSync("test/fixtures/executable.sh").mode, 74 | }); 75 | assert.propertyVal(mode, "mode", 511); 76 | } 77 | }); 78 | }); 79 | }); 80 | describe("api", function () { 81 | describe("#abort", function () { 82 | let archive; 83 | before(function (done) { 84 | archive = new JsonArchive(); 85 | const testStream = new WriteStream("tmp/abort.json"); 86 | testStream.on("close", function () { 87 | done(); 88 | }); 89 | archive.pipe(testStream); 90 | archive 91 | .append(testBuffer, { name: "buffer.txt", date: testDate }) 92 | .append(createReadStream("test/fixtures/test.txt"), { 93 | name: "stream.txt", 94 | date: testDate, 95 | }) 96 | .file("test/fixtures/test.txt") 97 | .abort(); 98 | }); 99 | it("should have a state of aborted", function () { 100 | assert.property(archive, "_state"); 101 | assert.propertyVal(archive._state, "aborted", true); 102 | }); 103 | }); 104 | describe("#append", function () { 105 | var actual; 106 | var archive; 107 | var entries = {}; 108 | before(function (done) { 109 | archive = new JsonArchive(); 110 | var testStream = new WriteStream("tmp/append.json"); 111 | testStream.on("close", function () { 112 | actual = readJSON("tmp/append.json"); 113 | actual.forEach(function (entry) { 114 | entries[entry.name] = entry; 115 | }); 116 | done(); 117 | }); 118 | archive.pipe(testStream); 119 | archive 120 | .append(testBuffer, { name: "buffer.txt", date: testDate }) 121 | .append(createReadStream("test/fixtures/test.txt"), { 122 | name: "stream.txt", 123 | date: testDate, 124 | }) 125 | .append(Readable.from(["test"]), { 126 | name: "stream-like.txt", 127 | date: testDate, 128 | }) 129 | .append(null, { name: "directory/", date: testDate }) 130 | .finalize(); 131 | }); 132 | 
it("should append multiple entries", function () { 133 | assert.isArray(actual); 134 | assert.lengthOf(actual, 4); 135 | }); 136 | it("should append buffer", function () { 137 | assert.property(entries, "buffer.txt"); 138 | assert.propertyVal(entries["buffer.txt"], "name", "buffer.txt"); 139 | assert.propertyVal(entries["buffer.txt"], "type", "file"); 140 | assert.propertyVal( 141 | entries["buffer.txt"], 142 | "date", 143 | "2013-01-03T14:26:38.000Z", 144 | ); 145 | assert.propertyVal(entries["buffer.txt"], "mode", 420); 146 | assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); 147 | assert.propertyVal(entries["buffer.txt"], "size", 16384); 148 | }); 149 | it("should append stream", function () { 150 | assert.property(entries, "stream.txt"); 151 | assert.propertyVal(entries["stream.txt"], "name", "stream.txt"); 152 | assert.propertyVal(entries["stream.txt"], "type", "file"); 153 | assert.propertyVal( 154 | entries["stream.txt"], 155 | "date", 156 | "2013-01-03T14:26:38.000Z", 157 | ); 158 | assert.propertyVal(entries["stream.txt"], "mode", 420); 159 | assert.propertyVal(entries["stream.txt"], "crc32", 585446183); 160 | assert.propertyVal(entries["stream.txt"], "size", 19); 161 | }); 162 | it("should append stream-like source", function () { 163 | assert.property(entries, "stream-like.txt"); 164 | assert.propertyVal( 165 | entries["stream-like.txt"], 166 | "name", 167 | "stream-like.txt", 168 | ); 169 | assert.propertyVal(entries["stream-like.txt"], "type", "file"); 170 | assert.propertyVal( 171 | entries["stream-like.txt"], 172 | "date", 173 | "2013-01-03T14:26:38.000Z", 174 | ); 175 | assert.propertyVal(entries["stream-like.txt"], "mode", 420); 176 | assert.propertyVal(entries["stream-like.txt"], "crc32", 3632233996); 177 | assert.propertyVal(entries["stream-like.txt"], "size", 4); 178 | }); 179 | it("should append directory", function () { 180 | assert.property(entries, "directory/"); 181 | assert.propertyVal(entries["directory/"], "name", 
"directory/"); 182 | assert.propertyVal(entries["directory/"], "type", "directory"); 183 | assert.propertyVal( 184 | entries["directory/"], 185 | "date", 186 | "2013-01-03T14:26:38.000Z", 187 | ); 188 | assert.propertyVal(entries["directory/"], "mode", 493); 189 | assert.propertyVal(entries["directory/"], "crc32", 0); 190 | assert.propertyVal(entries["directory/"], "size", 0); 191 | }); 192 | }); 193 | describe("#directory", function () { 194 | var actual; 195 | var archive; 196 | var entries = {}; 197 | before(function (done) { 198 | archive = new JsonArchive(); 199 | var testStream = new WriteStream("tmp/directory.json"); 200 | testStream.on("close", function () { 201 | actual = readJSON("tmp/directory.json"); 202 | actual.forEach(function (entry) { 203 | entries[entry.name] = entry; 204 | }); 205 | done(); 206 | }); 207 | archive.pipe(testStream); 208 | archive 209 | .directory("test/fixtures/directory", null, { date: testDate }) 210 | .directory("test/fixtures/directory", "Win\\DS\\", { date: testDate }) 211 | .directory("test/fixtures/directory", "directory", function (data) { 212 | if (data.name === "ignore.txt") { 213 | return false; 214 | } 215 | data.funcProp = true; 216 | return data; 217 | }) 218 | .finalize(); 219 | }); 220 | it("should append multiple entries", function () { 221 | assert.isArray(actual); 222 | assert.property(entries, "test/fixtures/directory/level0.txt"); 223 | assert.property(entries, "test/fixtures/directory/subdir/"); 224 | assert.property(entries, "test/fixtures/directory/subdir/level1.txt"); 225 | assert.property(entries, "test/fixtures/directory/subdir/subsub/"); 226 | assert.property( 227 | entries, 228 | "test/fixtures/directory/subdir/subsub/level2.txt", 229 | ); 230 | assert.propertyVal( 231 | entries["test/fixtures/directory/level0.txt"], 232 | "date", 233 | "2013-01-03T14:26:38.000Z", 234 | ); 235 | assert.propertyVal( 236 | entries["test/fixtures/directory/subdir/"], 237 | "date", 238 | "2013-01-03T14:26:38.000Z", 239 | 
); 240 | assert.property(entries, "directory/level0.txt"); 241 | assert.property(entries, "directory/subdir/"); 242 | assert.property(entries, "directory/subdir/level1.txt"); 243 | assert.property(entries, "directory/subdir/subsub/"); 244 | assert.property(entries, "directory/subdir/subsub/level2.txt"); 245 | }); 246 | it("should support setting data properties via function", function () { 247 | assert.property(entries, "directory/level0.txt"); 248 | assert.propertyVal(entries["directory/level0.txt"], "funcProp", true); 249 | }); 250 | it("should support ignoring matches via function", function () { 251 | assert.notProperty(entries, "directory/ignore.txt"); 252 | }); 253 | it("should find dot files", function () { 254 | assert.property(entries, "directory/.dotfile"); 255 | }); 256 | it("should retain symlinks", function () { 257 | assert.property( 258 | entries, 259 | "test/fixtures/directory/subdir/level0link.txt", 260 | ); 261 | assert.property(entries, "directory/subdir/level0link.txt"); 262 | }); 263 | it("should retain directory symlink", function () { 264 | assert.property(entries, "test/fixtures/directory/subdir/subsublink"); 265 | assert.property(entries, "directory/subdir/subsublink"); 266 | }); 267 | it("should handle windows path separators in prefix", function () { 268 | assert.property(entries, "Win/DS/level0.txt"); 269 | }); 270 | }); 271 | describe("#file", function () { 272 | var actual; 273 | var archive; 274 | var entries = {}; 275 | before(function (done) { 276 | archive = new JsonArchive(); 277 | var testStream = new WriteStream("tmp/file.json"); 278 | testStream.on("close", function () { 279 | actual = readJSON("tmp/file.json"); 280 | actual.forEach(function (entry) { 281 | entries[entry.name] = entry; 282 | }); 283 | done(); 284 | }); 285 | archive.pipe(testStream); 286 | archive 287 | .file("test/fixtures/test.txt", { name: "test.txt", date: testDate }) 288 | .file("test/fixtures/test.txt") 289 | .file("test/fixtures/executable.sh", { mode: 
win32 ? 511 : null }) // 0777 290 | .finalize(); 291 | }); 292 | it("should append multiple entries", function () { 293 | assert.isArray(actual); 294 | assert.lengthOf(actual, 3); 295 | }); 296 | it("should append filepath", function () { 297 | assert.property(entries, "test.txt"); 298 | assert.propertyVal(entries["test.txt"], "name", "test.txt"); 299 | assert.propertyVal( 300 | entries["test.txt"], 301 | "date", 302 | "2013-01-03T14:26:38.000Z", 303 | ); 304 | assert.propertyVal(entries["test.txt"], "crc32", 585446183); 305 | assert.propertyVal(entries["test.txt"], "size", 19); 306 | }); 307 | it("should fallback to filepath when no name is set", function () { 308 | assert.property(entries, "test/fixtures/test.txt"); 309 | }); 310 | it("should fallback to file stats when applicable", function () { 311 | assert.property(entries, "test/fixtures/executable.sh"); 312 | assert.propertyVal( 313 | entries["test/fixtures/executable.sh"], 314 | "name", 315 | "test/fixtures/executable.sh", 316 | ); 317 | assert.propertyVal(entries["test/fixtures/executable.sh"], "mode", 511); 318 | assert.propertyVal( 319 | entries["test/fixtures/executable.sh"], 320 | "crc32", 321 | 3957348457, 322 | ); 323 | assert.propertyVal(entries["test/fixtures/executable.sh"], "size", 11); 324 | }); 325 | }); 326 | describe("#glob", function () { 327 | var actual; 328 | var archive; 329 | var entries = {}; 330 | before(function (done) { 331 | archive = new JsonArchive(); 332 | var testStream = new WriteStream("tmp/glob.json"); 333 | testStream.on("close", function () { 334 | actual = readJSON("tmp/glob.json"); 335 | actual.forEach(function (entry) { 336 | entries[entry.name] = entry; 337 | }); 338 | done(); 339 | }); 340 | archive.pipe(testStream); 341 | archive 342 | .glob("test/fixtures/test.txt", null) 343 | .glob("test/fixtures/empty.txt", null) 344 | .glob("test/fixtures/executable.sh", null) 345 | .glob("test/fixtures/directory/**/*", { 346 | ignore: "test/fixtures/directory/subdir/**/*", 347 
| nodir: true, 348 | }) 349 | .glob("**/*", { cwd: "test/fixtures/directory/subdir/" }) 350 | .finalize(); 351 | }); 352 | it("should append multiple entries", function () { 353 | assert.isArray(actual); 354 | assert.property(entries, "test/fixtures/test.txt"); 355 | assert.property(entries, "test/fixtures/executable.sh"); 356 | assert.property(entries, "test/fixtures/empty.txt"); 357 | assert.property(entries, "test/fixtures/directory/level0.txt"); 358 | assert.property(entries, "level1.txt"); 359 | assert.property(entries, "subsub/level2.txt"); 360 | }); 361 | }); 362 | describe("#promise", function () { 363 | var archive; 364 | it("should use a promise", function (done) { 365 | archive = new JsonArchive(); 366 | var testStream = new WriteStream("tmp/promise.json"); 367 | archive.pipe(testStream); 368 | archive 369 | .append(testBuffer, { name: "buffer.txt", date: testDate }) 370 | .append(createReadStream("test/fixtures/test.txt"), { 371 | name: "stream.txt", 372 | date: testDate, 373 | }) 374 | .append(null, { name: "directory/", date: testDate }) 375 | .finalize() 376 | .then(function () { 377 | done(); 378 | }); 379 | }); 380 | }); 381 | describe("#errors", function () { 382 | var archive; 383 | it("should allow continue on stat failing", function (done) { 384 | archive = new JsonArchive(); 385 | var testStream = new WriteStream("tmp/errors-stat.json"); 386 | testStream.on("close", function () { 387 | done(); 388 | }); 389 | archive.pipe(testStream); 390 | archive 391 | .file("test/fixtures/test.txt") 392 | .file("test/fixtures/test-missing.txt") 393 | .file("test/fixtures/empty.txt") 394 | .finalize(); 395 | }); 396 | it("should allow continue on with several stat failings", function (done) { 397 | archive = new JsonArchive(); 398 | var testStream = new WriteStream("tmp/errors-stat.json"); 399 | testStream.on("close", function () { 400 | done(); 401 | }); 402 | archive.pipe(testStream); 403 | archive.file("test/fixtures/test.txt"); 404 | for (var i = 1; i <= 
20; i++) 405 | archive.file("test/fixtures/test-missing.txt"); 406 | archive.finalize(); 407 | }); 408 | }); 409 | }); 410 | describe("#symlink", function () { 411 | var actual; 412 | var archive; 413 | var entries = {}; 414 | before(function (done) { 415 | archive = new JsonArchive(); 416 | var testStream = new WriteStream("tmp/symlink.json"); 417 | testStream.on("close", function () { 418 | actual = readJSON("tmp/symlink.json"); 419 | actual.forEach(function (entry) { 420 | entries[entry.name] = entry; 421 | }); 422 | done(); 423 | }); 424 | archive.pipe(testStream); 425 | archive 426 | .append("file-a", { name: "file-a" }) 427 | .symlink("directory-a/symlink-to-file-a", "../file-a") 428 | .symlink( 429 | "directory-b/directory-c/symlink-to-directory-a", 430 | "../../directory-a", 431 | 493, 432 | ) 433 | .finalize(); 434 | }); 435 | it("should append multiple entries", () => { 436 | assert.isArray(actual); 437 | assert.property(entries, "file-a"); 438 | assert.property(entries, "directory-a/symlink-to-file-a"); 439 | assert.property( 440 | entries, 441 | "directory-b/directory-c/symlink-to-directory-a", 442 | ); 443 | assert.propertyVal( 444 | entries["directory-b/directory-c/symlink-to-directory-a"], 445 | "mode", 446 | 493, 447 | ); 448 | }); 449 | }); 450 | }); 451 | -------------------------------------------------------------------------------- /test/fixtures/directory/.dotfile: -------------------------------------------------------------------------------- 1 | some dot file contents -------------------------------------------------------------------------------- /test/fixtures/directory/ignore.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/archiverjs/node-archiver/c647ded5e9210cd195c1091a26223d7dc9086b49/test/fixtures/directory/ignore.txt -------------------------------------------------------------------------------- /test/fixtures/directory/level0.txt: 
-------------------------------------------------------------------------------- 1 | level0 -------------------------------------------------------------------------------- /test/fixtures/directory/subdir/level1.txt: -------------------------------------------------------------------------------- 1 | level1 -------------------------------------------------------------------------------- /test/fixtures/directory/subdir/subsub/level2.txt: -------------------------------------------------------------------------------- 1 | level2 -------------------------------------------------------------------------------- /test/fixtures/empty.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/archiverjs/node-archiver/c647ded5e9210cd195c1091a26223d7dc9086b49/test/fixtures/empty.txt -------------------------------------------------------------------------------- /test/fixtures/executable.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -------------------------------------------------------------------------------- /test/fixtures/test.txt: -------------------------------------------------------------------------------- 1 | this is a text file -------------------------------------------------------------------------------- /test/helpers/index.js: -------------------------------------------------------------------------------- 1 | import crypto from "crypto"; 2 | import { readFileSync, WriteStream } from "fs"; 3 | import { inherits } from "util"; 4 | import { Stream } from "stream"; 5 | import { Readable, Writable } from "readable-stream"; 6 | 7 | export function adjustDateByOffset(d, offset) { 8 | d = d instanceof Date ? 
d : new Date(); 9 | if (offset >= 1) { 10 | d.setMinutes(d.getMinutes() - offset); 11 | } else { 12 | d.setMinutes(d.getMinutes() + Math.abs(offset)); 13 | } 14 | return d; 15 | } 16 | 17 | export function binaryBuffer(n) { 18 | var buffer = Buffer.alloc(n); 19 | for (var i = 0; i < n; i++) { 20 | buffer.writeUInt8(i & 255, i); 21 | } 22 | return buffer; 23 | } 24 | 25 | function BinaryStream(size, options) { 26 | Readable.call(this, options); 27 | var buf = Buffer.alloc(size); 28 | for (var i = 0; i < size; i++) { 29 | buf.writeUInt8(i & 255, i); 30 | } 31 | this.push(buf); 32 | this.push(null); 33 | } 34 | inherits(BinaryStream, Readable); 35 | BinaryStream.prototype._read = function (size) {}; 36 | function DeadEndStream(options) { 37 | Writable.call(this, options); 38 | } 39 | inherits(DeadEndStream, Writable); 40 | DeadEndStream.prototype._write = function (chuck, encoding, callback) { 41 | callback(); 42 | }; 43 | 44 | export function readJSON(filepath) { 45 | var contents; 46 | try { 47 | contents = readFileSync(String(filepath)); 48 | contents = JSON.parse(contents); 49 | } catch (e) { 50 | contents = null; 51 | } 52 | return contents; 53 | } 54 | 55 | function UnBufferedStream() { 56 | this.readable = true; 57 | } 58 | inherits(UnBufferedStream, Stream); 59 | function WriteHashStream(path, options) { 60 | WriteStream.call(this, path, options); 61 | this.hash = crypto.createHash("sha1"); 62 | this.digest = null; 63 | this.on("close", function () { 64 | this.digest = this.hash.digest("hex"); 65 | }); 66 | } 67 | inherits(WriteHashStream, WriteStream); 68 | WriteHashStream.prototype.write = function (chunk) { 69 | if (chunk) { 70 | this.hash.update(chunk); 71 | } 72 | return WriteStream.prototype.write.call(this, chunk); 73 | }; 74 | 75 | export { BinaryStream }; 76 | export { DeadEndStream }; 77 | export { UnBufferedStream }; 78 | export { WriteHashStream }; 79 | -------------------------------------------------------------------------------- 
/test/plugins.js: -------------------------------------------------------------------------------- 1 | import { 2 | chmodSync, 3 | createReadStream, 4 | symlinkSync, 5 | unlinkSync, 6 | writeFileSync, 7 | WriteStream, 8 | } from "fs"; 9 | import { assert } from "chai"; 10 | import { mkdirp } from "mkdirp"; 11 | import tar from "tar"; 12 | import yauzl from "yauzl"; 13 | import { TarArchive, ZipArchive } from "../index.js"; 14 | import { binaryBuffer } from "./helpers/index.js"; 15 | 16 | const testBuffer = binaryBuffer(1024 * 16); 17 | const testDate = new Date("Jan 03 2013 14:26:38 GMT"); 18 | const testDate2 = new Date("Feb 10 2013 10:24:42 GMT"); 19 | const win32 = process.platform === "win32"; 20 | 21 | describe("plugins", function () { 22 | before(function () { 23 | mkdirp.sync("tmp"); 24 | if (!win32) { 25 | chmodSync("test/fixtures/executable.sh", 511); // 0777 26 | chmodSync("test/fixtures/directory/subdir/", 493); // 0755 27 | symlinkSync( 28 | "../level0.txt", 29 | "test/fixtures/directory/subdir/level0link.txt", 30 | ); 31 | symlinkSync("subsub/", "test/fixtures/directory/subdir/subsublink"); 32 | } else { 33 | writeFileSync( 34 | "test/fixtures/directory/subdir/level0link.txt", 35 | "../level0.txt", 36 | ); 37 | writeFileSync("test/fixtures/directory/subdir/subsublink", "subsub"); 38 | } 39 | }); 40 | after(function () { 41 | unlinkSync("test/fixtures/directory/subdir/level0link.txt"); 42 | unlinkSync("test/fixtures/directory/subdir/subsublink"); 43 | }); 44 | describe("tar", function () { 45 | var actual = []; 46 | var archive; 47 | var entries = {}; 48 | before(function (done) { 49 | archive = new TarArchive(); 50 | var testStream = new tar.Parse(); 51 | testStream.on("entry", function (entry) { 52 | actual.push(entry.path); 53 | entries[entry.path] = { 54 | type: entry.type, 55 | path: entry.path, 56 | mode: entry.mode, 57 | uid: entry.uid, 58 | gid: entry.gid, 59 | uname: entry.uname, 60 | gname: entry.gname, 61 | size: entry.size, 62 | mtime: 
entry.mtime, 63 | atime: entry.atime, 64 | ctime: entry.ctime, 65 | linkpath: entry.linkpath, 66 | }; 67 | entry.resume(); 68 | }); 69 | testStream.on("end", function () { 70 | done(); 71 | }); 72 | archive.pipe(testStream); 73 | archive 74 | .append(testBuffer, { name: "buffer.txt", date: testDate }) 75 | .append(createReadStream("test/fixtures/test.txt"), { 76 | name: "stream.txt", 77 | date: testDate, 78 | }) 79 | .append(null, { name: "folder/", date: testDate }) 80 | .directory("test/fixtures/directory", "directory") 81 | .symlink("manual-link.txt", "manual-link-target.txt") 82 | .finalize(); 83 | }); 84 | it("should append multiple entries", function () { 85 | assert.isArray(actual); 86 | assert.isAbove(actual.length, 10); 87 | }); 88 | it("should append buffer", function () { 89 | assert.property(entries, "buffer.txt"); 90 | assert.propertyVal(entries["buffer.txt"], "path", "buffer.txt"); 91 | assert.propertyVal(entries["buffer.txt"], "type", "File"); 92 | assert.propertyVal(entries["buffer.txt"], "mode", 420); 93 | assert.propertyVal(entries["buffer.txt"], "size", 16384); 94 | }); 95 | it("should append stream", function () { 96 | assert.property(entries, "stream.txt"); 97 | assert.propertyVal(entries["stream.txt"], "path", "stream.txt"); 98 | assert.propertyVal(entries["stream.txt"], "type", "File"); 99 | assert.propertyVal(entries["stream.txt"], "mode", 420); 100 | assert.propertyVal(entries["stream.txt"], "size", 19); 101 | }); 102 | it("should append folder", function () { 103 | assert.property(entries, "folder/"); 104 | assert.propertyVal(entries["folder/"], "path", "folder/"); 105 | assert.propertyVal(entries["folder/"], "type", "Directory"); 106 | assert.propertyVal(entries["folder/"], "mode", 493); 107 | assert.propertyVal(entries["folder/"], "size", 0); 108 | }); 109 | it("should append manual symlink", function () { 110 | assert.property(entries, "manual-link.txt"); 111 | assert.propertyVal(entries["manual-link.txt"], "type", "SymbolicLink"); 112 
| assert.propertyVal( 113 | entries["manual-link.txt"], 114 | "linkpath", 115 | "manual-link-target.txt", 116 | ); 117 | }); 118 | it("should append via directory", function () { 119 | assert.property(entries, "directory/subdir/level1.txt"); 120 | assert.property(entries, "directory/subdir/level0link.txt"); 121 | }); 122 | it("should retain symlinks via directory", function () { 123 | if (win32) { 124 | this.skip(); 125 | } 126 | assert.property(entries, "directory/subdir/level0link.txt"); 127 | assert.propertyVal( 128 | entries["directory/subdir/level0link.txt"], 129 | "type", 130 | "SymbolicLink", 131 | ); 132 | assert.propertyVal( 133 | entries["directory/subdir/level0link.txt"], 134 | "linkpath", 135 | "../level0.txt", 136 | ); 137 | assert.property(entries, "directory/subdir/subsublink"); 138 | assert.propertyVal( 139 | entries["directory/subdir/subsublink"], 140 | "type", 141 | "SymbolicLink", 142 | ); 143 | assert.propertyVal( 144 | entries["directory/subdir/subsublink"], 145 | "linkpath", 146 | "subsub", 147 | ); 148 | }); 149 | }); 150 | describe("zip", function () { 151 | var actual = []; 152 | var archive; 153 | var entries = {}; 154 | var zipComment = ""; 155 | before(function (done) { 156 | archive = new ZipArchive({ comment: "archive comment" }); 157 | var testStream = new WriteStream("tmp/plugin.zip"); 158 | testStream.on("close", function (entry) { 159 | yauzl.open("tmp/plugin.zip", function (err, zip) { 160 | zip.on("entry", function (entry) { 161 | actual.push(entry.fileName); 162 | entries[entry.fileName] = entry; 163 | }); 164 | zip.on("close", function () { 165 | done(); 166 | }); 167 | zipComment = zip.comment; 168 | }); 169 | }); 170 | archive.pipe(testStream); 171 | archive 172 | .append(testBuffer, { 173 | name: "buffer.txt", 174 | date: testDate, 175 | comment: "entry comment", 176 | }) 177 | .append(createReadStream("test/fixtures/test.txt"), { 178 | name: "stream.txt", 179 | date: testDate, 180 | }) 181 | 
.file("test/fixtures/executable.sh", { 182 | name: "executable.sh", 183 | mode: win32 ? 511 : null, // 0777 184 | }) 185 | .directory("test/fixtures/directory", "directory") 186 | .symlink("manual-link.txt", "manual-link-target.txt") 187 | .finalize(); 188 | }); 189 | it("should append multiple entries", function () { 190 | assert.isArray(actual); 191 | assert.isAbove(actual.length, 10); 192 | }); 193 | it("should append buffer", function () { 194 | assert.property(entries, "buffer.txt"); 195 | assert.propertyVal(entries["buffer.txt"], "uncompressedSize", 16384); 196 | assert.propertyVal(entries["buffer.txt"], "crc32", 3893830384); 197 | }); 198 | it("should append stream", function () { 199 | assert.property(entries, "stream.txt"); 200 | assert.propertyVal(entries["stream.txt"], "uncompressedSize", 19); 201 | assert.propertyVal(entries["stream.txt"], "crc32", 585446183); 202 | }); 203 | it("should append via file", function () { 204 | assert.property(entries, "executable.sh"); 205 | assert.propertyVal(entries["executable.sh"], "uncompressedSize", 11); 206 | assert.propertyVal(entries["executable.sh"], "crc32", 3957348457); 207 | }); 208 | it("should append via directory", function () { 209 | assert.property(entries, "directory/subdir/level1.txt"); 210 | assert.propertyVal( 211 | entries["directory/subdir/level1.txt"], 212 | "uncompressedSize", 213 | 6, 214 | ); 215 | assert.propertyVal( 216 | entries["directory/subdir/level1.txt"], 217 | "crc32", 218 | 133711013, 219 | ); 220 | }); 221 | it("should append manual symlink", function () { 222 | assert.property(entries, "manual-link.txt"); 223 | assert.propertyVal(entries["manual-link.txt"], "crc32", 1121667014); 224 | assert.propertyVal( 225 | entries["manual-link.txt"], 226 | "externalFileAttributes", 227 | 2684354592, 228 | ); 229 | }); 230 | it("should allow for custom unix mode", function () { 231 | assert.property(entries, "executable.sh"); 232 | assert.propertyVal( 233 | entries["executable.sh"], 234 | 
"externalFileAttributes", 235 | 2180972576, 236 | ); 237 | assert.equal( 238 | (entries["executable.sh"].externalFileAttributes >>> 16) & 0xfff, 239 | 511, 240 | ); 241 | assert.property(entries, "directory/subdir/"); 242 | assert.propertyVal( 243 | entries["directory/subdir/"], 244 | "externalFileAttributes", 245 | 1106051088, 246 | ); 247 | assert.equal( 248 | (entries["directory/subdir/"].externalFileAttributes >>> 16) & 0xfff, 249 | 493, 250 | ); 251 | }); 252 | it("should allow for entry comments", function () { 253 | assert.property(entries, "buffer.txt"); 254 | assert.propertyVal(entries["buffer.txt"], "fileComment", "entry comment"); 255 | }); 256 | it("should allow for archive comment", function () { 257 | assert.equal("archive comment", zipComment); 258 | }); 259 | }); 260 | }); 261 | -------------------------------------------------------------------------------- /website/.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | /node_modules 3 | 4 | # Production 5 | /build 6 | 7 | # Generated files 8 | .docusaurus 9 | .cache-loader 10 | 11 | # Misc 12 | .DS_Store 13 | .env.local 14 | .env.development.local 15 | .env.test.local 16 | .env.production.local 17 | 18 | npm-debug.log* 19 | yarn-debug.log* 20 | yarn-error.log* 21 | -------------------------------------------------------------------------------- /website/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve("@docusaurus/core/lib/babel/preset")], 3 | }; 4 | -------------------------------------------------------------------------------- /website/docs/archive_formats.md: -------------------------------------------------------------------------------- 1 | --- 2 | id: "archive-formats" 3 | title: "Archive Formats" 4 | sidebar_label: "Archive Formats" 5 | --- 6 | 7 | ## Built-in Formats 8 | 9 | Archiver supports the following formats out of the 
box. 10 | 11 | ### ZIP 12 | 13 | The [zip-stream](https://www.npmjs.com/package/zip-stream) package is used to produce ZIP archives. 14 | 15 | ### TAR 16 | 17 | The [tar-stream](https://www.npmjs.com/package/tar-stream) package is used to produce TAR archives. 18 | 19 | GZIP compression is also supported. 20 | 21 | ### JSON 22 | 23 | The JSON format is designed primarily for debugging and just collects and stringifies the entry data into JSON. 24 | 25 | ## Custom Formats 26 | 27 | Archiver also supports the registration of custom archive formats. 28 | 29 | The [API](/docs/archiver/#format-registration) for this is rather simple at this point but may change over time. 30 | -------------------------------------------------------------------------------- /website/docs/archiver_api.md: 1 | --- 2 | id: "archiver" 3 | title: "Archiver API" 4 | sidebar_label: "Archiver" 5 | --- 6 | 7 | ## Archiver Class 8 | 9 | ```js 10 | new Archiver(format, options); 11 | ``` 12 | 13 | ### constructor 14 | 15 | ##### Parameters 16 | 17 | - `format` - _String_ - The archive format to use. 18 | - `options` - _Object_ 19 | 20 | #### Options 21 | 22 | The `options` object may include the following properties as well as all [Stream.duplex options](https://nodejs.org/api/stream.html#stream_new_stream_duplex_options): 23 | 24 | ##### Core Options 25 | 26 | - `statConcurrency` - _Number_ (default 4) - Sets the number of workers used to process the internal fs stat queue. 27 | 28 | ##### ZIP Options 29 | 30 | - `comment` - _String_ - Sets the zip archive comment. 31 | - `forceLocalTime` - _Boolean_ - Forces the archive to contain local file times instead of UTC. 32 | - `forceZip64` - _Boolean_ - Forces the archive to contain ZIP64 headers. 33 | - `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. 34 | - `store` - _Boolean_ - Sets the compression method to STORE.
35 | - `zlib` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. 36 | 37 | ##### TAR Options 38 | 39 | - `gzip` - _Boolean_ - Compress the tar archive using gzip. 40 | - `gzipOptions` - _Object_ - Passed to [zlib](https://nodejs.org/api/zlib.html#zlib_class_options) to control compression. 41 | 42 | See [tar-stream](https://www.npmjs.com/package/tar-stream) documentation for additional properties. 43 | 44 | --- 45 | 46 | ### abort 47 | 48 | ```js 49 | abort() → {this} 50 | ``` 51 | 52 | Aborts the archiving process, taking a best-effort approach, by: 53 | 54 | - removing any pending queue tasks 55 | - allowing any active queue workers to finish 56 | - detaching internal module pipes 57 | - ending both sides of the Transform stream 58 | 59 | It will NOT drain any remaining sources. 60 | 61 | ##### Parameters 62 | 63 | None 64 | 65 | --- 66 | 67 | ### append 68 | 69 | ```js 70 | append(source, data) → {this} 71 | ``` 72 | 73 | Appends an input source (text string, buffer, or stream) to the instance. 74 | 75 | When the instance has received, processed, and emitted the input, the entry event is fired. 76 | 77 | ##### Parameters 78 | 79 | - `source` - _Buffer | Stream | String_ - The input source. 80 | - `data` - _Object_ - [The entry data](#entry-data). 81 | 82 | --- 83 | 84 | ### directory 85 | 86 | ```js 87 | directory(dirpath, destpath, data) → {this} 88 | ``` 89 | 90 | Appends a directory and its files, recursively, given its dirpath. 91 | 92 | ##### Parameters 93 | 94 | - `dirpath` - _String_ - The source directory path. 95 | - `destpath` - _String_ - The destination path within the archive. 96 | - `data` - _Object_ - [The entry data](#entry-data). 97 | 98 | --- 99 | 100 | ### file 101 | 102 | ```js 103 | file(filepath, data) → {this} 104 | ``` 105 | 106 | Appends a file given its filepath using a [lazystream](https://github.com/jpommerening/node-lazystream) wrapper to prevent issues with open file limits. 
107 | 108 | When the instance has received, processed, and emitted the file, the entry event is fired. 109 | 110 | ##### Parameters 111 | 112 | - `filepath` - _String_ - The source filepath. 113 | - `data` - _Object_ - [The entry data](#entry-data). 114 | 115 | --- 116 | 117 | ### finalize 118 | 119 | ```js 120 | finalize() → {Promise} 121 | ``` 122 | 123 | Finalizes the instance and prevents further appending to the archive structure (queue will continue til drained). 124 | 125 | The `end`, `close` or `finish` events on the destination stream may fire right after calling this method so you should set listeners beforehand to properly detect stream completion. 126 | 127 | ##### Parameters 128 | 129 | None 130 | 131 | --- 132 | 133 | ### glob 134 | 135 | ```js 136 | glob(pattern, options, data) → {this} 137 | ``` 138 | 139 | Appends multiple files that match a glob pattern. 140 | 141 | ##### Parameters 142 | 143 | - `pattern` - _String_ - The [glob pattern](https://github.com/isaacs/minimatch) to match. 144 | - `options` - _Object_ - Options passed to [node-readdir-glob](https://github.com/yqnn/node-readdir-glob#options), plus an optional `cwd` property that sets the directory to read (defaults to `'.'`). 145 | - `data` - _Object_ - [The entry data](#entry-data). 146 | 147 | --- 148 | 149 | ### pointer 150 | 151 | ```js 152 | pointer() → {Number} 153 | ``` 154 | 155 | Returns the current length (in bytes) that has been emitted. 156 | 157 | ##### Parameters 158 | 159 | None 160 | 161 | --- 162 | 163 | ### setFormat 164 | 165 | ```js 166 | setFormat(format) → {this} 167 | ``` 168 | 169 | Sets the module format name used for archiving. 170 | 171 | ##### Parameters 172 | 173 | - `format` - _String_ - The name of the format. 174 | 175 | --- 176 | 177 | ### setModule 178 | 179 | ```js 180 | setModule(module) → {this} 181 | ``` 182 | 183 | Sets the module used for archiving. 
184 | 185 | ##### Parameters 186 | 187 | - `module` - _Function_ - The function for archiver to interact with. 188 | 189 | --- 190 | 191 | ### symlink 192 | 193 | ```js 194 | symlink(filepath, target, mode) → {this} 195 | ``` 196 | 197 | Appends a symlink to the instance. 198 | 199 | This does NOT interact with filesystem and is used for programmatically creating symlinks. 200 | 201 | ##### Parameters 202 | 203 | - `filepath` - _String_ - The symlink path (within archive). 204 | - `target` - _String_ - The target path (within archive). 205 | - `mode` - _Number_ - The entry permissions. 206 | 207 | ## Events 208 | 209 | #### Event: entry 210 | 211 | Fires when the entry's input has been processed and appended to the archive. 212 | 213 | The `entry` event object contains the following properties: 214 | 215 | - [Entry Data](#entry-data) 216 | 217 | #### Event: progress 218 | 219 | The `progress` event object contains the following properties: 220 | 221 | - `entries` - _Object_ - An object containing the following properties: 222 | - `total` - _Number_ - The number of entries that have been appended. 223 | - `processed` - _Number_ - The number of entries that have been processed. 224 | - `fs` - Object - An object containing the following properties: 225 | - `totalBytes` - _Number_ - The number of bytes that have been appended. Calculated asynchronously and might not be accurate: it grows while entries are added. (based on fs.Stats) 226 | - `processedBytes` - _Number_ - The number of bytes that have been processed. (based on fs.Stats) 227 | 228 | #### Event: error 229 | 230 | The `error` event object contains the following properties: 231 | 232 | - `message` - _String_ - The message of the error. 233 | - `code` - _String_ - The error code assigned to this error. 234 | - `data` - _Object_ - Additional data provided for reporting or debugging (where available).
235 | 236 | #### Event: warning 237 | 238 | The `warning` event object contains the following properties: 239 | 240 | - `message` - _String_ - The message of the warning. 241 | - `code` - _String_ - The error code assigned to this warning. 242 | - `data` - _Object_ - Additional data provided for reporting or debugging (where available). 243 | 244 | ## Entry Data 245 | 246 | The entry data object may contain the following properties: 247 | 248 | #### Core Entry Properties 249 | 250 | - `name` - _String_ - Sets the entry name including internal path. 251 | - `date` - _String | Date_ - Sets the entry date. 252 | - `mode` - _Number_ - Sets the entry permissions. 253 | - `prefix` - _String_ - Sets a path prefix for the entry name. Useful when working with methods like [directory](#directory) or [glob](#glob). 254 | - `stats` - _fs.Stats_ - Sets the stat data for this entry allowing for reduction of fs.stat calls. 255 | 256 | #### ZIP Entry Properties 257 | 258 | - `namePrependSlash` - _Boolean_ - Prepends a forward slash to archive file paths. 259 | - `store` - _Boolean_ - Sets the compression method to STORE. 260 | 261 | ## Format Registration 262 | 263 | ### registerFormat 264 | 265 | ```js 266 | registerFormat(format, module); 267 | ``` 268 | 269 | Registers a format for use with archiver. 270 | 271 | ##### Parameters 272 | 273 | - `format` - _String_ - The name of the format. 274 | - `module` - _Function_ - The function for archiver to interact with. 275 | 276 | #### module 277 | 278 | ```js 279 | module(options); 280 | ``` 281 | 282 | The `module` function should consist of the following: 283 | 284 | - a Readable Stream interface that contains the resulting archive data. 285 | - a `module.prototype.append` function. 286 | - a `module.prototype.finalize` function.
287 | 288 | ##### module.prototype.append 289 | 290 | ```js 291 | module.prototype.append(source, data, callback) { 292 | // source: Buffer or Stream 293 | // data: entry (meta)data 294 | // callback: called when entry has been added to archive 295 | callback(err, data) 296 | } 297 | ``` 298 | 299 | ##### module.prototype.finalize 300 | 301 | ```js 302 | module.prototype.finalize() {} 303 | ``` 304 | 305 | --- 306 | 307 | ### isRegisteredFormat 308 | 309 | ```js 310 | isRegisteredFormat(format); 311 | ``` 312 | 313 | Check if the format is already registered. 314 | 315 | ##### Parameters 316 | 317 | - `format` - _String_ - The name of the format. 318 | -------------------------------------------------------------------------------- /website/docs/quickstart.md: 1 | --- 2 | id: quickstart 3 | title: Quickstart 4 | --- 5 | 6 | ## Installation 7 | 8 | Archiver is available on [npm](https://www.npmjs.com/package/archiver). 9 | 10 | `$ npm install archiver` 11 | 12 | ## Examples 13 | 14 | ```js 15 | // require modules 16 | const fs = require("fs"); 17 | const archiver = require("archiver"); 18 | 19 | // create a file to stream archive data to. 20 | const output = fs.createWriteStream(__dirname + "/example.zip"); 21 | const archive = archiver("zip", { 22 | zlib: { level: 9 }, // Sets the compression level. 23 | }); 24 | 25 | // listen for all archive data to be written 26 | // 'close' event is fired only when a file descriptor is involved 27 | output.on("close", function () { 28 | console.log(archive.pointer() + " total bytes"); 29 | console.log( 30 | "archiver has been finalized and the output file descriptor has closed.", 31 | ); 32 | }); 33 | 34 | // This event is fired when the data source is drained no matter what was the data source. 35 | // It is not part of this library but rather from the NodeJS Stream API.
36 | // @see: https://nodejs.org/api/stream.html#stream_event_end 37 | output.on("end", function () { 38 | console.log("Data has been drained"); 39 | }); 40 | 41 | // good practice to catch warnings (ie stat failures and other non-blocking errors) 42 | archive.on("warning", function (err) { 43 | if (err.code === "ENOENT") { 44 | // log warning 45 | } else { 46 | // throw error 47 | throw err; 48 | } 49 | }); 50 | 51 | // good practice to catch this error explicitly 52 | archive.on("error", function (err) { 53 | throw err; 54 | }); 55 | 56 | // pipe archive data to the file 57 | archive.pipe(output); 58 | 59 | // append a file from stream 60 | const file1 = __dirname + "/file1.txt"; 61 | archive.append(fs.createReadStream(file1), { name: "file1.txt" }); 62 | 63 | // append a file from string 64 | archive.append("string cheese!", { name: "file2.txt" }); 65 | 66 | // append a file from buffer 67 | const buffer3 = Buffer.from("buff it!"); 68 | archive.append(buffer3, { name: "file3.txt" }); 69 | 70 | // append a file 71 | archive.file("file1.txt", { name: "file4.txt" }); 72 | 73 | // append files from a sub-directory and naming it `new-subdir` within the archive 74 | archive.directory("subdir/", "new-subdir"); 75 | 76 | // append files from a sub-directory, putting its contents at the root of archive 77 | archive.directory("subdir/", false); 78 | 79 | // append files from a glob pattern 80 | archive.glob("file*.txt", { cwd: __dirname }); 81 | 82 | // finalize the archive (ie we are done appending files but streams have to finish yet) 83 | // 'close', 'end' or 'finish' may be fired right after calling this method so register to them beforehand 84 | archive.finalize(); 85 | ``` 86 | -------------------------------------------------------------------------------- /website/docusaurus.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | title: "Archiver", 3 | tagline: "A streaming interface for archive 
generation.", 4 | url: "https://www.archiverjs.com", 5 | baseUrl: "/", 6 | onBrokenLinks: "throw", 7 | onBrokenMarkdownLinks: "warn", 8 | favicon: "img/favicon.ico", 9 | organizationName: "archiverjs", 10 | projectName: "node-archiver", 11 | themeConfig: { 12 | navbar: { 13 | title: "Archiver", 14 | logo: { 15 | alt: "Archiver Logo", 16 | src: "img/logo.svg", 17 | }, 18 | items: [ 19 | { 20 | to: "docs/quickstart", 21 | label: "Docs", 22 | position: "left", 23 | }, 24 | { 25 | to: "docs/archiver", 26 | label: "API", 27 | position: "left", 28 | }, 29 | { 30 | href: "https://github.com/archiverjs/node-archiver/", 31 | label: "GitHub", 32 | position: "right", 33 | }, 34 | ], 35 | }, 36 | footer: { 37 | style: "dark", 38 | links: [ 39 | { 40 | title: "Docs", 41 | items: [ 42 | { 43 | label: "Get Started", 44 | to: "docs/quickstart", 45 | }, 46 | { 47 | label: "Archive Formats", 48 | to: "docs/archive-formats", 49 | }, 50 | { 51 | label: "API Reference", 52 | to: "docs/archiver", 53 | }, 54 | ], 55 | }, 56 | { 57 | title: "More", 58 | items: [ 59 | { 60 | label: "ZipStream", 61 | to: "zipstream", 62 | }, 63 | { 64 | label: "GitHub", 65 | href: "https://github.com/archiverjs/", 66 | }, 67 | ], 68 | }, 69 | ], 70 | copyright: `Copyright © ${new Date().getFullYear()}. 
Built with Docusaurus.`, 71 | }, 72 | }, 73 | presets: [ 74 | [ 75 | "@docusaurus/preset-classic", 76 | { 77 | docs: { 78 | sidebarPath: require.resolve("./sidebars.js"), 79 | editUrl: 80 | "https://github.com/archiverjs/node-archiver/edit/master/website/", 81 | }, 82 | blog: { 83 | showReadingTime: true, 84 | editUrl: 85 | "https://github.com/archiverjs/node-archiver/edit/master/website/blog/", 86 | }, 87 | theme: { 88 | customCss: require.resolve("./src/css/custom.css"), 89 | }, 90 | gtag: { 91 | trackingID: "UA-75847652-4", 92 | anonymizeIP: true, 93 | }, 94 | }, 95 | ], 96 | ], 97 | }; 98 | -------------------------------------------------------------------------------- /website/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@archiverjs/website", 3 | "version": "1.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "serve": "docusaurus serve", 12 | "clear": "docusaurus clear", 13 | "write-translations": "write-translations" 14 | }, 15 | "dependencies": { 16 | "@docusaurus/core": "3.5.2", 17 | "@docusaurus/preset-classic": "3.5.2", 18 | "@mdx-js/react": "^3.0.0", 19 | "clsx": "^2.0.0", 20 | "react": "^18.0.0", 21 | "react-dom": "^18.0.0" 22 | }, 23 | "browserslist": { 24 | "production": [ 25 | ">0.5%", 26 | "not dead", 27 | "not op_mini all" 28 | ], 29 | "development": [ 30 | "last 1 chrome version", 31 | "last 1 firefox version", 32 | "last 1 safari version" 33 | ] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /website/sidebars.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | docs: { 3 | "Getting Started": ["quickstart", "archive-formats"], 4 | "API Reference": ["archiver"], 5 | }, 6 | }; 7 | 
-------------------------------------------------------------------------------- /website/src/css/custom.css: -------------------------------------------------------------------------------- 1 | /* stylelint-disable docusaurus/copyright-header */ 2 | /** 3 | * Any CSS included here will be global. The classic template 4 | * bundles Infima by default. Infima is a CSS framework designed to 5 | * work well for content-centric websites. 6 | */ 7 | 8 | /* You can override the default Infima variables here. */ 9 | :root { 10 | --ifm-color-primary: #62af1f; 11 | --ifm-color-primary-dark: #589e1c; 12 | --ifm-color-primary-darker: #53951a; 13 | --ifm-color-primary-darkest: #457b16; 14 | --ifm-color-primary-light: #6cc122; 15 | --ifm-color-primary-lighter: #71c924; 16 | --ifm-color-primary-lightest: #80db31; 17 | --ifm-code-font-size: 95%; 18 | } 19 | 20 | .docusaurus-highlight-code-line { 21 | background-color: rgb(72, 77, 91); 22 | display: block; 23 | margin: 0 calc(-1 * var(--ifm-pre-padding)); 24 | padding: 0 var(--ifm-pre-padding); 25 | } 26 | -------------------------------------------------------------------------------- /website/src/pages/index.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import clsx from "clsx"; 3 | import Layout from "@theme/Layout"; 4 | import Link from "@docusaurus/Link"; 5 | import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; 6 | import useBaseUrl from "@docusaurus/useBaseUrl"; 7 | import styles from "./styles.module.css"; 8 | 9 | const features = [ 10 | { 11 | title: "Streaming", 12 | description: ( 13 | <> 14 | Archiver was designed to use native node streams as its data transport. 15 | > 16 | ), 17 | }, 18 | { 19 | title: "Extendable", 20 | description: ( 21 | <> 22 | Archiver can be extended to support different archive formats while 23 | reusing the same composition API. 
24 | > 25 | ), 26 | }, 27 | ]; 28 | 29 | function Feature({ imageUrl, title, description }) { 30 | const imgUrl = useBaseUrl(imageUrl); 31 | return ( 32 |
{description}
40 |{siteConfig.tagline}
53 |