├── .github ├── dependabot.yml └── workflows │ ├── generated-pr.yml │ ├── js-test-and-release.yml │ └── stale.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── examples ├── .gitignore ├── car-to-fixture.js ├── dump-car.js ├── dump-index.js ├── package.json ├── round-trip.js ├── test-examples.js └── verify-car.js ├── package.json ├── src ├── api.ts ├── buffer-decoder.js ├── buffer-reader-browser.js ├── buffer-reader.js ├── buffer-writer.js ├── coding.ts ├── decoder-common.js ├── decoder.js ├── encoder.js ├── header-validator.js ├── header.ipldsch ├── index-browser.js ├── index.js ├── indexed-reader-browser.js ├── indexed-reader.js ├── indexer.js ├── iterator-channel.js ├── iterator.js ├── promise-fs-opts.js ├── reader-browser.js ├── reader.js ├── writer-browser.js └── writer.js ├── test ├── _fixtures_to_js.mjs ├── common.js ├── fixtures-expectations.js ├── fixtures.js ├── fixtures │ ├── sample-corrupt-pragma.car │ ├── sample-index.carindex │ ├── sample-rootless-v42.car │ ├── sample-rw-bs-v2.car │ ├── sample-unixfs-v2.car │ ├── sample-v1-noidentity.car │ ├── sample-v1-tailing-corrupt-section.car │ ├── sample-v1-with-zero-len-section.car │ ├── sample-v1-with-zero-len-section2.car │ ├── sample-v1.car │ ├── sample-v2-corrupt-data-and-index.car │ ├── sample-v2-indexless.car │ └── sample-wrapped-v2.car ├── go.car ├── go.carv2 ├── node-test-file-streams.js ├── node-test-indexed-reader.js ├── node-test-large.js ├── node-test-raw.js ├── node-test-updateroots.js ├── node.js ├── test-buffer-writer.spec.js ├── test-errors.spec.js ├── test-indexer.spec.js ├── test-interface.spec.js ├── test-iterator.spec.js ├── test-reader-sync.spec.js ├── test-reader.spec.js ├── test-writer.spec.js └── verify-store-reader.js └── tsconfig.json /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | time: "10:00" 8 | open-pull-requests-limit: 10 9 | commit-message: 10 | prefix: "deps" 11 | prefix-development: "deps(dev)" 12 | - package-ecosystem: 'npm' 13 | directory: '/examples/' 14 | schedule: 15 | interval: 'daily' 16 | ignore: 17 | - dependency-name: "@ipld/car" 18 | commit-message: 19 | prefix: 'chore' 20 | include: 'scope' 21 | -------------------------------------------------------------------------------- /.github/workflows/generated-pr.yml: -------------------------------------------------------------------------------- 1 | name: Close Generated PRs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | stale: 14 | uses: ipdxco/unified-github-workflows/.github/workflows/reusable-generated-pr.yml@v1 15 | -------------------------------------------------------------------------------- /.github/workflows/js-test-and-release.yml: -------------------------------------------------------------------------------- 1 | name: test & maybe release 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | permissions: 11 | contents: write 12 | id-token: write 13 | packages: write 14 | pull-requests: write 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | js-test-and-release: 22 | uses: 
ipdxco/unified-github-workflows/.github/workflows/js-test-and-release.yml@v1.0 23 | secrets: 24 | DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} 25 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 26 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 27 | UCI_GITHUB_TOKEN: ${{ secrets.UCI_GITHUB_TOKEN }} 28 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Close Stale Issues 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | stale: 14 | uses: ipdxco/unified-github-workflows/.github/workflows/reusable-stale-issue.yml@v1 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | package-lock.json 2 | node_modules/ 3 | .nyc_output/ 4 | .coverage/ 5 | build/ 6 | dist/ 7 | types/ 8 | examples/node_modules/ 9 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This project is dual licensed under MIT and Apache-2.0. 2 | 3 | MIT: https://www.opensource.org/licenses/mit 4 | Apache-2.0: https://www.apache.org/licenses/license-2.0 5 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at 2 | 3 | http://www.apache.org/licenses/LICENSE-2.0 4 | 5 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 6 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /examples/.gitignore: -------------------------------------------------------------------------------- 1 | # ignore output from the examples 2 | example.car 3 | baf* 4 | Qm* 5 | -------------------------------------------------------------------------------- /examples/car-to-fixture.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable no-console */ 4 | 5 | // Take a .car file and dump its contents to stdout as a single DAG-JSON format 6 | // block. The format is standardised for describing CAR fixtures at 7 | // https://ipld.io/specs/transport/car/fixture/ 8 | 9 | import fs from 'fs' 10 | import { CarIndexer } from '@ipld/car/indexer' 11 | import { CarReader } from '@ipld/car/reader' 12 | import * as dagCbor from '@ipld/dag-cbor' 13 | import * as dagJson from '@ipld/dag-json' 14 | import * as dagPb from '@ipld/dag-pb' 15 | import * as json from 'multiformats/codecs/json' 16 | import * as raw from 'multiformats/codecs/raw' 17 | 18 | if (!process.argv[2]) { 19 | console.log('Usage: dump-car.js ') 20 | process.exit(1) 21 | } 22 | 23 | const codecs = { 24 | [dagCbor.code]: dagCbor, 25 | [dagPb.code]: dagPb, 26 | [dagJson.code]: dagJson, 27 | [raw.code]: raw, 28 | [json.code]: json 29 | } 30 | 31 | function decode (cid, bytes) { 32 | if (!codecs[cid.code]) { 33 | throw new Error(`Unknown codec code: 0x${cid.code.toString(16)}`) 34 | } 35 | return codecs[cid.code].decode(bytes) 36 | } 37 | 38 | async function run () { 39 | const bytes = await fs.promises.readFile(process.argv[2]) 40 | // this is not the most optimal way to get both an index and a reader, 41 | // nor is reading in the bytes into memory necessarily the best thing 42 | // to be doing, but this is fine for small files and where efficiency 43 | // isn't critical 44 | const indexer = await CarIndexer.fromBytes(bytes) 45 | const reader = await CarReader.fromBytes(bytes) 46 | const fixture = { 47 | header: reader._header, // a little naughty but we need gory details 48 | blocks: [] 49 | } 50 | let i = 0 51 | for await (const blockIndex of indexer) { 52 | fixture.blocks[i] = blockIndex 53 | const block = await reader.get(blockIndex.cid) 54 | fixture.blocks[i].content = decode(blockIndex.cid, block.bytes) 55 | i++ 56 | } 57 | const json = new TextDecoder().decode(dagJson.encode(fixture)) 58 | if (process.argv.includes('--pretty')) { 59 | console.log(JSON.stringify(JSON.parse(json), null, 2)) 60 | } else { 61 | console.log(json) 62 | } 63 | } 64 | 65 | run().catch((err) => { 66 | console.error(err) 67 | process.exit(1) 68 | }) 69 | -------------------------------------------------------------------------------- /examples/dump-car.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable no-console */ 4 | 5 | // Take a .car file and dump its contents into one file per block, with the 6 | // filename being the CID of that block. 7 | // Also prints a DAG-JSON form of the block and its CID to stdout. 8 | // If `--inspect` is supplied, don't write the blocks, just print them to stdout. 
9 | 10 | import fs from 'fs' 11 | import { CarBlockIterator } from '@ipld/car/iterator' 12 | import * as dagCbor from '@ipld/dag-cbor' 13 | import * as dagJson from '@ipld/dag-json' 14 | import * as dagPb from '@ipld/dag-pb' 15 | import * as json from 'multiformats/codecs/json' 16 | import * as raw from 'multiformats/codecs/raw' 17 | 18 | if (!process.argv[2]) { 19 | console.log('Usage: dump-car.js [--inspect] ') 20 | process.exit(1) 21 | } 22 | 23 | const codecs = { 24 | [dagCbor.code]: dagCbor, 25 | [dagPb.code]: dagPb, 26 | [dagJson.code]: dagJson, 27 | [raw.code]: raw, 28 | [json.code]: json 29 | } 30 | 31 | function decode (cid, bytes) { 32 | if (!codecs[cid.code]) { 33 | throw new Error(`Unknown codec code: 0x${cid.code.toString(16)}`) 34 | } 35 | return codecs[cid.code].decode(bytes) 36 | } 37 | 38 | async function run () { 39 | const inspect = process.argv.includes('--inspect') 40 | const inStream = fs.createReadStream(process.argv.filter((a) => a !== '--inspect')[2]) 41 | const reader = await CarBlockIterator.fromIterable(inStream) 42 | console.log(`Version: ${reader.version}`) 43 | console.log(`Roots: [${(await reader.getRoots()).map((r) => r.toString()).join(', ')}]`) 44 | console.log('Blocks:') 45 | let i = 1 46 | for await (const { cid, bytes } of reader) { 47 | if (!inspect) { 48 | await fs.promises.writeFile(cid.toString(), bytes) 49 | } 50 | 51 | const decoded = decode(cid, bytes) 52 | console.log(`#${i++} ${cid} [${codecs[cid.code].name}]`) 53 | console.dir(new TextDecoder().decode(dagJson.encode(decoded))) 54 | } 55 | } 56 | 57 | run().catch((err) => { 58 | console.error(err) 59 | process.exit(1) 60 | }) 61 | -------------------------------------------------------------------------------- /examples/dump-index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable no-console */ 4 | 5 | // Take a .car file and dump its index in DAG-JSON format, one line per block 6 | 7 | import fs from 'fs' 8 | import { CarIndexer } from '@ipld/car/indexer' 9 | import * as dagJson from '@ipld/dag-json' 10 | 11 | if (!process.argv[2]) { 12 | console.log('Usage: dump-index.js ') 13 | process.exit(1) 14 | } 15 | 16 | async function run () { 17 | const indexer = await CarIndexer.fromIterable(fs.createReadStream(process.argv[2])) 18 | for await (const blockIndex of indexer) { 19 | console.log(new TextDecoder().decode(dagJson.encode(blockIndex))) 20 | } 21 | } 22 | 23 | run().catch((err) => { 24 | console.error(err) 25 | process.exit(1) 26 | }) 27 | -------------------------------------------------------------------------------- /examples/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@ipld/car-examples", 3 | "version": "0.0.0-dev", 4 | "description": "How to use the Content Addressable aRchive format reader and writer", 5 | "main": "example.js", 6 | "type": "module", 7 | "private": true, 8 | "scripts": { 9 | "start": "node round-trip.js", 10 | "round-trip": "node round-trip.js", 11 | "verify": "node verify-car.js", 12 | "dump": "node dump-car.js", 13 | "test": "npm install && node test-examples.js" 14 | }, 15 | "author": "Rod (http://r.va.gg/)", 16 | "license": "(Apache-2.0 AND MIT)", 17 | "dependencies": { 18 | "@ipld/car": "file:..", 19 | "@ipld/dag-cbor": "^9.0.0", 20 | "@ipld/dag-json": "^10.0.0", 21 | "@ipld/dag-pb": "^4.0.0", 22 | "@multiformats/blake2": "^2.0.0", 23 | "@types/varint": "^6.0.0", 24 | "multiformats": "^12.1.0", 25 | "varint": 
"^6.0.0" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /examples/round-trip.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable no-console */ 4 | 5 | // Create a simple .car file with a single block and that block's CID as the 6 | // single root. Then read the .car and fetch the block again. 7 | 8 | import fs from 'fs' 9 | import { Readable } from 'stream' 10 | import { CarReader, CarWriter } from '@ipld/car' 11 | import { CID } from 'multiformats/cid' 12 | import * as raw from 'multiformats/codecs/raw' 13 | import { sha256 } from 'multiformats/hashes/sha2' 14 | 15 | async function example () { 16 | const bytes = new TextEncoder().encode('random meaningless bytes') 17 | const hash = await sha256.digest(raw.encode(bytes)) 18 | const cid = CID.create(1, raw.code, hash) 19 | 20 | // create the writer and set the header with a single root 21 | const { writer, out } = await CarWriter.create([cid]) 22 | Readable.from(out).pipe(fs.createWriteStream('example.car')) 23 | 24 | // store a new block, creates a new file entry in the CAR archive 25 | await writer.put({ cid, bytes }) 26 | await writer.close() 27 | 28 | const inStream = fs.createReadStream('example.car') 29 | // read and parse the entire stream in one go, this will cache the contents of 30 | // the car in memory so is not suitable for large files. 31 | const reader = await CarReader.fromIterable(inStream) 32 | 33 | // read the list of roots from the header 34 | const roots = await reader.getRoots() 35 | // retrieve a block, as a { cid:CID, bytes:UInt8Array } pair from the archive 36 | const got = await reader.get(roots[0]) 37 | // also possible: for await (const { cid, bytes } of CarIterator.fromIterable(inStream)) { ... 
} 38 | 39 | console.log('Retrieved [%s] from example.car with CID [%s]', 40 | new TextDecoder().decode(got.bytes), 41 | roots[0].toString()) 42 | } 43 | 44 | example().catch((err) => { 45 | console.error(err) 46 | process.exit(1) 47 | }) 48 | -------------------------------------------------------------------------------- /examples/test-examples.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | 3 | import assert from 'assert' 4 | import { execFile } from 'child_process' 5 | import { promises as fsPromises } from 'fs' 6 | import { promisify } from 'util' 7 | 8 | const { unlink, stat } = fsPromises 9 | 10 | const goCarCids = [ 11 | 'bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq', 12 | 'bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4', 13 | 'bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke', 14 | 'bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm', 15 | 'bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm', 16 | 'QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT', 17 | 'QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d', 18 | 'QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'] 19 | 20 | async function cleanGoCarDump () { 21 | return Promise.all(goCarCids.map((c) => unlink(c))) 22 | } 23 | 24 | async function runExample (name, args = []) { 25 | return promisify(execFile)(process.execPath, [`${name}.js`].concat(args)) 26 | } 27 | 28 | runExample('round-trip').then(({ stdout, stderr }) => { 29 | assert.strictEqual(stderr, '') 30 | assert.strictEqual(stdout, 'Retrieved [random meaningless bytes] from example.car with CID [bafkreihwkf6mtnjobdqrkiksr7qhp6tiiqywux64aylunbvmfhzeql2coa]\n') 31 | console.log('\u001b[32m✔\u001b[39m [example] round-trip') 32 | }).then(async () => { 33 | await runExample('verify-car', ['example.car']).then(({ stdout, stderr }) => { 34 | assert.strictEqual(stderr, '') 35 | assert.strictEqual(stdout, 'Verified 1 block(s) in example.car\n') 36 | console.log('\u001b[32m✔\u001b[39m [example] verify-car example.car') 37 | }) 38 | }).then(async () => { 39 | await runExample('verify-car', ['../test/go.car']).then(({ stdout, stderr }) => { 40 | assert.strictEqual(stderr, '') 41 | assert.strictEqual(stdout, 'Verified 8 block(s) in ../test/go.car\n') 42 | console.log('\u001b[32m✔\u001b[39m [example] verify-car ../test/go.car') 43 | }) 44 | }).then(async () => { 45 | try { 46 | await cleanGoCarDump() 47 | } catch (err) { 48 | // failure is expected, this is just a prep 49 | } 50 | await runExample('dump-car', ['../test/go.car']).then(async ({ stdout, stderr }) => { 51 | assert.strictEqual(stderr, '') 52 | assert.strictEqual(stdout, 53 | `Version: 1 54 | Roots: [bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm, bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm] 55 | Blocks: 56 | #1 bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm [dag-cbor] 57 | '{"link":{"/":"QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d"},"name":"blip"}' 58 | #2 QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d [dag-pb] 59 | '{"Links":[{"Hash":{"/":"bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke"},"Name":"bear","Tsize":4},{"Hash":{"/":"QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys"},"Name":"second","Tsize":149}]}' 60 | #3 bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke [raw] 61 | '{"/":{"bytes":"Y2NjYw"}}' 62 | #4 QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys [dag-pb] 63 | 
'{"Links":[{"Hash":{"/":"bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4"},"Name":"dog","Tsize":4},{"Hash":{"/":"QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT"},"Name":"first","Tsize":51}]}' 64 | #5 bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4 [raw] 65 | '{"/":{"bytes":"YmJiYg"}}' 66 | #6 QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT [dag-pb] 67 | '{"Links":[{"Hash":{"/":"bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq"},"Name":"cat","Tsize":4}]}' 68 | #7 bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq [raw] 69 | '{"/":{"bytes":"YWFhYQ"}}' 70 | #8 bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm [dag-cbor] 71 | '{"link":null,"name":"limbo"}' 72 | `) 73 | assert.strictEqual((await Promise.all(goCarCids.map((c) => stat(c)))).map((s) => s.isFile()).filter(Boolean).length, goCarCids.length) 74 | await cleanGoCarDump() 75 | console.log('\u001b[32m✔\u001b[39m [example] dump-car ../test/go.car') 76 | }) 77 | }).then(async () => { 78 | await runExample('car-to-fixture', ['../test/go.car']).then(({ stdout, stderr }) => { 79 | assert.strictEqual(stderr, '') 80 | assert.strictEqual(stdout, '{"blocks":[{"blockLength":55,"blockOffset":137,"cid":{"/":"bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm"},"content":{"link":{"/":"QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d"},"name":"blip"},"length":92,"offset":100},{"blockLength":97,"blockOffset":228,"cid":{"/":"QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d"},"content":{"Links":[{"Hash":{"/":"bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke"},"Name":"bear","Tsize":4},{"Hash":{"/":"QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys"},"Name":"second","Tsize":149}]},"length":133,"offset":192},{"blockLength":4,"blockOffset":362,"cid":{"/":"bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke"},"content":{"/":{"bytes":"Y2NjYw"}},"length":41,"offset":325},{"blockLength":94,"blockOffset":402,"cid":{"/":"QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys"},"content":{"Links":[{"Hash":{"/":"bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4"},"Name":"dog","Tsize":4},{"Hash":{"/":"QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT"},"Name":"first","Tsize":51}]},"length":130,"offset":366},{"blockLength":4,"blockOffset":533,"cid":{"/":"bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4"},"content":{"/":{"bytes":"YmJiYg"}},"length":41,"offset":496},{"blockLength":47,"blockOffset":572,"cid":{"/":"QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT"},"content":{"Links":[{"Hash":{"/":"bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq"},"Name":"cat","Tsize":4}]},"length":82,"offset":537},{"blockLength":4,"blockOffset":656,"cid":{"/":"bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq"},"content":{"/":{"bytes":"YWFhYQ"}},"length":41,"offset":619},{"blockLength":18,"blockOffset":697,"cid":{"/":"bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm"},"content":{"link":null,"name":"limbo"},"length":55,"offset":660}],"header":{"roots":[{"/":"bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm"},{"/":"bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm"}],"version":1}}\n') 81 | console.log('\u001b[32m✔\u001b[39m [example] car-to-fixture ../test/go.car') 82 | }) 83 | }).then(async () => { 84 | await runExample('dump-index', ['example.car']).then(({ stdout, stderr }) => { 85 | assert.strictEqual(stderr, '') 86 | assert.strictEqual(stdout, 
'{"blockLength":24,"blockOffset":96,"cid":{"/":"bafkreihwkf6mtnjobdqrkiksr7qhp6tiiqywux64aylunbvmfhzeql2coa"},"length":61,"offset":59}\n') 87 | console.log('\u001b[32m✔\u001b[39m [example] dump-index example.car') 88 | }) 89 | }).then(async () => { 90 | await runExample('dump-index', ['../test/go.car']).then(({ stdout, stderr }) => { 91 | assert.strictEqual(stderr, '') 92 | assert.strictEqual(stdout, 93 | `{"blockLength":55,"blockOffset":137,"cid":{"/":"bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm"},"length":92,"offset":100} 94 | {"blockLength":97,"blockOffset":228,"cid":{"/":"QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d"},"length":133,"offset":192} 95 | {"blockLength":4,"blockOffset":362,"cid":{"/":"bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke"},"length":41,"offset":325} 96 | {"blockLength":94,"blockOffset":402,"cid":{"/":"QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys"},"length":130,"offset":366} 97 | {"blockLength":4,"blockOffset":533,"cid":{"/":"bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4"},"length":41,"offset":496} 98 | {"blockLength":47,"blockOffset":572,"cid":{"/":"QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT"},"length":82,"offset":537} 99 | {"blockLength":4,"blockOffset":656,"cid":{"/":"bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq"},"length":41,"offset":619} 100 | {"blockLength":18,"blockOffset":697,"cid":{"/":"bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm"},"length":55,"offset":660} 101 | `) 102 | console.log('\u001b[32m✔\u001b[39m [example] dump-index ../test/go.carv2') 103 | }) 104 | }).then(async () => { 105 | await runExample('dump-index', ['../test/go.carv2']).then(({ stdout, stderr }) => { 106 | assert.strictEqual(stderr, '') 107 | assert.strictEqual(stdout, 108 | `{"blockLength":47,"blockOffset":143,"cid":{"/":"QmfEoLyB5NndqeKieExd1rtJzTduQUPEV8TwAYcUiy3H5Z"},"length":82,"offset":108} 109 | {"blockLength":99,"blockOffset":226,"cid":{"/":"QmczfirA7VEH7YVvKPTPoU69XM3qY4DC39nnTsWd4K3SkM"},"length":135,"offset":190} 110 | {"blockLength":54,"blockOffset":360,"cid":{"/":"Qmcpz2FHJD7VAhg1fxFXdYJKePtkx1BsHuCrAgWVnaHMTE"},"length":89,"offset":325} 111 | {"blockLength":4,"blockOffset":451,"cid":{"/":"bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu"},"length":41,"offset":414} 112 | {"blockLength":7,"blockOffset":492,"cid":{"/":"bafkreifc4hca3inognou377hfhvu2xfchn2ltzi7yu27jkaeujqqqdbjju"},"length":44,"offset":455} 113 | `) 114 | console.log('\u001b[32m✔\u001b[39m [example] dump-index ../test/go.carv2') 115 | }) 116 | }).catch((err) => { 117 | console.error(err.stack) 118 | process.exit(1) 119 | }) 120 | -------------------------------------------------------------------------------- /examples/verify-car.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /* eslint-disable no-console */ 4 | 5 | // Example: verify a car file's block bytes match the reported CIDs and that 6 | // they have round-tripishness. 7 | // This example is overly verbose but illustrates some concepts involved in CAR 8 | // files. 
9 | 10 | import fs from 'fs' 11 | import { CarBlockIterator } from '@ipld/car/iterator' 12 | import * as dagCbor from '@ipld/dag-cbor' 13 | import * as dagJson from '@ipld/dag-json' 14 | import * as dagPb from '@ipld/dag-pb' 15 | import { blake2b256 } from '@multiformats/blake2/blake2b' 16 | import { bytes, CID } from 'multiformats' 17 | import * as json from 'multiformats/codecs/json' 18 | import * as raw from 'multiformats/codecs/raw' 19 | import { from as hasher } from 'multiformats/hashes/hasher' 20 | import { sha256 } from 'multiformats/hashes/sha2' 21 | 22 | const { toHex } = bytes 23 | 24 | if (!process.argv[2]) { 25 | console.log('Usage: verify-car.js ') 26 | process.exit(1) 27 | } 28 | 29 | const codecs = { 30 | [dagCbor.code]: dagCbor, 31 | [dagPb.code]: dagPb, 32 | [dagJson.code]: dagJson, 33 | [raw.code]: raw, 34 | [json.code]: json 35 | } 36 | 37 | const hashes = { 38 | [sha256.code]: sha256, 39 | [blake2b256.code]: hasher(blake2b256) 40 | } 41 | 42 | async function run () { 43 | const inStream = fs.createReadStream(process.argv[2]) 44 | const reader = await CarBlockIterator.fromIterable(inStream) 45 | let count = 0 46 | 47 | for await (const { bytes, cid } of reader) { 48 | // Verify step 1: is this a CID we know how to deal with? 49 | if (!codecs[cid.code]) { 50 | console.log(`Unexpected codec: 0x${cid.code.toString(16)}`) 51 | process.exit(1) 52 | } 53 | if (!hashes[cid.multihash.code]) { 54 | console.log(`Unexpected multihash code: 0x${cid.multihash.code.toString(16)}`) 55 | process.exit(1) 56 | } 57 | 58 | // Verify step 2: if we hash the bytes, do we get the same digest as reported by the CID? 59 | // Note that this step is sufficient if you just want to safely verify the CAR's reported CIDs 60 | const hash = await hashes[cid.multihash.code].digest(bytes) 61 | if (toHex(hash.digest) !== toHex(cid.multihash.digest)) { 62 | console.log(`\nMismatch: digest of bytes (${toHex(hash)}) does not match digest in CID (${toHex(cid.multihash.digest)})`) 63 | } 64 | 65 | // Verify step 3: Can we round-trip the object and get the same CID for the re-encoded bytes? 
66 | // Note that this step is rarely useful and may be over-kill in most cases of "verification" 67 | const obj = codecs[cid.code].decode(bytes) 68 | const reenc = codecs[cid.code].encode(obj) 69 | const rehash = await hashes[cid.multihash.code].digest(reenc) 70 | const recid = CID.create(cid.version, cid.code, rehash) 71 | if (!recid.equals(cid)) { 72 | console.log(`\nMismatch: ${cid} <> ${recid}`) 73 | console.log(`Orig:\n${toHex(bytes)}\nRe-encode:\n${toHex(reenc)}`) 74 | } 75 | 76 | if (++count % 100 === 0) { 77 | process.stdout.write('.') 78 | } 79 | } 80 | if (count > 100) { 81 | console.log() 82 | } 83 | console.log(`Verified ${count} block(s) in ${process.argv[2]}`) 84 | } 85 | 86 | run().catch((err) => { 87 | console.error(err) 88 | process.exit(1) 89 | }) 90 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@ipld/car", 3 | "version": "5.4.2", 4 | "description": "Content Addressable aRchive format reader and writer", 5 | "author": "Rod (http://r.va.gg/)", 6 | "license": "Apache-2.0 OR MIT", 7 | "homepage": "https://github.com/ipld/js-car#readme", 8 | "repository": { 9 | "type": "git", 10 | "url": "git+https://github.com/ipld/js-car.git" 11 | }, 12 | "bugs": { 13 | "url": "https://github.com/ipld/js-car/issues" 14 | }, 15 | "keywords": [ 16 | "car", 17 | "ipfs", 18 | "ipld", 19 | "multiformats" 20 | ], 21 | "engines": { 22 | "node": ">=16.0.0", 23 | "npm": ">=7.0.0" 24 | }, 25 | "type": "module", 26 | "types": "./dist/src/index.d.ts", 27 | "typesVersions": { 28 | "*": { 29 | "*": [ 30 | "*", 31 | "dist/*", 32 | "dist/src/*", 33 | "dist/src/*/index" 34 | ], 35 | "src/*": [ 36 | "*", 37 | "dist/*", 38 | "dist/src/*", 39 | "dist/src/*/index" 40 | ] 41 | } 42 | }, 43 | "files": [ 44 | "src", 45 | "dist", 46 | "!dist/test", 47 | "!**/*.tsbuildinfo" 48 | ], 49 | "exports": { 50 | ".": { 51 | "types": "./dist/src/index.d.ts", 52 | "import": "./src/index.js", 53 | "browser": "./src/index-browser.js" 54 | }, 55 | "./buffer-writer": { 56 | "types": "./dist/src/buffer-writer.d.ts", 57 | "import": "./src/buffer-writer.js" 58 | }, 59 | "./decoder": { 60 | "types": "./dist/src/decoder.d.ts", 61 | "import": "./src/decoder.js" 62 | }, 63 | "./indexed-reader": { 64 | "types": "./dist/src/indexed-reader.d.ts", 65 | "browser": "./src/indexed-reader-browser.js", 66 | "import": "./src/indexed-reader.js" 67 | }, 68 | "./indexer": { 69 | "types": "./dist/src/indexer.d.ts", 70 | "import": "./src/indexer.js" 71 | }, 72 | "./iterator": { 73 | "types": "./dist/src/iterator.d.ts", 74 | "import": "./src/iterator.js" 75 | }, 76 | "./reader": { 77 | "types": "./dist/src/reader.d.ts", 78 | "browser": "./src/reader-browser.js", 79 | "import": "./src/reader.js" 80 | }, 81 | "./buffer-reader": { 82 | "types": "./dist/src/buffer-reader-browser.d.ts", 83 | "browser": "./src/buffer-reader-browser.js", 84 | "import": "./src/buffer-reader.js" 85 | }, 86 | "./writer": { 87 | "types": "./dist/src/writer.d.ts", 88 | "browser": "./src/writer-browser.js", 89 | "import": "./src/writer.js" 90 | } 91 | }, 92 | "eslintConfig": { 93 | "extends": "ipfs", 94 | "parserOptions": { 95 | "sourceType": "module" 96 | } 97 | }, 98 | "release": { 99 | "branches": [ 100 | "master" 101 | ], 102 | "plugins": [ 103 | [ 104 | "@semantic-release/commit-analyzer", 105 | { 106 | "preset": "conventionalcommits", 107 | "releaseRules": [ 108 | { 109 | "breaking": true, 110 | "release": "major" 111 | }, 
112 | { 113 | "revert": true, 114 | "release": "patch" 115 | }, 116 | { 117 | "type": "feat", 118 | "release": "minor" 119 | }, 120 | { 121 | "type": "fix", 122 | "release": "patch" 123 | }, 124 | { 125 | "type": "docs", 126 | "release": "patch" 127 | }, 128 | { 129 | "type": "test", 130 | "release": "patch" 131 | }, 132 | { 133 | "type": "deps", 134 | "release": "patch" 135 | }, 136 | { 137 | "scope": "no-release", 138 | "release": false 139 | } 140 | ] 141 | } 142 | ], 143 | [ 144 | "@semantic-release/release-notes-generator", 145 | { 146 | "preset": "conventionalcommits", 147 | "presetConfig": { 148 | "types": [ 149 | { 150 | "type": "feat", 151 | "section": "Features" 152 | }, 153 | { 154 | "type": "fix", 155 | "section": "Bug Fixes" 156 | }, 157 | { 158 | "type": "chore", 159 | "section": "Trivial Changes" 160 | }, 161 | { 162 | "type": "docs", 163 | "section": "Documentation" 164 | }, 165 | { 166 | "type": "deps", 167 | "section": "Dependencies" 168 | }, 169 | { 170 | "type": "test", 171 | "section": "Tests" 172 | } 173 | ] 174 | } 175 | } 176 | ], 177 | "@semantic-release/changelog", 178 | "@semantic-release/npm", 179 | "@semantic-release/github", 180 | [ 181 | "@semantic-release/git", 182 | { 183 | "assets": [ 184 | "CHANGELOG.md", 185 | "package.json" 186 | ] 187 | } 188 | ] 189 | ] 190 | }, 191 | "scripts": { 192 | "clean": "aegir clean", 193 | "lint": "aegir lint", 194 | "build": "aegir build", 195 | "build:validator": "npx @ipld/schema to-js src/header.ipldsch > src/header-validator.js", 196 | "release": "aegir release", 197 | "test": "npm run lint && aegir test && npm run test:examples", 198 | "test:node": "aegir test -t node --cov", 199 | "test:chrome": "aegir test -t browser --cov", 200 | "test:chrome-webworker": "aegir test -t webworker", 201 | "test:firefox": "aegir test -t browser -- --browser firefox", 202 | "test:firefox-webworker": "aegir test -t webworker -- --browser firefox", 203 | "test:electron-main": "aegir test -t electron-main", 204 | "test:examples": "npm run test --prefix examples/", 205 | "dep-check": "aegir dep-check", 206 | "coverage": "c8 --reporter=html --reporter=text mocha test/test-*.js && npx st -d coverage -p 8888", 207 | "docs": "jsdoc4readme --readme --description-only src/reader*.js src/indexed-reader.js src/iterator.js src/indexer.js src/writer*.js src/buffer-writer.js src/decoder.js src/buffer-reader*.js" 208 | }, 209 | "dependencies": { 210 | "@ipld/dag-cbor": "^9.0.7", 211 | "cborg": "^4.0.5", 212 | "multiformats": "^13.0.0", 213 | "varint": "^6.0.0" 214 | }, 215 | "devDependencies": { 216 | "@ipld/dag-pb": "^4.0.7", 217 | "@ipld/garbage": "^6.0.5", 218 | "@types/varint": "^6.0.3", 219 | "aegir": "^47.0.6", 220 | "jsdoc4readme": "^1.4.0" 221 | }, 222 | "browser": { 223 | "./src/index.js": "./src/index-browser.js", 224 | "./src/index-reader.js": "./src/index-reader-browser.js", 225 | "./src/reader.js": "./src/reader-browser.js", 226 | "./src/buffer-reader.js": "./src/buffer-reader-browser.js", 227 | "./src/writer.js": "./src/writer-browser.js", 228 | "fs": false, 229 | "util": false, 230 | "stream": false 231 | }, 232 | "directories": { 233 | "test": "test" 234 | }, 235 | "standard": { 236 | "ignore": [ 237 | "dist" 238 | ] 239 | }, 240 | "eslintIgnore": [ 241 | "src/header-validator.js" 242 | ] 243 | } 244 | -------------------------------------------------------------------------------- /src/api.ts: -------------------------------------------------------------------------------- 1 | import type { CID } from 'multiformats/cid' 2 | 3 | /** 4 | 
* Literally any `Iterable` (async or regular). 5 | */ 6 | export type AwaitIterable = Iterable | AsyncIterable 7 | 8 | export type { CID } 9 | /* Generic types for interfacing with block storage */ 10 | 11 | export interface Block { 12 | cid: CID 13 | bytes: Uint8Array 14 | } 15 | 16 | export interface BlockHeader { 17 | cid: CID 18 | length: number 19 | blockLength: number 20 | } 21 | 22 | export interface BlockIndex extends BlockHeader { 23 | offset: number 24 | blockOffset: number 25 | } 26 | 27 | export interface RootsReader { 28 | version: number 29 | getRoots(): Promise 30 | } 31 | 32 | export interface RootsBufferReader { 33 | version: number 34 | getRoots(): CID[] 35 | } 36 | 37 | export interface BlockIterator extends AsyncIterable {} 38 | 39 | export interface CIDIterator extends AsyncIterable {} 40 | 41 | export interface BlockReader { 42 | has(key: CID): Promise 43 | get(key: CID): Promise 44 | blocks(): BlockIterator 45 | cids(): CIDIterator 46 | } 47 | 48 | export interface BlockBufferReader { 49 | has(key: CID): boolean 50 | get(key: CID): Block | undefined 51 | blocks(): Iterable 52 | cids(): Iterable 53 | } 54 | 55 | export interface BlockWriter { 56 | put(block: Block): Promise 57 | close(): Promise 58 | version(): number 59 | } 60 | 61 | export interface CarBufferWriter { 62 | addRoot(root: CID, options?: { resize?: boolean }): CarBufferWriter 63 | write(block: Block): CarBufferWriter 64 | close(options?: { resize?: boolean }): Uint8Array 65 | } 66 | 67 | export interface CarBufferWriterOptions { 68 | roots?: CID[] // defaults to [] 69 | byteOffset?: number // defaults to 0 70 | byteLength?: number // defaults to buffer.byteLength 71 | 72 | headerSize?: number // defaults to size needed for provided roots 73 | } 74 | 75 | export interface WriterChannel { 76 | writer: BlockWriter 77 | out: AsyncIterable 78 | } 79 | 80 | export interface CarReader extends BlockReader, RootsReader {} 81 | export interface CarBufferReader extends BlockBufferReader, RootsBufferReader {} 82 | 83 | /* Specific implementations for CAR block storage */ 84 | 85 | /* 86 | export interface CarBlockIterator extends BlockIterator, RootsReader {} 87 | export interface CarCIDIterator extends CIDIterator, RootsReader {} 88 | export interface CarIndexer extends AsyncIterable, RootsReader {} 89 | export interface CarWriter extends BlockWriter {} 90 | */ 91 | -------------------------------------------------------------------------------- /src/buffer-decoder.js: -------------------------------------------------------------------------------- 1 | import { decode as decodeDagCbor } from '@ipld/dag-cbor' 2 | import { CID } from 'multiformats/cid' 3 | import * as Digest from 'multiformats/hashes/digest' 4 | import { CIDV0_BYTES, decodeV2Header, decodeVarint, getMultihashLength, V2_HEADER_LENGTH } from './decoder-common.js' 5 | import { CarV1HeaderOrV2Pragma } from './header-validator.js' 6 | 7 | /** 8 | * @typedef {import('./api.js').Block} Block 9 | * @typedef {import('./api.js').BlockHeader} BlockHeader 10 | * @typedef {import('./api.js').BlockIndex} BlockIndex 11 | * @typedef {import('./coding.js').BytesBufferReader} BytesBufferReader 12 | * @typedef {import('./coding.js').CarHeader} CarHeader 13 | * @typedef {import('./coding.js').CarV2Header} CarV2Header 14 | * @typedef {import('./coding.js').CarV2FixedHeader} CarV2FixedHeader 15 | */ 16 | 17 | /** 18 | * Reads header data from a `BytesReader`. The header may either be in the form 19 | * of a `CarHeader` or `CarV2Header` depending on the CAR being read. 
20 | * 21 | * @name decoder.readHeader(reader) 22 | * @param {BytesBufferReader} reader 23 | * @param {number} [strictVersion] 24 | * @returns {CarHeader | CarV2Header} 25 | */ 26 | export function readHeader (reader, strictVersion) { 27 | const length = decodeVarint(reader.upTo(8), reader) 28 | if (length === 0) { 29 | throw new Error('Invalid CAR header (zero length)') 30 | } 31 | const header = reader.exactly(length, true) 32 | const block = decodeDagCbor(header) 33 | if (CarV1HeaderOrV2Pragma.toTyped(block) === undefined) { 34 | throw new Error('Invalid CAR header format') 35 | } 36 | if ((block.version !== 1 && block.version !== 2) || (strictVersion !== undefined && block.version !== strictVersion)) { 37 | throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== undefined ? ` (expected ${strictVersion})` : ''}`) 38 | } 39 | if (block.version === 1) { 40 | // CarV1HeaderOrV2Pragma makes roots optional, let's make it mandatory 41 | if (!Array.isArray(block.roots)) { 42 | throw new Error('Invalid CAR header format') 43 | } 44 | return block 45 | } 46 | // version 2 47 | if (block.roots !== undefined) { 48 | throw new Error('Invalid CAR header format') 49 | } 50 | const v2Header = decodeV2Header(reader.exactly(V2_HEADER_LENGTH, true)) 51 | reader.seek(v2Header.dataOffset - reader.pos) 52 | const v1Header = readHeader(reader, 1) 53 | return Object.assign(v1Header, v2Header) 54 | } 55 | 56 | /** 57 | * Reads CID sync 58 | * 59 | * @param {BytesBufferReader} reader 60 | * @returns {CID} 61 | */ 62 | function readCid (reader) { 63 | const first = reader.exactly(2, false) 64 | if (first[0] === CIDV0_BYTES.SHA2_256 && first[1] === CIDV0_BYTES.LENGTH) { 65 | // cidv0 32-byte sha2-256 66 | const bytes = reader.exactly(34, true) 67 | const multihash = Digest.decode(bytes) 68 | return CID.create(0, CIDV0_BYTES.DAG_PB, multihash) 69 | } 70 | 71 | const version = decodeVarint(reader.upTo(8), reader) 72 | if (version !== 1) { 73 | throw new Error(`Unexpected CID version (${version})`) 74 | } 75 | const codec = decodeVarint(reader.upTo(8), reader) 76 | const bytes = reader.exactly(getMultihashLength(reader.upTo(8)), true) 77 | const multihash = Digest.decode(bytes) 78 | return CID.create(version, codec, multihash) 79 | } 80 | 81 | /** 82 | * Reads the leading data of an individual block from CAR data from a 83 | * `BytesBufferReader`. Returns a `BlockHeader` object which contains 84 | * `{ cid, length, blockLength }` which can be used to either index the block 85 | * or read the block binary data. 
86 | * 87 | * @name async decoder.readBlockHead(reader) 88 | * @param {BytesBufferReader} reader 89 | * @returns {BlockHeader} 90 | */ 91 | export function readBlockHead (reader) { 92 | // length includes a CID + Binary, where CID has a variable length 93 | // we have to deal with 94 | const start = reader.pos 95 | let length = decodeVarint(reader.upTo(8), reader) 96 | if (length === 0) { 97 | throw new Error('Invalid CAR section (zero length)') 98 | } 99 | length += (reader.pos - start) 100 | const cid = readCid(reader) 101 | const blockLength = length - Number(reader.pos - start) // subtract CID length 102 | 103 | return { cid, length, blockLength } 104 | } 105 | 106 | /** 107 | * Returns Car header and blocks from a Uint8Array 108 | * 109 | * @param {Uint8Array} bytes 110 | * @returns {{ header : CarHeader | CarV2Header , blocks: Block[]}} 111 | */ 112 | export function fromBytes (bytes) { 113 | let reader = bytesReader(bytes) 114 | const header = readHeader(reader) 115 | if (header.version === 2) { 116 | const v1length = reader.pos - header.dataOffset 117 | reader = limitReader(reader, header.dataSize - v1length) 118 | } 119 | 120 | const blocks = [] 121 | while (reader.upTo(8).length > 0) { 122 | const { cid, blockLength } = readBlockHead(reader) 123 | 124 | blocks.push({ cid, bytes: reader.exactly(blockLength, true) }) 125 | } 126 | 127 | return { 128 | header, blocks 129 | } 130 | } 131 | 132 | /** 133 | * Creates a `BytesBufferReader` from a `Uint8Array`. 134 | * 135 | * @name decoder.bytesReader(bytes) 136 | * @param {Uint8Array} bytes 137 | * @returns {BytesBufferReader} 138 | */ 139 | export function bytesReader (bytes) { 140 | let pos = 0 141 | 142 | /** @type {BytesBufferReader} */ 143 | return { 144 | upTo (length) { 145 | return bytes.subarray(pos, pos + Math.min(length, bytes.length - pos)) 146 | }, 147 | 148 | exactly (length, seek = false) { 149 | if (length > bytes.length - pos) { 150 | throw new Error('Unexpected end of data') 151 | } 152 | 153 | const out = bytes.subarray(pos, pos + length) 154 | if (seek) { 155 | pos += length 156 | } 157 | return out 158 | }, 159 | 160 | seek (length) { 161 | pos += length 162 | }, 163 | 164 | get pos () { 165 | return pos 166 | } 167 | } 168 | } 169 | 170 | /** 171 | * Wraps a `BytesBufferReader` in a limiting `BytesBufferReader` which limits maximum read 172 | * to `byteLimit` bytes. It _does not_ update `pos` of the original 173 | * `BytesBufferReader`. 
174 | * 175 | * @name decoder.limitReader(reader, byteLimit) 176 | * @param {BytesBufferReader} reader 177 | * @param {number} byteLimit 178 | * @returns {BytesBufferReader} 179 | */ 180 | export function limitReader (reader, byteLimit) { 181 | let bytesRead = 0 182 | 183 | /** @type {BytesBufferReader} */ 184 | return { 185 | upTo (length) { 186 | let bytes = reader.upTo(length) 187 | if (bytes.length + bytesRead > byteLimit) { 188 | bytes = bytes.subarray(0, byteLimit - bytesRead) 189 | } 190 | return bytes 191 | }, 192 | 193 | exactly (length, seek = false) { 194 | const bytes = reader.exactly(length, seek) 195 | if (bytes.length + bytesRead > byteLimit) { 196 | throw new Error('Unexpected end of data') 197 | } 198 | if (seek) { 199 | bytesRead += length 200 | } 201 | return bytes 202 | }, 203 | 204 | seek (length) { 205 | bytesRead += length 206 | reader.seek(length) 207 | }, 208 | 209 | get pos () { 210 | return reader.pos 211 | } 212 | } 213 | } 214 | -------------------------------------------------------------------------------- /src/buffer-reader-browser.js: -------------------------------------------------------------------------------- 1 | import * as BufferDecoder from './buffer-decoder.js' 2 | 3 | /** 4 | * @typedef {import('multiformats').CID} CID 5 | * @typedef {import('./api.js').Block} Block 6 | * @typedef {import('./api.js').CarBufferReader} ICarBufferReader 7 | * @typedef {import('./coding.js').CarHeader} CarHeader 8 | * @typedef {import('./coding.js').CarV2Header} CarV2Header 9 | */ 10 | 11 | /** 12 | * Provides blockstore-like access to a CAR. 13 | * 14 | * Implements the `RootsBufferReader` interface: 15 | * {@link ICarBufferReader.getRoots `getRoots()`}. And the `BlockBufferReader` interface: 16 | * {@link ICarBufferReader.get `get()`}, {@link ICarBufferReader.has `has()`}, 17 | * {@link ICarBufferReader.blocks `blocks()`} and 18 | * {@link ICarBufferReader.cids `cids()`}. 19 | * 20 | * Load this class with either `import { CarBufferReader } from '@ipld/car/buffer-reader'` 21 | * (`const { CarBufferReader } = require('@ipld/car/buffer-reader')`). Or 22 | * `import { CarBufferReader } from '@ipld/car'` (`const { CarBufferReader } = require('@ipld/car')`). 23 | * The former will likely result in smaller bundle sizes where this is 24 | * important. 25 | * 26 | * @name CarBufferReader 27 | * @class 28 | * @implements {ICarBufferReader} 29 | * @property {number} version The version number of the CAR referenced by this 30 | * reader (should be `1` or `2`). 31 | */ 32 | export class CarBufferReader { 33 | /** 34 | * @constructs CarBufferReader 35 | * @param {CarHeader|CarV2Header} header 36 | * @param {Block[]} blocks 37 | */ 38 | constructor (header, blocks) { 39 | this._header = header 40 | this._blocks = blocks 41 | this._cids = undefined 42 | } 43 | 44 | /** 45 | * @property {number} version of the CAR 46 | * @memberof CarBufferReader 47 | * @instance 48 | */ 49 | get version () { 50 | return this._header.version 51 | } 52 | 53 | /** 54 | * Get the list of roots defined by the CAR referenced by this reader. May be 55 | * zero or more `CID`s. 56 | * 57 | * @function 58 | * @memberof CarBufferReader 59 | * @instance 60 | * @returns {CID[]} 61 | */ 62 | getRoots () { 63 | return this._header.roots 64 | } 65 | 66 | /** 67 | * Check whether a given `CID` exists within the CAR referenced by this 68 | * reader. 
69 | * 70 | * @function 71 | * @memberof CarBufferReader 72 | * @instance 73 | * @param {CID} key 74 | * @returns {boolean} 75 | */ 76 | has (key) { 77 | return this._blocks.some(b => b.cid.equals(key)) 78 | } 79 | 80 | /** 81 | * Fetch a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) from the CAR 82 | * referenced by this reader matching the provided `CID`. In the case where 83 | * the provided `CID` doesn't exist within the CAR, `undefined` will be 84 | * returned. 85 | * 86 | * @function 87 | * @memberof CarBufferReader 88 | * @instance 89 | * @param {CID} key 90 | * @returns {Block | undefined} 91 | */ 92 | get (key) { 93 | return this._blocks.find(b => b.cid.equals(key)) 94 | } 95 | 96 | /** 97 | * Returns a `Block[]` of the `Block`s (`{ cid:CID, bytes:Uint8Array }` pairs) contained within 98 | * the CAR referenced by this reader. 99 | * 100 | * @function 101 | * @memberof CarBufferReader 102 | * @instance 103 | * @returns {Block[]} 104 | */ 105 | blocks () { 106 | return this._blocks 107 | } 108 | 109 | /** 110 | * Returns a `CID[]` of the `CID`s contained within the CAR referenced by this reader. 111 | * 112 | * @function 113 | * @memberof CarBufferReader 114 | * @instance 115 | * @returns {CID[]} 116 | */ 117 | cids () { 118 | if (!this._cids) { 119 | this._cids = this._blocks.map(b => b.cid) 120 | } 121 | return this._cids 122 | } 123 | 124 | /** 125 | * Instantiate a {@link CarBufferReader} from a `Uint8Array` blob. This performs a 126 | * decode fully in memory and maintains the decoded state in memory for full 127 | * access to the data via the `CarReader` API. 128 | * 129 | * @static 130 | * @memberof CarBufferReader 131 | * @param {Uint8Array} bytes 132 | * @returns {CarBufferReader} 133 | */ 134 | static fromBytes (bytes) { 135 | if (!(bytes instanceof Uint8Array)) { 136 | throw new TypeError('fromBytes() requires a Uint8Array') 137 | } 138 | 139 | const { header, blocks } = BufferDecoder.fromBytes(bytes) 140 | return new CarBufferReader(header, blocks) 141 | } 142 | } 143 | 144 | export const __browser = true 145 | -------------------------------------------------------------------------------- /src/buffer-reader.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { CarBufferReader as BrowserCarBufferReader } from './buffer-reader-browser.js' 3 | 4 | /** 5 | * @typedef {import('./api.js').Block} Block 6 | * @typedef {import('./api.js').BlockIndex} BlockIndex 7 | * @typedef {import('./api.js').CarBufferReader} ICarBufferReader 8 | */ 9 | 10 | const fsread = fs.readSync 11 | 12 | /** 13 | * @class 14 | * @implements {ICarBufferReader} 15 | */ 16 | export class CarBufferReader extends BrowserCarBufferReader { 17 | /** 18 | * Reads a block directly from a file descriptor for an open CAR file. This 19 | * function is **only available in Node.js** and not a browser environment. 20 | * 21 | * This function can be used in connection with {@link CarIndexer} which emits 22 | * the `BlockIndex` objects that are required by this function. 23 | * 24 | * The user is responsible for opening and closing the file used in this call. 25 | * 26 | * @static 27 | * @memberof CarBufferReader 28 | * @param {number} fd - A file descriptor from the 29 | * Node.js `fs` module. An integer, from `fs.open()`. 30 | * @param {BlockIndex} blockIndex - An index pointing to the location of the 31 | * Block required. This `BlockIndex` should take the form: 32 | * `{cid:CID, blockLength:number, blockOffset:number}`. 
33 | * @returns {Block} A `{ cid:CID, bytes:Uint8Array }` pair. 34 | */ 35 | static readRaw (fd, blockIndex) { 36 | const { cid, blockLength, blockOffset } = blockIndex 37 | const bytes = new Uint8Array(blockLength) 38 | let read 39 | if (typeof fd === 'number') { 40 | read = fsread(fd, bytes, 0, blockLength, blockOffset) 41 | } else { 42 | throw new TypeError('Bad fd') 43 | } 44 | if (read !== blockLength) { 45 | throw new Error(`Failed to read entire block (${read} instead of ${blockLength})`) 46 | } 47 | return { cid, bytes } 48 | } 49 | } 50 | 51 | export const __browser = false 52 | -------------------------------------------------------------------------------- /src/buffer-writer.js: -------------------------------------------------------------------------------- 1 | import * as CBOR from '@ipld/dag-cbor' 2 | import { Token, Type } from 'cborg' 3 | import { tokensToLength } from 'cborg/length' 4 | import varint from 'varint' 5 | 6 | /** 7 | * @typedef {import('./api.js').CID} CID 8 | * @typedef {import('./api.js').Block} Block 9 | * @typedef {import('./api.js').CarBufferWriter} Writer 10 | * @typedef {import('./api.js').CarBufferWriterOptions} Options 11 | * @typedef {import('./coding.js').CarEncoder} CarEncoder 12 | */ 13 | 14 | /** 15 | * A simple CAR writer that writes to a pre-allocated buffer. 16 | * 17 | * @class 18 | * @name CarBufferWriter 19 | * @implements {Writer} 20 | */ 21 | class CarBufferWriter { 22 | /** 23 | * @param {Uint8Array} bytes 24 | * @param {number} headerSize 25 | */ 26 | constructor (bytes, headerSize) { 27 | /** @readonly */ 28 | this.bytes = bytes 29 | this.byteOffset = headerSize 30 | 31 | /** 32 | * @readonly 33 | * @type {CID[]} 34 | */ 35 | this.roots = [] 36 | this.headerSize = headerSize 37 | } 38 | 39 | /** 40 | * Add a root to this writer, to be used to create a header when the CAR is 41 | * finalized with {@link CarBufferWriter.close `close()`} 42 | * 43 | * @param {CID} root 44 | * @param {{resize?:boolean}} [options] 45 | * @returns {CarBufferWriter} 46 | */ 47 | addRoot (root, options) { 48 | addRoot(this, root, options) 49 | return this 50 | } 51 | 52 | /** 53 | * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive. 54 | * Throws if there is not enough capacity. 55 | * 56 | * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair. 57 | * @returns {CarBufferWriter} 58 | */ 59 | write (block) { 60 | addBlock(this, block) 61 | return this 62 | } 63 | 64 | /** 65 | * Finalize the CAR and return it as a `Uint8Array`. 66 | * 67 | * @param {object} [options] 68 | * @param {boolean} [options.resize] 69 | * @returns {Uint8Array} 70 | */ 71 | close (options) { 72 | return close(this, options) 73 | } 74 | } 75 | 76 | /** 77 | * @param {CarBufferWriter} writer 78 | * @param {CID} root 79 | * @param {{resize?:boolean}} [options] 80 | */ 81 | export const addRoot = (writer, root, options = {}) => { 82 | const { resize = false } = options 83 | const { bytes, headerSize, byteOffset, roots } = writer 84 | writer.roots.push(root) 85 | const size = headerLength(writer) 86 | // If there is not enough space for the new root 87 | if (size > headerSize) { 88 | // Check if we root would fit if we were to resize the head. 
89 | if (size - headerSize + byteOffset < bytes.byteLength) { 90 | // If resize is enabled resize head 91 | if (resize) { 92 | resizeHeader(writer, size) 93 | // otherwise remove head and throw an error suggesting to resize 94 | } else { 95 | roots.pop() 96 | throw new RangeError(`Header of size ${headerSize} has no capacity for new root ${root}. 97 | However there is a space in the buffer and you could call addRoot(root, { resize: root }) to resize header to make a space for this root.`) 98 | } 99 | // If head would not fit even with resize pop new root and throw error 100 | } else { 101 | roots.pop() 102 | throw new RangeError(`Buffer has no capacity for a new root ${root}`) 103 | } 104 | } 105 | } 106 | 107 | /** 108 | * Calculates number of bytes required for storing given block in CAR. Useful in 109 | * estimating size of an `ArrayBuffer` for the `CarBufferWriter`. 110 | * 111 | * @name CarBufferWriter.blockLength(Block) 112 | * @param {Block} block 113 | * @returns {number} 114 | */ 115 | export const blockLength = ({ cid, bytes }) => { 116 | const size = cid.bytes.byteLength + bytes.byteLength 117 | return varint.encodingLength(size) + size 118 | } 119 | 120 | /** 121 | * @param {CarBufferWriter} writer 122 | * @param {Block} block 123 | */ 124 | export const addBlock = (writer, { cid, bytes }) => { 125 | const byteLength = cid.bytes.byteLength + bytes.byteLength 126 | const size = varint.encode(byteLength) 127 | if (writer.byteOffset + size.length + byteLength > writer.bytes.byteLength) { 128 | throw new RangeError('Buffer has no capacity for this block') 129 | } else { 130 | writeBytes(writer, size) 131 | writeBytes(writer, cid.bytes) 132 | writeBytes(writer, bytes) 133 | } 134 | } 135 | 136 | /** 137 | * @param {CarBufferWriter} writer 138 | * @param {object} [options] 139 | * @param {boolean} [options.resize] 140 | */ 141 | export const close = (writer, options = {}) => { 142 | const { resize = false } = options 143 | const { roots, bytes, byteOffset, headerSize } = writer 144 | 145 | const headerBytes = CBOR.encode({ version: 1, roots }) 146 | const varintBytes = varint.encode(headerBytes.length) 147 | 148 | const size = varintBytes.length + headerBytes.byteLength 149 | const offset = headerSize - size 150 | 151 | // If header size estimate was accurate we just write header and return 152 | // view into buffer. 153 | if (offset === 0) { 154 | writeHeader(writer, varintBytes, headerBytes) 155 | return bytes.subarray(0, byteOffset) 156 | // If header was overestimated and `{resize: true}` is passed resize header 157 | } else if (resize) { 158 | resizeHeader(writer, size) 159 | writeHeader(writer, varintBytes, headerBytes) 160 | return bytes.subarray(0, writer.byteOffset) 161 | } else { 162 | throw new RangeError(`Header size was overestimated. 
163 | You can use close({ resize: true }) to resize header`) 164 | } 165 | } 166 | 167 | /** 168 | * @param {CarBufferWriter} writer 169 | * @param {number} byteLength 170 | */ 171 | export const resizeHeader = (writer, byteLength) => { 172 | const { bytes, headerSize } = writer 173 | // Move data section to a new offset 174 | bytes.set(bytes.subarray(headerSize, writer.byteOffset), byteLength) 175 | // Update header size & byteOffset 176 | writer.byteOffset += byteLength - headerSize 177 | writer.headerSize = byteLength 178 | } 179 | 180 | /** 181 | * @param {CarBufferWriter} writer 182 | * @param {number[]|Uint8Array} bytes 183 | */ 184 | 185 | const writeBytes = (writer, bytes) => { 186 | writer.bytes.set(bytes, writer.byteOffset) 187 | writer.byteOffset += bytes.length 188 | } 189 | /** 190 | * @param {{bytes:Uint8Array}} writer 191 | * @param {number[]} varint 192 | * @param {Uint8Array} header 193 | */ 194 | const writeHeader = ({ bytes }, varint, header) => { 195 | bytes.set(varint) 196 | bytes.set(header, varint.length) 197 | } 198 | 199 | const headerPreludeTokens = [ 200 | new Token(Type.map, 2), 201 | new Token(Type.string, 'version'), 202 | new Token(Type.uint, 1), 203 | new Token(Type.string, 'roots') 204 | ] 205 | 206 | const CID_TAG = new Token(Type.tag, 42) 207 | 208 | /** 209 | * Calculates header size given the array of byteLength for roots. 210 | * 211 | * @name CarBufferWriter.calculateHeaderLength(rootLengths) 212 | * @param {number[]} rootLengths 213 | * @returns {number} 214 | */ 215 | export const calculateHeaderLength = (rootLengths) => { 216 | const tokens = [...headerPreludeTokens] 217 | tokens.push(new Token(Type.array, rootLengths.length)) 218 | for (const rootLength of rootLengths) { 219 | tokens.push(CID_TAG) 220 | tokens.push(new Token(Type.bytes, { length: rootLength + 1 })) 221 | } 222 | const length = tokensToLength(tokens) // no options needed here because we have simple tokens 223 | return varint.encodingLength(length) + length 224 | } 225 | 226 | /** 227 | * Calculates header size given the array of roots. 228 | * 229 | * @name CarBufferWriter.headerLength({ roots }) 230 | * @param {object} options 231 | * @param {CID[]} options.roots 232 | * @returns {number} 233 | */ 234 | export const headerLength = ({ roots }) => 235 | calculateHeaderLength(roots.map(cid => cid.bytes.byteLength)) 236 | 237 | /** 238 | * Estimates header size given a count of the roots and the expected byte length 239 | * of the root CIDs. The default length works for a standard CIDv1 with a 240 | * single-byte multihash code, such as SHA2-256 (i.e. the most common CIDv1). 241 | * 242 | * @name CarBufferWriter.estimateHeaderLength(rootCount[, rootByteLength]) 243 | * @param {number} rootCount 244 | * @param {number} [rootByteLength] 245 | * @returns {number} 246 | */ 247 | export const estimateHeaderLength = (rootCount, rootByteLength = 36) => 248 | calculateHeaderLength(new Array(rootCount).fill(rootByteLength)) 249 | 250 | /** 251 | * Creates synchronous CAR writer that can be used to encode blocks into a given 252 | * buffer. Optionally you could pass `byteOffset` and `byteLength` to specify a 253 | * range inside buffer to write into. If car file is going to have `roots` you 254 | * need to either pass them under `options.roots` (from which header size will 255 | * be calculated) or provide `options.headerSize` to allocate required space 256 | * in the buffer. 
You may also provide known `roots` and `headerSize` to 257 | * allocate space for the roots that may not be known ahead of time. 258 | * 259 | * Note: Incorrect `headerSize` may lead to copying bytes inside a buffer 260 | * which will have a negative impact on performance. 261 | * 262 | * @name CarBufferWriter.createWriter(buffer[, options]) 263 | * @param {ArrayBuffer} buffer 264 | * @param {object} [options] 265 | * @param {CID[]} [options.roots] 266 | * @param {number} [options.byteOffset] 267 | * @param {number} [options.byteLength] 268 | * @param {number} [options.headerSize] 269 | * @returns {CarBufferWriter} 270 | */ 271 | export const createWriter = (buffer, options = {}) => { 272 | const { 273 | roots = [], 274 | byteOffset = 0, 275 | byteLength = buffer.byteLength, 276 | headerSize = headerLength({ roots }) 277 | } = options 278 | const bytes = new Uint8Array(buffer, byteOffset, byteLength) 279 | 280 | const writer = new CarBufferWriter(bytes, headerSize) 281 | for (const root of roots) { 282 | writer.addRoot(root) 283 | } 284 | 285 | return writer 286 | } 287 | -------------------------------------------------------------------------------- /src/coding.ts: -------------------------------------------------------------------------------- 1 | import type { Block, BlockIndex } from './api.js' 2 | import type { CID } from 'multiformats/cid' 3 | 4 | export interface CarEncoder { 5 | setRoots(roots: CID[]): Promise 6 | 7 | writeBlock(block: Block): Promise 8 | 9 | close(): Promise 10 | 11 | version(): number 12 | } 13 | 14 | export interface IteratorChannel_Writer { 15 | write(chunk: T): Promise 16 | end(): Promise 17 | } 18 | 19 | export interface IteratorChannel { 20 | writer: IteratorChannel_Writer 21 | 22 | iterator: AsyncIterator 23 | } 24 | 25 | export interface CarHeader { 26 | version: 1 27 | roots: CID[] 28 | } 29 | 30 | export interface CarV2FixedHeader { 31 | characteristics: [bigint, bigint] 32 | dataOffset: number 33 | dataSize: number 34 | indexOffset: number 35 | } 36 | 37 | export interface CarV2Header extends CarV2FixedHeader { 38 | version: 2 39 | roots: CID[] 40 | } 41 | 42 | export interface CarDecoder { 43 | header(): Promise 44 | 45 | blocks(): AsyncGenerator 46 | 47 | blocksIndex(): AsyncGenerator 48 | } 49 | 50 | export interface Seekable { 51 | seek(length: number): void 52 | } 53 | 54 | export interface BytesReader extends Seekable { 55 | upTo(length: number): Promise 56 | 57 | exactly(length: number, seek?: boolean): Promise 58 | 59 | pos: number 60 | } 61 | 62 | export interface BytesBufferReader extends Seekable { 63 | upTo(length: number): Uint8Array 64 | 65 | exactly(length: number, seek?: boolean): Uint8Array 66 | 67 | pos: number 68 | } 69 | -------------------------------------------------------------------------------- /src/decoder-common.js: -------------------------------------------------------------------------------- 1 | import varint from 'varint' 2 | 3 | export const CIDV0_BYTES = { 4 | SHA2_256: 0x12, 5 | LENGTH: 0x20, 6 | DAG_PB: 0x70 7 | } 8 | 9 | export const V2_HEADER_LENGTH = /* characteristics */ 16 /* v1 offset */ + 8 /* v1 size */ + 8 /* index offset */ + 8 10 | 11 | /** 12 | * Decodes varint and seeks the buffer 13 | * 14 | * ```js 15 | * // needs bytes to be read first 16 | * const bytes = reader.upTo(8) // maybe async 17 | * ``` 18 | * 19 | * @param {Uint8Array} bytes 20 | * @param {import('./coding.js').Seekable} seeker 21 | * @returns {number} 22 | */ 23 | export function decodeVarint (bytes, seeker) { 24 | if (!bytes.length) { 25 
| throw new Error('Unexpected end of data') 26 | } 27 | const i = varint.decode(bytes) 28 | seeker.seek(/** @type {number} */(varint.decode.bytes)) 29 | return i 30 | } 31 | 32 | /** 33 | * Decode v2 header 34 | * 35 | * ```js 36 | * // needs bytes to be read first 37 | * const bytes = reader.exactly(V2_HEADER_LENGTH, true) // maybe async 38 | * ``` 39 | * 40 | * @param {Uint8Array} bytes 41 | * @returns {import('./coding.js').CarV2FixedHeader} 42 | */ 43 | export function decodeV2Header (bytes) { 44 | const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) 45 | let offset = 0 46 | const header = { 47 | version: 2, 48 | /** @type {[bigint, bigint]} */ 49 | characteristics: [ 50 | dv.getBigUint64(offset, true), 51 | dv.getBigUint64(offset += 8, true) 52 | ], 53 | dataOffset: Number(dv.getBigUint64(offset += 8, true)), 54 | dataSize: Number(dv.getBigUint64(offset += 8, true)), 55 | indexOffset: Number(dv.getBigUint64(offset += 8, true)) 56 | } 57 | return header 58 | } 59 | 60 | /** 61 | * Checks the length of the multihash to be read afterwards 62 | * 63 | * ```js 64 | * // needs bytes to be read first 65 | * const bytes = reader.upTo(8) // maybe async 66 | * ``` 67 | * 68 | * @param {Uint8Array} bytes 69 | */ 70 | export function getMultihashLength (bytes) { 71 | // | code | length | .... | 72 | // where both code and length are varints, so we have to decode 73 | // them first before we can know total length 74 | 75 | varint.decode(bytes) // code 76 | const codeLength = /** @type {number} */(varint.decode.bytes) 77 | const length = varint.decode(bytes.subarray(varint.decode.bytes)) 78 | const lengthLength = /** @type {number} */(varint.decode.bytes) 79 | const mhLength = codeLength + lengthLength + length 80 | 81 | return mhLength 82 | } 83 | -------------------------------------------------------------------------------- /src/decoder.js: -------------------------------------------------------------------------------- 1 | import { decode as decodeDagCbor } from '@ipld/dag-cbor' 2 | import { CID } from 'multiformats/cid' 3 | import * as Digest from 'multiformats/hashes/digest' 4 | import { CIDV0_BYTES, decodeV2Header, decodeVarint, getMultihashLength, V2_HEADER_LENGTH } from './decoder-common.js' 5 | import { CarV1HeaderOrV2Pragma } from './header-validator.js' 6 | 7 | /** 8 | * @typedef {import('./api.js').Block} Block 9 | * @typedef {import('./api.js').BlockHeader} BlockHeader 10 | * @typedef {import('./api.js').BlockIndex} BlockIndex 11 | * @typedef {import('./coding.js').BytesReader} BytesReader 12 | * @typedef {import('./coding.js').CarHeader} CarHeader 13 | * @typedef {import('./coding.js').CarV2Header} CarV2Header 14 | * @typedef {import('./coding.js').CarV2FixedHeader} CarV2FixedHeader 15 | * @typedef {import('./coding.js').CarDecoder} CarDecoder 16 | */ 17 | 18 | /** 19 | * Reads header data from a `BytesReader`. The header may either be in the form 20 | * of a `CarHeader` or `CarV2Header` depending on the CAR being read. 
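 *
 * A minimal usage sketch (assuming `carBytes` is a hypothetical `Uint8Array`
 * containing a CAR archive; `bytesReader()` is the companion function exported
 * from this module):
 *
 * ```js
 * import { bytesReader, readHeader } from './decoder.js'
 *
 * const reader = bytesReader(carBytes)
 * const header = await readHeader(reader)
 * // header.version will be 1 or 2; header.roots is an array of CIDs
 * ```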
21 | * 22 | * @name async decoder.readHeader(reader) 23 | * @param {BytesReader} reader 24 | * @param {number} [strictVersion] 25 | * @returns {Promise} 26 | */ 27 | export async function readHeader (reader, strictVersion) { 28 | const length = decodeVarint(await reader.upTo(8), reader) 29 | if (length === 0) { 30 | throw new Error('Invalid CAR header (zero length)') 31 | } 32 | const header = await reader.exactly(length, true) 33 | const block = decodeDagCbor(header) 34 | if (CarV1HeaderOrV2Pragma.toTyped(block) === undefined) { 35 | throw new Error('Invalid CAR header format') 36 | } 37 | if ((block.version !== 1 && block.version !== 2) || (strictVersion !== undefined && block.version !== strictVersion)) { 38 | throw new Error(`Invalid CAR version: ${block.version}${strictVersion !== undefined ? ` (expected ${strictVersion})` : ''}`) 39 | } 40 | if (block.version === 1) { 41 | // CarV1HeaderOrV2Pragma makes roots optional, let's make it mandatory 42 | if (!Array.isArray(block.roots)) { 43 | throw new Error('Invalid CAR header format') 44 | } 45 | return block 46 | } 47 | // version 2 48 | if (block.roots !== undefined) { 49 | throw new Error('Invalid CAR header format') 50 | } 51 | const v2Header = decodeV2Header(await reader.exactly(V2_HEADER_LENGTH, true)) 52 | reader.seek(v2Header.dataOffset - reader.pos) 53 | const v1Header = await readHeader(reader, 1) 54 | return Object.assign(v1Header, v2Header) 55 | } 56 | 57 | /** 58 | * @param {BytesReader} reader 59 | * @returns {Promise} 60 | */ 61 | async function readCid (reader) { 62 | const first = await reader.exactly(2, false) 63 | if (first[0] === CIDV0_BYTES.SHA2_256 && first[1] === CIDV0_BYTES.LENGTH) { 64 | // cidv0 32-byte sha2-256 65 | const bytes = await reader.exactly(34, true) 66 | const multihash = Digest.decode(bytes) 67 | return CID.create(0, CIDV0_BYTES.DAG_PB, multihash) 68 | } 69 | 70 | const version = decodeVarint(await reader.upTo(8), reader) 71 | if (version !== 1) { 72 | throw new Error(`Unexpected CID version (${version})`) 73 | } 74 | const codec = decodeVarint(await reader.upTo(8), reader) 75 | const bytes = await reader.exactly(getMultihashLength(await reader.upTo(8)), true) 76 | const multihash = Digest.decode(bytes) 77 | return CID.create(version, codec, multihash) 78 | } 79 | 80 | /** 81 | * Reads the leading data of an individual block from CAR data from a 82 | * `BytesReader`. Returns a `BlockHeader` object which contains 83 | * `{ cid, length, blockLength }` which can be used to either index the block 84 | * or read the block binary data. 
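 *
 * A rough sketch of indexing or reading a single section (assuming `reader` is
 * a `BytesReader` positioned at the start of a block section, e.g. right after
 * `readHeader()` has consumed the header):
 *
 * ```js
 * const { cid, length, blockLength } = await readBlockHead(reader)
 * // either record { cid, blockLength } plus the current reader position as an
 * // index entry, or read the block binary data directly:
 * const bytes = await reader.exactly(blockLength, true)
 * ```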
85 | * 86 | * @name async decoder.readBlockHead(reader) 87 | * @param {BytesReader} reader 88 | * @returns {Promise} 89 | */ 90 | export async function readBlockHead (reader) { 91 | // length includes a CID + Binary, where CID has a variable length 92 | // we have to deal with 93 | const start = reader.pos 94 | let length = decodeVarint(await reader.upTo(8), reader) 95 | if (length === 0) { 96 | throw new Error('Invalid CAR section (zero length)') 97 | } 98 | length += (reader.pos - start) 99 | const cid = await readCid(reader) 100 | const blockLength = length - Number(reader.pos - start) // subtract CID length 101 | 102 | return { cid, length, blockLength } 103 | } 104 | 105 | /** 106 | * @param {BytesReader} reader 107 | * @returns {Promise} 108 | */ 109 | async function readBlock (reader) { 110 | const { cid, blockLength } = await readBlockHead(reader) 111 | const bytes = await reader.exactly(blockLength, true) 112 | return { bytes, cid } 113 | } 114 | 115 | /** 116 | * @param {BytesReader} reader 117 | * @returns {Promise} 118 | */ 119 | async function readBlockIndex (reader) { 120 | const offset = reader.pos 121 | const { cid, length, blockLength } = await readBlockHead(reader) 122 | const index = { cid, length, blockLength, offset, blockOffset: reader.pos } 123 | reader.seek(index.blockLength) 124 | return index 125 | } 126 | 127 | /** 128 | * Creates a `CarDecoder` from a `BytesReader`. The `CarDecoder` is as async 129 | * interface that will consume the bytes from the `BytesReader` to yield a 130 | * `header()` and either `blocks()` or `blocksIndex()` data. 131 | * 132 | * @name decoder.createDecoder(reader) 133 | * @param {BytesReader} reader 134 | * @returns {CarDecoder} 135 | */ 136 | export function createDecoder (reader) { 137 | const headerPromise = (async () => { 138 | const header = await readHeader(reader) 139 | if (header.version === 2) { 140 | const v1length = reader.pos - header.dataOffset 141 | reader = limitReader(reader, header.dataSize - v1length) 142 | } 143 | return header 144 | })() 145 | 146 | return { 147 | header: () => headerPromise, 148 | 149 | async * blocks () { 150 | await headerPromise 151 | while ((await reader.upTo(8)).length > 0) { 152 | yield await readBlock(reader) 153 | } 154 | }, 155 | 156 | async * blocksIndex () { 157 | await headerPromise 158 | while ((await reader.upTo(8)).length > 0) { 159 | yield await readBlockIndex(reader) 160 | } 161 | } 162 | } 163 | } 164 | 165 | /** 166 | * Creates a `BytesReader` from a `Uint8Array`. 
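 *
 * For illustration only, the returned reader exposes `upTo()`, `exactly()`,
 * `seek()` and `pos` over the in-memory bytes:
 *
 * ```js
 * const reader = bytesReader(new Uint8Array([1, 2, 3, 4]))
 * const peeked = await reader.upTo(8) // up to 8 bytes without advancing, here all 4
 * const taken = await reader.exactly(2, true) // exactly 2 bytes, advancing the position
 * console.log(reader.pos) // 2
 * ```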
167 | * 168 | * @name decoder.bytesReader(bytes) 169 | * @param {Uint8Array} bytes 170 | * @returns {BytesReader} 171 | */ 172 | export function bytesReader (bytes) { 173 | let pos = 0 174 | 175 | /** @type {BytesReader} */ 176 | return { 177 | async upTo (length) { 178 | const out = bytes.subarray(pos, pos + Math.min(length, bytes.length - pos)) 179 | return out 180 | }, 181 | 182 | async exactly (length, seek = false) { 183 | if (length > bytes.length - pos) { 184 | throw new Error('Unexpected end of data') 185 | } 186 | const out = bytes.subarray(pos, pos + length) 187 | if (seek) { 188 | pos += length 189 | } 190 | return out 191 | }, 192 | 193 | seek (length) { 194 | pos += length 195 | }, 196 | 197 | get pos () { 198 | return pos 199 | } 200 | } 201 | } 202 | 203 | /** 204 | * reusable reader for streams and files, we just need a way to read an 205 | * additional chunk (of some undetermined size) and a way to close the 206 | * reader when finished 207 | * 208 | * @param {() => Promise} readChunk 209 | * @returns {BytesReader} 210 | */ 211 | export function chunkReader (readChunk /*, closer */) { 212 | let pos = 0 213 | let have = 0 214 | let offset = 0 215 | let currentChunk = new Uint8Array(0) 216 | 217 | const read = async (/** @type {number} */ length) => { 218 | have = currentChunk.length - offset 219 | const bufa = /** @type {Uint8Array[]} */([currentChunk.subarray(offset)]) 220 | while (have < length) { 221 | const chunk = await readChunk() 222 | if (chunk == null) { 223 | break 224 | } 225 | /* c8 ignore next 8 */ 226 | // undo this ignore ^ when we have a fd implementation that can seek() 227 | if (have < 0) { // because of a seek() 228 | /* c8 ignore next 4 */ 229 | // toohard to test the else 230 | if (chunk.length > have) { 231 | bufa.push(chunk.subarray(-have)) 232 | } // else discard 233 | } else { 234 | bufa.push(chunk) 235 | } 236 | have += chunk.length 237 | } 238 | currentChunk = new Uint8Array(bufa.reduce((p, c) => p + c.length, 0)) 239 | let off = 0 240 | for (const b of bufa) { 241 | currentChunk.set(b, off) 242 | off += b.length 243 | } 244 | offset = 0 245 | } 246 | 247 | /** @type {BytesReader} */ 248 | return { 249 | async upTo (length) { 250 | if (currentChunk.length - offset < length) { 251 | await read(length) 252 | } 253 | return currentChunk.subarray(offset, offset + Math.min(currentChunk.length - offset, length)) 254 | }, 255 | 256 | async exactly (length, seek = false) { 257 | if (currentChunk.length - offset < length) { 258 | await read(length) 259 | } 260 | if (currentChunk.length - offset < length) { 261 | throw new Error('Unexpected end of data') 262 | } 263 | const out = currentChunk.subarray(offset, offset + length) 264 | if (seek) { 265 | pos += length 266 | offset += length 267 | } 268 | return out 269 | }, 270 | 271 | seek (length) { 272 | pos += length 273 | offset += length 274 | }, 275 | 276 | get pos () { 277 | return pos 278 | } 279 | } 280 | } 281 | 282 | /** 283 | * Creates a `BytesReader` from an `AsyncIterable`, which allows for 284 | * consumption of CAR data from a streaming source. 
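 *
 * A sketch of wiring this to a Node.js file stream (the `example.car` path is
 * hypothetical; any `AsyncIterable` yielding `Uint8Array` chunks works), using
 * `createDecoder()` from this module:
 *
 * ```js
 * import fs from 'fs'
 * import { asyncIterableReader, createDecoder } from './decoder.js'
 *
 * const reader = asyncIterableReader(fs.createReadStream('example.car'))
 * const decoder = createDecoder(reader)
 * console.log(await decoder.header())
 * ```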
285 | * 286 | * @name decoder.asyncIterableReader(asyncIterable) 287 | * @param {AsyncIterable} asyncIterable 288 | * @returns {BytesReader} 289 | */ 290 | export function asyncIterableReader (asyncIterable) { 291 | const iterator = asyncIterable[Symbol.asyncIterator]() 292 | 293 | async function readChunk () { 294 | const next = await iterator.next() 295 | if (next.done) { 296 | return null 297 | } 298 | return next.value 299 | } 300 | 301 | return chunkReader(readChunk) 302 | } 303 | 304 | /** 305 | * Wraps a `BytesReader` in a limiting `BytesReader` which limits maximum read 306 | * to `byteLimit` bytes. It _does not_ update `pos` of the original 307 | * `BytesReader`. 308 | * 309 | * @name decoder.limitReader(reader, byteLimit) 310 | * @param {BytesReader} reader 311 | * @param {number} byteLimit 312 | * @returns {BytesReader} 313 | */ 314 | export function limitReader (reader, byteLimit) { 315 | let bytesRead = 0 316 | 317 | /** @type {BytesReader} */ 318 | return { 319 | async upTo (length) { 320 | let bytes = await reader.upTo(length) 321 | if (bytes.length + bytesRead > byteLimit) { 322 | bytes = bytes.subarray(0, byteLimit - bytesRead) 323 | } 324 | return bytes 325 | }, 326 | 327 | async exactly (length, seek = false) { 328 | const bytes = await reader.exactly(length, seek) 329 | if (bytes.length + bytesRead > byteLimit) { 330 | throw new Error('Unexpected end of data') 331 | } 332 | if (seek) { 333 | bytesRead += length 334 | } 335 | return bytes 336 | }, 337 | 338 | seek (length) { 339 | bytesRead += length 340 | reader.seek(length) 341 | }, 342 | 343 | get pos () { 344 | return reader.pos 345 | } 346 | } 347 | } 348 | -------------------------------------------------------------------------------- /src/encoder.js: -------------------------------------------------------------------------------- 1 | import { encode as dagCborEncode } from '@ipld/dag-cbor' 2 | import varint from 'varint' 3 | 4 | /** 5 | * @typedef {import('multiformats').CID} CID 6 | * @typedef {import('./api.js').Block} Block 7 | * @typedef {import('./coding.js').CarEncoder} CarEncoder 8 | * @typedef {import('./coding.js').IteratorChannel_Writer} IteratorChannel_Writer 9 | */ 10 | 11 | const CAR_V1_VERSION = 1 12 | 13 | /** 14 | * Create a header from an array of roots. 
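 *
 * The returned bytes are a varint length prefix followed by the DAG-CBOR
 * encoded `{ version: 1, roots }` map. A small sketch (assuming `root` is a
 * `CID`):
 *
 * ```js
 * const headerBytes = createHeader([root])
 * // headerBytes form the start of a CARv1 stream, before any block sections
 * ```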
15 | * 16 | * @param {CID[]} roots 17 | * @returns {Uint8Array} 18 | */ 19 | export function createHeader (roots) { 20 | const headerBytes = dagCborEncode({ version: CAR_V1_VERSION, roots }) 21 | const varintBytes = varint.encode(headerBytes.length) 22 | const header = new Uint8Array(varintBytes.length + headerBytes.length) 23 | header.set(varintBytes, 0) 24 | header.set(headerBytes, varintBytes.length) 25 | return header 26 | } 27 | 28 | /** 29 | * @param {IteratorChannel_Writer} writer 30 | * @returns {CarEncoder} 31 | */ 32 | function createEncoder (writer) { 33 | // none of this is wrapped in a mutex, that needs to happen above this to 34 | // avoid overwrites 35 | 36 | return { 37 | /** 38 | * @param {CID[]} roots 39 | * @returns {Promise} 40 | */ 41 | async setRoots (roots) { 42 | const bytes = createHeader(roots) 43 | await writer.write(bytes) 44 | }, 45 | 46 | /** 47 | * @param {Block} block 48 | * @returns {Promise} 49 | */ 50 | async writeBlock (block) { 51 | const { cid, bytes } = block 52 | await writer.write(new Uint8Array(varint.encode(cid.bytes.length + bytes.length))) 53 | await writer.write(cid.bytes) 54 | if (bytes.length) { 55 | // zero-length blocks are valid, but it'd be safer if we didn't write them 56 | await writer.write(bytes) 57 | } 58 | }, 59 | 60 | /** 61 | * @returns {Promise} 62 | */ 63 | async close () { 64 | await writer.end() 65 | }, 66 | 67 | /** 68 | * @returns {number} 69 | */ 70 | version () { 71 | return CAR_V1_VERSION 72 | } 73 | } 74 | } 75 | 76 | export { createEncoder } 77 | -------------------------------------------------------------------------------- /src/header-validator.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable jsdoc/check-indentation, max-depth */ 2 | 3 | /** 4 | * Auto-generated with @ipld/schema@v4.2.0 at Thu Sep 14 2023 from IPLD Schema: 5 | * 6 | * # CarV1HeaderOrV2Pragma is a more relaxed form, and can parse {version:x} where 7 | * # roots are optional. This is typically useful for the {verison:2} CARv2 8 | * # pragma. 9 | * 10 | * type CarV1HeaderOrV2Pragma struct { 11 | * roots optional [&Any] 12 | * # roots is _not_ optional for CarV1 but we defer that check within code to 13 | * # gracefully handle the V2 case where it's just {version:X} 14 | * version Int 15 | * } 16 | * 17 | * # CarV1Header is the strict form of the header, and requires roots to be 18 | * # present. This is compatible with the CARv1 specification. 19 | * 20 | * # type CarV1Header struct { 21 | * # roots [&Any] 22 | * # version Int 23 | * # } 24 | * 25 | */ 26 | 27 | const Kinds = { 28 | Null: /** 29 | * @param obj 30 | * @returns {undefined|null} 31 | */ (/** @type {any} */ obj) => obj === null ? obj : undefined, 32 | Int: /** 33 | * @param obj 34 | * @returns {undefined|number} 35 | */ (/** @type {any} */ obj) => Number.isInteger(obj) ? obj : undefined, 36 | Float: /** 37 | * @param obj 38 | * @returns {undefined|number} 39 | */ (/** @type {any} */ obj) => typeof obj === 'number' && Number.isFinite(obj) ? obj : undefined, 40 | String: /** 41 | * @param obj 42 | * @returns {undefined|string} 43 | */ (/** @type {any} */ obj) => typeof obj === 'string' ? obj : undefined, 44 | Bool: /** 45 | * @param obj 46 | * @returns {undefined|boolean} 47 | */ (/** @type {any} */ obj) => typeof obj === 'boolean' ? obj : undefined, 48 | Bytes: /** 49 | * @param obj 50 | * @returns {undefined|Uint8Array} 51 | */ (/** @type {any} */ obj) => obj instanceof Uint8Array ? 
obj : undefined, 52 | Link: /** 53 | * @param obj 54 | * @returns {undefined|object} 55 | */ (/** @type {any} */ obj) => obj !== null && typeof obj === 'object' && obj.asCID === obj ? obj : undefined, 56 | List: /** 57 | * @param obj 58 | * @returns {undefined|Array} 59 | */ (/** @type {any} */ obj) => Array.isArray(obj) ? obj : undefined, 60 | Map: /** 61 | * @param obj 62 | * @returns {undefined|object} 63 | */ (/** @type {any} */ obj) => obj !== null && typeof obj === 'object' && obj.asCID !== obj && !Array.isArray(obj) && !(obj instanceof Uint8Array) ? obj : undefined 64 | } 65 | /** @type {{ [k in string]: (obj:any)=>undefined|any}} */ 66 | const Types = { 67 | 'CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)': Kinds.Link, 68 | 'CarV1HeaderOrV2Pragma > roots (anon)': /** 69 | * @param obj 70 | * @returns {undefined|any} 71 | */ (/** @type {any} */ obj) => { 72 | if (Kinds.List(obj) === undefined) { 73 | return undefined 74 | } 75 | for (let i = 0; i < obj.length; i++) { 76 | let v = obj[i] 77 | v = Types['CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)'](v) 78 | if (v === undefined) { 79 | return undefined 80 | } 81 | if (v !== obj[i]) { 82 | const ret = obj.slice(0, i) 83 | for (let j = i; j < obj.length; j++) { 84 | let v = obj[j] 85 | v = Types['CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)'](v) 86 | if (v === undefined) { 87 | return undefined 88 | } 89 | ret.push(v) 90 | } 91 | return ret 92 | } 93 | } 94 | return obj 95 | }, 96 | Int: Kinds.Int, 97 | CarV1HeaderOrV2Pragma: /** 98 | * @param obj 99 | * @returns {undefined|any} 100 | */ (/** @type {any} */ obj) => { 101 | if (Kinds.Map(obj) === undefined) { 102 | return undefined 103 | } 104 | const entries = Object.entries(obj) 105 | /** @type {{[k in string]: any}} */ 106 | let ret = obj 107 | let requiredCount = 1 108 | for (let i = 0; i < entries.length; i++) { 109 | const [key, value] = entries[i] 110 | switch (key) { 111 | case 'roots': 112 | { 113 | const v = Types['CarV1HeaderOrV2Pragma > roots (anon)'](obj[key]) 114 | if (v === undefined) { 115 | return undefined 116 | } 117 | if (v !== value || ret !== obj) { 118 | if (ret === obj) { 119 | /** @type {{[k in string]: any}} */ 120 | ret = {} 121 | for (let j = 0; j < i; j++) { 122 | ret[entries[j][0]] = entries[j][1] 123 | } 124 | } 125 | ret.roots = v 126 | } 127 | } 128 | break 129 | case 'version': 130 | { 131 | requiredCount-- 132 | const v = Types.Int(obj[key]) 133 | if (v === undefined) { 134 | return undefined 135 | } 136 | if (v !== value || ret !== obj) { 137 | if (ret === obj) { 138 | /** @type {{[k in string]: any}} */ 139 | ret = {} 140 | for (let j = 0; j < i; j++) { 141 | ret[entries[j][0]] = entries[j][1] 142 | } 143 | } 144 | ret.version = v 145 | } 146 | } 147 | break 148 | default: 149 | return undefined 150 | } 151 | } 152 | 153 | if (requiredCount > 0) { 154 | return undefined 155 | } 156 | return ret 157 | } 158 | } 159 | /** @type {{ [k in string]: (obj:any)=>undefined|any}} */ 160 | const Reprs = { 161 | 'CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)': Kinds.Link, 162 | 'CarV1HeaderOrV2Pragma > roots (anon)': /** 163 | * @param obj 164 | * @returns {undefined|any} 165 | */ (/** @type {any} */ obj) => { 166 | if (Kinds.List(obj) === undefined) { 167 | return undefined 168 | } 169 | for (let i = 0; i < obj.length; i++) { 170 | let v = obj[i] 171 | v = Reprs['CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)'](v) 172 | if (v === undefined) { 173 | return undefined 174 | } 175 | if (v !== obj[i]) { 176 | const 
ret = obj.slice(0, i) 177 | for (let j = i; j < obj.length; j++) { 178 | let v = obj[j] 179 | v = Reprs['CarV1HeaderOrV2Pragma > roots (anon) > valueType (anon)'](v) 180 | if (v === undefined) { 181 | return undefined 182 | } 183 | ret.push(v) 184 | } 185 | return ret 186 | } 187 | } 188 | return obj 189 | }, 190 | Int: Kinds.Int, 191 | CarV1HeaderOrV2Pragma: /** 192 | * @param obj 193 | * @returns {undefined|any} 194 | */ (/** @type {any} */ obj) => { 195 | if (Kinds.Map(obj) === undefined) { 196 | return undefined 197 | } 198 | const entries = Object.entries(obj) 199 | /** @type {{[k in string]: any}} */ 200 | let ret = obj 201 | let requiredCount = 1 202 | for (let i = 0; i < entries.length; i++) { 203 | const [key, value] = entries[i] 204 | switch (key) { 205 | case 'roots': 206 | { 207 | const v = Reprs['CarV1HeaderOrV2Pragma > roots (anon)'](value) 208 | if (v === undefined) { 209 | return undefined 210 | } 211 | if (v !== value || ret !== obj) { 212 | if (ret === obj) { 213 | /** @type {{[k in string]: any}} */ 214 | ret = {} 215 | for (let j = 0; j < i; j++) { 216 | ret[entries[j][0]] = entries[j][1] 217 | } 218 | } 219 | ret.roots = v 220 | } 221 | } 222 | break 223 | case 'version': 224 | { 225 | requiredCount-- 226 | const v = Reprs.Int(value) 227 | if (v === undefined) { 228 | return undefined 229 | } 230 | if (v !== value || ret !== obj) { 231 | if (ret === obj) { 232 | /** @type {{[k in string]: any}} */ 233 | ret = {} 234 | for (let j = 0; j < i; j++) { 235 | ret[entries[j][0]] = entries[j][1] 236 | } 237 | } 238 | ret.version = v 239 | } 240 | } 241 | break 242 | default: 243 | return undefined 244 | } 245 | } 246 | if (requiredCount > 0) { 247 | return undefined 248 | } 249 | return ret 250 | } 251 | } 252 | 253 | export const CarV1HeaderOrV2Pragma = { 254 | toTyped: Types.CarV1HeaderOrV2Pragma, 255 | toRepresentation: Reprs.CarV1HeaderOrV2Pragma 256 | } 257 | -------------------------------------------------------------------------------- /src/header.ipldsch: -------------------------------------------------------------------------------- 1 | # CarV1HeaderOrV2Pragma is a more relaxed form, and can parse {version:x} where 2 | # roots are optional. This is typically useful for the {verison:2} CARv2 3 | # pragma. 4 | 5 | type CarV1HeaderOrV2Pragma struct { 6 | roots optional [&Any] 7 | # roots is _not_ optional for CarV1 but we defer that check within code to 8 | # gracefully handle the V2 case where it's just {version:X} 9 | version Int 10 | } 11 | 12 | # CarV1Header is the strict form of the header, and requires roots to be 13 | # present. This is compatible with the CARv1 specification. 
14 | 15 | # type CarV1Header struct { 16 | # roots [&Any] 17 | # version Int 18 | # } 19 | -------------------------------------------------------------------------------- /src/index-browser.js: -------------------------------------------------------------------------------- 1 | import { CarBufferReader } from './buffer-reader.js' 2 | import * as CarBufferWriter from './buffer-writer.js' 3 | import { CarIndexedReader } from './indexed-reader-browser.js' 4 | import { CarIndexer } from './indexer.js' 5 | import { CarBlockIterator, CarCIDIterator } from './iterator.js' 6 | import { CarReader } from './reader-browser.js' 7 | import { CarWriter } from './writer-browser.js' 8 | 9 | export { 10 | CarReader, 11 | CarIndexer, 12 | CarBlockIterator, 13 | CarCIDIterator, 14 | CarWriter, 15 | CarIndexedReader, 16 | CarBufferReader, 17 | CarBufferWriter 18 | } 19 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import { CarBufferReader } from './buffer-reader.js' 2 | import * as CarBufferWriter from './buffer-writer.js' 3 | import { CarIndexedReader } from './indexed-reader.js' 4 | import { CarIndexer } from './indexer.js' 5 | import { CarBlockIterator, CarCIDIterator } from './iterator.js' 6 | import { CarReader } from './reader.js' 7 | import { CarWriter } from './writer.js' 8 | 9 | // @see https://www.iana.org/assignments/media-types/application/vnd.ipld.car 10 | export const contentType = 'application/vnd.ipld.car' 11 | 12 | export { 13 | CarReader, 14 | CarBufferReader, 15 | CarIndexer, 16 | CarBlockIterator, 17 | CarCIDIterator, 18 | CarWriter, 19 | CarIndexedReader, 20 | CarBufferWriter 21 | } 22 | -------------------------------------------------------------------------------- /src/indexed-reader-browser.js: -------------------------------------------------------------------------------- 1 | export class CarIndexedReader { 2 | static async fromFile () { 3 | throw new Error('Unsupported in this environment') 4 | } 5 | } 6 | 7 | export const __browser = true 8 | -------------------------------------------------------------------------------- /src/indexed-reader.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { Readable } from 'stream' 3 | import { CID } from 'multiformats/cid' 4 | import { CarIndexer } from './indexer.js' 5 | import { CarReader as NodeCarReader } from './reader.js' 6 | 7 | /** 8 | * @typedef {import('fs').promises.FileHandle} FileHandle 9 | * @typedef {import('./api.js').Block} Block 10 | * @typedef {import('./api.js').BlockIndex} BlockIndex 11 | * @typedef {import('./api.js').CarReader} CarReaderIface 12 | * @typedef {import('./reader-browser.js').CarReader} CarReader 13 | * @typedef {{ blockLength:number, blockOffset:number }} RawLocation 14 | */ 15 | 16 | /** 17 | * A form of {@link CarReader} that pre-indexes a CAR archive from a file and 18 | * provides random access to blocks within the file using the index data. This 19 | * function is **only available in Node.js** and not a browser environment. 20 | * 21 | * For large CAR files, using this form of `CarReader` can be singificantly more 22 | * efficient in terms of memory. The index consists of a list of `CID`s and 23 | * their location within the archive (see {@link CarIndexer}). For large numbers 24 | * of blocks, this index can also occupy a significant amount of memory. 
In some 25 | * cases it may be necessary to expand the memory capacity of a Node.js instance 26 | * to allow this index to fit. (e.g. by running with 27 | * `NODE_OPTIONS="--max-old-space-size=16384"`). 28 | * 29 | * As an `CarIndexedReader` instance maintains an open file descriptor for its 30 | * CAR file, an additional {@link CarReader#close} method is attached. This 31 | * _must_ be called to have full clean-up of resources after use. 32 | * 33 | * Load this class with either 34 | * `import { CarIndexedReader } from '@ipld/car/indexed-reader'` 35 | * (`const { CarIndexedReader } = require('@ipld/car/indexed-reader')`). Or 36 | * `import { CarIndexedReader } from '@ipld/car'` 37 | * (`const { CarIndexedReader } = require('@ipld/car')`). The former will likely 38 | * result in smaller bundle sizes where this is important. 39 | * 40 | * @name CarIndexedReader 41 | * @class 42 | * @implements {CarReaderIface} 43 | * @extends {CarReader} 44 | * @property {number} version The version number of the CAR referenced by this 45 | * reader (should be `1`). 46 | */ 47 | export class CarIndexedReader { 48 | /** 49 | * @param {number} version 50 | * @param {string} path 51 | * @param {CID[]} roots 52 | * @param {Map} index 53 | * @param {string[]} order 54 | */ 55 | constructor (version, path, roots, index, order) { 56 | this._version = version 57 | this._path = path 58 | this._roots = roots 59 | this._index = index 60 | this._order = order 61 | this._fd = null 62 | } 63 | 64 | get version () { 65 | return this._version 66 | } 67 | 68 | /** 69 | * See {@link CarReader#getRoots} 70 | * 71 | * @function 72 | * @memberof CarIndexedReader 73 | * @instance 74 | * @async 75 | * @returns {Promise} 76 | */ 77 | async getRoots () { 78 | return this._roots 79 | } 80 | 81 | /** 82 | * See {@link CarReader#has} 83 | * 84 | * @function 85 | * @memberof CarIndexedReader 86 | * @instance 87 | * @async 88 | * @param {CID} key 89 | * @returns {Promise} 90 | */ 91 | async has (key) { 92 | return this._index.has(key.toString()) 93 | } 94 | 95 | /** 96 | * See {@link CarReader#get} 97 | * 98 | * @function 99 | * @memberof CarIndexedReader 100 | * @instance 101 | * @async 102 | * @param {CID} key 103 | * @returns {Promise} 104 | */ 105 | async get (key) { 106 | const blockIndex = this._index.get(key.toString()) 107 | if (!blockIndex) { 108 | return undefined 109 | } 110 | if (!this._fd) { 111 | this._fd = await fs.promises.open(this._path, 'r') 112 | } 113 | const readIndex = { 114 | cid: key, 115 | length: 0, 116 | offset: 0, 117 | blockLength: blockIndex.blockLength, 118 | blockOffset: blockIndex.blockOffset 119 | } 120 | return NodeCarReader.readRaw(this._fd, readIndex) 121 | } 122 | 123 | /** 124 | * See {@link CarReader#blocks} 125 | * 126 | * @function 127 | * @memberof CarIndexedReader 128 | * @instance 129 | * @async 130 | * @generator 131 | * @returns {AsyncGenerator} 132 | */ 133 | async * blocks () { 134 | for (const cidStr of this._order) { 135 | const block = await this.get(CID.parse(cidStr)) 136 | /* c8 ignore next 3 */ 137 | if (!block) { 138 | throw new Error('Unexpected internal error') 139 | } 140 | yield block 141 | } 142 | } 143 | 144 | /** 145 | * See {@link CarReader#cids} 146 | * 147 | * @function 148 | * @memberof CarIndexedReader 149 | * @instance 150 | * @async 151 | * @generator 152 | * @returns {AsyncGenerator} 153 | */ 154 | async * cids () { 155 | for (const cidStr of this._order) { 156 | yield CID.parse(cidStr) 157 | } 158 | } 159 | 160 | /** 161 | * Close the underlying file descriptor 
maintained by this `CarIndexedReader`. 162 | * This must be called for proper resource clean-up to occur. 163 | * 164 | * @function 165 | * @memberof CarIndexedReader 166 | * @instance 167 | * @async 168 | * @returns {Promise} 169 | */ 170 | async close () { 171 | if (this._fd) { 172 | return this._fd.close() 173 | } 174 | } 175 | 176 | /** 177 | * Instantiate an {@link CarIndexedReader} from a file with the provided 178 | * `path`. The CAR file is first indexed with a full path that collects `CID`s 179 | * and block locations. This index is maintained in memory. Subsequent reads 180 | * operate on a read-only file descriptor, fetching the block from its in-file 181 | * location. 182 | * 183 | * For large archives, the initial indexing may take some time. The returned 184 | * `Promise` will resolve only after this is complete. 185 | * 186 | * @async 187 | * @static 188 | * @memberof CarIndexedReader 189 | * @param {string} path 190 | * @returns {Promise} 191 | */ 192 | static async fromFile (path) { 193 | if (typeof path !== 'string') { 194 | throw new TypeError('fromFile() requires a file path string') 195 | } 196 | 197 | const iterable = await CarIndexer.fromIterable(Readable.from(fs.createReadStream(path))) 198 | /** @type {Map} */ 199 | const index = new Map() 200 | /** @type {string[]} */ 201 | const order = [] 202 | for await (const { cid, blockLength, blockOffset } of iterable) { 203 | const cidStr = cid.toString() 204 | index.set(cidStr, { blockLength, blockOffset }) 205 | order.push(cidStr) 206 | } 207 | return new CarIndexedReader(iterable.version, path, await iterable.getRoots(), index, order) 208 | } 209 | } 210 | 211 | export const __browser = false 212 | -------------------------------------------------------------------------------- /src/indexer.js: -------------------------------------------------------------------------------- 1 | import { 2 | asyncIterableReader, 3 | bytesReader, 4 | createDecoder 5 | } from './decoder.js' 6 | 7 | /** 8 | * @typedef {import('multiformats').CID} CID 9 | * @typedef {import('./api.js').Block} Block 10 | * @typedef {import('./api.js').RootsReader} RootsReader 11 | * @typedef {import('./api.js').BlockIndex} BlockIndex 12 | * @typedef {import('./coding.js').BytesReader} BytesReader 13 | */ 14 | 15 | /** 16 | * Provides an iterator over all of the `Block`s in a CAR, returning their CIDs 17 | * and byte-location information. Implements an `AsyncIterable`. 18 | * Where a `BlockIndex` is a 19 | * `{ cid:CID, length:number, offset:number, blockLength:number, blockOffset:number }`. 20 | * 21 | * As an implementer of `AsyncIterable`, this class can be used directly in a 22 | * `for await (const blockIndex of iterator) {}` loop. Where the `iterator` is 23 | * constructed using {@link CarIndexer.fromBytes} or 24 | * {@link CarIndexer.fromIterable}. 25 | * 26 | * An iteration can only be performce _once_ per instantiation. 27 | * 28 | * `CarIndexer` also implements the `RootsReader` interface and provides 29 | * the {@link CarIndexer.getRoots `getRoots()`} method. 30 | * 31 | * Load this class with either 32 | * `import { CarIndexer } from '@ipld/car/indexer'` 33 | * (`const { CarIndexer } = require('@ipld/car/indexer')`). Or 34 | * `import { CarIndexer } from '@ipld/car'` 35 | * (`const { CarIndexer } = require('@ipld/car')`). The former will likely 36 | * result in smaller bundle sizes where this is important. 
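 *
 * A usage sketch, reading from a hypothetical `example.car` file in Node.js:
 *
 * ```js
 * import fs from 'fs'
 * import { CarIndexer } from '@ipld/car/indexer'
 *
 * const indexer = await CarIndexer.fromIterable(fs.createReadStream('example.car'))
 * console.log('roots:', await indexer.getRoots())
 * for await (const { cid, blockOffset, blockLength } of indexer) {
 *   console.log(`${cid} @ ${blockOffset} (${blockLength} bytes)`)
 * }
 * ```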
37 | * 38 | * @name CarIndexer 39 | * @class 40 | * @implements {RootsReader} 41 | * @implements {AsyncIterable} 42 | * @property {number} version The version number of the CAR referenced by this 43 | * reader (should be `1`). 44 | */ 45 | export class CarIndexer { 46 | /** 47 | * @param {number} version 48 | * @param {CID[]} roots 49 | * @param {AsyncGenerator} iterator 50 | */ 51 | constructor (version, roots, iterator) { 52 | this._version = version 53 | this._roots = roots 54 | this._iterator = iterator 55 | } 56 | 57 | get version () { 58 | return this._version 59 | } 60 | 61 | /** 62 | * Get the list of roots defined by the CAR referenced by this indexer. May be 63 | * zero or more `CID`s. 64 | * 65 | * @function 66 | * @memberof CarIndexer 67 | * @instance 68 | * @async 69 | * @returns {Promise} 70 | */ 71 | async getRoots () { 72 | return this._roots 73 | } 74 | 75 | /** 76 | * @returns {AsyncIterator} 77 | */ 78 | [Symbol.asyncIterator] () { 79 | return this._iterator 80 | } 81 | 82 | /** 83 | * Instantiate a {@link CarIndexer} from a `Uint8Array` blob. Only the header 84 | * is decoded initially, the remainder is processed and emitted via the 85 | * iterator as it is consumed. 86 | * 87 | * @async 88 | * @static 89 | * @memberof CarIndexer 90 | * @param {Uint8Array} bytes 91 | * @returns {Promise} 92 | */ 93 | static async fromBytes (bytes) { 94 | if (!(bytes instanceof Uint8Array)) { 95 | throw new TypeError('fromBytes() requires a Uint8Array') 96 | } 97 | return decodeIndexerComplete(bytesReader(bytes)) 98 | } 99 | 100 | /** 101 | * Instantiate a {@link CarIndexer} from an `AsyncIterable`, 102 | * such as a [modern Node.js stream](https://nodejs.org/api/stream.html#stream_streams_compatibility_with_async_generators_and_async_iterators). Only the header 103 | * is decoded initially, the remainder is processed and emitted via the 104 | * iterator as it is consumed. 
105 | * 106 | * @async 107 | * @static 108 | * @memberof CarIndexer 109 | * @param {AsyncIterable} asyncIterable 110 | * @returns {Promise} 111 | */ 112 | static async fromIterable (asyncIterable) { 113 | if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) { 114 | throw new TypeError('fromIterable() requires an async iterable') 115 | } 116 | return decodeIndexerComplete(asyncIterableReader(asyncIterable)) 117 | } 118 | } 119 | 120 | /** 121 | * @private 122 | * @param {BytesReader} reader 123 | * @returns {Promise} 124 | */ 125 | async function decodeIndexerComplete (reader) { 126 | const decoder = createDecoder(reader) 127 | const { version, roots } = await decoder.header() 128 | 129 | return new CarIndexer(version, roots, decoder.blocksIndex()) 130 | } 131 | -------------------------------------------------------------------------------- /src/iterator-channel.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @template {any} T 3 | * @typedef {import('./coding.js').IteratorChannel} IteratorChannel 4 | */ 5 | 6 | function noop () {} 7 | 8 | /** 9 | * @template {any} T 10 | * @returns {IteratorChannel} 11 | */ 12 | export function create () { 13 | /** @type {T[]} */ 14 | const chunkQueue = [] 15 | /** @type {Promise | null} */ 16 | let drainer = null 17 | let drainerResolver = noop 18 | let ended = false 19 | /** @type {Promise> | null} */ 20 | let outWait = null 21 | let outWaitResolver = noop 22 | 23 | const makeDrainer = () => { 24 | if (!drainer) { 25 | drainer = new Promise((resolve) => { 26 | drainerResolver = () => { 27 | drainer = null 28 | drainerResolver = noop 29 | resolve() 30 | } 31 | }) 32 | } 33 | return drainer 34 | } 35 | 36 | /** 37 | * @returns {IteratorChannel} 38 | */ 39 | const writer = { 40 | /** 41 | * @param {T} chunk 42 | * @returns {Promise} 43 | */ 44 | write (chunk) { 45 | chunkQueue.push(chunk) 46 | const drainer = makeDrainer() 47 | outWaitResolver() 48 | return drainer 49 | }, 50 | 51 | async end () { 52 | ended = true 53 | const drainer = makeDrainer() 54 | outWaitResolver() 55 | await drainer 56 | } 57 | } 58 | 59 | /** @type {AsyncIterator} */ 60 | const iterator = { 61 | /** @returns {Promise>} */ 62 | async next () { 63 | const chunk = chunkQueue.shift() 64 | if (chunk) { 65 | if (chunkQueue.length === 0) { 66 | drainerResolver() 67 | } 68 | return { done: false, value: chunk } 69 | } 70 | 71 | if (ended) { 72 | drainerResolver() 73 | return { done: true, value: undefined } 74 | } 75 | 76 | if (!outWait) { 77 | outWait = new Promise((resolve) => { 78 | outWaitResolver = () => { 79 | outWait = null 80 | outWaitResolver = noop 81 | return resolve(iterator.next()) 82 | } 83 | }) 84 | } 85 | 86 | return outWait 87 | } 88 | } 89 | 90 | return { writer, iterator } 91 | } 92 | -------------------------------------------------------------------------------- /src/iterator.js: -------------------------------------------------------------------------------- 1 | import { 2 | asyncIterableReader, 3 | bytesReader, 4 | createDecoder 5 | } from './decoder.js' 6 | 7 | /** 8 | * @typedef {import('multiformats').CID} CID 9 | * @typedef {import('./api.js').Block} Block 10 | * @typedef {import('./api.js').RootsReader} RootsReader 11 | * @typedef {import('./coding.js').BytesReader} BytesReader 12 | */ 13 | 14 | /** 15 | * @class 16 | * @implements {RootsReader} 17 | * @property {number} version The version number of the CAR referenced by this reader (should be `1`). 
18 | */ 19 | export class CarIteratorBase { 20 | /** 21 | * @param {number} version 22 | * @param {CID[]} roots 23 | * @param {AsyncIterable|void} iterable 24 | */ 25 | constructor (version, roots, iterable) { 26 | this._version = version 27 | this._roots = roots 28 | this._iterable = iterable 29 | this._decoded = false 30 | } 31 | 32 | get version () { 33 | return this._version 34 | } 35 | 36 | /** 37 | * @returns {Promise} 38 | */ 39 | async getRoots () { 40 | return this._roots 41 | } 42 | } 43 | 44 | /** 45 | * Provides an iterator over all of the `Block`s in a CAR. Implements a 46 | * `BlockIterator` interface, or `AsyncIterable`. Where a `Block` is 47 | * a `{ cid:CID, bytes:Uint8Array }` pair. 48 | * 49 | * As an implementer of `AsyncIterable`, this class can be used directly in a 50 | * `for await (const block of iterator) {}` loop. Where the `iterator` is 51 | * constructed using {@link CarBlockiterator.fromBytes} or 52 | * {@link CarBlockiterator.fromIterable}. 53 | * 54 | * An iteration can only be performce _once_ per instantiation. 55 | * 56 | * `CarBlockIterator` also implements the `RootsReader` interface and provides 57 | * the {@link CarBlockiterator.getRoots `getRoots()`} method. 58 | * 59 | * Load this class with either 60 | * `import { CarBlockIterator } from '@ipld/car/iterator'` 61 | * (`const { CarBlockIterator } = require('@ipld/car/iterator')`). Or 62 | * `import { CarBlockIterator } from '@ipld/car'` 63 | * (`const { CarBlockIterator } = require('@ipld/car')`). 64 | * 65 | * @name CarBlockIterator 66 | * @class 67 | * @implements {RootsReader} 68 | * @implements {AsyncIterable} 69 | * @property {number} version The version number of the CAR referenced by this 70 | * iterator (should be `1`). 71 | */ 72 | export class CarBlockIterator extends CarIteratorBase { 73 | // inherited method 74 | /** 75 | * Get the list of roots defined by the CAR referenced by this iterator. May be 76 | * zero or more `CID`s. 77 | * 78 | * @function getRoots 79 | * @memberof CarBlockIterator 80 | * @instance 81 | * @async 82 | * @returns {Promise} 83 | */ 84 | 85 | /** 86 | * @returns {AsyncIterator} 87 | */ 88 | [Symbol.asyncIterator] () { 89 | if (this._decoded) { 90 | throw new Error('Cannot decode more than once') 91 | } 92 | /* c8 ignore next 3 */ 93 | if (!this._iterable) { 94 | throw new Error('Block iterable not found') 95 | } 96 | this._decoded = true 97 | return this._iterable[Symbol.asyncIterator]() 98 | } 99 | 100 | /** 101 | * Instantiate a {@link CarBlockIterator} from a `Uint8Array` blob. Rather 102 | * than decoding the entire byte array prior to returning the iterator, as in 103 | * {@link CarReader.fromBytes}, only the header is decoded and the remainder 104 | * of the CAR is parsed as the `Block`s as yielded. 105 | * 106 | * @async 107 | * @static 108 | * @memberof CarBlockIterator 109 | * @param {Uint8Array} bytes 110 | * @returns {Promise} 111 | */ 112 | static async fromBytes (bytes) { 113 | const { version, roots, iterator } = await fromBytes(bytes) 114 | return new CarBlockIterator(version, roots, iterator) 115 | } 116 | 117 | /** 118 | * Instantiate a {@link CarBlockIterator} from a `AsyncIterable`, 119 | * such as a [modern Node.js stream](https://nodejs.org/api/stream.html#stream_streams_compatibility_with_async_generators_and_async_iterators). 
120 | * Rather than decoding the entire byte array prior to returning the iterator, 121 | * as in {@link CarReader.fromIterable}, only the header is decoded and the 122 | * remainder of the CAR is parsed as the `Block`s as yielded. 123 | * 124 | * @async 125 | * @static 126 | * @param {AsyncIterable} asyncIterable 127 | * @returns {Promise} 128 | */ 129 | static async fromIterable (asyncIterable) { 130 | const { version, roots, iterator } = await fromIterable(asyncIterable) 131 | return new CarBlockIterator(version, roots, iterator) 132 | } 133 | } 134 | 135 | /** 136 | * Provides an iterator over all of the `CID`s in a CAR. Implements a 137 | * `CIDIterator` interface, or `AsyncIterable`. Similar to 138 | * {@link CarBlockIterator} but only yields the CIDs in the CAR. 139 | * 140 | * As an implementer of `AsyncIterable`, this class can be used directly in a 141 | * `for await (const cid of iterator) {}` loop. Where the `iterator` is 142 | * constructed using {@link CarCIDiterator.fromBytes} or 143 | * {@link CarCIDiterator.fromIterable}. 144 | * 145 | * An iteration can only be performce _once_ per instantiation. 146 | * 147 | * `CarCIDIterator` also implements the `RootsReader` interface and provides 148 | * the {@link CarCIDiterator.getRoots `getRoots()`} method. 149 | * 150 | * Load this class with either 151 | * `import { CarCIDIterator } from '@ipld/car/iterator'` 152 | * (`const { CarCIDIterator } = require('@ipld/car/iterator')`). Or 153 | * `import { CarCIDIterator } from '@ipld/car'` 154 | * (`const { CarCIDIterator } = require('@ipld/car')`). 155 | * 156 | * @name CarCIDIterator 157 | * @class 158 | * @implements {RootsReader} 159 | * @implements {AsyncIterable} 160 | * @property {number} version The version number of the CAR referenced by this 161 | * iterator (should be `1`). 162 | */ 163 | export class CarCIDIterator extends CarIteratorBase { 164 | // inherited method 165 | /** 166 | * Get the list of roots defined by the CAR referenced by this iterator. May be 167 | * zero or more `CID`s. 168 | * 169 | * @function getRoots 170 | * @memberof CarCIDIterator 171 | * @instance 172 | * @async 173 | * @returns {Promise} 174 | */ 175 | 176 | /** 177 | * @returns {AsyncIterator} 178 | */ 179 | [Symbol.asyncIterator] () { 180 | if (this._decoded) { 181 | throw new Error('Cannot decode more than once') 182 | } 183 | /* c8 ignore next 3 */ 184 | if (!this._iterable) { 185 | throw new Error('Block iterable not found') 186 | } 187 | this._decoded = true 188 | const iterable = this._iterable[Symbol.asyncIterator]() 189 | return { 190 | async next () { 191 | const next = await iterable.next() 192 | if (next.done) { 193 | return next 194 | } 195 | return { done: false, value: next.value.cid } 196 | } 197 | } 198 | } 199 | 200 | /** 201 | * Instantiate a {@link CarCIDIterator} from a `Uint8Array` blob. Rather 202 | * than decoding the entire byte array prior to returning the iterator, as in 203 | * {@link CarReader.fromBytes}, only the header is decoded and the remainder 204 | * of the CAR is parsed as the `CID`s as yielded. 
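 *
 * A sketch (assuming `carBytes` is a hypothetical `Uint8Array` holding a CAR):
 *
 * ```js
 * import { CarCIDIterator } from '@ipld/car/iterator'
 *
 * const cids = await CarCIDIterator.fromBytes(carBytes)
 * for await (const cid of cids) {
 *   console.log(cid.toString())
 * }
 * ```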
205 | * 206 | * @async 207 | * @static 208 | * @memberof CarCIDIterator 209 | * @param {Uint8Array} bytes 210 | * @returns {Promise} 211 | */ 212 | static async fromBytes (bytes) { 213 | const { version, roots, iterator } = await fromBytes(bytes) 214 | return new CarCIDIterator(version, roots, iterator) 215 | } 216 | 217 | /** 218 | * Instantiate a {@link CarCIDIterator} from a `AsyncIterable`, 219 | * such as a [modern Node.js stream](https://nodejs.org/api/stream.html#stream_streams_compatibility_with_async_generators_and_async_iterators). 220 | * Rather than decoding the entire byte array prior to returning the iterator, 221 | * as in {@link CarReader.fromIterable}, only the header is decoded and the 222 | * remainder of the CAR is parsed as the `CID`s as yielded. 223 | * 224 | * @async 225 | * @static 226 | * @memberof CarCIDIterator 227 | * @param {AsyncIterable} asyncIterable 228 | * @returns {Promise} 229 | */ 230 | static async fromIterable (asyncIterable) { 231 | const { version, roots, iterator } = await fromIterable(asyncIterable) 232 | return new CarCIDIterator(version, roots, iterator) 233 | } 234 | } 235 | 236 | /** 237 | * @param {Uint8Array} bytes 238 | * @returns {Promise<{ version:number, roots:CID[], iterator:AsyncIterable}>} 239 | */ 240 | async function fromBytes (bytes) { 241 | if (!(bytes instanceof Uint8Array)) { 242 | throw new TypeError('fromBytes() requires a Uint8Array') 243 | } 244 | return decodeIterator(bytesReader(bytes)) 245 | } 246 | 247 | /** 248 | * @param {AsyncIterable} asyncIterable 249 | * @returns {Promise<{ version:number, roots:CID[], iterator:AsyncIterable}>} 250 | */ 251 | async function fromIterable (asyncIterable) { 252 | if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) { 253 | throw new TypeError('fromIterable() requires an async iterable') 254 | } 255 | return decodeIterator(asyncIterableReader(asyncIterable)) 256 | } 257 | 258 | /** 259 | * @private 260 | * @param {BytesReader} reader 261 | * @returns {Promise<{ version:number, roots:CID[], iterator:AsyncIterable}>} 262 | */ 263 | async function decodeIterator (reader) { 264 | const decoder = createDecoder(reader) 265 | const { version, roots } = await decoder.header() 266 | return { version, roots, iterator: decoder.blocks() } 267 | } 268 | -------------------------------------------------------------------------------- /src/promise-fs-opts.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { promisify } from 'util' 3 | 4 | const hasFS = Boolean(fs) 5 | 6 | export { hasFS } 7 | 8 | /** 9 | * @type {any} 10 | */ 11 | let _fsReadFn 12 | /** 13 | * @description This function is needed not to initialize the `fs.read` on load time. To run in cf workers without polyfill. 14 | * @param {number} fd 15 | * @param {Uint8Array} buffer 16 | * @param {number} offset 17 | * @param {number} length 18 | * @param {number} position 19 | * @returns {Promise<{ bytesRead: number, buffer: Uint8Array }>} 20 | */ 21 | export function fsread (fd, buffer, offset, length, position) { 22 | if (!_fsReadFn) { 23 | _fsReadFn = promisify(fs.read) 24 | } 25 | return _fsReadFn(fd, buffer, offset, length, position) 26 | } 27 | 28 | /** 29 | * @type {any} 30 | */ 31 | let _fsWriteFn 32 | /** 33 | * @description This function is needed not to initialize the `fs.write` on load time. To run in cf workers without polyfill. 
34 | * @param {number} fd 35 | * @param {Uint8Array} buffer 36 | * @param {number} offset 37 | * @param {number} length 38 | * @param {number} position 39 | * @returns {Promise<{ bytesRead: number, buffer: Uint8Array }>} 40 | */ 41 | export function fswrite (fd, buffer, offset, length, position) { 42 | if (!_fsWriteFn) { 43 | _fsWriteFn = promisify(fs.write) 44 | } 45 | return _fsWriteFn(fd, buffer, offset, length, position) 46 | } 47 | -------------------------------------------------------------------------------- /src/reader-browser.js: -------------------------------------------------------------------------------- 1 | import { asyncIterableReader, bytesReader, createDecoder } from './decoder.js' 2 | 3 | /** 4 | * @typedef {import('multiformats').CID} CID 5 | * @typedef {import('./api.js').Block} Block 6 | * @typedef {import('./api.js').CarReader} CarReaderIface 7 | * @typedef {import('./coding.js').BytesReader} BytesReader 8 | * @typedef {import('./coding.js').CarHeader} CarHeader 9 | * @typedef {import('./coding.js').CarV2Header} CarV2Header 10 | */ 11 | 12 | /** 13 | * Provides blockstore-like access to a CAR. 14 | * 15 | * Implements the `RootsReader` interface: 16 | * {@link CarReader.getRoots `getRoots()`}. And the `BlockReader` interface: 17 | * {@link CarReader.get `get()`}, {@link CarReader.has `has()`}, 18 | * {@link CarReader.blocks `blocks()`} (defined as a `BlockIterator`) and 19 | * {@link CarReader.cids `cids()`} (defined as a `CIDIterator`). 20 | * 21 | * Load this class with either `import { CarReader } from '@ipld/car/reader'` 22 | * (`const { CarReader } = require('@ipld/car/reader')`). Or 23 | * `import { CarReader } from '@ipld/car'` (`const { CarReader } = require('@ipld/car')`). 24 | * The former will likely result in smaller bundle sizes where this is 25 | * important. 26 | * 27 | * @name CarReader 28 | * @class 29 | * @implements {CarReaderIface} 30 | * @property {number} version The version number of the CAR referenced by this 31 | * reader (should be `1` or `2`). 32 | */ 33 | export class CarReader { 34 | /** 35 | * @constructs CarReader 36 | * @param {CarHeader|CarV2Header} header 37 | * @param {Block[]} blocks 38 | */ 39 | constructor (header, blocks) { 40 | this._header = header 41 | this._blocks = blocks 42 | this._keys = blocks.map((b) => b.cid.toString()) 43 | } 44 | 45 | /** 46 | * @property 47 | * @memberof CarReader 48 | * @instance 49 | */ 50 | get version () { 51 | return this._header.version 52 | } 53 | 54 | /** 55 | * Get the list of roots defined by the CAR referenced by this reader. May be 56 | * zero or more `CID`s. 57 | * 58 | * @function 59 | * @memberof CarReader 60 | * @instance 61 | * @async 62 | * @returns {Promise} 63 | */ 64 | async getRoots () { 65 | return this._header.roots 66 | } 67 | 68 | /** 69 | * Check whether a given `CID` exists within the CAR referenced by this 70 | * reader. 71 | * 72 | * @function 73 | * @memberof CarReader 74 | * @instance 75 | * @async 76 | * @param {CID} key 77 | * @returns {Promise} 78 | */ 79 | async has (key) { 80 | return this._keys.indexOf(key.toString()) > -1 81 | } 82 | 83 | /** 84 | * Fetch a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) from the CAR 85 | * referenced by this reader matching the provided `CID`. In the case where 86 | * the provided `CID` doesn't exist within the CAR, `undefined` will be 87 | * returned. 
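 *
 * For example (assuming `reader` is a `CarReader` and `cid` is a `CID` that may
 * or may not be present in the archive):
 *
 * ```js
 * const block = await reader.get(cid)
 * if (block) {
 *   console.log(`${block.cid} -> ${block.bytes.byteLength} bytes`)
 * }
 * ```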
88 | * 89 | * @function 90 | * @memberof CarReader 91 | * @instance 92 | * @async 93 | * @param {CID} key 94 | * @returns {Promise} 95 | */ 96 | async get (key) { 97 | const index = this._keys.indexOf(key.toString()) 98 | return index > -1 ? this._blocks[index] : undefined 99 | } 100 | 101 | /** 102 | * Returns a `BlockIterator` (`AsyncIterable`) that iterates over all 103 | * of the `Block`s (`{ cid:CID, bytes:Uint8Array }` pairs) contained within 104 | * the CAR referenced by this reader. 105 | * 106 | * @function 107 | * @memberof CarReader 108 | * @instance 109 | * @async 110 | * @generator 111 | * @returns {AsyncGenerator} 112 | */ 113 | async * blocks () { 114 | for (const block of this._blocks) { 115 | yield block 116 | } 117 | } 118 | 119 | /** 120 | * Returns a `CIDIterator` (`AsyncIterable`) that iterates over all of 121 | * the `CID`s contained within the CAR referenced by this reader. 122 | * 123 | * @function 124 | * @memberof CarReader 125 | * @instance 126 | * @async 127 | * @generator 128 | * @returns {AsyncGenerator} 129 | */ 130 | async * cids () { 131 | for (const block of this._blocks) { 132 | yield block.cid 133 | } 134 | } 135 | 136 | /** 137 | * Instantiate a {@link CarReader} from a `Uint8Array` blob. This performs a 138 | * decode fully in memory and maintains the decoded state in memory for full 139 | * access to the data via the `CarReader` API. 140 | * 141 | * @async 142 | * @static 143 | * @memberof CarReader 144 | * @param {Uint8Array} bytes 145 | * @returns {Promise} 146 | */ 147 | static async fromBytes (bytes) { 148 | if (!(bytes instanceof Uint8Array)) { 149 | throw new TypeError('fromBytes() requires a Uint8Array') 150 | } 151 | return decodeReaderComplete(bytesReader(bytes)) 152 | } 153 | 154 | /** 155 | * Instantiate a {@link CarReader} from a `AsyncIterable`, such as 156 | * a [modern Node.js stream](https://nodejs.org/api/stream.html#stream_streams_compatibility_with_async_generators_and_async_iterators). 157 | * This performs a decode fully in memory and maintains the decoded state in 158 | * memory for full access to the data via the `CarReader` API. 159 | * 160 | * Care should be taken for large archives; this API may not be appropriate 161 | * where memory is a concern or the archive is potentially larger than the 162 | * amount of memory that the runtime can handle. 
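 *
 * A Node.js sketch (the `example.car` path is hypothetical):
 *
 * ```js
 * import fs from 'fs'
 * import { CarReader } from '@ipld/car'
 *
 * const reader = await CarReader.fromIterable(fs.createReadStream('example.car'))
 * console.log('roots:', await reader.getRoots())
 * ```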
163 | * 164 | * @async 165 | * @static 166 | * @memberof CarReader 167 | * @param {AsyncIterable} asyncIterable 168 | * @returns {Promise} 169 | */ 170 | static async fromIterable (asyncIterable) { 171 | if (!asyncIterable || !(typeof asyncIterable[Symbol.asyncIterator] === 'function')) { 172 | throw new TypeError('fromIterable() requires an async iterable') 173 | } 174 | return decodeReaderComplete(asyncIterableReader(asyncIterable)) 175 | } 176 | } 177 | 178 | /** 179 | * @private 180 | * @param {BytesReader} reader 181 | * @returns {Promise} 182 | */ 183 | export async function decodeReaderComplete (reader) { 184 | const decoder = createDecoder(reader) 185 | const header = await decoder.header() 186 | const blocks = [] 187 | for await (const block of decoder.blocks()) { 188 | blocks.push(block) 189 | } 190 | 191 | return new CarReader(header, blocks) 192 | } 193 | 194 | export const __browser = true 195 | -------------------------------------------------------------------------------- /src/reader.js: -------------------------------------------------------------------------------- 1 | import { fsread, hasFS } from './promise-fs-opts.js' 2 | import { CarReader as BrowserCarReader } from './reader-browser.js' 3 | 4 | /** 5 | * @typedef {import('./api.js').Block} Block 6 | * @typedef {import('./api.js').BlockIndex} BlockIndex 7 | * @typedef {import('./api.js').CarReader} CarReaderIface 8 | * @typedef {import('fs').promises.FileHandle} FileHandle 9 | */ 10 | 11 | /** 12 | * @class 13 | * @implements {CarReaderIface} 14 | */ 15 | export class CarReader extends BrowserCarReader { 16 | /** 17 | * Reads a block directly from a file descriptor for an open CAR file. This 18 | * function is **only available in Node.js** and not a browser environment. 19 | * 20 | * This function can be used in connection with {@link CarIndexer} which emits 21 | * the `BlockIndex` objects that are required by this function. 22 | * 23 | * The user is responsible for opening and closing the file used in this call. 24 | * 25 | * @async 26 | * @static 27 | * @memberof CarReader 28 | * @param {FileHandle | number} fd - A file descriptor from the 29 | * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle` 30 | * from `fs.promises.open()`. 31 | * @param {BlockIndex} blockIndex - An index pointing to the location of the 32 | * Block required. This `BlockIndex` should take the form: 33 | * `{cid:CID, blockLength:number, blockOffset:number}`. 34 | * @returns {Promise} A `{ cid:CID, bytes:Uint8Array }` pair. 
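One plausible pairing for the `readRaw()` API documented here is `CarIndexer` (also exported from `@ipld/car`): index the archive once, then fetch individual blocks straight from the open file without buffering the whole CAR. The sketch below is an illustration under that assumption, with a hypothetical `example.car` path; it is not code from this repository.

```js
import fs from 'fs'
import { CarIndexer, CarReader } from '@ipld/car'

// Build a BlockIndex for each section, then fetch the raw bytes on demand.
const indexer = await CarIndexer.fromIterable(fs.createReadStream('example.car'))
const fd = await fs.promises.open('example.car', 'r')
try {
  for await (const blockIndex of indexer) {
    // blockIndex takes the form { cid, offset, length, blockOffset, blockLength }
    const { cid, bytes } = await CarReader.readRaw(fd, blockIndex)
    console.log(cid.toString(), bytes.length)
  }
} finally {
  await fd.close()
}
```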
35 | */ 36 | static async readRaw (fd, blockIndex) { 37 | const { cid, blockLength, blockOffset } = blockIndex 38 | const bytes = new Uint8Array(blockLength) 39 | let read 40 | if (typeof fd === 'number') { 41 | read = (await fsread(fd, bytes, 0, blockLength, blockOffset)).bytesRead 42 | } else if (typeof fd === 'object' && typeof fd.read === 'function') { // FileDescriptor 43 | read = (await fd.read(bytes, 0, blockLength, blockOffset)).bytesRead 44 | } else { 45 | throw new TypeError('Bad fd') 46 | } 47 | if (read !== blockLength) { 48 | throw new Error(`Failed to read entire block (${read} instead of ${blockLength})`) 49 | } 50 | return { cid, bytes } 51 | } 52 | } 53 | 54 | export const __browser = !hasFS 55 | -------------------------------------------------------------------------------- /src/writer-browser.js: -------------------------------------------------------------------------------- 1 | import { CID } from 'multiformats/cid' 2 | import { bytesReader, readHeader } from './decoder.js' 3 | import { createEncoder, createHeader } from './encoder.js' 4 | import { create as iteratorChannel } from './iterator-channel.js' 5 | 6 | /** 7 | * @typedef {import('./api.js').Block} Block 8 | * @typedef {import('./api.js').BlockWriter} BlockWriter 9 | * @typedef {import('./api.js').WriterChannel} WriterChannel 10 | * @typedef {import('./coding.js').CarEncoder} CarEncoder 11 | * @typedef {import('./coding.js').IteratorChannel} IteratorChannel 12 | */ 13 | 14 | /** 15 | * Provides a writer interface for the creation of CAR files. 16 | * 17 | * Creation of a `CarWriter` involves the instatiation of an input / output pair 18 | * in the form of a `WriterChannel`, which is a 19 | * `{ writer:CarWriter, out:AsyncIterable }` pair. These two 20 | * components form what can be thought of as a stream-like interface. The 21 | * `writer` component (an instantiated `CarWriter`), has methods to 22 | * {@link CarWriter.put `put()`} new blocks and {@link CarWriter.put `close()`} 23 | * the writing operation (finalising the CAR archive). The `out` component is 24 | * an `AsyncIterable` that yields the bytes of the archive. This can be 25 | * redirected to a file or other sink. In Node.js, you can use the 26 | * [`Readable.from()`](https://nodejs.org/api/stream.html#stream_stream_readable_from_iterable_options) 27 | * API to convert this to a standard Node.js stream, or it can be directly fed 28 | * to a 29 | * [`stream.pipeline()`](https://nodejs.org/api/stream.html#stream_stream_pipeline_source_transforms_destination_callback). 30 | * 31 | * The channel will provide a form of backpressure. The `Promise` from a 32 | * `write()` won't resolve until the resulting data is drained from the `out` 33 | * iterable. 34 | * 35 | * It is also possible to ignore the `Promise` from `write()` calls and allow 36 | * the generated data to queue in memory. This should be avoided for large CAR 37 | * archives of course due to the memory costs and potential for memory overflow. 38 | * 39 | * Load this class with either 40 | * `import { CarWriter } from '@ipld/car/writer'` 41 | * (`const { CarWriter } = require('@ipld/car/writer')`). Or 42 | * `import { CarWriter } from '@ipld/car'` 43 | * (`const { CarWriter } = require('@ipld/car')`). The former will likely 44 | * result in smaller bundle sizes where this is important. 
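The writer "channel" described above is easiest to see end-to-end. Below is a small sketch (illustrative only, assuming Node.js ESM with top-level await); the single dag-cbor block and the `example.car` output path are invented for the example.

```js
import fs from 'fs'
import { Readable } from 'stream'
import * as dagCbor from '@ipld/dag-cbor'
import { CID } from 'multiformats/cid'
import { sha256 } from 'multiformats/hashes/sha2'
import { CarWriter } from '@ipld/car/writer'

// Encode one dag-cbor block to act as both root and content.
const bytes = dagCbor.encode({ hello: 'world' })
const cid = CID.create(1, dagCbor.code, await sha256.digest(bytes))

// `writer` accepts blocks, `out` yields the bytes of the archive.
const { writer, out } = CarWriter.create([cid])
Readable.from(out).pipe(fs.createWriteStream('example.car'))

await writer.put({ cid, bytes }) // resolves once these bytes drain from `out`
await writer.close()
```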
45 | * 46 | * @name CarWriter 47 | * @class 48 | * @implements {BlockWriter} 49 | */ 50 | export class CarWriter { 51 | /** 52 | * @param {CID[]} roots 53 | * @param {CarEncoder} encoder 54 | */ 55 | constructor (roots, encoder) { 56 | this._encoder = encoder 57 | /** @type {Promise} */ 58 | this._mutex = encoder.setRoots(roots) 59 | this._ended = false 60 | } 61 | 62 | /** 63 | * Write a `Block` (a `{ cid:CID, bytes:Uint8Array }` pair) to the archive. 64 | * 65 | * @function 66 | * @memberof CarWriter 67 | * @instance 68 | * @async 69 | * @param {Block} block - A `{ cid:CID, bytes:Uint8Array }` pair. 70 | * @returns {Promise} The returned promise will only resolve once the 71 | * bytes this block generates are written to the `out` iterable. 72 | */ 73 | async put (block) { 74 | if (!(block.bytes instanceof Uint8Array) || !block.cid) { 75 | throw new TypeError('Can only write {cid, bytes} objects') 76 | } 77 | if (this._ended) { 78 | throw new Error('Already closed') 79 | } 80 | const cid = CID.asCID(block.cid) 81 | if (!cid) { 82 | throw new TypeError('Can only write {cid, bytes} objects') 83 | } 84 | this._mutex = this._mutex.then(() => this._encoder.writeBlock({ cid, bytes: block.bytes })) 85 | return this._mutex 86 | } 87 | 88 | /** 89 | * Finalise the CAR archive and signal that the `out` iterable should end once 90 | * any remaining bytes are written. 91 | * 92 | * @function 93 | * @memberof CarWriter 94 | * @instance 95 | * @async 96 | * @returns {Promise} 97 | */ 98 | async close () { 99 | if (this._ended) { 100 | throw new Error('Already closed') 101 | } 102 | await this._mutex 103 | this._ended = true 104 | return this._encoder.close() 105 | } 106 | 107 | /** 108 | * Returns the version number of the CAR file being written 109 | * 110 | * @returns {number} 111 | */ 112 | version () { 113 | return this._encoder.version() 114 | } 115 | 116 | /** 117 | * Create a new CAR writer "channel" which consists of a 118 | * `{ writer:CarWriter, out:AsyncIterable }` pair. 119 | * 120 | * @async 121 | * @static 122 | * @memberof CarWriter 123 | * @param {CID[] | CID | void} roots 124 | * @returns {WriterChannel} The channel takes the form of 125 | * `{ writer:CarWriter, out:AsyncIterable }`. 126 | */ 127 | static create (roots) { 128 | roots = toRoots(roots) 129 | const { encoder, iterator } = encodeWriter() 130 | const writer = new CarWriter(roots, encoder) 131 | const out = new CarWriterOut(iterator) 132 | return { writer, out } 133 | } 134 | 135 | /** 136 | * Create a new CAR appender "channel" which consists of a 137 | * `{ writer:CarWriter, out:AsyncIterable }` pair. 138 | * This appender does not consider roots and does not produce a CAR header. 139 | * It is designed to append blocks to an _existing_ CAR archive. It is 140 | * expected that `out` will be concatenated onto the end of an existing 141 | * archive that already has a properly formatted header. 142 | * 143 | * @async 144 | * @static 145 | * @memberof CarWriter 146 | * @returns {WriterChannel} The channel takes the form of 147 | * `{ writer:CarWriter, out:AsyncIterable }`. 148 | */ 149 | static createAppender () { 150 | const { encoder, iterator } = encodeWriter() 151 | encoder.setRoots = () => Promise.resolve() 152 | const writer = new CarWriter([], encoder) 153 | const out = new CarWriterOut(iterator) 154 | return { writer, out } 155 | } 156 | 157 | /** 158 | * Update the list of roots in the header of an existing CAR as represented 159 | * in a Uint8Array. 
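Returning to `createAppender()` documented just above: because the appender emits no header, its `out` bytes can be appended directly onto a file that already starts with a valid CAR header. A minimal sketch of that pattern follows (illustrative only; the raw block and the `existing.car` path are invented, and a Node.js ESM context with top-level await is assumed).

```js
import fs from 'fs'
import { Readable } from 'stream'
import { CID } from 'multiformats/cid'
import * as raw from 'multiformats/codecs/raw'
import { sha256 } from 'multiformats/hashes/sha2'
import { CarWriter } from '@ipld/car/writer'

// A raw block to tack onto an existing archive.
const bytes = new TextEncoder().encode('appended data')
const cid = CID.create(1, raw.code, await sha256.digest(bytes))

// No header is emitted, so `out` can be appended straight to the file.
const { writer, out } = CarWriter.createAppender()
Readable.from(out).pipe(fs.createWriteStream('existing.car', { flags: 'a' }))

await writer.put({ cid, bytes })
await writer.close()
```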
160 | * 161 | * This operation is an _overwrite_, the total length of the CAR will not be 162 | * modified. A rejection will occur if the new header will not be the same 163 | * length as the existing header, in which case the CAR will not be modified. 164 | * It is the responsibility of the user to ensure that the roots being 165 | * replaced encode as the same length as the new roots. 166 | * 167 | * The byte array passed in an argument will be modified and also returned 168 | * upon successful modification. 169 | * 170 | * @async 171 | * @static 172 | * @memberof CarWriter 173 | * @param {Uint8Array} bytes 174 | * @param {CID[]} roots - A new list of roots to replace the existing list in 175 | * the CAR header. The new header must take up the same number of bytes as the 176 | * existing header, so the roots should collectively be the same byte length 177 | * as the existing roots. 178 | * @returns {Promise} 179 | */ 180 | static async updateRootsInBytes (bytes, roots) { 181 | const reader = bytesReader(bytes) 182 | await readHeader(reader) 183 | const newHeader = createHeader(roots) 184 | if (Number(reader.pos) !== newHeader.length) { 185 | throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${reader.pos} bytes, new header is ${newHeader.length} bytes)`) 186 | } 187 | bytes.set(newHeader, 0) 188 | return bytes 189 | } 190 | } 191 | 192 | /** 193 | * @class 194 | * @implements {AsyncIterable} 195 | */ 196 | export class CarWriterOut { 197 | /** 198 | * @param {AsyncIterator} iterator 199 | */ 200 | constructor (iterator) { 201 | this._iterator = iterator 202 | } 203 | 204 | [Symbol.asyncIterator] () { 205 | if (this._iterating) { 206 | throw new Error('Multiple iterator not supported') 207 | } 208 | this._iterating = true 209 | return this._iterator 210 | } 211 | } 212 | 213 | function encodeWriter () { 214 | /** @type {IteratorChannel} */ 215 | const iw = iteratorChannel() 216 | const { writer, iterator } = iw 217 | const encoder = createEncoder(writer) 218 | return { encoder, iterator } 219 | } 220 | 221 | /** 222 | * @private 223 | * @param {CID[] | CID | void} roots 224 | * @returns {CID[]} 225 | */ 226 | function toRoots (roots) { 227 | if (roots === undefined) { 228 | return [] 229 | } 230 | 231 | if (!Array.isArray(roots)) { 232 | const cid = CID.asCID(roots) 233 | if (!cid) { 234 | throw new TypeError('roots must be a single CID or an array of CIDs') 235 | } 236 | return [cid] 237 | } 238 | 239 | const _roots = [] 240 | for (const root of roots) { 241 | const _root = CID.asCID(root) 242 | if (!_root) { 243 | throw new TypeError('roots must be a single CID or an array of CIDs') 244 | } 245 | _roots.push(_root) 246 | } 247 | return _roots 248 | } 249 | 250 | export const __browser = true 251 | -------------------------------------------------------------------------------- /src/writer.js: -------------------------------------------------------------------------------- 1 | import { readHeader, chunkReader } from './decoder.js' 2 | import { createHeader } from './encoder.js' 3 | import { fsread, fswrite, hasFS } from './promise-fs-opts.js' 4 | import { CarWriter as BrowserCarWriter } from './writer-browser.js' 5 | 6 | /** 7 | * @typedef {import('multiformats/cid').CID} CID 8 | * @typedef {import('./api.js').BlockWriter} BlockWriter 9 | * @typedef {import('fs').promises.FileHandle} FileHandle 10 | */ 11 | 12 | /** 13 | * @class 14 | * @implements {BlockWriter} 15 | */ 16 | export class CarWriter extends BrowserCarWriter { 17 | /** 18 | * Update 
the list of roots in the header of an existing CAR file. The first 19 | * argument must be a file descriptor for CAR file that is open in read and 20 | * write mode (not append), e.g. `fs.open` or `fs.promises.open` with `'r+'` 21 | * mode. 22 | * 23 | * This operation is an _overwrite_, the total length of the CAR will not be 24 | * modified. A rejection will occur if the new header will not be the same 25 | * length as the existing header, in which case the CAR will not be modified. 26 | * It is the responsibility of the user to ensure that the roots being 27 | * replaced encode as the same length as the new roots. 28 | * 29 | * This function is **only available in Node.js** and not a browser 30 | * environment. 31 | * 32 | * @async 33 | * @static 34 | * @memberof CarWriter 35 | * @param {FileHandle | number} fd - A file descriptor from the 36 | * Node.js `fs` module. Either an integer, from `fs.open()` or a `FileHandle` 37 | * from `fs.promises.open()`. 38 | * @param {CID[]} roots - A new list of roots to replace the existing list in 39 | * the CAR header. The new header must take up the same number of bytes as the 40 | * existing header, so the roots should collectively be the same byte length 41 | * as the existing roots. 42 | * @returns {Promise} 43 | */ 44 | static async updateRootsInFile (fd, roots) { 45 | const chunkSize = 256 46 | /** @type {Uint8Array} */ 47 | let bytes 48 | let offset = 0 49 | 50 | /** @type {() => Promise} */ 51 | let readChunk 52 | if (typeof fd === 'number') { 53 | readChunk = async () => (await fsread(fd, bytes, 0, chunkSize, offset)).bytesRead 54 | } else if (typeof fd === 'object' && typeof fd.read === 'function') { // FileDescriptor 55 | readChunk = async () => (await fd.read(bytes, 0, chunkSize, offset)).bytesRead 56 | } else { 57 | throw new TypeError('Bad fd') 58 | } 59 | const fdReader = chunkReader(async () => { 60 | bytes = new Uint8Array(chunkSize) // need a new chunk each time, can't reuse old 61 | const read = await readChunk() 62 | offset += read 63 | /* eslint no-warning-comments: 0 */ 64 | // TODO: test header > 256 bytes 65 | return read < chunkSize ? 
bytes.subarray(0, read) : bytes 66 | }) 67 | 68 | await readHeader(fdReader) 69 | const newHeader = createHeader(roots) 70 | if (fdReader.pos !== newHeader.length) { 71 | throw new Error(`updateRoots() can only overwrite a header of the same length (old header is ${fdReader.pos} bytes, new header is ${newHeader.length} bytes)`) 72 | } 73 | if (typeof fd === 'number') { 74 | await fswrite(fd, newHeader, 0, newHeader.length, 0) 75 | } else if (typeof fd === 'object' && typeof fd.read === 'function') { // FileDescriptor 76 | await fd.write(newHeader, 0, newHeader.length, 0) 77 | } 78 | } 79 | } 80 | 81 | export const __browser = !hasFS 82 | -------------------------------------------------------------------------------- /test/_fixtures_to_js.mjs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { readdir, readFile, writeFile } from 'fs/promises' 4 | import { dirname, join } from 'path' 5 | 6 | async function main () { 7 | const thisdir = dirname(new URL(import.meta.url).pathname) 8 | const outfile = join(thisdir, 'fixtures.js') 9 | const fixturesdir = join(thisdir, 'fixtures') 10 | const files = await readdir(fixturesdir) 11 | let content = '/** @type {Record} */\nexport const data = {\n' 12 | for (const f of files) { 13 | content += ` '${f}': '` 14 | content += (await readFile(join(fixturesdir, f))).toString('base64') 15 | content += '\',\n' 16 | } 17 | content += ' _: \'\'\n}\n' 18 | await writeFile(join(outfile), content, 'utf8') 19 | } 20 | 21 | main().catch((err) => { 22 | console.error(err) 23 | process.exit(1) 24 | }) 25 | -------------------------------------------------------------------------------- /test/common.js: -------------------------------------------------------------------------------- 1 | import * as dagCbor from '@ipld/dag-cbor' 2 | import * as dagPb from '@ipld/dag-pb' 3 | import { assert } from 'aegir/chai' 4 | import { bytes, CID } from 'multiformats' 5 | import * as raw from 'multiformats/codecs/raw' 6 | import { sha256 } from 'multiformats/hashes/sha2' 7 | 8 | /** 9 | * @typedef {import('../src/api.js').Block} Block 10 | * @typedef {import('@ipld/dag-pb').PBNode} PBNode 11 | */ 12 | 13 | /** 14 | * @extends {Block} 15 | */ 16 | class TestBlock { 17 | /** 18 | * @param {Uint8Array} bytes 19 | * @param {CID} cid 20 | * @param {any} object 21 | */ 22 | constructor (bytes, cid, object) { 23 | this.bytes = bytes 24 | this.cid = cid 25 | this.object = object 26 | } 27 | } 28 | 29 | /** @type {TestBlock[]} */ 30 | let rawBlocks 31 | /** @type {TestBlock[]} */ 32 | const pbBlocks = [] 33 | /** @type {TestBlock[]} */ 34 | const cborBlocks = [] 35 | /** @type {[string, TestBlock[]][]} */ 36 | let allBlocks 37 | /** @type {TestBlock[]} */ 38 | let allBlocksFlattened 39 | 40 | const rndCid = CID.parse('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm') 41 | 42 | /** 43 | * @param {any} object 44 | * @param {{code: number, encode: (obj: any) => Uint8Array}} codec 45 | * @param {import('multiformats/cid').Version} version 46 | * @returns {Promise} 47 | */ 48 | async function toBlock (object, codec, version = 1) { 49 | const bytes = codec.encode(object) 50 | const hash = await sha256.digest(bytes) 51 | const cid = CID.create(version, codec.code, hash) 52 | return new TestBlock(bytes, cid, object) 53 | } 54 | 55 | async function makeData () { 56 | if (!rawBlocks) { 57 | rawBlocks = await Promise.all('aaaa bbbb cccc zzzz'.split(' ').map((s) => { 58 | return toBlock(new TextEncoder().encode(s), raw) 59 
| })) 60 | 61 | /** 62 | * @param {string} name 63 | * @param {TestBlock} block 64 | */ 65 | const toPbLink = (name, block) => { 66 | let size = block.bytes.length 67 | if (block.cid.code === 0x70) { 68 | // special cumulative size handling for linking to dag-pb blocks 69 | /** @type {PBNode} */ 70 | const node = block.object 71 | size = node.Links.reduce((p, c) => p + (c.Tsize || 0), size) 72 | } 73 | return { 74 | Name: name, 75 | Tsize: size, 76 | Hash: block.cid 77 | } 78 | } 79 | 80 | pbBlocks.push(await toBlock({ Links: [toPbLink('cat', rawBlocks[0])] }, dagPb, 0)) 81 | pbBlocks.push(await toBlock({ 82 | Links: [toPbLink('dog', rawBlocks[1]), toPbLink('first', pbBlocks[0])] 83 | }, dagPb, 0)) 84 | pbBlocks.push(await toBlock({ 85 | Links: [toPbLink('bear', rawBlocks[2]), toPbLink('second', pbBlocks[1])] 86 | }, dagPb, 0)) 87 | 88 | const cbstructs = [['blip', pbBlocks[2].cid], ['limbo', null]] 89 | for (const b of cbstructs) { 90 | cborBlocks.push(await toBlock({ name: b[0], link: b[1] }, dagCbor)) 91 | } 92 | 93 | allBlocks = [['raw', rawBlocks.slice(0, 3)], ['pb', pbBlocks], ['cbor', cborBlocks]] 94 | allBlocksFlattened = allBlocks.reduce((/** @type {TestBlock[]} */ p, c) => p.concat(c[1]), /** @type {TestBlock[]} */ []) 95 | } 96 | 97 | return { 98 | rawBlocks, 99 | pbBlocks, 100 | cborBlocks, 101 | allBlocks, 102 | allBlocksFlattened 103 | } 104 | } 105 | 106 | /** 107 | * @param {Uint8Array} data 108 | * @param {number} chunkSize 109 | * @returns {AsyncIterable} 110 | */ 111 | function makeIterable (data, chunkSize) { 112 | let pos = 0 113 | return { 114 | [Symbol.asyncIterator] () { 115 | return { 116 | async next () { 117 | await new Promise((resolve) => setTimeout(resolve, 5)) 118 | if (pos >= data.length) { 119 | return { done: true, value: undefined } 120 | } 121 | const value = data.slice(pos, pos += chunkSize) 122 | return { done: false, value } 123 | } 124 | } 125 | } 126 | } 127 | } 128 | 129 | const carBytes = bytes.fromHex('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e01280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e646877626262622801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a2745163636363511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b4120363617418048001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d01205666972737418338301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c6970360171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f') 130 | 131 | // go.car is written as a graph, not by the allBlocks ordering here, so ordering is slightly out 132 | const goCarBytes = 
bytes.fromHex('63a265726f6f747382d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8bd82a5825000171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365b6776657273696f6e015b01711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8ba2646c696e6bd82a582300122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de646e616d6564626c69708301122002acecc5de2438ea4126a3010ecb1f8a599c8eff22fff1a1dcffe999b27fd3de122e0a2401551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a274511204626561721804122f0a22122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a12067365636f6e641895012801551220b6fbd675f98e2abd22d4ed29fdc83150fedc48597e92dd1a7a24381d44a27451636363638001122079a982de3c9907953d4d323cee1d0fb1ed8f45f8ef02870c0cb9e09246bd530a122d0a240155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e6468771203646f671804122d0a221220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0120566697273741833280155122081cc5b17018674b401b42f35ba07bb79e211239c23bffe658da1577e3e64687762626262511220e7dc486e97e6ebe5cdabab3e392bdad128b6e09acc94bb4e2aa2af7b986d24d0122d0a240155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b412036361741804280155122061be55a8e2f6b4e172338bddf184d6dbee29c98853e0a0485ecee7f27b9af0b461616161360171122069ea0740f9807a28f4d932c62e7c1c83be055e55072c90266ab3e79df63a365ba2646c696e6bf6646e616d65656c696d626f') 133 | const goCarRoots = [ 134 | CID.parse('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), 135 | CID.parse('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm') 136 | ] 137 | const goCarIndex = [ 138 | { cid: CID.parse('bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm'), offset: 100, length: 92, blockOffset: 137, blockLength: 55 }, 139 | { cid: CID.parse('QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d'), offset: 192, length: 133, blockOffset: 228, blockLength: 97 }, 140 | { cid: CID.parse('bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke'), offset: 325, length: 41, blockOffset: 362, blockLength: 4 }, 141 | { cid: CID.parse('QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys'), offset: 366, length: 130, blockOffset: 402, blockLength: 94 }, 142 | { cid: CID.parse('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4'), offset: 496, length: 41, blockOffset: 533, blockLength: 4 }, 143 | { cid: CID.parse('QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT'), offset: 537, length: 82, blockOffset: 572, blockLength: 47 }, 144 | { cid: CID.parse('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), offset: 619, length: 41, blockOffset: 656, blockLength: 4 }, 145 | { cid: CID.parse('bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm'), offset: 660, length: 55, blockOffset: 697, blockLength: 18 } 146 | ] 147 | 148 | const goCarV2Bytes = 
bytes.fromHex('0aa16776657273696f6e02000000000000000000000000000000003300000000000000c001000000000000f30100000000000038a265726f6f747381d82a5823001220fb16f5083412ef1371d031ed4aa239903d84efdadf1ba3cd678e6475b1a232f86776657273696f6e01511220fb16f5083412ef1371d031ed4aa239903d84efdadf1ba3cd678e6475b1a232f8122d0a221220d9c0d5376d26f1931f7ad52d7acc00fc1090d2edb0808bf61eeb0a152826f6261204f09f8da418a40185011220d9c0d5376d26f1931f7ad52d7acc00fc1090d2edb0808bf61eeb0a152826f62612310a221220d745b7757f5b4593eeab7820306c7bc64eb496a7410a0d07df7a34ffec4b97f1120962617272656c657965183a122e0a2401551220a2e1c40da1ae335d4dffe729eb4d5ca23b74b9e51fc535f4a804a261080c294d1204f09f90a11807581220d745b7757f5b4593eeab7820306c7bc64eb496a7410a0d07df7a34ffec4b97f112340a2401551220b474a99a2705e23cf905a484ec6d14ef58b56bbe62e9292783466ec363b5072d120a666973686d6f6e67657218042801551220b474a99a2705e23cf905a484ec6d14ef58b56bbe62e9292783466ec363b5072d666973682b01551220a2e1c40da1ae335d4dffe729eb4d5ca23b74b9e51fc535f4a804a261080c294d6c6f62737465720100000028000000c800000000000000a2e1c40da1ae335d4dffe729eb4d5ca23b74b9e51fc535f4a804a261080c294d9401000000000000b474a99a2705e23cf905a484ec6d14ef58b56bbe62e9292783466ec363b5072d6b01000000000000d745b7757f5b4593eeab7820306c7bc64eb496a7410a0d07df7a34ffec4b97f11201000000000000d9c0d5376d26f1931f7ad52d7acc00fc1090d2edb0808bf61eeb0a152826f6268b00000000000000fb16f5083412ef1371d031ed4aa239903d84efdadf1ba3cd678e6475b1a232f83900000000000000') 149 | const goCarV2Roots = [CID.parse('QmfEoLyB5NndqeKieExd1rtJzTduQUPEV8TwAYcUiy3H5Z')] 150 | const goCarV2Index = [ 151 | { blockLength: 47, blockOffset: 143, cid: CID.parse('QmfEoLyB5NndqeKieExd1rtJzTduQUPEV8TwAYcUiy3H5Z'), length: 82, offset: 108 }, 152 | { blockLength: 99, blockOffset: 226, cid: CID.parse('QmczfirA7VEH7YVvKPTPoU69XM3qY4DC39nnTsWd4K3SkM'), length: 135, offset: 190 }, 153 | { blockLength: 54, blockOffset: 360, cid: CID.parse('Qmcpz2FHJD7VAhg1fxFXdYJKePtkx1BsHuCrAgWVnaHMTE'), length: 89, offset: 325 }, 154 | { blockLength: 4, blockOffset: 451, cid: CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu'), length: 41, offset: 414 }, 155 | { blockLength: 7, blockOffset: 492, cid: CID.parse('bafkreifc4hca3inognou377hfhvu2xfchn2ltzi7yu27jkaeujqqqdbjju'), length: 44, offset: 455 } 156 | ] 157 | /** @type {{[k in string]: any}} */ 158 | const goCarV2Contents = { 159 | QmfEoLyB5NndqeKieExd1rtJzTduQUPEV8TwAYcUiy3H5Z: { 160 | Links: [{ 161 | Hash: CID.parse('QmczfirA7VEH7YVvKPTPoU69XM3qY4DC39nnTsWd4K3SkM'), 162 | Name: '🍤', 163 | Tsize: 164 164 | }] 165 | }, 166 | QmczfirA7VEH7YVvKPTPoU69XM3qY4DC39nnTsWd4K3SkM: { 167 | Links: [ 168 | { 169 | Hash: CID.parse('Qmcpz2FHJD7VAhg1fxFXdYJKePtkx1BsHuCrAgWVnaHMTE'), 170 | Name: 'barreleye', 171 | Tsize: 58 172 | }, 173 | { 174 | Hash: CID.parse('bafkreifc4hca3inognou377hfhvu2xfchn2ltzi7yu27jkaeujqqqdbjju'), 175 | Name: '🐡', 176 | Tsize: 7 177 | } 178 | ] 179 | }, 180 | Qmcpz2FHJD7VAhg1fxFXdYJKePtkx1BsHuCrAgWVnaHMTE: { 181 | Links: [{ 182 | Hash: CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu'), 183 | Name: 'fishmonger', 184 | Tsize: 4 185 | }] 186 | }, 187 | bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu: 'fish', 188 | bafkreifc4hca3inognou377hfhvu2xfchn2ltzi7yu27jkaeujqqqdbjju: 'lobster' 189 | } 190 | 191 | export { 192 | toBlock, 193 | assert, 194 | makeData, 195 | makeIterable, 196 | rndCid, 197 | carBytes, 198 | goCarBytes, 199 | goCarRoots, 200 | goCarIndex, 201 | goCarV2Bytes, 202 | goCarV2Roots, 203 | goCarV2Index, 204 | goCarV2Contents 205 | } 206 | 
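The `goCarV2Contents` table above maps each CID in the go-generated CARv2 fixture to its decoded form: dag-pb nodes (codec `0x70`) as `{ Links: [...] }` objects and raw leaves as UTF-8 strings. A small sketch of how those fixtures might be walked and decoded is shown below (illustrative only; it assumes it sits alongside `test/common.js` and runs under Node.js ESM with top-level await).

```js
import * as dagPb from '@ipld/dag-pb'
import { CarReader } from '@ipld/car'
import { goCarV2Bytes, goCarV2Contents } from './common.js'

// Walk the CARv2 fixture and decode each block: dag-pb (0x70) nodes become
// { Links: [...] } objects, raw leaves are plain UTF-8 strings, mirroring the
// shape of the goCarV2Contents table above.
const reader = await CarReader.fromBytes(goCarV2Bytes)
for await (const { cid, bytes } of reader.blocks()) {
  const decoded = cid.code === 0x70 ? dagPb.decode(bytes) : new TextDecoder().decode(bytes)
  console.log(cid.toString(), decoded, cid.toString() in goCarV2Contents)
}
```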
-------------------------------------------------------------------------------- /test/fixtures/sample-corrupt-pragma.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-corrupt-pragma.car -------------------------------------------------------------------------------- /test/fixtures/sample-index.carindex: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-index.carindex -------------------------------------------------------------------------------- /test/fixtures/sample-rootless-v42.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-rootless-v42.car -------------------------------------------------------------------------------- /test/fixtures/sample-rw-bs-v2.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-rw-bs-v2.car -------------------------------------------------------------------------------- /test/fixtures/sample-unixfs-v2.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-unixfs-v2.car -------------------------------------------------------------------------------- /test/fixtures/sample-v1-noidentity.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v1-noidentity.car -------------------------------------------------------------------------------- /test/fixtures/sample-v1-tailing-corrupt-section.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v1-tailing-corrupt-section.car -------------------------------------------------------------------------------- /test/fixtures/sample-v1-with-zero-len-section.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v1-with-zero-len-section.car -------------------------------------------------------------------------------- /test/fixtures/sample-v1-with-zero-len-section2.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v1-with-zero-len-section2.car -------------------------------------------------------------------------------- /test/fixtures/sample-v1.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v1.car -------------------------------------------------------------------------------- /test/fixtures/sample-v2-corrupt-data-and-index.car: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v2-corrupt-data-and-index.car -------------------------------------------------------------------------------- /test/fixtures/sample-v2-indexless.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-v2-indexless.car -------------------------------------------------------------------------------- /test/fixtures/sample-wrapped-v2.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/fixtures/sample-wrapped-v2.car -------------------------------------------------------------------------------- /test/go.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/go.car -------------------------------------------------------------------------------- /test/go.carv2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipld/js-car/f9c3da2b7200efd4d12b44ed3d94b1aeabb911fd/test/go.carv2 -------------------------------------------------------------------------------- /test/node-test-file-streams.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | // this test is not intended to run in the browser 4 | 5 | import fs from 'fs' 6 | import path from 'path' 7 | import { Readable, pipeline } from 'stream' 8 | import { fileURLToPath } from 'url' 9 | import { promisify } from 'util' 10 | import { CarReader, CarWriter } from '../src/index.js' 11 | import { makeData, assert } from './common.js' 12 | import { 13 | verifyRoots, 14 | verifyHas, 15 | verifyGet, 16 | verifyBlocks, 17 | verifyCids 18 | } from './verify-store-reader.js' 19 | 20 | /** @typedef {import('multiformats').CID} CID */ 21 | /** @typedef {import('../src/api.js').Block} Block */ 22 | 23 | const __filename = fileURLToPath(import.meta.url) 24 | const __dirname = path.dirname(__filename) 25 | 26 | const tmpCarPath = path.join(__dirname, 'tmp.car') 27 | 28 | describe('Node Streams CarReader.fromIterable()', () => { 29 | /** @type {Block[]} */ 30 | let allBlocksFlattened 31 | /** @type {CID[]} */ 32 | let roots 33 | 34 | before(async () => { 35 | const data = await makeData() 36 | const cborBlocks = data.cborBlocks 37 | allBlocksFlattened = data.allBlocksFlattened 38 | roots = [cborBlocks[0].cid, cborBlocks[1].cid] 39 | try { 40 | await fs.promises.unlink(tmpCarPath) 41 | } catch (e) {} 42 | }) 43 | 44 | it('from fixture file', async () => { 45 | const inStream = fs.createReadStream(path.join(__dirname, './go.car')) 46 | const reader = await CarReader.fromIterable(inStream) 47 | await verifyRoots(reader) 48 | await verifyHas(reader) 49 | await verifyGet(reader) 50 | await verifyBlocks(reader.blocks(), true) 51 | await verifyCids(reader.cids(), true) 52 | }) 53 | 54 | it('complete', async () => { 55 | const { writer, out } = CarWriter.create(roots) 56 | 57 | const pipe = promisify(pipeline)( 58 | Readable.from(out), 59 | fs.createWriteStream(tmpCarPath) 60 | ) 61 | 62 | for (const block of allBlocksFlattened) { 63 | await writer.put(block) 64 | } 65 | await writer.close() 66 | await pipe 67 | 68 | const sizes = await Promise.all(['go.car', 
'tmp.car'].map(async (car) => { 69 | return (await fs.promises.stat(path.join(__dirname, car))).size 70 | })) 71 | 72 | assert.strictEqual(sizes[0], sizes[1]) 73 | 74 | const inStream = fs.createReadStream(tmpCarPath) 75 | const reader = await CarReader.fromIterable(inStream) 76 | await verifyRoots(reader) 77 | await verifyHas(reader) 78 | await verifyGet(reader) 79 | await verifyBlocks(reader.blocks(), true) 80 | await verifyCids(reader.cids(), true) 81 | 82 | await fs.promises.unlink(tmpCarPath) 83 | }) 84 | }) 85 | -------------------------------------------------------------------------------- /test/node-test-indexed-reader.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import path from 'path' 4 | import { fileURLToPath } from 'url' 5 | import { CarIndexedReader } from '../src/indexed-reader.js' 6 | import { assert, goCarIndex } from './common.js' 7 | import { 8 | verifyRoots, 9 | verifyHas, 10 | verifyGet, 11 | verifyBlocks, 12 | verifyCids 13 | } from './verify-store-reader.js' 14 | 15 | const __filename = fileURLToPath(import.meta.url) 16 | const __dirname = path.dirname(__filename) 17 | 18 | describe('CarIndexedReader fromFile()', () => { 19 | it('complete', async () => { 20 | const reader = await CarIndexedReader.fromFile(path.join(__dirname, 'go.car')) 21 | await verifyRoots(reader) 22 | await verifyHas(reader) 23 | await verifyGet(reader) 24 | await verifyBlocks(reader.blocks(), true) 25 | await verifyCids(reader.cids(), true) 26 | // now verify the ordering is correct 27 | let i = 0 28 | for await (const block of reader.blocks()) { 29 | assert.strictEqual(block.cid.toString(), goCarIndex[i++].cid.toString()) 30 | } 31 | i = 0 32 | for await (const cid of reader.cids()) { 33 | assert.strictEqual(cid.toString(), goCarIndex[i++].cid.toString()) 34 | } 35 | assert.strictEqual(reader.version, 1) 36 | await reader.close() 37 | }) 38 | 39 | it('bad argument', async () => { 40 | for (const arg of [true, false, null, undefined, Uint8Array.from([1, 2, 3]), 100, { obj: 'nope' }]) { 41 | // @ts-ignore 42 | await assert.isRejected(CarIndexedReader.fromFile(arg)) 43 | } 44 | }) 45 | }) 46 | -------------------------------------------------------------------------------- /test/node-test-large.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | // this test is not intended to run in the browser 4 | 5 | import fs from 'fs' 6 | import { Readable } from 'stream' 7 | import * as dagCbor from '@ipld/dag-cbor' 8 | import { garbage } from '@ipld/garbage' 9 | import { CID } from 'multiformats/cid' 10 | import { sha256 } from 'multiformats/hashes/sha2' 11 | import varint from 'varint' 12 | import { CarWriter, CarIndexer, CarReader, CarIndexedReader } from '../src/index.js' 13 | import { assert } from './common.js' 14 | 15 | /** @typedef {import('../src/api.js').BlockIndex} BlockIndex */ 16 | 17 | describe('Large CAR', () => { 18 | /** @type {any[]} */ 19 | const objects = [] 20 | /** @type {string[]} */ 21 | const cids = [] 22 | /** @type {BlockIndex[]} */ 23 | const expectedIndex = [] 24 | 25 | it('create, no roots', async () => { 26 | const { writer, out } = CarWriter.create([]) 27 | Readable.from(out).pipe(fs.createWriteStream('./test.car')) 28 | 29 | // offset starts at header length 30 | let offset = dagCbor.encode({ version: 1, roots: [] }).length 31 | offset += varint.encode(offset).length 32 | 33 | for (let i = 0; i < 500; i++) { 34 | const obj = 
garbage(1000) 35 | objects.push(obj) 36 | const bytes = dagCbor.encode(obj) 37 | const hash = await sha256.digest(bytes) 38 | const cid = CID.create(1, dagCbor.code, hash) 39 | cids.push(cid.toString()) 40 | const blockLength = bytes.length 41 | let length = cid.bytes.length + blockLength 42 | const lengthLength = varint.encode(length).length 43 | length += lengthLength 44 | const blockOffset = offset + lengthLength + cid.bytes.length 45 | expectedIndex.push({ cid, offset, length, blockOffset, blockLength }) 46 | offset += length 47 | await writer.put({ cid, bytes }) 48 | } 49 | 50 | await writer.close() 51 | }) 52 | 53 | it('CarIndexer.fromIterable', async () => { 54 | const indexer = await CarIndexer.fromIterable(fs.createReadStream('./test.car')) 55 | assert.deepStrictEqual(await indexer.getRoots(), []) 56 | let i = 0 57 | for await (const blockIndex of indexer) { 58 | assert.deepStrictEqual(blockIndex, expectedIndex[i]) 59 | i++ 60 | } 61 | }) 62 | 63 | it('CarIndexer.fromBytes', async () => { 64 | const indexer = await CarIndexer.fromBytes(await fs.promises.readFile('./test.car')) 65 | assert.deepStrictEqual(await indexer.getRoots(), []) 66 | let i = 0 67 | for await (const blockIndex of indexer) { 68 | assert.deepStrictEqual(blockIndex, expectedIndex[i]) 69 | i++ 70 | } 71 | }) 72 | 73 | it('CarReader.fromBytes', async () => { 74 | const reader = await CarReader.fromBytes(await fs.promises.readFile('./test.car')) 75 | assert.deepStrictEqual(await reader.getRoots(), []) 76 | let i = 0 77 | for await (const { cid, bytes } of reader.blocks()) { 78 | assert.strictEqual(cid.toString(), cids[i], `cid #${i} ${cid} <> ${cids[i]}`) 79 | const obj = dagCbor.decode(bytes) 80 | assert.deepStrictEqual(obj, objects[i], `object #${i}`) 81 | i++ 82 | } 83 | }) 84 | 85 | it('CarReader.fromIterable', async () => { 86 | const reader = await CarReader.fromIterable(fs.createReadStream('./test.car')) 87 | assert.deepStrictEqual(await reader.getRoots(), []) 88 | let i = 0 89 | for await (const { cid, bytes } of reader.blocks()) { 90 | assert.strictEqual(cid.toString(), cids[i], `cid #${i} ${cid} <> ${cids[i]}`) 91 | const obj = dagCbor.decode(bytes) 92 | assert.deepStrictEqual(obj, objects[i], `object #${i}`) 93 | i++ 94 | } 95 | }) 96 | 97 | it('CarIndexedReader.fromFile', async () => { 98 | const reader = await CarIndexedReader.fromFile('./test.car') 99 | assert.deepStrictEqual(await reader.getRoots(), []) 100 | let i = 0 101 | for await (const { cid, bytes } of reader.blocks()) { 102 | assert.strictEqual(cid.toString(), cids[i], `cid #${i} ${cid} <> ${cids[i]}`) 103 | const obj = dagCbor.decode(bytes) 104 | assert.deepStrictEqual(obj, objects[i], `object #${i}`) 105 | i++ 106 | } 107 | }) 108 | 109 | after(async () => { 110 | return fs.promises.unlink('./test.car').catch(() => {}) 111 | }) 112 | }) 113 | -------------------------------------------------------------------------------- /test/node-test-raw.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | // this test is not intended to run in the browser 4 | 5 | import fs from 'fs' 6 | import path from 'path' 7 | import { fileURLToPath } from 'url' 8 | import { promisify } from 'util' 9 | import { bytes } from 'multiformats' 10 | import { CarReader } from '../src/index.js' 11 | import { assert, makeData, goCarIndex } from './common.js' 12 | 13 | /** @typedef {import('../src/api.js').Block} Block */ 14 | 15 | const fsopen = promisify(fs.open) 16 | const fsclose = promisify(fs.close) 17 | 
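An aside on the index arithmetic in the large-CAR test above: each CARv1 section is laid out as a varint length prefix covering the CID plus the block data, followed by the CID bytes, followed by the data bytes, which is where the `offset` / `length` / `blockOffset` bookkeeping comes from. The helper below restates that calculation as a self-contained sketch (illustrative only, not part of the test suite).

```js
import varint from 'varint'

// Given a block and the offset at which its section starts, compute the same
// figures the expectedIndex bookkeeping in the test above tracks.
function sectionInfo (cid, bytes, sectionOffset) {
  const bodyLength = cid.bytes.length + bytes.length    // CID + data
  const varintLength = varint.encode(bodyLength).length // length prefix
  return {
    length: varintLength + bodyLength,                  // whole section
    blockOffset: sectionOffset + varintLength + cid.bytes.length,
    blockLength: bytes.length
  }
}
```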
18 | const { toHex } = bytes 19 | const __filename = fileURLToPath(import.meta.url) 20 | const __dirname = path.dirname(__filename) 21 | 22 | describe('CarReader.readRaw', () => { 23 | /** @type {Block[]} */ 24 | let allBlocksFlattened 25 | 26 | before(async () => { 27 | const data = await makeData() 28 | allBlocksFlattened = data.allBlocksFlattened 29 | }) 30 | 31 | /** 32 | * @param {fs.promises.FileHandle | number} fd 33 | */ 34 | async function verifyRead (fd) { 35 | const expectedBlocks = allBlocksFlattened.slice() 36 | const expectedCids = [] 37 | for (const { cid } of expectedBlocks) { 38 | expectedCids.push(cid.toString()) 39 | } 40 | 41 | for (const blockIndex of goCarIndex) { 42 | const { cid, bytes } = await CarReader.readRaw(fd, blockIndex) 43 | const index = expectedCids.indexOf(cid.toString()) 44 | assert.ok(index >= 0, 'got expected block') 45 | assert.strictEqual( 46 | toHex(expectedBlocks[index].bytes), 47 | toHex(bytes), 48 | 'got expected block content') 49 | expectedBlocks.splice(index, 1) 50 | expectedCids.splice(index, 1) 51 | } 52 | assert.strictEqual(expectedBlocks.length, 0, 'got all expected blocks') 53 | } 54 | 55 | it('read raw using index (fd)', async () => { 56 | const fd = await fsopen(path.join(__dirname, 'go.car'), 'r') 57 | await verifyRead(fd) 58 | await fsclose(fd) 59 | }) 60 | 61 | it('read raw using index (FileHandle)', async () => { 62 | const fd = await fs.promises.open(path.join(__dirname, 'go.car'), 'r') 63 | await verifyRead(fd) 64 | await fd.close() 65 | }) 66 | 67 | it('errors', async () => { 68 | // @ts-ignore 69 | await assert.isRejected(CarReader.readRaw(true, goCarIndex[0]), { 70 | name: 'TypeError', 71 | message: 'Bad fd' 72 | }) 73 | 74 | const badBlock = Object.assign({}, goCarIndex[goCarIndex.length - 1]) 75 | badBlock.blockLength += 10 76 | const fd = await fsopen(path.join(__dirname, 'go.car'), 'r') 77 | await assert.isRejected(CarReader.readRaw(fd, badBlock), { 78 | name: 'Error', 79 | message: `Failed to read entire block (${badBlock.blockLength - 10} instead of ${badBlock.blockLength})` 80 | }) 81 | await fsclose(fd) 82 | }) 83 | }) 84 | -------------------------------------------------------------------------------- /test/node-test-updateroots.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | // this test is not intended to run in the browser 4 | 5 | import fs from 'fs' 6 | import path from 'path' 7 | import { fileURLToPath } from 'url' 8 | import { promisify } from 'util' 9 | import { CID } from 'multiformats/cid' 10 | import { CarReader } from '../src/reader.js' 11 | import { CarWriter } from '../src/writer.js' 12 | import { assert } from './common.js' 13 | import { 14 | verifyRoots, 15 | verifyHas, 16 | verifyGet, 17 | verifyBlocks, 18 | verifyCids 19 | } from './verify-store-reader.js' 20 | 21 | const fsopen = promisify(fs.open) 22 | const fsclose = promisify(fs.close) 23 | 24 | const __filename = fileURLToPath(import.meta.url) 25 | const __dirname = path.dirname(__filename) 26 | 27 | const goCarPath = path.join(__dirname, 'go.car') 28 | const tmpCarPath = path.join(__dirname, 'tmp.car') 29 | 30 | const newRoots = [ 31 | CID.parse('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), 32 | CID.parse('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4') 33 | ] 34 | 35 | async function verify () { 36 | const reader = await CarReader.fromIterable(fs.createReadStream(tmpCarPath)) 37 | await assert.isRejected(verifyRoots(reader)) // whoa, 
different roots? like magic 38 | assert.deepEqual(await reader.getRoots(), newRoots) 39 | await verifyHas(reader) 40 | await verifyGet(reader) 41 | await verifyBlocks(reader.blocks(), true) 42 | await verifyCids(reader.cids(), true) 43 | } 44 | 45 | describe('Node CarWriter.updateHeader()', () => { 46 | before(async () => { 47 | try { 48 | await fs.promises.unlink(tmpCarPath) 49 | } catch (e) {} 50 | }) 51 | 52 | beforeEach(async () => { 53 | await fs.promises.copyFile(goCarPath, tmpCarPath) 54 | }) 55 | 56 | afterEach(async () => { 57 | await fs.promises.unlink(tmpCarPath) 58 | }) 59 | 60 | it('update roots (fd)', async () => { 61 | const fd = await fsopen(tmpCarPath, 'r+') 62 | await CarWriter.updateRootsInFile(fd, newRoots) 63 | await fsclose(fd) 64 | await verify() 65 | }) 66 | 67 | it('update roots (FileHandle)', async () => { 68 | const fd = await fs.promises.open(tmpCarPath, 'r+') 69 | await CarWriter.updateRootsInFile(fd, newRoots) 70 | await fd.close() 71 | await verify() 72 | }) 73 | 74 | it('error: bad fd', async () => { 75 | // @ts-ignore 76 | await assert.isRejected(CarWriter.updateRootsInFile(true, newRoots), { 77 | name: 'TypeError', 78 | message: 'Bad fd' 79 | }) 80 | }) 81 | 82 | it('error: wrong header size', async () => { 83 | const fd = await fs.promises.open(tmpCarPath, 'r+') 84 | await assert.isRejected(CarWriter.updateRootsInFile(fd, [...newRoots, newRoots[0]]), /can only overwrite a header of the same length/) 85 | await assert.isRejected(CarWriter.updateRootsInFile(fd, [newRoots[0]]), /can only overwrite a header of the same length/) 86 | await assert.isRejected(CarWriter.updateRootsInFile(fd, []), /can only overwrite a header of the same length/) 87 | await fd.close() 88 | }) 89 | }) 90 | -------------------------------------------------------------------------------- /test/node.js: -------------------------------------------------------------------------------- 1 | import './node-test-file-streams.js' 2 | import './node-test-indexed-reader.js' 3 | import './node-test-large.js' 4 | import './node-test-raw.js' 5 | import './node-test-updateroots.js' 6 | -------------------------------------------------------------------------------- /test/test-buffer-writer.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import * as CBOR from '@ipld/dag-cbor' 4 | import { CID, varint } from 'multiformats' 5 | import * as Block from 'multiformats/block' 6 | import * as Raw from 'multiformats/codecs/raw' 7 | import { identity } from 'multiformats/hashes/identity' 8 | import { sha256, sha512 } from 'multiformats/hashes/sha2' 9 | import * as CarBufferWriter from '../src/buffer-writer.js' 10 | import { createHeader } from '../src/encoder.js' 11 | import { CarReader } from '../src/reader.js' 12 | import { assert } from './common.js' 13 | 14 | describe('CarBufferWriter', () => { 15 | const cid = CID.parse('bafkreifuosuzujyf4i6psbneqtwg2fhplc2wxptc5euspa2gn3bwhnihfu') 16 | describe('calculateHeaderLength', async () => { 17 | for (const count of [0, 1, 10, 18, 24, 48, 124, 255, 258, 65536 - 1, 65536]) { 18 | it(`calculateHeaderLength(new Array(${count}).fill(36))`, () => { 19 | const roots = new Array(count).fill(cid) 20 | const sizes = new Array(count).fill(cid.bytes.byteLength) 21 | assert.deepEqual( 22 | CarBufferWriter.calculateHeaderLength(sizes), 23 | createHeader(roots).byteLength 24 | ) 25 | }) 26 | it(`calculateHeaderLength(new Array(${count}).fill(36))`, () => { 27 | const roots = new 
Array(count).fill(cid) 28 | const rootLengths = roots.map((c) => c.bytes.byteLength) 29 | assert.deepEqual(CarBufferWriter.calculateHeaderLength(rootLengths), createHeader(roots).byteLength) 30 | }) 31 | } 32 | it('estimate on large CIDs', () => { 33 | const largeCID = CID.parse(`bafkqbbac${'a'.repeat(416)}`) 34 | assert.equal( 35 | CarBufferWriter.calculateHeaderLength([ 36 | cid.bytes.byteLength, 37 | largeCID.bytes.byteLength 38 | ]), 39 | createHeader([ 40 | cid, 41 | largeCID 42 | ]).byteLength 43 | ) 44 | }) 45 | 46 | it('estimate on large CIDs 2', () => { 47 | const largeCID = CID.createV1(Raw.code, identity.digest(new Uint8Array(512).fill(1))) 48 | assert.equal( 49 | CarBufferWriter.calculateHeaderLength([ 50 | cid.bytes.byteLength, 51 | largeCID.bytes.byteLength 52 | ]), 53 | createHeader([cid, largeCID]).byteLength 54 | ) 55 | }) 56 | }) 57 | 58 | describe('writer', () => { 59 | it('estimate header and write blocks', async () => { 60 | const headerSize = CarBufferWriter.estimateHeaderLength(1) 61 | const dataSize = 256 62 | const buffer = new ArrayBuffer(headerSize + dataSize) 63 | const writer = CarBufferWriter.createWriter(buffer, { headerSize }) 64 | const b1 = await Block.encode({ 65 | value: { hello: 'world' }, 66 | codec: CBOR, 67 | hasher: sha256 68 | }) 69 | 70 | writer.write(b1) 71 | 72 | const b2 = await Block.encode({ 73 | value: { bye: 'world' }, 74 | codec: CBOR, 75 | hasher: sha256 76 | }) 77 | writer.write(b2) 78 | 79 | writer.addRoot(b1.cid) 80 | const bytes = writer.close() 81 | 82 | const reader = await CarReader.fromBytes(bytes) 83 | assert.deepEqual(await reader.getRoots(), [b1.cid]) 84 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 85 | }) 86 | 87 | it('overestimate header', async () => { 88 | const headerSize = CarBufferWriter.estimateHeaderLength(2) 89 | const dataSize = 256 90 | const buffer = new ArrayBuffer(headerSize + dataSize) 91 | const writer = CarBufferWriter.createWriter(buffer, { headerSize }) 92 | const b1 = await Block.encode({ 93 | value: { hello: 'world' }, 94 | codec: CBOR, 95 | hasher: sha256 96 | }) 97 | 98 | writer.write(b1) 99 | 100 | const b2 = await Block.encode({ 101 | value: { bye: 'world' }, 102 | codec: CBOR, 103 | hasher: sha256 104 | }) 105 | writer.write(b2) 106 | 107 | writer.addRoot(b1.cid) 108 | assert.throws(() => writer.close(), /Header size was overestimate/) 109 | const bytes = writer.close({ resize: true }) 110 | 111 | const reader = await CarReader.fromBytes(bytes) 112 | assert.deepEqual(await reader.getRoots(), [b1.cid]) 113 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 114 | }) 115 | 116 | it('underestimate header', async () => { 117 | const headerSize = CarBufferWriter.estimateHeaderLength(2) 118 | const dataSize = 300 119 | const buffer = new ArrayBuffer(headerSize + dataSize) 120 | const writer = CarBufferWriter.createWriter(buffer, { headerSize }) 121 | const b1 = await Block.encode({ 122 | value: { hello: 'world' }, 123 | codec: CBOR, 124 | hasher: sha256 125 | }) 126 | 127 | writer.write(b1) 128 | writer.addRoot(b1.cid) 129 | 130 | const b2 = await Block.encode({ 131 | value: { bye: 'world' }, 132 | codec: CBOR, 133 | hasher: sha512 134 | }) 135 | writer.write(b2) 136 | assert.throws(() => writer.addRoot(b2.cid), /has no capacity/) 137 | writer.addRoot(b2.cid, { resize: true }) 138 | 139 | const bytes = writer.close() 140 | 141 | const reader = await CarReader.fromBytes(bytes) 142 | 
assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid]) 143 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 144 | }) 145 | }) 146 | 147 | it('has no space for the root', async () => { 148 | const headerSize = CarBufferWriter.estimateHeaderLength(1) 149 | const dataSize = 100 150 | const buffer = new ArrayBuffer(headerSize + dataSize) 151 | const writer = CarBufferWriter.createWriter(buffer, { headerSize }) 152 | const b1 = await Block.encode({ 153 | value: { hello: 'world' }, 154 | codec: CBOR, 155 | hasher: sha256 156 | }) 157 | 158 | writer.write(b1) 159 | writer.addRoot(b1.cid) 160 | 161 | const b2 = await Block.encode({ 162 | value: { bye: 'world' }, 163 | codec: CBOR, 164 | hasher: sha256 165 | }) 166 | writer.write(b2) 167 | assert.throws(() => writer.addRoot(b2.cid), /Buffer has no capacity for a new root/) 168 | assert.throws(() => writer.addRoot(b2.cid, { resize: true }), /Buffer has no capacity for a new root/) 169 | 170 | const bytes = writer.close() 171 | 172 | const reader = await CarReader.fromBytes(bytes) 173 | assert.deepEqual(await reader.getRoots(), [b1.cid]) 174 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 175 | }) 176 | 177 | it('has no space for the block', async () => { 178 | const headerSize = CarBufferWriter.estimateHeaderLength(1) 179 | const dataSize = 58 180 | const buffer = new ArrayBuffer(headerSize + dataSize) 181 | const writer = CarBufferWriter.createWriter(buffer, { headerSize }) 182 | const b1 = await Block.encode({ 183 | value: { hello: 'world' }, 184 | codec: CBOR, 185 | hasher: sha256 186 | }) 187 | 188 | writer.write(b1) 189 | writer.addRoot(b1.cid) 190 | 191 | const b2 = await Block.encode({ 192 | value: { bye: 'world' }, 193 | codec: CBOR, 194 | hasher: sha256 195 | }) 196 | assert.throws(() => writer.write(b2), /Buffer has no capacity for this block/) 197 | 198 | const bytes = writer.close() 199 | 200 | const reader = await CarReader.fromBytes(bytes) 201 | assert.deepEqual(await reader.getRoots(), [b1.cid]) 202 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }]) 203 | }) 204 | 205 | it('provide roots', async () => { 206 | const b1 = await Block.encode({ 207 | value: { hello: 'world' }, 208 | codec: CBOR, 209 | hasher: sha256 210 | }) 211 | const b2 = await Block.encode({ 212 | value: { bye: 'world' }, 213 | codec: CBOR, 214 | hasher: sha512 215 | }) 216 | 217 | const buffer = new ArrayBuffer(300) 218 | const writer = CarBufferWriter.createWriter(buffer, { roots: [b1.cid, b2.cid] }) 219 | 220 | writer.write(b1) 221 | writer.write(b2) 222 | 223 | const bytes = writer.close() 224 | 225 | const reader = await CarReader.fromBytes(bytes) 226 | assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid]) 227 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 228 | }) 229 | 230 | it('provide large CID root', async () => { 231 | const bytes = new Uint8Array(512).fill(1) 232 | const b1 = await Block.encode({ 233 | value: { hello: 'world' }, 234 | codec: CBOR, 235 | hasher: sha256 236 | }) 237 | 238 | const b2 = { 239 | cid: CID.createV1(Raw.code, identity.digest(bytes)), 240 | bytes 241 | } 242 | 243 | const headerSize = CBOR.encode({ version: 1, roots: [b1.cid, b2.cid] }).byteLength 244 | const bodySize = CarBufferWriter.blockLength(b1) + CarBufferWriter.blockLength(b2) 245 | const varintSize = varint.encodingLength(headerSize) 246 | 247 | const writer = 
CarBufferWriter.createWriter(new ArrayBuffer(varintSize + headerSize + bodySize), { roots: [b1.cid, b2.cid] }) 248 | 249 | writer.write(b1) 250 | writer.write(b2) 251 | const car = writer.close() 252 | const reader = await CarReader.fromBytes(car) 253 | assert.deepEqual(await reader.getRoots(), [b1.cid, b2.cid]) 254 | assert.deepEqual(reader._blocks, [{ cid: b1.cid, bytes: b1.bytes }, { cid: b2.cid, bytes: b2.bytes }]) 255 | }) 256 | }) 257 | -------------------------------------------------------------------------------- /test/test-errors.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import { encode as cbEncode } from '@ipld/dag-cbor' 4 | import { bytes } from 'multiformats' 5 | import { encode as vEncode } from 'varint' 6 | import { CarReader } from '../src/reader.js' 7 | import { carBytes, assert, goCarV2Bytes } from './common.js' 8 | 9 | /** 10 | * @param {any} block 11 | * @returns {Uint8Array} 12 | */ 13 | function makeHeader (block) { 14 | const u = cbEncode(block) 15 | const l = vEncode(u.length) 16 | const u2 = new Uint8Array(u.length + l.length) 17 | u2.set(l, 0) 18 | u2.set(u, l.length) 19 | return u2 20 | } 21 | 22 | describe('Misc errors', () => { 23 | const buf = carBytes.slice() 24 | 25 | it('decode errors', async () => { 26 | // cid v0 27 | const buf2 = new Uint8Array(buf.length) 28 | buf2.set(buf, 0) 29 | buf2[101] = 0 // first block's CID 30 | await assert.isRejected(CarReader.fromBytes(buf2), { 31 | name: 'Error', 32 | message: 'Unexpected CID version (0)' 33 | }) 34 | }) 35 | 36 | it('bad version', async () => { 37 | // quick sanity check that makeHeader() works properly! 38 | const buf2 = bytes.fromHex('0aa16776657273696f6e03') 39 | assert.strictEqual(bytes.toHex(makeHeader({ version: 3 })), '0aa16776657273696f6e03') 40 | // {version:3} 41 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR version: 3') 42 | }) 43 | 44 | describe('bad header', async () => { 45 | it('sanity check', async () => { 46 | // sanity check, this should be fine 47 | const buf2 = makeHeader({ version: 1, roots: [] }) 48 | await assert.isFulfilled(CarReader.fromBytes(buf2)) 49 | }) 50 | 51 | it('no \'version\' array', async () => { 52 | const buf2 = makeHeader({ roots: [] }) 53 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 54 | }) 55 | 56 | it('bad \'version\' type', async () => { 57 | const buf2 = makeHeader({ version: '1', roots: [] }) 58 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 59 | }) 60 | 61 | it('no \'roots\' array', async () => { 62 | const buf2 = makeHeader({ version: 1 }) 63 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 64 | }) 65 | 66 | it('bad \'roots\' type', async () => { 67 | const buf2 = makeHeader({ version: 1, roots: {} }) 68 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 69 | }) 70 | 71 | it('extraneous properties', async () => { 72 | const buf2 = makeHeader({ version: 1, roots: [], blip: true }) 73 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 74 | }) 75 | 76 | it('not an object', async () => { 77 | const buf2 = makeHeader([1, []]) 78 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR header format') 79 | }) 80 | 81 | it('not an object', async () => { 82 | const buf2 = makeHeader(null) 83 | await assert.isRejected(CarReader.fromBytes(buf2), 
Error, 'Invalid CAR header format') 84 | }) 85 | 86 | it('recursive v2 header', async () => { 87 | // first 51 bytes are the carv2 header: 88 | // 11b prefix, 16b characteristics, 8b data offset, 8b data size, 8b index offset 89 | const v2Header = goCarV2Bytes.slice(0, 51) 90 | // parser should expect to get a carv1 header at the data offset, but it uses the same 91 | // code to check the carv2 header so let's make sure it doesn't allow recursive carv2 92 | // headers 93 | const buf2 = new Uint8Array(51 * 2) 94 | buf2.set(v2Header, 0) 95 | buf2.set(v2Header, 51) 96 | await assert.isRejected(CarReader.fromBytes(buf2), Error, 'Invalid CAR version: 2 (expected 1)') 97 | }) 98 | }) 99 | }) 100 | -------------------------------------------------------------------------------- /test/test-indexer.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import { expect } from 'aegir/chai' 4 | import { CarIndexer } from '../src/indexer.js' 5 | import { 6 | goCarBytes, 7 | goCarIndex, 8 | goCarV2Bytes, 9 | goCarV2Roots, 10 | goCarV2Index, 11 | makeIterable, 12 | assert 13 | } from './common.js' 14 | import { verifyRoots } from './verify-store-reader.js' 15 | 16 | describe('CarIndexer fromBytes()', () => { 17 | it('complete', async () => { 18 | const indexer = await CarIndexer.fromBytes(goCarBytes) 19 | await verifyRoots(indexer) // behaves like an Reader for roots 20 | assert.strictEqual(indexer.version, 1) 21 | 22 | const indexData = [] 23 | for await (const index of indexer) { 24 | indexData.push(index) 25 | } 26 | 27 | assert.deepStrictEqual(indexData, goCarIndex) 28 | }) 29 | 30 | it('v2 complete', async () => { 31 | const indexer = await CarIndexer.fromBytes(goCarV2Bytes) 32 | const roots = await indexer.getRoots() 33 | assert.strictEqual(roots.length, 1) 34 | assert.ok(goCarV2Roots[0].equals(roots[0])) 35 | assert.strictEqual(indexer.version, 2) 36 | 37 | const indexData = [] 38 | for await (const index of indexer) { 39 | indexData.push(index) 40 | } 41 | 42 | assert.deepStrictEqual(indexData, goCarV2Index) 43 | }) 44 | 45 | it('bad argument', async () => { 46 | for (const arg of [true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 47 | // @ts-expect-error arg is wrong type 48 | // the assert.isRejected form of this causes an uncatchable error in Chrome 49 | await expect(CarIndexer.fromBytes(arg)).to.eventually.be.rejected() 50 | } 51 | }) 52 | }) 53 | 54 | describe('CarIndexer fromIterable()', () => { 55 | /** @param {CarIndexer} indexer */ 56 | async function verifyIndexer (indexer) { 57 | await verifyRoots(indexer) // behaves like an Reader for roots 58 | assert.strictEqual(indexer.version, 1) 59 | 60 | const indexData = [] 61 | for await (const index of indexer) { 62 | indexData.push(index) 63 | } 64 | 65 | assert.deepStrictEqual(indexData, goCarIndex) 66 | } 67 | 68 | it('complete (single chunk)', async () => { 69 | const indexer = await CarIndexer.fromIterable(makeIterable(goCarBytes, goCarBytes.length)) 70 | return verifyIndexer(indexer) 71 | }) 72 | 73 | it('complete (101-byte chunks)', async () => { 74 | const indexer = await CarIndexer.fromIterable(makeIterable(goCarBytes, 101)) 75 | return verifyIndexer(indexer) 76 | }) 77 | 78 | it('complete (32-byte chunks)', async () => { 79 | const indexer = await CarIndexer.fromIterable(makeIterable(goCarBytes, 32)) 80 | return verifyIndexer(indexer) 81 | }) 82 | 83 | it('bad argument', async () => { 84 | for (const arg of [new Uint8Array(0), true, false, null, 
undefined, 'string', 100, { obj: 'nope' }]) { 85 | // @ts-expect-error arg is wrong type 86 | // the assert.isRejected form of this causes an uncatchable error in Chrome 87 | await expect(CarIndexer.fromIterable(arg)).to.eventually.be.rejected() 88 | } 89 | }) 90 | }) 91 | -------------------------------------------------------------------------------- /test/test-interface.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import * as car from '../src/index.js' 3 | import { CarIndexer } from '../src/indexer.js' 4 | import { CarBlockIterator, CarCIDIterator } from '../src/iterator.js' 5 | import { CarReader, __browser } from '../src/reader.js' 6 | import { CarWriter } from '../src/writer.js' 7 | import { assert } from './common.js' 8 | 9 | // simple sanity check that our main exports match the direct exports 10 | describe('Interface', () => { 11 | it('exports match', () => { 12 | assert.strictEqual(car.CarReader, CarReader) 13 | assert.strictEqual(car.CarIndexer, CarIndexer) 14 | assert.strictEqual(car.CarBlockIterator, CarBlockIterator) 15 | assert.strictEqual(car.CarCIDIterator, CarCIDIterator) 16 | assert.strictEqual(car.CarWriter, CarWriter) 17 | }) 18 | 19 | it('browser exports', () => { 20 | // @ts-ignore 21 | assert.strictEqual(__browser, globalThis.process === undefined) 22 | }) 23 | }) 24 | -------------------------------------------------------------------------------- /test/test-iterator.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import { expect } from 'aegir/chai' 4 | import { CarBlockIterator, CarCIDIterator } from '../src/iterator.js' 5 | import { carBytes, makeIterable, assert } from './common.js' 6 | import { verifyRoots, verifyBlocks, verifyCids } from './verify-store-reader.js' 7 | 8 | /** 9 | * @param {CarBlockIterator} iter 10 | * @returns {Promise} 11 | */ 12 | async function verifyBlockIterator (iter) { 13 | await verifyRoots(iter) 14 | await verifyBlocks(iter) 15 | assert.strictEqual(iter.version, 1) 16 | return iter 17 | } 18 | 19 | /** 20 | * @param {CarCIDIterator} iter 21 | * @returns {Promise} 22 | */ 23 | async function verifyCIDIterator (iter) { 24 | await verifyRoots(iter) 25 | await verifyCids(iter) 26 | assert.strictEqual(iter.version, 1) 27 | return iter 28 | } 29 | 30 | for (const type of ['Block', 'CID']) { 31 | describe(`Car${type}Iterator`, () => { 32 | it('fromBytes()', async () => { 33 | if (type === 'Block') { 34 | await verifyBlockIterator(await CarBlockIterator.fromBytes(carBytes)) 35 | } else { 36 | await verifyCIDIterator(await CarCIDIterator.fromBytes(carBytes)) 37 | } 38 | }) 39 | 40 | it('fromBytes() bad double read', async () => { 41 | if (type === 'Block') { 42 | const iter = await verifyBlockIterator(await CarBlockIterator.fromBytes(carBytes)) 43 | await assert.isRejected(verifyBlocks(iter), /more than once/i) 44 | } else { 45 | const iter = await verifyCIDIterator(await CarCIDIterator.fromBytes(carBytes)) 46 | await assert.isRejected(verifyCids(iter), /more than once/i) 47 | } 48 | }) 49 | 50 | it('fromBytes() bad argument', async () => { 51 | for (const arg of [true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 52 | // @ts-expect-error arg is wrong type 53 | // the assert.isRejected form of this causes an uncatchable error in Chrome 54 | await expect((type === 'Block' ? 
CarBlockIterator : CarCIDIterator).fromBytes(arg)).to.eventually.be.rejected() 55 | } 56 | }) 57 | 58 | it('fromIterable() bad argument', async () => { 59 | for (const arg of [new Uint8Array(0), true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 60 | // @ts-expect-error arg is wrong type 61 | // the assert.isRejected form of this causes an uncatchable error in Chrome 62 | await expect((type === 'Block' ? CarBlockIterator : CarCIDIterator).fromIterable(arg)).to.eventually.be.rejected() 63 | } 64 | }) 65 | 66 | for (const chunkSize of [carBytes.length, 100, 64, 32]) { 67 | const chunkDesc = chunkSize === carBytes.length ? 'single chunk' : `${chunkSize} bytes` 68 | it(`fromIterable() blocks (${chunkDesc})`, async () => { 69 | if (type === 'Block') { 70 | await verifyBlockIterator(await CarBlockIterator.fromIterable(makeIterable(carBytes, chunkSize))) 71 | } else { 72 | await verifyCIDIterator(await CarCIDIterator.fromIterable(makeIterable(carBytes, chunkSize))) 73 | } 74 | }) 75 | } 76 | }) 77 | } 78 | -------------------------------------------------------------------------------- /test/test-reader-sync.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import * as dagPb from '@ipld/dag-pb' 4 | import { expect } from 'aegir/chai' 5 | import { base64 } from 'multiformats/bases/base64' 6 | import { bytesReader, readHeader } from '../src/buffer-decoder.js' 7 | import { CarBufferReader } from '../src/buffer-reader-browser.js' 8 | import { 9 | carBytes, 10 | assert, 11 | goCarV2Bytes, 12 | goCarV2Roots, 13 | goCarV2Index, 14 | goCarV2Contents 15 | } from './common.js' 16 | import { expectations as fixtureExpectations } from './fixtures-expectations.js' 17 | import { data as fixtures } from './fixtures.js' 18 | import { 19 | verifyRoots, 20 | verifyHas, 21 | verifyGet, 22 | verifyBlocks, 23 | verifyCids 24 | } from './verify-store-reader.js' 25 | 26 | describe('CarReader Sync fromBytes()', () => { 27 | it('complete', async () => { 28 | const reader = CarBufferReader.fromBytes(carBytes) 29 | await verifyRoots(reader) 30 | await verifyHas(reader) 31 | await verifyGet(reader) 32 | await verifyBlocks(reader.blocks()) 33 | await verifyCids(reader.cids()) 34 | assert.strictEqual(reader.version, 1) 35 | }) 36 | 37 | it('complete (get before has) switch', async () => { 38 | const reader = CarBufferReader.fromBytes(carBytes) 39 | await verifyRoots(reader) 40 | await verifyGet(reader) 41 | await verifyHas(reader) 42 | await verifyBlocks(reader.blocks()) 43 | await verifyCids(reader.cids()) 44 | }) 45 | 46 | it('bad argument', () => { 47 | for (const arg of [true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 48 | expect(() => { 49 | // @ts-ignore 50 | CarBufferReader.fromBytes(arg) 51 | }).throws() 52 | } 53 | }) 54 | 55 | it('decode error - truncated', () => { 56 | assert.throws(() => { 57 | CarBufferReader.fromBytes(carBytes.slice(0, carBytes.length - 10)) 58 | }, Error, 'Unexpected end of data') 59 | }) 60 | 61 | it('v2 complete', () => { 62 | const reader = CarBufferReader.fromBytes(goCarV2Bytes) 63 | const roots = reader.getRoots() 64 | assert.strictEqual(roots.length, 1) 65 | assert.ok(goCarV2Roots[0].equals(roots[0])) 66 | assert.strictEqual(reader.version, 2) 67 | for (const { cid } of goCarV2Index) { 68 | const block = reader.get(cid) 69 | assert.isDefined(block) 70 | if (block) { 71 | assert.ok(cid.equals(block.cid)) 72 | let content 73 | if (cid.code === dagPb.code) { 74 | content = 
dagPb.decode(block.bytes) 75 | } else if (cid.code === 85) { // raw 76 | content = new TextDecoder().decode(block.bytes) 77 | } else { 78 | assert.fail('Unexpected codec') 79 | } 80 | assert.deepStrictEqual(content, goCarV2Contents[cid.toString()]) 81 | } 82 | } 83 | }) 84 | 85 | it('decode error - trailing null bytes', () => { 86 | const bytes = new Uint8Array(carBytes.length + 5) 87 | bytes.set(carBytes) 88 | try { 89 | CarBufferReader.fromBytes(bytes) 90 | } catch (/** @type {any} */ err) { 91 | assert.strictEqual(err.message, 'Invalid CAR section (zero length)') 92 | return 93 | } 94 | assert.fail('Did not throw') 95 | }) 96 | 97 | it('decode error - bad first byte', () => { 98 | const bytes = new Uint8Array(carBytes.length + 5) 99 | bytes.set(carBytes) 100 | bytes[0] = 0 101 | try { 102 | CarBufferReader.fromBytes(bytes) 103 | } catch (/** @type {any} */ err) { 104 | assert.strictEqual(err.message, 'Invalid CAR header (zero length)') 105 | return 106 | } 107 | assert.fail('Did not throw') 108 | }) 109 | }) 110 | 111 | describe('Shared fixtures', () => { 112 | describe('Header', () => { 113 | for (const [name, { version: expectedVersion, err: expectedError }] of Object.entries(fixtureExpectations)) { 114 | it(name, async () => { 115 | const data = base64.baseDecode(fixtures[name]) 116 | let header 117 | try { 118 | header = readHeader(bytesReader(data)) 119 | } catch (/** @type {any} */ err) { 120 | if (expectedError != null) { 121 | assert.equal(err.message, expectedError) 122 | return 123 | } 124 | assert.ifError(err) 125 | } 126 | if (expectedError != null) { 127 | assert.fail(`Expected error: ${expectedError}`) 128 | } 129 | assert.isDefined(header, 'did not decode header') 130 | if (expectedVersion != null && header != null) { 131 | assert.strictEqual(header.version, expectedVersion) 132 | } 133 | }) 134 | } 135 | }) 136 | 137 | describe('Contents', () => { 138 | for (const [name, { cids: expectedCids }] of Object.entries(fixtureExpectations)) { 139 | if (expectedCids == null) { 140 | continue 141 | } 142 | it(name, async () => { 143 | const data = base64.baseDecode(fixtures[name]) 144 | const reader = CarBufferReader.fromBytes(data) 145 | let i = 0 146 | for await (const cid of reader.cids()) { 147 | assert.strictEqual(cid.toString(), expectedCids[i++]) 148 | } 149 | assert.strictEqual(i, expectedCids.length) 150 | }) 151 | } 152 | }) 153 | }) 154 | -------------------------------------------------------------------------------- /test/test-reader.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import * as dagPb from '@ipld/dag-pb' 4 | import { expect } from 'aegir/chai' 5 | import { base64 } from 'multiformats/bases/base64' 6 | import * as Block from 'multiformats/block' 7 | import * as raw from 'multiformats/codecs/raw' 8 | import { sha256 } from 'multiformats/hashes/sha2' 9 | import { bytesReader, readHeader } from '../src/decoder.js' 10 | import { CarReader } from '../src/reader.js' 11 | import { CarWriter } from '../src/writer.js' 12 | import { 13 | carBytes, 14 | makeIterable, 15 | assert, 16 | goCarV2Bytes, 17 | goCarV2Roots, 18 | goCarV2Index, 19 | goCarV2Contents 20 | } from './common.js' 21 | import { expectations as fixtureExpectations } from './fixtures-expectations.js' 22 | import { data as fixtures } from './fixtures.js' 23 | import { 24 | verifyRoots, 25 | verifyHas, 26 | verifyGet, 27 | verifyBlocks, 28 | verifyCids 29 | } from './verify-store-reader.js' 30 | 31 | describe('CarReader 
fromBytes()', () => { 32 | it('complete', async () => { 33 | const reader = await CarReader.fromBytes(carBytes) 34 | await verifyRoots(reader) 35 | await verifyHas(reader) 36 | await verifyGet(reader) 37 | await verifyBlocks(reader.blocks()) 38 | await verifyCids(reader.cids()) 39 | assert.strictEqual(reader.version, 1) 40 | }) 41 | 42 | it('complete (get before has) switch', async () => { 43 | const reader = await CarReader.fromBytes(carBytes) 44 | await verifyRoots(reader) 45 | await verifyGet(reader) 46 | await verifyHas(reader) 47 | await verifyBlocks(reader.blocks()) 48 | await verifyCids(reader.cids()) 49 | }) 50 | 51 | it('bad argument', async () => { 52 | for (const arg of [true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 53 | // @ts-expect-error arg is wrong type 54 | // the assert.isRejected form of this causes an uncatchable error in Chrome 55 | await expect(CarReader.fromBytes(arg)).to.eventually.be.rejected() 56 | } 57 | }) 58 | 59 | it('decode error - truncated', async () => { 60 | await assert.isRejected(CarReader.fromBytes(carBytes.slice(0, carBytes.length - 10)), { 61 | name: 'Error', 62 | message: 'Unexpected end of data' 63 | }) 64 | }) 65 | 66 | it('v2 complete', async () => { 67 | const reader = await CarReader.fromBytes(goCarV2Bytes) 68 | const roots = await reader.getRoots() 69 | assert.strictEqual(roots.length, 1) 70 | assert.ok(goCarV2Roots[0].equals(roots[0])) 71 | assert.strictEqual(reader.version, 2) 72 | for (const { cid } of goCarV2Index) { 73 | const block = await reader.get(cid) 74 | assert.isDefined(block) 75 | if (block) { 76 | assert.ok(cid.equals(block.cid)) 77 | let content 78 | if (cid.code === dagPb.code) { 79 | content = dagPb.decode(block.bytes) 80 | } else if (cid.code === 85) { // raw 81 | content = new TextDecoder().decode(block.bytes) 82 | } else { 83 | assert.fail('Unexpected codec') 84 | } 85 | assert.deepStrictEqual(content, goCarV2Contents[cid.toString()]) 86 | } 87 | } 88 | }) 89 | 90 | describe('decode error - trailing null bytes', () => { 91 | const cases = [ 92 | { 93 | name: 'carBytes', 94 | bytes: (() => { 95 | const bytes = new Uint8Array(carBytes.length + 5) 96 | bytes.set(carBytes) 97 | return bytes 98 | })() 99 | }, 100 | { 101 | name: 'sample-v1-with-zero-len-section.car', 102 | bytes: base64.baseDecode(fixtures['sample-v1-with-zero-len-section.car']) 103 | }, 104 | { 105 | name: 'sample-v1-with-zero-len-section2.car', 106 | bytes: base64.baseDecode(fixtures['sample-v1-with-zero-len-section2.car']) 107 | } 108 | ] 109 | for (const { name, bytes } of cases) { 110 | it(name, async () => { 111 | try { 112 | await CarReader.fromBytes(bytes) 113 | } catch (/** @type {any} */ err) { 114 | assert.strictEqual(err.message, 'Invalid CAR section (zero length)') 115 | return 116 | } 117 | assert.fail('Did not throw') 118 | }) 119 | } 120 | }) 121 | 122 | it('decode error - trailing null bytes', async () => { 123 | const bytes = new Uint8Array(carBytes.length + 5) 124 | bytes.set(carBytes) 125 | try { 126 | await CarReader.fromBytes(bytes) 127 | } catch (/** @type {any} */ err) { 128 | assert.strictEqual(err.message, 'Invalid CAR section (zero length)') 129 | return 130 | } 131 | assert.fail('Did not throw') 132 | }) 133 | 134 | it('decode error - bad first byte', async () => { 135 | const bytes = new Uint8Array(carBytes.length + 5) 136 | bytes.set(carBytes) 137 | bytes[0] = 0 138 | try { 139 | await CarReader.fromBytes(bytes) 140 | } catch (/** @type {any} */ err) { 141 | assert.strictEqual(err.message, 'Invalid CAR header 
(zero length)') 142 | return 143 | } 144 | assert.fail('Did not throw') 145 | }) 146 | }) 147 | 148 | describe('CarReader fromIterable()', () => { 149 | it('complete (single chunk)', async () => { 150 | const reader = await CarReader.fromIterable(makeIterable(carBytes, carBytes.length)) 151 | await verifyRoots(reader) 152 | await verifyHas(reader) 153 | await verifyGet(reader) 154 | await verifyBlocks(reader.blocks()) 155 | await verifyCids(reader.cids()) 156 | }) 157 | 158 | it('complete (101-byte chunks)', async () => { 159 | const reader = await CarReader.fromIterable(makeIterable(carBytes, 101)) 160 | await verifyRoots(reader) 161 | await verifyHas(reader) 162 | await verifyGet(reader) 163 | await verifyBlocks(reader.blocks()) 164 | await verifyCids(reader.cids()) 165 | }) 166 | 167 | it('complete (64-byte chunks)', async () => { 168 | const reader = await CarReader.fromIterable(makeIterable(carBytes, 64)) 169 | await verifyRoots(reader) 170 | await verifyHas(reader) 171 | await verifyGet(reader) 172 | await verifyBlocks(reader.blocks()) 173 | await verifyCids(reader.cids()) 174 | }) 175 | 176 | it('complete (32-byte chunks)', async () => { 177 | const reader = await CarReader.fromIterable(makeIterable(carBytes, 32)) 178 | await verifyRoots(reader) 179 | await verifyHas(reader) 180 | await verifyGet(reader) 181 | await verifyBlocks(reader.blocks()) 182 | await verifyCids(reader.cids()) 183 | }) 184 | 185 | it('handle zero-byte chunks', async () => { 186 | // write 3 blocks, the middle one has zero bytes - this is a valid dag-pb form 187 | // so it's important that we can handle it .. also we may just be dealing with 188 | // an asynciterator that provides zero-length chunks 189 | const { writer, out } = await CarWriter.create([]) 190 | const b1 = await Block.encode({ value: Uint8Array.from([0, 1, 2]), hasher: sha256, codec: raw }) 191 | writer.put(b1) 192 | const b2 = await Block.encode({ value: Uint8Array.from([]), hasher: sha256, codec: raw }) 193 | writer.put(b2) 194 | const b3 = await Block.encode({ value: Uint8Array.from([3, 4, 5]), hasher: sha256, codec: raw }) 195 | writer.put(b3) 196 | const closePromise = writer.close() 197 | const reader = await CarReader.fromIterable(out) // read from the writer 198 | const b1a = await reader.get(b1.cid) 199 | assert.isDefined(b1a) 200 | assert.deepStrictEqual(b1a && Array.from(b1a.bytes), [0, 1, 2]) 201 | const b2a = await reader.get(b2.cid) 202 | assert.isDefined(b2a) 203 | assert.deepStrictEqual(b2a && Array.from(b2a.bytes), []) 204 | const b3a = await reader.get(b3.cid) 205 | assert.isDefined(b3a) 206 | assert.deepStrictEqual(b3a && Array.from(b3a.bytes), [3, 4, 5]) 207 | await closePromise 208 | }) 209 | 210 | it('bad argument', async () => { 211 | for (const arg of [new Uint8Array(0), true, false, null, undefined, 'string', 100, { obj: 'nope' }]) { 212 | // @ts-expect-error arg is wrong type 213 | // the assert.isRejected form of this causes an uncatchable error in Chrome 214 | await expect(CarReader.fromIterable(arg)).to.eventually.be.rejected() 215 | } 216 | }) 217 | 218 | it('decode error - truncated', async () => { 219 | await assert.isRejected(CarReader.fromIterable(makeIterable(carBytes.slice(0, carBytes.length - 10), 64)), { 220 | name: 'Error', 221 | message: 'Unexpected end of data' 222 | }) 223 | }) 224 | 225 | it('v2 decode error - truncated', async () => { 226 | const bytes = goCarV2Bytes.slice() 227 | const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength) 228 | // dataSize is an 64-bit uint at byte 
offset 35 from the begining, we're shortening it 229 | // by 10 to simulate a premature end of CARv1 content 230 | dv.setBigUint64(35, BigInt(448 - 10), true) 231 | await assert.isRejected(CarReader.fromIterable(makeIterable(bytes, 64)), { 232 | name: 'Error', 233 | message: 'Unexpected end of data' 234 | }) 235 | }) 236 | }) 237 | 238 | describe('Shared fixtures', () => { 239 | describe('Header', () => { 240 | for (const [name, { version: expectedVersion, err: expectedError }] of Object.entries(fixtureExpectations)) { 241 | it(name, async () => { 242 | const data = base64.baseDecode(fixtures[name]) 243 | let header 244 | try { 245 | header = await readHeader(bytesReader(data)) 246 | } catch (/** @type {any} */ err) { 247 | if (expectedError != null) { 248 | assert.equal(err.message, expectedError) 249 | return 250 | } 251 | assert.ifError(err) 252 | } 253 | if (expectedError != null) { 254 | assert.fail(`Expected error: ${expectedError}`) 255 | } 256 | assert.isDefined(header, 'did not decode header') 257 | if (expectedVersion != null && header != null) { 258 | assert.strictEqual(header.version, expectedVersion) 259 | } 260 | }) 261 | } 262 | }) 263 | 264 | describe('Contents', () => { 265 | for (const [name, { cids: expectedCids }] of Object.entries(fixtureExpectations)) { 266 | if (expectedCids == null) { 267 | continue 268 | } 269 | it(name, async () => { 270 | const data = base64.baseDecode(fixtures[name]) 271 | const reader = await CarReader.fromBytes(data) 272 | let i = 0 273 | for await (const cid of reader.cids()) { 274 | assert.strictEqual(cid.toString(), expectedCids[i++]) 275 | } 276 | assert.strictEqual(i, expectedCids.length) 277 | }) 278 | } 279 | }) 280 | }) 281 | -------------------------------------------------------------------------------- /test/test-writer.spec.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | import { expect } from 'aegir/chai' 4 | import { bytes, CID } from 'multiformats' 5 | import { CarReader } from '../src/reader.js' 6 | import { CarWriter } from '../src/writer.js' 7 | import { carBytes, makeData, assert, rndCid } from './common.js' 8 | import { 9 | verifyRoots, 10 | verifyHas, 11 | verifyGet, 12 | verifyBlocks, 13 | verifyCids 14 | } from './verify-store-reader.js' 15 | 16 | /** 17 | * @typedef {import('../src/api.js').Block} Block 18 | */ 19 | 20 | const { toHex } = bytes 21 | 22 | /** 23 | * @param {Uint8Array[]} chunks 24 | */ 25 | function concatBytes (chunks) { 26 | const length = chunks.reduce((p, c) => p + c.length, 0) 27 | const bytes = new Uint8Array(length) 28 | let off = 0 29 | for (const chunk of chunks) { 30 | bytes.set(chunk, off) 31 | off += chunk.length 32 | } 33 | return bytes 34 | } 35 | 36 | /** 37 | * @param {AsyncIterable} iterable 38 | */ 39 | function collector (iterable) { 40 | const chunks = [] 41 | const cfn = (async () => { 42 | for await (const chunk of iterable) { 43 | chunks.push(chunk) 44 | } 45 | return concatBytes(chunks) 46 | })() 47 | return cfn 48 | } 49 | 50 | const newRoots = [ 51 | CID.parse('bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq'), 52 | CID.parse('bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4') 53 | ] 54 | 55 | /** 56 | * @param {Uint8Array} bytes 57 | */ 58 | async function verifyUpdateRoots (bytes) { 59 | const reader = await CarReader.fromBytes(bytes) 60 | // the assert.isRejected form of this causes an uncatchable error in Chrome 61 | await expect(verifyRoots(reader)).to.eventually.be.rejected() // 
whoa, different roots? like magic 62 | assert.deepEqual(await reader.getRoots(), newRoots) 63 | await verifyHas(reader) 64 | await verifyGet(reader) 65 | await verifyBlocks(reader.blocks(), true) 66 | await verifyCids(reader.cids(), true) 67 | } 68 | 69 | describe('CarWriter', () => { 70 | /** @type {Block[]} */ 71 | let cborBlocks 72 | /** @type {[string, Block[]][]} */ 73 | let allBlocks 74 | /** @type {Block[]} */ 75 | let allBlocksFlattened 76 | /** @type {CID[]} */ 77 | let roots 78 | 79 | /** 80 | * @param {Uint8Array} actual 81 | */ 82 | function assertCarData (actual) { 83 | assert.strictEqual( 84 | toHex(actual), 85 | toHex(carBytes), 86 | 'got expected bytes' 87 | ) 88 | } 89 | 90 | before(async () => { 91 | const data = await makeData() 92 | cborBlocks = data.cborBlocks 93 | allBlocks = data.allBlocks 94 | allBlocksFlattened = data.allBlocksFlattened 95 | roots = [cborBlocks[0].cid, cborBlocks[1].cid] 96 | }) 97 | 98 | it('complete', async () => { 99 | const { writer, out } = CarWriter.create(roots) 100 | 101 | // writer is async iterable 102 | assert.strictEqual(typeof out[Symbol.asyncIterator], 'function') 103 | const collection = collector(out) 104 | 105 | const writeQueue = [] 106 | for (const block of allBlocksFlattened) { 107 | writeQueue.push(writer.put(block)) 108 | } 109 | writeQueue.push(writer.close()) 110 | 111 | let collected = false 112 | collection.then((bytes) => { 113 | collected = true 114 | assertCarData(bytes) 115 | }) 116 | await Promise.all(writeQueue) 117 | assert.strictEqual(collected, true) 118 | }) 119 | 120 | it('complete, deferred collection', async () => { 121 | const { writer, out } = CarWriter.create(roots) 122 | 123 | const writeQueue = [] 124 | for (const block of allBlocksFlattened) { 125 | writeQueue.push(writer.put(block)) 126 | } 127 | writeQueue.push(writer.close()) 128 | 129 | // attach to the iterator after we've queued all the writing 130 | let collected = false 131 | collector(out).then((bytes) => { 132 | collected = true 133 | assertCarData(bytes) 134 | }) 135 | await Promise.all(writeQueue) 136 | assert.strictEqual(collected, true) 137 | }) 138 | 139 | it('complete, close after write', async () => { 140 | const { writer, out } = CarWriter.create(roots) 141 | 142 | assert.strictEqual(typeof out[Symbol.asyncIterator], 'function') 143 | const collection = collector(out) 144 | 145 | const writeQueue = [] 146 | for (const block of allBlocksFlattened) { 147 | writeQueue.push(writer.put(block)) 148 | } 149 | writeQueue.push(writer.close()) 150 | 151 | let written = false 152 | Promise.all(writeQueue).then(() => { 153 | written = true 154 | }) 155 | const bytes = await collection 156 | assert.strictEqual(written, false) 157 | await Promise.resolve() 158 | assertCarData(bytes) 159 | }) 160 | 161 | it('complete, no queue', async () => { 162 | const { writer, out } = CarWriter.create(roots) 163 | const collection = collector(out) 164 | 165 | for (const block of allBlocksFlattened) { 166 | await writer.put(block) 167 | } 168 | await writer.close() 169 | 170 | const bytes = await collection 171 | assertCarData(bytes) 172 | }) 173 | 174 | it('complete, slow drip', async () => { 175 | const { writer, out } = CarWriter.create(roots) 176 | 177 | // writer is async iterable 178 | assert.strictEqual(typeof out[Symbol.asyncIterator], 'function') 179 | const collection = collector(out) 180 | 181 | for (const block of allBlocksFlattened) { 182 | writer.put(block) 183 | await new Promise((resolve) => setTimeout(resolve, 100)) 184 | } 185 | await 
writer.close() 186 | 187 | await new Promise((resolve) => setTimeout(resolve, 100)) 188 | 189 | const bytes = await collection 190 | assertCarData(bytes) 191 | }) 192 | 193 | it('complete, no queue, deferred collection', async () => { 194 | const { writer, out } = CarWriter.create(roots) 195 | 196 | for (const block of allBlocksFlattened) { 197 | writer.put(block) 198 | } 199 | writer.close() 200 | 201 | const collection = collector(out) 202 | const bytes = await collection 203 | assertCarData(bytes) 204 | }) 205 | 206 | it('single root', async () => { 207 | const { writer, out } = CarWriter.create(roots[0]) 208 | const collection = collector(out) 209 | 210 | for (const block of allBlocksFlattened) { 211 | await writer.put(block) 212 | } 213 | await writer.close() 214 | 215 | const bytes = await collection 216 | 217 | // test the start of the bytes to make sure we have the root def block we expect 218 | // { roots: [ CID(bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm) ], version: 1 } 219 | const expectedRootDef = 'a265726f6f747381d82a58250001711220f88bc853804cf294fe417e4fa83028689fcdb1b1592c5102e1474dbc200fab8b6776657273696f6e01' 220 | const expectedStart = (expectedRootDef.length / 2).toString(16) + // length of root def block 221 | expectedRootDef + 222 | '28' // length of first raw block + CIDv0 223 | 224 | assert.strictEqual(toHex(bytes).substring(0, expectedStart.length), expectedStart) 225 | }) 226 | 227 | it('version', async () => { 228 | const { writer } = CarWriter.create(roots) 229 | 230 | // v1 only 231 | assert.equal(writer.version(), 1) 232 | }) 233 | 234 | it('no roots', async () => { 235 | const { writer, out } = CarWriter.create() 236 | const collection = collector(out) 237 | 238 | for (const block of allBlocksFlattened) { 239 | await writer.put(block) 240 | } 241 | await writer.close() 242 | 243 | const bytes = await collection 244 | 245 | // test the start of the bytes to make sure we have the root def block we expect 246 | // { roots: [], version: 1 } 247 | const expectedRootDef = 'a265726f6f7473806776657273696f6e01' 248 | const expectedStart = (expectedRootDef.length / 2).toString(16) + // length of root def block 249 | expectedRootDef + 250 | '28' // length of first raw block + CIDv0 251 | 252 | assert.strictEqual(toHex(bytes).substring(0, expectedStart.length), expectedStart) 253 | }) 254 | 255 | it('appender', async () => { 256 | let writerOut = CarWriter.create(roots) 257 | let collection = collector(writerOut.out) 258 | await writerOut.writer.close() 259 | const headerBytes = await collection 260 | 261 | /** @param {number} index */ 262 | const append = async (index) => { 263 | writerOut = CarWriter.createAppender() 264 | collection = collector(writerOut.out) 265 | for (const block of allBlocks[index][1]) { 266 | await writerOut.writer.put(block) 267 | } 268 | await writerOut.writer.close() 269 | return collection 270 | } 271 | 272 | const rawBytes = await append(0) 273 | const pbBytes = await append(1) 274 | const cborBytes = await append(2) 275 | 276 | assert.ok(rawBytes.length > 0) 277 | assert.ok(pbBytes.length > 0) 278 | assert.ok(cborBytes.length > 0) 279 | 280 | const reassembled = concatBytes([headerBytes, rawBytes, pbBytes, cborBytes]) 281 | 282 | assert.strictEqual(toHex(reassembled), toHex(carBytes)) 283 | }) 284 | 285 | it('bad argument for create()', () => { 286 | for (const arg of [new Uint8Array(0), true, false, null, 'string', 100, { obj: 'nope' }, [false]]) { 287 | // @ts-expect-error arg is wrong type 288 | assert.throws(() => 
CarWriter.create(arg)) 289 | } 290 | }) 291 | 292 | it('bad argument for put()', async () => { 293 | const { writer } = CarWriter.create() 294 | for (const arg of [new Uint8Array(0), true, false, null, 'string', 100, { obj: 'nope' }, [false]]) { 295 | // the assert.isRejected form of this causes an uncatchable error in Chrome 296 | // @ts-expect-error 297 | await expect(writer.put(arg)).to.eventually.be.rejected() 298 | } 299 | 300 | for (const arg of [true, false, null, 'string', 100, { obj: 'nope' }, [false]]) { 301 | // the assert.isRejected form of this causes an uncatchable error in Chrome 302 | // @ts-expect-error 303 | await expect(writer.put({ bytes: new Uint8Array(0), cid: arg })).to.eventually.be.rejected() 304 | } 305 | 306 | for (const arg of [true, false, null, 'string', 100, { obj: 'nope' }, [false]]) { 307 | // the assert.isRejected form of this causes an uncatchable error in Chrome 308 | // @ts-expect-error 309 | await expect(writer.put({ cid: rndCid, bytes: arg })).to.eventually.be.rejected() 310 | } 311 | }) 312 | 313 | it('bad write after closed', async () => { 314 | const { writer, out } = CarWriter.create() 315 | const collection = collector(out) 316 | await writer.put(allBlocksFlattened[0]) 317 | await writer.close() 318 | await assert.isRejected(writer.put(allBlocksFlattened[1]), /closed/) 319 | await collection 320 | }) 321 | 322 | it('bad attempt to multiple iterate', async () => { 323 | const { out } = CarWriter.create() 324 | collector(out) 325 | await assert.isRejected(collector(out), /multiple iterator/i) 326 | }) 327 | 328 | it('bad attempt to multiple close', async () => { 329 | const { writer, out } = CarWriter.create() 330 | collector(out) 331 | await writer.close() 332 | 333 | // the assert.isRejected form of this causes an uncatchable error in Chrome 334 | await expect(writer.close()).to.eventually.be.rejectedWith(/closed/i) 335 | }) 336 | 337 | it('update roots (fd)', async () => { 338 | const bytes = carBytes.slice() 339 | await CarWriter.updateRootsInBytes(bytes, newRoots) 340 | await verifyUpdateRoots(bytes) 341 | }) 342 | 343 | it('update roots error: wrong header size', async () => { 344 | const bytes = carBytes.slice() 345 | await assert.isRejected(CarWriter.updateRootsInBytes(bytes, [...newRoots, newRoots[0]]), /can only overwrite a header of the same length/) 346 | await assert.isRejected(CarWriter.updateRootsInBytes(bytes, [newRoots[0]]), /can only overwrite a header of the same length/) 347 | await assert.isRejected(CarWriter.updateRootsInBytes(bytes, []), /can only overwrite a header of the same length/) 348 | }) 349 | }) 350 | -------------------------------------------------------------------------------- /test/verify-store-reader.js: -------------------------------------------------------------------------------- 1 | import { bytes } from 'multiformats' 2 | import * as raw from 'multiformats/codecs/raw' 3 | import { toBlock, assert, makeData } from './common.js' 4 | 5 | /** 6 | * @typedef {import('multiformats').CID} CID 7 | * @typedef {import('../src/api.js').Block} Block 8 | * @typedef {import('../src/api.js').RootsReader} RootsReader 9 | * @typedef {import('../src/api.js').BlockIterator} BlockIterator 10 | * @typedef {import('../src/api.js').CIDIterator} CIDIterator 11 | * @typedef {import('../src/api.js').BlockReader} BlockReader 12 | */ 13 | 14 | /** 15 | * @param {Block} actual 16 | * @param {Block} expected 17 | * @param {string | void} id 18 | */ 19 | function compareBlockData (actual, expected, id) { 20 | assert.strictEqual( 21 | 
bytes.toHex(actual.bytes), 22 | bytes.toHex(expected.bytes), 23 | `comparing block as hex ${id || ''}` 24 | ) 25 | } 26 | 27 | /** 28 | * @param {CID} actual 29 | * @param {CID} expected 30 | */ 31 | function compareCids (actual, expected) { 32 | assert.strictEqual(actual.toString(), expected.toString()) 33 | } 34 | 35 | /** 36 | * @param {RootsReader | import('../src/api.js').RootsBufferReader} reader 37 | */ 38 | async function verifyRoots (reader) { 39 | // using toString() for now, backing buffers in Uint8Arrays are getting in the way 40 | // in the browser 41 | const { cborBlocks } = await makeData() 42 | 43 | const expected = [cborBlocks[0].cid.toString(), cborBlocks[1].cid.toString()] 44 | assert.deepStrictEqual((await reader.getRoots()).map((c) => c.toString()), expected) 45 | } 46 | 47 | /** 48 | * @param {BlockReader | import('../src/api.js').BlockBufferReader} reader 49 | */ 50 | async function verifyHas (reader) { 51 | const { allBlocks } = await makeData() 52 | 53 | /** 54 | * @param {CID} cid 55 | * @param {string} name 56 | */ 57 | const verifyHas = async (cid, name) => { 58 | assert.ok(await reader.has(cid), `reader doesn't have expected key for ${name}`) 59 | } 60 | 61 | /** 62 | * @param {CID} cid 63 | * @param {string} name 64 | */ 65 | const verifyHasnt = async (cid, name) => { 66 | assert.ok(!(await reader.has(cid)), `reader has unexpected key for ${name}`) 67 | assert.strictEqual(await reader.get(cid), undefined) 68 | } 69 | 70 | for (const [type, blocks] of allBlocks) { 71 | for (let i = 0; i < blocks.length; i++) { 72 | await verifyHas(blocks[i].cid, `block #${i} (${type} / ${blocks[i].cid})`) 73 | } 74 | } 75 | 76 | // not a block we have 77 | await verifyHasnt((await toBlock(new TextEncoder().encode('dddd'), raw)).cid, 'dddd') 78 | } 79 | 80 | /** 81 | * @param {BlockReader | import('../src/api.js').BlockBufferReader} reader 82 | */ 83 | async function verifyGet (reader) { 84 | const { allBlocks } = await makeData() 85 | 86 | /** 87 | * @param {Block} expected 88 | * @param {number} index 89 | * @param {string} type 90 | */ 91 | const verifyBlock = async (expected, index, type) => { 92 | let actual 93 | try { 94 | actual = await reader.get(expected.cid) 95 | assert.isDefined(actual) 96 | if (actual) { 97 | compareBlockData(actual, expected, `#${index} (${type})`) 98 | } 99 | } catch (err) { 100 | assert.ifError(err, `get block length #${index} (${type})`) 101 | } 102 | } 103 | 104 | for (const [type, blocks] of allBlocks) { 105 | for (let i = 0; i < blocks.length; i++) { 106 | await verifyBlock(blocks[i], i, type) 107 | } 108 | } 109 | } 110 | 111 | /** 112 | * @param {import('../src/api.js').AwaitIterable} iterator 113 | * @param {boolean | void} unordered 114 | */ 115 | async function verifyBlocks (iterator, unordered) { 116 | const { allBlocksFlattened } = await makeData() 117 | if (!unordered) { 118 | const expected = allBlocksFlattened.slice() 119 | for await (const actual of iterator) { 120 | const next = expected.shift() 121 | assert.isDefined(next) 122 | if (next) { 123 | compareBlockData(actual, next) 124 | } 125 | } 126 | } else { 127 | /** @type {{[prop: string]: Block}} */ 128 | const expected = {} 129 | for (const block of allBlocksFlattened) { 130 | expected[block.cid.toString()] = block 131 | } 132 | 133 | for await (const actual of iterator) { 134 | const { cid } = actual 135 | const exp = expected[cid.toString()] 136 | if (!exp) { 137 | throw new Error(`Unexpected block: ${cid.toString()}`) 138 | } 139 | compareBlockData(actual, exp) 140 | delete 
expected[cid.toString()] 141 | } 142 | 143 | if (Object.keys(expected).length) { 144 | throw new Error('Did not find all expected blocks') 145 | } 146 | } 147 | } 148 | 149 | /** 150 | * @param {import('../src/api.js').AwaitIterable} iterator 151 | * @param {boolean | void} unordered 152 | */ 153 | async function verifyCids (iterator, unordered) { 154 | const { allBlocksFlattened } = await makeData() 155 | if (!unordered) { 156 | const expected = allBlocksFlattened.slice() 157 | for await (const actual of iterator) { 158 | const next = expected.shift() 159 | assert.isDefined(next) 160 | if (next) { 161 | compareCids(actual, next.cid) 162 | } 163 | } 164 | } else { 165 | /** @type {{[prop: string]: Block}} */ 166 | const expected = {} 167 | for (const block of allBlocksFlattened) { 168 | expected[block.cid.toString()] = block 169 | } 170 | 171 | for await (const cid of iterator) { 172 | const exp = expected[cid.toString()] 173 | if (!exp) { 174 | throw new Error(`Unexpected cid: ${cid.toString()}`) 175 | } 176 | delete expected[cid.toString()] 177 | } 178 | 179 | if (Object.keys(expected).length) { 180 | throw new Error('Did not find all expected cids') 181 | } 182 | } 183 | } 184 | 185 | export { 186 | verifyRoots, 187 | verifyHas, 188 | verifyGet, 189 | verifyBlocks, 190 | verifyCids 191 | } 192 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "aegir/src/config/tsconfig.aegir.json", 3 | "compilerOptions": { 4 | "outDir": "dist", 5 | "emitDeclarationOnly": true 6 | }, 7 | "include": [ 8 | "src", 9 | "test" 10 | ], 11 | "exclude": [ 12 | "node_modules", 13 | "dist", 14 | "examples" 15 | ] 16 | } 17 | --------------------------------------------------------------------------------
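A minimal round-trip sketch distilled from the buffer-writer cases in test/test-buffer-writer.spec.js above, written as if it sat alongside the tests. The CarBufferWriter import specifier and the 256-byte data budget are assumptions (inferred from the src/ layout and the tests' own sizing), not values defined by the suite; every API call it makes is one the tests exercise.

import * as CarBufferWriter from '../src/buffer-writer.js' // assumed path, mirroring src/
import { CarReader } from '../src/reader.js'
import * as CBOR from '@ipld/dag-cbor'
import * as Block from 'multiformats/block'
import { sha256 } from 'multiformats/hashes/sha2'

async function roundTrip () {
  // Encode a block the same way the writer tests do
  const block = await Block.encode({ value: { hello: 'world' }, codec: CBOR, hasher: sha256 })

  // Reserve room for a one-root header plus an assumed 256 bytes of block data
  const headerSize = CarBufferWriter.estimateHeaderLength(1)
  const writer = CarBufferWriter.createWriter(new ArrayBuffer(headerSize + 256), { headerSize })

  writer.write(block) // append the block to the data section
  writer.addRoot(block.cid) // record its CID in the reserved header space
  const bytes = writer.close() // returns only the bytes actually written

  // Read the CAR back and confirm the root survived the trip
  const reader = await CarReader.fromBytes(bytes)
  return reader.getRoots() // resolves to [block.cid]
}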
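For orientation, the 'recursive v2 header' and 'v2 decode error - truncated' cases above lean on the fixed CARv2 layout spelled out in their comments: an 11-byte pragma, 16 bytes of characteristics, then dataOffset, dataSize and indexOffset as little-endian 64-bit integers, 51 bytes in all. The helper below is a hypothetical illustration of those offsets and is not part of the library's API.

/**
 * Read the fixed-size CARv2 header fields from the first 51 bytes.
 * Offsets follow the test comments: 11-byte pragma, 16-byte
 * characteristics bitfield, then three little-endian uint64 fields.
 * @param {Uint8Array} bytes
 */
function readCarV2Header (bytes) {
  const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)
  return {
    dataOffset: dv.getBigUint64(11 + 16, true), // byte 27
    dataSize: dv.getBigUint64(11 + 16 + 8, true), // byte 35 (the field the truncation test shortens)
    indexOffset: dv.getBigUint64(11 + 16 + 16, true) // byte 43
  }
}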
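The 'update roots' cases near the end of test-writer.spec.js show that CarWriter.updateRootsInBytes only succeeds when the replacement root list encodes to a header of exactly the same length as the one already in place. A small sketch of that constraint; the wrapper itself is illustrative, only the updateRootsInBytes call and its error text come from the suite.

import { CarWriter } from '../src/writer.js'

/**
 * Swap the roots of an in-memory CARv1 without re-encoding its blocks.
 * Succeeds only when newRoots encodes to the same header length as the
 * existing roots; otherwise it rejects with
 * "can only overwrite a header of the same length".
 * @param {Uint8Array} bytes
 * @param {import('multiformats').CID[]} newRoots
 */
async function swapRoots (bytes, newRoots) {
  await CarWriter.updateRootsInBytes(bytes, newRoots) // mutates bytes in place
  return bytes
}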