├── .cspell.json ├── .github ├── dependabot.yml ├── dictionary.txt ├── pull_request_template.md └── workflows │ ├── gateway-conformance.yml │ ├── generated-pr.yml │ ├── js-test-and-release.yml │ ├── semantic-pull-request.yml │ └── stale.yml ├── .gitignore ├── CHANGELOG.md ├── CODEOWNERS ├── LICENSE ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── package.json ├── packages ├── gateway-conformance │ ├── .aegir.js │ ├── .gitignore │ ├── CHANGELOG.md │ ├── CODE_OF_CONDUCT.md │ ├── LICENSE-APACHE │ ├── LICENSE-MIT │ ├── README.md │ ├── package.json │ ├── src │ │ ├── bin.ts │ │ ├── conformance.spec.ts │ │ ├── constants.ts │ │ ├── demo-server.ts │ │ ├── expected-failing-tests.json │ │ ├── expected-passing-tests.json │ │ ├── fixtures │ │ │ ├── basic-server.ts │ │ │ ├── create-kubo.ts │ │ │ ├── create-verified-fetch.ts │ │ │ ├── get-local-dns-resolver.ts │ │ │ ├── header-utils.ts │ │ │ ├── ipns-record-datastore.ts │ │ │ └── kubo-mgmt.ts │ │ ├── get-report-details.ts │ │ ├── get-tests-to-run.ts │ │ ├── get-tests-to-skip.ts │ │ ├── get-wontfix-tests.ts │ │ ├── index.ts │ │ └── update-expected-tests.ts │ ├── tsconfig.json │ └── typedoc.json ├── interop │ ├── .aegir.js │ ├── CHANGELOG.md │ ├── CODE_OF_CONDUCT.md │ ├── LICENSE-APACHE │ ├── LICENSE-MIT │ ├── README.md │ ├── package.json │ ├── src │ │ ├── abort-handling.spec.ts │ │ ├── bin.ts │ │ ├── direct-retrieval.spec.ts │ │ ├── fixtures │ │ │ ├── create-kubo.ts │ │ │ ├── data │ │ │ │ ├── QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car │ │ │ │ ├── QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car │ │ │ │ ├── QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car │ │ │ │ ├── QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car │ │ │ │ ├── bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car │ │ │ │ ├── bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car │ │ │ │ ├── gateway-conformance-fixtures.car │ │ │ │ └── k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record │ │ │ └── load-fixtures.ts │ │ ├── index.ts │ │ ├── ipns.spec.ts │ │ ├── json.spec.ts │ │ ├── unixfs-dir.spec.ts │ │ └── websites.spec.ts │ ├── tsconfig.json │ └── typedoc.json └── verified-fetch │ ├── .aegir.js │ ├── CHANGELOG.md │ ├── CODE_OF_CONDUCT.md │ ├── LICENSE-APACHE │ ├── LICENSE-MIT │ ├── README.md │ ├── package.json │ ├── src │ ├── errors.ts │ ├── index.ts │ ├── plugins │ │ ├── errors.ts │ │ ├── index.ts │ │ ├── plugin-base.ts │ │ ├── plugin-handle-byte-range-context.ts │ │ ├── plugin-handle-car.ts │ │ ├── plugin-handle-dag-cbor.ts │ │ ├── plugin-handle-dag-pb.ts │ │ ├── plugin-handle-dag-walk.ts │ │ ├── plugin-handle-dir-index-html.ts │ │ ├── plugin-handle-ipns-record.ts │ │ ├── plugin-handle-json.ts │ │ ├── plugin-handle-raw.ts │ │ ├── plugin-handle-tar.ts │ │ ├── plugins.ts │ │ └── types.ts │ ├── singleton.ts │ ├── types.ts │ ├── utils │ │ ├── byte-range-context.ts │ │ ├── content-type-parser.ts │ │ ├── dag-cbor-to-safe-json.ts │ │ ├── dir-index-html.ts │ │ ├── get-content-disposition-filename.ts │ │ ├── get-content-type.ts │ │ ├── get-e-tag.ts │ │ ├── get-peer-id-from-string.ts │ │ ├── get-resolved-accept-header.ts │ │ ├── get-stream-from-async-iterable.ts │ │ ├── get-tar-stream.ts │ │ ├── handle-redirects.ts │ │ ├── is-accept-explicit.ts │ │ ├── libp2p-defaults.browser.ts │ │ ├── libp2p-defaults.ts │ │ ├── libp2p-types.ts │ │ ├── parse-resource.ts │ │ ├── parse-url-string.ts │ │ ├── request-headers.ts 
│ │ ├── resource-to-cache-key.ts │ │ ├── response-headers.ts │ │ ├── responses.ts │ │ ├── select-output-type.ts │ │ ├── server-timing.ts │ │ ├── tlru.ts │ │ ├── type-guards.ts │ │ └── walk-path.ts │ └── verified-fetch.ts │ ├── test │ ├── abort-handling.spec.ts │ ├── accept-header.spec.ts │ ├── cache-control-header.spec.ts │ ├── car.spec.ts │ ├── content-type-parser.spec.ts │ ├── custom-dns-resolvers.spec.ts │ ├── fixtures │ │ ├── cids.ts │ │ ├── create-offline-helia.ts │ │ ├── create-random-data-chunks.ts │ │ ├── dns-answer-fake.ts │ │ ├── get-abortable-promise.ts │ │ ├── get-custom-plugin-factory.ts │ │ ├── ipns-stubs.ts │ │ ├── make-aborted-request.ts │ │ └── memory-car.ts │ ├── get-e-tag.spec.ts │ ├── get-stream-from-async-iterable.spec.ts │ ├── index.spec.ts │ ├── ipns-record.spec.ts │ ├── parse-resource.spec.ts │ ├── plugins.spec.ts │ ├── range-requests.spec.ts │ ├── tar.spec.ts │ ├── utils │ │ ├── byte-range-context.spec.ts │ │ ├── get-content-disposition-filename.spec.ts │ │ ├── handle-redirects.spec.ts │ │ ├── parse-url-string.spec.ts │ │ ├── request-headers.spec.ts │ │ ├── resource-to-cache-key.spec.ts │ │ ├── response-headers.spec.ts │ │ ├── select-output-type.spec.ts │ │ └── server-timing.spec.ts │ └── verified-fetch.spec.ts │ ├── tsconfig.json │ └── typedoc.json └── typedoc.json /.cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "import": [ 3 | "./node_modules/aegir/cspell.json" 4 | ], 5 | "dictionaries": ["project"], 6 | "dictionaryDefinitions": [{ 7 | "name": "project", 8 | "path": "./.github/dictionary.txt", 9 | "addWords": true 10 | }] 11 | } 12 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: npm 4 | directories: 5 | - "/" 6 | schedule: 7 | interval: daily 8 | time: "10:00" 9 | open-pull-requests-limit: 20 10 | commit-message: 11 | prefix: "deps" 12 | prefix-development: "chore" 13 | groups: 14 | helia-deps: # group all deps that should be updated when Helia deps need updating 15 | patterns: 16 | - "*helia*" 17 | - "*libp2p*" 18 | - "*multiformats*" 19 | store-deps: # group all blockstore and datastore updates (interface & impl) 20 | patterns: 21 | - "*blockstore*" 22 | - "*datastore*" 23 | kubo-deps: # group kubo, kubo-rpc-client, and ipfsd-ctl updates 24 | patterns: 25 | - "*kubo*" 26 | - "ipfsd-ctl" 27 | - package-ecosystem: "github-actions" 28 | directory: "/" 29 | schedule: 30 | interval: "weekly" 31 | commit-message: 32 | prefix: chore 33 | -------------------------------------------------------------------------------- /.github/dictionary.txt: -------------------------------------------------------------------------------- 1 | ENOENT 2 | GOPATH 3 | mgmt 4 | gotest 5 | XKCD 6 | filetypemime 7 | fleek 8 | msword 9 | msdownload 10 | powerpoint 11 | noopener 12 | noreferrer 13 | nosniff 14 | Segoe 15 | Cantarell 16 | Noto 17 | Consolas 18 | filev 19 | byteranges 20 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Title 2 | 3 | 9 | 10 | ## Description 11 | 12 | 18 | 19 | ## Notes & open questions 20 | 21 | 24 | 25 | ## Change checklist 26 | 27 | - [ ] I have performed a self-review of my own code 28 | - [ ] I have made corresponding changes to the documentation if necessary 
(this includes comments as well) 29 | - [ ] I have added tests that prove my fix is effective or that my feature works 30 | -------------------------------------------------------------------------------- /.github/workflows/gateway-conformance.yml: -------------------------------------------------------------------------------- 1 | name: Gateway Conformance 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | jobs: 11 | gateway-conformance: 12 | runs-on: ubuntu-latest 13 | steps: 14 | # 1. Setup Node, install npm dependencies, and build all packages/* 15 | # see https://github.com/ipdxco/unified-github-workflows/blob/3a1a7870ce5967163d8f5c8210b8ad50b2e659aa/.github/workflows/js-test-and-release.yml#L28-L34 16 | - uses: actions/checkout@v4 17 | - uses: actions/setup-node@v4 18 | with: 19 | node-version: lts/* 20 | - uses: ipfs/aegir/actions/cache-node-modules@main 21 | 22 | # 2. Set up 'go' so we can install the gateway-conformance binary 23 | - name: Setup Go 24 | uses: actions/setup-go@v5 25 | with: 26 | go-version: 1.22.x 27 | 28 | # 3. Download the gateway-conformance fixtures using ipfs/gateway-conformance action 29 | # This will prevent us from needing to install `docker` on the GitHub runner 30 | - name: Download gateway-conformance fixtures 31 | uses: ipfs/gateway-conformance/.github/actions/extract-fixtures@v0.7 32 | # working-directory: ./packages/gateway-conformance 33 | with: 34 | output: ./packages/gateway-conformance/dist/src/fixtures/data/gateway-conformance-fixtures 35 | 36 | 37 | # 4. Run the tests 38 | - name: Run gateway-conformance tests 39 | run: | 40 | npm run test 41 | working-directory: ./packages/gateway-conformance 42 | 43 | # 5. Convert JSON output to reports similar to how it's done at https://github.com/ipfs/gateway-conformance/blob/main/.github/actions/test/action.yml 44 | # the 'gwc-report-all.json' file is created by the 'has expected total failures and successes' test 45 | # TODO: remove this when we're passing enough tests to use the 'ipfs/gateway-conformance/.github/actions/test' action 46 | - name: Create the XML 47 | if: failure() || success() 48 | uses: pl-strflt/gotest-json-to-junit-xml@v1 49 | with: 50 | input: ./packages/gateway-conformance/gwc-report-all.json 51 | output: ./packages/gateway-conformance/gwc-report-all.xml 52 | - name: Create the HTML 53 | if: failure() || success() 54 | uses: pl-strflt/junit-xml-to-html@v1 55 | with: 56 | mode: no-frames 57 | input: ./packages/gateway-conformance/gwc-report-all.xml 58 | output: ./packages/gateway-conformance/gwc-report-all.html 59 | - name: Create the Markdown 60 | if: failure() || success() 61 | uses: pl-strflt/junit-xml-to-html@v1 62 | with: 63 | mode: summary 64 | input: ./packages/gateway-conformance/gwc-report-all.xml 65 | output: ./packages/gateway-conformance/gwc-report-all.md 66 | 67 | # 6. 
Upload the reports 68 | - name: Upload MD summary 69 | if: failure() || success() 70 | run: cat ./packages/gateway-conformance/gwc-report-all.md >> $GITHUB_STEP_SUMMARY 71 | - name: Upload HTML report 72 | if: failure() || success() 73 | uses: actions/upload-artifact@v4 74 | with: 75 | name: gateway-conformance.html 76 | path: ./packages/gateway-conformance/gwc-report-all.html 77 | - name: Upload JSON report 78 | if: failure() || success() 79 | uses: actions/upload-artifact@v4 80 | with: 81 | name: gateway-conformance.json 82 | path: ./packages/gateway-conformance/gwc-report-all.json 83 | -------------------------------------------------------------------------------- /.github/workflows/generated-pr.yml: -------------------------------------------------------------------------------- 1 | name: Close Generated PRs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | stale: 14 | uses: ipdxco/unified-github-workflows/.github/workflows/reusable-generated-pr.yml@v1 15 | -------------------------------------------------------------------------------- /.github/workflows/js-test-and-release.yml: -------------------------------------------------------------------------------- 1 | name: test & maybe release 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | permissions: 11 | contents: write 12 | id-token: write 13 | packages: write 14 | pull-requests: write 15 | 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | js-test-and-release: 22 | uses: ipdxco/unified-github-workflows/.github/workflows/js-test-and-release.yml@v1.0 23 | secrets: 24 | DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }} 25 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 26 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 27 | UCI_GITHUB_TOKEN: ${{ secrets.UCI_GITHUB_TOKEN }} 28 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 29 | -------------------------------------------------------------------------------- /.github/workflows/semantic-pull-request.yml: -------------------------------------------------------------------------------- 1 | name: Semantic PR 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | jobs: 11 | main: 12 | uses: pl-strflt/.github/.github/workflows/reusable-semantic-pull-request.yml@v0.3 13 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Close Stale Issues 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | workflow_dispatch: 7 | 8 | permissions: 9 | issues: write 10 | pull-requests: write 11 | 12 | jobs: 13 | stale: 14 | uses: ipdxco/unified-github-workflows/.github/workflows/reusable-stale-issue.yml@v1 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | dist 4 | .docs 5 | .coverage 6 | node_modules 7 | package-lock.json 8 | yarn.lock 9 | .vscode 10 | .tmp-compiled-docs 11 | tsconfig-doc-check.aegir.json 12 | removed-passing-tests.json 13 | -------------------------------------------------------------------------------- /CODEOWNERS: 
-------------------------------------------------------------------------------- 1 | * @ipfs/helia-dev 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This project is dual licensed under MIT and Apache-2.0. 2 | 3 | MIT: https://www.opensource.org/licenses/mit 4 | Apache-2.0: https://www.apache.org/licenses/license-2.0 5 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at 2 | 3 | http://www.apache.org/licenses/LICENSE-2.0 4 | 5 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 6 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

<p align="center"> 2 | <a href="https://github.com/ipfs/helia" title="Helia"> 3 | <img src="https://raw.githubusercontent.com/ipfs/helia/main/assets/helia.png" alt="Helia logo" width="300" /> 4 | </a> 5 | </p>

6 | 7 | # helia-verified-fetch 8 | 9 | [![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) 10 | [![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) 11 | [![codecov](https://img.shields.io/codecov/c/github/ipfs/helia-verified-fetch.svg?style=flat-square)](https://codecov.io/gh/ipfs/helia-verified-fetch) 12 | [![CI](https://img.shields.io/github/actions/workflow/status/ipfs/helia-verified-fetch/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain) 13 | 14 | > A fetch-like API for obtaining verified & trustless IPFS content on the web 15 | 16 | ## About 17 | 18 | This monorepo contains the `@helia/verified-fetch` package and its corresponding interop tests. 19 | 20 | # Packages 21 | 22 | - [`packages/gateway-conformance`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/gateway-conformance) Gateway conformance tests for @helia/verified-fetch 23 | - [`packages/interop`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/interop) Interop tests for @helia/verified-fetch 24 | - [`packages/verified-fetch`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/verified-fetch) A fetch-like API for obtaining verified & trustless IPFS content on the web 25 | 26 | # Getting started 27 | 28 | See the [**`@helia/verified-fetch`**](./packages/verified-fetch#readme) package for how to get started, including usage examples; a minimal usage sketch also appears near the end of this README. 29 | 30 | Learn more in the [announcement blog post](https://blog.ipfs.tech/verified-fetch/) and check out the [ready-to-run example](https://github.com/ipfs-examples/helia-examples/tree/main/examples/helia-browser-verified-fetch). 31 | 32 | # API Docs 33 | 34 | - <https://ipfs.github.io/helia-verified-fetch> 35 | 36 | # License 37 | 38 | Licensed under either of 39 | 40 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>) 41 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/LICENSE-MIT) / <http://opensource.org/licenses/MIT>) 42 | 43 | # Contribute 44 | 45 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues). 46 | 47 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. 48 | 49 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). 50 | 51 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
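For quick orientation, here is a minimal usage sketch of the published package, assuming the `@helia/verified-fetch` API documented in the package README (the CID is the example CID used elsewhere in this repo):

```ts
import { verifiedFetch } from '@helia/verified-fetch'

// fetch IPFS content by CID; blocks are verified locally as they arrive
const resp = await verifiedFetch('ipfs://bafkqabtimvwgy3yk')
console.log(resp.status, await resp.text())
```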
52 | 53 | [![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) 54 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "helia-verified-fetch", 3 | "version": "1.0.0", 4 | "description": "A fetch-like API for obtaining verified & trustless IPFS content on the web", 5 | "license": "Apache-2.0 OR MIT", 6 | "homepage": "https://github.com/ipfs/helia-verified-fetch#readme", 7 | "repository": { 8 | "type": "git", 9 | "url": "git+https://github.com/ipfs/helia-verified-fetch.git" 10 | }, 11 | "bugs": { 12 | "url": "https://github.com/ipfs/helia-verified-fetch/issues" 13 | }, 14 | "keywords": [ 15 | "ipfs" 16 | ], 17 | "private": true, 18 | "scripts": { 19 | "reset": "aegir run clean && aegir clean **/node_modules **/package-lock.json", 20 | "test": "aegir run test", 21 | "test:node": "aegir run test:node", 22 | "test:chrome": "aegir run test:chrome", 23 | "test:chrome-webworker": "aegir run test:chrome-webworker", 24 | "test:firefox": "aegir run test:firefox", 25 | "test:firefox-webworker": "aegir run test:firefox-webworker", 26 | "test:electron-main": "aegir run test:electron-main", 27 | "test:electron-renderer": "aegir run test:electron-renderer", 28 | "clean": "aegir run clean", 29 | "generate": "aegir run generate", 30 | "build": "aegir run build", 31 | "lint": "aegir run lint", 32 | "dep-check": "aegir run dep-check", 33 | "doc-check": "aegir run doc-check", 34 | "spell-check": "aegir spell-check", 35 | "release": "run-s build docs:no-publish npm:release docs", 36 | "npm:release": "aegir run release --concurrency 1", 37 | "docs": "aegir docs", 38 | "docs:no-publish": "aegir docs --publish false" 39 | }, 40 | "devDependencies": { 41 | "aegir": "^47.0.11", 42 | "npm-run-all": "^4.1.5" 43 | }, 44 | "type": "module", 45 | "workspaces": [ 46 | "packages/*" 47 | ] 48 | } 49 | -------------------------------------------------------------------------------- /packages/gateway-conformance/.aegir.js: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | import getPort from 'aegir/get-port' 3 | import { logger } from '@libp2p/logger' 4 | const log = logger('aegir') 5 | 6 | /** @type {import('aegir').PartialOptions} */ 7 | export default { 8 | build: { 9 | bundlesizeMax: '1KB' 10 | }, 11 | test: { 12 | files: ['./dist/src/*.spec.js'], 13 | before: async (options) => { 14 | if (options.runner !== 'node') { 15 | throw new Error('Only node runner is supported') 16 | } 17 | 18 | const { createKuboNode } = await import('./dist/src/fixtures/create-kubo.js') 19 | const KUBO_PORT = await getPort(3440) 20 | const SERVER_PORT = await getPort(3441) 21 | // The Kubo gateway will be passed to the VerifiedFetch config 22 | const { node: controller, gatewayUrl, repoPath } = await createKuboNode(KUBO_PORT) 23 | await controller.start() 24 | const { loadKuboFixtures } = await import('./dist/src/fixtures/kubo-mgmt.js') 25 | const IPFS_NS_MAP = await loadKuboFixtures(repoPath) 26 | const kuboGateway = gatewayUrl 27 | 28 | const { startVerifiedFetchGateway } = await import('./dist/src/fixtures/basic-server.js') 29 | const stopBasicServer = await startVerifiedFetchGateway({ 30 | serverPort: SERVER_PORT, 31 | kuboGateway, 32 | IPFS_NS_MAP 33 | }).catch((err) => { 34 | log.error(err) 35 | }) 36 | 37 | const CONFORMANCE_HOST = 'localhost' 38 | 39 | 
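// Hand the Kubo controller and the gateway teardown function to the after() hook below,
// and expose the ports and IPFS_NS_MAP to the tests via environment variables.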
return { 40 | controller, 41 | stopBasicServer, 42 | env: { 43 | IPFS_NS_MAP, 44 | CONFORMANCE_HOST, 45 | KUBO_PORT: `${KUBO_PORT}`, 46 | SERVER_PORT: `${SERVER_PORT}`, 47 | KUBO_GATEWAY: kuboGateway 48 | } 49 | } 50 | }, 51 | after: async (options, beforeResult) => { 52 | // @ts-expect-error - broken aegir types 53 | await beforeResult.controller.stop() 54 | log('controller stopped') 55 | 56 | // @ts-expect-error - broken aegir types 57 | await beforeResult.stopBasicServer() 58 | log('basic server stopped') 59 | 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /packages/gateway-conformance/.gitignore: -------------------------------------------------------------------------------- 1 | gwc-report*.json 2 | -------------------------------------------------------------------------------- /packages/gateway-conformance/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) 4 | -------------------------------------------------------------------------------- /packages/gateway-conformance/LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /packages/gateway-conformance/README.md: -------------------------------------------------------------------------------- 1 |

<p align="center"> 2 | <a href="https://github.com/ipfs/helia" title="Helia"> 3 | <img src="https://raw.githubusercontent.com/ipfs/helia/main/assets/helia.png" alt="Helia logo" width="300" /> 4 | </a> 5 | </p>

6 | 7 | # @helia/verified-fetch-gateway-conformance 8 | 9 | [![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) 10 | [![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) 11 | [![codecov](https://img.shields.io/codecov/c/github/ipfs/helia-verified-fetch.svg?style=flat-square)](https://codecov.io/gh/ipfs/helia-verified-fetch) 12 | [![CI](https://img.shields.io/github/actions/workflow/status/ipfs/helia-verified-fetch/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain) 13 | 14 | > Gateway conformance tests for @helia/verified-fetch 15 | 16 | # About 17 | 18 | 32 | 33 | Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a 34 | backing trustless-gateway. 35 | 36 | ## Example - Testing a new @helia/verified-fetch release 37 | 38 | ```console 39 | $ npm i @helia/verified-fetch-gateway-conformance 40 | $ VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b verified-fetch-gateway-conformance 41 | ``` 42 | 43 | ## Example - Testing with a different Kubo version 44 | 45 | ```console 46 | $ npm i @helia/verified-fetch-gateway-conformance 47 | $ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance 48 | ``` 49 | 50 | ## Example - using a different gateway-conformance image 51 | 52 | ```console 53 | $ GWC_IMAGE=ghcr.io/ipfs/gateway-conformance:v0.5.1 verified-fetch-gateway-conformance 54 | ``` 55 | 56 | ## Example - Debugging a test run 57 | 58 | ```console 59 | $ DEBUG="-mocha*,*,*:trace" npm run test # very verbose output 60 | $ DEBUG="conformance-tests*,conformance-tests*:trace" npm run test # only gateway-conformance test output 61 | ``` 62 | 63 | ## Example - querying the gateway-conformance server directly 64 | 65 | ```console 66 | $ npm run build 67 | $ node dist/src/demo-server.js # in terminal 1 68 | $ curl -v GET http://localhost:3442/ipfs/bafkqabtimvwgy3yk/ # in terminal 2 69 | ``` 70 | 71 | ## Troubleshooting 72 | 73 | ### Missing file in gateway-conformance-fixtures folder 74 | 75 | If you see the following error: 76 | 77 | > ENOENT: no such file or directory, open '\[...]/helia-verified-fetch/packages/gateway-conformance/dist/src/... 78 | 79 | This likely means the docker container is not executing properly for some 80 | reason. You can try running the following command to see if there are any 81 | errors: `DEBUG="-mocha*,*,*:trace" npm run test` 82 | 83 | # Install 84 | 85 | ```console 86 | $ npm i @helia/verified-fetch-gateway-conformance 87 | ``` 88 | 89 | # License 90 | 91 | Licensed under either of 92 | 93 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/gateway-conformance/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>) 94 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/gateway-conformance/LICENSE-MIT) / <http://opensource.org/licenses/MIT>) 95 | 96 | # Contribute 97 | 98 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues). 99 | 100 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. 101 | 102 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). 
103 | 104 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 105 | 106 | [![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) 107 | -------------------------------------------------------------------------------- /packages/gateway-conformance/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@helia/verified-fetch-gateway-conformance", 3 | "version": "1.4.0", 4 | "description": "Gateway conformance tests for @helia/verified-fetch", 5 | "license": "Apache-2.0 OR MIT", 6 | "homepage": "https://github.com/ipfs/helia-verified-fetch/tree/main/packages/gateway-conformance#readme", 7 | "repository": { 8 | "type": "git", 9 | "url": "git+https://github.com/ipfs/helia-verified-fetch.git" 10 | }, 11 | "bugs": { 12 | "url": "https://github.com/ipfs/helia-verified-fetch/issues" 13 | }, 14 | "publishConfig": { 15 | "access": "public", 16 | "provenance": true 17 | }, 18 | "keywords": [ 19 | "IPFS" 20 | ], 21 | "bin": { 22 | "demo-server": "./dist/src/demo-server.js", 23 | "verified-fetch-gateway-conformance": "./dist/src/bin.js" 24 | }, 25 | "type": "module", 26 | "types": "./dist/src/index.d.ts", 27 | "files": [ 28 | "src", 29 | "dist", 30 | "!dist/test", 31 | "!**/*.tsbuildinfo" 32 | ], 33 | "exports": { 34 | ".": { 35 | "types": "./dist/src/index.d.ts", 36 | "import": "./dist/src/index.js" 37 | } 38 | }, 39 | "release": { 40 | "branches": [ 41 | "main" 42 | ], 43 | "plugins": [ 44 | [ 45 | "@semantic-release/commit-analyzer", 46 | { 47 | "preset": "conventionalcommits", 48 | "releaseRules": [ 49 | { 50 | "breaking": true, 51 | "release": "major" 52 | }, 53 | { 54 | "revert": true, 55 | "release": "patch" 56 | }, 57 | { 58 | "type": "feat", 59 | "release": "minor" 60 | }, 61 | { 62 | "type": "fix", 63 | "release": "patch" 64 | }, 65 | { 66 | "type": "docs", 67 | "release": "patch" 68 | }, 69 | { 70 | "type": "test", 71 | "release": "patch" 72 | }, 73 | { 74 | "type": "deps", 75 | "release": "patch" 76 | }, 77 | { 78 | "scope": "no-release", 79 | "release": false 80 | } 81 | ] 82 | } 83 | ], 84 | [ 85 | "@semantic-release/release-notes-generator", 86 | { 87 | "preset": "conventionalcommits", 88 | "presetConfig": { 89 | "types": [ 90 | { 91 | "type": "feat", 92 | "section": "Features" 93 | }, 94 | { 95 | "type": "fix", 96 | "section": "Bug Fixes" 97 | }, 98 | { 99 | "type": "chore", 100 | "section": "Trivial Changes" 101 | }, 102 | { 103 | "type": "docs", 104 | "section": "Documentation" 105 | }, 106 | { 107 | "type": "deps", 108 | "section": "Dependencies" 109 | }, 110 | { 111 | "type": "test", 112 | "section": "Tests" 113 | } 114 | ] 115 | } 116 | } 117 | ], 118 | "@semantic-release/changelog", 119 | "@semantic-release/npm", 120 | "@semantic-release/github", 121 | [ 122 | "@semantic-release/git", 123 | { 124 | "assets": [ 125 | "CHANGELOG.md", 126 | "package.json" 127 | ] 128 | } 129 | ] 130 | ] 131 | }, 132 | "scripts": { 133 | "clean": "aegir clean dist gwc-report-*.json", 134 | "lint": "aegir lint", 135 | "dep-check": "aegir dep-check", 136 | "doc-check": "aegir doc-check", 137 | "build": "aegir build", 138 | "test": "aegir test -t node", 139 | "update": "npm run build && node dist/src/update-expected-tests.js", 140 | "release": "aegir release" 141 | }, 142 | 
"dependencies": { 143 | "@helia/block-brokers": "^4.2.1", 144 | "@helia/http": "^2.1.1", 145 | "@helia/interface": "^5.3.1", 146 | "@helia/routers": "^3.1.1", 147 | "@helia/verified-fetch": "^3.0.2", 148 | "@libp2p/interface": "^2.10.1", 149 | "@libp2p/kad-dht": "^15.1.1", 150 | "@libp2p/logger": "^5.1.17", 151 | "@libp2p/peer-id": "^5.1.4", 152 | "@multiformats/dns": "^1.0.6", 153 | "aegir": "^47.0.11", 154 | "blockstore-core": "^5.0.2", 155 | "datastore-core": "^10.0.2", 156 | "execa": "^9.5.3", 157 | "fast-glob": "^3.3.3", 158 | "interface-blockstore": "^5.3.1", 159 | "interface-datastore": "^8.3.1", 160 | "ipfsd-ctl": "^15.0.2", 161 | "kubo": "^0.34.1", 162 | "kubo-rpc-client": "^5.1.0", 163 | "multiformats": "^13.3.6", 164 | "uint8arrays": "^5.1.0", 165 | "undici": "^7.10.0" 166 | }, 167 | "browser": { 168 | "./dist/src/fixtures/create-kubo.js": "./dist/src/fixtures/create-kubo.browser.js", 169 | "kubo": false 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/bin.ts: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | 3 | import { spawn } from 'node:child_process' 4 | import { dirname, resolve } from 'node:path' 5 | import { fileURLToPath } from 'node:url' 6 | 7 | // aegir should be run from `node_modules/@helia/verified-fetch-gateway-conformance` 8 | const cwd = resolve(dirname(fileURLToPath(import.meta.url)), '../../') 9 | 10 | const test = spawn('npx', ['aegir', 'test'], { 11 | cwd 12 | }) 13 | 14 | test.stdout.on('data', (data) => { 15 | process.stdout.write(data) 16 | }) 17 | 18 | test.stderr.on('data', (data) => { 19 | process.stderr.write(data) 20 | }) 21 | 22 | test.on('close', (code) => { 23 | process.exit(code ?? 0) 24 | }) 25 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/constants.ts: -------------------------------------------------------------------------------- 1 | export const GWC_IMAGE = process.env.GWC_IMAGE ?? 'ghcr.io/ipfs/gateway-conformance:v0.8.0' 2 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/demo-server.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Basically copies what .aegir.js does, but without all the env vars and setup.. just so you can run `node src/demo-server.ts` and test queries manually. 
3 | */ 4 | import { logger } from '@libp2p/logger' 5 | import getPort from 'aegir/get-port' 6 | import { startVerifiedFetchGateway } from './fixtures/basic-server.js' 7 | import { createKuboNode } from './fixtures/create-kubo.js' 8 | import { loadKuboFixtures } from './fixtures/kubo-mgmt.js' 9 | import type { KuboNode } from 'ipfsd-ctl' 10 | 11 | const log = logger('demo-server') 12 | 13 | const SERVER_PORT = await getPort(3441) 14 | 15 | let kuboGateway: string | undefined 16 | let controller: KuboNode | undefined 17 | let IPFS_NS_MAP = '' 18 | if (process.env.KUBO_GATEWAY == null) { 19 | const KUBO_GATEWAY_PORT = await getPort(3440) 20 | const kuboNodeDetails = await createKuboNode(KUBO_GATEWAY_PORT) 21 | controller = kuboNodeDetails.node 22 | kuboGateway = kuboNodeDetails.gatewayUrl 23 | const repoPath = kuboNodeDetails.repoPath 24 | await controller.start() 25 | IPFS_NS_MAP = await loadKuboFixtures(repoPath) 26 | } 27 | 28 | const stopServer = await startVerifiedFetchGateway({ 29 | serverPort: SERVER_PORT, 30 | kuboGateway, 31 | IPFS_NS_MAP 32 | }) 33 | 34 | process.on('exit', () => { 35 | stopServer().catch((err) => { 36 | log.error('Failed to stop server', err) 37 | }) 38 | controller?.stop().catch((err) => { 39 | log.error('Failed to stop controller', err) 40 | process.exit(1) 41 | }) 42 | }) 43 | 44 | export {} 45 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/fixtures/create-kubo.ts: -------------------------------------------------------------------------------- 1 | import { createNode } from 'ipfsd-ctl' 2 | import { path as kuboPath } from 'kubo' 3 | import { create } from 'kubo-rpc-client' 4 | import type { KuboNode } from 'ipfsd-ctl' 5 | 6 | export interface KuboNodeDetails { 7 | node: KuboNode 8 | gatewayUrl: string 9 | repoPath: string 10 | } 11 | 12 | export async function createKuboNode (listenPort?: number): Promise<KuboNodeDetails> { 13 | const controller = await createNode({ 14 | type: 'kubo', 15 | rpc: create, 16 | test: true, 17 | bin: kuboPath(), 18 | init: { 19 | config: { 20 | Addresses: { 21 | Swarm: [ 22 | '/ip4/0.0.0.0/tcp/0', 23 | '/ip4/0.0.0.0/tcp/0/ws' 24 | ], 25 | Gateway: `/ip4/127.0.0.1/tcp/${listenPort ?? 0}` 26 | }, 27 | Gateway: { 28 | NoFetch: true, 29 | ExposeRoutingAPI: true, 30 | HTTPHeaders: { 31 | 'Access-Control-Allow-Origin': ['*'], 32 | 'Access-Control-Allow-Methods': ['GET', 'POST', 'PUT', 'OPTIONS'] 33 | } 34 | } 35 | } 36 | }, 37 | args: ['--enable-pubsub-experiment', '--enable-namesys-pubsub'] 38 | }) 39 | const info = await controller.info() 40 | 41 | return { 42 | node: controller, 43 | gatewayUrl: `http://127.0.0.1:${listenPort ?? 0}`, 44 | repoPath: info.repo 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/fixtures/create-verified-fetch.ts: -------------------------------------------------------------------------------- 1 | import type { Helia } from '@helia/interface' 2 | import type { CreateVerifiedFetchInit, CreateVerifiedFetchOptions, VerifiedFetch } from '@helia/verified-fetch' 3 | export async function createVerifiedFetch (init?: CreateVerifiedFetchInit | Helia, options?: CreateVerifiedFetchOptions): Promise<VerifiedFetch> { 4 | const { createVerifiedFetch: createVerifiedFetchOriginal } = await import(process.env.VERIFIED_FETCH ?? 
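// the VERIFIED_FETCH env var can point at an alternate package or version to test against (see the README examples); otherwise the workspace copy is used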
'@helia/verified-fetch') 5 | 6 | return createVerifiedFetchOriginal(init, options) 7 | } 8 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/fixtures/get-local-dns-resolver.ts: -------------------------------------------------------------------------------- 1 | import { logger } from '@libp2p/logger' 2 | import type { Answer, Question } from '@multiformats/dns' 3 | import type { DNSResolver } from '@multiformats/dns/resolvers' 4 | 5 | export function getLocalDnsResolver (ipfsNsMap: string, kuboGateway: string): DNSResolver { 6 | const log = logger('basic-server:dns') 7 | const nsMap = new Map<string, string>() 8 | const keyVals = ipfsNsMap.split(',') 9 | for (const keyVal of keyVals) { 10 | const [key, val] = keyVal.split(':') 11 | log('Setting entry: %s="%s"', key, val) 12 | nsMap.set(key, val) 13 | } 14 | 15 | return async (domain, options) => { 16 | const questions: Question[] = [] 17 | const answers: Answer[] = [] 18 | 19 | if (Array.isArray(options?.types)) { 20 | options?.types?.forEach?.((type) => { 21 | questions.push({ name: domain, type }) 22 | }) 23 | } else { 24 | questions.push({ name: domain, type: options?.types ?? 16 }) 25 | } 26 | // TODO: do we need to do anything with CNAME resolution...? 27 | // if (questions.some((q) => q.type === 5)) { 28 | // answers.push({ 29 | // name: domain, 30 | // type: 5, 31 | // TTL: 180, 32 | // data: '' 33 | // }) 34 | // } 35 | if (questions.some((q) => q.type === 16)) { 36 | log.trace('Querying "%s" for types %O', domain, options?.types) 37 | const actualDomainKey = domain.replace('_dnslink.', '') 38 | const nsValue = nsMap.get(actualDomainKey) 39 | if (nsValue == null) { 40 | log.error('No IPFS_NS_MAP entry for domain "%s"', actualDomainKey) 41 | 42 | throw new Error('No IPFS_NS_MAP entry for domain') 43 | } 44 | const data = `dnslink=${nsValue}` 45 | answers.push({ 46 | name: domain, 47 | type: 16, 48 | TTL: 180, 49 | data // should be in the format 'dnslink=/ipfs/bafyfoo' 50 | }) 51 | } 52 | 53 | const dnsResponse = { 54 | Status: 0, 55 | TC: false, 56 | RD: false, 57 | RA: false, 58 | AD: true, 59 | CD: true, 60 | Question: questions, 61 | Answer: answers 62 | } 63 | 64 | log.trace('Returning DNS response for %s: %O', domain, dnsResponse) 65 | 66 | return dnsResponse 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/fixtures/header-utils.ts: -------------------------------------------------------------------------------- 1 | import type { Logger } from '@libp2p/logger' 2 | import type { IncomingHttpHeaders } from 'undici/types/header.js' 3 | 4 | export function convertNodeJsHeadersToFetchHeaders (headers: IncomingHttpHeaders): HeadersInit { 5 | const fetchHeaders = new Headers() 6 | for (const [key, value] of Object.entries(headers)) { 7 | if (value == null) { 8 | continue 9 | } 10 | if (Array.isArray(value)) { 11 | for (const v of value) { 12 | fetchHeaders.append(key, v) 13 | } 14 | } else { 15 | fetchHeaders.append(key, value) 16 | } 17 | } 18 | return fetchHeaders 19 | } 20 | 21 | export interface ConvertFetchHeadersToNodeJsHeadersOptions { 22 | resp: Response 23 | log: Logger 24 | fixingGwcAnnoyance: boolean 25 | serverPort: number 26 | } 27 | 28 | export function convertFetchHeadersToNodeJsHeaders ({ resp, log, fixingGwcAnnoyance, serverPort }: ConvertFetchHeadersToNodeJsHeadersOptions): IncomingHttpHeaders { 29 | const headers: Record<string, string> = {} 30 | for (const [key, value] of resp.headers.entries()) 
{ 31 | if (fixingGwcAnnoyance) { 32 | log.trace('need to fix GWC annoyance.') 33 | if (value.includes(`localhost:${serverPort}`)) { 34 | const newValue = value.replace(`localhost:${serverPort}`, 'localhost') 35 | log.trace('fixing GWC annoyance. Replacing Header[%s] value of "%s" with "%s"', key, value, newValue) 36 | // we need to fix any Location, or other headers that have localhost without port in them. 37 | headers[key] = newValue 38 | } else { 39 | log.trace('NOT fixing GWC annoyance. Setting Header[%s] value of "%s"', key, value) 40 | headers[key] = value 41 | } 42 | } else { 43 | headers[key] = value 44 | } 45 | } 46 | return headers 47 | } 48 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/fixtures/ipns-record-datastore.ts: -------------------------------------------------------------------------------- 1 | import { MemoryDatastore } from 'datastore-core' 2 | import type { Datastore } from 'interface-datastore' 3 | 4 | const datastore = new MemoryDatastore() 5 | /** 6 | * We need a normalized datastore so we can set custom records 7 | * from the IPFS_NS_MAP like kubo does. 8 | */ 9 | export function getIpnsRecordDatastore (): Datastore { 10 | return datastore 11 | } 12 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/get-report-details.ts: -------------------------------------------------------------------------------- 1 | import { readFile } from 'node:fs/promises' 2 | export interface ReportDetails { 3 | passingTests: string[] 4 | failingTests: string[] 5 | failureCount: number 6 | successCount: number 7 | successRate: number 8 | } 9 | 10 | export async function getReportDetails (path: string): Promise { 11 | let failureCount = 0 12 | let successCount = 0 13 | const passingTests: string[] = [] 14 | const failingTests: string[] = [] 15 | 16 | // parse the newline delimited JSON report at gwc-report-${name}.json and count the number of "PASS:" and "FAIL:" lines 17 | const report = await readFile(path, 'utf8') 18 | const lines = report.split('\n') 19 | for (const line of lines) { 20 | if (line.includes('--- FAIL:')) { 21 | failureCount++ 22 | failingTests.push(line.split('--- FAIL: ')[1].split(' ')[0]) 23 | } else if (line.includes('--- PASS:')) { 24 | successCount++ 25 | passingTests.push(line.split('--- PASS: ')[1].split(' ')[0]) 26 | } 27 | } 28 | const successRate = Number.parseFloat(((successCount / (successCount + failureCount)) * 100).toFixed(2)) 29 | 30 | return { 31 | failingTests, 32 | passingTests, 33 | failureCount, 34 | successCount, 35 | successRate 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/get-tests-to-run.ts: -------------------------------------------------------------------------------- 1 | import { getWontFixTests } from './get-wontfix-tests.js' 2 | 3 | /** 4 | * 5 | * You can see output for specific tests with something like 6 | * 7 | * @example 8 | * 9 | * ``` 10 | * DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS='TestNativeDag/HEAD_plain_JSON_codec_with_no_explicit_format_returns_HTTP_200.*' npm run test 11 | * ``` 12 | * 13 | * If you run `npm run update` and see that some passing tests are removed, you should probably verify that those tests 14 | * pass. 
You can choose not to update `expected-failing-tests.json` and `expected-passing-tests.json` and then choose to 15 | * save the removed passing tests to a file to ensure that they do still pass with a command like: 16 | * 17 | * @example 18 | * ``` 19 | * DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS="$(jq -r '.[]' removed-passing-tests.json | paste -sd ',' -)" npm run test 20 | * ``` 21 | */ 22 | export function getTestsToRun (): string[] { 23 | const envTestsToRun = process.env.RUN_TESTS != null ? process.env.RUN_TESTS.split(',') : [] 24 | // by default, we filter out tests that we know we are not going to fix... 25 | // set FORCE_RUN=true to run all tests you set in RUN_TESTS (even if they are in the wontfix list) 26 | const shouldFilterOutWontFixTests = process.env.FORCE_RUN == null 27 | const wontFixTests = getWontFixTests() 28 | // TODO: tests to run can be gotest based regex, we need to be smarter about filtering. 29 | const testsToRun = shouldFilterOutWontFixTests ? envTestsToRun.filter((test) => !wontFixTests.includes(test)) : envTestsToRun 30 | return testsToRun 31 | } 32 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/get-tests-to-skip.ts: -------------------------------------------------------------------------------- 1 | import { getWontFixTests } from './get-wontfix-tests.js' 2 | 3 | /** 4 | * 5 | * You can skip certain tests by setting SKIP_TESTS to a comma-separated list of test names 6 | * 7 | * @example 8 | * 9 | * ``` 10 | * SKIP_TESTS='TestNativeDag/HEAD_plain_JSON_codec_with_no_explicit_format_returns_HTTP_200.*' npm run test 11 | * ``` 12 | */ 13 | export function getTestsToSkip (): string[] { 14 | const envTestsToSkip = process.env.SKIP_TESTS != null ? process.env.SKIP_TESTS.split(',') : [] 15 | const testsToSkip = [...getWontFixTests(), ...envTestsToSkip] 16 | return testsToSkip 17 | } 18 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/get-wontfix-tests.ts: -------------------------------------------------------------------------------- 1 | export function getWontFixTests (): string[] { 2 | return [ 3 | // these tests are dependent upon supporting multi-range requests: https://github.com/ipfs/helia-verified-fetch/pull/207 4 | 'TestNativeDag/Convert_application%2Fvnd.ipld.dag-cbor_to_application%2Fvnd.ipld.dag-json_with_range_request_includes_correct_bytes_-_multi_range/Check_1', 5 | 'TestNativeDag/Convert_application%2Fvnd.ipld.dag-cbor_to_application%2Fvnd.ipld.dag-json_with_range_request_includes_correct_bytes_-_multi_range' 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @packageDocumentation 3 | * 4 | * Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a 5 | * backing trustless-gateway. 
6 | * 7 | * @example Testing a new @helia/verified-fetch release 8 | * 9 | * ```console 10 | * $ npm i @helia/verified-fetch-gateway-conformance 11 | * $ VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b verified-fetch-gateway-conformance 12 | * ``` 13 | * 14 | * @example Testing with a different Kubo version 15 | * 16 | * ```console 17 | * $ npm i @helia/verified-fetch-gateway-conformance 18 | * $ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance 19 | * ``` 20 | * 21 | * @example using a different gateway-conformance image 22 | * 23 | * ```console 24 | * $ GWC_IMAGE=ghcr.io/ipfs/gateway-conformance:v0.5.1 verified-fetch-gateway-conformance 25 | * ``` 26 | * 27 | * @example Debugging a test run 28 | * 29 | * ```console 30 | * $ DEBUG="-mocha*,*,*:trace" npm run test # very verbose output 31 | * $ DEBUG="conformance-tests*,conformance-tests*:trace" npm run test # only gateway-conformance test output 32 | * ``` 33 | * 34 | * @example querying the gateway-conformance server directly 35 | * 36 | * ```console 37 | * $ npm run build 38 | * $ node dist/src/demo-server.js # in terminal 1 39 | * $ curl -v GET http://localhost:3442/ipfs/bafkqabtimvwgy3yk/ # in terminal 2 40 | * ``` 41 | * 42 | * ## Troubleshooting 43 | * 44 | * ### Missing file in gateway-conformance-fixtures folder 45 | * 46 | * If you see the following error: 47 | * > ENOENT: no such file or directory, open '[...]/helia-verified-fetch/packages/gateway-conformance/dist/src/... 48 | * 49 | * This likely means the docker container is not executing properly for some 50 | * reason. You can try running the following command to see if there are any 51 | * errors: `DEBUG="-mocha*,*,*:trace" npm run test` 52 | */ 53 | 54 | export {} 55 | -------------------------------------------------------------------------------- /packages/gateway-conformance/src/update-expected-tests.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | /** 3 | * Script that will read gwc-report-all.json and update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results. 4 | * 5 | * This is useful when you want to update the expected test results after running the tests with the following command: 6 | * 7 | * ```bash 8 | * SUCCESS_RATE=100 npm run test -- --bail=false 9 | * ``` 10 | * 11 | * This will run all the tests and update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results. 12 | */ 13 | 14 | import { readFile, writeFile } from 'node:fs/promises' 15 | import { join } from 'node:path' 16 | import readline from 'node:readline' 17 | import { getReportDetails } from './get-report-details.js' 18 | 19 | /** 20 | * Prompt the user with a yes / no question. 21 | * 22 | * @param {string} question - The text to show. 23 | * @returns {Promise<boolean>} Resolves to the user’s choice. 
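 * @example
 * // keeps prompting until the user answers y/yes or n/no
 * const proceed = await confirm('Update the expected test files?')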
24 | */ 25 | async function confirm (question: string): Promise<boolean> { 26 | const hint = ' [y/n] ' 27 | const rl = readline.createInterface({ input: process.stdin, output: process.stdout }) 28 | 29 | return new Promise(resolve => { 30 | const ask = (): void => { 31 | rl.question(`${question}${hint}`, input => { 32 | const a = input.trim().toLowerCase() 33 | 34 | if (['y', 'yes'].includes(a)) { rl.close(); resolve(true); return } 35 | if (['n', 'no'].includes(a)) { rl.close(); resolve(false); return } 36 | 37 | console.log('Please type "y" or "n" then press Enter.') 38 | ask() // repeat until valid 39 | }) 40 | } 41 | ask() 42 | }) 43 | } 44 | // display a warning that this should not be done blindly and that the updated passing and failing tests should be checked for correctness 45 | console.warn('WARNING: This will update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results.') 46 | console.warn('WARNING: This should not be done blindly; the updated passing and failing tests should be checked for correctness.') 47 | 48 | const expectedPassingTestsPath = join(process.cwd(), 'src', 'expected-passing-tests.json') 49 | const expectedFailingTestsPath = join(process.cwd(), 'src', 'expected-failing-tests.json') 50 | 51 | const currentPassingTests: string[] = JSON.parse(await readFile(expectedPassingTestsPath, 'utf-8')) 52 | const currentFailingTests: string[] = JSON.parse(await readFile(expectedFailingTestsPath, 'utf-8')) 53 | 54 | const { passingTests, failingTests } = await getReportDetails('gwc-report-all.json') 55 | 56 | // output the differences between the current passing and failing tests and the new passing and failing tests 57 | console.log('Differences between the current passing and failing tests and the new passing and failing tests:') 58 | console.log('Added passing tests:') 59 | const passingTestAdditions = passingTests.filter((test: string) => !currentPassingTests.includes(test)) 60 | console.log(passingTestAdditions) 61 | console.log('Removed passing tests:') 62 | const passingTestRemovals = currentPassingTests.filter((test: string) => !passingTests.includes(test)) 63 | console.log(passingTestRemovals) 64 | console.log('Added failing tests:') 65 | const failingTestAdditions = failingTests.filter((test: string) => !currentFailingTests.includes(test)) 66 | console.log(failingTestAdditions) 67 | console.log('Removed failing tests:') 68 | const failingTestRemovals = currentFailingTests.filter((test: string) => !failingTests.includes(test)) 69 | console.log(failingTestRemovals) 70 | 71 | if (failingTestAdditions.length > 0 || passingTestRemovals.length > 0) { 72 | console.warn('WARNING: There are previously passing tests that are now failing, is this expected?') 73 | } 74 | 75 | if (passingTestRemovals.length + failingTestRemovals.length + passingTestAdditions.length + failingTestAdditions.length > 0) { 76 | const answer = await confirm('Are you sure you want to update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results?') 77 | 78 | if (!answer) { 79 | console.log('Aborting.') 80 | 81 | if (passingTestRemovals.length > 0) { 82 | // to help with debugging, we can save the removed passing tests to a file to ensure that they do still pass with a command like: 83 | // DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS="$(jq -r '.[]' removed-passing-tests.json | paste -sd ',' -)" npm run test 84 | const shouldSaveRemovedPassingTests = await confirm('Should we save the removed 
passing tests to removed-passing-tests.json file?') 85 | if (shouldSaveRemovedPassingTests) { 86 | await writeFile('removed-passing-tests.json', JSON.stringify(passingTestRemovals, null, 2) + '\n') 87 | } 88 | } 89 | 90 | process.exit(0) 91 | } 92 | await writeFile(expectedPassingTestsPath, JSON.stringify(passingTests, null, 2) + '\n') 93 | await writeFile(expectedFailingTestsPath, JSON.stringify(failingTests, null, 2) + '\n') 94 | } else { 95 | console.log('No changes to the expected-passing-tests.json and expected-failing-tests.json files.') 96 | } 97 | -------------------------------------------------------------------------------- /packages/gateway-conformance/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "aegir/src/config/tsconfig.aegir.json", 3 | "compilerOptions": { 4 | "outDir": "dist", 5 | "target": "ES2022", 6 | "module": "nodenext", 7 | "moduleResolution": "nodenext" 8 | }, 9 | "include": [ 10 | "src", 11 | "test", 12 | "src/expected-passing-tests.json", 13 | "src/expected-failing-tests.json" 14 | ], 15 | "references": [ 16 | { 17 | "path": "../verified-fetch" 18 | } 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /packages/gateway-conformance/typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "entryPoints": [ 3 | "./src/index.ts" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /packages/interop/.aegir.js: -------------------------------------------------------------------------------- 1 | import { resolve } from 'node:path' 2 | import { tmpdir } from 'node:os' 3 | import { createDelegatedRoutingV1HttpApiServer } from '@helia/delegated-routing-v1-http-api-server' 4 | import { stubInterface } from 'sinon-ts' 5 | 6 | const IPFS_PATH = resolve(tmpdir(), 'verified-fetch-interop-ipfs-repo') 7 | 8 | /** @type {import('aegir').PartialOptions} */ 9 | export default { 10 | build: { 11 | bundlesizeMax: '1KB' 12 | }, 13 | dependencyCheck: { 14 | ignore: [ 15 | '@helia/delegated-routing-v1-http-api-server', 16 | 'sinon-ts' 17 | ] 18 | 19 | }, 20 | test: { 21 | files: './dist/src/*.spec.js', 22 | before: async () => { 23 | 24 | const { createKuboNode } = await import('./dist/src/fixtures/create-kubo.js') 25 | const kuboNode = await createKuboNode(IPFS_PATH) 26 | 27 | await kuboNode.start() 28 | 29 | // requires aegir build to be run first, which it will by default. 30 | const { loadFixtures } = await import('./dist/src/fixtures/load-fixtures.js') 31 | 32 | await loadFixtures(IPFS_PATH) 33 | 34 | const multiaddrs = (await kuboNode.api.id()).addresses 35 | const id = (await kuboNode.api.id()).id 36 | 37 | const helia = stubInterface({ 38 | routing: stubInterface({ 39 | findProviders: async function * findProviders () { 40 | yield { 41 | multiaddrs, 42 | id, 43 | protocols: ['transport-bitswap'] 44 | } 45 | } 46 | }) 47 | }) 48 | const routingServer = await createDelegatedRoutingV1HttpApiServer(helia, { 49 | listen: { 50 | host: '127.0.0.1', 51 | port: 0 52 | } 53 | }) 54 | await routingServer.ready() 55 | 56 | const address = routingServer.server.address() 57 | const port = typeof address === 'string' ? 
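// server.address() returns a string for a unix socket, or an AddressInfo object whose numeric port we need for TCP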
address : address?.port 58 | 59 | return { 60 | kuboNode, 61 | routingServer, 62 | env: { 63 | KUBO_DIRECT_RETRIEVAL_ROUTER: `http://127.0.0.1:${port}` 64 | } 65 | } 66 | }, 67 | after: async (_options, beforeResult) => { 68 | await beforeResult.kuboNode.stop() 69 | await beforeResult.routingServer.close() 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /packages/interop/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) 4 | -------------------------------------------------------------------------------- /packages/interop/LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /packages/interop/README.md: -------------------------------------------------------------------------------- 1 |

<p align="center"> 2 | <a href="https://github.com/ipfs/helia" title="Helia"> 3 | <img src="https://raw.githubusercontent.com/ipfs/helia/main/assets/helia.png" alt="Helia logo" width="300" /> 4 | </a> 5 | </p>

6 | 7 | # @helia/verified-fetch-interop 8 | 9 | [![ipfs.tech](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](https://ipfs.tech) 10 | [![Discuss](https://img.shields.io/discourse/https/discuss.ipfs.tech/posts.svg?style=flat-square)](https://discuss.ipfs.tech) 11 | [![codecov](https://img.shields.io/codecov/c/github/ipfs/helia-verified-fetch.svg?style=flat-square)](https://codecov.io/gh/ipfs/helia-verified-fetch) 12 | [![CI](https://img.shields.io/github/actions/workflow/status/ipfs/helia-verified-fetch/js-test-and-release.yml?branch=main\&style=flat-square)](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain) 13 | 14 | > Interop tests for @helia/verified-fetch 15 | 16 | # About 17 | 18 | Runs interop tests between Helia and Kubo. 19 | 20 | ## Example - Testing a new Kubo release 21 | 22 | ```console 23 | $ npm i @helia/verified-fetch-interop 24 | $ KUBO_BINARY=/path/to/kubo helia-verified-fetch-interop 25 | ``` 26 | 27 | # Install 28 | 29 | ```console 30 | $ npm i @helia/verified-fetch-interop 31 | ``` 32 | 33 | ## Browser `<script>` tag 34 | 35 | Loading this module through a script tag will make its exports available as `HeliaVerifiedFetchInterop` in the global namespace. 36 | 37 | ```html 38 | <script src="https://unpkg.com/@helia/verified-fetch-interop/dist/index.min.js"></script> 39 | ``` 40 | 41 | # License 42 | 43 | Licensed under either of 44 | 45 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/interop/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>) 46 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/interop/LICENSE-MIT) / <http://opensource.org/licenses/MIT>) 47 | 48 | # Contribute 49 | 50 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues). 51 | 52 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general. 53 | 54 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). 55 | 56 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
57 | 58 | [![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) 59 | -------------------------------------------------------------------------------- /packages/interop/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@helia/verified-fetch-interop", 3 | "version": "1.26.2", 4 | "description": "Interop tests for @helia/verified-fetch", 5 | "license": "Apache-2.0 OR MIT", 6 | "homepage": "https://github.com/ipfs/helia-verified-fetch/tree/main/packages/interop#readme", 7 | "repository": { 8 | "type": "git", 9 | "url": "git+https://github.com/ipfs/helia-verified-fetch.git" 10 | }, 11 | "bugs": { 12 | "url": "https://github.com/ipfs/helia-verified-fetch/issues" 13 | }, 14 | "publishConfig": { 15 | "access": "public", 16 | "provenance": true 17 | }, 18 | "keywords": [ 19 | "IPFS" 20 | ], 21 | "bin": { 22 | "helia-verified-fetch-interop": "./dist/src/bin.js" 23 | }, 24 | "type": "module", 25 | "types": "./dist/src/index.d.ts", 26 | "files": [ 27 | "src", 28 | "dist", 29 | "!dist/test", 30 | "!**/*.tsbuildinfo" 31 | ], 32 | "exports": { 33 | ".": { 34 | "types": "./dist/src/index.d.ts", 35 | "import": "./dist/src/index.js" 36 | } 37 | }, 38 | "release": { 39 | "branches": [ 40 | "main" 41 | ], 42 | "plugins": [ 43 | [ 44 | "@semantic-release/commit-analyzer", 45 | { 46 | "preset": "conventionalcommits", 47 | "releaseRules": [ 48 | { 49 | "breaking": true, 50 | "release": "major" 51 | }, 52 | { 53 | "revert": true, 54 | "release": "patch" 55 | }, 56 | { 57 | "type": "feat", 58 | "release": "minor" 59 | }, 60 | { 61 | "type": "fix", 62 | "release": "patch" 63 | }, 64 | { 65 | "type": "docs", 66 | "release": "patch" 67 | }, 68 | { 69 | "type": "test", 70 | "release": "patch" 71 | }, 72 | { 73 | "type": "deps", 74 | "release": "patch" 75 | }, 76 | { 77 | "scope": "no-release", 78 | "release": false 79 | } 80 | ] 81 | } 82 | ], 83 | [ 84 | "@semantic-release/release-notes-generator", 85 | { 86 | "preset": "conventionalcommits", 87 | "presetConfig": { 88 | "types": [ 89 | { 90 | "type": "feat", 91 | "section": "Features" 92 | }, 93 | { 94 | "type": "fix", 95 | "section": "Bug Fixes" 96 | }, 97 | { 98 | "type": "chore", 99 | "section": "Trivial Changes" 100 | }, 101 | { 102 | "type": "docs", 103 | "section": "Documentation" 104 | }, 105 | { 106 | "type": "deps", 107 | "section": "Dependencies" 108 | }, 109 | { 110 | "type": "test", 111 | "section": "Tests" 112 | } 113 | ] 114 | } 115 | } 116 | ], 117 | "@semantic-release/changelog", 118 | "@semantic-release/npm", 119 | "@semantic-release/github", 120 | [ 121 | "@semantic-release/git", 122 | { 123 | "assets": [ 124 | "CHANGELOG.md", 125 | "package.json" 126 | ] 127 | } 128 | ] 129 | ] 130 | }, 131 | "scripts": { 132 | "clean": "aegir clean", 133 | "lint": "aegir lint", 134 | "dep-check": "aegir dep-check", 135 | "doc-check": "aegir doc-check", 136 | "build": "aegir build", 137 | "test": "aegir test", 138 | "test:chrome": "aegir test -t browser --cov", 139 | "test:chrome-webworker": "aegir test -t webworker", 140 | "test:firefox": "aegir test -t browser -- --browser firefox", 141 | "test:firefox-webworker": "aegir test -t webworker -- --browser firefox", 142 | "test:node": "aegir test -t node --cov", 143 | "test:electron-main": "aegir test -t electron-main", 144 | "release": "aegir release" 145 | }, 146 | "dependencies": { 147 | "@helia/delegated-routing-v1-http-api-server": "^4.0.6", 148 | 
"@helia/verified-fetch": "^2.0.0", 149 | "aegir": "^47.0.11", 150 | "execa": "^9.5.3", 151 | "glob": "^11.0.2", 152 | "ipfsd-ctl": "^15.0.2", 153 | "kubo": "^0.34.1", 154 | "kubo-rpc-client": "^5.1.0", 155 | "magic-bytes.js": "^1.12.1", 156 | "multiformats": "^13.3.6", 157 | "sinon-ts": "^2.0.0", 158 | "wherearewe": "^2.0.1" 159 | }, 160 | "browser": { 161 | "./dist/src/fixtures/create-kubo.js": "./dist/src/fixtures/create-kubo.browser.js", 162 | "kubo": false 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /packages/interop/src/abort-handling.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { createVerifiedFetch } from '@helia/verified-fetch' 3 | import { expect } from 'aegir/chai' 4 | import type { VerifiedFetch } from '@helia/verified-fetch' 5 | 6 | describe('verified-fetch abort handling', () => { 7 | let verifiedFetch: VerifiedFetch 8 | before(async () => { 9 | if (process.env.KUBO_DIRECT_RETRIEVAL_ROUTER == null || process.env.KUBO_DIRECT_RETRIEVAL_ROUTER === '') { 10 | throw new Error('KUBO_DIRECT_RETRIEVAL_ROUTER environment variable is required') 11 | } 12 | 13 | verifiedFetch = await createVerifiedFetch({ 14 | gateways: [process.env.KUBO_DIRECT_RETRIEVAL_ROUTER], 15 | routers: [process.env.KUBO_DIRECT_RETRIEVAL_ROUTER], 16 | allowInsecure: true, 17 | allowLocal: true 18 | }) 19 | }) 20 | 21 | after(async () => { 22 | await verifiedFetch.stop() 23 | }) 24 | 25 | it('should handle aborts properly', async function () { 26 | this.timeout(2000) 27 | const controller = new AbortController() 28 | const timeout = setTimeout(() => { 29 | controller.abort() 30 | }, 70) 31 | 32 | const fetchPromise = verifiedFetch('ipfs://QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm/1 - Barrel - Part 1/1 - Barrel - Part 1 - alt.txt', { 33 | signal: controller.signal 34 | }) 35 | await expect(fetchPromise).to.eventually.be.rejected.with.property('name', 'AbortError') 36 | clearTimeout(timeout) 37 | }) 38 | }) 39 | -------------------------------------------------------------------------------- /packages/interop/src/bin.ts: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env node 2 | 3 | import { spawn } from 'node:child_process' 4 | import { dirname, resolve } from 'node:path' 5 | import { fileURLToPath } from 'node:url' 6 | 7 | // aegir should be run from `node_modules/@helia/interop` 8 | const cwd = resolve(dirname(fileURLToPath(import.meta.url)), '../../') 9 | 10 | const test = spawn('npx', ['aegir', 'test'], { 11 | cwd 12 | }) 13 | 14 | test.stdout.on('data', (data) => { 15 | process.stdout.write(data) 16 | }) 17 | 18 | test.stderr.on('data', (data) => { 19 | process.stderr.write(data) 20 | }) 21 | 22 | test.on('close', (code) => { 23 | process.exit(code ?? 0) 24 | }) 25 | -------------------------------------------------------------------------------- /packages/interop/src/direct-retrieval.spec.ts: -------------------------------------------------------------------------------- 1 | import { createVerifiedFetch } from '@helia/verified-fetch' 2 | import { expect } from 'aegir/chai' 3 | import { isNode, isBrowser } from 'wherearewe' 4 | import type { CreateVerifiedFetchInit } from '@helia/verified-fetch' 5 | 6 | /** 7 | * Currently only testing browser and node 8 | */ 9 | const describe = isNode || isBrowser ? 
global.describe : global.describe.skip 10 | 11 | describe('@helia/verified-fetch - direct retrieval', () => { 12 | let directRetrievalRouterUrl: string 13 | let createVerifiedFetchInit: CreateVerifiedFetchInit 14 | 15 | beforeEach(async () => { 16 | if (process.env.KUBO_DIRECT_RETRIEVAL_ROUTER == null || process.env.KUBO_DIRECT_RETRIEVAL_ROUTER === '') { 17 | throw new Error('KUBO_DIRECT_RETRIEVAL_ROUTER environment variable is required') 18 | } 19 | directRetrievalRouterUrl = process.env.KUBO_DIRECT_RETRIEVAL_ROUTER 20 | createVerifiedFetchInit = { 21 | gateways: [], 22 | routers: [directRetrievalRouterUrl] 23 | } 24 | if (!isNode) { 25 | createVerifiedFetchInit.libp2pConfig = { 26 | connectionGater: { 27 | denyDialMultiaddr: () => false 28 | } 29 | } 30 | } 31 | }) 32 | 33 | it('can fetch content directly from another node', async () => { 34 | const fetch = await createVerifiedFetch(createVerifiedFetchInit) 35 | 36 | const res = await fetch('ipfs://QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR/1 - Barrel - Part 1 - alt.txt') 37 | 38 | expect(res.status).to.equal(200) 39 | const body = await res.text() 40 | expect(body).to.equal('Don\'t we all.') 41 | 42 | await fetch.stop() 43 | }) 44 | }) 45 | -------------------------------------------------------------------------------- /packages/interop/src/fixtures/create-kubo.ts: -------------------------------------------------------------------------------- 1 | import { createNode } from 'ipfsd-ctl' 2 | import { path as kuboPath } from 'kubo' 3 | import { create } from 'kubo-rpc-client' 4 | import type { KuboNode } from 'ipfsd-ctl' 5 | 6 | export async function createKuboNode (repoPath = undefined): Promise<KuboNode> { 7 | return createNode({ 8 | type: 'kubo', 9 | rpc: create, 10 | bin: kuboPath(), 11 | test: true, 12 | repo: repoPath, 13 | init: { 14 | config: { 15 | Addresses: { 16 | Swarm: [ 17 | '/ip4/0.0.0.0/tcp/4001', 18 | '/ip4/0.0.0.0/tcp/4002/ws', 19 | '/ip4/0.0.0.0/udp/4001/webrtc-direct', 20 | '/ip4/0.0.0.0/udp/4001/quic-v1/webtransport', 21 | '/ip6/::/udp/4001/webrtc-direct', 22 | '/ip6/::/udp/4001/quic-v1/webtransport' 23 | ], 24 | Gateway: '/ip4/127.0.0.1/tcp/8180' 25 | }, 26 | Gateway: { 27 | NoFetch: true, 28 | ExposeRoutingAPI: true, 29 | HTTPHeaders: { 30 | 'Access-Control-Allow-Origin': ['*'], 31 | 'Access-Control-Allow-Methods': ['GET', 'POST', 'PUT', 'OPTIONS'] 32 | } 33 | } 34 | } 35 | }, 36 | args: ['--enable-pubsub-experiment', '--enable-namesys-pubsub'] 37 | }) 38 | } 39 | -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car -------------------------------------------------------------------------------- 
/packages/interop/src/fixtures/data/QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/gateway-conformance-fixtures.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/gateway-conformance-fixtures.car -------------------------------------------------------------------------------- /packages/interop/src/fixtures/data/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record -------------------------------------------------------------------------------- /packages/interop/src/fixtures/load-fixtures.ts: -------------------------------------------------------------------------------- 1 | import { basename } from 'node:path' 2 | import { $ } from 'execa' 3 | import { glob } from 'glob' 4 | import { path as kuboPath } from 'kubo' 5 | 6 | /** 7 | * Only callable from node (intended to be consumed by .aegir.js) 8 | * but the fixtures loaded by this function are also used by browser tests. 9 | */ 10 | export async function loadFixtures (IPFS_PATH = undefined): Promise { 11 | const kuboBinary = process.env.KUBO_BINARY ?? 
kuboPath() 12 | 13 | const carFiles = await glob('**/fixtures/data/*.car', { cwd: process.cwd() }) 14 | const ipnsRecordFiles = await glob('**/fixtures/data/*.ipns-record', { cwd: process.cwd() }) 15 | 16 | await Promise.allSettled(carFiles.map(async (carFile) => { 17 | await $({ env: { IPFS_PATH } })`${kuboBinary} dag import --pin-roots=false --offline ${carFile}` 18 | })) 19 | 20 | for (const ipnsRecord of ipnsRecordFiles) { 21 | const key = basename(ipnsRecord, '.ipns-record').split('_')[0] 22 | await $({ env: { IPFS_PATH } })`${kuboBinary} routing put --allow-offline /ipns/${key} ${ipnsRecord}` 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /packages/interop/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @packageDocumentation 3 | * 4 | * Runs interop tests between @helia/verified-fetch and Kubo. 5 | * 6 | * @example Testing a new Kubo release 7 | * 8 | * ```console 9 | * $ npm i @helia/verified-fetch-interop 10 | * $ KUBO_BINARY=/path/to/kubo helia-verified-fetch-interop 11 | * ``` 12 | */ 13 | 14 | export {} 15 | -------------------------------------------------------------------------------- /packages/interop/src/ipns.spec.ts: -------------------------------------------------------------------------------- 1 | import { createVerifiedFetch } from '@helia/verified-fetch' 2 | import { expect } from 'aegir/chai' 3 | import type { VerifiedFetch } from '@helia/verified-fetch' 4 | 5 | describe('@helia/verified-fetch - ipns', () => { 6 | let verifiedFetch: VerifiedFetch 7 | 8 | before(async () => { 9 | verifiedFetch = await createVerifiedFetch({ 10 | gateways: ['http://127.0.0.1:8180'], 11 | routers: ['http://127.0.0.1:8180'], 12 | allowInsecure: true, 13 | allowLocal: true 14 | }) 15 | }) 16 | 17 | after(async () => { 18 | await verifiedFetch.stop() 19 | }) 20 | 21 | it('should be able to load /ipns/', async () => { 22 | // ensure the key is being returned by the ipfs gateway itself 23 | const kuboResponse = await fetch('http://127.0.0.1:8180/ipns/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam') 24 | const kuboResponseBody = await kuboResponse.text() 25 | expect(kuboResponseBody).to.equal('hello\n') 26 | 27 | const res = await verifiedFetch('/ipns/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam') 28 | expect(res.status).to.equal(200) 29 | const body = await res.text() 30 | expect(body).to.equal('hello\n') 31 | }) 32 | }) 33 | -------------------------------------------------------------------------------- /packages/interop/src/json.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { createVerifiedFetch } from '@helia/verified-fetch' 3 | import { expect } from 'aegir/chai' 4 | import { CID } from 'multiformats/cid' 5 | 6 | describe('@helia/verified-fetch - json', () => { 7 | describe('unixfs - multi-block', () => { 8 | let verifiedFetch: Awaited> 9 | 10 | before(async () => { 11 | // As of 2024-01-18, https://cloudflare-ipfs.com/ipns/tokens.uniswap.org resolves to: 12 | // root: QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr 13 | // child1: QmNik5N4ryNwzzXYq5hCYKGcRjAf9QtigxtiJh9o8aXXbG // partial JSON 14 | // child2: QmWNBJX6fZyNTLWNYBHxAHpBctCP43R2zeqV2G8uavqFZn // partial JSON 15 | verifiedFetch = await createVerifiedFetch({ 16 | gateways: ['http://127.0.0.1:8180'], 17 | routers: ['http://127.0.0.1:8180'], 18 | allowInsecure: true, 19 | allowLocal: true 20 | }) 21 | }) 
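// neither child block is valid JSON on its own: verifiedFetch streams the UnixFS leaves in
// order, so the response body is the concatenation of the children and resp.json() in the
// tests below can parse the complete document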
22 | 23 | after(async () => { 24 | await verifiedFetch.stop() 25 | }) 26 | 27 | it('handles UnixFS-chunked JSON file', async () => { 28 | const resp = await verifiedFetch(CID.parse('QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr'), { 29 | allowLocal: true, 30 | allowInsecure: true 31 | }) 32 | expect(resp).to.be.ok() 33 | const jsonObj = await resp.json() 34 | expect(jsonObj).to.be.ok() 35 | expect(jsonObj).to.have.property('name').equal('Uniswap Labs Default') 36 | expect(jsonObj).to.have.property('timestamp').equal('2023-12-13T18:25:25.830Z') 37 | expect(jsonObj).to.have.property('version').to.deep.equal({ major: 11, minor: 11, patch: 0 }) 38 | expect(jsonObj).to.have.property('tags') 39 | expect(jsonObj).to.have.property('logoURI').equal('ipfs://QmNa8mQkrNKp1WEEeGjFezDmDeodkWRevGFN8JCV7b4Xir') 40 | expect(jsonObj).to.have.property('keywords').to.deep.equal(['uniswap', 'default']) 41 | expect(jsonObj.tokens).to.be.an('array').of.length(767) 42 | }) 43 | 44 | it('handles hamt-sharded directory with json file', async () => { 45 | const resp = await verifiedFetch('ipfs://bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna/371', { 46 | allowLocal: true, 47 | allowInsecure: true 48 | }) 49 | expect(resp).to.be.ok() 50 | expect(resp.status).to.equal(200) 51 | expect(resp.headers.get('content-type')).to.equal('application/json') 52 | const jsonObj = await resp.json() 53 | expect(jsonObj).to.be.ok() 54 | expect(jsonObj).to.have.property('name').equal('Pudgy Penguin #371') 55 | }) 56 | }) 57 | }) 58 | -------------------------------------------------------------------------------- /packages/interop/src/websites.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { createVerifiedFetch } from '@helia/verified-fetch' 3 | import { expect } from 'aegir/chai' 4 | 5 | describe('@helia/verified-fetch - websites', () => { 6 | describe('helia-identify.on.fleek.co', () => { 7 | let verifiedFetch: Awaited> 8 | 9 | before(async () => { 10 | // 2024-01-22 CID for _dnslink.helia-identify.on.fleek.co 11 | verifiedFetch = await createVerifiedFetch({ 12 | gateways: ['http://127.0.0.1:8180'], 13 | routers: ['http://127.0.0.1:8180'], 14 | allowInsecure: true, 15 | allowLocal: true 16 | }) 17 | }) 18 | 19 | after(async () => { 20 | await verifiedFetch.stop() 21 | }) 22 | 23 | it('loads index.html when passed helia-identify.on.fleek.co root CID', async () => { 24 | const resp = await verifiedFetch('ipfs://QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv', { 25 | allowLocal: true, 26 | allowInsecure: true 27 | }) 28 | expect(resp).to.be.ok() 29 | const html = await resp.text() 30 | expect(html).to.be.ok() 31 | expect(html).to.include('Run Identify on a remote node with Helia') 32 | }) 33 | 34 | it('loads helia-identify.on.fleek.co index.html directly ', async () => { 35 | const resp = await verifiedFetch('ipfs://QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv/index.html', { 36 | allowLocal: true, 37 | allowInsecure: true 38 | }) 39 | expect(resp).to.be.ok() 40 | const html = await resp.text() 41 | expect(html).to.be.ok() 42 | expect(html).to.include('Run Identify on a remote node with Helia') 43 | }) 44 | }) 45 | 46 | /** 47 | * 48 | * Created on 2024-01-23. 
/ipns/blog.libp2p.io/index.html resolved to QmVZNGy6SPvUbvQCXXaGDdp8kvfJm9MMozjU12dyzH6hKf 49 | * 50 | * ```shell 51 | * mkdir fake-blog.libp2p.io 52 | * npx kubo@0.25.0 cat '/ipfs/QmVZNGy6SPvUbvQCXXaGDdp8kvfJm9MMozjU12dyzH6hKf' > fake-blog.libp2p.io/index.html 53 | * npx kubo@0.25.0 add -r fake-blog.libp2p.io 54 | * npx kubo@0.25.0 dag export QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw > QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car 55 | * ``` 56 | */ 57 | describe('fake blog.libp2p.io', () => { 58 | let verifiedFetch: Awaited> 59 | 60 | before(async () => { 61 | verifiedFetch = await createVerifiedFetch({ 62 | gateways: ['http://127.0.0.1:8180'], 63 | routers: ['http://127.0.0.1:8180'], 64 | allowInsecure: true, 65 | allowLocal: true 66 | }) 67 | }) 68 | 69 | after(async () => { 70 | await verifiedFetch.stop() 71 | }) 72 | 73 | it('loads index.html when passed fake-blog.libp2p.io root CID', async () => { 74 | const resp = await verifiedFetch('ipfs://QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw', { 75 | allowLocal: true, 76 | allowInsecure: true 77 | }) 78 | expect(resp).to.be.ok() 79 | const html = await resp.text() 80 | expect(html).to.be.ok() 81 | expect(html).to.include('Home | libp2p Blog & News') 82 | expect(html).to.include('') 83 | }) 84 | }) 85 | }) 86 | -------------------------------------------------------------------------------- /packages/interop/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "aegir/src/config/tsconfig.aegir.json", 3 | "compilerOptions": { 4 | "outDir": "dist" 5 | }, 6 | "include": [ 7 | "src", 8 | "test" 9 | ], 10 | "references": [ 11 | { 12 | "path": "../verified-fetch" 13 | } 14 | ] 15 | } 16 | -------------------------------------------------------------------------------- /packages/interop/typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "entryPoints": [ 3 | "./src/index.ts" 4 | ] 5 | } 6 | -------------------------------------------------------------------------------- /packages/verified-fetch/.aegir.js: -------------------------------------------------------------------------------- 1 | /** @type {import('aegir').PartialOptions} */ 2 | const options = { 3 | build: { 4 | bundlesizeMax: '355KB' 5 | } 6 | } 7 | 8 | export default options 9 | -------------------------------------------------------------------------------- /packages/verified-fetch/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md) 4 | -------------------------------------------------------------------------------- /packages/verified-fetch/LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial 
portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/errors.ts: -------------------------------------------------------------------------------- 1 | export class InvalidRangeError extends Error { 2 | static name = 'InvalidRangeError' 3 | 4 | constructor (message = 'Invalid range request') { 5 | super(message) 6 | this.name = 'InvalidRangeError' 7 | } 8 | } 9 | 10 | export class NoContentError extends Error { 11 | static name = 'NoContentError' 12 | 13 | constructor (message = 'No content found') { 14 | super(message) 15 | this.name = 'NoContentError' 16 | } 17 | } 18 | 19 | export class SubdomainNotSupportedError extends Error { 20 | static name = 'SubdomainNotSupportedError' 21 | 22 | constructor (message = 'Subdomain not supported') { 23 | super(message) 24 | this.name = 'SubdomainNotSupportedError' 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/errors.ts: -------------------------------------------------------------------------------- 1 | import type { FatalPluginErrorOptions, PluginErrorOptions } from './types.js' 2 | 3 | /** 4 | * If a plugin encounters an error, it should throw an instance of this class. 5 | */ 6 | export class PluginError extends Error { 7 | public name = 'PluginError' 8 | public code: string 9 | public fatal: boolean 10 | public details?: Record<string, any> 11 | public response?: any 12 | 13 | constructor (code: string, message: string, options?: PluginErrorOptions) { 14 | super(message) 15 | this.code = code 16 | this.fatal = options?.fatal ?? false 17 | this.details = options?.details 18 | this.response = options?.response 19 | } 20 | } 21 | 22 | /** 23 | * If a plugin encounters a fatal error and verified-fetch should not continue processing the request, it should throw 24 | * an instance of this class. 25 | * 26 | * Note that you should be very careful when throwing a `PluginFatalError`, as it will stop the request from being 27 | * processed further. If you do not have a response to return to the client, you should consider throwing a 28 | * `PluginError` instead. 29 | */ 30 | export class PluginFatalError extends PluginError { 31 | public name = 'PluginFatalError' 32 | 33 | constructor (code: string, message: string, options: FatalPluginErrorOptions) { 34 | super(code, message, { ...options, fatal: true }) 35 | this.name = 'PluginFatalError' 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This file is the entry into all things we export from the `src/plugins` directory. 
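 *
 * Plugins extend `BasePlugin` and implement `id`, `canHandle` and `handle`. A minimal sketch
 * (the plugin id, query matching and response below are hypothetical; the imports assume a
 * file that lives alongside this one):
 *
 * ```typescript
 * import { BasePlugin } from './plugin-base.js'
 * import type { PluginContext } from './types.js'
 *
 * class HelloPlugin extends BasePlugin {
 *   readonly id = 'hello-plugin'
 *
 *   // only claim requests that explicitly ask for this (made-up) format
 *   canHandle (context: PluginContext): boolean {
 *     return context.query.format === 'hello'
 *   }
 *
 *   async handle (context: PluginContext): Promise<Response | null> {
 *     // returning null instead would hand the request to the next plugin
 *     return new Response('hello', { status: 200 })
 *   }
 * }
 * ```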
3 | */ 4 | 5 | export { PluginError, PluginFatalError } from './errors.js' 6 | export { BasePlugin } from './plugin-base.js' 7 | export type { PluginOptions, PluginContext, VerifiedFetchPluginFactory } from './types.js' 8 | export * from './plugins.js' 9 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-base.ts: -------------------------------------------------------------------------------- 1 | import type { VerifiedFetchPlugin, PluginContext, PluginOptions } from './types.js' 2 | import type { Logger } from '@libp2p/interface' 3 | 4 | /** 5 | * Base class for verified-fetch plugins. This class provides a basic implementation of the `FetchHandlerPlugin` 6 | * interface. 7 | * 8 | * Subclasses must implement the `id` property and the `canHandle` and `handle` methods. 9 | * Subclasses may override the `codes` and `log` properties. 10 | * 11 | * If your plugin adds/edits the context supplied in `handle`, you should increment the `context.modified` property. 12 | */ 13 | export abstract class BasePlugin implements VerifiedFetchPlugin { 14 | readonly codes: number[] = [] 15 | readonly pluginOptions: PluginOptions 16 | abstract readonly id: string 17 | protected _log?: Logger 18 | 19 | get log (): Logger { 20 | // instantiate the logger lazily because it depends on the id, which is not set until after the constructor is called 21 | if (this._log == null) { 22 | this._log = this.pluginOptions.logger.forComponent(this.id) 23 | } 24 | return this._log 25 | } 26 | 27 | constructor (options: PluginOptions) { 28 | this.pluginOptions = options 29 | } 30 | 31 | abstract canHandle (context: PluginContext): boolean 32 | 33 | abstract handle (context: PluginContext): Promise 34 | } 35 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-byte-range-context.ts: -------------------------------------------------------------------------------- 1 | import { ByteRangeContext } from '../utils/byte-range-context.js' 2 | import { badRangeResponse } from '../utils/responses.js' 3 | import { BasePlugin } from './plugin-base.js' 4 | import type { PluginContext } from './types.js' 5 | 6 | /** 7 | * This plugin simply adds the ByteRangeContext to the PluginContext. 8 | */ 9 | export class ByteRangeContextPlugin extends BasePlugin { 10 | readonly id = 'byte-range-context-plugin' 11 | 12 | /** 13 | * Return false if the ByteRangeContext has already been set, otherwise return true. 14 | */ 15 | canHandle (context: PluginContext): boolean { 16 | return context.byteRangeContext == null 17 | } 18 | 19 | async handle (context: PluginContext): Promise { 20 | context.byteRangeContext = new ByteRangeContext(this.pluginOptions.logger, context.options?.headers) 21 | context.modified++ 22 | 23 | if (context.byteRangeContext.isRangeRequest && !context.byteRangeContext.isValidRangeRequest) { 24 | // invalid range request.. 
fail 25 | return badRangeResponse(context.resource) 26 | } 27 | 28 | return null 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-car.ts: -------------------------------------------------------------------------------- 1 | import { BlockExporter, car, CIDPath, SubgraphExporter, UnixFSExporter } from '@helia/car' 2 | import { CarWriter } from '@ipld/car' 3 | import { code as dagPbCode } from '@ipld/dag-pb' 4 | import toBrowserReadableStream from 'it-to-browser-readablestream' 5 | import { okRangeResponse } from '../utils/responses.js' 6 | import { BasePlugin } from './plugin-base.js' 7 | import type { PluginContext } from './types.js' 8 | import type { ExportCarOptions } from '@helia/car' 9 | 10 | function getFilename ({ cid, ipfsPath, query }: Pick): string { 11 | if (query.filename != null) { 12 | return query.filename 13 | } 14 | 15 | // convert context.ipfsPath to a filename. replace all / with _, replace prefix protocol with empty string 16 | const filename = ipfsPath.replace(/\/ipfs\//, '').replace(/\/ipns\//, '').replace(/\//g, '_') 17 | 18 | return `${filename}.car` 19 | } 20 | 21 | // https://specs.ipfs.tech/http-gateways/trustless-gateway/#dag-scope-request-query-parameter 22 | type DagScope = 'all' | 'entity' | 'block' 23 | function getDagScope ({ query }: Pick): DagScope | null { 24 | const dagScope = query['dag-scope'] 25 | if (dagScope === 'all' || dagScope === 'entity' || dagScope === 'block') { 26 | return dagScope 27 | } 28 | return 'all' 29 | } 30 | 31 | /** 32 | * Accepts a `CID` and returns a `Response` with a body stream that is a CAR 33 | * of the `DAG` referenced by the `CID`. 34 | */ 35 | export class CarPlugin extends BasePlugin { 36 | readonly id = 'car-plugin' 37 | 38 | canHandle (context: PluginContext): boolean { 39 | this.log('checking if we can handle %c with accept %s', context.cid, context.accept) 40 | if (context.byteRangeContext == null) { 41 | return false 42 | } 43 | if (context.pathDetails == null) { 44 | return false 45 | } 46 | 47 | return context.accept?.startsWith('application/vnd.ipld.car') === true || context.query.format === 'car' // application/vnd.ipld.car 48 | } 49 | 50 | async handle (context: PluginContext & Required>): Promise { 51 | const { options, pathDetails, cid } = context 52 | if (pathDetails == null) { 53 | throw new Error('attempted to handle request for car with no path details') 54 | } 55 | const { getBlockstore, helia } = this.pluginOptions 56 | context.reqFormat = 'car' 57 | context.query.download = true 58 | context.query.filename = getFilename(context) 59 | const blockstore = getBlockstore(cid, context.resource, options?.session ?? true, options) 60 | 61 | const c = car({ 62 | blockstore, 63 | getCodec: helia.getCodec, 64 | logger: helia.logger 65 | }) 66 | const carExportOptions: ExportCarOptions = { 67 | ...options, 68 | traversal: new CIDPath(pathDetails.ipfsRoots) 69 | } 70 | const dagScope = getDagScope(context) 71 | // root should be the terminal element if it exists, otherwise the root cid.. because of this, we can't use the @helia/car stream() method. 72 | const root = pathDetails.terminalElement.cid ?? 
cid 73 | if (dagScope === 'block') { 74 | carExportOptions.exporter = new BlockExporter() 75 | } else if (dagScope === 'entity') { 76 | // if its unixFS, we need to enumerate a directory, or get all blocks for the entity, otherwise, use blockExporter 77 | if (root.code === dagPbCode) { 78 | carExportOptions.exporter = new UnixFSExporter() 79 | } else { 80 | carExportOptions.exporter = new BlockExporter() 81 | } 82 | } else { 83 | carExportOptions.exporter = new SubgraphExporter() 84 | } 85 | const { writer, out } = CarWriter.create(root) 86 | const iter = async function * (): AsyncIterable { 87 | for await (const buf of out) { 88 | yield buf 89 | } 90 | } 91 | 92 | // the root passed to export should be the root CID of the DAG, not the terminal element. 93 | c.export(cid, writer, carExportOptions) 94 | .catch((err) => { 95 | this.log.error('error exporting car - %e', err) 96 | }) 97 | // export will close the writer when it's done, no finally needed. 98 | 99 | context.byteRangeContext.setBody(toBrowserReadableStream(iter())) 100 | 101 | const response = okRangeResponse(context.resource, context.byteRangeContext.getBody('application/vnd.ipld.car; version=1'), { byteRangeContext: context.byteRangeContext, log: this.log }) 102 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? 'application/vnd.ipld.car; version=1') 103 | 104 | return response 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-dag-cbor.ts: -------------------------------------------------------------------------------- 1 | import * as ipldDagCbor from '@ipld/dag-cbor' 2 | import * as ipldDagJson from '@ipld/dag-json' 3 | import { dagCborToSafeJSON } from '../utils/dag-cbor-to-safe-json.js' 4 | import { setIpfsRoots } from '../utils/response-headers.js' 5 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js' 6 | import { isObjectNode } from '../utils/walk-path.js' 7 | import { BasePlugin } from './plugin-base.js' 8 | import type { PluginContext } from './types.js' 9 | import type { ObjectNode } from 'ipfs-unixfs-exporter' 10 | 11 | /** 12 | * Handles `dag-cbor` content, including requests with Accept: `application/vnd.ipld.dag-json` and `application/json`. 13 | */ 14 | export class DagCborPlugin extends BasePlugin { 15 | readonly id = 'dag-cbor-plugin' 16 | readonly codes = [ipldDagCbor.code] 17 | 18 | canHandle ({ cid, accept, pathDetails, byteRangeContext }: PluginContext): boolean { 19 | this.log('checking if we can handle %c with accept %s', cid, accept) 20 | if (pathDetails == null) { 21 | return false 22 | } 23 | if (!isObjectNode(pathDetails.terminalElement)) { 24 | return false 25 | } 26 | if (cid.code !== ipldDagCbor.code) { 27 | return false 28 | } 29 | if (byteRangeContext == null) { 30 | return false 31 | } 32 | 33 | return isObjectNode(pathDetails.terminalElement) 34 | } 35 | 36 | async handle (context: PluginContext & Required>): Promise { 37 | const { cid, path, resource, accept, pathDetails } = context 38 | 39 | this.log.trace('fetching %c/%s', cid, path) 40 | 41 | const ipfsRoots = pathDetails.ipfsRoots 42 | const terminalElement = pathDetails.terminalElement as ObjectNode // checked in canHandle fn. 
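// summary of the Accept-based branching below:
//   application/octet-stream, application/vnd.ipld.dag-cbor, application/cbor: return the raw block bytes
//   application/vnd.ipld.dag-json: decode the block as DAG-CBOR and re-encode it as DAG-JSON
//   anything else: try a JSON-safe conversion; on failure return 406 for an explicit
//   "Accept: application/json", otherwise fall back to the raw block bytes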
43 | 44 | const block = terminalElement.node 45 | 46 | let body: string | Uint8Array 47 | 48 | if (accept === 'application/octet-stream' || accept === 'application/vnd.ipld.dag-cbor' || accept === 'application/cbor') { 49 | // skip decoding 50 | body = block 51 | } else if (accept === 'application/vnd.ipld.dag-json') { 52 | try { 53 | // if vnd.ipld.dag-json has been specified, convert to the format - note 54 | // that this supports more data types than regular JSON, the content-type 55 | // response header is set so the user knows to process it differently 56 | const obj = ipldDagCbor.decode(block) 57 | body = ipldDagJson.encode(obj) 58 | } catch (err) { 59 | this.log.error('could not transform %c to application/vnd.ipld.dag-json', err) 60 | return notAcceptableResponse(resource) 61 | } 62 | } else { 63 | try { 64 | body = dagCborToSafeJSON(block) 65 | } catch (err) { 66 | if (accept === 'application/json') { 67 | this.log('could not decode DAG-CBOR as JSON-safe, but the client sent "Accept: application/json"', err) 68 | 69 | return notAcceptableResponse(resource) 70 | } 71 | 72 | this.log('could not decode DAG-CBOR as JSON-safe, falling back to `application/octet-stream`', err) 73 | body = block 74 | } 75 | } 76 | 77 | context.byteRangeContext.setBody(body) 78 | 79 | const responseContentType = accept ?? (body instanceof Uint8Array ? 'application/octet-stream' : 'application/json') 80 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(responseContentType), { byteRangeContext: context.byteRangeContext, log: this.log }) 81 | 82 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? responseContentType) 83 | 84 | this.log.trace('setting content type to "%s"', context.byteRangeContext.getContentType() ?? responseContentType) 85 | setIpfsRoots(response, ipfsRoots) 86 | 87 | return response 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-dag-walk.ts: -------------------------------------------------------------------------------- 1 | import { code as dagCborCode } from '@ipld/dag-cbor' 2 | import { code as dagPbCode } from '@ipld/dag-pb' 3 | import { handlePathWalking } from '../utils/walk-path.js' 4 | import { BasePlugin } from './plugin-base.js' 5 | import type { PluginContext } from './types.js' 6 | 7 | /** 8 | * This plugin should almost always run first because it's going to handle path walking if needed, and will only say it can handle 9 | * the request if path walking is possible (path is not empty, terminalCid is unknown, and the path has not been walked yet). 10 | * 11 | * Once this plugin has run, the PluginContext will be updated and then this plugin will return false for canHandle, so it won't run again. 12 | */ 13 | export class DagWalkPlugin extends BasePlugin { 14 | readonly id = 'dag-walk-plugin' 15 | /** 16 | * Return false if the path has already been walked, otherwise return true if the CID is encoded with a codec that supports pathing. 
17 | */ 18 | canHandle (context: PluginContext): boolean { 19 | this.log('checking if we can handle %c with accept %s', context.cid, context.accept) 20 | const { pathDetails, cid } = context 21 | if (pathDetails != null) { 22 | // path has already been walked 23 | return false 24 | } 25 | 26 | return (cid.code === dagPbCode || cid.code === dagCborCode) 27 | } 28 | 29 | async handle (context: PluginContext): Promise { 30 | const { cid, resource, options, withServerTiming = false } = context 31 | const { getBlockstore, handleServerTiming } = this.pluginOptions 32 | const blockstore = getBlockstore(cid, resource, options?.session ?? true, options) 33 | 34 | // TODO: migrate handlePathWalking into this plugin 35 | const pathDetails = await handleServerTiming('path-walking', '', async () => handlePathWalking({ ...context, blockstore, log: this.log }), withServerTiming) 36 | 37 | if (pathDetails instanceof Response) { 38 | this.log.trace('path walking failed') 39 | 40 | if (pathDetails.status === 404) { 41 | // invalid or incorrect path.. we walked the path but nothing is there 42 | // send the 404 response 43 | return pathDetails 44 | } 45 | 46 | // some other error walking the path (codec doesn't support pathing, etc..), let the next plugin try to handle it 47 | return null 48 | } 49 | 50 | context.modified++ 51 | context.pathDetails = pathDetails 52 | 53 | return null 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-dir-index-html.ts: -------------------------------------------------------------------------------- 1 | import { code as dagPbCode } from '@ipld/dag-pb' 2 | import { base32 } from 'multiformats/bases/base32' 3 | import { sha256 } from 'multiformats/hashes/sha2' 4 | import { dirIndexHtml } from '../utils/dir-index-html.js' 5 | import { getETag } from '../utils/get-e-tag.js' 6 | import { getIpfsRoots } from '../utils/response-headers.js' 7 | import { okRangeResponse } from '../utils/responses.js' 8 | import { BasePlugin } from './plugin-base.js' 9 | import type { PluginContext, VerifiedFetchPluginFactory } from './types.js' 10 | import type { UnixFSEntry } from 'ipfs-unixfs-exporter' 11 | 12 | /** 13 | * Converts a list of directory entries into a small hash that can be used in the etag header. 
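 * For example, hypothetical entries `a.txt` (CID `bafyA`) and `b.txt` (CID `bafyB`) are
 * reduced to the string `a.txtbafyAb.txtbafyB`, which is then sha256-hashed and
 * base32-encoded.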
14 | * 15 | * @see https://github.com/ipfs/boxo/blob/dc60fe747c375c631a92fcfd6c7456f44a760d24/gateway/assets/assets.go#L84 16 | * @see https://github.com/ipfs/boxo/blob/dc60fe747c375c631a92fcfd6c7456f44a760d24/gateway/handler_unixfs_dir.go#L233-L235 17 | */ 18 | async function getAssetHash (directoryEntries: UnixFSEntry[]): Promise { 19 | const entryDetails = directoryEntries.reduce((acc, entry) => { 20 | return `${acc}${entry.name}${entry.cid.toString()}` 21 | }, '') 22 | const hashBytes = await sha256.encode(new TextEncoder().encode(entryDetails)) 23 | return base32.encode(hashBytes) 24 | } 25 | 26 | export class DirIndexHtmlPlugin extends BasePlugin { 27 | readonly id = 'dir-index-html-plugin' 28 | readonly codes = [dagPbCode] 29 | canHandle (context: PluginContext): boolean { 30 | const { cid, pathDetails, directoryEntries } = context 31 | if (pathDetails == null) { 32 | return false 33 | } 34 | if (pathDetails.terminalElement?.type !== 'directory') { 35 | return false 36 | } 37 | 38 | if (directoryEntries == null || directoryEntries.length === 0) { 39 | return false 40 | } 41 | 42 | return cid.code === dagPbCode 43 | } 44 | 45 | async handle (context: PluginContext & Required>): Promise { 46 | const { resource, pathDetails, directoryEntries } = context 47 | 48 | const { terminalElement, ipfsRoots } = pathDetails 49 | 50 | const gatewayURL = resource 51 | const htmlResponse = dirIndexHtml(terminalElement, directoryEntries, { gatewayURL, log: this.log }) 52 | 53 | context.byteRangeContext.setBody(htmlResponse) 54 | 55 | const etagPrefix = `DirIndex-${await getAssetHash(directoryEntries)}_CID-` 56 | 57 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('text/html'), { byteRangeContext: context.byteRangeContext, log: this.log }, { 58 | headers: { 59 | 'Content-Type': context.byteRangeContext.getContentType() ?? 'text/html', 60 | // see https://github.com/ipfs/gateway-conformance/pull/219 61 | 'Cache-Control': 'public, max-age=604800, stale-while-revalidate=2678400', 62 | 'X-Ipfs-Roots': getIpfsRoots(ipfsRoots), 63 | // e.g. DirIndex-_CID- 64 | Etag: getETag({ cid: terminalElement.cid, reqFormat: context.reqFormat, contentPrefix: etagPrefix }) 65 | } 66 | }) 67 | 68 | return response 69 | } 70 | } 71 | 72 | export const dirIndexHtmlPluginFactory: VerifiedFetchPluginFactory = (opts) => new DirIndexHtmlPlugin(opts) 73 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-ipns-record.ts: -------------------------------------------------------------------------------- 1 | import { Record as DHTRecord } from '@libp2p/kad-dht' 2 | import { Key } from 'interface-datastore' 3 | import { concat as uint8ArrayConcat } from 'uint8arrays/concat' 4 | import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string' 5 | import { toString as uint8ArrayToString } from 'uint8arrays/to-string' 6 | import { getPeerIdFromString } from '../utils/get-peer-id-from-string.js' 7 | import { badRequestResponse, okRangeResponse } from '../utils/responses.js' 8 | import { PluginFatalError } from './errors.js' 9 | import { BasePlugin } from './plugin-base.js' 10 | import type { PluginContext } from './types.js' 11 | import type { PeerId } from '@libp2p/interface' 12 | 13 | /** 14 | * Accepts an `ipns://...`, `https?://.ipns.`, or `https?:///ipns/...` URL as a string and 15 | * returns a `Response` containing a raw IPNS record. 
16 | */ 17 | export class IpnsRecordPlugin extends BasePlugin { 18 | readonly id = 'ipns-record-plugin' 19 | readonly codes = [] 20 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean { 21 | this.log('checking if we can handle %c with accept %s', cid, accept) 22 | if (byteRangeContext == null) { 23 | return false 24 | } 25 | 26 | return accept === 'application/vnd.ipfs.ipns-record' || query.format === 'ipns-record' 27 | } 28 | 29 | async handle (context: PluginContext & Required>): Promise { 30 | const { resource, path, options } = context 31 | const { helia } = this.pluginOptions 32 | context.reqFormat = 'ipns-record' 33 | if (path !== '' || !(resource.startsWith('ipns://') || resource.includes('.ipns.') || resource.includes('/ipns/'))) { 34 | this.log.error('invalid request for IPNS name "%s" and path "%s"', resource, path) 35 | throw new PluginFatalError('ERR_INVALID_IPNS_NAME', 'Invalid IPNS name', { response: badRequestResponse(resource, new Error('Invalid IPNS name')) }) 36 | } 37 | let peerId: PeerId 38 | 39 | try { 40 | let peerIdString: string 41 | if (resource.startsWith('ipns://')) { 42 | peerIdString = resource.replace('ipns://', '') 43 | } else if (resource.includes('/ipns/')) { 44 | peerIdString = resource.split('/ipns/')[1].split('/')[0].split('?')[0] 45 | } else { 46 | peerIdString = resource.split('.ipns.')[0].split('://')[1] 47 | } 48 | 49 | this.log.trace('trying to parse peer id from "%s"', peerIdString) 50 | peerId = getPeerIdFromString(peerIdString) 51 | } catch (err: any) { 52 | this.log.error('could not parse peer id from IPNS url %s', resource, err) 53 | 54 | throw new PluginFatalError('ERR_NO_PEER_ID_FOUND', 'could not parse peer id from url', { response: badRequestResponse(resource, err) }) 55 | } 56 | 57 | // since this call happens after parseResource, we've already resolved the 58 | // IPNS name so a local copy should be in the helia datastore, so we can 59 | // just read it out.. 60 | const routingKey = uint8ArrayConcat([ 61 | uint8ArrayFromString('/ipns/'), 62 | peerId.toMultihash().bytes 63 | ]) 64 | const datastoreKey = new Key('/dht/record/' + uint8ArrayToString(routingKey, 'base32'), false) 65 | const buf = await helia.datastore.get(datastoreKey, options) 66 | const record = DHTRecord.deserialize(buf) 67 | 68 | context.byteRangeContext.setBody(record.value) 69 | 70 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('application/vnd.ipfs.ipns-record'), { byteRangeContext: context.byteRangeContext, log: this.log }) 71 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? 'application/vnd.ipfs.ipns-record') 72 | 73 | return response 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-json.ts: -------------------------------------------------------------------------------- 1 | import * as ipldDagCbor from '@ipld/dag-cbor' 2 | import * as ipldDagJson from '@ipld/dag-json' 3 | import { code as jsonCode } from 'multiformats/codecs/json' 4 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js' 5 | import { BasePlugin } from './plugin-base.js' 6 | import type { PluginContext } from './types.js' 7 | 8 | /** 9 | * Handles `dag-json` content, including requests with Accept: `application/vnd.ipld.dag-cbor` and `application/cbor`. 
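 *
 * An illustrative sketch (assumes a `verifiedFetch` instance and a CID `dagJsonCid` that
 * points at dag-json data):
 *
 * ```typescript
 * const res = await verifiedFetch(`ipfs://${dagJsonCid}`, {
 *   headers: { accept: 'application/vnd.ipld.dag-cbor' }
 * })
 * // the body bytes are DAG-CBOR, re-encoded from the stored DAG-JSON block
 * const bytes = new Uint8Array(await res.arrayBuffer())
 * ```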
10 | */ 11 | export class JsonPlugin extends BasePlugin { 12 | readonly id = 'json-plugin' 13 | readonly codes = [ipldDagJson.code, jsonCode] 14 | canHandle ({ cid, accept, byteRangeContext }: PluginContext): boolean { 15 | this.log('checking if we can handle %c with accept %s', cid, accept) 16 | if (byteRangeContext == null) { 17 | return false 18 | } 19 | 20 | if (accept === 'application/vnd.ipld.dag-json' && cid.code !== ipldDagCbor.code) { 21 | // we can handle application/vnd.ipld.dag-json, but if the CID codec is ipldDagCbor, DagCborPlugin should handle it 22 | // TODO: remove the need for deny-listing cases in plugins 23 | return true 24 | } 25 | 26 | return ipldDagJson.code === cid.code || jsonCode === cid.code 27 | } 28 | 29 | async handle (context: PluginContext & Required>): Promise { 30 | const { path, resource, cid, accept, options } = context 31 | const { getBlockstore } = this.pluginOptions 32 | const session = options?.session ?? true 33 | 34 | this.log.trace('fetching %c/%s', cid, path) 35 | 36 | const terminalCid = context.pathDetails?.terminalElement.cid ?? context.cid 37 | const blockstore = getBlockstore(terminalCid, resource, session, options) 38 | const block = await blockstore.get(terminalCid, options) 39 | let body: string | Uint8Array 40 | 41 | if (accept === 'application/vnd.ipld.dag-cbor' || accept === 'application/cbor') { 42 | try { 43 | // if vnd.ipld.dag-cbor has been specified, convert to the format - note 44 | // that this supports more data types than regular JSON, the content-type 45 | // response header is set so the user knows to process it differently 46 | const obj = ipldDagJson.decode(block) 47 | body = ipldDagCbor.encode(obj) 48 | } catch (err) { 49 | this.log.error('could not transform %c to application/vnd.ipld.dag-cbor', err) 50 | return notAcceptableResponse(resource) 51 | } 52 | } else { 53 | // skip decoding 54 | body = block 55 | } 56 | 57 | let contentType: string 58 | if (accept == null) { 59 | if (ipldDagJson.code === cid.code) { 60 | contentType = 'application/vnd.ipld.dag-json' 61 | } else { 62 | contentType = 'application/json' 63 | } 64 | } else { 65 | contentType = accept.split(';')[0] 66 | } 67 | 68 | context.byteRangeContext.setBody(body) 69 | 70 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(contentType), { byteRangeContext: context.byteRangeContext, log: this.log }) 71 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? contentType) 72 | if (!context.byteRangeContext.isValidRangeRequest) { 73 | response.headers.set('content-length', body.length.toString()) 74 | } 75 | return response 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-raw.ts: -------------------------------------------------------------------------------- 1 | import { code as rawCode } from 'multiformats/codecs/raw' 2 | import { identity } from 'multiformats/hashes/identity' 3 | import { getContentType } from '../utils/get-content-type.js' 4 | import { notFoundResponse, okRangeResponse } from '../utils/responses.js' 5 | import { PluginFatalError } from './errors.js' 6 | import { BasePlugin } from './plugin-base.js' 7 | import type { PluginContext } from './types.js' 8 | 9 | /** 10 | * These are Accept header values that will cause content type sniffing to be 11 | * skipped and set to these values. 
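 * For example, a request sent with `Accept: application/vnd.ipld.raw` is answered with
 * `content-type: application/vnd.ipld.raw`, rather than a content type sniffed from the
 * bytes.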
12 | */ 13 | const RAW_HEADERS = [ 14 | 'application/vnd.ipld.dag-json', 15 | 'application/vnd.ipld.raw', 16 | 'application/octet-stream' 17 | ] 18 | 19 | /** 20 | * If the user has specified an `Accept` header and it's in our list of 21 | * allowable "raw" format headers, use that instead of detecting the content 22 | * type. This prevents the user from receiving something different when they 23 | * signal that they want to `Accept` a specific mime type. 24 | */ 25 | function getOverriddenRawContentType ({ headers, accept }: { headers?: HeadersInit, accept?: string }): string | undefined { 26 | // accept has already been resolved by getResolvedAcceptHeader; if we have it, use it. 27 | const acceptHeader = accept ?? new Headers(headers).get('accept') ?? '' 28 | 29 | // e.g. "Accept: text/html, application/xhtml+xml, application/xml;q=0.9, image/webp, */*;q=0.8" 30 | const acceptHeaders = acceptHeader.split(',') 31 | .map(s => s.split(';')[0]) 32 | .map(s => s.trim()) 33 | 34 | for (const mimeType of acceptHeaders) { 35 | if (mimeType === '*/*') { 36 | return 37 | } 38 | 39 | if (RAW_HEADERS.includes(mimeType ?? '')) { 40 | return mimeType 41 | } 42 | } 43 | } 44 | 45 | export class RawPlugin extends BasePlugin { 46 | readonly id = 'raw-plugin' 47 | codes: number[] = [rawCode, identity.code] 48 | 49 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean { 50 | this.log('checking if we can handle %c with accept %s', cid, accept) 51 | if (byteRangeContext == null) { 52 | return false 53 | } 54 | return accept === 'application/vnd.ipld.raw' || query.format === 'raw' 55 | } 56 | 57 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> { 58 | const { path, resource, cid, accept, query, options } = context 59 | const { getBlockstore, contentTypeParser } = this.pluginOptions 60 | const session = options?.session ?? true 61 | const log = this.log 62 | 63 | if (accept === 'application/vnd.ipld.raw' || query.format === 'raw') { 64 | context.reqFormat = 'raw' 65 | context.query.download = true 66 | context.query.filename = context.query.filename ?? `${cid.toString()}.bin` 67 | log.trace('Set content disposition...') 68 | } else { 69 | log.trace('Did NOT set content disposition...') 70 | } 71 | 72 | if (path !== '' && cid.code === rawCode) { 73 | log.trace('404-ing raw codec request for %c/%s', cid, path) 74 | // throw new PluginError('ERR_RAW_PATHS_NOT_SUPPORTED', 'Raw codec does not support paths') 75 | // return notFoundResponse(resource, 'Raw codec does not support paths') 76 | throw new PluginFatalError('ERR_RAW_PATHS_NOT_SUPPORTED', 'Raw codec does not support paths', { response: notFoundResponse(resource, 'Raw codec does not support paths') }) 77 | } 78 | 79 | const terminalCid = context.pathDetails?.terminalElement.cid ??
context.cid 80 | const blockstore = getBlockstore(terminalCid, resource, session, options) 81 | const result = await blockstore.get(terminalCid, options) 82 | context.byteRangeContext.setBody(result) 83 | 84 | // if the user has specified an `Accept` header that corresponds to a raw 85 | // type, honour that header, so for example they don't request 86 | // `application/vnd.ipld.raw` but get `application/octet-stream` 87 | const contentType = await getContentType({ filename: query.filename, bytes: result, path, defaultContentType: getOverriddenRawContentType({ headers: options?.headers, accept }), contentTypeParser, log }) 88 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(contentType), { byteRangeContext: context.byteRangeContext, log }, { 89 | redirected: false 90 | }) 91 | 92 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? contentType) 93 | 94 | return response 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugin-handle-tar.ts: -------------------------------------------------------------------------------- 1 | import { code as dagPbCode } from '@ipld/dag-pb' 2 | import toBrowserReadableStream from 'it-to-browser-readablestream' 3 | import { code as rawCode } from 'multiformats/codecs/raw' 4 | import { getETag } from '../utils/get-e-tag.js' 5 | import { tarStream } from '../utils/get-tar-stream.js' 6 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js' 7 | import { BasePlugin } from './plugin-base.js' 8 | import type { PluginContext } from './types.js' 9 | 10 | /** 11 | * Accepts a UnixFS `CID` and returns a `.tar` file containing the file or 12 | * directory structure referenced by the `CID`. 13 | */ 14 | export class TarPlugin extends BasePlugin { 15 | readonly id = 'tar-plugin' 16 | readonly codes = [] 17 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean { 18 | this.log('checking if we can handle %c with accept %s', cid, accept) 19 | if (byteRangeContext == null) { 20 | return false 21 | } 22 | return accept === 'application/x-tar' || query.format === 'tar' 23 | } 24 | 25 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> { 26 | const { cid, path, resource, options, pathDetails } = context 27 | const { getBlockstore } = this.pluginOptions 28 | 29 | const terminusElement = pathDetails?.terminalElement.cid ?? cid 30 | if (terminusElement.code !== dagPbCode && terminusElement.code !== rawCode) { 31 | return notAcceptableResponse('only UnixFS data can be returned in a TAR file') 32 | } 33 | 34 | context.reqFormat = 'tar' 35 | context.query.download = true 36 | context.query.filename = context.query.filename ?? `${terminusElement.toString()}.tar` 37 | 38 | const blockstore = getBlockstore(terminusElement, resource, options?.session, options) 39 | const stream = toBrowserReadableStream(tarStream(`/ipfs/${cid}/${path}`, blockstore, options)) 40 | 41 | context.byteRangeContext.setBody(stream) 42 | 43 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('application/x-tar'), { byteRangeContext: context.byteRangeContext, log: this.log }) 44 | response.headers.set('content-type', context.byteRangeContext.getContentType() ??
'application/x-tar') 45 | 46 | response.headers.set('etag', getETag({ cid: terminusElement, reqFormat: context.reqFormat, weak: true })) 47 | 48 | return response 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/plugins.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Export extension (non-default) plugins here 3 | */ 4 | export { DirIndexHtmlPlugin, dirIndexHtmlPluginFactory } from './plugin-handle-dir-index-html.js' 5 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/plugins/types.ts: -------------------------------------------------------------------------------- 1 | import type { PluginError } from './errors.js' 2 | import type { VerifiedFetchInit } from '../index.js' 3 | import type { ContentTypeParser, RequestFormatShorthand } from '../types.js' 4 | import type { ByteRangeContext } from '../utils/byte-range-context.js' 5 | import type { ParsedUrlStringResults } from '../utils/parse-url-string.js' 6 | import type { PathWalkerResponse } from '../utils/walk-path.js' 7 | import type { AbortOptions, ComponentLogger, Logger } from '@libp2p/interface' 8 | import type { Helia } from 'helia' 9 | import type { Blockstore } from 'interface-blockstore' 10 | import type { UnixFSEntry } from 'ipfs-unixfs-exporter' 11 | import type { CID } from 'multiformats/cid' 12 | import type { CustomProgressEvent } from 'progress-events' 13 | 14 | /** 15 | * Contains common components and functions required by plugins to handle a request. 16 | * - Read-Only: Plugins can read these but shouldn't rewrite them. 17 | * - Persistent: Relevant even after the request completes (e.g., logging or metrics). 18 | */ 19 | export interface PluginOptions { 20 | logger: ComponentLogger 21 | getBlockstore(cid: CID, resource: string | CID, useSession?: boolean, options?: AbortOptions): Blockstore 22 | handleServerTiming<T>(name: string, description: string, fn: () => Promise<T>, withServerTiming: boolean): Promise<T> 23 | contentTypeParser?: ContentTypeParser 24 | helia: Helia 25 | } 26 | 27 | /** 28 | * Represents the ephemeral, modifiable state used by the pipeline. 29 | * - Mutable: Evolves as you walk the plugin chain. 30 | * - Shared Data: Allows plugins to communicate partial results, discovered data, or interim errors. 31 | * - Ephemeral: Typically discarded once fetch(...) completes. 32 | */ 33 | export interface PluginContext extends ParsedUrlStringResults { 34 | readonly cid: CID 35 | readonly path: string 36 | readonly resource: string 37 | readonly accept?: string 38 | /** 39 | * The last time the context was modified, so we know whether a plugin has changed it. 40 | * A plugin should increment this value whenever it modifies the context. 41 | */ 42 | modified: number 43 | withServerTiming?: boolean 44 | onProgress?(evt: CustomProgressEvent): void 45 | options?: Omit<VerifiedFetchInit, 'signal'> & AbortOptions 46 | isDirectory?: boolean 47 | directoryEntries?: UnixFSEntry[] 48 | errors?: PluginError[] 49 | reqFormat?: RequestFormatShorthand 50 | pathDetails?: PathWalkerResponse 51 | query: ParsedUrlStringResults['query'] 52 | /** 53 | * ByteRangeContext contains information about the size of the content and range requests. 54 | * This can be used to set the Content-Length header without loading the entire body.
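 *
 * A sketch of how a plugin's handle() typically uses it (mirrors the plugins
 * in this package; `resource`, `bytes` and `contentType` are illustrative):
 *
 * @example
 * ```typescript
 * // inside a plugin's handle(context):
 * context.byteRangeContext.setBody(bytes)
 * const response = okRangeResponse(resource, context.byteRangeContext.getBody(contentType), {
 *   byteRangeContext: context.byteRangeContext,
 *   log: this.log
 * })
 * ```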
55 | * 56 | * This is set by the ByteRangeContextPlugin 57 | */ 58 | byteRangeContext?: ByteRangeContext 59 | [key: string]: unknown 60 | } 61 | 62 | export interface VerifiedFetchPlugin { 63 | readonly id: string 64 | readonly codes: number[] 65 | readonly log: Logger 66 | canHandle (context: PluginContext): boolean 67 | handle (context: PluginContext): Promise 68 | } 69 | 70 | export interface VerifiedFetchPluginFactory { 71 | (options: PluginOptions): VerifiedFetchPlugin 72 | } 73 | 74 | export interface PluginErrorOptions { 75 | fatal?: boolean 76 | details?: Record 77 | response?: Response 78 | } 79 | 80 | export interface FatalPluginErrorOptions extends PluginErrorOptions { 81 | response: Response 82 | } 83 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/singleton.ts: -------------------------------------------------------------------------------- 1 | import { createVerifiedFetch } from './index.js' 2 | import type { Resource, VerifiedFetch, VerifiedFetchInit } from './index.js' 3 | 4 | let impl: VerifiedFetch | undefined 5 | 6 | export const verifiedFetch: VerifiedFetch = async function verifiedFetch (resource: Resource, options?: VerifiedFetchInit): Promise { 7 | if (impl == null) { 8 | impl = await createVerifiedFetch() 9 | } 10 | 11 | return impl(resource, options) 12 | } 13 | 14 | verifiedFetch.start = async function () { 15 | await impl?.start() 16 | } 17 | 18 | verifiedFetch.stop = async function () { 19 | await impl?.stop() 20 | } 21 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/types.ts: -------------------------------------------------------------------------------- 1 | export type RequestFormatShorthand = 'raw' | 'car' | 'tar' | 'ipns-record' | 'dag-json' | 'dag-cbor' | 'json' | 'cbor' 2 | 3 | export type SupportedBodyTypes = string | Uint8Array | ArrayBuffer | Blob | ReadableStream | null 4 | 5 | /** 6 | * A ContentTypeParser attempts to return the mime type of a given file. It 7 | * receives the first chunk of the file data and the file name, if it is 8 | * available. The function can be sync or async and if it returns/resolves to 9 | * `undefined`, `application/octet-stream` will be used. 10 | */ 11 | export interface ContentTypeParser { 12 | /** 13 | * Attempt to determine a mime type, either via of the passed bytes or the 14 | * filename if it is available. 
15 | */ 16 | (bytes: Uint8Array, fileName?: string): Promise<string | undefined> | string | undefined 17 | } 18 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/content-type-parser.ts: -------------------------------------------------------------------------------- 1 | import { logger } from '@libp2p/logger' 2 | import { fileTypeFromBuffer } from 'file-type' 3 | 4 | const log = logger('helia:verified-fetch:content-type-parser') 5 | 6 | export const defaultMimeType = 'application/octet-stream' 7 | function checkForSvg (text: string): boolean { 8 | log('checking for svg') 9 | return /^(<\?xml[^>]+>)?[^<^\w]+<svg/.test(text) 10 | } 11 | 12 | async function checkForJson (text: string): Promise<boolean> { 13 | log('checking for json') 14 | try { 15 | JSON.parse(text) 16 | return true 17 | } catch (err) { 18 | log('failed to parse as json', err) 19 | return false 20 | } 21 | } 22 | 23 | function getText (bytes: Uint8Array): string | null { 24 | log('checking for text') 25 | const decoder = new TextDecoder('utf-8', { fatal: true }) 26 | try { 27 | return decoder.decode(bytes) 28 | } catch (err) { 29 | return null 30 | } 31 | } 32 | 33 | async function checkForHtml (text: string): Promise<boolean> { 34 | log('checking for html') 35 | return /^\s*<(?:!doctype\s+html|html|head|body)\b/i.test(text) 36 | } 37 | 38 | export async function contentTypeParser (bytes: Uint8Array, fileName?: string): Promise<string> { 39 | log('contentTypeParser called for fileName: %s, byte size=%s', fileName, bytes.length) 40 | const detectedType = (await fileTypeFromBuffer(bytes))?.mime 41 | if (detectedType != null) { 42 | log('detectedType: %s', detectedType) 43 | return detectedType 44 | } 45 | log('no detectedType') 46 | 47 | if (fileName == null) { 48 | // it's likely text... no other way to determine file-type. 49 | const text = getText(bytes) 50 | if (text != null) { 51 | // check for svg, json, html; otherwise it's plain text.
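// e.g. (illustrative inputs): '<svg xmlns="http://www.w3.org/2000/svg">' -> image/svg+xml,
// '{"a":1}' -> application/json, '<!doctype html>' -> text/html;
// anything else that decodes as utf-8 -> text/plain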
52 | if (checkForSvg(text)) { 53 | return 'image/svg+xml' 54 | } else if (await checkForJson(text)) { 55 | return 'application/json' 56 | } else if (await checkForHtml(text)) { 57 | return 'text/html; charset=utf-8' 58 | } else { 59 | return 'text/plain; charset=utf-8' 60 | } 61 | } 62 | return defaultMimeType 63 | } 64 | 65 | // no need to include file-types listed at https://github.com/SgtPooki/file-type#supported-file-types 66 | switch (fileName.split('.').pop()) { 67 | case 'css': 68 | return 'text/css' 69 | case 'html': 70 | return 'text/html; charset=utf-8' 71 | case 'js': 72 | return 'application/javascript' 73 | case 'json': 74 | return 'application/json' 75 | case 'txt': 76 | return 'text/plain' 77 | case 'woff2': 78 | return 'font/woff2' 79 | // see bottom of https://github.com/SgtPooki/file-type#supported-file-types 80 | case 'svg': 81 | return 'image/svg+xml' 82 | case 'csv': 83 | return 'text/csv' 84 | case 'doc': 85 | return 'application/msword' 86 | case 'xls': 87 | return 'application/vnd.ms-excel' 88 | case 'ppt': 89 | return 'application/vnd.ms-powerpoint' 90 | case 'msi': 91 | return 'application/x-msdownload' 92 | default: 93 | return defaultMimeType 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/dag-cbor-to-safe-json.ts: -------------------------------------------------------------------------------- 1 | import { decode } from 'cborg' 2 | import { encode } from 'cborg/json' 3 | 4 | /** 5 | * Take a `DAG-CBOR` encoded `Uint8Array`, deserialize it as an object and 6 | * re-serialize it in a form that can be passed to `JSON.stringify` and then 7 | * `JSON.parse` without losing any data. 8 | */ 9 | export function dagCborToSafeJSON (buf: Uint8Array): string { 10 | const obj = decode(buf, { 11 | allowIndefinite: false, 12 | coerceUndefinedToNull: false, 13 | allowNaN: false, 14 | allowInfinity: false, 15 | strict: true, 16 | useMaps: false, 17 | rejectDuplicateMapKeys: true, 18 | 19 | // this is different to `DAG-CBOR` - the reason we disallow BigInts is 20 | // because we are about to re-encode to `JSON` which does not support 21 | // BigInts.
Blocks containing large numbers should be deserialized using a 22 | // cbor decoder instead 23 | allowBigInt: false 24 | }) 25 | 26 | return new TextDecoder().decode(encode(obj)) 27 | } 28 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-content-disposition-filename.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Takes a filename URL param and returns a string for use in a 3 | * `Content-Disposition` header 4 | */ 5 | export function getContentDispositionFilename (filename: string): string { 6 | const asciiOnly = replaceNonAsciiCharacters(filename) 7 | 8 | if (asciiOnly === filename) { 9 | return `filename="${filename}"` 10 | } 11 | 12 | return `filename="${asciiOnly}"; filename*=UTF-8''${encodeURIComponent(filename)}` 13 | } 14 | 15 | function replaceNonAsciiCharacters (filename: string): string { 16 | // eslint-disable-next-line no-control-regex 17 | return filename.replace(/[^\x00-\x7F]/g, '_') 18 | } 19 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-content-type.ts: -------------------------------------------------------------------------------- 1 | import { defaultMimeType } from './content-type-parser.js' 2 | import { isPromise } from './type-guards.js' 3 | import type { ContentTypeParser } from '../types.js' 4 | import type { Logger } from '@libp2p/interface' 5 | 6 | export interface GetContentTypeOptions { 7 | bytes: Uint8Array 8 | path: string 9 | defaultContentType?: string 10 | contentTypeParser: ContentTypeParser | undefined 11 | log: Logger 12 | 13 | /** 14 | * This should be set to the `filename` query parameter for the given request. 15 | * 16 | * @see https://specs.ipfs.tech/http-gateways/path-gateway/#filename-request-query-parameter 17 | */ 18 | filename?: string 19 | } 20 | 21 | export async function getContentType ({ bytes, path, contentTypeParser, log, defaultContentType = 'application/octet-stream', filename: filenameParam }: GetContentTypeOptions): Promise { 22 | let contentType: string | undefined 23 | 24 | if (contentTypeParser != null) { 25 | try { 26 | let fileName 27 | if (filenameParam == null) { 28 | fileName = path.split('/').pop()?.trim() 29 | fileName = (fileName === '' || fileName?.split('.').length === 1) ? undefined : fileName 30 | } else { 31 | fileName = filenameParam 32 | } 33 | const parsed = contentTypeParser(bytes, fileName) 34 | 35 | if (isPromise(parsed)) { 36 | const result = await parsed 37 | 38 | if (result != null) { 39 | contentType = result 40 | } 41 | } else if (parsed != null) { 42 | contentType = parsed 43 | } 44 | log.trace('contentTypeParser returned %s', contentType) 45 | } catch (err) { 46 | log.error('error parsing content type', err) 47 | } 48 | } 49 | if (contentType === defaultMimeType) { 50 | // if the content type is the default in our content-type-parser, instead, set it to the default content type provided to this function. 51 | contentType = defaultContentType 52 | } 53 | 54 | return contentType ?? 
defaultContentType 55 | } 56 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-e-tag.ts: -------------------------------------------------------------------------------- 1 | import type { RequestFormatShorthand } from '../types.js' 2 | import type { CID } from 'multiformats/cid' 3 | 4 | interface GetETagArg { 5 | cid: CID 6 | reqFormat?: RequestFormatShorthand 7 | rangeStart?: number 8 | rangeEnd?: number 9 | /** 10 | * A weak ETag is used when we can't guarantee byte-for-byte determinism (generated or mutable content). 11 | * Some examples: 12 | * - IPNS requests 13 | * - CAR streamed with blocks in non-deterministic order 14 | * - TAR streamed with files in non-deterministic order 15 | */ 16 | weak?: boolean 17 | 18 | /** 19 | * A custom prefix to use for the content of the etag. Needed in some cases (like dir-index-html). 20 | */ 21 | contentPrefix?: string 22 | } 23 | 24 | const getPrefix = ({ weak, reqFormat }: Partial<GetETagArg>): string => { 25 | if (reqFormat === 'tar' || reqFormat === 'car' || reqFormat === 'ipns-record' || weak === true) { 26 | return 'W/' 27 | } 28 | return '' 29 | } 30 | 31 | const getFormatSuffix = ({ reqFormat }: Partial<GetETagArg>): string => { 32 | if (reqFormat == null) { 33 | return '' 34 | } 35 | if (reqFormat === 'tar') { 36 | return '.x-tar' 37 | } 38 | 39 | return `.${reqFormat}` 40 | } 41 | 42 | /** 43 | * Builds the ETag response header value. 44 | * The CID needs to be wrapped in `"` quotes. 45 | * We use strong ETags for immutable responses and weak ones (prefixed with `W/`) for mutable/generated ones (ipns, car, tar, and generated HTML). 46 | * Block and CAR responses should have a different ETag than the deserialized one, so a prefix can be added, as existing gateways do. 47 | * 48 | * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag 49 | * @see https://specs.ipfs.tech/http-gateways/path-gateway/#etag-response-header 50 | */ 51 | export function getETag ({ cid, reqFormat, weak, rangeStart, rangeEnd, contentPrefix }: GetETagArg): string { 52 | const prefix = getPrefix({ weak, reqFormat }) 53 | let suffix = getFormatSuffix({ reqFormat }) 54 | if (rangeStart != null || rangeEnd != null) { 55 | suffix += `.${rangeStart ?? '0'}-${rangeEnd ?? 'N'}` 56 | } 57 | 58 | return `${prefix}"${contentPrefix ??
''}${cid.toString()}${suffix}"` 59 | } 60 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-peer-id-from-string.ts: -------------------------------------------------------------------------------- 1 | import { peerIdFromCID, peerIdFromString } from '@libp2p/peer-id' 2 | import { CID } from 'multiformats/cid' 3 | import type { PeerId } from '@libp2p/interface' 4 | 5 | export function getPeerIdFromString (peerIdString: string): PeerId { 6 | if (peerIdString.charAt(0) === '1' || peerIdString.charAt(0) === 'Q') { 7 | return peerIdFromString(peerIdString) 8 | } 9 | 10 | // try resolving as a base36 CID 11 | return peerIdFromCID(CID.parse(peerIdString)) 12 | } 13 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-resolved-accept-header.ts: -------------------------------------------------------------------------------- 1 | import { isExplicitAcceptHeader, isExplicitFormatQuery, isExplicitIpldAcceptRequest } from './is-accept-explicit.js' 2 | import { queryFormatToAcceptHeader } from './select-output-type.js' 3 | import type { ParsedUrlStringResults } from './parse-url-string.js' 4 | import type { ComponentLogger } from '@libp2p/interface' 5 | 6 | export interface ResolvedAcceptHeaderOptions { 7 | query?: ParsedUrlStringResults['query'] 8 | headers?: RequestInit['headers'] 9 | logger: ComponentLogger 10 | } 11 | 12 | export function getResolvedAcceptHeader ({ query, headers, logger }: ResolvedAcceptHeaderOptions): string | undefined { 13 | const log = logger.forComponent('helia:verified-fetch:get-resolved-accept-header') 14 | const requestHeaders = new Headers(headers) 15 | const incomingAcceptHeader = requestHeaders.get('accept') ?? 
undefined 16 | 17 | if (incomingAcceptHeader != null) { 18 | log('incoming accept header "%s"', incomingAcceptHeader) 19 | } 20 | 21 | if (!isExplicitIpldAcceptRequest({ query, headers: requestHeaders })) { 22 | log('no explicit IPLD content-type requested, returning incoming accept header %s', incomingAcceptHeader) 23 | return incomingAcceptHeader 24 | } 25 | 26 | const queryFormatMapping = queryFormatToAcceptHeader(query?.format) 27 | 28 | if (query?.format != null) { 29 | log('incoming query format "%s", mapped to %s', query.format, queryFormatMapping) 30 | } 31 | 32 | let acceptHeader = incomingAcceptHeader 33 | // if the incomingAcceptHeader is autogenerated by the requesting client (browser/curl/fetch/etc) then we may need to override it if query.format is specified 34 | if (!isExplicitAcceptHeader(requestHeaders) && isExplicitFormatQuery(query)) { 35 | log('accept header not recognized, but query format provided, setting accept header to %s', queryFormatMapping) 36 | acceptHeader = queryFormatMapping 37 | } 38 | 39 | log('resolved accept header to "%s"', acceptHeader) 40 | 41 | return acceptHeader 42 | } 43 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-stream-from-async-iterable.ts: -------------------------------------------------------------------------------- 1 | import { AbortError } from '@libp2p/interface' 2 | import { CustomProgressEvent } from 'progress-events' 3 | import { NoContentError } from '../errors.js' 4 | import type { VerifiedFetchInit } from '../index.js' 5 | import type { ComponentLogger } from '@libp2p/interface' 6 | 7 | /** 8 | * Converts an async iterator of Uint8Array bytes to a stream and returns the first chunk of bytes 9 | */ 10 | export async function getStreamFromAsyncIterable (iterator: AsyncIterable, path: string, logger: ComponentLogger, options?: Pick): Promise<{ stream: ReadableStream, firstChunk: Uint8Array }> { 11 | const log = logger.forComponent('helia:verified-fetch:get-stream-from-async-iterable') 12 | const reader = iterator[Symbol.asyncIterator]() 13 | const { value: firstChunk, done } = await reader.next() 14 | 15 | if (done === true) { 16 | log.error('no content found for path', path) 17 | throw new NoContentError() 18 | } 19 | 20 | const stream = new ReadableStream({ 21 | async start (controller) { 22 | // the initial value is already available 23 | options?.onProgress?.(new CustomProgressEvent('verified-fetch:request:progress:chunk')) 24 | controller.enqueue(firstChunk) 25 | }, 26 | async pull (controller) { 27 | const { value, done } = await reader.next() 28 | if (options?.signal?.aborted) { 29 | controller.error(new AbortError(options.signal.reason ?? 
'signal aborted by user')) 30 | controller.close() 31 | return 32 | } 33 | 34 | if (done === true) { 35 | if (value != null) { 36 | options?.onProgress?.(new CustomProgressEvent('verified-fetch:request:progress:chunk')) 37 | controller.enqueue(value) 38 | } 39 | controller.close() 40 | return 41 | } 42 | 43 | options?.onProgress?.(new CustomProgressEvent('verified-fetch:request:progress:chunk')) 44 | controller.enqueue(value) 45 | } 46 | }) 47 | 48 | return { 49 | stream, 50 | firstChunk 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/get-tar-stream.ts: -------------------------------------------------------------------------------- 1 | import { NotUnixFSError } from '@helia/unixfs/errors' 2 | import { exporter, recursive } from 'ipfs-unixfs-exporter' 3 | import map from 'it-map' 4 | import { pipe } from 'it-pipe' 5 | import { pack } from 'it-tar' 6 | import type { AbortOptions } from '@libp2p/interface' 7 | import type { Blockstore } from 'interface-blockstore' 8 | import type { UnixFSEntry } from 'ipfs-unixfs-exporter' 9 | import type { TarEntryHeader, TarImportCandidate } from 'it-tar' 10 | 11 | const EXPORTABLE = ['file', 'raw', 'directory'] 12 | 13 | function toHeader (file: UnixFSEntry): Partial & { name: string } { 14 | let mode: number | undefined 15 | let mtime: Date | undefined 16 | 17 | if (file.type === 'file' || file.type === 'directory') { 18 | mode = file.unixfs.mode 19 | mtime = file.unixfs.mtime != null ? new Date(Number(file.unixfs.mtime.secs * 1000n)) : undefined 20 | } 21 | 22 | return { 23 | name: file.path, 24 | mode, 25 | mtime, 26 | size: Number(file.size), 27 | type: file.type === 'directory' ? 'directory' : 'file' 28 | } 29 | } 30 | 31 | function toTarImportCandidate (entry: UnixFSEntry): TarImportCandidate { 32 | if (!EXPORTABLE.includes(entry.type)) { 33 | throw new NotUnixFSError(`${entry.type} is not a UnixFS node`) 34 | } 35 | 36 | const candidate: TarImportCandidate = { 37 | header: toHeader(entry) 38 | } 39 | 40 | if (entry.type === 'file' || entry.type === 'raw') { 41 | candidate.body = entry.content() 42 | } 43 | 44 | return candidate 45 | } 46 | 47 | export async function * tarStream (ipfsPath: string, blockstore: Blockstore, options?: AbortOptions): AsyncGenerator { 48 | const file = await exporter(ipfsPath, blockstore, options) 49 | 50 | if (file.type === 'file' || file.type === 'raw') { 51 | yield * pipe( 52 | [toTarImportCandidate(file)], 53 | pack() 54 | ) 55 | 56 | return 57 | } 58 | 59 | if (file.type === 'directory') { 60 | yield * pipe( 61 | recursive(ipfsPath, blockstore, options), 62 | (source) => map(source, (entry) => toTarImportCandidate(entry)), 63 | pack() 64 | ) 65 | 66 | return 67 | } 68 | 69 | throw new NotUnixFSError('Not a UnixFS node') 70 | } 71 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/handle-redirects.ts: -------------------------------------------------------------------------------- 1 | import { SubdomainNotSupportedError } from '../errors.js' 2 | import { matchURLString } from './parse-url-string.js' 3 | import { movedPermanentlyResponse } from './responses.js' 4 | import type { VerifiedFetchInit, Resource } from '../index.js' 5 | import type { AbortOptions, ComponentLogger } from '@libp2p/interface' 6 | import type { CID } from 'multiformats/cid' 7 | 8 | interface GetRedirectResponse { 9 | cid: CID 10 | resource: Resource 11 | options?: Omit & AbortOptions 12 | 
logger: ComponentLogger 13 | 14 | /** 15 | * Only used in testing. 16 | */ 17 | fetch?: typeof globalThis.fetch 18 | } 19 | 20 | function maybeAddTrailingSlash (path: string): string { 21 | // if it has an extension-like ending, don't add a trailing slash 22 | if (path.match(/\.[a-zA-Z0-9]{1,4}$/) != null) { 23 | return path 24 | } 25 | return path.endsWith('/') ? path : `${path}/` 26 | } 27 | 28 | // See https://specs.ipfs.tech/http-gateways/path-gateway/#location-response-header 29 | export async function getRedirectResponse ({ resource, options, logger, cid, fetch = globalThis.fetch }: GetRedirectResponse): Promise { 30 | const log = logger.forComponent('helia:verified-fetch:get-redirect-response') 31 | 32 | if (typeof resource !== 'string' || options == null || ['ipfs://', 'ipns://'].some((prefix) => resource.startsWith(prefix))) { 33 | return null 34 | } 35 | 36 | const headers = new Headers(options?.headers) 37 | const forwardedHost = headers.get('x-forwarded-host') 38 | const headerHost = headers.get('host') 39 | const forwardedFor = headers.get('x-forwarded-for') 40 | if (forwardedFor == null && forwardedHost == null && headerHost == null) { 41 | log.trace('no redirect info found in headers') 42 | return null 43 | } 44 | 45 | log.trace('checking for redirect info') 46 | // if x-forwarded-host is passed, we need to set the location header to the subdomain 47 | // so that the browser can redirect to the correct subdomain 48 | try { 49 | const urlParts = matchURLString(resource) 50 | const reqUrl = new URL(resource) 51 | const actualHost = forwardedHost ?? reqUrl.host 52 | const subdomainUrl = new URL(reqUrl) 53 | if (urlParts.protocol === 'ipfs' && cid.version === 0) { 54 | subdomainUrl.host = `${cid.toV1()}.ipfs.${actualHost}` 55 | } else { 56 | subdomainUrl.host = `${urlParts.cidOrPeerIdOrDnsLink}.${urlParts.protocol}.${actualHost}` 57 | } 58 | 59 | if (headerHost?.includes(urlParts.protocol) === true && subdomainUrl.host.includes(headerHost)) { 60 | log.trace('request was for a subdomain already, not setting location header') 61 | return null 62 | } 63 | 64 | if (headerHost != null && !subdomainUrl.host.includes(headerHost)) { 65 | log.trace('host header is not the same as the subdomain url host, not setting location header') 66 | return null 67 | } 68 | if (reqUrl.host === subdomainUrl.host) { 69 | log.trace('req url is the same as the subdomain url, not setting location header') 70 | return null 71 | } 72 | 73 | subdomainUrl.pathname = maybeAddTrailingSlash(reqUrl.pathname.replace(`/${urlParts.cidOrPeerIdOrDnsLink}`, '').replace(`/${urlParts.protocol}`, '')) 74 | log.trace('subdomain url %s', subdomainUrl.href) 75 | const pathUrl = new URL(reqUrl, `${reqUrl.protocol}//${actualHost}`) 76 | pathUrl.pathname = maybeAddTrailingSlash(reqUrl.pathname) 77 | log.trace('path url %s', pathUrl.href) 78 | // try to query subdomain with HEAD request to see if it's supported 79 | try { 80 | const subdomainTest = await fetch(subdomainUrl, { method: 'HEAD' }) 81 | if (subdomainTest.ok) { 82 | log('subdomain supported, redirecting to subdomain') 83 | return movedPermanentlyResponse(resource.toString(), subdomainUrl.href) 84 | } else { 85 | log('subdomain not supported, subdomain failed with status %s %s', subdomainTest.status, subdomainTest.statusText) 86 | throw new SubdomainNotSupportedError('subdomain not supported') 87 | } 88 | } catch (err: any) { 89 | log('subdomain not supported', err) 90 | if (pathUrl.href === reqUrl.href) { 91 | log('path url is the same as the request url, not setting 
location header') 92 | return null 93 | } 94 | // pathUrl is different from request URL (maybe even with just a trailing slash) 95 | return movedPermanentlyResponse(resource.toString(), pathUrl.href) 96 | } 97 | } catch (e) { 98 | // if it's not a full URL, we have nothing left to do. 99 | log.error('error setting location header for x-forwarded-host', e) 100 | } 101 | return null 102 | } 103 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/is-accept-explicit.ts: -------------------------------------------------------------------------------- 1 | import { FORMAT_TO_MIME_TYPE } from './select-output-type.js' 2 | import type { ParsedUrlStringResults } from './parse-url-string.js' 3 | 4 | export interface IsAcceptExplicitOptions { 5 | query?: ParsedUrlStringResults['query'] 6 | headers: Headers 7 | } 8 | 9 | export function isExplicitAcceptHeader (headers: Headers): boolean { 10 | const incomingAcceptHeader = headers.get('accept') 11 | if (incomingAcceptHeader != null && Object.values(FORMAT_TO_MIME_TYPE).includes(incomingAcceptHeader)) { 12 | return true 13 | } 14 | return false 15 | } 16 | 17 | export function isExplicitFormatQuery (query?: ParsedUrlStringResults['query']): boolean { 18 | const formatQuery = query?.format 19 | if (formatQuery != null && Object.keys(FORMAT_TO_MIME_TYPE).includes(formatQuery)) { 20 | return true 21 | } 22 | return false 23 | } 24 | 25 | /** 26 | * The user can provide an explicit `accept` header in the request headers or a `format` query parameter in the URL. 27 | * If either of these are provided, this function returns true. 28 | */ 29 | export function isExplicitIpldAcceptRequest ({ query, headers }: IsAcceptExplicitOptions): boolean { 30 | return isExplicitAcceptHeader(headers) || isExplicitFormatQuery(query) 31 | } 32 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/libp2p-defaults.browser.ts: -------------------------------------------------------------------------------- 1 | import { webRTCDirect } from '@libp2p/webrtc' 2 | import { webSockets } from '@libp2p/websockets' 3 | import { libp2pDefaults } from 'helia' 4 | import type { ServiceFactoryMap } from './libp2p-types.js' 5 | import type { DefaultLibp2pServices } from 'helia' 6 | import type { Libp2pOptions } from 'libp2p' 7 | 8 | type ServiceMap = Pick 9 | 10 | export function getLibp2pConfig (): Libp2pOptions & Required> { 11 | const libp2pDefaultOptions = libp2pDefaults() 12 | 13 | libp2pDefaultOptions.start = false 14 | libp2pDefaultOptions.addresses = { listen: [] } 15 | libp2pDefaultOptions.transports = [webRTCDirect(), webSockets()] 16 | libp2pDefaultOptions.peerDiscovery = [] // Avoid connecting to bootstrap nodes 17 | 18 | const services: ServiceFactoryMap = { 19 | dcutr: libp2pDefaultOptions.services.dcutr, 20 | identify: libp2pDefaultOptions.services.identify, 21 | keychain: libp2pDefaultOptions.services.keychain, 22 | ping: libp2pDefaultOptions.services.ping 23 | } 24 | 25 | return { 26 | ...libp2pDefaultOptions, 27 | start: false, 28 | services 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/libp2p-defaults.ts: -------------------------------------------------------------------------------- 1 | import { kadDHT } from '@libp2p/kad-dht' 2 | import { libp2pDefaults } from 'helia' 3 | import { ipnsSelector } from 'ipns/selector' 4 | import { ipnsValidator } from 
'ipns/validator' 5 | import type { ServiceFactoryMap } from './libp2p-types.js' 6 | import type { DefaultLibp2pServices } from 'helia' 7 | import type { Libp2pOptions } from 'libp2p' 8 | 9 | type ServiceMap = Pick 10 | 11 | export function getLibp2pConfig (): Libp2pOptions & Required> { 12 | const libp2pDefaultOptions = libp2pDefaults() 13 | 14 | libp2pDefaultOptions.start = false 15 | 16 | const services: ServiceFactoryMap = { 17 | autoNAT: libp2pDefaultOptions.services.autoNAT, 18 | dcutr: libp2pDefaultOptions.services.dcutr, 19 | dht: kadDHT({ 20 | clientMode: true, 21 | validators: { 22 | ipns: ipnsValidator 23 | }, 24 | selectors: { 25 | ipns: ipnsSelector 26 | } 27 | }), 28 | identify: libp2pDefaultOptions.services.identify, 29 | keychain: libp2pDefaultOptions.services.keychain, 30 | ping: libp2pDefaultOptions.services.ping, 31 | upnp: libp2pDefaultOptions.services.upnp 32 | } 33 | 34 | return { 35 | ...libp2pDefaultOptions, 36 | start: false, 37 | services 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/libp2p-types.ts: -------------------------------------------------------------------------------- 1 | import type { DelegatedRoutingV1HttpApiClient } from '@helia/delegated-routing-v1-http-api-client' 2 | import type { ServiceMap } from '@libp2p/interface' 3 | 4 | type DelegatedRoutingServices = Record<`delegatedRouting${number}`, ((components?: unknown) => DelegatedRoutingV1HttpApiClient)> 5 | 6 | export type ServiceFactoryMap = { 7 | [Property in keyof T]: (components: any & T) => T[Property] 8 | } & DelegatedRoutingServices 9 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/parse-resource.ts: -------------------------------------------------------------------------------- 1 | import { CID } from 'multiformats/cid' 2 | import { parseUrlString } from './parse-url-string.js' 3 | import type { ParseUrlStringOptions, ParsedUrlStringResults } from './parse-url-string.js' 4 | import type { Resource } from '../index.js' 5 | import type { IPNS } from '@helia/ipns' 6 | import type { ComponentLogger } from '@libp2p/interface' 7 | 8 | export interface ParseResourceComponents { 9 | ipns: IPNS 10 | logger: ComponentLogger 11 | } 12 | 13 | export interface ParseResourceOptions extends ParseUrlStringOptions { 14 | withServerTiming?: boolean 15 | } 16 | /** 17 | * Handles the different use cases for the `resource` argument. 18 | * The resource can represent an IPFS path, IPNS path, or CID. 19 | * If the resource represents an IPNS path, we need to resolve it to a CID. 20 | */ 21 | export async function parseResource (resource: Resource, { ipns, logger }: ParseResourceComponents, { withServerTiming = false, ...options }: ParseResourceOptions = { withServerTiming: false }): Promise { 22 | if (typeof resource === 'string') { 23 | return parseUrlString({ urlString: resource, ipns, logger, withServerTiming }, options) 24 | } 25 | 26 | const cid = CID.asCID(resource) 27 | 28 | if (cid != null) { 29 | // an actual CID 30 | return { 31 | cid, 32 | protocol: 'ipfs', 33 | path: '', 34 | query: {}, 35 | ipfsPath: `/ipfs/${cid.toString()}`, 36 | ttl: 29030400, // 1 year for ipfs content 37 | serverTimings: [] 38 | } satisfies ParsedUrlStringResults 39 | } 40 | 41 | throw new TypeError(`Invalid resource. 
Cannot determine CID from resource: ${resource}`) 42 | } 43 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/request-headers.ts: -------------------------------------------------------------------------------- 1 | import { InvalidRangeError } from '../errors.js' 2 | 3 | export function getHeader (headers: HeadersInit | undefined, header: string): string | undefined { 4 | if (headers == null) { 5 | return undefined 6 | } 7 | if (headers instanceof Headers) { 8 | return headers.get(header) ?? undefined 9 | } 10 | if (Array.isArray(headers)) { 11 | const entry = headers.find(([key]) => key.toLowerCase() === header.toLowerCase()) 12 | return entry?.[1] 13 | } 14 | const key = Object.keys(headers).find(k => k.toLowerCase() === header.toLowerCase()) 15 | if (key == null) { 16 | return undefined 17 | } 18 | 19 | return headers[key] 20 | } 21 | 22 | /** 23 | * Given two ints from a Range header, and potential fileSize, returns: 24 | * 1. number of bytes the response should contain. 25 | * 2. the start index of the range. // inclusive 26 | * 3. the end index of the range. // inclusive 27 | */ 28 | // eslint-disable-next-line complexity 29 | export function calculateByteRangeIndexes (start: number | undefined, end: number | undefined, fileSize?: number): { byteSize?: number, start?: number, end?: number } { 30 | if ((start ?? 0) > (end ?? Infinity)) { 31 | throw new InvalidRangeError('Invalid range: Range-start index is greater than range-end index.') 32 | } 33 | if (start != null && (end ?? 0) >= (fileSize ?? Infinity)) { 34 | throw new InvalidRangeError('Invalid range: Range-end index is greater than or equal to the size of the file.') 35 | } 36 | if (start == null && (end ?? 0) > (fileSize ?? Infinity)) { 37 | throw new InvalidRangeError('Invalid range: Range-end index is greater than the size of the file.') 38 | } 39 | if (start != null && start < 0) { 40 | throw new InvalidRangeError('Invalid range: Range-start index cannot be negative.') 41 | } 42 | 43 | if (start != null && end != null) { 44 | return { byteSize: end - start + 1, start, end } 45 | } else if (start == null && end != null) { 46 | // suffix byte range requested 47 | if (fileSize == null) { 48 | return { end } 49 | } 50 | if (end === fileSize) { 51 | return { byteSize: fileSize, start: 0, end: fileSize - 1 } 52 | } 53 | return { byteSize: end, start: fileSize - end, end: fileSize - 1 } 54 | } else if (start != null && end == null) { 55 | if (fileSize == null) { 56 | // we only have the start index, and no fileSize, so we can't return a valid range. 57 | return { start } 58 | } 59 | const end = fileSize - 1 60 | const byteSize = fileSize - start 61 | return { byteSize, start, end } 62 | } 63 | 64 | return { byteSize: fileSize, start: 0, end: fileSize != null ? fileSize - 1 : 0 } 65 | } 66 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/resource-to-cache-key.ts: -------------------------------------------------------------------------------- 1 | import { CID } from 'multiformats/cid' 2 | import { matchURLString } from './parse-url-string.js' 3 | 4 | /** 5 | * Takes a resource and returns a session cache key as an IPFS or IPNS path with 6 | * any trailing segments removed. 7 | * 8 | * E.g. 
9 | * 10 | * - Qmfoo -> /ipfs/Qmfoo 11 | * - https://Qmfoo.ipfs.gateway.org -> /ipfs/Qmfoo 12 | * - https://gateway.org/ipfs/Qmfoo -> /ipfs/Qmfoo 13 | * - https://gateway.org/ipfs/Qmfoo/bar.txt -> /ipfs/Qmfoo 14 | * - etc 15 | */ 16 | export function resourceToSessionCacheKey (url: string | CID): string { 17 | const cid = CID.asCID(url) 18 | 19 | if (cid != null) { 20 | return `ipfs://${cid}` 21 | } 22 | 23 | try { 24 | return `ipfs://${CID.parse(url.toString())}` 25 | } catch {} 26 | 27 | const { protocol, cidOrPeerIdOrDnsLink } = matchURLString(url.toString()) 28 | 29 | return `${protocol}://${cidOrPeerIdOrDnsLink}` 30 | } 31 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/response-headers.ts: -------------------------------------------------------------------------------- 1 | import { InvalidRangeError } from '../errors.js' 2 | import type { CID } from 'multiformats/cid' 3 | 4 | interface CacheControlHeaderOptions { 5 | /** 6 | * This should be seconds as a number. 7 | * 8 | * See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control#response_directives 9 | */ 10 | ttl?: number 11 | protocol: 'ipfs' | 'ipns' 12 | response: Response 13 | } 14 | 15 | /** 16 | * Implementations may place an upper bound on any TTL received, as noted in Section 8 of [rfc2181]. 17 | * If TTL value is unknown, implementations should not send a Cache-Control 18 | * No matter if TTL value is known or not, implementations should always send a Last-Modified header with the timestamp of the record resolution. 19 | * 20 | * @see https://specs.ipfs.tech/http-gateways/path-gateway/#cache-control-response-header 21 | */ 22 | export function setCacheControlHeader ({ ttl, protocol, response }: CacheControlHeaderOptions): void { 23 | if (response.headers.has('cache-control')) { 24 | // don't set the header if it's already set by a plugin 25 | return 26 | } 27 | let headerValue: string 28 | if (protocol === 'ipfs') { 29 | headerValue = 'public, max-age=29030400, immutable' 30 | } else if (ttl == null) { 31 | /** 32 | * default limit for unknown TTL: "use 5 minute as default fallback when it is not available." 33 | * 34 | * @see https://github.com/ipfs/boxo/issues/329#issuecomment-1995236409 35 | */ 36 | headerValue = 'public, max-age=300' 37 | } else { 38 | headerValue = `public, max-age=${ttl}` 39 | } 40 | 41 | response.headers.set('cache-control', headerValue) 42 | } 43 | 44 | /** 45 | * This function returns the value of the `Content-Range` header for a given range. 46 | * If you know the total size of the body, pass it as `byteSize` 47 | * 48 | * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Range 49 | */ 50 | export function getContentRangeHeader ({ byteStart, byteEnd, byteSize }: { byteStart: number | undefined, byteEnd: number | undefined, byteSize: number | undefined }): string { 51 | const total = byteSize ?? '*' // if we don't know the total size, we should use * 52 | 53 | if ((byteEnd ?? 0) >= (byteSize ?? Infinity)) { 54 | throw new InvalidRangeError('Invalid range: Range-end index is greater than or equal to the size of the file.') 55 | } 56 | if ((byteStart ?? 0) >= (byteSize ?? 
Infinity)) { 57 | throw new InvalidRangeError('Invalid range: Range-start index is greater than or equal to the size of the file.') 58 | } 59 | 60 | if (byteStart != null && byteEnd == null) { 61 | // only byteStart in range 62 | if (byteSize == null) { 63 | return `bytes */${total}` 64 | } 65 | return `bytes ${byteStart}-${byteSize - 1}/${byteSize}` 66 | } 67 | 68 | if (byteStart == null && byteEnd != null) { 69 | // only byteEnd in range 70 | if (byteSize == null) { 71 | return `bytes */${total}` 72 | } 73 | const end = byteSize - 1 74 | const start = end - byteEnd + 1 75 | 76 | return `bytes ${start}-${end}/${byteSize}` 77 | } 78 | 79 | if (byteStart == null && byteEnd == null) { 80 | // neither are provided, we can't return a valid range. 81 | return `bytes */${total}` 82 | } 83 | 84 | return `bytes ${byteStart}-${byteEnd}/${total}` 85 | } 86 | 87 | /** 88 | * Sets the `X-Ipfs-Roots` header on the response if it exists. 89 | * 90 | * @see https://specs.ipfs.tech/http-gateways/path-gateway/#x-ipfs-roots-response-header 91 | */ 92 | export function setIpfsRoots (response: Response, ipfsRoots?: CID[]): void { 93 | if (ipfsRoots != null) { 94 | response.headers.set('X-Ipfs-Roots', getIpfsRoots(ipfsRoots)) 95 | } 96 | } 97 | 98 | export function getIpfsRoots (ipfsRoots: CID[]): string { 99 | return ipfsRoots.map(cid => cid.toV1().toString()).join(',') 100 | } 101 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/select-output-type.ts: -------------------------------------------------------------------------------- 1 | import { code as dagCborCode } from '@ipld/dag-cbor' 2 | import { code as dagJsonCode } from '@ipld/dag-json' 3 | import { code as dagPbCode } from '@ipld/dag-pb' 4 | import { code as jsonCode } from 'multiformats/codecs/json' 5 | import { code as rawCode } from 'multiformats/codecs/raw' 6 | import type { RequestFormatShorthand } from '../types.js' 7 | import type { CID } from 'multiformats/cid' 8 | 9 | /** 10 | * This maps supported response types for each codec supported by verified-fetch 11 | */ 12 | const CID_TYPE_MAP: Record = { 13 | [dagCborCode]: [ 14 | 'application/json', 15 | 'application/vnd.ipld.dag-cbor', 16 | 'application/cbor', 17 | 'application/vnd.ipld.dag-json', 18 | 'application/octet-stream', 19 | 'application/vnd.ipld.raw', 20 | 'application/vnd.ipfs.ipns-record', 21 | 'application/vnd.ipld.car' 22 | ], 23 | [dagJsonCode]: [ 24 | 'application/json', 25 | 'application/vnd.ipld.dag-cbor', 26 | 'application/cbor', 27 | 'application/vnd.ipld.dag-json', 28 | 'application/octet-stream', 29 | 'application/vnd.ipld.raw', 30 | 'application/vnd.ipfs.ipns-record', 31 | 'application/vnd.ipld.car' 32 | ], 33 | [jsonCode]: [ 34 | 'application/json', 35 | 'application/vnd.ipld.dag-cbor', 36 | 'application/cbor', 37 | 'application/vnd.ipld.dag-json', 38 | 'application/octet-stream', 39 | 'application/vnd.ipld.raw', 40 | 'application/vnd.ipfs.ipns-record', 41 | 'application/vnd.ipld.car' 42 | ], 43 | [dagPbCode]: [ 44 | 'application/octet-stream', 45 | 'application/json', 46 | 'application/vnd.ipld.dag-cbor', 47 | 'application/cbor', 48 | 'application/vnd.ipld.dag-json', 49 | 'application/vnd.ipld.raw', 50 | 'application/vnd.ipfs.ipns-record', 51 | 'application/vnd.ipld.car', 52 | 'application/x-tar' 53 | ], 54 | [rawCode]: [ 55 | 'application/octet-stream', 56 | 'application/vnd.ipld.raw', 57 | 'application/vnd.ipfs.ipns-record', 58 | 'application/vnd.ipld.dag-json', 59 | 'application/vnd.ipld.car', 60 | 
'application/x-tar' 61 | ] 62 | } 63 | 64 | /** 65 | * Selects an output mime-type based on the CID and a passed `Accept` header 66 | */ 67 | export function selectOutputType (cid: CID, accept?: string): string | undefined { 68 | const cidMimeTypes = CID_TYPE_MAP[cid.code] 69 | 70 | if (accept != null) { 71 | return chooseMimeType(accept, cidMimeTypes) 72 | } 73 | } 74 | 75 | function chooseMimeType (accept: string, validMimeTypes: string[]): string | undefined { 76 | const requestedMimeTypes = accept 77 | .split(',') 78 | .map(s => { 79 | const parts = s.trim().split(';') 80 | 81 | return { 82 | mimeType: `${parts[0]}`.trim(), 83 | weight: parseQFactor(parts[1]) 84 | } 85 | }) 86 | .sort((a, b) => { 87 | if (a.weight === b.weight) { 88 | return 0 89 | } 90 | 91 | if (a.weight > b.weight) { 92 | return -1 93 | } 94 | 95 | return 1 96 | }) 97 | .map(s => s.mimeType) 98 | 99 | for (const headerFormat of requestedMimeTypes) { 100 | for (const mimeType of validMimeTypes) { 101 | if (headerFormat.includes(mimeType)) { 102 | return mimeType 103 | } 104 | 105 | if (headerFormat === '*/*') { 106 | return mimeType 107 | } 108 | 109 | if (headerFormat.startsWith('*/') && mimeType.split('/')[1] === headerFormat.split('/')[1]) { 110 | return mimeType 111 | } 112 | 113 | if (headerFormat.endsWith('/*') && mimeType.split('/')[0] === headerFormat.split('/')[0]) { 114 | return mimeType 115 | } 116 | } 117 | } 118 | } 119 | 120 | /** 121 | * Parses q-factor weighting from the accept header to allow letting some mime 122 | * types take precedence over others. 123 | * 124 | * If the q-factor for an acceptable mime representation is omitted it defaults 125 | * to `1`. 126 | * 127 | * All specified values should be in the range 0-1. 128 | * 129 | * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept#q 130 | */ 131 | function parseQFactor (str?: string): number { 132 | if (str != null) { 133 | str = str.trim() 134 | } 135 | 136 | if (str?.startsWith('q=') !== true) { 137 | return 1 138 | } 139 | 140 | const factor = parseFloat(str.replace('q=', '')) 141 | 142 | if (isNaN(factor)) { 143 | return 0 144 | } 145 | 146 | return factor 147 | } 148 | 149 | export const FORMAT_TO_MIME_TYPE: Record<RequestFormatShorthand, string> = { 150 | raw: 'application/vnd.ipld.raw', 151 | car: 'application/vnd.ipld.car', 152 | 'dag-json': 'application/vnd.ipld.dag-json', 153 | 'dag-cbor': 'application/vnd.ipld.dag-cbor', 154 | json: 'application/json', 155 | cbor: 'application/cbor', 156 | 'ipns-record': 'application/vnd.ipfs.ipns-record', 157 | tar: 'application/x-tar' 158 | } 159 | 160 | /** 161 | * Converts a `format=...` query param to a mime type as would be found in the 162 | * `Accept` header, if a valid mapping is available 163 | */ 164 | export function queryFormatToAcceptHeader (format?: RequestFormatShorthand): string | undefined { 165 | if (format != null) { 166 | return FORMAT_TO_MIME_TYPE[format] 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/server-timing.ts: -------------------------------------------------------------------------------- 1 | export interface ServerTimingSuccess<T> { 2 | error: null 3 | result: T 4 | header: string 5 | } 6 | export interface ServerTimingError { 7 | result: null 8 | error: Error 9 | header: string 10 | } 11 | export type ServerTimingResult<T> = ServerTimingSuccess<T> | ServerTimingError 12 | 13 | export async function serverTiming<T> ( 14 | name: string, 15 | description: string, 16 | fn: () => Promise<T> 17 | ): Promise<ServerTimingResult<T>>
{ 18 | const startTime = performance.now() 19 | 20 | try { 21 | const result = await fn() // Execute the function 22 | const endTime = performance.now() 23 | 24 | const duration = (endTime - startTime).toFixed(1) // Duration in milliseconds 25 | 26 | // Create the Server-Timing header string 27 | const header = `${name};dur=${duration};desc="${description}"` 28 | return { result, header, error: null } 29 | } catch (error: any) { 30 | const endTime = performance.now() 31 | const duration = (endTime - startTime).toFixed(1) 32 | 33 | // Still return a timing header even on error 34 | const header = `${name};dur=${duration};desc="${description}"` 35 | return { result: null, error, header } // Pass error with timing info 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/tlru.ts: -------------------------------------------------------------------------------- 1 | import QuickLRU from 'quick-lru' 2 | 3 | /** 4 | * Time Aware Least Recent Used Cache 5 | * 6 | * @see https://arxiv.org/pdf/1801.00390 7 | */ 8 | export class TLRU<T> { 9 | private readonly lru: QuickLRU<string, T> 10 | 11 | constructor (maxSize: number) { 12 | this.lru = new QuickLRU<string, T>({ maxSize }) 13 | } 14 | 15 | get (key: string): T | undefined { 16 | return this.lru.get(key) 17 | } 18 | 19 | set (key: string, value: T, ttlMs: number): void { 20 | this.lru.set(key, value, { 21 | maxAge: Date.now() + ttlMs 22 | }) 23 | } 24 | 25 | has (key: string): boolean { 26 | const value = this.get(key) 27 | 28 | if (value != null) { 29 | return true 30 | } 31 | 32 | return false 33 | } 34 | 35 | remove (key: string): void { 36 | this.lru.delete(key) 37 | } 38 | 39 | clear (): void { 40 | this.lru.clear() 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/type-guards.ts: -------------------------------------------------------------------------------- 1 | export function isPromise <T = any> (p?: any): p is Promise<T> { 2 | return p?.then != null 3 | } 4 | -------------------------------------------------------------------------------- /packages/verified-fetch/src/utils/walk-path.ts: -------------------------------------------------------------------------------- 1 | import { DoesNotExistError } from '@helia/unixfs/errors' 2 | import { AbortError } from '@libp2p/interface' 3 | import { walkPath as exporterWalk } from 'ipfs-unixfs-exporter' 4 | import { badGatewayResponse, notFoundResponse } from './responses.js' 5 | import type { PluginContext } from '../plugins/types.js' 6 | import type { Logger } from '@libp2p/interface' 7 | import type { Blockstore } from 'interface-blockstore' 8 | import type { ExporterOptions, ReadableStorage, ObjectNode, UnixFSEntry } from 'ipfs-unixfs-exporter' 9 | import type { CID } from 'multiformats/cid' 10 | 11 | export interface PathWalkerOptions extends ExporterOptions { 12 | 13 | } 14 | export interface PathWalkerResponse { 15 | ipfsRoots: CID[] 16 | terminalElement: UnixFSEntry 17 | } 18 | 19 | export interface PathWalkerFn { 20 | (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse> 21 | } 22 | 23 | async function walkPath (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse> { 24 | const ipfsRoots: CID[] = [] 25 | let terminalElement: UnixFSEntry | undefined 26 | 27 | for await (const entry of exporterWalk(path, blockstore, options)) { 28 | ipfsRoots.push(entry.cid) 29 | terminalElement = entry 30 | } 31 | 32 | if
--------------------------------------------------------------------------------
/packages/verified-fetch/src/utils/type-guards.ts:
--------------------------------------------------------------------------------
1 | export function isPromise <T = unknown> (p?: any): p is Promise<T> {
2 |   return p?.then != null
3 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/src/utils/walk-path.ts:
--------------------------------------------------------------------------------
1 | import { DoesNotExistError } from '@helia/unixfs/errors'
2 | import { AbortError } from '@libp2p/interface'
3 | import { walkPath as exporterWalk } from 'ipfs-unixfs-exporter'
4 | import { badGatewayResponse, notFoundResponse } from './responses.js'
5 | import type { PluginContext } from '../plugins/types.js'
6 | import type { Logger } from '@libp2p/interface'
7 | import type { Blockstore } from 'interface-blockstore'
8 | import type { ExporterOptions, ReadableStorage, ObjectNode, UnixFSEntry } from 'ipfs-unixfs-exporter'
9 | import type { CID } from 'multiformats/cid'
10 | 
11 | export interface PathWalkerOptions extends ExporterOptions {
12 | 
13 | }
14 | export interface PathWalkerResponse {
15 |   ipfsRoots: CID[]
16 |   terminalElement: UnixFSEntry
17 | }
18 | 
19 | export interface PathWalkerFn {
20 |   (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse>
21 | }
22 | 
23 | async function walkPath (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse> {
24 |   const ipfsRoots: CID[] = []
25 |   let terminalElement: UnixFSEntry | undefined
26 | 
27 |   for await (const entry of exporterWalk(path, blockstore, options)) {
28 |     ipfsRoots.push(entry.cid)
29 |     terminalElement = entry
30 |   }
31 | 
32 |   if (terminalElement == null) {
33 |     throw new DoesNotExistError('No terminal element found')
34 |   }
35 | 
36 |   return {
37 |     ipfsRoots,
38 |     terminalElement
39 |   }
40 | }
41 | 
42 | export function isObjectNode (node: UnixFSEntry): node is ObjectNode {
43 |   return node.type === 'object'
44 | }
45 | 
46 | /**
47 |  * Attempts to walk the path in the blockstore, returning the ipfsRoots needed to resolve the path, and the terminal element.
48 |  * If the signal is aborted, the function will throw an AbortError.
49 |  * If a terminal element is not found, a notFoundResponse is returned.
50 |  * If another unknown error occurs, a badGatewayResponse is returned.
51 |  *
52 |  */
53 | export async function handlePathWalking ({ cid, path, resource, options, blockstore, log }: PluginContext & { blockstore: Blockstore, log: Logger }): Promise<PathWalkerResponse | Response> {
54 |   try {
55 |     return await walkPath(blockstore, `${cid.toString()}/${path}`, options)
56 |   } catch (err: any) {
57 |     if (options?.signal?.aborted) {
58 |       throw new AbortError(options?.signal?.reason)
59 |     }
60 | 
61 |     if (['ERR_NO_PROP', 'ERR_NO_TERMINAL_ELEMENT', 'ERR_NOT_FOUND'].includes(err.code)) {
62 |       return notFoundResponse(resource)
63 |     }
64 | 
65 |     log.error('error walking path %s', path, err)
66 |     return badGatewayResponse(resource, 'Error walking path')
67 |   }
68 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/cache-control-header.spec.ts:
--------------------------------------------------------------------------------
1 | import { dagCbor } from '@helia/dag-cbor'
2 | import { ipns } from '@helia/ipns'
3 | import { generateKeyPair } from '@libp2p/crypto/keys'
4 | import { stop } from '@libp2p/interface'
5 | import { peerIdFromPrivateKey } from '@libp2p/peer-id'
6 | import { dns } from '@multiformats/dns'
7 | import { expect } from 'aegir/chai'
8 | import Sinon from 'sinon'
9 | import { VerifiedFetch } from '../src/verified-fetch.js'
10 | import { createHelia } from './fixtures/create-offline-helia.js'
11 | import { answerFake } from './fixtures/dns-answer-fake.js'
12 | import type { Helia } from '@helia/interface'
13 | import type { IPNS } from '@helia/ipns'
14 | import type { DNSResponse } from '@multiformats/dns'
15 | 
16 | describe('cache-control header', () => {
17 |   let helia: Helia
18 |   let name: IPNS
19 |   let verifiedFetch: VerifiedFetch
20 |   let customDnsResolver: Sinon.SinonStub<any[], Promise<DNSResponse>>
21 | 
22 |   beforeEach(async () => {
23 |     customDnsResolver = Sinon.stub()
24 |     helia = await createHelia({
25 |       dns: dns({
26 |         resolvers: {
27 |           '.': customDnsResolver
28 |         }
29 |       })
30 |     })
31 |     name = ipns(helia)
32 |     verifiedFetch = new VerifiedFetch({
33 |       helia
34 |     })
35 |   })
36 | 
37 |   afterEach(async () => {
38 |     await stop(helia, verifiedFetch)
39 |   })
40 | 
41 |   it('should return the correct max-age in the cache-control header for immutable responses', async () => {
42 |     const obj = {
43 |       hello: 'world'
44 |     }
45 |     const c = dagCbor(helia)
46 |     const cid = await c.add(obj)
47 | 
48 |     const resp = await verifiedFetch.fetch(cid)
49 | 
50 |     expect(resp).to.be.ok()
51 |     expect(resp.status).to.equal(200)
52 |     expect(resp.headers.get('Cache-Control')).to.equal('public, max-age=29030400, immutable')
53 |   })
54 | 
55 |   it('should not contain immutable in the cache-control header for an IPNS name', async () => {
56 |     const obj = {
57 |       hello: 'world'
58 |     }
59 |     const c = dagCbor(helia)
60 |     const cid = await c.add(obj)
61 | 
62 |     const oneHourInMs = 1000 * 60 * 60
63 |     const key = await
generateKeyPair('Ed25519') 64 | const peerId = peerIdFromPrivateKey(key) 65 | 66 | // ipns currently only allows customizing the lifetime which is also used as the TTL 67 | await name.publish(key, cid, { lifetime: oneHourInMs }) 68 | 69 | const resp = await verifiedFetch.fetch(`ipns://${peerId}`) 70 | expect(resp).to.be.ok() 71 | expect(resp.status).to.equal(200) 72 | 73 | expect(resp.headers.get('Cache-Control')).to.not.containIgnoreCase('immutable') 74 | }) 75 | 76 | // Skipping until https://github.com/ipfs/js-ipns/issues/310 is resolved 77 | // Note that the source of the error is from the `name.publish` call rather than the max-age value 78 | // in the cache control header. 79 | it.skip('should return the correct max-age in the cache-control header for an IPNS name', async () => { 80 | const obj = { 81 | hello: 'world' 82 | } 83 | const c = dagCbor(helia) 84 | const cid = await c.add(obj) 85 | 86 | const oneHourInSeconds = 60 * 60 87 | const key = await generateKeyPair('Ed25519') 88 | const peerId = peerIdFromPrivateKey(key) 89 | 90 | /** 91 | * ipns currently only allows customizing the lifetime which is also used as the TTL 92 | * 93 | * lifetime is coming back as 100000 times larger than expected 94 | * 95 | * @see https://github.com/ipfs/js-ipns/blob/16e0e10682fa9a663e0bb493a44d3e99a5200944/src/index.ts#L200 96 | * @see https://github.com/ipfs/js-ipns/pull/308 97 | */ 98 | await name.publish(key, cid, { lifetime: oneHourInSeconds * 1000 }) // pass to ipns as milliseconds 99 | 100 | const resp = await verifiedFetch.fetch(`ipns://${peerId}`) 101 | expect(resp).to.be.ok() 102 | expect(resp.status).to.equal(200) 103 | 104 | expect(resp.headers.get('Cache-Control')).to.equal(`public, max-age=${oneHourInSeconds}`) 105 | }) 106 | 107 | it('should not contain immutable in the cache-control header for a DNSLink name', async () => { 108 | const obj = { 109 | hello: 'world' 110 | } 111 | const c = dagCbor(helia) 112 | const cid = await c.add(obj) 113 | customDnsResolver.withArgs('_dnslink.example-domain.com').resolves(answerFake(`dnslink=/ipfs/${cid}`, 666, '_dnslink.example-domain.com', 16)) 114 | 115 | const resp = await verifiedFetch.fetch('ipns://example-domain.com') 116 | expect(resp).to.be.ok() 117 | expect(resp.status).to.equal(200) 118 | 119 | expect(resp.headers.get('Cache-Control')).to.equal('public, max-age=666') 120 | }) 121 | }) 122 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/custom-dns-resolvers.spec.ts: -------------------------------------------------------------------------------- 1 | import { stop } from '@libp2p/interface' 2 | import { dns, RecordType } from '@multiformats/dns' 3 | import { expect } from 'aegir/chai' 4 | import Sinon from 'sinon' 5 | import { createVerifiedFetch } from '../src/index.js' 6 | import { ipnsCache } from '../src/utils/parse-url-string.js' 7 | import { VerifiedFetch } from '../src/verified-fetch.js' 8 | import { createHelia } from './fixtures/create-offline-helia.js' 9 | 10 | describe('custom dns-resolvers', () => { 11 | beforeEach(() => { 12 | ipnsCache.clear() 13 | }) 14 | 15 | it('is used when passed to createVerifiedFetch', async () => { 16 | const customDnsResolver = Sinon.stub().withArgs('_dnslink.some-non-cached-domain.com').resolves({ 17 | Answer: [{ 18 | data: 'dnslink=/ipfs/bafkqac3imvwgy3zao5xxe3de' 19 | }] 20 | }) 21 | 22 | const fetch = await createVerifiedFetch({ 23 | gateways: ['http://127.0.0.1:8080'], 24 | dnsResolvers: [customDnsResolver] 25 | }) 26 | const 
response = await fetch('ipns://some-non-cached-domain.com')
27 |     expect(response.status).to.equal(200)
28 |     expect(response.statusText).to.equal('OK')
29 |     await expect(response.text()).to.eventually.equal('hello world')
30 | 
31 |     expect(customDnsResolver.callCount).to.equal(1)
32 |     expect(customDnsResolver.getCall(0).args).to.deep.equal(['_dnslink.some-non-cached-domain.com', {
33 |       types: [
34 |         RecordType.TXT
35 |       ]
36 |     }])
37 |     await stop(fetch)
38 |   })
39 | 
40 |   it('is used when passed to VerifiedFetch', async () => {
41 |     const customDnsResolver = Sinon.stub().withArgs('_dnslink.some-non-cached-domain2.com').resolves({
42 |       Answer: [{
43 |         data: 'dnslink=/ipfs/bafkqac3imvwgy3zao5xxe3de'
44 |       }]
45 |     })
46 | 
47 |     const helia = await createHelia({
48 |       dns: dns({
49 |         resolvers: {
50 |           '.': customDnsResolver
51 |         }
52 |       })
53 |     })
54 | 
55 |     const verifiedFetch = new VerifiedFetch({
56 |       helia
57 |     })
58 | 
59 |     const response = await verifiedFetch.fetch('ipns://some-non-cached-domain2.com')
60 |     expect(response.status).to.equal(200)
61 |     expect(response.statusText).to.equal('OK')
62 |     await expect(response.text()).to.eventually.equal('hello world')
63 | 
64 |     expect(customDnsResolver.callCount).to.equal(1)
65 |     expect(customDnsResolver.getCall(0).args).to.deep.equal(['_dnslink.some-non-cached-domain2.com', {
66 |       types: [
67 |         RecordType.TXT
68 |       ]
69 |     }])
70 |     await stop(helia, verifiedFetch)
71 |   })
72 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/cids.ts:
--------------------------------------------------------------------------------
1 | import * as dagCbor from '@ipld/dag-cbor'
2 | import * as dagJson from '@ipld/dag-json'
3 | import * as dagPb from '@ipld/dag-pb'
4 | import { CID } from 'multiformats/cid'
5 | import * as json from 'multiformats/codecs/json'
6 | import * as raw from 'multiformats/codecs/raw'
7 | 
8 | // 112 = dag-pb, 18 = sha256, 0 = CIDv0
9 | const mh = CID.parse('QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr').multihash
10 | 
11 | export const cids: Record<string, CID> = {
12 |   filev0: CID.createV0(mh),
13 |   file: CID.createV1(dagPb.code, mh),
14 |   dagCbor: CID.createV1(dagCbor.code, mh),
15 |   dagJson: CID.createV1(dagJson.code, mh),
16 |   json: CID.createV1(json.code, mh),
17 |   raw: CID.createV1(raw.code, mh)
18 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/create-offline-helia.ts:
--------------------------------------------------------------------------------
1 | import { createHeliaHTTP } from '@helia/http'
2 | import { MemoryBlockstore } from 'blockstore-core'
3 | import { IdentityBlockstore } from 'blockstore-core/identity'
4 | import { MemoryDatastore } from 'datastore-core'
5 | import type { HeliaInit } from 'helia'
6 | 
7 | export async function createHelia (init: Partial<HeliaInit> = {}): Promise<Awaited<ReturnType<typeof createHeliaHTTP>>> {
8 |   const datastore = new MemoryDatastore()
9 |   const blockstore = new IdentityBlockstore(new MemoryBlockstore())
10 | 
11 |   const helia = await createHeliaHTTP({
12 |     datastore,
13 |     blockstore,
14 |     blockBrokers: [],
15 |     routers: [],
16 |     ...init
17 |   })
18 | 
19 |   await helia.start()
20 | 
21 |   return helia
22 | }
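The fixture above passes empty blockBrokers and routers arrays so tests never hit the network, and wraps the memory blockstore in an IdentityBlockstore so identity-hashed CIDs, whose bytes are embedded in the CID itself, can be served with no blocks stored at all. That is how the DNS-resolver tests above can fetch bafkqac3imvwgy3zao5xxe3de and read back 'hello world'. A small sketch of the idea (illustrative, not part of the repo):

import { CID } from 'multiformats/cid'
import { identity } from 'multiformats/hashes/identity'
import * as raw from 'multiformats/codecs/raw'

// an identity CID carries its payload in the multihash digest, so no
// blockstore lookup or network round-trip is needed to retrieve it
const bytes = new TextEncoder().encode('hello world')
const cid = CID.createV1(raw.code, identity.digest(bytes))
console.log(new TextDecoder().decode(cid.multihash.digest)) // 'hello world'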
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/create-random-data-chunks.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Creates a Uint8Array filled with random data of the specified size
3 |  *
4 |  * @param sizeInBytes - The size of the array in bytes
5 |  */
6 | function createRandomData (sizeInBytes: number): Uint8Array {
7 |   const data = new Uint8Array(sizeInBytes)
8 |   const MAX_BYTES_PER_CALL = 65536 // see https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues#exceptions
9 | 
10 |   for (let offset = 0; offset < sizeInBytes; offset += MAX_BYTES_PER_CALL) {
11 |     const chunkSize = Math.min(MAX_BYTES_PER_CALL, sizeInBytes - offset)
12 |     const chunk = data.subarray(offset, offset + chunkSize)
13 |     crypto.getRandomValues(chunk)
14 |   }
15 | 
16 |   return data
17 | }
18 | 
19 | /**
20 |  * Creates multiple Uint8Arrays filled with random data and combines them.
21 |  *
22 |  * Useful for testing CIDs that reference larger content that spans multiple blocks.
23 |  *
24 |  * @param numberOfChunks - Number of chunks to create
25 |  * @param sizeInBytes - Size of each chunk in bytes
26 |  * @returns An object containing the individual chunks and the combined data
27 |  */
28 | export function createRandomDataChunks (numberOfChunks: number, sizeInBytes: number): {
29 |   chunks: Uint8Array[]
30 |   combined: Uint8Array
31 | } {
32 |   if (numberOfChunks * sizeInBytes <= 1024 * 1024) {
33 |     throw new Error('numberOfChunks * sizeInBytes must be greater than 1MB, otherwise you don\'t need to use this function.')
34 |   }
35 | 
36 |   const chunks = Array.from({ length: numberOfChunks }, () => createRandomData(sizeInBytes))
37 |   const totalSize = chunks.reduce((acc, chunk) => acc + chunk.length, 0)
38 |   const combined = new Uint8Array(totalSize)
39 | 
40 |   let offset = 0
41 |   for (const chunk of chunks) {
42 |     combined.set(chunk, offset)
43 |     offset += chunk.length
44 |   }
45 | 
46 |   return { chunks, combined }
47 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/dns-answer-fake.ts:
--------------------------------------------------------------------------------
1 | import { stubInterface } from 'sinon-ts'
2 | import type { DNSResponse } from '@multiformats/dns'
3 | 
4 | export function answerFake (data: string, TTL: number, name: string, type: number): DNSResponse {
5 |   const fake = stubInterface<DNSResponse>()
6 |   fake.Answer = [{
7 |     data,
8 |     TTL,
9 |     name,
10 |     type
11 |   }]
12 |   return fake
13 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/get-abortable-promise.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * We need to emulate signal handling (blockBrokers/dnsResolvers/etc should handle abort signals too).
3 |  * This is a simplified version of what the libs we depend on should do, and the
4 |  * tests in this file verify how verified-fetch would handle the failure.
5 |  */
6 | export async function getAbortablePromise <T> (signal?: AbortSignal): Promise<T> {
7 |   return new Promise<T>((resolve, reject) => {
8 |     const timeoutId = setTimeout(() => {
9 |       reject(new Error('timeout while resolving'))
10 |     }, 5000)
11 | 
12 |     signal?.addEventListener('abort', () => {
13 |       clearTimeout(timeoutId)
14 |       reject(new Error('aborted'))
15 |     })
16 |   })
17 | }
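A short sketch of how a test might drive this fixture (illustrative only; the delay is arbitrary). The promise never resolves, so the caller either observes the abort rejection or the five-second timeout:

import { getAbortablePromise } from './get-abortable-promise.js'

const controller = new AbortController()
setTimeout(() => { controller.abort() }, 100)

await getAbortablePromise(controller.signal).catch(err => {
  console.error(err) // Error: aborted
})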
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/get-custom-plugin-factory.ts:
--------------------------------------------------------------------------------
1 | import { BasePlugin } from '../../src/plugins/plugin-base.js'
2 | import type { PluginContext, PluginOptions, VerifiedFetchPluginFactory } from '../../src/plugins/types.js'
3 | 
4 | export interface PluginFixtureOptions {
5 |   codes?: number[]
6 |   constructorName?: string
7 |   id?: string
8 |   canHandle?(context: PluginContext): boolean
9 |   handle?(context: PluginContext): Promise<Response | null>
10 | }
11 | 
12 | export const getCustomPluginFactory = (options: PluginFixtureOptions): VerifiedFetchPluginFactory => {
13 |   const className = options.constructorName ?? 'CustomPlugin'
14 | 
15 |   const classes = {
16 |     [className]: class extends BasePlugin {
17 |       id = options.id ?? options.constructorName?.replace(/([a-z0-9])([A-Z])/g, '$1-$2').toLowerCase() ?? 'custom-plugin'
18 |       codes = options.codes ?? []
19 | 
20 |       canHandle (context: PluginContext): boolean {
21 |         return options.canHandle != null ? options.canHandle(context) : false
22 |       }
23 | 
24 |       async handle (context: PluginContext): Promise<Response | null> {
25 |         if (options.handle != null) {
26 |           return options.handle(context)
27 |         } else {
28 |           throw new Error('Not implemented')
29 |         }
30 |       }
31 |     }
32 |   }
33 | 
34 |   const CustomPlugin = classes[className]
35 | 
36 |   return (pluginOptions: PluginOptions) => new CustomPlugin(pluginOptions)
37 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/ipns-stubs.ts:
--------------------------------------------------------------------------------
1 | import { stubInterface } from 'sinon-ts'
2 | import type { PeerId } from '@libp2p/interface'
3 | import type { IPNSRecord } from 'ipns'
4 | import type { StubbedInstance } from 'sinon-ts'
5 | 
6 | export interface IpnsRecordStubOptions {
7 |   peerId: PeerId
8 |   ttl?: bigint
9 | }
10 | 
11 | /**
12 |  * When stubbing an IPNSRecord, we need to provide a PeerId and some ttl value or else we will get
13 |  * "SyntaxError: Cannot convert stub to a BigInt" when parse-url-string.ts calls `calculateTtl`
14 |  */
15 | export function ipnsRecordStub ({ peerId, ttl }: IpnsRecordStubOptions): StubbedInstance<IPNSRecord> {
16 |   return stubInterface<IPNSRecord>({
17 |     value: peerId.toString(),
18 |     ttl
19 |   })
20 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/make-aborted-request.ts:
--------------------------------------------------------------------------------
1 | import type { VerifiedFetch } from '../../src/verified-fetch.js'
2 | 
3 | export async function makeAbortedRequest (verifiedFetch: VerifiedFetch, [resource, options = {}]: Parameters<VerifiedFetch['fetch']>, promise: Promise<any>): Promise<Response> {
4 |   const controller = new AbortController()
5 |   const resultPromise = verifiedFetch.fetch(resource, {
6 |     ...options,
7 |     signal: controller.signal
8 |   })
9 | 
10 |   void promise.then(() => {
11 |     controller.abort()
12 |   })
13 |   return resultPromise
14 | }
--------------------------------------------------------------------------------
/packages/verified-fetch/test/fixtures/memory-car.ts:
--------------------------------------------------------------------------------
1 | import { CarWriter } from '@ipld/car'
2 | import toBuffer from 'it-to-buffer'
3 | import defer from 'p-defer'
4 | import type { CID } from 'multiformats/cid'
5 | 
6 | export interface MemoryCar extends Pick<CarWriter, 'put' | 'close'> {
7 |   bytes(): Promise<Uint8Array>
8 | }
9 | 
10 | export function memoryCarWriter (root: CID | CID[]): MemoryCar {
11 |   const deferred = defer<Uint8Array>()
12 |   const { writer, out } = CarWriter.create(Array.isArray(root) ? root : [root])
13 | 
14 |   Promise.resolve()
15 |     .then(async () => {
16 |       deferred.resolve(toBuffer(out))
17 |     })
18 |     .catch(err => {
19 |       deferred.reject(err)
20 |     })
21 | 
22 |   return {
23 |     async put (block: { cid: CID, bytes: Uint8Array }): Promise<void> {
24 |       await writer.put(block)
25 |     },
26 |     async close (): Promise<void> {
27 |       await writer.close()
28 |     },
29 |     async bytes (): Promise<Uint8Array> {
30 |       return deferred.promise
31 |     }
32 |   }
33 | }
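A sketch of the typical flow for the memory-car fixture (illustrative only; the block is assumed to come from content the test has already created): put blocks, close the writer so the underlying stream ends, then await the concatenated CAR bytes:

import { memoryCarWriter } from './memory-car.js'
import type { CID } from 'multiformats/cid'

async function carBytesFor (cid: CID, bytes: Uint8Array): Promise<Uint8Array> {
  const car = memoryCarWriter(cid)
  await car.put({ cid, bytes })
  await car.close() // ends the CarWriter stream so bytes() can settle
  return car.bytes()
}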
--------------------------------------------------------------------------------
/packages/verified-fetch/test/get-e-tag.spec.ts:
--------------------------------------------------------------------------------
1 | import { unixfs } from '@helia/unixfs'
2 | import { stop } from '@libp2p/interface'
3 | import { expect } from 'aegir/chai'
4 | import last from 'it-last'
5 | import { CID } from 'multiformats/cid'
6 | import { getETag } from '../src/utils/get-e-tag.js'
7 | import { VerifiedFetch } from '../src/verified-fetch.js'
8 | import { createHelia } from './fixtures/create-offline-helia.js'
9 | import type { Helia } from 'helia'
10 | 
11 | const cidString = 'QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr'
12 | const testCID = CID.parse(cidString)
13 | 
14 | describe('getETag', () => {
15 |   it('CID eTag', () => {
16 |     expect(getETag({ cid: testCID, weak: true })).to.equal(`W/"${cidString}"`)
17 |     expect(getETag({ cid: testCID, weak: false })).to.equal(`"${cidString}"`)
18 |   })
19 | 
20 |   it('should return ETag with CID and format suffix', () => {
21 |     expect(getETag({ cid: testCID, reqFormat: 'raw' })).to.equal(`"${cidString}.raw"`)
22 |     expect(getETag({ cid: testCID, reqFormat: 'json' })).to.equal(`"${cidString}.json"`)
23 |   })
24 | 
25 |   it('should return ETag with CID and range suffix', () => {
26 |     expect(getETag({ cid: testCID, weak: true, reqFormat: 'car', rangeStart: 10, rangeEnd: 20 })).to.equal(`W/"${cidString}.car.10-20"`)
27 |     // weak is false, but it's a car request, so weak is overridden.
28 | expect(getETag({ cid: testCID, weak: false, reqFormat: 'car', rangeStart: 10, rangeEnd: 20 })).to.equal(`W/"${cidString}.car.10-20"`) 29 | }) 30 | 31 | it('should return ETag with CID, format and range suffix', () => { 32 | expect(getETag({ cid: testCID, reqFormat: 'raw', weak: false, rangeStart: 10, rangeEnd: 20 })).to.equal(`"${cidString}.raw.10-20"`) 33 | }) 34 | 35 | it('should handle undefined rangeStart and rangeEnd', () => { 36 | expect(getETag({ cid: testCID, reqFormat: 'raw', weak: false, rangeStart: undefined, rangeEnd: undefined })).to.equal(`"${cidString}.raw"`) 37 | expect(getETag({ cid: testCID, reqFormat: 'raw', weak: false, rangeStart: 55, rangeEnd: undefined })).to.equal(`"${cidString}.raw.55-N"`) 38 | expect(getETag({ cid: testCID, reqFormat: 'raw', weak: false, rangeStart: undefined, rangeEnd: 77 })).to.equal(`"${cidString}.raw.0-77"`) 39 | }) 40 | 41 | it('should handle tar appropriately', () => { 42 | expect(getETag({ 43 | cid: CID.parse('bafkreialihlqnf5uwo4byh4n3cmwlntwqzxxs2fg5vanqdi3d7tb2l5xkm'), 44 | reqFormat: 'tar', 45 | weak: false, 46 | rangeStart: undefined, 47 | rangeEnd: undefined 48 | })).to.equal('W/"bafkreialihlqnf5uwo4byh4n3cmwlntwqzxxs2fg5vanqdi3d7tb2l5xkm.x-tar"') 49 | }) 50 | }) 51 | 52 | describe('getEtagRequest', () => { 53 | let helia: Helia 54 | let verifiedFetch: VerifiedFetch 55 | 56 | beforeEach(async () => { 57 | helia = await createHelia() 58 | verifiedFetch = new VerifiedFetch({ helia }) 59 | }) 60 | 61 | afterEach(async () => { 62 | await stop(helia, verifiedFetch) 63 | }) 64 | 65 | it('should return the proper etag for a verified fetch request', async () => { 66 | const finalRootFileContent = new Uint8Array([0x01, 0x02, 0x03]) 67 | 68 | const fs = unixfs(helia) 69 | const res = await last(fs.addAll([{ 70 | path: 'someFile.foo', 71 | content: finalRootFileContent 72 | }], { 73 | wrapWithDirectory: true 74 | })) 75 | 76 | if (res == null) { 77 | throw new Error('Import failed') 78 | } 79 | 80 | // get actual cid of the file from unixfs 81 | const terminus = await last(fs.ls(res.cid, { path: 'someFile.foo' })) 82 | if (terminus?.cid == null) { 83 | throw new Error('Terminus CID not found') 84 | } 85 | 86 | const response = await verifiedFetch.fetch(`ipfs://${res.cid}/someFile.foo`) 87 | expect(response.headers.get('etag')).to.equal(`"${terminus.cid.toString()}"`) 88 | }) 89 | }) 90 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/get-stream-from-async-iterable.spec.ts: -------------------------------------------------------------------------------- 1 | import { defaultLogger } from '@libp2p/logger' 2 | import { expect } from 'aegir/chai' 3 | import sinon from 'sinon' 4 | import { getStreamFromAsyncIterable } from '../src/utils/get-stream-from-async-iterable.js' 5 | 6 | describe('getStreamFromAsyncIterable', () => { 7 | let onProgressSpy: sinon.SinonSpy 8 | 9 | beforeEach(() => { 10 | onProgressSpy = sinon.spy() 11 | }) 12 | 13 | it('should throw an error if no content is found', async () => { 14 | const iterator = (async function * () { })() 15 | await expect(getStreamFromAsyncIterable(iterator, 'test', defaultLogger())).to.be.rejectedWith('No content found') 16 | }) 17 | 18 | it('should return the correct content type and a readable stream', async () => { 19 | const chunks = new TextEncoder().encode('Hello, world!') 20 | const iterator = (async function * () { yield chunks })() 21 | const { firstChunk, stream } = await getStreamFromAsyncIterable(iterator, 'test.txt', 
defaultLogger(), { onProgress: onProgressSpy })
22 |     expect(firstChunk).to.equal(chunks)
23 |     const reader = stream.getReader()
24 |     const { value } = await reader.read()
25 |     expect(onProgressSpy.callCount).to.equal(1)
26 |     expect(new TextDecoder().decode(value)).to.equal('Hello, world!')
27 |   })
28 | 
29 |   it('should handle multiple chunks of data', async () => {
30 |     const textEncoder = new TextEncoder()
31 |     const chunks = ['Hello,', ' world!'].map((txt) => textEncoder.encode(txt))
32 |     const iterator = (async function * () { yield chunks[0]; yield chunks[1] })()
33 |     const { firstChunk, stream } = await getStreamFromAsyncIterable(iterator, 'test.txt', defaultLogger(), { onProgress: onProgressSpy })
34 |     expect(firstChunk).to.equal(chunks[0])
35 |     const reader = stream.getReader()
36 |     let result = ''
37 |     let chunk
38 |     while (!(chunk = await reader.read()).done) {
39 |       result += new TextDecoder().decode(chunk.value)
40 |     }
41 |     expect(onProgressSpy.callCount).to.equal(2)
42 |     expect(result).to.equal('Hello, world!')
43 |   })
44 | 
45 |   it('should include last value when done is true', async () => {
46 |     // if done === true and there is a value
47 |     const LIMIT = 5
48 |     let actualFirstChunk: Uint8Array
49 |     const iterator: AsyncIterable<Uint8Array> = {
50 |       [Symbol.asyncIterator] () {
51 |         let i = 0
52 |         return {
53 |           async next () {
54 |             const done = i === LIMIT
55 |             const value = new Uint8Array([i++])
56 |             actualFirstChunk = actualFirstChunk ?? value
57 |             return Promise.resolve({ value, done })
58 |           }
59 |         }
60 |       }
61 |     }
62 |     const { firstChunk, stream } = await getStreamFromAsyncIterable(iterator, 'test.txt', defaultLogger(), { onProgress: onProgressSpy })
63 |     // @ts-expect-error - actualFirstChunk is not used before set, because of the await above
64 |     expect(firstChunk).to.equal(actualFirstChunk)
65 |     const reader = stream.getReader()
66 |     const result = []
67 |     let chunk
68 |     while (!(chunk = await reader.read()).done) {
69 |       result.push(...chunk.value)
70 |     }
71 |     expect(onProgressSpy.callCount).to.equal(6)
72 |     expect(result).to.deep.equal([...Array(LIMIT + 1).keys()])
73 |   })
74 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/test/index.spec.ts:
--------------------------------------------------------------------------------
1 | import { createHeliaHTTP } from '@helia/http'
2 | import { expect } from 'aegir/chai'
3 | import { createHelia } from 'helia'
4 | import { createVerifiedFetch, verifiedFetch } from '../src/index.js'
5 | 
6 | describe('createVerifiedFetch', () => {
7 |   it('can be constructed with a HeliaHttp instance', async () => {
8 |     const heliaHttp = await createHeliaHTTP()
9 |     const verifiedFetch = await createVerifiedFetch(heliaHttp)
10 | 
11 |     expect(verifiedFetch).to.be.ok()
12 |     await verifiedFetch.stop()
13 |   })
14 | 
15 |   it('can be constructed with a HeliaP2P instance', async () => {
16 |     const heliaP2P = await createHelia()
17 |     const verifiedFetch = await createVerifiedFetch(heliaP2P)
18 | 
19 |     expect(verifiedFetch).to.be.ok()
20 |     await heliaP2P.stop()
21 |     await verifiedFetch.stop()
22 |   })
23 | 
24 |   it('can be constructed with gateways', async () => {
25 |     const verifiedFetch = await createVerifiedFetch({
26 |       gateways: ['https://127.0.0.1']
27 |     })
28 |     expect(verifiedFetch).to.be.ok()
29 |     await verifiedFetch.stop()
30 |   })
31 | 
32 |   it('can be constructed with gateways & routers', async () => {
33 |     const verifiedFetch = await createVerifiedFetch({
34 |       gateways: ['https://127.0.0.1'],
35 |       routers:
['https://127.0.0.1'] 36 | }) 37 | expect(verifiedFetch).to.be.ok() 38 | await verifiedFetch.stop() 39 | }) 40 | 41 | it('can be constructed with no options', async () => { 42 | const verifiedFetch = await createVerifiedFetch() 43 | 44 | expect(verifiedFetch).to.be.ok() 45 | await verifiedFetch.stop() 46 | }) 47 | 48 | it('can be used as a singleton', () => { 49 | expect(verifiedFetch).to.be.a('function') 50 | expect(verifiedFetch.stop).to.be.a('function') 51 | expect(verifiedFetch.start).to.be.a('function') 52 | }) 53 | }) 54 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/ipns-record.spec.ts: -------------------------------------------------------------------------------- 1 | import { dagCbor } from '@helia/dag-cbor' 2 | import { ipns } from '@helia/ipns' 3 | import { generateKeyPair } from '@libp2p/crypto/keys' 4 | import { stop } from '@libp2p/interface' 5 | import { peerIdFromPrivateKey } from '@libp2p/peer-id' 6 | import { expect } from 'aegir/chai' 7 | import { marshalIPNSRecord, unmarshalIPNSRecord } from 'ipns' 8 | import { VerifiedFetch } from '../src/verified-fetch.js' 9 | import { createHelia } from './fixtures/create-offline-helia.js' 10 | import type { Helia } from '@helia/interface' 11 | import type { IPNS } from '@helia/ipns' 12 | 13 | describe('ipns records', () => { 14 | let helia: Helia 15 | let name: IPNS 16 | let verifiedFetch: VerifiedFetch 17 | 18 | beforeEach(async () => { 19 | helia = await createHelia() 20 | name = ipns(helia) 21 | verifiedFetch = new VerifiedFetch({ 22 | helia 23 | }) 24 | }) 25 | 26 | afterEach(async () => { 27 | await stop(helia, verifiedFetch) 28 | }) 29 | 30 | it('should support fetching a raw IPNS record', async () => { 31 | const obj = { 32 | hello: 'world' 33 | } 34 | const c = dagCbor(helia) 35 | const cid = await c.add(obj) 36 | 37 | const key = await generateKeyPair('Ed25519') 38 | const peerId = peerIdFromPrivateKey(key) 39 | const record = await name.publish(key, cid) 40 | 41 | const resp = await verifiedFetch.fetch(`ipns://${peerId}`, { 42 | headers: { 43 | accept: 'application/vnd.ipfs.ipns-record' 44 | } 45 | }) 46 | expect(resp.status).to.equal(200) 47 | expect(resp.headers.get('content-type')).to.equal('application/vnd.ipfs.ipns-record') 48 | 49 | const buf = new Uint8Array(await resp.arrayBuffer()) 50 | expect(marshalIPNSRecord(record)).to.equalBytes(buf) 51 | 52 | const output = unmarshalIPNSRecord(buf) 53 | expect(output.value).to.deep.equal(`/ipfs/${cid}`) 54 | }) 55 | 56 | it('should reject a request for non-IPNS url', async () => { 57 | const resp = await verifiedFetch.fetch('ipfs://QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv', { 58 | headers: { 59 | accept: 'application/vnd.ipfs.ipns-record' 60 | } 61 | }) 62 | expect(resp.status).to.equal(400) 63 | }) 64 | 65 | it('should reject a request for a DNSLink url', async () => { 66 | const resp = await verifiedFetch.fetch('ipns://ipfs.io', { 67 | headers: { 68 | accept: 'application/vnd.ipfs.ipns-record' 69 | } 70 | }) 71 | expect(resp.status).to.equal(400) 72 | }) 73 | 74 | it('should reject a request for a url with a path component', async () => { 75 | const obj = { 76 | hello: 'world' 77 | } 78 | const c = dagCbor(helia) 79 | const cid = await c.add(obj) 80 | 81 | const key = await generateKeyPair('Ed25519') 82 | const peerId = peerIdFromPrivateKey(key) 83 | await name.publish(key, cid) 84 | 85 | const resp = await verifiedFetch.fetch(`ipns://${peerId}/hello`, { 86 | headers: { 87 | accept: 
'application/vnd.ipfs.ipns-record'
88 |       }
89 |     })
90 |     expect(resp.status).to.equal(400)
91 |   })
92 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/test/parse-resource.spec.ts:
--------------------------------------------------------------------------------
1 | import { generateKeyPair } from '@libp2p/crypto/keys'
2 | import { defaultLogger } from '@libp2p/logger'
3 | import { peerIdFromPrivateKey } from '@libp2p/peer-id'
4 | import { expect } from 'aegir/chai'
5 | import { CID } from 'multiformats/cid'
6 | import sinon from 'sinon'
7 | import { stubInterface } from 'sinon-ts'
8 | import { parseResource } from '../src/utils/parse-resource.js'
9 | import type { IPNS } from '@helia/ipns'
10 | import type { StubbedInstance } from 'sinon-ts'
11 | 
12 | const testCID = CID.parse('QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr')
13 | const key = await generateKeyPair('Ed25519')
14 | const peerId = peerIdFromPrivateKey(key)
15 | 
16 | describe('parseResource', () => {
17 |   it('does not call @helia/ipns for CID', async () => {
18 |     const shouldNotBeCalled1 = sinon.stub().throws(new Error('should not be called'))
19 |     const shouldNotBeCalled2 = sinon.stub().throws(new Error('should not be called'))
20 |     const { cid, path, query, ipfsPath } = await parseResource(testCID, {
21 |       ipns: stubInterface<IPNS>({
22 |         resolveDNSLink: shouldNotBeCalled1,
23 |         resolve: shouldNotBeCalled2
24 |       }),
25 |       logger: defaultLogger()
26 |     })
27 |     expect(shouldNotBeCalled1.called).to.be.false()
28 |     expect(shouldNotBeCalled2.called).to.be.false()
29 |     expect(cid.toString()).to.equal(testCID.toString())
30 |     expect(path).to.equal('')
31 |     expect(query).to.deep.equal({})
32 |     expect(ipfsPath).to.equal(`/ipfs/${testCID.toString()}`)
33 |   })
34 | 
35 |   it('throws an error if given an invalid resource', async () => {
36 |     // @ts-expect-error - purposefully invalid input
37 |     await expect(parseResource({}, stubInterface())).to.be.rejectedWith('Invalid resource.')
38 |   })
39 | 
40 |   describe('ipfsPath', () => {
41 |     let ipnsStub: StubbedInstance<IPNS>
42 | 
43 |     beforeEach(async () => {
44 |       ipnsStub = stubInterface<IPNS>({
45 |         resolveDNSLink: sinon.stub().returns({ cid: testCID }),
46 |         resolve: sinon.stub().returns({ cid: testCID })
47 |       })
48 |     });
49 | 
50 |     [
51 |       // resource without paths
52 |       { resource: testCID, expectedValue: `/ipfs/${testCID}` },
53 |       { resource: `ipfs://${testCID}`, expectedValue: `/ipfs/${testCID}` },
54 |       { resource: `http://example.com/ipfs/${testCID}`, expectedValue: `/ipfs/${testCID}` },
55 |       { resource: `ipns://${peerId}`, expectedValue: `/ipns/${peerId}` },
56 |       { resource: `http://example.com/ipns/${peerId}`, expectedValue: `/ipns/${peerId}` },
57 |       { resource: 'ipns://specs.ipfs.tech', expectedValue: '/ipns/specs.ipfs.tech' },
58 |       { resource: 'http://example.com/ipns/specs.ipfs.tech', expectedValue: '/ipns/specs.ipfs.tech' },
59 |       // resources with paths
60 |       { resource: `ipfs://${testCID}/foobar`, expectedValue: `/ipfs/${testCID}/foobar` },
61 |       { resource: `http://example.com/ipfs/${testCID}/foobar`, expectedValue: `/ipfs/${testCID}/foobar` },
62 |       { resource: `ipns://${peerId}/foobar`, expectedValue: `/ipns/${peerId}/foobar` },
63 |       { resource: `http://example.com/ipns/${peerId}/foobar`, expectedValue: `/ipns/${peerId}/foobar` },
64 |       { resource: 'ipns://specs.ipfs.tech/foobar', expectedValue: '/ipns/specs.ipfs.tech/foobar' },
65 |       { resource: 'http://example.com/ipns/specs.ipfs.tech/foobar', expectedValue: '/ipns/specs.ipfs.tech/foobar' }
66 | ].forEach(({ resource, expectedValue }) => { 67 | it(`should return the correct ipfsPath for "${resource}"`, async () => { 68 | const { ipfsPath } = await parseResource(resource, { 69 | ipns: ipnsStub, 70 | logger: defaultLogger() 71 | }) 72 | 73 | expect(ipfsPath).to.equal(expectedValue) 74 | }) 75 | }) 76 | }) 77 | }) 78 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/utils/get-content-disposition-filename.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'aegir/chai' 2 | import { getContentDispositionFilename } from '../../src/utils/get-content-disposition-filename.js' 3 | 4 | describe('get-content-disposition-filename', () => { 5 | it('should support ascii-only filenames', () => { 6 | expect( 7 | getContentDispositionFilename('foo.txt') 8 | ).to.equal('filename="foo.txt"') 9 | }) 10 | 11 | it('should remove non-ascii characters from filenames', () => { 12 | expect( 13 | // spell-checker: disable-next-line 14 | getContentDispositionFilename('testтест.jpg') 15 | ).to.equal('filename="test____.jpg"; filename*=UTF-8\'\'test%D1%82%D0%B5%D1%81%D1%82.jpg') 16 | }) 17 | }) 18 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/utils/handle-redirects.spec.ts: -------------------------------------------------------------------------------- 1 | import { prefixLogger } from '@libp2p/logger' 2 | import { expect } from 'aegir/chai' 3 | import { CID } from 'multiformats/cid' 4 | import Sinon from 'sinon' 5 | import { getRedirectResponse } from '../../src/utils/handle-redirects.js' 6 | 7 | const logger = prefixLogger('test:handle-redirects') 8 | describe('handle-redirects', () => { 9 | describe('getRedirectResponse', () => { 10 | const sandbox = Sinon.createSandbox() 11 | const cid = CID.parse('bafkqabtimvwgy3yk') 12 | 13 | let fetchStub: Sinon.SinonStub 14 | 15 | beforeEach(() => { 16 | fetchStub = sandbox.stub(globalThis, 'fetch') 17 | }) 18 | 19 | afterEach(() => { 20 | sandbox.restore() 21 | }) 22 | 23 | const nullResponses = [ 24 | { resource: cid, options: {}, logger, cid, testTitle: 'should return null if resource is not a string' }, 25 | { resource: 'http://ipfs.io/ipfs/bafkqabtimvwgy3yk', options: undefined, logger, cid, testTitle: 'should return null if options is undefined' }, 26 | { resource: 'ipfs://', options: {}, logger, cid, testTitle: 'should return null for ipfs:// protocol urls' }, 27 | { resource: 'ipns://', options: {}, logger, cid, testTitle: 'should return null for ipns:// protocol urls' } 28 | ] 29 | 30 | nullResponses.forEach(({ resource, options, logger, cid, testTitle }) => { 31 | it(testTitle, async () => { 32 | const response = await getRedirectResponse({ resource, options, logger, cid }) 33 | expect(response).to.be.null() 34 | }) 35 | }) 36 | 37 | it('should attempt to get the current host from the headers', async () => { 38 | const resource = 'http://ipfs.io/ipfs/bafkqabtimvwgy3yk' 39 | const options = { headers: new Headers({ 'x-forwarded-host': 'localhost:3931' }) } 40 | fetchStub.returns(Promise.resolve(new Response(null, { status: 200 }))) 41 | 42 | const response = await getRedirectResponse({ resource, options, logger, cid, fetch: fetchStub }) 43 | expect(fetchStub.calledOnce).to.be.true() 44 | expect(response).to.not.be.null() 45 | expect(response).to.have.property('status', 301) 46 | const location = response?.headers.get('location') 47 | 
expect(location).to.equal('http://bafkqabtimvwgy3yk.ipfs.localhost:3931/')
48 |     })
49 | 
50 |     it('should return redirect response to requested host with trailing slash when HEAD fetch fails', async () => {
51 |       const resource = 'http://ipfs.io/ipfs/bafkqabtimvwgy3yk'
52 |       const options = { headers: new Headers({ 'x-forwarded-host': 'localhost:3931' }) }
53 |       fetchStub.returns(new Response(null, { status: 404 }))
54 | 
55 |       const response = await getRedirectResponse({ resource, options, logger, cid, fetch: fetchStub })
56 |       expect(fetchStub.calledOnce).to.be.true()
57 |       expect(response).to.not.be.null()
58 |       expect(response).to.have.property('status', 301)
59 |       const location = response?.headers.get('location')
60 |       // note that the URL returned in the location header has a trailing slash
61 |       expect(location).to.equal('http://ipfs.io/ipfs/bafkqabtimvwgy3yk/')
62 |     })
63 | 
64 |     it('should not return redirect response to x-forwarded-host if HEAD fetch fails', async () => {
65 |       const resource = 'http://ipfs.io/ipfs/bafkqabtimvwgy3yk/file.txt'
66 |       const options = { headers: new Headers({ 'x-forwarded-host': 'localhost:3931' }) }
67 |       fetchStub.returns(new Response(null, { status: 404 }))
68 | 
69 |       const response = await getRedirectResponse({ resource, options, logger, cid, fetch: fetchStub })
70 |       expect(fetchStub.calledOnce).to.be.true()
71 |       expect(response).to.be.null()
72 |     })
73 | 
74 |     it('should not return redirect response to x-forwarded-host when HEAD fetch fails and trailing slash already exists', async () => {
75 |       const resource = 'http://ipfs.io/ipfs/bafkqabtimvwgy3yk/'
76 |       const options = { headers: new Headers({ 'x-forwarded-host': 'localhost:3931' }) }
77 |       fetchStub.returns(new Response(null, { status: 404 }))
78 | 
79 |       const response = await getRedirectResponse({ resource, options, logger, cid, fetch: fetchStub })
80 |       expect(fetchStub.calledOnce).to.be.true()
81 |       expect(response).to.be.null()
82 |     })
83 |   })
84 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/test/utils/request-headers.spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'aegir/chai'
2 | import { getHeader, calculateByteRangeIndexes } from '../../src/utils/request-headers.js'
3 | 
4 | describe('request-headers', () => {
5 |   describe('getHeader', () => {
6 |     it('should return undefined when headers are undefined', () => {
7 |       expect(getHeader(undefined, 'dummy')).to.be.undefined()
8 |       expect(getHeader(new Headers(), 'dummy')).to.be.undefined()
9 |       expect(getHeader({}, 'dummy')).to.be.undefined()
10 |       expect(getHeader([], 'dummy')).to.be.undefined()
11 |     })
12 | 
13 |     it('should return correct header value for Headers instance', () => {
14 |       const headers = new Headers({ Dummy: 'value' })
15 |       expect(getHeader(headers, 'Dummy')).to.equal('value')
16 |       expect(getHeader(headers, 'dummy')).to.equal('value')
17 |     })
18 | 
19 |     it('should return correct header value for array of tuples', () => {
20 |       const headers: Array<[string, string]> = [['Dummy', 'value']]
21 |       expect(getHeader(headers, 'Dummy')).to.equal('value')
22 |       expect(getHeader(headers, 'dummy')).to.equal('value')
23 |     })
24 | 
25 |     it('should return correct header value for record', () => {
26 |       const headers: Record<string, string> = { Dummy: 'value' }
27 |       expect(getHeader(headers, 'Dummy')).to.equal('value')
28 |       expect(getHeader(headers, 'dummy')).to.equal('value')
29 |     })
30 |   })
31 | 
32 |   describe('calculateByteRangeIndexes', () => {
33 |
const testCases = [ 34 | // Range: bytes=5- 35 | { start: 5, end: undefined, fileSize: 10, expected: { byteSize: 5, start: 5, end: 9 } }, 36 | // Range: bytes=-5 37 | { start: undefined, end: 5, fileSize: 10, expected: { byteSize: 5, start: 5, end: 9 } }, 38 | // Range: bytes=0-0 39 | { start: 0, end: 0, fileSize: 10, expected: { byteSize: 1, start: 0, end: 0 } }, 40 | // Range: bytes=5- with unknown filesize 41 | { start: 5, end: undefined, fileSize: undefined, expected: { start: 5 } }, 42 | // Range: bytes=-5 with unknown filesize 43 | { start: undefined, end: 5, fileSize: undefined, expected: { end: 5 } }, 44 | // Range: bytes=0-0 with unknown filesize 45 | { start: 0, end: 0, fileSize: undefined, expected: { byteSize: 1, start: 0, end: 0 } }, 46 | // Range: bytes=-9 & fileSize=11 47 | { start: undefined, end: 9, fileSize: 11, expected: { byteSize: 9, start: 2, end: 10 } }, 48 | // Range: bytes=-11 & fileSize=11 49 | { start: undefined, end: 11, fileSize: 11, expected: { byteSize: 11, start: 0, end: 10 } }, 50 | // Range: bytes=-2 & fileSize=11 51 | { start: undefined, end: 2, fileSize: 11, expected: { byteSize: 2, start: 9, end: 10 } }, 52 | // Range request with no range (added for coverage) 53 | { start: undefined, end: undefined, fileSize: 10, expected: { byteSize: 10, start: 0, end: 9 } } 54 | 55 | ] 56 | testCases.forEach(({ start, end, fileSize, expected }) => { 57 | it(`should return expected result for bytes=${start ?? ''}-${end ?? ''} and fileSize=${fileSize}`, () => { 58 | expect(calculateByteRangeIndexes(start, end, fileSize)).to.deep.equal(expected) 59 | }) 60 | }) 61 | it('throws error for invalid range', () => { 62 | expect(() => calculateByteRangeIndexes(5, 4, 10)).to.throw('Invalid range: Range-start index is greater than range-end index.') 63 | expect(() => calculateByteRangeIndexes(5, 11, 11)).to.throw('Invalid range: Range-end index is greater than or equal to the size of the file.') 64 | expect(() => calculateByteRangeIndexes(undefined, 12, 11)).to.throw('Invalid range: Range-end index is greater than the size of the file.') 65 | expect(() => calculateByteRangeIndexes(-1, 5, 10)).to.throw('Invalid range: Range-start index cannot be negative.') 66 | }) 67 | }) 68 | }) 69 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/utils/resource-to-cache-key.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'aegir/chai' 2 | import { CID } from 'multiformats/cid' 3 | import { resourceToSessionCacheKey } from '../../src/utils/resource-to-cache-key.js' 4 | 5 | describe('resource-to-cache-key', () => { 6 | it('converts url with IPFS path', () => { 7 | expect(resourceToSessionCacheKey('https://localhost:8080/ipfs/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA')) 8 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 9 | }) 10 | 11 | it('converts url with IPFS path and resource path', () => { 12 | expect(resourceToSessionCacheKey('https://localhost:8080/ipfs/QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA/foo/bar/baz.txt')) 13 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 14 | }) 15 | 16 | it('converts url with IPNS path', () => { 17 | expect(resourceToSessionCacheKey('https://localhost:8080/ipns/ipfs.io')) 18 | .to.equal('ipns://ipfs.io') 19 | }) 20 | 21 | it('converts url with IPNS path and resource path', () => { 22 | expect(resourceToSessionCacheKey('https://localhost:8080/ipns/ipfs.io/foo/bar/baz.txt')) 23 | 
.to.equal('ipns://ipfs.io') 24 | }) 25 | 26 | it('converts IPFS subdomain', () => { 27 | expect(resourceToSessionCacheKey('https://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA.ipfs.localhost:8080')) 28 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 29 | }) 30 | 31 | it('converts IPFS subdomain with path', () => { 32 | expect(resourceToSessionCacheKey('https://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA.ipfs.localhost:8080/foo/bar/baz.txt')) 33 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 34 | }) 35 | 36 | it('converts IPNS subdomain', () => { 37 | expect(resourceToSessionCacheKey('https://ipfs.io.ipns.localhost:8080')) 38 | .to.equal('ipns://ipfs.io') 39 | }) 40 | 41 | it('converts IPNS subdomain with resource path', () => { 42 | expect(resourceToSessionCacheKey('https://ipfs.io.ipns.localhost:8080/foo/bar/baz.txt')) 43 | .to.equal('ipns://ipfs.io') 44 | }) 45 | 46 | it('converts CID', () => { 47 | expect(resourceToSessionCacheKey(CID.parse('QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA'))) 48 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 49 | }) 50 | 51 | it('converts CID string', () => { 52 | expect(resourceToSessionCacheKey('QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA')) 53 | .to.equal('ipfs://QmNnooDu7bfjPFoTZYxMNLWUQJyrVwtbZg5gBMjTezGAJA') 54 | }) 55 | }) 56 | -------------------------------------------------------------------------------- /packages/verified-fetch/test/utils/response-headers.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'aegir/chai' 2 | import { getContentRangeHeader } from '../../src/utils/response-headers.js' 3 | 4 | describe('response-headers', () => { 5 | describe('getContentRangeHeader', () => { 6 | it('should return correct content range header when all options are set', () => { 7 | const byteStart = 0 8 | const byteEnd = 500 9 | const byteSize = 1000 10 | expect(getContentRangeHeader({ byteStart, byteEnd, byteSize })).to.equal(`bytes ${byteStart}-${byteEnd}/${byteSize}`) 11 | }) 12 | 13 | it('should return correct content range header when only byteEnd and byteSize are provided', () => { 14 | expect(getContentRangeHeader({ byteStart: undefined, byteEnd: 9, byteSize: 11 })).to.equal('bytes 2-10/11') 15 | }) 16 | 17 | it('should return correct content range header when only byteStart and byteSize are provided', () => { 18 | expect(getContentRangeHeader({ byteStart: 5, byteEnd: undefined, byteSize: 11 })).to.equal('bytes 5-10/11') 19 | }) 20 | 21 | it('should return correct content range header when only byteStart is provided', () => { 22 | expect(getContentRangeHeader({ byteStart: 500, byteEnd: undefined, byteSize: undefined })).to.equal('bytes */*') 23 | }) 24 | 25 | it('should return correct content range header when only byteEnd is provided', () => { 26 | expect(getContentRangeHeader({ byteStart: undefined, byteEnd: 500, byteSize: undefined })).to.equal('bytes */*') 27 | }) 28 | 29 | it('should return content range header with when only byteSize is provided', () => { 30 | expect(getContentRangeHeader({ byteStart: undefined, byteEnd: undefined, byteSize: 50 })).to.equal('bytes */50') 31 | }) 32 | 33 | it('should not allow range-end to equal or exceed the size of the file', () => { 34 | expect(() => getContentRangeHeader({ byteStart: 0, byteEnd: 11, byteSize: 11 })).to.throw('Invalid range') // byteEnd is equal to byteSize 35 | expect(() => getContentRangeHeader({ byteStart: undefined, byteEnd: 11, byteSize: 11 
})).to.throw('Invalid range') // byteEnd is equal to byteSize
36 |       expect(() => getContentRangeHeader({ byteStart: undefined, byteEnd: 12, byteSize: 11 })).to.throw('Invalid range') // byteEnd is greater than byteSize
37 |       expect(() => getContentRangeHeader({ byteStart: 11, byteEnd: undefined, byteSize: 11 })).to.throw('Invalid range') // byteStart is equal to byteSize
38 |     })
39 |   })
40 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/test/utils/select-output-type.spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'aegir/chai'
2 | import { selectOutputType } from '../../src/utils/select-output-type.js'
3 | import { cids } from '../fixtures/cids.js'
4 | 
5 | describe('select-output-type', () => {
6 |   it('should return undefined if no accept header passed', () => {
7 |     const format = selectOutputType(cids.file)
8 | 
9 |     expect(format).to.be.undefined()
10 |   })
11 | 
12 |   it('should override query format with Accept header if available', () => {
13 |     const format = selectOutputType(cids.file, 'application/vnd.ipld.car')
14 | 
15 |     expect(format).to.equal('application/vnd.ipld.car')
16 |   })
17 | 
18 |   it('should match accept headers with equal weighting in definition order', () => {
19 |     const format = selectOutputType(cids.file, 'application/x-tar, */*')
20 | 
21 |     expect(format).to.equal('application/x-tar')
22 |   })
23 | 
24 |   it('should match accept headers in weighting order', () => {
25 |     const format = selectOutputType(cids.file, 'application/x-tar;q=0.1, application/octet-stream;q=0.5, text/html')
26 | 
27 |     expect(format).to.equal('application/octet-stream')
28 |   })
29 | 
30 |   it('should support partial type wildcard', () => {
31 |     const format = selectOutputType(cids.file, '*/json')
32 | 
33 |     expect(format).to.equal('application/json')
34 |   })
35 | 
36 |   it('should support partial subtype wildcard', () => {
37 |     const format = selectOutputType(cids.file, 'application/*')
38 | 
39 |     expect(format).to.equal('application/octet-stream')
40 |   })
41 | })
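The 'weighting order' case above ties back to parseQFactor in select-output-type.ts: the header 'application/x-tar;q=0.1, application/octet-stream;q=0.5, text/html' sorts to text/html (implicit q=1), then application/octet-stream (q=0.5), then application/x-tar (q=0.1), and because text/html is not a producible mime type for a dag-pb CID, the q=0.5 entry wins. A worked restatement of that test (assuming the same dag-pb fixture CID):

import { selectOutputType } from '../../src/utils/select-output-type.js'
import { cids } from '../fixtures/cids.js'

// text/html is preferred by weight but unavailable for dag-pb, so the next
// heaviest acceptable type is chosen
console.log(selectOutputType(cids.file, 'application/x-tar;q=0.1, application/octet-stream;q=0.5, text/html'))
// -> 'application/octet-stream'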
--------------------------------------------------------------------------------
/packages/verified-fetch/test/utils/server-timing.spec.ts:
--------------------------------------------------------------------------------
1 | import { expect } from 'aegir/chai'
2 | import { createVerifiedFetch } from '../../src/index.js'
3 | import { serverTiming } from '../../src/utils/server-timing.js'
4 | import { createHelia } from '../fixtures/create-offline-helia.js'
5 | import type { VerifiedFetch } from '../../src/index.js'
6 | import type { ServerTimingResult } from '../../src/utils/server-timing.js'
7 | 
8 | describe('serverTiming', () => {
9 |   it('should return a success object with the correct header and no error', async () => {
10 |     const name = 'testSuccess'
11 |     const description = 'Testing success case'
12 |     const mockValue = 42
13 |     const fn = async (): Promise<number> => {
14 |       await new Promise(resolve => setTimeout(resolve, 10))
15 |       return mockValue
16 |     }
17 | 
18 |     const result: ServerTimingResult<number> = await serverTiming(name, description, fn)
19 | 
20 |     expect(result.error).to.be.null()
21 |     expect(result.result).to.equal(mockValue)
22 |     expect(result.header).to.be.a('string')
23 | 
24 |     const [timingName, timingDuration, timingDesc] = result.header.split(';')
25 |     expect(timingName).to.equal(name)
26 |     expect(timingDuration).to.match(/^dur=\d+(\.\d)?$/)
27 |     expect(timingDesc).to.equal(`desc="${description}"`)
28 |   })
29 | 
30 |   it('should return an error object with the correct header when fn throws', async () => {
31 |     const name = 'testError'
32 |     const description = 'Testing error case'
33 |     const testError = new Error('Test failure')
34 |     const fn = async (): Promise<never> => {
35 |       throw testError
36 |     }
37 | 
38 |     const result: ServerTimingResult<never> = await serverTiming(name, description, fn)
39 | 
40 |     expect(result.result).to.be.null()
41 |     expect(result.error).to.equal(testError)
42 |     expect(result.header).to.be.a('string')
43 | 
44 |     const [timingName, timingDuration, timingDesc] = result.header.split(';')
45 |     expect(timingName).to.equal(name)
46 |     expect(timingDuration).to.match(/^dur=\d+(\.\d)?$/)
47 |     expect(timingDesc).to.equal(`desc="${description}"`)
48 |   })
49 | 
50 |   /**
51 |    * This test checks that the duration is > 0, verifying that
52 |    * we are measuring time between start and end.
53 |    */
54 |   it('should measure elapsed time accurately', async () => {
55 |     const name = 'testTiming'
56 |     const description = 'Testing timing measurement'
57 |     const fn = async (): Promise<string> => {
58 |       await new Promise(resolve => setTimeout(resolve, 20))
59 |       return 'timing-check'
60 |     }
61 | 
62 |     const result: ServerTimingResult<string> = await serverTiming(name, description, fn)
63 |     expect(result.error).to.be.null()
64 |     expect(result.result).to.equal('timing-check')
65 | 
66 |     const [, timingDuration] = result.header.split(';')
67 |     const durationValue = Number(timingDuration.replace('dur=', ''))
68 |     // round durationValue to the nearest 10ms - on Windows and Firefox a delay of 20ms returns ~19.x ms
69 |     expect(Math.round(durationValue / 10) * 10).to.be.greaterThanOrEqual(20).and.lessThanOrEqual(30)
70 |   })
71 | 
72 |   describe('serverTiming with verified-fetch', () => {
73 |     let vFetch: VerifiedFetch
74 |     before(async () => {
75 |       vFetch = await createVerifiedFetch(await createHelia())
76 |     })
77 | 
78 |     it('response does not include server timing by default', async () => {
79 |       const response = await vFetch('https://example.com')
80 |       expect(response.headers.get('Server-Timing')).to.be.null()
81 |     })
82 | 
83 |     it('can include one-off server timing headers in response', async () => {
84 |       const response = await vFetch('https://example.com', {
85 |         withServerTiming: true
86 |       })
87 |       expect(response.headers.get('Server-Timing')).to.be.a('string')
88 |     })
89 |   })
90 | })
--------------------------------------------------------------------------------
/packages/verified-fetch/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "aegir/src/config/tsconfig.aegir.json",
3 |   "compilerOptions": {
4 |     "outDir": "dist"
5 |   },
6 |   "include": [
7 |     "src",
8 |     "test"
9 |   ]
10 | }
11 | 
--------------------------------------------------------------------------------
/packages/verified-fetch/typedoc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "entryPoints": [
3 |     "./src/index.ts",
4 |     "./src/plugins/plugins.ts"
5 |   ]
6 | }
7 | 
--------------------------------------------------------------------------------
/typedoc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "$schema": "https://typedoc.org/schema.json",
3 |   "name": "Helia Verified Fetch",
4 |   "exclude": [
5 |     "packages/interop"
6 |   ]
7 | }
8 | 
--------------------------------------------------------------------------------