├── .cspell.json
├── .github
│   ├── dependabot.yml
│   ├── dictionary.txt
│   ├── pull_request_template.md
│   └── workflows
│       ├── gateway-conformance.yml
│       ├── generated-pr.yml
│       ├── js-test-and-release.yml
│       ├── semantic-pull-request.yml
│       └── stale.yml
├── .gitignore
├── CHANGELOG.md
├── CODEOWNERS
├── LICENSE
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── package.json
├── packages
│   ├── gateway-conformance
│   │   ├── .aegir.js
│   │   ├── .gitignore
│   │   ├── CHANGELOG.md
│   │   ├── CODE_OF_CONDUCT.md
│   │   ├── LICENSE-APACHE
│   │   ├── LICENSE-MIT
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── bin.ts
│   │   │   ├── conformance.spec.ts
│   │   │   ├── constants.ts
│   │   │   ├── demo-server.ts
│   │   │   ├── expected-failing-tests.json
│   │   │   ├── expected-passing-tests.json
│   │   │   ├── fixtures
│   │   │   │   ├── basic-server.ts
│   │   │   │   ├── create-kubo.ts
│   │   │   │   ├── create-verified-fetch.ts
│   │   │   │   ├── get-local-dns-resolver.ts
│   │   │   │   ├── header-utils.ts
│   │   │   │   ├── ipns-record-datastore.ts
│   │   │   │   └── kubo-mgmt.ts
│   │   │   ├── get-report-details.ts
│   │   │   ├── get-tests-to-run.ts
│   │   │   ├── get-tests-to-skip.ts
│   │   │   ├── get-wontfix-tests.ts
│   │   │   ├── index.ts
│   │   │   └── update-expected-tests.ts
│   │   ├── tsconfig.json
│   │   └── typedoc.json
│   ├── interop
│   │   ├── .aegir.js
│   │   ├── CHANGELOG.md
│   │   ├── CODE_OF_CONDUCT.md
│   │   ├── LICENSE-APACHE
│   │   ├── LICENSE-MIT
│   │   ├── README.md
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── abort-handling.spec.ts
│   │   │   ├── bin.ts
│   │   │   ├── direct-retrieval.spec.ts
│   │   │   ├── fixtures
│   │   │   │   ├── create-kubo.ts
│   │   │   │   ├── data
│   │   │   │   │   ├── QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car
│   │   │   │   │   ├── QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car
│   │   │   │   │   ├── QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car
│   │   │   │   │   ├── QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car
│   │   │   │   │   ├── bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car
│   │   │   │   │   ├── bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car
│   │   │   │   │   ├── gateway-conformance-fixtures.car
│   │   │   │   │   └── k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record
│   │   │   │   └── load-fixtures.ts
│   │   │   ├── index.ts
│   │   │   ├── ipns.spec.ts
│   │   │   ├── json.spec.ts
│   │   │   ├── unixfs-dir.spec.ts
│   │   │   └── websites.spec.ts
│   │   ├── tsconfig.json
│   │   └── typedoc.json
│   └── verified-fetch
│       ├── .aegir.js
│       ├── CHANGELOG.md
│       ├── CODE_OF_CONDUCT.md
│       ├── LICENSE-APACHE
│       ├── LICENSE-MIT
│       ├── README.md
│       ├── package.json
│       ├── src
│       │   ├── errors.ts
│       │   ├── index.ts
│       │   ├── plugins
│       │   │   ├── errors.ts
│       │   │   ├── index.ts
│       │   │   ├── plugin-base.ts
│       │   │   ├── plugin-handle-byte-range-context.ts
│       │   │   ├── plugin-handle-car.ts
│       │   │   ├── plugin-handle-dag-cbor.ts
│       │   │   ├── plugin-handle-dag-pb.ts
│       │   │   ├── plugin-handle-dag-walk.ts
│       │   │   ├── plugin-handle-dir-index-html.ts
│       │   │   ├── plugin-handle-ipns-record.ts
│       │   │   ├── plugin-handle-json.ts
│       │   │   ├── plugin-handle-raw.ts
│       │   │   ├── plugin-handle-tar.ts
│       │   │   ├── plugins.ts
│       │   │   └── types.ts
│       │   ├── singleton.ts
│       │   ├── types.ts
│       │   ├── utils
│       │   │   ├── byte-range-context.ts
│       │   │   ├── content-type-parser.ts
│       │   │   ├── dag-cbor-to-safe-json.ts
│       │   │   ├── dir-index-html.ts
│       │   │   ├── get-content-disposition-filename.ts
│       │   │   ├── get-content-type.ts
│       │   │   ├── get-e-tag.ts
│       │   │   ├── get-peer-id-from-string.ts
│       │   │   ├── get-resolved-accept-header.ts
│       │   │   ├── get-stream-from-async-iterable.ts
│       │   │   ├── get-tar-stream.ts
│       │   │   ├── handle-redirects.ts
│       │   │   ├── is-accept-explicit.ts
│       │   │   ├── libp2p-defaults.browser.ts
│       │   │   ├── libp2p-defaults.ts
│       │   │   ├── libp2p-types.ts
│       │   │   ├── parse-resource.ts
│       │   │   ├── parse-url-string.ts
│       │   │   ├── request-headers.ts
│       │   │   ├── resource-to-cache-key.ts
│       │   │   ├── response-headers.ts
│       │   │   ├── responses.ts
│       │   │   ├── select-output-type.ts
│       │   │   ├── server-timing.ts
│       │   │   ├── tlru.ts
│       │   │   ├── type-guards.ts
│       │   │   └── walk-path.ts
│       │   └── verified-fetch.ts
│       ├── test
│       │   ├── abort-handling.spec.ts
│       │   ├── accept-header.spec.ts
│       │   ├── cache-control-header.spec.ts
│       │   ├── car.spec.ts
│       │   ├── content-type-parser.spec.ts
│       │   ├── custom-dns-resolvers.spec.ts
│       │   ├── fixtures
│       │   │   ├── cids.ts
│       │   │   ├── create-offline-helia.ts
│       │   │   ├── create-random-data-chunks.ts
│       │   │   ├── dns-answer-fake.ts
│       │   │   ├── get-abortable-promise.ts
│       │   │   ├── get-custom-plugin-factory.ts
│       │   │   ├── ipns-stubs.ts
│       │   │   ├── make-aborted-request.ts
│       │   │   └── memory-car.ts
│       │   ├── get-e-tag.spec.ts
│       │   ├── get-stream-from-async-iterable.spec.ts
│       │   ├── index.spec.ts
│       │   ├── ipns-record.spec.ts
│       │   ├── parse-resource.spec.ts
│       │   ├── plugins.spec.ts
│       │   ├── range-requests.spec.ts
│       │   ├── tar.spec.ts
│       │   ├── utils
│       │   │   ├── byte-range-context.spec.ts
│       │   │   ├── get-content-disposition-filename.spec.ts
│       │   │   ├── handle-redirects.spec.ts
│       │   │   ├── parse-url-string.spec.ts
│       │   │   ├── request-headers.spec.ts
│       │   │   ├── resource-to-cache-key.spec.ts
│       │   │   ├── response-headers.spec.ts
│       │   │   ├── select-output-type.spec.ts
│       │   │   └── server-timing.spec.ts
│       │   └── verified-fetch.spec.ts
│       ├── tsconfig.json
│       └── typedoc.json
└── typedoc.json
/.cspell.json:
--------------------------------------------------------------------------------
1 | {
2 |   "import": [
3 |     "./node_modules/aegir/cspell.json"
4 |   ],
5 |   "dictionaries": ["project"],
6 |   "dictionaryDefinitions": [{
7 |     "name": "project",
8 |     "path": "./.github/dictionary.txt",
9 |     "addWords": true
10 |   }]
11 | }
12 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: npm
4 |     directories:
5 |       - "/"
6 |     schedule:
7 |       interval: daily
8 |       time: "10:00"
9 |     open-pull-requests-limit: 20
10 |     commit-message:
11 |       prefix: "deps"
12 |       prefix-development: "chore"
13 |     groups:
14 |       helia-deps: # group all deps that should be updated when Helia deps need updating
15 |         patterns:
16 |           - "*helia*"
17 |           - "*libp2p*"
18 |           - "*multiformats*"
19 |       store-deps: # group all blockstore and datastore updates (interface & impl)
20 |         patterns:
21 |           - "*blockstore*"
22 |           - "*datastore*"
23 |       kubo-deps: # group kubo, kubo-rpc-client, and ipfsd-ctl updates
24 |         patterns:
25 |           - "*kubo*"
26 |           - "ipfsd-ctl"
27 |   - package-ecosystem: "github-actions"
28 |     directory: "/"
29 |     schedule:
30 |       interval: "weekly"
31 |     commit-message:
32 |       prefix: chore
33 |
--------------------------------------------------------------------------------
/.github/dictionary.txt:
--------------------------------------------------------------------------------
1 | ENOENT
2 | GOPATH
3 | mgmt
4 | gotest
5 | XKCD
6 | filetypemime
7 | fleek
8 | msword
9 | msdownload
10 | powerpoint
11 | noopener
12 | noreferrer
13 | nosniff
14 | Segoe
15 | Cantarell
16 | Noto
17 | Consolas
18 | filev
19 | byteranges
20 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Title
2 |
3 |
9 |
10 | ## Description
11 |
12 |
18 |
19 | ## Notes & open questions
20 |
21 |
24 |
25 | ## Change checklist
26 |
27 | - [ ] I have performed a self-review of my own code
28 | - [ ] I have made corresponding changes to the documentation if necessary (this includes comments as well)
29 | - [ ] I have added tests that prove my fix is effective or that my feature works
30 |
--------------------------------------------------------------------------------
/.github/workflows/gateway-conformance.yml:
--------------------------------------------------------------------------------
1 | name: Gateway Conformance
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |   workflow_dispatch:
9 |
10 | jobs:
11 |   gateway-conformance:
12 |     runs-on: ubuntu-latest
13 |     steps:
14 |       # 1. Setup Node, install npm dependencies, and build all packages/*
15 |       # see https://github.com/ipdxco/unified-github-workflows/blob/3a1a7870ce5967163d8f5c8210b8ad50b2e659aa/.github/workflows/js-test-and-release.yml#L28-L34
16 |       - uses: actions/checkout@v4
17 |       - uses: actions/setup-node@v4
18 |         with:
19 |           node-version: lts/*
20 |       - uses: ipfs/aegir/actions/cache-node-modules@main
21 |
22 |       # 2. Set up 'go' so we can install the gateway-conformance binary
23 |       - name: Setup Go
24 |         uses: actions/setup-go@v5
25 |         with:
26 |           go-version: 1.22.x
27 |
28 |       # 3. Download the gateway-conformance fixtures using ipfs/gateway-conformance action
29 |       # This will prevent us from needing to install `docker` on the github runner
30 |       - name: Download gateway-conformance fixtures
31 |         uses: ipfs/gateway-conformance/.github/actions/extract-fixtures@v0.7
32 |         # working-directory: ./packages/gateway-conformance
33 |         with:
34 |           output: ./packages/gateway-conformance/dist/src/fixtures/data/gateway-conformance-fixtures
35 |
36 |
37 |       # 4. Run the tests
38 |       - name: Run gateway-conformance tests
39 |         run: |
40 |           npm run test
41 |         working-directory: ./packages/gateway-conformance
42 |
43 |       # 5. Convert json output to reports similar to how it's done at https://github.com/ipfs/gateway-conformance/blob/main/.github/actions/test/action.yml
44 |       # the 'gwc-report-all.json' file is created by the 'has expected total failures and successes' test
45 |       # TODO: remove this when we're passing enough tests to use the 'ipfs/gateway-conformance/.github/actions/test' action
46 |       - name: Create the XML
47 |         if: failure() || success()
48 |         uses: pl-strflt/gotest-json-to-junit-xml@v1
49 |         with:
50 |           input: ./packages/gateway-conformance/gwc-report-all.json
51 |           output: ./packages/gateway-conformance/gwc-report-all.xml
52 |       - name: Create the HTML
53 |         if: failure() || success()
54 |         uses: pl-strflt/junit-xml-to-html@v1
55 |         with:
56 |           mode: no-frames
57 |           input: ./packages/gateway-conformance/gwc-report-all.xml
58 |           output: ./packages/gateway-conformance/gwc-report-all.html
59 |       - name: Create the Markdown
60 |         if: failure() || success()
61 |         uses: pl-strflt/junit-xml-to-html@v1
62 |         with:
63 |           mode: summary
64 |           input: ./packages/gateway-conformance/gwc-report-all.xml
65 |           output: ./packages/gateway-conformance/gwc-report-all.md
66 |
67 |       # 6. Upload the reports
68 |       - name: Upload MD summary
69 |         if: failure() || success()
70 |         run: cat ./packages/gateway-conformance/gwc-report-all.md >> $GITHUB_STEP_SUMMARY
71 |       - name: Upload HTML report
72 |         if: failure() || success()
73 |         uses: actions/upload-artifact@v4
74 |         with:
75 |           name: gateway-conformance.html
76 |           path: ./packages/gateway-conformance/gwc-report-all.html
77 |       - name: Upload JSON report
78 |         if: failure() || success()
79 |         uses: actions/upload-artifact@v4
80 |         with:
81 |           name: gateway-conformance.json
82 |           path: ./packages/gateway-conformance/gwc-report-all.json
83 |
--------------------------------------------------------------------------------
/.github/workflows/generated-pr.yml:
--------------------------------------------------------------------------------
1 | name: Close Generated PRs
2 |
3 | on:
4 |   schedule:
5 |     - cron: '0 0 * * *'
6 |   workflow_dispatch:
7 |
8 | permissions:
9 |   issues: write
10 |   pull-requests: write
11 |
12 | jobs:
13 |   stale:
14 |     uses: ipdxco/unified-github-workflows/.github/workflows/reusable-generated-pr.yml@v1
15 |
--------------------------------------------------------------------------------
/.github/workflows/js-test-and-release.yml:
--------------------------------------------------------------------------------
1 | name: test & maybe release
2 |
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |   workflow_dispatch:
9 |
10 | permissions:
11 |   contents: write
12 |   id-token: write
13 |   packages: write
14 |   pull-requests: write
15 |
16 | concurrency:
17 |   group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }}
18 |   cancel-in-progress: true
19 |
20 | jobs:
21 |   js-test-and-release:
22 |     uses: ipdxco/unified-github-workflows/.github/workflows/js-test-and-release.yml@v1.0
23 |     secrets:
24 |       DOCKER_TOKEN: ${{ secrets.DOCKER_TOKEN }}
25 |       DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
26 |       NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
27 |       UCI_GITHUB_TOKEN: ${{ secrets.UCI_GITHUB_TOKEN }}
28 |       CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
29 |
--------------------------------------------------------------------------------
/.github/workflows/semantic-pull-request.yml:
--------------------------------------------------------------------------------
1 | name: Semantic PR
2 |
3 | on:
4 |   pull_request_target:
5 |     types:
6 |       - opened
7 |       - edited
8 |       - synchronize
9 |
10 | jobs:
11 |   main:
12 |     uses: pl-strflt/.github/.github/workflows/reusable-semantic-pull-request.yml@v0.3
13 |
--------------------------------------------------------------------------------
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | name: Close Stale Issues
2 |
3 | on:
4 |   schedule:
5 |     - cron: '0 0 * * *'
6 |   workflow_dispatch:
7 |
8 | permissions:
9 |   issues: write
10 |   pull-requests: write
11 |
12 | jobs:
13 |   stale:
14 |     uses: ipdxco/unified-github-workflows/.github/workflows/reusable-stale-issue.yml@v1
15 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | build
3 | dist
4 | .docs
5 | .coverage
6 | node_modules
7 | package-lock.json
8 | yarn.lock
9 | .vscode
10 | .tmp-compiled-docs
11 | tsconfig-doc-check.aegir.json
12 | removed-passing-tests.json
13 |
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @ipfs/helia-dev
2 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | This project is dual licensed under MIT and Apache-2.0.
2 |
3 | MIT: https://www.opensource.org/licenses/mit
4 | Apache-2.0: https://www.apache.org/licenses/license-2.0
5 |
--------------------------------------------------------------------------------
/LICENSE-APACHE:
--------------------------------------------------------------------------------
1 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
2 |
3 | http://www.apache.org/licenses/LICENSE-2.0
4 |
5 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
6 |
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | # helia-verified-fetch
8 |
9 | [](https://ipfs.tech)
10 | [](https://discuss.ipfs.tech)
11 | [](https://codecov.io/gh/ipfs/helia-verified-fetch)
12 | [](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain)
13 |
14 | > A fetch-like API for obtaining verified & trustless IPFS content on the web
15 |
16 | ## About
17 |
18 | This monorepo contains the `@helia/verified-fetch` package and its corresponding interop tests.
19 |
20 | # Packages
21 |
22 | - [`packages/gateway-conformance`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/gateway-conformance) Gateway conformance tests for @helia/verified-fetch
23 | - [`packages/interop`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/interop) Interop tests for @helia/verified-fetch
24 | - [`packages/verified-fetch`](https://github.com/ipfs/helia-verified-fetch/tree/main/packages/verified-fetch) A fetch-like API for obtaining verified & trustless IPFS content on the web
25 |
26 | # Getting started
27 |
28 | See the [**`@helia/verified-fetch`**](./packages/verified-fetch#readme) package for how to get started, including usage examples.
29 |
30 | Learn more in the [announcement blog post](https://blog.ipfs.tech/verified-fetch/) and check out the [ready-to-run example](https://github.com/ipfs-examples/helia-examples/tree/main/examples/helia-browser-verified-fetch).
31 |
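For a quick taste of the API, here is a minimal sketch (it reuses the inlined identity CID from the conformance examples in this repo; see the package README for the full set of options):

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

// create a fetch-compatible function that verifies content hashes as it reads
const fetch = await createVerifiedFetch()

// resources can be ipfs:// or ipns:// URLs, CIDs or CID strings
const resp = await fetch('ipfs://bafkqabtimvwgy3yk')

// this CID inlines its payload (identity hash), so no network is needed
console.log(await resp.text())

// tear down the underlying Helia node when done
await fetch.stop()
```
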
32 | # API Docs
33 |
34 | - <https://ipfs.github.io/helia-verified-fetch>
35 |
36 | # License
37 |
38 | Licensed under either of
39 |
40 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>)
41 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/LICENSE-MIT) / <http://opensource.org/licenses/MIT>)
42 |
43 | # Contribute
44 |
45 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues).
46 |
47 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general.
48 |
49 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
50 |
51 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
52 |
53 | [](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md)
54 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "helia-verified-fetch",
3 |   "version": "1.0.0",
4 |   "description": "A fetch-like API for obtaining verified & trustless IPFS content on the web",
5 |   "license": "Apache-2.0 OR MIT",
6 |   "homepage": "https://github.com/ipfs/helia-verified-fetch#readme",
7 |   "repository": {
8 |     "type": "git",
9 |     "url": "git+https://github.com/ipfs/helia-verified-fetch.git"
10 |   },
11 |   "bugs": {
12 |     "url": "https://github.com/ipfs/helia-verified-fetch/issues"
13 |   },
14 |   "keywords": [
15 |     "ipfs"
16 |   ],
17 |   "private": true,
18 |   "scripts": {
19 |     "reset": "aegir run clean && aegir clean **/node_modules **/package-lock.json",
20 |     "test": "aegir run test",
21 |     "test:node": "aegir run test:node",
22 |     "test:chrome": "aegir run test:chrome",
23 |     "test:chrome-webworker": "aegir run test:chrome-webworker",
24 |     "test:firefox": "aegir run test:firefox",
25 |     "test:firefox-webworker": "aegir run test:firefox-webworker",
26 |     "test:electron-main": "aegir run test:electron-main",
27 |     "test:electron-renderer": "aegir run test:electron-renderer",
28 |     "clean": "aegir run clean",
29 |     "generate": "aegir run generate",
30 |     "build": "aegir run build",
31 |     "lint": "aegir run lint",
32 |     "dep-check": "aegir run dep-check",
33 |     "doc-check": "aegir run doc-check",
34 |     "spell-check": "aegir spell-check",
35 |     "release": "run-s build docs:no-publish npm:release docs",
36 |     "npm:release": "aegir run release --concurrency 1",
37 |     "docs": "aegir docs",
38 |     "docs:no-publish": "aegir docs --publish false"
39 |   },
40 |   "devDependencies": {
41 |     "aegir": "^47.0.11",
42 |     "npm-run-all": "^4.1.5"
43 |   },
44 |   "type": "module",
45 |   "workspaces": [
46 |     "packages/*"
47 |   ]
48 | }
49 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/.aegir.js:
--------------------------------------------------------------------------------
1 | // @ts-check
2 | import getPort from 'aegir/get-port'
3 | import { logger } from '@libp2p/logger'
4 | const log = logger('aegir')
5 |
6 | /** @type {import('aegir').PartialOptions} */
7 | export default {
8 |   build: {
9 |     bundlesizeMax: '1KB'
10 |   },
11 |   test: {
12 |     files: ['./dist/src/*.spec.js'],
13 |     before: async (options) => {
14 |       if (options.runner !== 'node') {
15 |         throw new Error('Only node runner is supported')
16 |       }
17 |
18 |       const { createKuboNode } = await import('./dist/src/fixtures/create-kubo.js')
19 |       const KUBO_PORT = await getPort(3440)
20 |       const SERVER_PORT = await getPort(3441)
21 |       // The Kubo gateway will be passed to the VerifiedFetch config
22 |       const { node: controller, gatewayUrl, repoPath } = await createKuboNode(KUBO_PORT)
23 |       await controller.start()
24 |       const { loadKuboFixtures } = await import('./dist/src/fixtures/kubo-mgmt.js')
25 |       const IPFS_NS_MAP = await loadKuboFixtures(repoPath)
26 |       const kuboGateway = gatewayUrl
27 |
28 |       const { startVerifiedFetchGateway } = await import('./dist/src/fixtures/basic-server.js')
29 |       const stopBasicServer = await startVerifiedFetchGateway({
30 |         serverPort: SERVER_PORT,
31 |         kuboGateway,
32 |         IPFS_NS_MAP
33 |       }).catch((err) => {
34 |         log.error(err)
35 |       })
36 |
37 |       const CONFORMANCE_HOST = 'localhost'
38 |
39 |       return {
40 |         controller,
41 |         stopBasicServer,
42 |         env: {
43 |           IPFS_NS_MAP,
44 |           CONFORMANCE_HOST,
45 |           KUBO_PORT: `${KUBO_PORT}`,
46 |           SERVER_PORT: `${SERVER_PORT}`,
47 |           KUBO_GATEWAY: kuboGateway
48 |         }
49 |       }
50 |     },
51 |     after: async (options, beforeResult) => {
52 |       // @ts-expect-error - broken aegir types
53 |       await beforeResult.controller.stop()
54 |       log('controller stopped')
55 |
56 |       // @ts-expect-error - broken aegir types
57 |       await beforeResult.stopBasicServer()
58 |       log('basic server stopped')
59 |
60 |     }
61 |   }
62 | }
63 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/.gitignore:
--------------------------------------------------------------------------------
1 | gwc-report*.json
2 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md)
4 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | # @helia/verified-fetch-gateway-conformance
8 |
9 | [](https://ipfs.tech)
10 | [](https://discuss.ipfs.tech)
11 | [](https://codecov.io/gh/ipfs/helia-verified-fetch)
12 | [](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain)
13 |
14 | > Gateway conformance tests for @helia/verified-fetch
15 |
16 | # About
17 |
18 |
32 |
33 | Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a
34 | backing trustless-gateway.
35 |
36 | ## Example - Testing a new @helia/verified-fetch release
37 |
38 | ```console
39 | $ npm i @helia/verified-fetch-gateway-conformance
40 | $ VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b verified-fetch-gateway-conformance
41 | ```
42 |
43 | ## Example - Testing with a different Kubo version
44 |
45 | ```console
46 | $ npm i @helia/verified-fetch-gateway-conformance
47 | $ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance
48 | ```
49 |
50 | ## Example - using a different gateway-conformance image
51 |
52 | ```console
53 | $ GWC_IMAGE=ghcr.io/ipfs/gateway-conformance:v0.5.1 verified-fetch-gateway-conformance
54 | ```
55 |
56 | ## Example - Debugging a test run
57 |
58 | ```console
59 | $ DEBUG="-mocha*,*,*:trace" npm run test # very verbose output
60 | $ DEBUG="conformance-tests*,conformance-tests*:trace" npm run test # only gateway-conformance test output
61 | ```
62 |
63 | ## Example - querying the gateway-conformance server directly
64 |
65 | ```console
66 | $ npm run build
67 | $ node dist/src/demo-server.js # in terminal 1
68 | $ curl -v http://localhost:3442/ipfs/bafkqabtimvwgy3yk/ # in terminal 2
69 | ```
70 |
71 | ## Troubleshooting
72 |
73 | ### Missing file in gateway-conformance-fixtures folder
74 |
75 | If you see the following error:
76 |
77 | > ENOENT: no such file or directory, open '\[...]/helia-verified-fetch/packages/gateway-conformance/dist/src/...
78 |
79 | This likely means the docker container is not executing properly for some
80 | reason. You can try running the following command to see if there are any
81 | errors: `DEBUG="-mocha*,*,*:trace" npm run test`
82 |
83 | # Install
84 |
85 | ```console
86 | $ npm i @helia/verified-fetch-gateway-conformance
87 | ```
88 |
89 | # License
90 |
91 | Licensed under either of
92 |
93 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/gateway-conformance/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>)
94 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/gateway-conformance/LICENSE-MIT) / <http://opensource.org/licenses/MIT>)
95 |
96 | # Contribute
97 |
98 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues).
99 |
100 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general.
101 |
102 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
103 |
104 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
105 |
106 | [](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md)
107 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "@helia/verified-fetch-gateway-conformance",
3 |   "version": "1.4.0",
4 |   "description": "Gateway conformance tests for @helia/verified-fetch",
5 |   "license": "Apache-2.0 OR MIT",
6 |   "homepage": "https://github.com/ipfs/helia-verified-fetch/tree/main/packages/gateway-conformance#readme",
7 |   "repository": {
8 |     "type": "git",
9 |     "url": "git+https://github.com/ipfs/helia-verified-fetch.git"
10 |   },
11 |   "bugs": {
12 |     "url": "https://github.com/ipfs/helia-verified-fetch/issues"
13 |   },
14 |   "publishConfig": {
15 |     "access": "public",
16 |     "provenance": true
17 |   },
18 |   "keywords": [
19 |     "IPFS"
20 |   ],
21 |   "bin": {
22 |     "demo-server": "./dist/src/demo-server.js",
23 |     "verified-fetch-gateway-conformance": "./dist/src/bin.js"
24 |   },
25 |   "type": "module",
26 |   "types": "./dist/src/index.d.ts",
27 |   "files": [
28 |     "src",
29 |     "dist",
30 |     "!dist/test",
31 |     "!**/*.tsbuildinfo"
32 |   ],
33 |   "exports": {
34 |     ".": {
35 |       "types": "./dist/src/index.d.ts",
36 |       "import": "./dist/src/index.js"
37 |     }
38 |   },
39 |   "release": {
40 |     "branches": [
41 |       "main"
42 |     ],
43 |     "plugins": [
44 |       [
45 |         "@semantic-release/commit-analyzer",
46 |         {
47 |           "preset": "conventionalcommits",
48 |           "releaseRules": [
49 |             {
50 |               "breaking": true,
51 |               "release": "major"
52 |             },
53 |             {
54 |               "revert": true,
55 |               "release": "patch"
56 |             },
57 |             {
58 |               "type": "feat",
59 |               "release": "minor"
60 |             },
61 |             {
62 |               "type": "fix",
63 |               "release": "patch"
64 |             },
65 |             {
66 |               "type": "docs",
67 |               "release": "patch"
68 |             },
69 |             {
70 |               "type": "test",
71 |               "release": "patch"
72 |             },
73 |             {
74 |               "type": "deps",
75 |               "release": "patch"
76 |             },
77 |             {
78 |               "scope": "no-release",
79 |               "release": false
80 |             }
81 |           ]
82 |         }
83 |       ],
84 |       [
85 |         "@semantic-release/release-notes-generator",
86 |         {
87 |           "preset": "conventionalcommits",
88 |           "presetConfig": {
89 |             "types": [
90 |               {
91 |                 "type": "feat",
92 |                 "section": "Features"
93 |               },
94 |               {
95 |                 "type": "fix",
96 |                 "section": "Bug Fixes"
97 |               },
98 |               {
99 |                 "type": "chore",
100 |                 "section": "Trivial Changes"
101 |               },
102 |               {
103 |                 "type": "docs",
104 |                 "section": "Documentation"
105 |               },
106 |               {
107 |                 "type": "deps",
108 |                 "section": "Dependencies"
109 |               },
110 |               {
111 |                 "type": "test",
112 |                 "section": "Tests"
113 |               }
114 |             ]
115 |           }
116 |         }
117 |       ],
118 |       "@semantic-release/changelog",
119 |       "@semantic-release/npm",
120 |       "@semantic-release/github",
121 |       [
122 |         "@semantic-release/git",
123 |         {
124 |           "assets": [
125 |             "CHANGELOG.md",
126 |             "package.json"
127 |           ]
128 |         }
129 |       ]
130 |     ]
131 |   },
132 |   "scripts": {
133 |     "clean": "aegir clean dist gwc-report-*.json",
134 |     "lint": "aegir lint",
135 |     "dep-check": "aegir dep-check",
136 |     "doc-check": "aegir doc-check",
137 |     "build": "aegir build",
138 |     "test": "aegir test -t node",
139 |     "update": "npm run build && node dist/src/update-expected-tests.js",
140 |     "release": "aegir release"
141 |   },
142 |   "dependencies": {
143 |     "@helia/block-brokers": "^4.2.1",
144 |     "@helia/http": "^2.1.1",
145 |     "@helia/interface": "^5.3.1",
146 |     "@helia/routers": "^3.1.1",
147 |     "@helia/verified-fetch": "^3.0.2",
148 |     "@libp2p/interface": "^2.10.1",
149 |     "@libp2p/kad-dht": "^15.1.1",
150 |     "@libp2p/logger": "^5.1.17",
151 |     "@libp2p/peer-id": "^5.1.4",
152 |     "@multiformats/dns": "^1.0.6",
153 |     "aegir": "^47.0.11",
154 |     "blockstore-core": "^5.0.2",
155 |     "datastore-core": "^10.0.2",
156 |     "execa": "^9.5.3",
157 |     "fast-glob": "^3.3.3",
158 |     "interface-blockstore": "^5.3.1",
159 |     "interface-datastore": "^8.3.1",
160 |     "ipfsd-ctl": "^15.0.2",
161 |     "kubo": "^0.34.1",
162 |     "kubo-rpc-client": "^5.1.0",
163 |     "multiformats": "^13.3.6",
164 |     "uint8arrays": "^5.1.0",
165 |     "undici": "^7.10.0"
166 |   },
167 |   "browser": {
168 |     "./dist/src/fixtures/create-kubo.js": "./dist/src/fixtures/create-kubo.browser.js",
169 |     "kubo": false
170 |   }
171 | }
172 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/bin.ts:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env node
2 |
3 | import { spawn } from 'node:child_process'
4 | import { dirname, resolve } from 'node:path'
5 | import { fileURLToPath } from 'node:url'
6 |
7 | // aegir should be run from `node_modules/@helia/verified-fetch-gateway-conformance`
8 | const cwd = resolve(dirname(fileURLToPath(import.meta.url)), '../../')
9 |
10 | const test = spawn('npx', ['aegir', 'test'], {
11 |   cwd
12 | })
13 |
14 | test.stdout.on('data', (data) => {
15 |   process.stdout.write(data)
16 | })
17 |
18 | test.stderr.on('data', (data) => {
19 |   process.stderr.write(data)
20 | })
21 |
22 | test.on('close', (code) => {
23 |   process.exit(code ?? 0)
24 | })
25 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/constants.ts:
--------------------------------------------------------------------------------
1 | export const GWC_IMAGE = process.env.GWC_IMAGE ?? 'ghcr.io/ipfs/gateway-conformance:v0.8.0'
2 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/demo-server.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Basically copies what .aegir.js does, but without all the env vars and setup, so you can run `node dist/src/demo-server.js` and test queries manually.
3 |  */
4 | import { logger } from '@libp2p/logger'
5 | import getPort from 'aegir/get-port'
6 | import { startVerifiedFetchGateway } from './fixtures/basic-server.js'
7 | import { createKuboNode } from './fixtures/create-kubo.js'
8 | import { loadKuboFixtures } from './fixtures/kubo-mgmt.js'
9 | import type { KuboNode } from 'ipfsd-ctl'
10 |
11 | const log = logger('demo-server')
12 |
13 | const SERVER_PORT = await getPort(3441)
14 |
15 | let kuboGateway: string | undefined
16 | let controller: KuboNode | undefined
17 | let IPFS_NS_MAP = ''
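// if KUBO_GATEWAY is already set, reuse that gateway instead of spawning a local Kubo node below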
18 | if (process.env.KUBO_GATEWAY == null) {
19 |   const KUBO_GATEWAY_PORT = await getPort(3440)
20 |   const kuboNodeDetails = await createKuboNode(KUBO_GATEWAY_PORT)
21 |   controller = kuboNodeDetails.node
22 |   kuboGateway = kuboNodeDetails.gatewayUrl
23 |   const repoPath = kuboNodeDetails.repoPath
24 |   await controller.start()
25 |   IPFS_NS_MAP = await loadKuboFixtures(repoPath)
26 | }
27 |
28 | const stopServer = await startVerifiedFetchGateway({
29 |   serverPort: SERVER_PORT,
30 |   kuboGateway,
31 |   IPFS_NS_MAP
32 | })
33 |
34 | process.on('exit', () => {
35 |   stopServer().catch((err) => {
36 |     log.error('Failed to stop server', err)
37 |   })
38 |   controller?.stop().catch((err) => {
39 |     log.error('Failed to stop controller', err)
40 |     process.exit(1)
41 |   })
42 | })
43 |
44 | export {}
45 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/fixtures/create-kubo.ts:
--------------------------------------------------------------------------------
1 | import { createNode } from 'ipfsd-ctl'
2 | import { path as kuboPath } from 'kubo'
3 | import { create } from 'kubo-rpc-client'
4 | import type { KuboNode } from 'ipfsd-ctl'
5 |
6 | export interface KuboNodeDetails {
7 |   node: KuboNode
8 |   gatewayUrl: string
9 |   repoPath: string
10 | }
11 |
12 | export async function createKuboNode (listenPort?: number): Promise<KuboNodeDetails> {
13 |   const controller = await createNode({
14 |     type: 'kubo',
15 |     rpc: create,
16 |     test: true,
17 |     bin: kuboPath(),
18 |     init: {
19 |       config: {
20 |         Addresses: {
21 |           Swarm: [
22 |             '/ip4/0.0.0.0/tcp/0',
23 |             '/ip4/0.0.0.0/tcp/0/ws'
24 |           ],
25 |           Gateway: `/ip4/127.0.0.1/tcp/${listenPort ?? 0}`
26 |         },
27 |         Gateway: {
28 |           NoFetch: true,
29 |           ExposeRoutingAPI: true,
30 |           HTTPHeaders: {
31 |             'Access-Control-Allow-Origin': ['*'],
32 |             'Access-Control-Allow-Methods': ['GET', 'POST', 'PUT', 'OPTIONS']
33 |           }
34 |         }
35 |       }
36 |     },
37 |     args: ['--enable-pubsub-experiment', '--enable-namesys-pubsub']
38 |   })
39 |   const info = await controller.info()
40 |
41 |   return {
42 |     node: controller,
43 |     gatewayUrl: `http://127.0.0.1:${listenPort ?? 0}`,
44 |     repoPath: info.repo
45 |   }
46 | }
47 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/fixtures/create-verified-fetch.ts:
--------------------------------------------------------------------------------
1 | import type { Helia } from '@helia/interface'
2 | import type { CreateVerifiedFetchInit, CreateVerifiedFetchOptions, VerifiedFetch } from '@helia/verified-fetch'
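/**
 * Resolves the `createVerifiedFetch` implementation at runtime: when the
 * VERIFIED_FETCH env var names a published package (e.g.
 * `VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b`, per the README), that
 * build is imported instead of the workspace source.
 */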
3 | export async function createVerifiedFetch (init?: CreateVerifiedFetchInit | Helia, options?: CreateVerifiedFetchOptions): Promise<VerifiedFetch> {
4 |   const { createVerifiedFetch: createVerifiedFetchOriginal } = await import(process.env.VERIFIED_FETCH ?? '@helia/verified-fetch')
5 |
6 |   return createVerifiedFetchOriginal(init, options)
7 | }
8 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/fixtures/get-local-dns-resolver.ts:
--------------------------------------------------------------------------------
1 | import { logger } from '@libp2p/logger'
2 | import type { Answer, Question } from '@multiformats/dns'
3 | import type { DNSResolver } from '@multiformats/dns/resolvers'
4 |
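/**
 * Builds a DNSResolver whose TXT answers come from the comma-separated
 * IPFS_NS_MAP (`domain:value` pairs, the format Kubo understands). For
 * example, a hypothetical entry `example.com:/ipfs/bafyfoo` answers a TXT
 * query for `_dnslink.example.com` with `dnslink=/ipfs/bafyfoo`.
 */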
5 | export function getLocalDnsResolver (ipfsNsMap: string, kuboGateway: string): DNSResolver {
6 |   const log = logger('basic-server:dns')
7 |   const nsMap = new Map<string, string>()
8 |   const keyVals = ipfsNsMap.split(',')
9 |   for (const keyVal of keyVals) {
10 |     const [key, val] = keyVal.split(':')
11 |     log('Setting entry: %s="%s"', key, val)
12 |     nsMap.set(key, val)
13 |   }
14 |
15 |   return async (domain, options) => {
16 |     const questions: Question[] = []
17 |     const answers: Answer[] = []
18 |
19 |     if (Array.isArray(options?.types)) {
20 |       options?.types?.forEach?.((type) => {
21 |         questions.push({ name: domain, type })
22 |       })
23 |     } else {
24 |       questions.push({ name: domain, type: options?.types ?? 16 })
25 |     }
26 |     // TODO: do we need to do anything with CNAME resolution...?
27 |     // if (questions.some((q) => q.type === 5)) {
28 |     //   answers.push({
29 |     //     name: domain,
30 |     //     type: 5,
31 |     //     TTL: 180,
32 |     //     data: ''
33 |     //   })
34 |     // }
35 |     if (questions.some((q) => q.type === 16)) {
36 |       log.trace('Querying "%s" for types %O', domain, options?.types)
37 |       const actualDomainKey = domain.replace('_dnslink.', '')
38 |       const nsValue = nsMap.get(actualDomainKey)
39 |       if (nsValue == null) {
40 |         log.error('No IPFS_NS_MAP entry for domain "%s"', actualDomainKey)
41 |
42 |         throw new Error('No IPFS_NS_MAP entry for domain')
43 |       }
44 |       const data = `dnslink=${nsValue}`
45 |       answers.push({
46 |         name: domain,
47 |         type: 16,
48 |         TTL: 180,
49 |         data // should be in the format 'dnslink=/ipfs/bafyfoo'
50 |       })
51 |     }
52 |
53 |     const dnsResponse = {
54 |       Status: 0,
55 |       TC: false,
56 |       RD: false,
57 |       RA: false,
58 |       AD: true,
59 |       CD: true,
60 |       Question: questions,
61 |       Answer: answers
62 |     }
63 |
64 |     log.trace('Returning DNS response for %s: %O', domain, dnsResponse)
65 |
66 |     return dnsResponse
67 |   }
68 | }
69 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/fixtures/header-utils.ts:
--------------------------------------------------------------------------------
1 | import type { Logger } from '@libp2p/logger'
2 | import type { IncomingHttpHeaders } from 'undici/types/header.js'
3 |
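/**
 * Node.js `IncomingHttpHeaders` values may be a string, an array of strings
 * or undefined; this flattens them into a fetch `Headers` object, appending
 * each element of array-valued headers.
 */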
4 | export function convertNodeJsHeadersToFetchHeaders (headers: IncomingHttpHeaders): HeadersInit {
5 |   const fetchHeaders = new Headers()
6 |   for (const [key, value] of Object.entries(headers)) {
7 |     if (value == null) {
8 |       continue
9 |     }
10 |     if (Array.isArray(value)) {
11 |       for (const v of value) {
12 |         fetchHeaders.append(key, v)
13 |       }
14 |     } else {
15 |       fetchHeaders.append(key, value)
16 |     }
17 |   }
18 |   return fetchHeaders
19 | }
20 |
21 | export interface ConvertFetchHeadersToNodeJsHeadersOptions {
22 |   resp: Response
23 |   log: Logger
24 |   fixingGwcAnnoyance: boolean
25 |   serverPort: number
26 | }
27 |
28 | export function convertFetchHeadersToNodeJsHeaders ({ resp, log, fixingGwcAnnoyance, serverPort }: ConvertFetchHeadersToNodeJsHeadersOptions): IncomingHttpHeaders {
29 |   const headers: Record<string, string> = {}
30 |   for (const [key, value] of resp.headers.entries()) {
31 |     if (fixingGwcAnnoyance) {
32 |       log.trace('need to fix GWC annoyance.')
33 |       if (value.includes(`localhost:${serverPort}`)) {
34 |         const newValue = value.replace(`localhost:${serverPort}`, 'localhost')
35 |         log.trace('fixing GWC annoyance. Replacing Header[%s] value of "%s" with "%s"', key, value, newValue)
36 |         // we need to fix any Location, or other headers that have localhost without port in them.
37 |         headers[key] = newValue
38 |       } else {
39 |         log.trace('NOT fixing GWC annoyance. Setting Header[%s] value of "%s"', key, value)
40 |         headers[key] = value
41 |       }
42 |     } else {
43 |       headers[key] = value
44 |     }
45 |   }
46 |   return headers
47 | }
48 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/fixtures/ipns-record-datastore.ts:
--------------------------------------------------------------------------------
1 | import { MemoryDatastore } from 'datastore-core'
2 | import type { Datastore } from 'interface-datastore'
3 |
4 | const datastore = new MemoryDatastore()
5 | /**
6 |  * We need a normalized datastore so we can set custom records
7 |  * from the IPFS_NS_MAP like kubo does.
8 |  */
9 | export function getIpnsRecordDatastore (): Datastore {
10 |   return datastore
11 | }
12 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/get-report-details.ts:
--------------------------------------------------------------------------------
1 | import { readFile } from 'node:fs/promises'
2 | export interface ReportDetails {
3 |   passingTests: string[]
4 |   failingTests: string[]
5 |   failureCount: number
6 |   successCount: number
7 |   successRate: number
8 | }
9 |
10 | export async function getReportDetails (path: string): Promise<ReportDetails> {
11 |   let failureCount = 0
12 |   let successCount = 0
13 |   const passingTests: string[] = []
14 |   const failingTests: string[] = []
15 |
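  // gotest summary lines look like the following (illustrative test names and timings):
  //   --- PASS: TestGatewayCache/SomeTest (0.01s)
  //   --- FAIL: TestGatewayCache/OtherTest (0.02s)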
16 |   // parse the newline delimited JSON report at gwc-report-${name}.json and count the number of "PASS:" and "FAIL:" lines
17 |   const report = await readFile(path, 'utf8')
18 |   const lines = report.split('\n')
19 |   for (const line of lines) {
20 |     if (line.includes('--- FAIL:')) {
21 |       failureCount++
22 |       failingTests.push(line.split('--- FAIL: ')[1].split(' ')[0])
23 |     } else if (line.includes('--- PASS:')) {
24 |       successCount++
25 |       passingTests.push(line.split('--- PASS: ')[1].split(' ')[0])
26 |     }
27 |   }
28 |   const successRate = Number.parseFloat(((successCount / (successCount + failureCount)) * 100).toFixed(2))
29 |
30 |   return {
31 |     failingTests,
32 |     passingTests,
33 |     failureCount,
34 |     successCount,
35 |     successRate
36 |   }
37 | }
38 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/get-tests-to-run.ts:
--------------------------------------------------------------------------------
1 | import { getWontFixTests } from './get-wontfix-tests.js'
2 |
3 | /**
4 |  *
5 |  * You can see output for specific tests with something like
6 |  *
7 |  * @example
8 |  *
9 |  * ```
10 |  * DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS='TestNativeDag/HEAD_plain_JSON_codec_with_no_explicit_format_returns_HTTP_200.*' npm run test
11 |  * ```
12 |  *
13 |  * If you run `npm run update` and see that some passing tests are removed, you should probably verify that those tests
14 |  * pass. You can choose to not update `expected-failing-tests.json` and `expected-passing-tests.json` and then choose to
15 |  * save the removed passing tests to a file to ensure that they do still pass with a command like:
16 |  *
17 |  * @example
18 |  * ```
19 |  * DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS="$(jq -r '.[]' removed-passing-tests.json | paste -sd ',' -)" npm run test
20 |  * ```
21 |  */
22 | export function getTestsToRun (): string[] {
23 |   const envTestsToRun = process.env.RUN_TESTS != null ? process.env.RUN_TESTS.split(',') : []
24 |   // by default, we filter out tests that we know we are not going to fix...
25 |   // set FORCE_RUN=true to run all tests you set in RUN_TESTS (even if they are in the wontfix list)
26 |   const shouldFilterOutWontFixTests = process.env.FORCE_RUN == null
27 |   const wontFixTests = getWontFixTests()
28 |   // TODO: tests to run can be gotest based regex, we need to be smarter about filtering.
29 |   const testsToRun = shouldFilterOutWontFixTests ? envTestsToRun.filter((test) => !wontFixTests.includes(test)) : envTestsToRun
30 |   return testsToRun
31 | }
32 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/get-tests-to-skip.ts:
--------------------------------------------------------------------------------
1 | import { getWontFixTests } from './get-wontfix-tests.js'
2 |
3 | /**
4 |  *
5 |  * you can skip certain tests by setting SKIP_TESTS to a comma-separated list of test names
6 |  *
7 |  * @example
8 |  *
9 |  * ```
10 |  * SKIP_TESTS='TestNativeDag/HEAD_plain_JSON_codec_with_no_explicit_format_returns_HTTP_200.*' npm run test
11 |  * ```
12 |  */
13 | export function getTestsToSkip (): string[] {
14 |   const envTestsToSkip = process.env.SKIP_TESTS != null ? process.env.SKIP_TESTS.split(',') : []
15 |   const testsToSkip = [...getWontFixTests(), ...envTestsToSkip]
16 |   return testsToSkip
17 | }
18 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/get-wontfix-tests.ts:
--------------------------------------------------------------------------------
1 | export function getWontFixTests (): string[] {
2 |   return [
3 |     // these tests are dependent upon supporting multi-range requests: https://github.com/ipfs/helia-verified-fetch/pull/207
4 |     'TestNativeDag/Convert_application%2Fvnd.ipld.dag-cbor_to_application%2Fvnd.ipld.dag-json_with_range_request_includes_correct_bytes_-_multi_range/Check_1',
5 |     'TestNativeDag/Convert_application%2Fvnd.ipld.dag-cbor_to_application%2Fvnd.ipld.dag-json_with_range_request_includes_correct_bytes_-_multi_range'
6 |   ]
7 | }
8 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/index.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * @packageDocumentation
3 |  *
4 |  * Runs Gateway Conformance tests against @helia/verified-fetch using Kubo as a
5 |  * backing trustless-gateway.
6 |  *
7 |  * @example Testing a new @helia/verified-fetch release
8 |  *
9 |  * ```console
10 |  * $ npm i @helia/verified-fetch-gateway-conformance
11 |  * $ VERIFIED_FETCH=@helia/verified-fetch@1.x.x-6f8c15b verified-fetch-gateway-conformance
12 |  * ```
13 |  *
14 |  * @example Testing with a different Kubo version
15 |  *
16 |  * ```console
17 |  * $ npm i @helia/verified-fetch-gateway-conformance
18 |  * $ KUBO_BINARY=/path/to/kubo verified-fetch-gateway-conformance
19 |  * ```
20 |  *
21 |  * @example using a different gateway-conformance image
22 |  *
23 |  * ```console
24 |  * $ GWC_IMAGE=ghcr.io/ipfs/gateway-conformance:v0.5.1 verified-fetch-gateway-conformance
25 |  * ```
26 |  *
27 |  * @example Debugging a test run
28 |  *
29 |  * ```console
30 |  * $ DEBUG="-mocha*,*,*:trace" npm run test # very verbose output
31 |  * $ DEBUG="conformance-tests*,conformance-tests*:trace" npm run test # only gateway-conformance test output
32 |  * ```
33 |  *
34 |  * @example querying the gateway-conformance server directly
35 |  *
36 |  * ```console
37 |  * $ npm run build
38 |  * $ node dist/src/demo-server.js # in terminal 1
39 |  * $ curl -v http://localhost:3442/ipfs/bafkqabtimvwgy3yk/ # in terminal 2
40 |  * ```
41 |  *
42 |  * ## Troubleshooting
43 |  *
44 |  * ### Missing file in gateway-conformance-fixtures folder
45 |  *
46 |  * If you see the following error:
47 |  * > ENOENT: no such file or directory, open '[...]/helia-verified-fetch/packages/gateway-conformance/dist/src/...
48 |  *
49 |  * This likely means the docker container is not executing properly for some
50 |  * reason. You can try running the following command to see if there are any
51 |  * errors: `DEBUG="-mocha*,*,*:trace" npm run test`
52 |  */
53 |
54 | export {}
55 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/src/update-expected-tests.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-console */
2 | /**
3 |  * Script that will read gwc-report-all.json and update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results.
4 |  *
5 |  * This is useful when you want to update the expected test results after running the tests with the following command:
6 |  *
7 |  * ```bash
8 |  * SUCCESS_RATE=100 npm run test -- --bail=false
9 |  * ```
10 |  *
11 |  * This will run all the tests and update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results.
12 |  */
13 |
14 | import { readFile, writeFile } from 'node:fs/promises'
15 | import { join } from 'node:path'
16 | import readline from 'node:readline'
17 | import { getReportDetails } from './get-report-details.js'
18 |
19 | /**
20 |  * Prompt the user with a yes / no question.
21 |  *
22 |  * @param {string} question - The text to show.
23 |  * @returns {Promise<boolean>} Resolves to the user’s choice.
24 |  */
25 | async function confirm (question: string): Promise<boolean> {
26 |   const hint = ' [y/n] '
27 |   const rl = readline.createInterface({ input: process.stdin, output: process.stdout })
28 |
29 |   return new Promise(resolve => {
30 |     const ask = (): void => {
31 |       rl.question(`${question}${hint}`, input => {
32 |         const a = input.trim().toLowerCase()
33 |
34 |         if (['y', 'yes'].includes(a)) { rl.close(); resolve(true); return }
35 |         if (['n', 'no'].includes(a)) { rl.close(); resolve(false); return }
36 |
37 |         console.log('Please type "y" or "n" then press Enter.')
38 |         ask() // repeat until valid
39 |       })
40 |     }
41 |     ask()
42 |   })
43 | }
44 | // display a warning that this should not be done blindly and that the updated passing and failing tests should be checked for correctness
45 | console.warn('WARNING: This will update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results.')
46 | console.warn('WARNING: This should not be done blindly; the updated passing and failing tests should be checked for correctness.')
47 |
48 | const expectedPassingTestsPath = join(process.cwd(), 'src', 'expected-passing-tests.json')
49 | const expectedFailingTestsPath = join(process.cwd(), 'src', 'expected-failing-tests.json')
50 |
51 | const currentPassingTests: string[] = JSON.parse(await readFile(expectedPassingTestsPath, 'utf-8'))
52 | const currentFailingTests: string[] = JSON.parse(await readFile(expectedFailingTestsPath, 'utf-8'))
53 |
54 | const { passingTests, failingTests } = await getReportDetails('gwc-report-all.json')
55 |
56 | // output the differences between the current passing and failing tests and the new passing and failing tests
57 | console.log('Differences between the current passing and failing tests and the new passing and failing tests:')
58 | console.log('Added passing tests:')
59 | const passingTestAdditions = passingTests.filter((test: string) => !currentPassingTests.includes(test))
60 | console.log(passingTestAdditions)
61 | console.log('Removed passing tests:')
62 | const passingTestRemovals = currentPassingTests.filter((test: string) => !passingTests.includes(test))
63 | console.log(passingTestRemovals)
64 | console.log('Added failing tests:')
65 | const failingTestAdditions = failingTests.filter((test: string) => !currentFailingTests.includes(test))
66 | console.log(failingTestAdditions)
67 | console.log('Removed failing tests:')
68 | const failingTestRemovals = currentFailingTests.filter((test: string) => !failingTests.includes(test))
69 | console.log(failingTestRemovals)
70 |
71 | if (failingTestAdditions.length > 0 || passingTestRemovals.length > 0) {
72 |   console.warn('WARNING: There are previously passing tests that are now failing, is this expected?')
73 | }
74 |
75 | if (passingTestRemovals.length + failingTestRemovals.length + passingTestAdditions.length + failingTestAdditions.length > 0) {
76 |   const answer = await confirm('Are you sure you want to update the expected-passing-tests.json and expected-failing-tests.json files with the latest test results?')
77 |
78 |   if (!answer) {
79 |     console.log('Aborting.')
80 |
81 |     if (passingTestRemovals.length > 0) {
82 |       // to help with debugging, we can save the removed passing tests to a file to ensure that they do still pass with a command like:
83 |       // DEBUG="gateway-conformance*,gateway-conformance*:trace" RUN_TESTS="$(jq -r '.[]' removed-passing-tests.json | paste -sd ',' -)" npm run test
84 |       const shouldSaveRemovedPassingTests = await confirm('Should we save the removed passing tests to removed-passing-tests.json file?')
85 |       if (shouldSaveRemovedPassingTests) {
86 |         await writeFile('removed-passing-tests.json', JSON.stringify(passingTestRemovals, null, 2) + '\n')
87 |       }
88 |     }
89 |
90 |     process.exit(0)
91 |   }
92 |   await writeFile(expectedPassingTestsPath, JSON.stringify(passingTests, null, 2) + '\n')
93 |   await writeFile(expectedFailingTestsPath, JSON.stringify(failingTests, null, 2) + '\n')
94 | } else {
95 |   console.log('No changes to the expected-passing-tests.json and expected-failing-tests.json files.')
96 | }
97 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "aegir/src/config/tsconfig.aegir.json",
3 |   "compilerOptions": {
4 |     "outDir": "dist",
5 |     "target": "ES2022",
6 |     "module": "nodenext",
7 |     "moduleResolution": "nodenext"
8 |   },
9 |   "include": [
10 |     "src",
11 |     "test",
12 |     "src/expected-passing-tests.json",
13 |     "src/expected-failing-tests.json"
14 |   ],
15 |   "references": [
16 |     {
17 |       "path": "../verified-fetch"
18 |     }
19 |   ]
20 | }
21 |
--------------------------------------------------------------------------------
/packages/gateway-conformance/typedoc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "entryPoints": [
3 |     "./src/index.ts"
4 |   ]
5 | }
6 |
--------------------------------------------------------------------------------
/packages/interop/.aegir.js:
--------------------------------------------------------------------------------
1 | import { resolve } from 'node:path'
2 | import { tmpdir } from 'node:os'
3 | import { createDelegatedRoutingV1HttpApiServer } from '@helia/delegated-routing-v1-http-api-server'
4 | import { stubInterface } from 'sinon-ts'
5 |
6 | const IPFS_PATH = resolve(tmpdir(), 'verified-fetch-interop-ipfs-repo')
7 |
8 | /** @type {import('aegir').PartialOptions} */
9 | export default {
10 |   build: {
11 |     bundlesizeMax: '1KB'
12 |   },
13 |   dependencyCheck: {
14 |     ignore: [
15 |       '@helia/delegated-routing-v1-http-api-server',
16 |       'sinon-ts'
17 |     ]
18 |
19 |   },
20 |   test: {
21 |     files: './dist/src/*.spec.js',
22 |     before: async () => {
23 |
24 |       const { createKuboNode } = await import('./dist/src/fixtures/create-kubo.js')
25 |       const kuboNode = await createKuboNode(IPFS_PATH)
26 |
27 |       await kuboNode.start()
28 |
29 |       // requires aegir build to be run first, which it will by default.
30 |       const { loadFixtures } = await import('./dist/src/fixtures/load-fixtures.js')
31 |
32 |       await loadFixtures(IPFS_PATH)
33 |
34 |       const multiaddrs = (await kuboNode.api.id()).addresses
35 |       const id = (await kuboNode.api.id()).id
36 |
37 |       const helia = stubInterface({
38 |         routing: stubInterface({
39 |           findProviders: async function * findProviders () {
40 |             yield {
41 |               multiaddrs,
42 |               id,
43 |               protocols: ['transport-bitswap']
44 |             }
45 |           }
46 |         })
47 |       })
48 |       const routingServer = await createDelegatedRoutingV1HttpApiServer(helia, {
49 |         listen: {
50 |           host: '127.0.0.1',
51 |           port: 0
52 |         }
53 |       })
54 |       await routingServer.ready()
55 |
56 |       const address = routingServer.server.address()
57 |       const port = typeof address === 'string' ? address : address?.port
58 |
59 |       return {
60 |         kuboNode,
61 |         routingServer,
62 |         env: {
63 |           KUBO_DIRECT_RETRIEVAL_ROUTER: `http://127.0.0.1:${port}`
64 |         }
65 |       }
66 |     },
67 |     after: async (_options, beforeResult) => {
68 |       await beforeResult.kuboNode.stop()
69 |       await beforeResult.routingServer.close()
70 |     }
71 |   }
72 | }
73 |
--------------------------------------------------------------------------------
/packages/interop/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md)
4 |
--------------------------------------------------------------------------------
/packages/interop/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/packages/interop/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | # @helia/verified-fetch-interop
8 |
9 | [](https://ipfs.tech)
10 | [](https://discuss.ipfs.tech)
11 | [](https://codecov.io/gh/ipfs/helia-verified-fetch)
12 | [](https://github.com/ipfs/helia-verified-fetch/actions/workflows/js-test-and-release.yml?query=branch%3Amain)
13 |
14 | > Interop tests for @helia/verified-fetch
15 |
16 | # About
17 |
18 | Runs interop tests between Helia and Kubo.
19 |
20 | ## Example - Testing a new Kubo release
21 |
22 | ```console
23 | $ npm i @helia/verified-fetch-interop
24 | $ KUBO_BINARY=/path/to/kubo helia-verified-fetch-interop
25 | ```
26 |
27 | # Install
28 |
29 | ```console
30 | $ npm i @helia/verified-fetch-interop
31 | ```
32 |
33 | ## Browser `<script>` tag
34 |
35 | Loading this module through a `<script>` tag will make its exports available as `HeliaVerifiedFetchInterop` in the global namespace.
36 |
37 | ```html
38 | <script src="https://unpkg.com/@helia/verified-fetch-interop/dist/index.min.js"></script>
39 | ```
40 |
41 | # License
42 |
43 | Licensed under either of
44 |
45 | - Apache 2.0, ([LICENSE-APACHE](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/interop/LICENSE-APACHE) / <http://www.apache.org/licenses/LICENSE-2.0>)
46 | - MIT ([LICENSE-MIT](https://github.com/ipfs/helia-verified-fetch/blob/main/packages/interop/LICENSE-MIT) / <http://opensource.org/licenses/MIT>)
47 |
48 | # Contribute
49 |
50 | Contributions welcome! Please check out [the issues](https://github.com/ipfs/helia-verified-fetch/issues).
51 |
52 | Also see our [contributing document](https://github.com/ipfs/community/blob/master/CONTRIBUTING_JS.md) for more information on how we work, and about contributing in general.
53 |
54 | Please be aware that all interactions related to this repo are subject to the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
55 |
56 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
57 |
58 | [](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md)
59 |
--------------------------------------------------------------------------------
/packages/interop/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@helia/verified-fetch-interop",
3 | "version": "1.26.2",
4 | "description": "Interop tests for @helia/verified-fetch",
5 | "license": "Apache-2.0 OR MIT",
6 | "homepage": "https://github.com/ipfs/helia-verified-fetch/tree/main/packages/interop#readme",
7 | "repository": {
8 | "type": "git",
9 | "url": "git+https://github.com/ipfs/helia-verified-fetch.git"
10 | },
11 | "bugs": {
12 | "url": "https://github.com/ipfs/helia-verified-fetch/issues"
13 | },
14 | "publishConfig": {
15 | "access": "public",
16 | "provenance": true
17 | },
18 | "keywords": [
19 | "IPFS"
20 | ],
21 | "bin": {
22 | "helia-verified-fetch-interop": "./dist/src/bin.js"
23 | },
24 | "type": "module",
25 | "types": "./dist/src/index.d.ts",
26 | "files": [
27 | "src",
28 | "dist",
29 | "!dist/test",
30 | "!**/*.tsbuildinfo"
31 | ],
32 | "exports": {
33 | ".": {
34 | "types": "./dist/src/index.d.ts",
35 | "import": "./dist/src/index.js"
36 | }
37 | },
38 | "release": {
39 | "branches": [
40 | "main"
41 | ],
42 | "plugins": [
43 | [
44 | "@semantic-release/commit-analyzer",
45 | {
46 | "preset": "conventionalcommits",
47 | "releaseRules": [
48 | {
49 | "breaking": true,
50 | "release": "major"
51 | },
52 | {
53 | "revert": true,
54 | "release": "patch"
55 | },
56 | {
57 | "type": "feat",
58 | "release": "minor"
59 | },
60 | {
61 | "type": "fix",
62 | "release": "patch"
63 | },
64 | {
65 | "type": "docs",
66 | "release": "patch"
67 | },
68 | {
69 | "type": "test",
70 | "release": "patch"
71 | },
72 | {
73 | "type": "deps",
74 | "release": "patch"
75 | },
76 | {
77 | "scope": "no-release",
78 | "release": false
79 | }
80 | ]
81 | }
82 | ],
83 | [
84 | "@semantic-release/release-notes-generator",
85 | {
86 | "preset": "conventionalcommits",
87 | "presetConfig": {
88 | "types": [
89 | {
90 | "type": "feat",
91 | "section": "Features"
92 | },
93 | {
94 | "type": "fix",
95 | "section": "Bug Fixes"
96 | },
97 | {
98 | "type": "chore",
99 | "section": "Trivial Changes"
100 | },
101 | {
102 | "type": "docs",
103 | "section": "Documentation"
104 | },
105 | {
106 | "type": "deps",
107 | "section": "Dependencies"
108 | },
109 | {
110 | "type": "test",
111 | "section": "Tests"
112 | }
113 | ]
114 | }
115 | }
116 | ],
117 | "@semantic-release/changelog",
118 | "@semantic-release/npm",
119 | "@semantic-release/github",
120 | [
121 | "@semantic-release/git",
122 | {
123 | "assets": [
124 | "CHANGELOG.md",
125 | "package.json"
126 | ]
127 | }
128 | ]
129 | ]
130 | },
131 | "scripts": {
132 | "clean": "aegir clean",
133 | "lint": "aegir lint",
134 | "dep-check": "aegir dep-check",
135 | "doc-check": "aegir doc-check",
136 | "build": "aegir build",
137 | "test": "aegir test",
138 | "test:chrome": "aegir test -t browser --cov",
139 | "test:chrome-webworker": "aegir test -t webworker",
140 | "test:firefox": "aegir test -t browser -- --browser firefox",
141 | "test:firefox-webworker": "aegir test -t webworker -- --browser firefox",
142 | "test:node": "aegir test -t node --cov",
143 | "test:electron-main": "aegir test -t electron-main",
144 | "release": "aegir release"
145 | },
146 | "dependencies": {
147 | "@helia/delegated-routing-v1-http-api-server": "^4.0.6",
148 | "@helia/verified-fetch": "^2.0.0",
149 | "aegir": "^47.0.11",
150 | "execa": "^9.5.3",
151 | "glob": "^11.0.2",
152 | "ipfsd-ctl": "^15.0.2",
153 | "kubo": "^0.34.1",
154 | "kubo-rpc-client": "^5.1.0",
155 | "magic-bytes.js": "^1.12.1",
156 | "multiformats": "^13.3.6",
157 | "sinon-ts": "^2.0.0",
158 | "wherearewe": "^2.0.1"
159 | },
160 | "browser": {
161 | "./dist/src/fixtures/create-kubo.js": "./dist/src/fixtures/create-kubo.browser.js",
162 | "kubo": false
163 | }
164 | }
165 |
--------------------------------------------------------------------------------
/packages/interop/src/abort-handling.spec.ts:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | import { createVerifiedFetch } from '@helia/verified-fetch'
3 | import { expect } from 'aegir/chai'
4 | import type { VerifiedFetch } from '@helia/verified-fetch'
5 |
6 | describe('verified-fetch abort handling', () => {
7 | let verifiedFetch: VerifiedFetch
8 | before(async () => {
9 | if (process.env.KUBO_DIRECT_RETRIEVAL_ROUTER == null || process.env.KUBO_DIRECT_RETRIEVAL_ROUTER === '') {
10 | throw new Error('KUBO_DIRECT_RETRIEVAL_ROUTER environment variable is required')
11 | }
12 |
13 | verifiedFetch = await createVerifiedFetch({
14 | gateways: [process.env.KUBO_DIRECT_RETRIEVAL_ROUTER],
15 | routers: [process.env.KUBO_DIRECT_RETRIEVAL_ROUTER],
16 | allowInsecure: true,
17 | allowLocal: true
18 | })
19 | })
20 |
21 | after(async () => {
22 | await verifiedFetch.stop()
23 | })
24 |
25 | it('should handle aborts properly', async function () {
26 | this.timeout(2000)
27 | const controller = new AbortController()
28 | const timeout = setTimeout(() => {
29 | controller.abort()
30 | }, 70)
31 |
32 | const fetchPromise = verifiedFetch('ipfs://QmdmQXB2mzChmMeKY47C43LxUdg1NDJ5MWcKMKxDu7RgQm/1 - Barrel - Part 1/1 - Barrel - Part 1 - alt.txt', {
33 | signal: controller.signal
34 | })
35 | await expect(fetchPromise).to.eventually.be.rejected.with.property('name', 'AbortError')
36 | clearTimeout(timeout)
37 | })
38 | })
39 |
--------------------------------------------------------------------------------
/packages/interop/src/bin.ts:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env node
2 |
3 | import { spawn } from 'node:child_process'
4 | import { dirname, resolve } from 'node:path'
5 | import { fileURLToPath } from 'node:url'
6 |
7 | // aegir should be run from `node_modules/@helia/verified-fetch-interop`
8 | const cwd = resolve(dirname(fileURLToPath(import.meta.url)), '../../')
9 |
10 | const test = spawn('npx', ['aegir', 'test'], {
11 | cwd
12 | })
13 |
14 | test.stdout.on('data', (data) => {
15 | process.stdout.write(data)
16 | })
17 |
18 | test.stderr.on('data', (data) => {
19 | process.stderr.write(data)
20 | })
21 |
22 | test.on('close', (code) => {
23 | process.exit(code ?? 0)
24 | })
25 |
--------------------------------------------------------------------------------
/packages/interop/src/direct-retrieval.spec.ts:
--------------------------------------------------------------------------------
1 | import { createVerifiedFetch } from '@helia/verified-fetch'
2 | import { expect } from 'aegir/chai'
3 | import { isNode, isBrowser } from 'wherearewe'
4 | import type { CreateVerifiedFetchInit } from '@helia/verified-fetch'
5 |
6 | /**
7 | * Currently only testing browser and node
8 | */
9 | const describe = isNode || isBrowser ? global.describe : global.describe.skip
10 |
11 | describe('@helia/verified-fetch - direct retrieval', () => {
12 | let directRetrievalRouterUrl: string
13 | let createVerifiedFetchInit: CreateVerifiedFetchInit
14 |
15 | beforeEach(async () => {
16 | if (process.env.KUBO_DIRECT_RETRIEVAL_ROUTER == null || process.env.KUBO_DIRECT_RETRIEVAL_ROUTER === '') {
17 | throw new Error('KUBO_DIRECT_RETRIEVAL_ROUTER environment variable is required')
18 | }
19 | directRetrievalRouterUrl = process.env.KUBO_DIRECT_RETRIEVAL_ROUTER
20 | createVerifiedFetchInit = {
21 | gateways: [],
22 | routers: [directRetrievalRouterUrl]
23 | }
24 | if (!isNode) {
25 | createVerifiedFetchInit.libp2pConfig = {
26 | connectionGater: {
27 | denyDialMultiaddr: () => false
28 | }
29 | }
30 | }
31 | })
32 |
33 | it('can fetch content directly from another node', async () => {
34 | const fetch = await createVerifiedFetch(createVerifiedFetchInit)
35 |
36 | const res = await fetch('ipfs://QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR/1 - Barrel - Part 1 - alt.txt')
37 |
38 | expect(res.status).to.equal(200)
39 | const body = await res.text()
40 | expect(body).to.equal('Don\'t we all.')
41 |
42 | await fetch.stop()
43 | })
44 | })
45 |
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/create-kubo.ts:
--------------------------------------------------------------------------------
1 | import { createNode } from 'ipfsd-ctl'
2 | import { path as kuboPath } from 'kubo'
3 | import { create } from 'kubo-rpc-client'
4 | import type { KuboNode } from 'ipfsd-ctl'
5 |
6 | export async function createKuboNode (repoPath = undefined): Promise<KuboNode> {
7 | return createNode({
8 | type: 'kubo',
9 | rpc: create,
10 | bin: kuboPath(),
11 | test: true,
12 | repo: repoPath,
13 | init: {
14 | config: {
15 | Addresses: {
16 | Swarm: [
17 | '/ip4/0.0.0.0/tcp/4001',
18 | '/ip4/0.0.0.0/tcp/4002/ws',
19 | '/ip4/0.0.0.0/udp/4001/webrtc-direct',
20 | '/ip4/0.0.0.0/udp/4001/quic-v1/webtransport',
21 | '/ip6/::/udp/4001/webrtc-direct',
22 | '/ip6/::/udp/4001/quic-v1/webtransport'
23 | ],
24 | Gateway: '/ip4/127.0.0.1/tcp/8180'
25 | },
26 | Gateway: {
27 | NoFetch: true,
28 | ExposeRoutingAPI: true,
29 | HTTPHeaders: {
30 | 'Access-Control-Allow-Origin': ['*'],
31 | 'Access-Control-Allow-Methods': ['GET', 'POST', 'PUT', 'OPTIONS']
32 | }
33 | }
34 | }
35 | },
36 | args: ['--enable-pubsub-experiment', '--enable-namesys-pubsub']
37 | })
38 | }
39 |
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr-tokens.uniswap.org-2024-01-18.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmbQDovX7wRe9ek7u6QXe9zgCXkTzoUSsTFJEkrYV1HrVR-xkcd-Barrel-part-1.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv-helia-identify-website.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/bafybeidbclfqleg2uojchspzd4bob56dqetqjsj27gy2cq3klkkgxtpn4i-single-layer-hamt-with-multi-block-files.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/gateway-conformance-fixtures.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/gateway-conformance-fixtures.car
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/data/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ipfs/helia-verified-fetch/7c8e274e8f743d94884f78fbd786ad4f2904d1de/packages/interop/src/fixtures/data/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam.ipns-record
--------------------------------------------------------------------------------
/packages/interop/src/fixtures/load-fixtures.ts:
--------------------------------------------------------------------------------
1 | import { basename } from 'node:path'
2 | import { $ } from 'execa'
3 | import { glob } from 'glob'
4 | import { path as kuboPath } from 'kubo'
5 |
6 | /**
7 | * Only callable from node (intended to be consumed by .aegir.js)
8 | * but the fixtures loaded by this function are also used by browser tests.
9 | */
10 | export async function loadFixtures (IPFS_PATH = undefined): Promise<void> {
11 | const kuboBinary = process.env.KUBO_BINARY ?? kuboPath()
12 |
13 | const carFiles = await glob('**/fixtures/data/*.car', { cwd: process.cwd() })
14 | const ipnsRecordFiles = await glob('**/fixtures/data/*.ipns-record', { cwd: process.cwd() })
15 |
16 | await Promise.allSettled(carFiles.map(async (carFile) => {
17 | await $({ env: { IPFS_PATH } })`${kuboBinary} dag import --pin-roots=false --offline ${carFile}`
18 | }))
19 |
20 | for (const ipnsRecord of ipnsRecordFiles) {
21 | const key = basename(ipnsRecord, '.ipns-record').split('_')[0]
22 | await $({ env: { IPFS_PATH } })`${kuboBinary} routing put --allow-offline /ipns/${key} ${ipnsRecord}`
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/packages/interop/src/index.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * @packageDocumentation
3 | *
4 | * Runs interop tests between @helia/verified-fetch and Kubo.
5 | *
6 | * @example Testing a new Kubo release
7 | *
8 | * ```console
9 | * $ npm i @helia/verified-fetch-interop
10 | * $ KUBO_BINARY=/path/to/kubo helia-verified-fetch-interop
11 | * ```
12 | */
13 |
14 | export {}
15 |
--------------------------------------------------------------------------------
/packages/interop/src/ipns.spec.ts:
--------------------------------------------------------------------------------
1 | import { createVerifiedFetch } from '@helia/verified-fetch'
2 | import { expect } from 'aegir/chai'
3 | import type { VerifiedFetch } from '@helia/verified-fetch'
4 |
5 | describe('@helia/verified-fetch - ipns', () => {
6 | let verifiedFetch: VerifiedFetch
7 |
8 | before(async () => {
9 | verifiedFetch = await createVerifiedFetch({
10 | gateways: ['http://127.0.0.1:8180'],
11 | routers: ['http://127.0.0.1:8180'],
12 | allowInsecure: true,
13 | allowLocal: true
14 | })
15 | })
16 |
17 | after(async () => {
18 | await verifiedFetch.stop()
19 | })
20 |
21 | it('should be able to load /ipns/<key>', async () => {
22 | // ensure the key is being returned by the ipfs gateway itself
23 | const kuboResponse = await fetch('http://127.0.0.1:8180/ipns/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam')
24 | const kuboResponseBody = await kuboResponse.text()
25 | expect(kuboResponseBody).to.equal('hello\n')
26 |
27 | const res = await verifiedFetch('/ipns/k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam')
28 | expect(res.status).to.equal(200)
29 | const body = await res.text()
30 | expect(body).to.equal('hello\n')
31 | })
32 | })
33 |
--------------------------------------------------------------------------------
/packages/interop/src/json.spec.ts:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | import { createVerifiedFetch } from '@helia/verified-fetch'
3 | import { expect } from 'aegir/chai'
4 | import { CID } from 'multiformats/cid'
5 |
6 | describe('@helia/verified-fetch - json', () => {
7 | describe('unixfs - multi-block', () => {
8 | let verifiedFetch: Awaited<ReturnType<typeof createVerifiedFetch>>
9 |
10 | before(async () => {
11 | // As of 2024-01-18, https://cloudflare-ipfs.com/ipns/tokens.uniswap.org resolves to:
12 | // root: QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr
13 | // child1: QmNik5N4ryNwzzXYq5hCYKGcRjAf9QtigxtiJh9o8aXXbG // partial JSON
14 | // child2: QmWNBJX6fZyNTLWNYBHxAHpBctCP43R2zeqV2G8uavqFZn // partial JSON
15 | verifiedFetch = await createVerifiedFetch({
16 | gateways: ['http://127.0.0.1:8180'],
17 | routers: ['http://127.0.0.1:8180'],
18 | allowInsecure: true,
19 | allowLocal: true
20 | })
21 | })
22 |
23 | after(async () => {
24 | await verifiedFetch.stop()
25 | })
26 |
27 | it('handles UnixFS-chunked JSON file', async () => {
28 | const resp = await verifiedFetch(CID.parse('QmQJ8fxavY54CUsxMSx9aE9Rdcmvhx8awJK2jzJp4iAqCr'), {
29 | allowLocal: true,
30 | allowInsecure: true
31 | })
32 | expect(resp).to.be.ok()
33 | const jsonObj = await resp.json()
34 | expect(jsonObj).to.be.ok()
35 | expect(jsonObj).to.have.property('name').equal('Uniswap Labs Default')
36 | expect(jsonObj).to.have.property('timestamp').equal('2023-12-13T18:25:25.830Z')
37 | expect(jsonObj).to.have.property('version').to.deep.equal({ major: 11, minor: 11, patch: 0 })
38 | expect(jsonObj).to.have.property('tags')
39 | expect(jsonObj).to.have.property('logoURI').equal('ipfs://QmNa8mQkrNKp1WEEeGjFezDmDeodkWRevGFN8JCV7b4Xir')
40 | expect(jsonObj).to.have.property('keywords').to.deep.equal(['uniswap', 'default'])
41 | expect(jsonObj.tokens).to.be.an('array').of.length(767)
42 | })
43 |
44 | it('handles hamt-sharded directory with json file', async () => {
45 | const resp = await verifiedFetch('ipfs://bafybeibc5sgo2plmjkq2tzmhrn54bk3crhnc23zd2msg4ea7a4pxrkgfna/371', {
46 | allowLocal: true,
47 | allowInsecure: true
48 | })
49 | expect(resp).to.be.ok()
50 | expect(resp.status).to.equal(200)
51 | expect(resp.headers.get('content-type')).to.equal('application/json')
52 | const jsonObj = await resp.json()
53 | expect(jsonObj).to.be.ok()
54 | expect(jsonObj).to.have.property('name').equal('Pudgy Penguin #371')
55 | })
56 | })
57 | })
58 |
--------------------------------------------------------------------------------
/packages/interop/src/websites.spec.ts:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | import { createVerifiedFetch } from '@helia/verified-fetch'
3 | import { expect } from 'aegir/chai'
4 |
5 | describe('@helia/verified-fetch - websites', () => {
6 | describe('helia-identify.on.fleek.co', () => {
7 | let verifiedFetch: Awaited<ReturnType<typeof createVerifiedFetch>>
8 |
9 | before(async () => {
10 | // 2024-01-22 CID for _dnslink.helia-identify.on.fleek.co
11 | verifiedFetch = await createVerifiedFetch({
12 | gateways: ['http://127.0.0.1:8180'],
13 | routers: ['http://127.0.0.1:8180'],
14 | allowInsecure: true,
15 | allowLocal: true
16 | })
17 | })
18 |
19 | after(async () => {
20 | await verifiedFetch.stop()
21 | })
22 |
23 | it('loads index.html when passed helia-identify.on.fleek.co root CID', async () => {
24 | const resp = await verifiedFetch('ipfs://QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv', {
25 | allowLocal: true,
26 | allowInsecure: true
27 | })
28 | expect(resp).to.be.ok()
29 | const html = await resp.text()
30 | expect(html).to.be.ok()
31 | expect(html).to.include('Run Identify on a remote node with Helia')
32 | })
33 |
34 | it('loads helia-identify.on.fleek.co index.html directly ', async () => {
35 | const resp = await verifiedFetch('ipfs://QmbxpRxwKXxnJQjnPqm1kzDJSJ8YgkLxH23mcZURwPHjGv/index.html', {
36 | allowLocal: true,
37 | allowInsecure: true
38 | })
39 | expect(resp).to.be.ok()
40 | const html = await resp.text()
41 | expect(html).to.be.ok()
42 | expect(html).to.include('Run Identify on a remote node with Helia')
43 | })
44 | })
45 |
46 | /**
47 | *
48 | * Created on 2024-01-23. /ipns/blog.libp2p.io/index.html resolved to QmVZNGy6SPvUbvQCXXaGDdp8kvfJm9MMozjU12dyzH6hKf
49 | *
50 | * ```shell
51 | * mkdir fake-blog.libp2p.io
52 | * npx kubo@0.25.0 cat '/ipfs/QmVZNGy6SPvUbvQCXXaGDdp8kvfJm9MMozjU12dyzH6hKf' > fake-blog.libp2p.io/index.html
53 | * npx kubo@0.25.0 add -r fake-blog.libp2p.io
54 | * npx kubo@0.25.0 dag export QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw > QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw-fake-blog.libp2p.io.car
55 | * ```
56 | */
57 | describe('fake blog.libp2p.io', () => {
58 | let verifiedFetch: Awaited<ReturnType<typeof createVerifiedFetch>>
59 |
60 | before(async () => {
61 | verifiedFetch = await createVerifiedFetch({
62 | gateways: ['http://127.0.0.1:8180'],
63 | routers: ['http://127.0.0.1:8180'],
64 | allowInsecure: true,
65 | allowLocal: true
66 | })
67 | })
68 |
69 | after(async () => {
70 | await verifiedFetch.stop()
71 | })
72 |
73 | it('loads index.html when passed fake-blog.libp2p.io root CID', async () => {
74 | const resp = await verifiedFetch('ipfs://QmeiDMLtPUS3RT2xAcUwsNyZz169wPke2q7im9vZpVLSYw', {
75 | allowLocal: true,
76 | allowInsecure: true
77 | })
78 | expect(resp).to.be.ok()
79 | const html = await resp.text()
80 | expect(html).to.be.ok()
81 | expect(html).to.include('Home | libp2p Blog & News')
82 | expect(html).to.include('')
83 | })
84 | })
85 | })
86 |
--------------------------------------------------------------------------------
/packages/interop/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "aegir/src/config/tsconfig.aegir.json",
3 | "compilerOptions": {
4 | "outDir": "dist"
5 | },
6 | "include": [
7 | "src",
8 | "test"
9 | ],
10 | "references": [
11 | {
12 | "path": "../verified-fetch"
13 | }
14 | ]
15 | }
16 |
--------------------------------------------------------------------------------
/packages/interop/typedoc.json:
--------------------------------------------------------------------------------
1 | {
2 | "entryPoints": [
3 | "./src/index.ts"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/packages/verified-fetch/.aegir.js:
--------------------------------------------------------------------------------
1 | /** @type {import('aegir').PartialOptions} */
2 | const options = {
3 | build: {
4 | bundlesizeMax: '355KB'
5 | }
6 | }
7 |
8 | export default options
9 |
--------------------------------------------------------------------------------
/packages/verified-fetch/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Code of Conduct
2 |
3 | This project follows the [`IPFS Community Code of Conduct`](https://github.com/ipfs/community/blob/master/code-of-conduct.md)
4 |
--------------------------------------------------------------------------------
/packages/verified-fetch/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/packages/verified-fetch/src/errors.ts:
--------------------------------------------------------------------------------
1 | export class InvalidRangeError extends Error {
2 | static name = 'InvalidRangeError'
3 |
4 | constructor (message = 'Invalid range request') {
5 | super(message)
6 | this.name = 'InvalidRangeError'
7 | }
8 | }
9 |
10 | export class NoContentError extends Error {
11 | static name = 'NoContentError'
12 |
13 | constructor (message = 'No content found') {
14 | super(message)
15 | this.name = 'NoContentError'
16 | }
17 | }
18 |
19 | export class SubdomainNotSupportedError extends Error {
20 | static name = 'SubdomainNotSupportedError'
21 |
22 | constructor (message = 'Subdomain not supported') {
23 | super(message)
24 | this.name = 'SubdomainNotSupportedError'
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/errors.ts:
--------------------------------------------------------------------------------
1 | import type { FatalPluginErrorOptions, PluginErrorOptions } from './types.js'
2 |
3 | /**
4 | * If a plugin encounters an error, it should throw an instance of this class.
5 | */
6 | export class PluginError extends Error {
7 | public name = 'PluginError'
8 | public code: string
9 | public fatal: boolean
10 | public details?: Record<string, any>
11 | public response?: any
12 |
13 | constructor (code: string, message: string, options?: PluginErrorOptions) {
14 | super(message)
15 | this.code = code
16 | this.fatal = options?.fatal ?? false
17 | this.details = options?.details
18 | this.response = options?.response
19 | }
20 | }
21 |
22 | /**
23 | * If a plugin encounters a fatal error and verified-fetch should not continue processing the request, it should throw
24 | * an instance of this class.
25 | *
26 | * Note that you should be very careful when throwing a `PluginFatalError`, as it will stop the request from being
27 | * processed further. If you do not have a response to return to the client, you should consider throwing a
28 | * `PluginError` instead.
29 | */
30 | export class PluginFatalError extends PluginError {
31 | public name = 'PluginFatalError'
32 |
33 | constructor (code: string, message: string, options: FatalPluginErrorOptions) {
34 | super(code, message, { ...options, fatal: true })
35 | this.name = 'PluginFatalError'
36 | }
37 | }
38 |
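A minimal sketch of how a custom plugin might use these classes, assuming it lives alongside this file so the relative import resolves; the function names and error codes are made up for illustration:

```ts
import { PluginError, PluginFatalError } from './errors.js'

// recoverable failure: verified-fetch can continue with other plugins
function rejectUnsupported (resource: string): never {
  throw new PluginError('ERR_EXAMPLE_UNSUPPORTED', `cannot handle ${resource}`)
}

// fatal failure: processing stops and the supplied response is returned
function rejectPermanently (resource: string): never {
  throw new PluginFatalError('ERR_EXAMPLE_FATAL', 'cannot continue', {
    response: new Response(`could not process ${resource}`, { status: 400 })
  })
}
```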
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/index.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * This file is the entry into all things we export from the `src/plugins` directory.
3 | */
4 |
5 | export { PluginError, PluginFatalError } from './errors.js'
6 | export { BasePlugin } from './plugin-base.js'
7 | export type { PluginOptions, PluginContext, VerifiedFetchPluginFactory } from './types.js'
8 | export * from './plugins.js'
9 |
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-base.ts:
--------------------------------------------------------------------------------
1 | import type { VerifiedFetchPlugin, PluginContext, PluginOptions } from './types.js'
2 | import type { Logger } from '@libp2p/interface'
3 |
4 | /**
5 | * Base class for verified-fetch plugins. This class provides a basic implementation of the `FetchHandlerPlugin`
6 | * interface.
7 | *
8 | * Subclasses must implement the `id` property and the `canHandle` and `handle` methods.
9 | * Subclasses may override the `codes` and `log` properties.
10 | *
11 | * If your plugin adds/edits the context supplied in `handle`, you should increment the `context.modified` property.
12 | */
13 | export abstract class BasePlugin implements VerifiedFetchPlugin {
14 | readonly codes: number[] = []
15 | readonly pluginOptions: PluginOptions
16 | abstract readonly id: string
17 | protected _log?: Logger
18 |
19 | get log (): Logger {
20 | // instantiate the logger lazily because it depends on the id, which is not set until after the constructor is called
21 | if (this._log == null) {
22 | this._log = this.pluginOptions.logger.forComponent(this.id)
23 | }
24 | return this._log
25 | }
26 |
27 | constructor (options: PluginOptions) {
28 | this.pluginOptions = options
29 | }
30 |
31 | abstract canHandle (context: PluginContext): boolean
32 |
33 | abstract handle (context: PluginContext): Promise<Response | null>
34 | }
35 |
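A minimal subclass sketch under the contract described above; the id, the Accept check and the response body are illustrative only, not a real plugin shipped with the package:

```ts
import { BasePlugin } from './plugin-base.js'
import type { PluginContext } from './types.js'

export class PlainTextPlugin extends BasePlugin {
  readonly id = 'plain-text-plugin'

  canHandle (context: PluginContext): boolean {
    // only offer to handle requests that explicitly ask for plain text
    return context.accept === 'text/plain'
  }

  async handle (context: PluginContext): Promise<Response | null> {
    // a plugin that only enriched the context would mutate it, increment
    // context.modified and return null so other plugins keep running
    return new Response(`handled ${context.cid.toString()}`, {
      status: 200,
      headers: { 'content-type': 'text/plain' }
    })
  }
}
```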
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-byte-range-context.ts:
--------------------------------------------------------------------------------
1 | import { ByteRangeContext } from '../utils/byte-range-context.js'
2 | import { badRangeResponse } from '../utils/responses.js'
3 | import { BasePlugin } from './plugin-base.js'
4 | import type { PluginContext } from './types.js'
5 |
6 | /**
7 | * This plugin simply adds the ByteRangeContext to the PluginContext.
8 | */
9 | export class ByteRangeContextPlugin extends BasePlugin {
10 | readonly id = 'byte-range-context-plugin'
11 |
12 | /**
13 | * Return false if the ByteRangeContext has already been set, otherwise return true.
14 | */
15 | canHandle (context: PluginContext): boolean {
16 | return context.byteRangeContext == null
17 | }
18 |
19 | async handle (context: PluginContext): Promise<Response | null> {
20 | context.byteRangeContext = new ByteRangeContext(this.pluginOptions.logger, context.options?.headers)
21 | context.modified++
22 |
23 | if (context.byteRangeContext.isRangeRequest && !context.byteRangeContext.isValidRangeRequest) {
24 | // invalid range request.. fail
25 | return badRangeResponse(context.resource)
26 | }
27 |
28 | return null
29 | }
30 | }
31 |
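For context, a hedged sketch of the kind of request this plugin validates; the CID is a placeholder and default resolution settings are assumed:

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

// ask for the first 100 bytes only; a malformed Range header would instead
// get the 416 response produced by badRangeResponse above
const res = await fetch('ipfs://<cid>', {
  headers: { Range: 'bytes=0-99' }
})
console.log(res.status) // 206 Partial Content for a satisfiable range

await fetch.stop()
```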
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-car.ts:
--------------------------------------------------------------------------------
1 | import { BlockExporter, car, CIDPath, SubgraphExporter, UnixFSExporter } from '@helia/car'
2 | import { CarWriter } from '@ipld/car'
3 | import { code as dagPbCode } from '@ipld/dag-pb'
4 | import toBrowserReadableStream from 'it-to-browser-readablestream'
5 | import { okRangeResponse } from '../utils/responses.js'
6 | import { BasePlugin } from './plugin-base.js'
7 | import type { PluginContext } from './types.js'
8 | import type { ExportCarOptions } from '@helia/car'
9 |
10 | function getFilename ({ cid, ipfsPath, query }: Pick<PluginContext, 'cid' | 'ipfsPath' | 'query'>): string {
11 | if (query.filename != null) {
12 | return query.filename
13 | }
14 |
15 | // convert context.ipfsPath to a filename. replace all / with _, replace prefix protocol with empty string
16 | const filename = ipfsPath.replace(/\/ipfs\//, '').replace(/\/ipns\//, '').replace(/\//g, '_')
17 |
18 | return `${filename}.car`
19 | }
20 |
21 | // https://specs.ipfs.tech/http-gateways/trustless-gateway/#dag-scope-request-query-parameter
22 | type DagScope = 'all' | 'entity' | 'block'
23 | function getDagScope ({ query }: Pick<PluginContext, 'query'>): DagScope {
24 | const dagScope = query['dag-scope']
25 | if (dagScope === 'all' || dagScope === 'entity' || dagScope === 'block') {
26 | return dagScope
27 | }
28 | return 'all'
29 | }
30 |
31 | /**
32 | * Accepts a `CID` and returns a `Response` with a body stream that is a CAR
33 | * of the `DAG` referenced by the `CID`.
34 | */
35 | export class CarPlugin extends BasePlugin {
36 | readonly id = 'car-plugin'
37 |
38 | canHandle (context: PluginContext): boolean {
39 | this.log('checking if we can handle %c with accept %s', context.cid, context.accept)
40 | if (context.byteRangeContext == null) {
41 | return false
42 | }
43 | if (context.pathDetails == null) {
44 | return false
45 | }
46 |
47 | return context.accept?.startsWith('application/vnd.ipld.car') === true || context.query.format === 'car' // application/vnd.ipld.car
48 | }
49 |
50 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext' | 'pathDetails'>>): Promise<Response> {
51 | const { options, pathDetails, cid } = context
52 | if (pathDetails == null) {
53 | throw new Error('attempted to handle request for car with no path details')
54 | }
55 | const { getBlockstore, helia } = this.pluginOptions
56 | context.reqFormat = 'car'
57 | context.query.download = true
58 | context.query.filename = getFilename(context)
59 | const blockstore = getBlockstore(cid, context.resource, options?.session ?? true, options)
60 |
61 | const c = car({
62 | blockstore,
63 | getCodec: helia.getCodec,
64 | logger: helia.logger
65 | })
66 | const carExportOptions: ExportCarOptions = {
67 | ...options,
68 | traversal: new CIDPath(pathDetails.ipfsRoots)
69 | }
70 | const dagScope = getDagScope(context)
71 | // root should be the terminal element if it exists, otherwise the root cid.. because of this, we can't use the @helia/car stream() method.
72 | const root = pathDetails.terminalElement.cid ?? cid
73 | if (dagScope === 'block') {
74 | carExportOptions.exporter = new BlockExporter()
75 | } else if (dagScope === 'entity') {
76 | // if its unixFS, we need to enumerate a directory, or get all blocks for the entity, otherwise, use blockExporter
77 | if (root.code === dagPbCode) {
78 | carExportOptions.exporter = new UnixFSExporter()
79 | } else {
80 | carExportOptions.exporter = new BlockExporter()
81 | }
82 | } else {
83 | carExportOptions.exporter = new SubgraphExporter()
84 | }
85 | const { writer, out } = CarWriter.create(root)
86 | const iter = async function * (): AsyncIterable<Uint8Array> {
87 | for await (const buf of out) {
88 | yield buf
89 | }
90 | }
91 |
92 | // the root passed to export should be the root CID of the DAG, not the terminal element.
93 | c.export(cid, writer, carExportOptions)
94 | .catch((err) => {
95 | this.log.error('error exporting car - %e', err)
96 | })
97 | // export will close the writer when it's done, no finally needed.
98 |
99 | context.byteRangeContext.setBody(toBrowserReadableStream(iter()))
100 |
101 | const response = okRangeResponse(context.resource, context.byteRangeContext.getBody('application/vnd.ipld.car; version=1'), { byteRangeContext: context.byteRangeContext, log: this.log })
102 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? 'application/vnd.ipld.car; version=1')
103 |
104 | return response
105 | }
106 | }
107 |
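A hedged usage sketch of the negotiation implemented above: requesting a CAR export scoped to a single entity; the CID and path are placeholders:

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

// format=car (or Accept: application/vnd.ipld.car) selects this plugin and
// dag-scope narrows the export per the trustless gateway spec linked above
const res = await fetch('ipfs://<cid>/<path>?format=car&dag-scope=entity')

console.log(res.headers.get('content-type')) // application/vnd.ipld.car; version=1
const carBytes = new Uint8Array(await res.arrayBuffer())

await fetch.stop()
```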
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-dag-cbor.ts:
--------------------------------------------------------------------------------
1 | import * as ipldDagCbor from '@ipld/dag-cbor'
2 | import * as ipldDagJson from '@ipld/dag-json'
3 | import { dagCborToSafeJSON } from '../utils/dag-cbor-to-safe-json.js'
4 | import { setIpfsRoots } from '../utils/response-headers.js'
5 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js'
6 | import { isObjectNode } from '../utils/walk-path.js'
7 | import { BasePlugin } from './plugin-base.js'
8 | import type { PluginContext } from './types.js'
9 | import type { ObjectNode } from 'ipfs-unixfs-exporter'
10 |
11 | /**
12 | * Handles `dag-cbor` content, including requests with Accept: `application/vnd.ipld.dag-json` and `application/json`.
13 | */
14 | export class DagCborPlugin extends BasePlugin {
15 | readonly id = 'dag-cbor-plugin'
16 | readonly codes = [ipldDagCbor.code]
17 |
18 | canHandle ({ cid, accept, pathDetails, byteRangeContext }: PluginContext): boolean {
19 | this.log('checking if we can handle %c with accept %s', cid, accept)
20 | if (pathDetails == null) {
21 | return false
22 | }
23 | if (!isObjectNode(pathDetails.terminalElement)) {
24 | return false
25 | }
26 | if (cid.code !== ipldDagCbor.code) {
27 | return false
28 | }
29 | if (byteRangeContext == null) {
30 | return false
31 | }
32 |
33 | return isObjectNode(pathDetails.terminalElement)
34 | }
35 |
36 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext' | 'pathDetails'>>): Promise<Response> {
37 | const { cid, path, resource, accept, pathDetails } = context
38 |
39 | this.log.trace('fetching %c/%s', cid, path)
40 |
41 | const ipfsRoots = pathDetails.ipfsRoots
42 | const terminalElement = pathDetails.terminalElement as ObjectNode // checked in canHandle fn.
43 |
44 | const block = terminalElement.node
45 |
46 | let body: string | Uint8Array
47 |
48 | if (accept === 'application/octet-stream' || accept === 'application/vnd.ipld.dag-cbor' || accept === 'application/cbor') {
49 | // skip decoding
50 | body = block
51 | } else if (accept === 'application/vnd.ipld.dag-json') {
52 | try {
53 | // if vnd.ipld.dag-json has been specified, convert to the format - note
54 | // that this supports more data types than regular JSON, the content-type
55 | // response header is set so the user knows to process it differently
56 | const obj = ipldDagCbor.decode(block)
57 | body = ipldDagJson.encode(obj)
58 | } catch (err) {
59 | this.log.error('could not transform %c to application/vnd.ipld.dag-json', err)
60 | return notAcceptableResponse(resource)
61 | }
62 | } else {
63 | try {
64 | body = dagCborToSafeJSON(block)
65 | } catch (err) {
66 | if (accept === 'application/json') {
67 | this.log('could not decode DAG-CBOR as JSON-safe, but the client sent "Accept: application/json"', err)
68 |
69 | return notAcceptableResponse(resource)
70 | }
71 |
72 | this.log('could not decode DAG-CBOR as JSON-safe, falling back to `application/octet-stream`', err)
73 | body = block
74 | }
75 | }
76 |
77 | context.byteRangeContext.setBody(body)
78 |
79 | const responseContentType = accept ?? (body instanceof Uint8Array ? 'application/octet-stream' : 'application/json')
80 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(responseContentType), { byteRangeContext: context.byteRangeContext, log: this.log })
81 |
82 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? responseContentType)
83 |
84 | this.log.trace('setting content type to "%s"', context.byteRangeContext.getContentType() ?? responseContentType)
85 | setIpfsRoots(response, ipfsRoots)
86 |
87 | return response
88 | }
89 | }
90 |
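A hedged sketch of the Accept negotiation above, re-encoding a dag-cbor block as dag-json on the way out; the CID is a placeholder for a dag-cbor root:

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

const res = await fetch('ipfs://<dag-cbor-cid>', {
  headers: { accept: 'application/vnd.ipld.dag-json' }
})

console.log(res.headers.get('content-type')) // application/vnd.ipld.dag-json
// dag-json is valid JSON, so the standard body parser works
const obj = await res.json()

await fetch.stop()
```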
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-dag-walk.ts:
--------------------------------------------------------------------------------
1 | import { code as dagCborCode } from '@ipld/dag-cbor'
2 | import { code as dagPbCode } from '@ipld/dag-pb'
3 | import { handlePathWalking } from '../utils/walk-path.js'
4 | import { BasePlugin } from './plugin-base.js'
5 | import type { PluginContext } from './types.js'
6 |
7 | /**
8 | * This plugin should almost always run first because it's going to handle path walking if needed, and will only say it can handle
9 | * the request if path walking is possible (path is not empty, terminalCid is unknown, and the path has not been walked yet).
10 | *
11 | * Once this plugin has run, the PluginContext will be updated and then this plugin will return false for canHandle, so it won't run again.
12 | */
13 | export class DagWalkPlugin extends BasePlugin {
14 | readonly id = 'dag-walk-plugin'
15 | /**
16 | * Return false if the path has already been walked, otherwise return true if the CID is encoded with a codec that supports pathing.
17 | */
18 | canHandle (context: PluginContext): boolean {
19 | this.log('checking if we can handle %c with accept %s', context.cid, context.accept)
20 | const { pathDetails, cid } = context
21 | if (pathDetails != null) {
22 | // path has already been walked
23 | return false
24 | }
25 |
26 | return (cid.code === dagPbCode || cid.code === dagCborCode)
27 | }
28 |
29 | async handle (context: PluginContext): Promise {
30 | const { cid, resource, options, withServerTiming = false } = context
31 | const { getBlockstore, handleServerTiming } = this.pluginOptions
32 | const blockstore = getBlockstore(cid, resource, options?.session ?? true, options)
33 |
34 | // TODO: migrate handlePathWalking into this plugin
35 | const pathDetails = await handleServerTiming('path-walking', '', async () => handlePathWalking({ ...context, blockstore, log: this.log }), withServerTiming)
36 |
37 | if (pathDetails instanceof Response) {
38 | this.log.trace('path walking failed')
39 |
40 | if (pathDetails.status === 404) {
41 | // invalid or incorrect path.. we walked the path but nothing is there
42 | // send the 404 response
43 | return pathDetails
44 | }
45 |
46 | // some other error walking the path (codec doesn't support pathing, etc..), let the next plugin try to handle it
47 | return null
48 | }
49 |
50 | context.modified++
51 | context.pathDetails = pathDetails
52 |
53 | return null
54 | }
55 | }
56 |
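A simplified, hypothetical sketch of the dispatch shape this ordering assumes: a single pass over the plugin list. The real runner in verified-fetch may differ, for example by re-offering the context while `context.modified` keeps changing:

```ts
import type { PluginContext, VerifiedFetchPlugin } from './types.js'

async function runPlugins (plugins: VerifiedFetchPlugin[], context: PluginContext): Promise<Response | null> {
  for (const plugin of plugins) {
    if (!plugin.canHandle(context)) {
      continue
    }

    // context-enriching plugins (like DagWalkPlugin above) mutate the
    // context and return null; response-producing plugins return a Response
    const response = await plugin.handle(context)

    if (response != null) {
      return response
    }
  }

  return null
}
```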
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-dir-index-html.ts:
--------------------------------------------------------------------------------
1 | import { code as dagPbCode } from '@ipld/dag-pb'
2 | import { base32 } from 'multiformats/bases/base32'
3 | import { sha256 } from 'multiformats/hashes/sha2'
4 | import { dirIndexHtml } from '../utils/dir-index-html.js'
5 | import { getETag } from '../utils/get-e-tag.js'
6 | import { getIpfsRoots } from '../utils/response-headers.js'
7 | import { okRangeResponse } from '../utils/responses.js'
8 | import { BasePlugin } from './plugin-base.js'
9 | import type { PluginContext, VerifiedFetchPluginFactory } from './types.js'
10 | import type { UnixFSEntry } from 'ipfs-unixfs-exporter'
11 |
12 | /**
13 | * Converts a list of directory entries into a small hash that can be used in the etag header.
14 | *
15 | * @see https://github.com/ipfs/boxo/blob/dc60fe747c375c631a92fcfd6c7456f44a760d24/gateway/assets/assets.go#L84
16 | * @see https://github.com/ipfs/boxo/blob/dc60fe747c375c631a92fcfd6c7456f44a760d24/gateway/handler_unixfs_dir.go#L233-L235
17 | */
18 | async function getAssetHash (directoryEntries: UnixFSEntry[]): Promise<string> {
19 | const entryDetails = directoryEntries.reduce((acc, entry) => {
20 | return `${acc}${entry.name}${entry.cid.toString()}`
21 | }, '')
22 | const hashBytes = await sha256.encode(new TextEncoder().encode(entryDetails))
23 | return base32.encode(hashBytes)
24 | }
25 |
26 | export class DirIndexHtmlPlugin extends BasePlugin {
27 | readonly id = 'dir-index-html-plugin'
28 | readonly codes = [dagPbCode]
29 | canHandle (context: PluginContext): boolean {
30 | const { cid, pathDetails, directoryEntries } = context
31 | if (pathDetails == null) {
32 | return false
33 | }
34 | if (pathDetails.terminalElement?.type !== 'directory') {
35 | return false
36 | }
37 |
38 | if (directoryEntries == null || directoryEntries.length === 0) {
39 | return false
40 | }
41 |
42 | return cid.code === dagPbCode
43 | }
44 |
45 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext' | 'pathDetails' | 'directoryEntries'>>): Promise<Response> {
46 | const { resource, pathDetails, directoryEntries } = context
47 |
48 | const { terminalElement, ipfsRoots } = pathDetails
49 |
50 | const gatewayURL = resource
51 | const htmlResponse = dirIndexHtml(terminalElement, directoryEntries, { gatewayURL, log: this.log })
52 |
53 | context.byteRangeContext.setBody(htmlResponse)
54 |
55 | const etagPrefix = `DirIndex-${await getAssetHash(directoryEntries)}_CID-`
56 |
57 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('text/html'), { byteRangeContext: context.byteRangeContext, log: this.log }, {
58 | headers: {
59 | 'Content-Type': context.byteRangeContext.getContentType() ?? 'text/html',
60 | // see https://github.com/ipfs/gateway-conformance/pull/219
61 | 'Cache-Control': 'public, max-age=604800, stale-while-revalidate=2678400',
62 | 'X-Ipfs-Roots': getIpfsRoots(ipfsRoots),
63 | // e.g. DirIndex-<asset-hash>_CID-<terminal-cid>
64 | Etag: getETag({ cid: terminalElement.cid, reqFormat: context.reqFormat, contentPrefix: etagPrefix })
65 | }
66 | })
67 |
68 | return response
69 | }
70 | }
71 |
72 | export const dirIndexHtmlPluginFactory: VerifiedFetchPluginFactory = (opts) => new DirIndexHtmlPlugin(opts)
73 |
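To make the Etag construction above concrete, a short hedged sketch with placeholder values:

```ts
// illustrative only: placeholder values, not real hashes or CIDs
const assetHash = '<base32(sha256(name1 + cid1 + name2 + cid2 + ...))>'
const etagPrefix = `DirIndex-${assetHash}_CID-`
// getETag appends the terminal directory's CID, yielding something like
// "DirIndex-<asset-hash>_CID-<terminal-cid>"
```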
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-ipns-record.ts:
--------------------------------------------------------------------------------
1 | import { Record as DHTRecord } from '@libp2p/kad-dht'
2 | import { Key } from 'interface-datastore'
3 | import { concat as uint8ArrayConcat } from 'uint8arrays/concat'
4 | import { fromString as uint8ArrayFromString } from 'uint8arrays/from-string'
5 | import { toString as uint8ArrayToString } from 'uint8arrays/to-string'
6 | import { getPeerIdFromString } from '../utils/get-peer-id-from-string.js'
7 | import { badRequestResponse, okRangeResponse } from '../utils/responses.js'
8 | import { PluginFatalError } from './errors.js'
9 | import { BasePlugin } from './plugin-base.js'
10 | import type { PluginContext } from './types.js'
11 | import type { PeerId } from '@libp2p/interface'
12 |
13 | /**
14 |  * Accepts an `ipns://<name>`, `https?://<name>.ipns.<host>`, or `https?://<host>/ipns/<name>` URL as a string and
15 | * returns a `Response` containing a raw IPNS record.
16 | */
17 | export class IpnsRecordPlugin extends BasePlugin {
18 | readonly id = 'ipns-record-plugin'
19 | readonly codes = []
20 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean {
21 | this.log('checking if we can handle %c with accept %s', cid, accept)
22 | if (byteRangeContext == null) {
23 | return false
24 | }
25 |
26 | return accept === 'application/vnd.ipfs.ipns-record' || query.format === 'ipns-record'
27 | }
28 |
29 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> {
30 | const { resource, path, options } = context
31 | const { helia } = this.pluginOptions
32 | context.reqFormat = 'ipns-record'
33 | if (path !== '' || !(resource.startsWith('ipns://') || resource.includes('.ipns.') || resource.includes('/ipns/'))) {
34 | this.log.error('invalid request for IPNS name "%s" and path "%s"', resource, path)
35 | throw new PluginFatalError('ERR_INVALID_IPNS_NAME', 'Invalid IPNS name', { response: badRequestResponse(resource, new Error('Invalid IPNS name')) })
36 | }
37 | let peerId: PeerId
38 |
39 | try {
40 | let peerIdString: string
41 | if (resource.startsWith('ipns://')) {
42 | peerIdString = resource.replace('ipns://', '')
43 | } else if (resource.includes('/ipns/')) {
44 | peerIdString = resource.split('/ipns/')[1].split('/')[0].split('?')[0]
45 | } else {
46 | peerIdString = resource.split('.ipns.')[0].split('://')[1]
47 | }
48 |
49 | this.log.trace('trying to parse peer id from "%s"', peerIdString)
50 | peerId = getPeerIdFromString(peerIdString)
51 | } catch (err: any) {
52 | this.log.error('could not parse peer id from IPNS url %s', resource, err)
53 |
54 | throw new PluginFatalError('ERR_NO_PEER_ID_FOUND', 'could not parse peer id from url', { response: badRequestResponse(resource, err) })
55 | }
56 |
57 | // since this call happens after parseResource, we've already resolved the
58 | // IPNS name so a local copy should be in the helia datastore, so we can
59 | // just read it out..
60 | const routingKey = uint8ArrayConcat([
61 | uint8ArrayFromString('/ipns/'),
62 | peerId.toMultihash().bytes
63 | ])
64 | const datastoreKey = new Key('/dht/record/' + uint8ArrayToString(routingKey, 'base32'), false)
65 | const buf = await helia.datastore.get(datastoreKey, options)
66 | const record = DHTRecord.deserialize(buf)
67 |
68 | context.byteRangeContext.setBody(record.value)
69 |
70 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('application/vnd.ipfs.ipns-record'), { byteRangeContext: context.byteRangeContext, log: this.log })
71 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? 'application/vnd.ipfs.ipns-record')
72 |
73 | return response
74 | }
75 | }
76 |
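A hedged usage sketch of the formats handled above, using the IPNS key from the interop fixtures; it assumes the name has already been published and is resolvable:

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

// ?format=ipns-record (or Accept: application/vnd.ipfs.ipns-record) selects
// this plugin; the body is the raw signed IPNS record, not its target content
const res = await fetch('ipns://k51qzi5uqu5dk3v4rmjber23h16xnr23bsggmqqil9z2gduiis5se8dht36dam?format=ipns-record')
const record = new Uint8Array(await res.arrayBuffer())
console.log(res.headers.get('content-type')) // application/vnd.ipfs.ipns-record

await fetch.stop()
```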
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-json.ts:
--------------------------------------------------------------------------------
1 | import * as ipldDagCbor from '@ipld/dag-cbor'
2 | import * as ipldDagJson from '@ipld/dag-json'
3 | import { code as jsonCode } from 'multiformats/codecs/json'
4 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js'
5 | import { BasePlugin } from './plugin-base.js'
6 | import type { PluginContext } from './types.js'
7 |
8 | /**
9 | * Handles `dag-json` content, including requests with Accept: `application/vnd.ipld.dag-cbor` and `application/cbor`.
10 | */
11 | export class JsonPlugin extends BasePlugin {
12 | readonly id = 'json-plugin'
13 | readonly codes = [ipldDagJson.code, jsonCode]
14 | canHandle ({ cid, accept, byteRangeContext }: PluginContext): boolean {
15 | this.log('checking if we can handle %c with accept %s', cid, accept)
16 | if (byteRangeContext == null) {
17 | return false
18 | }
19 |
20 | if (accept === 'application/vnd.ipld.dag-json' && cid.code !== ipldDagCbor.code) {
21 | // we can handle application/vnd.ipld.dag-json, but if the CID codec is ipldDagCbor, DagCborPlugin should handle it
22 | // TODO: remove the need for deny-listing cases in plugins
23 | return true
24 | }
25 |
26 | return ipldDagJson.code === cid.code || jsonCode === cid.code
27 | }
28 |
29 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> {
30 | const { path, resource, cid, accept, options } = context
31 | const { getBlockstore } = this.pluginOptions
32 | const session = options?.session ?? true
33 |
34 | this.log.trace('fetching %c/%s', cid, path)
35 |
36 | const terminalCid = context.pathDetails?.terminalElement.cid ?? context.cid
37 | const blockstore = getBlockstore(terminalCid, resource, session, options)
38 | const block = await blockstore.get(terminalCid, options)
39 | let body: string | Uint8Array
40 |
41 | if (accept === 'application/vnd.ipld.dag-cbor' || accept === 'application/cbor') {
42 | try {
43 | // if vnd.ipld.dag-cbor has been specified, convert to the format - note
44 | // that this supports more data types than regular JSON, the content-type
45 | // response header is set so the user knows to process it differently
46 | const obj = ipldDagJson.decode(block)
47 | body = ipldDagCbor.encode(obj)
48 | } catch (err) {
49 | this.log.error('could not transform %c to application/vnd.ipld.dag-cbor', err)
50 | return notAcceptableResponse(resource)
51 | }
52 | } else {
53 | // skip decoding
54 | body = block
55 | }
56 |
57 | let contentType: string
58 | if (accept == null) {
59 | if (ipldDagJson.code === cid.code) {
60 | contentType = 'application/vnd.ipld.dag-json'
61 | } else {
62 | contentType = 'application/json'
63 | }
64 | } else {
65 | contentType = accept.split(';')[0]
66 | }
67 |
68 | context.byteRangeContext.setBody(body)
69 |
70 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(contentType), { byteRangeContext: context.byteRangeContext, log: this.log })
71 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? contentType)
72 | if (!context.byteRangeContext.isValidRangeRequest) {
73 | response.headers.set('content-length', body.length.toString())
74 | }
75 | return response
76 | }
77 | }
78 |
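A hedged sketch of the content-type defaulting above: with no Accept header, a dag-json block is served as `application/vnd.ipld.dag-json` while a plain json block is served as `application/json`; the CIDs are placeholders:

```ts
import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

const dagJsonRes = await fetch('ipfs://<dag-json-cid>')
console.log(dagJsonRes.headers.get('content-type')) // application/vnd.ipld.dag-json

const jsonRes = await fetch('ipfs://<json-cid>')
console.log(jsonRes.headers.get('content-type')) // application/json

await fetch.stop()
```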
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-raw.ts:
--------------------------------------------------------------------------------
1 | import { code as rawCode } from 'multiformats/codecs/raw'
2 | import { identity } from 'multiformats/hashes/identity'
3 | import { getContentType } from '../utils/get-content-type.js'
4 | import { notFoundResponse, okRangeResponse } from '../utils/responses.js'
5 | import { PluginFatalError } from './errors.js'
6 | import { BasePlugin } from './plugin-base.js'
7 | import type { PluginContext } from './types.js'
8 |
9 | /**
10 | * These are Accept header values that will cause content type sniffing to be
11 | * skipped and set to these values.
12 | */
13 | const RAW_HEADERS = [
14 | 'application/vnd.ipld.dag-json',
15 | 'application/vnd.ipld.raw',
16 | 'application/octet-stream'
17 | ]
18 |
19 | /**
20 | * if the user has specified an `Accept` header, and it's in our list of
21 | * allowable "raw" format headers, use that instead of detecting the content
22 | * type. This prevents the user from receiving something different to the
23 | * specific mime type they signalled they `Accept`.
24 | */
25 | function getOverriddenRawContentType ({ headers, accept }: { headers?: HeadersInit, accept?: string }): string | undefined {
26 | // accept has already been resolved by getResolvedAcceptHeader; if we have it, use it.
27 | const acceptHeader = accept ?? new Headers(headers).get('accept') ?? ''
28 |
29 | // e.g. "Accept: text/html, application/xhtml+xml, application/xml;q=0.9, image/webp, */*;q=0.8"
30 | const acceptHeaders = acceptHeader.split(',')
31 | .map(s => s.split(';')[0])
32 | .map(s => s.trim())
33 |
34 | for (const mimeType of acceptHeaders) {
35 | if (mimeType === '*/*') {
36 | return
37 | }
38 |
39 | if (RAW_HEADERS.includes(mimeType ?? '')) {
40 | return mimeType
41 | }
42 | }
43 | }
44 |
45 | export class RawPlugin extends BasePlugin {
46 | readonly id = 'raw-plugin'
47 | readonly codes: number[] = [rawCode, identity.code]
48 |
49 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean {
50 | this.log('checking if we can handle %c with accept %s', cid, accept)
51 | if (byteRangeContext == null) {
52 | return false
53 | }
54 | return accept === 'application/vnd.ipld.raw' || query.format === 'raw'
55 | }
56 |
57 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> {
58 | const { path, resource, cid, accept, query, options } = context
59 | const { getBlockstore, contentTypeParser } = this.pluginOptions
60 | const session = options?.session ?? true
61 | const log = this.log
62 |
63 | if (accept === 'application/vnd.ipld.raw' || query.format === 'raw') {
64 | context.reqFormat = 'raw'
65 | context.query.download = true
66 | context.query.filename = context.query.filename ?? `${cid.toString()}.bin`
67 | log.trace('Set content disposition...')
68 | } else {
69 | log.trace('Did NOT set content disposition...')
70 | }
71 |
72 | if (path !== '' && cid.code === rawCode) {
73 | log.trace('404-ing raw codec request for %c/%s', cid, path)
74 | // throw new PluginError('ERR_RAW_PATHS_NOT_SUPPORTED', 'Raw codec does not support paths')
75 | // return notFoundResponse(resource, 'Raw codec does not support paths')
76 | throw new PluginFatalError('ERR_RAW_PATHS_NOT_SUPPORTED', 'Raw codec does not support paths', { response: notFoundResponse(resource, 'Raw codec does not support paths') })
77 | }
78 |
79 | const terminalCid = context.pathDetails?.terminalElement.cid ?? context.cid
80 | const blockstore = getBlockstore(terminalCid, resource, session, options)
81 | const result = await blockstore.get(terminalCid, options)
82 | context.byteRangeContext.setBody(result)
83 |
84 | // if the user has specified an `Accept` header that corresponds to a raw
85 | // type, honour that header, so for example they don't request
86 | // `application/vnd.ipld.raw` but get `application/octet-stream`
87 | const contentType = await getContentType({ filename: query.filename, bytes: result, path, defaultContentType: getOverriddenRawContentType({ headers: options?.headers, accept }), contentTypeParser, log })
88 | const response = okRangeResponse(resource, context.byteRangeContext.getBody(contentType), { byteRangeContext: context.byteRangeContext, log }, {
89 | redirected: false
90 | })
91 |
92 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? contentType)
93 |
94 | return response
95 | }
96 | }
97 |
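A sketch of the two ways a request reaches RawPlugin, assuming the public `createVerifiedFetch` entry point; `<cid>` is a placeholder:

import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

// selected via the Accept header...
const byHeader = await fetch('ipfs://<cid>', {
  headers: { accept: 'application/vnd.ipld.raw' }
})

// ...or via the `format` query parameter; either route also marks the
// response as a download with a `<cid>.bin` default filename
const byQuery = await fetch('ipfs://<cid>?format=raw')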
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugin-handle-tar.ts:
--------------------------------------------------------------------------------
1 | import { code as dagPbCode } from '@ipld/dag-pb'
2 | import toBrowserReadableStream from 'it-to-browser-readablestream'
3 | import { code as rawCode } from 'multiformats/codecs/raw'
4 | import { getETag } from '../utils/get-e-tag.js'
5 | import { tarStream } from '../utils/get-tar-stream.js'
6 | import { notAcceptableResponse, okRangeResponse } from '../utils/responses.js'
7 | import { BasePlugin } from './plugin-base.js'
8 | import type { PluginContext } from './types.js'
9 |
10 | /**
11 | * Accepts a UnixFS `CID` and returns a `.tar` file containing the file or
12 | * directory structure referenced by the `CID`.
13 | */
14 | export class TarPlugin extends BasePlugin {
15 | readonly id = 'tar-plugin'
16 | readonly codes = []
17 | canHandle ({ cid, accept, query, byteRangeContext }: PluginContext): boolean {
18 | this.log('checking if we can handle %c with accept %s', cid, accept)
19 | if (byteRangeContext == null) {
20 | return false
21 | }
22 | return accept === 'application/x-tar' || query.format === 'tar'
23 | }
24 |
25 | async handle (context: PluginContext & Required<Pick<PluginContext, 'byteRangeContext'>>): Promise<Response> {
26 | const { cid, path, resource, options, pathDetails } = context
27 | const { getBlockstore } = this.pluginOptions
28 |
29 | const terminusElement = pathDetails?.terminalElement.cid ?? cid
30 | if (terminusElement.code !== dagPbCode && terminusElement.code !== rawCode) {
31 | return notAcceptableResponse('only UnixFS data can be returned in a TAR file')
32 | }
33 |
34 | context.reqFormat = 'tar'
35 | context.query.download = true
36 | context.query.filename = context.query.filename ?? `${terminusElement.toString()}.tar`
37 |
38 | const blockstore = getBlockstore(terminusElement, resource, options?.session, options)
39 | const stream = toBrowserReadableStream(tarStream(`/ipfs/${cid}/${path}`, blockstore, options))
40 |
41 | context.byteRangeContext.setBody(stream)
42 |
43 | const response = okRangeResponse(resource, context.byteRangeContext.getBody('application/x-tar'), { byteRangeContext: context.byteRangeContext, log: this.log })
44 | response.headers.set('content-type', context.byteRangeContext.getContentType() ?? 'application/x-tar')
45 |
46 | response.headers.set('etag', getETag({ cid: terminusElement, reqFormat: context.reqFormat, weak: true }))
47 |
48 | return response
49 | }
50 | }
51 |
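A sketch of requesting a TAR archive, assuming the public `createVerifiedFetch` entry point; `<unixfs-cid>` is a placeholder for a dag-pb or raw CID:

import { createVerifiedFetch } from '@helia/verified-fetch'

const fetch = await createVerifiedFetch()

const res = await fetch('ipfs://<unixfs-cid>?format=tar')

// the plugin marks the response as a download named `<terminal-cid>.tar`
// and sets a weak ETag derived from the terminal element's CID
console.log(res.headers.get('etag'))
const tarBytes = new Uint8Array(await res.arrayBuffer())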
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/plugins.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Export extension (non-default) plugins here
3 | */
4 | export { DirIndexHtmlPlugin, dirIndexHtmlPluginFactory } from './plugin-handle-dir-index-html.js'
5 |
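A sketch of opting in to an extension plugin; it assumes `createVerifiedFetch` accepts a `plugins` array of `VerifiedFetchPluginFactory` functions and that the package exposes these exports under a `plugins` subpath:

import { createVerifiedFetch } from '@helia/verified-fetch'
import { dirIndexHtmlPluginFactory } from '@helia/verified-fetch/plugins'

// non-default plugins are appended to the plugin pipeline via options
const fetch = await createVerifiedFetch(undefined, {
  plugins: [dirIndexHtmlPluginFactory]
})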
--------------------------------------------------------------------------------
/packages/verified-fetch/src/plugins/types.ts:
--------------------------------------------------------------------------------
1 | import type { PluginError } from './errors.js'
2 | import type { VerifiedFetchInit } from '../index.js'
3 | import type { ContentTypeParser, RequestFormatShorthand } from '../types.js'
4 | import type { ByteRangeContext } from '../utils/byte-range-context.js'
5 | import type { ParsedUrlStringResults } from '../utils/parse-url-string.js'
6 | import type { PathWalkerResponse } from '../utils/walk-path.js'
7 | import type { AbortOptions, ComponentLogger, Logger } from '@libp2p/interface'
8 | import type { Helia } from 'helia'
9 | import type { Blockstore } from 'interface-blockstore'
10 | import type { UnixFSEntry } from 'ipfs-unixfs-exporter'
11 | import type { CID } from 'multiformats/cid'
12 | import type { CustomProgressEvent } from 'progress-events'
13 |
14 | /**
15 | * Contains common components and functions required by plugins to handle a request.
16 | * - Read-Only: Plugins can read these components but shouldn't rewrite them.
17 | * - Persistent: Relevant even after the request completes (e.g., logging or metrics).
18 | */
19 | export interface PluginOptions {
20 | logger: ComponentLogger
21 | getBlockstore(cid: CID, resource: string | CID, useSession?: boolean, options?: AbortOptions): Blockstore
22 | handleServerTiming<T>(name: string, description: string, fn: () => Promise<T>, withServerTiming: boolean): Promise<T>
23 | contentTypeParser?: ContentTypeParser
24 | helia: Helia
25 | }
26 |
27 | /**
28 | * Represents the ephemeral, modifiable state used by the pipeline.
29 | * - Mutable: Evolves as you walk the plugin chain.
30 | * - Shared Data: Allows plugins to communicate partial results, discovered data, or interim errors.
31 | * - Ephemeral: Typically discarded once fetch(...) completes.
32 | */
33 | export interface PluginContext extends ParsedUrlStringResults {
34 | readonly cid: CID
35 | readonly path: string
36 | readonly resource: string
37 | readonly accept?: string
38 | /**
39 | * Tracks the last time the context was modified, so the pipeline knows whether a plugin has changed it.
40 | * A plugin should increment this value if it modifies the context.
41 | */
42 | modified: number
43 | withServerTiming?: boolean
44 | onProgress?(evt: CustomProgressEvent): void
45 | options?: Omit<VerifiedFetchInit, 'signal'> & AbortOptions
46 | isDirectory?: boolean
47 | directoryEntries?: UnixFSEntry[]
48 | errors?: PluginError[]
49 | reqFormat?: RequestFormatShorthand
50 | pathDetails?: PathWalkerResponse
51 | query: ParsedUrlStringResults['query']
52 | /**
53 | * ByteRangeContext contains information about the size of the content and range requests.
54 | * This can be used to set the Content-Length header without loading the entire body.
55 | *
56 | * This is set by the ByteRangeContextPlugin
57 | */
58 | byteRangeContext?: ByteRangeContext
59 | [key: string]: unknown
60 | }
61 |
62 | export interface VerifiedFetchPlugin {
63 | readonly id: string
64 | readonly codes: number[]
65 | readonly log: Logger
66 | canHandle (context: PluginContext): boolean
67 | handle (context: PluginContext): Promise<Response | null>
68 | }
69 |
70 | export interface VerifiedFetchPluginFactory {
71 | (options: PluginOptions): VerifiedFetchPlugin
72 | }
73 |
74 | export interface PluginErrorOptions {
75 | fatal?: boolean
76 | details?: Record<string, any>
77 | response?: Response
78 | }
79 |
80 | export interface FatalPluginErrorOptions extends PluginErrorOptions {
81 | response: Response
82 | }
83 |
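A minimal custom plugin against these interfaces - a sketch only, written as if it lived alongside the built-in plugins, and assuming `BasePlugin`'s constructor takes `PluginOptions` and that returning `null` from `handle` lets the pipeline continue to the next plugin:

import { BasePlugin } from './plugin-base.js'
import type { PluginContext, PluginOptions, VerifiedFetchPluginFactory } from './types.js'

class HeaderStampPlugin extends BasePlugin {
  readonly id = 'header-stamp-plugin'
  readonly codes: number[] = []

  canHandle (context: PluginContext): boolean {
    // only participate once ByteRangeContextPlugin has run
    return context.byteRangeContext != null
  }

  async handle (context: PluginContext): Promise<Response | null> {
    // stash a value for later plugins via the index signature, and bump
    // `modified` so the pipeline knows the context changed
    context.stampedAt = Date.now()
    context.modified++
    return null
  }
}

// hypothetical factory, wired in the same shape as dirIndexHtmlPluginFactory
export const headerStampPluginFactory: VerifiedFetchPluginFactory =
  (options: PluginOptions) => new HeaderStampPlugin(options)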
--------------------------------------------------------------------------------
/packages/verified-fetch/src/singleton.ts:
--------------------------------------------------------------------------------
1 | import { createVerifiedFetch } from './index.js'
2 | import type { Resource, VerifiedFetch, VerifiedFetchInit } from './index.js'
3 |
4 | let impl: VerifiedFetch | undefined
5 |
6 | export const verifiedFetch: VerifiedFetch = async function verifiedFetch (resource: Resource, options?: VerifiedFetchInit): Promise<Response> {
7 | if (impl == null) {
8 | impl = await createVerifiedFetch()
9 | }
10 |
11 | return impl(resource, options)
12 | }
13 |
14 | verifiedFetch.start = async function () {
15 | await impl?.start()
16 | }
17 |
18 | verifiedFetch.stop = async function () {
19 | await impl?.stop()
20 | }
21 |
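A usage sketch of the singleton; `<cid>` is a placeholder. The first call lazily creates the shared instance and later calls reuse it:

import { verifiedFetch } from '@helia/verified-fetch'

const res = await verifiedFetch('ipfs://<cid>')
console.log(res.status)

// shut down the shared instance when the application exits
await verifiedFetch.stop()

Note that, because of the optional chaining above, `start()` and `stop()` are no-ops until the first fetch has created the underlying instance.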
--------------------------------------------------------------------------------
/packages/verified-fetch/src/types.ts:
--------------------------------------------------------------------------------
1 | export type RequestFormatShorthand = 'raw' | 'car' | 'tar' | 'ipns-record' | 'dag-json' | 'dag-cbor' | 'json' | 'cbor'
2 |
3 | export type SupportedBodyTypes = string | Uint8Array | ArrayBuffer | Blob | ReadableStream<Uint8Array> | null
4 |
5 | /**
6 | * A ContentTypeParser attempts to return the mime type of a given file. It
7 | * receives the first chunk of the file data and the file name, if it is
8 | * available. The function can be sync or async and if it returns/resolves to
9 | * `undefined`, `application/octet-stream` will be used.
10 | */
11 | export interface ContentTypeParser {
12 | /**
13 | * Attempt to determine a mime type, either from the passed bytes or the
14 | * filename if it is available.
15 | */
16 | (bytes: Uint8Array, fileName?: string): Promise<string | undefined> | string | undefined
17 | }
18 |
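A sketch of a custom parser matching this interface, using the `file-type` dependency already present in this package, and assuming `ContentTypeParser` is re-exported from the package root; wiring it up via `createVerifiedFetch`'s `contentTypeParser` option is shown commented out:

import { fileTypeFromBuffer } from 'file-type'
import type { ContentTypeParser } from '@helia/verified-fetch'

const contentTypeParser: ContentTypeParser = async (bytes, fileName) => {
  // trust an explicit extension first, then sniff magic bytes;
  // resolving to undefined falls back to application/octet-stream
  if (fileName?.endsWith('.svg')) {
    return 'image/svg+xml'
  }

  return (await fileTypeFromBuffer(bytes))?.mime
}

// const fetch = await createVerifiedFetch(undefined, { contentTypeParser })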
--------------------------------------------------------------------------------
/packages/verified-fetch/src/utils/content-type-parser.ts:
--------------------------------------------------------------------------------
1 | import { logger } from '@libp2p/logger'
2 | import { fileTypeFromBuffer } from 'file-type'
3 |
4 | const log = logger('helia:verified-fetch:content-type-parser')
5 |
6 | export const defaultMimeType = 'application/octet-stream'
7 | function checkForSvg (text: string): boolean {
8 | log('checking for svg')
9 | return /^(<\?xml[^>]+>)?[^<^\w]+