├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── config.yml
│   │   └── open_an_issue.md
│   ├── config.yml
│   ├── dependabot.yml
│   └── workflows
│       └── main.yml
├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── package.json
├── src
│   ├── adapter.js
│   ├── errors.js
│   ├── index.js
│   ├── key.js
│   ├── memory.js
│   ├── tests.js
│   ├── types.d.ts
│   └── utils.js
├── test
│   ├── key.spec.js
│   ├── memory.spec.js
│   └── utils.spec.js
└── tsconfig.json
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: Getting Help on IPFS
4 | url: https://ipfs.io/help
5 | about: All information about how and where to get help on IPFS.
6 | - name: IPFS Official Forum
7 | url: https://discuss.ipfs.io
8 | about: Please post general questions, support requests, and discussions here.
9 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/open_an_issue.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Open an issue
3 | about: Only for actionable issues relevant to this repository.
4 | title: ''
5 | labels: need/triage
6 | assignees: ''
7 |
8 | ---
9 |
20 |
--------------------------------------------------------------------------------
/.github/config.yml:
--------------------------------------------------------------------------------
1 | # Configuration for welcome - https://github.com/behaviorbot/welcome
2 |
3 | # Configuration for new-issue-welcome - https://github.com/behaviorbot/new-issue-welcome
4 | # Comment to be posted on first-time issues
5 | newIssueWelcomeComment: >
6 | Thank you for submitting your first issue to this repository! A maintainer
7 | will be here shortly to triage and review.
8 |
9 | In the meantime, please double-check that you have provided all the
10 | necessary information to make this process easy! Any information that can
11 | help save additional round trips is useful! We currently aim to give
12 | initial feedback within **two business days**. If this does not happen, feel
13 | free to leave a comment.
14 |
15 | Please keep an eye on how this issue will be labeled, as labels give an
16 | overview of priorities, assignments and additional actions requested by the
17 | maintainers:
18 |
19 | - "Priority" labels will show how urgent this is for the team.
20 | - "Status" labels will show if this is ready to be worked on, blocked, or in progress.
21 | - "Need" labels will indicate if additional input or analysis is required.
22 |
23 | Finally, remember to use https://discuss.ipfs.io if you just need general
24 | support.
25 |
26 | # Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
27 | # Comment to be posted on PRs from first-time contributors to your repository
28 | newPRWelcomeComment: >
29 | Thank you for submitting this PR!
30 |
31 | A maintainer will be here shortly to review it.
32 |
33 | We are super grateful, but we are also overloaded! Help us by making sure
34 | that:
35 |
36 | * The context for this PR is clear, with relevant discussion, decisions
37 | and stakeholders linked/mentioned.
38 |
39 | * Your contribution itself is clear (code comments, self-review for the
40 | rest) and in its best form. Follow the [code contribution
41 | guidelines](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md#code-contribution-guidelines)
42 | if they apply.
43 |
44 | Getting other community members to do a review would be a great help too on
45 | complex PRs (you can ask in the chats/forums). If you are unsure about
46 | something, just leave us a comment.
47 |
48 | Next steps:
49 |
50 | * A maintainer will triage and assign priority to this PR, commenting on
51 | any missing things and potentially assigning a reviewer for high
52 | priority items.
53 |
54 | * The PR gets reviewed, discussed and approved as needed.
55 |
56 | * The PR is merged by maintainers when it has been approved and comments addressed.
57 |
58 | We currently aim to provide initial feedback/triaging within **two business
59 | days**. Please keep an eye on any labelling actions, as these will indicate
60 | priorities and status of your contribution.
61 |
62 | We are very grateful for your contribution!
63 |
64 |
65 | # Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
66 | # Comment to be posted to on pull requests merged by a first time user
67 | # Currently disabled
68 | #firstPRMergeComment: ""
69 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: npm
4 | directory: "/"
5 | schedule:
6 | interval: daily
7 | time: "11:00"
8 | open-pull-requests-limit: 10
9 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 | on:
3 | push:
4 | branches:
5 | - master
6 | pull_request:
7 | branches:
8 | - master
9 |
10 | jobs:
11 | check:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v2
15 | - run: npm install
16 | - run: npx aegir lint
17 | - uses: gozala/typescript-error-reporter-action@v1.0.8
18 | - run: npx aegir build
19 | - run: npx aegir dep-check
20 | - uses: ipfs/aegir/actions/bundle-size@master
21 | name: size
22 | with:
23 | github_token: ${{ secrets.GITHUB_TOKEN }}
24 | test-node:
25 | needs: check
26 | runs-on: ${{ matrix.os }}
27 | strategy:
28 | matrix:
29 | os: [windows-latest, ubuntu-latest, macos-latest]
30 | node: [14, 15]
31 | fail-fast: true
32 | steps:
33 | - uses: actions/checkout@v2
34 | - uses: actions/setup-node@v1
35 | with:
36 | node-version: ${{ matrix.node }}
37 | - run: npm install
38 | - run: npx aegir test -t node --cov --bail
39 | - uses: codecov/codecov-action@v1
40 | test-chrome:
41 | needs: check
42 | runs-on: ubuntu-latest
43 | steps:
44 | - uses: actions/checkout@v2
45 | - run: npm install
46 | - run: npx aegir test -t browser -t webworker --bail
47 | test-firefox:
48 | needs: check
49 | runs-on: ubuntu-latest
50 | steps:
51 | - uses: actions/checkout@v2
52 | - run: npm install
53 | - run: npx aegir test -t browser -t webworker --bail -- --browser firefox
54 | test-electron-main:
55 | needs: check
56 | runs-on: ubuntu-latest
57 | steps:
58 | - uses: actions/checkout@v2
59 | - run: npm install
60 | - run: npx xvfb-maybe aegir test -t electron-main --bail
61 | test-electron-renderer:
62 | needs: check
63 | runs-on: ubuntu-latest
64 | steps:
65 | - uses: actions/checkout@v2
66 | - run: npm install
67 | - run: npx xvfb-maybe aegir test -t electron-renderer --bail
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | yarn.lock
2 | package-lock.json
3 |
4 | **/node_modules/
5 | **/*.log
6 | test/repo-tests*
7 |
8 | # Logs
9 | logs
10 | *.log
11 |
12 | coverage
13 | .nyc_output
14 |
15 | # Runtime data
16 | pids
17 | *.pid
18 | *.seed
19 |
20 | # Directory for instrumented libs generated by jscoverage/JSCover
21 | lib-cov
22 |
23 | # Coverage directory used by tools like istanbul
24 | coverage
25 |
26 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
27 | .grunt
28 |
29 | # node-waf configuration
30 | .lock-wscript
31 |
32 | build
33 |
34 | # Dependency directory
35 | # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
36 | node_modules
37 |
38 | dist
39 | docs
40 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [4.0.2](https://github.com/ipfs/interface-datastore/compare/v4.0.1...v4.0.2) (2021-06-10)
2 |
3 |
4 |
5 | ## [4.0.1](https://github.com/ipfs/interface-datastore/compare/v4.0.0...v4.0.1) (2021-05-04)
6 |
7 |
8 |
9 | # [4.0.0](https://github.com/ipfs/interface-datastore/compare/v3.0.6...v4.0.0) (2021-04-15)
10 |
11 |
12 | ### Features
13 |
14 | * split .query into .query and .queryKeys ([#87](https://github.com/ipfs/interface-datastore/issues/87)) ([4bb5ebc](https://github.com/ipfs/interface-datastore/commit/4bb5ebccec28a6fbfa51411183e037c77313fb8f))
15 |
16 |
17 |
18 | ## [3.0.6](https://github.com/ipfs/interface-datastore/compare/v3.0.5...v3.0.6) (2021-04-14)
19 |
20 |
21 |
22 | ## [3.0.5](https://github.com/ipfs/interface-datastore/compare/v3.0.4...v3.0.5) (2021-04-06)
23 |
24 |
25 |
26 | ## [3.0.4](https://github.com/ipfs/interface-datastore/compare/v3.0.3...v3.0.4) (2021-02-05)
27 |
28 |
29 | ### Bug Fixes
30 |
31 | * renames .ts to .d.ts and copies to dist/src on build ([#71](https://github.com/ipfs/interface-datastore/issues/71)) ([568ee54](https://github.com/ipfs/interface-datastore/commit/568ee54323e487bff191437e13b1aeaa0a85f411)), closes [#68](https://github.com/ipfs/interface-datastore/issues/68) [#69](https://github.com/ipfs/interface-datastore/issues/69)
32 |
33 |
34 |
35 | ## [3.0.3](https://github.com/ipfs/interface-datastore/compare/v3.0.2...v3.0.3) (2021-01-22)
36 |
37 |
38 | ### Bug Fixes
39 |
40 | * fix datastore factory ([#65](https://github.com/ipfs/interface-datastore/issues/65)) ([586f883](https://github.com/ipfs/interface-datastore/commit/586f883d3f5ea0391cf3184024db9a60d9b4aa56))
41 |
42 |
43 |
44 | ## [3.0.2](https://github.com/ipfs/interface-datastore/compare/v3.0.1...v3.0.2) (2021-01-22)
45 |
46 |
47 | ### Bug Fixes
48 |
49 | * open store in tests ([#66](https://github.com/ipfs/interface-datastore/issues/66)) ([6092b10](https://github.com/ipfs/interface-datastore/commit/6092b103b40cb8ee1c57d42082221c1e899bdc14))
50 |
51 |
52 |
53 | ## [3.0.1](https://github.com/ipfs/interface-datastore/compare/v3.0.0...v3.0.1) (2021-01-17)
54 |
55 |
56 |
57 | # [3.0.0](https://github.com/ipfs/interface-datastore/compare/v2.0.1...v3.0.0) (2021-01-15)
58 |
59 |
60 | ### Bug Fixes
61 |
62 | * ci ([f197aa4](https://github.com/ipfs/interface-datastore/commit/f197aa4a719a388ba91c65ea49ee3cdc5be4dc84))
63 | * feedback ([248cddb](https://github.com/ipfs/interface-datastore/commit/248cddb7d14ee9f29e92fdbe24916578577f4f6d))
64 | * fix some types ([42aebd5](https://github.com/ipfs/interface-datastore/commit/42aebd5f56e4577e6743f0c3861ea0a558e142b7))
65 | * remove types versions and tweak orders ([e449528](https://github.com/ipfs/interface-datastore/commit/e449528d5b98edf6b62e770033d59686928fe67e))
66 | * types ([f8fe99e](https://github.com/ipfs/interface-datastore/commit/f8fe99ec949a694434564b0494bc9f6b57351df4))
67 | * update aegir and feedback ([eab84b0](https://github.com/ipfs/interface-datastore/commit/eab84b025c03b6a2fff805af3a238cefd57545f2))
68 |
69 |
70 | ### Features
71 |
72 | * ts types, github ci and clean up ([2afd9be](https://github.com/ipfs/interface-datastore/commit/2afd9be3abf747528473c46550671f92acc5792e))
73 |
74 |
75 |
76 | ## [2.0.1](https://github.com/ipfs/interface-datastore/compare/v2.0.0...v2.0.1) (2020-11-09)
77 |
78 |
79 |
80 |
81 | # [2.0.0](https://github.com/ipfs/interface-datastore/compare/v1.0.4...v2.0.0) (2020-07-29)
82 |
83 |
84 | ### Bug Fixes
85 |
86 | * remove node buffer ([#43](https://github.com/ipfs/interface-datastore/issues/43)) ([b2f0963](https://github.com/ipfs/interface-datastore/commit/b2f0963))
87 |
88 |
89 | ### BREAKING CHANGES
90 |
91 | * - node Buffers have been replaced with Uint8Arrays
92 | - `key.toBuffer` has been replaced with `key.uint8Array()`
93 |
94 |
95 |
96 |
97 | ## [1.0.4](https://github.com/ipfs/interface-datastore/compare/v1.0.3...v1.0.4) (2020-06-10)
98 |
99 |
100 |
101 |
102 | ## [1.0.3](https://github.com/ipfs/interface-datastore/compare/v1.0.2...v1.0.3) (2020-06-10)
103 |
104 |
105 | ### Bug Fixes
106 |
107 | * remove .has method from interface ([a0ebd3a](https://github.com/ipfs/interface-datastore/commit/a0ebd3a))
108 |
109 |
110 | ### BREAKING CHANGES
111 |
112 | * - The `.has` method has been removed, call `.get` instead
113 |
114 |
115 |
116 |
117 | ## [1.0.2](https://github.com/ipfs/interface-datastore/compare/v1.0.1...v1.0.2) (2020-05-07)
118 |
119 |
120 | ### Features
121 |
122 | * add adapter ([4223581](https://github.com/ipfs/interface-datastore/commit/4223581))
123 |
124 |
125 |
126 |
127 | ## [1.0.1](https://github.com/ipfs/interface-datastore/compare/v1.0.0...v1.0.1) (2020-05-07)
128 |
129 |
130 |
131 |
132 | # [1.0.0](https://github.com/ipfs/interface-datastore/compare/v0.8.3...v1.0.0) (2020-05-07)
133 |
134 |
135 | ### Features
136 |
137 | * add streaming methods and allow passing AbortSignals ([#36](https://github.com/ipfs/interface-datastore/issues/36)) ([6dace38](https://github.com/ipfs/interface-datastore/commit/6dace38))
138 |
139 |
140 |
141 |
142 | ## [0.8.3](https://github.com/ipfs/interface-datastore/compare/v0.8.2...v0.8.3) (2020-04-07)
143 |
144 |
145 |
146 |
147 | ## [0.8.2](https://github.com/ipfs/interface-datastore/compare/v0.8.1...v0.8.2) (2020-04-01)
148 |
149 |
150 | ### Bug Fixes
151 |
152 | * remove node globals ([#35](https://github.com/ipfs/interface-datastore/issues/35)) ([a9130c0](https://github.com/ipfs/interface-datastore/commit/a9130c0))
153 |
154 |
155 |
156 |
157 | ## [0.8.1](https://github.com/ipfs/interface-datastore/compare/v0.8.0...v0.8.1) (2020-02-17)
158 |
159 |
160 | ### Bug Fixes
161 |
162 | * do not stringify potentially invalid characters ([#34](https://github.com/ipfs/interface-datastore/issues/34)) ([0034ede](https://github.com/ipfs/interface-datastore/commit/0034ede))
163 |
164 |
165 |
166 |
167 | # [0.8.0](https://github.com/ipfs/interface-datastore/compare/v0.7.0...v0.8.0) (2019-08-09)
168 |
169 |
170 | ### Features
171 |
172 | * concat operation on Key ([8c9226c](https://github.com/ipfs/interface-datastore/commit/8c9226c))
173 |
174 |
175 |
176 |
177 | # [0.7.0](https://github.com/ipfs/interface-datastore/compare/v0.6.0...v0.7.0) (2019-05-01)
178 |
179 |
180 | ### Features
181 |
182 | * refactor to async iterators ([#25](https://github.com/ipfs/interface-datastore/issues/25)) ([ab2f2b9](https://github.com/ipfs/interface-datastore/commit/ab2f2b9))
183 |
184 |
185 |
186 |
187 | # [0.6.0](https://github.com/ipfs/interface-datastore/compare/v0.5.0...v0.6.0) (2018-10-24)
188 |
189 |
190 | ### Bug Fixes
191 |
192 | * add _key to the API functions using the instance ([5a377ed](https://github.com/ipfs/interface-datastore/commit/5a377ed))
193 |
194 |
195 | ### Features
196 |
197 | * add class-is module ([362eff8](https://github.com/ipfs/interface-datastore/commit/362eff8))
198 |
199 |
200 |
201 |
202 | # [0.5.0](https://github.com/ipfs/interface-datastore/compare/v0.4.2...v0.5.0) (2018-09-17)
203 |
204 |
205 | ### Features
206 |
207 | * add basic error codes ([bbf5f70](https://github.com/ipfs/interface-datastore/commit/bbf5f70))
208 |
209 |
210 |
211 |
212 | ## [0.4.2](https://github.com/ipfs/interface-datastore/compare/v0.4.1...v0.4.2) (2017-12-05)
213 |
214 |
215 |
216 |
217 | ## [0.4.1](https://github.com/ipfs/interface-datastore/compare/v0.4.0...v0.4.1) (2017-11-04)
218 |
219 |
220 |
221 |
222 | # [0.4.0](https://github.com/ipfs/interface-datastore/compare/v0.3.1...v0.4.0) (2017-11-03)
223 |
224 |
225 | ### Bug Fixes
226 |
227 | * make datastore OS agnostic (path things) ([#13](https://github.com/ipfs/interface-datastore/issues/13)) ([5697173](https://github.com/ipfs/interface-datastore/commit/5697173))
228 |
229 |
230 |
231 |
232 | ## [0.3.1](https://github.com/ipfs/interface-datastore/compare/v0.3.0...v0.3.1) (2017-09-07)
233 |
234 |
235 |
236 |
237 | # [0.3.0](https://github.com/ipfs/interface-datastore/compare/v0.2.2...v0.3.0) (2017-07-22)
238 |
239 |
240 |
241 |
242 | ## [0.2.2](https://github.com/ipfs/interface-datastore/compare/v0.2.1...v0.2.2) (2017-06-03)
243 |
244 |
245 | ### Bug Fixes
246 |
247 | * use os specific path separator ([d7ec65a](https://github.com/ipfs/interface-datastore/commit/d7ec65a))
248 |
249 |
250 |
251 |
252 | ## [0.2.1](https://github.com/ipfs/interface-datastore/compare/v0.2.0...v0.2.1) (2017-05-23)
253 |
254 |
255 |
256 |
257 | # [0.2.0](https://github.com/ipfs/interface-datastore/compare/v0.1.1...v0.2.0) (2017-03-23)
258 |
259 |
260 | ### Features
261 |
262 | * add open method ([#4](https://github.com/ipfs/interface-datastore/issues/4)) ([cbe8f7f](https://github.com/ipfs/interface-datastore/commit/cbe8f7f))
263 |
264 |
265 |
266 |
267 | ## [0.1.1](https://github.com/ipfs/interface-datastore/compare/v0.1.0...v0.1.1) (2017-03-15)
268 |
269 |
270 | ### Bug Fixes
271 |
272 | * libp2p-crypto is a regular dependency ([3db267b](https://github.com/ipfs/interface-datastore/commit/3db267b))
273 |
274 |
275 |
276 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 IPFS
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ⛔️ DEPRECATED: This module has been merged into the [ipfs-interfaces](https://github.com/ipfs/js-ipfs-interfaces) module
2 |
3 | # interface-datastore
4 |
5 | [](http://ipn.io)
6 | [](http://ipfs.io/)
7 | [](http://webchat.freenode.net/?channels=%23ipfs)
8 | [](https://codecov.io/gh/ipfs/interface-datastore)
9 | [](https://github.com/ipfs/interface-datastore/actions?query=branch%3Amaster+workflow%3Aci+)
10 |
11 | > Implementation of the [datastore](https://github.com/ipfs/go-datastore) interface in JavaScript
12 |
13 | ## Lead Maintainer
14 |
15 | [Alex Potsides](https://github.com/achingbrain)
16 |
17 | ## Table of Contents
18 |
19 | - [Implementations](#implementations)
20 | - [Adapter](#adapter)
21 | - [Install](#install)
22 | - [Usage](#usage)
23 | - [Wrapping Stores](#wrapping-stores)
24 | - [Test suite](#test-suite)
25 | - [Aborting requests](#aborting-requests)
26 | - [Concurrency](#concurrency)
27 | - [Keys](#keys)
28 | - [API](#api)
29 | - [Contribute](#contribute)
30 | - [License](#license)
31 |
32 | ## Implementations
33 |
34 | - Backed Implementations
35 | - Memory: [`src/memory`](src/memory.js)
36 | - level: [`datastore-level`](https://github.com/ipfs/js-datastore-level) (supports any levelup compatible backend)
37 | - File System: [`datastore-fs`](https://github.com/ipfs/js-datastore-fs)
38 | - Wrapper Implementations
39 | - Mount: [`datastore-core/src/mount`](https://github.com/ipfs/js-datastore-core/tree/master/src/mount.js)
40 | - Keytransform: [`datastore-core/src/keytransform`](https://github.com/ipfs/js-datastore-core/tree/master/src/keytransform.js)
41 | - Sharding: [`datastore-core/src/sharding`](https://github.com/ipfs/js-datastore-core/tree/master/src/sharding.js)
42 | - Tiered: [`datastore-core/src/tiered`](https://github.com/ipfs/js-datastore-core/blob/master/src/tiered.js)
43 | - Namespace: [`datastore-core/src/namespace`](https://github.com/ipfs/js-datastore-core/tree/master/src/namespace.js)
44 |
45 | If you want the same functionality as [go-ds-flatfs](https://github.com/ipfs/go-ds-flatfs), use sharding with fs.
46 |
47 | ```js
48 | const FsStore = require('datastore-fs')
49 | const ShardingStore = require('datastore-core').ShardingDatastore
50 | const NextToLast = require('datastore-core').shard.NextToLast
51 |
52 | const fs = new FsStore('path/to/store')
53 |
54 | // flatfs now works like go-flatfs
55 | const flatfs = await ShardingStore.createOrOpen(fs, new NextToLast(2))
56 | ```
57 |
58 | ## Adapter
59 |
60 | An adapter is made available to make implementing your own datastore easier:
61 |
62 | ```javascript
63 | const { Adapter } = require('interface-datastore')
64 |
65 | class MyDatastore extends Adapter {
66 | constructor () {
67 | super()
68 | }
69 |
70 | async put (key, val) {
71 | // your implementation here
72 | }
73 |
74 | async get (key) {
75 | // your implementation here
76 | }
77 |
78 | // etc...
79 | }
80 | ```
81 |
82 | See the [MemoryDatastore](./src/memory.js) for an example of how it is used.
83 |
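Because `Adapter` derives `putMany`, `getMany`, `deleteMany`, `batch`, `query` and `queryKeys` from the primitive methods, a complete minimal datastore only needs `open`/`close`, `put`/`get`/`has`/`delete` and the `_all`/`_allKeys` generators. A rough sketch of a hypothetical `MapDatastore` backed by a `Map` (essentially what [MemoryDatastore](./src/memory.js) already does):

```js
const { Adapter, Key, Errors } = require('interface-datastore')

class MapDatastore extends Adapter {
  constructor () {
    super()
    this._data = new Map()
  }

  async open () {}
  async close () {}

  async put (key, val) {
    this._data.set(key.toString(), val)
  }

  async get (key) {
    if (!this._data.has(key.toString())) throw Errors.notFoundError()
    return this._data.get(key.toString())
  }

  async has (key) {
    return this._data.has(key.toString())
  }

  async delete (key) {
    this._data.delete(key.toString())
  }

  // query() and queryKeys() are built on top of these two generators
  async * _all () {
    for (const [name, value] of this._data.entries()) {
      yield { key: new Key(name), value }
    }
  }

  async * _allKeys () {
    for (const name of this._data.keys()) {
      yield new Key(name)
    }
  }
}
```
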
84 | ## Install
85 |
86 | ```sh
87 | $ npm install interface-datastore
88 | ```
89 |
90 | ## Usage
91 |
92 | ### Wrapping Stores
93 |
94 | ```js
95 | const MemoryStore = require('interface-datastore').MemoryDatastore
96 | const MountStore = require('datastore-core').MountDatastore
97 | const Key = require('interface-datastore').Key
98 |
99 | const store = new MountStore({ prefix: new Key('/a'), datastore: new MemoryStore() })
100 | ```
101 |
102 | ### Test suite
103 |
104 | Available under [`src/tests.js`](src/tests.js)
105 |
106 | ```js
107 | describe('mystore', () => {
108 | require('interface-datastore/src/tests')({
109 | async setup () {
110 | return instanceOfMyStore
111 | },
112 | async teardown () {
113 | // cleanup resources
114 | }
115 | })
116 | })
117 | ```
118 |
119 | ### Aborting requests
120 |
121 | Most API methods accept an [AbortSignal][] as part of an options object. Implementations may listen for an `abort` event emitted by this object, or test the `signal.aborted` property. When the signal fires, implementations should tear down any long-lived requests or resources they have created.
122 |
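For example, a lookup can be cancelled by passing a signal in the options object. A minimal sketch, assuming an environment with a global `AbortController` and an async context (`MemoryDatastore` ignores the signal, but implementations that perform real I/O may honour it):

```js
const { Key, MemoryDatastore } = require('interface-datastore')

const store = new MemoryDatastore()
await store.open()
await store.put(new Key('/z/one'), new TextEncoder().encode('hello'))

// abort the read if it has not completed within one second
const controller = new AbortController()
const timeout = setTimeout(() => controller.abort(), 1000)

try {
  const value = await store.get(new Key('/z/one'), { signal: controller.signal })
  console.info(value)
} finally {
  clearTimeout(timeout)
}
```
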
123 | ### Concurrency
124 |
125 | The streaming `(put|get|delete)Many` methods are intended to be used with modules such as [it-parallel-batch](https://www.npmjs.com/package/it-parallel-batch) to allow calling code to control levels of parallelisation. The batching method ensures results are returned in the correct order, but interface implementations should be thread safe.
126 |
127 | ```js
128 | const batch = require('it-parallel-batch')
129 | const source = [{
130 | key: ..,
131 | value: ..
132 | }]
133 |
134 | // put values into the datastore concurrently, max 10 at a time
135 | for await (const { key, value } of batch(store.putMany(source), 10)) {
136 | console.info(`Put ${key}`)
137 | }
138 | ```
139 |
140 | ### Keys
141 |
142 | To allow a better abstraction of how values are addressed, there is a `Key` class which is used as an identifier. It's easy to create a key from a `Uint8Array` or a `string`.
143 |
144 | ```js
145 | const a = new Key('a')
146 | const b = new Key(new Uint8Array([0, 1, 2, 3]))
147 | ```
148 |
149 | The key scheme is inspired by file systems and the Google App Engine key model. Keys are meant to be unique across a system. They are typically hierarchical, incorporating more and more specific namespaces. Thus keys can be deemed 'children' or 'ancestors' of other keys:
150 |
151 | - `new Key('/Comedy')`
152 | - `new Key('/Comedy/MontyPython')`
153 |
154 | Also, every namespace can be parameterized to embed relevant object information. For example, the Key `name` (most specific namespace) could include the object type:
155 |
156 | - `new Key('/Comedy/MontyPython/Actor:JohnCleese')`
157 | - `new Key('/Comedy/MontyPython/Sketch:CheeseShop')`
158 | - `new Key('/Comedy/MontyPython/Sketch:CheeseShop/Character:Mousebender')`
159 |
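The `Key` class also provides helpers for navigating these namespaces; a short sketch of a few of them (see [`src/key.js`](src/key.js) for the full set):

```js
const { Key } = require('interface-datastore')

const k = new Key('/Comedy/MontyPython/Actor:JohnCleese')

k.list()    // ['Comedy', 'MontyPython', 'Actor:JohnCleese']
k.type()    // 'Actor'
k.name()    // 'JohnCleese'
k.parent()  // Key('/Comedy/MontyPython')
k.path()    // Key('/Comedy/MontyPython/Actor')
k.isDecendantOf(new Key('/Comedy')) // true
```
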
160 | ## API
161 | https://ipfs.github.io/interface-datastore/
162 |
163 | ## Contribute
164 |
165 | PRs accepted.
166 |
167 | Small note: If editing the Readme, please conform to the [standard-readme](https://github.com/RichardLitt/standard-readme) specification.
168 |
169 | ## License
170 |
171 | MIT 2017 © IPFS
172 |
173 |
174 | [Key]: #Keys
175 | [Object]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object
176 | [Uint8Array]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array
177 | [AbortSignal]: https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal
178 | [AsyncIterator]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/asyncIterator
179 | [AsyncIterable]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols
180 | [String]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String
181 | [Array]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array
182 | [Function]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function
183 | [Number]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number
184 | [Boolean]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean
185 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "interface-datastore",
3 | "version": "4.0.2",
4 | "description": "datastore interface",
5 | "leadMaintainer": "Alex Potsides ",
6 | "main": "src/index.js",
7 | "types": "dist/src/index.d.ts",
8 | "files": [
9 | "src",
10 | "dist"
11 | ],
12 | "scripts": {
13 | "prepare": "aegir build --no-bundle",
14 | "lint": "aegir ts -p check && aegir lint",
15 | "test": "aegir test",
16 | "test:node": "aegir test --target node",
17 | "test:browser": "aegir test --target browser",
18 | "release": "aegir release --docs",
19 | "release-minor": "aegir release --type minor --docs",
20 | "release-major": "aegir release --type major --docs",
21 | "coverage": "aegir test --cov",
22 | "docs": "aegir docs"
23 | },
24 | "repository": {
25 | "type": "git",
26 | "url": "git+https://github.com/ipfs/interface-datastore.git"
27 | },
28 | "keywords": [
29 | "interface",
30 | "key-value",
31 | "ipfs",
32 | "datastore"
33 | ],
34 | "license": "MIT",
35 | "bugs": {
36 | "url": "https://github.com/ipfs/interface-datastore/issues"
37 | },
38 | "homepage": "https://github.com/ipfs/interface-datastore#readme",
39 | "devDependencies": {
40 | "aegir": "^33.1.0",
41 | "it-map": "^1.0.5"
42 | },
43 | "dependencies": {
44 | "err-code": "^3.0.1",
45 | "interface-store": "^0.0.2",
46 | "ipfs-utils": "^8.1.2",
47 | "iso-random-stream": "^2.0.0",
48 | "it-all": "^1.0.2",
49 | "it-drain": "^1.0.1",
50 | "it-filter": "^1.0.2",
51 | "it-take": "^1.0.1",
52 | "nanoid": "^3.0.2",
53 | "uint8arrays": "^2.1.5"
54 | },
55 | "eslintConfig": {
56 | "extends": "ipfs"
57 | },
58 | "contributors": [
59 | "achingbrain ",
60 | "David Dias ",
61 | "Friedel Ziegelmayer ",
62 | "Juan Batiz-Benet ",
63 | "Pedro Teixeira ",
64 | "Jacob Heun ",
65 | "Hugo Dias ",
66 | "Vasco Santos ",
67 | "Hector Sanjuan ",
68 | "Richard Schneider ",
69 | "ᴠɪᴄᴛᴏʀ ʙᴊᴇʟᴋʜᴏʟᴍ ",
70 | "Alan Shaw ",
71 | "Carson Farmer ",
72 | "Erin Dachtler ",
73 | "tcme ",
74 | "Adam Uhlir "
75 | ]
76 | }
77 |
--------------------------------------------------------------------------------
/src/adapter.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { sortAll } = require('./utils')
4 | const drain = require('it-drain')
5 | const filter = require('it-filter')
6 | const take = require('it-take')
7 |
8 | /**
9 | * @typedef {import('interface-store').Options} Options
10 | * @typedef {import('./key')} Key
11 | * @typedef {import('./types').Pair} Pair
12 | * @typedef {import('./types').Datastore} Datastore
13 | * @typedef {import('./types').Query} Query
14 | * @typedef {import('./types').KeyQuery} KeyQuery
15 | * @typedef {import('./types').Batch} Batch
16 | */
17 |
18 | /**
19 | * @template O
20 | * @typedef {import('interface-store').AwaitIterable<O>} AwaitIterable
21 | */
22 |
23 | /**
24 | * @implements {Datastore}
25 | */
26 | class Adapter {
27 | /**
28 | * @returns {Promise<void>}
29 | */
30 | open () {
31 | return Promise.reject(new Error('.open is not implemented'))
32 | }
33 |
34 | /**
35 | * @returns {Promise<void>}
36 | */
37 | close () {
38 | return Promise.reject(new Error('.close is not implemented'))
39 | }
40 |
41 | /**
42 | * @param {Key} key
43 | * @param {Uint8Array} val
44 | * @param {Options} [options]
45 | * @returns {Promise<void>}
46 | */
47 | put (key, val, options) {
48 | return Promise.reject(new Error('.put is not implemented'))
49 | }
50 |
51 | /**
52 | * @param {Key} key
53 | * @param {Options} [options]
54 | * @returns {Promise<Uint8Array>}
55 | */
56 | get (key, options) {
57 | return Promise.reject(new Error('.get is not implemented'))
58 | }
59 |
60 | /**
61 | * @param {Key} key
62 | * @param {Options} [options]
63 | * @returns {Promise<boolean>}
64 | */
65 | has (key, options) {
66 | return Promise.reject(new Error('.has is not implemented'))
67 | }
68 |
69 | /**
70 | * @param {Key} key
71 | * @param {Options} [options]
72 | * @returns {Promise<void>}
73 | */
74 | delete (key, options) {
75 | return Promise.reject(new Error('.delete is not implemented'))
76 | }
77 |
78 | /**
79 | * @param {AwaitIterable<Pair>} source
80 | * @param {Options} [options]
81 | * @returns {AsyncIterable<Pair>}
82 | */
83 | async * putMany (source, options = {}) {
84 | for await (const { key, value } of source) {
85 | await this.put(key, value, options)
86 | yield { key, value }
87 | }
88 | }
89 |
90 | /**
91 | * @param {AwaitIterable<Key>} source
92 | * @param {Options} [options]
93 | * @returns {AsyncIterable<Uint8Array>}
94 | */
95 | async * getMany (source, options = {}) {
96 | for await (const key of source) {
97 | yield this.get(key, options)
98 | }
99 | }
100 |
101 | /**
102 | * @param {AwaitIterable<Key>} source
103 | * @param {Options} [options]
104 | * @returns {AsyncIterable<Key>}
105 | */
106 | async * deleteMany (source, options = {}) {
107 | for await (const key of source) {
108 | await this.delete(key, options)
109 | yield key
110 | }
111 | }
112 |
113 | /**
114 | * @returns {Batch}
115 | */
116 | batch () {
117 | /** @type {Pair[]} */
118 | let puts = []
119 | /** @type {Key[]} */
120 | let dels = []
121 |
122 | return {
123 | put (key, value) {
124 | puts.push({ key, value })
125 | },
126 |
127 | delete (key) {
128 | dels.push(key)
129 | },
130 | commit: async (options) => {
131 | await drain(this.putMany(puts, options))
132 | puts = []
133 | await drain(this.deleteMany(dels, options))
134 | dels = []
135 | }
136 | }
137 | }
138 |
139 | /**
140 | * Extending classes should override `query` or implement this method
141 | *
142 | * @param {Query} q
143 | * @param {Options} [options]
144 | * @returns {AsyncIterable<Pair>}
145 | */
146 | // eslint-disable-next-line require-yield
147 | async * _all (q, options) {
148 | throw new Error('._all is not implemented')
149 | }
150 |
151 | /**
152 | * Extending classes should override `queryKeys` or implement this method
153 | *
154 | * @param {KeyQuery} q
155 | * @param {Options} [options]
156 | * @returns {AsyncIterable<Key>}
157 | */
158 | // eslint-disable-next-line require-yield
159 | async * _allKeys (q, options) {
160 | throw new Error('._allKeys is not implemented')
161 | }
162 |
163 | /**
164 | * @param {Query} q
165 | * @param {Options} [options]
166 | */
167 | query (q, options) {
168 | let it = this._all(q, options)
169 |
170 | if (q.prefix != null) {
171 | it = filter(it, (e) =>
172 | e.key.toString().startsWith(/** @type {string} */ (q.prefix))
173 | )
174 | }
175 |
176 | if (Array.isArray(q.filters)) {
177 | it = q.filters.reduce((it, f) => filter(it, f), it)
178 | }
179 |
180 | if (Array.isArray(q.orders)) {
181 | it = q.orders.reduce((it, f) => sortAll(it, f), it)
182 | }
183 |
184 | if (q.offset != null) {
185 | let i = 0
186 | it = filter(it, () => i++ >= /** @type {number} */ (q.offset))
187 | }
188 |
189 | if (q.limit != null) {
190 | it = take(it, q.limit)
191 | }
192 |
193 | return it
194 | }
195 |
196 | /**
197 | * @param {KeyQuery} q
198 | * @param {Options} [options]
199 | */
200 | queryKeys (q, options) {
201 | let it = this._allKeys(q, options)
202 |
203 | if (q.prefix != null) {
204 | it = filter(it, (key) =>
205 | key.toString().startsWith(/** @type {string} */ (q.prefix))
206 | )
207 | }
208 |
209 | if (Array.isArray(q.filters)) {
210 | it = q.filters.reduce((it, f) => filter(it, f), it)
211 | }
212 |
213 | if (Array.isArray(q.orders)) {
214 | it = q.orders.reduce((it, f) => sortAll(it, f), it)
215 | }
216 |
217 | if (q.offset != null) {
218 | let i = 0
219 | it = filter(it, () => i++ >= /** @type {number} */ (q.offset))
220 | }
221 |
222 | if (q.limit != null) {
223 | it = take(it, q.limit)
224 | }
225 |
226 | return it
227 | }
228 | }
229 |
230 | module.exports = Adapter
231 |
--------------------------------------------------------------------------------
/src/errors.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const errCode = require('err-code')
4 |
5 | /**
6 | * @param {Error} [err]
7 | */
8 | function dbOpenFailedError (err) {
9 | err = err || new Error('Cannot open database')
10 | return errCode(err, 'ERR_DB_OPEN_FAILED')
11 | }
12 |
13 | /**
14 | * @param {Error} [err]
15 | */
16 | function dbDeleteFailedError (err) {
17 | err = err || new Error('Delete failed')
18 | return errCode(err, 'ERR_DB_DELETE_FAILED')
19 | }
20 |
21 | /**
22 | * @param {Error} [err]
23 | */
24 | function dbWriteFailedError (err) {
25 | err = err || new Error('Write failed')
26 | return errCode(err, 'ERR_DB_WRITE_FAILED')
27 | }
28 |
29 | /**
30 | * @param {Error} [err]
31 | */
32 | function notFoundError (err) {
33 | err = err || new Error('Not Found')
34 | return errCode(err, 'ERR_NOT_FOUND')
35 | }
36 |
37 | /**
38 | * @param {Error} [err]
39 | */
40 | function abortedError (err) {
41 | err = err || new Error('Aborted')
42 | return errCode(err, 'ERR_ABORTED')
43 | }
44 |
45 | module.exports = {
46 | dbOpenFailedError,
47 | dbDeleteFailedError,
48 | dbWriteFailedError,
49 | notFoundError,
50 | abortedError
51 | }
52 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | /**
4 | * @typedef {import('./types').Datastore} Datastore
5 | * @typedef {import('./types').Batch} Batch
6 | * @typedef {import('interface-store').Options} Options
7 | * @typedef {import('./types').Query} Query
8 | * @typedef {import('./types').QueryFilter} QueryFilter
9 | * @typedef {import('./types').QueryOrder} QueryOrder
10 | * @typedef {import('./types').KeyQuery} KeyQuery
11 | * @typedef {import('./types').KeyQueryFilter} KeyQueryFilter
12 | * @typedef {import('./types').KeyQueryOrder} KeyQueryOrder
13 | * @typedef {import('./types').Pair} Pair
14 | */
15 |
16 | const Key = require('./key')
17 | const MemoryDatastore = require('./memory')
18 | const utils = require('./utils')
19 | const Errors = require('./errors')
20 | const Adapter = require('./adapter')
21 |
22 | module.exports = {
23 | Key,
24 | MemoryDatastore,
25 | utils,
26 | Errors,
27 | Adapter
28 | }
29 |
--------------------------------------------------------------------------------
/src/key.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const { nanoid } = require('nanoid')
4 |
5 | const uint8ArrayToString = require('uint8arrays/to-string')
6 | const uint8ArrayFromString = require('uint8arrays/from-string')
7 |
8 | const symbol = Symbol.for('@ipfs/interface-datastore/key')
9 | const pathSepS = '/'
10 | const pathSepB = new TextEncoder().encode(pathSepS)
11 | const pathSep = pathSepB[0]
12 |
13 | /**
14 | * A Key represents the unique identifier of an object.
15 | * Our Key scheme is inspired by file systems and Google App Engine key model.
16 | * Keys are meant to be unique across a system. Keys are hierarchical,
17 | * incorporating more and more specific namespaces. Thus keys can be deemed
18 | * 'children' or 'ancestors' of other keys:
19 | * - `new Key('/Comedy')`
20 | * - `new Key('/Comedy/MontyPython')`
21 | * Also, every namespace can be parametrized to embed relevant object
22 | * information. For example, the Key `name` (most specific namespace) could
23 | * include the object type:
24 | * - `new Key('/Comedy/MontyPython/Actor:JohnCleese')`
25 | * - `new Key('/Comedy/MontyPython/Sketch:CheeseShop')`
26 | * - `new Key('/Comedy/MontyPython/Sketch:CheeseShop/Character:Mousebender')`
27 | *
28 | */
29 | class Key {
30 | /**
31 | * @param {string | Uint8Array} s
32 | * @param {boolean} [clean]
33 | */
34 | constructor (s, clean) {
35 | if (typeof s === 'string') {
36 | this._buf = uint8ArrayFromString(s)
37 | } else if (s instanceof Uint8Array) {
38 | this._buf = s
39 | } else {
40 | throw new Error('Invalid key, should be String or Uint8Array')
41 | }
42 |
43 | if (clean == null) {
44 | clean = true
45 | }
46 |
47 | if (clean) {
48 | this.clean()
49 | }
50 |
51 | if (this._buf.byteLength === 0 || this._buf[0] !== pathSep) {
52 | throw new Error('Invalid key')
53 | }
54 | }
55 |
56 | /**
57 | * Convert to the string representation
58 | *
59 | * @param {import('uint8arrays/to-string').SupportedEncodings} [encoding='utf8'] - The encoding to use.
60 | * @returns {string}
61 | */
62 | toString (encoding = 'utf8') {
63 | return uint8ArrayToString(this._buf, encoding)
64 | }
65 |
66 | /**
67 | * Return the Uint8Array representation of the key
68 | *
69 | * @returns {Uint8Array}
70 | */
71 | uint8Array () {
72 | return this._buf
73 | }
74 |
75 | get [symbol] () {
76 | return true
77 | }
78 |
79 | /**
80 | * Return string representation of the key
81 | *
82 | * @returns {string}
83 | */
84 | get [Symbol.toStringTag] () {
85 | return `Key(${this.toString()})`
86 | }
87 |
88 | /**
89 | * Constructs a key out of a namespace array.
90 | *
91 | * @param {Array<string>} list - The array of namespaces
92 | * @returns {Key}
93 | *
94 | * @example
95 | * ```js
96 | * Key.withNamespaces(['one', 'two'])
97 | * // => Key('/one/two')
98 | * ```
99 | */
100 | static withNamespaces (list) {
101 | return new Key(list.join(pathSepS))
102 | }
103 |
104 | /**
105 | * Returns a randomly generated (nanoid) key.
106 | *
107 | * @returns {Key}
108 | *
109 | * @example
110 | * ```js
111 | * Key.random()
112 | * // => Key('/f98719ea086343f7b71f32ea9d9d521d')
113 | * ```
114 | */
115 | static random () {
116 | return new Key(nanoid().replace(/-/g, ''))
117 | }
118 |
119 | /**
120 | * Cleanup the current key
121 | *
122 | * @returns {void}
123 | */
124 | clean () {
125 | if (!this._buf || this._buf.byteLength === 0) {
126 | this._buf = pathSepB
127 | }
128 |
129 | if (this._buf[0] !== pathSep) {
130 | const bytes = new Uint8Array(this._buf.byteLength + 1)
131 | bytes.fill(pathSep, 0, 1)
132 | bytes.set(this._buf, 1)
133 | this._buf = bytes
134 | }
135 |
136 | // normalize does not remove trailing slashes
137 | while (this._buf.byteLength > 1 && this._buf[this._buf.byteLength - 1] === pathSep) {
138 | this._buf = this._buf.subarray(0, -1)
139 | }
140 | }
141 |
142 | /**
143 | * Check if the given key is sorted lower than ourself.
144 | *
145 | * @param {Key} key - The other Key to check against
146 | * @returns {boolean}
147 | */
148 | less (key) {
149 | const list1 = this.list()
150 | const list2 = key.list()
151 |
152 | for (let i = 0; i < list1.length; i++) {
153 | if (list2.length < i + 1) {
154 | return false
155 | }
156 |
157 | const c1 = list1[i]
158 | const c2 = list2[i]
159 |
160 | if (c1 < c2) {
161 | return true
162 | } else if (c1 > c2) {
163 | return false
164 | }
165 | }
166 |
167 | return list1.length < list2.length
168 | }
169 |
170 | /**
171 | * Returns the key with all parts in reversed order.
172 | *
173 | * @returns {Key}
174 | *
175 | * @example
176 | * ```js
177 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').reverse()
178 | * // => Key('/Actor:JohnCleese/MontyPython/Comedy')
179 | * ```
180 | */
181 | reverse () {
182 | return Key.withNamespaces(this.list().slice().reverse())
183 | }
184 |
185 | /**
186 | * Returns the `namespaces` making up this Key.
187 | *
188 | * @returns {Array<string>}
189 | */
190 | namespaces () {
191 | return this.list()
192 | }
193 |
194 | /** Returns the "base" namespace of this key.
195 | *
196 | * @returns {string}
197 | *
198 | * @example
199 | * ```js
200 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').baseNamespace()
201 | * // => 'Actor:JohnCleese'
202 | * ```
203 | */
204 | baseNamespace () {
205 | const ns = this.namespaces()
206 | return ns[ns.length - 1]
207 | }
208 |
209 | /**
210 | * Returns the `list` representation of this key.
211 | *
212 | * @returns {Array<string>}
213 | *
214 | * @example
215 | * ```js
216 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').list()
217 | * // => ['Comedy', 'MontyPython', 'Actor:JohnCleese']
218 | * ```
219 | */
220 | list () {
221 | return this.toString().split(pathSepS).slice(1)
222 | }
223 |
224 | /**
225 | * Returns the "type" of this key (value of last namespace).
226 | *
227 | * @returns {string}
228 | *
229 | * @example
230 | * ```js
231 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').type()
232 | * // => 'Actor'
233 | * ```
234 | */
235 | type () {
236 | return namespaceType(this.baseNamespace())
237 | }
238 |
239 | /**
240 | * Returns the "name" of this key (field of last namespace).
241 | *
242 | * @returns {string}
243 | *
244 | * @example
245 | * ```js
246 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').name()
247 | * // => 'JohnCleese'
248 | * ```
249 | */
250 | name () {
251 | return namespaceValue(this.baseNamespace())
252 | }
253 |
254 | /**
255 | * Returns an "instance" of this type key (appends value to namespace).
256 | *
257 | * @param {string} s - The string to append.
258 | * @returns {Key}
259 | *
260 | * @example
261 | * ```js
262 | * new Key('/Comedy/MontyPython/Actor').instance('JohnCleese')
263 | * // => Key('/Comedy/MontyPython/Actor:JohnCleese')
264 | * ```
265 | */
266 | instance (s) {
267 | return new Key(this.toString() + ':' + s)
268 | }
269 |
270 | /**
271 | * Returns the "path" of this key (parent + type).
272 | *
273 | * @returns {Key}
274 | *
275 | * @example
276 | * ```js
277 | * new Key('/Comedy/MontyPython/Actor:JohnCleese').path()
278 | * // => Key('/Comedy/MontyPython/Actor')
279 | * ```
280 | */
281 | path () {
282 | let p = this.parent().toString()
283 | if (!p.endsWith(pathSepS)) {
284 | p += pathSepS
285 | }
286 | p += this.type()
287 | return new Key(p)
288 | }
289 |
290 | /**
291 | * Returns the `parent` Key of this Key.
292 | *
293 | * @returns {Key}
294 | *
295 | * @example
296 | * ```js
297 | * new Key("/Comedy/MontyPython/Actor:JohnCleese").parent()
298 | * // => Key("/Comedy/MontyPython")
299 | * ```
300 | */
301 | parent () {
302 | const list = this.list()
303 | if (list.length === 1) {
304 | return new Key(pathSepS)
305 | }
306 |
307 | return new Key(list.slice(0, -1).join(pathSepS))
308 | }
309 |
310 | /**
311 | * Returns the `child` Key of this Key.
312 | *
313 | * @param {Key} key - The child Key to add
314 | * @returns {Key}
315 | *
316 | * @example
317 | * ```js
318 | * new Key('/Comedy/MontyPython').child(new Key('Actor:JohnCleese'))
319 | * // => Key('/Comedy/MontyPython/Actor:JohnCleese')
320 | * ```
321 | */
322 | child (key) {
323 | if (this.toString() === pathSepS) {
324 | return key
325 | } else if (key.toString() === pathSepS) {
326 | return this
327 | }
328 |
329 | return new Key(this.toString() + key.toString(), false)
330 | }
331 |
332 | /**
333 | * Returns whether this key is a prefix of `other`
334 | *
335 | * @param {Key} other - The other key to test against
336 | * @returns {boolean}
337 | *
338 | * @example
339 | * ```js
340 | * new Key('/Comedy').isAncestorOf('/Comedy/MontyPython')
341 | * // => true
342 | * ```
343 | */
344 | isAncestorOf (other) {
345 | if (other.toString() === this.toString()) {
346 | return false
347 | }
348 |
349 | return other.toString().startsWith(this.toString())
350 | }
351 |
352 | /**
353 | * Returns whether this key contains another as a prefix.
354 | *
355 | * @param {Key} other - The other Key to test against
356 | * @returns {boolean}
357 | *
358 | * @example
359 | * ```js
360 | * new Key('/Comedy/MontyPython').isDecendantOf('/Comedy')
361 | * // => true
362 | * ```
363 | */
364 | isDecendantOf (other) {
365 | if (other.toString() === this.toString()) {
366 | return false
367 | }
368 |
369 | return this.toString().startsWith(other.toString())
370 | }
371 |
372 | /**
373 | * Checks if this key has only one namespace.
374 | *
375 | * @returns {boolean}
376 | *
377 | */
378 | isTopLevel () {
379 | return this.list().length === 1
380 | }
381 |
382 | /**
383 | * Concats one or more Keys into one new Key.
384 | *
385 | * @param {Array<Key>} keys - The array of keys to concatenate
386 | * @returns {Key}
387 | */
388 | concat (...keys) {
389 | return Key.withNamespaces([...this.namespaces(), ...flatten(keys.map(key => key.namespaces()))])
390 | }
391 |
392 | /**
393 | * Check if value is a Key instance
394 | *
395 | * @param {any} value - Value to check
396 | * @returns {value is Key}
397 | */
398 | static isKey (value) {
399 | return value instanceof Key || Boolean(value && value[symbol])
400 | }
401 | }
402 |
403 | /**
404 | * The first component of a namespace. `foo` in `foo:bar`
405 | *
406 | * @param {string} ns
407 | * @returns {string}
408 | */
409 | function namespaceType (ns) {
410 | const parts = ns.split(':')
411 | if (parts.length < 2) {
412 | return ''
413 | }
414 | return parts.slice(0, -1).join(':')
415 | }
416 |
417 | /**
418 | * The last component of a namespace, `baz` in `foo:bar:baz`.
419 | *
420 | * @param {string} ns
421 | * @returns {string}
422 | */
423 | function namespaceValue (ns) {
424 | const parts = ns.split(':')
425 | return parts[parts.length - 1]
426 | }
427 |
428 | /**
429 | * Flatten array of arrays (only one level)
430 | *
431 | * @template T
432 | * @param {Array<Array<T>>} arr
433 | * @returns {T[]}
434 | */
435 | function flatten (arr) {
436 | return /** @type {T[]} */([]).concat(...arr)
437 | }
438 |
439 | module.exports = Key
440 |
--------------------------------------------------------------------------------
/src/memory.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const Key = require('./key')
4 | const Adapter = require('./adapter')
5 | const Errors = require('./errors')
6 |
7 | /**
8 | * @typedef {import('./types').Pair} Pair
9 | * @typedef {import('./types').Datastore} Datastore
10 | * @typedef {import('interface-store').Options} Options
11 | */
12 |
13 | /**
14 | * @class MemoryDatastore
15 | * @implements {Datastore}
16 | */
17 | class MemoryDatastore extends Adapter {
18 | constructor () {
19 | super()
20 |
21 | /** @type {Record<string, Uint8Array>} */
22 | this.data = {}
23 | }
24 |
25 | open () {
26 | return Promise.resolve()
27 | }
28 |
29 | close () {
30 | return Promise.resolve()
31 | }
32 |
33 | /**
34 | * @param {Key} key
35 | * @param {Uint8Array} val
36 | */
37 | async put (key, val) { // eslint-disable-line require-await
38 | this.data[key.toString()] = val
39 | }
40 |
41 | /**
42 | * @param {Key} key
43 | */
44 | async get (key) {
45 | const exists = await this.has(key)
46 | if (!exists) throw Errors.notFoundError()
47 | return this.data[key.toString()]
48 | }
49 |
50 | /**
51 | * @param {Key} key
52 | */
53 | async has (key) { // eslint-disable-line require-await
54 | return this.data[key.toString()] !== undefined
55 | }
56 |
57 | /**
58 | * @param {Key} key
59 | */
60 | async delete (key) { // eslint-disable-line require-await
61 | delete this.data[key.toString()]
62 | }
63 |
64 | async * _all () {
65 | yield * Object.entries(this.data)
66 | .map(([key, value]) => ({ key: new Key(key), value }))
67 | }
68 |
69 | async * _allKeys () {
70 | yield * Object.entries(this.data)
71 | .map(([key]) => new Key(key))
72 | }
73 | }
74 |
75 | module.exports = MemoryDatastore
76 |
--------------------------------------------------------------------------------
/src/tests.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | 'use strict'
3 |
4 | const { randomBytes } = require('iso-random-stream')
5 | const { expect } = require('aegir/utils/chai')
6 | const all = require('it-all')
7 | const drain = require('it-drain')
8 | const uint8ArrayFromString = require('uint8arrays/from-string')
9 |
10 | const { Key } = require('../src')
11 |
12 | /**
13 | * @typedef {import('./types').Datastore} Datastore
14 | * @typedef {import('./types').Pair} Pair
15 | * @typedef {import('./types').QueryOrder} QueryOrder
16 | * @typedef {import('./types').QueryFilter} QueryFilter
17 | * @typedef {import('./types').KeyQueryOrder} KeyQueryOrder
18 | * @typedef {import('./types').KeyQueryFilter} KeyQueryFilter
19 | */
20 |
21 | /**
22 | * @param {{ teardown: () => void; setup: () => Datastore; }} test
23 | */
24 | module.exports = (test) => {
25 | /**
26 | * @param {Datastore} store
27 | */
28 | const cleanup = async store => {
29 | await store.close()
30 | await test.teardown()
31 | }
32 |
33 | const createStore = async () => {
34 | const store = await test.setup()
35 | if (!store) throw new Error('missing store')
36 | await store.open()
37 | return store
38 | }
39 |
40 | describe('put', () => {
41 | /** @type {Datastore} */
42 | let store
43 |
44 | beforeEach(async () => {
45 | store = await createStore()
46 | })
47 |
48 | afterEach(() => cleanup(store))
49 |
50 | it('simple', () => {
51 | const k = new Key('/z/one')
52 | return store.put(k, uint8ArrayFromString('one'))
53 | })
54 |
55 | it('parallel', async () => {
56 | const data = []
57 | for (let i = 0; i < 100; i++) {
58 | data.push({ key: new Key(`/z/key${i}`), value: uint8ArrayFromString(`data${i}`) })
59 | }
60 |
61 | await Promise.all(data.map(d => store.put(d.key, d.value)))
62 |
63 | const res = await all(store.getMany(data.map(d => d.key)))
64 | expect(res).to.deep.equal(data.map(d => d.value))
65 | })
66 | })
67 |
68 | describe('putMany', () => {
69 | /** @type {Datastore} */
70 | let store
71 |
72 | beforeEach(async () => {
73 | store = await createStore()
74 | })
75 |
76 | afterEach(() => cleanup(store))
77 |
78 | it('streaming', async () => {
79 | const data = []
80 | for (let i = 0; i < 100; i++) {
81 | data.push({ key: new Key(`/z/key${i}`), value: uint8ArrayFromString(`data${i}`) })
82 | }
83 |
84 | let index = 0
85 |
86 | for await (const { key, value } of store.putMany(data)) {
87 | expect(data[index]).to.deep.equal({ key, value })
88 | index++
89 | }
90 |
91 | expect(index).to.equal(data.length)
92 |
93 | const res = await all(store.getMany(data.map(d => d.key)))
94 | expect(res).to.deep.equal(data.map(d => d.value))
95 | })
96 | })
97 |
98 | describe('get', () => {
99 | /** @type {Datastore} */
100 | let store
101 |
102 | beforeEach(async () => {
103 | store = await createStore()
104 | })
105 |
106 | afterEach(() => cleanup(store))
107 |
108 | it('simple', async () => {
109 | const k = new Key('/z/one')
110 | await store.put(k, uint8ArrayFromString('hello'))
111 | const res = await store.get(k)
112 | expect(res).to.be.eql(uint8ArrayFromString('hello'))
113 | })
114 |
115 | it('should throw error for missing key', async () => {
116 | const k = new Key('/does/not/exist')
117 |
118 | try {
119 | await store.get(k)
120 | } catch (err) {
121 | expect(err).to.have.property('code', 'ERR_NOT_FOUND')
122 | return
123 | }
124 |
125 | throw new Error('expected error to be thrown')
126 | })
127 | })
128 |
129 | describe('getMany', () => {
130 | /** @type {Datastore} */
131 | let store
132 |
133 | beforeEach(async () => {
134 | store = await createStore()
135 | })
136 |
137 | afterEach(() => cleanup(store))
138 |
139 | it('streaming', async () => {
140 | const k = new Key('/z/one')
141 | await store.put(k, uint8ArrayFromString('hello'))
142 | const source = [k]
143 |
144 | const res = await all(store.getMany(source))
145 | expect(res).to.have.lengthOf(1)
146 | expect(res[0]).to.be.eql(uint8ArrayFromString('hello'))
147 | })
148 |
149 | it('should throw error for missing key', async () => {
150 | const k = new Key('/does/not/exist')
151 |
152 | try {
153 | await drain(store.getMany([k]))
154 | } catch (err) {
155 | expect(err).to.have.property('code', 'ERR_NOT_FOUND')
156 | return
157 | }
158 |
159 | throw new Error('expected error to be thrown')
160 | })
161 | })
162 |
163 | describe('delete', () => {
164 | /** @type {Datastore} */
165 | let store
166 |
167 | beforeEach(async () => {
168 | store = await createStore()
169 | })
170 |
171 | afterEach(() => cleanup(store))
172 |
173 | it('simple', async () => {
174 | const k = new Key('/z/one')
175 | await store.put(k, uint8ArrayFromString('hello'))
176 | await store.get(k)
177 | await store.delete(k)
178 | const exists = await store.has(k)
179 | expect(exists).to.be.eql(false)
180 | })
181 |
182 | it('parallel', async () => {
183 | /** @type {[Key, Uint8Array][]} */
184 | const data = []
185 | for (let i = 0; i < 100; i++) {
186 | data.push([new Key(`/a/key${i}`), uint8ArrayFromString(`data${i}`)])
187 | }
188 |
189 | await Promise.all(data.map(d => store.put(d[0], d[1])))
190 |
191 | const res0 = await Promise.all(data.map(d => store.has(d[0])))
192 | res0.forEach(res => expect(res).to.be.eql(true))
193 |
194 | await Promise.all(data.map(d => store.delete(d[0])))
195 |
196 | const res1 = await Promise.all(data.map(d => store.has(d[0])))
197 | res1.forEach(res => expect(res).to.be.eql(false))
198 | })
199 | })
200 |
201 | describe('deleteMany', () => {
202 | /** @type {Datastore} */
203 | let store
204 |
205 | beforeEach(async () => {
206 | store = await createStore()
207 | })
208 |
209 | afterEach(() => cleanup(store))
210 |
211 | it('streaming', async () => {
212 | const data = []
213 | for (let i = 0; i < 100; i++) {
214 | data.push({ key: new Key(`/a/key${i}`), value: uint8ArrayFromString(`data${i}`) })
215 | }
216 |
217 | await drain(store.putMany(data))
218 |
219 | const res0 = await Promise.all(data.map(d => store.has(d.key)))
220 | res0.forEach(res => expect(res).to.be.eql(true))
221 |
222 | let index = 0
223 |
224 | for await (const key of store.deleteMany(data.map(d => d.key))) {
225 | expect(data[index].key).to.deep.equal(key)
226 | index++
227 | }
228 |
229 | expect(index).to.equal(data.length)
230 |
231 | const res1 = await Promise.all(data.map(d => store.has(d.key)))
232 | res1.forEach(res => expect(res).to.be.eql(false))
233 | })
234 | })
235 |
236 | describe('batch', () => {
237 | /** @type {Datastore} */
238 | let store
239 |
240 | beforeEach(async () => {
241 | store = await createStore()
242 | })
243 |
244 | afterEach(() => cleanup(store))
245 |
246 | it('simple', async () => {
247 | const b = store.batch()
248 |
249 | await store.put(new Key('/z/old'), uint8ArrayFromString('old'))
250 |
251 | b.put(new Key('/a/one'), uint8ArrayFromString('1'))
252 | b.put(new Key('/q/two'), uint8ArrayFromString('2'))
253 | b.put(new Key('/q/three'), uint8ArrayFromString('3'))
254 | b.delete(new Key('/z/old'))
255 | await b.commit()
256 |
257 | const keys = ['/a/one', '/q/two', '/q/three', '/z/old']
258 | const res = await Promise.all(keys.map(k => store.has(new Key(k))))
259 |
260 | expect(res).to.be.eql([true, true, true, false])
261 | })
262 |
263 | it('many (3 * 400)', async function () {
264 | this.timeout(20 * 1000)
265 | const b = store.batch()
266 | const count = 400
267 | for (let i = 0; i < count; i++) {
268 | b.put(new Key(`/a/hello${i}`), randomBytes(32))
269 | b.put(new Key(`/q/hello${i}`), randomBytes(64))
270 | b.put(new Key(`/z/hello${i}`), randomBytes(128))
271 | }
272 |
273 | await b.commit()
274 |
275 | /**
276 | * @param {AsyncIterable<Pair>} iterable
277 | */
278 | const total = async iterable => {
279 | let count = 0
280 | // eslint-disable-next-line no-unused-vars
281 | for await (const _ of iterable) count++
282 | return count
283 | }
284 |
285 | expect(await total(store.query({ prefix: '/a' }))).to.equal(count)
286 | expect(await total(store.query({ prefix: '/z' }))).to.equal(count)
287 | expect(await total(store.query({ prefix: '/q' }))).to.equal(count)
288 | })
289 | })
290 |
291 | describe('query', () => {
292 | /** @type {Datastore} */
293 | let store
294 | const hello = { key: new Key('/q/1hello'), value: uint8ArrayFromString('1') }
295 | const world = { key: new Key('/z/2world'), value: uint8ArrayFromString('2') }
296 | const hello2 = { key: new Key('/z/3hello2'), value: uint8ArrayFromString('3') }
297 |
298 | /**
299 | * @type {QueryFilter}
300 | */
301 | const filter1 = entry => !entry.key.toString().endsWith('hello')
302 |
303 | /**
304 | * @type {QueryFilter}
305 | */
306 | const filter2 = entry => entry.key.toString().endsWith('hello2')
307 |
308 | /**
309 | * @type {QueryOrder}
310 | */
311 | const order1 = (a, b) => {
312 | if (a.value.toString() < b.value.toString()) {
313 | return -1
314 | }
315 | return 1
316 | }
317 |
318 | /**
319 | * @type {QueryOrder}
320 | */
321 | const order2 = (a, b) => {
322 | if (a.value.toString() < b.value.toString()) {
323 | return 1
324 | }
325 | if (a.value.toString() > b.value.toString()) {
326 | return -1
327 | }
328 | return 0
329 | }
330 |
331 | /** @type {Array<[string, any, any[]|number]>} */
332 | const tests = [
333 | ['empty', {}, [hello, world, hello2]],
334 | ['prefix', { prefix: '/z' }, [world, hello2]],
335 | ['1 filter', { filters: [filter1] }, [world, hello2]],
336 | ['2 filters', { filters: [filter1, filter2] }, [hello2]],
337 | ['limit', { limit: 1 }, 1],
338 | ['offset', { offset: 1 }, 2],
339 | ['1 order (1)', { orders: [order1] }, [hello, world, hello2]],
340 | ['1 order (reverse 1)', { orders: [order2] }, [hello2, world, hello]]
341 | ]
342 |
343 | before(async () => {
344 | store = await createStore()
345 |
346 | const b = store.batch()
347 |
348 | b.put(hello.key, hello.value)
349 | b.put(world.key, world.value)
350 | b.put(hello2.key, hello2.value)
351 |
352 | return b.commit()
353 | })
354 |
355 | after(() => cleanup(store))
356 |
357 | tests.forEach(([name, query, expected]) => it(name, async () => {
358 | let res = await all(store.query(query))
359 |
360 | if (Array.isArray(expected)) {
361 | if (query.orders == null) {
362 | expect(res).to.have.length(expected.length)
363 | /**
364 | * @param {Pair} a
365 | * @param {Pair} b
366 | */
367 | const s = (a, b) => {
368 | if (a.key.toString() < b.key.toString()) {
369 | return 1
370 | } else {
371 | return -1
372 | }
373 | }
374 | res = res.sort(s)
375 | const exp = expected.sort(s)
376 |
377 | res.forEach((r, i) => {
378 | expect(r.key.toString()).to.be.eql(exp[i].key.toString())
379 |
380 | if (r.value == null) {
381 | expect(exp[i].value).to.not.exist()
382 | } else {
383 | expect(r.value).to.deep.equal(exp[i].value)
384 | }
385 | })
386 | } else {
387 | expect(res).to.be.eql(expected)
388 | }
389 | } else if (typeof expected === 'number') {
390 | expect(res).to.have.length(expected)
391 | }
392 | }))
393 |
394 | it('allows mutating the datastore during a query', async () => {
395 | const hello3 = { key: new Key('/z/4hello3'), value: uint8ArrayFromString('4') }
396 | let firstIteration = true
397 |
398 | for await (const {} of store.query({})) { // eslint-disable-line no-empty-pattern
399 | if (firstIteration) {
400 | expect(await store.has(hello2.key)).to.be.true()
401 | await store.delete(hello2.key)
402 | expect(await store.has(hello2.key)).to.be.false()
403 |
404 | await store.put(hello3.key, hello3.value)
405 | firstIteration = false
406 | }
407 | }
408 |
409 | const results = await all(store.query({}))
410 |
411 | expect(firstIteration).to.be.false('Query did not return anything')
412 | expect(results.map(result => result.key)).to.have.deep.members([
413 | hello.key,
414 | world.key,
415 | hello3.key
416 | ])
417 | })
418 |
419 | it('queries while the datastore is being mutated', async () => {
420 | const writePromise = store.put(new Key(`/z/key-${Math.random()}`), uint8ArrayFromString('0'))
421 | const results = await all(store.query({}))
422 | expect(results.length).to.be.greaterThan(0)
423 | await writePromise
424 | })
425 | })
426 |
427 | describe('queryKeys', () => {
428 | /** @type {Datastore} */
429 | let store
430 | const hello = { key: new Key('/q/1hello'), value: uint8ArrayFromString('1') }
431 | const world = { key: new Key('/z/2world'), value: uint8ArrayFromString('2') }
432 | const hello2 = { key: new Key('/z/3hello2'), value: uint8ArrayFromString('3') }
433 |
434 | /**
435 | * @type {KeyQueryFilter}
436 | */
437 | const filter1 = key => !key.toString().endsWith('hello')
438 |
439 | /**
440 | * @type {KeyQueryFilter}
441 | */
442 | const filter2 = key => key.toString().endsWith('hello2')
443 |
444 | /**
445 | * @type {KeyQueryOrder}
446 | */
447 | const order1 = (a, b) => {
448 | if (a.toString() < b.toString()) {
449 | return -1
450 | }
451 | return 1
452 | }
453 |
454 | /**
455 | * @type {KeyQueryOrder}
456 | */
457 | const order2 = (a, b) => {
458 | if (a.toString() < b.toString()) {
459 | return 1
460 | }
461 | if (a.toString() > b.toString()) {
462 | return -1
463 | }
464 | return 0
465 | }
466 |
467 | /** @type {Array<[string, any, any[]|number]>} */
468 | const tests = [
469 | ['empty', {}, [hello.key, world.key, hello2.key]],
470 | ['prefix', { prefix: '/z' }, [world.key, hello2.key]],
471 | ['1 filter', { filters: [filter1] }, [world.key, hello2.key]],
472 | ['2 filters', { filters: [filter1, filter2] }, [hello2.key]],
473 | ['limit', { limit: 1 }, 1],
474 | ['offset', { offset: 1 }, 2],
475 | ['1 order (1)', { orders: [order1] }, [hello.key, world.key, hello2.key]],
476 | ['1 order (reverse 1)', { orders: [order2] }, [hello2.key, world.key, hello.key]]
477 | ]
478 |
479 | before(async () => {
480 | store = await createStore()
481 |
482 | const b = store.batch()
483 |
484 | b.put(hello.key, hello.value)
485 | b.put(world.key, world.value)
486 | b.put(hello2.key, hello2.value)
487 |
488 | return b.commit()
489 | })
490 |
491 | after(() => cleanup(store))
492 |
493 | tests.forEach(([name, query, expected]) => it(name, async () => {
494 | let res = await all(store.queryKeys(query))
495 |
496 | if (Array.isArray(expected)) {
497 | if (query.orders == null) {
498 | expect(res).to.have.length(expected.length)
499 | /**
500 | * @type {KeyQueryOrder}
501 | */
502 | const s = (a, b) => {
503 | if (a.toString() < b.toString()) {
504 | return 1
505 | } else {
506 | return -1
507 | }
508 | }
509 | res = res.sort(s)
510 | const exp = expected.sort(s)
511 |
512 | res.forEach((r, i) => {
513 | expect(r.toString()).to.be.eql(exp[i].toString())
514 | })
515 | } else {
516 | expect(res).to.be.eql(expected)
517 | }
518 | } else if (typeof expected === 'number') {
519 | expect(res).to.have.length(expected)
520 | }
521 | }))
522 |
523 | it('allows mutating the datastore during a query', async () => {
524 | const hello3 = { key: new Key('/z/4hello3'), value: uint8ArrayFromString('4') }
525 | let firstIteration = true
526 |
527 | for await (const {} of store.queryKeys({})) { // eslint-disable-line no-empty-pattern
528 | if (firstIteration) {
529 | expect(await store.has(hello2.key)).to.be.true()
530 | await store.delete(hello2.key)
531 | expect(await store.has(hello2.key)).to.be.false()
532 |
533 | await store.put(hello3.key, hello3.value)
534 | firstIteration = false
535 | }
536 | }
537 |
538 | const results = await all(store.queryKeys({}))
539 |
540 | expect(firstIteration).to.be.false('Query did not return anything')
541 | expect(results).to.have.deep.members([
542 | hello.key,
543 | world.key,
544 | hello3.key
545 | ])
546 | })
547 |
548 | it('queries while the datastore is being mutated', async () => {
549 | const writePromise = store.put(new Key(`/z/key-${Math.random()}`), uint8ArrayFromString('0'))
550 | const results = await all(store.queryKeys({}))
551 | expect(results.length).to.be.greaterThan(0)
552 | await writePromise
553 | })
554 | })
555 |
556 | describe('lifecycle', () => {
557 | /** @type {Datastore} */
558 | let store
559 |
560 | before(async () => {
561 | store = await test.setup()
562 | if (!store) throw new Error('missing store')
563 | })
564 |
565 | after(() => cleanup(store))
566 |
567 | it('close and open', async () => {
568 | await store.close()
569 | await store.open()
570 | await store.close()
571 | await store.open()
572 | })
573 | })
574 | }
575 |
--------------------------------------------------------------------------------
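The compliance suite above exercises the whole Datastore surface: put/get/has/delete, the streaming putMany/deleteMany variants, atomic batches, and the query/queryKeys options (prefix, filters, orders, limit, offset). As a quick orientation, here is a minimal usage sketch of that same API against the bundled MemoryDatastore; the package entry point and the `uint8arrays`/`it-all` require paths are assumptions matching the imports the suite itself uses, not part of this file.

'use strict'

const { MemoryDatastore, Key } = require('interface-datastore')
const uint8ArrayFromString = require('uint8arrays/from-string')
const all = require('it-all')

async function main () {
  const store = new MemoryDatastore()
  await store.open()

  // single write/read
  await store.put(new Key('/a/one'), uint8ArrayFromString('1'))

  // batched writes are only applied on commit()
  const b = store.batch()
  b.put(new Key('/q/two'), uint8ArrayFromString('2'))
  b.put(new Key('/z/three'), uint8ArrayFromString('3'))
  b.delete(new Key('/a/one'))
  await b.commit()

  // query() yields { key, value } pairs, here restricted to the /q prefix
  const pairs = await all(store.query({ prefix: '/q' }))
  console.log(pairs.map(p => p.key.toString())) // [ '/q/two' ]

  await store.close()
}

main().catch(console.error)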
/src/types.d.ts:
--------------------------------------------------------------------------------
1 | import {
2 | Pair as StorePair,
3 | Batch as StoreBatch,
4 | QueryFilter as StoreQueryFilter,
5 | QueryOrder as StoreQueryOrder,
6 | Query as StoreQuery,
7 | KeyQueryFilter as StoreKeyQueryFilter,
8 | KeyQueryOrder as StoreKeyQueryOrder,
9 | KeyQuery as StoreKeyQuery,
10 | Options as StoreOptions,
11 | Store
12 | } from 'interface-store'
13 | import type Key from './key'
14 |
15 | export interface Options extends StoreOptions {
16 |
17 | }
18 |
19 | export interface Pair extends StorePair<Key, Uint8Array> {
20 |
21 | }
22 |
23 | export interface Batch extends StoreBatch<Key, Uint8Array> {
24 |
25 | }
26 |
27 | export interface Datastore extends Store<Key, Uint8Array> {
28 |
29 | }
30 |
31 | export interface QueryFilter extends StoreQueryFilter<Key, Uint8Array> {
32 |
33 | }
34 |
35 | export interface QueryOrder extends StoreQueryOrder<Key, Uint8Array> {
36 |
37 | }
38 |
39 | export interface Query extends StoreQuery<Key, Uint8Array> {
40 |
41 | }
42 |
43 | export interface KeyQueryFilter extends StoreKeyQueryFilter<Key> {
44 |
45 | }
46 |
47 | export interface KeyQueryOrder extends StoreKeyQueryOrder<Key> {
48 |
49 | }
50 |
51 | export interface KeyQuery extends StoreKeyQuery<Key> {
52 |
53 | }
54 |
--------------------------------------------------------------------------------
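These interfaces specialise the generic interface-store types for this library (a Key key type and Uint8Array values), so downstream JavaScript can reference them from JSDoc. A minimal sketch of such usage, written as if it lived next to types.d.ts; the relative import path and the helper itself are illustrative assumptions.

'use strict'

/** @typedef {import('./types').Datastore} Datastore */
/** @typedef {import('./types').Query} Query */

/**
 * Count the entries matching a query (hypothetical helper, for illustration only).
 *
 * @param {Datastore} store
 * @param {Query} q
 * @returns {Promise<number>}
 */
async function countEntries (store, q) {
  let n = 0
  // eslint-disable-next-line no-unused-vars
  for await (const _ of store.query(q)) {
    n++
  }
  return n
}

module.exports = countEntries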
/src/utils.js:
--------------------------------------------------------------------------------
1 | 'use strict'
2 |
3 | const tempdir = require('ipfs-utils/src/temp-dir')
4 | const all = require('it-all')
5 |
6 | /**
7 | * Collect all values from the iterable and sort them using
8 | * the passed sorter function
9 | *
10 | * @template T
11 |  * @param {AsyncIterable<T> | Iterable<T>} iterable
12 |  * @param {(a: T, b: T) => -1 | 0 | 1} sorter
13 |  * @returns {AsyncIterable<T>}
14 | */
15 | const sortAll = (iterable, sorter) => {
16 | return (async function * () {
17 | const values = await all(iterable)
18 | yield * values.sort(sorter)
19 | })()
20 | }
21 |
22 | /**
23 | * @param {string} s
24 | * @param {string} r
25 | */
26 | const replaceStartWith = (s, r) => {
27 | const matcher = new RegExp('^' + r)
28 | return s.replace(matcher, '')
29 | }
30 |
31 | module.exports = {
32 | sortAll,
33 | tmpdir: tempdir,
34 | replaceStartWith
35 | }
36 |
--------------------------------------------------------------------------------
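A short sketch of the two helpers in use; the expected outputs mirror the assertions in test/utils.spec.js below, and the relative require path assumes this snippet sits alongside utils.js.

'use strict'

const { sortAll, replaceStartWith, tmpdir } = require('./utils')

async function demo () {
  // sortAll buffers the whole (async) iterable, sorts it, then re-yields it
  const descending = (a, b) => (a < b ? 1 : a > b ? -1 : 0)
  const sorted = []
  for await (const n of sortAll([1, 2, 3, 4], descending)) {
    sorted.push(n)
  }
  console.log(sorted) // [ 4, 3, 2, 1 ]

  // replaceStartWith only strips the prefix when the string actually starts with it
  console.log(replaceStartWith('helloworld', 'hello')) // 'world'
  console.log(replaceStartWith('helloworld', 'world')) // 'helloworld'

  // tmpdir() returns a fresh temporary directory path for tests
  console.log(tmpdir())
}

demo().catch(console.error)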
/test/key.spec.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | 'use strict'
3 |
4 | const { expect } = require('aegir/utils/chai')
5 | const Key = require('../src').Key
6 |
7 | const pathSep = '/'
8 |
9 | describe('Key', () => {
10 | /**
11 | * @param {string} s
12 | */
13 | const clean = (s) => {
14 | let fixed = s
15 | if (fixed.startsWith(pathSep + pathSep)) {
16 | fixed = fixed.slice(1)
17 | }
18 | if (fixed.length > 1 && fixed.endsWith(pathSep)) {
19 | fixed = fixed.slice(0, -1)
20 | }
21 |
22 | return fixed
23 | }
24 |
25 | describe('basic', () => {
26 | /**
27 | * @param {string} s
28 | */
29 | const validKey = (s) => it(s, () => {
30 | const fixed = clean(pathSep + s)
31 | const namespaces = fixed.split(pathSep).slice(1)
32 | const lastNamespace = namespaces[namespaces.length - 1]
33 | const lnparts = lastNamespace.split(':')
34 | let ktype = ''
35 | if (lnparts.length > 1) {
36 | ktype = lnparts.slice(0, -1).join(':')
37 | }
38 | const kname = lnparts[lnparts.length - 1]
39 | const kchild = clean(fixed + '/cchildd')
40 | const kparent = pathSep + namespaces.slice(0, -1).join(pathSep)
41 | const kpath = clean(kparent + pathSep + ktype)
42 | const kinstance = fixed + ':inst'
43 |
44 | const k = new Key(s)
45 | expect(k.toString()).to.eql(fixed)
46 | expect(k).to.eql(new Key(s))
47 | expect(k.toString()).to.eql(new Key(s).toString())
48 | expect(k.name()).to.eql(kname)
49 | expect(k.type()).to.eql(ktype)
50 | expect(k.path().toString()).to.eql(kpath)
51 | expect(k.instance('inst').toString()).to.eql(kinstance)
52 |
53 | const child = new Key('cchildd')
54 | expect(k.child(child).toString()).to.eql(kchild)
55 | expect(k.child(child).parent().toString()).to.eql(fixed)
56 | expect(k.parent().toString()).to.eql(kparent)
57 | expect(k.list()).to.have.length(namespaces.length)
58 | expect(k.namespaces()).to.have.length(namespaces.length)
59 | k.list().forEach((e, i) => {
60 | expect(namespaces[i]).to.eql(e)
61 | })
62 | })
63 |
64 | validKey('')
65 | validKey('abcde')
66 | validKey('disahfidsalfhduisaufidsail')
67 | validKey('/fdisahfodisa/fdsa/fdsafdsafdsafdsa/fdsafdsa/')
68 | validKey('4215432143214321432143214321')
69 | validKey('a/b/c/d/')
70 | validKey('abcde:fdsfd')
71 | validKey('disahfidsalfhduisaufidsail:fdsa')
72 | validKey('/fdisahfodisa/fdsa/fdsafdsafdsafdsa/fdsafdsa/:')
73 | validKey('4215432143214321432143214321:')
74 | })
75 |
76 | it('ancestry', () => {
77 | const k1 = new Key('/A/B/C')
78 | const k2 = new Key('/A/B/C/D')
79 |
80 | expect(k1.toString()).to.be.eql('/A/B/C')
81 | expect(k2.toString()).to.be.eql('/A/B/C/D')
82 |
83 | const checks = [
84 | k1.isAncestorOf(k2),
85 | k2.isDecendantOf(k1),
86 | new Key('/A').isAncestorOf(k2),
87 | new Key('/A').isAncestorOf(k1),
88 | !new Key('/A').isDecendantOf(k2),
89 | !new Key('/A').isDecendantOf(k1),
90 | k2.isDecendantOf(new Key('/A')),
91 | k1.isDecendantOf(new Key('/A')),
92 | !k2.isAncestorOf(new Key('/A')),
93 | !k1.isAncestorOf(new Key('/A')),
94 | !k2.isAncestorOf(k2),
95 | !k1.isAncestorOf(k1)
96 | ]
97 |
98 | checks.forEach((check) => expect(check).to.equal(true))
99 |
100 | expect(k1.child(new Key('D')).toString()).to.eql(k2.toString())
101 | expect(k1.toString()).to.eql(k2.parent().toString())
102 | expect(k1.path().toString()).to.eql(k2.parent().path().toString())
103 | })
104 |
105 | it('type', () => {
106 | const k1 = new Key('/A/B/C:c')
107 | const k2 = new Key('/A/B/C:c/D:d')
108 |
109 | expect(k1.isAncestorOf(k2)).to.eql(true)
110 | expect(k2.isDecendantOf(k1)).to.eql(true)
111 |
112 | expect(k1.type()).to.eql('C')
113 | expect(k2.type()).to.eql('D')
114 | expect(k1.type()).to.eql(k2.parent().type())
115 | })
116 |
117 | it('random', () => {
118 |     /** @type {Record<string, boolean>} */
119 | const keys = {}
120 | const k = 100
121 | for (let i = 0; i < k; i++) {
122 | const r = Key.random()
123 | expect(keys).to.not.have.key(r.toString())
124 | keys[r.toString()] = true
125 | }
126 |
127 | expect(Object.keys(keys)).to.have.length(k)
128 | })
129 |
130 | it('less', () => {
131 | /**
132 | * @param {string | Uint8Array} a
133 | * @param {string | Uint8Array} b
134 | */
135 | const checkLess = (a, b) => {
136 | const ak = new Key(a)
137 | const bk = new Key(b)
138 |
139 | expect(ak.less(bk)).to.eql(true)
140 | expect(bk.less(ak)).to.eql(false)
141 | }
142 |
143 | checkLess('/a/b/c', '/a/b/c/d')
144 | checkLess('/a/b', '/a/b/c/d')
145 | checkLess('/a', '/a/b/c/d')
146 | checkLess('/a/a/c', '/a/b/c')
147 | checkLess('/a/a/d', '/a/b/c')
148 | checkLess('/a/b/c/d/e/f/g/h', '/b')
149 | checkLess(pathSep, '/a')
150 | })
151 |
152 | it('concat', () => {
153 | const originalKey = new Key('/a/b/c')
154 |
155 | const concattedKey = originalKey.concat(new Key('/d/e/f'))
156 | expect(concattedKey.toString()).to.equal('/a/b/c/d/e/f')
157 |
158 | // Original key is not changed
159 | expect(originalKey.toString()).to.equal('/a/b/c')
160 |
161 | const concattedMultipleKeys = originalKey.concat(new Key('/d/e'), new Key('/f/g'))
162 | expect(concattedMultipleKeys.toString()).to.equal('/a/b/c/d/e/f/g')
163 |
164 | // New instance of Key is always created
165 | expect(originalKey.concat()).to.not.equal(originalKey)
166 | // but has the same value
167 | expect(originalKey.concat().toString()).to.equal('/a/b/c')
168 | })
169 |
170 | it('uint8Array', () => {
171 | const arr = Uint8Array.from(['/'.charCodeAt(0), 0, 1, 2, 3])
172 | const key = new Key(arr)
173 | const buf = key.uint8Array()
174 |
175 | expect(buf).to.deep.equal(arr)
176 | })
177 |
178 | it('uint8Array with surplus bytes', () => {
179 | const arr = Uint8Array.from(['/'.charCodeAt(0), 0, 1, 2, 3, 4])
180 | const view = new Uint8Array(arr.buffer, 0, arr.length - 1)
181 |
182 | // should be same buffer
183 | expect(view.buffer).to.equal(arr.buffer)
184 | expect(view.buffer.byteLength).to.equal(arr.buffer.byteLength)
185 |
186 | // view should be shorter than wrapped buffer
187 | expect(view.length).to.be.lessThan(arr.buffer.byteLength)
188 | expect(view.byteLength).to.be.lessThan(arr.buffer.byteLength)
189 |
190 | const key = new Key(view)
191 | const buf = key.uint8Array()
192 |
193 | expect(buf).to.deep.equal(view)
194 | })
195 |
196 | it('uint8Array with trailing slashes', () => {
197 | const slash = '/'.charCodeAt(0)
198 | const arrWithSlashes = Uint8Array.from([slash, 0, 1, 2, 3, slash, slash, slash])
199 | const arrWithoutSlashes = Uint8Array.from([slash, 0, 1, 2, 3])
200 | const key = new Key(arrWithSlashes)
201 | const buf = key.uint8Array()
202 |
203 | // slashes should have been stripped
204 | expect(buf).to.deep.equal(arrWithoutSlashes)
205 |
206 | // should be a view on the original buffer
207 | expect(buf.buffer).to.equal(arrWithSlashes.buffer)
208 | })
209 | })
210 |
--------------------------------------------------------------------------------
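The spec above pins down the Key behaviours: namespace parsing, type/name splitting on ':', ancestry, concatenation and random key generation. A compact sketch of those behaviours follows; the package entry point is assumed and the key strings are illustrative only.

'use strict'

const { Key } = require('interface-datastore')

const k = new Key('/comedy/MontyPython:actor')

console.log(k.name()) // 'actor' (part of the last namespace after the final ':')
console.log(k.type()) // 'MontyPython' (part of the last namespace before the final ':')
console.log(k.parent().toString()) // '/comedy'
console.log(k.child(new Key('role')).toString()) // '/comedy/MontyPython:actor/role'
console.log(k.concat(new Key('/sketches')).toString()) // '/comedy/MontyPython:actor/sketches'
console.log(k.namespaces()) // [ 'comedy', 'MontyPython:actor' ]

// ancestry checks (note the library's own spelling of isDecendantOf)
console.log(new Key('/comedy').isAncestorOf(k)) // true
console.log(k.isDecendantOf(new Key('/comedy'))) // true

// Key.random() produces keys that are unique in practice
console.log(Key.random().toString() !== Key.random().toString()) // true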
/test/memory.spec.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | 'use strict'
3 |
4 | const MemoryDatastore = require('../src').MemoryDatastore
5 |
6 | describe('Memory', () => {
7 | describe('interface-datastore', () => {
8 | require('../src/tests')({
9 | setup () {
10 | return new MemoryDatastore()
11 | },
12 | teardown () {}
13 | })
14 | })
15 | })
16 |
--------------------------------------------------------------------------------
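memory.spec.js is the simplest consumer of the shared suite: setup returns a fresh store and teardown is a no-op. For an adapter with real resources, both hooks can be async. A hedged sketch for a hypothetical adapter follows; MyDatastore, its options and the require path for the suite are illustrative assumptions, not part of this repo.

/* eslint-env mocha */
'use strict'

const interfaceDatastoreTests = require('interface-datastore/src/tests')
const MyDatastore = require('./my-datastore') // hypothetical adapter

describe('MyDatastore', () => {
  describe('interface-datastore compliance', () => {
    interfaceDatastoreTests({
      async setup () {
        // create whatever backing resources the adapter needs
        return new MyDatastore({ path: '/tmp/my-datastore-test' })
      },
      async teardown () {
        // remove any state created during the run
      }
    })
  })
})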
/test/utils.spec.js:
--------------------------------------------------------------------------------
1 | /* eslint-env mocha */
2 | 'use strict'
3 |
4 | const { expect } = require('aegir/utils/chai')
5 | const utils = require('../src').utils
6 | const filter = require('it-filter')
7 | const take = require('it-take')
8 | const map = require('it-map')
9 |
10 | describe('utils', () => {
11 | it('filter - sync', async () => {
12 | const data = [1, 2, 3, 4]
13 | /**
14 | * @param {number} val
15 | */
16 | const filterer = val => val % 2 === 0
17 | const res = []
18 | for await (const val of filter(data, filterer)) {
19 | res.push(val)
20 | }
21 | expect(res).to.be.eql([2, 4])
22 | })
23 |
24 | it('filter - async', async () => {
25 | const data = [1, 2, 3, 4]
26 | /**
27 | * @param {number} val
28 | */
29 |     const filterer = async val => val % 2 === 0
30 | const res = []
31 | for await (const val of filter(data, filterer)) {
32 | res.push(val)
33 | }
34 | expect(res).to.be.eql([2, 4])
35 | })
36 |
37 | it('sortAll', async () => {
38 | const data = [1, 2, 3, 4]
39 | /**
40 | * @param {number} a
41 | * @param {number} b
42 | */
43 | const sorter = (a, b) => {
44 | if (a < b) {
45 | return 1
46 | }
47 |
48 | if (a > b) {
49 | return -1
50 | }
51 |
52 | return 0
53 | }
54 | const res = []
55 | for await (const val of utils.sortAll(data, sorter)) {
56 | res.push(val)
57 | }
58 | expect(res).to.be.eql([4, 3, 2, 1])
59 | })
60 |
61 | it('sortAll - fail', async () => {
62 | const data = [1, 2, 3, 4]
63 | const sorter = () => { throw new Error('fail') }
64 | const res = []
65 |
66 | try {
67 | for await (const val of utils.sortAll(data, sorter)) {
68 | res.push(val)
69 | }
70 | } catch (err) {
71 | expect(err.message).to.be.eql('fail')
72 | return
73 | }
74 |
75 | throw new Error('expected error to be thrown')
76 | })
77 |
78 | it('should take n values from iterator', async () => {
79 | const data = [1, 2, 3, 4]
80 | const n = 3
81 | const res = []
82 | for await (const val of take(data, n)) {
83 | res.push(val)
84 | }
85 | expect(res).to.be.eql([1, 2, 3])
86 | })
87 |
88 | it('should take nothing from iterator', async () => {
89 | const data = [1, 2, 3, 4]
90 | const n = 0
91 | for await (const _ of take(data, n)) { // eslint-disable-line
92 | throw new Error('took a value')
93 | }
94 | })
95 |
96 | it('should map iterator values', async () => {
97 | const data = [1, 2, 3, 4]
98 | /**
99 | * @param {number} n
100 | */
101 | const mapper = n => n * 2
102 | const res = []
103 | for await (const val of map(data, mapper)) {
104 | res.push(val)
105 | }
106 | expect(res).to.be.eql([2, 4, 6, 8])
107 | })
108 |
109 | it('replaceStartWith', () => {
110 | expect(
111 | utils.replaceStartWith('helloworld', 'hello')
112 | ).to.eql(
113 | 'world'
114 | )
115 |
116 | expect(
117 | utils.replaceStartWith('helloworld', 'world')
118 | ).to.eql(
119 | 'helloworld'
120 | )
121 | })
122 |
123 | it('provides a temp folder', () => {
124 | expect(utils.tmpdir()).to.not.equal('')
125 | })
126 | })
127 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "aegir/src/config/tsconfig.aegir.json",
3 | "compilerOptions": {
4 | "outDir": "dist"
5 | },
6 | "include": [
7 | "test",
8 | "src"
9 | ]
10 | }
11 |
--------------------------------------------------------------------------------