├── docs ├── database │ ├── live.md │ ├── linking.md │ ├── storage.md │ └── values.md ├── under-the-hood │ ├── commits.md │ ├── streams.md │ └── replication │ │ ├── live.md │ │ └── merging.md ├── .vuepress │ └── config.cjs ├── README.md └── getting-started.md ├── bare.js ├── test ├── cli │ ├── map.js │ ├── test-init.js │ └── utils.js ├── lib │ ├── http.js │ ├── mock-kv.js │ └── mock-s3.js ├── scripts │ └── test-s3.js ├── test-hamt.js ├── test-updaters.js ├── test-replication.js ├── test-values.js ├── test-errors.js ├── test-kv.js ├── test-database.js ├── test-indexes.js ├── test-stores.js └── test-remotes.js ├── src ├── updaters │ ├── commit.js │ ├── inmemory.js │ ├── file.js │ ├── index.js │ └── kv.js ├── browser.js ├── index.js ├── cli │ ├── info.js │ ├── put.js │ └── init.js ├── stores │ ├── lru.js │ ├── index.js │ ├── lfs.js │ ├── s3.js │ ├── replicate.js │ ├── level.js │ ├── https.js │ ├── kv.js │ └── inmemory.js ├── utils.js ├── http │ ├── nodejs.js │ └── handlers.js ├── hamt.js ├── car.js ├── bare.js ├── values.js ├── database.js ├── schema.js ├── remotes.js ├── indexes.js └── kv.js ├── .gitignore ├── cli.js ├── .github └── workflows │ └── dagdb.yml ├── package.json ├── Schema.md └── README.md /docs/database/live.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/database/linking.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/database/storage.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/under-the-hood/commits.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/docs/under-the-hood/streams.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/under-the-hood/replication/live.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/under-the-hood/replication/merging.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/.vuepress/config.cjs: -------------------------------------------------------------------------------- 1 | exports.base = '/dagdb/' 2 | -------------------------------------------------------------------------------- /bare.js: -------------------------------------------------------------------------------- 1 | import bare from './src/bare.js' 2 | export default bare 3 | -------------------------------------------------------------------------------- /test/cli/map.js: -------------------------------------------------------------------------------- 1 | module.exports = test => test.replace('test/cli/test-', 'src/cli/') 2 | -------------------------------------------------------------------------------- /src/updaters/commit.js: -------------------------------------------------------------------------------- 1 | const commit = async () => { 2 | } 3 | 4 | export default commit 5 | -------------------------------------------------------------------------------- /src/browser.js: -------------------------------------------------------------------------------- 1 | import Block from '@ipld/block/defaults.js' 2 | import bare from './bare.js' 3 | 4 | const mod = bare(Block) 5 | 6 | export default mod 7 | -------------------------------------------------------------------------------- /docs/database/values.md: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | Now that you understand how to work with different types you may want to move 4 | along to [working with links](database/linking). 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .nyc_output 2 | build 3 | coverage 4 | package-lock.json 5 | node_modules 6 | .DS_Store 7 | yarn.lock 8 | .dagdb.car 9 | docs/.vuepress/dist 10 | dist 11 | -------------------------------------------------------------------------------- /cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import yargs from 'yargs' 3 | 4 | const args = yargs 5 | .commandDir('./src/cli') 6 | .demandCommand() 7 | .argv 8 | 9 | if (!args._.length && !args.filename) { 10 | yargs.showHelp() 11 | } 12 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import Block from '@ipld/block/defaults' 2 | import bare from './bare.js' 3 | import lfs from './stores/lfs.js' 4 | import commit from './updaters/commit.js' 5 | import fileUpdater from './updaters/file.js' 6 | 7 | const mod = bare(Block, { lfs: lfs(Block), fileUpdater: fileUpdater(Block), commit }) 8 | 9 | export default mod 10 | -------------------------------------------------------------------------------- /test/cli/test-init.js: -------------------------------------------------------------------------------- 1 | import tap from 'tap' 2 | import { init } from './utils.js' 3 | 4 | tap.test('basic init', async t => { 5 | const { run, dbfile, initOutput } = await init() 6 | tap.same(initOutput.stdout, `Initialized empty database in ${dbfile}\n`) 7 | const { stdout } = await run('info') 8 | console.log({ stdout }) 9 | }) 10 | 
-------------------------------------------------------------------------------- /src/updaters/inmemory.js: -------------------------------------------------------------------------------- 1 | export default CID => { 2 | let current 3 | class Inmem { 4 | update (_current, old) { 5 | if (!old && !current) this.current = _current 6 | else if (this.current.equals(old)) this.current = _current 7 | return this.current 8 | } 9 | 10 | get root () { 11 | return this.current 12 | } 13 | } 14 | return new Inmem() 15 | } 16 | -------------------------------------------------------------------------------- /src/cli/info.js: -------------------------------------------------------------------------------- 1 | import { readonly, checkfile, options } from '../car.js' 2 | 3 | const handler = async argv => { 4 | await checkfile(argv.dbfile) 5 | const db = await readonly(argv.dbfile) 6 | const info = await db.info() 7 | console.log(info) 8 | } 9 | 10 | const desc = 'Print info about a database' 11 | const command = 'info' 12 | const builder = yargs => { 13 | options(yargs) 14 | } 15 | 16 | export { handler, desc, command, builder } 17 | -------------------------------------------------------------------------------- /test/lib/http.js: -------------------------------------------------------------------------------- 1 | /* globals describe, before, after */ 2 | import { createServer } from 'http' 3 | 4 | export default (name, handler, tests) => { 5 | const getPort = () => Math.floor(Math.random() * (9000 - 8000) + 8000) 6 | 7 | describe(`${name} http`, () => { 8 | const port = getPort() 9 | const server = createServer(handler) 10 | const closed = new Promise(resolve => server.once('close', resolve)) 11 | before(() => new Promise((resolve, reject) => { 12 | server.listen(port, e => { 13 | if (e) return reject(e) 14 | resolve() 15 | }) 16 | })) 17 | tests(port) 18 | after(() => { 19 | server.close() 20 | return closed 21 | }) 22 | }) 23 | } 24 | 
-------------------------------------------------------------------------------- /src/cli/put.js: -------------------------------------------------------------------------------- 1 | 2 | import { checkfile, options } from '../car.js' 3 | // const Block = require('@ipld/block') 4 | // const database = require('../database')(Block) 5 | 6 | const put = async argv => { 7 | await checkfile(argv.dbfile) 8 | // const { reader, writer, store, root } = await readwrite(argv.dbfile) 9 | // const db = database(root, store) 10 | } 11 | const handler = put 12 | const desc = 'Sets to a new document encoded from ' 13 | const command = 'put ' 14 | const builder = yargs => { 15 | options(yargs) 16 | yargs.positional('key', { 17 | desc: 'String key to set' 18 | }) 19 | yargs.positional('json', { 20 | desc: 'Full document body as JSON' 21 | }) 22 | } 23 | export { handler, desc, command, builder } 24 | -------------------------------------------------------------------------------- /src/updaters/file.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | 3 | export default Block => { 4 | const { CID } = Block 5 | class FileUpdater { 6 | constructor (path) { 7 | this.path = path 8 | } 9 | 10 | get root () { 11 | try { 12 | fs.statSync(this.path) 13 | } catch (e) { 14 | /* c8 ignore next */ 15 | if (e.code !== 'ENOENT') throw e 16 | return null 17 | } 18 | const buffer = fs.readFileSync(this.path) 19 | return CID.from(buffer) 20 | } 21 | 22 | update (newRoot, oldRoot) { 23 | const current = this.root 24 | /* c8 ignore next */ 25 | if (current && !oldRoot) return current 26 | if (!oldRoot || current.equals(oldRoot)) { 27 | fs.writeFileSync(this.path, newRoot.bytes) 28 | if (this.onUpdate) /* c8 ignore next */ this.onUpdate() 29 | return newRoot 30 | } 31 | return current 32 | } 33 | } 34 | return (...args) => new FileUpdater(...args) 35 | } 36 | -------------------------------------------------------------------------------- 
/test/scripts/test-s3.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | // DAGDB_TEST_BUCKET=dagdb-test mocha test/scripts/test-s3.js -b --timeout=5000 3 | import { graphTests, replicateTests, basics } from '../lib/storage.js' 4 | import Block from '@ipld/block/defaults.js' 5 | 6 | import createS3Store from '../../src/store/s3.js' 7 | import { S3 } from 'aws-sdk' 8 | import awsConfig from 'aws-config' 9 | 10 | const createStore = createS3Store(Block) 11 | const test = it 12 | 13 | if (!process.env.DAGDB_TEST_BUCKET) { 14 | throw new Error('Missing env variable $DAGDB_TEST_BUCKET') 15 | } 16 | 17 | const create = () => { 18 | const id = Math.random().toString() 19 | const keyPrefix = id + '/' 20 | const Bucket = process.env.DAGDB_TEST_BUCKET 21 | const s3 = new S3({ ...awsConfig(), params: { Bucket } }) 22 | return createStore(s3, { keyPrefix }) 23 | } 24 | 25 | describe('s3', () => { 26 | test('basics', async () => { 27 | await basics(create) 28 | }) 29 | describe('graph', () => { 30 | graphTests(create, (store, ...args) => store.graph(...args)) 31 | }) 32 | describe('replicate', () => { 33 | replicateTests(create) 34 | }) 35 | }) 36 | -------------------------------------------------------------------------------- /test/test-hamt.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | import * as hamt from '../src/hamt.js' 3 | import Block from '@ipld/block/defaults' 4 | import assert from 'assert' 5 | 6 | const test = it 7 | const same = assert.deepStrictEqual 8 | 9 | const missing = Block.encoder({ test: Math.random() }, 'dag-cbor') 10 | 11 | describe('test-hamt', () => { 12 | test('test store comparison', async () => { 13 | same(hamt._store.isEqual(await missing.cid(), await missing.cid()), true) 14 | same(hamt._noop(), undefined) 15 | }) 16 | test('test has', async () => { 17 | const empty = hamt.empty(Block) 18 | let head = 
await empty.cid() 19 | const blocks = {} 20 | blocks[head.toString()] = empty 21 | const _get = async cid => { 22 | const block = blocks[cid.toString()] 23 | if (block) return block 24 | throw new Error('Not Found') 25 | } 26 | const ops = [{ set: { key: 'test', val: true } }] 27 | for await (const block of hamt.bulk(head, ops, _get, Block)) { 28 | const cid = await block.cid() 29 | blocks[cid.toString()] = block 30 | head = cid 31 | } 32 | same(await hamt.has(head, 'test', _get), true) 33 | same(await hamt.has(head, 'test2', _get), false) 34 | }) 35 | }) 36 | -------------------------------------------------------------------------------- /src/cli/init.js: -------------------------------------------------------------------------------- 1 | import { promises as fs, createWriteStream } from 'fs' 2 | 3 | import { options } from '../car.js' 4 | import carfile from 'datastore-car' 5 | import Block from '@ipld/block/defaults.js' 6 | import createDatabase from '../database.js' 7 | 8 | const database = createDatabase(Block) 9 | 10 | const { stat } = fs 11 | 12 | const missing = async filename => { 13 | try { 14 | await stat(filename) 15 | } catch (e) { 16 | if (e.code !== 'ENOENT') throw e 17 | return true 18 | } 19 | return false 20 | } 21 | 22 | const init = async argv => { 23 | if (!(await missing(argv.dbfile))) { 24 | console.error('file already exists') 25 | process.exit(1) 26 | } 27 | const car = await carfile.writeStream(createWriteStream(argv.dbfile)) 28 | const empties = await Promise.all(database.empties) 29 | const [empty] = empties 30 | await car.setRoots([await empty.cid()]) 31 | const putBlock = block => block.cid().then(cid => car.put(cid, block.encodeUnsafe())) 32 | await Promise.all(empties.map(putBlock)) 33 | await car.close() 34 | console.log(`Initialized empty database in ${argv.dbfile}`) 35 | } 36 | const handler = init 37 | const desc = 'Create initial db file' 38 | const command = 'init' 39 | const builder = options 40 | 41 | export { handler, desc, 
command, builder } 42 | -------------------------------------------------------------------------------- /src/stores/lru.js: -------------------------------------------------------------------------------- 1 | import LRU from 'lru-cache' 2 | 3 | const defaultSize = 1024 * 1024 * 50 4 | const getLength = block => block.encodeUnsafe().length 5 | 6 | class LRUStore { 7 | constructor (opts = {}) { 8 | if (typeof opts.lru === 'undefined') opts.lru = true 9 | if (opts.lru) { 10 | this.lru = new LRU({ max: opts.lruSize || defaultSize, length: getLength }) 11 | } 12 | this.depthLimit = opts.depthLimit || 1024 13 | } 14 | 15 | async get (cid) { 16 | if (!this.lru) return this._getBlock(cid) 17 | const key = cid.toString('base32') 18 | if (this.lru.has(key)) return this.lru.get(key) 19 | const block = await this._getBlock(cid) 20 | this.lru.set(key, block) 21 | return block 22 | } 23 | 24 | async put (block) { 25 | if (!this.lru) return this._putBlock(block) 26 | const key = (await block.cid()).toString('base32') 27 | if (this.lru.has(key)) return 28 | const ret = await this._putBlock(block) 29 | this.lru.set(key, block) 30 | return ret 31 | } 32 | 33 | has (cid) { 34 | if (!this.lru) return this._hasBlock(cid) 35 | const key = cid.toString('base32') 36 | if (this.lru.has(key)) return { length: this.lru.get(key).decodeUnsafe().length } 37 | return this._hasBlock(cid) 38 | } 39 | } 40 | 41 | export default LRUStore 42 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # DagDB 2 | 3 | 34 | -------------------------------------------------------------------------------- /test/lib/mock-kv.js: -------------------------------------------------------------------------------- 1 | import Block from '@ipld/block/defaults' 2 | import createStore from '../../src/stores/kv.js' 3 | import charwise from 'charwise' 4 | 5 | const { encode, decode } = charwise 
6 | 7 | const KVStore = createStore(Block) 8 | 9 | class NotFound extends Error { 10 | get statusCode () { 11 | return 404 12 | } 13 | } 14 | 15 | const asyncGen = async function * (arr) { 16 | yield * arr 17 | } 18 | 19 | class InMemoryStore extends KVStore { 20 | constructor (...args) { 21 | super(...args) 22 | this.storage = {} 23 | } 24 | 25 | async _put (arr, data) { 26 | this.storage[encode(arr)] = data 27 | } 28 | 29 | _putKey (arr) { 30 | return this._put(arr, true) 31 | } 32 | 33 | _hasKey (arr) { 34 | return !!this.storage[encode(arr)] 35 | } 36 | 37 | async _getKey (arr) { 38 | const key = encode(arr) 39 | if (!this.storage[key]) throw new NotFound('Not found') 40 | return this.storage[key] 41 | } 42 | 43 | _linksFrom (key) { 44 | const start = encode([key, 'link-from', 0]) 45 | const end = encode([key, 'link-from', []]) 46 | const keys = Object.keys(this.storage).sort() 47 | const cids = keys.filter(s => s > start && s < end).map(k => decode(k)[2]) 48 | return asyncGen(cids) 49 | } 50 | } 51 | 52 | export default (...args) => new InMemoryStore(...args) 53 | -------------------------------------------------------------------------------- /test/lib/mock-s3.js: -------------------------------------------------------------------------------- 1 | class Missing { 2 | get statusCode () { 3 | return 404 4 | } 5 | } 6 | 7 | class MockS3 { 8 | constructor () { 9 | this.storage = {} 10 | } 11 | 12 | async _headObject (opts) { 13 | if (!this.storage[opts.Key]) throw new Missing('Not found') 14 | return { ContentLength: this.storage[opts.Key].length } 15 | } 16 | 17 | headObject (opts) { 18 | return { promise: () => this._headObject(opts) } 19 | } 20 | 21 | async _getObject (opts) { 22 | if (!this.storage[opts.Key]) throw new Missing('Not found') 23 | return { Body: this.storage[opts.Key] } 24 | } 25 | 26 | getObject (opts) { 27 | return { promise: () => this._getObject(opts) } 28 | } 29 | 30 | async _putObject (opts) { 31 | this.storage[opts.Key] = opts.Body 32 | } 
33 | 34 | putObject (opts) { 35 | return { promise: () => this._putObject(opts) } 36 | } 37 | 38 | async _listObjectsV2 (opts) { 39 | const after = opts.StartAfter || '' 40 | const keys = Object.keys(this.storage).filter(s => { 41 | return s.startsWith(opts.Prefix) && s > after 42 | }).sort() 43 | return { Contents: keys.map(Key => ({ Key })) } 44 | } 45 | 46 | listObjectsV2 (opts) { 47 | return { promise: () => this._listObjectsV2(opts) } 48 | } 49 | } 50 | 51 | export default (...args) => new MockS3(...args) 52 | -------------------------------------------------------------------------------- /src/stores/index.js: -------------------------------------------------------------------------------- 1 | import createHttp from './https.js' 2 | import createInmemory from './inmemory.js' 3 | import createLevel from './level.js' 4 | import createS3 from './s3.js' 5 | import leveljs from 'level-js' 6 | 7 | export default Block => { 8 | const http = createHttp(Block) 9 | const inmem = createInmemory(Block) 10 | const level = createLevel(Block) 11 | const s3 = createS3(Block) 12 | const from = id => { 13 | if (typeof id === 'object') { 14 | if (id.leveldown) return level(id.leveldown) 15 | if (id.s3) return s3(id.s3) /* c8 ignore next */ 16 | /* c8 ignore next */ 17 | if (id.browser) { 18 | /* c8 ignore next */ 19 | return level(leveljs('dagdb')) 20 | } /* c8 ignore next */ 21 | } else { 22 | if (id.startsWith('http://') || /* c8 ignore next */ id.startsWith('https://')) { 23 | return http(id) 24 | } 25 | } 26 | throw new Error(`Cannot resolve identifier "${id}"`) 27 | } 28 | const create = id => { 29 | if (id === 'inmem' || id === 'inmemory') { 30 | return inmem() 31 | } else if (typeof id === 'object') { 32 | if (id.leveldown) return level(id.leveldown) 33 | if (id.s3) return s3(id.s3) /* c8 ignore next */ 34 | /* c8 ignore next */ 35 | if (id.browser) { 36 | /* c8 ignore next */ 37 | return level(leveljs('dagdb')) 38 | } /* c8 ignore next */ 39 | } 40 | } 41 | return { 
from, create } 42 | } 43 | -------------------------------------------------------------------------------- /test/cli/utils.js: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import tmp from 'tmp' 3 | import { spawn as _spawn } from 'child_process' 4 | import { promises as fs } from 'fs' 5 | 6 | const { stat } = fs 7 | const dir = tmp.dirSync({ prefix: 'dagdb-tests-' }).name 8 | 9 | const concat = arr => Uint8Array.from([].concat(...arr.map(a => Array.from(a)))) 10 | 11 | const spawn = (...args) => new Promise((resolve, reject) => { 12 | const handle = _spawn(...args) 13 | const stdout = [] 14 | const stderr = [] 15 | const str = arr => concat(arr).toString() 16 | handle.stdout.on('data', chunk => stdout.push(chunk)) 17 | handle.stderr.on('data', chunk => stderr.push(chunk)) 18 | handle.on('exit', code => { 19 | resolve({ code, stdout: str(stdout), stderr: str(stderr) }) 20 | }) 21 | handle.on('error', reject) 22 | }) 23 | 24 | const cli = path.normalize(path.join(__dirname, '../../cli.js')) 25 | 26 | exports.init = async () => { 27 | const dbfile = path.join(dir, Math.random().toString() + '.dagdb.car') 28 | const dbarg = `--dbfile=${dbfile}` 29 | const initOutput = await spawn(cli, ['init', dbarg]) 30 | const _stat = await stat(dbfile) 31 | const run = async (...args) => { 32 | console.log({ cli, args: [...args, dbarg] }) 33 | const outs = await spawn(cli, [...args, dbarg]) 34 | if (outs.code) throw new Error('Non-zero exit code.\n' + outs.stderr) 35 | return outs 36 | } 37 | return { spawn, run, stat: _stat, dbfile, dbarg, initOutput, dir } 38 | } 39 | -------------------------------------------------------------------------------- /src/updaters/index.js: -------------------------------------------------------------------------------- 1 | import bent from 'bent' 2 | import createKVUpdater from './kv.js' 3 | import inmemoryUpdater from './inmemory.js' 4 | 5 | const getJSON = bent('json') 6 | 7 | 
export default Block => { 8 | const { CID } = Block 9 | class HttpUpdater { 10 | get root () { 11 | return this.info().then(info => CID.from(info.root)) 12 | } 13 | 14 | constructor (infoUrl, updateUrl) { 15 | this.infoUrl = infoUrl 16 | this.updateUrl = updateUrl 17 | } 18 | 19 | info () { 20 | return getJSON(this.infoUrl) 21 | } 22 | 23 | async update (newRoot, oldRoot) { 24 | const url = new URL(this.updateUrl) 25 | url.searchParams.set('new', newRoot.toString('base32')) 26 | if (oldRoot) url.searchParams.set('old', oldRoot.toString('base32')) 27 | else url.searchParams.set('old', 'null') 28 | const info = await getJSON(url.toString()) 29 | return CID.from(info.root) 30 | } 31 | } 32 | const from = async (id, ...args) => { 33 | if (id.startsWith('http://') || /* istanbul ignore next */ id.startsWith('https://')) { 34 | return new HttpUpdater(id, ...args) 35 | } 36 | throw new Error(`Unsupported identifier "${id}"`) /* c8 ignore next */ 37 | } 38 | const create = async (id, ...args) => { 39 | if (id === 'inmem' || id === 'inmemory') { 40 | return inmemoryUpdater(CID) 41 | } 42 | throw new Error('Not implemented') /* c8 ignore next */ 43 | } 44 | return { from, kv: createKVUpdater(Block), create } 45 | } 46 | -------------------------------------------------------------------------------- /src/stores/lfs.js: -------------------------------------------------------------------------------- 1 | import LRU from 'lru-cache' 2 | import lfs from 'lfs-store' 3 | import getRepo from 'git-remote-origin-url' 4 | 5 | const defaults = { 6 | max: 1024 * 1024 * 50, 7 | length: block => block.encodeUnsafe().length 8 | } 9 | 10 | /* c8 ignore next */ 11 | const getUser = str => { 12 | /* c8 ignore next */ 13 | str = str.slice(0, str.lastIndexOf('/')) 14 | /* c8 ignore next */ 15 | return str.slice(str.lastIndexOf('/') + 1) 16 | /* c8 ignore next */ 17 | } 18 | 19 | export default (Block, opts = {}) => { 20 | const lru = new LRU({ ...defaults, ...opts }) 21 | return async (filepath 
= './blockstore.ipld-lfs', repo, user, token, disableCache) => { 22 | if (!repo) repo = await getRepo() 23 | if (!user) user = process.env.GITHUB_ACTOR /* c8 ignore next */ || getUser(repo) 24 | if (!token) token = /* c8 ignore next */ process.env.GHTOKEN || /* c8 ignore next */ process.env.GITHUB_TOKEN 25 | const store = await lfs(Block, filepath, repo, user, token) 26 | const get = async cid => { 27 | const key = cid.toString() 28 | if (!disableCache && lru.has(key)) return lru.get(key) 29 | const block = await store.get(cid) 30 | if (!disableCache) /* c8 ignore next */ lru.set(key, block) 31 | return block 32 | } 33 | const put = async block => { 34 | const cid = await block.cid() 35 | const key = cid.toString() 36 | if (!disableCache && lru.has(key)) return lru.get(key) 37 | await store.put(block) 38 | if (!disableCache) lru.set(key, block) 39 | } 40 | return { get, put, close: () => store.close() } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/dagdb.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | name: Build, Test and maybe Publish 3 | jobs: 4 | test: 5 | name: Build & Test 6 | runs-on: ubuntu-latest 7 | strategy: 8 | matrix: 9 | node-version: [12.x, 14.x] 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Use Node.js ${{ matrix.node-version }} 13 | uses: actions/setup-node@v1 14 | with: 15 | node-version: ${{ matrix.node-version }} 16 | - name: Cache node_modules 17 | id: cache-modules 18 | uses: actions/cache@v1 19 | with: 20 | path: node_modules 21 | key: ${{ matrix.node-version }}-${{ runner.OS }}-build-${{ hashFiles('package.json') }} 22 | - name: Build 23 | if: steps.cache-modules.outputs.cache-hit != 'true' 24 | run: npm install 25 | - name: Test 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 28 | run: npm_config_yes=true npx best-test@latest 29 | publish: 30 | name: Publish 31 | needs: test 32 
| runs-on: ubuntu-latest 33 | if: github.event_name == 'push' && github.ref == 'refs/heads/master' 34 | steps: 35 | - uses: actions/checkout@v2 36 | - name: Cache node_modules 37 | id: cache-modules 38 | uses: actions/cache@v1 39 | with: 40 | path: node_modules 41 | key: 12.x-${{ runner.OS }}-build-${{ hashFiles('package.json') }} 42 | - name: Build 43 | if: steps.cache-modules.outputs.cache-hit != 'true' 44 | run: npm install 45 | - name: Test 46 | run: npm_config_yes=true npx best-test@latest 47 | env: 48 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 49 | - name: Publish 50 | uses: mikeal/merge-release@master 51 | env: 52 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 53 | NPM_AUTH_TOKEN: ${{ secrets.NPM_AUTH_TOKEN }} 54 | -------------------------------------------------------------------------------- /src/stores/s3.js: -------------------------------------------------------------------------------- 1 | import createKVStore from './kv.js' 2 | 3 | const empty = new Uint8Array(0) 4 | 5 | const ls = async function * (s3, opts) { 6 | opts = { ...opts } 7 | let data 8 | do { 9 | data = await s3.listObjectsV2(opts).promise() 10 | for (const entry of data.Contents) { 11 | yield entry.Key.slice(entry.Key.lastIndexOf('/') + 1) 12 | } 13 | if (!data.Contents.length) { 14 | return 15 | } 16 | opts.StartAfter = data.Contents[data.Contents.length - 1].Key 17 | } /* c8 ignore next */ while (data.Contents.length) 18 | } 19 | 20 | export default Block => { 21 | const KVStore = createKVStore(Block) 22 | class S3Store extends KVStore { 23 | constructor (s3, opts = {}, ...args) { 24 | super(opts, ...args) 25 | this.keyPrefix = opts.keyPrefix || '' 26 | this.s3 = s3 27 | } 28 | 29 | _put (arr, Body) { 30 | const Key = this.keyPrefix + arr.join('/') 31 | return this.s3.putObject({ Key, Body }).promise() 32 | } 33 | 34 | _putKey (arr) { 35 | return this._put(arr, empty) 36 | } 37 | 38 | async _hasKey (arr) { 39 | const Key = this.keyPrefix + arr.join('/') 40 | let resp 41 | try { 42 | 
resp = await this.s3.headObject({ Key }).promise() 43 | } catch (e) { 44 | /* c8 ignore next */ 45 | if (e.statusCode === 404) return false /* c8 ignore next */ 46 | /* c8 ignore next */ 47 | throw e 48 | /* c8 ignore next */ 49 | } 50 | return { length: resp.ContentLength } 51 | } 52 | 53 | async _getKey (arr) { 54 | const Key = this.keyPrefix + arr.join('/') 55 | const resp = await this.s3.getObject({ Key }).promise() 56 | return resp.Body 57 | } 58 | 59 | _linksFrom (key) { 60 | const Prefix = [this.keyPrefix + key, 'link-from'].join('/') 61 | return ls(this.s3, { Prefix }) 62 | } 63 | } 64 | return (...args) => new S3Store(...args) 65 | } 66 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | import * as hamt from './hamt.js' 2 | import schema from './schema.js' 3 | import createValidate from '@ipld/schema-validation' 4 | 5 | const isCID = node => node.asCID === node 6 | const validate = createValidate(schema) 7 | 8 | const fromBlock = (block, className) => validate(block.decode(), className) 9 | const fromBlockUnsafe = (block, className) => validate(block.decodeUnsafe(), className) 10 | 11 | const readonly = (source, key, value) => { 12 | Object.defineProperty(source, key, { value, writable: false }) 13 | } 14 | 15 | class NotFound extends Error { 16 | get statusCode () { 17 | return 404 18 | } 19 | 20 | get kvs () { 21 | return 'notfound' 22 | } 23 | } 24 | 25 | const encoderTransaction = async function * (iter) { 26 | let last 27 | for await (const block of iter) { 28 | last = block 29 | yield block 30 | } 31 | yield last.cid() 32 | } 33 | 34 | class Lazy { 35 | constructor (db) { 36 | const root = db.getRoot().then(root => root['db-v1'][this.prop]) 37 | readonly(this, '_root', root) 38 | this.db = db 39 | this.pending = new Map() 40 | this.store = db.store 41 | this.getBlock = db.store.get.bind(db.store) 42 | } 43 | 44 | async 
_get (name, Cls, typeName) { 45 | if (this.pending.has(name)) return this.pending.get(name) 46 | const root = await this._root 47 | const cid = await hamt.get(root, name, this.getBlock) 48 | if (!cid) throw new Error(`No ${typeName.toLowerCase()} named "${name}"`) 49 | const block = await this.db.store.get(cid) 50 | const decoded = fromBlock(block, typeName) 51 | return new Cls(decoded, this.db) 52 | } 53 | } 54 | 55 | const chain = (child, parent) => { 56 | Object.defineProperty(child, 'dirty', { get: () => parent.dirty }) 57 | readonly(child, 'store', parent.store) 58 | readonly(child, 'getBlock', parent.getBlock || parent.store.get.bind(parent.store)) 59 | } 60 | 61 | export { Lazy, NotFound, readonly, fromBlock, fromBlockUnsafe, validate, encoderTransaction, chain, isCID } 62 | -------------------------------------------------------------------------------- /src/stores/replicate.js: -------------------------------------------------------------------------------- 1 | const create = Block => { 2 | const { CID } = Block 3 | 4 | const replicate = async (cid, _from, _to, depth = 1024, batchSize = 100, skip = new Set()) => { 5 | let { complete, missing, incomplete } = await _to.graph(cid, depth) 6 | if (complete) return { complete } 7 | if (!incomplete) incomplete = new Set() 8 | if (!missing) missing = new Set() 9 | if (depth < 0) { 10 | return { 11 | missing: missing.size ? missing : undefined, 12 | incomplete: incomplete.size ? 
incomplete : undefined 13 | } 14 | } 15 | for (const key of skip) { 16 | missing.delete(key) 17 | incomplete.delete(key) 18 | } 19 | const push = async key => { 20 | skip.add(key) 21 | let block 22 | try { 23 | block = await _from.get(CID.from(key)) 24 | } catch (e) { 25 | if (e.statusCode !== 404) throw e 26 | missing.add(key) 27 | return 28 | } 29 | incomplete.add(key) 30 | return _to.put(block) 31 | } 32 | const gets = [...missing.values()] 33 | missing = new Set() 34 | while (gets.length) { 35 | await Promise.all(gets.splice(0, batchSize).map(push)) 36 | } 37 | for (const key of [...incomplete.values()]) { 38 | incomplete.delete(key) 39 | const _depth = depth - 1 40 | const result = await replicate(CID.from(key), _from, _to, _depth, batchSize, skip) 41 | if (result.complete) continue 42 | else { 43 | if (result.missing) { 44 | for (const key of result.missing.values()) { 45 | missing.add(key) 46 | } 47 | } 48 | 49 | if (result.incomplete) { 50 | for (const key of result.incomplete.values()) { 51 | incomplete.add(key) 52 | } 53 | } 54 | } 55 | } 56 | if (!missing.size && !incomplete.size) return { complete: true } 57 | return { 58 | missing: missing.size ? missing : undefined, 59 | incomplete: incomplete.size ? 
incomplete : undefined 60 | } 61 | } 62 | return replicate 63 | } 64 | export default create 65 | -------------------------------------------------------------------------------- /src/updaters/kv.js: -------------------------------------------------------------------------------- 1 | export default Block => { 2 | const { CID } = Block 3 | const lock = (self) => { 4 | let _resolve 5 | const p = new Promise(resolve => { 6 | _resolve = resolve 7 | }) 8 | const unlock = () => { 9 | self.lock = null 10 | _resolve() 11 | } 12 | return { p, unlock } 13 | } 14 | 15 | const getRoot = async updater => { 16 | let buff 17 | try { 18 | buff = await updater.store._getKey([updater.updateKey]) 19 | } catch (e) { 20 | if (e.message.toLowerCase().includes('not found')) { 21 | return null 22 | } /* c8 ignore next */ else { 23 | /* c8 ignore next */ 24 | throw e 25 | /* c8 ignore next */ 26 | } 27 | } 28 | return CID.from(buff) 29 | } 30 | 31 | class KVUpdater { 32 | constructor (store, updateKey = 'root') { 33 | this.store = store 34 | this.lock = null 35 | this.updateKey = updateKey 36 | } 37 | 38 | get root () { 39 | return getRoot(this) 40 | } 41 | 42 | async update (newRoot, prevRoot) { 43 | // Note: this implementation assumes you have a lock on the 44 | // underlying kv store. If you don't, this code is prone 45 | // to overwrite root transaction changes under high concurrent 46 | // load. This is why we don't use this w/ S3 and use the Dynamo 47 | // updater instead. 
48 | while (this.lock) { 49 | await this.lock.p 50 | } 51 | this.lock = lock(this) 52 | if (!(await this.store._hasKey([this.updateKey]))) { 53 | if (prevRoot) throw new Error('There is no previous root') 54 | } else { 55 | const prev = CID.from(await this.store._getKey([this.updateKey])) 56 | if (!prevRoot || !prev.equals(prevRoot)) { 57 | this.lock.unlock() 58 | return prev 59 | } 60 | } 61 | await this._update(newRoot) 62 | this.lock.unlock() 63 | return newRoot 64 | } 65 | 66 | _update (newRoot) { 67 | const { bytes } = newRoot 68 | return this.store._put([this.updateKey], bytes) 69 | } 70 | } 71 | 72 | return (...args) => new KVUpdater(...args) 73 | } 74 | -------------------------------------------------------------------------------- /src/stores/level.js: -------------------------------------------------------------------------------- 1 | import createKV from './kv.js' 2 | import levelup from 'levelup' 3 | import encoding from 'encoding-down' 4 | import charwise from 'charwise' 5 | 6 | const empty = new Uint8Array(0) 7 | 8 | const ls = (lev, gt, lt) => new Promise((resolve, reject) => { 9 | // for some reason you can't convert the level 10 | // readable stream into an async generator ;( 11 | const keys = [] 12 | const opts = { gt, lt } 13 | const stream = lev.createKeyStream(opts) 14 | stream.on('data', key => keys.push(key[3])) 15 | stream.on('error', reject) 16 | stream.on('end', () => resolve(keys)) 17 | }) 18 | 19 | const lsIter = async function * (lev, gt, lt) { 20 | const keys = await ls(lev, gt, lt) 21 | yield * keys 22 | } 23 | 24 | export default Block => { 25 | const KVStore = createKV(Block) 26 | class LevelStore extends KVStore { 27 | constructor (leveldown, opts = {}, ...args) { 28 | super(opts, ...args) 29 | this.lev = levelup(encoding(leveldown, { valueEncoding: 'binary', keyEncoding: charwise })) 30 | this.prefix = opts.prefix || '_dagdb-bs' 31 | } 32 | 33 | _mkey (arr) { 34 | return [this.prefix, ...arr] 35 | } 36 | 37 | _put (arr, body) { 
38 | return this.lev.put(this._mkey(arr), body) 39 | } 40 | 41 | _putKey (arr) { 42 | return this._put(arr, empty) 43 | } 44 | 45 | async _hasKey (arr) { 46 | let resp 47 | try { 48 | resp = await this.lev.get(this._mkey(arr)) 49 | } catch (e) { 50 | /* c8 ignore next */ 51 | if (e.status === 404) return false /* c8 ignore next */ 52 | /* c8 ignore next */ 53 | throw e 54 | /* c8 ignore next */ 55 | } 56 | return { length: resp.length } 57 | } 58 | 59 | async _getKey (arr) { 60 | try { 61 | return await this.lev.get(this._mkey(arr)) 62 | } catch (e) { 63 | e.statusCode = e.status 64 | throw e 65 | } /* c8 ignore next */ 66 | } 67 | 68 | _linksFrom (key) { 69 | const start = this._mkey([key, 'link-from', 0]) 70 | const end = this._mkey([key, 'link-from', []]) 71 | return lsIter(this.lev, start, end) 72 | } 73 | } 74 | return (...args) => new LevelStore(...args) 75 | } 76 | -------------------------------------------------------------------------------- /src/stores/https.js: -------------------------------------------------------------------------------- 1 | import bent from 'bent' 2 | import LRUStore from './lru.js' 3 | 4 | export default Block => { 5 | class HttpsStore extends LRUStore { 6 | constructor (baseurl, opts) { 7 | super(opts) 8 | let url 9 | let params 10 | if (baseurl.includes('?')) { 11 | url = baseurl.slice(0, baseurl.indexOf('?')) 12 | params = (new URL(baseurl)).searchParams 13 | } else { 14 | url = baseurl 15 | } 16 | this.url = url 17 | this.params = params 18 | this._getBuffer = bent('buffer') 19 | this._getJSON = bent('json') 20 | this._put = bent('PUT', 201) 21 | this._head = bent('HEAD', 200, 404) 22 | } 23 | 24 | mkurl (path, params) { 25 | let u = this.url 26 | if (!u.endsWith('/')) u += '/' 27 | u += path 28 | if (!params) params = this.params 29 | if (params) u += `?${params.toString()}` 30 | return u 31 | } 32 | 33 | async _getBlock (cid) { 34 | const buf = await this._getBuffer(this.mkurl(cid.toString('base32'))) 35 | const data = buf 
instanceof ArrayBuffer /* c8 ignore next */ ? new Uint8Array(buf) : buf 36 | return Block.create(data, cid) 37 | } 38 | 39 | async _putBlock (block) { 40 | const cid = await block.cid() 41 | const url = this.mkurl(cid.toString('base32')) 42 | return this._put(url, block.encodeUnsafe()) 43 | } 44 | 45 | async _hasBlock (cid) { 46 | const resp = await this._head(this.mkurl(cid.toString('base32'))) 47 | if (resp.statusCode === 200) return true 48 | else return false /* c8 ignore next */ 49 | } 50 | 51 | async graph (cid, depth) { 52 | let params 53 | if (typeof depth !== 'undefined') { 54 | params = new URLSearchParams(this.params) 55 | params.set('depth', depth) 56 | } 57 | const url = this.mkurl(cid.toString('base32') + '/graph', params) 58 | const info = await this._getJSON(url) 59 | const { result } = info 60 | if (result.incomplete) result.incomplete = new Set(result.incomplete) 61 | if (result.missing) result.missing = new Set(result.missing) 62 | return info.result 63 | } 64 | } 65 | return (...args) => new HttpsStore(...args) 66 | } 67 | -------------------------------------------------------------------------------- /src/http/nodejs.js: -------------------------------------------------------------------------------- 1 | import { blockstore, info, updater } from './handlers.js' 2 | 3 | const getBody = stream => new Promise((resolve, reject) => { 4 | const buffers = [] 5 | stream.on('error', reject) 6 | stream.on('data', chunk => buffers.push(chunk)) 7 | stream.on('end', () => resolve(Buffer.concat(buffers))) 8 | }) 9 | 10 | const handler = async (req, res, _handler) => { 11 | let body 12 | if (req.method === 'PUT') { 13 | body = await getBody(req) 14 | } 15 | const parsed = new URL('http://asdf/' + req.url) 16 | const params = { } 17 | for (const [key, value] of parsed.searchParams.entries()) { 18 | params[key] = value 19 | if (key === 'depth') { 20 | params.depth = parseInt(params.depth) 21 | } 22 | if (value === 'null') params[key] = null 23 | } 24 | const 
[method, path] = [req.method, parsed.pathname] 25 | const result = await _handler({ method, path, params, body }) 26 | res.writeHead(result.statusCode || 200, result.headers || {}) 27 | res.end(result.body ? Buffer.from(result.body) : undefined) 28 | } 29 | 30 | const createHandler = (Block, store, _updater, infoOpts = {}) => { 31 | const blockstoreHandler = blockstore(Block, store) 32 | const updaterHandler = updater(Block, _updater) 33 | const infoHandler = info(store, _updater) 34 | const _handler = (req, res, basepath = '') => { 35 | if (req.url === basepath || req.url === basepath + '/') { 36 | return handler(req, res, infoHandler) 37 | } 38 | req.url = req.url.slice(basepath.length) 39 | if (req.url.startsWith('/blockstore/')) { 40 | req.url = req.url.slice('/blockstore/'.length) 41 | return handler(req, res, blockstoreHandler) 42 | } else if (req.url.startsWith('/updater')) { 43 | req.url = req.url.slice('/updater'.length) 44 | return handler(req, res, updaterHandler) 45 | } else { 46 | res.statusCode = 404 47 | res.end('Not found') 48 | } 49 | } 50 | return _handler 51 | } 52 | 53 | createHandler.blockstore = (...args) => { 54 | const _handler = blockstore(...args) 55 | return (req, res) => handler(req, res, _handler) 56 | } 57 | createHandler.updater = (...args) => { 58 | const _handler = updater(...args) 59 | return (req, res) => handler(req, res, _handler) 60 | } 61 | 62 | export default createHandler 63 | -------------------------------------------------------------------------------- /src/hamt.js: -------------------------------------------------------------------------------- 1 | import iamap from 'iamap' 2 | import assert from 'assert' 3 | import murmurhash3 from 'murmurhash3js-revisited' 4 | 5 | const isCID = node => node.asCID === node 6 | 7 | function murmurHasher (key) { 8 | // TODO: get rid of Buffer 9 | assert(Buffer.isBuffer(key)) 10 | const b = Buffer.alloc(4) 11 | b.writeUInt32LE(murmurhash3.x86.hash32(key)) 12 | return b 13 | } 14 | 
// Register murmur3-32 with iamap once at module load; the HAMT `config`
// below refers to it by name.
iamap.registerHasher('murmur3-32', 32, murmurHasher)

const noop = () => {}
const config = { hashAlg: 'murmur3-32' }
// CIDs compare by value, not identity.
const isEqual = (one, two) => one.equals(two)
const isLink = isCID
// Adapt a block getter into iamap's `load(cid) -> decoded node` interface.
const mkload = get => cid => get(cid).then(block => block.decode())
const store = { isEqual, isLink }

// Apply a list of { set: { key, val } } / { del: { key } } operations to the
// HAMT rooted at `head`, yielding every new block the mutation produced.
const transaction = async function * (head, ops, get, Block) {
  const blocks = []
  // iamap calls `save` for each node it writes; collect the encoded blocks
  // so the caller can persist them.
  const save = obj => {
    const block = Block.encoder(obj, 'dag-cbor')
    blocks.push(block)
    return block.cid()
  }

  const load = mkload(get)
  let map = await iamap.load({ save, load, ...store }, head)
  for (const op of ops) {
    if (op.set) {
      map = await map.set(op.set.key, op.set.val)
    } else if (op.del) {
      map = await map.delete(op.del.key)
    } /* c8 ignore next */ else {
      /* c8 ignore next */
      throw new Error('Invalid operation')
      /* c8 ignore next */
    }
  }
  // would be great to have a hamt API that took bulk operations
  // and was async iterable
  yield * blocks
}

const fixture = { save: noop, load: noop, ...store }
// Encode a new, empty HAMT root block.
const empty = (Block) => {
  const map = new iamap.IAMap(fixture, config)
  return Block.encoder(map.toSerializable(), 'dag-cbor')
}

// Load a read-only view of the map rooted at `head` (saves are no-ops).
const _load = async (head, get) => {
  const load = mkload(get)
  const map = await iamap.load({ save: noop, load, ...store }, head)
  return map
}

// NOTE(review): the third parameter deliberately shadows the exported `get`
// binding — it is the block getter used to load HAMT nodes.
const get = async (head, key, get) => {
  const map = await _load(head, get)
  return map.get(key)
}
const has = async (head, key, _get) => {
  const val = await get(head, key, _get)
  if (typeof val === 'undefined') return false
  return true
}
// Async-iterate every [key, value] entry of the map rooted at `root`.
const all = (root, get) => {
  const iter = async function * () {
    const map = await _load(root, get)
    const entries = await map.entries()
    yield * entries
  }
  return iter()
}
const bulk = transaction
const
_store = store 80 | const _noop = noop 81 | 82 | export { all, bulk, empty, get, _store, _noop, has } 83 | -------------------------------------------------------------------------------- /docs/getting-started.md: -------------------------------------------------------------------------------- 1 | # Getting Started 2 | 3 | This tutorial will get you up and running with DagDB w/ JavaScript. If 4 | you'd prefer to start with the command line head over to the [getting started 5 | with the command line page](). 6 | 7 | ## Install 8 | 9 | ``` 10 | npm install dagdb 11 | ``` 12 | 13 | That's it. 14 | 15 | ## Create a database 16 | 17 | ```js 18 | const dagdb = require('dagdb') 19 | let db = await dagdb.create('dbname') 20 | ``` 21 | 22 | This creates a new databases. The above example is suitable for the browser, where `dbname` 23 | is simply the name of the database and will be created in browser storage (IndexedDB). 24 | 25 | In Node.js `dbname` is the path to a file or directory. See 26 | [Node.js storage options](database/storage#nodejs) for details. 27 | 28 | ## Open a database 29 | 30 | ```js 31 | let db = await dagdb.open('dbname') 32 | ``` 33 | 34 | ## Live databases 35 | 36 | A "live" database is a remote database that is cached locally. As you read and write 37 | to the database it will pull data from the remote as needed. If the remote database 38 | changes the live database will pull any updated data you had previously pulled from 39 | the remote. 40 | 41 | This is well suited for browsers, where local storage is limited and ultimately temporary. Since 42 | the replication is effectively filtered based on what you've read, there's no need to 43 | configure explicit data filters or design your database to be replicated to unique user 44 | and device profiles. 
45 | 46 | ```js 47 | let db = await dagdb.live('https://storage.mysite.com/db') 48 | ``` 49 | 50 | To learn more about working with live databases head over to the 51 | [live database](database/live) documentation. 52 | 53 | ## Reading and writing data 54 | 55 | ```js 56 | await db.set('key', { hello: 'world' }) 57 | await db.get('key') // { hello: 'world' } 58 | db = await db.update() 59 | ``` 60 | 61 | When you create or open a database the object you get is a database transaction. This instance 62 | is permanently configured to the state of the database when you opened it. You can set new keys 63 | and retrieve both newly added keys and keys that are already in the database, but those keys 64 | are not persisted into the database until it is committed (see [`.commit()`](under-the-hood/commits)). 65 | The `update()` method commits the transaction and then updates the local HEAD to point to this 66 | new commit. 67 | 68 | The `.update()` method returns a *new* database transaction configured to the now committed 69 | state of the database. 70 | 71 | You can store anything that can be encoded as JSON as well as binary data and streams. 72 | 73 | For more information on storing different data types move on to the [value types](database/values) page. 
74 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dagdb", 3 | "version": "0.0.0-dev", 4 | "description": "Syncable large scale database built on IPLD", 5 | "main": "src/index.js", 6 | "type": "module", 7 | "scripts": { 8 | "lint": "standard", 9 | "build": "npm_config_yes=true npx ipjs@latest build --tests", 10 | "publish": "npm_config_yes=true npx ipjs@latest publish", 11 | "test:cjs": "npm run build && mocha dist/cjs/node-test/test-*.js", 12 | "test:node": "hundreds mocha test/test-*.js", 13 | "test:browser": "polendina --cleanup dist/cjs/browser-test/test-*.js", 14 | "test": "npm run lint && npm run test:node && npm run test:cjs && npm run test:browser", 15 | "test:node-v12": "npm run lint && mocha test/test-*.js && npm run test:cjs && npm run test:browser", 16 | "coverage": "c8 --reporter=html mocha test/test-*.js && npx st -d coverage -p 8080", 17 | "docs:dev": "vuepress dev docs", 18 | "docs:build": "vuepress build docs", 19 | "build:schema": "echo 'export default ' > src/schema.js && ipld-schema to-json Schema.md >> src/schema.js && standard --fix src/schema.js" 20 | }, 21 | "repository": { 22 | "type": "git", 23 | "url": "git+https://github.com/mikeal/dagdb.git" 24 | }, 25 | "keywords": [ 26 | "database", 27 | "dag", 28 | "decentralized" 29 | ], 30 | "author": "Mikeal Rogers (https://www.mikealrogers.com/)", 31 | "license": "(Apache-2.0 AND MIT)", 32 | "bugs": { 33 | "url": "https://github.com/mikeal/dagdb/issues" 34 | }, 35 | "c8": { 36 | "exclude": [ 37 | "src/updaters/commit.js", 38 | "src/updaters/file.js", 39 | "src/stores/lfs.js", 40 | "src/bare.js", 41 | "test*" 42 | ] 43 | }, 44 | "homepage": "https://github.com/mikeal/dagdb#readme", 45 | "exports": { 46 | ".": { 47 | "import": "./src/index.js", 48 | "browser": "./src/browser.js" 49 | }, 50 | "./server": { 51 | "import": "./src/http/nodejs.js" 52 | }, 
53 | "./database": { 54 | "import": "./src/database.js" 55 | }, 56 | "./lru": { 57 | "import": "./src/stores/lru.js" 58 | } 59 | }, 60 | "dependencies": { 61 | "@ipld/block": "^6.0.4", 62 | "@ipld/fbl": "2.0.1", 63 | "@ipld/schema-validation": "1.1.0", 64 | "bent": "^7.3.10", 65 | "charwise": "^3.0.1", 66 | "datastore-car": "^1.2.0", 67 | "encoding-down": "^6.3.0", 68 | "git-remote-origin-url": "^3.1.0", 69 | "iamap": "mikeal/iamap#uint8array", 70 | "level-js": "^5.0.2", 71 | "levelup": "^4.4.0", 72 | "lfs-store": "^0.0.8", 73 | "lru-cache": "^6.0.0", 74 | "murmurhash3js-revisited": "^3.0.0" 75 | }, 76 | "devDependencies": { 77 | "aws-config": "^1.4.0", 78 | "aws-sdk": "^2.751.0", 79 | "c8": "^7.3.0", 80 | "hundreds": "0.0.8", 81 | "ipld-schema": "^0.3.2", 82 | "memdown": "^5.1.0", 83 | "mocha": "^8.1.3", 84 | "polendina": "^1.1.0", 85 | "standard": "^14.3.4", 86 | "tap": "^14.10.8", 87 | "tempy": "^0.7.0", 88 | "vuepress": "^1.5.4" 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/car.js: -------------------------------------------------------------------------------- 1 | import { promises as fs } from 'fs' 2 | import carfile from 'datastore-car' 3 | import createInmemory from './stores/inmemory.js' 4 | import Block from '@ipld/block/defaults.js' 5 | import mkdatabase from './database.js' 6 | import path from 'path' 7 | 8 | const database = mkdatabase(Block) 9 | const { stat, rename } = fs 10 | 11 | const getRoot = async car => { 12 | const [root, ...nope] = await car.getRoots() 13 | if (nope.length) { 14 | throw new Error('No support for CAR files with multiple roots') 15 | } 16 | return root 17 | } 18 | 19 | const loadReadOnly = async filename => { 20 | // const stream = fs.createReadStream(filename) 21 | const car = await carfile.readFileComplete(filename) 22 | const root = await getRoot(car) 23 | const store = { get: cid => car.get(cid).then(data => Block.create(data, cid)) } 24 | return database(root, 
store) 25 | } 26 | const loadWritable = async filename => { 27 | } 28 | 29 | const options = yargs => { 30 | yargs.option('dbfile', { 31 | desc: 'File containing the database', 32 | default: '.dagdb.car' 33 | }) 34 | } 35 | const checkfile = async file => { 36 | let exists 37 | try { 38 | exists = await stat(file) 39 | } catch (e) { 40 | if (!e.code === 'ENOENT') throw e 41 | } 42 | if (!exists) { 43 | if (file === './.dagdb.car') { 44 | console.error('You must run `init` to create a .dagdb.car file in the current directory') 45 | } else { 46 | console.error(`No such file "${file}"`) 47 | } 48 | process.exit(1) 49 | } 50 | return exists 51 | } 52 | 53 | const traverse = async function * (cid, get, seen = new Set()) { 54 | const block = await get(cid) 55 | yield block 56 | seen.add(cid.toString('base64')) 57 | for (const [, link] of block.reader().links()) { 58 | if (seen.has(link.toString('base64'))) continue 59 | yield * traverse(link, get, seen) 60 | } 61 | } 62 | 63 | const readwrite = async (filename, exportFile) => { 64 | await checkfile(filename) 65 | const reader = await loadReadOnly(filename) 66 | const inmem = createInmemory(Block) 67 | const cache = inmem() 68 | const put = cache.put.bind(cache) 69 | const get = async cid => { 70 | try { 71 | const block = await cache.get(cid) 72 | return block 73 | } catch (e) { 74 | if (e.statusCode !== 404) throw e 75 | return reader.get(cid).then(data => Block.create(data, cid)) 76 | } 77 | } 78 | const store = { get, put } 79 | const write = async newRoot => { 80 | const dir = path.dirname(filename) 81 | const base = path.basename(filename) 82 | const f = path.join(dir, '.tmp.' 
+ base) 83 | const writer = await loadWritable(f) 84 | await writer.setRoots([newRoot]) 85 | for await (const block of traverse(newRoot, get)) { 86 | writer.put(await block.cid(), block.encodeUnsafe()) 87 | } 88 | await writer.close() 89 | await rename(f, exportFile || filename) 90 | } 91 | const [root] = await reader.getRoots() 92 | return { write, root, store } 93 | } 94 | 95 | export { loadReadOnly, loadWritable, options, checkfile, readwrite } 96 | -------------------------------------------------------------------------------- /src/stores/kv.js: -------------------------------------------------------------------------------- 1 | import LRUStore from './lru.js' 2 | 3 | export default Block => { 4 | const { CID } = Block 5 | class KVStore extends LRUStore { 6 | async graph (cid, depth = 1024, missing = new Set(), incomplete = new Set(), skips = new Set()) { 7 | const key = cid.toString('base32') 8 | 9 | if (skips.has(key)) return 10 | skips.add(key) 11 | if (!(await this.has(cid))) { 12 | missing.add(key) 13 | return { missing } 14 | } 15 | 16 | if (cid.code === 0x55) return { complete: true } 17 | 18 | if (depth < 0) { 19 | incomplete.add(key) 20 | return { incomplete } 21 | } 22 | 23 | if (await this._hasKey([key, 'complete'])) return { complete: true } 24 | for await (const linkKey of this._linksFrom(key)) { 25 | if (await this._hasKey([linkKey, 'complete'])) continue 26 | const cid = CID.from(linkKey) 27 | if (!(await this.has(cid))) { 28 | missing.add(linkKey) 29 | continue 30 | } 31 | if (depth < 1) { 32 | incomplete.add(linkKey) 33 | continue 34 | } 35 | await this.graph(cid, depth - 1, missing, incomplete, skips) 36 | } 37 | if (missing.size === 0 && incomplete.size === 0) { 38 | await this._putKey([key, 'complete']) 39 | return { complete: true } 40 | } 41 | const ret = {} 42 | if (missing.size) ret.missing = missing 43 | if (incomplete.size) ret.incomplete = incomplete 44 | return ret 45 | } 46 | 47 | async _indexLinks (cid, block) { 48 | if (cid.code 
=== 0x55) return 49 | const key = cid.toString('base32') 50 | const ops = [] 51 | const seen = new Set() 52 | for (const [, link] of block.reader().links()) { 53 | const linkKey = link.toString('base32') 54 | if (seen.has(linkKey)) continue 55 | seen.add(linkKey) 56 | ops.push(this._putKey([linkKey, 'link-to', key])) 57 | ops.push(this._putKey([key, 'link-from', linkKey])) 58 | } 59 | await Promise.all(ops) 60 | return seen 61 | } 62 | 63 | async _indexComplete (cid, seen = new Set()) { 64 | const key = cid.toString('base32') 65 | const completeKeys = Array.from(seen.values()).map(key => [key, 'complete']) 66 | const completed = await Promise.all(completeKeys.map(key => this._hasKey(key))) 67 | const complete = completed.reduce((x, y) => x && y, true) 68 | if (complete) await this._putKey([key, 'complete']) 69 | } 70 | 71 | async _putBlock (block) { 72 | const cid = await block.cid() 73 | if (await this.has(cid)) return 74 | const seen = await this._indexLinks(cid, block) 75 | await this._put([cid.toString('base32'), 'encode'], block.encodeUnsafe()) 76 | await this._indexComplete(cid, seen) 77 | } 78 | 79 | _hasBlock (cid) { 80 | return this._hasKey([cid.toString('base32'), 'encode']) 81 | } 82 | 83 | async _getBlock (cid) { 84 | const key = cid.toString('base32') 85 | const data = await this._getKey([key, 'encode']) 86 | return Block.create(data, cid) 87 | } 88 | } 89 | 90 | return KVStore 91 | } 92 | -------------------------------------------------------------------------------- /src/stores/inmemory.js: -------------------------------------------------------------------------------- 1 | const create = Block => { 2 | const { CID } = Block 3 | class Missing extends Error { 4 | get statusCode () { 5 | return 404 6 | } 7 | } 8 | 9 | class InMemory { 10 | constructor () { 11 | this.storage = new Map() 12 | this.links = { to: new Map(), from: new Map() } 13 | this.complete = new Set() 14 | this.depthLimit = 1024 15 | } 16 | 17 | async graph (cid, depth = 1024, missing 
= new Set(), incomplete = new Set(), skips = new Set()) {
      // returns the graph information for the given CID
      const key = cid.toString('base32')

      if (skips.has(key)) return
      skips.add(key)
      if (this.complete.has(key)) return { complete: true }

      if (!(await this.has(cid))) {
        missing.add(key)
        return { missing }
      }
      // 0x55 is the raw codec: raw blocks carry no links, so they are
      // complete by definition.
      if (cid.code === 0x55) return { complete: true }

      if (depth < 0) {
        incomplete.add(key)
        return { incomplete }
      }

      // Classify each outgoing link: already complete, missing, incomplete
      // (depth budget exhausted), or recurse one level deeper.
      for (const linkKey of this.links.from.get(key).keys()) {
        if (this.complete.has(linkKey)) continue
        if (!this.links.from.has(linkKey)) {
          missing.add(linkKey)
          continue
        }
        if (depth < 1) {
          incomplete.add(linkKey)
          continue
        }
        await this.graph(CID.from(linkKey), depth - 1, missing, incomplete, skips)
      }
      // A node with no missing/incomplete descendants is cached as complete.
      if (missing.size === 0 && incomplete.size === 0) {
        this.complete.add(key)
        return { complete: true }
      }
      const ret = {}
      if (missing.size) ret.missing = missing
      if (incomplete.size) ret.incomplete = incomplete
      return ret
    }

    // Record the link topology of `block` in the to/from indexes and mark it
    // complete when every child is already complete.
    _index (cid, block) {
      const key = cid.toString('base32')
      if (this.links.from.has(key)) {
        return // already indexed this block
      }
      const _from = new Set()
      this.links.from.set(key, _from)
      if (cid.code === 0x55) return
      let complete = true
      for (const [, link] of block.reader().links()) {
        const linkKey = link.toString('base32')
        if (!this.links.to.has(linkKey)) this.links.to.set(linkKey, new Set())
        this.links.to.get(linkKey).add(key)
        _from.add(linkKey)
        if (!this.complete.has(linkKey)) complete = false
      }
      if (complete) this.complete.add(key)
    }

    // Raw storage write; keys are base32 CID strings.
    _put (cid, block) {
      this.storage.set(cid.toString('base32'), block)
    }

    // Store a block and index its links.
    async put (block) {
      const cid = await block.cid()
      this._put(cid, block)
      this._index(cid, block)
    }

    has
(cid) { 88 | const key = cid.toString('base32') 89 | if (!this.links.from.has(key)) { 90 | return false 91 | } else { 92 | const length = this.storage.get(key).encodeUnsafe().length 93 | return new Promise(resolve => resolve({ length })) 94 | } 95 | } 96 | 97 | async get (cid) { 98 | const key = cid.toString('base32') 99 | const value = this.storage.get(key) 100 | if (!value) throw new Missing(`Do not have ${key} in store`) 101 | return value 102 | } 103 | } 104 | 105 | return (...args) => new InMemory(...args) 106 | } 107 | 108 | export default create 109 | -------------------------------------------------------------------------------- /src/bare.js: -------------------------------------------------------------------------------- 1 | import createDatabase from './database.js' 2 | import bent from 'bent' 3 | import createStores from './stores/index.js' 4 | import createUpdaters from './updaters/index.js' 5 | 6 | const getJSON = bent('json') 7 | 8 | const isHttp = id => { 9 | if (typeof id !== 'string') return false 10 | return id.startsWith('http://') || id.startsWith('https://') 11 | } 12 | 13 | export default (Block, opts = {}) => { 14 | const { lfs, fileUpdater, commit } = opts 15 | const { CID } = Block 16 | const database = createDatabase(Block) 17 | const stores = createStores(Block) 18 | const updaters = createUpdaters(Block) 19 | const getInfo = async (id, ...args) => { 20 | const info = await getJSON(id) 21 | if (!id.endsWith('/')) id += '/' 22 | const rel = str => (new URL(str, id)).toString() 23 | const store = await stores.from(rel(info.blockstore), ...args) 24 | const updater = await updaters.from(id, rel(info.updater), ...args) 25 | return { info, store, updater } 26 | } 27 | const mklfs = async (id, ...args) => { 28 | const { repo, user, updateFile, token, blockstoreFile, disableCache } = id['git+lfs'] 29 | const store = await lfs(blockstoreFile, repo, user, token, disableCache) 30 | const updater = await fileUpdater(updateFile /* c8 ignore next */ 
|| './root.cid') 31 | return { store, updater } 32 | } 33 | const open = async (id, ...args) => { 34 | if (id === 'github-action') { 35 | const store = await lfs() 36 | const updater = await fileUpdater('./root.cid') 37 | updater.onUpdate = commit 38 | return database(updater.root, store, updater) 39 | } 40 | if (isHttp(id)) { 41 | const { info, store, updater } = await getInfo(id, ...args) 42 | if (!info.root) throw new Error('Database has not been created') 43 | return database(CID.from(info.root), store, updater, ...args) 44 | } else if (typeof id === 'object') { 45 | let { root, store, updater } = id 46 | if (id.leveldown || id.s3 || id.browser) { 47 | store = await stores.from(id, ...args) 48 | updater = await updaters.kv(store, id.updateKey) 49 | root = await updater.root 50 | } else if (id['git+lfs']) { 51 | const resp = await mklfs(id) 52 | store = resp.store 53 | updater = resp.updater 54 | root = updater.root 55 | } 56 | return database(root, store, updater, ...args) 57 | } 58 | throw new Error('Not implemented') /* c8 ignore next */ 59 | } 60 | const create = async (id, ...args) => { 61 | if (id === 'github-action') { 62 | const store = await lfs() 63 | const updater = await fileUpdater('./root.cid', { commit: true }) 64 | updater.onUpdate = commit 65 | return database.create(store, updater) 66 | } 67 | if (isHttp(id)) { 68 | const { info, store, updater } = await getInfo(id, ...args) 69 | if (info.root) throw new Error('Database already created') 70 | return database.create(store, updater, ...args) 71 | } else { 72 | let store 73 | let updater 74 | if (id.leveldown || id.s3 || id.browser) { 75 | store = await stores.create(id, ...args) 76 | updater = await updaters.kv(store, id.updateKey) 77 | } else if (id['git+lfs']) { 78 | const resp = await mklfs(id) 79 | store = resp.store 80 | updater = resp.updater 81 | } else { 82 | store = await stores.create(id, ...args) 83 | updater = await updaters.create(id, ...args) 84 | } 85 | return 
database.create(store, updater, ...args) 86 | } 87 | } 88 | return { create, open } 89 | } 90 | -------------------------------------------------------------------------------- /test/test-updaters.js: -------------------------------------------------------------------------------- 1 | /* globals before, describe, it */ 2 | import dagdb from '../src/index.js' 3 | import memdown from 'memdown' 4 | import createS3 from './lib/mock-s3.js' 5 | import { deepStrictEqual as same, ok } from 'assert' 6 | 7 | const test = it 8 | 9 | const updateTests = create => { 10 | test('basics', async () => { 11 | let db = await create() 12 | const oldcid = db.root 13 | db = await db.set('hello', 'world').update() 14 | same(await db.get('hello'), 'world') 15 | db = await dagdb.open({ ...db, root: db.root }) 16 | same(await db.get('hello'), 'world') 17 | same(db.root, await db.updater.root) 18 | 19 | db = await dagdb.open({ ...db, root: oldcid }) 20 | await db.set('hello', 'world') 21 | db = await db.commit() 22 | await db.set('hello', 'another') 23 | db = await db.update() 24 | same(await db.get('hello'), 'another') 25 | 26 | db = await dagdb.open({ ...db, root: oldcid }) 27 | await db.set('hello', 'nope') 28 | let threw = true 29 | try { 30 | db = await db.update() 31 | threw = false 32 | } catch (e) { 33 | if (!e.message.includes('Conflict, databases contain conflicting mutations')) throw e 34 | } 35 | ok(threw) 36 | }) 37 | } 38 | 39 | const openTests = mkopts => { 40 | test('open', async () => { 41 | const opts = mkopts() 42 | let db = await dagdb.create(opts) 43 | await db.set('hello', 'world') 44 | db = await db.update() 45 | same(await db.get('hello'), 'world') 46 | if (db.store.close) await db.store.close() 47 | db = await dagdb.open(opts) 48 | same(await db.get('hello'), 'world') 49 | same(db.root, await db.updater.root) 50 | 51 | await db.set('hello', 'world2') 52 | db = await db.update() 53 | 54 | if (db.store.close) await db.store.close() 55 | db = await dagdb.open(opts) 56 | 
same(await db.get('hello'), 'world2') 57 | same(db.root, await db.updater.root) 58 | 59 | // if (db.store.close) await db.store.close() 60 | }) 61 | } 62 | 63 | const addTests = mkopts => { 64 | updateTests(() => dagdb.create(mkopts())) 65 | openTests(mkopts) 66 | } 67 | 68 | describe('inmem', () => { 69 | updateTests(() => dagdb.create('inmem')) 70 | }) 71 | 72 | const rand = () => Math.random().toString() 73 | 74 | describe('level memdown', () => { 75 | const mkopts = () => ({ leveldown: memdown(rand()) }) 76 | addTests(mkopts) 77 | }) 78 | 79 | describe('s3', () => { 80 | const mkopts = () => ({ s3: createS3() }) 81 | addTests(mkopts) 82 | }) 83 | 84 | if (process.browser) { 85 | describe('browser', () => { 86 | addTests(() => ({ browser: true, updateKey: rand() })) 87 | }) 88 | } else if (process.GITHUB_WORKFLOW) { 89 | let tempy 90 | before(async () => { 91 | tempy = (await import('tempy')).default 92 | }) 93 | describe('git+lfs', function () { 94 | this.timeout(60 * 1000) 95 | addTests(() => { 96 | const blockstoreFile = tempy.file({ name: 'blockstore.ipld-lfs' }) 97 | const updateFile = tempy.file({ name: 'root.cid' }) 98 | 99 | const opts = { blockstoreFile, updateFile } 100 | return { 'git+lfs': opts } 101 | }) 102 | }) 103 | describe('git+lfs no lru', function () { 104 | this.timeout(60 * 1000) 105 | openTests(() => { 106 | const blockstoreFile = tempy.file({ name: 'blockstore.ipld-lfs' }) 107 | const updateFile = tempy.file({ name: 'root.cid' }) 108 | 109 | const opts = { blockstoreFile, updateFile, disableCache: true } 110 | return { 'git+lfs': opts } 111 | }) 112 | }) 113 | } 114 | -------------------------------------------------------------------------------- /src/http/handlers.js: -------------------------------------------------------------------------------- 1 | const jsonHeaders = body => { 2 | return { 'content-length': body.length, 'content-type': 'application/json' } 3 | } 4 | 5 | const blockstore = (Block, store) => { 6 | const { CID } = 
Block 7 | const handler = async opts => { 8 | let { method, path, params, body } = opts 9 | if (!method) throw new Error('Missing required param "method"') 10 | if (!path) throw new Error('Missing required param "path"') 11 | while (path[0] === '/') path = path.slice(1) 12 | if (method === 'PUT' && !body) { 13 | throw new Error('Missing required param "body"') 14 | } 15 | if (method === 'GET') { 16 | if (path.includes('/graph')) { 17 | const [key] = path.split('/') 18 | let { depth } = params 19 | if (typeof depth !== 'undefined') { 20 | if (depth > store.depthLimit) throw new Error(`Depth is greater than max limit of ${store.depthLimit}`) 21 | } else { 22 | depth = store.depthLimit 23 | } 24 | const result = await store.graph(CID.from(key), depth) 25 | if (result.missing) result.missing = Array.from(result.missing) 26 | if (result.incomplete) result.incomplete = Array.from(result.incomplete) 27 | const body = Buffer.from(JSON.stringify({ result, depth })) 28 | return { headers: jsonHeaders(body), body } 29 | } else { 30 | if (path.includes('/')) throw new Error('Path for block retreival must not include slashes') 31 | const cid = CID.from(path) 32 | let block 33 | try { 34 | block = await store.get(cid) 35 | } catch (e) { 36 | // we don't have intentional errors in our own cod 37 | if (e.statusCode === 404) return { statusCode: 404 } /* c8 ignore next */ 38 | /* c8 ignore next */ 39 | throw e 40 | /* c8 ignore next */ 41 | } 42 | const body = block.encodeUnsafe() 43 | return { headers: { 'content-length': body.length }, body } 44 | } 45 | } else if (method === 'PUT') { 46 | if (path.includes('/')) throw new Error('Path for block writes must not include slashes') 47 | const cid = CID.from(path) 48 | const block = Block.create(body, cid) 49 | await block.validate() 50 | await store.put(block) 51 | return { statusCode: 201 } 52 | } else if (method === 'HEAD') { 53 | if (path.includes('/')) throw new Error('Path for block retreival must not include slashes') 54 | 
const cid = CID.from(path) 55 | const has = await store.has(cid) 56 | if (!has) return { statusCode: 404 } 57 | if (has.length) return { headers: { 'content-length': has.length } } 58 | return { statusCode: 200 } 59 | } else { 60 | const e = new Error(`Unknown method "${method}"`) 61 | e.statusCode = 405 62 | throw e 63 | } /* c8 ignore next */ 64 | } 65 | return handler 66 | } 67 | 68 | const info = (store, updater, ext) => async opts => { 69 | const root = await updater.root 70 | const info = { 71 | root: root ? root.toString('base32') : root, 72 | blockstore: 'blockstore' 73 | } 74 | if (updater.update) info.updater = 'updater' 75 | const body = Buffer.from(JSON.stringify({ ...info, ...ext })) 76 | const ret = { headers: jsonHeaders(body), body } 77 | return ret 78 | } 79 | 80 | const updater = (Block, updater) => async opts => { 81 | const { CID } = Block 82 | if (!opts.params.new) throw new Error('Missing required param "new"') 83 | opts.params.new = CID.from(opts.params.new) 84 | if (opts.params.old) opts.params.old = CID.from(opts.params.old) 85 | const cid = await updater.update(opts.params.new, opts.params.old) 86 | const body = Buffer.from(JSON.stringify({ root: cid.toString('base32') })) 87 | const ret = { headers: jsonHeaders(body), body } 88 | return ret 89 | } 90 | export { blockstore, info, updater } 91 | -------------------------------------------------------------------------------- /Schema.md: -------------------------------------------------------------------------------- 1 | # [HAMT](https://github.com/ipld/specs/blob/master/data-structures/hashmap.md) 2 | 3 | This schema is only here for reference. It's not actually verified or validated 4 | by this library because we use [an existing implementation](https://github.com/rvagg/iamap). 
5 | 6 | ```sh 7 | type HashMapData [Element] 8 | type HashMapRoot struct { 9 | hashAlg String 10 | bucketSize Int 11 | map Bytes 12 | data HashMapData 13 | } 14 | 15 | # Non-root node layout 16 | type HashMapNode struct { 17 | map Bytes 18 | data HashMapData 19 | } 20 | 21 | type Element union { 22 | | HashMapNode map 23 | | &HashMapNode link 24 | | Bucket list 25 | } representation kinded 26 | 27 | type Bucket [ BucketEntry ] 28 | 29 | type BucketEntry struct { 30 | key Bytes 31 | value Value 32 | } representation tuple 33 | 34 | type Value union { 35 | | Bool bool 36 | | String string 37 | | Bytes bytes 38 | | Int int 39 | | Float float 40 | | Map map 41 | | List list 42 | | Link link 43 | } representation kinded 44 | ``` 45 | 46 | # Key Value Database 47 | 48 | ```sh 49 | type DeleteOperation struct { 50 | key String 51 | } 52 | type SetOperation struct { 53 | key String 54 | val Link 55 | } 56 | type Operation union { 57 | | SetOperation "set" 58 | | DeleteOperation "del" 59 | } representation keyed 60 | 61 | type OpList [&Operation] 62 | type TransactionV1 struct { 63 | head &HashMapRoot 64 | ops OpList 65 | prev nullable &Transaction 66 | } 67 | type Transaction union { 68 | | TransactionV1 "kv-v1" 69 | } representation keyed 70 | ``` 71 | # Indexes 72 | 73 | ```sh 74 | type PropIndex struct { 75 | count int 76 | sum int 77 | map &HashMapRoot # map of { DBKey: PropValue } 78 | } 79 | type Props { String: &PropIndex } 80 | type Indexes struct { 81 | props &Props 82 | } 83 | ``` 84 | 85 | # DagDB Type 86 | 87 | This is a massive union of all the publicly visible types used by 88 | DagDB. There are many points where **any** of these types can be 89 | used as a value. For instance, a `Database` can also be used as 90 | a value almost anywhere and will be cast into the correct class 91 | instance when retrieved by user facing APIs. 
92 | 93 | ```sh 94 | type DagDBTypeV1 union { 95 | | &Database "database" 96 | | &Transaction "transaction" 97 | | &FlexibleByteLayout "fbl" 98 | } representation keyed 99 | 100 | type DagDBType union { 101 | | DagDBTypeV1 "v1" 102 | } representation keyed 103 | 104 | type DagDB struct { 105 | type DagDBType (rename "_dagdb") 106 | } 107 | ``` 108 | 109 | DagDB's value loader walks decoded blocks and replaces the referenced 110 | values with instances of the relevant types and validates them against 111 | the referenced schemas. This effectively means that `"_dagdb"` is a 112 | reserved key *at any depth* with very few exceptions. 113 | 114 | # Remote 115 | 116 | ```sh 117 | type FullMerge bool # must be true 118 | type KeyedMerge string 119 | type RemoteMergeStrategy union { 120 | | FullMerge "full" 121 | | KeyedMerge "keyed" 122 | } representation keyed 123 | 124 | type RemoteSource struct { 125 | type String 126 | } 127 | 128 | type RemoteInfo struct { 129 | strategy RemoteMergeStrategy 130 | source RemoteSource 131 | } 132 | 133 | type Remote struct { 134 | info &RemoteInfo 135 | head &HashMapRoot # remote KV root 136 | merged &HashMapRoot # local KV root 137 | } 138 | ``` 139 | 140 | # Database 141 | 142 | A `Store` is a set of `Key Value Database`'s. 143 | 144 | One is a set of Tags, the other stores actions 145 | attached to each of those tags. 146 | 147 | Actions are anything that is triggered or built 148 | from the data associated with the named tags, like 149 | secondary indexes. 150 | 151 | Typically, actions are not pushed to a remote, they are local 152 | to the device/store. Tags typically **are** pushed to a remote. 
153 | 154 | ```sh 155 | type DatabaseV1 struct { 156 | kv &Transaction 157 | indexes &Indexes # Values type is Index 158 | remotes &HashMapRoot # Values type is Remote 159 | } 160 | 161 | type Database union { 162 | | DatabaseV1 "db-v1" 163 | } representation keyed 164 | ``` 165 | 166 | `tags` is a key/value pairing of the named key to the 167 | transaction root of related kv store. 168 | 169 | `actions` is a key/value pairing of a tag name to 170 | related actions. Each action is also named, so the 171 | value here is a HashMapRoot. 172 | -------------------------------------------------------------------------------- /src/values.js: -------------------------------------------------------------------------------- 1 | import { readonly, isCID } from './utils.js' 2 | import createFBL from '@ipld/fbl/bare' 3 | 4 | const types = {} 5 | 6 | export default (Block) => { 7 | const { CID } = Block 8 | const fbl = createFBL(Block, 'dag-cbor') 9 | 10 | const fblDecoder = (root, store) => { 11 | const get = store.get.bind(store) 12 | const iter = fbl.read(root, get) 13 | iter._dagdb = { v1: 'fbl', root } 14 | iter.encode = () => (async function * (r) { yield r })(root) 15 | iter.read = (...args) => fbl.read(root, get, ...args) 16 | return iter 17 | } 18 | 19 | const _typeEncoder = async function * (gen, set) { 20 | let last 21 | for await (const block of gen) { 22 | // testing these guards would require an implementation w/ a schema 23 | // for a bad implementation, which would be bad to ship with. 
24 | /* c8 ignore next */ 25 | if (last) throw new Error('Encoder yield after non-block') 26 | if (Block.isBlock(block)) { 27 | yield block 28 | continue 29 | } 30 | last = block 31 | } 32 | /* c8 ignore next */ 33 | if (typeof last === 'undefined') throw new Error('Encoder did not yield a root node') 34 | set(last) 35 | } 36 | const typeEncoder = gen => { 37 | const encoder = _typeEncoder(gen, last => { encoder.last = last }) 38 | return encoder 39 | } 40 | const decode = (value, store, updater) => { 41 | // decode only accepts IPLD Data Model 42 | // this method is expected to accept decoded Block data directly 43 | // and it can't work with any special types. 44 | if (CID.asCID(value)) { 45 | value = CID.asCID(value) 46 | const link = async () => { 47 | if (link.block) return link.block 48 | const block = await store.get(value) 49 | readonly(link, 'block', block) 50 | return decode(block.decode()) 51 | } 52 | readonly(link, 'cid', value) 53 | readonly(link, 'equals', _cid => { 54 | if (typeof _cid === 'function' && _cid.cid) { 55 | _cid = _cid.cid 56 | } 57 | return value.equals(_cid) 58 | }) 59 | return link 60 | } 61 | if (value && typeof value === 'object') { 62 | if (value._dagdb) { 63 | const type = Object.keys(value._dagdb.v1)[0] 64 | return types[type](value._dagdb.v1[type], store, updater) 65 | } else if (Array.isArray(value)) { 66 | for (let i = 0; i < value.length; i++) { 67 | value[i] = decode(value[i], store, updater) 68 | } 69 | } else { 70 | for (const [key, _value] of Object.entries(value)) { 71 | value[key] = decode(_value, store, updater) 72 | } 73 | } 74 | } 75 | return value 76 | } 77 | 78 | const encode = async function * (value) { 79 | // Encoders, both here and in special types, are 80 | // async generators that yield as many blocks as 81 | // they like as long as the very last thing they 82 | // yield is NOT a Block. This is so that the final 83 | // root of each each node can be embedded in a parent. 
84 | // This contract MUST be adhered to by all special types. 85 | 86 | if (value === null) { 87 | yield value 88 | return 89 | } 90 | if (typeof value === 'object' && typeof value.then === 'function') value = await value 91 | if (isCID(value)) { 92 | yield value 93 | return 94 | } 95 | if (Block.isBlock(value)) { 96 | yield value.cid() 97 | return 98 | } 99 | if (typeof value === 'function' && value.cid) { 100 | if (value.block) yield value.block 101 | yield value.cid 102 | return 103 | } 104 | if (value[Symbol.asyncIterator] && !value._dagdb) { 105 | let last 106 | for await (const block of fbl.from(value)) { 107 | yield block 108 | last = block 109 | } 110 | yield { _dagdb: { v1: { fbl: await last.cid() } } } 111 | return 112 | } 113 | // fast return non-objects 114 | if (typeof value !== 'object') { 115 | yield value 116 | } else { 117 | if (value._dagdb) { 118 | const encoder = typeEncoder(value.encode()) 119 | yield * encoder 120 | const type = value._dagdb.v1 121 | const typeDef = {} 122 | typeDef[type] = encoder.last 123 | yield { _dagdb: { v1: typeDef } } 124 | } else if (Array.isArray(value)) { 125 | const ret = [] 126 | for (let i = 0; i < value.length; i++) { 127 | const encoder = typeEncoder(encode(value[i])) 128 | yield * encoder 129 | ret[i] = await encoder.last 130 | } 131 | yield ret 132 | } else { 133 | const ret = {} 134 | for (const [key, _value] of Object.entries(value)) { 135 | const encoder = typeEncoder(encode(_value)) 136 | yield * encoder 137 | ret[key] = await encoder.last 138 | } 139 | yield ret 140 | } 141 | } 142 | } 143 | 144 | const register = (type, fn) => { types[type] = fn } 145 | register('fbl', fblDecoder) 146 | 147 | return { encode, decode, register } 148 | } 149 | -------------------------------------------------------------------------------- /test/test-replication.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | import Block from '@ipld/block/defaults' 
3 | import createInmemory from '../src/stores/inmemory.js' 4 | import createKV from '../src/kv.js' 5 | import assert from 'assert' 6 | 7 | const inmem = createInmemory(Block) 8 | const kv = createKV(Block) 9 | const test = it 10 | const same = assert.deepStrictEqual 11 | 12 | const create = async (_kv = kv) => { 13 | const store = inmem() 14 | const kvs = await _kv.create(store) 15 | return { store, kvs } 16 | } 17 | 18 | const basics = async kv => { 19 | const { kvs } = await create(kv) 20 | await kvs.set('test', { hello: 'world' }) 21 | let obj = await kvs.get('test') 22 | same(obj, { hello: 'world' }) 23 | const root = await kvs.commit() 24 | obj = await root.get('test') 25 | same(obj, { hello: 'world' }) 26 | return root 27 | } 28 | 29 | describe('test-replication', () => { 30 | test('basic replication', async () => { 31 | const base = await basics() 32 | const { kvs } = await create() 33 | await kvs.pull(base) 34 | same(await kvs.get('test'), { hello: 'world' }) 35 | }) 36 | 37 | test('deduplication', async () => { 38 | let [one, two] = await Promise.all([basics(), basics()]) 39 | await one.set('test2', { foo: 'bar' }) 40 | one = await one.commit() 41 | await two.set('test2', { foo: 'bar' }) 42 | await two.pull(one) 43 | two = await two.commit() 44 | assert.ok(one.root.equals(two.root)) 45 | await one.pull(two) 46 | same(one.cache.size, 0) 47 | }) 48 | 49 | test('pull only latest change to key', async () => { 50 | let [one, two] = await Promise.all([basics(), basics()]) 51 | await one.set('test2', { foo: 'bar' }) 52 | one = await one.commit() 53 | await two.set('test2', { foo: 'bar' }) 54 | await two.pull(one) 55 | two = await two.commit() 56 | assert.ok(one.root.equals(two.root)) 57 | await one.pull(two) 58 | same(one.cache.size, 0) 59 | 60 | // test longer history reconciliation 61 | await one.set('test2', { foo: 2 }) 62 | one = await one.commit() 63 | await two.set('test2', { foo: 1 }) 64 | two = await two.commit() 65 | await two.set('test2', { foo: 2 }) 66 
| two = await two.commit() 67 | await two.set('test2', { foo: 3 }) 68 | two = await two.commit() 69 | await await one.pull(two) 70 | same(one.cache.size, 1) 71 | same(await one.get('test2'), { foo: 3 }) 72 | one = await one.commit() 73 | // transaction root should not match 74 | assert(!one.root.equals(two.root)) 75 | const head1 = await one.getHead() 76 | const head2 = await two.getHead() 77 | assert(head1.equals(head2)) 78 | }) 79 | 80 | const remoteWins = (locals, remotes) => remotes[remotes.length - 1] 81 | 82 | test('remote wins conflict', async () => { 83 | let [one, two] = await Promise.all([basics(), basics()]) 84 | await one.set('test2', { foo: 'bar' }) 85 | one = await one.commit() 86 | await two.set('test2', { foo: 'bar' }) 87 | await two.pull(one) 88 | two = await two.commit() 89 | 90 | // overwrite cached values 91 | await one.set('test3', { foo: 'bar' }) 92 | one = await one.commit() 93 | await two.set('test3', { foo: 1 }) 94 | try { 95 | await two.pull(one) 96 | } catch (e) { 97 | if (!e.message.startsWith('Conflict')) throw e 98 | } 99 | await two.pull(one, [], remoteWins) 100 | same(await one.get('test3'), await two.get('test3')) 101 | await two.del('test3') 102 | await two.pull(one, [], remoteWins) 103 | same(two.cache.size, 1) 104 | same(await one.get('test3'), await two.get('test3')) 105 | 106 | // overwrite written conlict 107 | const _two = two 108 | await two.set('test3', { foo: 3 }) 109 | two = await two.commit() 110 | await two.pull(one, [], remoteWins) 111 | same(two.cache.size, 1) 112 | same(await one.get('test3'), await two.get('test3')) 113 | 114 | two = _two 115 | await two.set('test3', { foo: 7 }) 116 | two = await two.commit() 117 | await two.del('test3') 118 | two = await two.commit() 119 | await two.pull(one, [], remoteWins) 120 | same(two.cache.size, 1) 121 | same(await one.get('test3'), await two.get('test3')) 122 | 123 | two = await two.commit() 124 | await two.set('two', { x: 1 }) 125 | two = await two.commit() 126 | await 
two.set('test3', { foo: 20 }) 127 | two = await two.commit() 128 | 129 | await one.set('test3', { foo: 22 }) 130 | one = await one.commit() 131 | await one.del('test3') 132 | one = await one.commit() 133 | 134 | two.set('test3', { foo: 51 }) 135 | await two.pull(one, [], remoteWins) 136 | same(two.cache.size, 1) 137 | same(await two.has('test3'), false) 138 | two = await two.commit() 139 | same(await two.has('test3'), false) 140 | }) 141 | 142 | test('no-conflict on duplicate values', async () => { 143 | let [one, two] = await Promise.all([basics(), basics()]) 144 | await one.set('one', { foo: 'bar' }) 145 | await two.set('two', { foo: 'bar' }) 146 | one = await one.commit() 147 | two = await two.commit() 148 | await one.set('two', { foo: 'bar' }) 149 | one = await one.commit() 150 | await one.pull(two) 151 | same(one.cache.size, 0) 152 | await two.pull(one) 153 | same(two.cache.size, 1) 154 | same(await two.get('one'), { foo: 'bar' }) 155 | }) 156 | 157 | test('no-conflict on duplicate values w/ history', async () => { 158 | let [one, two] = await Promise.all([basics(), basics()]) 159 | await one.set('one', { foo: 'bar' }) 160 | await two.set('two', { foo: 'bar' }) 161 | one = await one.commit() 162 | two = await two.commit() 163 | 164 | await one.set('two', { foo: 'bar' }) 165 | one = await one.commit() 166 | let i = 0 167 | while (i < 10) { 168 | await one.set('two', { foo: i }) 169 | one = await one.commit() 170 | i++ 171 | } 172 | await two.pull(one) 173 | same(two.cache.size, 2) 174 | same(await two.get('two'), { foo: 9 }) 175 | }) 176 | }) 177 | -------------------------------------------------------------------------------- /test/test-values.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | import Block from '@ipld/block/defaults' 3 | import mainModule from '../src/bare.js' 4 | import createInmemory from '../src/stores/inmemory.js' 5 | import createKV from '../src/kv.js' 6 | import assert 
from 'assert' 7 | 8 | const { CID } = Block 9 | const main = mainModule(Block) 10 | const inmem = createInmemory(Block) 11 | const kv = createKV(Block) 12 | const test = it 13 | const same = assert.deepStrictEqual 14 | const { toString } = Block.multiformats.bytes 15 | 16 | const create = async (_kv = kv) => { 17 | const store = inmem() 18 | const kvs = await _kv.create(store) 19 | return { store, kvs } 20 | } 21 | 22 | const basics = async kv => { 23 | const { kvs } = await create(kv) 24 | await kvs.set('test', { hello: 'world' }) 25 | let obj = await kvs.get('test') 26 | same(obj, { hello: 'world' }) 27 | const latest = await kvs.commit() 28 | obj = await latest.get('test') 29 | same(obj, { hello: 'world' }) 30 | return latest 31 | } 32 | 33 | describe('test-values', () => { 34 | test('string', async () => { 35 | let db = await basics(kv) 36 | await db.set('foo', 'bar') 37 | same(await db.get('foo'), 'bar') 38 | db = await db.commit() 39 | same(await db.get('foo'), 'bar') 40 | }) 41 | 42 | test('links', async () => { 43 | let db = await basics(kv) 44 | const block = await db.getBlock('test') 45 | const linked = { test: await block.cid() } 46 | await db.set('linked', linked) 47 | let val = await db.get('linked') 48 | same(await val.test(), { hello: 'world' }) 49 | db = await db.commit() 50 | val = await db.get('linked') 51 | same(await val.test(), { hello: 'world' }) 52 | 53 | // test w/ getter block caching 54 | await val.test() 55 | await db.set('fromLinked', val) 56 | val = await db.get('fromLinked') 57 | same(await val.test(), { hello: 'world' }) 58 | db = await db.commit() 59 | val = await db.get('fromLinked') 60 | same(await val.test(), { hello: 'world' }) 61 | 62 | // test w/o getter block caching 63 | val = await db.get('fromLinked') 64 | await db.set('fromLinkedNoCache', val) 65 | same(await val.test(), { hello: 'world' }) 66 | db = await db.commit() 67 | val = await db.get('fromLinked') 68 | same(await val.test(), { hello: 'world' }) 69 | 70 | // test 
equals 71 | val = val.test 72 | same(val.equals(val), true) 73 | same(val.equals(val.cid), true) 74 | const newlink = await db.link(Math.random()) 75 | same(val.equals(newlink), false) 76 | same(val.equals(newlink.cid), false) 77 | }) 78 | 79 | test('blocks as links', async () => { 80 | let db = await basics(kv) 81 | const block = await db.getBlock('test') 82 | const linked = { test: block } 83 | await db.set('linked', linked) 84 | let val = await db.get('linked') 85 | same(await val.test(), { hello: 'world' }) 86 | db = await db.commit() 87 | val = await db.get('linked') 88 | same(await val.test(), { hello: 'world' }) 89 | }) 90 | 91 | test('arrays', async () => { 92 | const db = await basics(kv) 93 | const block = await db.getBlock('test') 94 | await db.set('arr', ['asdf', { hello: 'world' }, block]) 95 | 96 | const val = await db.get('arr') 97 | assert.ok(Array.isArray(val)) 98 | same(val[0], 'asdf') 99 | same(val[1], { hello: 'world' }) 100 | same(await val[2](), { hello: 'world' }) 101 | }) 102 | 103 | test('kv in kv', async () => { 104 | const db = await basics(kv) 105 | await db.set('kvInKv', db) 106 | const cid = db.root 107 | assert.ok(cid.equals((await db.get('kvInKv')).root)) 108 | const latest = await db.commit() 109 | assert.ok((await latest.get('kvInKv')).root.equals(db.root)) 110 | 111 | await db.set('dirty', 'test') 112 | await latest.set('with-cache', db) 113 | // the latest changes would be commited so it wouldn't 114 | // match the old transaction root 115 | assert(!db.root.equals(await latest.get('with-cache'))) 116 | }) 117 | 118 | const load = async function * (...args) { 119 | yield * args 120 | } 121 | 122 | test('stream fbl', async () => { 123 | const iter = load(Buffer.from('1234'), Buffer.from('5678')) 124 | let db = await basics() 125 | await db.set('test', { stream: iter }) 126 | db = await db.commit() 127 | const obj = await db.get('test') 128 | let expected = ['1234', '5678'] 129 | for await (const buffer of obj.stream) { 130 | 
same(expected.shift(), toString(buffer)) 131 | } 132 | await db.set('test2', { two: obj.stream }) 133 | db = await db.commit() 134 | const obj2 = await db.get('test2') 135 | expected = ['1234', '5678'] 136 | for await (const buffer of obj2.two) { 137 | same(expected.shift(), toString(buffer)) 138 | } 139 | for await (const buffer of obj2.two.read(0, 2)) { 140 | same(toString(buffer), '12') 141 | } 142 | }) 143 | 144 | test('dagdb in dagdb w/ empty', async () => { 145 | let db = await main.create('inmem') 146 | let val = await db.empty() 147 | await db.set('subdb', val) 148 | db = await db.update() 149 | val = await db.get('subdb') 150 | same(val.root.equals((await db.empty()).root), true) 151 | }) 152 | 153 | test('encode null', async () => { 154 | let db = await main.create('inmem') 155 | await db.set('test', null) 156 | db = await db.update() 157 | same(await db.get('test'), null) 158 | }) 159 | 160 | test('stream fbl w/ filter', async () => { 161 | const iter = load(Buffer.from('1234'), Buffer.from('5678')) 162 | let db = await basics() 163 | const filter = async block => { 164 | const cid = await block.cid() 165 | if (cid.code === 0x55) return false 166 | return true 167 | } 168 | await db.set('test', { stream: iter }, { filter }) 169 | db = await db.commit() 170 | const obj = await db.get('test') 171 | const root = obj.stream._dagdb.root 172 | await db.set('test2', { two: obj.stream }) 173 | db = await db.commit() 174 | const obj2 = await db.get('test2') 175 | same(obj2.two._dagdb.root.equals(root), true) 176 | for (const key of db.store.storage.keys()) { 177 | const cid = CID.from(key) 178 | if (cid.code === 0x55) throw new Error('Raw block should have been filered out') 179 | } 180 | }) 181 | }) 182 | -------------------------------------------------------------------------------- /src/database.js: -------------------------------------------------------------------------------- 1 | import { fromBlock, validate, readonly } from './utils.js' 2 | import 
createKV from './kv.js' 3 | import createStores from './stores/index.js' 4 | import createUpdaters from './updaters/index.js' 5 | import createIndexes from './indexes.js' 6 | import createRemotes from './remotes.js' 7 | 8 | const databaseEncoder = async function * (db) { 9 | const kv = await db._kv 10 | if (kv.pending) throw new Error('Cannot use database with pending transactions as a value') 11 | // TODO: refactor to support encoding dirty databases 12 | // if you look at how .commit() is implemented in kv, it's 13 | // implemented as a generator and then flattened for the 14 | // .commit() method. that approach should be used here as well, 15 | // with all the commit() and latest() implementations below done as 16 | // generators that can be used by this encoder so that you can 17 | // use databases with pending transactions as values. 18 | yield db.root 19 | } 20 | 21 | const decorateUpdate = (db, p) => { 22 | p.update = () => p.then(() => db.update()) 23 | return p 24 | } 25 | 26 | const proxy = async function * (kv, key, ...args) { 27 | kv = await kv 28 | yield * kv[key](...args) 29 | } 30 | 31 | export default (Block) => { 32 | const { CID } = Block 33 | const toBlock = (value, className) => Block.encoder(validate(value, className), 'dag-cbor') 34 | const kv = createKV(Block) 35 | const stores = createStores(Block) 36 | const updaters = createUpdaters(Block) 37 | const remoteExports = createRemotes(Block, stores, toBlock, updaters, CID) 38 | const indexExports = createIndexes(Block, fromBlock, kv) 39 | const { Remotes, Remote } = remoteExports 40 | const { Indexes } = indexExports 41 | 42 | class Database { 43 | constructor (root, store, updater) { 44 | readonly(this, 'root', root) 45 | this.store = store 46 | this.updater = updater 47 | readonly(this, '_kv', this.getRoot().then(r => kv(r['db-v1'].kv, store))) 48 | this.remotes = new Remotes(this) 49 | this.indexes = new Indexes(this) 50 | this.Block = Block 51 | } 52 | 53 | register (type, fn) { 54 | return 
kv.register(type, fn) 55 | } 56 | 57 | get _dagdb () { 58 | return { v1: 'database' } 59 | } 60 | 61 | get dirty () { 62 | return this._kv.then(kv => kv.pending) 63 | } 64 | 65 | async commit () { 66 | let kv = await this._kv 67 | if (kv.pending) { 68 | kv = await kv.commit() 69 | } 70 | const root = await this.getRoot() 71 | root['db-v1'].kv = kv.root 72 | root['db-v1'].remotes = await this.remotes.update(kv.root) 73 | root['db-v1'].indexes = await this.indexes.update(kv.root) 74 | const block = toBlock(root, 'Database') 75 | await this.store.put(block) 76 | return new Database(await block.cid(), this.store, this.updater) 77 | } 78 | 79 | async getHead () { 80 | const kv = await this._kv 81 | return kv.getHead() 82 | } 83 | 84 | async pull (...args) { 85 | const kv = await this._kv 86 | return kv.pull(...args) 87 | } 88 | 89 | async has (...args) { 90 | const kv = await this._kv 91 | return kv.has(...args) 92 | } 93 | 94 | async get (...args) { 95 | const kv = await this._kv 96 | return kv.get(...args) 97 | } 98 | 99 | async _set (...args) { 100 | const kv = await this._kv 101 | return kv.set(...args) 102 | } 103 | 104 | set (...args) { 105 | return decorateUpdate(this, this._set(...args)) 106 | } 107 | 108 | async _del (...args) { 109 | const kv = await this._kv 110 | return kv.del(...args) 111 | } 112 | 113 | all (...args) { 114 | return proxy(this._kv, 'all', ...args) 115 | } 116 | 117 | del (...args) { 118 | return decorateUpdate(this, this._del(...args)) 119 | } 120 | 121 | async link (...args) { 122 | const kv = await this._kv 123 | return kv.link(...args) 124 | } 125 | 126 | async getRoot () { 127 | if (!this._rootBlock) { 128 | readonly(this, '_rootBlock', this.store.get(this.root)) 129 | } 130 | const block = await this._rootBlock 131 | return fromBlock(block, 'Database') 132 | } 133 | 134 | async info () { 135 | const kv = await this._kv 136 | return { size: await kv.size() } 137 | } 138 | 139 | async _merge (db) { 140 | const kv = await this._kv 141 | 
await kv.pull(db) 142 | } 143 | 144 | merge (db) { 145 | return decorateUpdate(this, this._merge(db)) 146 | } 147 | 148 | encode () { 149 | return databaseEncoder(this) 150 | } 151 | 152 | async update (...args) { 153 | let latest = await this.commit() 154 | let prevRoot = this.root 155 | if (latest.root.equals(this.root)) { 156 | prevRoot = null 157 | } 158 | let current = await this.updater.update(latest.root, prevRoot) 159 | while (!latest.root.equals(current)) { 160 | await this.merge(new Database(current, this.store, this.updater)) 161 | latest = await this.commit() 162 | current = await this.updater.update(latest.root, current, ...args) 163 | } 164 | return new Database(current, this.store, this.updater) 165 | } 166 | 167 | async empty () { 168 | const block = await empty 169 | return new Database(await block.cid(), this.store) 170 | } 171 | } 172 | 173 | remoteExports.Database = Database 174 | const exports = (...args) => new Database(...args) 175 | 176 | // empty database 177 | const empty = (async () => { 178 | const [kvBlock, hamtBlock] = await Promise.all(kv.empties) 179 | const [kvCID, hamtCID] = await Promise.all([kvBlock.cid(), hamtBlock.cid()]) 180 | const [indexesBlock] = await Promise.all(indexExports.empties) 181 | const indexes = await indexesBlock.cid() 182 | return toBlock({ 'db-v1': { kv: kvCID, remotes: hamtCID, indexes } }, 'Database') 183 | })() 184 | exports.empties = [empty, ...kv.empties, ...indexExports.empties] 185 | exports.create = async (store, updater) => { 186 | const empties = await Promise.all(exports.empties) 187 | await Promise.all(empties.map(b => store.put(b))) 188 | const root = await empties[0].cid() 189 | await updater.update(root) 190 | return new Database(root, store, updater) 191 | } 192 | exports.Remote = Remote 193 | kv.register('database', exports) 194 | return exports 195 | } 196 | -------------------------------------------------------------------------------- /test/test-errors.js: 
-------------------------------------------------------------------------------- 1 | /* globals it, describe, before, after */ 2 | import Block from '@ipld/block/defaults' 3 | import createInmemory from '../src/stores/inmemory.js' 4 | import createKV from '../src/kv.js' 5 | import assert from 'assert' 6 | import bent from 'bent' 7 | 8 | const inmem = createInmemory(Block) 9 | const kv = createKV(Block) 10 | const test = it 11 | const same = assert.deepStrictEqual 12 | const { createGet } = createKV 13 | 14 | const create = async (_kv = kv) => { 15 | const store = inmem() 16 | const kvs = await _kv.create(store) 17 | return { store, kvs } 18 | } 19 | 20 | const basics = async kv => { 21 | const { kvs } = await create(kv) 22 | await kvs.set('test', { hello: 'world' }) 23 | let obj = await kvs.get('test') 24 | same(obj, { hello: 'world' }) 25 | const root = await kvs.commit() 26 | obj = await root.get('test') 27 | same(obj, { hello: 'world' }) 28 | return root 29 | } 30 | 31 | describe('test-errors', () => { 32 | test('createGet CID check', async () => { 33 | const get = createGet() 34 | let threw = true 35 | try { 36 | await get('test') 37 | threw = false 38 | } catch (e) { 39 | if (!e.message.startsWith('Must be CID')) throw e 40 | } 41 | assert.ok(threw) 42 | }) 43 | 44 | test('has() message exception', async () => { 45 | const db = await basics() 46 | db.store.get = cid => { 47 | throw new Error('test error') 48 | } 49 | let threw = true 50 | try { 51 | await db.has('test') 52 | threw = false 53 | } catch (e) { 54 | if (!e.message.startsWith('test error')) throw e 55 | } 56 | assert.ok(threw) 57 | }) 58 | 59 | test('no common root', async () => { 60 | const original = await basics() 61 | await original.set('another', 'asdf') 62 | const db = await original.commit() 63 | const rootBlock = await db.store.get(db.root) 64 | const rootObject = rootBlock.decode() 65 | rootObject['kv-v1'].prev = null 66 | const _root = Block.encoder(rootObject, 'dag-cbor') 67 | const 
store = inmem() 68 | await store.put(_root) 69 | const db2 = kv(await _root.cid(), store) 70 | let threw = true 71 | try { 72 | await original.pull(db2) 73 | threw = false 74 | } catch (e) { 75 | if (!e.message.startsWith('No common root between databases')) throw e 76 | } 77 | assert.ok(threw) 78 | }) 79 | 80 | test('not found', async () => { 81 | const db = await basics() 82 | let threw = true 83 | try { 84 | await db.get('notfound') 85 | threw = false 86 | } catch (e) { 87 | const match = 'No key named "notfound"' 88 | if (e.message !== match) throw e 89 | if (e.statusCode !== 404) throw e 90 | } 91 | assert.ok(threw) 92 | await db.del('test') 93 | try { 94 | await db.get('test') 95 | threw = false 96 | } catch (e) { 97 | const match = 'No key named "test"' 98 | if (e.message !== match) throw e 99 | if (e.statusCode !== 404) throw e 100 | } 101 | assert.ok(threw) 102 | }) 103 | 104 | if (!process.browser) { 105 | describe('http', () => { 106 | const store = inmem() 107 | test('http storage handler', async () => { 108 | const handler = (await import('../src/http/handlers.js')).blockstore(Block, store) 109 | const getError = async (...args) => { 110 | try { 111 | await handler(...args) 112 | } catch (e) { 113 | return e 114 | } 115 | throw new Error('function did not throw') 116 | } 117 | const missing = Block.encoder({ test: Math.random() }, 'dag-cbor') 118 | const missingKey = (await missing.cid()).toString('base32') 119 | 120 | let e = await getError({}) 121 | same(e.message, 'Missing required param "method"') 122 | e = await getError({ method: 'GET' }) 123 | same(e.message, 'Missing required param "path"') 124 | e = await getError({ method: 'PUT', path: '/' }) 125 | same(e.message, 'Missing required param "body"') 126 | e = await getError({ method: 'GET', path: '/cid/graph', params: { depth: 1025 } }) 127 | same(e.message, 'Depth is greater than max limit of 1024') 128 | e = await getError({ method: 'GET', path: 'cid/blah/nope/breaks' }) 129 | same(e.message, 
'Path for block retreival must not include slashes') 130 | e = await getError({ method: 'PUT', path: '/cid/nope', body: Buffer.from('') }) 131 | same(e.message, 'Path for block writes must not include slashes') 132 | e = await getError({ method: 'PUT', path: `/${missingKey}`, body: Buffer.from('adsf') }) 133 | same(e.message, 'Buffer does not match hash') 134 | e = await getError({ method: 'HEAD', path: '/cid/nope' }) 135 | same(e.message, 'Path for block retreival must not include slashes') 136 | e = await getError({ method: 'OPTIONS', path: '/test' }) 137 | same(e.message, 'Unknown method "OPTIONS"') 138 | same(e.statusCode, 405) 139 | 140 | const notfound = (await Block.encoder(Buffer.from('asdf'), 'raw').cid()).toString('base32') 141 | e = await handler({ method: 'GET', path: `/${notfound}` }) 142 | same(e.statusCode, 404) 143 | }) 144 | const getPort = () => Math.floor(Math.random() * (9000 - 8000) + 8000) 145 | const port = getPort() 146 | let handler 147 | let server 148 | let closed 149 | 150 | before(() => new Promise((resolve, reject) => { 151 | return (new Promise(resolve => resolve())).then(async () => { 152 | handler = (await import('../src/http/nodejs.js')).default.blockstore(Block, store) 153 | server = (await import('http')).createServer(handler) 154 | closed = new Promise(resolve => server.once('close', resolve)) 155 | server.listen(port, e => { 156 | if (e) return reject(e) 157 | resolve() 158 | }) 159 | }) 160 | })) 161 | 162 | const headNotFound = bent(404, 'string', `http://localhost:${port}`) 163 | 164 | describe('blockstore', async () => { 165 | test('not found', async () => { 166 | const block = Block.encoder(Buffer.from('test'), 'raw') 167 | const cid = await block.cid() 168 | const msg = await headNotFound(`/${cid.toString('base32')}`) 169 | same(msg, '') 170 | }) 171 | }) 172 | 173 | after(() => { 174 | server.close() 175 | return closed 176 | }) 177 | }) 178 | } 179 | }) 180 | 
-------------------------------------------------------------------------------- /test/test-kv.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | import Block from '@ipld/block/defaults' 3 | import createInmemory from '../src/stores/inmemory.js' 4 | import createKV from '../src/kv.js' 5 | import assert from 'assert' 6 | import { isCID } from '../src/utils.js' 7 | 8 | const inmem = createInmemory(Block) 9 | const kv = createKV(Block) 10 | const test = it 11 | const same = assert.deepStrictEqual 12 | 13 | const create = async (_kv = kv) => { 14 | const store = inmem() 15 | const kvs = await _kv.create(store) 16 | return { store, kvs } 17 | } 18 | 19 | let hello 20 | 21 | const basics = async kv => { 22 | const { kvs } = await create(kv) 23 | await kvs.set('test', { hello: 'world' }) 24 | let obj = await kvs.get('test') 25 | same(obj, { hello: 'world' }) 26 | hello = await kvs.getBlock('test') 27 | const latest = await kvs.commit() 28 | obj = await latest.get('test') 29 | same(obj, { hello: 'world' }) 30 | return latest 31 | } 32 | 33 | describe('test-kv', () => { 34 | test('basic set/get', async () => { 35 | await basics(kv) 36 | }) 37 | 38 | test('basic overwrite', async () => { 39 | let { kvs } = await create() 40 | await kvs.set('test', { foo: 0 }) 41 | kvs = await kvs.commit() 42 | same(await kvs.get('test'), { foo: 0 }) 43 | await kvs.set('test', { foo: 1 }) 44 | same(await kvs.get('test'), { foo: 1 }) 45 | await kvs.commit() 46 | same(await kvs.get('test'), { foo: 1 }) 47 | }) 48 | 49 | test('not found', async () => { 50 | const { kvs } = await create() 51 | try { 52 | await kvs.get('test') 53 | } catch (e) { 54 | if (e.kvs !== 'notfound') { 55 | throw e 56 | } 57 | } 58 | }) 59 | 60 | test('basic removal', async () => { 61 | let { kvs } = await create() 62 | await kvs.set('test', { foo: 0 }) 63 | same(await kvs.has('test'), true) 64 | kvs = await kvs.commit() 65 | same(await kvs.get('test'), 
{ foo: 0 }) 66 | await kvs.del('test') 67 | same(await kvs.has('test'), false) 68 | kvs = await kvs.commit() 69 | same(await kvs.has('test'), false) 70 | }) 71 | 72 | test('iter over all in db', async () => { 73 | const kvs = await basics() 74 | assert.ok(await kvs.has('test')) 75 | for await (const [key, link] of kvs.all({ decode: false })) { 76 | assert.ok(isCID(link)) 77 | same(key, 'test') 78 | assert.ok(link.equals(await hello.cid())) 79 | } 80 | for await (const [key, block] of kvs.all({ blocks: true, decode: false })) { 81 | assert.ok(Block.isBlock(block)) 82 | same(key, 'test') 83 | assert.ok((await block.cid()).equals(await block.cid())) 84 | } 85 | await kvs.set('test2', { test: 1 }) 86 | let _link 87 | for await (const [key, link] of kvs.all({ decode: false })) { 88 | if (key === 'test') continue 89 | same(key, 'test2') 90 | const block = await kvs.getBlock('test2') 91 | _link = link 92 | assert.ok(link.equals(await block.cid())) 93 | } 94 | for await (const [key, block] of kvs.all({ blocks: true, decode: false })) { 95 | if (key === 'test') continue 96 | same(key, 'test2') 97 | assert.ok(_link.equals(await block.cid())) 98 | } 99 | const kvs2 = await kvs.commit() 100 | await kvs.del('test2') 101 | for await (const [key] of kvs.all({ decode: false })) { 102 | if (key === 'test2') throw new Error('deleted key is in all iterator') 103 | } 104 | kvs2.del('test2') 105 | for await (const [key, link] of kvs2.all({ decode: false })) { 106 | assert.ok(isCID(link)) 107 | same(key, 'test') 108 | assert.ok(link.equals(await hello.cid())) 109 | } 110 | 111 | for await (const [, obj] of kvs2.all()) { 112 | same(obj, { hello: 'world' }) 113 | } 114 | }) 115 | 116 | test('write a block as a value', async () => { 117 | const block = Block.encoder({ hello: 'world' }, 'dag-cbor') 118 | let db = await basics() 119 | await db.set('testblock', block) 120 | same(await db.get('testblock'), { hello: 'world' }) 121 | db = await db.commit() 122 | same(await db.get('testblock'), { 
hello: 'world' }) 123 | }) 124 | 125 | test('commit no transactions', async () => { 126 | const db = await basics() 127 | let threw = true 128 | try { 129 | await db.commit() 130 | threw = false 131 | } catch (e) { 132 | if (!e.message.startsWith('There are no pending operations to commit')) throw e 133 | } 134 | assert.ok(threw) 135 | }) 136 | 137 | test('size', async () => { 138 | const db = await basics() 139 | same(await db.size(), 1) 140 | await db.set('test1', { hello: 'world' }) 141 | same(await db.size(), 2) 142 | await db.set('test2', { hello: 'world' }) 143 | same(await db.size(), 3) 144 | await db.del('test') 145 | same(await db.size(), 2) 146 | await db.del('missing') 147 | same(await db.size(), 2) 148 | }) 149 | 150 | test('link', async () => { 151 | let db = await basics() 152 | const data = { test: Math.random() } 153 | const link = await db.link(data) 154 | same(await link(), data) 155 | assert.ok(link.cid.equals((await db.link(data)).cid)) 156 | await db.set('test2', { two: link }) 157 | db = await db.commit() 158 | const obj = await db.get('test2') 159 | assert.ok(obj.two.cid.equals(link.cid)) 160 | }) 161 | 162 | test('getRef', async () => { 163 | const db = await basics() 164 | const link = await db.getRef('test') 165 | await db.set('copy', await db.get('test')) 166 | same(link, await db.getRef('copy') /* pending */) 167 | let threw = true 168 | try { 169 | await db.getRef('nope') 170 | threw = false 171 | } catch (e) { 172 | if (e.message !== 'No key named "nope"') throw e 173 | } 174 | same(threw, true) 175 | }) 176 | 177 | test('since', async () => { 178 | let db = await basics() 179 | const db1 = kv(db.root, db.store) 180 | await db.set('changed', { hello: 'world' }) 181 | db = await db.commit() 182 | await db.set('changed', { hello: 'world', pass: true }) 183 | db = await db.commit() 184 | const since = await db.since(db1.root) 185 | same(since.length, 1) 186 | const [block] = since 187 | let decoded = block.decodeUnsafe() 188 | 
same(decoded.set.key, 'changed') 189 | const value = await db.store.get(decoded.set.val) 190 | decoded = value.decodeUnsafe() 191 | same(decoded, { hello: 'world', pass: true }) 192 | }) 193 | 194 | test('object set, multiget', async () => { 195 | let db = await basics() 196 | let threw = true 197 | try { 198 | await db.set('asdf') 199 | threw = false 200 | } catch (e) { 201 | if (e.message !== 'Missing value') throw e 202 | } 203 | same(threw, true) 204 | await db.set({ t1: 1, t2: 2 }) 205 | db = await db.commit() 206 | const [t1, t2] = await db.get(['t1', 't2']) 207 | same(t1, 1) 208 | same(t2, 2) 209 | }) 210 | }) 211 | -------------------------------------------------------------------------------- /src/schema.js: -------------------------------------------------------------------------------- 1 | export default 2 | { 3 | types: { 4 | HashMapData: { 5 | kind: 'list', 6 | valueType: 'Element' 7 | }, 8 | HashMapRoot: { 9 | kind: 'struct', 10 | fields: { 11 | hashAlg: { 12 | type: 'String' 13 | }, 14 | bucketSize: { 15 | type: 'Int' 16 | }, 17 | map: { 18 | type: 'Bytes' 19 | }, 20 | data: { 21 | type: 'HashMapData' 22 | } 23 | }, 24 | representation: { 25 | map: {} 26 | } 27 | }, 28 | HashMapNode: { 29 | kind: 'struct', 30 | fields: { 31 | map: { 32 | type: 'Bytes' 33 | }, 34 | data: { 35 | type: 'HashMapData' 36 | } 37 | }, 38 | representation: { 39 | map: {} 40 | } 41 | }, 42 | Element: { 43 | kind: 'union', 44 | representation: { 45 | kinded: { 46 | map: 'HashMapNode', 47 | link: { 48 | kind: 'link', 49 | expectedType: 'HashMapNode' 50 | }, 51 | list: 'Bucket' 52 | } 53 | } 54 | }, 55 | Bucket: { 56 | kind: 'list', 57 | valueType: 'BucketEntry' 58 | }, 59 | BucketEntry: { 60 | kind: 'struct', 61 | fields: { 62 | key: { 63 | type: 'Bytes' 64 | }, 65 | value: { 66 | type: 'Value' 67 | } 68 | }, 69 | representation: { 70 | tuple: {} 71 | } 72 | }, 73 | Value: { 74 | kind: 'union', 75 | representation: { 76 | kinded: { 77 | bool: 'Bool', 78 | string: 'String', 79 
| bytes: 'Bytes', 80 | int: 'Int', 81 | float: 'Float', 82 | map: 'Map', 83 | list: 'List', 84 | link: 'Link' 85 | } 86 | } 87 | }, 88 | DeleteOperation: { 89 | kind: 'struct', 90 | fields: { 91 | key: { 92 | type: 'String' 93 | } 94 | }, 95 | representation: { 96 | map: {} 97 | } 98 | }, 99 | SetOperation: { 100 | kind: 'struct', 101 | fields: { 102 | key: { 103 | type: 'String' 104 | }, 105 | val: { 106 | type: 'Link' 107 | } 108 | }, 109 | representation: { 110 | map: {} 111 | } 112 | }, 113 | Operation: { 114 | kind: 'union', 115 | representation: { 116 | keyed: { 117 | set: 'SetOperation', 118 | del: 'DeleteOperation' 119 | } 120 | } 121 | }, 122 | OpList: { 123 | kind: 'list', 124 | valueType: { 125 | kind: 'link', 126 | expectedType: 'Operation' 127 | } 128 | }, 129 | TransactionV1: { 130 | kind: 'struct', 131 | fields: { 132 | head: { 133 | type: { 134 | kind: 'link', 135 | expectedType: 'HashMapRoot' 136 | } 137 | }, 138 | ops: { 139 | type: 'OpList' 140 | }, 141 | prev: { 142 | type: { 143 | kind: 'link', 144 | expectedType: 'Transaction' 145 | }, 146 | nullable: true 147 | } 148 | }, 149 | representation: { 150 | map: {} 151 | } 152 | }, 153 | Transaction: { 154 | kind: 'union', 155 | representation: { 156 | keyed: { 157 | 'kv-v1': 'TransactionV1' 158 | } 159 | } 160 | }, 161 | PropIndex: { 162 | kind: 'struct', 163 | fields: { 164 | count: { 165 | type: 'int' 166 | }, 167 | sum: { 168 | type: 'int' 169 | }, 170 | map: { 171 | type: { 172 | kind: 'link', 173 | expectedType: 'HashMapRoot' 174 | } 175 | } 176 | }, 177 | representation: { 178 | map: {} 179 | } 180 | }, 181 | Props: { 182 | kind: 'map', 183 | keyType: 'String', 184 | valueType: { 185 | kind: 'link', 186 | expectedType: 'PropIndex' 187 | } 188 | }, 189 | Indexes: { 190 | kind: 'struct', 191 | fields: { 192 | props: { 193 | type: { 194 | kind: 'link', 195 | expectedType: 'Props' 196 | } 197 | } 198 | }, 199 | representation: { 200 | map: {} 201 | } 202 | }, 203 | DagDBTypeV1: { 204 | kind: 
'union', 205 | representation: { 206 | keyed: { 207 | database: { 208 | kind: 'link', 209 | expectedType: 'Database' 210 | }, 211 | transaction: { 212 | kind: 'link', 213 | expectedType: 'Transaction' 214 | }, 215 | fbl: { 216 | kind: 'link', 217 | expectedType: 'FlexibleByteLayout' 218 | } 219 | } 220 | } 221 | }, 222 | DagDBType: { 223 | kind: 'union', 224 | representation: { 225 | keyed: { 226 | v1: 'DagDBTypeV1' 227 | } 228 | } 229 | }, 230 | DagDB: { 231 | kind: 'struct', 232 | fields: { 233 | type: { 234 | type: 'DagDBType' 235 | } 236 | }, 237 | representation: { 238 | map: { 239 | fields: { 240 | type: { 241 | rename: '_dagdb' 242 | } 243 | } 244 | } 245 | } 246 | }, 247 | FullMerge: { 248 | kind: 'bool' 249 | }, 250 | KeyedMerge: { 251 | kind: 'string' 252 | }, 253 | RemoteMergeStrategy: { 254 | kind: 'union', 255 | representation: { 256 | keyed: { 257 | full: 'FullMerge', 258 | keyed: 'KeyedMerge' 259 | } 260 | } 261 | }, 262 | RemoteSource: { 263 | kind: 'struct', 264 | fields: { 265 | type: { 266 | type: 'String' 267 | } 268 | }, 269 | representation: { 270 | map: {} 271 | } 272 | }, 273 | RemoteInfo: { 274 | kind: 'struct', 275 | fields: { 276 | strategy: { 277 | type: 'RemoteMergeStrategy' 278 | }, 279 | source: { 280 | type: 'RemoteSource' 281 | } 282 | }, 283 | representation: { 284 | map: {} 285 | } 286 | }, 287 | Remote: { 288 | kind: 'struct', 289 | fields: { 290 | info: { 291 | type: { 292 | kind: 'link', 293 | expectedType: 'RemoteInfo' 294 | } 295 | }, 296 | head: { 297 | type: { 298 | kind: 'link', 299 | expectedType: 'HashMapRoot' 300 | } 301 | }, 302 | merged: { 303 | type: { 304 | kind: 'link', 305 | expectedType: 'HashMapRoot' 306 | } 307 | } 308 | }, 309 | representation: { 310 | map: {} 311 | } 312 | }, 313 | DatabaseV1: { 314 | kind: 'struct', 315 | fields: { 316 | kv: { 317 | type: { 318 | kind: 'link', 319 | expectedType: 'Transaction' 320 | } 321 | }, 322 | indexes: { 323 | type: { 324 | kind: 'link', 325 | expectedType: 'Indexes' 
326 | } 327 | }, 328 | remotes: { 329 | type: { 330 | kind: 'link', 331 | expectedType: 'HashMapRoot' 332 | } 333 | } 334 | }, 335 | representation: { 336 | map: {} 337 | } 338 | }, 339 | Database: { 340 | kind: 'union', 341 | representation: { 342 | keyed: { 343 | 'db-v1': 'DatabaseV1' 344 | } 345 | } 346 | } 347 | } 348 | } 349 | -------------------------------------------------------------------------------- /src/remotes.js: -------------------------------------------------------------------------------- 1 | import { Lazy } from './utils.js' 2 | import * as hamt from './hamt.js' 3 | import bent from 'bent' 4 | import createReplicate from './stores/replicate.js' 5 | 6 | const getJSON = bent('json') 7 | 8 | export default (Block, stores, toBlock, updaters, CID) => { 9 | const replicate = createReplicate(Block) 10 | const exports = {} 11 | 12 | const http = async (info, push = true) => { 13 | const resp = await getJSON(info.url) 14 | if (push && !resp.updater) throw new Error('Remote must have updater to use push') 15 | let root 16 | if (resp.root) root = CID.from(resp.root) 17 | let url = new URL(resp.blockstore, info.url) 18 | const store = await stores.from(url.toString()) 19 | let updater 20 | if (resp.updater) { 21 | url = new URL(resp.updater, info.url) 22 | updater = await updaters.from(info.url, url.toString()) 23 | } 24 | return { store, updater, root } 25 | } 26 | 27 | class Remote { 28 | constructor (obj, db) { 29 | this.db = db 30 | this.rootDecode = obj 31 | this.kv = db._kv 32 | } 33 | 34 | get info () { 35 | if (!this._info) { 36 | this._info = this.db.store.get(this.rootDecode.info).then(block => block.decodeUnsafe()) 37 | } 38 | return this._info 39 | } 40 | 41 | async push () { 42 | const info = await this.info 43 | if (info.source.type === 'local') { 44 | throw new Error('Local remotes cannot push') 45 | } 46 | if (!info.strategy.full) { 47 | throw new Error('Can only push databases using full merge strategy') 48 | } 49 | const local = 
this.rootDecode.head 50 | 51 | const { store, updater, root } = await registry[info.source.type](info.source, true) 52 | 53 | const db = new exports.Database(root, store) 54 | const head = await db.getHead() 55 | if (!head.equals(local)) { 56 | throw new Error('Remote has updated since last pull, re-pull before pushing') 57 | } 58 | await replicate(this.db.root, this.db.store, store) 59 | const cid = await updater.update(this.db.root, root) 60 | if (!cid.equals(this.db.root)) { 61 | throw new Error('Remote has updated since last pull, re-pull before pushing') 62 | } 63 | } 64 | 65 | async pull (resolver) { 66 | const info = await this.info 67 | if (info.source.type === 'local') { 68 | throw new Error('Local remotes must use pullDatabase directly') 69 | } 70 | const { store, updater, root } = await registry[info.source.type](info.source, false) 71 | 72 | const database = new exports.Database(root, store, updater) 73 | if (this.rootDecode.head) { 74 | if (this.rootDecode.head.equals(await database.getHead())) { 75 | return root // no changes since last merge 76 | } 77 | } 78 | return this.pullDatabase(database, resolver) 79 | } 80 | 81 | async pullDatabase (database, resolver) { 82 | const info = await this.info 83 | const strategy = info.strategy 84 | const known = [] 85 | if (this.rootDecode.head) { 86 | known.push(this.rootDecode.head) 87 | known.push(this.rootDecode.merged) 88 | } 89 | let cids 90 | if (strategy.full) { 91 | cids = await this.fullMerge(database, known, resolver) 92 | } else if (strategy.keyed) { 93 | cids = await this.keyedMerge(database, strategy.keyed, known, resolver) 94 | } /* c8 ignore next */ else { 95 | /* c8 ignore next */ 96 | throw new Error(`Unknown strategy '${JSON.stringify(strategy)}'`) 97 | /* c8 ignore next */ 98 | } 99 | for (const cid of cids) { 100 | await replicate(cid, database.store, this.db.store) 101 | } 102 | } 103 | 104 | async keyedMerge (db, key, known, resolver) { 105 | const kv = await this.kv 106 | if (!(await 
kv.has(key))) { 107 | await kv.set(key, db) 108 | } else { 109 | const prev = await kv.get(key) 110 | const prevHead = await prev.getHead() 111 | const dbHead = await db.getHead() 112 | if (prevHead.equals(dbHead)) return [] 113 | await prev.pull(db, known, resolver) 114 | const latest = await prev.commit() 115 | await kv.set(key, latest) 116 | } 117 | const latest = await kv.commit() 118 | this.rootDecode.head = await db.getHead() 119 | this.rootDecode.merged = await latest.getHead() 120 | return [latest.root] 121 | } 122 | 123 | async fullMerge (db, known, resolver) { 124 | const kv = await this.kv 125 | await kv.pull(db, known, resolver) 126 | this.rootDecode.head = await db.getHead() 127 | this.rootDecode.merged = null 128 | return kv.pendingTransactions() 129 | } 130 | 131 | async update (latest) { 132 | if (this.rootDecode.merged === null) { 133 | const trans = await this.db.store.get(latest) 134 | const head = trans.decode()['kv-v1'].head 135 | this.rootDecode.merged = head 136 | } 137 | return toBlock(this.rootDecode, 'Remote') 138 | } 139 | } 140 | 141 | class Remotes extends Lazy { 142 | get prop () { 143 | return 'remotes' 144 | } 145 | 146 | async add (name, info = {}) { 147 | if (typeof info === 'string') { 148 | if (info.startsWith('http://') || /* c8 ignore next */ info.startsWith('https://')) { 149 | info = { type: 'http', url: info } 150 | } else { 151 | throw new Error('Only http URL can be used as strings') 152 | } 153 | info = { source: info } 154 | } 155 | const defaults = { strategy: { full: true } } 156 | info = { ...defaults, ...info } 157 | const block = toBlock(info, 'RemoteInfo') 158 | await this.db.store.put(block) 159 | const remote = new Remote({ info: await block.cid() }, this.db) 160 | return this.pull(name, remote) 161 | } 162 | 163 | async addLocal (name, strategy = { full: true }) { 164 | const info = { strategy, source: { type: 'local' } } 165 | const block = toBlock(info, 'RemoteInfo') 166 | await this.db.store.put(block) 167 | 
const remote = new Remote({ info: await block.cid() }, this.db) 168 | this.pending.set(name, remote) 169 | return remote 170 | } 171 | 172 | get (name) { 173 | return this._get(name, Remote, 'Remote') 174 | } 175 | 176 | async pull (name, remote, resolver) { 177 | if (!remote) { 178 | remote = await this.get(name) 179 | } 180 | await remote.pull(resolver) 181 | this.pending.set(name, remote) 182 | } 183 | 184 | push (name, ...args) { 185 | return this.get(name).then(r => r.push(...args)) 186 | } 187 | 188 | async update (latest) { 189 | if (!this.pending.size) return this._root 190 | const ops = [] 191 | const promises = [] 192 | for (const [key, remote] of this.pending.entries()) { 193 | // TODO: implement remote removal 194 | const block = await remote.update(latest) 195 | promises.push(this.db.store.put(block)) 196 | ops.push({ set: { key, val: await block.cid() } }) 197 | } 198 | let last 199 | const head = await this.db._kv.then(kv => kv.getHead()) 200 | const get = this.db.store.get.bind(this.db.store) 201 | for await (const block of hamt.bulk(head, ops, get, Block)) { 202 | last = block 203 | promises.push(this.db.store.put(block)) 204 | } 205 | await Promise.all(promises) 206 | return last.cid() 207 | } 208 | 209 | register (name, fn) { 210 | exports.register(name, fn) 211 | } 212 | } 213 | 214 | const registry = { } 215 | 216 | exports.Remote = Remote 217 | exports.Remotes = Remotes 218 | exports.register = (name, fn) => { registry[name] = fn } 219 | exports.register('http', http) 220 | return exports 221 | } 222 | -------------------------------------------------------------------------------- /test/test-database.js: -------------------------------------------------------------------------------- 1 | /* globals it, describe, before, after */ 2 | import Block from '@ipld/block/defaults' 3 | import createInmemory from '../src/stores/inmemory.js' 4 | import createUpdater from '../src/updaters/kv.js' 5 | import createDatabaseInterface from 
'../src/database.js' 6 | import createKV from './lib/mock-kv.js' 7 | import assert from 'assert' 8 | 9 | const database = createDatabaseInterface(Block) 10 | const test = it 11 | const same = assert.deepStrictEqual 12 | const inmem = createInmemory(Block) 13 | const { CID } = Block 14 | 15 | const create = async () => { 16 | const store = inmem() 17 | const updater = createUpdater(Block)(createKV()) 18 | const db = await database.create(store, updater) 19 | return { store, db, updater } 20 | } 21 | 22 | const basics = async (_create = create) => { 23 | const { db } = await _create() 24 | await db.set('test', { hello: 'world' }) 25 | let obj = await db.get('test') 26 | same(obj, { hello: 'world' }) 27 | const latest = await db.commit() 28 | obj = await latest.get('test') 29 | same(obj, { hello: 'world' }) 30 | same(await db.has('test'), true) 31 | same(await db.has('missing'), false) 32 | return latest 33 | } 34 | 35 | describe('test-database', () => { 36 | test('basic set/get', async () => { 37 | await basics() 38 | await basics() 39 | }) 40 | 41 | test('info', async () => { 42 | const db = await basics() 43 | same(await db.info(), { size: 1 }) 44 | }) 45 | 46 | test('manual transation', async () => { 47 | const latest = await basics() 48 | const db = database(latest.root, latest.store) 49 | same(await db.get('test'), { hello: 'world' }) 50 | }) 51 | 52 | test('links', async () => { 53 | const db = await basics() 54 | let link = db.link({ blah: true }) // use the promise 55 | await db.set('test2', { two: link }) 56 | const obj = await db.get('test2') 57 | link = await link 58 | same(await obj.two(), await link()) 59 | }) 60 | 61 | test('register custom type', async () => { 62 | let db = await basics() 63 | class Dog { 64 | constructor ({ name, age }) { 65 | this.name = name 66 | this.age = age 67 | } 68 | 69 | get _dagdb () { 70 | return { v1: 'dog' } 71 | } 72 | 73 | async * encode () { 74 | const block = Block.encoder({ ...this }, 'dag-cbor') 75 | yield block 76 
| yield block.cid() 77 | } 78 | 79 | static async decode (block) { 80 | return new Dog(block.decodeUnsafe()) 81 | } 82 | 83 | bark () { 84 | return `hello from "${this.name}"` 85 | } 86 | } 87 | 88 | const createDog = async (root, store) => { 89 | return Dog.decode(await store.get(root)) 90 | } 91 | 92 | db.register('dog', createDog) 93 | 94 | const dog = new Dog({ name: 'lucas', age: 8 }) 95 | await db.set('lucas', dog) 96 | db = await db.update() 97 | 98 | const lucas = await db.get('lucas') 99 | const bark = lucas.bark() 100 | same(bark, 'hello from "lucas"') 101 | }) 102 | 103 | test('update', async () => { 104 | let { db, updater } = await create() 105 | await db.set('test', { hello: 'world' }) 106 | db = await db.update() 107 | same(await db.get('test'), { hello: 'world' }) 108 | const root = CID.from(await updater.store._getKey(['root'])) 109 | assert.ok(root.equals(db.root)) 110 | }) 111 | 112 | test('update out of date root', async () => { 113 | let { db, updater } = await create() 114 | await db.set('test', { hello: 'world' }) 115 | await db.update() 116 | await db.set('test2', { foo: 'bar' }) 117 | db = await db.update() 118 | same(await db.get('test'), { hello: 'world' }) 119 | same(await db.get('test2'), { foo: 'bar' }) 120 | const root = CID.from(await updater.store._getKey(['root'])) 121 | assert.ok(root.equals(db.root)) 122 | }) 123 | 124 | test('concurrent updates', async () => { 125 | const { db } = await create() 126 | await db.set('test', { hello: 'world' }) 127 | const results = await Promise.all([db.update(), db.update(), db.update()]) 128 | const comp = (cid1, cid2) => cid1 && cid2 && cid1.equals(cid2) ? 
cid2 : false 129 | const equals = results.map(db => db.root).reduce(comp) 130 | assert.ok(equals) 131 | }) 132 | 133 | test('dirty database as value', async () => { 134 | const db = await basics() 135 | const val = await basics() 136 | await val.set('foo', 'bar') 137 | try { 138 | await db.set('val', val) 139 | throw new Error('Did not throw') 140 | } catch (e) { 141 | if (e.message !== 'Cannot use database with pending transactions as a value') throw e 142 | } 143 | }) 144 | 145 | test('all()', async () => { 146 | const db = await basics() 147 | for await (const [key, value] of db.all()) { 148 | same(value, { hello: 'world' }) 149 | same(key, 'test') 150 | } 151 | }) 152 | 153 | // errors 154 | 155 | /* 156 | test('error: update no changes', async () => { 157 | const { db } = await create() 158 | let threw = true 159 | try { 160 | await db.update() 161 | threw = false 162 | } catch (e) { 163 | if (e.message !== 'No changes to update') throw e 164 | } 165 | assert.ok(threw) 166 | }) 167 | */ 168 | 169 | test('error: empty updater write', async () => { 170 | const store = inmem() 171 | const db = await database.create(store, createUpdater(Block)(createKV())) 172 | const updater = createUpdater(Block)(createKV()) 173 | const empty = database(db.root, store, updater) 174 | await empty.set('test', { hello: 'world' }) 175 | let threw = true 176 | try { 177 | await empty.update() 178 | threw = false 179 | } catch (e) { 180 | if (e.message !== 'There is no previous root') throw e 181 | } 182 | assert.ok(threw) 183 | }) 184 | 185 | if (!process.browser) { 186 | let httpModule 187 | let getPort 188 | let stores 189 | let updaters 190 | let createHandler 191 | let handler 192 | before(async () => { 193 | httpModule = (await import('http')).default 194 | getPort = () => Math.floor(Math.random() * (9000 - 8000) + 8000) 195 | stores = {} 196 | updaters = {} 197 | 198 | createHandler = (await import('../src/http/nodejs.js')).default 199 | 200 | handler = async (req, res) => { 
201 | const [id] = req.url.split('/').filter(x => x) 202 | const store = stores[id] 203 | const updater = updaters[id] 204 | if (!store) throw new Error('Missing store') 205 | const _handler = createHandler(Block, store, updater) 206 | return _handler(req, res, '/' + id) 207 | } 208 | }) 209 | 210 | describe('http', () => { 211 | let port 212 | let server 213 | let closed 214 | let createDatabase 215 | let create 216 | before(async () => { 217 | port = getPort() 218 | server = httpModule.createServer(handler) 219 | closed = new Promise(resolve => server.once('close', resolve)) 220 | createDatabase = (await import('../src/index.js')).default 221 | create = async (opts) => { 222 | const id = Math.random().toString() 223 | const url = `http://localhost:${port}/${id}` 224 | stores[id] = inmem() 225 | updaters[id] = createUpdater(Block)(createKV()) 226 | return { db: await createDatabase.create(url) } 227 | } 228 | await new Promise((resolve, reject) => { 229 | server.listen(port, e => { 230 | if (e) return reject(e) 231 | resolve() 232 | }) 233 | }) 234 | }) 235 | test('basics', async () => { 236 | await basics(create) 237 | }) 238 | test('open', async () => { 239 | let db = await basics(create) 240 | db = await db.update() 241 | const db2 = await createDatabase.open(db.updater.infoUrl) 242 | assert.ok(db.root.equals(db2.root)) 243 | }) 244 | after(() => { 245 | server.close() 246 | return closed 247 | }) 248 | }) 249 | } 250 | }) 251 | -------------------------------------------------------------------------------- /test/test-indexes.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it */ 2 | import Block from '@ipld/block/defaults' 3 | import createInmemory from '../src/stores/inmemory.js' 4 | import createUpdater from '../src/updaters/kv.js' 5 | import createDatabaseInterface from '../src/database.js' 6 | import createKV from './lib/mock-kv.js' 7 | import assert from 'assert' 8 | 9 | const inmem = 
createInmemory(Block) 10 | const database = createDatabaseInterface(Block) 11 | const test = it 12 | const same = assert.deepStrictEqual 13 | 14 | const create = async (fixture) => { 15 | const store = inmem() 16 | const updater = createUpdater(Block)(createKV()) 17 | let db = await database.create(store, updater) 18 | if (fixture) { 19 | for (const [key, value] of Object.entries(fixture)) { 20 | await db.set(key, value) 21 | } 22 | db = db.update() 23 | } 24 | return db 25 | } 26 | 27 | const fixture = { 28 | test1: { one: 1, two: 2 }, 29 | test2: { two: 2, three: { four: 4 } } 30 | } 31 | const load = async () => { 32 | const db = await create(fixture) 33 | await db.indexes.props.add('one') 34 | await db.indexes.props.add('two') 35 | await db.indexes.props.add('three/four') 36 | return db 37 | } 38 | 39 | const entries = async (db, name) => { 40 | const results = [] 41 | for await (const result of db.indexes.props.entries(name)) { 42 | results.push(result) 43 | } 44 | return results 45 | } 46 | const sum = (db, name) => db.indexes.props.sum(name) 47 | const count = (db, name) => db.indexes.props.count(name) 48 | 49 | const sameEntries = async (db, name, comp) => { 50 | const ents = await entries(db, name) 51 | ents.forEach(ent => { delete ent.source }) 52 | same(ents, comp) 53 | } 54 | 55 | describe('test-indexes', () => { 56 | test('basic property index', async () => { 57 | const verify = async db => { 58 | await sameEntries(db, 'one', [{ prop: 'one', key: 'test1', value: 1 }]) 59 | const two = [ 60 | { prop: 'two', key: 'test1', value: 2 }, 61 | { prop: 'two', key: 'test2', value: 2 } 62 | ] 63 | await sameEntries(db, 'two', two) 64 | await sameEntries(db, 'three/four', [{ prop: 'three/four', key: 'test2', value: 4 }]) 65 | 66 | same(await count(db, 'one'), 1) 67 | same(await count(db, 'two'), 2) 68 | same(await count(db, 'three/four'), 1) 69 | same(await sum(db, 'one'), 1) 70 | same(await sum(db, 'two'), 4) 71 | same(await sum(db, 'three/four'), 4) 72 | } 73 | 
let db = await load() 74 | await verify(db) 75 | db = await load() 76 | db = await db.update() 77 | await verify(db) 78 | }) 79 | test('update property index', async () => { 80 | let db = await load() 81 | await db.set('test3', { two: 3 }) 82 | db = await db.update() 83 | same(await sum(db, 'two'), 7) 84 | same(await count(db, 'two'), 3) 85 | 86 | same(await count(db, 'one'), 1) 87 | same(await sum(db, 'one'), 1) 88 | await db.set('test3', { one: 1 }) 89 | await db.set('test4', { one: 'one' }) 90 | db = await db.update() 91 | 92 | same(await count(db, 'one'), 3) 93 | same(await count(db, 'two'), 2) 94 | same(await sum(db, 'one'), 2) 95 | same(await sum(db, 'two'), 4) 96 | }) 97 | test('non-object values', async () => { 98 | let db = await load() 99 | await db.set('string', 'test') 100 | // disabled, dag-cbor or block bug is blocking 101 | // await db.set('null', null) 102 | await db.set('true', true) 103 | await db.set('zoro', 0) 104 | db = await db.update() 105 | }) 106 | test('remove index', async () => { 107 | let db = await load() 108 | await db.set('test3', { two: 3 }) 109 | await db.set('test4', { two: 'two' }) 110 | db = await db.update() 111 | same(await sum(db, 'two'), 7) 112 | same(await count(db, 'two'), 4) 113 | await db.del('test3') 114 | await db.del('test4') 115 | db = await db.update() 116 | same(await sum(db, 'two'), 4) 117 | same(await count(db, 'two'), 2) 118 | }) 119 | test('string value in index', async () => { 120 | let db = await load() 121 | await db.set('string', { two: 'two' }) 122 | db = await db.update() 123 | const two = [ 124 | { prop: 'two', key: 'test1', value: 2 }, 125 | { prop: 'two', key: 'string', value: 'two' }, 126 | { prop: 'two', key: 'test2', value: 2 } 127 | ] 128 | await sameEntries(db, 'two', two) 129 | same(await sum(db, 'two'), 4) 130 | same(await count(db, 'two'), 3) 131 | }) 132 | test('values, sources', async () => { 133 | const db = await load() 134 | const values = [2, 2] 135 | for await (const val of 
db.indexes.props.values('two')) { 136 | same(val, values.shift()) 137 | } 138 | const sources = [ 139 | { one: 1, two: 2 }, 140 | { two: 2, three: { four: 4 } } 141 | ] 142 | for await (const source of db.indexes.props.sources('two')) { 143 | same(source, sources.shift()) 144 | } 145 | }) 146 | test('uniques', async () => { 147 | let db = await load() 148 | const fixture = { hello: 'world', one: 'one', two: 'two' } 149 | await db.set('fixture', fixture) 150 | await db.set('fixtureCopy', fixture) 151 | db = await db.update() 152 | let gen = db.indexes.props.sources('two', { uniqueSources: true }) 153 | let sources = [ 154 | { hello: 'world', one: 'one', two: 'two' }, 155 | { one: 1, two: 2 }, 156 | { two: 2, three: { four: 4 } } 157 | ] 158 | for await (const source of gen) { 159 | same(source, sources.shift()) 160 | } 161 | gen = db.indexes.props.sources('two', 'one') 162 | sources = [ 163 | { hello: 'world', one: 'one', two: 'two' }, 164 | { one: 1, two: 2 }, 165 | { hello: 'world', one: 'one', two: 'two' }, 166 | { two: 2, three: { four: 4 } }, 167 | { hello: 'world', one: 'one', two: 'two' }, 168 | { one: 1, two: 2 }, 169 | { hello: 'world', one: 'one', two: 'two' } 170 | ] 171 | for await (const source of gen) { 172 | same(source, sources.shift()) 173 | } 174 | gen = db.indexes.props.sources('two', 'one', { uniqueKeys: true }) 175 | sources = [ 176 | { hello: 'world', one: 'one', two: 'two' }, 177 | { one: 1, two: 2 }, 178 | { hello: 'world', one: 'one', two: 'two' }, 179 | { two: 2, three: { four: 4 } } 180 | ] 181 | for await (const source of gen) { 182 | same(source, sources.shift()) 183 | } 184 | }) 185 | test('traverse link', async () => { 186 | let db = await load() 187 | const link = await db.link({ four: 4 }) 188 | await db.set('withLink', { three: link }) 189 | 190 | db = await db.update() 191 | const three = [ 192 | { 193 | key: 'withLink', 194 | prop: 'three/four', 195 | value: 4 196 | }, 197 | { 198 | key: 'test2', 199 | prop: 'three/four', 200 | 
value: 4 201 | } 202 | ] 203 | await sameEntries(db, 'three/four', three) 204 | }) 205 | 206 | test('errors', async () => { 207 | const db = await load() 208 | let threw = true 209 | try { 210 | await db.indexes.props.get('nope') 211 | threw = false 212 | } catch (e) { 213 | if (e.message !== 'No property index for "nope"') throw e 214 | } 215 | same(threw, true) 216 | await db.set('another', 'test') 217 | same(await db.dirty, 1) 218 | same(await db.indexes.dirty, 1) 219 | same(await db.indexes.props.dirty, 1) 220 | 221 | const message = 'Cannot create new index with pending KV transactions, commit or update.' 222 | let methods = [ 223 | 'count', 224 | 'sum' 225 | ] 226 | for (const method of methods) { 227 | try { 228 | await db.indexes.props[method]() 229 | threw = false 230 | } catch (e) { 231 | if (e.message !== message) throw e 232 | } 233 | same(threw, true) 234 | } 235 | methods = [ 236 | 'sources', 237 | 'values', 238 | 'entries' 239 | ] 240 | const noop = () => {} 241 | for (const method of methods) { 242 | try { 243 | for await (const b of db.indexes.props[method]()) { 244 | noop(b) 245 | } 246 | threw = false 247 | } catch (e) { 248 | if (e.message !== message) throw e 249 | } 250 | same(threw, true) 251 | } 252 | }) 253 | }) 254 | -------------------------------------------------------------------------------- /test/test-stores.js: -------------------------------------------------------------------------------- 1 | /* globals describe, it, before, after */ 2 | import { fixtures, graphTests, replicateTests, basics } from './lib/storage.js' 3 | import Block from '@ipld/block/defaults' 4 | import LRUStore from '../src/stores/lru.js' 5 | import createInmemory from '../src/stores/inmemory.js' 6 | import assert from 'assert' 7 | 8 | const same = assert.deepStrictEqual 9 | const inmem = createInmemory(Block) 10 | const test = it 11 | 12 | const b = obj => Block.encoder(obj, 'dag-cbor') 13 | 14 | describe('test-stores', () => { 15 | describe('lru', () => { 16 | 
const only = Block.encoder({ hello: 'world' }, 'dag-cbor') 17 | class TestStore extends LRUStore { 18 | _putBlock () { 19 | } 20 | 21 | _getBlock (cid) { 22 | return Block.encoder({ hello: 'world' }, 'dag-cbor') 23 | } 24 | } 25 | test('get', async () => { 26 | const store = new TestStore() 27 | const cid = await only.cid() 28 | const block = await store.get(cid) 29 | assert.ok(cid.equals(await block.cid())) 30 | same(store.lru.length, 13) 31 | same(block, await store.get(cid)) 32 | }) 33 | test('put', async () => { 34 | const store = new TestStore() 35 | await store.put(only) 36 | await store.put(only) 37 | same(store.lru.length, 13) 38 | }) 39 | }) 40 | 41 | describe('inmem', () => { 42 | test('basic inmem', async () => { 43 | await basics(inmem) 44 | }) 45 | test('store block twice', async () => { 46 | const store = await inmem() 47 | const block = b({ hello: 'world' }) 48 | await store.put(block) 49 | await store.put(block) 50 | same(store.storage.size, 1) 51 | }) 52 | describe('graph', () => { 53 | graphTests(inmem, (store, ...args) => store.graph(...args)) 54 | 55 | test('depth 0', async () => { 56 | const store = await inmem() 57 | const blocks = await fixtures.commonBranches() 58 | const branches = blocks.slice(1, 3) 59 | await Promise.all(blocks.map(b => store.put(b))) 60 | const [root] = blocks 61 | var { complete, missing, incomplete } = await store.graph(await root.cid(), 0) 62 | assert.ok(!complete) 63 | assert.ok(!missing) 64 | assert.ok(incomplete) 65 | same(incomplete.size, 2) 66 | for (const block of branches) { 67 | const cid = await block.cid() 68 | assert.ok(incomplete.has(cid.toString('base32'))) 69 | } 70 | // cause a full traversal 71 | await store.graph(await root.cid()) 72 | // the full traversal should update the competion cache 73 | const r = await store.graph(await root.cid(), 0) 74 | assert.ok(r.complete && !r.missing && !r.incomplete) 75 | }) 76 | }) 77 | describe('replicate', () => { 78 | replicateTests(inmem) 79 | }) 80 | }) 81 | 82 
| describe('kv', () => { 83 | let create 84 | before(async () => { 85 | create = (await import('./lib/mock-kv.js')).default 86 | }) 87 | test('basics', async () => { 88 | await basics(create) 89 | }) 90 | test('store block twice', async () => { 91 | const store = await create({ lru: false }) 92 | const block = b({ hello: 'world' }) 93 | await store.put(block) 94 | same(Object.keys(store.storage).length, 2) 95 | await store.put(block) 96 | same(Object.keys(store.storage).length, 2) 97 | }) 98 | test('add tests', () => { 99 | describe('test-store kv graph', () => { 100 | graphTests(create, (store, ...args) => store.graph(...args)) 101 | }) 102 | describe('test-store kv replicate', () => { 103 | replicateTests(create) 104 | }) 105 | }).timeout(60 * 1000) 106 | }) 107 | 108 | describe('s3', () => { 109 | let createS3 110 | let createStore 111 | let create 112 | before(async () => { 113 | createS3 = (await import('./lib/mock-s3.js')).default 114 | createStore = (await import('../src/stores/s3.js')).default(Block) 115 | create = opts => createStore(createS3(), opts) 116 | }) 117 | test('basics', async () => { 118 | await basics(create) 119 | }) 120 | test('store block twice', async () => { 121 | const store = await create({ lru: false }) 122 | const block = b({ hello: 'world' }) 123 | await store.put(block) 124 | same(Object.keys(store.s3.storage).length, 2) 125 | await store.put(block) 126 | same(Object.keys(store.s3.storage).length, 2) 127 | }) 128 | 129 | test('add tests', () => { 130 | describe('test-store s3 graph', () => { 131 | graphTests(create, (store, ...args) => store.graph(...args)) 132 | }) 133 | describe('test-store s3 replicate', () => { 134 | replicateTests(create) 135 | }) 136 | }).timeout(60 * 1000) 137 | }) 138 | 139 | describe('level', () => { 140 | let memdown 141 | let createStore 142 | let create 143 | before(async () => { 144 | memdown = (await import('memdown')).default 145 | createStore = (await import('../src/stores/level.js')).default(Block) 
146 | create = () => createStore(memdown(Math.random().toString())) 147 | }) 148 | test('basics', async () => { 149 | await basics(create) 150 | }) 151 | test('add tests', () => { 152 | describe('test-stores level graph', () => { 153 | graphTests(create, (store, ...args) => store.graph(...args)) 154 | }) 155 | describe('test-stores level replicate', () => { 156 | replicateTests(create) 157 | }) 158 | }).timeout(60 * 1000) 159 | }) 160 | 161 | describe('errors', () => { 162 | test('unsupported scheme', async () => { 163 | const main = (await import('../src/stores/index.js')).default(Block) 164 | try { 165 | await main.from('wss://') 166 | throw new Error('Did not throw') 167 | } catch (e) { 168 | if (e.message !== 'Cannot resolve identifier "wss://"') throw e 169 | } 170 | }) 171 | }) 172 | 173 | if (!process.browser) { 174 | const getPort = () => Math.floor(Math.random() * (9000 - 8000) + 8000) 175 | const stores = {} 176 | 177 | let createNodejsHandler 178 | before(async () => { 179 | createNodejsHandler = (await import('../src/http/nodejs.js')).default.blockstore 180 | }) 181 | 182 | const handler = async (req, res) => { 183 | const parsed = new URL('http://asdf' + req.url) 184 | const id = parsed.searchParams.get('id') 185 | parsed.searchParams.delete('id') 186 | const store = stores[id] 187 | if (!store) throw new Error('Missing store') 188 | req.url = parsed.toString().slice('http://asdf'.length) 189 | const _handler = createNodejsHandler(Block, store) 190 | return _handler(req, res) 191 | } 192 | 193 | describe('http', () => { 194 | const port = getPort() 195 | let server 196 | let closed 197 | let createStore 198 | let create 199 | before(() => new Promise((resolve, reject) => { 200 | return (new Promise(resolve => resolve())).then(async () => { 201 | server = (await import('http')).createServer(handler) 202 | closed = new Promise(resolve => server.once('close', resolve)) 203 | server.listen(port, e => { 204 | if (e) return reject(e) 205 | resolve() 206 | }) 
207 | createStore = (await import('../src/stores/https.js')).default(Block) 208 | create = (opts) => { 209 | const id = Math.random().toString() 210 | const url = `http://localhost:${port}?id=${id}` 211 | stores[id] = inmem() 212 | const store = createStore(url, opts) 213 | return store 214 | } 215 | }) 216 | })) 217 | 218 | test('basics', async () => { 219 | await basics(create) 220 | }) 221 | test('add tests', () => { 222 | describe('test-store http store.graph()', () => { 223 | graphTests(create, (store, ...args) => store.graph(...args)) 224 | }) 225 | describe('test-store http replicate', () => { 226 | replicateTests(create) 227 | }) 228 | after(() => { 229 | server.close() 230 | return closed 231 | }) 232 | }).timeout(60 * 1000) 233 | }) 234 | describe('http no params', () => { 235 | const port = getPort() 236 | const store = inmem() 237 | let server 238 | let closed 239 | let createStore 240 | let create 241 | before(() => new Promise((resolve, reject) => { 242 | return (new Promise(resolve => resolve())).then(async () => { 243 | server = (await import('http')).createServer(createNodejsHandler(Block, store)) 244 | closed = new Promise(resolve => server.once('close', resolve)) 245 | server.listen(port, e => { 246 | if (e) return reject(e) 247 | resolve() 248 | }) 249 | createStore = (await import('../src/stores/https.js')).default(Block) 250 | create = (opts) => { 251 | const url = `http://localhost:${port}` 252 | return createStore(url, opts) 253 | } 254 | }) 255 | })) 256 | 257 | test('basics', async () => { 258 | await basics(create) 259 | }) 260 | test('url making', done => { 261 | const store = create() 262 | same(store.mkurl('asdf'), `http://localhost:${port}/asdf`) 263 | store.url += '/' 264 | same(store.mkurl('asdf'), `http://localhost:${port}/asdf`) 265 | done() 266 | }) 267 | after(() => { 268 | server.close() 269 | return closed 270 | }) 271 | }) 272 | describe('http handler', () => { 273 | let createHandler 274 | before(async () => { 275 | 
createHandler = (await import('../src/http/handlers.js')).blockstore 276 | }) 277 | test('head', async () => { 278 | const store = inmem() 279 | const handler = createHandler(Block, store) 280 | const block = Block.encoder(Buffer.from('test'), 'raw') 281 | await store.put(block) 282 | const cid = await block.cid() 283 | const opts = { method: 'HEAD', path: cid.toString('base32') } 284 | let result = await handler(opts) 285 | same(result.headers['content-length'], 4) 286 | store.has = async () => true 287 | result = await handler(opts) 288 | same(result.statusCode, 200) 289 | }) 290 | }) 291 | } else { 292 | describe('idb', function () { 293 | this.timeout(8000) 294 | let idb 295 | let createStore 296 | let create 297 | before(async () => { 298 | idb = (await import('level-js')).default 299 | createStore = (await import('../src/stores/level.js')).default(Block) 300 | create = (opts) => createStore(idb(Math.random().toString()), opts) 301 | }) 302 | test('basics', async () => { 303 | await basics(create) 304 | }) 305 | test('add tests', () => { 306 | describe('test-store idb graph', () => { 307 | graphTests(create, (store, ...args) => store.graph(...args)) 308 | }) 309 | describe('test-store idb replicate', () => { 310 | replicateTests(create) 311 | }) 312 | }).timeout(60 * 1000) 313 | }) 314 | } 315 | }) 316 | -------------------------------------------------------------------------------- /src/indexes.js: -------------------------------------------------------------------------------- 1 | import { validate, chain } from './utils.js' 2 | import * as hamt from './hamt.js' 3 | 4 | // We need singletons on instances for things you can only get async. 5 | // The only good way to do that is by caching the promises and only 6 | // creating those promises when the properties are accessed so that 7 | // any exceptions can propogate to the first bit of code that awaits 8 | // on them. 
9 | const lazyprop = (obj, name, fn) => { 10 | const writable = false 11 | const get = () => { 12 | const n = `_${name}` 13 | if (!obj[n]) Object.defineProperty(obj, n, { value: fn(), writable }) 14 | return obj[n] 15 | } 16 | Object.defineProperty(obj, name, { get }) 17 | } 18 | 19 | export default (Block, fromBlock, kv) => { 20 | const { toString } = Block.multiformats.bytes 21 | const toBlock = (value, className) => Block.encoder(validate(value, className), 'dag-cbor') 22 | const emptyHamt = hamt.empty(Block, 'dag-cbor') 23 | 24 | const emptyProp = emptyHamt.cid().then(map => toBlock({ count: 0, sum: 0, map }, 'PropIndex')) 25 | const exports = {} 26 | 27 | const updatePropIndex = async function * (prop, ops) { 28 | // dev-only guard 29 | /* c8 ignore next */ 30 | if (prop.updated) throw new Error('Index has already been updated') 31 | prop.updated = true 32 | const root = await prop.rootData 33 | const kvdb = await prop.props.getKV() 34 | const getBlock = prop.getBlock 35 | const hamtRoot = root.map 36 | const path = prop.name.split('/').filter(x => x) 37 | 38 | let keys = Array.from(new Set(ops.map(op => op.set ? 
op.set.key : op.del.key))) 39 | 40 | const has = await Promise.all(keys.map(key => hamt.has(hamtRoot, key, getBlock))) 41 | keys = new Set(keys.filter((v, i) => has[i])) 42 | 43 | root.count -= keys.size 44 | 45 | const updates = [] 46 | for (const op of ops) { 47 | const _del = async key => { 48 | // lookup prior resolved value for this key 49 | const value = await hamt.get(hamtRoot, key, getBlock) 50 | if (typeof value === 'undefined') return // not in index 51 | if (typeof value === 'number') { 52 | root.sum -= value 53 | } 54 | updates.push({ del: { key } }) // remove it from the index 55 | } 56 | if (op.set) { 57 | const { key, val } = op.set 58 | let value = await kvdb.getValue(val) 59 | const lookup = [...path] 60 | while (lookup.length && typeof value[lookup[0]] !== 'undefined') { 61 | value = value[lookup.shift()] 62 | if (typeof value === 'function') value = await value() 63 | } 64 | if (lookup.length) { 65 | if (keys.has(key)) { 66 | await _del(key) 67 | } 68 | continue 69 | } 70 | root.count += 1 71 | if (typeof value === 'number') { 72 | root.sum += value 73 | } 74 | // TODO: property encode value to handle links 75 | updates.push({ set: { key, val: value } }) 76 | } else { 77 | await _del(op.del.key) 78 | } 79 | } 80 | if (!updates.length) { 81 | prop.newRootBlock = await getBlock(root.map) 82 | return 83 | } 84 | let last 85 | for await (const block of hamt.bulk(hamtRoot, updates, getBlock, Block)) { 86 | yield block 87 | last = block 88 | } 89 | root.map = await last.cid() 90 | const newRootBlock = toBlock(root, 'PropIndex') 91 | yield newRootBlock 92 | prop.newRootBlock = newRootBlock 93 | } 94 | 95 | const propEntries = async function * (prop) { 96 | const root = await prop.rootData 97 | const hamtRoot = root.map 98 | yield * hamt.all(hamtRoot, prop.getBlock) 99 | } 100 | 101 | class Prop { 102 | constructor (props, root, name) { 103 | chain(this, props) 104 | this.root = root.then ? 
root : new Promise(resolve => resolve(root)) 105 | lazyprop(this, 'rootBlock', () => this.root.then(cid => this.getBlock(cid))) 106 | lazyprop(this, 'rootData', () => this.rootBlock.then(block => block.decode())) 107 | this.name = name 108 | this.props = props 109 | } 110 | 111 | updateIndex (ops) { 112 | return updatePropIndex(this, ops) 113 | } 114 | 115 | async update (ops) { 116 | let root = await this.root 117 | const blocks = [] 118 | ops = ops.map(op => op.decodeUnsafe()) 119 | let prop = this 120 | if (this.newRootBlock) { 121 | if (!ops.length) return this.newRootBlock.cid() 122 | root = await this.newRootBlock.cid() 123 | prop = new Prop(this.props, root, this.name) 124 | } 125 | for await (const block of prop.updateIndex(ops)) { 126 | blocks.push(block) 127 | } 128 | if (!blocks.length) { 129 | return root 130 | } 131 | await Promise.all(blocks.map(b => this.store.put(b))) 132 | return blocks.pop().cid() 133 | } 134 | 135 | entries () { 136 | return propEntries(this) 137 | } 138 | } 139 | Prop.create = (props, name) => { 140 | const prop = new Prop(props, emptyProp.then(block => block.cid()), name) 141 | prop._rootData = emptyProp.then(block => block.decode()) 142 | return prop 143 | } 144 | 145 | const entries = async function * (props, names) { 146 | await checkDirty(props) 147 | let opts = { uniqueKeys: false, uniqueSources: false } 148 | if (typeof names[names.length - 1] === 'object') { 149 | opts = { ...opts, ...names.pop() } 150 | } 151 | const indexes = names.map(name => props.get(name)) 152 | let seenSources 153 | if (opts.uniqueSources) { 154 | seenSources = new Set() 155 | } 156 | let seenKeys 157 | if (opts.uniqueKeys) { 158 | seenKeys = new Set() 159 | } 160 | for (const p of indexes) { 161 | const index = await p 162 | const prop = index.name 163 | for await (let { key, value } of index.entries()) { 164 | key = toString(key) 165 | let kv 166 | let link 167 | if (opts.uniqueSources) { 168 | kv = await index.props.getKV() 169 | link = await 
kv.getRef(key) 170 | const ck = link.toString() 171 | if (seenSources.has(ck)) continue 172 | seenSources.add(ck) 173 | } 174 | if (opts.uniqueKeys) { 175 | if (seenKeys.has(key)) continue 176 | seenKeys.add(key) 177 | } 178 | const source = async () => { 179 | if (!kv) kv = await index.props.getKV() 180 | if (!link) link = await kv.getRef(key) 181 | return kv.getValue(link) 182 | } 183 | yield { key, prop, value, source } 184 | } 185 | } 186 | } 187 | const pluck = async function * (props, names, attr) { 188 | for await (const entry of entries(props, names)) { 189 | let val = entry[attr] 190 | // async source functions 191 | if (typeof val === 'function') val = val() 192 | yield val 193 | } 194 | } 195 | 196 | const checkDirty = async props => { 197 | if (await props.dirty) { 198 | throw new Error('Cannot create new index with pending KV transactions, commit or update.') 199 | } 200 | } 201 | 202 | class Props { 203 | constructor (indexes) { 204 | chain(this, indexes) 205 | this.indexes = indexes 206 | lazyprop(this, 'root', () => indexes.rootData.then(data => data.props)) 207 | lazyprop(this, 'rootBlock', () => this.root.then(cid => this.getBlock(cid))) 208 | lazyprop(this, 'rootData', () => this.rootBlock.then(block => block.decode())) 209 | this.pending = new Map() 210 | } 211 | 212 | async _get (name) { 213 | const root = await this.rootData 214 | if (!root[name]) throw new Error(`No property index for "${name}"`) 215 | return new Prop(this, root[name], name) 216 | } 217 | 218 | async getKV () { 219 | const db = this.indexes.db 220 | const head = (await db.getRoot())['db-v1'].kv 221 | const kvdb = kv(head, db.store) 222 | return kvdb 223 | } 224 | 225 | async add (name) { 226 | await checkDirty(this) 227 | // TODO: check if already added and throw 228 | const prop = Prop.create(this, name) 229 | const kvdb = await this.getKV() 230 | const ops = [] 231 | for await (const [key, value] of kvdb.all({ decode: false })) { 232 | ops.push({ set: { key, val: value } }) 
233 | } 234 | const promises = [] 235 | let last 236 | for await (const block of prop.updateIndex(ops)) { 237 | promises.push(this.store.put(block)) 238 | last = block 239 | } 240 | await Promise.all(promises) 241 | prop.newRoot = last 242 | this.pending.set(name, prop) 243 | } 244 | 245 | async get (name) { 246 | if (!this.pending.has(name)) { 247 | this.pending.set(name, await this._get(name)) 248 | } 249 | return this.pending.get(name) 250 | } 251 | 252 | entries (...names) { 253 | return entries(this, names) 254 | } 255 | 256 | values (...names) { 257 | return pluck(this, names, 'value') 258 | } 259 | 260 | sources (...names) { 261 | return pluck(this, names, 'source') 262 | } 263 | 264 | async count (...props) { 265 | await checkDirty(this) 266 | let count = 0 267 | const indexes = await Promise.all(props.map(name => this.get(name))) 268 | for (const index of indexes) { 269 | const data = await index.rootData 270 | count += data.count 271 | } 272 | return count 273 | } 274 | 275 | async sum (...props) { 276 | await checkDirty(this) 277 | let sum = 0 278 | const indexes = await Promise.all(props.map(name => this.get(name))) 279 | for (const index of indexes) { 280 | const data = await index.rootData 281 | sum += data.sum 282 | } 283 | return sum 284 | } 285 | 286 | async _all () { 287 | const data = await this.rootData 288 | const keys = new Set(Object.keys(data)) 289 | const results = [] 290 | for (const [k, prop] of this.pending.entries()) { 291 | keys.delete(k) 292 | results.push([k, prop]) 293 | } 294 | const promises = Array.from(keys.keys()).map(key => this.get(key).then(prop => [key, prop])) 295 | return [...results, ...await Promise.all(promises)] 296 | } 297 | 298 | async update (ops) { 299 | const props = await this._all() 300 | const _update = async ([key, prop]) => prop.update(ops).then(cid => [key, cid]) 301 | const results = await Promise.all(props.map(_update)) 302 | const block = toBlock(Object.fromEntries(results), 'Props') 303 | await 
this.store.put(block) 304 | return block.cid() 305 | } 306 | } 307 | class Indexes { 308 | constructor (db) { 309 | chain(this, db) 310 | this.db = db 311 | lazyprop(this, 'kvroot', () => db.getRoot().then(root => root['db-v1'].kv)) 312 | lazyprop(this, 'root', () => db.getRoot().then(root => root['db-v1'].indexes)) 313 | lazyprop(this, 'rootBlock', () => this.root.then(cid => this.getBlock(cid))) 314 | lazyprop(this, 'rootData', () => this.rootBlock.then(block => block.decode())) 315 | this.props = new Props(this) 316 | } 317 | 318 | all () { 319 | return [['props', this.props]] 320 | } 321 | 322 | async update (kvRoot) { 323 | const prev = await this.kvroot 324 | const kvdb = kv(kvRoot, this.store) 325 | const ops = await kvdb.since(prev) 326 | 327 | const _update = ([key, index]) => index.update(ops).then(root => [key, root]) 328 | const newIndexes = await Promise.all(this.all().map(_update)) 329 | const newRoot = toBlock(Object.fromEntries(newIndexes), 'Indexes') 330 | await this.store.put(newRoot) 331 | return newRoot.cid() 332 | } 333 | } 334 | const emptyMap = Block.encoder({}, 'dag-cbor') 335 | const emptyIndexes = emptyMap.cid().then(props => toBlock({ props }, 'Indexes')) 336 | exports.empties = [emptyIndexes, emptyMap, emptyProp] 337 | exports.Indexes = Indexes 338 | return exports 339 | } 340 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DagDB 2 | 3 | ***This project is pre-release, do not use it in production, breaking 4 | changes may still occur without notice.*** 5 | 6 | DagDB is a portable and syncable database for the Web. 7 | 8 | It can run as a distributed database in Node.js, including Serverless 9 | environments using AWS services as a backend. 10 | 11 | It also runs in the browser. 
In fact, there is no "client and server" 12 | in DagDB, everything is just a DagDB database replicating from another 13 | database. In this way, it's closer to `git` than a traditional database 14 | workflow. 15 | 16 | ## Creating Databases 17 | 18 | At an abstract level, DagDB databases operate on top of two storage interfaces. 19 | The first is the **block store**, which is a relatively simple key/value store. 20 | The second is an **updater** which is a single mutable reference to the current 21 | root of the database. 22 | 23 | The following methods are available to simplify the process of creating a new 24 | database on a number of storage and updater backends. 25 | 26 | ### Create an in-memory database. 27 | 28 | ```js 29 | import dagdb from 'dagdb' 30 | 31 | const db = await dagdb.create('inmem') /* 'inmemory' also works */ 32 | ``` 33 | 34 | ### Create a database in a Browser 35 | 36 | ```js 37 | import dagdb from 'dagdb' 38 | 39 | const db = await dagdb.create({ browser: true }) 40 | ``` 41 | 42 | If you want to have multiple unique databases stored in the browser 43 | you can use the `updateKey` option. 44 | 45 | ```js 46 | import dagdb from 'dagdb' 47 | 48 | // the default updateKey is "root" 49 | const db = await dagdb.create({ browser: true, updateKey: 'root' }) 50 | ``` 51 | 52 | ### Create a database in S3 53 | 54 | ```js 55 | import dagdb from 'dagdb' 56 | import { S3 } from 'aws-sdk' 57 | 58 | const Bucket = 'bucketName' 59 | const s3 = new S3({ params: { Bucket } }) 60 | 61 | let db = await dagdb.create({ s3 }) 62 | ``` 63 | 64 | This uses S3 for block storage and for the update transaction. This will work fine as long as you 65 | don't try to update the same database with a lot of concurrency, then you might encounter eventually 66 | consistency issues w/ S3. An updater built on top of Dynamo that can do transactional updates is 67 | planned in order to resolve these concerns. 68 | 69 | ### Create a database from a leveldown interface. 
70 | 71 | This allows you to store DagDB data in a 72 | [wide variety of storage backends](https://nicedoc.io/Level/awesome#stores). 73 | 74 | ```js 75 | import memdown from 'memdown' 76 | 77 | const leveldown = memdown(Math.random().toString()) // memdown takes a unique identifier 78 | const db = await dagdb.create({ leveldown }) 79 | ``` 80 | 81 | ### Create a database at a remote URL (no local caching or storage). 82 | 83 | ```js 84 | const db = await dagdb.create('http://website.com/dbname') 85 | ``` 86 | 87 | ## Opening a Database 88 | 89 | ### Opening a remote database 90 | 91 | ```js 92 | import dagdb from 'dagdb' 93 | 94 | const db = await dagdb.open('http://website.com/dbname') 95 | ``` 96 | 97 | ### Opening a database in the Browser 98 | 99 | ```js 100 | import dagdb from 'dagdb' 101 | 102 | const db = await dagdb.open({ browser: true }) 103 | ``` 104 | 105 | If you want to have multiple unique databases stored in the browser 106 | you can use the `updateKey` option. 107 | 108 | ```js 109 | import dagdb from 'dagdb' 110 | 111 | // the default updateKey is "root" 112 | const db = await dagdb.open({ browser: true, updateKey: 'root' }) 113 | ``` 114 | 115 | ### Create a database in S3 116 | 117 | ```js 118 | import dagdb from 'dagdb' 119 | import { S3 } from 'aws-sdk' 120 | 121 | const Bucket = 'bucketName' 122 | const s3 = new S3({ params: { Bucket } }) 123 | 124 | let db = await dagdb.open({ s3 }) 125 | ``` 126 | 127 | ### Opening a leveldown database 128 | 129 | ```js 130 | import redisdown from 'redisdown' // Redis storage backend 131 | 132 | const db = await dagdb.open({ leveldown: redisdown('location') }) 133 | ``` 134 | 135 | ## Key Value Storage 136 | 137 | DagDB's primary storage system is a simple key-value store. Keys 138 | can be any string, and values can be almost anything. 139 | 140 | For instance, all JSON types are natively supported as values. 
141 | 142 | ```js 143 | let db = await dagdb.create('inmem') 144 | await db.set('hello', 'world') 145 | console.log(await db.get('hello')) 146 | // prints "world" 147 | ``` 148 | 149 | As you can see, you can set and get values immediately. Something to 150 | note about this example is that, while the `"hello"` key is available, 151 | it is actually coming out of a staging area that has not yet been committed 152 | to the database. 153 | 154 | Every instance of `DagDB` is bound to an **immutable** database state. 155 | We then add, remove, or change keys in that database until finally 156 | updating it, which will return us a ***new*** `DagDB` instance 157 | for the newly updated immutable state. 158 | 159 | ```js 160 | let db = await dagdb.create('inmem') 161 | await db.set('hello', 'world') 162 | db = await db.update() 163 | console.log(await db.get('hello')) 164 | // prints "world" 165 | ``` 166 | 167 | Now that we know how to set values and update the database lets work 168 | with some more advanced values. 169 | 170 | ```js 171 | const now = new Date() 172 | await db.set('big-value', { 173 | name: 'Mikeal Rogers', 174 | created: { 175 | year: now.getYear(), 176 | month: now.getMonth(), 177 | day: now.getDay() 178 | }, 179 | hobbies: [ 'code', 'food', 'tea' ] 180 | }) 181 | ``` 182 | 183 | As you can see, we can use all JSON types and there's no limit to how far we 184 | can nest values inside of objects. In addition to JSON types we support efficient 185 | binary serialization, so you can use `Uint8Array` for any binary you have. 186 | 187 | ### Links 188 | 189 | So far we haven't shown you anything you can't do with any other key-value store. 190 | Now let's look at some features unique to DagDB and the primitives it's built on. 
191 | 192 | ```js 193 | const link = await db.link({ name: 'Earth', size: 3958.8 }) 194 | await db.set('mikeal', { name: 'Mikeal Rogers', planet: link }) 195 | await db.set('chris', { name: 'Chris Hafey', planet: link }) 196 | db = await db.update() 197 | 198 | const howBigIsYourPlanet = async key => { 199 | const person = await db.get(key) 200 | const planet = await person.planet() 201 | console.log(`${person.name} lives on a planet w/ a radius of ${planet.size}mi`) 202 | } 203 | await howBigIsYourPlanet('mikeal') 204 | // prints "Mikeal Rogers lives on a planet w/ a radius of 3958.8mi" 205 | await howBigIsYourPlanet('chris') 206 | // prints "Chris Hafey lives on a planet w/ a radius of 3958.8mi" 207 | ``` 208 | 209 | Pretty cool! 210 | 211 | As you can see, link values are decoded by DagDB as async functions that will 212 | return the decoded value from the database. 213 | 214 | The great thing about links is that the data is de-duplicated across the database. 215 | DagDB uses a technique called "content addressing" that links data by hashing the 216 | value. This means that, even if you create the link again with the same data, the 217 | link will be the same and the data will be deduplicated. 218 | 219 | You can also compare links in order to tell if they refer to the same data. 
220 | 221 | ```js 222 | const link1 = await db.link({ name: 'Earth', size: 3958.8 }) 223 | const link2 = await db.link({ name: 'Earth', size: 3958.8 }) 224 | console.log(link1.equals(link2)) 225 | // prints true 226 | 227 | const samePlanet = async (key1, key2) => { 228 | const person1 = await db.get(key1) 229 | const person2 = await db.get(key2) 230 | if (person1.planet.equals(person2.planet)) { 231 | console.log(`${person1.name} is on the same planet as ${person2.name}`) 232 | } else { 233 | console.log(`${person1.name} is not on the same planet as ${person2.name}`) 234 | } 235 | } 236 | samePlanet('mikeal', 'chris') 237 | // prints "Mikeal Rogers is on the same planet as Chris Hafey" 238 | ``` 239 | 240 | As you can see, links are more than addresses, they are useful values for comparison. 241 | 242 | There's no limit to the number of links and the depth at which you nest your values. 243 | Most importantly, you can use linked data in any other value with zero copy overhead, 244 | it's just a simple small update to the link value. 245 | 246 | ### Streams 247 | 248 | Since it is often problematic to store large amounts of binary as a single value, DagDB 249 | also natively supports storing streams of binary data. 250 | 251 | DagDB treats **any async generator** as a binary stream. Node.js Streams are valid 252 | async generators so they work right away. 
253 | 254 | ```js 255 | import { createReadStream } from 'fs' 256 | 257 | const reader = createReadStream('/path/to/file') 258 | 259 | db = await db.set('my file', { file: reader }).update() 260 | 261 | const printFile = async (key, property) => { 262 | const value = await db.get(key) 263 | for await (const chunk of value[property]) { 264 | process.stdout.write(chunk) 265 | } 266 | } 267 | printFile('my file', 'file') 268 | ``` 269 | 270 | Note that, while you can use any Stream interface that is a valid async generator (like Node.js 271 | Streams) to store the data, when you retrieve the stream it will be returned as a common async 272 | generator (not a Node.js Stream). 273 | 274 | The size of every chunk in the stream is preserved. However, *this may change in the future*. 275 | Some transports have issues with block sizes larger than 1mb so we may change the defaults 276 | in the future to keep each chunk below 1mb. 277 | 278 | ### Nesting Databases 279 | 280 | Another really cool thing you can do is use DagDB databases as values in other databases. 281 | 282 | ```js 283 | let db1 = await dagdb.create('inmem') 284 | let db2 = await dagdb.create('inmem') 285 | 286 | db1 = await db1.set('hello', 'world').update() 287 | db2 = await db2.set('db1', db1).update() 288 | 289 | const db = await db2.get('db1') 290 | console.log(await db.get('hello')) 291 | // prints "world" 292 | ``` 293 | 294 | This feature uses a very flexible system that can be extended in the future to feature 295 | all kinds of new data types. 296 | 297 | ### Custom Types 298 | 299 | DagDB's support for nesting databases lends itself to support other types of embeddings as well. This is a powerful feature that is used internally to allow embedding of builtin types and classes, but it can also be used to support embedding arbitrary custom types as well.
300 | 301 | The API is pretty simple: it requires the caller to specify a `type` string, and an `init` function that takes two arguments, namely the `root` cid of the custom object, and the underlying `store`. For example: 302 | 303 | ```js 304 | const initCustomType = async (root, store) => { 305 | return new CustomType(await store.get(root)) 306 | } 307 | ``` 308 | 309 | Additionally, the custom type/object must support the following interface: 310 | 311 | ```ts 312 | interface Encoder { 313 | _dagdb: { v1: string } 314 | encode(): AsyncGenerator 315 | } 316 | ``` 317 | 318 | From the internal docs: 319 | 320 | > Encoders, both here and in special types, are async generators that yield as many blocks as they like as long as the very last thing they yield is NOT a Block. This is so that the final root of each node can be embedded in a parent. This contract MUST be adhered to by all special types. Additionally, the _dagdb property specifies the type name for v1 of the interface (leaving room for future interface changes), and is used to lookup the in memory custom type mapping. 321 | 322 | To register the custom type, you simply call `register` on the database: 323 | 324 | ```js 325 | let db = await dagdb.create("inmem"); 326 | 327 | db.register("custom", initCustomType); 328 | 329 | const value = new CustomType(...args); 330 | await db.set("key", value); 331 | db = await db.update(); 332 | 333 | const custom = await db.get("key"); 334 | custom.method(); 335 | ``` 336 | 337 | ## Replication 338 | 339 | Replication in DagDB is quite different from traditional databases. Since there isn't a client 340 | and a server, since there's just databases everywhere, replication is a key component of how 341 | you access data. 342 | 343 | The closest thing to DagDB replication you're familiar with is `git`. The way changes are merged 344 | from one branch to another and from one remote to another.
We even have a system for keeping track 345 | of remote databases that feels a lot like git. 346 | 347 | Let's start by adding and pulling from a remote. 348 | 349 | ```js 350 | const url = 'http://website.com/db' 351 | const remoteDatabase = await dagdb.create(url) 352 | await remoteDatabase.set('hello', 'world').update() 353 | 354 | let db = await dagdb.create('inmem') 355 | await db.remotes.add('web', url) 356 | 357 | await db.remotes.pull('web') 358 | db = await db.update() 359 | 360 | console.log(await db.get('hello')) 361 | // prints "world" 362 | ``` 363 | 364 | Using remotes for replication is an efficient way to move data around because it keeps track 365 | of the last changeset and can easily pull only the changes since that time. However, if you 366 | have two database instances locally you can easily merge one into the other without using the 367 | remote system. 368 | 369 | ```js 370 | let db1 = await dagdb.create('inmem') 371 | let db2 = await dagdb.create('inmem') 372 | 373 | db1 = await db1.set('hello', 'world').update() 374 | db2 = await db2.merge(db1).update() 375 | 376 | console.log(await db2.get('hello')) 377 | // prints "world" 378 | ``` 379 | 380 | ### Replicate remote to key 381 | 382 | So far, we've been using replication to merge an entire database's keyspace into our own. 383 | But as we've already seen, you can use a DagDB database as a value, so it would make sense 384 | to use a remote to replicate into a key rather than merging into our entire local namespace.
385 | 386 | ```js 387 | const url = 'http://website.com/db' 388 | const remoteDatabase = await dagdb.create(url) 389 | await remoteDatabase.set('hello', 'world').update() 390 | 391 | let db = await dagdb.create('inmem') 392 | await db.remotes.add('web', { source: url, strategy: { keyed: 'webdb' }}) 393 | 394 | await db.remotes.pull('web') 395 | db = await db.update() 396 | const webdb = await db.get('webdb') 397 | 398 | console.log(await webdb.get('hello')) 399 | // prints "world" 400 | ``` 401 | 402 | ## Running the HTTP Service 403 | 404 | ### in Node.js 405 | 406 | If you're using Node.js it's quite easy to get an HTTP handler you can 407 | pass to `http.createServer` for any database instance. 408 | 409 | ```js 410 | import http from 'http' 411 | import dagdb from 'dagdb' 412 | import createHandler from 'dagdb/server.js' 413 | 414 | const db = await dagdb.create('inmem') 415 | const handler = createHandler(db) 416 | 417 | const server = http.createServer(handler) 418 | server.listen(8080) 419 | ``` 420 | 421 | -------------------------------------------------------------------------------- /src/kv.js: -------------------------------------------------------------------------------- 1 | import * as hamt from './hamt.js' 2 | import { 3 | NotFound, readonly, isCID, 4 | fromBlock, fromBlockUnsafe, validate, 5 | encoderTransaction 6 | } from './utils.js' 7 | import valueLoader from './values.js' 8 | 9 | const getKey = decoded => decoded.set ?
decoded.set.key : decoded.del.key 10 | 11 | const createGet = (local, remote) => { 12 | const cache = new Map() 13 | const get = async cid => { 14 | if (!isCID(cid)) throw new Error('Must be CID') 15 | const key = cid.toString('base64') 16 | if (cache.has(key)) return cache.get(key) 17 | const _cache = (block) => cache.set(key, block) 18 | let ret 19 | try { 20 | ret = await local(cid) 21 | } catch (e) { 22 | // noop 23 | } 24 | if (ret) { 25 | _cache(ret) 26 | return ret 27 | } 28 | // final cache check, useful under concurrent load 29 | /* c8 ignore next */ 30 | if (cache.has(key)) return cache.get(key) 31 | const block = await remote(cid) 32 | _cache(block) 33 | /* c8 ignore next */ 34 | return block 35 | } 36 | return get 37 | } 38 | 39 | const create = (Block) => { 40 | const { encode, decode, register } = valueLoader(Block) 41 | const { toString } = Block.multiformats.bytes 42 | const toBlock = (value, className) => Block.encoder(validate(value, className), 'dag-cbor') 43 | 44 | const commitKeyValueTransaction = async function * (opBlocks, root, get) { 45 | const rootBlock = await get(root) 46 | const kvt = fromBlockUnsafe(rootBlock, 'Transaction') 47 | 48 | const opLinks = [] 49 | const opDecodes = [] 50 | for (const op of opBlocks) { 51 | opDecodes.push(fromBlock(op, 'Operation')) 52 | opLinks.push(op.cid()) 53 | } 54 | 55 | let last 56 | for await (const block of hamt.bulk(kvt['kv-v1'].head, opDecodes, get, Block)) { 57 | last = block 58 | yield block 59 | } 60 | // this happens when there are bugs elsewhere so 61 | // it's not really possible to test for, but it's 62 | // an important guard because it protects us from 63 | // inserting an empty transaction head when there 64 | // are other bugs 65 | /* c8 ignore next */ 66 | if (!last) throw new Error('nothing from hamt') 67 | 68 | const [head, ops, prev] = await Promise.all([last.cid(), Promise.all(opLinks), rootBlock.cid()]) 69 | yield toBlock({ 'kv-v1': { head, ops, prev } }, 'Transaction') 70 | /* c8 
ignore next */ 71 | } 72 | 73 | const isBlock = v => Block.isBlock(v) 74 | 75 | const commitTransaction = async function * (trans) { 76 | const root = trans.root 77 | const ops = [] 78 | for (const [op, ...blocks] of trans.cache.values()) { 79 | ops.push(op) 80 | yield op 81 | yield * blocks 82 | } 83 | if (!ops.length) throw new Error('There are no pending operations to commit') 84 | yield * commitKeyValueTransaction(ops, root, trans.store.get.bind(trans.store)) 85 | } 86 | 87 | class Transaction { 88 | constructor (root, store) { 89 | readonly(this, 'root', root) 90 | this.store = store 91 | this.cache = new Map() 92 | } 93 | 94 | get pending () { 95 | return this.cache.size 96 | } 97 | 98 | async since (prev) { 99 | let root = this.root 100 | const ops = [] 101 | const seen = new Set() 102 | while (!root.equals(prev)) { 103 | const data = await this.store.get(root).then(block => block.decodeUnsafe()) 104 | const _ops = await Promise.all(data['kv-v1'].ops.map(cid => this.store.get(cid))) 105 | for (const op of _ops) { 106 | const decode = op.decodeUnsafe() 107 | const key = decode.set ? 
decode.set.key : decode.del.key 108 | if (!seen.has(key)) { 109 | ops.push(op) 110 | } 111 | seen.add(key) 112 | } 113 | root = data['kv-v1'].prev 114 | } 115 | return ops 116 | } 117 | 118 | async __encode (block, opts = {}) { 119 | if (!isBlock(block)) { 120 | let last 121 | for await (const _block of encode(block)) { 122 | if (Block.isBlock(_block)) { 123 | if (opts.filter && !(await opts.filter(_block))) { 124 | // noop 125 | } else { 126 | await this.store.put(_block) 127 | } 128 | } 129 | last = _block 130 | } 131 | block = Block.encoder(last, 'dag-cbor') 132 | } 133 | await this.store.put(block) 134 | return block 135 | } 136 | 137 | async link (block) { 138 | block = await this.__encode(block) 139 | const cid = await block.cid() 140 | return decode(cid, this.store, this.updater) 141 | } 142 | 143 | async set (key, block, opts = {}) { 144 | if (typeof block === 'undefined') { 145 | if (typeof key !== 'object') throw new Error('Missing value') 146 | return Promise.all(Object.entries(key).map(([key, value]) => this.set(key, value))) 147 | } 148 | block = await this.__encode(block, opts) 149 | const op = toBlock({ set: { key, val: await block.cid() } }, 'Operation') 150 | this.cache.set(key, [op, block]) 151 | } 152 | 153 | async pendingTransactions () { 154 | return Promise.all(Array.from(this.cache.values()).map(x => x[0].cid())) 155 | } 156 | 157 | async del (key) { 158 | const op = toBlock({ del: { key } }, 'Operation') 159 | this.cache.set(key, [op]) 160 | } 161 | 162 | all (opts) { 163 | opts = { ...{ blocks: false, decode: true }, ...opts } 164 | const get = this.store.get.bind(this.store) 165 | const _decode = block => decode(block.decode(), this.store, this.updater) 166 | const iter = async function * (t) { 167 | const head = await t.getHead() 168 | for (const [key, [, block]] of t.cache.entries()) { 169 | if (!block) continue 170 | if (opts.decode) yield [key, _decode(block)] 171 | else if (opts.blocks) yield [key, block] 172 | else yield [key, await 
block.cid()] 173 | } 174 | const _iter = hamt.all(head, get) 175 | for await (let { key, value } of _iter) { 176 | key = toString(key) 177 | if (!t.cache.has(key)) { 178 | if (opts.decode) yield [key, _decode(await get(value))] 179 | else if (opts.blocks) yield [key, await get(value)] 180 | else yield [key, value] 181 | } 182 | } 183 | } 184 | return iter(this) 185 | } 186 | 187 | __get (key) { 188 | if (this.cache.has(key)) { 189 | const [, block] = this.cache.get(key) 190 | if (!block) throw new NotFound(`No key named "${key}"`) 191 | return block 192 | } 193 | return null 194 | } 195 | 196 | async getRootTransaction () { 197 | const root = await this.store.get(this.root) 198 | return fromBlock(root, 'Transaction') 199 | } 200 | 201 | async getBlock (key) { 202 | if (this.__get(key)) return this.__get(key) 203 | const head = await this.getHead() 204 | const link = await hamt.get(head, key, this.store.get.bind(this.store)) 205 | if (!link) throw new NotFound(`No key named "${key}"`) 206 | const block = await this.store.get(link) 207 | 208 | // one last cache check since there was async work 209 | /* c8 ignore next */ 210 | if (this.__get(key)) return this.__get(key) 211 | // workaround, fixed in Node.js v14.5.0 212 | /* c8 ignore next */ 213 | return block 214 | } 215 | 216 | async get (key) { 217 | if (Array.isArray(key)) return Promise.all(key.map(k => this.get(k))) 218 | const block = await this.getBlock(key) 219 | return decode(block.decode(), this.store, this.updater) 220 | } 221 | 222 | async getRef (key) { 223 | const block = await this.__get(key) 224 | if (block) return block.cid() 225 | const head = await this.getHead() 226 | const link = await hamt.get(head, key, this.store.get.bind(this.store)) 227 | if (!link) throw new NotFound(`No key named "${key}"`) 228 | return link 229 | } 230 | 231 | async getValue (cid) { 232 | const block = await this.store.get(cid) 233 | return decode(block.decode(), this.store, this.updater) 234 | } 235 | 236 | async has 
(key) { 237 | if (this.cache.has(key)) { 238 | if (this.cache.get(key).length === 1) return false 239 | return true 240 | } 241 | const head = await this.getHead() 242 | const link = await hamt.get(head, key, this.store.get.bind(this.store)) 243 | if (!link) return false 244 | return true 245 | } 246 | 247 | async size () { 248 | let i = 0 249 | const reader = this.all() 250 | while (true) { 251 | const { done } = await reader.next() 252 | if (done) return i 253 | i++ 254 | } /* c8 ignore next */ 255 | } 256 | 257 | async commit () { 258 | const pending = [] 259 | const _commit = commitTransaction(this) 260 | let last 261 | for await (const block of _commit) { 262 | last = block 263 | pending.push(this.store.put(block)) 264 | } 265 | await Promise.all(pending) 266 | return new Transaction(await last.cid(), this.store) 267 | } 268 | 269 | _encode () { 270 | return commitTransaction(this) 271 | } 272 | 273 | encode () { 274 | if (!this.cache.size) return (async function * (r) { yield r })(this.root) 275 | return encoderTransaction(this._encode()) 276 | } 277 | 278 | get _dagdb () { 279 | return { v1: 'transaction' } 280 | } 281 | 282 | async getHead () { 283 | const root = await this.getRootTransaction() 284 | return root['kv-v1'].head 285 | } 286 | 287 | async pull (trans, known = [], resolver = noResolver) { 288 | if (trans._kv) { 289 | return this.pull(await trans._kv, known, resolver) 290 | } 291 | // we need to make all the cached blocks accessible 292 | // to the resolver 293 | const _blocks = new Map() 294 | for (const [, block] of this.cache.values()) { 295 | if (block) _blocks.set(await block.cid().then(cid => cid.toString('base64')), block) 296 | } 297 | const local = async cid => { 298 | const key = cid.toString('base64') 299 | if (_blocks.has(key)) return _blocks.get(key) 300 | return this.store.get(cid) 301 | } 302 | const remote = trans.store.get.bind(trans.store) 303 | const oldRoot = this.root 304 | const newRoot = trans.root 305 | const stackedGet = 
createGet(local, remote) 306 | const staged = await replicate(oldRoot, newRoot, stackedGet, resolver, known) /* staged maps each changed key to [opBlock, valueBlock?] — valueBlock is present only for "set" operations (see replicate below) */ 307 | // now merge the latest options for each key from the remote 308 | // into the local cache for the transaction 309 | for (const [key, [op, block]] of staged.entries()) { 310 | if (this.cache.has(key)) { /* key was also mutated locally: let the resolver pick between the pending local op and the remote op */ 311 | const [old] = this.cache.get(key) 312 | const cid = await old.cid() 313 | if (cid.equals(await op.cid())) continue /* identical operation on both sides — nothing to merge */ 314 | const newOp = await resolver([old], [op], stackedGet) 315 | const decoded = newOp.decodeUnsafe() 316 | const value = [newOp] 317 | if (decoded.set) value.push(await stackedGet(decoded.set.val)) /* for a winning "set", cache its value block alongside the op */ 318 | this.cache.set(key, value) 319 | } else { /* no local change for this key: adopt the remote op as-is */ 320 | const value = [op] 321 | // This is an odd one. 322 | // Arrays with values of undefined end up getting encoded as null 323 | // in the browser and not in some Node.js versions. This is easily 324 | // fixable below but it can't be tested effectively in Node.js 325 | // so we have to disable coverage until we have browser coverage working.
326 | // c8 ignore else 327 | if (block) value.push(block) 328 | this.cache.set(key, value) 329 | } 330 | } 331 | } 332 | } 333 | 334 | const noResolver = localOps => { 335 | const decoded = localOps[0].decodeUnsafe() 336 | const key = getKey(decoded) 337 | throw new Error(`Conflict, databases contain conflicting mutations to "${key}" since last common`) 338 | } 339 | const reconcile = async (oldOps, newOps, get, resolver) => { 340 | const lastId = ops => ops[ops.length - 1].cid().then(cid => cid.toString('base64')) 341 | const staging = new Map() 342 | let i = 0 343 | const add = block => { 344 | const decoded = fromBlock(block, 'Operation') 345 | const key = getKey(decoded) 346 | if (!staging.has(key)) { 347 | staging.set(key, [[], []]) 348 | } 349 | const ops = staging.get(key)[i] 350 | ops.push(block) 351 | } 352 | oldOps.forEach(add) 353 | i = 1 354 | newOps.forEach(add) 355 | 356 | const ops = new Map() 357 | 358 | for (const [key, [oldOps, newOps]] of staging.entries()) { 359 | const accept = () => ops.set(key, newOps[newOps.length - 1]) 360 | // ignore keys that only have local history 361 | if (!newOps.length) continue 362 | // accept right away if there are no local changes to conflict with 363 | if (!oldOps.length) { 364 | accept() 365 | continue 366 | } 367 | // check if that last ops match and if so, ignore this key since the 368 | // both already have the same value 369 | const last = await lastId(oldOps) 370 | if (last === await lastId(newOps)) continue 371 | // if the last local operation exists anywhere in the history 372 | // of the new ops then we can take that as a common history 373 | // point and accept the latest change from the remote 374 | const ids = new Set(await Promise.all(newOps.map(block => block.cid().then(cid => cid.toString('base64'))))) 375 | if (ids.has(last)) { 376 | accept() 377 | continue 378 | } 379 | // there's a conflict, pass it to the resolver 380 | ops.set(key, await resolver(oldOps, newOps, get)) 381 | } 382 | return 
ops 383 | } 384 | 385 | const replicate = async (oldRoot, newRoot, get, resolver, known) => { 386 | oldRoot = await get(oldRoot) 387 | newRoot = await get(newRoot) 388 | const seen = new Set(known.map(cid => cid.toString('base64'))) 389 | 390 | const find = root => { 391 | const decoded = fromBlock(root, 'Transaction') 392 | // should we validate the schema here or just wait for it to potentially fail? 393 | const { head, prev } = decoded['kv-v1'] 394 | const key = head.toString('base64') 395 | if (seen.has(key)) return head 396 | seen.add(key) 397 | if (!prev) return null 398 | return get(prev).then(block => find(block)) 399 | } 400 | 401 | const race = async () => { 402 | const [old, latest] = [find(oldRoot), find(newRoot)] 403 | const common = await Promise.race([old, latest]) 404 | // TODO: cancel slower one 405 | if (common) return common 406 | else { 407 | const r = (await Promise.all([old, latest])).filter(x => x)[0] 408 | return r 409 | }/* c8 ignore next */ 410 | } 411 | 412 | const common = await race() 413 | if (!common) throw new Error('No common root between databases') 414 | 415 | const since = async (trans, _ops = []) => { 416 | const decoded = fromBlock(trans, 'Transaction') 417 | let { head, prev, ops } = decoded['kv-v1'] 418 | if (head.equals(common)) return _ops 419 | ops = ops.map(op => get(op)) 420 | return since(await get(prev), [...ops, ..._ops]) 421 | } 422 | 423 | const _all = root => since(root).then(ops => Promise.all(ops)) 424 | 425 | const [oldOps, newOps] = await Promise.all([_all(oldRoot), _all(newRoot)]) 426 | const ops = await reconcile(oldOps, newOps, get, resolver) 427 | const staged = new Map() 428 | for (const [key, op] of ops.entries()) { 429 | const decoded = op.decodeUnsafe() 430 | if (decoded.set) { 431 | staged.set(key, [op, await get(decoded.set.val)]) 432 | } else { 433 | staged.set(key, [op]) 434 | } 435 | } 436 | return staged 437 | } 438 | 439 | const emptyHamt = hamt.empty(Block, 'dag-cbor') 440 | const emptyData = 
emptyHamt.cid().then(head => ({ 'kv-v1': { head, ops: [], prev: null } })) 441 | const empty = emptyData.then(data => toBlock(data, 'Transaction')) 442 | 443 | const exports = (...args) => new Transaction(...args) 444 | exports.empties = [empty, emptyHamt] 445 | exports.create = async store => { 446 | const _empty = await empty 447 | await Promise.all([store.put(_empty), store.put(emptyHamt)]) 448 | const root = await _empty.cid() 449 | return new Transaction(root, store) 450 | } 451 | register('transaction', exports) 452 | exports.register = register 453 | return exports 454 | } 455 | 456 | create.createGet = createGet 457 | export default create 458 | -------------------------------------------------------------------------------- /test/test-remotes.js: -------------------------------------------------------------------------------- 1 | /* globals describe, before, it */ 2 | import Block from '@ipld/block/defaults' 3 | import bent from 'bent' 4 | import createInmemory from '../src/stores/inmemory.js' 5 | import assert from 'assert' 6 | import createReplicate from '../src/stores/replicate.js' 7 | import createUpdater from '../src/updaters/kv.js' 8 | import createDatabase from '../src/database.js' 9 | import createKV from './lib/mock-kv.js' 10 | 11 | const database = createDatabase(Block) 12 | const test = it 13 | const replicate = createReplicate(Block) 14 | const inmem = createInmemory(Block) 15 | const { CID } = Block 16 | const same = assert.deepStrictEqual 17 | const ok = assert.ok 18 | 19 | const getJSON = bent('json') 20 | 21 | const create = async () => { 22 | const store = inmem() 23 | const updater = createUpdater(Block)(createKV()) 24 | const db = await database.create(store, updater) 25 | return { store, db, updater } 26 | } 27 | 28 | const createRemotes = async (strategy) => { 29 | const dbs = await Promise.all([create(), create()]) 30 | const [db1, db2] = dbs.map(db => db.db) 31 | const remote = await db1.remotes.addLocal('test', strategy) 32 | return 
{ db1, db2, remote } 33 | } 34 | 35 | const v1 = 'db-v1' 36 | 37 | describe('test-remotes', () => { 38 | test('nothing to merge', async () => { 39 | let { db1, db2, remote } = await createRemotes({ full: true }) 40 | await remote.pullDatabase(db2) 41 | const latest = await db1.update() 42 | const kv1 = (await latest._kv).root 43 | const kv2 = (await db1._kv).root 44 | ok(kv1.equals(kv2)) 45 | const root1 = await db1.store.get(db1.root) 46 | const root2 = await latest.store.get(latest.root) 47 | assert.ok(!root1.decode()[v1].remotes.equals(root2.decode()[v1].remotes)) 48 | remote = await latest.remotes.get('test') 49 | const decoded = remote.rootDecode 50 | ok(decoded.head.equals(decoded.merged)) 51 | }) 52 | 53 | test('full merge', async () => { 54 | let { db1, db2, remote } = await createRemotes({ full: true }) 55 | await db2.set('test', { hello: 'world' }) 56 | db2 = await db2.commit() 57 | await remote.pullDatabase(db2) 58 | let latest = await db1.update() 59 | const kv1 = (await latest._kv).root 60 | const kv2 = (await db2._kv).root 61 | ok(kv1.equals(kv2)) 62 | remote = await latest.remotes.get('test') 63 | await remote.pullDatabase(db2) 64 | latest = await latest.update() 65 | same(await latest.get('test'), { hello: 'world' }) 66 | 67 | await db2.set('test', { foo: 'bar' }) 68 | db2 = await db2.commit() 69 | remote = await latest.remotes.get('test') 70 | await remote.pullDatabase(db2) 71 | latest = await latest.update() 72 | same(await latest.get('test'), { foo: 'bar' }) 73 | }) 74 | 75 | test('keyed merge', async () => { 76 | let { db1, db2, remote } = await createRemotes({ keyed: 'test-db' }) 77 | await db2.set('test', { hello: 'world' }) 78 | db2 = await db2.commit() 79 | await remote.pullDatabase(db2) 80 | db1 = await db1.update() 81 | const kv1 = (await db2._kv).root 82 | const latestDB = await db1.get('test-db') 83 | const kv2 = (await latestDB._kv).root 84 | ok(kv1.equals(kv2)) 85 | same(await latestDB.get('test'), { hello: 'world' }) 86 | remote = 
await db1.remotes.get('test') 87 | await remote.pullDatabase(db2) 88 | 89 | let dbValue = await db1.get('test-db') 90 | same(await dbValue.get('test'), { hello: 'world' }) 91 | 92 | await db2.set('test', { foo: 'bar' }) 93 | db2 = await db2.commit() 94 | remote = await db1.remotes.get('test') 95 | await remote.pullDatabase(db2) 96 | db1 = await db1.commit() 97 | dbValue = await db1.get('test-db') 98 | same(await dbValue.get('test'), { foo: 'bar' }) 99 | }) 100 | 101 | test('unsupported scheme', async () => { 102 | const create = await import('../src/updaters/index.js') 103 | const main = create.default(Block) 104 | try { 105 | await main.from('ws://') 106 | throw new Error('Did not throw') 107 | } catch (e) { 108 | if (e.message !== 'Unsupported identifier "ws://"') throw e 109 | } 110 | }) 111 | 112 | test('error: invalid http url', async () => { 113 | const { db } = await create() 114 | let threw = true 115 | try { 116 | await db.remotes.add('test', 'nope') 117 | threw = false 118 | } catch (e) { 119 | if (e.message !== 'Only http URL can be used as strings') throw e 120 | } 121 | same(threw, true) 122 | }) 123 | 124 | test('error: no remote', async () => { 125 | const { db } = await create() 126 | try { 127 | await db.remotes.get('test') 128 | throw new Error('did not throw') 129 | } catch (e) { 130 | if (e.message !== 'No remote named "test"') throw e 131 | } 132 | }) 133 | 134 | test('error: open and create w/o url', async () => { 135 | const bare = await import('../src/bare.js') 136 | const main = bare.default(Block) 137 | try { 138 | await main.open('test') 139 | throw new Error('Did not throw') 140 | } catch (e) { 141 | if (e.message !== 'Not implemented') throw e 142 | } 143 | try { 144 | await main.create('test') 145 | throw new Error('Did not throw') 146 | } catch (e) { 147 | if (e.message !== 'Not implemented') throw e 148 | } 149 | }) 150 | 151 | test('error: bad info, local pull', async () => { 152 | const { remote } = await createRemotes({ full: true 
}) 153 | try { 154 | await remote.pull() 155 | throw new Error('did not throw') 156 | } catch (e) { 157 | if (e.message !== 'Local remotes must use pullDatabase directly') throw e 158 | } 159 | }) 160 | test('error: bad info, push local', async () => { 161 | const { remote } = await createRemotes({ full: true }) 162 | try { 163 | await remote.push() 164 | throw new Error('did not throw') 165 | } catch (e) { 166 | if (e.message !== 'Local remotes cannot push') throw e 167 | } 168 | }) 169 | test('error: bad info, push keyed merge', async () => { 170 | const { remote } = await createRemotes({ full: true }) 171 | remote._info = { source: { type: 'http', url: 'http://asdf' }, strategy: { keyed: 'asdf' } } 172 | try { 173 | await remote.push() 174 | throw new Error('did not throw') 175 | } catch (e) { 176 | if (e.message !== 'Can only push databases using full merge strategy') throw e 177 | } 178 | }) 179 | test('custom remote', async () => { 180 | // a bit of a hack, we'll want a better test eventually 181 | // that actually does a full replication 182 | const { db } = await create() 183 | db.remotes.register('test', () => {}) 184 | }) 185 | if (!process.browser) { 186 | const stores = {} 187 | const updaters = {} 188 | 189 | let httpTests 190 | let createHandler 191 | describe('http', () => { 192 | before(async () => { 193 | httpTests = (await import('./lib/http.js')).default 194 | createHandler = (await import('../src/http/nodejs.js')).default 195 | httpTests('test-remotes', handler, async port => { 196 | let createDatabase 197 | let create 198 | before(async () => { 199 | createDatabase = (await import('../src/index.js')).default 200 | create = async (opts) => { 201 | const id = Math.random().toString() 202 | const url = `http://localhost:${port}/${id}` 203 | stores[id] = inmem() 204 | updaters[id] = createUpdater(Block)(createKV()) 205 | return createDatabase.create(url) 206 | } 207 | }) 208 | test('basic full merge', async () => { 209 | let db1 = await create() 
210 | let db2 = await create() 211 | await db2.set('test', { hello: 'world' }) 212 | db2 = await db2.update() 213 | await db1.remotes.add('a', db2.updater.infoUrl) 214 | db1 = await db1.update() 215 | db1 = await createDatabase.open(db1.updater.infoUrl) 216 | same(await db1.get('test'), { hello: 'world' }) 217 | await db2.set('test2', { foo: 'bar' }) 218 | db2 = await db2.update() 219 | await db1.remotes.pull('a') 220 | same(await db1.get('test2'), { foo: 'bar' }) 221 | db1 = await db1.update() 222 | await db1.remotes.pull('a') 223 | const root = db1.root 224 | db1 = await db1.update() 225 | root.equals(db1.root) 226 | }) 227 | test('updater', async () => { 228 | let db = await create() 229 | await db.set('test', { hello: 'world' }) 230 | db = await db.update() 231 | assert.ok(db.root.equals(await db.updater.root)) 232 | }) 233 | test('not found', async () => { 234 | const db = await create() 235 | const url = db.updater.infoUrl + 'notfound' 236 | const get = bent(404, 'string') 237 | const resp = await get(url) 238 | same(resp, 'Not found') 239 | }) 240 | test('push', async () => { 241 | let db = await create() 242 | const info = { source: { url: db.updater.infoUrl, type: 'http' }, strategy: { full: true } } 243 | delete db.updater 244 | await db.remotes.add('origin', info) 245 | db = await db.commit() 246 | const remote = await db.remotes.get('origin') 247 | await remote.push() 248 | }) 249 | const createReadonly = async (opts) => { 250 | const db = await create() 251 | const url = db.updater.infoUrl 252 | const split = url.split('/').filter(x => x) 253 | const id = split[split.length - 1] 254 | const updater = { root: db.root } 255 | updaters[id] = updater 256 | return [db, updater] 257 | } 258 | test('pull readonly', async () => { 259 | let [db1, updater] = await createReadonly() 260 | await db1.set('foo', 'bar') 261 | db1 = await db1.commit() 262 | updater.root = db1.root 263 | let db2 = await create() 264 | const info = { source: { url: db1.updater.infoUrl, 
type: 'http' }, strategy: { full: true } }
await db2.remotes.add('test', info)
await db2.remotes.pull('test')
same(await db2.get('foo'), 'bar')
db2 = await db2.update()
// push to the read-only endpoint must fail with this exact message
try {
  await db2.remotes.push('test')
  throw new Error('Did not throw')
} catch (e) {
  if (e.message !== 'Remote must have updater to use push') throw e
}
})
// Two remotes pushing to the same endpoint at once: the loser must be told
// to re-pull, both when racing live and after the server root moves.
test('error: concurrent pushes', async () => {
  let db = await create()
  const oldRoot = db.root
  const db2 = await create()
  const info = { source: { url: db.updater.infoUrl, type: 'http' }, strategy: { full: true } }
  delete db.updater
  await db.remotes.add('origin', info)
  db = await db.commit()
  const remote1 = await db.remotes.get('origin')
  // copy db's blocks into db2's store so remote2 can operate on the same data
  await replicate(db.root, db.store, db2.store)
  const dec = { ...remote1.rootDecode }
  const remote2 = new database.Remote({ ...dec }, db2)
  try {
    await Promise.all([remote1.push(), remote2.push()])
    throw new Error('did not throw')
  } catch (e) {
    if (e.message !== 'Remote has updated since last pull, re-pull before pushing') throw e
  }
  // Replace the server-side updater with one whose update() returns a stale
  // root, so the next push also sees the server as having moved on.
  const { url } = info.source
  const split = url.split('/').filter(x => x)
  const id = split[split.length - 1]
  const root = CID.from((await getJSON(info.source.url)).root)
  const updater = { root, update: () => oldRoot }
  updaters[id] = updater
  try {
    await remote1.push()
    throw new Error('did not throw')
  } catch (e) {
    if (e.message !== 'Remote has updated since last pull, re-pull before pushing') throw e
  }
})
// Pushing when the server's root matches an older local state must fail
// before the updater is ever consulted (its update() throws if reached).
test('error: update old reference', async () => {
  let db = await create()
  const oldHead = await db.getHead()
  const url = db.updater.infoUrl
  const split = url.split('/').filter(x => x)
  const id = split[split.length - 1]
  const info = { source: { url, type: 'http' }, strategy: { full: true } }
  await
db.remotes.add('origin', info)
await db.set('blah', 'test')
db = await db.update()
const newHead = await db.getHead()
const newRoot = db.root
await db.set('another', 'test')
db = await db.update()
// sanity: the two updates really produced a different head
assert.ok(!oldHead.equals(newHead))
const remote = await db.remotes.get('origin')
// stub updater: reaching update() means the stale-reference check failed
const update = () => {
  throw new Error('should not hit updater')
}
// pin the server at the intermediate root so the push sees a stale remote
updaters[id] = { root: newRoot, update }
try {
  await remote.push()
  throw new Error('did not throw')
} catch (e) {
  if (e.message !== 'Remote has updated since last pull, re-pull before pushing') throw e
}
})
// create() against a URL that already hosts a database must fail.
test('error: create already created', async () => {
  const db = await create()
  try {
    await createDatabase.create(db.updater.infoUrl)
    throw new Error('Did not throw')
  } catch (e) {
    if (e.message !== 'Database already created') throw e
  }
})
// open() against a URL whose info has no root must fail. The /empty route is
// served by the handler dispatcher defined later in this describe block.
test('error: open database not created', async () => {
  try {
    await createDatabase.open(`http://localhost:${port}/empty`)
    throw new Error('Did not throw')
  } catch (e) {
    if (e.message !== 'Database has not been created') throw e
  }
})
})
})
// --- handler-level unit tests: call handlers directly, no HTTP server ---
// An info handler built without an updater reports the root and blockstore
// but no updater field, i.e. the endpoint is read-only.
test('handler info, readonly', async () => {
  const handler = (await import('../src/http/handlers.js')).info({}, { root: 'test' })
  const resp = await handler({})
  const info = JSON.parse(resp.body.toString())
  same(info.root, 'test')
  assert.ok(!info.updater)
  same(info.blockstore, 'blockstore')
})
// The updater handler requires a "new" param and names it in the error.
test('missing required param', async () => {
  const handler = (await import('../src/http/handlers.js')).updater(Block)
  try {
    await handler({ params: {} })
    throw new Error('Did not throw')
  } catch (e) {
    if (e.message !== 'Missing required param "new"') throw e
  }
})
// Drive the node.js updater handler with a mock response object.
test('update handler', async () => {
  const b = Buffer.from('test')
const block = Block.encoder(b, 'raw')
const cid = await block.cid()
// stub updater whose update() accepts anything and returns this block's CID
const updater = { update: () => cid }
const handler = createHandler.updater(Block, updater)
let head
let body
// minimal http.ServerResponse stand-in: capture writeHead/end arguments
const mock = {
  writeHead: (...args) => { head = args },
  end: (...args) => { body = args }
}
await handler({ method: 'GET', url: `/?new=${cid.toString('base32')}` }, mock)
// NOTE(review): body is the *array* of args passed to end(); Array#toString
// joins with commas, so this only parses because end() got a single chunk.
body = JSON.parse(body.toString())
const [status, headers] = head
same(headers['content-type'], 'application/json')
same(status, 200)
same(body, { root: cid.toString('base32') })
})
// Root handler ("/" with no base path) returns the info document:
// current root plus a blockstore marker.
test('handler no base path', async () => {
  const b = Block.encoder(Buffer.from('test'), 'raw')
  const cid = await b.cid()
  const store = {}
  const updater = { root: await b.cid() }
  const handler = createHandler(Block, store, updater)
  let head
  let body
  const mock = {
    writeHead: (...args) => { head = args },
    end: (...args) => { body = args }
  }
  await handler({ method: 'GET', url: '/' }, mock)
  body = JSON.parse(body.toString())
  const [status, headers] = head
  same(headers['content-type'], 'application/json')
  same(status, 200)
  same(body, { root: cid.toString('base32'), blockstore: 'blockstore' })
})

// Request dispatcher handed to httpTests: '/empty' serves a bare '{}' info
// doc (used by the open-not-created test); otherwise the first path segment
// selects the per-database store/updater registered by create(), and the
// request is delegated to a database-scoped handler rooted at '/<id>'.
const handler = async (req, res) => {
  if (req.url === '/empty') {
    return res.end(JSON.stringify({}))
  }
  const [id] = req.url.split('/').filter(x => x)
  const store = stores[id]
  const updater = updaters[id]
  if (!store) throw new Error('Missing store')
  const _handler = createHandler(Block, store, updater)
  return _handler(req, res, '/' + id)
}
})
}
})
--------------------------------------------------------------------------------