├── .gitignore ├── .prettierrc ├── README.md ├── build-sqlite.sh ├── package-lock.json ├── package.json ├── release.sh ├── src ├── bptree-count.test.ts ├── bptree-count.ts ├── bptree-kv.test.ts ├── bptree-kv.ts ├── bptree-lock.test.ts ├── bptree-lock.ts ├── bptree-reducer.test.ts ├── bptree-reducer.ts ├── bptree-tx.test.ts ├── bptree-tx.ts ├── bptree.test.ts ├── bptree.ts ├── bptree2.test.ts ├── bptree2.ts ├── concurrency.test.ts ├── concurrency.ts ├── examples │ ├── dataTypes.ts │ ├── messaging.ts │ ├── minisql.ts │ └── old │ │ ├── MINISQL.md │ │ ├── messaging.ts │ │ ├── minisql2.ts │ │ ├── orm.ts │ │ └── social.ts ├── generator.test.ts ├── itree.test.ts ├── itree.ts ├── kv-lock.test.ts ├── kv-lock.ts ├── kv.test.ts ├── kv.ts ├── lib │ ├── AsyncBinaryPlusTree.test.ts │ ├── AsyncBinaryPlusTree.ts │ ├── AsyncIntervalTree.test.ts │ ├── AsyncIntervalTree.ts │ ├── AsyncReducerTree.test.ts │ ├── AsyncReducerTree.ts │ ├── Database.ts │ ├── InMemoryBinaryPlusTree.test.ts │ ├── InMemoryBinaryPlusTree.ts │ ├── InMemoryIntervalTree.test.ts │ ├── InMemoryIntervalTree.ts │ ├── InMemoryReducerTree.test.ts │ ├── InMemoryReducerTree.ts │ └── types.ts ├── lists.ts ├── okv.test.ts ├── okv.ts ├── perfTools.ts ├── performance.ts ├── performance2.ts ├── performance3.ts ├── propertyTest.test.ts ├── storage │ ├── IndexedDbKeyValueStorage.ts │ ├── IndexedDbOrderedKeyValueStorage.ts │ ├── JsonFileKeyValueStorage.ts │ ├── JsonFileOrderedKeyValueStorage.ts │ ├── LevelDbKeyValueStorage.ts │ ├── LevelDbOrderedKeyValueStorage.ts │ ├── SQLiteKeyValueStorage.ts │ ├── SQLiteOrderedKeyValueStorage.ts │ └── storage.test.ts ├── tuple-okv.test.ts ├── tuple-okv.ts └── tupledb │ ├── BTreeDb.test.ts │ ├── BTreeDb.ts │ ├── ITreeDb.test.ts │ └── ITreeDb.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | *.log 4 | build 5 | tmp 6 | sqlite-amalgamation 7 | 
-------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false, 3 | "useTabs": true, 4 | "trailingComma": "es5" 5 | } -------------------------------------------------------------------------------- /build-sqlite.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | mkdir -p sqlite-amalgamation 4 | cd sqlite-amalgamation 5 | 6 | curl -O https://sqlite.com/2024/sqlite-amalgamation-3450100.zip 7 | unzip sqlite-amalgamation-3450100.zip 8 | mv sqlite-amalgamation-3450100/* . 9 | 10 | rm -rf sqlite-amalgamation-3450100.zip 11 | rm -rf sqlite-amalgamation-3450100 12 | 13 | # enable R-Tree 14 | LINE='#define SQLITE_ENABLE_RTREE 1' 15 | sed -i '' "1s|^|$LINE\\n|" sqlite3.c 16 | 17 | cd .. 18 | npm install better-sqlite3 --no-save --build-from-source --sqlite3="$(pwd)/sqlite-amalgamation" -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "database-experiments", 3 | "version": "0.0.1", 4 | "description": "", 5 | "repository": { 6 | "url": "https://github.com/ccorcos/database-experiments" 7 | }, 8 | "main": "index.js", 9 | "scripts": { 10 | "clean": "rm -rf build", 11 | "build": "tsc", 12 | "typecheck": "tsc --noEmit", 13 | "test": "mocha -r tsx './src/**/*.test.ts' --verbose", 14 | "release": "./release.sh", 15 | "perf": "tsx src/performance.ts", 16 | "perf2": "tsx src/performance2.ts", 17 | "perf3": "tsx src/performance3.ts" 18 | }, 19 | "keywords": [], 20 | "author": "Chet Corcos ", 21 | "license": "MIT", 22 | "devDependencies": { 23 | "@ccorcos/test-clock": "^0.0.4", 24 | "@types/better-sqlite3": "^7.6.9", 25 | "@types/lodash": "^4.14.202", 26 | "@types/mocha": "whitecolor/mocha-types", 27 | "@types/node": "^20.9.1", 28 | "mocha": "^10.2.0", 29 | 
"tinybench": "^2.6.0", 30 | "tsx": "^4.1.2", 31 | "typescript": "^5.2.2" 32 | }, 33 | "dependencies": { 34 | "@ccorcos/lock": "^1.0.3", 35 | "@ccorcos/ordered-array": "^0.0.4", 36 | "abstract-leveldown": "^7.2.0", 37 | "better-sqlite3": "^9.4.0", 38 | "data-type-ts": "^1.0.3", 39 | "fake-indexeddb": "^5.0.2", 40 | "fs-extra": "^11.2.0", 41 | "idb": "^8.0.0", 42 | "level": "^8.0.1", 43 | "lexicodec": "^0.0.4", 44 | "lodash": "^4.17.21", 45 | "position-strings": "^2.0.1", 46 | "ulid": "^2.3.0" 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | npm version patch 6 | 7 | npm run clean 8 | npm run build 9 | 10 | cp README.md build 11 | cp package.json build 12 | 13 | cd build 14 | npm publish 15 | -------------------------------------------------------------------------------- /src/bptree-kv.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { cloneDeep, isEqual, uniq } from "lodash" 3 | import { describe, it } from "mocha" 4 | import { BinaryPlusKeyValueDatabase } from "./bptree-kv" 5 | import { KeyValueDatabase } from "./kv" 6 | 7 | // min = 2, max = 4 8 | const structuralTests24 = ` 9 | + 5 10 | [5] 11 | 12 | + 10 13 | [5,10] 14 | 15 | + 3 16 | [3,5,10] 17 | 18 | // Delete from root leaf 19 | - 5 20 | [3,10] 21 | 22 | + 5 23 | [3,5,10] 24 | 25 | + 7 26 | [3,5,7,10] 27 | 28 | // Split 29 | + 6 30 | [null,7] 31 | [3,5,6] [7,10] 32 | 33 | // Merge right branch 34 | - 7 35 | [3,5,6,10] 36 | 37 | + 7 38 | [null,7] 39 | [3,5,6] [7,10] 40 | 41 | - 6 42 | [null,7] 43 | [3,5] [7,10] 44 | 45 | // Merge left branch 46 | - 5 47 | [3,7,10] 48 | 49 | + 5 50 | [3,5,7,10] 51 | 52 | + 6 53 | [null,7] 54 | [3,5,6] [7,10] 55 | 56 | + 14 57 | [null,7] 58 | [3,5,6] [7,10,14] 59 | 60 | + 23 61 | [null,7] 62 | 
[3,5,6] [7,10,14,23] 63 | 64 | + 24 65 | [null,7,23] 66 | [3,5,6] [7,10,14] [23,24] 67 | 68 | // Merge right branch 69 | - 23 70 | [null,7] 71 | [3,5,6] [7,10,14,24] 72 | 73 | + 23 74 | [null,7,23] 75 | [3,5,6] [7,10,14] [23,24] 76 | 77 | // Update parent minKey 78 | - 7 79 | [null,10,23] 80 | [3,5,6] [10,14] [23,24] 81 | 82 | // Merge middle branch 83 | - 14 84 | [null,23] 85 | [3,5,6,10] [23,24] 86 | 87 | + 14 88 | [null,10,23] 89 | [3,5,6] [10,14] [23,24] 90 | 91 | - 3 92 | [null,10,23] 93 | [5,6] [10,14] [23,24] 94 | 95 | // Merge left branch 96 | - 6 97 | [null,23] 98 | [5,10,14] [23,24] 99 | 100 | + 3 101 | [null,23] 102 | [3,5,10,14] [23,24] 103 | 104 | + 6 105 | [null,10,23] 106 | [3,5,6] [10,14] [23,24] 107 | 108 | + 7 109 | [null,10,23] 110 | [3,5,6,7] [10,14] [23,24] 111 | 112 | + 8 113 | [null,7,10,23] 114 | [3,5,6] [7,8] [10,14] [23,24] 115 | 116 | + 11 117 | [null,7,10,23] 118 | [3,5,6] [7,8] [10,11,14] [23,24] 119 | 120 | + 12 121 | [null,7,10,23] 122 | [3,5,6] [7,8] [10,11,12,14] [23,24] 123 | 124 | // Double split 125 | + 13 126 | [null,13] 127 | [null,7,10] [13,23] 128 | [3,5,6] [7,8] [10,11,12] [13,14] [23,24] 129 | 130 | + 15 131 | [null,13] 132 | [null,7,10] [13,23] 133 | [3,5,6] [7,8] [10,11,12] [13,14,15] [23,24] 134 | 135 | // Double update minKey 136 | - 13 137 | [null,14] 138 | [null,7,10] [14,23] 139 | [3,5,6] [7,8] [10,11,12] [14,15] [23,24] 140 | 141 | // Double merge mid-right branch 142 | - 14 143 | [null,7,10,15] 144 | [3,5,6] [7,8] [10,11,12] [15,23,24] 145 | 146 | + 2 147 | [null,7,10,15] 148 | [2,3,5,6] [7,8] [10,11,12] [15,23,24] 149 | 150 | + 4 151 | [null,10] 152 | [null,5,7] [10,15] 153 | [2,3,4] [5,6] [7,8] [10,11,12] [15,23,24] 154 | 155 | - 8 156 | [null,10] 157 | [null,5] [10,15] 158 | [2,3,4] [5,6,7] [10,11,12] [15,23,24] 159 | 160 | - 3 161 | [null,10] 162 | [null,5] [10,15] 163 | [2,4] [5,6,7] [10,11,12] [15,23,24] 164 | 165 | // Double merge left branch 166 | - 2 167 | [null,10,15] 168 | [4,5,6,7] [10,11,12] [15,23,24] 
169 | 170 | - 15 171 | [null,10,23] 172 | [4,5,6,7] [10,11,12] [23,24] 173 | 174 | + 20 175 | [null,10,23] 176 | [4,5,6,7] [10,11,12,20] [23,24] 177 | 178 | // Redistribute right 179 | - 24 180 | [null,10,20] 181 | [4,5,6,7] [10,11,12] [20,23] 182 | 183 | + 13 184 | [null,10,20] 185 | [4,5,6,7] [10,11,12,13] [20,23] 186 | 187 | - 4 188 | [null,10,20] 189 | [5,6,7] [10,11,12,13] [20,23] 190 | 191 | - 5 192 | [null,10,20] 193 | [6,7] [10,11,12,13] [20,23] 194 | 195 | // Redistribute left 196 | - 6 197 | [null,12,20] 198 | [7,10,11] [12,13] [20,23] 199 | 200 | ` 201 | 202 | // Skipping becuase this is pretty slow. 203 | describe.skip("BinaryPlusKeyValueDatabase", () => { 204 | describe("structural tests 2-4", () => { 205 | const kv = new KeyValueDatabase() 206 | const tree = new BinaryPlusKeyValueDatabase(kv, 2, 4) 207 | test(tree, structuralTests24) 208 | }) 209 | 210 | describe("property test 2-4 * 100", () => { 211 | propertyTest({ minSize: 2, maxSize: 4, testSize: 100 }) 212 | }) 213 | 214 | describe("property test 3-6 * 100", () => { 215 | propertyTest({ minSize: 3, maxSize: 6, testSize: 100 }) 216 | }) 217 | 218 | it("big tree", () => { 219 | const numbers = randomNumbers(20_000) 220 | const kv = new KeyValueDatabase() 221 | const tree = new BinaryPlusKeyValueDatabase(kv, 3, 9) 222 | for (const number of numbers) { 223 | tree.set(number, number * 2) 224 | assert.equal(tree.get(number), number * 2) 225 | } 226 | for (const number of numbers) { 227 | tree.delete(number) 228 | assert.equal(tree.get(number), undefined) 229 | } 230 | assert.equal(tree.depth(), 1) 231 | }) 232 | 233 | function propertyTest(args: { 234 | minSize: number 235 | maxSize: number 236 | testSize: number 237 | }) { 238 | const numbers = randomNumbers(args.testSize) 239 | 240 | const kv = new KeyValueDatabase() 241 | const tree = new BinaryPlusKeyValueDatabase(kv, args.minSize, args.maxSize) 242 | for (let i = 0; i < numbers.length; i++) { 243 | const n = numbers[i] 244 | it(`Set ${i} : ${n}`, 
() => { 245 | // it(`+ ${n}`, () => { 246 | 247 | verifyImmutable(tree, () => { 248 | tree.set(n, n.toString()) 249 | verify(tree) 250 | }) 251 | 252 | for (let j = 0; j <= i; j++) { 253 | const x = numbers[j] 254 | assert.equal(tree.get(x), x.toString()) 255 | } 256 | // }) 257 | 258 | // Overwrite the jth key. 259 | for (let j = 0; j <= i; j++) { 260 | const x = numbers[j] 261 | 262 | // it(`Overwrite ${j}: ${x}`, () => { 263 | const t = clone(tree) 264 | 265 | verifyImmutable(tree, () => { 266 | t.set(x, x * 2) 267 | verify(t) 268 | }) 269 | 270 | // Check get on all keys. 271 | for (let k = 0; k <= i; k++) { 272 | const y = numbers[k] 273 | if (x === y) assert.equal(t.get(y), y * 2) 274 | else assert.equal(t.get(y), y.toString()) 275 | } 276 | // }) 277 | } 278 | 279 | // Delete the jth key. 280 | for (let j = 0; j <= i; j++) { 281 | const x = numbers[j] 282 | 283 | // it(`Delete ${j} : ${x}`, () => { 284 | const t = clone(tree) 285 | verifyImmutable(tree, () => { 286 | t.delete(x) 287 | verify(t) 288 | }) 289 | 290 | // Check get on all keys. 
291 | for (let k = 0; k <= i; k++) { 292 | const y = numbers[k] 293 | if (x === y) assert.equal(t.get(y), undefined) 294 | else assert.equal(t.get(y), y.toString()) 295 | } 296 | // }) 297 | } 298 | }) 299 | } 300 | } 301 | }) 302 | 303 | function randomNumbers(size: number) { 304 | const numbers: number[] = [] 305 | for (let i = 0; i < size; i++) 306 | numbers.push(Math.round((Math.random() - 0.5) * size * 10)) 307 | return uniq(numbers) 308 | } 309 | 310 | function parseTests(str: string) { 311 | // Cleanup extra whitespace 312 | str = str 313 | .split("\n") 314 | .map((line) => line.trim()) 315 | .join("\n") 316 | .trim() 317 | 318 | return str.split("\n\n").map((block) => { 319 | const lines = block.split("\n") 320 | let comment = "" 321 | if (lines[0].startsWith("//")) { 322 | comment = lines[0].slice(3) 323 | lines.splice(0, 1) 324 | } 325 | const [op, nStr] = lines[0].split(" ") 326 | const n = parseInt(nStr) 327 | const tree = lines.slice(1).join("\n") 328 | return { comment, n, tree, op: op as "+" | "-" } 329 | }) 330 | } 331 | 332 | function test(tree: BinaryPlusKeyValueDatabase, str: string) { 333 | for (const test of parseTests(str)) { 334 | let label = `${test.op} ${test.n}` 335 | if (test.comment) label += " // " + test.comment 336 | it(label, () => { 337 | if (test.op === "+") tree.set(test.n, test.n.toString()) 338 | if (test.op === "-") tree.delete(test.n) 339 | assert.equal(inspect(tree), test.tree, test.comment) 340 | 341 | const value = test.op === "+" ? 
test.n.toString() : undefined 342 | assert.equal(tree.get(test.n), value, test.comment) 343 | 344 | assert.equal(tree.depth(), test.tree.split("\n").length, test.comment) 345 | }) 346 | } 347 | } 348 | 349 | type Key = string | number 350 | type KeyTree = 351 | | { keys: Key[]; children?: undefined } 352 | | { keys: Key[]; children: KeyTree[] } 353 | 354 | function toKeyTree(tree: BinaryPlusKeyValueDatabase, id = "root"): KeyTree { 355 | const node = tree.kv.get(id)?.value 356 | if (!node) { 357 | console.warn("Missing node!") 358 | // throw new Error("Missing node!") 359 | return { keys: [] } 360 | } 361 | 362 | const keys = node.values.map((v) => v.key) 363 | if (node.leaf) return { keys: keys } 364 | 365 | const subtrees = node.values.map((v) => toKeyTree(tree, v.value)) 366 | return { keys: keys, children: subtrees } 367 | } 368 | 369 | type TreeLayer = Key[][] 370 | 371 | function toTreeLayers(tree: KeyTree): TreeLayer[] { 372 | const layers: TreeLayer[] = [] 373 | 374 | let cursor = [tree] 375 | while (cursor.length > 0) { 376 | const layer: TreeLayer = [] 377 | const nextCursor: KeyTree[] = [] 378 | for (const tree of cursor) { 379 | layer.push(tree.keys) 380 | if (tree.children) nextCursor.push(...tree.children) 381 | } 382 | layers.push(layer) 383 | cursor = nextCursor 384 | } 385 | return layers 386 | } 387 | 388 | function print(x: any) { 389 | if (x === null) return "null" 390 | if (typeof x === "number") return x.toString() 391 | if (typeof x === "string") return JSON.stringify(x) 392 | if (Array.isArray(x)) return "[" + x.map(print).join(",") + "]" 393 | return "" 394 | } 395 | 396 | function inspect(tree: BinaryPlusKeyValueDatabase) { 397 | const keyTree = toKeyTree(tree) 398 | const layers = toTreeLayers(keyTree) 399 | const str = layers 400 | .map((layer) => 401 | layer.length === 1 ? 
print(layer[0]) : layer.map(print).join(" ") 402 | ) 403 | .join("\n") 404 | return str 405 | } 406 | 407 | function clone(tree: BinaryPlusKeyValueDatabase) { 408 | const kv = new KeyValueDatabase() 409 | kv.map = cloneDeep(tree.kv.map) 410 | const cloned = new BinaryPlusKeyValueDatabase(kv, tree.minSize, tree.maxSize) 411 | return cloned 412 | } 413 | 414 | function shallowClone(tree: BinaryPlusKeyValueDatabase) { 415 | const kv = new KeyValueDatabase() 416 | kv.map = { ...tree.kv.map } 417 | const cloned = new BinaryPlusKeyValueDatabase(kv, tree.minSize, tree.maxSize) 418 | return cloned 419 | } 420 | 421 | /** Check for node sizes. */ 422 | function verify(tree: BinaryPlusKeyValueDatabase, id = "root") { 423 | const node = tree.kv.get(id)?.value 424 | if (id === "root") { 425 | assert.equal(countNodes(tree), Object.keys(tree.kv.map).length) 426 | if (!node) return 427 | if (node.leaf) return 428 | for (const { value } of node.values) verify(tree, value) 429 | return 430 | } 431 | 432 | assert.ok(node) 433 | assert.ok(node.values.length >= tree.minSize) 434 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 435 | 436 | if (node.leaf) return 437 | for (const { value } of node.values) verify(tree, value) 438 | } 439 | 440 | function countNodes(tree: BinaryPlusKeyValueDatabase, id = "root") { 441 | const node = tree.kv.get(id)?.value 442 | if (id === "root") { 443 | if (!node) return 0 444 | if (node.leaf) return 1 445 | let count = 1 446 | for (const { value } of node.values) count += countNodes(tree, value) 447 | return count 448 | } 449 | 450 | assert.ok(node) 451 | assert.ok(node.values.length >= tree.minSize) 452 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 453 | 454 | if (node.leaf) return 1 455 | let count = 1 456 | for (const { value } of node.values) count += countNodes(tree, value) 457 | return count 458 | } 459 | 460 | function verifyImmutable(tree: BinaryPlusKeyValueDatabase, fn: () => void) { 461 | const shallow = 
shallowClone(tree) 462 | const deep = clone(tree) 463 | 464 | fn() 465 | 466 | const keys = uniq([...Object.keys(tree.kv.map), ...Object.keys(shallow)]) 467 | for (const key of keys) { 468 | const newNode = tree.kv.get(key) 469 | const originalValue = deep.kv.get(key) 470 | const originalRef = shallow.kv.get(key) 471 | 472 | if (isEqual(newNode, originalValue)) { 473 | assert.ok( 474 | newNode === originalRef 475 | // [inspect(deep), inspect(tree), JSON.stringify(newNode)].join("\n\n") 476 | ) 477 | } else { 478 | assert.ok( 479 | newNode !== originalRef 480 | // [inspect(deep), inspect(tree), JSON.stringify(newNode)].join("\n\n") 481 | ) 482 | } 483 | } 484 | } 485 | -------------------------------------------------------------------------------- /src/bptree-tx.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { cloneDeep, isEqual, uniq } from "lodash" 3 | import { describe, it } from "mocha" 4 | import { BinaryPlusTransactionalTree } from "./bptree-tx" 5 | 6 | // min = 2, max = 4 7 | const structuralTests24 = ` 8 | + 5 9 | [5] 10 | 11 | + 10 12 | [5,10] 13 | 14 | + 3 15 | [3,5,10] 16 | 17 | // Delete from root leaf 18 | - 5 19 | [3,10] 20 | 21 | + 5 22 | [3,5,10] 23 | 24 | + 7 25 | [3,5,7,10] 26 | 27 | // Split 28 | + 6 29 | [null,7] 30 | [3,5,6] [7,10] 31 | 32 | // Merge right branch 33 | - 7 34 | [3,5,6,10] 35 | 36 | + 7 37 | [null,7] 38 | [3,5,6] [7,10] 39 | 40 | - 6 41 | [null,7] 42 | [3,5] [7,10] 43 | 44 | // Merge left branch 45 | - 5 46 | [3,7,10] 47 | 48 | + 5 49 | [3,5,7,10] 50 | 51 | + 6 52 | [null,7] 53 | [3,5,6] [7,10] 54 | 55 | + 14 56 | [null,7] 57 | [3,5,6] [7,10,14] 58 | 59 | + 23 60 | [null,7] 61 | [3,5,6] [7,10,14,23] 62 | 63 | + 24 64 | [null,7,23] 65 | [3,5,6] [7,10,14] [23,24] 66 | 67 | // Merge right branch 68 | - 23 69 | [null,7] 70 | [3,5,6] [7,10,14,24] 71 | 72 | + 23 73 | [null,7,23] 74 | [3,5,6] [7,10,14] [23,24] 75 | 76 | // Update parent 
minKey 77 | - 7 78 | [null,10,23] 79 | [3,5,6] [10,14] [23,24] 80 | 81 | // Merge middle branch 82 | - 14 83 | [null,23] 84 | [3,5,6,10] [23,24] 85 | 86 | + 14 87 | [null,10,23] 88 | [3,5,6] [10,14] [23,24] 89 | 90 | - 3 91 | [null,10,23] 92 | [5,6] [10,14] [23,24] 93 | 94 | // Merge left branch 95 | - 6 96 | [null,23] 97 | [5,10,14] [23,24] 98 | 99 | + 3 100 | [null,23] 101 | [3,5,10,14] [23,24] 102 | 103 | + 6 104 | [null,10,23] 105 | [3,5,6] [10,14] [23,24] 106 | 107 | + 7 108 | [null,10,23] 109 | [3,5,6,7] [10,14] [23,24] 110 | 111 | + 8 112 | [null,7,10,23] 113 | [3,5,6] [7,8] [10,14] [23,24] 114 | 115 | + 11 116 | [null,7,10,23] 117 | [3,5,6] [7,8] [10,11,14] [23,24] 118 | 119 | + 12 120 | [null,7,10,23] 121 | [3,5,6] [7,8] [10,11,12,14] [23,24] 122 | 123 | // Double split 124 | + 13 125 | [null,13] 126 | [null,7,10] [13,23] 127 | [3,5,6] [7,8] [10,11,12] [13,14] [23,24] 128 | 129 | + 15 130 | [null,13] 131 | [null,7,10] [13,23] 132 | [3,5,6] [7,8] [10,11,12] [13,14,15] [23,24] 133 | 134 | // Double update minKey 135 | - 13 136 | [null,14] 137 | [null,7,10] [14,23] 138 | [3,5,6] [7,8] [10,11,12] [14,15] [23,24] 139 | 140 | // Double merge mid-right branch 141 | - 14 142 | [null,7,10,15] 143 | [3,5,6] [7,8] [10,11,12] [15,23,24] 144 | 145 | + 2 146 | [null,7,10,15] 147 | [2,3,5,6] [7,8] [10,11,12] [15,23,24] 148 | 149 | + 4 150 | [null,10] 151 | [null,5,7] [10,15] 152 | [2,3,4] [5,6] [7,8] [10,11,12] [15,23,24] 153 | 154 | - 8 155 | [null,10] 156 | [null,5] [10,15] 157 | [2,3,4] [5,6,7] [10,11,12] [15,23,24] 158 | 159 | - 3 160 | [null,10] 161 | [null,5] [10,15] 162 | [2,4] [5,6,7] [10,11,12] [15,23,24] 163 | 164 | // Double merge left branch 165 | - 2 166 | [null,10,15] 167 | [4,5,6,7] [10,11,12] [15,23,24] 168 | 169 | - 15 170 | [null,10,23] 171 | [4,5,6,7] [10,11,12] [23,24] 172 | 173 | + 20 174 | [null,10,23] 175 | [4,5,6,7] [10,11,12,20] [23,24] 176 | 177 | // Redistribute right 178 | - 24 179 | [null,10,20] 180 | [4,5,6,7] [10,11,12] [20,23] 181 | 182 | 
+ 13 183 | [null,10,20] 184 | [4,5,6,7] [10,11,12,13] [20,23] 185 | 186 | - 4 187 | [null,10,20] 188 | [5,6,7] [10,11,12,13] [20,23] 189 | 190 | - 5 191 | [null,10,20] 192 | [6,7] [10,11,12,13] [20,23] 193 | 194 | // Redistribute left 195 | - 6 196 | [null,12,20] 197 | [7,10,11] [12,13] [20,23] 198 | 199 | ` 200 | 201 | describe("BinaryPlusTransactionalTree", () => { 202 | describe("structural tests 2-4", () => { 203 | const tree = new BinaryPlusTransactionalTree(2, 4) 204 | test(tree, structuralTests24) 205 | }) 206 | 207 | describe("property test 2-4 * 100", () => { 208 | propertyTest({ minSize: 2, maxSize: 4, testSize: 100 }) 209 | }) 210 | 211 | describe("property test 3-6 * 100", () => { 212 | propertyTest({ minSize: 3, maxSize: 6, testSize: 100 }) 213 | }) 214 | 215 | it("big tree", () => { 216 | const numbers = randomNumbers(20_000) 217 | const tree = new BinaryPlusTransactionalTree(3, 9) 218 | for (const number of numbers) { 219 | tree.set(number, number * 2) 220 | assert.equal(tree.get(number), number * 2) 221 | } 222 | for (const number of numbers) { 223 | tree.delete(number) 224 | assert.equal(tree.get(number), undefined) 225 | } 226 | assert.equal(tree.depth(), 1) 227 | }) 228 | 229 | function propertyTest(args: { 230 | minSize: number 231 | maxSize: number 232 | testSize: number 233 | }) { 234 | const numbers = randomNumbers(args.testSize) 235 | 236 | const tree = new BinaryPlusTransactionalTree(args.minSize, args.maxSize) 237 | for (let i = 0; i < numbers.length; i++) { 238 | const n = numbers[i] 239 | it(`Set ${i} : ${n}`, () => { 240 | // it(`+ ${n}`, () => { 241 | 242 | verifyImmutable(tree, () => { 243 | tree.set(n, n.toString()) 244 | verify(tree) 245 | }) 246 | 247 | for (let j = 0; j <= i; j++) { 248 | const x = numbers[j] 249 | assert.equal(tree.get(x), x.toString()) 250 | } 251 | // }) 252 | 253 | // Overwrite the jth key. 
254 | for (let j = 0; j <= i; j++) { 255 | const x = numbers[j] 256 | 257 | // it(`Overwrite ${j}: ${x}`, () => { 258 | const t = clone(tree) 259 | 260 | verifyImmutable(t, () => { 261 | t.set(x, x * 2) 262 | verify(t) 263 | }) 264 | 265 | // Check get on all keys. 266 | for (let k = 0; k <= i; k++) { 267 | const y = numbers[k] 268 | if (x === y) assert.equal(t.get(y), y * 2) 269 | else assert.equal(t.get(y), y.toString()) 270 | } 271 | // }) 272 | } 273 | 274 | // Delete the jth key. 275 | for (let j = 0; j <= i; j++) { 276 | const x = numbers[j] 277 | 278 | // it(`Delete ${j} : ${x}`, () => { 279 | const t = clone(tree) 280 | verifyImmutable(t, () => { 281 | t.delete(x) 282 | verify(t) 283 | }) 284 | 285 | // Check get on all keys. 286 | for (let k = 0; k <= i; k++) { 287 | const y = numbers[k] 288 | if (x === y) assert.equal(t.get(y), undefined) 289 | else assert.equal(t.get(y), y.toString()) 290 | } 291 | // }) 292 | } 293 | }) 294 | } 295 | } 296 | }) 297 | 298 | function randomNumbers(size: number) { 299 | const numbers: number[] = [] 300 | for (let i = 0; i < size; i++) 301 | numbers.push(Math.round((Math.random() - 0.5) * size * 10)) 302 | return uniq(numbers) 303 | } 304 | 305 | function parseTests(str: string) { 306 | // Cleanup extra whitespace 307 | str = str 308 | .split("\n") 309 | .map((line) => line.trim()) 310 | .join("\n") 311 | .trim() 312 | 313 | return str.split("\n\n").map((block) => { 314 | const lines = block.split("\n") 315 | let comment = "" 316 | if (lines[0].startsWith("//")) { 317 | comment = lines[0].slice(3) 318 | lines.splice(0, 1) 319 | } 320 | const [op, nStr] = lines[0].split(" ") 321 | const n = parseInt(nStr) 322 | const tree = lines.slice(1).join("\n") 323 | return { comment, n, tree, op: op as "+" | "-" } 324 | }) 325 | } 326 | 327 | function test(tree: BinaryPlusTransactionalTree, str: string) { 328 | for (const test of parseTests(str)) { 329 | let label = `${test.op} ${test.n}` 330 | if (test.comment) label += " // " + 
test.comment 331 | it(label, () => { 332 | if (test.op === "+") tree.set(test.n, test.n.toString()) 333 | if (test.op === "-") tree.delete(test.n) 334 | assert.equal(inspect(tree), test.tree, test.comment) 335 | 336 | const value = test.op === "+" ? test.n.toString() : undefined 337 | assert.equal(tree.get(test.n), value, test.comment) 338 | 339 | assert.equal(tree.depth(), test.tree.split("\n").length, test.comment) 340 | }) 341 | } 342 | } 343 | 344 | type Key = string | number 345 | type KeyTree = 346 | | { keys: Key[]; children?: undefined } 347 | | { keys: Key[]; children: KeyTree[] } 348 | 349 | function toKeyTree(tree: BinaryPlusTransactionalTree, id = "root"): KeyTree { 350 | const node = tree.nodes[id] 351 | if (!node) { 352 | console.warn("Missing node!") 353 | // throw new Error("Missing node!") 354 | return { keys: [] } 355 | } 356 | 357 | const keys = node.values.map((v) => v.key) 358 | if (node.leaf) return { keys: keys } 359 | 360 | const subtrees = node.values.map((v) => toKeyTree(tree, v.value)) 361 | return { keys: keys, children: subtrees } 362 | } 363 | 364 | type TreeLayer = Key[][] 365 | 366 | function toTreeLayers(tree: KeyTree): TreeLayer[] { 367 | const layers: TreeLayer[] = [] 368 | 369 | let cursor = [tree] 370 | while (cursor.length > 0) { 371 | const layer: TreeLayer = [] 372 | const nextCursor: KeyTree[] = [] 373 | for (const tree of cursor) { 374 | layer.push(tree.keys) 375 | if (tree.children) nextCursor.push(...tree.children) 376 | } 377 | layers.push(layer) 378 | cursor = nextCursor 379 | } 380 | return layers 381 | } 382 | 383 | function print(x: any) { 384 | if (x === null) return "null" 385 | if (typeof x === "number") return x.toString() 386 | if (typeof x === "string") return JSON.stringify(x) 387 | if (Array.isArray(x)) return "[" + x.map(print).join(",") + "]" 388 | return "" 389 | } 390 | 391 | function inspect(tree: BinaryPlusTransactionalTree) { 392 | const keyTree = toKeyTree(tree) 393 | const layers = 
toTreeLayers(keyTree) 394 | const str = layers 395 | .map((layer) => 396 | layer.length === 1 ? print(layer[0]) : layer.map(print).join(" ") 397 | ) 398 | .join("\n") 399 | return str 400 | } 401 | 402 | function clone(tree: BinaryPlusTransactionalTree) { 403 | const cloned = new BinaryPlusTransactionalTree(tree.minSize, tree.maxSize) 404 | cloned.nodes = cloneDeep(tree.nodes) 405 | return cloned 406 | } 407 | 408 | function shallowClone(tree: BinaryPlusTransactionalTree) { 409 | const cloned = new BinaryPlusTransactionalTree(tree.minSize, tree.maxSize) 410 | cloned.nodes = { ...tree.nodes } 411 | return cloned 412 | } 413 | 414 | /** Check for node sizes. */ 415 | function verify(tree: BinaryPlusTransactionalTree, id = "root") { 416 | const node = tree.nodes[id] 417 | if (id === "root") { 418 | assert.equal(countNodes(tree), Object.keys(tree.nodes).length) 419 | 420 | if (!node) return 421 | if (node.leaf) return 422 | for (const { value } of node.values) verify(tree, value) 423 | return 424 | } 425 | 426 | assert.ok(node) 427 | assert.ok(node.values.length >= tree.minSize) 428 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 429 | 430 | if (node.leaf) return 431 | for (const { value } of node.values) verify(tree, value) 432 | } 433 | 434 | function countNodes(tree: BinaryPlusTransactionalTree, id = "root") { 435 | const node = tree.nodes[id] 436 | if (id === "root") { 437 | if (!node) return 0 438 | if (node.leaf) return 1 439 | let count = 1 440 | for (const { value } of node.values) count += countNodes(tree, value) 441 | return count 442 | } 443 | 444 | assert.ok(node) 445 | assert.ok(node.values.length >= tree.minSize) 446 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 447 | 448 | if (node.leaf) return 1 449 | let count = 1 450 | for (const { value } of node.values) count += countNodes(tree, value) 451 | return count 452 | } 453 | 454 | function verifyImmutable(tree: BinaryPlusTransactionalTree, fn: () => void) { 455 | const 
shallow = shallowClone(tree) 456 | const deep = clone(tree) 457 | 458 | fn() 459 | 460 | const keys = uniq([...Object.keys(tree.nodes), ...Object.keys(shallow.nodes)]) 461 | for (const key of keys) { 462 | const newNode = tree.nodes[key] 463 | const originalValue = deep.nodes[key] 464 | const originalRef = shallow.nodes[key] 465 | 466 | if (isEqual(newNode, originalValue)) { 467 | assert.ok( 468 | newNode === originalRef 469 | // [inspect(deep), inspect(tree), JSON.stringify(newNode)].join("\n\n") 470 | ) 471 | } else { 472 | assert.ok( 473 | newNode !== originalRef 474 | // [inspect(deep), inspect(tree), JSON.stringify(newNode)].join("\n\n") 475 | ) 476 | } 477 | } 478 | } 479 | -------------------------------------------------------------------------------- /src/bptree-tx.ts: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Both Postgres and SQLite use B+ trees as the foundation of their indexes. 4 | 5 | Even though we have an OrderedKeyValueDatabase, let's build a B+ tree on top of a KeyValueDatabase 6 | so that we can later extend it to an interval tree and a range tree. 7 | 8 | */ 9 | 10 | import { orderedArray } from "@ccorcos/ordered-array" 11 | 12 | type Key = string | number 13 | 14 | /** 15 | * id references the node in a key-value database. 16 | * Each item in values has a `key` that is the minKey of the child node with id `value`. 17 | * The key will be null for the left-most branch nodes. 
18 | */ 19 | export type BranchNode = { 20 | leaf?: false 21 | id: string 22 | values: { key: Key | null; value: string }[] 23 | } 24 | 25 | export type LeafNode = { 26 | leaf: true 27 | id: string 28 | values: { key: Key | null; value: any }[] 29 | } 30 | 31 | const { search, insert, remove } = orderedArray( 32 | (item: { key: Key | null }) => item.key, 33 | (a, b) => { 34 | if (a === b) return 0 35 | if (a === null) return -1 36 | if (b === null) return 1 37 | if (a > b) return 1 38 | else return -1 39 | } 40 | ) 41 | 42 | export class BinaryPlusTransactionalTree { 43 | // In preparation for storing nodes in a key-value database. 44 | nodes: { [key: Key]: BranchNode | LeafNode | undefined } = {} 45 | 46 | /** 47 | * minSize must be less than maxSize / 2. 48 | */ 49 | constructor(public minSize: number, public maxSize: number) { 50 | if (minSize > maxSize / 2) throw new Error("Invalid tree size.") 51 | } 52 | 53 | // Commit transaction for read-concurrency checks. 54 | get = (key: Key): any | undefined => { 55 | const tx = new Transaction(this.nodes) 56 | 57 | const root = tx.get("root") 58 | if (!root) { 59 | // No need to tx.check(), we only read one value. 60 | return // Empty tree 61 | } 62 | 63 | let node = root 64 | while (true) { 65 | if (node.leaf) { 66 | const result = search(node.values, key) 67 | if (result.found === undefined) { 68 | if (node.id !== "root") tx.check() 69 | return 70 | } 71 | if (node.id !== "root") tx.check() 72 | return node.values[result.found].value 73 | } 74 | 75 | const result = search(node.values, key) 76 | 77 | // Closest key that is at least as big as the key... 78 | // So the closest should never be less than the minKey. 79 | if (result.closest === 0) { 80 | tx.check() 81 | throw new Error("Broken.") 82 | } 83 | 84 | const childIndex = 85 | result.found !== undefined ? 
result.found : result.closest - 1 86 | const childId = node.values[childIndex].value 87 | const child = tx.get(childId) 88 | if (!child) { 89 | // Check first in case this node was deleted based on a concurrent write. 90 | tx.check() 91 | throw Error("Missing child node.") 92 | } 93 | node = child 94 | } 95 | } 96 | 97 | set = (key: Key, value: any) => { 98 | const tx = new Transaction(this.nodes) 99 | const root = tx.get("root") 100 | 101 | // Intitalize root node. 102 | if (!root) { 103 | tx.set("root", { 104 | leaf: true, 105 | id: "root", 106 | values: [{ key, value }], 107 | }) 108 | tx.commit() 109 | return 110 | } 111 | 112 | // Insert into leaf node. 113 | const nodePath = [root] 114 | const indexPath: number[] = [] 115 | while (true) { 116 | const node = nodePath[0] 117 | 118 | if (node.leaf) { 119 | const newNode = { ...node, values: [...node.values] } 120 | const existing = insert(newNode.values, { key, value }) 121 | tx.set(newNode.id, newNode) 122 | 123 | // No need to rebalance if we're replacing 124 | if (existing) { 125 | tx.commit() 126 | return 127 | } 128 | 129 | // Replace the node and balance the tree. 130 | nodePath[0] = newNode 131 | break 132 | } 133 | 134 | const result = search(node.values, key) 135 | const index = 136 | result.found !== undefined ? result.found : result.closest - 1 137 | const childId = node.values[index].value 138 | const child = tx.get(childId) 139 | if (!child) { 140 | tx.check() 141 | throw Error("Missing child node.") 142 | } 143 | // Recur into child. 144 | nodePath.unshift(child) 145 | indexPath.unshift(index) 146 | } 147 | 148 | // Balance the tree by splitting nodes, starting from the leaf. 
149 | let node = nodePath.shift() 150 | while (node) { 151 | const size = node.values.length 152 | if (size <= this.maxSize) { 153 | tx.commit() 154 | return 155 | } 156 | 157 | const splitIndex = Math.round(size / 2) 158 | const rightNode: LeafNode | BranchNode = { 159 | id: randomId(), 160 | leaf: node.leaf, 161 | // TODO: fix mutation! 162 | values: node.values.splice(splitIndex), 163 | } 164 | tx.set(rightNode.id, rightNode) 165 | const rightMinKey = rightNode.values[0].key 166 | 167 | // If we're splitting the root node, we want to keep the root id. 168 | if (node.id === "root") { 169 | const leftNode: LeafNode | BranchNode = { 170 | id: randomId(), 171 | leaf: node.leaf, 172 | values: node.values, 173 | } 174 | tx.set(leftNode.id, leftNode) 175 | 176 | const newRoot: LeafNode | BranchNode = { 177 | id: "root", 178 | values: [ 179 | { key: null, value: leftNode.id }, 180 | { key: rightMinKey, value: rightNode.id }, 181 | ], 182 | } 183 | tx.set(newRoot.id, newRoot) 184 | tx.commit() 185 | return 186 | } 187 | 188 | // Insert right node into parent. 189 | const parent = nodePath.shift() 190 | const parentIndex = indexPath.shift() 191 | if (!parent) { 192 | tx.check() 193 | throw new Error("Broken.") 194 | } 195 | if (parentIndex === undefined) { 196 | tx.check() 197 | throw new Error("Broken.") 198 | } 199 | 200 | const newParent = { ...parent, values: [...parent.values] } 201 | newParent.values.splice(parentIndex + 1, 0, { 202 | key: rightMinKey, 203 | value: rightNode.id, 204 | }) 205 | tx.set(newParent.id, newParent) 206 | 207 | // Recur 208 | node = newParent 209 | } 210 | } 211 | 212 | delete = (key: Key) => { 213 | const tx = new Transaction(this.nodes) 214 | const root = tx.get("root") 215 | if (!root) { 216 | // No need to tx.check() 217 | return 218 | } 219 | 220 | // Delete from leaf node. 
221 | const nodePath = [root] 222 | const indexPath: number[] = [] 223 | while (true) { 224 | const node = nodePath[0] 225 | 226 | if (node.leaf) { 227 | const newNode = { ...node, values: [...node.values] } 228 | const exists = remove(newNode.values, key) 229 | tx.set(newNode.id, newNode) 230 | if (!exists) { 231 | tx.commit() 232 | return 233 | } 234 | // Continue to rebalance. 235 | nodePath[0] = newNode 236 | break 237 | } 238 | 239 | const result = search(node.values, key) 240 | const index = 241 | result.found !== undefined ? result.found : result.closest - 1 242 | const childId = node.values[index].value 243 | const child = tx.get(childId) 244 | if (!child) { 245 | tx.check() 246 | throw Error("Missing child node.") 247 | } 248 | 249 | // Recur into the child. 250 | nodePath.unshift(child) 251 | indexPath.unshift(index) 252 | } 253 | 254 | /* 255 | 256 | Step-by-step explanation of the more complicated case. 257 | 258 | Imagine a tree with minSize = 2, maxSize = 4. 259 | 260 | [null,10] 261 | [null,5] [10,15] 262 | [2,4] [5,7] [10,11] [15,24] 263 | 264 | Removing 10 from the leaf 265 | 266 | [null,10] 267 | [null,5] [10,15] 268 | [2,4] [5,7] [11] [15,24] 269 | 270 | Loop: Merge and update parent pointers. 271 | 272 | [null,10] 273 | [null,5] [11] 274 | [2,4] [5,7] [11,15,24] 275 | 276 | Recurse into parent. 277 | 278 | [null] 279 | [null,5,11] 280 | [2,4] [5,7] [11,15,24] 281 | 282 | Replace the root with child if there is only one key 283 | 284 | [null,5,11] 285 | [2,4] [5,7] [11,15,24] 286 | 287 | */ 288 | 289 | let node = nodePath.shift() 290 | while (node) { 291 | if (node.id === "root") { 292 | // A root leaf node has no minSize constaint. 293 | if (node.leaf) { 294 | tx.commit() 295 | return 296 | } 297 | 298 | // Root node with only one child becomes its child. 
299 | if (node.values.length === 1) { 300 | const childId = node.values[0].value 301 | const child = tx.get(childId) 302 | if (!child) { 303 | tx.check() 304 | throw new Error("Broken.") 305 | } 306 | const newRoot = { ...child, id: "root" } 307 | tx.set(newRoot.id, newRoot) 308 | tx.delete(childId) 309 | } 310 | 311 | tx.commit() 312 | return 313 | } 314 | 315 | const parent = nodePath.shift() 316 | const parentIndex = indexPath.shift() 317 | if (!parent) { 318 | tx.check() 319 | throw new Error("Broken.") 320 | } 321 | if (parentIndex === undefined) { 322 | tx.check() 323 | throw new Error("Broken.") 324 | } 325 | 326 | if (node.values.length >= this.minSize) { 327 | // No need to merge but we might need to update the minKey in the parent 328 | const parentItem = parent.values[parentIndex] 329 | // No need to recusively update the left-most branch. 330 | if (parentItem.key === null) { 331 | tx.commit() 332 | return 333 | } 334 | // No need to recursively update if the minKey didn't change. 335 | if (parentItem.key === node.values[0].key) { 336 | tx.commit() 337 | return 338 | } 339 | 340 | // Set the minKey and recur 341 | const newParent = { ...parent, values: [...parent.values] } 342 | newParent.values[parentIndex] = { 343 | key: node.values[0].key, 344 | value: parentItem.value, 345 | } 346 | tx.set(newParent.id, newParent) 347 | node = newParent 348 | continue 349 | } 350 | 351 | // Merge or redistribute 352 | if (parentIndex === 0) { 353 | // When we delete from the first element, merge/redistribute with right sibling. 354 | const rightId = parent.values[parentIndex + 1].value 355 | const rightSibling = tx.get(rightId) 356 | if (!rightSibling) { 357 | tx.check() 358 | throw new Error("Broken.") 359 | } 360 | 361 | const combinedSize = node.values.length + rightSibling.values.length 362 | if (combinedSize > this.maxSize) { 363 | // Redistribute between both nodes. 
364 | const splitIndex = Math.round(combinedSize / 2) - node.values.length 365 | 366 | const newRight = { ...rightSibling, values: [...rightSibling.values] } 367 | const moveLeft = newRight.values.splice(0, splitIndex) 368 | tx.set(newRight.id, newRight) 369 | 370 | const newNode = { ...node, values: [...node.values] } 371 | newNode.values.push(...moveLeft) 372 | tx.set(newNode.id, newNode) 373 | 374 | // Update parent minKey. 375 | const newParent = { ...parent, values: [...parent.values] } 376 | if (parent.values[parentIndex].key !== null) { 377 | newParent.values[parentIndex] = { 378 | key: newNode.values[0].key, 379 | value: newParent.values[parentIndex].value, 380 | } 381 | } 382 | 383 | newParent.values[parentIndex + 1] = { 384 | key: newRight.values[0].key, 385 | value: newParent.values[parentIndex + 1].value, 386 | } 387 | tx.set(newParent.id, newParent) 388 | 389 | // Recur 390 | node = newParent 391 | continue 392 | } 393 | 394 | // Merge 395 | const newRight = { ...rightSibling, values: [...rightSibling.values] } 396 | newRight.values.unshift(...node.values) 397 | 398 | // Remove the old pointer to rightSibling 399 | const newParent = { ...parent, values: [...parent.values] } 400 | newParent.values.splice(1, 1) 401 | 402 | // Replace the node pointer with the new rightSibling 403 | const leftMost = newParent.values[0].key === null 404 | newParent.values[0] = { 405 | key: leftMost ? null : newRight.values[0].key, 406 | value: newRight.id, 407 | } 408 | tx.set(newRight.id, newRight) 409 | tx.set(newParent.id, newParent) 410 | tx.delete(node.id) 411 | 412 | // Recur 413 | node = newParent 414 | continue 415 | } 416 | 417 | // Merge/redistribute with left sibling. 
418 | const leftId = parent.values[parentIndex - 1].value 419 | const leftSibling = tx.get(leftId) 420 | if (!leftSibling) { 421 | tx.check() 422 | throw new Error("Broken.") 423 | } 424 | 425 | const combinedSize = leftSibling.values.length + node.values.length 426 | if (combinedSize > this.maxSize) { 427 | // Redistribute 428 | const splitIndex = Math.round(combinedSize / 2) 429 | 430 | const newLeft = { ...leftSibling, values: [...leftSibling.values] } 431 | const moveRight = newLeft.values.splice(splitIndex, this.maxSize) 432 | 433 | const newNode = { ...node, values: [...node.values] } 434 | newNode.values.unshift(...moveRight) 435 | 436 | // Update parent keys. 437 | const newParent = { ...parent, values: [...parent.values] } 438 | newParent.values[parentIndex] = { 439 | key: newNode.values[0].key, 440 | value: newParent.values[parentIndex].value, 441 | } 442 | tx.set(newLeft.id, newLeft) 443 | tx.set(newNode.id, newNode) 444 | tx.set(newParent.id, newParent) 445 | 446 | // Recur 447 | node = newParent 448 | continue 449 | } 450 | 451 | // Merge 452 | const newLeft = { ...leftSibling, values: [...leftSibling.values] } 453 | newLeft.values.push(...node.values) 454 | 455 | // No need to update minKey because we added to the right. 456 | // Just need to delete the old node. 
457 | const newParent = { ...parent, values: [...parent.values] } 458 | newParent.values.splice(parentIndex, 1) 459 | 460 | tx.set(newLeft.id, newLeft) 461 | tx.set(newParent.id, newParent) 462 | tx.delete(node.id) 463 | 464 | // Recur 465 | node = newParent 466 | continue 467 | } 468 | } 469 | 470 | depth() { 471 | const tx = new Transaction(this.nodes) 472 | const root = tx.get("root") 473 | if (!root) return 0 474 | let depth = 1 475 | let node = root 476 | while (!node.leaf) { 477 | depth += 1 478 | const nextNode = tx.get(node.values[0].value) 479 | if (!nextNode) { 480 | tx.check() 481 | throw new Error("Broken.") 482 | } 483 | node = nextNode 484 | } 485 | tx.check() 486 | return depth 487 | } 488 | } 489 | 490 | function randomId() { 491 | return Math.random().toString(36).slice(2, 10) 492 | } 493 | 494 | /** In preparation for using KeyValueDatabase */ 495 | export class Transaction { 496 | // checks: { [key: string]: string | undefined } = {} 497 | cache: { [key: string]: BranchNode | LeafNode | undefined } = {} 498 | sets: { [key: string]: BranchNode | LeafNode } = {} 499 | deletes = new Set() 500 | 501 | constructor( 502 | public nodes: { [key: Key]: BranchNode | LeafNode | undefined } 503 | ) {} 504 | 505 | get = (key: string): BranchNode | LeafNode | undefined => { 506 | if (key in this.cache) return this.cache[key] 507 | const result = this.nodes[key] 508 | this.cache[key] = result 509 | return result 510 | } 511 | 512 | set(key: string, value: BranchNode | LeafNode) { 513 | this.sets[key] = value 514 | this.cache[key] = value 515 | this.deletes.delete(key) 516 | } 517 | 518 | delete(key: string) { 519 | this.cache[key] = undefined 520 | delete this.sets[key] 521 | this.deletes.add(key) 522 | } 523 | 524 | check() { 525 | // For read consistency later. 
526 | } 527 | 528 | commit() { 529 | for (const [key, value] of Object.entries(this.sets)) 530 | this.nodes[key] = value 531 | for (const key of this.deletes) delete this.nodes[key] 532 | } 533 | } 534 | -------------------------------------------------------------------------------- /src/bptree.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { cloneDeep } from "lodash" 3 | import { describe, it } from "mocha" 4 | import { BinaryPlusTree } from "./bptree" 5 | 6 | // min = 2, max = 4 7 | const structuralTests24 = ` 8 | + 5 9 | [5] 10 | 11 | + 10 12 | [5,10] 13 | 14 | + 3 15 | [3,5,10] 16 | 17 | // Delete from root leaf 18 | - 5 19 | [3,10] 20 | 21 | + 5 22 | [3,5,10] 23 | 24 | + 7 25 | [3,5,7,10] 26 | 27 | // Split 28 | + 6 29 | [null,7] 30 | [3,5,6] [7,10] 31 | 32 | // Merge right branch 33 | - 7 34 | [3,5,6,10] 35 | 36 | + 7 37 | [null,7] 38 | [3,5,6] [7,10] 39 | 40 | - 6 41 | [null,7] 42 | [3,5] [7,10] 43 | 44 | // Merge left branch 45 | - 5 46 | [3,7,10] 47 | 48 | + 5 49 | [3,5,7,10] 50 | 51 | + 6 52 | [null,7] 53 | [3,5,6] [7,10] 54 | 55 | + 14 56 | [null,7] 57 | [3,5,6] [7,10,14] 58 | 59 | + 23 60 | [null,7] 61 | [3,5,6] [7,10,14,23] 62 | 63 | + 24 64 | [null,7,23] 65 | [3,5,6] [7,10,14] [23,24] 66 | 67 | // Merge right branch 68 | - 23 69 | [null,7] 70 | [3,5,6] [7,10,14,24] 71 | 72 | + 23 73 | [null,7,23] 74 | [3,5,6] [7,10,14] [23,24] 75 | 76 | // Update parent minKey 77 | - 7 78 | [null,10,23] 79 | [3,5,6] [10,14] [23,24] 80 | 81 | // Merge middle branch 82 | - 14 83 | [null,23] 84 | [3,5,6,10] [23,24] 85 | 86 | + 14 87 | [null,10,23] 88 | [3,5,6] [10,14] [23,24] 89 | 90 | - 3 91 | [null,10,23] 92 | [5,6] [10,14] [23,24] 93 | 94 | // Merge left branch 95 | - 6 96 | [null,23] 97 | [5,10,14] [23,24] 98 | 99 | + 3 100 | [null,23] 101 | [3,5,10,14] [23,24] 102 | 103 | + 6 104 | [null,10,23] 105 | [3,5,6] [10,14] [23,24] 106 | 107 | + 7 108 | [null,10,23] 109 | 
[3,5,6,7] [10,14] [23,24] 110 | 111 | + 8 112 | [null,7,10,23] 113 | [3,5,6] [7,8] [10,14] [23,24] 114 | 115 | + 11 116 | [null,7,10,23] 117 | [3,5,6] [7,8] [10,11,14] [23,24] 118 | 119 | + 12 120 | [null,7,10,23] 121 | [3,5,6] [7,8] [10,11,12,14] [23,24] 122 | 123 | // Double split 124 | + 13 125 | [null,13] 126 | [null,7,10] [13,23] 127 | [3,5,6] [7,8] [10,11,12] [13,14] [23,24] 128 | 129 | + 15 130 | [null,13] 131 | [null,7,10] [13,23] 132 | [3,5,6] [7,8] [10,11,12] [13,14,15] [23,24] 133 | 134 | // Double update minKey 135 | - 13 136 | [null,14] 137 | [null,7,10] [14,23] 138 | [3,5,6] [7,8] [10,11,12] [14,15] [23,24] 139 | 140 | // Double merge mid-right branch 141 | - 14 142 | [null,7,10,15] 143 | [3,5,6] [7,8] [10,11,12] [15,23,24] 144 | 145 | + 2 146 | [null,7,10,15] 147 | [2,3,5,6] [7,8] [10,11,12] [15,23,24] 148 | 149 | + 4 150 | [null,10] 151 | [null,5,7] [10,15] 152 | [2,3,4] [5,6] [7,8] [10,11,12] [15,23,24] 153 | 154 | - 8 155 | [null,10] 156 | [null,5] [10,15] 157 | [2,3,4] [5,6,7] [10,11,12] [15,23,24] 158 | 159 | - 3 160 | [null,10] 161 | [null,5] [10,15] 162 | [2,4] [5,6,7] [10,11,12] [15,23,24] 163 | 164 | // Double merge left branch 165 | - 2 166 | [null,10,15] 167 | [4,5,6,7] [10,11,12] [15,23,24] 168 | 169 | - 15 170 | [null,10,23] 171 | [4,5,6,7] [10,11,12] [23,24] 172 | 173 | + 20 174 | [null,10,23] 175 | [4,5,6,7] [10,11,12,20] [23,24] 176 | 177 | // Redistribute right 178 | - 24 179 | [null,10,20] 180 | [4,5,6,7] [10,11,12] [20,23] 181 | 182 | + 13 183 | [null,10,20] 184 | [4,5,6,7] [10,11,12,13] [20,23] 185 | 186 | - 4 187 | [null,10,20] 188 | [5,6,7] [10,11,12,13] [20,23] 189 | 190 | - 5 191 | [null,10,20] 192 | [6,7] [10,11,12,13] [20,23] 193 | 194 | // Redistribute left 195 | - 6 196 | [null,12,20] 197 | [7,10,11] [12,13] [20,23] 198 | 199 | ` 200 | 201 | describe("BinaryPlusTree", () => { 202 | describe("structural tests 2-4", () => { 203 | const tree = new BinaryPlusTree(2, 4) 204 | test(tree, structuralTests24) 205 | }) 206 | 207 | 
describe("property test 2-4 * 100", () => { 208 | propertyTest({ minSize: 2, maxSize: 4, testSize: 100 }) 209 | }) 210 | 211 | describe("property test 3-6 * 100", () => { 212 | propertyTest({ minSize: 3, maxSize: 6, testSize: 100 }) 213 | }) 214 | 215 | it("big tree", () => { 216 | const numbers = randomNumbers(20_000) 217 | const tree = new BinaryPlusTree(3, 9) 218 | for (const number of numbers) { 219 | tree.set(number, number * 2) 220 | assert.equal(tree.get(number), number * 2) 221 | } 222 | for (const number of numbers) { 223 | tree.delete(number) 224 | assert.equal(tree.get(number), undefined) 225 | } 226 | assert.equal(tree.depth(), 1) 227 | }) 228 | 229 | function propertyTest(args: { 230 | minSize: number 231 | maxSize: number 232 | testSize: number 233 | }) { 234 | const size = args.testSize 235 | const numbers = randomNumbers(size) 236 | 237 | const tree = new BinaryPlusTree(args.minSize, args.maxSize) 238 | for (let i = 0; i < size; i++) { 239 | const n = numbers[i] 240 | it(`Set ${i} : ${n}`, () => { 241 | // it(`+ ${n}`, () => { 242 | tree.set(n, n.toString()) 243 | verify(tree) 244 | 245 | // Get works on every key so far. 246 | for (let j = 0; j <= i; j++) { 247 | const x = numbers[j] 248 | assert.equal(tree.get(x), x.toString()) 249 | } 250 | // }) 251 | 252 | // Overwrite the jth key. 253 | for (let j = 0; j <= i; j++) { 254 | const x = numbers[j] 255 | 256 | // it(`Overwrite ${j}: ${x}`, () => { 257 | const t = clone(tree) 258 | t.set(x, x * 2) 259 | verify(t) 260 | 261 | // Check get on all keys. 262 | for (let k = 0; k <= i; k++) { 263 | const y = numbers[k] 264 | if (x === y) assert.equal(t.get(y), y * 2) 265 | else assert.equal(t.get(y), y.toString()) 266 | } 267 | // }) 268 | } 269 | 270 | // Delete the jth key. 
271 | for (let j = 0; j <= i; j++) { 272 | const x = numbers[j] 273 | 274 | // it(`Delete ${j} : ${x}`, () => { 275 | const t = clone(tree) 276 | t.delete(x) 277 | try { 278 | verify(t) 279 | } catch (error) { 280 | console.log("BEFORE", inspect(tree)) 281 | console.log("DELETE", x) 282 | console.log("AFTER", inspect(t)) 283 | throw error 284 | } 285 | 286 | // Check get on all keys. 287 | for (let k = 0; k <= i; k++) { 288 | const y = numbers[k] 289 | if (x === y) assert.equal(t.get(y), undefined) 290 | else assert.equal(t.get(y), y.toString()) 291 | } 292 | // }) 293 | } 294 | }) 295 | } 296 | } 297 | }) 298 | 299 | function randomNumbers(size: number) { 300 | const numbers: number[] = [] 301 | for (let i = 0; i < size; i++) 302 | numbers.push(Math.round((Math.random() - 0.5) * size * 10)) 303 | return numbers 304 | } 305 | 306 | function parseTests(str: string) { 307 | // Cleanup extra whitespace 308 | str = str 309 | .split("\n") 310 | .map((line) => line.trim()) 311 | .join("\n") 312 | .trim() 313 | 314 | return str.split("\n\n").map((block) => { 315 | const lines = block.split("\n") 316 | let comment = "" 317 | if (lines[0].startsWith("//")) { 318 | comment = lines[0].slice(3) 319 | lines.splice(0, 1) 320 | } 321 | const [op, nStr] = lines[0].split(" ") 322 | const n = parseInt(nStr) 323 | const tree = lines.slice(1).join("\n") 324 | return { comment, n, tree, op: op as "+" | "-" } 325 | }) 326 | } 327 | 328 | function test(tree: BinaryPlusTree, str: string) { 329 | for (const test of parseTests(structuralTests24)) { 330 | let label = `${test.op} ${test.n}` 331 | if (test.comment) label += " // " + test.comment 332 | it(label, () => { 333 | if (test.op === "+") tree.set(test.n, test.n.toString()) 334 | if (test.op === "-") tree.delete(test.n) 335 | assert.equal(inspect(tree), test.tree, test.comment) 336 | 337 | const value = test.op === "+" ? 
test.n.toString() : undefined 338 | assert.equal(tree.get(test.n), value, test.comment) 339 | 340 | assert.equal(tree.depth(), test.tree.split("\n").length, test.comment) 341 | }) 342 | } 343 | } 344 | 345 | type Key = string | number 346 | type KeyTree = 347 | | { keys: Key[]; children?: undefined } 348 | | { keys: Key[]; children: KeyTree[] } 349 | 350 | function toKeyTree(tree: BinaryPlusTree, id = "root"): KeyTree { 351 | const node = tree.nodes[id] 352 | if (!node) throw new Error("Missing node!") 353 | 354 | const keys = node.values.map((v) => v.key) 355 | if (node.leaf) return { keys: keys } 356 | 357 | const subtrees = node.values.map((v) => toKeyTree(tree, v.value)) 358 | return { keys: keys, children: subtrees } 359 | } 360 | 361 | type TreeLayer = Key[][] 362 | 363 | function toTreeLayers(tree: KeyTree): TreeLayer[] { 364 | const layers: TreeLayer[] = [] 365 | 366 | let cursor = [tree] 367 | while (cursor.length > 0) { 368 | const layer: TreeLayer = [] 369 | const nextCursor: KeyTree[] = [] 370 | for (const tree of cursor) { 371 | layer.push(tree.keys) 372 | if (tree.children) nextCursor.push(...tree.children) 373 | } 374 | layers.push(layer) 375 | cursor = nextCursor 376 | } 377 | return layers 378 | } 379 | 380 | function print(x: any) { 381 | if (x === null) return "null" 382 | if (typeof x === "number") return x.toString() 383 | if (typeof x === "string") return JSON.stringify(x) 384 | if (Array.isArray(x)) return "[" + x.map(print).join(",") + "]" 385 | return "" 386 | } 387 | 388 | function inspect(tree: BinaryPlusTree) { 389 | const keyTree = toKeyTree(tree) 390 | const layers = toTreeLayers(keyTree) 391 | const str = layers 392 | .map((layer) => 393 | layer.length === 1 ? 
print(layer[0]) : layer.map(print).join(" ") 394 | ) 395 | .join("\n") 396 | return str 397 | } 398 | 399 | function clone(tree: BinaryPlusTree) { 400 | const cloned = new BinaryPlusTree(tree.minSize, tree.maxSize) 401 | cloned.nodes = cloneDeep(tree.nodes) 402 | return cloned 403 | } 404 | 405 | /** Check for node sizes. */ 406 | function verify(tree: BinaryPlusTree, id = "root") { 407 | const node = tree.nodes[id] 408 | if (id === "root") { 409 | assert.equal(countNodes(tree), Object.keys(tree.nodes).length) 410 | if (!node) return 411 | if (node.leaf) return 412 | for (const { value } of node.values) verify(tree, value) 413 | return 414 | } 415 | 416 | assert.ok(node) 417 | assert.ok(node.values.length >= tree.minSize) 418 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 419 | 420 | if (node.leaf) return 421 | for (const { value } of node.values) verify(tree, value) 422 | } 423 | 424 | function countNodes(tree: BinaryPlusTree, id = "root") { 425 | const node = tree.nodes[id] 426 | if (id === "root") { 427 | if (!node) return 0 428 | if (node.leaf) return 1 429 | let count = 1 430 | for (const { value } of node.values) count += countNodes(tree, value) 431 | return count 432 | } 433 | 434 | assert.ok(node) 435 | assert.ok(node.values.length >= tree.minSize) 436 | assert.ok(node.values.length <= tree.maxSize, inspect(tree)) 437 | 438 | if (node.leaf) return 1 439 | let count = 1 440 | for (const { value } of node.values) count += countNodes(tree, value) 441 | return count 442 | } 443 | -------------------------------------------------------------------------------- /src/bptree.ts: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Both Postgres and SQLite use B+ trees as the foundation of their indexes. 4 | 5 | Even though we have an OrderedKeyValueDatabase, let's build a B+ tree on top of a KeyValueDatabase 6 | so that we can later extend it to an interval tree and a range tree. 
7 | 8 | */ 9 | 10 | import { orderedArray } from "@ccorcos/ordered-array" 11 | 12 | type Key = string | number 13 | 14 | /** 15 | * id references the node in a key-value database. 16 | * Each item in values has a `key` that is the minKey of the child node with id `value`. 17 | * The key will be null for the left-most branch nodes. 18 | */ 19 | export type BranchNode = { 20 | leaf?: false 21 | id: string 22 | values: { key: Key | null; value: string }[] 23 | } 24 | 25 | export type LeafNode = { 26 | leaf: true 27 | id: string 28 | // Key can't be null in a leaf node, but leaving it here for type convenience. 29 | values: { key: Key | null; value: any }[] 30 | } 31 | 32 | const { search, insert, remove } = orderedArray( 33 | (item: { key: Key | null }) => item.key, 34 | (a, b) => { 35 | if (a === b) return 0 36 | if (a === null) return -1 37 | if (b === null) return 1 38 | if (a > b) return 1 39 | else return -1 40 | } 41 | ) 42 | 43 | export class BinaryPlusTree { 44 | // In preparation for storing nodes in a key-value database. 45 | nodes: { [key: Key]: BranchNode | LeafNode | undefined } = {} 46 | 47 | /** 48 | * minSize must be less than maxSize / 2. 49 | */ 50 | constructor(public minSize: number, public maxSize: number) { 51 | if (minSize > maxSize / 2) throw new Error("Invalid tree size.") 52 | } 53 | 54 | get = (key: Key): any | undefined => { 55 | const root = this.nodes["root"] 56 | if (!root) return // Empty tree 57 | 58 | let node = root 59 | while (true) { 60 | if (node.leaf) { 61 | const result = search(node.values, key) 62 | if (result.found === undefined) return 63 | return node.values[result.found].value 64 | } 65 | 66 | const result = search(node.values, key) 67 | 68 | // Closest key that is at least as big as the key... 69 | // So the closest should never be less than the minKey. 70 | if (result.closest === 0) throw new Error("Broken.") 71 | 72 | const childIndex = 73 | result.found !== undefined ? 
result.found : result.closest - 1 74 | const childId = node.values[childIndex].value 75 | const child = this.nodes[childId] 76 | if (!child) throw Error("Missing child node.") 77 | node = child 78 | } 79 | } 80 | 81 | set = (key: Key, value: any) => { 82 | const root = this.nodes["root"] 83 | 84 | // Intitalize root node. 85 | if (!root) { 86 | this.nodes["root"] = { 87 | leaf: true, 88 | id: "root", 89 | values: [{ key, value }], 90 | } 91 | return 92 | } 93 | 94 | // Insert into leaf node. 95 | const nodePath = [root] 96 | const indexPath: number[] = [] 97 | while (true) { 98 | const node = nodePath[0] 99 | 100 | if (node.leaf) { 101 | const existing = insert(node.values, { key, value }) 102 | // No need to rebalance if we're replacing 103 | if (existing) return 104 | break 105 | } 106 | 107 | const result = search(node.values, key) 108 | const index = 109 | result.found !== undefined ? result.found : result.closest - 1 110 | const childId = node.values[index].value 111 | const child = this.nodes[childId] 112 | if (!child) throw Error("Missing child node.") 113 | // Recur into child. 114 | nodePath.unshift(child) 115 | indexPath.unshift(index) 116 | } 117 | 118 | // Balance the tree by splitting nodes, starting from the leaf. 119 | let node = nodePath.shift() 120 | while (node) { 121 | const size = node.values.length 122 | if (size <= this.maxSize) break 123 | 124 | const splitIndex = Math.round(size / 2) 125 | const rightNode: LeafNode | BranchNode = { 126 | id: randomId(), 127 | leaf: node.leaf, 128 | values: node.values.splice(splitIndex), 129 | } 130 | this.nodes[rightNode.id] = rightNode 131 | const rightMinKey = rightNode.values[0].key 132 | 133 | // If we're splitting the root node. 
134 | if (node.id === "root") { 135 | const leftNode: LeafNode | BranchNode = { 136 | id: randomId(), 137 | leaf: node.leaf, 138 | values: node.values, 139 | } 140 | this.nodes[leftNode.id] = leftNode 141 | 142 | this.nodes["root"] = { 143 | id: "root", 144 | values: [ 145 | { key: null, value: leftNode.id }, 146 | { key: rightMinKey, value: rightNode.id }, 147 | ], 148 | } 149 | break 150 | } 151 | 152 | // Insert right node into parent. 153 | const parent = nodePath.shift() 154 | const parentIndex = indexPath.shift() 155 | if (!parent) throw new Error("Broken.") 156 | if (parentIndex === undefined) throw new Error("Broken.") 157 | parent.values.splice(parentIndex + 1, 0, { 158 | key: rightMinKey, 159 | value: rightNode.id, 160 | }) 161 | 162 | // Recur 163 | node = parent 164 | } 165 | } 166 | 167 | delete = (key: Key) => { 168 | const root = this.nodes["root"] 169 | if (!root) return 170 | 171 | // Delete from leaf node. 172 | const nodePath = [root] 173 | const indexPath: number[] = [] 174 | while (true) { 175 | const node = nodePath[0] 176 | 177 | if (node.leaf) { 178 | const exists = remove(node.values, key) 179 | if (!exists) return 180 | break 181 | } 182 | 183 | const result = search(node.values, key) 184 | const index = 185 | result.found !== undefined ? result.found : result.closest - 1 186 | const childId = node.values[index].value 187 | const child = this.nodes[childId] 188 | if (!child) throw Error("Missing child node.") 189 | // Recur into the child. 190 | nodePath.unshift(child) 191 | indexPath.unshift(index) 192 | } 193 | 194 | /* 195 | 196 | Step-by-step explanation of the more complicated case. 197 | 198 | Imagine a tree with minSize = 2, maxSize = 4. 199 | 200 | [null,10] 201 | [null,5] [10,15] 202 | [2,4] [5,7] [10,11] [15,24] 203 | 204 | Removing 10 from the leaf 205 | 206 | [null,10] 207 | [null,5] [10,15] 208 | [2,4] [5,7] [11] [15,24] 209 | 210 | Loop: Merge and update parent pointers. 
211 | 212 | [null,10] 213 | [null,5] [11] 214 | [2,4] [5,7] [11,15,24] 215 | 216 | Recurse into parent. 217 | 218 | [null] 219 | [null,5,11] 220 | [2,4] [5,7] [11,15,24] 221 | 222 | Replace the root with child if there is only one key 223 | 224 | [null,5,11] 225 | [2,4] [5,7] [11,15,24] 226 | 227 | */ 228 | 229 | let node = nodePath.shift() 230 | while (node) { 231 | if (node.id === "root") { 232 | // A root leaf node has no minSize constaint. 233 | if (node.leaf) return 234 | 235 | // If node with only one child becomes its child. 236 | if (node.values.length === 1) { 237 | const childId = node.values[0].value 238 | const childNode = this.nodes[childId] 239 | if (!childNode) throw new Error("Broken.") 240 | this.nodes["root"] = { ...childNode, id: "root" } 241 | delete this.nodes[childId] 242 | } 243 | return 244 | } 245 | 246 | const parent = nodePath.shift() 247 | const parentIndex = indexPath.shift() 248 | if (!parent) throw new Error("Broken.") 249 | if (parentIndex === undefined) throw new Error("Broken.") 250 | 251 | if (node.values.length >= this.minSize) { 252 | // No need to merge but we might need to update the minKey in the parent 253 | const parentItem = parent.values[parentIndex] 254 | // No need to recusively update the left-most branch. 255 | if (parentItem.key === null) return 256 | // No need to recursively update if the minKey didn't change. 
257 | if (parentItem.key === node.values[0].key) return 258 | // Set the minKey and recur 259 | parentItem.key = node.values[0].key 260 | node = parent 261 | continue 262 | } 263 | 264 | // Merge or redistribute 265 | if (parentIndex === 0) { 266 | const rightSibling = this.nodes[parent.values[parentIndex + 1].value] 267 | if (!rightSibling) throw new Error("Broken.") 268 | 269 | const combinedSize = node.values.length + rightSibling.values.length 270 | if (combinedSize > this.maxSize) { 271 | // Redistribute 272 | const splitIndex = Math.round(combinedSize / 2) - node.values.length 273 | const moveLeft = rightSibling.values.splice(0, splitIndex) 274 | node.values.push(...moveLeft) 275 | 276 | // Update parent keys. 277 | if (parent.values[parentIndex].key !== null) { 278 | parent.values[parentIndex].key = node.values[0].key 279 | } 280 | parent.values[parentIndex + 1].key = rightSibling.values[0].key 281 | } else { 282 | // Merge 283 | rightSibling.values.unshift(...node.values) 284 | 285 | // Remove the old pointer to rightSibling 286 | parent.values.splice(1, 1) 287 | 288 | // Replace the node pointer with the new rightSibling 289 | const leftMost = parent.values[0].key === null 290 | parent.values[0] = { 291 | key: leftMost ? null : rightSibling.values[0].key, 292 | value: rightSibling.id, 293 | } 294 | delete this.nodes[node.id] 295 | } 296 | } else { 297 | const leftSibling = this.nodes[parent.values[parentIndex - 1].value] 298 | if (!leftSibling) throw new Error("Broken.") 299 | 300 | const combinedSize = leftSibling.values.length + node.values.length 301 | if (combinedSize > this.maxSize) { 302 | // Redistribute 303 | const splitIndex = Math.round(combinedSize / 2) 304 | 305 | const moveRight = leftSibling.values.splice(splitIndex, this.maxSize) 306 | node.values.unshift(...moveRight) 307 | 308 | // Update parent keys. 
309 | parent.values[parentIndex].key = node.values[0].key 310 | } else { 311 | // Merge 312 | 313 | leftSibling.values.push(...node.values) 314 | // No need to update minKey because we added to the right. 315 | // Just need to delete the old node. 316 | parent.values.splice(parentIndex, 1) 317 | 318 | delete this.nodes[node.id] 319 | } 320 | } 321 | 322 | // Recur 323 | node = parent 324 | continue 325 | } 326 | } 327 | 328 | depth() { 329 | const root = this.nodes["root"] 330 | if (!root) return 0 331 | let depth = 1 332 | let node = root 333 | while (!node.leaf) { 334 | depth += 1 335 | const nextNode = this.nodes[node.values[0].value] 336 | if (!nextNode) throw new Error("Broken.") 337 | node = nextNode 338 | } 339 | return depth 340 | } 341 | } 342 | 343 | function randomId() { 344 | return Math.random().toString(36).slice(2, 10) 345 | } 346 | -------------------------------------------------------------------------------- /src/bptree2.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { sum, uniqWith } from "lodash" 4 | import { describe, it } from "mocha" 5 | import { BinaryPlusTree2 } from "./bptree2" 6 | 7 | // min = 2, max = 4 8 | const structuralTests24 = ` 9 | + 5 10 | [5] 11 | 12 | + 10 13 | [5,10] 14 | 15 | + 3 16 | [3,5,10] 17 | 18 | // Delete from root leaf 19 | - 5 20 | [3,10] 21 | 22 | + 5 23 | [3,5,10] 24 | 25 | + 7 26 | [3,5,7,10] 27 | 28 | // Split 29 | + 6 30 | [null,7] 31 | [3,5,6] [7,10] 32 | 33 | // Merge right branch 34 | - 7 35 | [3,5,6,10] 36 | 37 | + 7 38 | [null,7] 39 | [3,5,6] [7,10] 40 | 41 | - 6 42 | [null,7] 43 | [3,5] [7,10] 44 | 45 | // Merge left branch 46 | - 5 47 | [3,7,10] 48 | 49 | + 5 50 | [3,5,7,10] 51 | 52 | + 6 53 | [null,7] 54 | [3,5,6] [7,10] 55 | 56 | + 14 57 | [null,7] 58 | [3,5,6] [7,10,14] 59 | 60 | + 23 61 | [null,7] 62 | [3,5,6] [7,10,14,23] 63 | 64 | + 24 65 | [null,7,23] 66 | 
[3,5,6] [7,10,14] [23,24] 67 | 68 | // Merge right branch 69 | - 23 70 | [null,7] 71 | [3,5,6] [7,10,14,24] 72 | 73 | + 23 74 | [null,7,23] 75 | [3,5,6] [7,10,14] [23,24] 76 | 77 | // Update parent minKey 78 | - 7 79 | [null,10,23] 80 | [3,5,6] [10,14] [23,24] 81 | 82 | // Merge middle branch 83 | - 14 84 | [null,23] 85 | [3,5,6,10] [23,24] 86 | 87 | + 14 88 | [null,10,23] 89 | [3,5,6] [10,14] [23,24] 90 | 91 | - 3 92 | [null,10,23] 93 | [5,6] [10,14] [23,24] 94 | 95 | // Merge left branch 96 | - 6 97 | [null,23] 98 | [5,10,14] [23,24] 99 | 100 | + 3 101 | [null,23] 102 | [3,5,10,14] [23,24] 103 | 104 | + 6 105 | [null,10,23] 106 | [3,5,6] [10,14] [23,24] 107 | 108 | + 7 109 | [null,10,23] 110 | [3,5,6,7] [10,14] [23,24] 111 | 112 | + 8 113 | [null,7,10,23] 114 | [3,5,6] [7,8] [10,14] [23,24] 115 | 116 | + 11 117 | [null,7,10,23] 118 | [3,5,6] [7,8] [10,11,14] [23,24] 119 | 120 | + 12 121 | [null,7,10,23] 122 | [3,5,6] [7,8] [10,11,12,14] [23,24] 123 | 124 | // Double split 125 | + 13 126 | [null,13] 127 | [null,7,10] [13,23] 128 | [3,5,6] [7,8] [10,11,12] [13,14] [23,24] 129 | 130 | + 15 131 | [null,13] 132 | [null,7,10] [13,23] 133 | [3,5,6] [7,8] [10,11,12] [13,14,15] [23,24] 134 | 135 | // Double update minKey 136 | - 13 137 | [null,14] 138 | [null,7,10] [14,23] 139 | [3,5,6] [7,8] [10,11,12] [14,15] [23,24] 140 | 141 | // Double merge mid-right branch 142 | - 14 143 | [null,7,10,15] 144 | [3,5,6] [7,8] [10,11,12] [15,23,24] 145 | 146 | + 2 147 | [null,7,10,15] 148 | [2,3,5,6] [7,8] [10,11,12] [15,23,24] 149 | 150 | + 4 151 | [null,10] 152 | [null,5,7] [10,15] 153 | [2,3,4] [5,6] [7,8] [10,11,12] [15,23,24] 154 | 155 | - 8 156 | [null,10] 157 | [null,5] [10,15] 158 | [2,3,4] [5,6,7] [10,11,12] [15,23,24] 159 | 160 | - 3 161 | [null,10] 162 | [null,5] [10,15] 163 | [2,4] [5,6,7] [10,11,12] [15,23,24] 164 | 165 | // Double merge left branch 166 | - 2 167 | [null,10,15] 168 | [4,5,6,7] [10,11,12] [15,23,24] 169 | 170 | - 15 171 | [null,10,23] 172 | [4,5,6,7] 
[10,11,12] [23,24] 173 | 174 | + 20 175 | [null,10,23] 176 | [4,5,6,7] [10,11,12,20] [23,24] 177 | 178 | // Redistribute right 179 | - 24 180 | [null,10,20] 181 | [4,5,6,7] [10,11,12] [20,23] 182 | 183 | + 13 184 | [null,10,20] 185 | [4,5,6,7] [10,11,12,13] [20,23] 186 | 187 | - 4 188 | [null,10,20] 189 | [5,6,7] [10,11,12,13] [20,23] 190 | 191 | - 5 192 | [null,10,20] 193 | [6,7] [10,11,12,13] [20,23] 194 | 195 | // Redistribute left 196 | - 6 197 | [null,12,20] 198 | [7,10,11] [12,13] [20,23] 199 | 200 | ` 201 | 202 | describe("BinaryPlusTree2", () => { 203 | describe("structural tests 2-4", () => { 204 | const tree = new BinaryPlusTree2(2, 4) 205 | test(tree, structuralTests24) 206 | }) 207 | 208 | describe("property test 2-4 * 100", () => { 209 | propertyTest({ minSize: 2, maxSize: 4, testSize: 100 }) 210 | }) 211 | 212 | describe("property test 3-6 * 100", () => { 213 | propertyTest({ minSize: 3, maxSize: 6, testSize: 100 }) 214 | }) 215 | 216 | it("big tree", () => { 217 | const numbers = randomNumbers(20_000) 218 | const tree = new BinaryPlusTree2(3, 9) 219 | for (const number of numbers) { 220 | tree.set(number, number * 2) 221 | assert.equal(tree.get(number), number * 2) 222 | } 223 | for (const number of numbers) { 224 | tree.delete(number) 225 | assert.equal(tree.get(number), undefined) 226 | } 227 | assert.equal(tree.depth(), 1) 228 | }) 229 | 230 | it("tuple keys", () => { 231 | const tree = new BinaryPlusTree2(3, 9, jsonCodec.compare) 232 | 233 | const numbers = randomNumbers(2000) 234 | for (const number of numbers) { 235 | tree.set(["user", number], { id: number }) 236 | tree.set(["profile", number], number) 237 | assert.deepEqual(tree.get(["user", number]), { id: number }) 238 | assert.deepEqual(tree.get(["profile", number]), number) 239 | } 240 | 241 | for (const number of numbers) { 242 | tree.delete(["user", number]) 243 | assert.equal(tree.get(["user", number]), undefined) 244 | } 245 | }) 246 | 247 | it("list", () => { 248 | const numbers = 
Array(1000) 249 | .fill(0) 250 | .map((x, i) => i * 2) 251 | const tree = new BinaryPlusTree2(3, 9) 252 | for (const number of numbers) { 253 | tree.set(number, number) 254 | } 255 | 256 | // Entire thing 257 | assert.deepEqual( 258 | tree.list({}), 259 | numbers.map((n) => ({ key: n, value: n })) 260 | ) 261 | 262 | // No start bound 263 | assert.deepEqual(tree.list({ end: 9 }), [ 264 | { key: 0, value: 0 }, 265 | { key: 2, value: 2 }, 266 | { key: 4, value: 4 }, 267 | { key: 6, value: 6 }, 268 | { key: 8, value: 8 }, 269 | ]) 270 | 271 | // Within the same branch. 272 | assert.deepEqual(tree.list({ start: 3, end: 9 }), [ 273 | { key: 4, value: 4 }, 274 | { key: 6, value: 6 }, 275 | { key: 8, value: 8 }, 276 | ]) 277 | 278 | assert.deepEqual(tree.list({ start: 4, end: 10 }), [ 279 | { key: 4, value: 4 }, 280 | { key: 6, value: 6 }, 281 | { key: 8, value: 8 }, 282 | ]) 283 | 284 | // Across branches. 285 | assert.deepEqual(tree.list({ start: 4, end: 24 }), [ 286 | { key: 4, value: 4 }, 287 | { key: 6, value: 6 }, 288 | { key: 8, value: 8 }, 289 | { key: 10, value: 10 }, 290 | { key: 12, value: 12 }, 291 | { key: 14, value: 14 }, 292 | { key: 16, value: 16 }, 293 | { key: 18, value: 18 }, 294 | { key: 20, value: 20 }, 295 | { key: 22, value: 22 }, 296 | ]) 297 | 298 | // No end bound. 299 | assert.deepEqual(tree.list({ start: 2000 - 4 }), [ 300 | { key: 1996, value: 1996 }, 301 | { key: 1998, value: 1998 }, 302 | ]) 303 | 304 | // Limit. 
305 | assert.deepEqual(tree.list({ start: 4, end: 24, limit: 4 }), [ 306 | { key: 4, value: 4 }, 307 | { key: 6, value: 6 }, 308 | { key: 8, value: 8 }, 309 | { key: 10, value: 10 }, 310 | ]) 311 | }) 312 | 313 | it("list property test", () => { 314 | const randomTuples = ( 315 | n: number, 316 | len: number, 317 | range: [number, number] = [-10, 10] 318 | ) => 319 | Array(n) 320 | .fill(0) 321 | .map(() => randomNumbers(len, range)) 322 | 323 | let tuples = [ 324 | ...randomTuples(10, 1), 325 | ...randomTuples(50, 2), 326 | ...randomTuples(100, 3), 327 | ...randomTuples(500, 4), 328 | ...randomTuples(1000, 5), 329 | ] 330 | 331 | tuples = uniqWith(tuples, (a, b) => jsonCodec.compare(a, b) === 0) 332 | tuples.sort(jsonCodec.compare) 333 | 334 | const tree = new BinaryPlusTree2(3, 9, jsonCodec.compare) 335 | for (const tuple of tuples) { 336 | tree.set(tuple, sum(tuple)) 337 | } 338 | 339 | const ranges = randomTuples(10_000, 2, [0, tuples.length - 1]) 340 | .map((range) => { 341 | range.sort(jsonCodec.compare) 342 | return range 343 | }) 344 | // Ignore ranges where start and end are the same. 
345 | .filter(([a, b]) => a !== b) 346 | 347 | for (const tuple of tuples) { 348 | const result = tree.get(tuple) 349 | assert.deepEqual(result, sum(tuple)) 350 | } 351 | 352 | for (const range of ranges) { 353 | const start = tuples[range[0]] 354 | const end = tuples[range[1]] 355 | const result = tree.list({ start, end }).map(({ key }) => key) 356 | const target = tuples.slice(range[0], range[1]) 357 | assert.deepEqual( 358 | result, 359 | target, 360 | `range: [${range[0]}, ${range[1]}] start: ${JSON.stringify( 361 | start 362 | )} end: ${JSON.stringify(end)}` 363 | ) 364 | } 365 | }) 366 | 367 | function propertyTest(args: { 368 | minSize: number 369 | maxSize: number 370 | testSize: number 371 | }) { 372 | const size = args.testSize 373 | const numbers = randomNumbers(size) 374 | 375 | const tree = new BinaryPlusTree2(args.minSize, args.maxSize) 376 | for (let i = 0; i < size; i++) { 377 | const n = numbers[i] 378 | it(`Set ${i} : ${n}`, () => { 379 | // it(`+ ${n}`, () => { 380 | tree.set(n, n.toString()) 381 | verify(tree) 382 | 383 | // Get works on every key so far. 384 | for (let j = 0; j <= i; j++) { 385 | const x = numbers[j] 386 | assert.equal(tree.get(x), x.toString()) 387 | } 388 | // }) 389 | 390 | // Overwrite the jth key. 391 | for (let j = 0; j <= i; j++) { 392 | const x = numbers[j] 393 | 394 | // it(`Overwrite ${j}: ${x}`, () => { 395 | const t = tree.clone() 396 | t.set(x, x * 2) 397 | verify(t) 398 | 399 | // Check get on all keys. 400 | for (let k = 0; k <= i; k++) { 401 | const y = numbers[k] 402 | if (x === y) assert.equal(t.get(y), y * 2) 403 | else assert.equal(t.get(y), y.toString()) 404 | } 405 | // }) 406 | } 407 | 408 | // Delete the jth key. 
409 | for (let j = 0; j <= i; j++) { 410 | const x = numbers[j] 411 | 412 | // it(`Delete ${j} : ${x}`, () => { 413 | const t = tree.clone() 414 | t.delete(x) 415 | try { 416 | verify(t) 417 | } catch (error) { 418 | console.log("BEFORE", inspect(tree)) 419 | console.log("DELETE", x) 420 | console.log("AFTER", inspect(t)) 421 | throw error 422 | } 423 | 424 | // Check get on all keys. 425 | for (let k = 0; k <= i; k++) { 426 | const y = numbers[k] 427 | if (x === y) assert.equal(t.get(y), undefined) 428 | else assert.equal(t.get(y), y.toString()) 429 | } 430 | // }) 431 | } 432 | }) 433 | } 434 | } 435 | }) 436 | 437 | /** Uniform random integers in [range[0], range[1]] (default [-size*10, size*10]). */ 438 | function randomNumbers(size: number, range?: [number, number]) { 439 | if (!range) range = [-size * 10, size * 10] 440 | const numbers: number[] = [] 441 | for (let i = 0; i < size; i++) 442 | // Fix: offset must be "+ range[0]"; "- range[0]" shifted results into [|r0|, r1 + |r0|] and never produced negatives. 443 | numbers.push(Math.round(Math.random() * (range[1] - range[0]) + range[0])) 444 | return numbers 445 | } 446 | 447 | function parseTests(str: string) { 448 | // Cleanup extra whitespace 449 | str = str 450 | .split("\n") 451 | .map((line) => line.trim()) 452 | .join("\n") 453 | .trim() 454 | 455 | return str.split("\n\n").map((block) => { 456 | const lines = block.split("\n") 457 | let comment = "" 458 | if (lines[0].startsWith("//")) { 459 | comment = lines[0].slice(3) 460 | lines.splice(0, 1) 461 | } 462 | const [op, nStr] = lines[0].split(" ") 463 | const n = parseInt(nStr) 464 | const tree = lines.slice(1).join("\n") 465 | return { comment, n, tree, op: op as "+" | "-" } 466 | }) 467 | } 468 | 469 | function test(tree: BinaryPlusTree2, str: string) { 470 | // Fix: was parseTests(structuralTests24), which silently ignored the `str` parameter. 471 | for (const test of parseTests(str)) { 472 | let label = `${test.op} ${test.n}` 473 | if (test.comment) label += " // " + test.comment 474 | it(label, () => { 475 | if (test.op === "+") tree.set(test.n, test.n.toString()) 476 | if (test.op === "-") tree.delete(test.n) 477 | assert.equal(inspect(tree), test.tree, test.comment) 478 | 479 | const value = test.op === "+" ?
test.n.toString() : undefined 477 | assert.equal(tree.get(test.n), value, test.comment) 478 | 479 | assert.equal(tree.depth(), test.tree.split("\n").length, test.comment) 480 | 481 | verify(tree) 482 | }) 483 | } 484 | } 485 | 486 | type Key = string | number | null 487 | type KeyTree = 488 | | { keys: Key[]; children?: undefined } 489 | | { keys: Key[]; children: KeyTree[] } 490 | 491 | function toKeyTree(tree: BinaryPlusTree2, id = "root"): KeyTree { 492 | const node = tree.nodes[id] 493 | if (!node) throw new Error("Missing node!") 494 | 495 | const keys = node.leaf 496 | ? node.values.map((v) => v.key) 497 | : node.children.map((v) => v.minKey) 498 | 499 | if (node.leaf) return { keys: keys } 500 | const subtrees = node.children.map((v) => toKeyTree(tree, v.childId)) 501 | 502 | return { keys: keys, children: subtrees } 503 | } 504 | 505 | type TreeLayer = Key[][] 506 | 507 | function toTreeLayers(tree: KeyTree): TreeLayer[] { 508 | const layers: TreeLayer[] = [] 509 | 510 | let cursor = [tree] 511 | while (cursor.length > 0) { 512 | const layer: TreeLayer = [] 513 | const nextCursor: KeyTree[] = [] 514 | for (const tree of cursor) { 515 | layer.push(tree.keys) 516 | if (tree.children) nextCursor.push(...tree.children) 517 | } 518 | layers.push(layer) 519 | cursor = nextCursor 520 | } 521 | return layers 522 | } 523 | 524 | function print(x: any) { 525 | if (x === null) return "null" 526 | if (typeof x === "number") return x.toString() 527 | if (typeof x === "string") return JSON.stringify(x) 528 | if (Array.isArray(x)) return "[" + x.map(print).join(",") + "]" 529 | return "" 530 | } 531 | 532 | function inspect(tree: BinaryPlusTree2) { 533 | const keyTree = toKeyTree(tree) 534 | const layers = toTreeLayers(keyTree) 535 | const str = layers 536 | .map((layer) => 537 | layer.length === 1 ? print(layer[0]) : layer.map(print).join(" ") 538 | ) 539 | .join("\n") 540 | return str 541 | } 542 | 543 | /** Check for node sizes. 
*/ 544 | function verify(tree: BinaryPlusTree2, id = "root") { 545 | const node = tree.nodes[id] 546 | if (id === "root") { 547 | assert.equal(countNodes(tree), Object.keys(tree.nodes).length) 548 | if (!node) return 549 | if (node.leaf) return 550 | for (const { childId } of node.children) verify(tree, childId) 551 | return 552 | } 553 | 554 | assert.ok(node) 555 | const size = node.leaf ? node.values.length : node.children.length 556 | assert.ok(size >= tree.minSize) 557 | assert.ok(size <= tree.maxSize, inspect(tree)) 558 | 559 | if (node.leaf) return 560 | for (const { childId } of node.children) verify(tree, childId) 561 | } 562 | 563 | function countNodes(tree: BinaryPlusTree2, id = "root") { 564 | const node = tree.nodes[id] 565 | if (id === "root") { 566 | if (!node) return 0 567 | if (node.leaf) return 1 568 | let count = 1 569 | for (const { childId } of node.children) count += countNodes(tree, childId) 570 | return count 571 | } 572 | 573 | assert.ok(node) 574 | if (node.leaf) return 1 575 | let count = 1 576 | for (const { childId } of node.children) count += countNodes(tree, childId) 577 | return count 578 | } 579 | -------------------------------------------------------------------------------- /src/concurrency.test.ts: -------------------------------------------------------------------------------- 1 | import { TestClock } from "@ccorcos/test-clock" 2 | import { strict as assert } from "assert" 3 | import { describe, it } from "mocha" 4 | import { ConcurrencyLocks } from "./concurrency" 5 | 6 | describe("ConcurrencyLocks", () => { 7 | it("run", async () => { 8 | const map = {} 9 | const locks = new ConcurrencyLocks() 10 | 11 | const { sleep, run } = new TestClock() 12 | 13 | const p1 = locks.run(async function* () { 14 | yield { a: "r" } 15 | await sleep(10) 16 | return map["a"] 17 | }) 18 | 19 | const p2 = locks.run(async function* () { 20 | await sleep(2) 21 | yield { a: "r" } 22 | return map["a"] 23 | }) 24 | 25 | const p3 = locks.run(async 
function* () { 26 | await sleep(1) 27 | yield { a: "rw" } 28 | await sleep(10) 29 | map["a"] = 1 30 | }) 31 | 32 | const p4 = locks.run(async function* () { 33 | await sleep(3) 34 | yield { a: "rw" } 35 | await sleep(10) 36 | map["a"] = 2 37 | return true 38 | }) 39 | 40 | const p5 = locks.run(async function* () { 41 | await sleep(4) 42 | yield { a: "r" } 43 | return map["a"] 44 | }) 45 | 46 | await run() 47 | const [r1, r2, r3, r4, r5] = await Promise.all([p1, p2, p3, p4, p5]) 48 | 49 | assert.deepEqual([r1, r2, r3, r4, r5], [undefined, 1, undefined, true, 2]) 50 | }) 51 | }) 52 | -------------------------------------------------------------------------------- /src/concurrency.ts: -------------------------------------------------------------------------------- 1 | // TODO: waiting on PR https://github.com/rocicorp/lock/pull/10 2 | // In the meantime, using `npm link /Users/chet/Code/external/lock` 3 | 4 | import { RWLockMap } from "@ccorcos/lock" 5 | 6 | export type LockCmd = { [key: string]: "r" | "rw" | undefined } 7 | 8 | export class ConcurrencyLocks extends RWLockMap { 9 | async lock(cmd: LockCmd) { 10 | const releases = await Promise.all( 11 | Object.entries(cmd).map(([key, value]) => { 12 | if (value === "r") return this.read(key) 13 | if (value === "rw") return this.write(key) 14 | }) 15 | ) 16 | 17 | let called = false 18 | return () => { 19 | if (called) return 20 | called = true 21 | for (const release of releases) if (release) release() 22 | } 23 | } 24 | 25 | /** I thought this generator approach would be cool, but idk. 
*/ 26 | async run(fn: () => AsyncGenerator void>) { 27 | const gen = fn() 28 | let nextValue = await gen.next() 29 | const releases = new Set<() => void>() 30 | while (!nextValue.done) { 31 | const release = await this.lock(nextValue.value) 32 | releases.add(release) 33 | nextValue = await gen.next(release) 34 | } 35 | for (const release of releases) release() 36 | const result = nextValue.value 37 | return result 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/examples/dataTypes.ts: -------------------------------------------------------------------------------- 1 | import * as t from "data-type-ts" 2 | export * from "data-type-ts" 3 | 4 | export const uuid = new t.Validator({ 5 | validate: (value) => 6 | t.string.validate(value) || 7 | !value.match( 8 | /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/ 9 | ) 10 | ? { message: `${JSON.stringify(value)} is not a valid UUID.` } 11 | : undefined, 12 | inspect: () => "UUID", 13 | }) 14 | 15 | export const datetime = new t.Validator({ 16 | validate: (value) => 17 | t.string.validate(value) || 18 | !value.match(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{3})?Z$/) 19 | ? 
{ 20 | message: `${JSON.stringify(value)} is not a valid ISO 8601 datetime string.`, 21 | } 22 | : undefined, 23 | inspect: () => "Datetime", 24 | }) 25 | -------------------------------------------------------------------------------- /src/examples/messaging.ts: -------------------------------------------------------------------------------- 1 | import * as t from "./dataTypes" 2 | 3 | const schema = { 4 | user: t.object({ 5 | id: t.uuid, 6 | username: t.string, 7 | }), 8 | thread: t.object({ 9 | id: t.uuid, 10 | created_at: t.datetime, 11 | created_by: t.uuid, 12 | member_ids: t.array(t.uuid), 13 | subject: t.string, 14 | deleted: t.optional(t.boolean), 15 | }), 16 | message: t.object({ 17 | id: t.uuid, 18 | thread_id: t.uuid, 19 | author_id: t.uuid, 20 | created_at: t.datetime, 21 | text: t.string, 22 | deleted: t.optional(t.boolean), 23 | }), 24 | } 25 | 26 | type Schema = { [K in keyof typeof schema]: t.Infer<(typeof schema)[K]> } 27 | 28 | type Args = { 29 | [K in keyof Tx]: { fn: K; args: Parameters } 30 | } 31 | 32 | type Tx = { 33 | get(table: T, id: string): Schema[T] | undefined 34 | set(table: T, id: string, value: Schema[T]): void 35 | delete(table: T, id: string, value: Schema[T]): void 36 | } 37 | 38 | type Argify = { 39 | [K in keyof T]: { fn: K; args: Parameters } 40 | }[keyof T] 41 | 42 | type Genify = { 43 | [K in keyof T]: ( 44 | ...args: Parameters 45 | ) => Generator, ReturnType, unknown> 46 | } 47 | 48 | type Tx2 = Genify 49 | 50 | function* insert(tx: Tx2, table: T, value: Schema[T]) { 51 | const error = schema[table].validate(value) 52 | if (error) throw new Error(t.formatError(error)) 53 | 54 | const m = yield* tx.get("message", "12") 55 | // const x = yield db.set([table, value.id], value) 56 | } 57 | 58 | function* constant(a: A): Generator { 59 | return yield a 60 | } 61 | 62 | // function remove(table: keyof Schema, id: string) { 63 | // const existing = db.get([table, "byId", id]) 64 | // if (!existing) return 65 | 66 | // 
db.delete([table, "byId", id]) 67 | 68 | // const tableIndexes = indexes[table] 69 | // if (tableIndexes) { 70 | // for (const [index, columns] of Object.entries(tableIndexes)) { 71 | // if (index === "byId") continue 72 | // const keys = columns.map((col) => existing[col]) 73 | // db.delete([table, index, ...keys]) 74 | // } 75 | // } 76 | // } 77 | 78 | // async getUserByUsername(username: string) { 79 | 80 | // async getRecord(pointer: RecordPointer) { 81 | // async getRecords(pointers: RecordPointer[]): Promise { 82 | // async getPassword(userId: string) { 83 | // async searchUsers(query: string) { 84 | // async getThreadIds(userId: string, limit: number): Promise { 85 | // async getMessageIds(threadId: string, limit: number): Promise { 86 | // async write(records: RecordWithTable[]): Promise { 87 | // async createAuthToken(token: AuthTokenRecord) { 88 | // async createPassword(password: PasswordRecord) { 89 | // async deleteAuthToken(authTokenId: string) { 90 | 91 | // Tuple Index 92 | // [table, id] -> record 93 | // 94 | 95 | const db: any = {} // KeyValueStore, OrderedKeyValueStore, ReducerTree 96 | const query: any = {} 97 | 98 | // All simple indexes... lets find a more challenging problem. 99 | const indexes = { 100 | userByUsername: { 101 | match: [ 102 | { 103 | user: { 104 | id: "id", 105 | username: "username", 106 | }, 107 | }, 108 | ], 109 | sort: ["username", "id"], 110 | }, 111 | } 112 | 113 | // explicit write checks. 
114 | const queries = { 115 | // getUserByUsername 116 | 117 | get: query( 118 | ( 119 | tx, 120 | table: T, 121 | id: string 122 | ): Schema[T] | undefined => { 123 | return undefined 124 | } 125 | ), 126 | // signup() 127 | } 128 | 129 | // API 130 | // signup 131 | // login 132 | // logout 133 | // changePassword 134 | // createThread 135 | // deleteThread 136 | // inviteToThread 137 | // editThread 138 | // createMessage 139 | // deleteMessage 140 | 141 | // Permissions 142 | -------------------------------------------------------------------------------- /src/examples/minisql.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import * as t from "data-type-ts" 3 | import { jsonCodec } from "lexicodec" 4 | import { BTreeDb } from "../tupledb/BTreeDb" 5 | 6 | // TODO: 7 | // - follower timeline, you can't index with sql. 8 | // - async database with yields. 9 | 10 | /** All records must be objects with an `id: t.string` */ 11 | const tables = { 12 | user: t.object({ 13 | id: t.string, 14 | name: t.string, 15 | }), 16 | channel: t.object({ 17 | id: t.string, 18 | name: t.string, 19 | }), 20 | message: t.object({ 21 | id: t.string, 22 | text: t.string, 23 | created_at: t.string, 24 | channel_id: t.string, 25 | author_id: t.string, 26 | }), 27 | } 28 | 29 | type Schema = { [K in keyof typeof tables]: t.Infer<(typeof tables)[K]> } 30 | type Indexes = { [K in keyof Schema]?: Record> } 31 | 32 | /** `byId: ["id"]` is generated for every table by default. 
*/ 33 | const indexes: Indexes = { 34 | user: { 35 | byName: ["name", "id"], 36 | }, 37 | message: { 38 | byChannel: ["channel_id", "created_at", "id"], 39 | }, 40 | } 41 | 42 | const db = new BTreeDb() 43 | 44 | function insert(table: T, value: Schema[T]) { 45 | const error = tables[table].validate(value) 46 | if (error) throw new Error(t.formatError(error)) 47 | 48 | db.set([table, "byId", value.id], value) 49 | const tableIndexes = indexes[table] 50 | if (tableIndexes) { 51 | for (const [index, columns] of Object.entries(tableIndexes)) { 52 | if (index === "byId") continue 53 | const keys = columns.map((col) => value[col]) 54 | db.set([table, index, ...keys], value) 55 | } 56 | } 57 | } 58 | 59 | function remove(table: keyof Schema, id: string) { 60 | const existing = db.get([table, "byId", id]) 61 | if (!existing) return 62 | 63 | db.delete([table, "byId", id]) 64 | 65 | const tableIndexes = indexes[table] 66 | if (tableIndexes) { 67 | for (const [index, columns] of Object.entries(tableIndexes)) { 68 | if (index === "byId") continue 69 | const keys = columns.map((col) => existing[col]) 70 | db.delete([table, index, ...keys]) 71 | } 72 | } 73 | } 74 | 75 | function randomId() { 76 | return Math.random().toString(36).slice(2, 10) 77 | } 78 | 79 | const chet: Schema["user"] = { id: randomId(), name: "Chet" } 80 | const simon: Schema["user"] = { id: randomId(), name: "Simon" } 81 | const rob: Schema["user"] = { id: randomId(), name: "Rob" } 82 | 83 | insert("user", chet) 84 | insert("user", simon) 85 | insert("user", rob) 86 | 87 | const general: Schema["channel"] = { id: randomId(), name: "General" } 88 | const engineering: Schema["channel"] = { id: randomId(), name: "Engineering" } 89 | const marketing: Schema["channel"] = { id: randomId(), name: "Marketing" } 90 | 91 | insert("channel", general) 92 | insert("channel", engineering) 93 | insert("channel", marketing) 94 | 95 | const message1: Schema["message"] = { 96 | id: randomId(), 97 | text: "Hello world!", 98 
| created_at: new Date().toISOString(), 99 | channel_id: general.id, 100 | author_id: chet.id, 101 | } 102 | 103 | const message2: Schema["message"] = { 104 | id: randomId(), 105 | text: "What's up?", 106 | created_at: new Date(Date.now() + 1000).toISOString(), 107 | channel_id: general.id, 108 | author_id: simon.id, 109 | } 110 | 111 | insert("message", message1) 112 | insert("message", message2) 113 | 114 | // Get a record by id. 115 | assert.deepEqual(db.get(["user", "byId", rob.id]), rob) 116 | 117 | // Scan an index. 118 | assert.deepEqual( 119 | db 120 | .list({ 121 | // Prefix scan. 122 | gt: ["user", "byName", "Simon", jsonCodec.MIN], 123 | lt: ["user", "byName", "Simon", jsonCodec.MAX], 124 | }) 125 | .map(({ value }) => value), 126 | [simon] 127 | ) 128 | 129 | assert.deepEqual( 130 | db 131 | .list({ 132 | // Prefix scan. 133 | gt: ["user", "byName", "Simon", jsonCodec.MIN], 134 | lt: ["user", "byName", "Simon", jsonCodec.MAX], 135 | }) 136 | .map(({ value }) => value), 137 | [simon] 138 | ) 139 | 140 | // List messages, get the latest. 
141 | assert.deepEqual( 142 | db 143 | .list({ 144 | gt: ["message", "byChannel", general.id, jsonCodec.MIN], 145 | lt: ["message", "byChannel", general.id, jsonCodec.MAX], 146 | reverse: true, 147 | limit: 1, 148 | }) 149 | .map(({ value }) => value), 150 | [message2] 151 | ) 152 | 153 | let called = 0 154 | db.subscribe( 155 | [ 156 | ["message", "byChannel", general.id, jsonCodec.MIN], 157 | ["message", "byChannel", general.id, jsonCodec.MAX], 158 | ], 159 | () => { 160 | called += 1 161 | } 162 | ) 163 | 164 | const message3: Schema["message"] = { 165 | id: randomId(), 166 | text: "Testing out subscriptions", 167 | created_at: new Date(Date.now() + 2000).toISOString(), 168 | channel_id: general.id, 169 | author_id: chet.id, 170 | } 171 | 172 | insert("message", message3) 173 | 174 | assert.deepEqual(called, 1) 175 | 176 | remove("message", message3.id) 177 | 178 | assert.deepEqual(called, 2) 179 | -------------------------------------------------------------------------------- /src/examples/old/MINISQL.md: -------------------------------------------------------------------------------- 1 | A relational model / dsl / syntax for creating indexes and working with data. 2 | 3 | 4 | 5 | What are some examples... 6 | 7 | - todomvc 8 | - social network follower feed 9 | - triplestore 10 | - end-user database 11 | 12 | 13 | HERE 14 | 15 | - follower timeline, you can't index with sql. 16 | - async database with yields. 
17 | 18 | 19 | 20 | 21 | ```sql 22 | SELECT a.*, b.* 23 | FROM follow AS a 24 | JOIN follow AS b ON a.channel_id = b.channel_id 25 | WHERE a.user_id != b.user_id 26 | AND a.public = true 27 | AND b.public = true 28 | ORDER BY a.user_id, b.user_id; 29 | ``` 30 | 31 | ```js 32 | cofollowers = { 33 | select: { a: "follow", b: "follow" }, 34 | where: { 35 | and: [ 36 | { "a.channel_id": { eq: {$: "b.channel_id"} } }, 37 | { "a.user_id": { neq: {$: "b.user_id"} } }, 38 | { "a.public": { eq: true }}, 39 | { "b.public": { eq: true }}, 40 | ], 41 | }, 42 | sort: ["a.user_id", "b.user_id"], 43 | } 44 | ``` 45 | 46 | ``` 47 | select 48 | a -> follow 49 | b -> follow 50 | where 51 | a.channel_id = b.channel_id 52 | a.user_id != b.user_id 53 | a.public = true 54 | b.public = true 55 | order by 56 | a.user_id b.user_id 57 | ``` 58 | 59 | 60 | ```js 61 | cofollowers = { 62 | select: [ 63 | { follow: { user_id: "userA", channel_id: "channel" } }, 64 | { follow: { user_id: "userB", channel_id: "channel" } }, 65 | ], 66 | filter: { userA: { neq: {$: "userB"} } }, 67 | sort: ["userA", "userB"], 68 | } 69 | ``` 70 | 71 | 72 | ```js 73 | cofollowers = { 74 | select: { a: "follow", b: "follow" }, 75 | where: { 76 | and: [ 77 | { "a.channel_id": { eq: {$: "b.channel_id"} } }, 78 | { "a.user_id": { neq: {$: "b.user_id"} } }, 79 | { "a.public": { eq: true }}, 80 | { "b.public": { eq: true }}, 81 | ], 82 | }, 83 | sort: ["a.user_id", "b.user_id"], 84 | } 85 | ``` 86 | 87 | We can make a constraint that all indexes must include record ids, and there's no hardcoding specific values. No conditional indexes because those are hard to maintain? Let's just consider that later.
88 | 89 | ```js 90 | cofollowers = { 91 | select: { a: "follow", b: "follow" }, 92 | where: { 93 | and: [ 94 | { "a.channel_id": { eq: "b.channel_id" } }, 95 | { "a.user_id": { neq: "b.user_id" } }, 96 | { "a.public": { eq: true }}, 97 | { "b.public": { eq: true }}, 98 | ], 99 | }, 100 | sort: ["a.public", "b.public", "a.user_id", "b.user_id"], 101 | } 102 | ``` 103 | 104 | Maybe it makes sense that indexes can only be datalog-style matching queries because that's a bit more mechanical. 105 | 106 | Or maybe lets worry about that later and focus on UI for now. -------------------------------------------------------------------------------- /src/examples/old/messaging.ts: -------------------------------------------------------------------------------- 1 | // examples 2 | // - messaging app 3 | // - social app 4 | // - calendar app 5 | // - notes app 6 | // - end-user database app 7 | 8 | import * as t from "./dataTypes" 9 | 10 | const shared = { 11 | id: t.uuid, 12 | version: t.number, 13 | last_version: t.optional(t.number), 14 | created_at: t.datetime, 15 | updated_at: t.datetime, 16 | } 17 | 18 | const schema = { 19 | user: t.object({ 20 | ...shared, 21 | username: t.string, 22 | }), 23 | /** password.id is same as user.id */ 24 | password: t.object({ 25 | ...shared, 26 | password_hash: t.string, 27 | }), 28 | /** auth_token.id is the token */ 29 | auth_token: t.object({ 30 | ...shared, 31 | user_id: t.uuid, 32 | expires_at: t.datetime, 33 | }), 34 | thread: t.object({ 35 | ...shared, 36 | created_by: t.uuid, 37 | member_ids: t.array(t.uuid), 38 | subject: t.string, 39 | deleted: t.optional(t.boolean), 40 | }), 41 | message: t.object({ 42 | ...shared, 43 | thread_id: t.uuid, 44 | author_id: t.uuid, 45 | text: t.string, 46 | file_ids: t.optional(t.array(t.uuid)), 47 | deleted: t.optional(t.boolean), 48 | }), 49 | file: t.object({ 50 | ...shared, 51 | owner_id: t.uuid, 52 | filename: t.string, 53 | deleted: t.optional(t.boolean), 54 | }), 55 | } 56 | 57 | type 
Schema = { [K in keyof typeof schema]: t.Infer<(typeof schema)[K]> } 58 | 59 | const db: any = {} // KeyValueStore, OrderedKeyValueStore, ReducerTree 60 | const query: any = {} 61 | 62 | // All simple indexes... lets find a more challenging problem. 63 | const indexes = { 64 | userByUsername: { 65 | match: [ 66 | { 67 | user: { 68 | id: "id", 69 | username: "username", 70 | }, 71 | }, 72 | ], 73 | sort: ["username", "id"], 74 | }, 75 | } 76 | 77 | // explicit write checks. 78 | const queries = { 79 | // getUserByUsername 80 | 81 | get: query( 82 | ( 83 | tx, 84 | table: T, 85 | id: string 86 | ): Schema[T] | undefined => { 87 | return undefined 88 | } 89 | ), 90 | // signup() 91 | } 92 | 93 | // API 94 | // signup 95 | // login 96 | // logout 97 | // changePassword 98 | // createThread 99 | // deleteThread 100 | // inviteToThread 101 | // editThread 102 | // createMessage 103 | // deleteMessage 104 | 105 | // Permissions 106 | 107 | // async getRecord(pointer: RecordPointer) { 108 | // async getRecords(pointers: RecordPointer[]): Promise { 109 | // async getUserByUsername(username: string) { 110 | // async getPassword(userId: string) { 111 | // async searchUsers(query: string) { 112 | // async getThreadIds(userId: string, limit: number): Promise { 113 | // async getMessageIds(threadId: string, limit: number): Promise { 114 | // async write(records: RecordWithTable[]): Promise { 115 | // async createAuthToken(token: AuthTokenRecord) { 116 | // async createPassword(password: PasswordRecord) { 117 | // async deleteAuthToken(authTokenId: string) { 118 | -------------------------------------------------------------------------------- /src/examples/old/minisql2.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import * as t from "data-type-ts" 3 | import { jsonCodec } from "lexicodec" 4 | import { BTreeDb } from "../tupledb/BTreeDb" 5 | 6 | // TODO: 7 | // - follower timeline, you can't 
index with sql. 8 | // - async database with yields. 9 | 10 | /** All records must be objects with an `id: t.string` */ 11 | const tables = { 12 | user: t.object({ 13 | id: t.string, 14 | name: t.string, 15 | }), 16 | channel: t.object({ 17 | id: t.string, 18 | name: t.string, 19 | }), 20 | message: t.object({ 21 | id: t.string, 22 | text: t.string, 23 | created_at: t.string, 24 | channel_id: t.string, 25 | author_id: t.string, 26 | draft: t.optional(t.boolean), 27 | }), 28 | follow: t.object({ 29 | id: t.string, 30 | user_id: t.string, 31 | channel_id: t.string, 32 | }), 33 | } 34 | 35 | type Schema = { [K in keyof typeof tables]: t.Infer<(typeof tables)[K]> } 36 | 37 | type AndFilter = { and: Filter[] } 38 | type OrFilter = { or: Filter[] } 39 | type PropertyFilter = { 40 | [variable: string]: 41 | | ExistenceFilter 42 | | StringFilter 43 | | BooleanFilter 44 | | NumberFilter 45 | } 46 | 47 | type ExistenceFilter = { null: boolean } 48 | type BooleanFilter = { eq: boolean } 49 | type StringFilter = 50 | | { eq: string } 51 | | { neq: string } 52 | | { contains: string } 53 | | { ncontains: string } 54 | | { startsWith: string } 55 | | { endsWith: string } 56 | type NumberFilter = 57 | | { eq: number } 58 | | { neq: number } 59 | | { gt: number } 60 | | { gte: number } 61 | | { lt: number } 62 | | { lte: number } 63 | 64 | type Filter = AndFilter | OrFilter | PropertyFilter 65 | 66 | type Index = { 67 | select: { 68 | [K in keyof Schema]?: { 69 | [P in keyof Schema[K]]?: string 70 | } 71 | }[] 72 | filter?: Filter 73 | sort: string[] 74 | } 75 | 76 | const indexes: { [V: string]: Index } = { 77 | usersByName: { 78 | select: [{ user: { name: "name", id: "id" } }], 79 | sort: ["name", "id"], 80 | }, 81 | 82 | channelMessages: { 83 | select: [ 84 | { message: { id: "message", channel_id: "channel", created_at: "time" } }, 85 | ], 86 | sort: ["channel", "time", "message"], 87 | }, 88 | 89 | userFollowing: { 90 | select: [{ follow: { channel_id: "channel", user_id: 
"user" } }], 91 | sort: ["user", "channel"], 92 | }, 93 | 94 | channelFollowers: { 95 | select: [{ follow: { id: "id", user_id: "user", channel_id: "channel" } }], 96 | sort: ["channel", "user"], 97 | }, 98 | 99 | // select * from message 100 | // join follow on message.channel_id = follow.channel_id 101 | // where message.draft = false 102 | // order by follow.user_id, message.created_at, message.id 103 | 104 | timeline: { 105 | select: [ 106 | { follow: { user_id: "user", channel_id: "channel" } }, 107 | { 108 | message: { 109 | id: "message", 110 | channel_id: "channel", 111 | created_at: "time", 112 | draft: "draft", 113 | }, 114 | }, 115 | ], 116 | filter: { or: [{ draft: { null: true } }, { draft: { eq: false } }] }, 117 | sort: ["user", "time", "message"], 118 | }, 119 | 120 | // SELECT a.*, b.* 121 | // FROM follow AS a 122 | // JOIN follow AS b ON a.channel_id = b.channel_id 123 | // WHERE a.user_id != b.user_id 124 | // ORDER BY a.user_id, b.user_id; 125 | 126 | cofollowers: { 127 | select: [ 128 | { follow: { user_id: "userA", channel_id: "channel" } }, 129 | { follow: { user_id: "userB", channel_id: "channel" } }, 130 | ], 131 | filter: { userA: { neq: "userB" } }, 132 | sort: ["userA", "userB"], 133 | }, 134 | 135 | // cofollowers2: { 136 | // select: { a: "follow", b: "follow" }, 137 | // filter: { 138 | // and: [ 139 | // { "a.channel_id": { eq: "b.channel_id" } }, 140 | // { "a.user_id": { neq: "b.user_id" } }, 141 | // ], 142 | // }, 143 | // sort: ["a.user_id", "b.user_id"], 144 | // }, 145 | } 146 | 147 | // STOP 148 | 149 | const db = new BTreeDb() 150 | 151 | function insert(table: T, value: Schema[T]) { 152 | const error = tables[table].validate(value) 153 | if (error) throw new Error(t.formatError(error)) 154 | 155 | const prev = db.get([table, value.id]) 156 | 157 | if (prev) { 158 | for (const [indexName, index] of Object.entries(indexes)) { 159 | const vars = {} 160 | for (const select of index.select) { 161 | const varMap = select[table] 
162 | if (!varMap) continue 163 | for (const [colName, varName] of Object.entries(varMap)) { 164 | vars[varName] = prev[colName] 165 | } 166 | } 167 | 168 | const tuple = index.sort.map((varName) => { 169 | if (!(varName in vars)) throw new Error("Missing ") 170 | }) 171 | } 172 | } 173 | 174 | // db.set([table, "byId", value.id], value) 175 | // const tableIndexes = indexes[table] 176 | // if (tableIndexes) { 177 | // for (const [index, columns] of Object.entries(tableIndexes)) { 178 | // if (index === "byId") continue 179 | // const keys = columns.map((col) => value[col]) 180 | // db.set([table, index, ...keys], value) 181 | // } 182 | // } 183 | } 184 | 185 | function remove(table: keyof Schema, id: string) { 186 | const existing = db.get([table, "byId", id]) 187 | if (!existing) return 188 | 189 | db.delete([table, "byId", id]) 190 | 191 | // const tableIndexes = indexes[table] 192 | // if (tableIndexes) { 193 | // for (const [index, columns] of Object.entries(tableIndexes)) { 194 | // if (index === "byId") continue 195 | // const keys = columns.map((col) => existing[col]) 196 | // db.delete([table, index, ...keys]) 197 | // } 198 | // } 199 | } 200 | 201 | function randomId() { 202 | return Math.random().toString(36).slice(2, 10) 203 | } 204 | 205 | const chet: Schema["user"] = { id: randomId(), name: "Chet" } 206 | const simon: Schema["user"] = { id: randomId(), name: "Simon" } 207 | const rob: Schema["user"] = { id: randomId(), name: "Rob" } 208 | 209 | insert("user", chet) 210 | insert("user", simon) 211 | insert("user", rob) 212 | 213 | const general: Schema["channel"] = { id: randomId(), name: "General" } 214 | const engineering: Schema["channel"] = { id: randomId(), name: "Engineering" } 215 | const marketing: Schema["channel"] = { id: randomId(), name: "Marketing" } 216 | 217 | insert("channel", general) 218 | insert("channel", engineering) 219 | insert("channel", marketing) 220 | 221 | const message1: Schema["message"] = { 222 | id: randomId(), 223 | 
text: "Hello world!", 224 | created_at: new Date().toISOString(), 225 | channel_id: general.id, 226 | author_id: chet.id, 227 | } 228 | 229 | const message2: Schema["message"] = { 230 | id: randomId(), 231 | text: "What's up?", 232 | created_at: new Date(Date.now() + 1000).toISOString(), 233 | channel_id: general.id, 234 | author_id: simon.id, 235 | } 236 | 237 | insert("message", message1) 238 | insert("message", message2) 239 | 240 | // Get a record by id. 241 | assert.deepEqual(db.get(["user", "byId", rob.id]), rob) 242 | 243 | // Scan an index. 244 | assert.deepEqual( 245 | db 246 | .list({ 247 | // Prefix scan. 248 | gt: ["user", "byName", "Simon", jsonCodec.MIN], 249 | lt: ["user", "byName", "Simon", jsonCodec.MAX], 250 | }) 251 | .map(({ value }) => value), 252 | [simon] 253 | ) 254 | 255 | assert.deepEqual( 256 | db 257 | .list({ 258 | // Prefix scan. 259 | gt: ["user", "byName", "Simon", jsonCodec.MIN], 260 | lt: ["user", "byName", "Simon", jsonCodec.MAX], 261 | }) 262 | .map(({ value }) => value), 263 | [simon] 264 | ) 265 | 266 | // List messages, get the latest. 
267 | assert.deepEqual( 268 | db 269 | .list({ 270 | gt: ["message", "byChannel", general.id, jsonCodec.MIN], 271 | lt: ["message", "byChannel", general.id, jsonCodec.MAX], 272 | reverse: true, 273 | limit: 1, 274 | }) 275 | .map(({ value }) => value), 276 | [message2] 277 | ) 278 | 279 | let called = 0 280 | db.subscribe( 281 | [ 282 | ["message", "byChannel", general.id, jsonCodec.MIN], 283 | ["message", "byChannel", general.id, jsonCodec.MAX], 284 | ], 285 | () => { 286 | called += 1 287 | } 288 | ) 289 | 290 | const message3: Schema["message"] = { 291 | id: randomId(), 292 | text: "Testing out subscriptions", 293 | created_at: new Date(Date.now() + 2000).toISOString(), 294 | channel_id: general.id, 295 | author_id: chet.id, 296 | } 297 | 298 | insert("message", message3) 299 | 300 | assert.deepEqual(called, 1) 301 | 302 | remove("message", message3.id) 303 | 304 | assert.deepEqual(called, 2) 305 | -------------------------------------------------------------------------------- /src/examples/old/orm.ts: -------------------------------------------------------------------------------- 1 | import * as t from "data-type-ts" 2 | 3 | const Todo = t.object({ 4 | id: t.string, 5 | text: t.string, 6 | createdAt: t.string, 7 | completed: t.boolean, 8 | }) 9 | 10 | function query( 11 | fn: (tx: any, ...args: A) => Generator 12 | ) { 13 | return fn 14 | } 15 | 16 | const createTodo = query(function* (tx) { 17 | return 18 | }) 19 | -------------------------------------------------------------------------------- /src/examples/old/social.ts: -------------------------------------------------------------------------------- 1 | // examples 2 | // - social app 3 | // - calendar app 4 | // - notes app 5 | // - end-user database app 6 | 7 | import * as t from "./dataTypes" 8 | 9 | const schema = { 10 | user: t.object({ 11 | id: t.uuid, 12 | username: t.string, 13 | }), 14 | follow: t.object({ 15 | id: t.tuple(t.uuid, t.uuid), 16 | created_at: t.datetime, 17 | }), 18 | post: 
t.object({ 19 | id: t.uuid, 20 | created_at: t.datetime, 21 | author_id: t.uuid, 22 | text: t.string, 23 | }), 24 | } 25 | 26 | // user by username 27 | // user's follows 28 | // who's following me? 29 | // timeline 30 | 31 | const x = { 32 | userByUsername: { 33 | match: [{ user: { id: "id", username: "username" } }], 34 | sort: ["username", "id"], 35 | }, 36 | followed: { 37 | match: [{ follow: { id: ["from", "to"] } }], 38 | sort: ["to", "from"], 39 | }, 40 | secondOrderFollowing: { 41 | match: [{ follow: { id: ["A", "B"] } }, { follow: { id: ["B", "C"] } }], 42 | sort: ["A", "C", "B"], 43 | }, 44 | secondOrderFollowed: { 45 | match: [{ follow: { id: ["A", "B"] } }, { follow: { id: ["B", "C"] } }], 46 | sort: ["C", "A", "B"], 47 | }, 48 | } 49 | 50 | type Schema = { [K in keyof typeof schema]: t.Infer<(typeof schema)[K]> } 51 | 52 | const db: any = {} // KeyValueStore, OrderedKeyValueStore, ReducerTree 53 | const query: any = {} 54 | 55 | // All simple indexes... let's find a more challenging problem. 56 | const indexes = { 57 | userByUsername: { 58 | match: [ 59 | { 60 | user: { 61 | id: "id", 62 | username: "username", 63 | }, 64 | }, 65 | ], 66 | sort: ["username", "id"], 67 | }, 68 | } 69 | 70 | // explicit write checks.
71 | const queries = { 72 | // getUserByUsername 73 | 74 | get: query( 75 | <T extends keyof Schema>( 76 | tx, 77 | table: T, 78 | id: string 79 | ): Schema[T] | undefined => { 80 | return undefined 81 | } 82 | ), 83 | // signup() 84 | } 85 | 86 | // API 87 | // signup 88 | // login 89 | // logout 90 | // changePassword 91 | // createThread 92 | // deleteThread 93 | // inviteToThread 94 | // editThread 95 | // createMessage 96 | // deleteMessage 97 | 98 | // Permissions 99 | 100 | // async getRecord(pointer: RecordPointer) { 101 | // async getRecords(pointers: RecordPointer[]): Promise { 102 | // async getUserByUsername(username: string) { 103 | // async getPassword(userId: string) { 104 | // async searchUsers(query: string) { 105 | // async getThreadIds(userId: string, limit: number): Promise { 106 | // async getMessageIds(threadId: string, limit: number): Promise { 107 | // async write(records: RecordWithTable[]): Promise { 108 | // async createAuthToken(token: AuthTokenRecord) { 109 | // async createPassword(password: PasswordRecord) { 110 | // async deleteAuthToken(authTokenId: string) { 111 | -------------------------------------------------------------------------------- /src/generator.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | 3 | Generators are kind of confusing, so here's a place to start.
4 | 5 | */ 6 | 7 | import { strict as assert } from "assert" 8 | import { describe, it } from "mocha" 9 | 10 | function run( 11 | fn: () => Generator<number, { sum: number }, { double: number }> 12 | ): string { 13 | const generator = fn() 14 | let nextValue = generator.next() 15 | while (!nextValue.done) { 16 | const double = nextValue.value * 2 17 | nextValue = generator.next({ double }) 18 | } 19 | 20 | const { sum } = nextValue.value 21 | return sum.toString() 22 | } 23 | 24 | function* f() { 25 | const { double: a } = yield 1 26 | assert.equal(a, 2) 27 | 28 | const { double: b } = yield 13 29 | assert.equal(b, 26) 30 | 31 | return { sum: a + b } 32 | } 33 | 34 | async function runAsync( 35 | generatorFunction: () => AsyncGenerator< 36 | number, 37 | { sum: number }, 38 | { double: number } 39 | > 40 | ): Promise<string> { 41 | const generator = generatorFunction() 42 | let nextValue = await generator.next() 43 | 44 | while (!nextValue.done) { 45 | const double = nextValue.value * 2 46 | nextValue = await generator.next({ double }) 47 | } 48 | 49 | const { sum } = nextValue.value 50 | return sum.toString() 51 | } 52 | 53 | async function* g() { 54 | const { double: a } = yield 1 55 | assert.equal(a, 2) 56 | 57 | await Promise.resolve() 58 | 59 | const { double: b } = yield 13 60 | assert.equal(b, 26) 61 | 62 | return { sum: a + b } 63 | } 64 | 65 | describe("Generator Demo", () => { 66 | it("sync", () => { 67 | const result = run(f) 68 | assert.equal(result, "28") 69 | }) 70 | 71 | it("async", async () => { 72 | const result = await runAsync(g) 73 | assert.equal(result, "28") 74 | }) 75 | }) 76 | -------------------------------------------------------------------------------- /src/itree.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { shuffle } from "lodash" 4 | import { describe, it } from "mocha" 5 | import { BinaryPlusIntervalTree } from "./itree" 6 | 7 |
describe("BinaryPlusIntervalTree", () => { 8 | it("works", () => { 9 | type K = [number, number, string] 10 | 11 | function* makeTuples(min: number, max: number) { 12 | for (let start = min; start < max; start++) { 13 | for (let end = start; end < max; end++) { 14 | const sum = start + end 15 | const id = sum % 2 === 0 ? "even" : "odd" 16 | yield { key: [start, end, id] as K, value: sum } 17 | // Some ranges will have duplicate entries. 18 | if (sum % 3 === 0) { 19 | yield { key: [start, end, "third"] as K, value: sum } 20 | } 21 | } 22 | } 23 | } 24 | 25 | const tuples = Array.from(makeTuples(0, 100)) 26 | 27 | const reduceInterval = (a: [number, number], b: [number, number]) => { 28 | return [Math.min(a[0], b[0]), Math.max(a[1], b[1])] as [number, number] 29 | } 30 | 31 | const tree = new BinaryPlusIntervalTree( 32 | 3, 33 | 9, 34 | reduceInterval, 35 | jsonCodec.compare, 36 | jsonCodec.compare 37 | ) 38 | 39 | for (const { key, value } of shuffle(tuples)) { 40 | tree.set(key, value) 41 | } 42 | 43 | for (let start = -19; start < 121; start += 10) { 44 | for (let end = start; end < 121; end += 10) { 45 | const result = tree.overlaps([start, end]) 46 | assert.deepEqual( 47 | result, 48 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 49 | ) 50 | } 51 | } 52 | 53 | // Decimal overlaps. 
54 | for (let start = -1.2; start < 105; start += 10) { 55 | for (let end = start; end < start + 5; end += 0.4) { 56 | const result = tree.overlaps([start, end]) 57 | assert.deepEqual( 58 | result, 59 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 60 | ) 61 | } 62 | } 63 | }) 64 | }) 65 | -------------------------------------------------------------------------------- /src/itree.ts: -------------------------------------------------------------------------------- 1 | import { 2 | BinaryPlusReducerTree, 3 | BranchNode, 4 | LeafNode, 5 | TreeReducer, 6 | } from "./bptree-reducer" 7 | 8 | function intervalReducer( 9 | reduce: (acc: I, interval: I) => I 10 | ): TreeReducer<[...I, any], any, I> { 11 | return { 12 | leaf: (values) => { 13 | let bound = values[0].key.slice(0, 2) as I 14 | for (let i = 1; i < values.length; i++) { 15 | bound = reduce(bound, values[i].key.slice(0, 2) as I) 16 | } 17 | return bound 18 | }, 19 | branch: (children) => { 20 | let bound = children[0].data 21 | for (let i = 1; i < children.length; i++) { 22 | bound = reduce(bound, children[i].data) 23 | } 24 | return bound 25 | }, 26 | } 27 | } 28 | 29 | export class BinaryPlusIntervalTree< 30 | B, 31 | K extends [B, B, any], 32 | V = any 33 | > extends BinaryPlusReducerTree { 34 | constructor( 35 | public minSize: number, 36 | public maxSize: number, 37 | public reduceInterval: (acc: [B, B], interval: [B, B]) => [B, B], 38 | public compareKey: (a: K, b: K) => number, 39 | public compareBound: (a: B, b: B) => number 40 | ) { 41 | const reducer = intervalReducer(reduceInterval) 42 | super(minSize, maxSize, reducer, compareKey) 43 | } 44 | 45 | private boundsOverlap(a: [B, B], b: [B, B]) { 46 | const [min, max] = a 47 | const [start, end] = b 48 | 49 | // return max >= start && min <= end 50 | return ( 51 | this.compareBound(max, start) >= 0 && this.compareBound(min, end) <= 0 52 | ) 53 | } 54 | 55 | overlaps([start, end]: [B, B]) { 56 | const root = this.nodes["root"] 57 | if 
(!root) return [] 58 | 59 | if (root.leaf) { 60 | return root.values.filter((item) => { 61 | const [min, max] = item.key 62 | return this.boundsOverlap([start, end], [min, max]) 63 | }) 64 | } 65 | 66 | { 67 | // No results. 68 | const [min, max] = root.data 69 | if (!this.boundsOverlap([start, end], [min, max])) { 70 | return [] 71 | } 72 | } 73 | 74 | // I'm not sure if we'd ever want to `limit` on this kind of query. But if we did, then we'd want 75 | // to do a depth-first traversal more lazily. 76 | let layer = [root] 77 | while (true) { 78 | const nextLayerIds: string[] = [] 79 | for (const node of layer) { 80 | for (const child of node.children) { 81 | const [min, max] = child.data 82 | if (this.boundsOverlap([start, end], [min, max])) { 83 | nextLayerIds.push(child.childId) 84 | } 85 | } 86 | } 87 | 88 | if (nextLayerIds.length === 0) return [] 89 | 90 | const nextLayer = nextLayerIds.map((childId) => { 91 | const node = this.nodes[childId] 92 | if (!node) throw new Error("Broken.") 93 | return node 94 | }) 95 | 96 | // Recur until we get to the leaves. 
97 | if (!nextLayer[0].leaf) { 98 | layer = nextLayer as BranchNode[] 99 | continue 100 | } 101 | 102 | const leaves = nextLayer as LeafNode[] 103 | const result: { key: K; value: V }[] = [] 104 | for (const leaf of leaves) { 105 | for (const item of leaf.values) { 106 | const [min, max] = item.key 107 | if (this.boundsOverlap([start, end], [min, max])) { 108 | result.push(item) 109 | } 110 | } 111 | } 112 | return result 113 | } 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /src/kv-lock.test.ts: -------------------------------------------------------------------------------- 1 | import { TestClock } from "@ccorcos/test-clock" 2 | import { strict as assert } from "assert" 3 | import { describe, it } from "mocha" 4 | import { AsyncKeyValueDatabase } from "./kv-lock" 5 | 6 | describe("AsyncKeyValueDatabase", () => { 7 | it("get", async () => { 8 | const kv = new AsyncKeyValueDatabase() 9 | 10 | let result = await kv.get("a") 11 | assert.deepEqual(result, undefined) 12 | 13 | await kv.write({ set: [{ key: "a", value: 1 }] }) 14 | result = await kv.get("a") 15 | assert.deepEqual(result, 1) 16 | }) 17 | 18 | it("concurrency", async () => { 19 | const kv = new AsyncKeyValueDatabase() 20 | 21 | const { sleep, run } = new TestClock() 22 | 23 | const p1 = (async () => { 24 | const tx = kv.transact() 25 | 26 | await tx.readLock("a") 27 | await sleep(10) 28 | 29 | const result = await kv.get("a") 30 | tx.release() 31 | return result 32 | })() 33 | 34 | const p2 = (async () => { 35 | const tx = kv.transact() 36 | 37 | await sleep(2) 38 | await tx.readLock("a") 39 | 40 | const result = await kv.get("a") 41 | tx.release() 42 | return result 43 | })() 44 | 45 | const p3 = (async () => { 46 | const tx = kv.transact() 47 | 48 | await sleep(1) 49 | await tx.writeLock("a") 50 | 51 | await sleep(10) 52 | await tx.set("a", 1) 53 | await tx.commit() 54 | })() 55 | 56 | const p4 = (async () => { 57 | const tx = kv.transact() 58 | 
59 | await sleep(3) 60 | await tx.writeLock("a") 61 | 62 | await sleep(10) 63 | await tx.set("a", 2) 64 | await tx.commit() 65 | return true 66 | })() 67 | 68 | const p5 = (async () => { 69 | const tx = kv.transact() 70 | 71 | await sleep(4) 72 | await tx.readLock("a") 73 | 74 | const result = await kv.get("a") 75 | tx.release() 76 | return result 77 | })() 78 | 79 | await run() 80 | const [r1, r2, r3, r4, r5] = await Promise.all([p1, p2, p3, p4, p5]) 81 | 82 | assert.deepEqual([r1, r2, r3, r4, r5], [undefined, 1, undefined, true, 2]) 83 | }) 84 | }) 85 | -------------------------------------------------------------------------------- /src/kv-lock.ts: -------------------------------------------------------------------------------- 1 | import { RWLockMap } from "@ccorcos/lock" 2 | 3 | export class AsyncKeyValueDatabase<T = any> { 4 | map = new Map<string, T>() 5 | 6 | async get(key: string) { 7 | return this.map.get(key) 8 | } 9 | 10 | async write(tx: { set?: { key: string; value: T }[]; delete?: string[] }) { 11 | for (const { key, value } of tx.set || []) this.map.set(key, value) 12 | for (const key of tx.delete || []) this.map.delete(key) 13 | } 14 | 15 | locks = new RWLockMap() 16 | 17 | transact() { 18 | return new AsyncKeyValueTransaction(this) 19 | } 20 | } 21 | 22 | export class AsyncKeyValueTransaction<T = any> { 23 | locks = new Set<() => void>() 24 | cache: { [key: string]: T | undefined } = {} 25 | sets: { [key: string]: T } = {} 26 | deletes = new Set<string>() 27 | 28 | constructor(public kv: AsyncKeyValueDatabase<T>) {} 29 | 30 | async readLock(key: string) { 31 | // console.log("READ", key) 32 | const release = await this.kv.locks.read(key) 33 | this.locks.add(release) 34 | return () => { 35 | this.locks.delete(release) 36 | release() 37 | } 38 | } 39 | 40 | async writeLock(key: string) { 41 | // console.trace("WRITE", key) 42 | const release = await this.kv.locks.write(key) 43 | this.locks.add(release) 44 | return () => { 45 | this.locks.delete(release) 46 | release() 47 | } 48 | } 49 |
50 | async get(key: string): Promise<T | undefined> { 51 | if (key in this.cache) return this.cache[key] 52 | const value = await this.kv.get(key) 53 | this.cache[key] = value 54 | return value 55 | } 56 | 57 | set(key: string, value: T) { 58 | this.sets[key] = value 59 | this.cache[key] = value 60 | this.deletes.delete(key) 61 | } 62 | 63 | delete(key: string) { 64 | this.cache[key] = undefined 65 | delete this.sets[key] 66 | this.deletes.add(key) 67 | } 68 | 69 | release() { 70 | for (const release of this.locks) release() 71 | } 72 | 73 | async commit() { 74 | await this.kv.write({ 75 | set: Object.entries(this.sets).map(([key, value]) => ({ key, value })), 76 | delete: Array.from(this.deletes), 77 | }) 78 | this.release() 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/kv.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { describe, it } from "mocha" 3 | import { KeyValueDatabase } from "./kv" 4 | 5 | describe("KeyValueDatabase", () => { 6 | it("get", () => { 7 | const kv = new KeyValueDatabase() 8 | 9 | let result = kv.get("a") 10 | assert.deepEqual(result?.value, undefined) 11 | 12 | kv.write({ set: [{ key: "a", value: 1 }] }) 13 | result = kv.get("a") 14 | assert.deepEqual(result?.value, 1) 15 | }) 16 | 17 | it("conflict", () => { 18 | const kv = new KeyValueDatabase() 19 | kv.write({ set: [{ key: "a", value: 1 }] }) 20 | 21 | const a = kv.get("a")!
22 | kv.write({ set: [{ key: "a", value: 2 }] }) 23 | 24 | assert.throws(() => { 25 | kv.write({ 26 | check: [{ key: "a", version: a.version }], 27 | set: [{ key: "b", value: a.value * 2 }], 28 | }) 29 | }) 30 | }) 31 | 32 | it("sum", () => { 33 | const kv = new KeyValueDatabase() 34 | kv.write({ sum: [{ key: "a", value: 1 }] }) 35 | assert.deepEqual(kv.get("a")?.value, 1) 36 | kv.write({ sum: [{ key: "a", value: 1 }] }) 37 | assert.deepEqual(kv.get("a")?.value, 2) 38 | }) 39 | }) 40 | -------------------------------------------------------------------------------- /src/kv.ts: -------------------------------------------------------------------------------- 1 | import { ulid } from "ulid" 2 | 3 | export class ConflictError extends Error {} 4 | 5 | export class KeyValueDatabase<V = any> { 6 | map: { [key: string]: { value: V; version: string } } = {} 7 | 8 | get = (key: string): { value: V; version: string } | undefined => { 9 | const existing = this.map[key] 10 | if (existing) return existing 11 | else return undefined 12 | } 13 | 14 | write(tx: { 15 | check?: { key: string; version: string | undefined }[] 16 | set?: { key: string; value: V }[] 17 | delete?: string[] 18 | sum?: { key: string; value: number }[] 19 | min?: { key: string; value: number }[] 20 | max?: { key: string; value: number }[] 21 | }) { 22 | for (const { key, version } of tx.check || []) 23 | if (this.map[key]?.version !== version) 24 | throw new ConflictError(`Version check failed.
${key} ${version}`) 25 | 26 | const version = ulid() 27 | 28 | for (const { key, value } of tx.set || []) 29 | this.map[key] = { value, version } 30 | 31 | const replace = (key: string, update: (value?: any) => number) => { 32 | const existing = this.map[key] 33 | this.map[key] = { value: update(existing?.value) as any, version } 34 | } 35 | 36 | for (const { key, value } of tx.sum || []) 37 | replace(key, (existing) => { 38 | if (typeof existing === "number") return existing + value 39 | if (existing === undefined) return value 40 | console.warn("Calling sum on a non-number value:", key, existing) 41 | return value 42 | }) 43 | for (const { key, value } of tx.min || []) 44 | replace(key, (existing) => { 45 | if (typeof existing === "number") return Math.min(existing, value) 46 | if (existing === undefined) return value 47 | console.warn("Calling min on a non-number value:", key, existing) 48 | return value 49 | }) 50 | for (const { key, value } of tx.max || []) 51 | replace(key, (existing) => { 52 | if (typeof existing === "number") return Math.max(existing, value) 53 | if (existing === undefined) return value 54 | console.warn("Calling max on a non-number value:", key, existing) 55 | return value 56 | }) 57 | 58 | for (const key of tx.delete || []) delete this.map[key] 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/lib/AsyncIntervalTree.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { cloneDeep, sample, shuffle, uniq } from "lodash" 4 | import { describe, it } from "mocha" 5 | import { AsyncIntervalTree } from "./AsyncIntervalTree" 6 | 7 | export class TestAsyncKeyValueStorage { 8 | map = new Map() 9 | 10 | constructor(private delay?: () => Promise) {} 11 | 12 | async get(key: string) { 13 | await this.delay?.() 14 | // console.log("GET", key, this.map.get(key)) 15 | 
return cloneDeep(this.map.get(key)) 16 | } 17 | 18 | async write(tx: { set?: { key: string; value: any }[]; delete?: string[] }) { 19 | await this.delay?.() 20 | for (const { key, value } of tx.set || []) { 21 | // console.log("SET", key, value) 22 | this.map.set(key, cloneDeep(value)) 23 | } 24 | for (const key of tx.delete || []) { 25 | // console.log("DELETE", key) 26 | this.map.delete(key) 27 | } 28 | } 29 | } 30 | 31 | describe("AsyncIntervalTree", () => { 32 | it("works", async function () { 33 | this.timeout(5_000) 34 | 35 | type K = [number, number, string] 36 | 37 | const storage = new TestAsyncKeyValueStorage() 38 | const tree = new AsyncIntervalTree( 39 | storage, 40 | 3, 41 | 9, 42 | jsonCodec.compare, 43 | jsonCodec.compare 44 | ) 45 | 46 | function* makeTuples(min: number, max: number) { 47 | for (let start = min; start < max; start++) { 48 | for (let end = start; end < max; end++) { 49 | const sum = start + end 50 | const id = sum % 2 === 0 ? "even" : "odd" 51 | yield { key: [start, end, id] as K, value: sum } 52 | // Some ranges will have duplicate entries. 53 | if (sum % 3 === 0) { 54 | yield { key: [start, end, "third"] as K, value: sum } 55 | } 56 | } 57 | } 58 | } 59 | 60 | const tuples = Array.from(makeTuples(0, 100)) 61 | for (const { key, value } of shuffle(tuples)) { 62 | await tree.set(key, value) 63 | } 64 | 65 | for (let start = -19; start < 121; start += 10) { 66 | for (let end = start; end < 121; end += 10) { 67 | const result = await tree.overlaps({ gte: start, lte: end }) 68 | assert.deepEqual( 69 | result, 70 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 71 | ) 72 | } 73 | } 74 | 75 | // Decimal overlaps. 
76 | for (let start = -1.2; start < 105; start += 10) { 77 | for (let end = start; end < start + 5; end += 0.4) { 78 | const result = await tree.overlaps({ gte: start, lte: end }) 79 | assert.deepEqual( 80 | result, 81 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 82 | ) 83 | } 84 | } 85 | }) 86 | 87 | it("property test", async function () { 88 | this.timeout(10_000) 89 | type Key = [[string, string], [string, string], string] 90 | 91 | const size = 140 92 | const storage = new TestAsyncKeyValueStorage() 93 | 94 | const tree = new AsyncIntervalTree( 95 | storage, 96 | 3, 97 | 9, 98 | jsonCodec.compare, 99 | jsonCodec.compare 100 | ) 101 | 102 | const tuples: { key: Key; value: number }[] = [] 103 | 104 | // Completely random non-numerical bounds with tuple bounds. 105 | let i = 0 106 | while (i < size) { 107 | const [min, max] = [randomId(), randomId()].sort() 108 | tuples.push({ 109 | key: [["user", min], ["user", max], randomId()], 110 | value: i++, 111 | }) 112 | 113 | // Same bound 114 | if (i % 7 === 0) { 115 | tuples.push({ 116 | key: [["user", min], ["user", max], randomId()], 117 | value: i++, 118 | }) 119 | } 120 | 121 | // Half same bound 122 | if (i % 11 === 0) { 123 | const [min2, max2] = [min, randomId()].sort() 124 | tuples.push({ 125 | key: [["user", min2], ["user", max2], randomId()], 126 | value: i++, 127 | }) 128 | } 129 | } 130 | 131 | for (let i = 0; i < tuples.length; i++) { 132 | const { key, value } = tuples[i] 133 | await tree.set(key, value) 134 | 135 | const answer = async ( 136 | args: { gt?: Key[0]; gte?: Key[0]; lt?: Key[0]; lte?: Key[0] } = {} 137 | ) => { 138 | return (await tree.list()).filter(({ key: [start, end] }) => { 139 | if (args.gt !== undefined) { 140 | if (jsonCodec.compare(end, args.gt) <= 0) return false 141 | } else if (args.gte !== undefined) { 142 | if (jsonCodec.compare(end, args.gte) < 0) return false 143 | } 144 | if (args.lt !== undefined) { 145 | if (jsonCodec.compare(start, args.lt) >= 0) 
return false 146 | } else if (args.lte !== undefined) { 147 | if (jsonCodec.compare(start, args.lte) > 0) return false 148 | } 149 | return true 150 | }) 151 | } 152 | 153 | const testOverlaps = async ( 154 | args: { gt?: Key[0]; gte?: Key[0]; lt?: Key[0]; lte?: Key[0] } = {} 155 | ) => assert.deepEqual(await tree.overlaps(args), await answer(args)) 156 | 157 | const ranges = async (n: number) => { 158 | // Beyond the left and right bounds. 159 | const left = String.fromCharCode("0".charCodeAt(0) - 1) 160 | const right = String.fromCharCode("z".charCodeAt(0) + 1) 161 | 162 | // Sample bounds from the existing dataset. 163 | let bounds: string[] = [] 164 | for (const { key } of await tree.list()) { 165 | const [a, b] = key 166 | bounds.push(a[1], b[1]) 167 | } 168 | // Half the elements are existing boundaries, half are new random values. 169 | bounds = shuffle(uniq(bounds)).slice(0, Math.round(n / 2)) 170 | while (bounds.length < n) bounds.push(randomId()) 171 | 172 | const ranges: [Key[0], Key[0]][] = [] 173 | ranges.push([ 174 | ["user", left], 175 | ["user", sample(bounds)!], 176 | ]) 177 | ranges.push([ 178 | ["user", left], 179 | ["user", randomId()], 180 | ]) 181 | ranges.push([ 182 | ["user", sample(bounds)!], 183 | ["user", right], 184 | ]) 185 | ranges.push([ 186 | ["user", randomId()], 187 | ["user", right], 188 | ]) 189 | 190 | for (let i = 0; i < n - 4; i++) { 191 | const [min, max] = [sample(bounds)!, sample(bounds)!].sort() 192 | ranges.push([ 193 | ["user", min], 194 | ["user", max], 195 | ]) 196 | } 197 | return ranges 198 | } 199 | 200 | await testOverlaps() 201 | for (const [min, max] of await ranges(size / 20)) { 202 | await testOverlaps({ gt: min }) 203 | await testOverlaps({ gte: min }) 204 | await testOverlaps({ lt: min }) 205 | await testOverlaps({ lte: min }) 206 | await testOverlaps({ gt: max }) 207 | await testOverlaps({ gte: max }) 208 | await testOverlaps({ lt: max }) 209 | await testOverlaps({ lte: max }) 210 | await testOverlaps({ gt: 
min, lt: max }) 211 | await testOverlaps({ gt: min, lte: max }) 212 | await testOverlaps({ gte: min, lt: max }) 213 | await testOverlaps({ gte: min, lte: max }) 214 | } 215 | } 216 | }) 217 | }) 218 | 219 | function randomId() { 220 | return Math.random().toString(36).slice(2, 10) 221 | } 222 | -------------------------------------------------------------------------------- /src/lib/AsyncIntervalTree.ts: -------------------------------------------------------------------------------- 1 | import { 2 | AsyncReducerTree, 3 | BranchNode, 4 | LeafNode, 5 | ReadTransaction, 6 | TreeReducer, 7 | } from "./AsyncReducerTree" 8 | import { AsyncKeyValueApi, KeyValueApi } from "./types" 9 | 10 | export class AsyncIntervalTree< 11 | K extends [B, B, ...any[]], 12 | V = any, 13 | B = any, 14 | > extends AsyncReducerTree { 15 | constructor( 16 | public storage: 17 | | KeyValueApi | LeafNode> 18 | | AsyncKeyValueApi< 19 | string, 20 | BranchNode | LeafNode 21 | >, 22 | public minSize: number, 23 | public maxSize: number, 24 | public compareKey: (a: K, b: K) => number, 25 | public compareBound: (a: B, b: B) => number 26 | ) { 27 | const reducer: TreeReducer = { 28 | leaf: (values) => { 29 | let a = values[0].key.slice(0, 2) as [B, B] 30 | for (let i = 1; i < values.length; i++) { 31 | const b = values[i].key.slice(0, 2) as [B, B] 32 | a = [ 33 | this.compareBound(a[0], b[0]) <= 0 ? a[0] : b[0], 34 | this.compareBound(a[1], b[1]) >= 0 ? a[1] : b[1], 35 | ] 36 | } 37 | return a 38 | }, 39 | branch: (children) => { 40 | let a = children[0].data 41 | for (let i = 1; i < children.length; i++) { 42 | const b = children[i].data 43 | a = [ 44 | this.compareBound(a[0], b[0]) <= 0 ? a[0] : b[0], 45 | this.compareBound(a[1], b[1]) >= 0 ? 
a[1] : b[1], 46 | ] 47 | } 48 | return a 49 | }, 50 | } 51 | 52 | super(storage, minSize, maxSize, reducer, compareKey) 53 | } 54 | 55 | private boundsOverlap(args: { gt?: B; gte?: B; lt?: B; lte?: B }, b: [B, B]) { 56 | const [start, end] = b 57 | 58 | if (args.gt !== undefined) { 59 | if (this.compareBound(end, args.gt) <= 0) return false 60 | } else if (args.gte !== undefined) { 61 | if (this.compareBound(end, args.gte) < 0) return false 62 | } 63 | 64 | if (args.lt !== undefined) { 65 | if (this.compareBound(start, args.lt) >= 0) return false 66 | } else if (args.lte !== undefined) { 67 | if (this.compareBound(start, args.lte) > 0) return false 68 | } 69 | 70 | return true 71 | } 72 | 73 | async overlaps(args: { gt?: B; gte?: B; lt?: B; lte?: B } = {}) { 74 | return this.lock.withRead(async () => { 75 | const tx = new ReadTransaction(this.storage) 76 | const root = await tx.get("root") 77 | if (!root) return [] 78 | 79 | if (root.leaf) { 80 | return root.values.filter((item) => { 81 | const [min, max] = item.key 82 | return this.boundsOverlap(args, [min, max]) 83 | }) 84 | } 85 | 86 | { 87 | // No results. 88 | const [min, max] = root.data 89 | 90 | if (!this.boundsOverlap(args, [min, max])) { 91 | return [] 92 | } 93 | } 94 | 95 | // I'm not sure if we'd ever want to `limit` on this kind of query. But if we did, then we'd want 96 | // to do a depth-first traversal more lazily. 
97 | let layer = [root] 98 | while (true) { 99 | const nextLayerIds: string[] = [] 100 | for (const node of layer) { 101 | for (const child of node.children) { 102 | const [min, max] = child.data 103 | if (this.boundsOverlap(args, [min, max])) { 104 | nextLayerIds.push(child.childId) 105 | } 106 | } 107 | } 108 | // console.log("trim", trim / total) 109 | 110 | if (nextLayerIds.length === 0) { 111 | return [] 112 | } 113 | 114 | const nextLayer = await Promise.all( 115 | nextLayerIds.map(async (childId) => { 116 | const node = await tx.get(childId) 117 | if (!node) throw new Error("Broken.") 118 | return node 119 | }) 120 | ) 121 | 122 | // Recur until we get to the leaves. 123 | if (!nextLayer[0].leaf) { 124 | layer = nextLayer as BranchNode[] 125 | continue 126 | } 127 | 128 | const leaves = nextLayer as LeafNode[] 129 | const result: { key: K; value: V }[] = [] 130 | for (const leaf of leaves) { 131 | for (const item of leaf.values) { 132 | const [min, max] = item.key 133 | if (this.boundsOverlap(args, [min, max])) { 134 | result.push(item) 135 | } 136 | } 137 | } 138 | return result 139 | } 140 | }) 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/lib/Database.ts: -------------------------------------------------------------------------------- 1 | import { RWLock } from "@ccorcos/lock" 2 | 3 | class Database { 4 | lock = new RWLock() 5 | } 6 | 7 | // export type KeyValueApi = { 8 | // get: (key: K) => V | undefined 9 | // write: (tx: { set?: { key: K; value: V }[]; delete?: K[] }) => void 10 | // } 11 | // export type AsyncKeyValueApi = AsyncApi> 12 | 13 | // export type OrderedKeyValueApi = KeyValueApi & { 14 | // list(args?: { 15 | // gt?: K 16 | // gte?: K 17 | // lt?: K 18 | // lte?: K 19 | // limit?: number 20 | // reverse?: boolean 21 | // }): { key: K; value: V }[] 22 | // } 23 | 24 | // export type AsyncOrderedKeyValueApi = AsyncApi< 25 | // OrderedKeyValueApi 26 | // > 27 | 28 | // export type 
ReducerTreeApi = OrderedKeyValueApi & { 29 | // reduce(args?: { gt?: K; gte?: K; lt?: K; lte?: K }): D 30 | // } 31 | 32 | // export type AsyncReducerTreeApi = AsyncApi< 33 | // ReducerTreeApi 34 | // > 35 | 36 | // export type IntervalTreeApi = ReducerTreeApi< 37 | // K, 38 | // V, 39 | // [B, B] 40 | // > & { 41 | // overlaps(args?: { gt?: B; gte?: B; lt?: B; lte?: B }): { key: K; value: V }[] 42 | // } 43 | 44 | // export type AsyncIntervalTreeApi = AsyncApi< 45 | // IntervalTreeApi 46 | // > 47 | 48 | // type Api = { [K: string]: (...args: any[]) => any } 49 | 50 | // type AsyncApi = { 51 | // [K in keyof T]: (...args: Parameters) => Promise> 52 | // } 53 | -------------------------------------------------------------------------------- /src/lib/InMemoryIntervalTree.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { sample, shuffle, uniq } from "lodash" 4 | import { describe, it } from "mocha" 5 | import { InMemoryIntervalTree } from "./InMemoryIntervalTree" 6 | 7 | describe("InMemoryIntervalTree", () => { 8 | it("works", () => { 9 | type K = [number, number, string] 10 | 11 | const tree = new InMemoryIntervalTree( 12 | 3, 13 | 9, 14 | jsonCodec.compare, 15 | jsonCodec.compare 16 | ) 17 | 18 | function* makeTuples(min: number, max: number) { 19 | for (let start = min; start < max; start++) { 20 | for (let end = start; end < max; end++) { 21 | const sum = start + end 22 | const id = sum % 2 === 0 ? "even" : "odd" 23 | yield { key: [start, end, id] as K, value: sum } 24 | // Some ranges will have duplicate entries. 
25 | if (sum % 3 === 0) { 26 | yield { key: [start, end, "third"] as K, value: sum } 27 | } 28 | } 29 | } 30 | } 31 | 32 | const tuples = Array.from(makeTuples(0, 100)) 33 | for (const { key, value } of shuffle(tuples)) { 34 | tree.set(key, value) 35 | } 36 | 37 | for (let start = -19; start < 121; start += 10) { 38 | for (let end = start; end < 121; end += 10) { 39 | const result = tree.overlaps({ gte: start, lte: end }) 40 | assert.deepEqual( 41 | result, 42 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 43 | ) 44 | } 45 | } 46 | 47 | // Decimal overlaps. 48 | for (let start = -1.2; start < 105; start += 10) { 49 | for (let end = start; end < start + 5; end += 0.4) { 50 | const result = tree.overlaps({ gte: start, lte: end }) 51 | assert.deepEqual( 52 | result, 53 | tuples.filter(({ key: [min, max] }) => start <= max && end >= min) 54 | ) 55 | } 56 | } 57 | }) 58 | 59 | it("property test", function () { 60 | this.timeout(10_000) 61 | type Key = [[string, string], [string, string], string] 62 | 63 | const size = 300 64 | const tree = new InMemoryIntervalTree( 65 | 3, 66 | 9, 67 | jsonCodec.compare, 68 | jsonCodec.compare 69 | ) 70 | 71 | const tuples: { key: Key; value: number }[] = [] 72 | 73 | // Completely random non-numerical bounds with tuple bounds. 
74 | let i = 0 75 | while (i < size) { 76 | const [min, max] = [randomId(), randomId()].sort() 77 | tuples.push({ 78 | key: [["user", min], ["user", max], randomId()], 79 | value: i++, 80 | }) 81 | 82 | // Same bound 83 | if (i % 7 === 0) { 84 | tuples.push({ 85 | key: [["user", min], ["user", max], randomId()], 86 | value: i++, 87 | }) 88 | } 89 | 90 | // Half same bound 91 | if (i % 11 === 0) { 92 | const [min2, max2] = [min, randomId()].sort() 93 | tuples.push({ 94 | key: [["user", min2], ["user", max2], randomId()], 95 | value: i++, 96 | }) 97 | } 98 | } 99 | 100 | for (let i = 0; i < tuples.length; i++) { 101 | const { key, value } = tuples[i] 102 | tree.set(key, value) 103 | 104 | const answer = ( 105 | args: { gt?: Key[0]; gte?: Key[0]; lt?: Key[0]; lte?: Key[0] } = {} 106 | ) => { 107 | return tree.list().filter(({ key: [start, end] }) => { 108 | if (args.gt !== undefined) { 109 | if (jsonCodec.compare(end, args.gt) <= 0) return false 110 | } else if (args.gte !== undefined) { 111 | if (jsonCodec.compare(end, args.gte) < 0) return false 112 | } 113 | if (args.lt !== undefined) { 114 | if (jsonCodec.compare(start, args.lt) >= 0) return false 115 | } else if (args.lte !== undefined) { 116 | if (jsonCodec.compare(start, args.lte) > 0) return false 117 | } 118 | return true 119 | }) 120 | } 121 | 122 | const testOverlaps = ( 123 | args: { gt?: Key[0]; gte?: Key[0]; lt?: Key[0]; lte?: Key[0] } = {} 124 | ) => assert.deepEqual(tree.overlaps(args), answer(args)) 125 | 126 | const ranges = (n: number) => { 127 | // Beyond the left and right bounds. 128 | const left = String.fromCharCode("0".charCodeAt(0) - 1) 129 | const right = String.fromCharCode("z".charCodeAt(0) + 1) 130 | 131 | // Sample bounds from the existing dataset. 132 | let bounds: string[] = [] 133 | for (const { key } of tree.list()) { 134 | const [a, b] = key 135 | bounds.push(a[1], b[1]) 136 | } 137 | // Half the elements are existing boundaries, half are new random values. 
138 | bounds = shuffle(uniq(bounds)).slice(0, Math.round(n / 2)) 139 | while (bounds.length < n) bounds.push(randomId()) 140 | 141 | const ranges: [Key[0], Key[0]][] = [] 142 | ranges.push([ 143 | ["user", left], 144 | ["user", sample(bounds)!], 145 | ]) 146 | ranges.push([ 147 | ["user", left], 148 | ["user", randomId()], 149 | ]) 150 | ranges.push([ 151 | ["user", sample(bounds)!], 152 | ["user", right], 153 | ]) 154 | ranges.push([ 155 | ["user", randomId()], 156 | ["user", right], 157 | ]) 158 | 159 | for (let i = 0; i < n - 4; i++) { 160 | const [min, max] = [sample(bounds)!, sample(bounds)!].sort() 161 | ranges.push([ 162 | ["user", min], 163 | ["user", max], 164 | ]) 165 | } 166 | return ranges 167 | } 168 | 169 | testOverlaps() 170 | for (const [min, max] of ranges(size / 20)) { 171 | testOverlaps({ gt: min }) 172 | testOverlaps({ gte: min }) 173 | testOverlaps({ lt: min }) 174 | testOverlaps({ lte: min }) 175 | testOverlaps({ gt: max }) 176 | testOverlaps({ gte: max }) 177 | testOverlaps({ lt: max }) 178 | testOverlaps({ lte: max }) 179 | testOverlaps({ gt: min, lt: max }) 180 | testOverlaps({ gt: min, lte: max }) 181 | testOverlaps({ gte: min, lt: max }) 182 | testOverlaps({ gte: min, lte: max }) 183 | } 184 | } 185 | }) 186 | }) 187 | 188 | function randomId() { 189 | return Math.random().toString(36).slice(2, 10) 190 | } 191 | -------------------------------------------------------------------------------- /src/lib/InMemoryIntervalTree.ts: -------------------------------------------------------------------------------- 1 | import { 2 | BranchNode, 3 | InMemoryReducerTree, 4 | LeafNode, 5 | TreeReducer, 6 | } from "./InMemoryReducerTree" 7 | 8 | export class InMemoryIntervalTree< 9 | K extends [B, B, ...any[]], 10 | V = any, 11 | B = any, 12 | > extends InMemoryReducerTree { 13 | constructor( 14 | public minSize: number, 15 | public maxSize: number, 16 | public compareKey: (a: K, b: K) => number, 17 | public compareBound: (a: B, b: B) => number 18 | ) { 
19 | const reducer: TreeReducer = { 20 | leaf: (values) => { 21 | let a = values[0].key.slice(0, 2) as [B, B] 22 | for (let i = 1; i < values.length; i++) { 23 | const b = values[i].key.slice(0, 2) as [B, B] 24 | a = [ 25 | this.compareBound(a[0], b[0]) <= 0 ? a[0] : b[0], 26 | this.compareBound(a[1], b[1]) >= 0 ? a[1] : b[1], 27 | ] 28 | } 29 | return a 30 | }, 31 | branch: (children) => { 32 | let a = children[0].data 33 | for (let i = 1; i < children.length; i++) { 34 | const b = children[i].data 35 | a = [ 36 | this.compareBound(a[0], b[0]) <= 0 ? a[0] : b[0], 37 | this.compareBound(a[1], b[1]) >= 0 ? a[1] : b[1], 38 | ] 39 | } 40 | return a 41 | }, 42 | } 43 | 44 | super(minSize, maxSize, reducer, compareKey) 45 | } 46 | 47 | private boundsOverlap(args: { gt?: B; gte?: B; lt?: B; lte?: B }, b: [B, B]) { 48 | const [start, end] = b 49 | 50 | if (args.gt !== undefined) { 51 | if (this.compareBound(end, args.gt) <= 0) return false 52 | } else if (args.gte !== undefined) { 53 | if (this.compareBound(end, args.gte) < 0) return false 54 | } 55 | 56 | if (args.lt !== undefined) { 57 | if (this.compareBound(start, args.lt) >= 0) return false 58 | } else if (args.lte !== undefined) { 59 | if (this.compareBound(start, args.lte) > 0) return false 60 | } 61 | 62 | return true 63 | } 64 | 65 | overlaps(args: { gt?: B; gte?: B; lt?: B; lte?: B } = {}) { 66 | const root = this.nodes.get("root") 67 | 68 | if (!root) return [] 69 | 70 | if (root.leaf) { 71 | return root.values.filter((item) => { 72 | const [min, max] = item.key 73 | return this.boundsOverlap(args, [min, max]) 74 | }) 75 | } 76 | 77 | { 78 | // No results. 79 | const [min, max] = root.data 80 | if (!this.boundsOverlap(args, [min, max])) { 81 | return [] 82 | } 83 | } 84 | 85 | // I'm not sure if we'd ever want to `limit` on this kind of query. But if we did, then we'd want 86 | // to do a depth-first traversal more lazily. 
87 | let layer = [root] 88 | while (true) { 89 | const nextLayerIds: string[] = [] 90 | for (const node of layer) { 91 | for (const child of node.children) { 92 | const [min, max] = child.data 93 | if (this.boundsOverlap(args, [min, max])) { 94 | nextLayerIds.push(child.childId) 95 | } 96 | } 97 | } 98 | 99 | if (nextLayerIds.length === 0) { 100 | return [] 101 | } 102 | 103 | const nextLayer = nextLayerIds.map((childId) => { 104 | const node = this.nodes.get(childId) 105 | if (!node) throw new Error("Broken.") 106 | return node 107 | }) 108 | 109 | // Recur until we get to the leaves. 110 | if (!nextLayer[0].leaf) { 111 | layer = nextLayer as BranchNode[] 112 | continue 113 | } 114 | 115 | const leaves = nextLayer as LeafNode[] 116 | const result: { key: K; value: V }[] = [] 117 | for (const leaf of leaves) { 118 | for (const item of leaf.values) { 119 | const [min, max] = item.key 120 | if (this.boundsOverlap(args, [min, max])) { 121 | result.push(item) 122 | } 123 | } 124 | } 125 | return result 126 | } 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/lib/types.ts: -------------------------------------------------------------------------------- 1 | export type KeyValueApi = { 2 | get: (key: K) => V | undefined 3 | write: (tx: { set?: { key: K; value: V }[]; delete?: K[] }) => void 4 | } 5 | export type AsyncKeyValueApi = AsyncApi> 6 | 7 | export type OrderedKeyValueApi = KeyValueApi & { 8 | list(args?: { 9 | gt?: K 10 | gte?: K 11 | lt?: K 12 | lte?: K 13 | limit?: number 14 | reverse?: boolean 15 | }): { key: K; value: V }[] 16 | } 17 | 18 | export type AsyncOrderedKeyValueApi = AsyncApi< 19 | OrderedKeyValueApi 20 | > 21 | 22 | export type ReducerTreeApi = OrderedKeyValueApi & { 23 | reduce(args?: { gt?: K; gte?: K; lt?: K; lte?: K }): D 24 | } 25 | 26 | export type AsyncReducerTreeApi = AsyncApi< 27 | ReducerTreeApi 28 | > 29 | 30 | export type IntervalTreeApi = ReducerTreeApi< 31 | K, 32 | V, 33 | 
[B, B] 34 | > & { 35 | overlaps(args?: { gt?: B; gte?: B; lt?: B; lte?: B }): { key: K; value: V }[] 36 | } 37 | 38 | export type AsyncIntervalTreeApi = AsyncApi< 39 | IntervalTreeApi 40 | > 41 | 42 | type Api = { [K: string]: (...args: any[]) => any } 43 | 44 | type AsyncApi = { 45 | [K in keyof T]: (...args: Parameters) => Promise> 46 | } 47 | -------------------------------------------------------------------------------- /src/lists.ts: -------------------------------------------------------------------------------- 1 | import { PositionSource } from "position-strings" 2 | 3 | const positions: string[] = [] 4 | 5 | function insert(p: PositionSource, index: number, len: number) { 6 | for (let i = index; i < index + len; i++) { 7 | // console.log(positions, positions[i - 1]) 8 | positions.splice(i, 0, p.createBetween(positions[i - 1], positions[i])) 9 | } 10 | } 11 | 12 | const p = new PositionSource({ ID: "A" }) 13 | insert(p, 0, 5) 14 | console.log(positions) 15 | 16 | // [ 17 | // "A.B", <- A is the waypoint, B is the lex position. 18 | // "A.D", 19 | // "A.F", 20 | // "A.H", 21 | // "A.J" 22 | // ] 23 | 24 | insert(p, 2, 5) 25 | console.log(positions) 26 | 27 | // [ 28 | // "A.B", 29 | // "A.D", 30 | // "A.D0B", <- 0 implies insert on the "left side", and B is the nested lex position. 31 | // "A.D0D", <- 32 | // "A.D0F", <- 33 | // "A.D0H", <- 34 | // "A.D0J", <- 35 | // "A.F", 36 | // "A.H", 37 | // "A.J" 38 | // ] 39 | 40 | const p2 = new PositionSource({ ID: "B" }) 41 | insert(p2, 3, 2) 42 | console.log(positions) 43 | 44 | // [ 45 | // "A.B", 46 | // "A.D", 47 | // "A.D0B", 48 | // "A.D0B,B.B", <- `A.D0B,B` is the waypoint, B is the nested lex position. 
49 | // "A.D0B,B.D", <- 50 | // "A.D0D", 51 | // "A.D0F", 52 | // "A.D0H", 53 | // "A.D0J", 54 | // "A.F", 55 | // "A.H", 56 | // "A.J" 57 | // ] 58 | 59 | insert(p, 3, 2) 60 | console.log(positions) 61 | 62 | // [ 63 | // "A.B", 64 | // "A.D", 65 | // "A.D0B", 66 | // "A.D0B,B.A1B", <- what's going on here? 67 | // "A.D0B,B.A1D", <- 68 | // "A.D0B,B.B", 69 | // "A.D0B,B.D", 70 | // "A.D0D", 71 | // "A.D0F", 72 | // "A.D0H", 73 | // "A.D0J", 74 | // "A.F", 75 | // "A.H", 76 | // "A.J" 77 | // ] 78 | 79 | insert(p2, 6, 2) 80 | console.log(positions) 81 | 82 | // [ 83 | // "A.B", 84 | // "A.D", 85 | // "A.D0B", 86 | // "A.D0B,B.A1B", 87 | // "A.D0B,B.A1D", 88 | // "A.D0B,B.B", 89 | // "A.D0B,B.B0B", <- 90 | // "A.D0B,B.B0D", <- 91 | // "A.D0B,B.D", 92 | // "A.D0D", 93 | // "A.D0F", 94 | // "A.D0H", 95 | // "A.D0J", 96 | // "A.F", 97 | // "A.H", 98 | // "A.J" 99 | // ] 100 | 101 | insert(p2, 3, 2) 102 | insert(p, 3, 2) 103 | insert(p2, 3, 2) 104 | console.log(positions) 105 | 106 | // [ 107 | // "A.B", 108 | // "A.D", 109 | // "A.D0B", 110 | // "A.D0B,B.A1A0A1A0B", 111 | // "A.D0B,B.A1A0A1A0D", 112 | // "A.D0B,B.A1A0A1B", 113 | // "A.D0B,B.A1A0A1D", 114 | // "A.D0B,B.A1A0B", 115 | // "A.D0B,B.A1A0D", 116 | // "A.D0B,B.A1B", 117 | // "A.D0B,B.A1D", 118 | // "A.D0B,B.B", 119 | // "A.D0B,B.B0B", 120 | // "A.D0B,B.B0D", 121 | // "A.D0B,B.D", 122 | // "A.D0D", 123 | // "A.D0F", 124 | // "A.D0H", 125 | // "A.D0J", 126 | // "A.F", 127 | // "A.H", 128 | // "A.J" 129 | // ] 130 | 131 | // What if we used structure instead of strings. 
132 | 133 | // [Id, Pos] 134 | // [Id, Pos, Left/Right, Pos] 135 | // [Id, Pos, Left/Right, Pos] 136 | 137 | type LR = 0 | 1 138 | 139 | type Index = 140 | | [{ id: string; pos: string }] 141 | | [{ id: string; pos: string }, ...Array<{ lr: LR; pos: string }>] 142 | | [ 143 | { id: string; pos: string }, 144 | ...Array<{ lr: LR; pos: string }>, 145 | { id: string; pos: string }, 146 | ] 147 | | [ 148 | { id: string; pos: string }, 149 | ...Array<{ lr: LR; pos: string }>, 150 | { id: string; pos: string }, 151 | ...Array<{ lr: LR; pos: string }>, 152 | ] 153 | 154 | positions.splice(0, 9999) 155 | 156 | insert(p, 0, 20) 157 | 158 | insert(p2, 20, 2) 159 | console.log(positions) 160 | -------------------------------------------------------------------------------- /src/okv.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { describe, it } from "mocha" 3 | import { OrderedKeyValueDatabase } from "./okv" 4 | 5 | describe("OrderedKeyValueDatabase", () => { 6 | it("get", () => { 7 | const okv = new OrderedKeyValueDatabase() 8 | 9 | let result = okv.get("a") 10 | assert.deepEqual(result?.value, undefined) 11 | 12 | okv.write({ set: [{ key: "a", value: 1 }] }) 13 | result = okv.get("a") 14 | assert.deepEqual(result?.value, 1) 15 | }) 16 | 17 | it("list", () => { 18 | const okv = new OrderedKeyValueDatabase() 19 | 20 | okv.write({ 21 | set: [ 22 | { key: "a", value: 0 }, 23 | { key: "aa", value: 0 }, 24 | { key: "ab", value: 0 }, 25 | { key: "ac", value: 0 }, 26 | { key: "b", value: 0 }, 27 | { key: "ba", value: 0 }, 28 | { key: "bb", value: 0 }, 29 | { key: "bc", value: 0 }, 30 | ], 31 | }) 32 | 33 | let result = okv.list({ prefix: "a" }).map(({ key }) => key) 34 | assert.deepEqual(result, ["aa", "ab", "ac"]) 35 | 36 | result = okv.list({ prefix: "a", start: "ab" }).map(({ key }) => key) 37 | assert.deepEqual(result, ["ab", "ac"]) 38 | 39 | result = okv.list({ prefix: "a", end: 
"ab" }).map(({ key }) => key) 40 | assert.deepEqual(result, ["aa"]) 41 | 42 | result = okv.list({ start: "a", end: "bb" }).map(({ key }) => key) 43 | assert.deepEqual(result, ["a", "aa", "ab", "ac", "b", "ba"]) 44 | 45 | result = okv 46 | .list({ start: "a", end: "bb", reverse: true, limit: 4 }) 47 | .map(({ key }) => key) 48 | assert.deepEqual(result, ["ba", "b", "ac", "ab"]) 49 | }) 50 | 51 | it("conflict", () => { 52 | const okv = new OrderedKeyValueDatabase() 53 | okv.write({ set: [{ key: "a", value: 1 }] }) 54 | 55 | const a = okv.get("a")! 56 | okv.write({ set: [{ key: "a", value: 2 }] }) 57 | 58 | assert.throws(() => { 59 | okv.write({ 60 | check: [{ key: "a", version: a.version }], 61 | set: [{ key: "b", value: a.value * 2 }], 62 | }) 63 | }) 64 | }) 65 | 66 | it("sum", () => { 67 | const okv = new OrderedKeyValueDatabase() 68 | okv.write({ sum: [{ key: "a", value: 1 }] }) 69 | assert.deepEqual(okv.get("a")?.value, 1) 70 | okv.write({ sum: [{ key: "a", value: 1 }] }) 71 | assert.deepEqual(okv.get("a")?.value, 2) 72 | }) 73 | }) 74 | -------------------------------------------------------------------------------- /src/okv.ts: -------------------------------------------------------------------------------- 1 | import { orderedArray } from "@ccorcos/ordered-array" 2 | import { ulid } from "ulid" 3 | 4 | type Item = { key: string; value: any; version: string } 5 | const getKey = (item: Item) => item.key 6 | const { search, insert, update, remove } = orderedArray(getKey) 7 | 8 | export class ConflictError extends Error {} 9 | 10 | export class OrderedKeyValueDatabase { 11 | private data: { key: string; value: any; version: string }[] = [] 12 | 13 | get = (key: string): { value: any; version: string } | undefined => { 14 | const result = search(this.data, key) 15 | if (result.found === undefined) return 16 | const { value, version } = this.data[result.found] 17 | return { value, version } 18 | } 19 | 20 | /** 21 | * start is inclusive. end is exclusive. 
prefix is exclusive 22 | */ 23 | list = (args: { 24 | prefix?: string 25 | start?: string 26 | end?: string 27 | limit?: number 28 | reverse?: boolean 29 | }) => { 30 | let startKey: string | undefined 31 | let endKey: string | undefined 32 | if (args.prefix) { 33 | startKey = args.prefix + "\x00" 34 | endKey = args.prefix + "\xff" 35 | } 36 | if (args.start) { 37 | startKey = args.start 38 | } 39 | if (args.end) { 40 | endKey = args.end 41 | } 42 | 43 | if (startKey && endKey && startKey > endKey) { 44 | throw new Error("Invalid bounds.") 45 | } 46 | 47 | let startIndex: number = 0 48 | let endIndex: number = this.data.length - 1 49 | 50 | if (startKey) { 51 | const _start = startKey 52 | const result = search(this.data, _start) 53 | startIndex = result.found !== undefined ? result.found : result.closest 54 | } 55 | 56 | if (endKey) { 57 | const _end = endKey 58 | const result = search(this.data, _end) 59 | endIndex = result.found !== undefined ? result.found : result.closest 60 | } 61 | 62 | if (args.reverse) { 63 | if (!args.limit) return this.data.slice(startIndex, endIndex).reverse() 64 | return this.data 65 | .slice(Math.max(startIndex, endIndex - args.limit), endIndex) 66 | .reverse() 67 | } 68 | 69 | if (!args.limit) return this.data.slice(startIndex, endIndex) 70 | return this.data.slice( 71 | startIndex, 72 | Math.min(startIndex + args.limit, endIndex) 73 | ) 74 | } 75 | 76 | write(tx: { 77 | check?: { key: string; version: string }[] 78 | // TODO: check range 79 | set?: { key: string; value: any }[] 80 | sum?: { key: string; value: number }[] 81 | min?: { key: string; value: number }[] 82 | max?: { key: string; value: number }[] 83 | delete?: string[] 84 | // TODO: delete range 85 | }) { 86 | for (const { key, version } of tx.check || []) 87 | if (this.get(key)?.version !== version) 88 | throw new ConflictError(`Version check failed. 
${key} ${version}`) 89 | 90 | const version = ulid() 91 | 92 | for (const { key, value } of tx.set || []) 93 | insert(this.data, { key, value, version }) 94 | 95 | const replaceValue = (key: string, fn: (existing?: any) => any) => 96 | update(this.data, key, (item) => ({ 97 | key, 98 | version, 99 | value: fn(item?.value), 100 | })) 101 | 102 | for (const { key, value } of tx.sum || []) 103 | replaceValue(key, (existing) => { 104 | if (typeof existing === "number") return existing + value 105 | if (existing === undefined) return value 106 | console.warn("Calling sum on a non-number value:", key, existing) 107 | return value 108 | }) 109 | for (const { key, value } of tx.min || []) 110 | replaceValue(key, (existing) => { 111 | if (typeof existing === "number") return Math.min(existing, value) 112 | if (existing === undefined) return value 113 | console.warn("Calling min on a non-number value:", key, existing) 114 | return value 115 | }) 116 | for (const { key, value } of tx.max || []) 117 | replaceValue(key, (existing) => { 118 | if (typeof existing === "number") return Math.max(existing, value) 119 | if (existing === undefined) return value 120 | console.warn("Calling max on a non-number value:", key, existing) 121 | return value 122 | }) 123 | 124 | for (const key of tx.delete || []) remove(this.data, key) 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /src/perfTools.ts: -------------------------------------------------------------------------------- 1 | import { Bench } from "tinybench" 2 | 3 | function prettyNs(timeNs: number) { 4 | const round = (n: number) => 5 | (Math.round(n * 1000) / 1000).toFixed(3).padStart(7, "0") 6 | 7 | const seconds = timeNs / (1000 * 1000 * 1000) 8 | if (seconds >= 1) return round(seconds) + "s" 9 | 10 | const ms = timeNs / (1000 * 1000) 11 | if (ms >= 1) return round(ms) + "ms" 12 | 13 | const us = timeNs / 1000 14 | if (us >= 1) return round(us) + "μs" 15 | 16 | return 
round(timeNs) + "ns" 17 | } 18 | 19 | export function printTable(bench: Bench) { 20 | const data = bench.table() 21 | console.table( 22 | data.map((item) => { 23 | if (!item) return 24 | const { "Average Time (ns)": time, ...rest } = item 25 | return { 26 | "Average Time": prettyNs(time as number), 27 | ...rest, 28 | } 29 | }) 30 | ) 31 | } 32 | -------------------------------------------------------------------------------- /src/performance.ts: -------------------------------------------------------------------------------- 1 | import { insert, remove } from "@ccorcos/ordered-array" 2 | import { sampleSize, shuffle } from "lodash" 3 | import { Bench } from "tinybench" 4 | import { BinaryPlusTree2 } from "./bptree2" 5 | import { printTable } from "./perfTools" 6 | 7 | async function test1() { 8 | const numbers = shuffle( 9 | Array(10_000) 10 | .fill(0) 11 | .map((x, i) => i) 12 | ) 13 | const bench = new Bench({ time: 2000, iterations: 2 }) 14 | bench 15 | .add("insert 10_000 ordered array", () => { 16 | console.log("insert 10_000 ordered array") 17 | const list: number[] = [] 18 | for (const n of numbers) insert(list, n) 19 | }) 20 | 21 | .add("insert 10_000 bptree2 50-100", async () => { 22 | console.log("insert 10_000 bptree2 50-100") 23 | const tree = new BinaryPlusTree2(50, 100) 24 | for (const n of numbers) tree.set(n, null) 25 | }) 26 | 27 | await bench.warmup() 28 | await bench.run() 29 | 30 | printTable(bench) 31 | // ┌─────────┬──────────────┬────────────────────────────────┬─────────┬──────────┬─────────┐ 32 | // │ (index) │ Average Time │ Task Name │ ops/sec │ Margin │ Samples │ 33 | // ├─────────┼──────────────┼────────────────────────────────┼─────────┼──────────┼─────────┤ 34 | // │ 0 │ '004.559ms' │ 'insert 10_000 ordered array' │ '219' │ '±0.36%' │ 439 │ 35 | // │ 1 │ '004.037ms' │ 'insert 10_000 bptree2 50-100' │ '247' │ '±1.67%' │ 496 │ 36 | // └─────────┴──────────────┴────────────────────────────────┴─────────┴──────────┴─────────┘ 37 | } 38 | 
39 | async function test2() { 40 | const numbers = shuffle( 41 | Array(100_000) 42 | .fill(0) 43 | .map((x, i) => i) 44 | ) 45 | const bench = new Bench({ time: 2000, iterations: 2 }) 46 | bench 47 | .add("insert 100_000 ordered array", () => { 48 | console.log("insert 100_000 ordered array") 49 | const list: number[] = [] 50 | for (const n of numbers) insert(list, n) 51 | }) 52 | .add("insert 100_000 bptree2 10-20", async () => { 53 | console.log("insert 100_000 bptree2 10-20") 54 | const tree = new BinaryPlusTree2(10, 20) 55 | for (const n of numbers) tree.set(n, null) 56 | }) 57 | .add("insert 100_000 bptree2 50-100", async () => { 58 | console.log("insert 100_000 bptree2 50-100") 59 | const tree = new BinaryPlusTree2(50, 100) 60 | for (const n of numbers) tree.set(n, null) 61 | }) 62 | .add("insert 100_000 bptree2 100-200", async () => { 63 | console.log("insert 100_000 bptree2 100-200") 64 | const tree = new BinaryPlusTree2(100, 200) 65 | for (const n of numbers) tree.set(n, null) 66 | }) 67 | 68 | await bench.warmup() 69 | await bench.run() 70 | 71 | printTable(bench) 72 | 73 | // ┌─────────┬──────────────┬──────────────────────────────────┬─────────┬──────────┬─────────┐ 74 | // │ (index) │ Average Time │ Task Name │ ops/sec │ Margin │ Samples │ 75 | // ├─────────┼──────────────┼──────────────────────────────────┼─────────┼──────────┼─────────┤ 76 | // │ 0 │ '401.652ms' │ 'insert 100_000 ordered array' │ '2' │ '±3.47%' │ 5 │ 77 | // │ 1 │ '070.908ms' │ 'insert 100_000 bptree2 10-20' │ '14' │ '±2.89%' │ 29 │ 78 | // │ 2 │ '052.286ms' │ 'insert 100_000 bptree2 50-100' │ '19' │ '±2.06%' │ 39 │ 79 | // │ 3 │ '051.514ms' │ 'insert 100_000 bptree2 100-200' │ '19' │ '±1.85%' │ 39 │ 80 | // └─────────┴──────────────┴──────────────────────────────────┴─────────┴──────────┴─────────┘ 81 | } 82 | 83 | async function test3() { 84 | const baseArray = shuffle( 85 | Array(100_000) 86 | .fill(0) 87 | .map((x, i) => i) 88 | ) 89 | 90 | const baseTree = new 
BinaryPlusTree2(50, 100) 91 | for (const n of baseArray) baseTree.set(n, null) 92 | 93 | let array: number[] = [] 94 | let tree = new BinaryPlusTree2(50, 100) 95 | 96 | const bench = new Bench({ 97 | time: 2000, 98 | iterations: 2, 99 | setup: () => { 100 | array = [...baseArray] 101 | tree = baseTree.clone() 102 | }, 103 | }) 104 | 105 | const insertNumbers = sampleSize(baseArray, 1000).map((n) => n + 0.5) 106 | const deleteNumbers = sampleSize(baseArray, 1000) 107 | 108 | bench 109 | .add("insert 1000 more from array 100k", () => { 110 | for (const n of insertNumbers) insert(array, n) 111 | }) 112 | .add("insert 1000 more bptree2 50-100 100k", async () => { 113 | for (const n of insertNumbers) tree.set(n, null) 114 | }) 115 | .add("delete 1000 more from array 100k", async () => { 116 | for (const n of deleteNumbers) remove(array, n) 117 | }) 118 | .add("delete 1000 more bptree2 50-100 100k", async () => { 119 | for (const n of deleteNumbers) tree.delete(n) 120 | }) 121 | 122 | await bench.warmup() 123 | await bench.run() 124 | 125 | printTable(bench) 126 | 127 | // ┌─────────┬──────────────┬────────────────────────────────────────┬─────────┬───────────┬─────────┐ 128 | // │ (index) │ Average Time │ Task Name │ ops/sec │ Margin │ Samples │ 129 | // ├─────────┼──────────────┼────────────────────────────────────────┼─────────┼───────────┼─────────┤ 130 | // │ 0 │ '012.094ms' │ 'insert 1000 more from array 100k' │ '82' │ '±23.60%' │ 167 │ 131 | // │ 1 │ '557.679μs' │ 'insert 1000 more bptree2 50-100 100k' │ '1,793' │ '±0.52%' │ 3587 │ 132 | // │ 2 │ '169.889μs' │ 'delete 1000 more from array 100k' │ '5,886' │ '±0.22%' │ 11773 │ 133 | // │ 3 │ '547.311μs' │ 'delete 1000 more bptree2 50-100 100k' │ '1,827' │ '±0.33%' │ 3655 │ 134 | // └─────────┴──────────────┴────────────────────────────────────────┴─────────┴───────────┴─────────┘ 135 | } 136 | 137 | test2() 138 | 139 | // bptree with sqlite vs bptree with leveldb vs sqlite vs leveldb 140 | // sqlite vs tuple bptree 
141 | // sqlite vs reducer tree 142 | // sqlite vs interval tree 143 | -------------------------------------------------------------------------------- /src/performance2.ts: -------------------------------------------------------------------------------- 1 | import sqlite from "better-sqlite3" 2 | import * as fs from "fs-extra" 3 | import { Level } from "level" 4 | import { chunk, sampleSize, shuffle } from "lodash" 5 | import { Bench } from "tinybench" 6 | import { AsyncBinaryPlusTree } from "./lib/AsyncBinaryPlusTree" 7 | import { InMemoryBinaryPlusTree } from "./lib/InMemoryBinaryPlusTree" 8 | import { printTable } from "./perfTools" 9 | import { LevelDbKeyValueStorage } from "./storage/LevelDbKeyValueStorage" 10 | import { SQLiteKeyValueStorage } from "./storage/SQLiteKeyValueStorage" 11 | /* 12 | 13 | Performance... 14 | - bptree with sqlite vs bptree with leveldb vs sqlite vs leveldb 15 | - sqlite vs tuple bptree 16 | 17 | 18 | minisql 19 | - create table 20 | - create index on table 21 | - insert into table 22 | - select from index 23 | 24 | */ 25 | 26 | let count = Date.now() 27 | function tmp(fileName: string) { 28 | const dirPath = __dirname + "../tmp/" + count++ 29 | fs.mkdirpSync(dirPath) 30 | return dirPath + "/" + fileName 31 | } 32 | 33 | async function test0() { 34 | const numbers = shuffle( 35 | Array(100_000) 36 | .fill(0) 37 | .map((x, i) => i) 38 | ) 39 | 40 | const insertNumbers1 = sampleSize(numbers, 10_000) 41 | const readNumbers1 = sampleSize(numbers, 2000) 42 | const deleteNumbers = sampleSize(numbers, 4000) 43 | const insertNumbers2 = sampleSize(numbers, 1000) 44 | const readNumbers2 = sampleSize(numbers, 4000) 45 | 46 | async function writeReadDelete(tree: AsyncBinaryPlusTree) { 47 | for (const numbers of chunk(insertNumbers1, 1000)) 48 | await tree.write({ set: numbers.map((n) => ({ key: n, value: n })) }) 49 | 50 | for (const number of readNumbers1) await tree.get(number) 51 | 52 | for (const numbers of chunk(deleteNumbers, 100)) 53 | 
await tree.write({ delete: numbers }) 54 | 55 | for (const numbers of chunk(insertNumbers2, 1000)) 56 | await tree.write({ set: numbers.map((n) => ({ key: n, value: n })) }) 57 | 58 | for (const number of readNumbers2) await tree.get(number) 59 | } 60 | 61 | const bench = new Bench({ time: 2000, iterations: 2 }) 62 | 63 | function sizeTest(min: number, max: number) { 64 | bench.add(`b+level ${min}-${max}`, async () => { 65 | const storage = new LevelDbKeyValueStorage(new Level(tmp("data.leveldb"))) 66 | const tree = new AsyncBinaryPlusTree(storage, min, max) 67 | await writeReadDelete(tree) 68 | }) 69 | } 70 | 71 | sizeTest(4, 9) 72 | sizeTest(10, 20) 73 | sizeTest(1, 20) 74 | sizeTest(20, 40) 75 | sizeTest(1, 40) 76 | sizeTest(40, 80) 77 | sizeTest(1, 80) 78 | 79 | // sizeTest(50, 100) 80 | // sizeTest(1, 100) 81 | // sizeTest(10, 100) 82 | // sizeTest(20, 100) 83 | 84 | // sizeTest(100, 200) 85 | // sizeTest(1, 200) 86 | // sizeTest(10, 200) 87 | // sizeTest(40, 200) 88 | 89 | // sizeTest(200, 400) 90 | // sizeTest(10, 400) 91 | 92 | // sizeTest(400, 800) 93 | // sizeTest(800, 1600) 94 | // sizeTest(2000, 4000) 95 | // sizeTest(10000, 20000) 96 | 97 | await bench.warmup() 98 | await bench.run() 99 | 100 | printTable(bench) 101 | 102 | // ┌─────────┬──────────────┬───────────────────────┬─────────┬───────────┬─────────┐ 103 | // │ (index) │ Average Time │ Task Name │ ops/sec │ Margin │ Samples │ 104 | // ├─────────┼──────────────┼───────────────────────┼─────────┼───────────┼─────────┤ 105 | // │ 0 │ '504.696ms' │ 'b+level 50-100' │ '1' │ '±5.88%' │ 4 │ 106 | // │ 1 │ '474.645ms' │ 'b+level 100-200' │ '2' │ '±3.55%' │ 5 │ 107 | // │ 2 │ '579.890ms' │ 'b+level 200-400' │ '1' │ '±3.01%' │ 4 │ 108 | // │ 3 │ '828.546ms' │ 'b+level 400-800' │ '1' │ '±4.55%' │ 3 │ 109 | // │ 4 │ '001.358s' │ 'b+level 800-1600' │ '0' │ '±11.91%' │ 2 │ 110 | // │ 5 │ '002.426s' │ 'b+level 2000-4000' │ '0' │ '±2.71%' │ 2 │ 111 | // │ 6 │ '009.078s' │ 'b+level 10000-20000' │ '0' │ '±3.50%' 
│ 2 │ 112 | // └─────────┴──────────────┴───────────────────────┴─────────┴───────────┴─────────┘ 113 | } 114 | 115 | async function test1() { 116 | const numbers = shuffle( 117 | Array(10_000) 118 | .fill(0) 119 | .map((x, i) => i) 120 | ) 121 | const bench = new Bench({ time: 2000, iterations: 2 }) 122 | bench 123 | .add("insert 10_000 memory", () => { 124 | const tree = new InMemoryBinaryPlusTree(50, 100) 125 | for (const n of numbers) tree.set(n, n) 126 | }) 127 | .add("insert 10_000 sqlite", async () => { 128 | const storage = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 129 | for (const n of numbers) 130 | await storage.write({ set: [{ key: n.toString(), value: n }] }) 131 | }) 132 | .add("insert 10_000 b+sqlite", async () => { 133 | const storage = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 134 | const tree = new AsyncBinaryPlusTree(storage, 1, 40) 135 | for (const n of numbers) await tree.set(n, n) 136 | }) 137 | .add("insert 10_000 level", async () => { 138 | const storage = new LevelDbKeyValueStorage(new Level(tmp("data.leveldb"))) 139 | for (const n of numbers) 140 | await storage.write({ set: [{ key: n.toString(), value: n }] }) 141 | }) 142 | .add("insert 10_000 b+level", async () => { 143 | const storage = new LevelDbKeyValueStorage(new Level(tmp("data.leveldb"))) 144 | const tree = new AsyncBinaryPlusTree(storage, 1, 40) 145 | for (const n of numbers) await tree.set(n, n) 146 | }) 147 | .add("insert batch 10_000 sqlite", async () => { 148 | const storage = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 149 | await storage.write({ 150 | set: numbers.map((n) => ({ key: n.toString(), value: n })), 151 | }) 152 | }) 153 | .add("insert batch 10_000 b+sqlite", async () => { 154 | const storage = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 155 | const tree = new AsyncBinaryPlusTree(storage, 1, 40) 156 | await tree.write({ 157 | set: numbers.map((n) => ({ key: n.toString(), value: n })), 158 | }) 159 | }) 160 | 
.add("insert batch 10_000 level", async () => { 161 | const storage = new LevelDbKeyValueStorage(new Level(tmp("data.leveldb"))) 162 | await storage.write({ 163 | set: numbers.map((n) => ({ key: n.toString(), value: n })), 164 | }) 165 | }) 166 | .add("insert batch 10_000 b+level", async () => { 167 | const storage = new LevelDbKeyValueStorage(new Level(tmp("data.leveldb"))) 168 | const tree = new AsyncBinaryPlusTree(storage, 1, 40) 169 | await tree.write({ 170 | set: numbers.map((n) => ({ key: n.toString(), value: n })), 171 | }) 172 | }) 173 | 174 | await bench.warmup() 175 | await bench.run() 176 | 177 | printTable(bench) 178 | } 179 | 180 | async function test2() { 181 | const baseArray = shuffle( 182 | Array(100_000) 183 | .fill(0) 184 | .map((x, i) => i) 185 | ) 186 | 187 | let storageSqlite: SQLiteKeyValueStorage 188 | let bpSqlite: AsyncBinaryPlusTree 189 | let storageLevel: LevelDbKeyValueStorage 190 | let bpLevel: AsyncBinaryPlusTree 191 | 192 | const bench = new Bench({ 193 | time: 2000, 194 | iterations: 2, 195 | setup: async () => { 196 | { 197 | storageSqlite = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 198 | 199 | await storageSqlite.write({ 200 | set: baseArray.map((n) => ({ key: n.toString(), value: n })), 201 | }) 202 | } 203 | { 204 | const storage = new SQLiteKeyValueStorage(sqlite(tmp("data.sqlite"))) 205 | bpSqlite = new AsyncBinaryPlusTree(storage, 1, 40) 206 | await bpSqlite.write({ 207 | set: baseArray.map((n) => ({ key: n.toString(), value: n })), 208 | }) 209 | } 210 | { 211 | storageLevel = new LevelDbKeyValueStorage( 212 | new Level(tmp("data.leveldb")) 213 | ) 214 | await storageLevel.write({ 215 | set: baseArray.map((n) => ({ key: n.toString(), value: n })), 216 | }) 217 | } 218 | { 219 | const storage = new LevelDbKeyValueStorage( 220 | new Level(tmp("data.leveldb")) 221 | ) 222 | bpLevel = new AsyncBinaryPlusTree(storage, 1, 40) 223 | await bpLevel.write({ 224 | set: baseArray.map((n) => ({ key: n.toString(), value: n 
})), 225 | }) 226 | } 227 | }, 228 | }) 229 | 230 | const insertNumbers = sampleSize(baseArray, 1000).map((n) => n + 0.5) 231 | const deleteNumbers = sampleSize(baseArray, 1000) 232 | const readNumbers = sampleSize(baseArray, 1000) 233 | 234 | bench 235 | .add("insert 1000 more from 100k sqlite", async () => { 236 | for (const n of insertNumbers) 237 | await storageSqlite.write({ set: [{ key: n.toString(), value: n }] }) 238 | }) 239 | .add("insert 1000 more from 100k b+ sqlite", async () => { 240 | for (const n of insertNumbers) 241 | await bpSqlite.write({ set: [{ key: n.toString(), value: n }] }) 242 | }) 243 | .add("insert 1000 more from 100k level", async () => { 244 | for (const n of insertNumbers) 245 | await storageLevel.write({ set: [{ key: n.toString(), value: n }] }) 246 | }) 247 | .add("insert 1000 more from 100k b+ level", async () => { 248 | for (const n of insertNumbers) 249 | await bpLevel.write({ set: [{ key: n.toString(), value: n }] }) 250 | }) 251 | .add("delete 1000 more from 100k sqlite", async () => { 252 | for (const n of deleteNumbers) 253 | await storageSqlite.write({ delete: [n.toString()] }) 254 | }) 255 | .add("delete 1000 more from 100k b+ sqlite", async () => { 256 | for (const n of deleteNumbers) 257 | await bpSqlite.write({ delete: [n.toString()] }) 258 | }) 259 | .add("delete 1000 more from 100k level", async () => { 260 | for (const n of deleteNumbers) 261 | await storageLevel.write({ delete: [n.toString()] }) 262 | }) 263 | .add("delete 1000 more from 100k b+ level", async () => { 264 | for (const n of deleteNumbers) 265 | await bpLevel.write({ delete: [n.toString()] }) 266 | }) 267 | .add("read 1000 from 100k sqlite", async () => { 268 | for (const n of readNumbers) await storageSqlite.get(n.toString()) 269 | }) 270 | .add("read 1000 from 100k b+ sqlite", async () => { 271 | for (const n of readNumbers) await bpSqlite.get(n.toString()) 272 | }) 273 | .add("read 1000 from 100k level", async () => { 274 | for (const n of 
readNumbers) await storageLevel.get(n.toString()) 275 | }) 276 | .add("read 1000 from 100k b+ level", async () => { 277 | for (const n of readNumbers) await bpLevel.get(n.toString()) 278 | }) 279 | 280 | await bench.warmup() 281 | await bench.run() 282 | 283 | printTable(bench) 284 | } 285 | 286 | test2() 287 | -------------------------------------------------------------------------------- /src/propertyTest.test.ts: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ccorcos/database-experiments/0e4f62ea96af5708641629669f370657dd25e623/src/propertyTest.test.ts -------------------------------------------------------------------------------- /src/storage/IndexedDbKeyValueStorage.ts: -------------------------------------------------------------------------------- 1 | import { IDBPDatabase, openDB } from "idb" 2 | import { AsyncKeyValueApi } from "../lib/types" 3 | 4 | const version = 1 5 | const storeName = "kv" 6 | 7 | export class IndexedDbKeyValueStorage 8 | implements AsyncKeyValueApi 9 | { 10 | private db: Promise> 11 | 12 | constructor(public dbName: string) { 13 | this.db = openDB(dbName, version, { 14 | upgrade(db) { 15 | db.createObjectStore(storeName) 16 | }, 17 | }) 18 | } 19 | 20 | async get(key: string) { 21 | const db = await this.db 22 | const value = await db.get(storeName, key) 23 | return value as V | undefined 24 | } 25 | 26 | async write(writes: { 27 | set?: { key: string; value: V }[] 28 | delete?: string[] 29 | }) { 30 | const db = await this.db 31 | const tx = db.transaction(storeName, "readwrite") 32 | for (const { key, value } of writes.set || []) { 33 | await tx.store.put(value, key) 34 | } 35 | for (const key of writes.delete || []) { 36 | await tx.store.delete(key) 37 | } 38 | await tx.done 39 | } 40 | 41 | async close() { 42 | const db = await this.db 43 | db.close() 44 | } 45 | } 46 | -------------------------------------------------------------------------------- 
/src/storage/IndexedDbOrderedKeyValueStorage.ts:
--------------------------------------------------------------------------------

import { IDBPDatabase, openDB } from "idb"
import { AsyncOrderedKeyValueApi } from "../lib/types"

const version = 1
const storeName = "okv"

/**
 * Async ordered key-value storage backed by IndexedDB, using the object
 * store's primary key for range scans.
 * NOTE: generic parameter lists were stripped by the source dump; `<V = any>`
 * is reconstructed from the body's use of V — confirm against the repo.
 */
export class IndexedDbOrderedKeyValueStorage<V = any>
	implements AsyncOrderedKeyValueApi<string, V>
{
	private db: Promise<IDBPDatabase>

	constructor(public dbName: string) {
		this.db = openDB(dbName, version, {
			upgrade(db) {
				db.createObjectStore(storeName)
			},
		})
	}

	/** Point lookup; resolves to undefined when the key is missing. */
	async get(key: string) {
		const db = await this.db
		const value = await db.get(storeName, key)
		return value as V | undefined
	}

	/**
	 * Range scan over keys with optional open (gt/lt) or closed (gte/lte)
	 * bounds, a result limit, and reverse ordering. Invalid bounds warn and
	 * return [] to match the other storage implementations.
	 */
	async list(
		args: {
			gt?: string
			gte?: string
			lt?: string
			lte?: string
			limit?: number
			reverse?: boolean
		} = {}
	) {
		const db = await this.db
		const tx = db.transaction(storeName, "readonly")
		const index = tx.store // primary key

		const start = args.gt ?? args.gte
		const startOpen = args.gt !== undefined
		const end = args.lt ?? args.lte
		const endOpen = args.lt !== undefined

		if (start !== undefined && end !== undefined) {
			if (start > end) {
				console.warn("Invalid bounds.", args)
				return []
			}
			if (start === end && (startOpen || endOpen)) {
				console.warn("Invalid bounds.", args)
				return []
			}
		}

		// BUG FIX: bounds were previously checked with truthiness (`if (end)`),
		// which silently ignored an empty-string bound. Use explicit undefined
		// checks like the JsonFile/SQLite implementations do.
		let range: IDBKeyRange | null
		if (end !== undefined) {
			if (start !== undefined) {
				range = IDBKeyRange.bound(start, end, startOpen, endOpen)
			} else {
				range = IDBKeyRange.upperBound(end, endOpen)
			}
		} else {
			if (start !== undefined) {
				range = IDBKeyRange.lowerBound(start, startOpen)
			} else {
				range = null
			}
		}

		const direction: IDBCursorDirection = args?.reverse ? "prev" : "next"

		const limit = args?.limit || Infinity
		let results: { key: string; value: V }[] = []
		for await (const cursor of index.iterate(range, direction)) {
			results.push({ key: cursor.key, value: cursor.value })
			if (results.length >= limit) break
		}
		await tx.done

		return results
	}

	/**
	 * Applies sets then deletes atomically in one readwrite transaction.
	 * Individual requests are queued on the transaction; awaiting tx.done
	 * waits for the commit.
	 */
	async write(writes: {
		set?: { key: string; value: V }[]
		delete?: string[]
	}) {
		const db = await this.db
		const tx = db.transaction(storeName, "readwrite")
		for (const { key, value } of writes.set || []) {
			tx.store.put(value, key)
		}
		for (const key of writes.delete || []) {
			tx.store.delete(key)
		}
		await tx.done
	}

	async close() {
		const db = await this.db
		db.close()
	}
}

--------------------------------------------------------------------------------
/src/storage/JsonFileKeyValueStorage.ts:
--------------------------------------------------------------------------------

import * as fs from "fs-extra"
import * as path from "path"
import { KeyValueApi } from "../lib/types"

/**
 * Synchronous key-value storage persisted as a single JSON file.
 * The whole map is rewritten to disk on every write.
 * NOTE: generic parameter lists were stripped by the source dump; `<V = any>`
 * is reconstructed from the body's use of V — confirm against the repo.
 */
export class JsonFileKeyValueStorage<V = any>
	implements KeyValueApi<string, V>
{
	map: { [key: string]: V } = {}

	constructor(public dbPath: string) {
		this.loadFile()
	}

	private loadFile() {
		// Check that the file exists.
		try {
			const stat = fs.statSync(this.dbPath)
			if (!stat.isFile()) {
				throw new Error("Database is not a file.")
			}
		} catch (error) {
			// FIX: `error` is `unknown` under strict mode — narrow before
			// reading `.code`.
			if ((error as NodeJS.ErrnoException)?.code === "ENOENT") {
				// File does not exist yet — start with an empty map.
				return
			}
			throw error
		}

		// Read the file.
		const contents = fs.readFileSync(this.dbPath, "utf8")
		this.map = JSON.parse(contents)
	}

	private saveFile() {
		const contents = JSON.stringify(this.map)
		fs.mkdirpSync(path.dirname(this.dbPath))
		fs.writeFileSync(this.dbPath, contents, "utf8")
	}

	get(key: string) {
		return this.map[key]
	}

	/** Applies sets then deletes, then flushes the entire map to disk. */
	write(tx: { set?: { key: string; value: V }[]; delete?: string[] }) {
		for (const { key, value } of tx.set || []) {
			this.map[key] = value
		}
		for (const key of tx.delete || []) {
			delete this.map[key]
		}
		this.saveFile()
	}
}

--------------------------------------------------------------------------------
/src/storage/JsonFileOrderedKeyValueStorage.ts:
--------------------------------------------------------------------------------

import { insert, remove, search } from "@ccorcos/ordered-array"
import * as fs from "fs-extra"
import * as path from "path"
import { OrderedKeyValueApi } from "../lib/types"

// Default three-way comparator for arbitrary comparable keys.
function compare(a: any, b: any) {
	if (a === b) return 0
	if (a > b) return 1
	return -1
}

/**
 * Synchronous ordered key-value storage persisted as a single JSON file.
 * Entries are kept in a sorted array maintained by @ccorcos/ordered-array.
 * NOTE: generic parameter lists were stripped by the source dump;
 * `<K = any, V = any>` is reconstructed from the body — confirm against repo.
 */
export class JsonFileOrderedKeyValueStorage<K = any, V = any>
	implements OrderedKeyValueApi<K, V>
{
	data: { key: K; value: V }[] = []

	constructor(
		public dbPath: string,
		public compareKey: (a: K, b: K) => number = compare
	) {
		this.loadFile()
	}

	private loadFile() {
		// Check that the file exists.
		try {
			const stat = fs.statSync(this.dbPath)
			if (!stat.isFile()) {
				throw new Error("Database is not a file.")
			}
		} catch (error) {
			// FIX: `error` is `unknown` under strict mode — narrow before
			// reading `.code`.
			if ((error as NodeJS.ErrnoException)?.code === "ENOENT") {
				// File does not exist yet — start with an empty array.
				return
			}
			throw error
		}

		// Read the file.
		const contents = fs.readFileSync(this.dbPath, "utf8")
		this.data = JSON.parse(contents) || []
	}

	private saveFile() {
		const contents = JSON.stringify(this.data)
		fs.mkdirpSync(path.dirname(this.dbPath))
		fs.writeFileSync(this.dbPath, contents, "utf8")
	}

	/** Binary-search point lookup; returns undefined when missing. */
	get(key: K) {
		const result = search(this.data, key, ({ key }) => key, this.compareKey)
		if (result.found === undefined) return
		return this.data[result.found].value
	}

	/**
	 * Range scan with optional open (gt/lt) or closed (gte/lte) bounds,
	 * reverse ordering, and a limit. Contradictory bounds warn and return [].
	 */
	list(
		args: {
			gt?: K
			gte?: K
			lt?: K
			lte?: K
			limit?: number
			reverse?: boolean
		} = {}
	): { key: K; value: V }[] {
		if (args.gt !== undefined && args.gte !== undefined)
			throw new Error("Invalid bounds: {gt, gte}")
		if (args.lt !== undefined && args.lte !== undefined)
			throw new Error("Invalid bounds: {lt, lte}")

		const start = args.gt ?? args.gte
		const startOpen = args.gt !== undefined
		const end = args.lt ?? args.lte
		const endOpen = args.lt !== undefined

		if (start !== undefined && end !== undefined) {
			const comp = this.compareKey(start, end)
			if (comp > 0) {
				console.warn("Invalid bounds.", args)
				return []
			}
			if (comp === 0 && (startOpen || endOpen)) {
				console.warn("Invalid bounds.", args)
				return []
			}
		}

		if (this.data.length === 0) return []

		let startIndex = 0
		if (start !== undefined) {
			const result = search(this.data, start, ({ key }) => key, this.compareKey)
			if (result.found !== undefined) {
				if (startOpen) startIndex = result.found + 1
				else startIndex = result.found
			} else startIndex = result.closest
		}

		let endIndex = this.data.length
		if (end !== undefined) {
			const result = search(this.data, end, ({ key }) => key, this.compareKey)
			if (result.found !== undefined) {
				if (endOpen) endIndex = result.found
				else endIndex = result.found + 1
			} else endIndex = result.closest
		}

		const result = this.data.slice(startIndex, endIndex)
		if (args.reverse) result.reverse()
		// Limit is applied after reversing, matching the other storages.
		if (args.limit) result.splice(args.limit, result.length)
		return result
	}

	/** Applies sets then deletes in sorted position, then flushes to disk. */
	write(tx: { set?: { key: K; value: V }[]; delete?: K[] }) {
		for (const { key, value } of tx.set || []) {
			insert(this.data, { key, value }, ({ key }) => key, this.compareKey)
		}
		for (const key of tx.delete || []) {
			remove(this.data, key, ({ key }) => key, this.compareKey)
		}
		this.saveFile()
	}
}

--------------------------------------------------------------------------------
/src/storage/LevelDbKeyValueStorage.ts:
--------------------------------------------------------------------------------

import { AbstractBatch } from "abstract-leveldown"
import { Level } from "level"
import { AsyncKeyValueApi } from "../lib/types"

/**
 * Async key-value storage backed by LevelDB. Values are JSON-serialized.
 * NOTE: generic parameter lists were stripped by the source dump; `<V = any>`
 * is reconstructed from the body's use of V — confirm against the repo.
 */
export class LevelDbKeyValueStorage<V = any>
	implements AsyncKeyValueApi<string, V>
{
	/**
	 * import { Level } from "level"
	 * new LevelDbKeyValueStorage(new Level("path/to.db"))
	 */
	constructor(public db: Level) {}

	/** Point lookup; resolves to undefined when the key is missing. */
	async get(key: string) {
		try {
			const value = await this.db.get(key)
			return JSON.parse(value) as V
		} catch (error) {
			// Missing keys throw in level; we intentionally resolve undefined.
			// NOTE(review): this also swallows real I/O and JSON errors —
			// consider rethrowing when the error is not a not-found error.
		}
	}

	/** Applies deletes and sets atomically as a single LevelDB batch. */
	async write(
		writes: {
			set?: { key: string; value: V }[]
			delete?: string[]
		} = {}
	) {
		const ops: AbstractBatch[] = []

		for (const key of writes.delete || []) {
			ops.push({ type: "del", key: key })
		}
		for (const { key, value } of writes.set || []) {
			ops.push({
				type: "put",
				key: key,
				value: JSON.stringify(value),
			})
		}
		await this.db.batch(ops)
	}

	async close(): Promise<void> {
		return this.db.close()
	}
}

--------------------------------------------------------------------------------
/src/storage/LevelDbOrderedKeyValueStorage.ts:
-------------------------------------------------------------------------------- 1 | import { AbstractBatch } from "abstract-leveldown" 2 | import { Level } from "level" 3 | import { AsyncOrderedKeyValueApi } from "../lib/types" 4 | 5 | export class LevelDbOrderedKeyValueStorage 6 | implements AsyncOrderedKeyValueApi 7 | { 8 | /** 9 | * import { Level } from "level" 10 | * new LevelDbOrderedKeyValueStorage(new Level("path/to.db")) 11 | */ 12 | constructor(public db: Level) {} 13 | 14 | async get(key: string) { 15 | try { 16 | const value = await this.db.get(key) 17 | return JSON.parse(value) as V 18 | } catch (error) {} 19 | } 20 | 21 | async list( 22 | args: { 23 | gt?: string 24 | gte?: string 25 | lt?: string 26 | lte?: string 27 | limit?: number 28 | reverse?: boolean 29 | } = {} 30 | ) { 31 | const results: { key: string; value: V }[] = [] 32 | for await (const [key, value] of this.db.iterator(args)) { 33 | results.push({ key: key, value: JSON.parse(value) }) 34 | } 35 | return results 36 | } 37 | 38 | async write( 39 | writes: { 40 | set?: { key: string; value: V }[] 41 | delete?: string[] 42 | } = {} 43 | ) { 44 | const ops: AbstractBatch[] = [] 45 | 46 | for (const key of writes.delete || []) { 47 | ops.push({ type: "del", key: key }) 48 | } 49 | for (const { key, value } of writes.set || []) { 50 | ops.push({ 51 | type: "put", 52 | key: key, 53 | value: JSON.stringify(value), 54 | }) 55 | } 56 | await this.db.batch(ops) 57 | } 58 | 59 | async close(): Promise { 60 | return this.db.close() 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/storage/SQLiteKeyValueStorage.ts: -------------------------------------------------------------------------------- 1 | import { Database, Statement, Transaction } from "better-sqlite3" 2 | import { KeyValueApi } from "../lib/types" 3 | 4 | export class SQLiteKeyValueStorage implements KeyValueApi { 5 | /** 6 | * import sqlite from "better-sqlite3" 7 | * new 
SQLiteKeyValueStorage(sqlite("path/to.db")) 8 | */ 9 | constructor(private db: Database) { 10 | const createTableQuery = db.prepare( 11 | `create table if not exists data ( key text primary key, value text)` 12 | ) 13 | 14 | // Make sure the table exists. 15 | createTableQuery.run() 16 | 17 | this.getQuery = db.prepare(`select * from data where key = $key`) 18 | 19 | const insertQuery = db.prepare( 20 | `insert or replace into data values ($key, $value)` 21 | ) 22 | const deleteQuery = db.prepare(`delete from data where key = $key`) 23 | 24 | this.writeFactsQuery = this.db.transaction( 25 | (tx: { set?: { key: string; value: any }[]; delete?: string[] }) => { 26 | for (const { key, value } of tx.set || []) { 27 | insertQuery.run({ key, value: JSON.stringify(value) }) 28 | } 29 | for (const key of tx.delete || []) { 30 | deleteQuery.run({ key: key }) 31 | } 32 | } 33 | ) 34 | } 35 | 36 | private getQuery: Statement 37 | private writeFactsQuery: Transaction 38 | 39 | get(key: string) { 40 | return this.getQuery 41 | .all({ key }) 42 | .map((row: any) => JSON.parse(row.value))[0] as V | undefined 43 | } 44 | 45 | write(tx: { set?: { key: string; value: V }[]; delete?: string[] }) { 46 | this.writeFactsQuery(tx) 47 | } 48 | 49 | close() { 50 | this.db.close() 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/storage/SQLiteOrderedKeyValueStorage.ts: -------------------------------------------------------------------------------- 1 | import { Database, Statement, Transaction } from "better-sqlite3" 2 | import { OrderedKeyValueApi } from "../lib/types" 3 | 4 | export class SQLiteOrderedKeyValueStorage 5 | implements OrderedKeyValueApi 6 | { 7 | /** 8 | * import sqlite from "better-sqlite3" 9 | * new SQLiteOrderedKeyValueStorage(sqlite("path/to.db")) 10 | */ 11 | constructor(private db: Database) { 12 | const createTableQuery = db.prepare( 13 | `create table if not exists data ( key text primary key, value text)` 14 | 
) 15 | 16 | // Make sure the table exists. 17 | createTableQuery.run() 18 | 19 | this.getQuery = db.prepare(`select * from data where key = $key`) 20 | 21 | const insertQuery = db.prepare( 22 | `insert or replace into data values ($key, $value)` 23 | ) 24 | const deleteQuery = db.prepare(`delete from data where key = $key`) 25 | 26 | this.writeFactsQuery = this.db.transaction( 27 | (tx: { set?: { key: string; value: any }[]; delete?: string[] }) => { 28 | for (const { key, value } of tx.set || []) { 29 | insertQuery.run({ key, value: JSON.stringify(value) }) 30 | } 31 | for (const key of tx.delete || []) { 32 | deleteQuery.run({ key: key }) 33 | } 34 | } 35 | ) 36 | } 37 | 38 | private getQuery: Statement 39 | private writeFactsQuery: Transaction 40 | 41 | get(key: string) { 42 | return this.getQuery 43 | .all({ key }) 44 | .map((row: any) => JSON.parse(row.value))[0] as V | undefined 45 | } 46 | 47 | list( 48 | args: { 49 | gt?: string 50 | gte?: string 51 | lt?: string 52 | lte?: string 53 | limit?: number 54 | reverse?: boolean 55 | } = {} 56 | ) { 57 | const sqlArgs: any = {} 58 | const whereClauses: string[] = [] 59 | 60 | if (args.gte !== undefined) { 61 | sqlArgs.gte = args.gte 62 | whereClauses.push("key >= $gte") 63 | } else if (args.gt !== undefined) { 64 | sqlArgs.gt = args.gt 65 | whereClauses.push("key > $gt") 66 | } 67 | 68 | if (args.lte !== undefined) { 69 | sqlArgs.lte = args.lte 70 | whereClauses.push("key <= $lte") 71 | } else if (args.lt !== undefined) { 72 | sqlArgs.lt = args.lt 73 | whereClauses.push("key < $lt") 74 | } 75 | 76 | let sqlQuery = `select * from data` 77 | if (whereClauses.length) { 78 | sqlQuery += " where " 79 | sqlQuery += whereClauses.join(" and ") 80 | } 81 | 82 | sqlQuery += " order by key" 83 | if (args.reverse) { 84 | sqlQuery += " desc" 85 | } 86 | if (args.limit) { 87 | sqlArgs.limit = args.limit 88 | sqlQuery += ` limit $limit` 89 | } 90 | 91 | const results: any[] = this.db.prepare(sqlQuery).all(sqlArgs) 92 | 93 | 
return results.map(({ key, value }) => ({ 94 | key: key, 95 | value: JSON.parse(value), 96 | })) 97 | } 98 | 99 | write(tx: { set?: { key: string; value: V }[]; delete?: string[] }) { 100 | this.writeFactsQuery(tx) 101 | } 102 | 103 | close() { 104 | this.db.close() 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/storage/storage.test.ts: -------------------------------------------------------------------------------- 1 | import { search } from "@ccorcos/ordered-array" 2 | import { strict as assert } from "assert" 3 | import sqlite from "better-sqlite3" 4 | import { Level } from "level" 5 | import { uniq } from "lodash" 6 | import { describe, it } from "mocha" 7 | import { 8 | AsyncKeyValueApi, 9 | AsyncOrderedKeyValueApi, 10 | KeyValueApi, 11 | OrderedKeyValueApi, 12 | } from "../lib/types" 13 | import { IndexedDbKeyValueStorage } from "./IndexedDbKeyValueStorage" 14 | import { IndexedDbOrderedKeyValueStorage } from "./IndexedDbOrderedKeyValueStorage" 15 | import { JsonFileKeyValueStorage } from "./JsonFileKeyValueStorage" 16 | import { JsonFileOrderedKeyValueStorage } from "./JsonFileOrderedKeyValueStorage" 17 | import { LevelDbKeyValueStorage } from "./LevelDbKeyValueStorage" 18 | import { LevelDbOrderedKeyValueStorage } from "./LevelDbOrderedKeyValueStorage" 19 | import { SQLiteKeyValueStorage } from "./SQLiteKeyValueStorage" 20 | import { SQLiteOrderedKeyValueStorage } from "./SQLiteOrderedKeyValueStorage" 21 | 22 | async function keyValuePropertyTest(storage: AsyncKeyValueApi | KeyValueApi) { 23 | const numbers = randomNumbers(2_000) 24 | 25 | // Write them all. 26 | await storage.write({ 27 | set: numbers.map((n) => ({ key: n.toString(), value: n })), 28 | }) 29 | 30 | // Delete them all. 
31 | for (const n of numbers) { 32 | const before = await storage.get(n.toString()) 33 | assert.equal(before, n) 34 | await storage.write({ delete: [n.toString()] }) 35 | const after = await storage.get(n.toString()) 36 | assert.equal(after, undefined) 37 | } 38 | } 39 | 40 | async function orderedKeyValuePropertyTest( 41 | storage: AsyncOrderedKeyValueApi | OrderedKeyValueApi 42 | ) { 43 | await keyValuePropertyTest(storage) 44 | 45 | const toKey = (n: number) => n.toString().padStart(5, "0") 46 | 47 | // Write some even numbers. 48 | const numbers = Array(1000) 49 | .fill(0) 50 | .map((_, i) => toKey(i * 2)) 51 | 52 | await storage.write({ 53 | set: numbers.map((n) => ({ key: n, value: n })), 54 | }) 55 | 56 | const listTest = async (start: number, end: number) => { 57 | const options: any[][] = [ 58 | [ 59 | { gt: toKey(start) }, 60 | { gte: toKey(start) }, 61 | { gt: toKey(start - 1) }, 62 | { gte: toKey(start - 1) }, 63 | ], 64 | [ 65 | { lt: toKey(end) }, 66 | { lte: toKey(end) }, 67 | { lt: toKey(end + 1) }, 68 | { lte: toKey(end + 1) }, 69 | ], 70 | [{}, { reverse: true }], 71 | [{}, { limit: 1 }, { limit: 10 }], 72 | ] 73 | 74 | for (const combination of permuteOptions(options)) { 75 | const args = Object.assign({}, ...combination) 76 | if (args.gt >= args.lt) continue 77 | if (args.gte >= args.lt) continue 78 | if (args.gt >= args.lte) continue 79 | if (args.gte > args.lte) continue 80 | 81 | const queryResult = (await storage.list(args)).map(({ key }) => key) 82 | 83 | const start = args.gt ?? args.gte 84 | const startOpen = args.gt !== undefined 85 | const end = args.lt ?? 
args.lte 86 | const endOpen = args.lt !== undefined 87 | 88 | let startIndex = 0 89 | if (start !== undefined) { 90 | const result = search(numbers, start) 91 | if (result.found !== undefined) { 92 | if (startOpen) startIndex = result.found + 1 93 | else startIndex = result.found 94 | } else startIndex = result.closest 95 | } 96 | 97 | let endIndex = numbers.length 98 | if (end !== undefined) { 99 | const result = search(numbers, end) 100 | if (result.found !== undefined) { 101 | if (endOpen) endIndex = result.found 102 | else endIndex = result.found + 1 103 | } else endIndex = result.closest 104 | } 105 | 106 | const expectedResult = numbers.slice(startIndex, endIndex) 107 | if (args.reverse) expectedResult.reverse() 108 | if (args.limit) expectedResult.splice(args.limit, expectedResult.length) 109 | 110 | assert.deepEqual(queryResult, expectedResult, JSON.stringify(args)) 111 | } 112 | } 113 | 114 | await listTest(0, 0) 115 | 116 | await listTest(-10, -4) 117 | await listTest(0, 1) 118 | await listTest(0, 2) 119 | await listTest(2, 2) 120 | await listTest(5000, 5020) 121 | await listTest(-10, 10) 122 | await listTest(0, 10) 123 | await listTest(1980, 2010) 124 | 125 | for (let i = 0; i < 100; i++) { 126 | const [start, end] = randomNumbers(2, [-100, 2100]).sort() 127 | await listTest(start, end) 128 | } 129 | } 130 | 131 | let log = false 132 | 133 | describe("KeyValueStorage", () => { 134 | const now = Date.now() 135 | const dirPath = __dirname + "/../../tmp/" + now 136 | // fs.mkdirpSync(dirPath) 137 | 138 | it("JsonFileKeyValueStorage", async () => { 139 | const storage = new JsonFileKeyValueStorage(dirPath + "/data2.json") 140 | await keyValuePropertyTest(storage) 141 | }) 142 | 143 | it("SQLiteKeyValueStorage", async () => { 144 | const storage = new SQLiteKeyValueStorage(sqlite(dirPath + "/data2.sqlite")) 145 | await keyValuePropertyTest(storage) 146 | }) 147 | 148 | it("LevelDbKeyValueStorage", async () => { 149 | const storage = new 
LevelDbKeyValueStorage( 150 | new Level(dirPath + "/data2.leveldb") 151 | ) 152 | await keyValuePropertyTest(storage) 153 | }) 154 | 155 | it("IndexedDbKeyValueStorage", async () => { 156 | require("fake-indexeddb/auto") 157 | const storage = new IndexedDbKeyValueStorage(now.toString() + "2") 158 | await keyValuePropertyTest(storage) 159 | }) 160 | }) 161 | 162 | describe("OrderedKeyValueStorage", () => { 163 | const now = Date.now() 164 | const dirPath = __dirname + "/../../tmp/" + now 165 | // fs.mkdirpSync(dirPath) 166 | 167 | it("JsonFileOrderedKeyValueStorage", async () => { 168 | const storage = new JsonFileOrderedKeyValueStorage(dirPath + "/data.json") 169 | await orderedKeyValuePropertyTest(storage) 170 | }) 171 | 172 | it("SQLiteOrderedKeyValueStorage", async () => { 173 | const storage = new SQLiteOrderedKeyValueStorage( 174 | sqlite(dirPath + "/data.sqlite") 175 | ) 176 | await orderedKeyValuePropertyTest(storage) 177 | }) 178 | 179 | it("LevelDbOrderedKeyValueStorage", async function () { 180 | this.timeout(10_000) 181 | const storage = new LevelDbOrderedKeyValueStorage( 182 | new Level(dirPath + "/data.leveldb") 183 | ) 184 | await orderedKeyValuePropertyTest(storage) 185 | }) 186 | 187 | it("IndexedDbOrderedKeyValueStorage", async function () { 188 | this.timeout(30_000) 189 | require("fake-indexeddb/auto") 190 | const storage = new IndexedDbOrderedKeyValueStorage(now.toString()) 191 | await orderedKeyValuePropertyTest(storage) 192 | }) 193 | }) 194 | 195 | function randomNumbers(size: number, range?: [number, number]) { 196 | if (!range) range = [-size * 10, size * 10] 197 | const numbers: number[] = [] 198 | for (let i = 0; i < size; i++) 199 | numbers.push(Math.round(Math.random() * (range[1] - range[0]) - range[0])) 200 | return uniq(numbers) 201 | } 202 | 203 | function permuteOptions(options: T[][]): T[][] { 204 | if (options.length === 0) return [] 205 | 206 | let result: T[][] = [[]] 207 | 208 | for (const group of options) { 209 | const 
expand: T[][] = [] 210 | for (const combination of result) { 211 | for (const value of group) { 212 | expand.push([...combination, value]) 213 | } 214 | } 215 | result = expand 216 | } 217 | 218 | return result 219 | } 220 | 221 | describe("permuteOptions", () => { 222 | it("permuteOptions", async () => { 223 | assert.deepEqual(permuteOptions([[1, 2, 3], [4, 5], [6], [7, 8]]), [ 224 | [1, 4, 6, 7], 225 | [1, 4, 6, 8], 226 | [1, 5, 6, 7], 227 | [1, 5, 6, 8], 228 | [2, 4, 6, 7], 229 | [2, 4, 6, 8], 230 | [2, 5, 6, 7], 231 | [2, 5, 6, 8], 232 | [3, 4, 6, 7], 233 | [3, 4, 6, 8], 234 | [3, 5, 6, 7], 235 | [3, 5, 6, 8], 236 | ]) 237 | }) 238 | }) 239 | -------------------------------------------------------------------------------- /src/tuple-okv.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { describe, it } from "mocha" 4 | import { OrderedTupleValueDatabase } from "./tuple-okv" 5 | 6 | describe("OrderedTupleValueDatabase", () => { 7 | it("get", () => { 8 | const okv = new OrderedTupleValueDatabase(jsonCodec) 9 | 10 | let result = okv.get(["a"]) 11 | assert.deepEqual(result?.value, undefined) 12 | 13 | okv.write({ set: [{ key: ["a"], value: 1 }] }) 14 | result = okv.get(["a"]) 15 | assert.deepEqual(result?.value, 1) 16 | }) 17 | 18 | it("list", () => { 19 | const okv = new OrderedTupleValueDatabase(jsonCodec) 20 | 21 | okv.write({ 22 | set: [ 23 | { key: ["a"], value: 0 }, 24 | { key: ["a", "a"], value: 0 }, 25 | { key: ["a", "b"], value: 0 }, 26 | { key: ["a", "c"], value: 0 }, 27 | { key: ["b"], value: 0 }, 28 | { key: ["b", "a"], value: 0 }, 29 | { key: ["b", "b"], value: 0 }, 30 | { key: ["b", "c"], value: 0 }, 31 | ], 32 | }) 33 | 34 | let result = okv.list({ prefix: ["a"] }).map(({ key }) => key) 35 | assert.deepEqual(result, [ 36 | ["a", "a"], 37 | ["a", "b"], 38 | ["a", "c"], 39 | ]) 40 | 41 | result = okv 42 | .list({ 
prefix: ["a"], start: ["a", "b"] }) 43 | .map(({ key }) => key) 44 | assert.deepEqual(result, [ 45 | ["a", "b"], 46 | ["a", "c"], 47 | ]) 48 | 49 | result = okv.list({ prefix: ["a"], end: ["a", "b"] }).map(({ key }) => key) 50 | assert.deepEqual(result, [["a", "a"]]) 51 | 52 | result = okv.list({ start: ["a"], end: ["b", "b"] }).map(({ key }) => key) 53 | assert.deepEqual(result, [ 54 | ["a"], 55 | ["a", "a"], 56 | ["a", "b"], 57 | ["a", "c"], 58 | ["b"], 59 | ["b", "a"], 60 | ]) 61 | 62 | result = okv 63 | .list({ start: ["a"], end: ["b", "b"], reverse: true, limit: 4 }) 64 | .map(({ key }) => key) 65 | assert.deepEqual(result, [["b", "a"], ["b"], ["a", "c"], ["a", "b"]]) 66 | }) 67 | 68 | it("conflict", () => { 69 | const okv = new OrderedTupleValueDatabase(jsonCodec) 70 | okv.write({ set: [{ key: ["a"], value: 1 }] }) 71 | 72 | const a = okv.get(["a"])! 73 | okv.write({ set: [{ key: ["a"], value: 2 }] }) 74 | 75 | assert.throws(() => { 76 | okv.write({ 77 | check: [{ key: ["a"], version: a.version }], 78 | set: [{ key: ["b"], value: a.value * 2 }], 79 | }) 80 | }) 81 | }) 82 | 83 | it("sum", () => { 84 | const okv = new OrderedTupleValueDatabase(jsonCodec) 85 | okv.write({ sum: [{ key: ["a"], value: 1 }] }) 86 | assert.deepEqual(okv.get(["a"])?.value, 1) 87 | okv.write({ sum: [{ key: ["a"], value: 1 }] }) 88 | assert.deepEqual(okv.get(["a"])?.value, 2) 89 | }) 90 | }) 91 | -------------------------------------------------------------------------------- /src/tuple-okv.ts: -------------------------------------------------------------------------------- 1 | import { orderedArray } from "@ccorcos/ordered-array" 2 | import { Codec } from "lexicodec" 3 | import { ulid } from "ulid" 4 | 5 | type Tuple = any[] 6 | type Item = { key: Tuple; value: any; version: string } 7 | 8 | export class ConflictError extends Error {} 9 | 10 | export class OrderedTupleValueDatabase { 11 | constructor(private codec: Codec) {} 12 | private utils = orderedArray((item: Item) => item.key, 
this.codec.compare) 13 | 14 | private data: { key: Tuple; value: any; version: string }[] = [] 15 | 16 | get = (key: Tuple): { value: any; version: string } | undefined => { 17 | const result = this.utils.search(this.data, key) 18 | if (result.found === undefined) return 19 | const { value, version } = this.data[result.found] 20 | return { value, version } 21 | } 22 | 23 | /** 24 | * start is inclusive. end is exclusive. prefix is exclusive 25 | */ 26 | list = (args: { 27 | prefix?: Tuple 28 | start?: Tuple 29 | end?: Tuple 30 | limit?: number 31 | reverse?: boolean 32 | }) => { 33 | let startKey: Tuple | undefined 34 | let endKey: Tuple | undefined 35 | if (args.prefix) { 36 | startKey = args.prefix 37 | endKey = [...args.prefix, this.codec.MAX] 38 | } 39 | if (args.start) { 40 | startKey = args.start 41 | } 42 | if (args.end) { 43 | endKey = args.end 44 | } 45 | 46 | if (startKey && endKey && this.codec.compare(startKey, endKey) > 0) { 47 | throw new Error("Invalid bounds.") 48 | } 49 | 50 | let startIndex: number = 0 51 | let endIndex: number = this.data.length - 1 52 | 53 | if (startKey) { 54 | const _start = startKey 55 | const result = this.utils.search(this.data, _start) 56 | if (result.found === undefined) { 57 | startIndex = result.closest 58 | } else if (startKey === args.prefix) { 59 | startIndex = result.found + 1 60 | } else { 61 | startIndex = result.found 62 | } 63 | } 64 | 65 | if (endKey) { 66 | const _end = endKey 67 | const result = this.utils.search(this.data, _end) 68 | if (result.found === undefined) { 69 | endIndex = result.closest 70 | } else { 71 | endIndex = result.found 72 | } 73 | } 74 | 75 | if (args.reverse) { 76 | if (!args.limit) return this.data.slice(startIndex, endIndex).reverse() 77 | return this.data 78 | .slice(Math.max(startIndex, endIndex - args.limit), endIndex) 79 | .reverse() 80 | } 81 | 82 | if (!args.limit) return this.data.slice(startIndex, endIndex) 83 | return this.data.slice( 84 | startIndex, 85 | Math.min(startIndex 
+ args.limit, endIndex) 86 | ) 87 | } 88 | 89 | write(tx: { 90 | check?: { key: Tuple; version: string }[] 91 | // TODO: check range 92 | set?: { key: Tuple; value: any }[] 93 | sum?: { key: Tuple; value: number }[] 94 | min?: { key: Tuple; value: number }[] 95 | max?: { key: Tuple; value: number }[] 96 | delete?: Tuple[] 97 | // TODO: delete range 98 | }) { 99 | for (const { key, version } of tx.check || []) 100 | if (this.get(key)?.version !== version) 101 | throw new ConflictError(`Version check failed. ${key} ${version}`) 102 | 103 | const version = ulid() 104 | 105 | for (const { key, value } of tx.set || []) 106 | this.utils.insert(this.data, { key, value, version }) 107 | 108 | const replaceValue = (key: Tuple, fn: (existing?: any) => any) => 109 | this.utils.update(this.data, key, (item) => ({ 110 | key, 111 | version, 112 | value: fn(item?.value), 113 | })) 114 | 115 | for (const { key, value } of tx.sum || []) 116 | replaceValue(key, (existing) => { 117 | if (typeof existing === "number") return existing + value 118 | if (existing === undefined) return value 119 | console.warn("Calling sum on a non-number value:", key, existing) 120 | return value 121 | }) 122 | for (const { key, value } of tx.min || []) 123 | replaceValue(key, (existing) => { 124 | if (typeof existing === "number") return Math.min(existing, value) 125 | if (existing === undefined) return value 126 | console.warn("Calling min on a non-number value:", key, existing) 127 | return value 128 | }) 129 | for (const { key, value } of tx.max || []) 130 | replaceValue(key, (existing) => { 131 | if (typeof existing === "number") return Math.max(existing, value) 132 | if (existing === undefined) return value 133 | console.warn("Calling max on a non-number value:", key, existing) 134 | return value 135 | }) 136 | 137 | for (const key of tx.delete || []) this.utils.remove(this.data, key) 138 | } 139 | } 140 | -------------------------------------------------------------------------------- 
/src/tupledb/BTreeDb.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { jsonCodec } from "lexicodec" 3 | import { describe, it } from "mocha" 4 | import { BTreeDb } from "./BTreeDb" 5 | 6 | describe("BTreeDb", () => { 7 | it("works", () => { 8 | const db = new BTreeDb() 9 | 10 | const user1 = { id: randomId(), name: "Chet" } 11 | const user2 = { id: randomId(), name: "Simon" } 12 | 13 | { 14 | const tx = db.transact() 15 | tx.set(["user", user1.id], user1) 16 | tx.set(["user", user2.id], user2) 17 | tx.commit() 18 | } 19 | 20 | let called = 0 21 | db.subscribe( 22 | [ 23 | ["user", jsonCodec.MIN], 24 | ["user", jsonCodec.MAX], 25 | ], 26 | () => { 27 | called += 1 28 | } 29 | ) 30 | 31 | const user3 = { id: randomId(), name: "Rob" } 32 | const user4 = { id: randomId(), name: "Tanishq" } 33 | 34 | { 35 | const tx = db.transact() 36 | tx.set(["user", user3.id], user3) 37 | tx.set(["user", user4.id], user4) 38 | tx.commit() 39 | } 40 | 41 | assert.equal(called, 1) 42 | }) 43 | }) 44 | 45 | function randomId() { 46 | return Math.random().toString(36).slice(2, 10) 47 | } 48 | -------------------------------------------------------------------------------- /src/tupledb/BTreeDb.ts: -------------------------------------------------------------------------------- 1 | import { insert, remove } from "@ccorcos/ordered-array" 2 | import { jsonCodec } from "lexicodec" 3 | import { InMemoryBinaryPlusTree } from "../lib/InMemoryBinaryPlusTree" 4 | import { InMemoryIntervalTree } from "../lib/InMemoryIntervalTree" 5 | 6 | // No concurrency control because this is synchronous and embedded, just like SQLite. 
7 | export class BTreeDb { 8 | constructor(public compareKey: (a: K, b: K) => number = jsonCodec.compare) {} 9 | 10 | data = new InMemoryBinaryPlusTree(1, 40, this.compareKey) 11 | 12 | listeners = new InMemoryIntervalTree<[K, K, string], () => void, K>( 13 | 1, 14 | 40, 15 | (a, b) => { 16 | let dir = this.compareKey(a[0], b[0]) 17 | if (dir !== 0) return dir 18 | dir = this.compareKey(a[1], b[1]) 19 | if (dir !== 0) return dir 20 | if (a[2] > b[2]) return 1 21 | if (a[2] < b[2]) return -1 22 | return 0 23 | }, 24 | this.compareKey 25 | ) 26 | 27 | get(key: K) { 28 | return this.data.get(key) 29 | } 30 | 31 | list( 32 | args: { 33 | gt?: K 34 | gte?: K 35 | lt?: K 36 | lte?: K 37 | limit?: number 38 | reverse?: boolean 39 | } = {} 40 | ) { 41 | return this.data.list(args) 42 | } 43 | 44 | subscribe(range: [K, K], fn: () => void) { 45 | const id = randomId() 46 | this.listeners.set([...range, id], fn) 47 | return () => this.listeners.delete([...range, id]) 48 | } 49 | 50 | set(key: K, value: V) { 51 | return this.write({ sets: [{ key, value }] }) 52 | } 53 | 54 | delete(key: K) { 55 | return this.write({ deletes: [key] }) 56 | } 57 | 58 | write(args: { sets?: { key: K; value: V }[]; deletes?: K[] }) { 59 | const keys: K[] = [] 60 | 61 | for (const { key, value } of args.sets || []) { 62 | this.data.set(key, value) 63 | keys.push(key) 64 | } 65 | 66 | for (const key of args.deletes || []) { 67 | this.data.delete(key) 68 | keys.push(key) 69 | } 70 | 71 | // Emit only once per caller. 
72 | const fns = new Set<() => void>() 73 | for (const key of keys) { 74 | const results = this.listeners.overlaps({ gte: key, lte: key }) 75 | for (const { value: fn } of results) fns.add(fn) 76 | } 77 | for (const fn of fns) fn() 78 | } 79 | 80 | transact() { 81 | return new BTreeTx(this) 82 | } 83 | } 84 | 85 | export class BTreeTx { 86 | constructor(public db: BTreeDb) {} 87 | 88 | sets = new InMemoryBinaryPlusTree(1, 40, this.db.data.compareKey) 89 | deletes = new InMemoryBinaryPlusTree(1, 40, this.db.data.compareKey) 90 | 91 | get(key: K) { 92 | const alreadyWritten = this.sets.get(key) 93 | if (alreadyWritten !== undefined) return alreadyWritten as V 94 | 95 | const alreadyDeleted = this.deletes.get(key) 96 | if (alreadyDeleted !== undefined) return undefined 97 | 98 | return this.db.data.get(key) 99 | } 100 | 101 | list( 102 | args: { 103 | gt?: K 104 | gte?: K 105 | lt?: K 106 | lte?: K 107 | limit?: number 108 | reverse?: boolean 109 | } = {} 110 | ) { 111 | const sets = this.sets.list(args) 112 | const deletes = this.deletes.list(args) 113 | 114 | const limit = 115 | args.limit !== undefined ? 
args.limit + deletes.length : undefined 116 | 117 | const result = this.db.list({ ...args, limit }) 118 | 119 | const compareKey = (a: K, b: K) => { 120 | const dir = this.db.compareKey(a, b) * -1 121 | if (args.reverse) return dir * -1 122 | else return dir 123 | } 124 | 125 | for (const item of sets) { 126 | insert(result, item, ({ key }) => key, compareKey) 127 | } 128 | 129 | for (const { key } of deletes) { 130 | remove(result, key, ({ key }) => key, compareKey) 131 | } 132 | 133 | if (args.limit && result.length > args.limit) { 134 | result.splice(args.limit, result.length) 135 | } 136 | 137 | return result 138 | } 139 | 140 | set(key: K, value: V) { 141 | this.sets.set(key, value) 142 | this.deletes.delete(key) 143 | } 144 | 145 | delete(key: K) { 146 | this.sets.delete(key) 147 | this.deletes.set(key, true) 148 | } 149 | 150 | commit() { 151 | this.db.write({ 152 | sets: this.sets.list(), 153 | deletes: this.deletes.list().map(({ key }) => key), 154 | }) 155 | } 156 | } 157 | 158 | function randomId() { 159 | return Math.random().toString(36).slice(2, 10) 160 | } 161 | -------------------------------------------------------------------------------- /src/tupledb/ITreeDb.test.ts: -------------------------------------------------------------------------------- 1 | import { strict as assert } from "assert" 2 | import { describe, it } from "mocha" 3 | import { ITreeDb } from "./ITreeDb" 4 | 5 | describe("ITreeDb", () => { 6 | it("works", () => { 7 | const db = new ITreeDb() 8 | 9 | const now = Date.now() 10 | const hourMs = 1000 * 60 * 60 11 | const dayMs = hourMs * 24 12 | 13 | const event1 = { 14 | id: randomId(), 15 | start: new Date(now).toISOString(), 16 | end: new Date(now + hourMs).toISOString(), 17 | name: "Meeting", 18 | } 19 | const event2 = { 20 | id: randomId(), 21 | start: new Date(now + 2 * dayMs).toISOString(), 22 | end: new Date(now + 3 * dayMs).toISOString(), 23 | name: "Party", 24 | } 25 | 26 | { 27 | const tx = db.transact() 28 | 
tx.set([event1.start, event1.end, event1.id], event1) 29 | tx.set([event2.start, event2.end, event2.id], event2) 30 | tx.commit() 31 | } 32 | 33 | let called = 0 34 | db.subscribe( 35 | // Today 36 | [new Date(now).toISOString(), new Date(now + dayMs).toISOString()], 37 | () => { 38 | called += 1 39 | } 40 | ) 41 | 42 | const event3 = { 43 | id: randomId(), 44 | start: new Date(now + 2 * hourMs).toISOString(), 45 | end: new Date(now + 3 * hourMs).toISOString(), 46 | name: "Zoom call", 47 | } 48 | const event4 = { 49 | id: randomId(), 50 | start: new Date(now + 12 * hourMs).toISOString(), 51 | end: new Date(now + 12.5 * hourMs).toISOString(), 52 | name: "Dinner", 53 | } 54 | 55 | { 56 | const tx = db.transact() 57 | tx.set([event3.start, event3.end, event3.id], event3) 58 | tx.set([event4.start, event4.end, event4.id], event4) 59 | tx.commit() 60 | } 61 | 62 | assert.equal(called, 1) 63 | 64 | const event5 = { 65 | id: randomId(), 66 | start: new Date(now + 36 * hourMs).toISOString(), 67 | end: new Date(now + 39 * hourMs).toISOString(), 68 | name: "Later", 69 | } 70 | 71 | { 72 | const tx = db.transact() 73 | tx.set([event5.start, event5.end, event5.id], event5) 74 | tx.commit() 75 | } 76 | 77 | assert.equal(called, 1) 78 | }) 79 | }) 80 | 81 | function randomId() { 82 | return Math.random().toString(36).slice(2, 10) 83 | } 84 | -------------------------------------------------------------------------------- /src/tupledb/ITreeDb.ts: -------------------------------------------------------------------------------- 1 | import { insert, remove } from "@ccorcos/ordered-array" 2 | import { jsonCodec } from "lexicodec" 3 | import { InMemoryIntervalTree } from "../lib/InMemoryIntervalTree" 4 | 5 | // No concurrency control because this is synchronous and embedded, just like SQLite. 
6 | export class ITreeDb { 7 | constructor( 8 | public compareBound: (a: B, b: B) => number = jsonCodec.compare, 9 | public compareKey: (a: K, b: K) => number = jsonCodec.compare 10 | ) {} 11 | 12 | compareTuple = (a: [B, B, K], b: [B, B, K]) => { 13 | let dir = this.compareBound(a[0], b[0]) 14 | if (dir !== 0) return dir 15 | dir = this.compareBound(a[1], b[1]) 16 | if (dir !== 0) return dir 17 | return this.compareKey(a[2], b[2]) 18 | } 19 | 20 | data = new InMemoryIntervalTree<[B, B, K], V, B>( 21 | 1, 22 | 40, 23 | this.compareTuple, 24 | this.compareBound 25 | ) 26 | 27 | listeners = new InMemoryIntervalTree<[B, B, string], () => void, B>( 28 | 1, 29 | 40, 30 | (a, b) => { 31 | let dir = this.compareBound(a[0], b[0]) 32 | if (dir !== 0) return dir 33 | dir = this.compareBound(a[1], b[1]) 34 | if (dir !== 0) return dir 35 | if (a[2] > b[2]) return 1 36 | if (a[2] < b[2]) return -1 37 | return 0 38 | }, 39 | this.compareBound 40 | ) 41 | 42 | get(key: [B, B, K]) { 43 | return this.data.get(key) 44 | } 45 | 46 | overlaps(args: { gt?: B; gte?: B; lt?: B; lte?: B } = {}) { 47 | return this.data.overlaps(args) 48 | } 49 | 50 | subscribe(range: [B, B], fn: () => void) { 51 | const id = randomId() 52 | this.listeners.set([...range, id], fn) 53 | return () => this.listeners.delete([...range, id]) 54 | } 55 | 56 | set(key: [B, B, K], value: V) { 57 | return this.write({ sets: [{ key, value }] }) 58 | } 59 | 60 | delete(key: [B, B, K]) { 61 | return this.write({ deletes: [key] }) 62 | } 63 | 64 | write(args: { 65 | sets?: { key: [B, B, K]; value: V }[] 66 | deletes?: [B, B, K][] 67 | }) { 68 | const keys: [B, B, K][] = [] 69 | 70 | for (const { key, value } of args.sets || []) { 71 | this.data.set(key, value) 72 | keys.push(key) 73 | } 74 | 75 | for (const key of args.deletes || []) { 76 | this.data.delete(key) 77 | keys.push(key) 78 | } 79 | 80 | // Emit only once per caller. 
81 | const fns = new Set<() => void>() 82 | for (const key of keys) { 83 | const results = this.listeners.overlaps({ gte: key[0], lte: key[1] }) 84 | for (const { value: fn } of results) fns.add(fn) 85 | } 86 | for (const fn of fns) fn() 87 | } 88 | 89 | transact() { 90 | return new ITreeTx(this) 91 | } 92 | } 93 | 94 | export class ITreeTx { 95 | constructor(public db: ITreeDb) {} 96 | 97 | sets = new InMemoryIntervalTree<[B, B, K], V, B>( 98 | 1, 99 | 40, 100 | this.db.compareTuple, 101 | this.db.compareBound 102 | ) 103 | 104 | deletes = new InMemoryIntervalTree<[B, B, K], true, B>( 105 | 1, 106 | 40, 107 | this.db.compareTuple, 108 | this.db.compareBound 109 | ) 110 | 111 | get(key: [B, B, K]) { 112 | const alreadyWritten = this.sets.get(key) 113 | if (alreadyWritten !== undefined) return alreadyWritten as V 114 | 115 | const alreadyDeleted = this.deletes.get(key) 116 | if (alreadyDeleted !== undefined) return undefined 117 | 118 | return this.db.data.get(key) 119 | } 120 | 121 | overlaps(args: { gt?: B; gte?: B; lt?: B; lte?: B } = {}) { 122 | const sets = this.sets.overlaps(args) 123 | const deletes = this.deletes.overlaps(args) 124 | 125 | const result = this.db.overlaps(args) 126 | 127 | for (const item of sets) { 128 | insert(result, item, ({ key }) => key, this.db.compareTuple) 129 | } 130 | 131 | for (const { key } of deletes) { 132 | remove(result, key, ({ key }) => key, this.db.compareTuple) 133 | } 134 | 135 | return result 136 | } 137 | 138 | delete(key: [B, B, K]) { 139 | this.sets.delete(key) 140 | this.deletes.set(key, true) 141 | } 142 | 143 | set(key: [B, B, K], value: V) { 144 | this.sets.set(key, value) 145 | this.deletes.delete(key) 146 | } 147 | 148 | commit() { 149 | this.db.write({ 150 | sets: this.sets.list(), 151 | deletes: this.deletes.list().map(({ key }) => key), 152 | }) 153 | } 154 | } 155 | 156 | function randomId() { 157 | return Math.random().toString(36).slice(2, 10) 158 | } 159 | 
-------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compileOnSave": false, 3 | "compilerOptions": { 4 | "target": "ES2018", 5 | "module": "CommonJS", 6 | "moduleResolution": "Node", 7 | "esModuleInterop": true, 8 | "allowSyntheticDefaultImports": true, 9 | "strictNullChecks": true, 10 | "strictFunctionTypes": true, 11 | "noImplicitThis": true, 12 | "noImplicitAny": false, 13 | "noUnusedLocals": false, 14 | "noUnusedParameters": false, 15 | "removeComments": false, 16 | "sourceMap": true, 17 | "lib": ["ES2018", "DOM"], 18 | "outDir": "build", 19 | "declaration": true 20 | }, 21 | "include": ["src/**/*"], 22 | "exclude": ["node_modules", "**/*.test.ts"] 23 | } 24 | --------------------------------------------------------------------------------