├── .gitignore
├── .vscode
│   └── launch.json
├── README.md
├── bench.ts
├── crdts.ts
├── fugue-max-simple.ts
├── list-fugue-simple.ts
├── package.json
├── reference_test.ts
├── rle.ts
├── sync9.js
├── test.ts
├── trace.ts
└── tsconfig.json

/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | yarn.lock
3 | 
4 | dist
5 | 

--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 |   // Use IntelliSense to learn about possible attributes.
3 |   // Hover to view descriptions of existing attributes.
4 |   // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5 |   "version": "0.2.0",
6 |   "configurations": [
7 |     {
8 |       "type": "node",
9 |       "request": "launch",
10 |       "name": "reference_test",
11 |       "skipFiles": [
12 |         "<node_internals>/**"
13 |       ],
14 |       "program": "${workspaceFolder}/reference_test.ts",
15 |       // "runtimeArgs": ["-r", "ts-node/register"],
16 |       "outFiles": [
17 |         "${workspaceFolder}/**/*.js"
18 |       ]
19 |     },
20 |     {
21 |       "type": "node",
22 |       "request": "launch",
23 |       "name": "trace",
24 |       "skipFiles": [
25 |         "<node_internals>/**"
26 |       ],
27 |       "program": "${workspaceFolder}/trace.ts",
28 |       // "runtimeArgs": ["-r", "ts-node/register"],
29 |       "outFiles": [
30 |         "${workspaceFolder}/**/*.js"
31 |       ]
32 |     },
33 |     {
34 |       "type": "node",
35 |       "request": "launch",
36 |       "name": "sync9",
37 |       "skipFiles": [
38 |         "<node_internals>/**"
39 |       ],
40 |       "program": "${workspaceFolder}/sync9.js",
41 |       // "runtimeArgs": ["-r", "ts-node/register"],
42 |       "outFiles": [
43 |         "${workspaceFolder}/**/*.js"
44 |       ]
45 |     }
46 |   ]
47 | }

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Reference CRDTs
2 | 
3 | This repository contains simple proof-of-concept reference implementations of yjs, automerge and sync9's list types - all implemented in the same codebase. The implementations are reference-correct. That is, the resulting document order in all cases is the same as it is in the "real" versions (from yjs, automerge and "loom" (sync9's implementation)).
4 | 
5 | These reference implementations are (mostly) designed for readability, and to show that the same codebase can handle all 3 approaches. But some complexity creeps in from overlaying all of the tricks needed for each algorithm. When code is only applicable to a single implementation, it is marked as such. (E.g. maxSeq in the document, or the alternate makeItem method for sync9.)
6 | 
7 | This implementation is *not* optimized for performance. Running the automerge-perf editing history takes 30 seconds with yjs here, vs 1 second with the real, optimized yjs library.
8 | 
9 | This library does not contain all the supporting tools in yjs and automerge, like encoding / decoding or transaction support. It never will have these features.
10 | 
11 | 
12 | ### What's in the box
13 | 
14 | The actual CRDT implementations share almost all their code, which lives entirely in [crdts.ts](crdts.ts). The main point of divergence is the `integrate` function for each algorithm. These methods are called when inserting a new item, to scan the document and find the position at which the item should be inserted. This follows yjs's implementation style.
15 | 
16 | The document itself is a document-ordered list of items. Each item stores:
17 | 
18 | ```typescript
19 | export type Item<T> = {
20 |   content: T,
21 |   id: Id,
22 | 
23 |   originLeft: Id | null, // null for start. Aka "parent" in automerge semantics.
24 | 
25 |   originRight: Id | null, // Only used by yjs. Null for end.
26 |   seq: number, // Only used by automerge. Larger than all known sequence numbers when created.
27 |   insertAfter: boolean, // Only for sync9. Are we inserting before / after our parent?
28 | 
29 |   isDeleted: boolean,
30 | }
31 | ```
32 | 
33 | IDs are tuples of `[agent: string, seq: number]`. Each peer is expected to choose an agent identifier, then each insert uses a monotonically increasing sequence number. Note that `item.seq` (automerge's helper for local ordering) has no relation to `id.seq`. They should probably have different names.
34 | 
35 | 
36 | ## Running the tests
37 | 
38 | I use ts-node to run the code in this project. After `npm install` / `yarn` you can run files with:
39 | 
40 | ```
41 | npx ts-node test.ts
42 | ```
43 | 
44 | ## Contribution policy
45 | 
46 | Note: This code base was created for science and learning. It is not built to be a load-bearing part of your infrastructure.
47 | 
48 | I have no intention of spending my time maintaining this code. If you want to make changes, please do so in a fork. I'm not interested in pull requests.
49 | 
50 | 
51 | # LICENSE
52 | 
53 | Shared under the ISC license:
54 | 
55 | Copyright 2021 Joseph Gentle
56 | 
57 | Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
58 | 
59 | THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
60 | 

--------------------------------------------------------------------------------
/bench.ts:
--------------------------------------------------------------------------------
1 | import zlib from 'zlib'
2 | import fs from 'fs'
3 | import {Algorithm, newDoc, localDelete, yjsMod, automerge, getArray, sync9} from './crdts.js'
4 | import assert from 'assert'
5 | // import v8 from 'v8'
6 | 
7 | const bench = (algName: string, alg: Algorithm) => {
8 |   // const filename = 'sveltecomponent'
9 |   const filename = 'automerge-paper'
10 |   const {
11 |     startContent,
12 |     endContent,
13 |     txns
14 |   } = JSON.parse(zlib.gunzipSync(fs.readFileSync(`../crdt-benchmarks/${filename}.json.gz`)).toString())
15 | 
16 |   console.time(`${algName} ${filename}`)
17 | 
18 |   // ;(globalThis as any).gc()
19 |   // const startMemory = v8.getHeapStatistics().used_heap_size
20 | 
21 |   const doc = newDoc()
22 | 
23 |   let i = 0
24 |   for (const txn of txns) {
25 |     if (++i % 10000 === 0) console.log(i)
26 |     for (const patch of txn.patches) {
27 |       // Each patch is [position, delCount, insertedContent]. Note localDelete
28 |       // below only removes a single character per call, which is all this
29 |       // keystroke-granular trace needs.
28 | const [pos, delCount, inserted] = patch as [number, number, string] 29 | if (inserted.length) { 30 | alg.localInsert(doc, 'A', pos, inserted) 31 | } else if (delCount) { 32 | localDelete(doc, 'A', pos) 33 | } 34 | } 35 | } 36 | console.timeEnd(`${algName} ${filename}`) 37 | 38 | // ;(globalThis as any).gc() 39 | // console.log('RAM used:', v8.getHeapStatistics().used_heap_size - startMemory) 40 | assert.strictEqual(getArray(doc).join(''), endContent) 41 | console.log(txns.length) 42 | } 43 | 44 | bench('yjs mod', yjsMod) 45 | bench('automerge', automerge) 46 | bench('sync9', sync9) 47 | -------------------------------------------------------------------------------- /crdts.ts: -------------------------------------------------------------------------------- 1 | // This file implements 4 different list based CRDTs 2 | // (Yjs, Automerge, Sync9 and "YjsMod") in one implementation. 3 | 4 | import assert from 'assert' 5 | import consoleLib from 'console' 6 | import chalk from 'chalk' 7 | 8 | globalThis.console = new consoleLib.Console({ 9 | stdout: process.stdout, stderr: process.stderr, 10 | inspectOptions: {depth: null} 11 | }) 12 | 13 | // atEnd flag for sync9. 14 | export type Id = [agent: string, seq: number] 15 | export type Version = Record // Last seen seq for each agent. 16 | 17 | // export let iters = 0 18 | 19 | export type Algorithm = { 20 | localInsert: (this: Algorithm, doc: Doc, agent: string, pos: number, content: T) => void 21 | integrate: (doc: Doc, newItem: Item, idx_hint?: number) => void 22 | printDoc: (doc: Doc) => void 23 | ignoreTests?: string[] 24 | } 25 | 26 | // These aren't used, but they should be. They show how the items actually work for each algorithm. 27 | type YjsItem = { 28 | content: T, 29 | id: Id, 30 | 31 | // Left and right implicit in document list. 32 | // null represents document's root / end. 33 | originLeft: Id | null, 34 | originRight: Id | null, 35 | 36 | isDeleted: boolean, 37 | } 38 | 39 | type AMItem = { 40 | content: T, 41 | id: Id, 42 | 43 | originLeft: Id | null, 44 | seq: number, // Must be larger than all prev sequence numbers on the peer that created this. 45 | 46 | isDeleted: boolean, 47 | } 48 | 49 | type Sync9Item = { 50 | // Sync9 items are splittable spans - which is weird in this 51 | // library because items only contain 1 entry. So the entry is 52 | // nullable, thus having length 0 or 1. 53 | content: T | null, 54 | 55 | id: Id, 56 | 57 | originLeft: Id | null, 58 | insertAfter: boolean, // identifies whether we insert at the start / end of originLeft. 59 | 60 | isDeleted: boolean, 61 | } 62 | 63 | export type Item = { 64 | // Sync9 items must be splittable spans - which is weird in this 65 | // library because items only contain 1 entry. So the entry is 66 | // nullable, thus having length 0 or 1. 67 | content: T | null, 68 | 69 | // For sync9 the seq must advance by 2 each time, so we have insert positions both before and after this item. 70 | id: Id, 71 | 72 | originLeft: Id | null, 73 | originRight: Id | null, 74 | seq: number, 75 | insertAfter: boolean, // Only for sync9. 76 | 77 | isDeleted: boolean, 78 | } 79 | 80 | 81 | 82 | export interface Doc { 83 | content: Item[] // Could take Item as a type parameter, but eh. This is better for demos. 84 | 85 | version: Version // agent => last seen seq. 86 | length: number // Number of items not deleted 87 | 88 | maxSeq: number // Only for AM. 
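  // (maxSeq tracks the largest item.seq seen so far. localInsert stamps new
  // automerge items with maxSeq + 1, and the RGA integrate functions keep it
  // up to date.)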
89 | } 90 | 91 | export const newDoc = (): Doc => ({ 92 | content: [], 93 | version: {}, 94 | length: 0, 95 | maxSeq: 0, 96 | }) 97 | 98 | // **** Common code and helpers 99 | 100 | // We never actually compare the third argument in sync9. 101 | const idEq2 = (a: Id | null, agent: string, seq: number): boolean => ( 102 | a != null && (a[0] === agent && a[1] === seq) 103 | ) 104 | const idEq = (a: Id | null, b: Id | null): boolean => ( 105 | a == b || (a != null && b != null && a[0] === b[0] && a[1] === b[1]) 106 | ) 107 | 108 | let hits = 0 109 | let misses = 0 110 | 111 | // Returns the index of the item with the specified Id, or -1. 112 | // idx_hint is a small optimization so when we know the general area of 113 | // an item, we search nearby instead of just scanning the whole document. 114 | const findItem2 = (doc: Doc, needle: Id | null, atEnd: boolean = false, idx_hint: number = -1): number => { 115 | if (needle == null) return -1 116 | else { 117 | const [agent, seq] = needle 118 | // This little optimization *halves* the time to run the editing trace benchmarks. 119 | if (idx_hint >= 0 && idx_hint < doc.content.length) { 120 | const hint_item = doc.content[idx_hint] 121 | if ((!atEnd && idEq2(hint_item.id, agent, seq)) 122 | || (hint_item.content != null && atEnd && idEq2(hint_item.id, agent, seq))) { 123 | hits++ 124 | return idx_hint 125 | } 126 | // Try nearby. 127 | // const RANGE = 10 128 | // for (let i = idx_hint < RANGE ? 0 : idx_hint - RANGE; i < doc.content.length && i < idx_hint + RANGE; i++) { 129 | // const item = doc.content[i] 130 | // if ((!atEnd && idEq2(item.id, agent, seq)) 131 | // || (item.content != null && atEnd && idEq2(item.id, agent, seq))) { 132 | // hits++ 133 | // return i 134 | // } 135 | // } 136 | } 137 | 138 | misses++ 139 | const idx = doc.content.findIndex(({content, id}) => ( 140 | (!atEnd && idEq2(id, agent, seq)) || (content != null && atEnd && idEq2(id, agent, seq))) 141 | ) 142 | // : doc.content.findIndex(({id}) => idEq(id, needle)) 143 | if (idx < 0) throw Error('Could not find item') // Could use a ternary if not for this! 144 | return idx 145 | } 146 | } 147 | 148 | const findItem = (doc: Doc, needle: Id | null, idx_hint: number = -1): number => ( 149 | findItem2(doc, needle, false, idx_hint) 150 | ) 151 | 152 | // const getNextSeq = (doc: Doc, agent: string): number => { 153 | // const last = doc.version[agent] 154 | // return last == null ? 0 : last + 1 155 | // } 156 | 157 | const findItemAtPos = (doc: Doc, pos: number, stick_end: boolean = false): number => { 158 | let i = 0 159 | // console.log('pos', pos, doc.length, doc.content.length) 160 | for (; i < doc.content.length; i++) { 161 | const item = doc.content[i] 162 | if (stick_end && pos === 0) return i 163 | else if (item.isDeleted || item.content == null) continue 164 | else if (pos === 0) return i 165 | 166 | pos-- 167 | } 168 | 169 | if (pos === 0) return i 170 | else throw Error('past end of the document') 171 | } 172 | 173 | // const nextSeq = (agent: string): number => 174 | 175 | function localInsert(this: Algorithm, doc: Doc, agent: string, pos: number, content: T) { 176 | let i = findItemAtPos(doc, pos) 177 | this.integrate(doc, { 178 | content, 179 | id: [agent, (doc.version[agent] ?? -1) + 1], 180 | isDeleted: false, 181 | originLeft: doc.content[i - 1]?.id ?? null, 182 | originRight: doc.content[i]?.id ?? null, // Only for yjs, yjsmod 183 | insertAfter: true, // Unused by yjs and rga 184 | seq: doc.maxSeq + 1, // Only for rga. 
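    // (Note: "rga" above and "AM" elsewhere name the same thing: automerge's
    // RGA-based integration. Fields an algorithm doesn't use are simply
    // ignored by its integrate function.)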
185 | }, i) 186 | } 187 | 188 | function localInsertSync9(this: Algorithm, doc: Doc, agent: string, pos: number, content: T) { 189 | let i = findItemAtPos(doc, pos, true) 190 | // For sync9 our insertion point is different based on whether or not our parent has children. 191 | let parentIdBase = doc.content[i - 1]?.id ?? null 192 | let originLeft: Id | null = parentIdBase == null ? null : [parentIdBase[0], parentIdBase[1]] 193 | let insertAfter = true 194 | 195 | for (;; i++) { 196 | // Scan until we find something with no children to insert after. 197 | let nextItem = doc.content[i] 198 | if (nextItem == null || !idEq(nextItem.originLeft, parentIdBase)) break 199 | 200 | parentIdBase = nextItem.id 201 | originLeft = [nextItem.id[0], nextItem.id[1]] 202 | insertAfter = false 203 | // If the current item has content, we need to slice it and insert before its content. 204 | if (nextItem.content != null) break 205 | } 206 | 207 | // console.log('parentId', parentId) 208 | 209 | this.integrate(doc, { 210 | content, 211 | id: [agent, (doc.version[agent] ?? -1) + 1], 212 | isDeleted: false, 213 | originLeft, 214 | insertAfter, 215 | 216 | originRight: null, // Only for yjs 217 | seq: 0, // Only for AM. 218 | }, i) 219 | } 220 | 221 | export const localDelete = (doc: Doc, agent: string, pos: number): void => { 222 | // This is very incomplete. 223 | const item = doc.content[findItemAtPos(doc, pos)] 224 | if (!item.isDeleted) { 225 | item.isDeleted = true 226 | doc.length -= 1 227 | } 228 | } 229 | 230 | export const getArray = (doc: Doc): T[] => ( 231 | doc.content.filter(i => !i.isDeleted && i.content != null).map(i => i.content!) 232 | ) 233 | 234 | const printdoc = (doc: Doc, showSeq: boolean, showOR: boolean, showIsAfter: boolean) => { 235 | const depth: Record = {} 236 | // const kForId = (id: Id, c: T | null) => `${id[0]} ${id[1]} ${id[2] ?? c != null}` 237 | const kForItem = (id: Id, isAfter: boolean) => `${id[0]} ${id[1]} ${isAfter}` 238 | for (const i of doc.content) { 239 | const d = i.originLeft == null ? 0 : depth[kForItem(i.originLeft, i.insertAfter)] + 1 240 | depth[kForItem(i.id, i.content != null)] = d 241 | 242 | let content = `${i.content == null 243 | ? '.' 244 | : i.isDeleted ? chalk.strikethrough(i.content) : chalk.yellow(i.content) 245 | } at [${i.id}] (par/left [${i.originLeft}])` 246 | if (showSeq) content += ` seq ${i.seq}` 247 | if (showOR) content += ` right [${i.originRight}]` 248 | if (showIsAfter) content += ` ${i.insertAfter ? 'after' : chalk.blue('before')}` 249 | // console.log(`${'| '.repeat(d)}${i.content == null ? chalk.strikethrough(content) : content}`) 250 | console.log(`${'| '.repeat(d)}${i.content == null ? chalk.grey(content) : content}`) 251 | } 252 | } 253 | 254 | export const isInVersion = (id: Id | null, version: Version) => { 255 | if (id == null) return true 256 | const seq = version[id[0]] 257 | return seq != null && seq >= id[1] 258 | } 259 | 260 | export const canInsertNow = (op: Item, doc: Doc): boolean => ( 261 | // We need op.id to not be in doc.versions, but originLeft and originRight to be in. 262 | // We're also inserting each item from each agent in sequence. 263 | !isInVersion(op.id, doc.version) 264 | && (op.id[1] === 0 || isInVersion([op.id[0], op.id[1] - 1], doc.version)) 265 | && isInVersion(op.originLeft, doc.version) 266 | && isInVersion(op.originRight, doc.version) 267 | ) 268 | 269 | // Merge all missing items from src into dest. 270 | // NOTE: This currently does not support moving deletes! 
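//
// A minimal usage sketch (hypothetical agent names, in the spirit of the
// commented-out demo at the bottom of this file):
//
//   const docA = newDoc(), docB = newDoc()
//   yjsMod.localInsert(docA, 'alice', 0, 'x')  // docA: [x]
//   yjsMod.localInsert(docB, 'bob', 0, 'y')    // docB: [y]
//   mergeInto(yjsMod, docA, docB)  // Pull bob's item into docA.
//   mergeInto(yjsMod, docB, docA)  // Pull alice's item into docB.
//   // getArray(docA) and getArray(docB) now agree on the same order.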
271 | export const mergeInto = (algorithm: Algorithm, dest: Doc, src: Doc) => { 272 | // The list of operations we need to integrate 273 | const missing: (Item | null)[] = src.content.filter(op => op.content != null && !isInVersion(op.id, dest.version)) 274 | let remaining = missing.length 275 | 276 | while (remaining > 0) { 277 | // Find the next item in remaining and insert it. 278 | let mergedOnThisPass = 0 279 | 280 | for (let i = 0; i < missing.length; i++) { 281 | const op = missing[i] 282 | if (op == null || !canInsertNow(op, dest)) continue 283 | algorithm.integrate(dest, op) 284 | missing[i] = null 285 | remaining-- 286 | mergedOnThisPass++ 287 | } 288 | 289 | assert(mergedOnThisPass) 290 | } 291 | } 292 | 293 | 294 | // *** Per algorithm integration functions. Note each CRDT will only use 295 | // one of these integration methods depending on the desired semantics. 296 | 297 | // This is a slight modification of yjs with a few tweaks to make some 298 | // of the CRDT puzzles resolve better. 299 | const integrateYjsMod = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 300 | const lastSeen = doc.version[newItem.id[0]] ?? -1 301 | if (newItem.id[1] !== lastSeen + 1) throw Error('Operations out of order') 302 | doc.version[newItem.id[0]] = newItem.id[1] 303 | 304 | let left = findItem(doc, newItem.originLeft, idx_hint - 1) 305 | let destIdx = left + 1 306 | let right = newItem.originRight == null ? doc.content.length : findItem(doc, newItem.originRight, idx_hint) 307 | let scanning = false 308 | 309 | for (let i = destIdx; ; i++) { 310 | // Inserting at the end of the document. Just insert. 311 | if (!scanning) destIdx = i 312 | if (i === doc.content.length) break 313 | if (i === right) break // No ambiguity / concurrency. Insert here. 314 | 315 | let other = doc.content[i] 316 | 317 | let oleft = findItem(doc, other.originLeft, idx_hint - 1) 318 | let oright = other.originRight == null ? doc.content.length : findItem(doc, other.originRight, idx_hint) 319 | 320 | // The logic below summarizes to: 321 | // if (oleft < left || (oleft === left && oright === right && newItem.id[0] < o.id[0])) break 322 | // if (oleft === left) scanning = oright < right 323 | 324 | // Ok now we implement the punnet square of behaviour 325 | if (oleft < left) { 326 | // Top row. Insert, insert, arbitrary (insert) 327 | break 328 | } else if (oleft === left) { 329 | // Middle row. 330 | if (oright < right) { 331 | // This is tricky. We're looking at an item we *might* insert after - but we can't tell yet! 332 | scanning = true 333 | continue 334 | } else if (oright === right) { 335 | // Raw conflict. Order based on user agents. 336 | if (newItem.id[0] < other.id[0]) break 337 | else { 338 | scanning = false 339 | continue 340 | } 341 | } else { // oright > right 342 | scanning = false 343 | continue 344 | } 345 | } else { // oleft > left 346 | // Bottom row. Arbitrary (skip), skip, skip 347 | continue 348 | } 349 | } 350 | 351 | // We've found the position. Insert here. 352 | doc.content.splice(destIdx, 0, newItem) 353 | if (!newItem.isDeleted) doc.length += 1 354 | } 355 | 356 | // This implements the fugue algorithm listed here: 357 | // https://arxiv.org/abs/2305.00583 . 358 | // 359 | // Merging behaviour turns out to be identical to sync9. 360 | // This code is heavily based on yjsmod. The only difference is using getRightParentIdx instead 361 | // of simply searching for originRight. 
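//
// Terminology note: an item's "right parent" is its originRight element when
// that element has the same originLeft as the item itself (i.e. the two are
// siblings); otherwise the right parent is treated as the end of the
// document. getRightParentIdx below computes exactly that.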
362 | const integrateFugue = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 363 | const lastSeen = doc.version[newItem.id[0]] ?? -1 364 | if (newItem.id[1] !== lastSeen + 1) throw Error('Operations out of order') 365 | doc.version[newItem.id[0]] = newItem.id[1] 366 | 367 | const endIdx = doc.content.length 368 | 369 | let scanning = false 370 | 371 | const leftIdx = findItem(doc, newItem.originLeft, idx_hint - 1) 372 | let destIdx = leftIdx + 1 373 | 374 | const getRightParentIdx = (item: Item): number => { 375 | const rightIdx = item.originRight == null ? endIdx : findItem(doc, item.originRight, idx_hint) 376 | const right: Item | undefined = doc.content[rightIdx] // Might be null if rightIdx is endIdx. 377 | return right == null || !idEq(right.originLeft, item.originLeft) ? endIdx : rightIdx 378 | } 379 | 380 | // const rightParentIdx = getRightParentIdx(rightIdx, newItem.originLeft) 381 | const rightPIdx = getRightParentIdx(newItem) 382 | 383 | for (let i = destIdx; ; i++) { 384 | if (!scanning) destIdx = i 385 | if (i === endIdx) break // Hit the end of the document 386 | 387 | let other = doc.content[i] 388 | if (idEq(other.id, newItem.originRight)) break // Hit originRight 389 | 390 | let oleftIdx = findItem(doc, other.originLeft, idx_hint - 1) 391 | const orightPIdx = getRightParentIdx(other) 392 | 393 | // This is identical to the logic in YjsMod, but summarized and using parent idx instead of 394 | // rightIdx. 395 | if (oleftIdx < leftIdx 396 | || (oleftIdx === leftIdx && orightPIdx === rightPIdx && newItem.id[0] < other.id[0])) break 397 | if (oleftIdx === leftIdx) scanning = orightPIdx < rightPIdx 398 | } 399 | 400 | // We've found the position. Insert here. 401 | doc.content.splice(destIdx, 0, newItem) 402 | if (!newItem.isDeleted) doc.length += 1 403 | } 404 | 405 | const integrateYjs = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 406 | const lastSeen = doc.version[newItem.id[0]] ?? -1 407 | if (newItem.id[1] !== lastSeen + 1) throw Error('Operations out of order') 408 | doc.version[newItem.id[0]] = newItem.id[1] 409 | 410 | let left = findItem(doc, newItem.originLeft, idx_hint - 1) 411 | let destIdx = left + 1 412 | let right = newItem.originRight == null ? doc.content.length : findItem(doc, newItem.originRight, idx_hint) 413 | let scanning = false 414 | 415 | for (let i = destIdx; ; i++) { 416 | // Inserting at the end of the document. Just insert. 417 | if (!scanning) destIdx = i 418 | if (i === doc.content.length) break 419 | if (i === right) break // No ambiguity / concurrency. Insert here. 420 | 421 | let other = doc.content[i] 422 | 423 | let oleft = findItem(doc, other.originLeft, idx_hint - 1) 424 | let oright = other.originRight == null ? doc.content.length : findItem(doc, other.originRight, idx_hint) 425 | 426 | // The logic below can be summarized in these two lines: 427 | // if (oleft < left || (oleft === left && oright === right && newItem.id[0] <= o.id[0])) break 428 | // if (oleft === left) scanning = newItem.id[0] <= o.id[0] 429 | 430 | // Ok now we implement the punnet square of behaviour 431 | if (oleft < left) { 432 | // Top row. Insert, insert, arbitrary (insert) 433 | break 434 | } else if (oleft === left) { 435 | // Middle row. 436 | if (newItem.id[0] > other.id[0]) { 437 | scanning = false 438 | continue 439 | } else if (oright === right) { 440 | break 441 | } else { 442 | scanning = true 443 | continue 444 | } 445 | } else { 446 | // Bottom row. 
Arbitrary (skip), skip, skip 447 | continue 448 | } 449 | } 450 | 451 | // We've found the position. Insert here. 452 | doc.content.splice(destIdx, 0, newItem) 453 | if (!newItem.isDeleted) doc.length += 1 454 | } 455 | 456 | // Integration method for the RGA algorthm, used in Automerge. 457 | const integrateRGA = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 458 | const {id} = newItem 459 | assert(newItem.seq >= 0) 460 | 461 | const lastSeen = doc.version[id[0]] ?? -1 462 | if (id[1] !== lastSeen + 1) throw Error('Operations out of order') 463 | doc.version[id[0]] = id[1] 464 | 465 | let parent = findItem(doc, newItem.originLeft, idx_hint - 1) 466 | let destIdx = parent + 1 467 | 468 | // Scan for the insert location. Stop if we reach the end of the document 469 | for (; destIdx < doc.content.length; destIdx++) { 470 | let o = doc.content[destIdx] 471 | 472 | // This is an unnecessary optimization (I couldn't help myself). It 473 | // doubles the speed when running the local editing traces by 474 | // avoiding calls to findItem() below. When newItem.seq > o.seq 475 | // we're guaranteed to end up falling into a branch that calls 476 | // break;. 477 | if (newItem.seq > o.seq) break 478 | 479 | // Optimization: This call halves the speed of this automerge 480 | // implementation. Its only needed to see if o.originLeft has been 481 | // visited in this loop, which we could calculate much more 482 | // efficiently. 483 | let oparent = findItem(doc, o.originLeft, idx_hint - 1) 484 | 485 | // All the logic below can be expressed in this single line: 486 | // if (oparent < parent || (oparent === parent && (newItem.seq === o.seq) && id[0] < o.id[0])) break 487 | 488 | // Ok now we implement the punnet square of behaviour 489 | if (oparent < parent) { 490 | // We've gotten to the end of the list of children. Stop here. 491 | break 492 | } else if (oparent === parent) { 493 | // Concurrent items from different useragents are sorted first by seq then agent. 494 | 495 | // NOTE: For consistency with the other algorithms, adjacent items 496 | // are sorted in *ascending* order of useragent rather than 497 | // *descending* order as in the actual automerge. It doesn't 498 | // matter for correctness, but its something to keep in mind if 499 | // compatibility matters. The reference checker inverts AM client 500 | // ids. 501 | 502 | // Inverted item sequence number comparisons are used in place of originRight for AM. 503 | if (newItem.seq > o.seq) { 504 | break 505 | } else if (newItem.seq === o.seq) { 506 | if (id[0] < o.id[0]) break 507 | else continue 508 | } else { 509 | continue 510 | } 511 | } else { 512 | // Skip child 513 | continue 514 | } 515 | } 516 | 517 | if (newItem.seq > doc.maxSeq) doc.maxSeq = newItem.seq 518 | 519 | // We've found the position. Insert here. 520 | doc.content.splice(destIdx, 0, newItem) 521 | if (!newItem.isDeleted) doc.length += 1 522 | } 523 | 524 | // Same as integrateRGA above, but shorter. 525 | const integrateRGASmol = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 526 | const {id: [agent, seq]} = newItem 527 | const parent = findItem(doc, newItem.originLeft, idx_hint - 1) 528 | 529 | // Scan to find the insert location 530 | let i 531 | for (i = parent + 1; i < doc.content.length; i++) { 532 | let o = doc.content[i] 533 | if (newItem.seq > o.seq) break // Optimization to avoid findItem call along the hot path 534 | let oparent = findItem(doc, o.originLeft, idx_hint - 1) 535 | 536 | // Should we insert here? 
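    // Under a shared parent, items sort by descending seq, then ascending
    // agent id (see the longer notes in integrateRGA above).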
537 | if (oparent < parent 538 | || (oparent === parent 539 | && (newItem.seq === o.seq) 540 | && agent < o.id[0]) 541 | ) break 542 | } 543 | 544 | // We've found the position. Insert at position *i*. 545 | doc.content.splice(i, 0, newItem) 546 | doc.version[agent] = seq 547 | doc.maxSeq = Math.max(doc.maxSeq, newItem.seq) 548 | if (!newItem.isDeleted) doc.length += 1 549 | } 550 | 551 | const integrateSync9 = (doc: Doc, newItem: Item, idx_hint: number = -1) => { 552 | const {id: [agent, seq]} = newItem 553 | const lastSeen = doc.version[agent] ?? -1 554 | if (seq !== lastSeen + 1) throw Error('Operations out of order') 555 | doc.version[agent] = seq 556 | 557 | let parentIdx = findItem2(doc, newItem.originLeft, newItem.insertAfter, idx_hint - 1) 558 | let destIdx = parentIdx + 1 559 | 560 | // if (parentIdx >= 0 && newItem.originLeft && (newItem.originLeft[1] === doc.content[parentIdx].id[1]) && doc.content[parentIdx].content != null) { 561 | if (parentIdx >= 0 && newItem.originLeft && !newItem.insertAfter && doc.content[parentIdx].content != null) { 562 | // Split left item to add null content item to the set 563 | doc.content.splice(parentIdx, 0, { 564 | ...doc.content[parentIdx], 565 | content: null 566 | }) 567 | // We can skip the loop because we know we're an only child. 568 | 569 | } else { 570 | for (; destIdx < doc.content.length; destIdx++) { 571 | let other = doc.content[destIdx] 572 | // We still need to skip children of originLeft. 573 | let oparentIdx = findItem2(doc, other.originLeft, other.insertAfter, idx_hint - 1) 574 | 575 | if (oparentIdx < parentIdx) break 576 | else if (oparentIdx === parentIdx) { 577 | // if (!idEq(other.originLeft, newItem.originLeft)) break 578 | if (newItem.id[0] < other.id[0]) break 579 | else continue 580 | } else continue 581 | } 582 | } 583 | 584 | // We've found the position. Insert here. 585 | doc.content.splice(destIdx, 0, newItem) 586 | if (!newItem.isDeleted && newItem.content != null) doc.length += 1 587 | } 588 | 589 | export const sync9: Algorithm = { 590 | localInsert: localInsertSync9, 591 | integrate: integrateSync9, 592 | printDoc(doc) { printdoc(doc, false, false, true) }, 593 | } 594 | 595 | export const yjsMod: Algorithm = { 596 | localInsert, 597 | integrate: integrateYjsMod, 598 | printDoc(doc) { printdoc(doc, false, true, false) }, 599 | } 600 | 601 | export const fugue: Algorithm = { 602 | localInsert, 603 | integrate: integrateFugue, 604 | printDoc(doc) { printdoc(doc, false, true, false) }, 605 | } 606 | 607 | export const fugueMax: Algorithm = { 608 | localInsert, 609 | // It'd be nice to have a FugueMax integration function here. 610 | // But validation against fugue-max-simple.ts shows its identical to yjsmod. 611 | integrate: integrateYjsMod, 612 | printDoc(doc) { printdoc(doc, false, true, false) }, 613 | } 614 | 615 | export const yjs: Algorithm = { 616 | localInsert, 617 | integrate: integrateYjs, 618 | printDoc(doc) { printdoc(doc, false, true, false) }, 619 | 620 | ignoreTests: ['withTails2'] 621 | } 622 | 623 | export const automerge: Algorithm = { 624 | localInsert, 625 | // The two integrate methods are equivalent. 626 | // integrate: integrateAutomerge, 627 | integrate: integrateRGASmol, 628 | printDoc(doc) { printdoc(doc, true, false, false) }, 629 | 630 | // Automerge doesn't handle these cases as I would expect. 
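  // (The names below presumably correspond to the interleaving / tail-insert
  // puzzle tests in test.ts.)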
631 | ignoreTests: [ 632 | 'interleavingBackward', 633 | 'interleavingBackward2', 634 | 'withTails', 635 | 'withTails2' 636 | ] 637 | } 638 | 639 | export const printDebugStats = () => { 640 | console.log('hits', hits, 'misses', misses) 641 | } 642 | 643 | 644 | // ;(() => { 645 | // // console.clear() 646 | 647 | // const alg = yjs 648 | 649 | // let doc1 = newDoc() 650 | 651 | // alg.localInsert(doc1, 'a', 0, 'x') 652 | // alg.localInsert(doc1, 'a', 1, 'y') 653 | // alg.localInsert(doc1, 'a', 0, 'z') // zxy 654 | 655 | // // alg.printDoc(doc1) 656 | 657 | // let doc2 = newDoc() 658 | 659 | // alg.localInsert(doc2, 'b', 0, 'a') 660 | // alg.localInsert(doc2, 'b', 1, 'b') 661 | // // alg.localInsert(doc2, 'b', 2, 'c') 662 | 663 | // mergeInto(alg, doc1, doc2) 664 | 665 | // alg.printDoc(doc1) 666 | 667 | // // console.log('\n\n\n') 668 | // })() -------------------------------------------------------------------------------- /fugue-max-simple.ts: -------------------------------------------------------------------------------- 1 | 2 | interface ID { 3 | sender: string; 4 | counter: number; 5 | } 6 | 7 | interface Node { 8 | /** For the root, this is ("", 0). */ 9 | id: ID; 10 | value: T | null; 11 | isDeleted: boolean; 12 | /** 13 | * null when this is the root. 14 | * For convenience, we store a pointer to the parent instead of just 15 | * its ID. 16 | */ 17 | parent: Node | null; 18 | side: "L" | "R"; 19 | // For traversals, store the children in sorted order. 20 | leftChildren: Node[]; 21 | rightChildren: Node[]; 22 | /** 23 | * The non-deleted size of the subtree rooted at this node. 24 | * 25 | * This is technically an optimization, but an easy & impactful one. 26 | */ 27 | size: number; 28 | /** 29 | * Our rightOrigin, if we're a right-side child. 30 | * null = our rightOrigin is the end of the list; 31 | * unset = we're not a right-side child. 32 | */ 33 | rightOrigin?: Node | null; 34 | } 35 | 36 | interface InsertMessage { 37 | type: "insert"; 38 | id: ID; 39 | value: T; 40 | parent: ID; 41 | side: "L" | "R"; 42 | rightOrigin?: ID | null; 43 | } 44 | 45 | interface DeleteMessage { 46 | type: "delete"; 47 | id: ID; 48 | } 49 | 50 | type Message = InsertMessage | DeleteMessage 51 | 52 | interface NodeSave { 53 | value: T | null; 54 | isDeleted: boolean; 55 | parent: ID | null; 56 | side: "L" | "R"; 57 | size: number; 58 | rightOrigin?: ID | null; 59 | } 60 | 61 | class Tree { 62 | readonly root: Node; 63 | /** 64 | * Used in getByID. 65 | * 66 | * Map from ID.sender, to an array that maps ID.counter, to node with that ID. 67 | */ 68 | private readonly nodesByID = new Map[]>(); 69 | 70 | constructor() { 71 | this.root = { 72 | id: { sender: "", counter: 0 }, 73 | value: null, 74 | isDeleted: true, 75 | parent: null, 76 | side: "R", 77 | leftChildren: [], 78 | rightChildren: [], 79 | size: 0, 80 | }; 81 | this.nodesByID.set("", [this.root]); 82 | } 83 | 84 | hasID(id: ID): boolean { 85 | const bySender = this.nodesByID.get(id.sender); 86 | if (bySender == null) return false 87 | return bySender[id.counter] != null 88 | } 89 | 90 | addNode( 91 | id: ID, 92 | value: T, 93 | parent: Node, 94 | side: "L" | "R", 95 | rightOriginID?: ID | null 96 | ): boolean { 97 | if (this.hasID(id)) return false 98 | 99 | const node: Node = { 100 | id, 101 | value, 102 | isDeleted: false, 103 | parent, 104 | side, 105 | leftChildren: [], 106 | rightChildren: [], 107 | size: 0, 108 | }; 109 | if (rightOriginID !== undefined) { 110 | node.rightOrigin = rightOriginID === null? 
null: this.getByID(rightOriginID); 111 | } 112 | 113 | // Add to nodesByID. 114 | let bySender = this.nodesByID.get(id.sender); 115 | if (bySender === undefined) { 116 | bySender = []; 117 | this.nodesByID.set(id.sender, bySender); 118 | } 119 | bySender.push(node); 120 | 121 | // Insert into parent's siblings. 122 | this.insertIntoSiblings(node); 123 | 124 | this.updateSize(node, 1); 125 | return true 126 | } 127 | 128 | private insertIntoSiblings(node: Node) { 129 | // Insert node among its same-side siblings. 130 | const parent = node.parent!; 131 | if (node.side === "R") { 132 | const rightSibs = parent.rightChildren; 133 | // Siblings are in order: *reverse* order of their rightOrigins, 134 | // breaking ties using the lexicographic order on id.sender. 135 | let i = 0; 136 | for (; i < rightSibs.length; i++) { 137 | if ( 138 | !( 139 | this.isLess(node.rightOrigin!, rightSibs[i].rightOrigin!) || 140 | (node.rightOrigin === rightSibs[i].rightOrigin && 141 | node.id.sender > rightSibs[i].id.sender) 142 | ) 143 | ) 144 | break; 145 | } 146 | rightSibs.splice(i, 0, node); 147 | } else { 148 | const leftSibs = parent.leftChildren; 149 | // Siblings are in lexicographic order by id.sender. 150 | let i = 0; 151 | for (; i < leftSibs.length; i++) { 152 | if (!(node.id.sender > leftSibs[i].id.sender)) break; 153 | } 154 | leftSibs.splice(i, 0, node); 155 | } 156 | } 157 | 158 | /** 159 | * Returns whether a < b in the existing list order. 160 | * 161 | * null values are treated as the end of the list. 162 | */ 163 | private isLess(a: Node | null, b: Node | null): boolean { 164 | if (a === b) return false; 165 | if (a === null) return false; 166 | if (b === null) return true; 167 | 168 | // Walk one node up the tree until they are both the same depth. 169 | const aDepth = this.depth(a); 170 | const bDepth = this.depth(b); 171 | let aAnc = a; 172 | let bAnc = b; 173 | if (aDepth > bDepth) { 174 | let lastSide: "L" | "R"; 175 | for (let i = aDepth; i > bDepth; i--) { 176 | lastSide = aAnc.side; 177 | aAnc = aAnc.parent!; 178 | } 179 | if (aAnc === b) { 180 | // a is a descendant of b on lastSide. 181 | return lastSide! === "L"; 182 | } 183 | } 184 | if (bDepth > aDepth) { 185 | let lastSide: "L" | "R"; 186 | for (let i = bDepth; i > aDepth; i--) { 187 | lastSide = bAnc.side; 188 | bAnc = bAnc.parent!; 189 | } 190 | if (bAnc === a) { 191 | // b is a descendant of a on lastSide. 192 | return lastSide! === "R"; 193 | } 194 | } 195 | 196 | // Walk both nodes up the tree until we find a common ancestor. 197 | while (aAnc.parent !== bAnc.parent) { 198 | // If we reach the root, the loop will terminate, so both parents 199 | // are non-null here. 200 | aAnc = aAnc.parent!; 201 | bAnc = bAnc.parent!; 202 | } 203 | // Now aAnc and bAnc are distinct siblings. See how they are sorted 204 | // in their parent's child arrays. 205 | if (aAnc.side !== bAnc.side) return aAnc.side === "L"; 206 | else { 207 | const siblings = 208 | aAnc.side === "L" 209 | ? aAnc.parent!.leftChildren 210 | : aAnc.parent!.rightChildren; 211 | return siblings.indexOf(aAnc) < siblings.indexOf(bAnc); 212 | } 213 | } 214 | 215 | /** 216 | * Returns node's depth in the tree. Root = depth 0. 217 | */ 218 | private depth(node: Node): number { 219 | let depth = 0; 220 | for ( 221 | let current = node; 222 | current.parent !== null; 223 | current = current.parent 224 | ) { 225 | depth++; 226 | } 227 | return depth; 228 | } 229 | 230 | /** 231 | * Adds delta to the sizes of node and all of its ancestors. 
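   * E.g. addNode calls updateSize(node, 1) after inserting a node, and the
   * delete effector calls updateSize(node, -1) after tombstoning one.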
232 | */ 233 | updateSize(node: Node, delta: number) { 234 | for (let anc: Node | null = node; anc !== null; anc = anc.parent) { 235 | anc.size += delta; 236 | } 237 | } 238 | 239 | getByID(id: ID): Node { 240 | const bySender = this.nodesByID.get(id.sender); 241 | if (bySender !== undefined) { 242 | const node = bySender[id.counter]; 243 | if (node !== undefined) return node; 244 | } 245 | throw new Error("Unknown ID: " + JSON.stringify(id)); 246 | } 247 | 248 | /** 249 | * Returns the node at the given index within node's subtree. 250 | */ 251 | getByIndex(node: Node, index: number): Node { 252 | if (index < 0 || index >= node.size) { 253 | throw new Error( 254 | "Index out of range: " + index + " (size: " + node.size + ")" 255 | ); 256 | } 257 | 258 | // A recursive approach would be simpler, but overflows the stack at modest 259 | // depths (~4000). So we do an iterative approach instead. 260 | let remaining = index; 261 | recurse: while (true) { 262 | for (const child of node.leftChildren) { 263 | if (remaining < child.size) { 264 | node = child; 265 | continue recurse; 266 | } 267 | remaining -= child.size; 268 | } 269 | if (!node.isDeleted) { 270 | if (remaining === 0) return node; 271 | remaining--; 272 | } 273 | for (const child of node.rightChildren) { 274 | if (remaining < child.size) { 275 | node = child; 276 | continue recurse; 277 | } 278 | remaining -= child.size; 279 | } 280 | throw new Error("Index in range but not found"); 281 | } 282 | } 283 | 284 | /** 285 | * Returns the leftmost left-only descendant of node, i.e., the 286 | * first left child of the first left child ... of node. 287 | */ 288 | leftmostDescendant(node: Node): Node { 289 | let desc = node; 290 | for (; desc.leftChildren.length !== 0; desc = desc.leftChildren[0]) {} 291 | return desc; 292 | } 293 | 294 | /** 295 | * Returns the next node in the traversal that is *not* a 296 | * descendant of node, or null if that is the end. Includes tombstones. 297 | */ 298 | nextNonDescendant(node: Node): Node | null { 299 | let current = node; 300 | while (current.parent !== null) { 301 | const siblings = 302 | current.side === "L" 303 | ? current.parent.leftChildren 304 | : current.parent.rightChildren; 305 | const index = siblings.indexOf(current); 306 | if (index < siblings.length - 1) { 307 | // The next sibling's subtree immediately follows current's subtree. 308 | // Find its leftmost element. 309 | const nextSibling = siblings[index + 1]; 310 | return this.leftmostDescendant(nextSibling); 311 | } else if (current.side === "L") { 312 | // The parent immediately follows current's subtree. 313 | return current.parent; 314 | } 315 | current = current.parent; 316 | } 317 | // We've reached the root without finding any further-right subtrees. 318 | return null; 319 | } 320 | 321 | *traverse(node: Node): IterableIterator { 322 | // A recursive approach (like in the paper) would be simpler, 323 | // but overflows the stack at modest 324 | // depths (~4000). So we do an iterative approach instead. 325 | 326 | let current = node; 327 | // Stack records the next child to visit for that node. 328 | // We don't need to store node because we can infer it from the 329 | // current node's parent etc. 330 | const stack: { side: "L" | "R"; childIndex: number }[] = [ 331 | { side: "L", childIndex: 0 }, 332 | ]; 333 | while (true) { 334 | const top = stack[stack.length - 1]; 335 | const children = 336 | top.side === "L" ? 
current.leftChildren : current.rightChildren; 337 | if (top.childIndex === children.length) { 338 | // We are done with the children on top.side. 339 | if (top.side === "L") { 340 | // Visit us, then move to right children. 341 | if (!current.isDeleted) yield current.value!; 342 | top.side = "R"; 343 | top.childIndex = 0; 344 | } else { 345 | // Go to the parent. 346 | if (current.parent === null) return; 347 | current = current.parent; 348 | stack.pop(); 349 | } 350 | } else { 351 | const child = children[top.childIndex]; 352 | // Save for later that we need to visit the next child. 353 | top.childIndex++; 354 | if (child.size > 0) { 355 | // Traverse child. 356 | current = child; 357 | stack.push({ side: "L", childIndex: 0 }); 358 | } 359 | } 360 | } 361 | } 362 | 363 | save(): Uint8Array { 364 | // Convert nodesByID into JSON format, also converting each Node into a NodeSave. 365 | const save: { [sender: string]: NodeSave[] } = {}; 366 | for (const [sender, bySender] of this.nodesByID) { 367 | save[sender] = bySender.map((node) => { 368 | const nodeSave: NodeSave = { 369 | value: node.value, 370 | isDeleted: node.isDeleted, 371 | parent: node.parent === null ? null : node.parent.id, 372 | side: node.side, 373 | size: node.size, 374 | }; 375 | if (node.rightOrigin !== undefined) { 376 | nodeSave.rightOrigin = 377 | node.rightOrigin === null ? null : node.rightOrigin.id; 378 | } 379 | return nodeSave; 380 | }); 381 | } 382 | return new Uint8Array(Buffer.from(JSON.stringify(save))); 383 | } 384 | 385 | load(saveData: Uint8Array) { 386 | const save: { [sender: string]: NodeSave[] } = JSON.parse( 387 | Buffer.from(saveData).toString() 388 | ); 389 | // First create all nodes without pointers to other nodes (parent, children, 390 | // rightOrigin). 391 | for (const [sender, bySenderSave] of Object.entries(save)) { 392 | if (sender === "") { 393 | // Root node. Just set its size. 394 | this.root.size = bySenderSave[0].size; 395 | continue; 396 | } 397 | this.nodesByID.set( 398 | sender, 399 | bySenderSave.map((nodeSave, counter) => ({ 400 | id: { sender, counter }, 401 | parent: null, 402 | value: nodeSave.value, 403 | isDeleted: nodeSave.isDeleted, 404 | side: nodeSave.side, 405 | size: nodeSave.size, 406 | leftChildren: [], 407 | rightChildren: [], 408 | })) 409 | ); 410 | } 411 | // Next, fill in the parent and rightOrigin pointers. 412 | for (const [sender, bySender] of this.nodesByID) { 413 | if (sender === "") continue; 414 | const bySenderSave = save[sender]!; 415 | for (let i = 0; i < bySender.length; i++) { 416 | const node = bySender[i]; 417 | const nodeSave = bySenderSave[i]; 418 | if (nodeSave.parent !== null) { 419 | node.parent = this.getByID(nodeSave.parent); 420 | } 421 | if (nodeSave.rightOrigin !== undefined) { 422 | node.rightOrigin = 423 | nodeSave.rightOrigin === null 424 | ? null 425 | : this.getByID(nodeSave.rightOrigin); 426 | } 427 | } 428 | } 429 | 430 | // Finally, call insertIntoSiblings on each node to fill in the children 431 | // arrays. 432 | // We must be careful to wait until after doing so for node.rightOrigin 433 | // and its ancestors, since insertIntoSiblings references the existing list order 434 | // on node.rightOrigin. 435 | 436 | // Nodes go from "pending" -> "ready" (rightOrigin valid) -> 437 | // "valid" (insertIntoSiblings called). 438 | // readyNodes is a stack; pendingNodes maps from a node to its dependencies. 
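    // In effect this is a topological sort over rightOrigin dependencies: a
    // node is only inserted into its siblings once its rightOrigin has been.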
439 | const readyNodes: Node[] = []; 440 | const pendingNodes = new Map, Node[]>(); 441 | for (const [sender, bySender] of this.nodesByID) { 442 | if (sender === "") continue; 443 | for (let i = 0; i < bySender.length; i++) { 444 | const node = bySender[i]; 445 | if (node.rightOrigin === undefined || node.rightOrigin === null) { 446 | // rightOrigin not used or is the root; node is ready. 447 | readyNodes.push(node); 448 | } else { 449 | let pendingArr = pendingNodes.get(node.rightOrigin); 450 | if (pendingArr === undefined) { 451 | pendingArr = []; 452 | pendingNodes.set(node.rightOrigin, pendingArr); 453 | } 454 | pendingArr.push(node); 455 | } 456 | } 457 | } 458 | 459 | while (readyNodes.length !== 0) { 460 | const node = readyNodes.pop()!; 461 | this.insertIntoSiblings(node); 462 | // node's dependencies are now ready. 463 | const deps = pendingNodes.get(node); 464 | if (deps !== undefined) readyNodes.push(...deps); 465 | pendingNodes.delete(node); 466 | } 467 | if (pendingNodes.size !== 0) { 468 | throw new Error("Internal error: failed to validate all nodes"); 469 | } 470 | } 471 | } 472 | 473 | export class FugueMaxSimple { 474 | tree: Tree; 475 | 476 | counter = 0; 477 | replicaID: string 478 | 479 | // All the elements we've seen, in causal order. This is inefficient, but it makes saving & loading 480 | // much simpler to implement. 481 | msgsInCausalOrder: Message[] = [] 482 | 483 | 484 | constructor(replicaID: string) { 485 | this.replicaID = replicaID 486 | this.tree = new Tree(); 487 | } 488 | 489 | insert(index: number, ...values: T[]): void { 490 | for (let i = 0; i < values.length; i++) { 491 | this.insertOne(index + i, values[i]); 492 | } 493 | } 494 | 495 | private insertOne(index: number, value: T) { 496 | // insert generator. 497 | const id = { sender: this.replicaID, counter: this.counter }; 498 | this.counter++; 499 | const leftOrigin = 500 | index === 0 501 | ? this.tree.root 502 | : this.tree.getByIndex(this.tree.root, index - 1); 503 | 504 | let msg: InsertMessage; 505 | if (leftOrigin.rightChildren.length === 0) { 506 | // leftOrigin has no right children, so the new node becomes 507 | // a right child of leftOrigin. 508 | msg = { type: "insert", id, value, parent: leftOrigin.id, side: "R" }; 509 | // rightOrigin is the node after leftOrigin in the tree traversal, 510 | // given that leftOrigin has no right descendants. 511 | const rightOrigin = this.tree.nextNonDescendant(leftOrigin); 512 | msg.rightOrigin = rightOrigin === null ? null : rightOrigin.id; 513 | } else { 514 | // Otherwise, the new node is added as a left child of rightOrigin, which 515 | // is the next node after leftOrigin *including tombstones*. 516 | // In this case, rightOrigin is the leftmost descendant of leftOrigin's 517 | // first right child. 518 | const rightOrigin = this.tree.leftmostDescendant( 519 | leftOrigin.rightChildren[0] 520 | ); 521 | msg = { type: "insert", id, value, parent: rightOrigin.id, side: "L" }; 522 | } 523 | 524 | // Message is delivered to receivePrimitive ("on delivering" function). 525 | // super.sendPrimitive(JSON.stringify(msg)); 526 | this.receivePrimitive(msg) 527 | } 528 | 529 | delete(startIndex: number, count = 1): void { 530 | for (let i = 0; i < count; i++) this.deleteOne(startIndex); 531 | } 532 | 533 | private deleteOne(index: number): void { 534 | // delete generator. 
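    // Deletes only tombstone a node (the effector sets value = null and
    // isDeleted = true); the node stays in the tree so concurrent inserts can
    // still use it as a parent.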
535 | const node = this.tree.getByIndex(this.tree.root, index); 536 | const msg: DeleteMessage = { type: "delete", id: node.id }; 537 | // Message is delivered to receivePrimitive ("on delivering" function). 538 | this.receivePrimitive(msg); 539 | } 540 | 541 | protected receivePrimitive(msg: Message) { 542 | let inserted: boolean 543 | switch (msg.type) { 544 | case "insert": 545 | // insert effector 546 | inserted = this.tree.addNode( 547 | msg.id, 548 | msg.value, 549 | this.tree.getByID(msg.parent), 550 | msg.side, 551 | msg.rightOrigin 552 | ); 553 | // In a production implementation, we would emit an Insert event here. 554 | break; 555 | case "delete": 556 | // delete effector 557 | inserted = false 558 | if (this.tree.hasID(msg.id)) { 559 | const node = this.tree.getByID(msg.id); 560 | if (!node.isDeleted) { 561 | node.value = null; 562 | node.isDeleted = true; 563 | this.tree.updateSize(node, -1); 564 | inserted = true 565 | // In a production implementation, we would emit a Delete event here. 566 | } 567 | } 568 | break; 569 | default: 570 | throw new Error("Bad message: " + msg); 571 | } 572 | 573 | // We fall through if the message hasn't been processed yet. 574 | if (inserted) this.msgsInCausalOrder.push(msg) 575 | } 576 | 577 | get(index: number): T { 578 | if (index < 0 || index >= this.length) { 579 | throw new Error("index out of bounds: " + index); 580 | } 581 | const node = this.tree.getByIndex(this.tree.root, index); 582 | return node.value!; 583 | } 584 | 585 | values(): IterableIterator { 586 | return this.tree.traverse(this.tree.root); 587 | } 588 | 589 | toArray(): T[] { 590 | return [...this.values()] 591 | } 592 | 593 | get length(): number { 594 | return this.tree.root.size; 595 | } 596 | 597 | save(): Message[] { 598 | return this.msgsInCausalOrder 599 | } 600 | 601 | load(save: Message[]): void { 602 | for (const msg of save) { 603 | this.receivePrimitive(msg) 604 | } 605 | } 606 | 607 | mergeFrom(other: FugueMaxSimple) { 608 | const save = other.save() 609 | this.load(save) 610 | } 611 | 612 | 613 | // savePrimitive(): Uint8Array { 614 | // // No need to save this.counter because we will have a different 615 | // // replicaID next time. 616 | // let bytes = this.tree.save(); 617 | // if (GZIP) { 618 | // bytes = pako.gzip(bytes); 619 | // } 620 | // return bytes; 621 | // } 622 | 623 | // loadPrimitive(savedState: Uint8Array | null): void { 624 | // if (savedState === null) return; 625 | 626 | // if (GZIP) { 627 | // savedState = pako.ungzip(savedState); 628 | // } 629 | // this.tree.load(savedState); 630 | // } 631 | } 632 | -------------------------------------------------------------------------------- /list-fugue-simple.ts: -------------------------------------------------------------------------------- 1 | // This is a port from the fugue repository Oct 2023 2 | // Commit 98c0c7a965276fb9a22237562f642a6ce8d8e03f 3 | 4 | interface ID { 5 | sender: string; 6 | counter: number; 7 | } 8 | 9 | // const idEq = (a: ID | null | undefined, b: ID | null): boolean => ( 10 | // a == b || ( 11 | // a != null && b != null 12 | // && a.sender === b.sender && a.counter === b.counter 13 | // ) 14 | // ) 15 | 16 | interface Element { 17 | /** For the start & end, this is ("", 0) & ("", 1). */ 18 | id: ID; 19 | value: T | null; 20 | isDeleted: boolean; 21 | /** null for start and end. */ 22 | leftOrigin: Element | null; 23 | /** null for start and end. */ 24 | rightOrigin: Element | null; 25 | /** Linked list structure: the element currently to our left. 
*/ 26 | left: Element | null; 27 | /** Linked list structure: the element currently to our right. */ 28 | right: Element | null; 29 | } 30 | 31 | interface InsertMessage { 32 | type: "insert"; 33 | id: ID; 34 | value: T; 35 | leftOrigin: ID; 36 | rightOrigin: ID; 37 | } 38 | 39 | interface DeleteMessage { 40 | type: "delete"; 41 | id: ID; 42 | } 43 | 44 | type Message = InsertMessage | DeleteMessage 45 | 46 | export class ListFugueSimple { 47 | readonly start: Element; 48 | readonly end: Element; 49 | 50 | counter = 0; 51 | 52 | /** 53 | * Used in getByID. 54 | * 55 | * Map from ID.sender, to an array that maps ID.counter, to element with that ID. 56 | */ 57 | readonly elementsByID = new Map[]>(); 58 | /** Cached length. */ 59 | _length = 0; 60 | 61 | // All the elements we've seen, in causal order. This makes saving & loading 62 | // much more simple. 63 | msgsInCausalOrder: Message[] = [] 64 | 65 | replicaId: string 66 | 67 | constructor(replicaId: string) { 68 | this.replicaId = replicaId 69 | this.start = { 70 | id: { sender: "", counter: 0 }, 71 | value: null, 72 | isDeleted: true, 73 | leftOrigin: null, 74 | rightOrigin: null, 75 | left: null, 76 | right: null, 77 | }; 78 | this.end = { 79 | id: { sender: "", counter: 1 }, 80 | value: null, 81 | isDeleted: true, 82 | leftOrigin: null, 83 | rightOrigin: null, 84 | left: this.start, 85 | right: null, 86 | }; 87 | this.start.right = this.end; 88 | this.elementsByID.set("", [this.start, this.end]); 89 | } 90 | 91 | insert(index: number, ...values: T[]): T | undefined { 92 | for (let i = 0; i < values.length; i++) { 93 | this.insertOne(index + i, values[i]); 94 | } 95 | 96 | // The return value is just an interface requirement; not relevant here. 97 | return undefined; 98 | } 99 | 100 | private insertOne(index: number, value: T) { 101 | // insert generator. 102 | const id = { sender: this.replicaId, counter: this.counter }; 103 | this.counter++; 104 | const leftOrigin = index === 0 ? this.start : this.getByIndex(index - 1); 105 | const rightOrigin = leftOrigin.right!; 106 | const msg: InsertMessage = { 107 | type: "insert", 108 | id, 109 | value, 110 | leftOrigin: leftOrigin.id, 111 | rightOrigin: rightOrigin.id, 112 | }; 113 | // Message is delivered to receivePrimitive (the effector). 114 | // super.sendPrimitive(JSON.stringify(msg)); 115 | this.receivePrimitive(msg) 116 | } 117 | 118 | delete(startIndex: number, count = 1): void { 119 | for (let i = 0; i < count; i++) this.deleteOne(startIndex); 120 | } 121 | 122 | private deleteOne(index: number): void { 123 | // delete generator. 124 | const elt = this.getByIndex(index); 125 | const msg: DeleteMessage = { type: "delete", id: elt.id }; 126 | // Message is delivered to receivePrimitive (the effector). 127 | // super.sendPrimitive(JSON.stringify(msg)); 128 | this.receivePrimitive(msg) 129 | } 130 | 131 | protected receivePrimitive(msg: Message): void { 132 | // const msg: InsertMessage | DeleteMessage = JSON.parse(message); 133 | switch (msg.type) { 134 | case "insert": { 135 | // insert effector 136 | if (this.hasID(msg.id)) return // We already have this item. 137 | 138 | const leftOrigin = this.getByID(msg.leftOrigin); 139 | const rightOrigin = this.getByID(msg.rightOrigin); 140 | const left = this.computeLeft(msg.id, leftOrigin, rightOrigin); 141 | 142 | // Insert a new elt into the linked last after left. 
143 | const right = left.right!; 144 | const elt: Element = { 145 | id: msg.id, 146 | value: msg.value, 147 | isDeleted: false, 148 | leftOrigin, 149 | rightOrigin, 150 | left, 151 | right, 152 | }; 153 | left.right = elt; 154 | right.left = elt; 155 | 156 | // Add elt to elementsByID. 157 | let bySender = this.elementsByID.get(msg.id.sender); 158 | if (bySender === undefined) { 159 | bySender = []; 160 | this.elementsByID.set(msg.id.sender, bySender); 161 | } 162 | bySender[msg.id.counter] = elt; 163 | 164 | this._length++; 165 | 166 | // In a production implementation, we would emit an Insert event here. 167 | break; 168 | } 169 | case "delete": { 170 | // delete effector 171 | const elt = this.getByID(msg.id); 172 | if (elt.isDeleted) return 173 | 174 | elt.value = null; 175 | elt.isDeleted = true; 176 | this._length--; 177 | // In a production implementation, we would emit a Delete event here. 178 | break; 179 | } 180 | default: 181 | throw new Error("Bad message: " + msg); 182 | } 183 | 184 | // We fall through if the message hasn't been processed yet. 185 | this.msgsInCausalOrder.push(msg) 186 | } 187 | 188 | private computeLeft( 189 | id: ID, 190 | leftOrigin: Element, 191 | rightOrigin: Element 192 | ): Element { 193 | const rightParent = this.rightParent(leftOrigin, rightOrigin); 194 | 195 | let left = leftOrigin; 196 | let scanning = false; 197 | 198 | // o ranges from leftOrigin to (non-adjusted) rightOrigin, *exclusive*. 199 | // Note that o will never be start or end (within the loop), 200 | // so its origins are non-null. 201 | for (let o = leftOrigin.right!; o !== rightOrigin; o = o.right!) { 202 | if (this.lessThan(o.leftOrigin!, leftOrigin)) break; 203 | else if (o.leftOrigin === leftOrigin) { 204 | const oRightParent = this.rightParent(o.leftOrigin, o.rightOrigin!); 205 | 206 | if (this.lessThan(oRightParent, rightParent)) { 207 | scanning = true; 208 | } else if (oRightParent === rightParent) { 209 | // o and the new elt are double siblings. 210 | if (o.id.sender > id.sender) break; 211 | else scanning = false; 212 | } else { 213 | // oRightParent > rightParent 214 | scanning = false; 215 | } 216 | } 217 | 218 | if (!scanning) left = o; 219 | } 220 | 221 | return left; 222 | } 223 | 224 | private rightParent(leftOrigin: Element, rightOrigin: Element): Element { 225 | if (rightOrigin === this.end || rightOrigin.leftOrigin !== leftOrigin) return this.end; 226 | else return rightOrigin; 227 | } 228 | 229 | /** 230 | * Returns whether a < b in the linked list order. 231 | */ 232 | private lessThan(a: Element, b: Element): boolean { 233 | if (a === b) return false; 234 | // Loop forwards from each of a & b in parallel until one finds the other. 235 | // In principle this takes O(n) time, but in practice a & b should usually 236 | // be close together. 
237 |     let afterA = a;
238 |     let afterB = b;
239 |     while (true) {
240 |       if (afterA === b || afterB.right === null) return true;
241 |       if (afterB === a || afterA.right === null) return false;
242 |       afterA = afterA.right;
243 |       afterB = afterB.right;
244 |     }
245 |   }
246 |
247 |   private hasID(id: ID): boolean {
248 |     const bySender = this.elementsByID.get(id.sender);
249 |     if (bySender == null) return false
250 |     return bySender[id.counter] != null
251 |   }
252 |
253 |   private getByID(id: ID): Element<T> {
254 |     const bySender = this.elementsByID.get(id.sender);
255 |     if (bySender !== undefined) {
256 |       const node = bySender[id.counter];
257 |       if (node !== undefined) return node;
258 |     }
259 |     throw new Error("Unknown ID: " + JSON.stringify(id));
260 |   }
261 |
262 |   private getByIndex(index: number): Element<T> {
263 |     if (index < 0 || index >= this.length) {
264 |       throw new Error(
265 |         "Index out of range: " + index + " (length: " + this.length + ")"
266 |       );
267 |     }
268 |
269 |     // For now, do a slow linear search, but from the end b/c that's more common.
270 |     // An easy common-case optimization is to cache index "hints" like in Yjs.
271 |     // A doable asymptotic optimization is to build a balanced tree structure
272 |     // on top of the non-deleted list elements and use that to convert between
273 |     // indices & elements in O(log(n)) time.
274 |     let remaining = this.length - 1 - index;
275 |     for (let elt = this.end.left!; elt !== this.start; elt = elt.left!) {
276 |       if (!elt.isDeleted) {
277 |         if (remaining === 0) return elt;
278 |         remaining--;
279 |       }
280 |     }
281 |     throw new Error("Index in range but not found");
282 |   }
283 |
284 |   get(index: number): T {
285 |     return this.getByIndex(index).value!;
286 |   }
287 |
288 |   *values(): IterableIterator<T> {
289 |     // Walk the linked list.
290 |     for (
291 |       let elt: Element<T> | null = this.start;
292 |       (elt = elt.right);
293 |       elt !== null
294 |     ) {
295 |       if (!elt.isDeleted) yield elt.value!;
296 |     }
297 |   }
298 |
299 |   toArray(): T[] {
300 |     return [...this.values()]
301 |   }
302 |
303 |   get length(): number {
304 |     return this._length;
305 |   }
306 |
307 |   save(): Message<T>[] {
308 |     // Save the linked list (less start & end) in causal order for easy merging.
309 |     return this.msgsInCausalOrder
310 |   }
311 |
312 |   // load(bytes: Uint8Array): void {
313 |   load(save: Message<T>[]): void {
314 |     for (const msg of save) {
315 |       this.receivePrimitive(msg)
316 |     }
317 |   }
318 |
319 |   mergeFrom(other: ListFugueSimple<T>) {
320 |     const save = other.save()
321 |     this.load(save)
322 |   }
323 |
324 |   debugPrint() {
325 |     // Walk the linked list.
326 |
327 |     const depth: Record<string, number> = {}
328 |     // const kForId = (id: Id, c: T | null) => `${id[0]} ${id[1]} ${id[2] ?? c != null}`
329 |     const eltId = (elt: Element<T>) => elt.id.sender === '' ? 'ROOT' : `${elt.id.sender},${elt.id.counter}`
330 |     depth[eltId(this.start)] = 0
331 |
332 |     for (
333 |       let elt: Element<T> | null = this.start;
334 |       (elt = elt.right);
335 |       elt !== null
336 |     ) {
337 |       // The only items with a null left / right are the roots.
338 |       if (elt.leftOrigin == null || elt.rightOrigin == null) continue
339 |
340 |       const isLeftChild = true
341 |       // const isLeftChild = this.rightParent(elt.leftOrigin, elt.rightOrigin) === this.end
342 |       const parent = isLeftChild ? elt.leftOrigin : elt.rightOrigin
343 |       const d = (parent === this.start || parent === this.end)
344 |         ? 0
345 |         : depth[eltId(parent)] + 1
346 |
347 |       depth[eltId(elt)] = d
348 |
349 |       // let content = `${isLeftChild ? '/' : '\\'}${elt.value == null
350 |       let content = `${elt.value == null
351 |         ? '.'
352 |         // : elt.isDeleted ? chalk.strikethrough(elt.value) : chalk.yellow(elt.value)
353 |         : elt.value
354 |       } at [${eltId(elt)}] (left [${eltId(elt.leftOrigin)}])`
355 |       content += ` right [${eltId(elt.rightOrigin)}]`
356 |       content += ` rightParent ${eltId(this.rightParent(elt.leftOrigin, elt.rightOrigin))}`
357 |       // console.log(`${'| '.repeat(d)}${elt.value == null ? chalk.strikethrough(content) : content}`)
358 |       console.log(`${'| '.repeat(d)}${content}`)
359 |     }
360 |
361 |
362 |
363 |   }
364 | }
365 |
-------------------------------------------------------------------------------- /package.json: --------------------------------------------------------------------------------
1 | {
2 |   "name": "reference-crdts",
3 |   "version": "1.0.0",
4 |   "description": "Simple list implementations of list CRDTs",
5 |   "main": "dist/crdts.js",
6 |   "type": "module",
7 |   "repository": "git@github.com:josephg/reference-crdts.git",
8 |   "author": "Seph Gentle ",
9 |   "license": "MIT",
10 |   "scripts": {
11 |     "prepare": "tsc",
12 |     "test": "npx ts-node test.ts"
13 |   },
14 |   "dependencies": {
15 |     "@automerge/automerge": "^2.1.6",
16 |     "@types/chalk": "^2.2.0",
17 |     "@types/node": "^18.11.18",
18 |     "chalk": "^4.1.1",
19 |     "ot-text-unicode": "^4.0.0"
20 |   },
21 |   "devDependencies": {
22 |     "@types/seed-random": "^2.2.0",
23 |     "seed-random": "^2.2.0",
24 |     "ts-node": "^10.9.1",
25 |     "typescript": "^4.9.4",
26 |     "yjs": "^13.6.8"
27 |   }
28 | }
29 |
-------------------------------------------------------------------------------- /reference_test.ts: --------------------------------------------------------------------------------
1 | // This file contains a fuzzer which checks these implementations vs the
2 | // real implementations in yjs and automerge.
3 |
4 | import * as Y from 'yjs'
5 | import * as automerge from '@automerge/automerge'
6 | import assert from 'assert/strict'
7 | import seed from 'seed-random'
8 | import consoleLib from 'console'
9 | import * as crdts from './crdts.js'
10 | import * as sync9 from './sync9.js'
11 |
12 | // For fugue.
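// The fugue implementations live in this repo (list-fugue-simple.ts and
// fugue-max-simple.ts). Each DocPair below applies every generated edit to the
// shared reference implementation and, when checkMode is set, to one of the
// real libraries; after every merge, check() asserts their contents match.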
13 | import { ListFugueSimple } from './list-fugue-simple.js'
14 | import { FugueMaxSimple } from './fugue-max-simple.js'
15 |
16 | type DocType<T> = {arr: T[]}
17 |
18 | const amInit = automerge.from<DocType<number>>({arr: []})
19 |
20 | export enum Mode {
21 |   Automerge,
22 |   Yjs,
23 |   YjsMod,
24 |   Sync9,
25 |   Fugue,
26 |   FugueMax,
27 | }
28 |
29 | let log = ''
30 |
31 | type FugueMessage = { src: string, msg: Uint8Array }
32 |
33 | export class DocPair<T> {
34 |   id: number
35 |   idStr: string
36 |
37 |   algorithm: crdts.Algorithm
38 |   sephdoc: crdts.Doc<T>
39 |
40 |   am?: automerge.Doc<DocType<T>>
41 |   ydoc?: Y.Doc
42 |   sync9?: any
43 |   // fugue?: {
44 |   //   // app: collabs.CRDTApp,
45 |   //   list: ListFugueSimple<T>,
46 |   //   messages: FugueMessage[],
47 |   // }
48 |   fugue?: ListFugueSimple<T>
49 |   fugueMax?: FugueMaxSimple<T>
50 |
51 |   constructor(id: number, localMode: Mode, checkMode: Mode | null = localMode) {
52 |     this.id = id
53 |     this.idStr = 'abc'[id]
54 |
55 |     this.algorithm = {
56 |       [Mode.Automerge]: crdts.automerge,
57 |       [Mode.Yjs]: crdts.yjs,
58 |       [Mode.YjsMod]: crdts.yjsMod,
59 |       [Mode.Sync9]: crdts.sync9,
60 |       [Mode.Fugue]: crdts.fugue,
61 |       [Mode.FugueMax]: crdts.fugueMax,
62 |     }[localMode]
63 |     if (this.algorithm == null) throw Error('Unknown algorithm: ' + localMode)
64 |
65 |     this.sephdoc = crdts.newDoc<T>()
66 |
67 |     // this.am = automerge.from({arr: []}, idStr)
68 |     // this.am = automerge.from(amInit, idStr)
69 |     switch (checkMode) {
70 |       case Mode.Automerge: {
71 |         // Automerge client ID strings must be valid hex strings, and the
72 |         // concurrent item ordering is reversed from my algorithms here.
73 |         // (So I'm inverting their order here.)
74 |         const amId = Buffer.from([255 - id]).toString('hex')
75 |         this.am = automerge.merge(automerge.init(amId), amInit)
76 |         break
77 |       }
78 |       case Mode.Yjs: {
79 |         this.ydoc = new Y.Doc()
80 |         this.ydoc.clientID = id
81 |         break
82 |       }
83 |       case Mode.Sync9: {
84 |         this.sync9 = sync9.make(this.idStr)
85 |         break
86 |       }
87 |       case Mode.Fugue: {
88 |         this.fugue = new ListFugueSimple<T>(this.idStr)
89 |         break
90 |       }
91 |       case Mode.FugueMax: {
92 |         this.fugueMax = new FugueMaxSimple<T>(this.idStr)
93 |       }
94 |     }
95 |   }
96 |
97 |   // ins(pos: number, content: number[]) {
98 |
99 |   insert(pos: number, ...content: T[]) {
100 |     for (let i = 0; i < content.length; i++) {
101 |       this.algorithm.localInsert(this.sephdoc, this.idStr, pos + i, content[i])
102 |     }
103 |
104 |     this.ydoc?.getArray().insert(pos, content)
105 |
106 |     if (this.am != null) {
107 |       this.am = automerge.change(this.am, d => {
108 |         d.arr.splice(pos, 0, ...content)
109 |       })
110 |     }
111 |
112 |     if (this.sync9 != null) {
113 |       sync9.insertMany(this.sync9, pos, ...content)
114 |     }
115 |
116 |     if (this.fugue != null) {
117 |       this.fugue.insert(pos, ...content)
118 |     }
119 |
120 |     if (this.fugueMax != null) {
121 |       this.fugueMax.insert(pos, ...content)
122 |     }
123 |   }
124 |
125 |   // insert(pos: number, content: T) {
126 |   //   // assert(content.length === 1)
127 |   //   this.algorithm.localInsert(this.sephdoc, this.idStr, pos, content)
128 |   //   // console.log('->ins', pos, content, this.sephdoc)
129 |
130 |   //   this.ydoc?.getArray().insert(pos, [content])
131 |
132 |   //   if (this.am != null) {
133 |   //     this.am = automerge.change(this.am, d => {
134 |   //       d.arr.splice(pos, 0, content)
135 |   //     })
136 |   //   }
137 |
138 |   //   if (this.sync9 != null) {
139 |   //     sync9.insert(this.sync9, pos, content)
140 |   //   }
141 |
142 |   //   if (this.fugue != null) {
143 |   //     this.fugue.insert(pos, content)
144 |   //   }
145 |
146 |   //   if (this.fugueMax != null) {
147 |   //
this.fugueMax.insert(pos, content) 148 | // } 149 | // } 150 | 151 | del(pos: number) { 152 | // I haven't added delete support to the merge() function in crdts. 153 | throw Error('NYI') 154 | 155 | // crdts.localDelete(this.sephdoc, this.idStr, pos) 156 | 157 | // this.ydoc?.getArray().delete(pos, 1) 158 | 159 | // if (this.am != null) { 160 | // this.am = automerge.change(this.am, d => { 161 | // d.arr.splice(pos, 1) 162 | // }) 163 | // } 164 | 165 | // if (this.sync9) throw Error('nyi') 166 | } 167 | 168 | mergeFrom(other: DocPair) { 169 | // console.log('merging', other.content, 'into', this.content) 170 | 171 | crdts.mergeInto(this.algorithm, this.sephdoc, other.sephdoc) 172 | 173 | if (this.am != null) { 174 | this.am = automerge.merge(this.am, other.am!) 175 | // console.log('am', this.am.arr) 176 | // console.log('hist', automerge.getHistory(this.am).map(e => e.change)) 177 | // console.log('am', other.am, this.am) 178 | } 179 | 180 | if (this.ydoc != null) { 181 | const sv = Y.encodeStateVector(this.ydoc) 182 | // console.log('sv', sv) 183 | const update = Y.encodeStateAsUpdateV2(other.ydoc!, sv) 184 | // Y.logUpdateV2(update) 185 | Y.applyUpdateV2(this.ydoc, update) 186 | 187 | // Y.logUpdateV2(Y.encodeStateAsUpdateV2(other.ydoc)) 188 | // Y.logUpdateV2(Y.encodeStateAsUpdateV2(this.ydoc)) 189 | // console.log('y', other.yarr.toArray(), this.ydoc?.getArray().toArray()) 190 | // console.log('am now', this.am) 191 | // console.log('yjs now', this.ydoc?.getArray().toArray()) 192 | } 193 | 194 | if (this.sync9 != null) { 195 | this.sync9 = sync9.merge(this.sync9, other.sync9) 196 | } 197 | 198 | if (this.fugue != null) { 199 | this.fugue.mergeFrom(other.fugue!) 200 | } 201 | if (this.fugueMax != null) { 202 | this.fugueMax.mergeFrom(other.fugueMax!) 
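      // Note: merging is implemented as a full replay of the other peer's
      // message log (see ListFugueSimple.mergeFrom); receivePrimitive skips
      // anything we've already applied, so this is idempotent.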
203 | } 204 | 205 | this.check() 206 | // console.log('->', this.content) 207 | } 208 | 209 | merge(other: DocPair) { 210 | this.mergeFrom(other) 211 | other.mergeFrom(this) 212 | this.checkEq(other) 213 | } 214 | 215 | check() { 216 | const myContent = crdts.getArray(this.sephdoc) 217 | // console.log('am', this.sephdoc.content) 218 | if (this.am != null) { 219 | assert.deepStrictEqual(myContent, this.am.arr) 220 | } 221 | 222 | if (this.ydoc != null) { 223 | // assert.equal(this.am.arr.length, this.ydoc?.getArray().length) 224 | // assert.deepEqual(this.am.arr, this.ydoc?.getArray().toArray()) 225 | assert.deepStrictEqual(myContent, this.ydoc.getArray().toArray()) 226 | } 227 | 228 | if (this.sync9 != null) { 229 | try { 230 | // console.log(this.sephdoc) 231 | assert.deepStrictEqual(myContent, sync9.get_content(this.sync9)) 232 | } catch (e) { 233 | console.log('am', this.sephdoc.content) 234 | console.log(log) 235 | this.algorithm.printDoc(this.sephdoc) 236 | throw e 237 | } 238 | } 239 | 240 | if (this.fugue != null) { 241 | const fugueList = this.fugue.toArray() 242 | try { 243 | assert.deepStrictEqual(myContent, fugueList) 244 | } catch (e) { 245 | // console.log('fugue waypoints', this.fugue!.list.totalOrder.rootWaypoint) 246 | console.log('doc', this.idStr) 247 | console.log('local', this.sephdoc.content) 248 | 249 | console.log('fugue', fugueList) 250 | // for (let i = 0; i < fugueList.length; i++) { 251 | // console.log(` ${i}: ${fugueList[i]} pos: ${this.fugue.list.getPosition(i)}`) 252 | // } 253 | // this.fugue.list.list.printTreeWalk() 254 | // for (let i = 0; i < fugueList.length; i++) { 255 | // console.log(fugueList[i], this.fugue.list.totalOrder.decode(this.fugue.list.getPosition(i))) 256 | // } 257 | this.algorithm.printDoc(this.sephdoc) 258 | console.log('\n---fugue---') 259 | this.fugue.debugPrint() 260 | console.log(log) 261 | throw e 262 | } 263 | } 264 | 265 | if (this.fugueMax != null) { 266 | const fugueList = this.fugueMax.toArray() 267 | try { 268 | assert.deepStrictEqual(myContent, fugueList) 269 | } catch (e) { 270 | console.log('doc', this.idStr) 271 | console.log('local', this.sephdoc.content) 272 | 273 | console.log('fugue', fugueList) 274 | this.algorithm.printDoc(this.sephdoc) 275 | throw e 276 | } 277 | } 278 | 279 | // console.log('result', this.ydoc?.getArray().toArray()) 280 | } 281 | 282 | checkEq(other: DocPair) { 283 | // console.log('x', this.fugue?.list.slice()) 284 | // console.log('y', other.fugue?.list.slice()) 285 | this.check() 286 | other.check() 287 | assert.deepEqual(this.content, other.content) 288 | } 289 | 290 | get content(): T[] { 291 | return this.am != null 292 | ? this.am!.arr 293 | : crdts.getArray(this.sephdoc) 294 | } 295 | 296 | get length(): number { 297 | // return this.am.arr.length 298 | return this.sephdoc.content.reduce((sum, item) => item.isDeleted || item.content == null ? sum : sum + 1, 0) 299 | } 300 | } 301 | 302 | const randomizer = (localMode: Mode, checkMode: (Mode | null) = localMode) => { 303 | globalThis.console = new consoleLib.Console({ 304 | stdout: process.stdout, stderr: process.stderr, 305 | inspectOptions: {depth: null} 306 | }) 307 | 308 | const systemSeed = process.env['SEED'] ?? 
'' 309 | 310 | // for (let iter = 0; iter < 1000; iter++) { 311 | for (let iter = 0; ; iter++) { 312 | if (iter % 20 === 0) console.log('iter', iter) 313 | // console.log('iter', iter) 314 | // const random = seed(`bb ${iter}`) 315 | const random = seed(`${systemSeed} ${iter}`) 316 | const randInt = (n: number) => Math.floor(random() * n) 317 | const randBool = (weight: number = 0.5) => random() < weight 318 | 319 | const docs = new Array(3).fill(null).map((_, i) => new DocPair(i, localMode, checkMode)) 320 | // const docs = new Array(1).fill(null).map((_, i) => new DocPair(i, mode)) 321 | 322 | log = '' 323 | 324 | const randDoc = () => docs[randInt(docs.length)] 325 | 326 | let nextItem = 0 327 | // console.log(docs) 328 | for (let i = 0; i < 100; i++) { 329 | // console.log(i) 330 | // if (iter === 8 && i === 5) debugger 331 | // if (i % 100 === 0) console.log(i) 332 | 333 | // Generate some random operations 334 | for (let j = 0; j < 3; j++) { 335 | // for (let j = 0; j < 1; j++) { 336 | const doc = randDoc() 337 | 338 | // console.log('old content for doc', doc.id, doc.content) 339 | 340 | const len = doc.length 341 | // const insWeight = 1 342 | const insWeight = len < 100 ? 0.65 : 0.35 343 | // if (len === 0 || randBool(insWeight)) { 344 | if (true) { 345 | // Insert! 346 | // const content = new Array(randInt(3) + 1).fill(null).map(() => ++nextItem) 347 | 348 | const content = [] 349 | do { 350 | content.push(++nextItem) 351 | } while (randBool(0.5)) 352 | 353 | // const content = ++nextItem 354 | const pos = randInt(len + 1) 355 | // console.log('insert', pos, content) 356 | doc.insert(pos, ...content) 357 | 358 | log += `${doc.idStr}.insert(${pos}, ...${JSON.stringify(content)})\n` 359 | } else { 360 | // Delete something 361 | const pos = randInt(len) 362 | // const span = randInt(Math.min(len - pos, 3)) + 1 363 | // console.log('del', pos, span) 364 | doc.del(pos) 365 | } 366 | doc.check() 367 | // console.log('new content for doc', doc.id, doc.content) 368 | } 369 | 370 | // Pick a pair of documents and merge them 371 | const a = randDoc() 372 | const b = randDoc() 373 | if (a !== b) { 374 | // console.log('merging', a.id, b.id, a.content, b.content) 375 | log += `merge(${a.idStr}, ${b.idStr})\n` 376 | a.merge(b) 377 | } 378 | } 379 | } 380 | } 381 | 382 | function runRandomizer() { 383 | // randomizer(Mode.YjsMod, Mode.Sync9) 384 | // randomizer(Mode.Automerge) 385 | // randomizer(Mode.Fugue, Mode.Sync9) 386 | randomizer(Mode.Sync9) 387 | // randomizer(Mode.Sync9, Mode.Fugue) 388 | // randomizer(Mode.YjsMod, Mode.Fugue) 389 | // randomizer(Mode.YjsMod, Mode.FugueMax) 390 | // randomizer(Mode.YjsMod, null) 391 | // randomizer(Mode.YjsMod, Mode.Fugue) 392 | // console.log('iters', crdts.iters) 393 | 394 | // const docs = [new DocPair(0, Mode.Fugue), new DocPair(1, Mode.Fugue), new DocPair(2, Mode.Fugue)] 395 | // const [a, b, c] = docs 396 | 397 | // const merge = (a: DocPair, b: DocPair) => { a.merge(b) } 398 | 399 | // b.insert(0, 10) 400 | // merge(a, b) 401 | // a.insert(1, 20) 402 | // b.insert(1, 30) 403 | // merge(a, b) 404 | } 405 | 406 | runRandomizer() 407 | -------------------------------------------------------------------------------- /rle.ts: -------------------------------------------------------------------------------- 1 | // This implements just YjsMod but uses RLE optimizations. 
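// The optimization actually visible in this file is the idx_hint fast path in
// findItem2 below; the public API otherwise matches crdts.ts. A minimal usage
// sketch (mirroring the commented-out demo at the bottom of this file):
//
//   const doc = newDoc()
//   localInsert(doc, 'a', 0, 'x')
//   localInsert(doc, 'a', 1, 'y')
//   console.log(getArray(doc)) // ['x', 'y']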
2 |
3 | import assert from 'assert'
4 | import consoleLib from 'console'
5 | import chalk from 'chalk'
6 |
7 | globalThis.console = new consoleLib.Console({
8 |   stdout: process.stdout, stderr: process.stderr,
9 |   inspectOptions: {depth: null}
10 | })
11 |
12 | export type Id = [agent: string, seq: number]
13 | export type Version = Record<string, number> // Last seen seq for each agent.
14 |
15 | // The Item type used here matches crdts.ts, minus the automerge / sync9 only fields (seq, insertAfter).
16 | type Item<T> = {
17 |   content: T,
18 |   id: Id,
19 |
20 |   // Left and right implicit in document list.
21 |   // null represents document's root / end.
22 |   originLeft: Id | null,
23 |   originRight: Id | null,
24 |
25 |   isDeleted: boolean,
26 | }
27 |
28 | export interface Doc<T = string> {
29 |   content: Item<T>[] // Could take Item as a type parameter, but eh. This is better for demos.
30 |
31 |   version: Version // agent => last seen seq.
32 |   length: number // Number of items not deleted
33 |
34 |   maxSeq: number // Only for AM.
35 | }
36 |
37 | export const newDoc = <T>(): Doc<T> => ({
38 |   content: [],
39 |   version: {},
40 |   length: 0,
41 |   maxSeq: 0,
42 | })
43 |
44 | // **** Common code and helpers
45 |
46 | // We never actually compare the third argument in sync9.
47 | const idEq2 = (a: Id | null, agent: string, seq: number): boolean => (
48 |   a != null && (a[0] === agent && a[1] === seq)
49 | )
50 | const idEq = (a: Id | null, b: Id | null): boolean => (
51 |   a == b || (a != null && b != null && a[0] === b[0] && a[1] === b[1])
52 | )
53 |
54 | let hits = 0
55 | let misses = 0
56 |
57 | // idx_hint is a small optimization so when we know the general area of
58 | // an item, we search nearby instead of just scanning the whole document.
59 | const findItem2 = <T>(doc: Doc<T>, needle: Id | null, atEnd: boolean = false, idx_hint: number = -1): number => {
60 |   if (needle == null) return -1
61 |   else {
62 |     const [agent, seq] = needle
63 |     // This little optimization *halves* the time to run the editing trace benchmarks.
64 |     if (idx_hint >= 0 && idx_hint < doc.content.length) {
65 |       const hint_item = doc.content[idx_hint]
66 |       if ((!atEnd && idEq2(hint_item.id, agent, seq))
67 |         || (hint_item.content != null && atEnd && idEq2(hint_item.id, agent, seq))) {
68 |         hits++
69 |         return idx_hint
70 |       }
71 |       // Try nearby.
72 |       // const RANGE = 10
73 |       // for (let i = idx_hint < RANGE ? 0 : idx_hint - RANGE; i < doc.content.length && i < idx_hint + RANGE; i++) {
74 |       //   const item = doc.content[i]
75 |       //   if ((!atEnd && idEq2(item.id, agent, seq))
76 |       //     || (item.content != null && atEnd && idEq2(item.id, agent, seq))) {
77 |       //     hits++
78 |       //     return i
79 |       //   }
80 |       // }
81 |     }
82 |
83 |     misses++
84 |     const idx = doc.content.findIndex(({content, id}) => (
85 |       (!atEnd && idEq2(id, agent, seq)) || (content != null && atEnd && idEq2(id, agent, seq)))
86 |     )
87 |     // : doc.content.findIndex(({id}) => idEq(id, needle))
88 |     if (idx < 0) throw Error('Could not find item') // Could use a ternary if not for this!
89 |     return idx
90 |   }
91 | }
92 |
93 | const findItem = <T>(doc: Doc<T>, needle: Id | null, idx_hint: number = -1): number => (
94 |   findItem2(doc, needle, false, idx_hint)
95 | )
96 |
97 | // const getNextSeq = (doc: Doc, agent: string): number => {
98 |   // const last = doc.version[agent]
99 |   // return last == null ? 0 : last + 1
100 | // }
101 |
102 | const findItemAtPos = <T>(doc: Doc<T>, pos: number, stick_end: boolean = false): number => {
103 |   let i = 0
104 |   // console.log('pos', pos, doc.length, doc.content.length)
105 |   for (; i < doc.content.length; i++) {
106 |     const item = doc.content[i]
107 |     if (stick_end && pos === 0) return i
108 |     else if (item.isDeleted || item.content == null) continue
109 |     else if (pos === 0) return i
110 |
111 |     pos--
112 |   }
113 |
114 |   if (pos === 0) return i
115 |   else throw Error('past end of the document')
116 | }
117 |
118 | // const nextSeq = (agent: string): number =>
119 |
120 | export function localInsert<T>(doc: Doc<T>, agent: string, pos: number, content: T) {
121 |   let i = findItemAtPos(doc, pos)
122 |   integrate(doc, {
123 |     content,
124 |     id: [agent, (doc.version[agent] ?? -1) + 1],
125 |     isDeleted: false,
126 |     originLeft: doc.content[i - 1]?.id ?? null,
127 |     originRight: doc.content[i]?.id ?? null,
128 |   }, i)
129 | }
130 |
131 | export const localDelete = <T>(doc: Doc<T>, agent: string, pos: number): void => {
132 |   // This is very incomplete.
133 |   const item = doc.content[findItemAtPos(doc, pos)]
134 |   if (!item.isDeleted) {
135 |     item.isDeleted = true
136 |     doc.length -= 1
137 |   }
138 | }
139 |
140 | export const getArray = <T>(doc: Doc<T>): T[] => (
141 |   doc.content.filter(i => !i.isDeleted && i.content != null).map(i => i.content!)
142 | )
143 |
144 | export const printDoc = <T>(doc: Doc<T>) => {
145 |   const depth: Record<string, number> = {}
146 |   // const kForId = (id: Id, c: T | null) => `${id[0]} ${id[1]} ${id[2] ?? c != null}`
147 |   const kForItem = (id: Id) => `${id[0]} ${id[1]}`
148 |   for (const i of doc.content) {
149 |     const d = i.originLeft == null ? 0 : depth[kForItem(i.originLeft)] + 1
150 |     depth[kForItem(i.id)] = d
151 |
152 |     let content = `${i.content == null
153 |       ? '.'
154 |       : i.isDeleted ? chalk.strikethrough(i.content) : chalk.yellow(i.content)
155 |     } at [${i.id}] (parent [${i.originLeft}])`
156 |     content += ` originRight [${i.originRight}]`
157 |     // console.log(`${'| '.repeat(d)}${i.content == null ? chalk.strikethrough(content) : content}`)
158 |     console.log(`${'| '.repeat(d)}${i.content == null ? chalk.grey(content) : content}`)
159 |   }
160 | }
161 |
162 | export const isInVersion = (id: Id | null, version: Version) => {
163 |   if (id == null) return true
164 |   const seq = version[id[0]]
165 |   return seq != null && seq >= id[1]
166 | }
167 |
168 | export const canInsertNow = <T>(op: Item<T>, doc: Doc<T>): boolean => (
169 |   // We need op.id to not be in doc.version, but originLeft and originRight to be in.
170 |   // We're also inserting each item from each agent in sequence.
171 |   !isInVersion(op.id, doc.version)
172 |   && (op.id[1] === 0 || isInVersion([op.id[0], op.id[1] - 1], doc.version))
173 |   && isInVersion(op.originLeft, doc.version)
174 |   && isInVersion(op.originRight, doc.version)
175 | )
176 |
177 | // Merge all missing items from src into dest.
178 | // NOTE: This currently does not support moving deletes!
179 | export const mergeInto = <T>(dest: Doc<T>, src: Doc<T>) => {
180 |   // The list of operations we need to integrate
181 |   const missing: (Item<T> | null)[] = src.content.filter(op => op.content != null && !isInVersion(op.id, dest.version))
182 |   let remaining = missing.length
183 |
184 |   while (remaining > 0) {
185 |     // Find the next item in remaining and insert it.
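    // Each pass sweeps the whole `missing` list, integrating every op whose
    // causal dependencies (its origins, plus the preceding op from the same
    // agent) are already covered by dest.version. The assert below checks
    // that every pass makes progress, which guarantees termination.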
186 |     let mergedOnThisPass = 0
187 |
188 |     for (let i = 0; i < missing.length; i++) {
189 |       const op = missing[i]
190 |       if (op == null || !canInsertNow(op, dest)) continue
191 |       integrate(dest, op)
192 |       missing[i] = null
193 |       remaining--
194 |       mergedOnThisPass++
195 |     }
196 |
197 |     assert(mergedOnThisPass)
198 |   }
199 | }
200 |
201 |
202 | // *** Per algorithm integration functions. Note each CRDT will only use
203 | // one of these integration methods depending on the desired semantics.
204 |
205 | // This is a slight modification of yjs with a few tweaks to make some
206 | // of the CRDT puzzles resolve better.
207 | export function integrate<T>(doc: Doc<T>, newItem: Item<T>, idx_hint: number = -1) {
208 |   const lastSeen = doc.version[newItem.id[0]] ?? -1
209 |   if (newItem.id[1] !== lastSeen + 1) throw Error('Operations out of order')
210 |   doc.version[newItem.id[0]] = newItem.id[1]
211 |
212 |   let left = findItem(doc, newItem.originLeft, idx_hint - 1)
213 |   let destIdx = left + 1
214 |   let right = newItem.originRight == null ? doc.content.length : findItem(doc, newItem.originRight, idx_hint)
215 |   let scanning = false
216 |
217 |   for (let i = destIdx; ; i++) {
218 |     // Inserting at the end of the document. Just insert.
219 |     if (!scanning) destIdx = i
220 |     if (i === doc.content.length) break
221 |     if (i === right) break // No ambiguity / concurrency. Insert here.
222 |
223 |     let other = doc.content[i]
224 |
225 |     let oleft = findItem(doc, other.originLeft, idx_hint - 1)
226 |     let oright = other.originRight == null ? doc.content.length : findItem(doc, other.originRight, idx_hint)
227 |
228 |     // The logic below summarizes to:
229 |     // if (oleft < left || (oleft === left && oright === right && newItem.id[0] < other.id[0])) break
230 |     // if (oleft === left) scanning = oright < right
231 |
232 |     // Ok now we implement the Punnett square of behaviour
233 |     if (oleft < left) {
234 |       // Top row. Insert, insert, arbitrary (insert)
235 |       break
236 |     } else if (oleft === left) {
237 |       // Middle row.
238 |       if (oright < right) {
239 |         // This is tricky. We're looking at an item we *might* insert after - but we can't tell yet!
240 |         scanning = true
241 |         continue
242 |       } else if (oright === right) {
243 |         // Raw conflict. Order based on user agents.
244 |         if (newItem.id[0] < other.id[0]) break
245 |         else {
246 |           scanning = false
247 |           continue
248 |         }
249 |       } else { // oright > right
250 |         scanning = false
251 |         continue
252 |       }
253 |     } else { // oleft > left
254 |       // Bottom row. Arbitrary (skip), skip, skip
255 |       continue
256 |     }
257 |   }
258 |
259 |   // We've found the position. Insert here.
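  // destIdx is the index of the first item the new item should precede.
  // Tombstones still occupy slots in doc.content, so the visible length is
  // only bumped for non-deleted items.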
260 | doc.content.splice(destIdx, 0, newItem) 261 | if (!newItem.isDeleted) doc.length += 1 262 | } 263 | 264 | 265 | // const yjsModRle: Algorithm = { 266 | // localInsert, 267 | // integrate, 268 | // printDoc, 269 | // } 270 | 271 | export const printDebugStats = () => { 272 | console.log('hits', hits, 'misses', misses) 273 | } 274 | 275 | 276 | // ;(() => { 277 | // // console.clear() 278 | 279 | // let doc1 = newDoc() 280 | 281 | // localInsert(doc1, 'a', 0, 'x') 282 | // localInsert(doc1, 'a', 1, 'y') 283 | // localInsert(doc1, 'a', 0, 'z') // zxy 284 | 285 | // // printDoc(doc1) 286 | 287 | // let doc2 = newDoc() 288 | 289 | // localInsert(doc2, 'b', 0, 'a') 290 | // localInsert(doc2, 'b', 1, 'b') 291 | // // localInsert(doc2, 'b', 2, 'c') 292 | 293 | // mergeInto(doc1, doc2) 294 | 295 | // printDoc(doc1) 296 | 297 | // // console.log('\n\n\n') 298 | // })() -------------------------------------------------------------------------------- /sync9.js: -------------------------------------------------------------------------------- 1 | const init_loom = create_loom(null, () => {}) 2 | init_loom.set('=[]') 3 | 4 | export function make(id = Math.random().toString(36).slice(2)) { 5 | let loom = create_loom({id}, () => {}) 6 | loom = merge(loom, init_loom) 7 | return loom 8 | } 9 | 10 | export function merge(loom1, loom2) { 11 | var new_loom1 = create_loom(JSON.parse(JSON.stringify(loom1)), (peer, msg) => { 12 | if (peer !== 'hi') new_loom2.receive(msg) 13 | }) 14 | var new_loom2 = create_loom(JSON.parse(JSON.stringify(loom2)), (peer, msg) => { 15 | if (peer !== 'hi') new_loom1.receive(msg) 16 | }) 17 | 18 | new_loom1.get('a') 19 | 20 | return new_loom1 21 | } 22 | 23 | export function get_content(loom) { 24 | return loom.read() 25 | } 26 | 27 | export function insert(loom, position, content) { 28 | loom.set(`[${position}:${position}]=[${JSON.stringify(content)}]`) 29 | } 30 | export function insertMany(loom, position, ...content) { 31 | loom.set(`[${position}:${position}]=${JSON.stringify(content)}`) 32 | } 33 | 34 | 35 | function create_loom(L, send) { 36 | L = L ?? {} 37 | 38 | if (!L.id) L.id = Math.random().toString(36).slice(2) 39 | if (!L.next_seq) L.next_seq = 0 40 | 41 | L.S = L.S ?? null 42 | L.T = L.T ?? {} 43 | L.current_version = L.current_version ?? {} 44 | 45 | L.peers = L.peers ?? {} 46 | L.version_cache = L.version_cache ?? {} 47 | L.fissures = L.fissures ?? {} 48 | L.acked_boundary = L.acked_boundary ?? {} 49 | L.unack_boundary = L.unack_boundary ?? {} 50 | L.acks_in_process = L.acks_in_process ?? 
{} 51 | 52 | var orig_send = send 53 | send = (to, msg) => { 54 | orig_send(to, {peer: L.id, conn: L.peers[to], ...msg}) 55 | } 56 | 57 | L.get = peer => { 58 | send(peer, {cmd: 'get', conn: Math.random().toString(36).slice(2)}) 59 | } 60 | 61 | L.forget = peer => { 62 | send(peer, {cmd: 'forget'}) 63 | } 64 | 65 | L.disconnect = peer => { 66 | if (!L.peers[peer]) return 67 | var conn = L.peers[peer] 68 | delete L.peers[peer] 69 | 70 | var versions = {} 71 | var ack_versions = ancestors(L.acked_boundary) 72 | Object.keys(L.T).forEach(v => { 73 | if (!ack_versions[v] || L.acked_boundary[v]) versions[v] = true 74 | }) 75 | 76 | L.receive({cmd: 'fissure', fissure: {a: L.id, b: peer, conn, versions, time: Date.now()}}) 77 | } 78 | 79 | L.set = (...patches) => { 80 | var version = `${L.id}@${L.next_seq++}` 81 | L.receive({cmd: 'set', version, parents: {...L.current_version}, patches}) 82 | return version 83 | } 84 | 85 | L.read = (is_anc) => { 86 | if (!is_anc) is_anc = () => true 87 | else if (typeof(is_anc) == 'string') { 88 | var ancs = loom1.ancestors({[is_anc]: true}) 89 | is_anc = v => ancs[v] 90 | } else if (typeof(is_anc) == 'object') { 91 | var ancs = loom1.ancestors(is_anc) 92 | is_anc = v => ancs[v] 93 | } 94 | 95 | return rec_read(L.S) 96 | function rec_read(x) { 97 | if (x && typeof(x) == 'object') { 98 | if (x.t == 'lit') return JSON.parse(JSON.stringify(x.S)) 99 | if (x.t == 'val') return rec_read(space_dag_get(x.S, 0, is_anc)) 100 | if (x.t == 'obj') { 101 | var o = {} 102 | Object.entries(x.S).forEach(([k, v]) => { 103 | var x = rec_read(v) 104 | if (x != null) o[k] = x 105 | }) 106 | return o 107 | } 108 | if (x.t == 'arr') { 109 | var a = [] 110 | traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => { 111 | if (!deleted) node.elems.forEach((e) => a.push(rec_read(e))) 112 | }, true) 113 | return a 114 | } 115 | if (x.t == 'str') { 116 | var s = [] 117 | traverse_space_dag(x.S, is_anc, (node, _, __, ___, ____, deleted) => { 118 | if (!deleted) s.push(node.elems) 119 | }, true) 120 | return s.join('') 121 | } 122 | throw Error('bad') 123 | } return x 124 | } 125 | } 126 | 127 | L.receive = ({cmd, version, parents, patches, fissure, versions, fissures, unack_boundary, min_leaves, peer, conn}) => { 128 | if (cmd == 'get' || cmd == 'get_back') { 129 | // if (L.peers[peer]) throw Error('bad') 130 | L.peers[peer] = conn 131 | 132 | if (cmd == 'get') send(peer, {cmd: 'get_back'}) 133 | send(peer, {cmd: 'welcome', 134 | versions: generate_braid(parents), 135 | fissures: Object.values(L.fissures), 136 | parents: parents && Object.keys(parents).length ? 
get_leaves(ancestors(parents, true)) : {} 137 | }) 138 | } else if (cmd == 'forget') { 139 | if (!L.peers[peer]) throw Error('bad') 140 | delete L.peers[peer] 141 | L.acks_in_process = {} 142 | } else if (cmd == 'set') { 143 | for (const p in parents) if (!L.T[p]) return send(peer, {cmd: 'error'}) 144 | 145 | if (!peer || !L.T[version]) { 146 | var rebased_patches = add_version(version, parents, patches) 147 | for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'set', version, parents, patches}) 148 | 149 | L.acks_in_process[version] = {origin: peer, count: Object.keys(L.peers).length} 150 | if (peer) L.acks_in_process[version].count-- 151 | } else if (L.acks_in_process[version]) L.acks_in_process[version].count-- 152 | 153 | check_ack_count(version) 154 | return rebased_patches 155 | } else if (cmd == 'ack1') { 156 | if (L.acks_in_process[version]) { 157 | L.acks_in_process[version].count-- 158 | check_ack_count(version) 159 | } 160 | } else if (cmd == 'ack2') { 161 | if (!L.T[version]) return 162 | if (ancestors(L.unack_boundary)[version]) return 163 | if (ancestors(L.acked_boundary)[version]) return 164 | add_full_ack_leaf(version) 165 | for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'ack2', version}) 166 | } else if (cmd == 'fissure') { 167 | var key = fissure.a + ':' + fissure.b + ':' + fissure.conn 168 | if (!L.fissures[key]) { 169 | L.fissures[key] = fissure 170 | L.acks_in_process = {} 171 | for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'fissure', fissure}) 172 | if (fissure.b == L.id) L.receive({cmd: 'fissure', fissure: {...fissure, a: L.id, b: fissure.a}}) 173 | } 174 | } else if (cmd == 'welcome') { 175 | var versions_to_add = {} 176 | versions.forEach(v => versions_to_add[v.version] = v.parents) 177 | versions.forEach(v => { 178 | if (L.T[v.version]) { 179 | remove_ancestors(v.version) 180 | function remove_ancestors(v) { 181 | if (versions_to_add[v]) { 182 | Object.keys(versions_to_add[v]).forEach(remove_ancestors) 183 | delete versions_to_add[v] 184 | } 185 | } 186 | } 187 | }) 188 | 189 | var send_error = () => send(peer, {cmd: 'error'}) 190 | 191 | var rebased_patches = [] 192 | var added_versions = [] 193 | for (var v of versions) { 194 | if (versions_to_add[v.version]) { 195 | if (!Object.keys(v.parents).every(p => L.T[p])) return send_error() 196 | 197 | rebased_patches = rebased_patches.concat(add_version(v.version, v.parents, v.patches, v.sort_keys)) 198 | added_versions.push(v) 199 | } 200 | } 201 | 202 | if (((min_leaves && Object.keys(min_leaves).some(k => !L.T[k])) || (unack_boundary && Object.keys(unack_boundary).some(k => !L.T[k])))) return send_error() 203 | 204 | var new_fissures = [] 205 | var gen_fissures = [] 206 | fissures.forEach(f => { 207 | var key = f.a + ':' + f.b + ':' + f.conn 208 | if (!L.fissures[key]) { 209 | 210 | new_fissures.push(f) 211 | L.fissures[key] = f 212 | 213 | if (f.b == L.id) gen_fissures.push({...f, a: L.id, b: f.a}) 214 | } 215 | }) 216 | 217 | if (!unack_boundary) unack_boundary = {...L.current_version} 218 | 219 | var our_conn_versions = ancestors(L.T, L.unack_boundary) 220 | var new_conn_versions = ancestors(L.T, unack_boundary) 221 | 222 | Object.keys(L.unack_boundary).forEach(x => { 223 | if (new_conn_versions[x] && !unack_boundary[x]) 224 | delete L.unack_boundary[x] 225 | }) 226 | Object.keys(unack_boundary).forEach(x => { 227 | if (!our_conn_versions[x]) L.unack_boundary[x] = true 228 | }) 229 | 230 | if (!min_leaves) { 231 | if (versions.length === 0 && (!parents || 
Object.keys(parents).length === 0)) 232 | min_leaves = {...L.current_version} 233 | else { 234 | min_leaves = parents ? {...parents} : {} 235 | versions.forEach(v => { 236 | if (!versions_to_add[v.version]) min_leaves[v.version] = true 237 | }) 238 | min_leaves = get_leaves(ancestors(min_leaves, true)) 239 | } 240 | } 241 | 242 | var min_versions = ancestors(min_leaves) 243 | var ack_versions = ancestors(L.acked_boundary) 244 | Object.keys(L.acked_boundary).forEach(x => { 245 | if (!min_versions[x]) delete L.acked_boundary[x] 246 | }) 247 | Object.keys(min_leaves).forEach(x => { 248 | if (ack_versions[x]) L.acked_boundary[x] = true 249 | }) 250 | 251 | L.acks_in_process = {} 252 | 253 | if (added_versions.length > 0 || new_fissures.length > 0) { 254 | for (let p of Object.keys(L.peers)) if (p != peer) send(p, {cmd: 'welcome', key, versions: added_versions, unack_boundary,min_leaves, fissures: new_fissures}) 255 | } 256 | 257 | gen_fissures.forEach(f => L.receive({cmd: 'fissure', fissure: f})) 258 | 259 | return rebased_patches 260 | } 261 | } 262 | 263 | var is_lit = x => !x || typeof(x) != 'object' || x.t == 'lit' 264 | var get_lit = x => (x && typeof(x) == 'object' && x.t == 'lit') ? x.S : x 265 | let make_lit = x => (x && typeof(x) == 'object') ? {t: 'lit', S: x} : x 266 | 267 | function prune() { 268 | return 269 | var unremovable = {} 270 | 271 | Object.entries(L.fissures).forEach(x => { 272 | var other_key = x[1].b + ':' + x[1].a + ':' + x[1].conn 273 | var other = L.fissures[other_key] 274 | if (other) { 275 | delete L.fissures[x[0]] 276 | delete L.fissures[other_key] 277 | } 278 | }) 279 | 280 | if (L.fissure_lifetime != null) { 281 | var now = Date.now() 282 | Object.entries(L.fissures).forEach(([k, f]) => { 283 | if (f.time == null) f.time = now 284 | if (f.time <= now - L.fissure_lifetime) { 285 | delete L.fissures[k] 286 | } 287 | }) 288 | } 289 | 290 | var keep_us = {} 291 | 292 | Object.values(L.fissures).forEach(f => { 293 | Object.keys(f.versions).forEach(v => keep_us[v] = true) 294 | }) 295 | 296 | var acked = ancestors(L.T, L.acked_boundary) 297 | Object.keys(L.T).forEach(x => { 298 | if (!acked[x] || L.acked_boundary[x]) keep_us[x] = true 299 | }) 300 | 301 | var children = {} 302 | Object.entries(L.T).forEach(([v, parents]) => { 303 | Object.keys(parents).forEach(parent => { 304 | if (!children[parent]) children[parent] = {} 305 | children[parent][v] = true 306 | }) 307 | }) 308 | 309 | var to_bubble = {} 310 | var bubble_tops = {} 311 | var bubble_bottoms = {} 312 | 313 | function mark_bubble(bottom, top, tag) { 314 | if (!to_bubble[bottom]) { 315 | to_bubble[bottom] = tag 316 | if (bottom !== top) Object.keys(L.T[bottom]).forEach(p => mark_bubble(p, top, tag)) 317 | } 318 | } 319 | 320 | var done = {} 321 | function f(cur) { 322 | if (!L.T[cur]) return 323 | if (done[cur]) return 324 | done[cur] = true 325 | 326 | if (!to_bubble[cur] || bubble_tops[cur]) { 327 | var bubble_top = find_one_bubble(cur) 328 | if (bubble_top) { 329 | delete to_bubble[cur] 330 | mark_bubble(cur, bubble_top, bubble_tops[cur] || cur) 331 | bubble_tops[bubble_top] = bubble_tops[cur] || cur 332 | bubble_bottoms[bubble_tops[cur] || cur] = bubble_top 333 | } 334 | } 335 | 336 | Object.keys(L.T[cur]).forEach(f) 337 | } 338 | Object.keys(L.current_version).forEach(f) 339 | 340 | function find_one_bubble(cur) { 341 | var seen = {[cur]: true} 342 | var q = Object.keys(L.T[cur]) 343 | var expecting = Object.fromEntries(q.map(x => [x, true])) 344 | while (q.length) { 345 | cur = q.pop() 346 | if 
(!L.T[cur]) return null 347 | if (keep_us[cur]) return null 348 | if (Object.keys(children[cur]).every(c => seen[c])) { 349 | seen[cur] = true 350 | delete expecting[cur] 351 | if (!Object.keys(expecting).length) return cur 352 | 353 | Object.keys(L.T[cur]).forEach(p => { 354 | q.push(p) 355 | expecting[p] = true 356 | }) 357 | } 358 | } 359 | return null 360 | } 361 | 362 | to_bubble = Object.fromEntries(Object.entries(to_bubble).map( 363 | ([v, bub]) => [v, [bub, bubble_bottoms[bub]]] 364 | )) 365 | 366 | apply_bubbles(to_bubble) 367 | } 368 | 369 | function add_full_ack_leaf(version) { 370 | var marks = {} 371 | function f(v) { 372 | if (!marks[v]) { 373 | marks[v] = true 374 | delete L.unack_boundary[v] 375 | delete L.acked_boundary[v] 376 | delete L.acks_in_process[v] 377 | Object.keys(L.T[v]).forEach(f) 378 | } 379 | } 380 | f(version) 381 | 382 | L.acked_boundary[version] = true 383 | prune(L) 384 | } 385 | 386 | function check_ack_count(version) { 387 | if (L.acks_in_process[version] && L.acks_in_process[version].count == 0) { 388 | if (L.acks_in_process[version].origin) { 389 | send(L.acks_in_process[version].origin, {cmd: 'ack1', version}) 390 | } else { 391 | add_full_ack_leaf(version) 392 | for (let p of Object.keys(L.peers)) send(p, {cmd: 'ack2', version}) 393 | } 394 | } 395 | } 396 | 397 | function generate_braid(versions) { 398 | var anc = versions && Object.keys(versions).length ? ancestors(versions, true) : {} 399 | var is_anc = x => anc[x] 400 | 401 | if (Object.keys(L.T).length === 0) return [] 402 | 403 | return Object.entries(L.version_cache).filter(x => !is_anc(x[0])).map(([version, set_message]) => { 404 | return L.version_cache[version] = set_message || generate_set_message(version) 405 | }) 406 | 407 | function generate_set_message(version) { 408 | if (!Object.keys(L.T[version]).length) { 409 | return { 410 | version, 411 | parents: {}, 412 | patches: [` = ${JSON.stringify(L.read(v => v == version))}`] 413 | } 414 | } 415 | 416 | var is_lit = x => !x || typeof(x) !== 'object' || x.t === 'lit' 417 | var get_lit = x => (x && typeof(x) === 'object' && x.t === 'lit') ? 
x.S : x 418 | 419 | var ancs = ancestors({[version]: true}) 420 | delete ancs[version] 421 | var is_anc = x => ancs[x] 422 | var path = [] 423 | var patches = [] 424 | var sort_keys = {} 425 | recurse(L.S) 426 | function recurse(x) { 427 | if (is_lit(x)) { 428 | } else if (x.t === 'val') { 429 | space_dag_generate_braid(x.S, version, is_anc).forEach(s => { 430 | if (s[2].length) { 431 | patches.push(`${path.join('')} = ${JSON.stringify(s[2][0])}`) 432 | if (s[3]) sort_keys[patches.length - 1] = s[3] 433 | } 434 | }) 435 | traverse_space_dag(x.S, is_anc, node => { 436 | node.elems.forEach(recurse) 437 | }) 438 | } else if (x.t === 'arr') { 439 | space_dag_generate_braid(x.S, version, is_anc).forEach(s => { 440 | patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`) 441 | if (s[3]) sort_keys[patches.length - 1] = s[3] 442 | }) 443 | var i = 0 444 | traverse_space_dag(x.S, is_anc, node => { 445 | node.elems.forEach(e => { 446 | path.push(`[${i++}]`) 447 | recurse(e) 448 | path.pop() 449 | }) 450 | }) 451 | } else if (x.t === 'obj') { 452 | Object.entries(x.S).forEach(e => { 453 | path.push('[' + JSON.stringify(e[0]) + ']') 454 | recurse(e[1]) 455 | path.pop() 456 | }) 457 | } else if (x.t === 'str') { 458 | space_dag_generate_braid(x.S, version, is_anc).forEach(s => { 459 | patches.push(`${path.join('')}[${s[0]}:${s[0] + s[1]}] = ${JSON.stringify(s[2])}`) 460 | if (s[3]) sort_keys[patches.length - 1] = s[3] 461 | }) 462 | } 463 | } 464 | 465 | return { 466 | version, 467 | parents: {...L.T[version]}, 468 | patches, 469 | sort_keys 470 | } 471 | } 472 | } 473 | 474 | function apply_bubbles(to_bubble) { 475 | function recurse(x) { 476 | if (is_lit(x)) return x 477 | if (x.t == 'val') { 478 | space_dag_apply_bubbles(x.S, to_bubble) 479 | traverse_space_dag(x.S, () => true, node => { 480 | node.elems = node.elems.slice(0, 1).map(recurse) 481 | }, true) 482 | if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.length == 1 && is_lit(x.S.elems[0])) return x.S.elems[0] 483 | return x 484 | } 485 | if (x.t == 'arr') { 486 | space_dag_apply_bubbles(x.S, to_bubble) 487 | traverse_space_dag(x.S, () => true, node => { 488 | node.elems = node.elems.map(recurse) 489 | }, true) 490 | if (x.S.nexts.length == 0 && !x.S.next && x.S.elems.every(is_lit) && !Object.keys(x.S.deleted_by).length) return {t: 'lit', S: x.S.elems.map(get_lit)} 491 | return x 492 | } 493 | if (x.t == 'obj') { 494 | Object.entries(x.S).forEach(e => { 495 | var y = x.S[e[0]] = recurse(e[1]) 496 | if (y == null) delete x.S[e[0]] 497 | }) 498 | if (Object.values(x.S).every(is_lit)) { 499 | var o = {} 500 | Object.entries(x.S).forEach(e => o[e[0]] = get_lit(e[1])) 501 | return {t: 'lit', S: o} 502 | } 503 | return x 504 | } 505 | if (x.t == 'str') { 506 | space_dag_apply_bubbles(x.S, to_bubble) 507 | if (x.S.nexts.length == 0 && !x.S.next && !Object.keys(x.S.deleted_by).length) return x.S.elems 508 | return x 509 | } 510 | } 511 | L.S = recurse(L.S) 512 | 513 | Object.entries(to_bubble).forEach(([version, bubble]) => { 514 | if (version === bubble[1]) 515 | L.T[bubble[0]] = L.T[bubble[1]] 516 | if (version !== bubble[0]) { 517 | delete L.T[version] 518 | delete L.version_cache[version] 519 | } else L.version_cache[version] = null 520 | }) 521 | 522 | var leaves = Object.keys(L.current_version) 523 | var acked_boundary = Object.keys(L.acked_boundary) 524 | var fiss = Object.keys(L.fissures) 525 | if (leaves.length == 1 && acked_boundary.length == 1 526 | && leaves[0] == acked_boundary[0] && fiss.length == 0) { 527 
| L.T = { [leaves[0]]: {} } 528 | L.S = make_lit(L.read()) 529 | } 530 | } 531 | 532 | function add_version(version, parents, patches, sort_keys, is_anc) { 533 | if (L.T[version]) return 534 | 535 | L.T[version] = {...parents} 536 | 537 | L.version_cache[version] = JSON.parse(JSON.stringify({ 538 | version, parents, patches, sort_keys 539 | })) 540 | 541 | Object.keys(parents).forEach(k => { 542 | if (L.current_version[k]) 543 | delete L.current_version[k] 544 | }) 545 | L.current_version[version] = true 546 | 547 | if (!sort_keys) sort_keys = {} 548 | 549 | if (!Object.keys(parents).length) { 550 | var parse = parse_patch(patches[0]) 551 | L.S = make_lit(parse.value) 552 | return patches 553 | } 554 | 555 | if (!is_anc) { 556 | if (parents == L.current_version) { 557 | is_anc = _version => _version != version 558 | } else { 559 | var ancs = ancestors(parents) 560 | is_anc = _version => ancs[_version] 561 | } 562 | } 563 | 564 | var rebased_patches = [] 565 | patches.forEach((patch, i) => { 566 | var sort_key = sort_keys[i] 567 | var parse = parse_patch(patch) 568 | var cur = resolve_path(parse) 569 | if (!parse.slice) { 570 | if (cur.t != 'val') throw Error('bad') 571 | var len = space_dag_length(cur.S, is_anc) 572 | space_dag_add_version(cur.S, version, [[0, len, [parse.delete ? null : make_lit(parse.value)]]], sort_key, is_anc) 573 | rebased_patches.push(patch) 574 | } else { 575 | if (typeof parse.value === 'string' && cur.t !== 'str') 576 | throw `Cannot splice string ${JSON.stringify(parse.value)} into non-string` 577 | if (parse.value instanceof Array && cur.t !== 'arr') 578 | throw `Cannot splice array ${JSON.stringify(parse.value)} into non-array` 579 | if (parse.value instanceof Array) 580 | parse.value = parse.value.map(x => make_lit(x)) 581 | 582 | var r0 = parse.slice[0] 583 | var r1 = parse.slice[1] 584 | if (r0 < 0 || Object.is(r0, -0) || r1 < 0 || Object.is(r1, -0)) { 585 | let len = space_dag_length(cur.S, is_anc) 586 | if (r0 < 0 || Object.is(r0, -0)) r0 = len + r0 587 | if (r1 < 0 || Object.is(r1, -0)) r1 = len + r1 588 | } 589 | 590 | var rebased_splices = space_dag_add_version(cur.S, version, [[r0, r1 - r0, parse.value]], sort_key, is_anc) 591 | for (let rebased_splice of rebased_splices) rebased_patches.push(`${parse.path.map(x => `[${JSON.stringify(x)}]`).join('')}[${rebased_splice[0]}:${rebased_splice[0] + rebased_splice[1]}] = ${JSON.stringify(rebased_splice[2])}`) 592 | } 593 | }) 594 | 595 | function resolve_path(parse) { 596 | var cur = L.S 597 | if (!cur || typeof(cur) != 'object' || cur.t == 'lit') 598 | cur = L.S = {t: 'val', S: create_space_dag_node(null, [cur])} 599 | var prev_S = null 600 | var prev_i = 0 601 | for (var i=0; i make_lit(x))) 609 | } else { 610 | if (typeof(cur.S) != 'object') throw Error('bad') 611 | new_cur.t = 'obj' 612 | new_cur.S = {} 613 | Object.entries(cur.S).forEach(e => new_cur.S[e[0]] = make_lit(e[1])) 614 | } 615 | cur = new_cur 616 | space_dag_set(prev_S, prev_i, cur, is_anc) 617 | } 618 | if (cur.t == 'obj') { 619 | let x = cur.S[key] 620 | if (!x || typeof(x) != 'object' || x.t == 'lit') 621 | x = cur.S[key] = {t: 'val', S: create_space_dag_node(null, [x == null ? null : x])} 622 | cur = x 623 | } else if (i == parse.path.length - 1 && !parse.slice) { 624 | parse.slice = [key, key + 1] 625 | parse.value = (cur.t == 'str') ? 
parse.value : [parse.value] 626 | } else if (cur.t == 'arr') { 627 | cur = space_dag_get(prev_S = cur.S, prev_i = key, is_anc) 628 | } else throw Error('bad') 629 | } 630 | if (parse.slice) { 631 | if (cur.t == 'val') cur = space_dag_get(prev_S = cur.S, prev_i = 0, is_anc) 632 | if (typeof(cur) == 'string') { 633 | cur = {t: 'str', S: create_space_dag_node(null, cur)} 634 | space_dag_set(prev_S, prev_i, cur, is_anc) 635 | } else if (cur.t == 'lit') { 636 | if (!(cur.S instanceof Array)) throw Error('bad') 637 | cur = {t: 'arr', S: create_space_dag_node(null, cur.S.map(x => make_lit(x)))} 638 | space_dag_set(prev_S, prev_i, cur, is_anc) 639 | } 640 | } 641 | return cur 642 | } 643 | 644 | return rebased_patches 645 | } 646 | 647 | function ancestors(versions, ignore_nonexistent) { 648 | var result = {} 649 | function recurse(version) { 650 | if (result[version]) return 651 | if (!L.T[version]) { 652 | if (ignore_nonexistent) return 653 | throw `The version ${version} no existo` 654 | } 655 | result[version] = true 656 | Object.keys(L.T[version]).forEach(recurse) 657 | } 658 | Object.keys(versions).forEach(recurse) 659 | return result 660 | } 661 | 662 | function get_leaves(versions) { 663 | var leaves = {...versions} 664 | Object.keys(versions).forEach(v => { 665 | Object.keys(L.T[v]).forEach(p => delete leaves[p]) 666 | }) 667 | return leaves 668 | } 669 | 670 | // L.receive({cmd: 'set', version: 'INITIAL', parents: {}, patches: '=[]'}) 671 | 672 | // if (!Object.keys(L.T).length) L.set('=[]') 673 | return L 674 | } 675 | 676 | function create_space_dag_node(version, elems, end_cap, sort_key) { 677 | return { 678 | version : version, 679 | sort_key : sort_key, 680 | elems : elems, 681 | deleted_by : {}, 682 | end_cap : end_cap, 683 | nexts : [], 684 | next : null 685 | } 686 | } 687 | 688 | function space_dag_generate_braid(S, version, is_anc) { 689 | var splices = [] 690 | 691 | function add_ins(offset, ins, sort_key, end_cap) { 692 | if (typeof(ins) !== 'string') 693 | ins = ins.map(x => read_raw(x, () => false)) 694 | if (splices.length > 0) { 695 | var prev = splices[splices.length - 1] 696 | if (prev[0] + prev[1] === offset && !end_cap && (prev[4] === 'i' || (prev[4] === 'r' && prev[1] === 0))) { 697 | prev[2] = prev[2].concat(ins) 698 | return 699 | } 700 | } 701 | splices.push([offset, 0, ins, sort_key, end_cap ? 'r' : 'i']) 702 | } 703 | 704 | function add_del(offset, del, ins) { 705 | if (splices.length > 0) { 706 | var prev = splices[splices.length - 1] 707 | if (prev[0] + prev[1] === offset && prev[4] !== 'i') { 708 | prev[1] += del 709 | return 710 | } 711 | } 712 | splices.push([offset, del, ins, null, 'd']) 713 | } 714 | 715 | var offset = 0 716 | function helper(node, _version, end_cap) { 717 | if (_version === version) { 718 | add_ins(offset, node.elems.slice(0), node.sort_key, end_cap) 719 | } else if (node.deleted_by[version] && node.elems.length > 0) { 720 | add_del(offset, node.elems.length, node.elems.slice(0, 0)) 721 | } 722 | 723 | if ((!_version || is_anc(_version)) && !Object.keys(node.deleted_by).some(is_anc)) { 724 | offset += node.elems.length 725 | } 726 | 727 | node.nexts.forEach(next => helper(next, next.version, node.end_cap)) 728 | if (node.next) helper(node.next, _version) 729 | } 730 | helper(S, null) 731 | splices.forEach(s => { 732 | // if we have replaces with 0 deletes, 733 | // make them have at least 1 delete.. 
734 | // this can happen when there are multiple replaces of the same text, 735 | // and our code above will associate those deletes with only one of them 736 | if (s[4] === 'r' && s[1] === 0) s[1] = 1 737 | }) 738 | return splices 739 | } 740 | 741 | function space_dag_apply_bubbles(S, to_bubble) { 742 | 743 | traverse_space_dag(S, () => true, node => { 744 | if (to_bubble[node.version] && to_bubble[node.version][0] != node.version) { 745 | if (!node.sort_key) node.sort_key = node.version 746 | node.version = to_bubble[node.version][0] 747 | } 748 | 749 | for (var x of Object.keys(node.deleted_by)) { 750 | if (to_bubble[x]) { 751 | delete node.deleted_by[x] 752 | node.deleted_by[to_bubble[x][0]] = true 753 | } 754 | } 755 | }, true) 756 | 757 | function set_nnnext(node, next) { 758 | while (node.next) node = node.next 759 | node.next = next 760 | } 761 | 762 | do_line(S, S.version) 763 | function do_line(node, version) { 764 | var prev = null 765 | while (node) { 766 | if (node.nexts[0] && node.nexts[0].version == version) { 767 | for (let i = 0; i < node.nexts.length; i++) { 768 | delete node.nexts[i].version 769 | delete node.nexts[i].sort_key 770 | set_nnnext(node.nexts[i], i + 1 < node.nexts.length ? node.nexts[i + 1] : node.next) 771 | } 772 | node.next = node.nexts[0] 773 | node.nexts = [] 774 | } 775 | 776 | if (node.deleted_by[version]) { 777 | node.elems = node.elems.slice(0, 0) 778 | node.deleted_by = {} 779 | if (prev) { node = prev; continue } 780 | } 781 | 782 | var next = node.next 783 | 784 | if (!node.nexts.length && next && (!node.elems.length || !next.elems.length || (Object.keys(node.deleted_by).every(x => next.deleted_by[x]) && Object.keys(next.deleted_by).every(x => node.deleted_by[x])))) { 785 | if (!node.elems.length) node.deleted_by = next.deleted_by 786 | node.elems = node.elems.concat(next.elems) 787 | node.end_cap = next.end_cap 788 | node.nexts = next.nexts 789 | node.next = next.next 790 | continue 791 | } 792 | 793 | for (let n of node.nexts) do_line(n, n.version) 794 | 795 | prev = node 796 | node = next 797 | } 798 | } 799 | } 800 | 801 | function space_dag_get(S, i, is_anc) { 802 | var ret = null 803 | var offset = 0 804 | traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => { 805 | if (i - offset < node.elems.length) { 806 | ret = node.elems[i - offset] 807 | return false 808 | } 809 | offset += node.elems.length 810 | }) 811 | return ret 812 | } 813 | 814 | function space_dag_set(S, i, v, is_anc) { 815 | var offset = 0 816 | traverse_space_dag(S, is_anc ? is_anc : () => true, (node) => { 817 | if (i - offset < node.elems.length) { 818 | node.elems[i - offset] = v 819 | return false 820 | } 821 | offset += node.elems.length 822 | }) 823 | } 824 | 825 | function space_dag_length(S, is_anc) { 826 | var count = 0 827 | traverse_space_dag(S, is_anc ? is_anc : () => true, node => { 828 | count += node.elems.length 829 | }) 830 | return count 831 | } 832 | 833 | function space_dag_break_node(node, x, end_cap, new_next) { 834 | var tail = create_space_dag_node(null, node.elems.slice(x), node.end_cap) 835 | Object.assign(tail.deleted_by, node.deleted_by) 836 | tail.nexts = node.nexts 837 | tail.next = node.next 838 | 839 | node.elems = node.elems.slice(0, x) 840 | node.end_cap = end_cap 841 | node.nexts = new_next ? 
[new_next] : [] 842 | node.next = tail 843 | 844 | return tail 845 | } 846 | 847 | function space_dag_add_version(S, version, splices, sort_key, is_anc) { 848 | 849 | var rebased_splices = [] 850 | 851 | function add_to_nexts(nexts, to) { 852 | var i = binarySearch(nexts, function (x) { 853 | if ((to.sort_key || to.version) < (x.sort_key || x.version)) return -1 854 | if ((to.sort_key || to.version) > (x.sort_key || x.version)) return 1 855 | return 0 856 | }) 857 | nexts.splice(i, 0, to) 858 | } 859 | 860 | var si = 0 861 | var delete_up_to = 0 862 | 863 | var process_patch = (node, offset, has_nexts, prev, _version, deleted) => { 864 | var s = splices[si] 865 | if (!s) return false 866 | 867 | if (deleted) { 868 | if (s[1] == 0 && s[0] == offset) { 869 | if (node.elems.length == 0 && !node.end_cap && has_nexts) return 870 | var new_node = create_space_dag_node(version, s[2], null, sort_key) 871 | 872 | rebased_splices.push([rebase_offset, 0, s[2]]) 873 | 874 | if (node.elems.length == 0 && !node.end_cap) 875 | add_to_nexts(node.nexts, new_node) 876 | else 877 | space_dag_break_node(node, 0, undefined, new_node) 878 | si++ 879 | } 880 | return 881 | } 882 | 883 | if (s[1] == 0) { 884 | var d = s[0] - (offset + node.elems.length) 885 | if (d > 0) return 886 | if (d == 0 && !node.end_cap && has_nexts) return 887 | var new_node = create_space_dag_node(version, s[2], null, sort_key) 888 | 889 | rebased_splices.push([rebase_offset + s[0] - offset, 0, s[2]]) 890 | 891 | if (d == 0 && !node.end_cap) { 892 | add_to_nexts(node.nexts, new_node) 893 | } else { 894 | space_dag_break_node(node, s[0] - offset, undefined, new_node) 895 | } 896 | si++ 897 | return 898 | } 899 | 900 | if (delete_up_to <= offset) { 901 | var d = s[0] - (offset + node.elems.length) 902 | if (d >= 0) return 903 | delete_up_to = s[0] + s[1] 904 | 905 | if (s[2]) { 906 | var new_node = create_space_dag_node(version, s[2], null, sort_key) 907 | 908 | rebased_splices.push([rebase_offset + s[0] - offset, 0, s[2]]) 909 | 910 | if (s[0] == offset && prev && prev.end_cap) { 911 | add_to_nexts(prev.nexts, new_node) 912 | } else { 913 | space_dag_break_node(node, s[0] - offset, true, new_node) 914 | return 915 | } 916 | } else { 917 | if (s[0] == offset) { 918 | } else { 919 | space_dag_break_node(node, s[0] - offset) 920 | return 921 | } 922 | } 923 | } 924 | 925 | if (delete_up_to > offset) { 926 | if (delete_up_to <= offset + node.elems.length) { 927 | if (delete_up_to < offset + node.elems.length) { 928 | space_dag_break_node(node, delete_up_to - offset) 929 | } 930 | si++ 931 | } 932 | node.deleted_by[version] = true 933 | 934 | rebased_splices.push([rebase_offset, node.elems.length, '']) 935 | 936 | return 937 | } 938 | } 939 | 940 | var f = is_anc 941 | var exit_early = {} 942 | var offset = 0 943 | var rebase_offset = 0 944 | function traverse(node, prev, version) { 945 | var rebase_deleted = Object.keys(node.deleted_by).length > 0 946 | if (!version || f(version)) { 947 | var has_nexts = node.nexts.find(next => f(next.version)) 948 | var deleted = Object.keys(node.deleted_by).some(version => f(version)) 949 | if (process_patch(node, offset, has_nexts, prev, version, deleted) == false) throw exit_early 950 | if (!deleted) offset += node.elems.length 951 | } 952 | if (!rebase_deleted) rebase_offset += node.elems.length 953 | 954 | for (var next of node.nexts) traverse(next, null, next.version) 955 | if (node.next) traverse(node.next, node, version) 956 | } 957 | try { 958 | traverse(S, null, S.version) 959 | } catch (e) { 960 | 
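    // exit_early is a sentinel thrown by process_patch (via traverse) to
    // abort the walk once every splice has been applied; anything else is a
    // real error and is rethrown below.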
961 |     }
962 | 
963 |     return rebased_splices
964 | }
965 | // In-order traversal of the space DAG: only versions passing the filter `f` are visited, `cb` may return false to stop early, and `tail_cb` (if given) fires when a chain of nodes runs out.
966 | function traverse_space_dag(S, f, cb, view_deleted, tail_cb) {
967 |     var exit_early = {}
968 |     var offset = 0
969 |     function helper(node, prev, version) {
970 |         var has_nexts = node.nexts.find(next => f(next.version))
971 |         var deleted = Object.keys(node.deleted_by).some(version => f(version))
972 |         if (view_deleted || !deleted) {
973 |             if (cb(node, offset, has_nexts, prev, version, deleted) == false)
974 |                 throw exit_early
975 |             offset += node.elems.length
976 |         }
977 |         for (var next of node.nexts)
978 |             if (f(next.version)) helper(next, null, next.version)
979 |         if (node.next) helper(node.next, node, version)
980 |         else if (tail_cb) tail_cb(node)
981 |     }
982 |     try {
983 |         helper(S, null, S.version)
984 |     } catch (e) {
985 |         if (e != exit_early) throw e
986 |     }
987 | }
988 | // Parses a patch string, e.g. parse_patch('foo.bar[2:4] = "hi"') gives {path: ['foo', 'bar'], slice: [2, 4], value: 'hi'} and parse_patch('delete foo[3]') gives {path: ['foo', 3], delete: true}.
989 | function parse_patch(patch) {
990 |     var ret = { path : [] }
991 |     var re = /^(delete)\s+|\.?([^\.\[ =]+)|\[((\-?\d+)(:\-?\d+)?|'(\\'|[^'])*'|"(\\"|[^"])*")\]|\s*=\s*([\s\S]*)/g
992 |     var m
993 |     while (m = re.exec(patch)) {
994 |         if (m[1]) ret.delete = true
995 |         else if (m[2]) ret.path.push(m[2])
996 |         else if (m[3] && m[5]) ret.slice = [JSON.parse(m[4]), JSON.parse(m[5].substr(1))]
997 |         else if (m[3]) ret.path.push(JSON.parse(m[3]))
998 |         else if (m[8]) ret.value = JSON.parse(m[8])
999 |     }
1000 |     return ret
1001 | }
1002 | 
1003 | // modified from https://stackoverflow.com/questions/22697936/binary-search-in-javascript
1004 | function binarySearch(ar, compare_fn) {
1005 |     var m = 0;
1006 |     var n = ar.length - 1;
1007 |     while (m <= n) {
1008 |         var k = (n + m) >> 1;
1009 |         var cmp = compare_fn(ar[k]);
1010 |         if (cmp > 0) {
1011 |             m = k + 1;
1012 |         } else if (cmp < 0) {
1013 |             n = k - 1;
1014 |         } else {
1015 |             return k;
1016 |         }
1017 |     }
1018 |     return m;
1019 | }
1020 | 
1021 | 
--------------------------------------------------------------------------------
/test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'assert'
2 | import seed from 'seed-random'
3 | import {Item, Algorithm, newDoc, canInsertNow, getArray, mergeInto, localDelete, Doc, yjsMod, automerge, yjs, printDebugStats, sync9, Id, fugue} from './crdts.js'
4 | import * as rle from './rle.js'
5 | 
6 | /// TESTS
7 | 
8 | let errored = false
9 | 
10 | const runTests = (algName: string, alg: Algorithm) => { // Separate scope for namespace protection.
11 |   const random = seed('ssx')
12 |   const randInt = (n: number) => Math.floor(random() * n)
13 |   const randArrItem = (arr: any[] | string) => arr[randInt(arr.length)]
14 |   const randBool = (weight: number = 0.5) => random() < weight
15 | 
16 |   const makeItem = <T>(content: T, idOrAgent: string | Id, originLeft: Id | null, originRight: Id | null, amSeq: number, sync9Parent: Id | null = originLeft, sync9InsertAfter: boolean = true): Item<T> => ({
17 |     content,
18 |     id: typeof idOrAgent === 'string' ? [idOrAgent, 0] : idOrAgent,
19 |     isDeleted: false,
20 |     originLeft: alg === sync9 ? sync9Parent : originLeft,
21 |     originRight,
22 |     insertAfter: sync9InsertAfter,
23 |     seq: amSeq ?? -1, // Only for AM.
24 |   })
25 | 
26 |   const integrateFuzzOnce = <T>(ops: Item<T>[], expectedResult: T[]): number => {
27 |     let variants = 1
28 |     const doc = newDoc()
29 | 
30 |     // Scan ops looking for candidates to integrate
31 |     for (let numIntegrated = 0; numIntegrated < ops.length; numIntegrated++) {
32 |       const candidates = []
33 |       for (const op of ops) {
34 |         if (canInsertNow(op, doc)) {
35 |           candidates.push(op)
36 |         }
37 |       }
38 | 
39 |       assert(candidates.length > 0)
40 |       variants *= candidates.length
41 |       // console.log('doc version', doc.version, 'candidates', candidates)
42 | 
43 |       // Pick one
44 |       const op = candidates[randInt(candidates.length)]
45 |       // console.log(op, doc.version)
46 |       alg.integrate(doc, op)
47 |     }
48 | 
49 |     // alg.printDoc(doc)
50 | 
51 |     try {
52 |       assert.deepStrictEqual(getArray(doc), expectedResult)
53 |     } catch(e) {
54 |       console.log()
55 |       alg.printDoc(doc)
56 |       throw e
57 |     }
58 |     // console.log(variants)
59 |     return variants // Rough guess at the number of orderings
60 |   }
61 | 
62 | 
63 |   const integrateFuzz = <T>(ops: Item<T>[], expectedResult: T[]) => {
64 |     // Integrate the passed items a bunch of times, in different orders.
65 |     let variants = integrateFuzzOnce(ops, expectedResult)
66 |     for (let i = 1; i < Math.min(variants * 3, 100); i++) {
67 |       let newVariants = integrateFuzzOnce(ops, expectedResult)
68 |       variants = Math.max(variants, newVariants)
69 |     }
70 |   }
71 | 
72 |   const test = (fn: () => void) => {
73 |     if (alg.ignoreTests && alg.ignoreTests.includes(fn.name)) {
74 |       process.stdout.write(`SKIPPING ${fn.name}\n`)
75 |     } else {
76 |       process.stdout.write(`running ${fn.name} ...`)
77 |       try {
78 |         fn()
79 |         process.stdout.write(`PASS\n`)
80 |       } catch (e: any) {
81 |         process.stdout.write(`FAIL:\n`)
82 |         console.log(e.stack)
83 |         errored = true
84 |       }
85 |     }
86 |   }
87 | 
88 |   const smoke = () => {
89 |     const doc = newDoc()
90 |     alg.integrate(doc, makeItem('a', ['A', 0], null, null, 0))
91 |     alg.integrate(doc, makeItem('b', ['A', 1], ['A', 0], null, 1))
92 | 
93 |     assert.deepStrictEqual(getArray(doc), ['a', 'b'])
94 |   }
95 | 
96 |   const smokeMerge = () => {
97 |     const doc = newDoc()
98 |     alg.integrate(doc, makeItem('a', ['A', 0], null, null, 0))
99 |     alg.integrate(doc, makeItem('b', ['A', 1], ['A', 0], null, 1))
100 | 
101 |     const doc2 = newDoc()
102 |     mergeInto(alg, doc2, doc)
103 |     assert.deepStrictEqual(getArray(doc2), ['a', 'b'])
104 |   }
105 | 
106 |   const concurrentAvsB = () => {
107 |     const a = makeItem('a', 'A', null, null, 0)
108 |     const b = makeItem('b', 'B', null, null, 0)
109 |     integrateFuzz([a, b], ['a', 'b'])
110 |   }
111 | 
112 |   const interleavingForward = () => {
113 |     const ops = [
114 |       makeItem('a', ['A', 0], null, null, 0),
115 |       makeItem('a', ['A', 1], ['A', 0], null, 1),
116 |       makeItem('a', ['A', 2], ['A', 1], null, 2),
117 | 
118 |       makeItem('b', ['B', 0], null, null, 0),
119 |       makeItem('b', ['B', 1], ['B', 0], null, 1),
120 |       makeItem('b', ['B', 2], ['B', 1], null, 2),
121 |     ]
122 | 
123 |     integrateFuzz(ops, ['a', 'a', 'a', 'b', 'b', 'b'])
124 |   }
125 | 
126 |   // Other variant with changed object IDs. The order should not be
127 |   // dependent on the IDs of these items. I'd love to find a better way
128 |   // to test this.
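  // Concretely: the run below replays the same six inserts as
  // interleavingForward, but the 'a' run is now authored by agents A, X and Y
  // and the 'b' run by B, C and D. An algorithm that broke ties by agent ID
  // rather than by origin would produce a different order here and fail the
  // assertion; the expected result is unchanged.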
129 | const interleavingForward2 = () => { 130 | const ops = [ 131 | makeItem('a', ['A', 0], null, null, 0), 132 | makeItem('a', ['X', 0], ['A', 0], null, 1), 133 | makeItem('a', ['Y', 0], ['X', 0], null, 2), 134 | 135 | makeItem('b', ['B', 0], null, null, 0), 136 | makeItem('b', ['C', 0], ['B', 0], null, 1), 137 | makeItem('b', ['D', 0], ['C', 0], null, 2), 138 | ] 139 | 140 | integrateFuzz(ops, ['a', 'a', 'a', 'b', 'b', 'b']) 141 | } 142 | 143 | const interleavingBackward = () => { 144 | const ops = [ 145 | makeItem('a', ['A', 0], null, null, 0), 146 | makeItem('a', ['A', 1], null, ['A', 0], 1), 147 | makeItem('a', ['A', 2], null, ['A', 1], 2), 148 | 149 | makeItem('b', ['B', 0], null, null, 0), 150 | makeItem('b', ['B', 1], null, ['B', 0], 1), 151 | makeItem('b', ['B', 2], null, ['B', 1], 2), 152 | ] 153 | 154 | integrateFuzz(ops, ['a', 'a', 'a', 'b', 'b', 'b']) 155 | } 156 | 157 | const interleavingBackward2 = () => { 158 | const ops = [ 159 | makeItem('a', ['A', 0], null, null, 0), 160 | makeItem('a', ['X', 0], null, ['A', 0], 1, ['A', 0], false), 161 | 162 | makeItem('b', ['B', 0], null, null, 0), 163 | makeItem('b', ['B', 1], null, ['B', 0], 1, ['B', 0], false), 164 | ] 165 | 166 | integrateFuzz(ops, ['a', 'a', 'b', 'b']) 167 | } 168 | 169 | const withTails = () => { 170 | const ops = [ 171 | makeItem('a', ['A', 0], null, null, 0), 172 | makeItem('a0', ['A', 1], null, ['A', 0], 1, ['A', 0], false), // left 173 | makeItem('a1', ['A', 2], ['A', 0], null, 2), // right 174 | 175 | makeItem('b', ['B', 0], null, null, 0), 176 | makeItem('b0', ['B', 1], null, ['B', 0], 1, ['B', 0], false), // left 177 | makeItem('b1', ['B', 2], ['B', 0], null, 2), // right 178 | ] 179 | 180 | integrateFuzz(ops, ['a0', 'a', 'a1', 'b0', 'b', 'b1']) 181 | } 182 | 183 | const withTails2 = () => { 184 | const ops = [ 185 | makeItem('a', ['A', 0], null, null, 0), 186 | makeItem('a0', ['A', 1], null, ['A', 0], 1, ['A', 0], false), // left 187 | makeItem('a1', ['A', 2], ['A', 0], null, 2), // right 188 | 189 | makeItem('b', ['B', 0], null, null, 0), 190 | makeItem('b0', ['1', 0], null, ['B', 0], 1, ['B', 0], false), // left 191 | makeItem('b1', ['B', 1], ['B', 0], null, 2), // right 192 | ] 193 | 194 | integrateFuzz(ops, ['a0', 'a', 'a1', 'b0', 'b', 'b1']) 195 | } 196 | 197 | const localVsConcurrent = () => { 198 | // Check what happens when a top level concurrent change interacts 199 | // with a more localised change. (C vs D) 200 | const a = makeItem('a', 'A', null, null, 0) 201 | const c = makeItem('c', 'C', null, null, 0) 202 | 203 | // How do these two get ordered? 204 | const b = makeItem('b', 'B', null, null, 0) // Concurrent with a and c 205 | const d = makeItem('d', 'D', ['A', 0], ['C', 0], 1) // in between a and c 206 | 207 | // [a, b, d, c] would also be acceptable. 
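    // Either way, the origin ordering a < d < c must be preserved (d names a
    // and c as its left and right origins); only the placement of the fully
    // concurrent b is left to the algorithm, and the algorithms under test
    // converge on [a, d, b, c].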
208 | integrateFuzz([a, b, c, d], ['a', 'd', 'b', 'c']) 209 | } 210 | 211 | const fuzzer1 = () => { 212 | const ops = [ 213 | makeItem(3, ['0', 0], null, null, 0), 214 | makeItem(5, ['1', 0], null, null, 0), 215 | makeItem(9, ['1', 1], null, ['1', 0], 1), 216 | makeItem(1, ['2', 0], null, null, 0), 217 | makeItem(4, ['2', 1], ['0', 0], ['2', 0], 1), 218 | 219 | makeItem(10, ['1', 2], ['2', 1], ['1', 1], 2), 220 | makeItem(7, ['2', 2], ['2', 1], ['2', 0], 2), 221 | ] 222 | 223 | const doc = newDoc() 224 | ops.forEach(op => alg.integrate(doc, op)) 225 | console.log(getArray(doc)) 226 | } 227 | 228 | const fuzzSequential = () => { 229 | const doc = newDoc() 230 | let expectedContent: string[] = [] 231 | const alphabet = 'xyz123' 232 | const agents = 'ABCDE' 233 | let nextContent = 1 234 | 235 | for (let i = 0; i < 1000; i++) { 236 | // console.log(i) 237 | // console.log(doc) 238 | if (doc.length === 0 || randBool(0.5)) { 239 | // insert 240 | const pos = randInt(doc.length + 1) 241 | // const content: string = randArrItem(alphabet) 242 | const content = ''+nextContent++ 243 | const agent = randArrItem(agents) 244 | // console.log('insert', agent, pos, `'${content}'`) 245 | alg.localInsert(doc, agent, pos, content) 246 | expectedContent.splice(pos, 0, content) 247 | } else { 248 | // Delete 249 | const pos = randInt(doc.length) 250 | const agent = randArrItem(agents) 251 | // console.log('delete', pos) 252 | localDelete(doc, agent, pos) 253 | expectedContent.splice(pos, 1) 254 | } 255 | // console.log('->', doc) 256 | 257 | // alg.printDoc(doc) 258 | assert.deepStrictEqual(doc.length, expectedContent.length) 259 | assert.deepStrictEqual(getArray(doc), expectedContent) 260 | } 261 | } 262 | 263 | const fuzzMultidoc = () => { 264 | const agents = ['A', 'B', 'C'] 265 | for (let j = 0; j < 10; j++) { 266 | process.stdout.write('.') 267 | const docs = new Array(3).fill(null).map((_, i) => { 268 | const doc: Doc & {agent: string} = newDoc() as any 269 | doc.agent = agents[i] 270 | return doc 271 | }) 272 | 273 | const randDoc = () => docs[randInt(docs.length)] 274 | 275 | let nextItem = 0 276 | // console.log(docs) 277 | for (let i = 0; i < 1000; i++) { 278 | // console.log(i) 279 | // if (i % 100 === 0) console.log(i) 280 | 281 | // Generate some random operations 282 | for (let j = 0; j < 3; j++) { 283 | const doc = randDoc() 284 | 285 | // if (doc.length === 0 || randBool(0.5)) { 286 | if (true) { 287 | // insert 288 | const pos = randInt(doc.length + 1) 289 | const content = ++nextItem 290 | // console.log('insert', agent, pos, content) 291 | alg.localInsert(doc, doc.agent, pos, content) 292 | } else { 293 | // Delete - disabled for now because mergeInto doesn't support deletes 294 | const pos = randInt(doc.length) 295 | // console.log('delete', pos) 296 | localDelete(doc, doc.agent, pos) 297 | } 298 | } 299 | 300 | // Pick a pair of documents and merge them 301 | const a = randDoc() 302 | const b = randDoc() 303 | if (a !== b) { 304 | // console.log('merging', a.agent, b.agent) 305 | mergeInto(alg, a, b) 306 | mergeInto(alg, b, a) 307 | try { 308 | assert.deepStrictEqual(getArray(a), getArray(b)) 309 | } catch (e) { 310 | console.log('\n') 311 | alg.printDoc(a) 312 | console.log('\n ---------------\n') 313 | alg.printDoc(b) 314 | throw e 315 | } 316 | } 317 | } 318 | } 319 | } 320 | 321 | 322 | console.log(`--- Running tests for ${algName} ---`) 323 | const tests = [ 324 | smoke, 325 | smokeMerge, 326 | concurrentAvsB, 327 | interleavingForward, 328 | interleavingForward2, 329 | 
interleavingBackward, 330 | interleavingBackward2, 331 | withTails, 332 | withTails2, 333 | localVsConcurrent, 334 | fuzzSequential, 335 | fuzzMultidoc 336 | ] 337 | tests.forEach(test) 338 | // interleavingBackwardSync9() 339 | // withTails2() 340 | // withTails2Sync9() 341 | // fuzzSequential() 342 | // fuzzMultidoc() 343 | // fuzzer1() 344 | console.log('\n\n') 345 | } 346 | 347 | runTests('fugue', fugue) 348 | runTests('yjsmod', yjsMod) 349 | runTests('yjs', yjs) 350 | runTests('automerge', automerge) 351 | runTests('sync9', sync9) 352 | 353 | const yjsModRle: Algorithm = { 354 | localInsert: rle.localInsert, 355 | integrate: rle.integrate, 356 | printDoc: rle.printDoc, 357 | } 358 | 359 | runTests('yjs mod rle', yjsModRle) 360 | 361 | // console.log('hits', hits, 'misses', misses) 362 | 363 | printDebugStats() 364 | 365 | process.exit(errored ? 1 : 0) -------------------------------------------------------------------------------- /trace.ts: -------------------------------------------------------------------------------- 1 | // This is a scratch space for running tracing code output by reference_test.ts. 2 | 3 | import {DocPair, Mode} from './reference_test.js' 4 | import * as sync9 from './sync9.js' 5 | 6 | const mode: Mode = Mode.Fugue 7 | const a = new DocPair(0, mode) 8 | const b = new DocPair(1, mode) 9 | const c = new DocPair(2, mode) 10 | 11 | const merge = (a: DocPair, b: DocPair) => a.merge(b) 12 | 13 | a.insert(0, 1) 14 | a.insert(1, 2) 15 | merge(a, c) 16 | b.insert(0, 6) 17 | c.insert(2, 7) 18 | merge(b, a) 19 | b.insert(2, 14) 20 | b.algorithm.printDoc(b.sephdoc) 21 | merge(c, b) 22 | 23 | // b.algorithm.printDoc(b.sephdoc) 24 | // console.log(sync9.get_content(c.sync9!)) -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Basic Options */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | "target": "ES2020", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */ 8 | "module": "Node16", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ 9 | "moduleResolution": "Node16", 10 | // "lib": [], /* Specify library files to be included in the compilation. */ 11 | "allowJs": true, /* Allow javascript files to be compiled. */ 12 | // "checkJs": true, /* Report errors in .js files. */ 13 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ 14 | "declaration": true, /* Generates corresponding '.d.ts' file. */ 15 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 16 | "sourceMap": true, /* Generates corresponding '.map' file. */ 17 | // "outFile": "./", /* Concatenate and emit output to single file. */ 18 | "outDir": "./dist", /* Redirect output structure to the directory. */ 19 | // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 20 | // "composite": true, /* Enable project compilation */ 21 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 22 | // "removeComments": true, /* Do not emit comments to output. 
*/ 23 | // "noEmit": true, /* Do not emit outputs. */ 24 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 25 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 26 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 27 | 28 | /* Strict Type-Checking Options */ 29 | "strict": true, /* Enable all strict type-checking options. */ 30 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 31 | // "strictNullChecks": true, /* Enable strict null checks. */ 32 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 33 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 34 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 35 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 36 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 37 | 38 | /* Additional Checks */ 39 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 40 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 41 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 42 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 43 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 44 | // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ 45 | 46 | /* Module Resolution Options */ 47 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 48 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 49 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 50 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 51 | // "typeRoots": [], /* List of folders to include type definitions from. */ 52 | // "types": [], /* Type declaration files to be included in compilation. */ 53 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 54 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 55 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 56 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 57 | 58 | /* Source Map Options */ 59 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 60 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 61 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 62 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. 
*/ 63 | 64 | /* Experimental Options */ 65 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 66 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 67 | 68 | /* Advanced Options */ 69 | "skipLibCheck": true, /* Skip type checking of declaration files. */ 70 | "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ 71 | } 72 | } 73 | --------------------------------------------------------------------------------