├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md ├── build_swift.sh ├── crates ├── p2ptui │ ├── Cargo.lock │ ├── Cargo.toml │ └── src │ │ └── main.rs └── replica-swift │ ├── Bridge.swift │ ├── Cargo.toml │ ├── build.rs │ └── src │ └── lib.rs ├── examples └── blah.rs ├── js ├── package.json ├── src │ ├── fancydb │ │ ├── causal-graph.ts │ │ ├── index.ts │ │ ├── rle.ts │ │ └── stateset.ts │ ├── jsonlines.ts │ ├── node.ts │ ├── types.ts │ └── utils.ts ├── tsconfig.json └── yarn.lock └── src ├── cg_hacks.rs ├── database.rs ├── lib.rs ├── main.rs ├── protocol.rs └── stateset.rs /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | target 4 | .idea 5 | 6 | crates/replica-swift/bridge 7 | .*.swp 8 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "atty" 7 | version = "0.2.14" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" 10 | dependencies = [ 11 | "hermit-abi", 12 | "libc", 13 | "winapi", 14 | ] 15 | 16 | [[package]] 17 | name = "autocfg" 18 | version = "1.1.0" 19 | source = "registry+https://github.com/rust-lang/crates.io-index" 20 | checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" 21 | 22 | [[package]] 23 | name = "bitflags" 24 | version = "1.3.2" 25 | source = "registry+https://github.com/rust-lang/crates.io-index" 26 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 27 | 28 | [[package]] 29 | name = "bpaf" 30 | version = "0.7.2" 31 | source = "registry+https://github.com/rust-lang/crates.io-index" 32 | checksum = "a1eefd44fa3be02b360bb22b44abaf204fb381baf5d2f24e270f58bb42dc3e73" 33 | dependencies = [ 34 | "bpaf_derive", 35 | "owo-colors", 36 | ] 37 | 38 | [[package]] 39 | name = "bpaf_derive" 40 | version = "0.3.1" 41 | source = "registry+https://github.com/rust-lang/crates.io-index" 42 | checksum = "f6ec104786b13bcaaa3548a689bd7527e02de0c92ea1035e64f5be501f741790" 43 | dependencies = [ 44 | "proc-macro2", 45 | "quote", 46 | "syn", 47 | ] 48 | 49 | [[package]] 50 | name = "bumpalo" 51 | version = "3.11.1" 52 | source = "registry+https://github.com/rust-lang/crates.io-index" 53 | checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" 54 | 55 | [[package]] 56 | name = "bytes" 57 | version = "1.2.1" 58 | source = "registry+https://github.com/rust-lang/crates.io-index" 59 | checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" 60 | 61 | [[package]] 62 | name = "cassowary" 63 | version = "0.3.0" 64 | source = "registry+https://github.com/rust-lang/crates.io-index" 65 | checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" 66 | 67 | [[package]] 68 | name = "cbindgen" 69 | version = "0.24.3" 70 | source = "registry+https://github.com/rust-lang/crates.io-index" 71 | checksum = "a6358dedf60f4d9b8db43ad187391afe959746101346fe51bb978126bec61dfb" 72 | dependencies = [ 73 | "clap", 74 | "heck", 75 | "indexmap", 76 | "log", 77 | "proc-macro2", 78 | "quote", 79 | "serde", 80 | "serde_json", 81 | "syn", 82 | "tempfile", 83 | "toml", 84 | ] 85 | 86 | [[package]] 87 | name = "cfg-if" 88 | version = "1.0.0" 89 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 90 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 91 | 92 | [[package]] 93 | name = "clap" 94 | version = "3.2.23" 95 | source = "registry+https://github.com/rust-lang/crates.io-index" 96 | checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" 97 | dependencies = [ 98 | "atty", 99 | "bitflags", 100 | "clap_lex", 101 | "indexmap", 102 | "strsim", 103 | "termcolor", 104 | "textwrap", 105 | ] 106 | 107 | [[package]] 108 | name = "clap_lex" 109 | version = "0.2.4" 110 | source = "registry+https://github.com/rust-lang/crates.io-index" 111 | checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" 112 | dependencies = [ 113 | "os_str_bytes", 114 | ] 115 | 116 | [[package]] 117 | name = "content-tree" 118 | version = "0.2.0" 119 | dependencies = [ 120 | "humansize 1.1.1", 121 | "rle", 122 | "smallvec", 123 | ] 124 | 125 | [[package]] 126 | name = "crc" 127 | version = "3.0.0" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3" 130 | dependencies = [ 131 | "crc-catalog", 132 | ] 133 | 134 | [[package]] 135 | name = "crc-catalog" 136 | version = "2.1.0" 137 | source = "registry+https://github.com/rust-lang/crates.io-index" 138 | checksum = "2d0165d2900ae6778e36e80bbc4da3b5eefccee9ba939761f9c2882a5d9af3ff" 139 | 140 | [[package]] 141 | name = "crossterm" 142 | version = "0.25.0" 143 | source = "registry+https://github.com/rust-lang/crates.io-index" 144 | checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" 145 | dependencies = [ 146 | "bitflags", 147 | "crossterm_winapi", 148 | "libc", 149 | "mio", 150 | "parking_lot", 151 | "signal-hook", 152 | "signal-hook-mio", 153 | "winapi", 154 | ] 155 | 156 | [[package]] 157 | name = "crossterm_winapi" 158 | version = "0.9.0" 159 | source = "registry+https://github.com/rust-lang/crates.io-index" 160 | checksum = "2ae1b35a484aa10e07fe0638d02301c5ad24de82d310ccbd2f3693da5f09bf1c" 161 | dependencies = [ 162 | "winapi", 163 | ] 164 | 165 | [[package]] 166 | name = "diamond-types" 167 | version = "1.0.0" 168 | dependencies = [ 169 | "bumpalo", 170 | "content-tree", 171 | "crc", 172 | "humansize 2.1.0", 173 | "jumprope", 174 | "lazy_static", 175 | "libc", 176 | "lz4_flex", 177 | "num_enum", 178 | "rle", 179 | "serde", 180 | "smallvec", 181 | "smartstring", 182 | "str_indices", 183 | ] 184 | 185 | [[package]] 186 | name = "fastrand" 187 | version = "1.8.0" 188 | source = "registry+https://github.com/rust-lang/crates.io-index" 189 | checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" 190 | dependencies = [ 191 | "instant", 192 | ] 193 | 194 | [[package]] 195 | name = "getrandom" 196 | version = "0.2.8" 197 | source = "registry+https://github.com/rust-lang/crates.io-index" 198 | checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" 199 | dependencies = [ 200 | "cfg-if", 201 | "libc", 202 | "wasi", 203 | ] 204 | 205 | [[package]] 206 | name = "hashbrown" 207 | version = "0.12.3" 208 | source = "registry+https://github.com/rust-lang/crates.io-index" 209 | checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" 210 | 211 | [[package]] 212 | name = "heck" 213 | version = "0.4.0" 214 | source = "registry+https://github.com/rust-lang/crates.io-index" 215 | checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" 216 | 217 | 
[[package]] 218 | name = "hermit-abi" 219 | version = "0.1.19" 220 | source = "registry+https://github.com/rust-lang/crates.io-index" 221 | checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" 222 | dependencies = [ 223 | "libc", 224 | ] 225 | 226 | [[package]] 227 | name = "humansize" 228 | version = "1.1.1" 229 | source = "registry+https://github.com/rust-lang/crates.io-index" 230 | checksum = "02296996cb8796d7c6e3bc2d9211b7802812d36999a51bb754123ead7d37d026" 231 | 232 | [[package]] 233 | name = "humansize" 234 | version = "2.1.0" 235 | source = "registry+https://github.com/rust-lang/crates.io-index" 236 | checksum = "a866837516f34ad34fb221f3ee01fd0db75f2c2f6abeda2047dc6963fb04ad9a" 237 | dependencies = [ 238 | "libm", 239 | ] 240 | 241 | [[package]] 242 | name = "indexmap" 243 | version = "1.9.2" 244 | source = "registry+https://github.com/rust-lang/crates.io-index" 245 | checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" 246 | dependencies = [ 247 | "autocfg", 248 | "hashbrown", 249 | ] 250 | 251 | [[package]] 252 | name = "instant" 253 | version = "0.1.12" 254 | source = "registry+https://github.com/rust-lang/crates.io-index" 255 | checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" 256 | dependencies = [ 257 | "cfg-if", 258 | ] 259 | 260 | [[package]] 261 | name = "is_ci" 262 | version = "1.1.1" 263 | source = "registry+https://github.com/rust-lang/crates.io-index" 264 | checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb" 265 | 266 | [[package]] 267 | name = "itoa" 268 | version = "1.0.4" 269 | source = "registry+https://github.com/rust-lang/crates.io-index" 270 | checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" 271 | 272 | [[package]] 273 | name = "jumprope" 274 | version = "1.1.1" 275 | source = "registry+https://github.com/rust-lang/crates.io-index" 276 | checksum = "76ff13229092ab69baa5e45e49e3ebf75637ca61061b5c233b7d2eb3e91855a2" 277 | dependencies = [ 278 | "rand", 279 | "str_indices", 280 | ] 281 | 282 | [[package]] 283 | name = "lazy_static" 284 | version = "1.4.0" 285 | source = "registry+https://github.com/rust-lang/crates.io-index" 286 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 287 | 288 | [[package]] 289 | name = "libc" 290 | version = "0.2.139" 291 | source = "registry+https://github.com/rust-lang/crates.io-index" 292 | checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" 293 | 294 | [[package]] 295 | name = "libm" 296 | version = "0.2.5" 297 | source = "registry+https://github.com/rust-lang/crates.io-index" 298 | checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" 299 | 300 | [[package]] 301 | name = "lock_api" 302 | version = "0.4.9" 303 | source = "registry+https://github.com/rust-lang/crates.io-index" 304 | checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" 305 | dependencies = [ 306 | "autocfg", 307 | "scopeguard", 308 | ] 309 | 310 | [[package]] 311 | name = "log" 312 | version = "0.4.17" 313 | source = "registry+https://github.com/rust-lang/crates.io-index" 314 | checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" 315 | dependencies = [ 316 | "cfg-if", 317 | ] 318 | 319 | [[package]] 320 | name = "lz4_flex" 321 | version = "0.9.5" 322 | source = "registry+https://github.com/rust-lang/crates.io-index" 323 | checksum = "1a8cbbb2831780bc3b9c15a41f5b49222ef756b6730a95f3decfdd15903eb5a3" 324 | 
dependencies = [ 325 | "twox-hash", 326 | ] 327 | 328 | [[package]] 329 | name = "memchr" 330 | version = "2.5.0" 331 | source = "registry+https://github.com/rust-lang/crates.io-index" 332 | checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" 333 | 334 | [[package]] 335 | name = "mio" 336 | version = "0.8.5" 337 | source = "registry+https://github.com/rust-lang/crates.io-index" 338 | checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" 339 | dependencies = [ 340 | "libc", 341 | "log", 342 | "wasi", 343 | "windows-sys", 344 | ] 345 | 346 | [[package]] 347 | name = "num_cpus" 348 | version = "1.13.1" 349 | source = "registry+https://github.com/rust-lang/crates.io-index" 350 | checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" 351 | dependencies = [ 352 | "hermit-abi", 353 | "libc", 354 | ] 355 | 356 | [[package]] 357 | name = "num_enum" 358 | version = "0.5.7" 359 | source = "registry+https://github.com/rust-lang/crates.io-index" 360 | checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9" 361 | dependencies = [ 362 | "num_enum_derive", 363 | ] 364 | 365 | [[package]] 366 | name = "num_enum_derive" 367 | version = "0.5.7" 368 | source = "registry+https://github.com/rust-lang/crates.io-index" 369 | checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce" 370 | dependencies = [ 371 | "proc-macro-crate", 372 | "proc-macro2", 373 | "quote", 374 | "syn", 375 | ] 376 | 377 | [[package]] 378 | name = "once_cell" 379 | version = "1.16.0" 380 | source = "registry+https://github.com/rust-lang/crates.io-index" 381 | checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" 382 | 383 | [[package]] 384 | name = "os_str_bytes" 385 | version = "6.4.1" 386 | source = "registry+https://github.com/rust-lang/crates.io-index" 387 | checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" 388 | 389 | [[package]] 390 | name = "owo-colors" 391 | version = "3.5.0" 392 | source = "registry+https://github.com/rust-lang/crates.io-index" 393 | checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" 394 | dependencies = [ 395 | "supports-color", 396 | ] 397 | 398 | [[package]] 399 | name = "p2ptui" 400 | version = "0.1.0" 401 | dependencies = [ 402 | "crossterm", 403 | "tui", 404 | "tui-textarea", 405 | ] 406 | 407 | [[package]] 408 | name = "parking_lot" 409 | version = "0.12.1" 410 | source = "registry+https://github.com/rust-lang/crates.io-index" 411 | checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" 412 | dependencies = [ 413 | "lock_api", 414 | "parking_lot_core", 415 | ] 416 | 417 | [[package]] 418 | name = "parking_lot_core" 419 | version = "0.9.4" 420 | source = "registry+https://github.com/rust-lang/crates.io-index" 421 | checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" 422 | dependencies = [ 423 | "cfg-if", 424 | "libc", 425 | "redox_syscall", 426 | "smallvec", 427 | "windows-sys", 428 | ] 429 | 430 | [[package]] 431 | name = "pin-project-lite" 432 | version = "0.2.9" 433 | source = "registry+https://github.com/rust-lang/crates.io-index" 434 | checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" 435 | 436 | [[package]] 437 | name = "ppv-lite86" 438 | version = "0.2.16" 439 | source = "registry+https://github.com/rust-lang/crates.io-index" 440 | checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" 441 | 442 | [[package]] 
443 | name = "proc-macro-crate" 444 | version = "1.2.1" 445 | source = "registry+https://github.com/rust-lang/crates.io-index" 446 | checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" 447 | dependencies = [ 448 | "once_cell", 449 | "thiserror", 450 | "toml", 451 | ] 452 | 453 | [[package]] 454 | name = "proc-macro2" 455 | version = "1.0.47" 456 | source = "registry+https://github.com/rust-lang/crates.io-index" 457 | checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" 458 | dependencies = [ 459 | "unicode-ident", 460 | ] 461 | 462 | [[package]] 463 | name = "quote" 464 | version = "1.0.21" 465 | source = "registry+https://github.com/rust-lang/crates.io-index" 466 | checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" 467 | dependencies = [ 468 | "proc-macro2", 469 | ] 470 | 471 | [[package]] 472 | name = "rand" 473 | version = "0.8.5" 474 | source = "registry+https://github.com/rust-lang/crates.io-index" 475 | checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" 476 | dependencies = [ 477 | "libc", 478 | "rand_chacha", 479 | "rand_core", 480 | ] 481 | 482 | [[package]] 483 | name = "rand_chacha" 484 | version = "0.3.1" 485 | source = "registry+https://github.com/rust-lang/crates.io-index" 486 | checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" 487 | dependencies = [ 488 | "ppv-lite86", 489 | "rand_core", 490 | ] 491 | 492 | [[package]] 493 | name = "rand_core" 494 | version = "0.6.4" 495 | source = "registry+https://github.com/rust-lang/crates.io-index" 496 | checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" 497 | dependencies = [ 498 | "getrandom", 499 | ] 500 | 501 | [[package]] 502 | name = "redox_syscall" 503 | version = "0.2.16" 504 | source = "registry+https://github.com/rust-lang/crates.io-index" 505 | checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" 506 | dependencies = [ 507 | "bitflags", 508 | ] 509 | 510 | [[package]] 511 | name = "remove_dir_all" 512 | version = "0.5.3" 513 | source = "registry+https://github.com/rust-lang/crates.io-index" 514 | checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" 515 | dependencies = [ 516 | "winapi", 517 | ] 518 | 519 | [[package]] 520 | name = "replica" 521 | version = "0.2.0" 522 | dependencies = [ 523 | "bpaf", 524 | "diamond-types", 525 | "rand", 526 | "serde", 527 | "serde_json", 528 | "smallvec", 529 | "smartstring", 530 | "tokio", 531 | ] 532 | 533 | [[package]] 534 | name = "replica-swift" 535 | version = "0.1.0" 536 | dependencies = [ 537 | "cbindgen", 538 | "diamond-types", 539 | "jumprope", 540 | "replica", 541 | "swift-bridge", 542 | "swift-bridge-build", 543 | "tokio", 544 | ] 545 | 546 | [[package]] 547 | name = "rle" 548 | version = "0.2.0" 549 | dependencies = [ 550 | "smallvec", 551 | ] 552 | 553 | [[package]] 554 | name = "ryu" 555 | version = "1.0.11" 556 | source = "registry+https://github.com/rust-lang/crates.io-index" 557 | checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" 558 | 559 | [[package]] 560 | name = "scopeguard" 561 | version = "1.1.0" 562 | source = "registry+https://github.com/rust-lang/crates.io-index" 563 | checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 564 | 565 | [[package]] 566 | name = "serde" 567 | version = "1.0.147" 568 | source = "registry+https://github.com/rust-lang/crates.io-index" 569 | checksum = 
"d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965" 570 | dependencies = [ 571 | "serde_derive", 572 | ] 573 | 574 | [[package]] 575 | name = "serde_derive" 576 | version = "1.0.147" 577 | source = "registry+https://github.com/rust-lang/crates.io-index" 578 | checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852" 579 | dependencies = [ 580 | "proc-macro2", 581 | "quote", 582 | "syn", 583 | ] 584 | 585 | [[package]] 586 | name = "serde_json" 587 | version = "1.0.87" 588 | source = "registry+https://github.com/rust-lang/crates.io-index" 589 | checksum = "6ce777b7b150d76b9cf60d28b55f5847135a003f7d7350c6be7a773508ce7d45" 590 | dependencies = [ 591 | "itoa", 592 | "ryu", 593 | "serde", 594 | ] 595 | 596 | [[package]] 597 | name = "signal-hook" 598 | version = "0.3.14" 599 | source = "registry+https://github.com/rust-lang/crates.io-index" 600 | checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" 601 | dependencies = [ 602 | "libc", 603 | "signal-hook-registry", 604 | ] 605 | 606 | [[package]] 607 | name = "signal-hook-mio" 608 | version = "0.2.3" 609 | source = "registry+https://github.com/rust-lang/crates.io-index" 610 | checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" 611 | dependencies = [ 612 | "libc", 613 | "mio", 614 | "signal-hook", 615 | ] 616 | 617 | [[package]] 618 | name = "signal-hook-registry" 619 | version = "1.4.0" 620 | source = "registry+https://github.com/rust-lang/crates.io-index" 621 | checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" 622 | dependencies = [ 623 | "libc", 624 | ] 625 | 626 | [[package]] 627 | name = "smallvec" 628 | version = "1.10.0" 629 | source = "registry+https://github.com/rust-lang/crates.io-index" 630 | checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" 631 | dependencies = [ 632 | "serde", 633 | ] 634 | 635 | [[package]] 636 | name = "smartstring" 637 | version = "1.0.1" 638 | source = "registry+https://github.com/rust-lang/crates.io-index" 639 | checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" 640 | dependencies = [ 641 | "autocfg", 642 | "serde", 643 | "static_assertions", 644 | "version_check", 645 | ] 646 | 647 | [[package]] 648 | name = "socket2" 649 | version = "0.4.7" 650 | source = "registry+https://github.com/rust-lang/crates.io-index" 651 | checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" 652 | dependencies = [ 653 | "libc", 654 | "winapi", 655 | ] 656 | 657 | [[package]] 658 | name = "static_assertions" 659 | version = "1.1.0" 660 | source = "registry+https://github.com/rust-lang/crates.io-index" 661 | checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" 662 | 663 | [[package]] 664 | name = "str_indices" 665 | version = "0.4.0" 666 | source = "registry+https://github.com/rust-lang/crates.io-index" 667 | checksum = "9d9199fa80c817e074620be84374a520062ebac833f358d74b37060ce4a0f2c0" 668 | 669 | [[package]] 670 | name = "strsim" 671 | version = "0.10.0" 672 | source = "registry+https://github.com/rust-lang/crates.io-index" 673 | checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" 674 | 675 | [[package]] 676 | name = "supports-color" 677 | version = "1.3.0" 678 | source = "registry+https://github.com/rust-lang/crates.io-index" 679 | checksum = "4872ced36b91d47bae8a214a683fe54e7078875b399dfa251df346c9b547d1f9" 680 | dependencies = [ 681 | "atty", 682 | "is_ci", 683 | ] 684 | 685 | [[package]] 
686 | name = "swift-bridge" 687 | version = "0.1.41" 688 | source = "registry+https://github.com/rust-lang/crates.io-index" 689 | checksum = "a98ee362c6e94a09f4efbf955ea6672329ff85d3e38c456a04e5745d3317430e" 690 | dependencies = [ 691 | "swift-bridge-build", 692 | "swift-bridge-macro", 693 | ] 694 | 695 | [[package]] 696 | name = "swift-bridge-build" 697 | version = "0.1.41" 698 | source = "registry+https://github.com/rust-lang/crates.io-index" 699 | checksum = "d2f2c44717d6595fcf37975873f913f3bc715c39e376a9d9be2b75d785c50e87" 700 | dependencies = [ 701 | "proc-macro2", 702 | "swift-bridge-ir", 703 | "syn", 704 | "tempfile", 705 | ] 706 | 707 | [[package]] 708 | name = "swift-bridge-ir" 709 | version = "0.1.41" 710 | source = "registry+https://github.com/rust-lang/crates.io-index" 711 | checksum = "f2802fa649d4224e6153aa54f4c91910d3587f32776759f133b2c126cda64c22" 712 | dependencies = [ 713 | "proc-macro2", 714 | "quote", 715 | "syn", 716 | ] 717 | 718 | [[package]] 719 | name = "swift-bridge-macro" 720 | version = "0.1.41" 721 | source = "registry+https://github.com/rust-lang/crates.io-index" 722 | checksum = "c4014523352e66831037762abbf03beefd22c541eb1fefbe5fe6c517a7e069c4" 723 | dependencies = [ 724 | "proc-macro2", 725 | "quote", 726 | "swift-bridge-ir", 727 | "syn", 728 | ] 729 | 730 | [[package]] 731 | name = "syn" 732 | version = "1.0.103" 733 | source = "registry+https://github.com/rust-lang/crates.io-index" 734 | checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" 735 | dependencies = [ 736 | "proc-macro2", 737 | "quote", 738 | "unicode-ident", 739 | ] 740 | 741 | [[package]] 742 | name = "tempfile" 743 | version = "3.3.0" 744 | source = "registry+https://github.com/rust-lang/crates.io-index" 745 | checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" 746 | dependencies = [ 747 | "cfg-if", 748 | "fastrand", 749 | "libc", 750 | "redox_syscall", 751 | "remove_dir_all", 752 | "winapi", 753 | ] 754 | 755 | [[package]] 756 | name = "termcolor" 757 | version = "1.1.3" 758 | source = "registry+https://github.com/rust-lang/crates.io-index" 759 | checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" 760 | dependencies = [ 761 | "winapi-util", 762 | ] 763 | 764 | [[package]] 765 | name = "textwrap" 766 | version = "0.16.0" 767 | source = "registry+https://github.com/rust-lang/crates.io-index" 768 | checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" 769 | 770 | [[package]] 771 | name = "thiserror" 772 | version = "1.0.37" 773 | source = "registry+https://github.com/rust-lang/crates.io-index" 774 | checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" 775 | dependencies = [ 776 | "thiserror-impl", 777 | ] 778 | 779 | [[package]] 780 | name = "thiserror-impl" 781 | version = "1.0.37" 782 | source = "registry+https://github.com/rust-lang/crates.io-index" 783 | checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" 784 | dependencies = [ 785 | "proc-macro2", 786 | "quote", 787 | "syn", 788 | ] 789 | 790 | [[package]] 791 | name = "tokio" 792 | version = "1.21.2" 793 | source = "registry+https://github.com/rust-lang/crates.io-index" 794 | checksum = "a9e03c497dc955702ba729190dc4aac6f2a0ce97f913e5b1b5912fc5039d9099" 795 | dependencies = [ 796 | "autocfg", 797 | "bytes", 798 | "libc", 799 | "memchr", 800 | "mio", 801 | "num_cpus", 802 | "parking_lot", 803 | "pin-project-lite", 804 | "signal-hook-registry", 805 | "socket2", 806 | "tokio-macros", 807 | 
"winapi", 808 | ] 809 | 810 | [[package]] 811 | name = "tokio-macros" 812 | version = "1.8.0" 813 | source = "registry+https://github.com/rust-lang/crates.io-index" 814 | checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" 815 | dependencies = [ 816 | "proc-macro2", 817 | "quote", 818 | "syn", 819 | ] 820 | 821 | [[package]] 822 | name = "toml" 823 | version = "0.5.9" 824 | source = "registry+https://github.com/rust-lang/crates.io-index" 825 | checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" 826 | dependencies = [ 827 | "serde", 828 | ] 829 | 830 | [[package]] 831 | name = "tui" 832 | version = "0.19.0" 833 | source = "registry+https://github.com/rust-lang/crates.io-index" 834 | checksum = "ccdd26cbd674007e649a272da4475fb666d3aa0ad0531da7136db6fab0e5bad1" 835 | dependencies = [ 836 | "bitflags", 837 | "cassowary", 838 | "crossterm", 839 | "unicode-segmentation", 840 | "unicode-width", 841 | ] 842 | 843 | [[package]] 844 | name = "tui-textarea" 845 | version = "0.2.0" 846 | source = "registry+https://github.com/rust-lang/crates.io-index" 847 | checksum = "437ad97a57d66f7231dab16f51ede1ff5a3aac68c83eb10fa3a178e454b63cae" 848 | dependencies = [ 849 | "crossterm", 850 | "tui", 851 | ] 852 | 853 | [[package]] 854 | name = "twox-hash" 855 | version = "1.6.3" 856 | source = "registry+https://github.com/rust-lang/crates.io-index" 857 | checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" 858 | dependencies = [ 859 | "cfg-if", 860 | "static_assertions", 861 | ] 862 | 863 | [[package]] 864 | name = "unicode-ident" 865 | version = "1.0.5" 866 | source = "registry+https://github.com/rust-lang/crates.io-index" 867 | checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" 868 | 869 | [[package]] 870 | name = "unicode-segmentation" 871 | version = "1.10.0" 872 | source = "registry+https://github.com/rust-lang/crates.io-index" 873 | checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" 874 | 875 | [[package]] 876 | name = "unicode-width" 877 | version = "0.1.10" 878 | source = "registry+https://github.com/rust-lang/crates.io-index" 879 | checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" 880 | 881 | [[package]] 882 | name = "version_check" 883 | version = "0.9.4" 884 | source = "registry+https://github.com/rust-lang/crates.io-index" 885 | checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" 886 | 887 | [[package]] 888 | name = "wasi" 889 | version = "0.11.0+wasi-snapshot-preview1" 890 | source = "registry+https://github.com/rust-lang/crates.io-index" 891 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 892 | 893 | [[package]] 894 | name = "winapi" 895 | version = "0.3.9" 896 | source = "registry+https://github.com/rust-lang/crates.io-index" 897 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 898 | dependencies = [ 899 | "winapi-i686-pc-windows-gnu", 900 | "winapi-x86_64-pc-windows-gnu", 901 | ] 902 | 903 | [[package]] 904 | name = "winapi-i686-pc-windows-gnu" 905 | version = "0.4.0" 906 | source = "registry+https://github.com/rust-lang/crates.io-index" 907 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 908 | 909 | [[package]] 910 | name = "winapi-util" 911 | version = "0.1.5" 912 | source = "registry+https://github.com/rust-lang/crates.io-index" 913 | checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" 914 | 
dependencies = [ 915 | "winapi", 916 | ] 917 | 918 | [[package]] 919 | name = "winapi-x86_64-pc-windows-gnu" 920 | version = "0.4.0" 921 | source = "registry+https://github.com/rust-lang/crates.io-index" 922 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 923 | 924 | [[package]] 925 | name = "windows-sys" 926 | version = "0.42.0" 927 | source = "registry+https://github.com/rust-lang/crates.io-index" 928 | checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" 929 | dependencies = [ 930 | "windows_aarch64_gnullvm", 931 | "windows_aarch64_msvc", 932 | "windows_i686_gnu", 933 | "windows_i686_msvc", 934 | "windows_x86_64_gnu", 935 | "windows_x86_64_gnullvm", 936 | "windows_x86_64_msvc", 937 | ] 938 | 939 | [[package]] 940 | name = "windows_aarch64_gnullvm" 941 | version = "0.42.0" 942 | source = "registry+https://github.com/rust-lang/crates.io-index" 943 | checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" 944 | 945 | [[package]] 946 | name = "windows_aarch64_msvc" 947 | version = "0.42.0" 948 | source = "registry+https://github.com/rust-lang/crates.io-index" 949 | checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" 950 | 951 | [[package]] 952 | name = "windows_i686_gnu" 953 | version = "0.42.0" 954 | source = "registry+https://github.com/rust-lang/crates.io-index" 955 | checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" 956 | 957 | [[package]] 958 | name = "windows_i686_msvc" 959 | version = "0.42.0" 960 | source = "registry+https://github.com/rust-lang/crates.io-index" 961 | checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" 962 | 963 | [[package]] 964 | name = "windows_x86_64_gnu" 965 | version = "0.42.0" 966 | source = "registry+https://github.com/rust-lang/crates.io-index" 967 | checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" 968 | 969 | [[package]] 970 | name = "windows_x86_64_gnullvm" 971 | version = "0.42.0" 972 | source = "registry+https://github.com/rust-lang/crates.io-index" 973 | checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" 974 | 975 | [[package]] 976 | name = "windows_x86_64_msvc" 977 | version = "0.42.0" 978 | source = "registry+https://github.com/rust-lang/crates.io-index" 979 | checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" 980 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "replica" 3 | version = "0.2.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [lib] 9 | path = "src/lib.rs" 10 | 11 | [[bin]] 12 | name = "replica" 13 | path = "src/main.rs" 14 | 15 | [workspace] 16 | members = ["crates/*"] 17 | 18 | [dependencies] 19 | tokio = { version = "1.21.2", features = ["full"] } 20 | bpaf = { version = "0.7.2", features = ["autocomplete", "derive"] } 21 | serde = { version = "1.0.147", features = ["derive"] } 22 | serde_json = "1.0.87" 23 | diamond-types = { version = "1.0.0", path = "../diamond-types", features = ["serde"] } 24 | smallvec = { version = "1.10.0", features = ["serde", "union", "const_generics"] } 25 | smartstring = "1.0.1" 26 | rand = "0.8.5" 27 | 28 | [features] 29 | default = ["dull-color"] 30 | bright-color = ["bpaf/bright-color"] 31 | dull-color = ["bpaf/dull-color"] 32 | 
33 | [profile.release]
34 | opt-level = "s"
35 | codegen-units = 1
36 | lto = true
37 | #debug = false
38 | #strip = true
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Replicaaaaa
2 | 
3 | > Status: Prototype!
4 | 
5 | Replica is a platform for interoperable, local-first data built on top of CRDTs. You have a personal set of documents and data, which can be replicated between your devices. The data is collaboratively editable between your phone and your laptop, or your phone and my phone.
6 | 
7 | Replica is still in early development, and this is an early prototype of the code. Please don't expect too much yet! Most of the "hard" CRDT work is happening in the [diamond-types](https://github.com/josephg/diamond-types) library.
--------------------------------------------------------------------------------
/build_swift.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | set -e
4 | 
5 | THISDIR=$(dirname $0)
6 | cd $THISDIR
7 | 
8 | #FLAGS=""
9 | #MODE="debug"
10 | FLAGS="--release"
11 | MODE="release"
12 | 
13 | export SWIFT_BRIDGE_OUT_DIR="$(pwd)/crates/replica-swift/bridge"
14 | export RUSTFLAGS=""
15 | # Build the project for the desired platforms:
16 | cargo build $FLAGS --target x86_64-apple-darwin -p replica-swift
17 | cargo build $FLAGS --target aarch64-apple-darwin -p replica-swift
18 | mkdir -p ./target/universal-macos/"$MODE"
19 | 
20 | lipo \
21 |   ./target/aarch64-apple-darwin/"$MODE"/libreplica_swift.a \
22 |   ./target/x86_64-apple-darwin/"$MODE"/libreplica_swift.a \
23 |   -create -output ./target/universal-macos/"$MODE"/libreplica_swift.a
24 | 
25 | #cargo build $FLAGS --target aarch64-apple-ios -p replica-swift
26 | #cargo build $FLAGS --target aarch64-apple-ios-sim -p replica-swift
27 | #mkdir -p ./target/universal-ios/"$MODE"
28 | 
29 | #lipo \
30 | #  ./target/aarch64-apple-ios-sim/"$MODE"/libreplica_swift.a \
31 | #  -create -output ./target/universal-ios/"$MODE"/libreplica_swift.a
32 | #  ./target/aarch64-apple-ios/"$MODE"/libreplica_swift.a \
33 | 
34 | swift-bridge-cli create-package \
35 |   --bridges-dir "$SWIFT_BRIDGE_OUT_DIR" \
36 |   --out-dir target/replica_swift \
37 |   --macos ./target/universal-macos/"$MODE"/libreplica_swift.a \
38 |   --name Replica
39 | #  --ios ./target/aarch64-apple-ios/"$MODE"/libreplica_swift.a \
40 | #  --simulator ./target/aarch64-apple-ios-sim/"$MODE"/libreplica_swift.a \
41 | 
42 | 
43 | #--simulator target/universal-ios/"$MODE"/libreplica_swift.a \
44 | 
--------------------------------------------------------------------------------
/crates/p2ptui/Cargo.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Cargo.
2 | # It is not intended for manual editing.
3 | version = 3 4 | 5 | [[package]] 6 | name = "autocfg" 7 | version = "1.1.0" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" 10 | 11 | [[package]] 12 | name = "bitflags" 13 | version = "1.3.2" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 16 | 17 | [[package]] 18 | name = "cassowary" 19 | version = "0.3.0" 20 | source = "registry+https://github.com/rust-lang/crates.io-index" 21 | checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" 22 | 23 | [[package]] 24 | name = "cfg-if" 25 | version = "1.0.0" 26 | source = "registry+https://github.com/rust-lang/crates.io-index" 27 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 28 | 29 | [[package]] 30 | name = "crossterm" 31 | version = "0.25.0" 32 | source = "registry+https://github.com/rust-lang/crates.io-index" 33 | checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" 34 | dependencies = [ 35 | "bitflags", 36 | "crossterm_winapi", 37 | "libc", 38 | "mio", 39 | "parking_lot", 40 | "signal-hook", 41 | "signal-hook-mio", 42 | "winapi", 43 | ] 44 | 45 | [[package]] 46 | name = "crossterm_winapi" 47 | version = "0.9.0" 48 | source = "registry+https://github.com/rust-lang/crates.io-index" 49 | checksum = "2ae1b35a484aa10e07fe0638d02301c5ad24de82d310ccbd2f3693da5f09bf1c" 50 | dependencies = [ 51 | "winapi", 52 | ] 53 | 54 | [[package]] 55 | name = "libc" 56 | version = "0.2.138" 57 | source = "registry+https://github.com/rust-lang/crates.io-index" 58 | checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8" 59 | 60 | [[package]] 61 | name = "lock_api" 62 | version = "0.4.9" 63 | source = "registry+https://github.com/rust-lang/crates.io-index" 64 | checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" 65 | dependencies = [ 66 | "autocfg", 67 | "scopeguard", 68 | ] 69 | 70 | [[package]] 71 | name = "log" 72 | version = "0.4.17" 73 | source = "registry+https://github.com/rust-lang/crates.io-index" 74 | checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" 75 | dependencies = [ 76 | "cfg-if", 77 | ] 78 | 79 | [[package]] 80 | name = "mio" 81 | version = "0.8.5" 82 | source = "registry+https://github.com/rust-lang/crates.io-index" 83 | checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" 84 | dependencies = [ 85 | "libc", 86 | "log", 87 | "wasi", 88 | "windows-sys", 89 | ] 90 | 91 | [[package]] 92 | name = "p2ptui" 93 | version = "0.1.0" 94 | dependencies = [ 95 | "crossterm", 96 | "tui", 97 | "tui-textarea", 98 | ] 99 | 100 | [[package]] 101 | name = "parking_lot" 102 | version = "0.12.1" 103 | source = "registry+https://github.com/rust-lang/crates.io-index" 104 | checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" 105 | dependencies = [ 106 | "lock_api", 107 | "parking_lot_core", 108 | ] 109 | 110 | [[package]] 111 | name = "parking_lot_core" 112 | version = "0.9.5" 113 | source = "registry+https://github.com/rust-lang/crates.io-index" 114 | checksum = "7ff9f3fef3968a3ec5945535ed654cb38ff72d7495a25619e2247fb15a2ed9ba" 115 | dependencies = [ 116 | "cfg-if", 117 | "libc", 118 | "redox_syscall", 119 | "smallvec", 120 | "windows-sys", 121 | ] 122 | 123 | [[package]] 124 | name = "redox_syscall" 125 | version = "0.2.16" 126 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 127 | checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" 128 | dependencies = [ 129 | "bitflags", 130 | ] 131 | 132 | [[package]] 133 | name = "scopeguard" 134 | version = "1.1.0" 135 | source = "registry+https://github.com/rust-lang/crates.io-index" 136 | checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 137 | 138 | [[package]] 139 | name = "signal-hook" 140 | version = "0.3.14" 141 | source = "registry+https://github.com/rust-lang/crates.io-index" 142 | checksum = "a253b5e89e2698464fc26b545c9edceb338e18a89effeeecfea192c3025be29d" 143 | dependencies = [ 144 | "libc", 145 | "signal-hook-registry", 146 | ] 147 | 148 | [[package]] 149 | name = "signal-hook-mio" 150 | version = "0.2.3" 151 | source = "registry+https://github.com/rust-lang/crates.io-index" 152 | checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" 153 | dependencies = [ 154 | "libc", 155 | "mio", 156 | "signal-hook", 157 | ] 158 | 159 | [[package]] 160 | name = "signal-hook-registry" 161 | version = "1.4.0" 162 | source = "registry+https://github.com/rust-lang/crates.io-index" 163 | checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" 164 | dependencies = [ 165 | "libc", 166 | ] 167 | 168 | [[package]] 169 | name = "smallvec" 170 | version = "1.10.0" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" 173 | 174 | [[package]] 175 | name = "tui" 176 | version = "0.19.0" 177 | source = "registry+https://github.com/rust-lang/crates.io-index" 178 | checksum = "ccdd26cbd674007e649a272da4475fb666d3aa0ad0531da7136db6fab0e5bad1" 179 | dependencies = [ 180 | "bitflags", 181 | "cassowary", 182 | "crossterm", 183 | "unicode-segmentation", 184 | "unicode-width", 185 | ] 186 | 187 | [[package]] 188 | name = "tui-textarea" 189 | version = "0.2.0" 190 | source = "registry+https://github.com/rust-lang/crates.io-index" 191 | checksum = "437ad97a57d66f7231dab16f51ede1ff5a3aac68c83eb10fa3a178e454b63cae" 192 | dependencies = [ 193 | "crossterm", 194 | "tui", 195 | ] 196 | 197 | [[package]] 198 | name = "unicode-segmentation" 199 | version = "1.10.0" 200 | source = "registry+https://github.com/rust-lang/crates.io-index" 201 | checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" 202 | 203 | [[package]] 204 | name = "unicode-width" 205 | version = "0.1.10" 206 | source = "registry+https://github.com/rust-lang/crates.io-index" 207 | checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" 208 | 209 | [[package]] 210 | name = "wasi" 211 | version = "0.11.0+wasi-snapshot-preview1" 212 | source = "registry+https://github.com/rust-lang/crates.io-index" 213 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 214 | 215 | [[package]] 216 | name = "winapi" 217 | version = "0.3.9" 218 | source = "registry+https://github.com/rust-lang/crates.io-index" 219 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 220 | dependencies = [ 221 | "winapi-i686-pc-windows-gnu", 222 | "winapi-x86_64-pc-windows-gnu", 223 | ] 224 | 225 | [[package]] 226 | name = "winapi-i686-pc-windows-gnu" 227 | version = "0.4.0" 228 | source = "registry+https://github.com/rust-lang/crates.io-index" 229 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 230 | 231 | [[package]] 232 | name = 
"winapi-x86_64-pc-windows-gnu" 233 | version = "0.4.0" 234 | source = "registry+https://github.com/rust-lang/crates.io-index" 235 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 236 | 237 | [[package]] 238 | name = "windows-sys" 239 | version = "0.42.0" 240 | source = "registry+https://github.com/rust-lang/crates.io-index" 241 | checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" 242 | dependencies = [ 243 | "windows_aarch64_gnullvm", 244 | "windows_aarch64_msvc", 245 | "windows_i686_gnu", 246 | "windows_i686_msvc", 247 | "windows_x86_64_gnu", 248 | "windows_x86_64_gnullvm", 249 | "windows_x86_64_msvc", 250 | ] 251 | 252 | [[package]] 253 | name = "windows_aarch64_gnullvm" 254 | version = "0.42.0" 255 | source = "registry+https://github.com/rust-lang/crates.io-index" 256 | checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" 257 | 258 | [[package]] 259 | name = "windows_aarch64_msvc" 260 | version = "0.42.0" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" 263 | 264 | [[package]] 265 | name = "windows_i686_gnu" 266 | version = "0.42.0" 267 | source = "registry+https://github.com/rust-lang/crates.io-index" 268 | checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" 269 | 270 | [[package]] 271 | name = "windows_i686_msvc" 272 | version = "0.42.0" 273 | source = "registry+https://github.com/rust-lang/crates.io-index" 274 | checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" 275 | 276 | [[package]] 277 | name = "windows_x86_64_gnu" 278 | version = "0.42.0" 279 | source = "registry+https://github.com/rust-lang/crates.io-index" 280 | checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" 281 | 282 | [[package]] 283 | name = "windows_x86_64_gnullvm" 284 | version = "0.42.0" 285 | source = "registry+https://github.com/rust-lang/crates.io-index" 286 | checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" 287 | 288 | [[package]] 289 | name = "windows_x86_64_msvc" 290 | version = "0.42.0" 291 | source = "registry+https://github.com/rust-lang/crates.io-index" 292 | checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" 293 | -------------------------------------------------------------------------------- /crates/p2ptui/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "p2ptui" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | tui = "0.19.0" 10 | crossterm = "0.25.0" 11 | tui-textarea = "0.2.0" -------------------------------------------------------------------------------- /crates/p2ptui/src/main.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports)] 2 | 3 | use std::{io, thread, time::Duration}; 4 | use tui::{backend::CrosstermBackend, widgets::{Widget, Block, Borders}, layout::{Layout, Constraint, Direction}, Terminal, Frame}; 5 | use crossterm::{ 6 | event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode}, 7 | execute, 8 | terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen}, 9 | }; 10 | use tui::backend::Backend; 11 | use tui_textarea::{Input, Key, TextArea}; 12 | 13 | // fn ui(f: &mut Frame) { 
14 | //
15 | // }
16 | 
17 | fn main() -> Result<(), io::Error> {
18 |     // setup terminal
19 |     enable_raw_mode()?;
20 |     let mut stdout = io::stdout().lock();
21 |     // execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
22 |     execute!(stdout, EnterAlternateScreen)?;
23 |     let backend = CrosstermBackend::new(stdout);
24 |     let mut terminal = Terminal::new(backend)?;
25 | 
26 |     let mut textarea = TextArea::default();
27 | 
28 |     loop {
29 |         terminal.draw(|f| {
30 |             let chunks = Layout::default()
31 |                 .direction(Direction::Horizontal)
32 |                 .margin(1)
33 |                 .constraints(
34 |                     [
35 |                         Constraint::Percentage(20),
36 |                         Constraint::Percentage(80),
37 |                     ].as_ref()
38 |                 )
39 |                 .split(f.size());
40 | 
41 |             let block = Block::default()
42 |                 .title("Note list")
43 |                 .borders(Borders::from_bits(Borders::TOP.bits() | Borders::RIGHT.bits()).unwrap());
44 |             f.render_widget(block, chunks[0]);
45 | 
46 |             // let block = Block::default()
47 |             //     .title("Cool note")
48 |             //     .borders(Borders::TOP);
49 |             // f.render_widget(block, chunks[1]);
50 |             f.render_widget(textarea.widget(), chunks[1]);
51 |         })?;
52 | 
53 |         match crossterm::event::read()?.into() {
54 |             Input { key: Key::Esc, .. } => break,
55 |             input => {
56 |                 textarea.input(input);
57 |             }
58 |         }
59 |     }
60 | 
61 |     // thread::sleep(Duration::from_millis(5000));
62 | 
63 |     // restore terminal
64 |     disable_raw_mode()?;
65 |     execute!(
66 |         terminal.backend_mut(),
67 |         LeaveAlternateScreen,
68 |         // DisableMouseCapture
69 |     )?;
70 |     terminal.show_cursor()?;
71 | 
72 |     Ok(())
73 | }
--------------------------------------------------------------------------------
/crates/replica-swift/Bridge.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 | 
3 | public func db_new() -> OpaquePointer {
4 |     database_new()!
5 | }
6 | 
7 | public func db_start(db: OpaquePointer, signal_data: UnsafeMutableRawPointer?, signal: CCallback) {
8 |     database_start(db, signal_data, signal)
9 | }
10 | //public func db_start(db: OpaquePointer, init_data: UnsafeMutableRawPointer?, on_ready: CCallback,
11 | //                     signal_data: UnsafeMutableRawPointer?, signal: CCallback) {
12 | //    database_start(db, init_data, on_ready, signal_data, signal)
13 | //}
14 | 
15 | //public func database_start(db: OpaquePointer, ready: () -> Void) {
16 | //    func onReady() {
17 | //        print("READY")
18 | //        ready()
19 | //    }
20 | //
21 | //    print("starting")
22 | //    database_start(db, onReady)
23 | //    print("xxxx")
24 | //}
25 | 
26 | //public func db_num_posts(db: OpaquePointer) -> UInt64 {
27 | //    database_num_posts(db)
28 | //}
29 | 
30 | //void *signal_data, void (*cb)(void*, uintptr_t len, const uintptr_t *names));
31 | public func db_get_posts(db: OpaquePointer, signal_data: UnsafeMutableRawPointer?, signal: (@convention(c) (UnsafeMutableRawPointer?, UInt, UnsafePointer<UInt>?) -> Void)) {
32 |     database_get_posts(db, signal_data, signal)
33 | }
34 | 
35 | 
36 | //void database_checkout(struct DatabaseConnection *this_,
37 | //uintptr_t doc_name,
38 | //void *signal_data,
39 | //void (*cb)(void*, struct Branch *content));
40 | 
41 | public func db_checkout(db: OpaquePointer, doc_name: UInt, signal_data: UnsafeMutableRawPointer?, signal: (@convention(c) (UnsafeMutableRawPointer?, UnsafeMutableRawPointer?) -> Void)) {
42 |     database_checkout(db, doc_name, signal_data, signal)
43 | }
44 | 
45 | public func db_connect(db: OpaquePointer, remote_host: GenericIntoRustString) {
46 |     let rustString = remote_host.intoRustString();
47 |     rustString.isOwned = false;
48 |     database_connect(db, OpaquePointer(rustString.ptr))
49 | }
50 | 
51 | //public func db_update_branch(db: OpaquePointer, doc_name: UInt, branch: BranchRefMut) {
52 | //    database_update_branch(db, doc_name, branch.ptr)
53 | //}
54 | 
55 | //void database_update_branch(struct DatabaseConnection *this_,
56 | //LV doc_name,
57 | //ExperimentalBranch *branch);
58 | 
59 | 
60 | //
61 | //void hello_world(void);
62 | //
63 | //struct Database *database_new(void);
64 | //
65 | //void database_free(struct Database *this_);
66 | //
67 | //void database_start(struct Database *this_);
68 | //
69 | //void database_borrow(struct Database *this_);
70 | 
71 | 
--------------------------------------------------------------------------------
/crates/replica-swift/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "replica-swift"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7 | 
8 | [lib]
9 | crate-type = ["staticlib"]
10 | 
11 | [build-dependencies]
12 | cbindgen = "0.24.3"
13 | swift-bridge-build = "0.1.41"
14 | 
15 | [dependencies]
16 | #swift-bridge = { version = "0.1.41", features = ["async"] }
17 | swift-bridge = "0.1.41"
18 | #diamond-types = { path = "../..", features = ["serde", "wchar_conversion"] }
19 | #rand = { version = "0.8.5" }
20 | tokio = { version = "1.21.2", features = ["full"] }
21 | replica = { path = "../.." }
22 | diamond-types = { version = "1.0.0", path = "../../../diamond-types", features = ["serde"] }
23 | jumprope = { version = "1.1.1", features = ["wchar_conversion"] }
--------------------------------------------------------------------------------
/crates/replica-swift/build.rs:
--------------------------------------------------------------------------------
1 | extern crate cbindgen;
2 | 
3 | use std::env;
4 | use std::fs::File;
5 | use std::io::Write;
6 | use std::path::PathBuf;
7 | 
8 | fn main() {
9 |     let out_dir = PathBuf::from("./bridge");
10 | 
11 |     let bridges = vec!["src/lib.rs"];
12 |     for path in bridges.iter() {
13 |         println!("cargo:rerun-if-changed={}", path);
14 |     }
15 | 
16 |     let swift_bridge_gen = swift_bridge_build::parse_bridges(bridges);
17 |     swift_bridge_gen.write_all_concatenated(out_dir, "replica-swift");
18 | 
19 |     let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
20 | 
21 |     let mut config = cbindgen::Config::default();
22 |     config.language = cbindgen::Language::C;
23 |     config.pragma_once = true;
24 | 
25 |     // There's a problem here: swift-bridge-cli does a great job, but it only
26 |     // supports one bridge .h / .swift file pair, so we'll concatenate the
27 |     // extra C methods onto the generated header.
28 |     let headers_file = File::options().append(true).create_new(false)
29 |         .open("bridge/replica-swift/replica-swift.h").unwrap();
30 | 
31 |     cbindgen::generate_with_config(crate_dir, config)
32 |         .expect("Unable to generate bindings")
33 |         .write(headers_file);
34 |     // .write_to_file("./temp_bridge.h");
35 | 
36 |     // // So I'm going to concatenate the output from swift-bridge in place.
37 |     // let mut headers_file = File::options().append(true).create_new(false)
38 |     //     .open("bridge/c_extras/bridge.h").unwrap();
39 |     // headers_file.write_all(swift_bridge_gen.concat_c().as_bytes()).unwrap();
40 | 
41 |     let mut swift_bridge = File::options().append(true).create_new(false)
42 |         .open("bridge/replica-swift/replica-swift.swift").unwrap();
43 |     let c_bridge_swift = std::fs::read("./Bridge.swift").unwrap();
44 |     swift_bridge.write_all(&c_bridge_swift).unwrap();
45 |     // swift_file.write_all(swift_bridge_gen.concat_swift().as_bytes()).unwrap();
46 | }
--------------------------------------------------------------------------------
/crates/replica-swift/src/lib.rs:
--------------------------------------------------------------------------------
1 | use std::ffi::c_void;
2 | use std::net::{SocketAddr, ToSocketAddrs};
3 | use std::sync::Arc;
4 | use std::sync::atomic::{AtomicBool, Ordering};
5 | use diamond_types::Branch;
6 | use diamond_types::list::operation::TextOperation;
7 | use diamond_types::LV;
8 | use tokio::runtime::Handle;
9 | use tokio::sync::{RwLock, RwLockReadGuard, RwLockWriteGuard};
10 | use tokio::sync::broadcast::Sender;
11 | use replica::connect_internal;
12 | use replica::database::Database;
13 | 
14 | #[swift_bridge::bridge]
15 | mod ffi {
16 |     // #[swift_bridge::bridge(swift_repr = "struct")]
17 |     // struct BridgeTextOperation {
18 |     //     /// The range of items in the document being modified by this operation.
19 |     //     pub start: usize,
20 |     //     pub end: usize,
21 |     //
22 |     //     /// Is this operation an insert or a delete?
23 |     //     pub is_insert: bool,
24 |     //
25 |     //     /// What content is being inserted or deleted. Empty string for deletes.
26 |     //     pub content: String,
27 |     // }
28 | 
29 | 
30 |     extern "Rust" {
31 |         // type DatabaseHandle;
32 |         // #[swift_bridge::bridge(swift_repr = "struct")]
33 |         // struct TextOp {
34 |         //
35 |         // }
36 |         //
37 |         fn foo() -> Vec;
38 |     }
39 | 
40 |     extern "Rust" {
41 |         type LocalBranch;
42 | 
43 |         fn get_version(&self) -> Vec;
44 | 
45 |         fn get_post_content(&self) -> String;
46 |         fn update(&mut self, db: *mut DatabaseConnection) -> bool;
47 |         // fn xf_operations_since(&self, frontier: &[usize]) -> Vec;
48 | 
49 |         fn replace_content_wchar(&mut self, db: *mut DatabaseConnection, replace_start: usize, replace_end: usize, ins_content: String);
50 |     }
51 | 
52 | }
53 | 
54 | pub struct LocalBranch {
55 |     doc_name: LV,
56 |     content: Branch
57 | }
58 | 
59 | 
60 | impl LocalBranch {
61 |     fn get_version(&self) -> Vec {
62 |         self.content.frontier.iter().copied().collect()
63 |     }
64 | 
65 |     fn get_post_content(&self) -> String {
66 |         let content_crdt = self.content.text_at_path(&["content"]);
67 |         self.content.texts.get(&content_crdt).unwrap().to_string()
68 |     }
69 | 
70 |     fn update(&mut self, db: *mut DatabaseConnection) -> bool {
71 |         let db = unsafe { &mut *db };
72 | 
73 |         // with_read_database blocks, so this should be threadsafe (so long as the branch is Send).
74 |         db.with_read_database(|db| {
75 |             db.update_branch(self.doc_name, &mut self.content)
76 |         })
77 |     }
78 | 
79 |     fn replace_content_wchar(&mut self, db: *mut DatabaseConnection, replace_start_wchars: usize, replace_end_wchars: usize, ins_content: String) {
80 |         let db = unsafe { &mut *db };
81 |         db.with_write_database(|mut db| {
82 |             let (oplog, agent) = db.get_doc_mut(self.doc_name).unwrap();
83 |             let content_crdt = oplog.text_at_path(&["content"]);
84 | 
85 |             let rope = self.content.texts.get(&content_crdt).unwrap().borrow();
86 |             let start_chars = rope.wchars_to_chars(replace_start_wchars);
87 |             if replace_start_wchars != replace_end_wchars {
88 |                 let end_chars = rope.wchars_to_chars(replace_end_wchars);
89 |                 oplog.local_text_op(agent, content_crdt, TextOperation::new_delete(start_chars..end_chars));
90 |             }
91 |             drop(rope);
92 |             if !ins_content.is_empty() {
93 |                 oplog.local_text_op(agent, content_crdt, TextOperation::new_insert(start_chars, &ins_content));
94 |             }
95 | 
96 |             // This could be massively optimized, since we know we can FF.
97 |             self.content.merge_changes_to_tip(&oplog);
98 | 
99 |             db.doc_updated(self.doc_name);
100 |         });
101 |         db.sender.send(0).unwrap();
102 |     }
103 | 
104 | }
105 | 
106 | fn foo() -> Vec {
107 |     vec![1,2,3]
108 | }
109 | 
110 | type CCallback = extern "C" fn(*mut c_void) -> ();
111 | 
112 | #[no_mangle]
113 | pub extern "C" fn database_new() -> *mut DatabaseConnection { Box::into_raw(Box::new(DatabaseConnection::new())) }
114 | 
115 | #[no_mangle]
116 | pub extern "C" fn database_free(this: *mut DatabaseConnection) {
117 |     let this = unsafe { Box::from_raw(this) };
118 |     drop(this);
119 | }
120 | 
121 | struct SendCPtr(*mut c_void);
122 | 
123 | unsafe impl Send for SendCPtr {}
124 | 
125 | #[no_mangle]
126 | pub extern "C" fn database_start(this: *mut DatabaseConnection, signal_data: *mut c_void, signal_callback: CCallback) {
127 |     let this = unsafe { &mut *this };
128 | 
129 |     let signal_data = SendCPtr(signal_data);
130 |     this.start(move || {
131 |         let s = &signal_data; // Needed so we move signal_data itself.
132 |         signal_callback(s.0)
133 |     });
134 | }
135 | 
136 | /// Eg "localhost:4444".
137 | #[no_mangle]
138 | pub extern "C" fn database_connect(this: *mut DatabaseConnection, remote_host: *mut swift_bridge::string::RustString) {
139 |     let this = unsafe { &mut *this };
140 | 
141 |     let remote_host = unsafe { Box::from_raw(remote_host).0 };
142 |     this.connect(remote_host.to_socket_addrs().unwrap());
143 | }
144 | 
145 | 
146 | // #[no_mangle]
147 | // pub extern "C" fn database_update_branch(this: *mut DatabaseConnection, doc_name: usize, branch: *mut c_void) -> bool {
148 | //     let this = unsafe { &mut *this };
149 | //     let branch = unsafe { &mut *(branch as *mut ExperimentalBranch) };
150 | //
151 | //     // with_read_database blocks, so this should be threadsafe (so long as the branch is Send).
152 | // this.with_read_database(|db| {
153 | // db.update_branch(doc_name, branch)
154 | // })
155 | // }
156 |
157 | // #[no_mangle]
158 | // pub extern "C" fn with_db(this: *mut DatabaseConnection, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, *const DatabaseHandle) -> ()) {
159 | // let this = unsafe { &mut *this };
160 | // this.with_read_database(|db| {
161 | //
162 | // });
163 | // }
164 |
165 |
166 | #[no_mangle]
167 | pub extern "C" fn database_num_posts(this: *mut DatabaseConnection) -> u64 {
168 | let this = unsafe { &mut *this };
169 | this.with_read_database(|db| db.posts().count() as u64)
170 | }
171 |
172 | #[no_mangle]
173 | pub extern "C" fn database_get_edits_since(this: *mut DatabaseConnection, doc_name: usize, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, data: usize) -> ()) {
174 | let this = unsafe { &mut *this };
175 | let ops = this.with_read_database(|db| {
176 | db.changes_to_post_content_since(doc_name, &[])
177 | });
178 |
179 | let num = ops.map(|ops| ops.0.len()).unwrap_or(0);
180 | cb(signal_data, num);
181 | }
182 |
183 | // pub extern "C" fn database_checkout(this: *mut DatabaseConnection, doc_name: usize, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, content: *mut Branch) -> ()) {
184 | #[no_mangle]
185 | pub extern "C" fn database_checkout(this: *mut DatabaseConnection, doc_name: usize, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, content: *mut c_void) -> ()) {
186 | let this = unsafe { &mut *this };
187 | let post = this.with_read_database(|db| {
188 | db.checkout(doc_name)
189 | }).unwrap();
190 |
191 | let ptr = Box::into_raw(Box::new(LocalBranch {
192 | doc_name,
193 | content: post
194 | }));
195 | cb(signal_data, ptr as *mut c_void);
196 | }
197 | // #[no_mangle]
198 | // pub extern "C" fn database_get_post_content(this: *mut DatabaseConnection, doc_name: usize, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, content: *const u8) -> ()) {
199 | // let this = unsafe { &mut *this };
200 | // let content = this.with_read_database(|db| {
201 | // db.post_content(doc_name)
202 | // });
203 | //
204 | // let ptr = content.map(|s| s.as_ptr()).unwrap_or(null());
205 | // cb(signal_data, ptr);
206 | // }
207 |
208 | #[no_mangle]
209 | pub extern "C" fn database_get_posts(this: *mut DatabaseConnection, signal_data: *mut c_void, cb: extern "C" fn(*mut c_void, len: usize, bytes: *const usize) -> ()) {
210 | let this = unsafe { &mut *this };
211 | let posts: Vec<LV> = this.with_read_database(|db| {
212 | db.posts().collect()
213 | });
214 |
215 | cb(signal_data, posts.len(), posts.as_ptr())
216 | // cb(signal_data, Box::into_raw(Box::new(posts)));
217 | }
218 |
219 | pub struct DatabaseConnection {
220 | tokio_handle: Option<Handle>,
221 | db_handle: Arc<RwLock<Database>>,
222 | running: AtomicBool,
223 | sender: Sender<usize>,
224 | }
225 |
226 | impl Drop for DatabaseConnection {
227 | fn drop(&mut self) {
228 | if self.running.load(Ordering::Relaxed) {
229 | panic!("Runtime not stopped");
230 | }
231 | // println!("DATABASE DROPPED");
232 | // panic!();
233 | }
234 | }
235 |
236 |
237 | impl DatabaseConnection {
238 | fn new() -> Self {
239 | let database = Database::new();
240 | // database.create_post();
241 |
242 | let (sender, _) = tokio::sync::broadcast::channel(16);
243 |
244 | DatabaseConnection {
245 | tokio_handle: None,
246 | db_handle: Arc::new(RwLock::new(database)),
247 | running: AtomicBool::new(false),
248 | sender,
249 | }
250 | }
251 |
252 | fn start<S: FnMut() + Send + 'static>(&mut self, mut signal: S) {
253 | let was_running = self.running.swap(true, Ordering::Relaxed);
254 | if was_running {
255 | panic!("Tokio runtime already started");
256 | }
257 |
258 | let rt = tokio::runtime::Builder::new_current_thread()
259 | .enable_io()
260 | .enable_time()
261 | .build()
262 | .unwrap();
263 | self.tokio_handle = Some(rt.handle().clone());
264 |
265 | // println!("XXXX");
266 | // let handle = self.db_handle.clone();
267 |
268 | // let (tx, mut rx) = (self.sender.clone(), self.sender.subscribe());
269 | let mut rx = self.sender.subscribe();
270 | std::thread::spawn(move || {
271 | // println!("AAA");
272 | rt.block_on(async {
273 | // signal();
274 |
275 | // println!("BBBb");
276 | // let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 4444);
277 | // let socket_addrs = "test.replica.tech:4444".to_socket_addrs().unwrap();
278 | // connect(socket_addrs.collect(), handle.clone(), tx);
279 |
280 | // callback(t);
281 | loop {
282 | // If rx.recv returns an error, it's because there are no more tx (senders).
283 | // At that point it's impossible for more network messages to be received.
284 | //
285 | // This will happen when the socket connection throws an error.
286 | // TODO: Do something better than panic here.
287 | rx.recv().await.unwrap();
288 | // println!("recv: {x}");
289 | // println!("CCCC");
290 | signal();
291 | }
292 | });
293 | });
294 | }
295 |
296 | fn connect<S: Iterator<Item = SocketAddr>>(&mut self, addr: S) {
297 | self.tokio_handle.as_ref().unwrap().spawn(
298 | connect_internal(addr.collect(), self.db_handle.clone(), self.sender.clone())
299 | );
300 | }
301 |
302 | fn with_read_database<F: FnOnce(RwLockReadGuard<Database>) -> R, R>(&self, f: F) -> R {
303 | let data = self.db_handle.clone();
304 | self.tokio_handle.as_ref().unwrap().block_on(async move {
305 | let reader = data.read().await;
306 | f(reader)
307 | })
308 | }
309 | #[allow(unused)]
310 | fn with_write_database<F: FnOnce(RwLockWriteGuard<Database>) -> R, R>(&mut self, f: F) -> R {
311 | let data = self.db_handle.clone();
312 | self.tokio_handle.as_ref().unwrap().block_on(async move {
313 | let writer = data.write().await;
314 | f(writer)
315 | })
316 | }
317 |
318 | // fn num_posts(&self) -> usize {
319 | // self.with_read_database(|db| db.posts().count())
320 | // }
321 |
322 | // fn checkout_post(&self, doc: LV) -> Option<Branch> {
323 | // self.with_read_database(|db| {
324 | // db.checkout(doc)
325 | // })
326 | // }
327 |
328 | // fn get_post_content(&self, idx: usize) -> Option<String> {
329 | // self.with_read_database(|db| {
330 | // let mut posts = db.posts();
331 | // let name = posts.nth(idx)?;
332 | //
333 | // Some(db.post_content(name)?)
334 | // }) 335 | // } 336 | 337 | 338 | // fn borrow_data(&mut self) -> u32 { 339 | // let data = self.data.clone(); 340 | // self.tokio_handle.as_ref().unwrap().block_on(async move { 341 | // let r = data.read().await; 342 | // // println!("{}", r); 343 | // *r 344 | // }) 345 | // // 100 346 | // } 347 | } -------------------------------------------------------------------------------- /examples/blah.rs: -------------------------------------------------------------------------------- 1 | use std::net::{IpAddr, Ipv4Addr, SocketAddr}; 2 | use std::sync::Arc; 3 | use std::sync::atomic::{AtomicBool, Ordering}; 4 | use std::time::Duration; 5 | use diamond_types::list::operation::TextOperation; 6 | use tokio::runtime::{Builder, Handle}; 7 | use tokio::sync::RwLock; 8 | use replica::connect; 9 | use replica::database::Database; 10 | 11 | pub struct DatabaseHandle { 12 | tokio_handle: Option, 13 | db_handle: Arc>, 14 | running: AtomicBool, 15 | // s: Sender, 16 | } 17 | 18 | // impl Drop for DatabaseHandle { 19 | // fn drop(&mut self) { 20 | // if self.running.load(Ordering::Relaxed) { 21 | // panic!("Runtime not stopped"); 22 | // } 23 | // // println!("DATABASE DROPPED"); 24 | // // panic!(); 25 | // } 26 | // } 27 | 28 | 29 | impl DatabaseHandle { 30 | fn new() -> Self { 31 | let database = Database::new(); 32 | 33 | DatabaseHandle { 34 | tokio_handle: None, 35 | db_handle: Arc::new(RwLock::new(database)), 36 | running: AtomicBool::new(false), 37 | } 38 | } 39 | 40 | fn start(&mut self, mut signal: S) { 41 | let was_running = self.running.swap(true, Ordering::Relaxed); 42 | if was_running { 43 | panic!("Tokio runtime already started"); 44 | } 45 | 46 | let rt = Builder::new_current_thread().enable_all().build().unwrap(); 47 | self.tokio_handle = Some(rt.handle().clone()); 48 | 49 | let handle = self.db_handle.clone(); 50 | 51 | std::thread::spawn(move || { 52 | rt.block_on(async { 53 | let (tx, mut rx) = tokio::sync::broadcast::channel(16); 54 | 55 | // let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 4444); 56 | connect(vec![ 57 | SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 4444), 58 | // SocketAddr::new(IpAddr::V6(Ipv6Addr::LOCALHOST), 4444), 59 | ], handle.clone(), tx.clone()); 60 | 61 | let post_name = loop { 62 | rx.recv().await.unwrap(); 63 | let db = handle.read().await; 64 | db.dbg_print_docs(); 65 | let post = db.posts().next(); 66 | if let Some(post) = post { 67 | break post; 68 | } 69 | }; 70 | 71 | tokio::spawn(async move { 72 | 73 | // let post_name = { 74 | // let mut db = handle.write().await; 75 | // db.create_post() 76 | // }; 77 | 78 | // let tx2 = tx.clone(); 79 | loop { 80 | tokio::time::sleep(Duration::from_secs(2)).await; 81 | 82 | let range = { 83 | let mut db = handle.write().await; 84 | let (post, agent) = db.get_doc_mut(post_name).unwrap(); 85 | let content = post.text_at_path(&["content"]); 86 | let range = post.local_text_op(agent, content, TextOperation::new_insert(0, "a")); 87 | db.doc_updated(post_name); 88 | // dbg!(range); 89 | range 90 | }; 91 | tx.send(range.end).unwrap(); 92 | } 93 | }); 94 | 95 | // callback(t); 96 | loop { 97 | let x = rx.recv().await; 98 | x.unwrap(); 99 | // println!("recv: {x}"); 100 | signal() 101 | } 102 | }); 103 | }); 104 | } 105 | 106 | 107 | // fn with_read_database) -> R, R>(&self, f: F) -> R { 108 | // let data = self.db_handle.clone(); 109 | // self.tokio_handle.as_ref().unwrap().block_on(async move { 110 | // let reader = data.read().await; 111 | // f(reader) 112 | // }) 113 | // } 114 | // fn 
with_write_database) -> R, R>(&mut self, f: F) -> R { 115 | // let data = self.db_handle.clone(); 116 | // self.tokio_handle.as_ref().unwrap().block_on(async move { 117 | // let writer = data.write().await; 118 | // f(writer) 119 | // }) 120 | // } 121 | 122 | // fn num_posts(&self) -> usize { 123 | // self.with_read_database(|db| db.posts().count()) 124 | // } 125 | 126 | // fn get_post_content(&self, idx: usize) -> Option { 127 | // self.with_read_database(|db| { 128 | // let mut posts = db.posts(); 129 | // let name = posts.nth(idx)?; 130 | // 131 | // Some(db.post_content(name)?) 132 | // }) 133 | // } 134 | 135 | 136 | // fn borrow_data(&mut self) -> u32 { 137 | // let data = self.data.clone(); 138 | // self.tokio_handle.as_ref().unwrap().block_on(async move { 139 | // let r = data.read().await; 140 | // // println!("{}", r); 141 | // *r 142 | // }) 143 | // // 100 144 | // } 145 | } 146 | 147 | fn main() { 148 | let mut db = DatabaseHandle::new(); 149 | db.start(|| { 150 | println!("signal!"); 151 | }); 152 | // db.stop(); 153 | std::thread::sleep(Duration::from_secs(1000)); 154 | } 155 | -------------------------------------------------------------------------------- /js/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@types/node": "^18.11.4", 4 | "@types/priorityqueuejs": "^1.0.1", 5 | "binary-search": "^1.3.6", 6 | "map2": "^1.1.2", 7 | "priorityqueuejs": "^2.0.0", 8 | "typescript": "^4.8.4" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /js/src/fancydb/causal-graph.ts: -------------------------------------------------------------------------------- 1 | // The causal graph puts a bunch of edits (each at some [agent, seq] version 2 | // pair) into a list. 3 | 4 | import PriorityQueue from 'priorityqueuejs' 5 | import bs from 'binary-search' 6 | import {AtLeast1, LV, LVRange, RawVersion, ROOT, ROOT_LV, VersionSummary} from '../types.js' 7 | import { pushRLEList, tryRangeAppend, tryRevRangeAppend } from './rle.js' 8 | import { createAgent } from '../utils.js' 9 | 10 | const min2 = (a: number, b: number) => a < b ? a : b 11 | const max2 = (a: number, b: number) => a > b ? a : b 12 | 13 | type CGEntry = { 14 | version: LV, 15 | vEnd: LV, 16 | 17 | agent: string, 18 | seq: number, // Seq for version. 19 | 20 | parents: LV[] // Parents for version 21 | } 22 | 23 | /** NOTE: A single ClientEntry might span multiple entries, with different parents! */ 24 | type ClientEntry = { 25 | seq: number, 26 | seqEnd: number, 27 | version: LV, 28 | } 29 | 30 | export interface CausalGraph { 31 | /** Current global version */ 32 | version: LV[], 33 | 34 | /** Map from LV -> RawVersion + parents */ 35 | entries: CGEntry[], 36 | 37 | /** Map from agent -> LV */ 38 | agentToVersion: {[k: string]: ClientEntry[]}, 39 | } 40 | 41 | export const create = (): CausalGraph => ({ 42 | entries: [], 43 | agentToVersion: {}, 44 | version: [] 45 | }) 46 | 47 | /** Sort in ascending order. 
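 * For example, sortVersions([10, 2]) returns [2, 10]. The numeric comparator matters
 * because Array.prototype.sort would otherwise compare elements as strings and leave
 * [10, 2] unchanged.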
*/
48 | const sortVersions = (v: LV[]): LV[] => v.sort((a, b) => a - b)
49 |
50 | export const advanceFrontier = (frontier: LV[], vLast: LV, parents: LV[]): LV[] => {
51 | // assert(!branchContainsVersion(db, order, branch), 'db already contains version')
52 | // for (const parent of op.parents) {
53 | // assert(branchContainsVersion(db, parent, branch), 'operation in the future')
54 | // }
55 |
56 | const f = frontier.filter(v => !parents.includes(v))
57 | f.push(vLast)
58 | return sortVersions(f)
59 | }
60 |
61 | export const clientEntriesForAgent = (cg: CausalGraph, agent: string): ClientEntry[] => (
62 | cg.agentToVersion[agent] ??= []
63 | )
64 |
65 | const lastOr = <T, V>(list: T[], f: (t: T) => V, def: V): V => (
66 | list.length === 0 ? def : f(list[list.length - 1])
67 | )
68 |
69 | const nextVersion = (cg: CausalGraph): LV => (
70 | lastOr(cg.entries, e => e.vEnd, 0)
71 | )
72 |
73 | const tryAppendEntries = (a: CGEntry, b: CGEntry): boolean => {
74 | const canAppend = b.version === a.vEnd
75 | && a.agent === b.agent
76 | && a.seq + (a.vEnd - a.version) === b.seq
77 | && b.parents.length === 1 && b.parents[0] === a.vEnd - 1
78 |
79 | if (canAppend) {
80 | a.vEnd = b.vEnd
81 | }
82 |
83 | return canAppend
84 | }
85 |
86 | const tryAppendClientEntry = (a: ClientEntry, b: ClientEntry): boolean => {
87 | const canAppend = b.seq === a.seqEnd
88 | && b.version === (a.version + (a.seqEnd - a.seq))
89 |
90 | if (canAppend) {
91 | a.seqEnd = b.seqEnd
92 | }
93 | return canAppend
94 | }
95 |
96 | const findClientEntryRaw = (cg: CausalGraph, agent: string, seq: number): ClientEntry | null => {
97 | const av = cg.agentToVersion[agent]
98 | if (av == null) return null
99 |
100 | const result = bs(av, seq, (entry, needle) => (
101 | needle < entry.seq ? 1
102 | : needle >= entry.seqEnd ? -1
103 | : 0
104 | ))
105 |
106 | return result < 0 ? null : av[result]
107 | }
108 |
109 | const findClientEntry = (cg: CausalGraph, agent: string, seq: number): [ClientEntry, number] | null => {
110 | const clientEntry = findClientEntryRaw(cg, agent, seq)
111 | return clientEntry == null ? null : [clientEntry, seq - clientEntry.seq]
112 | }
113 |
114 | const findClientEntryTrimmed = (cg: CausalGraph, agent: string, seq: number): ClientEntry | null => {
115 | const result = findClientEntry(cg, agent, seq)
116 | if (result == null) return null
117 |
118 | const [clientEntry, offset] = result
119 | return offset === 0 ? clientEntry : {
120 | seq,
121 | seqEnd: clientEntry.seqEnd,
122 | version: clientEntry.version + offset
123 | }
124 | }
125 |
126 | export const hasVersion = (cg: CausalGraph, agent: string, seq: number): boolean => (
127 | findClientEntryRaw(cg, agent, seq) != null
128 | )
129 |
130 | // export const addLocal = (cg: CausalGraph, id: RawVersion, len: number = 1): LV => {
131 | // return add(cg, id[0], id[1], id[1]+len, cg.version)
132 | // }
133 |
134 | /** Returns the first new version in the inserted set */
135 | export const addRaw = (cg: CausalGraph, id: RawVersion, len: number = 1, rawParents?: RawVersion[]): LV => {
136 | const parents = rawParents != null
137 | ? rawToLVList(cg, rawParents)
138 | : cg.version
139 |
140 | return add(cg, id[0], id[1], id[1]+len, parents)
141 | }
142 |
143 | /** Returns the first new version in the inserted set */
144 | export const add = (cg: CausalGraph, agent: string, seqStart: number, seqEnd: number, parents: LV[] = cg.version): LV => {
145 | const version = nextVersion(cg)
146 |
147 | while (true) {
148 | // Look for an equivalent existing entry in the causal graph starting at
149 | // seq_start. We only add the parts of the range that do not already exist in CG.
150 |
151 | // The inserted items will either be the empty set or a range because of version semantics.
152 | const existingEntry = findClientEntryTrimmed(cg, agent, seqStart)
153 | // console.log(cg.agentToVersion[agent], seqStart, existingEntry)
154 | if (existingEntry == null) break // Insert start..end.
155 |
156 | if (existingEntry.seqEnd >= seqEnd) return -1 // Already inserted.
157 |
158 | // Or trim and loop.
159 | seqStart = existingEntry.seqEnd
160 | parents = [existingEntry.version + (existingEntry.seqEnd - existingEntry.seq) - 1]
161 | }
162 |
163 | const len = seqEnd - seqStart
164 | const vEnd = version + len
165 | const entry: CGEntry = {
166 | version,
167 | vEnd,
168 |
169 | agent,
170 | seq: seqStart,
171 | parents,
172 | }
173 |
174 | pushRLEList(cg.entries, entry, tryAppendEntries)
175 | pushRLEList(clientEntriesForAgent(cg, agent), { seq: seqStart, seqEnd, version}, tryAppendClientEntry)
176 |
177 | cg.version = advanceFrontier(cg.version, vEnd - 1, parents)
178 | return version
179 | }
180 |
181 | const versionCmp = ([a1, s1]: RawVersion, [a2, s2]: RawVersion) => (
182 | a1 < a2 ? 1
183 | : a1 > a2 ? -1
184 | : s1 - s2
185 | )
186 |
187 | export const tieBreakRegisters = <T>(cg: CausalGraph, data: AtLeast1<[LV, T]>): T => {
188 | let winner = data.reduce((a, b) => {
189 | // It's a bit gross doing this lookup multiple times for the winning item,
190 | // but eh. The data set will almost always contain exactly 1 item anyway.
191 | const rawA = lvToRaw(cg, a[0])
192 | const rawB = lvToRaw(cg, b[0])
193 |
194 | return versionCmp(rawA, rawB) < 0 ? a : b
195 | })
196 |
197 | return winner[1]
198 | }
199 |
200 | /**
201 | * Returns [seq, local version] for the new item (or the first item if num > 1).
202 | */
203 | export const assignLocal = (cg: CausalGraph, agent: string, num: number = 1): [number, LV] => {
204 | let version = nextVersion(cg)
205 | const av = clientEntriesForAgent(cg, agent)
206 | const seq = lastOr(av, ce => ce.seqEnd, 0)
207 | add(cg, agent, seq, seq + num, cg.version)
208 |
209 | return [seq, version]
210 | }
211 |
212 | export const findEntryContainingRaw = (cg: CausalGraph, v: LV): CGEntry => {
213 | const idx = bs(cg.entries, v, (entry, needle) => (
214 | needle < entry.version ? 1
215 | : needle >= entry.vEnd ? -1
216 | : 0
217 | ))
218 | if (idx < 0) throw Error('Invalid or unknown local version ' + v)
219 | return cg.entries[idx]
220 | }
221 | export const findEntryContaining = (cg: CausalGraph, v: LV): [CGEntry, number] => {
222 | const e = findEntryContainingRaw(cg, v)
223 | const offset = v - e.version
224 | return [e, offset]
225 | }
226 |
227 | export const lvToRawWithParents = (cg: CausalGraph, v: LV): [string, number, LV[]] => {
228 | const [e, offset] = findEntryContaining(cg, v)
229 | const parents = offset === 0 ?
e.parents : [v-1] 230 | return [e.agent, e.seq + offset, parents] 231 | } 232 | 233 | export const lvToRaw = (cg: CausalGraph, v: LV): RawVersion => { 234 | if (v === ROOT_LV) return ROOT 235 | const [e, offset] = findEntryContaining(cg, v) 236 | return [e.agent, e.seq + offset] 237 | // causalGraph.entries[localIndex] 238 | } 239 | export const lvToRawList = (cg: CausalGraph, parents: LV[]): RawVersion[] => ( 240 | parents.map(v => lvToRaw(cg, v)) 241 | ) 242 | export const getRawVersion = (cg: CausalGraph): RawVersion[] => lvToRawList(cg, cg.version) 243 | 244 | 245 | // export const getParents = (cg: CausalGraph, v: LV): LV[] => ( 246 | // localVersionToRaw(cg, v)[2] 247 | // ) 248 | 249 | export const tryRawToLV = (cg: CausalGraph, agent: string, seq: number): LV | null => { 250 | if (agent === 'ROOT') return ROOT_LV 251 | 252 | const clientEntry = findClientEntryTrimmed(cg, agent, seq) 253 | return clientEntry?.version ?? null 254 | } 255 | export const rawToLV = (cg: CausalGraph, agent: string, seq: number): LV => { 256 | if (agent === 'ROOT') return ROOT_LV 257 | 258 | const clientEntry = findClientEntryTrimmed(cg, agent, seq) 259 | if (clientEntry == null) throw Error(`Unknown ID: (${agent}, ${seq})`) 260 | return clientEntry.version 261 | } 262 | export const rawToLV2 = (cg: CausalGraph, v: RawVersion): LV => ( 263 | rawToLV(cg, v[0], v[1]) 264 | ) 265 | 266 | export const rawToLVList = (cg: CausalGraph, parents: RawVersion[]): LV[] => ( 267 | parents.map(([agent, seq]) => rawToLV(cg, agent, seq)) 268 | ) 269 | 270 | export const summarizeVersion = (cg: CausalGraph): VersionSummary => { 271 | const result: VersionSummary = {} 272 | for (const k in cg.agentToVersion) { 273 | const av = cg.agentToVersion[k] 274 | if (av.length === 0) continue 275 | 276 | const versions: [number, number][] = [] 277 | for (const ce of av) { 278 | pushRLEList(versions, [ce.seq, ce.seqEnd], tryRangeAppend) 279 | } 280 | 281 | result[k] = versions 282 | } 283 | return result 284 | } 285 | 286 | const eachVersionBetween = (cg: CausalGraph, vStart: LV, vEnd: LV, visit: (e: CGEntry, vs: number, ve: number) => void) => { 287 | let idx = bs(cg.entries, vStart, (entry, needle) => ( 288 | needle < entry.version ? 1 289 | : needle >= entry.vEnd ? -1 290 | : 0 291 | )) 292 | if (idx < 0) throw Error('Invalid or missing version: ' + vStart) 293 | 294 | for (; idx < cg.entries.length; idx++) { 295 | const entry = cg.entries[idx] 296 | if (entry.version >= vEnd) break 297 | 298 | // const offset = max2(vStart - entry.version, 0) 299 | visit(entry, max2(vStart, entry.version), min2(vEnd, entry.vEnd)) 300 | } 301 | } 302 | 303 | /** version is -1 when the seq does not overlap. Each yield is guaranteed to be a version run. */ 304 | type IntersectVisitor = (agent: string, startSeq: number, endSeq: number, version: number) => void 305 | 306 | /** Scan the VersionSummary and report (via visitor function) which versions overlap */ 307 | const intersectWithSummaryFull = (cg: CausalGraph, summary: VersionSummary, visit: IntersectVisitor) => { 308 | for (const agent in summary) { 309 | const clientEntries = cg.agentToVersion[agent] 310 | 311 | for (let [startSeq, endSeq] of summary[agent]) { 312 | // This is a bit tricky, because a single item in ClientEntry might span multiple 313 | // entries. 314 | 315 | if (clientEntries != null) { 316 | let idx = bs(clientEntries, startSeq, (entry, needle) => ( 317 | needle < entry.seq ? 1 318 | : needle >= entry.seqEnd ? 
-1 319 | : 0 320 | )) 321 | 322 | // If startSeq isn't found, start at the next entry. 323 | if (idx < 0) idx = -idx - 1 324 | 325 | for (; idx < clientEntries.length; idx++) { 326 | const ce = clientEntries[idx] 327 | if (ce.seq >= endSeq) break 328 | 329 | if (ce.seq > startSeq) { 330 | visit(agent, startSeq, ce.seq, -1) 331 | startSeq = ce.seq 332 | } 333 | 334 | const seqOffset = startSeq - ce.seq 335 | const versionStart = ce.version + seqOffset 336 | 337 | const localSeqEnd = min2(ce.seqEnd, endSeq) 338 | 339 | visit(agent, startSeq, localSeqEnd, versionStart) 340 | 341 | startSeq = localSeqEnd 342 | } 343 | } 344 | 345 | 346 | if (startSeq < endSeq) visit(agent, startSeq, endSeq, -1) 347 | } 348 | } 349 | } 350 | 351 | /** Yields the intersection and "remainder" (if any) */ 352 | export const intersectWithSummary = (cg: CausalGraph, summary: VersionSummary, versions: LV[] = []): [LV[], VersionSummary | null] => { 353 | let remainder: null | VersionSummary = null 354 | 355 | intersectWithSummaryFull(cg, summary, (agent, startSeq, endSeq, versionStart) => { 356 | if (versionStart >= 0) { 357 | const versionEnd = versionStart + (endSeq - startSeq) 358 | 359 | // Ok, now we go through everything from versionStart to versionEnd! Wild. 360 | eachVersionBetween(cg, versionStart, versionEnd, (e, vs, ve) => { 361 | const vLast = ve - 1 362 | if (vLast < e.version) throw Error('Invalid state') 363 | versions.push(vLast) 364 | }) 365 | } else { 366 | remainder ??= {} 367 | const a = (remainder[agent] ??= []) 368 | a.push([startSeq, endSeq]) 369 | } 370 | }) 371 | 372 | return [findDominators(cg, versions), remainder] 373 | } 374 | 375 | // *** TOOLS *** 376 | 377 | type DiffResult = { 378 | // These are ranges. Unlike the rust code, they're in normal 379 | // (ascending) order. 380 | aOnly: LVRange[], bOnly: LVRange[] 381 | } 382 | 383 | const pushReversedRLE = (list: LVRange[], start: LV, end: LV) => { 384 | pushRLEList(list, [start, end] as [number, number], tryRevRangeAppend) 385 | } 386 | 387 | 388 | // Numerical values used by utility methods below. 389 | export const enum DiffFlag { A=0, B=1, Shared=2 } 390 | 391 | /** 392 | * This method takes in two versions (expressed as frontiers) and returns the 393 | * set of operations only appearing in the history of one version or the other. 394 | */ 395 | export const diff = (cg: CausalGraph, a: LV[], b: LV[]): DiffResult => { 396 | const flags = new Map() 397 | 398 | // Every order is in here at most once. Every entry in the queue is also in 399 | // itemType. 400 | const queue = new PriorityQueue() 401 | 402 | // Number of items in the queue in both transitive histories (state Shared). 403 | let numShared = 0 404 | 405 | const enq = (v: LV, flag: DiffFlag) => { 406 | // console.log('enq', v, flag) 407 | const currentType = flags.get(v) 408 | if (currentType == null) { 409 | queue.enq(v) 410 | flags.set(v, flag) 411 | // console.log('+++ ', order, type, getLocalVersion(db, order)) 412 | if (flag === DiffFlag.Shared) numShared++ 413 | } else if (flag !== currentType && currentType !== DiffFlag.Shared) { 414 | // This is sneaky. If the two types are different they have to be {A,B}, 415 | // {A,Shared} or {B,Shared}. In any of those cases the final result is 416 | // Shared. If the current type isn't shared, set it as such. 
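// To make that concrete: a version first enqueued from `a` (flag A) and later reached
// from `b` (flag B) takes this branch, is promoted to Shared, and bumps numShared
// exactly once.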
417 | flags.set(v, DiffFlag.Shared) 418 | numShared++ 419 | } 420 | } 421 | 422 | for (const v of a) enq(v, DiffFlag.A) 423 | for (const v of b) enq(v, DiffFlag.B) 424 | 425 | // console.log('QF', queue, flags) 426 | 427 | const aOnly: LVRange[] = [], bOnly: LVRange[] = [] 428 | 429 | const markRun = (start: LV, endInclusive: LV, flag: DiffFlag) => { 430 | if (endInclusive < start) throw Error('end < start') 431 | 432 | // console.log('markrun', start, end, flag) 433 | if (flag == DiffFlag.Shared) return 434 | const target = flag === DiffFlag.A ? aOnly : bOnly 435 | pushReversedRLE(target, start, endInclusive + 1) 436 | } 437 | 438 | // Loop until everything is shared. 439 | while (queue.size() > numShared) { 440 | let v = queue.deq() 441 | let flag = flags.get(v)! 442 | // It should be safe to remove the item from itemType here. 443 | 444 | // console.log('--- ', v, 'flag', flag, 'shared', numShared, 'num', queue.size()) 445 | if (flag == null) throw Error('Invalid type') 446 | 447 | if (flag === DiffFlag.Shared) numShared-- 448 | 449 | const e = findEntryContainingRaw(cg, v) 450 | // console.log(v, e) 451 | 452 | // We need to check if this entry contains the next item in the queue. 453 | while (!queue.isEmpty() && queue.peek() >= e.version) { 454 | const v2 = queue.deq() 455 | const flag2 = flags.get(v2)! 456 | // console.log('pop', v2, flag2) 457 | if (flag2 === DiffFlag.Shared) numShared--; 458 | 459 | if (flag2 !== flag) { // Mark from v2..=v and continue. 460 | // v2 + 1 is correct here - but you'll probably need a whiteboard to 461 | // understand why. 462 | markRun(v2 + 1, v, flag) 463 | v = v2 464 | flag = DiffFlag.Shared 465 | } 466 | } 467 | 468 | // console.log(e, v, flag) 469 | markRun(e.version, v, flag) 470 | 471 | for (const p of e.parents) enq(p, flag) 472 | } 473 | 474 | aOnly.reverse() 475 | bOnly.reverse() 476 | return {aOnly, bOnly} 477 | } 478 | 479 | 480 | /** Does frontier contain target? */ 481 | export const versionContainsTime = (cg: CausalGraph, frontier: LV[], target: LV): boolean => { 482 | if (target === ROOT_LV || frontier.includes(target)) return true 483 | 484 | const queue = new PriorityQueue() 485 | for (const v of frontier) if (v > target) queue.enq(v) 486 | 487 | while (queue.size() > 0) { 488 | const v = queue.deq() 489 | // console.log('deq v') 490 | 491 | // TODO: Will this ever hit? 492 | if (v === target) return true 493 | 494 | const e = findEntryContainingRaw(cg, v) 495 | if (e.version <= target) return true 496 | 497 | // Clear any queue items pointing to this entry. 498 | while (!queue.isEmpty() && queue.peek() >= e.version) { 499 | queue.deq() 500 | } 501 | 502 | for (const p of e.parents) { 503 | if (p === target) return true 504 | else if (p > target) queue.enq(p) 505 | } 506 | } 507 | 508 | return false 509 | } 510 | 511 | export function findDominators2(cg: CausalGraph, versions: LV[], cb: (v: LV, isDominator: boolean) => void) { 512 | if (versions.length === 0) return 513 | if (versions.length === 1) { 514 | cb(versions[0], true) 515 | return 516 | } 517 | 518 | // The queue contains (version, isInput) pairs encoded using even/odd numbers. 
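// For example, input version 5 is enqueued as 10 (5 * 2), while version 5 reached as a
// parent is enqueued as 11 (5 * 2 + 1). The decode below recovers the version with
// vEnc >> 1 and the isInput flag with vEnc % 2.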
519 | const queue = new PriorityQueue() 520 | for (const v of versions) queue.enq(v * 2) 521 | 522 | let inputsRemaining = versions.length 523 | 524 | while (queue.size() > 0 && inputsRemaining > 0) { 525 | const vEnc = queue.deq() 526 | const isInput = (vEnc % 2) === 0 527 | const v = vEnc >> 1 528 | 529 | if (isInput) { 530 | cb(v, true) 531 | inputsRemaining -= 1 532 | } 533 | 534 | const e = findEntryContainingRaw(cg, v) 535 | 536 | // Clear any queue items pointing to this entry. 537 | while (!queue.isEmpty() && queue.peek() >= e.version * 2) { 538 | const v2Enc = queue.deq() 539 | const isInput2 = (v2Enc % 2) === 0 540 | if (isInput2) { 541 | cb(v2Enc >> 1, false) 542 | inputsRemaining -= 1 543 | } 544 | } 545 | 546 | for (const p of e.parents) { 547 | queue.enq(p * 2 + 1) 548 | } 549 | } 550 | } 551 | 552 | export function findDominators(cg: CausalGraph, versions: LV[]): LV[] { 553 | if (versions.length <= 1) return versions 554 | const result: LV[] = [] 555 | findDominators2(cg, versions, (v, isDominator) => { 556 | if (isDominator) result.push(v) 557 | }) 558 | return result.reverse() 559 | } 560 | 561 | export const lvEq = (a: LV[], b: LV[]) => ( 562 | a.length === b.length && a.every((val, idx) => b[idx] === val) 563 | ) 564 | 565 | export function findConflicting(cg: CausalGraph, a: LV[], b: LV[], visit: (range: LVRange, flag: DiffFlag) => void): LV[] { 566 | // dbg!(a, b); 567 | 568 | // Sorted highest to lowest (so we get the highest item first). 569 | type TimePoint = { 570 | v: LV[], // Sorted in inverse order (highest to lowest) 571 | flag: DiffFlag 572 | } 573 | 574 | const pointFromVersions = (v: LV[], flag: DiffFlag) => ({ 575 | v: v.length <= 1 ? v : v.slice().sort((a, b) => b - a), 576 | flag 577 | }) 578 | 579 | // The heap is sorted such that we pull the highest items first. 580 | // const queue: BinaryHeap<(TimePoint, DiffFlag)> = BinaryHeap::new(); 581 | const queue = new PriorityQueue((a, b) => { 582 | for (let i = 0; i < a.v.length; i++) { 583 | if (b.v.length <= i) return 1 584 | const c = a.v[i] - b.v[i] 585 | if (c !== 0) return c 586 | } 587 | if (a.v.length < b.v.length) return -1 588 | 589 | return a.flag - b.flag 590 | }) 591 | 592 | queue.enq(pointFromVersions(a, DiffFlag.A)); 593 | queue.enq(pointFromVersions(b, DiffFlag.B)); 594 | 595 | // Loop until we've collapsed the graph down to a single element. 596 | while (true) { 597 | let {v, flag} = queue.deq() 598 | // console.log('deq', v, flag) 599 | if (v.length === 0) return [] 600 | 601 | if (v[0] === ROOT_LV) throw Error('Should not happen') 602 | 603 | // Discard duplicate entries. 604 | 605 | // I could write this with an inner loop and a match statement, but this is shorter and 606 | // more readable. The optimizer has to earn its keep somehow. 607 | // while queue.peek() == Some(&time) { queue.pop(); } 608 | while (!queue.isEmpty()) { 609 | const {v: peekV, flag: peekFlag} = queue.peek() 610 | // console.log('peek', peekV, v, lvEq(v, peekV)) 611 | if (lvEq(v, peekV)) { 612 | if (peekFlag !== flag) flag = DiffFlag.Shared 613 | queue.deq() 614 | } else break 615 | } 616 | 617 | if (queue.isEmpty()) return v.reverse() 618 | 619 | // If this node is a merger, shatter it. 620 | if (v.length > 1) { 621 | // We'll deal with v[0] directly below. 
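// For example, a merge point [9, 4, 2] (sorted highest first) is shattered here: [4] and
// [2] are re-enqueued as single-version points while the code below continues with t = 9.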
622 | for (let i = 1; i < v.length; i++) { 623 | // console.log('shatter', v[i], 'flag', flag) 624 | queue.enq({v: [v[i]], flag}) 625 | } 626 | } 627 | 628 | const t = v[0] 629 | const containingTxn = findEntryContainingRaw(cg, t) 630 | 631 | // I want an inclusive iterator :p 632 | const txnStart = containingTxn.version 633 | let end = t + 1 634 | 635 | // Consume all other changes within this txn. 636 | while (true) { 637 | if (queue.isEmpty()) { 638 | return [end - 1] 639 | } else { 640 | const {v: peekV, flag: peekFlag} = queue.peek() 641 | // console.log('inner peek', peekV, (queue as any)._elements) 642 | 643 | if (peekV.length >= 1 && peekV[0] >= txnStart) { 644 | // The next item is within this txn. Consume it. 645 | queue.deq() 646 | // console.log('inner deq', peekV, peekFlag) 647 | 648 | const peekLast = peekV[0] 649 | 650 | // Only emit inner items when they aren't duplicates. 651 | if (peekLast + 1 < end) { 652 | // +1 because we don't want to include the actual merge point in the returned set. 653 | visit([peekLast + 1, end], flag) 654 | end = peekLast + 1 655 | } 656 | 657 | if (peekFlag !== flag) flag = DiffFlag.Shared 658 | 659 | if (peekV.length > 1) { 660 | // We've run into a merged item which uses part of this entry. 661 | // We've already pushed the necessary span to the result. Do the 662 | // normal merge & shatter logic with this item next. 663 | for (let i = 1; i < peekV.length; i++) { 664 | // console.log('shatter inner', peekV[i], 'flag', peekFlag) 665 | 666 | queue.enq({v: [peekV[i]], flag: peekFlag}) 667 | } 668 | } 669 | } else { 670 | // Emit the remainder of this txn. 671 | // console.log('processed txn', txnStart, end, 'flag', flag, 'parents', containingTxn.parents) 672 | visit([txnStart, end], flag) 673 | 674 | queue.enq(pointFromVersions(containingTxn.parents, flag)) 675 | break 676 | } 677 | } 678 | } 679 | } 680 | } 681 | 682 | 683 | 684 | /** 685 | * Two versions have one of 4 different relationship configurations: 686 | * - They're equal (a == b) 687 | * - They're concurrent (a || b) 688 | * - Or one dominates the other (a < b or b > a). 689 | * 690 | * This method depends on the caller to check if the passed versions are equal 691 | * (a === b). Otherwise it returns 0 if the operations are concurrent, 692 | * -1 if a < b or 1 if b > a. 693 | */ 694 | export const compareVersions = (cg: CausalGraph, a: LV, b: LV): number => { 695 | if (a > b) { 696 | return versionContainsTime(cg, [a], b) ? -1 : 0 697 | } else if (a < b) { 698 | return versionContainsTime(cg, [b], a) ? 1 : 0 699 | } 700 | throw new Error('a and b are equal') 701 | } 702 | 703 | 704 | 705 | type SerializedCGEntryV1 = [ 706 | version: LV, 707 | vEnd: LV, 708 | 709 | agent: string, 710 | seq: number, // Seq for version. 
711 | 712 | parents: LV[] // Parents for version 713 | ] 714 | 715 | export interface SerializedCausalGraphV1 { 716 | version: LV[], 717 | entries: SerializedCGEntryV1[], 718 | } 719 | 720 | 721 | export function serialize(cg: CausalGraph): SerializedCausalGraphV1 { 722 | return { 723 | version: cg.version, 724 | entries: cg.entries.map(e => ([ 725 | e.version, e.vEnd, e.agent, e.seq, e.parents 726 | ])) 727 | } 728 | } 729 | 730 | export function fromSerialized(data: SerializedCausalGraphV1): CausalGraph { 731 | const cg: CausalGraph = { 732 | version: data.version, 733 | entries: data.entries.map(e => ({ 734 | version: e[0], vEnd: e[1], agent: e[2], seq: e[3], parents: e[4] 735 | })), 736 | agentToVersion: {} 737 | } 738 | 739 | for (const e of cg.entries) { 740 | const len = e.vEnd - e.version 741 | pushRLEList(clientEntriesForAgent(cg, e.agent), { 742 | seq: e.seq, seqEnd: e.seq + len, version: e.version 743 | }, tryAppendClientEntry) 744 | } 745 | 746 | return cg 747 | } 748 | 749 | 750 | type PartialSerializedCGEntryV1 = [ 751 | agent: string, 752 | seq: number, 753 | len: number, 754 | 755 | parents: RawVersion[] 756 | ] 757 | 758 | export type PartialSerializedCGV1 = PartialSerializedCGEntryV1[] 759 | 760 | export function serializeFromVersion(cg: CausalGraph, v: LV[]): PartialSerializedCGV1 { 761 | const ranges = diff(cg, v, cg.version).bOnly 762 | 763 | const entries: PartialSerializedCGEntryV1[] = [] 764 | for (const r of ranges) { 765 | let [start, end] = r 766 | while (start != end) { 767 | const [e, offset] = findEntryContaining(cg, start) 768 | 769 | const localEnd = min2(end, e.vEnd) 770 | const len = localEnd - start 771 | const parents: RawVersion[] = offset === 0 772 | ? lvToRawList(cg, e.parents) 773 | : [[e.agent, e.seq + offset - 1]] 774 | 775 | entries.push([ 776 | e.agent, 777 | e.seq + offset, 778 | len, 779 | parents 780 | ]) 781 | 782 | start += len 783 | } 784 | } 785 | 786 | return entries 787 | } 788 | 789 | export function mergePartialVersions(cg: CausalGraph, data: PartialSerializedCGV1): LVRange { 790 | const start = nextVersion(cg) 791 | 792 | for (const [agent, seq, len, parents] of data) { 793 | addRaw(cg, [agent, seq], len, parents) 794 | } 795 | 796 | return [start, nextVersion(cg)] 797 | } 798 | 799 | export function advanceVersionFromSerialized(cg: CausalGraph, data: PartialSerializedCGV1, version: LV[]): LV[] { 800 | for (const [agent, seq, len, rawParents] of data) { 801 | const parents = rawToLVList(cg, rawParents) 802 | const vLast = rawToLV(cg, agent, seq + len - 1) 803 | version = advanceFrontier(version, vLast, parents) 804 | } 805 | 806 | // NOTE: Callers might need to call findDominators on the result. 
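// (That's because advanceFrontier only drops frontier entries that are named directly as
// parents, so an existing frontier version that is dominated through a longer path can
// survive in the returned array.)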
807 | return version 808 | } 809 | 810 | // ;(() => { 811 | // const cg1 = create() 812 | // const agent1 = createAgent('a') 813 | // const agent2 = createAgent('b') 814 | // addRaw(cg1, agent1(), 5) 815 | // const s1 = serializeFromVersion(cg1, []) 816 | // addRaw(cg1, agent2(), 10) 817 | // const s2 = serializeFromVersion(cg1, []) 818 | // console.dir(s2, {depth: null}) 819 | 820 | // const cg2 = create() 821 | // mergePartialVersions(cg2, s1) 822 | // mergePartialVersions(cg2, s2) 823 | // // mergePartialVersions(cg2, s) 824 | 825 | // // console.dir(cg2, {depth: null}) 826 | // })() 827 | 828 | // ;(() => { 829 | // const cg1 = create() 830 | // const agent1 = createAgent('a') 831 | // addRaw(cg1, agent1(), 5) 832 | // add(cg1, 'b', 0, 10, [2]) 833 | 834 | // // [3, 9] 835 | // console.log(findDominators2(cg1, [0, 1, 2, 3, 5, 9], (v, i) => console.log(v, i))) 836 | // })() 837 | 838 | 839 | // ;(() => { 840 | // const cg = create() 841 | 842 | // add(cg, 'a', 0, 5) 843 | // add(cg, 'b', 0, 10, [2]) 844 | // add(cg, 'a', 5, 10, [4, 14]) 845 | 846 | // console.dir(cg, {depth:null}) 847 | 848 | // const summary: VersionSummary = { 849 | // a: [[0, 6]], 850 | // b: [[0, 100]], 851 | // } 852 | // intersectWithSummaryFull(cg, summary, (agent, start, end, v) => { 853 | // console.log(agent, start, end, v) 854 | // }) 855 | 856 | // // [15] 857 | // console.dir(intersectWithSummary(cg, summary), {depth: null}) 858 | // })() -------------------------------------------------------------------------------- /js/src/fancydb/index.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert/strict" 2 | import Map2 from 'map2' 3 | import { AtLeast1, CreateValue, DBSnapshot, DBValue, LV, LVRange, Operation, Primitive, RawVersion, ROOT, ROOT_LV, SnapCRDTInfo, SnapRegisterValue } from '../types' 4 | import { AgentGenerator } from "../utils" 5 | import * as causalGraph from './causal-graph.js' 6 | import { CausalGraph } from "./causal-graph.js" 7 | 8 | type RegisterValue = {type: 'primitive', val: Primitive} 9 | | {type: 'crdt', id: LV} 10 | 11 | type MVRegister = AtLeast1<[LV, RegisterValue]> 12 | 13 | 14 | type CRDTMapInfo = { type: 'map', registers: {[k: string]: MVRegister} } 15 | type CRDTSetInfo = { type: 'set', values: Map } 16 | type CRDTRegisterInfo = { type: 'register', value: MVRegister } 17 | 18 | type CRDTInfo = CRDTMapInfo | CRDTSetInfo | CRDTRegisterInfo 19 | // } | { 20 | // type: 'stateset', 21 | // values: Map 22 | // } 23 | 24 | export interface FancyDB { 25 | crdts: Map, 26 | cg: CausalGraph, 27 | onop?: (db: FancyDB, op: Operation) => void 28 | } 29 | 30 | export function createDb(): FancyDB { 31 | const db: FancyDB = { 32 | crdts: new Map(), 33 | cg: causalGraph.create(), 34 | } 35 | 36 | db.crdts.set(ROOT_LV, { 37 | type: "map", 38 | registers: {} 39 | }) 40 | 41 | return db 42 | } 43 | 44 | 45 | function removeRecursive(db: FancyDB, value: RegisterValue) { 46 | if (value.type !== 'crdt') return 47 | 48 | const crdt = db.crdts.get(value.id) 49 | if (crdt == null) return 50 | 51 | switch (crdt.type) { 52 | case 'map': 53 | for (const k in crdt.registers) { 54 | const reg = crdt.registers[k] 55 | for (const [version, value] of reg) { 56 | removeRecursive(db, value) 57 | } 58 | } 59 | break 60 | case 'register': 61 | for (const [version, value] of crdt.value) { 62 | removeRecursive(db, value) 63 | } 64 | break 65 | case 'set': 66 | for (const [id, value] of crdt.values) { 67 | removeRecursive(db, value) 68 | } 69 | break 70 | // 
case 'stateset': 71 | // throw Error('Cannot remove from a stateset') 72 | 73 | default: throw Error('Unknown CRDT type!?') 74 | } 75 | 76 | db.crdts.delete(value.id) 77 | } 78 | 79 | const errExpr = (str: string): never => { throw Error(str) } 80 | 81 | function createCRDT(db: FancyDB, id: LV, type: 'map' | 'set' | 'register' | 'stateset') { 82 | if (db.crdts.has(id)) { 83 | throw Error('CRDT already exists !?') 84 | } 85 | 86 | const crdtInfo: CRDTInfo = type === 'map' ? { 87 | type: "map", 88 | registers: {}, 89 | } : type === 'register' ? { 90 | type: 'register', 91 | // Registers default to NULL when created. 92 | value: [[id, {type: 'primitive', val: null}]], 93 | } : type === 'set' ? { 94 | type: 'set', 95 | values: new Map, 96 | // } : type === 'stateset' ? { 97 | // type: 'stateset', 98 | // values: new Map, 99 | } : errExpr('Invalid CRDT type') 100 | 101 | db.crdts.set(id, crdtInfo) 102 | } 103 | 104 | function mergeRegister(db: FancyDB, globalParents: LV[], oldPairs: MVRegister, localParents: LV[], newVersion: LV, newVal: CreateValue): MVRegister { 105 | let newValue: RegisterValue 106 | if (newVal.type === 'primitive') { 107 | newValue = newVal 108 | } else { 109 | // Create it. 110 | createCRDT(db, newVersion, newVal.crdtKind) 111 | newValue = {type: "crdt", id: newVersion} 112 | } 113 | 114 | const newPairs: MVRegister = [[newVersion, newValue]] 115 | for (const [version, value] of oldPairs) { 116 | // Each item is either retained or removed. 117 | if (localParents.some(v2 => version === v2)) { 118 | // The item was named in parents. Remove it. 119 | // console.log('removing', value) 120 | removeRecursive(db, value) 121 | } else { 122 | // We're intending to retain this operation because its not explicitly 123 | // named, but that only makes sense if the retained version is concurrent 124 | // with the new version. 125 | if (causalGraph.versionContainsTime(db.cg, globalParents, version)) { 126 | throw Error('Invalid local parents in operation') 127 | } 128 | 129 | newPairs.push([version, value]) 130 | } 131 | } 132 | 133 | // Note we're sorting by *local version* here. This doesn't sort by LWW 134 | // priority. Could do - currently I'm figuring out the priority in the 135 | // get() method. 136 | newPairs.sort(([v1], [v2]) => v1 - v2) 137 | 138 | return newPairs 139 | } 140 | 141 | export function applyRemoteOp(db: FancyDB, op: Operation): LV { 142 | const newVersion = causalGraph.addRaw(db.cg, op.id, 1, op.globalParents) 143 | if (newVersion < 0) { 144 | // The operation is already known. 145 | console.warn('Operation already applied', op.id) 146 | return newVersion 147 | } 148 | 149 | const globalParents = causalGraph.rawToLVList(db.cg, op.globalParents) 150 | 151 | const crdtLV = causalGraph.rawToLV2(db.cg, op.crdtId) 152 | 153 | const crdt = db.crdts.get(crdtLV) 154 | if (crdt == null) { 155 | console.warn('CRDT has been deleted..') 156 | return newVersion 157 | } 158 | 159 | // Every register operation creates a new value, and removes 0-n other values. 
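// For example, two concurrent registerSet operations (neither naming the other in
// localParents) both keep their values in the multi-value register; a later operation
// that lists both of them as localParents collapses it back to a single value.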
160 | switch (op.action.type) { 161 | case 'registerSet': { 162 | if (crdt.type !== 'register') throw Error('Invalid operation type for target') 163 | const localParents = causalGraph.rawToLVList(db.cg, op.action.localParents) 164 | const newPairs = mergeRegister(db, globalParents, crdt.value, localParents, newVersion, op.action.val) 165 | 166 | crdt.value = newPairs 167 | break 168 | } 169 | case 'map': { 170 | if (crdt.type !== 'map') throw Error('Invalid operation type for target') 171 | 172 | const oldPairs = crdt.registers[op.action.key] ?? [] 173 | const localParents = causalGraph.rawToLVList(db.cg, op.action.localParents) 174 | 175 | const newPairs = mergeRegister(db, globalParents, oldPairs, localParents, newVersion, op.action.val) 176 | 177 | crdt.registers[op.action.key] = newPairs 178 | break 179 | } 180 | case 'setInsert': case 'setDelete': { // Sets! 181 | if (crdt.type !== 'set') throw Error('Invalid operation type for target') 182 | 183 | // Set operations are comparatively much simpler, because insert 184 | // operations cannot be concurrent and multiple overlapping delete 185 | // operations are ignored. 186 | if (op.action.type == 'setInsert') { 187 | if (op.action.val.type === 'primitive') { 188 | crdt.values.set(newVersion, op.action.val) 189 | } else { 190 | createCRDT(db, newVersion, op.action.val.crdtKind) 191 | crdt.values.set(newVersion, {type: "crdt", id: newVersion}) 192 | } 193 | } else { 194 | // Delete! 195 | const target = causalGraph.rawToLV2(db.cg, op.action.target) 196 | let oldVal = crdt.values.get(target) 197 | if (oldVal != null) { 198 | removeRecursive(db, oldVal) 199 | crdt.values.delete(target) 200 | } 201 | } 202 | 203 | break 204 | } 205 | 206 | default: throw Error('Invalid action type') 207 | } 208 | 209 | db.onop?.(db, op) 210 | return newVersion 211 | } 212 | 213 | 214 | const getMap = (db: FancyDB, mapId: LV): CRDTMapInfo => { 215 | const crdt = db.crdts.get(mapId) 216 | if (crdt == null || crdt.type !== 'map') throw Error('Invalid CRDT') 217 | return crdt 218 | } 219 | 220 | export function localMapInsert(db: FancyDB, id: RawVersion, mapId: LV, key: string, val: CreateValue): [Operation, LV] { 221 | const crdt = getMap(db, mapId) 222 | 223 | const crdtId = causalGraph.lvToRaw(db.cg, mapId) 224 | 225 | const localParentsLV = (crdt.registers[key] ?? []).map(([version]) => version) 226 | const localParents = causalGraph.lvToRawList(db.cg, localParentsLV) 227 | const op: Operation = { 228 | id, 229 | crdtId, 230 | globalParents: causalGraph.lvToRawList(db.cg, db.cg.version), 231 | action: { type: 'map', localParents, key, val } 232 | } 233 | 234 | // TODO: Could easily inline this - which would mean more code but higher performance. 235 | const v = applyRemoteOp(db, op) 236 | return [op, v] 237 | } 238 | 239 | 240 | 241 | 242 | /** Recursively set / insert values into the map to make the map resemble the input */ 243 | export function recursivelySetMap(db: FancyDB, agent: AgentGenerator, mapId: LV, val: Record) { 244 | // The root value already exists. Recursively insert / replace child items. 245 | const crdt = getMap(db, mapId) 246 | 247 | for (const k in val) { 248 | const v = val[k] 249 | // console.log(k, v) 250 | if (v === null || typeof v !== 'object') { 251 | // Set primitive into register. 252 | // This is a bit inefficient - it re-queries the CRDT and whatnot. 
253 | // console.log('localMapInsert', v) 254 | localMapInsert(db, agent(), mapId, k, {type: 'primitive', val: v}) 255 | } else { 256 | if (Array.isArray(v)) throw Error('Arrays not supported') // Could just move this up for now. 257 | 258 | // Or we have a recursive object merge. 259 | const inner = crdt.registers[k] 260 | 261 | // Force the inner item to become a map. Rawr. 262 | let innerMapId 263 | const setToMap = () => ( 264 | localMapInsert(db, agent(), mapId, k, {type: "crdt", crdtKind: 'map'})[1] 265 | ) 266 | 267 | if (inner == null) innerMapId = setToMap() 268 | else { 269 | const activePair = causalGraph.tieBreakRegisters(db.cg, inner) 270 | 271 | if (activePair.type !== 'crdt') { 272 | innerMapId = setToMap() 273 | } else { 274 | const innerId = activePair.id 275 | const innerInfo = db.crdts.get(innerId)! 276 | if (innerInfo.type !== 'map') innerMapId = setToMap() 277 | else innerMapId = innerId 278 | } 279 | } 280 | 281 | // console.log('recursivelySetMap', innerMapId, v) 282 | recursivelySetMap(db, agent, innerMapId, v) 283 | } 284 | } 285 | } 286 | 287 | export function recursivelySetRoot(db: FancyDB, agent: AgentGenerator, val: Record) { 288 | // The root value already exists. Recursively insert / replace child items. 289 | recursivelySetMap(db, agent, ROOT_LV, val) 290 | } 291 | 292 | 293 | 294 | 295 | const registerToVal = (db: FancyDB, r: RegisterValue): DBValue => ( 296 | (r.type === 'primitive') 297 | ? r.val 298 | : get(db, r.id) // Recurse! 299 | ) 300 | 301 | export function get(db: FancyDB): {[k: string]: DBValue}; 302 | export function get(db: FancyDB, crdtId: LV): DBValue; 303 | export function get(db: FancyDB, crdtId: LV = ROOT_LV): DBValue { 304 | const crdt = db.crdts.get(crdtId) 305 | if (crdt == null) { return null } 306 | 307 | switch (crdt.type) { 308 | case 'register': { 309 | // When there's a tie, the active value is based on the order in pairs. 310 | const activePair = causalGraph.tieBreakRegisters(db.cg, crdt.value) 311 | return registerToVal(db, activePair) 312 | } 313 | case 'map': { 314 | const result: {[k: string]: DBValue} = {} 315 | for (const k in crdt.registers) { 316 | const activePair = causalGraph.tieBreakRegisters(db.cg, crdt.registers[k]) 317 | result[k] = registerToVal(db, activePair) 318 | } 319 | return result 320 | } 321 | case 'set': { 322 | const result = new Map2() 323 | 324 | for (const [version, value] of crdt.values) { 325 | const rawVersion = causalGraph.lvToRaw(db.cg, version) 326 | result.set(rawVersion[0], rawVersion[1], registerToVal(db, value)) 327 | } 328 | 329 | return result 330 | } 331 | default: throw Error('Invalid CRDT type in DB') 332 | } 333 | } 334 | 335 | const isObj = (x: Primitive): x is Record => x != null && typeof x === 'object' 336 | export function getPath(db: FancyDB, path: (string | number)[], base: LV = ROOT_LV): RegisterValue { 337 | let idx = 0 338 | 339 | let container: RegisterValue = {type: 'crdt', id: base} 340 | 341 | while (idx < path.length) { 342 | // ... Though if the value was an object, we really could! 
343 | const p = path[idx] 344 | 345 | // if (container.type === 'primitive' && container.val != null && typeof container.val === 'object') { 346 | if (container.type === 'primitive') { 347 | if (isObj(container.val) && typeof p === 'string') { 348 | container = { 349 | type: 'primitive', 350 | val: container.val[p] 351 | } 352 | } else { 353 | throw Error('Cannot descend into primitive object') 354 | } 355 | } else { 356 | const crdt = db.crdts.get(container.id) 357 | if (crdt == null) throw Error('Cannot descend into CRDT') 358 | 359 | if (crdt.type === 'map' && typeof p === 'string') { 360 | const register = crdt.registers[p] 361 | container = causalGraph.tieBreakRegisters(db.cg, register) 362 | idx++ 363 | } else if (crdt.type === 'set' && typeof p === 'number') { 364 | container = crdt.values.get(p) ?? errExpr('Missing set value') 365 | idx++ 366 | } else if (crdt.type === 'register') { 367 | container = causalGraph.tieBreakRegisters(db.cg, crdt.value) 368 | } else { 369 | throw Error(`Cannot descend into ${crdt.type} with path '${p}'`) 370 | } 371 | } 372 | } 373 | 374 | return container 375 | } 376 | 377 | // *** Snapshot methods *** 378 | const registerValToJSON = (db: FancyDB, val: RegisterValue): SnapRegisterValue => ( 379 | val.type === 'crdt' ? { 380 | type: 'crdt', 381 | id: causalGraph.lvToRaw(db.cg, val.id) 382 | } : val 383 | ) 384 | 385 | const mvRegisterToJSON = (db: FancyDB, val: MVRegister): [RawVersion, SnapRegisterValue][] => ( 386 | val.map(([lv, val]) => { 387 | const v = causalGraph.lvToRaw(db.cg, lv) 388 | const mappedVal: SnapRegisterValue = registerValToJSON(db, val) 389 | return [v, mappedVal] 390 | }) 391 | ) 392 | 393 | /** Used for interoperability with SimpleDB */ 394 | export function toSnapshot(db: FancyDB): DBSnapshot { 395 | return { 396 | version: causalGraph.lvToRawList(db.cg, db.cg.version), 397 | crdts: Array.from(db.crdts.entries()).map(([lv, rawInfo]) => { 398 | const [agent, seq] = causalGraph.lvToRaw(db.cg, lv) 399 | const mappedInfo: SnapCRDTInfo = rawInfo.type === 'set' ? { 400 | type: rawInfo.type, 401 | values: Array.from(rawInfo.values).map(([lv, val]) => { 402 | const [agent, seq] = causalGraph.lvToRaw(db.cg, lv) 403 | return [agent, seq, registerValToJSON(db, val)] 404 | }) 405 | } : rawInfo.type === 'map' ? { 406 | type: rawInfo.type, 407 | registers: Object.fromEntries(Object.entries(rawInfo.registers) 408 | .map(([k, val]) => ([k, mvRegisterToJSON(db, val)]))) 409 | } : rawInfo.type === 'register' ? { 410 | type: rawInfo.type, 411 | value: mvRegisterToJSON(db, rawInfo.value) 412 | } : errExpr('Unknown CRDT type') // Never. 413 | return [agent, seq, mappedInfo] 414 | }) 415 | } 416 | } 417 | 418 | // *** Serialization *** 419 | 420 | type SerializedRegisterValue = [type: 'primitive', val: Primitive] 421 | | [type: 'crdt', id: LV] 422 | 423 | type SerializedMVRegister = [LV, SerializedRegisterValue][] 424 | 425 | type SerializedCRDTInfo = [ 426 | type: 'map', 427 | registers: [k: string, reg: SerializedMVRegister][], 428 | ] | [ 429 | type: 'set', 430 | values: [LV, SerializedRegisterValue][], 431 | ] | [ 432 | type: 'register', 433 | value: SerializedMVRegister, 434 | ] 435 | 436 | export interface SerializedFancyDBv1 { 437 | crdts: [LV, SerializedCRDTInfo][] 438 | cg: causalGraph.SerializedCausalGraphV1, 439 | } 440 | 441 | 442 | const serializeRegisterValue = (r: RegisterValue): SerializedRegisterValue => ( 443 | r.type === 'crdt' ? 
[r.type, r.id] 444 | : [r.type, r.val] 445 | ) 446 | const serializeMV = (r: MVRegister): SerializedMVRegister => ( 447 | r.map(([v, r]) => [v, serializeRegisterValue(r)]) 448 | ) 449 | 450 | const serializeCRDTInfo = (info: CRDTInfo): SerializedCRDTInfo => ( 451 | info.type === 'map' ? [ 452 | 'map', Object.entries(info.registers).map(([k, v]) => ([k, serializeMV(v)])) 453 | ] : info.type === 'set' ? [ 454 | 'set', Array.from(info.values).map(([id, v]) => [id, serializeRegisterValue(v)]) 455 | ] : info.type === 'register' ? [ 456 | 'register', serializeMV(info.value) 457 | ] : errExpr('Unknown CRDT type') 458 | ) 459 | 460 | export function serialize(db: FancyDB): SerializedFancyDBv1 { 461 | return { 462 | cg: causalGraph.serialize(db.cg), 463 | crdts: Array.from(db.crdts).map(([lv, info]) => ([ 464 | lv, serializeCRDTInfo(info) 465 | ])) 466 | } 467 | } 468 | 469 | 470 | 471 | const deserializeRegisterValue = (data: SerializedRegisterValue): RegisterValue => ( 472 | data[0] === 'crdt' ? {type: 'crdt', id: data[1]} 473 | : {type: 'primitive', val: data[1]} 474 | ) 475 | const deserializeMV = (r: SerializedMVRegister): MVRegister => { 476 | const result: [LV, RegisterValue][] = r.map(([v, r]) => [v, deserializeRegisterValue(r)]) 477 | if (result.length === 0) throw Error('Invalid MV register') 478 | return result as MVRegister 479 | } 480 | 481 | const deserializeCRDTInfo = (data: SerializedCRDTInfo): CRDTInfo => { 482 | const type = data[0] 483 | return type === 'map' ? { 484 | type: 'map', 485 | registers: Object.fromEntries(data[1].map(([k, r]) => ([k, deserializeMV(r)]))) 486 | } : type === 'register' ? { 487 | type: 'register', 488 | value: deserializeMV(data[1]) 489 | } : type === 'set' ? { 490 | type: 'set', 491 | values: new Map(data[1].map(([k, v]) => ([k, deserializeRegisterValue(v)]))) 492 | } : errExpr('Invalid or unknown type: ' + type) 493 | } 494 | 495 | export function fromSerialized(data: SerializedFancyDBv1): FancyDB { 496 | return { 497 | cg: causalGraph.fromSerialized(data.cg), 498 | crdts: new Map(data.crdts 499 | .map(([id, crdtData]) => [id, deserializeCRDTInfo(crdtData)])) 500 | } 501 | } 502 | 503 | /** Partial serialization */ 504 | 505 | type PSerializedRegisterValue = [type: 'primitive', val: Primitive] 506 | | [type: 'crdt', agent: string, seq: number] 507 | 508 | type PSerializedMVRegister = [agent: string, seq: number, val: PSerializedRegisterValue][] 509 | 510 | type PSerializedCRDTInfo = [ 511 | type: 'map', 512 | registers: [k: string, reg: PSerializedMVRegister][], 513 | ] | [ 514 | type: 'set', 515 | values: [agent: string, seq: number, val: PSerializedRegisterValue][], 516 | ] | [ 517 | type: 'register', 518 | value: PSerializedMVRegister, 519 | ] 520 | 521 | export interface PSerializedFancyDBv1 { 522 | cg: causalGraph.PartialSerializedCGV1, 523 | crdts: [agent: string, seq: number, info: PSerializedCRDTInfo][] 524 | } 525 | 526 | 527 | const deserializePRegisterValue = (data: PSerializedRegisterValue, cg: CausalGraph): RegisterValue => ( 528 | data[0] === 'crdt' ? 
{type: 'crdt', id: causalGraph.rawToLV(cg, data[1], data[2])} 529 | : {type: 'primitive', val: data[1]} 530 | ) 531 | 532 | const serializePRegisterValue = (data: RegisterValue, cg: CausalGraph): PSerializedRegisterValue => { 533 | if (data.type === 'crdt') { 534 | const rv = causalGraph.lvToRaw(cg, data.id) 535 | return ['crdt', rv[0], rv[1]] 536 | } else { 537 | return ['primitive', data.val] 538 | } 539 | } 540 | 541 | const serializePMVRegisterValue = (v: LV, val: RegisterValue, cg: CausalGraph): [agent: string, seq: number, val: PSerializedRegisterValue] => { 542 | const rv = causalGraph.lvToRaw(cg, v) 543 | return [rv[0], rv[1], serializePRegisterValue(val, cg)] 544 | } 545 | 546 | const mergePartialRegister = (reg: MVRegister, givenRawPairs: PSerializedMVRegister, cg: CausalGraph) => { 547 | // This function mirrors mergeSet in stateset code. 548 | const oldVersions = reg.map(([v]) => v) 549 | const newVersions = givenRawPairs.map(([agent, seq]) => causalGraph.rawToLV(cg, agent, seq)) 550 | 551 | // Throw them in a blender... 552 | 553 | let needsSort = false 554 | causalGraph.findDominators2(cg, [...oldVersions, ...newVersions], (v, isDominator) => { 555 | // 3 cases: v is in old, v is in new, or v is in both. 556 | if (isDominator && !oldVersions.includes(v)) { 557 | // Its in the new data set only. Copy it in. 558 | const idx = newVersions.indexOf(v) 559 | if (idx < 0) throw Error('Invalid state') 560 | reg.push([v, deserializePRegisterValue(givenRawPairs[idx][2], cg)]) 561 | 562 | needsSort = true 563 | } else if (!isDominator && !newVersions.includes(v)) { 564 | // The item is in old only, and its been superceded. Remove it! 565 | const idx = reg.findIndex(([v2]) => v2 == v) 566 | if (idx < 0) throw Error('Invalid state') 567 | reg.splice(idx, 1) 568 | } 569 | }) 570 | 571 | // Matching the sort in mergeRegister. Not sure if this is necessary. 572 | if (needsSort && reg.length > 1) reg.sort(([v1], [v2]) => v1 - v2) 573 | } 574 | 575 | export function mergePartialSerialized(db: FancyDB, data: PSerializedFancyDBv1): LVRange { 576 | const updated = causalGraph.mergePartialVersions(db.cg, data.cg) 577 | for (const [agent, seq, newInfo] of data.crdts) { 578 | const id = causalGraph.rawToLV(db.cg, agent, seq) 579 | 580 | const type = newInfo[0] 581 | 582 | let existingInfo = db.crdts.get(id) 583 | if (existingInfo == null) { 584 | existingInfo = type === 'map' ? { type, registers: {} } 585 | : type === 'set' ? { type, values: new Map() } 586 | : { type, value: [] as any } // shhhhh don't worry about it. We'll fix it below. 587 | 588 | db.crdts.set(id, existingInfo) 589 | } 590 | 591 | if (existingInfo.type !== type) throw Error('Unexpected CRDT type in data') 592 | 593 | switch (type) { 594 | case 'map': { 595 | for (const [k, regInfo] of newInfo[1]) { 596 | const r = (existingInfo as CRDTMapInfo).registers[k] 597 | if (r == null) { 598 | // Uhhh we could call mergePartial but it hurts. 599 | (existingInfo as CRDTMapInfo).registers[k] = regInfo.map( 600 | ([a, s, v]) => [causalGraph.rawToLV(db.cg, a, s), deserializePRegisterValue(v, db.cg)] 601 | ) as AtLeast1<[LV, RegisterValue]> 602 | } else { 603 | mergePartialRegister(r, regInfo, db.cg) 604 | } 605 | } 606 | break 607 | } 608 | case 'set': { 609 | const values = (existingInfo as CRDTSetInfo).values 610 | for (const [agent, seq, value] of newInfo[1]) { 611 | const k = causalGraph.rawToLV(db.cg, agent, seq) 612 | // Set values are immutable, so if it exists, we've got it. 
613 | if (!values.has(k)) { 614 | values.set(k, deserializePRegisterValue(value, db.cg)) 615 | } 616 | } 617 | break 618 | } 619 | case 'register': { 620 | mergePartialRegister((existingInfo as CRDTRegisterInfo).value, newInfo[1], db.cg) 621 | break 622 | } 623 | } 624 | } 625 | 626 | return updated 627 | } 628 | 629 | export function serializePartialSince(db: FancyDB, v: LV[]): PSerializedFancyDBv1 { 630 | const cgData = causalGraph.serializeFromVersion(db.cg, v) 631 | const crdts: [agent: string, seq: number, info: PSerializedCRDTInfo][] = [] 632 | 633 | const diff = causalGraph.diff(db.cg, v, db.cg.version).bOnly 634 | 635 | const shouldIncludeV = (v: LV): boolean => ( 636 | // This could be implemented using a binary search, but given the sizes involved this is fine. 637 | diff.find(([start, end]) => (start <= v) && (v < end)) != null 638 | ) 639 | 640 | const encodeMVRegister = (reg: MVRegister, includeAll: boolean): null | PSerializedMVRegister => { 641 | // I'll do this in an imperative way because its called so much. 642 | let result: null | PSerializedMVRegister = null 643 | for (const [v, val] of reg) { 644 | if (includeAll || shouldIncludeV(v)) { 645 | result ??= [] 646 | result.push(serializePMVRegisterValue(v, val, db.cg)) 647 | } 648 | } 649 | return result 650 | } 651 | 652 | // So this is SLOOOW for big documents. A better implementation would store 653 | // operations and do a whole thing sending partial operation logs. 654 | for (const [id, info] of db.crdts.entries()) { 655 | // If the CRDT was created recently, just include all of it. 656 | const includeAll = shouldIncludeV(id) 657 | 658 | let infoOut: PSerializedCRDTInfo | null = null 659 | switch (info.type) { 660 | case 'map': { 661 | let result: null | [k: string, reg: PSerializedMVRegister][] = null 662 | for (let k in info.registers) { 663 | const v = info.registers[k] 664 | 665 | const valHere = encodeMVRegister(v, includeAll) 666 | // console.log('valHere', valHere) 667 | if (valHere != null) { 668 | result ??= [] 669 | result.push([k, valHere]) 670 | } 671 | } 672 | if (result != null) infoOut = ['map', result] 673 | break 674 | } 675 | case 'register': { 676 | const result = encodeMVRegister(info.value, includeAll) 677 | if (result != null) infoOut = ['register', result] 678 | // if (result != null) { 679 | // const rv = causalGraph.lvToRaw(db.cg, id) 680 | // crdts.push([rv[0], rv[1], ['register', result]]) 681 | // } 682 | 683 | break 684 | } 685 | 686 | case 'set': { 687 | // TODO: Weird - this looks almost identical to the register code! 
688 | let result: null | [agent: string, seq: number, val: PSerializedRegisterValue][] = null 689 | for (const [k, val] of info.values.entries()) { 690 | if (includeAll || shouldIncludeV(k)) { 691 | result ??= [] 692 | result.push(serializePMVRegisterValue(k, val, db.cg)) 693 | } 694 | } 695 | if (result != null) infoOut = ['set', result] 696 | // if (result != null) { 697 | // const rv = causalGraph.lvToRaw(db.cg, id) 698 | // crdts.push([rv[0], rv[1], ['set', result]]) 699 | // } 700 | break 701 | } 702 | } 703 | 704 | if (infoOut != null) { 705 | const rv = causalGraph.lvToRaw(db.cg, id) 706 | crdts.push([rv[0], rv[1], infoOut]) 707 | } 708 | } 709 | 710 | return { 711 | cg: cgData, 712 | crdts 713 | } 714 | } 715 | 716 | ;(() => { 717 | let db = createDb() 718 | 719 | localMapInsert(db, ['seph', 0], ROOT_LV, 'yo', {type: 'primitive', val: 123}) 720 | assert.deepEqual(get(db), {yo: 123}) 721 | 722 | // **** 723 | db = createDb() 724 | // concurrent changes 725 | applyRemoteOp(db, { 726 | id: ['mike', 0], 727 | globalParents: [], 728 | crdtId: ROOT, 729 | action: {type: 'map', localParents: [], key: 'c', val: {type: 'primitive', val: 'mike'}}, 730 | }) 731 | applyRemoteOp(db, { 732 | id: ['seph', 1], 733 | globalParents: [], 734 | crdtId: ROOT, 735 | action: {type: 'map', localParents: [], key: 'c', val: {type: 'primitive', val: 'seph'}}, 736 | }) 737 | 738 | assert.deepEqual(get(db), {c: 'seph'}) 739 | 740 | applyRemoteOp(db, { 741 | id: ['mike', 1], 742 | // globalParents: [['mike', 0]], 743 | globalParents: [['mike', 0], ['seph', 1]], 744 | crdtId: ROOT, 745 | // action: {type: 'map', localParents: [['mike', 0]], key: 'yo', val: {type: 'primitive', val: 'both'}}, 746 | action: {type: 'map', localParents: [['mike', 0], ['seph', 1]], key: 'c', val: {type: 'primitive', val: 'both'}}, 747 | }) 748 | // console.dir(db, {depth: null}) 749 | assert.deepEqual(get(db), {c: 'both'}) 750 | 751 | // **** 752 | db = createDb() 753 | // Set a value in an inner map 754 | const [_, inner] = localMapInsert(db, ['seph', 1], ROOT_LV, 'stuff', {type: 'crdt', crdtKind: 'map'}) 755 | localMapInsert(db, ['seph', 2], inner, 'cool', {type: 'primitive', val: 'definitely'}) 756 | assert.deepEqual(get(db), {stuff: {cool: 'definitely'}}) 757 | 758 | 759 | 760 | const serialized = JSON.stringify(serialize(db)) 761 | const deser = fromSerialized(JSON.parse(serialized)) 762 | assert.deepEqual(db, deser) 763 | 764 | // console.dir(, {depth: null}) 765 | 766 | 767 | 768 | // // Insert a set 769 | // const innerSet = localMapInsert(db, ['seph', 2], ROOT, 'a set', {type: 'crdt', crdtKind: 'set'}) 770 | // localSetInsert(db, ['seph', 3], innerSet.id, {type: 'primitive', val: 'whoa'}) 771 | // localSetInsert(db, ['seph', 4], innerSet.id, {type: 'crdt', crdtKind: 'map'}) 772 | 773 | // console.log('db', get(db)) 774 | // console.log('db', db) 775 | 776 | 777 | // assert.deepEqual(db, fromJSON(toJSON(db))) 778 | })() 779 | 780 | 781 | ;(() => { 782 | let db = createDb() 783 | // console.log(serializePartialSince(db, [])) 784 | 785 | localMapInsert(db, ['seph', 0], ROOT_LV, 'yo', {type: 'primitive', val: 123}) 786 | assert.deepEqual(get(db), {yo: 123}) 787 | 788 | const serializedA = serializePartialSince(db, []) 789 | // console.dir(serializePartialSince(db, []), {depth:null}) 790 | 791 | // **** 792 | db = createDb() 793 | // concurrent changes 794 | applyRemoteOp(db, { 795 | id: ['mike', 0], 796 | globalParents: [], 797 | crdtId: ROOT, 798 | action: {type: 'map', localParents: [], key: 'c', val: {type: 'primitive', val: 
'mike'}}, 799 | }) 800 | applyRemoteOp(db, { 801 | id: ['seph', 1], 802 | globalParents: [], 803 | crdtId: ROOT, 804 | action: {type: 'map', localParents: [], key: 'c', val: {type: 'primitive', val: 'seph'}}, 805 | }) 806 | 807 | assert.deepEqual(get(db), {c: 'seph'}) 808 | 809 | // const serializedB = serializePartialSince(db, []) 810 | const serializedB1 = serializePartialSince(db, [0]) 811 | const serializedB2 = serializePartialSince(db, [1]) 812 | // console.dir(serializedB, {depth:null}) 813 | 814 | const db2 = createDb() 815 | mergePartialSerialized(db2, serializedB2) 816 | mergePartialSerialized(db2, serializedB1) 817 | // console.dir(db, {depth:null}) 818 | // console.dir(db2, {depth:null}) 819 | assert.deepEqual(db, db2) 820 | assert.deepEqual(get(db), {c: 'seph'}) 821 | 822 | 823 | // applyRemoteOp(db, { 824 | // id: ['mike', 1], 825 | // // globalParents: [['mike', 0]], 826 | // globalParents: [['mike', 0], ['seph', 1]], 827 | // crdtId: ROOT, 828 | // // action: {type: 'map', localParents: [['mike', 0]], key: 'yo', val: {type: 'primitive', val: 'both'}}, 829 | // action: {type: 'map', localParents: [['mike', 0], ['seph', 1]], key: 'c', val: {type: 'primitive', val: 'both'}}, 830 | // }) 831 | // // console.dir(db, {depth: null}) 832 | // assert.deepEqual(get(db), {c: 'both'}) 833 | 834 | // // **** 835 | // db = createDb() 836 | // // Set a value in an inner map 837 | // const [_, inner] = localMapInsert(db, ['seph', 1], ROOT_LV, 'stuff', {type: 'crdt', crdtKind: 'map'}) 838 | // localMapInsert(db, ['seph', 2], inner, 'cool', {type: 'primitive', val: 'definitely'}) 839 | // assert.deepEqual(get(db), {stuff: {cool: 'definitely'}}) 840 | 841 | 842 | 843 | // const serialized = JSON.stringify(serialize(db)) 844 | // const deser = fromSerialized(JSON.parse(serialized)) 845 | // assert.deepEqual(db, deser) 846 | 847 | })() -------------------------------------------------------------------------------- /js/src/fancydb/rle.ts: -------------------------------------------------------------------------------- 1 | import { LVRange } from "../types" 2 | 3 | export const pushRLEList = (list: T[], newItem: T, tryAppend: (a: T, b: T) => boolean) => { 4 | if (list.length > 0) { 5 | if (tryAppend(list[list.length - 1], newItem)) return 6 | } 7 | list.push(newItem) 8 | } 9 | 10 | export const tryRangeAppend = (r1: LVRange, r2: LVRange): boolean => { 11 | if (r1[1] === r2[0]) { 12 | r1[1] = r2[1] 13 | return true 14 | } else return false 15 | } 16 | 17 | export const tryRevRangeAppend = (r1: LVRange, r2: LVRange): boolean => { 18 | if (r1[0] === r2[1]) { 19 | r1[0] = r2[0] 20 | return true 21 | } else return false 22 | } -------------------------------------------------------------------------------- /js/src/fancydb/stateset.ts: -------------------------------------------------------------------------------- 1 | import { AtLeast1, LV, LVRange, Primitive, RawVersion } from "../types" 2 | import { CausalGraph } from "./causal-graph.js" 3 | import * as causalGraph from './causal-graph.js' 4 | import bs from 'binary-search' 5 | import assert from 'assert/strict' 6 | import { assertSorted, createAgent } from "../utils" 7 | 8 | type Pair = [LV, T] 9 | type RawPair = [RawVersion, T] 10 | 11 | export interface StateSet { 12 | // ID -> [current value, current version] pairs. 13 | // NOTE: This is a MV register which only (always) stores primitive values. 14 | values: Map>>, 15 | 16 | // This is an index to quickly find the items to send when syncing. 
17 | // Each value exists in this list once for each version it has. 18 | index: { 19 | v: LV 20 | key: LV, 21 | }[], 22 | 23 | cg: CausalGraph, 24 | } 25 | 26 | export function create(): StateSet { 27 | return { 28 | values: new Map(), 29 | index: [], 30 | cg: causalGraph.create(), 31 | } 32 | } 33 | 34 | function rawLookup(crdt: StateSet, v: LV): number { 35 | return bs(crdt.index, v, (entry, needle) => entry.v - needle) 36 | } 37 | 38 | function removeIndex(crdt: StateSet, v: LV) { 39 | const idx = rawLookup(crdt, v) 40 | if (idx < 0) throw Error('Missing old version in index') 41 | 42 | // Splice the entry out. The entry will usually be near the end, so this is 43 | // not crazy slow. 44 | crdt.index.splice(idx, 1) 45 | } 46 | 47 | function addIndex(crdt: StateSet, v: LV, key: LV) { 48 | const entry = {v, key} 49 | 50 | if (crdt.index.length == 0 || crdt.index[crdt.index.length - 1].v < v) { 51 | // Normal, fast case. 52 | crdt.index.push(entry) 53 | } else { 54 | const idx = rawLookup(crdt, v) 55 | if (idx >= 0) return // Already indexed. 56 | const insIdx = -idx - 1 57 | crdt.index.splice(insIdx, 0, entry) 58 | } 59 | } 60 | 61 | export function localSet(crdt: StateSet, version: RawVersion, key: LV | -1, value: T): LV { 62 | const lv = causalGraph.addRaw(crdt.cg, version) 63 | if (key == -1) key = lv 64 | 65 | const oldPairs = crdt.values.get(key) 66 | crdt.values.set(key, [[lv, value]]) 67 | 68 | if (oldPairs != null) { 69 | for (const [v, oldValue] of oldPairs) { 70 | // Remove from index 71 | const idx = rawLookup(crdt, v) 72 | if (idx < 0) throw Error('Missing old version in index') 73 | 74 | // Splice the entry out. The entry will usually be near the end, so this is 75 | // not crazy slow. 76 | crdt.index.splice(idx, 1) 77 | } 78 | } 79 | 80 | crdt.index.push({v: lv, key}) 81 | 82 | return lv 83 | } 84 | 85 | export function localInsert(crdt: StateSet, version: RawVersion, value: T): LV { 86 | return localSet(crdt, version, -1, value) 87 | } 88 | 89 | /** Get a list of the keys which have been modified in the range of [since..] */ 90 | export function modifiedKeysSince(crdt: StateSet, since: LV): LV[] { 91 | let idx = rawLookup(crdt, since) 92 | if (idx < 0) idx = -idx - 1 93 | 94 | const result = new Set() // To uniq() the results. 95 | for (; idx < crdt.index.length; idx++) { 96 | const {key} = crdt.index[idx] 97 | result.add(key) 98 | } 99 | return Array.from(result) 100 | } 101 | 102 | // *** Remote state *** 103 | export type RemoteStateDelta = { 104 | cg: causalGraph.PartialSerializedCGV1, 105 | 106 | ops: [ 107 | key: RawVersion, 108 | pairs: AtLeast1, // This could be optimized by referencing cg below. 109 | ][], 110 | } 111 | 112 | function mergeSet(crdt: StateSet, keyRaw: RawVersion, givenRawPairs: AtLeast1) { 113 | // const lv = causalGraph.addRaw(crdt.cg, version, 1, parents) 114 | 115 | const key = causalGraph.rawToLV2(crdt.cg, keyRaw) 116 | const pairs: Pair[] = crdt.values.get(key) ?? [] 117 | 118 | const oldVersions = pairs.map(([v]) => v) 119 | const newVersions = givenRawPairs.map(([rv]) => causalGraph.rawToLV2(crdt.cg, rv)) 120 | 121 | causalGraph.findDominators2(crdt.cg, [...oldVersions, ...newVersions], (v, isDominator) => { 122 | // There's 3 options here: Its in old, its in new, or its in both. 123 | if (isDominator && !oldVersions.includes(v)) { 124 | // Its in new only. Add it! 
125 | addIndex(crdt, v, key) 126 | 127 | const idx = newVersions.indexOf(v) 128 | if (idx < 0) throw Error('Invalid state') 129 | pairs.push([v, givenRawPairs[idx][1]]) 130 | 131 | } else if (!isDominator && !newVersions.includes(v)) { 132 | // The item is in old only, and its been superceded. Remove it! 133 | removeIndex(crdt, v) 134 | const idx = pairs.findIndex(([v2]) => v2 === v) 135 | if (idx < 0) throw Error('Invalid state') 136 | pairs.splice(idx, 1) 137 | } 138 | }) 139 | 140 | if (pairs.length < 1) throw Error('Invalid pairs - all removed?') 141 | crdt.values.set(key, pairs as AtLeast1) 142 | } 143 | 144 | export function mergeDelta(crdt: StateSet, delta: RemoteStateDelta): LVRange { 145 | // We need to merge the causal graph stuff first because the ops depend on it. 146 | const updated = causalGraph.mergePartialVersions(crdt.cg, delta.cg) 147 | 148 | for (const [key, pairs] of delta.ops) { 149 | mergeSet(crdt, key, pairs) 150 | } 151 | 152 | return updated 153 | } 154 | 155 | export function deltaSince(crdt: StateSet, v: LV[] = []): RemoteStateDelta { 156 | const cgDelta = causalGraph.serializeFromVersion(crdt.cg, v) 157 | 158 | // This calculation is duplicated in the serializeFromVersion call. 159 | const ranges = causalGraph.diff(crdt.cg, v, crdt.cg.version).bOnly 160 | 161 | const pairs = new Map() 162 | 163 | for (const [start, end] of ranges) { 164 | let idx = rawLookup(crdt, start) 165 | if (idx < 0) idx = -idx - 1 166 | 167 | for (; idx < crdt.index.length; idx++) { 168 | const {key, v} = crdt.index[idx] 169 | if (v >= end) break 170 | 171 | // I could just add the data to ops, but this way we make sure to 172 | // only include the pairs within the requested range. 173 | const pair = crdt.values.get(key)!.find(([v2]) => v2 === v) 174 | if (pair == null) throw Error('Invalid state!') 175 | 176 | let p = pairs.get(key) as Pair[] | undefined 177 | if (p == null) { 178 | p = [] 179 | pairs.set(key, p) 180 | } 181 | p.push(pair) 182 | } 183 | } 184 | 185 | const ops: [ 186 | key: RawVersion, 187 | pairs: AtLeast1, // This could be optimized by referencing cg below. 188 | ][] = Array.from(pairs.entries()).map(([key, p]) => [ 189 | causalGraph.lvToRaw(crdt.cg, key), 190 | p.map(([v, val]): RawPair => [causalGraph.lvToRaw(crdt.cg, v), val]) as AtLeast1 191 | ]) 192 | 193 | return { cg: cgDelta, ops } 194 | } 195 | 196 | 197 | function lookupIndex(crdt: StateSet, v: LV): LV | null { 198 | const result = rawLookup(crdt, v) 199 | 200 | return result < 0 ? null 201 | : crdt.index[result].key 202 | } 203 | 204 | function check(crdt: StateSet) { 205 | let expectedIdxSize = 0 206 | 207 | for (const [key, pairs] of crdt.values.entries()) { 208 | assert(pairs.length >= 1) 209 | 210 | if (pairs.length >= 2) { 211 | const version = pairs.map(([v]) => v) 212 | 213 | // Check that all the versions are concurrent with each other. 214 | const dominators = causalGraph.findDominators(crdt.cg, version) 215 | assert.equal(version.length, dominators.length) 216 | 217 | assertSorted(version) 218 | } 219 | 220 | expectedIdxSize += pairs.length 221 | 222 | // Each entry should show up in the index. 223 | for (const [vv] of pairs) { 224 | assert.equal(key, lookupIndex(crdt, vv)) 225 | } 226 | } 227 | 228 | assert.equal(expectedIdxSize, crdt.index.length) 229 | } 230 | 231 | export function get(crdt: StateSet, key: LV): T[] { 232 | const pairs = (crdt.values.get(key) ?? 
[]) as Pair[] 233 | 234 | return pairs.map(([_, val]) => val) 235 | } 236 | 237 | // ;(() => { 238 | // const crdt = create() 239 | // check(crdt) 240 | 241 | // const agent = createAgent('seph') 242 | // const key1 = localInsert(crdt, agent(), "yooo") 243 | // console.log('key', key1) 244 | 245 | // const crdt2 = create() 246 | // mergeDelta(crdt2, deltaSince(crdt)) 247 | // // console.log('----') 248 | // // console.dir(crdt2, {depth:null}) 249 | // // console.log('----') 250 | 251 | // const key2 = localInsert(crdt, agent(), "hiii") 252 | // console.log('key', key2) 253 | 254 | // const t1 = localSet(crdt, agent(), key1, "blah") 255 | // console.log(t1) 256 | 257 | // console.log(crdt) 258 | 259 | // console.dir(deltaSince(crdt, [1]), {depth:null}) 260 | 261 | // mergeDelta(crdt2, deltaSince(crdt, crdt2.cg.version)) 262 | // console.log('----') 263 | // console.dir(crdt, {depth:null}) 264 | // console.dir(crdt2, {depth:null}) 265 | // console.log('----') 266 | // assert.deepEqual(crdt, crdt2) 267 | // })() -------------------------------------------------------------------------------- /js/src/jsonlines.ts: -------------------------------------------------------------------------------- 1 | // Simple network protocol using serialized lines of JSON. 2 | 3 | import { Socket } from "net"; 4 | 5 | export type MsgHandler = (msg: Msg, sock: Socket) => void 6 | 7 | export default function handle(sock: Socket, onMsg: MsgHandler) { 8 | let closed = false 9 | 10 | ;(async () => { 11 | let buffer = '' 12 | for await (const _data of sock) { 13 | const data = _data as Buffer 14 | let s = data.toString('utf-8') 15 | // console.log('data', s) 16 | 17 | while (s.includes('\n')) { 18 | const idx = s.indexOf('\n') 19 | const before = s.slice(0, idx) 20 | s = s.slice(idx + 1) 21 | let msg = buffer + before 22 | 23 | try { 24 | let m = JSON.parse(msg) as InMsg 25 | onMsg(m, sock) 26 | if (closed) return 27 | } catch (e) { 28 | console.error('Error processing message from', sock.remoteAddress, sock.remotePort) 29 | console.error(e) 30 | sock.end() 31 | sock.destroy() 32 | return 33 | } 34 | buffer = '' 35 | } 36 | 37 | if (s !== '') buffer += s 38 | } 39 | })() 40 | 41 | return { 42 | write(msg: OutMsg) { 43 | if (sock.writable) { 44 | sock.write(JSON.stringify(msg) + '\n') 45 | } 46 | }, 47 | close() { 48 | closed = true 49 | sock.end() 50 | sock.destroy() 51 | } 52 | } 53 | } -------------------------------------------------------------------------------- /js/src/node.ts: -------------------------------------------------------------------------------- 1 | import {default as net} from 'net' 2 | import { Socket } from 'net' 3 | import * as dt from './fancydb' 4 | import * as ss from './fancydb/stateset' 5 | import * as causalGraph from './fancydb/causal-graph' 6 | import handle from './jsonlines' 7 | import { LV, Primitive, RawVersion, ROOT_LV, VersionSummary } from './types' 8 | import { AgentGenerator, createAgent, rateLimit } from './utils' 9 | import { finished } from 'stream' 10 | import repl from 'node:repl' 11 | import fs from 'node:fs' 12 | 13 | 14 | type InboxEntry = { 15 | v: RawVersion[], 16 | // type: string, 17 | } 18 | 19 | let inboxAgent = createAgent() 20 | 21 | const inbox = ss.create() 22 | const docs = new Map() 26 | 27 | type FileData = { 28 | inbox: ss.RemoteStateDelta, 29 | docs: [LV, dt.SerializedFancyDBv1][] 30 | } 31 | 32 | let filename: string | null = null 33 | const loadFromFile = (f: string) => { 34 | try { 35 | const dataStr = fs.readFileSync(f, 'utf-8') 36 | const data = 
JSON.parse(dataStr) as FileData 37 | 38 | ss.mergeDelta(inbox, data.inbox) 39 | for (const [key, snap] of data.docs) { 40 | docs.set(key, { 41 | agent: createAgent(), 42 | doc: dt.fromSerialized(snap) 43 | }) 44 | } 45 | 46 | console.log('Loaded from', f) 47 | } catch (e: any) { 48 | if (e.code !== 'ENOENT') throw e 49 | 50 | console.log('Using new database file') 51 | } 52 | 53 | filename = f 54 | } 55 | 56 | const save = rateLimit(100, () => { 57 | if (filename != null) { 58 | console.log('Saving to', filename) 59 | const data: FileData = { 60 | inbox: ss.deltaSince(inbox, []), 61 | docs: Array.from(docs.entries()).map(([lv, {doc}]) => [lv, dt.serialize(doc)]) 62 | } 63 | const dataStr = JSON.stringify(data) + '\n' 64 | fs.writeFileSync(filename, dataStr) 65 | } 66 | }) 67 | 68 | process.on('exit', () => { 69 | save.flushSync() 70 | }) 71 | 72 | process.on('SIGINT', () => { 73 | // Catching this to make sure we save! 74 | // console.log('SIGINT!') 75 | process.exit(1) 76 | }) 77 | 78 | // if (dt.get(db).time == null) { 79 | // console.log('Setting time = 0') 80 | // const serverAgent = createAgent() 81 | // dt.localMapInsert(db, serverAgent(), ROOT_LV, 'time', {type: 'primitive', val: 0}) 82 | // } 83 | 84 | 85 | // const port = +(process.env.PORT ?? '8008') 86 | 87 | type Msg = [ 88 | type: 'known idx version', 89 | vs: VersionSummary 90 | ] | [ 91 | type: 'idx delta', 92 | delta: ss.RemoteStateDelta 93 | ] | [ 94 | // Get the changes to a document since the named version. 95 | type: 'get doc', 96 | k: RawVersion, 97 | since: VersionSummary, // OPT: Could probably just send the version here most of the time. 98 | ] | [ 99 | type: 'doc delta', 100 | k: RawVersion, 101 | delta: dt.PSerializedFancyDBv1 102 | ] | [ 103 | // Unused! 104 | type: 'ack', 105 | v: RawVersion[] 106 | ] 107 | 108 | const dbListeners = new Set<() => void>() 109 | const indexDidChange = () => { 110 | console.log('BROADCAST') 111 | console.dir(inbox.values, {depth:null}) 112 | for (const l of dbListeners) l() 113 | save() 114 | } 115 | 116 | const resolvable = (): {promise: Promise, resolve(val: T): void, reject(val: any): void} => { 117 | let resolve: any, reject: any 118 | const promise = new Promise((res, rej) => { 119 | resolve = res 120 | reject = rej 121 | }) 122 | return {promise, resolve, reject} 123 | } 124 | 125 | const runProtocol = (sock: Socket): Promise => { 126 | type ProtocolState = {state: 'waitingForVersion'} 127 | | { 128 | state: 'established', 129 | remoteVersion: LV[], 130 | unknownVersions: VersionSummary | null 131 | } 132 | 133 | let state: ProtocolState = {state: 'waitingForVersion'} 134 | 135 | const sendDelta = (sinceVersion: LV[]) => { 136 | console.log('sending delta to', sock.remoteAddress, sock.remotePort, 'since', sinceVersion) 137 | const delta = ss.deltaSince(inbox, sinceVersion) 138 | handler.write(['idx delta', delta]) 139 | // remoteVersion = db.cg.version.slice() 140 | } 141 | 142 | const onVersionChanged = () => { 143 | console.log('onVersionChanged') 144 | if (state.state !== 'established') throw Error('Unexpected connection state') 145 | 146 | if (state.unknownVersions != null) { 147 | // The db might now include part of the remainder. Doing this works 148 | // around a bug where connecting to 2 computers will result in 149 | // re-sending known changes back to them. 
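// (My reading of intersectWithSummary, from its two call sites in this file: it takes
// (causalGraph, versionSummary, knownVersion?) and returns [the versions both sides are known
// to have, a remainder summary we haven't seen yet]. Re-running it here shrinks unknownVersions
// as our own graph grows, so those versions aren't counted as "new" next time we send a delta
// to this peer.)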
150 | // console.log('unknown', state.unknownVersions) 151 | ;[state.remoteVersion, state.unknownVersions] = causalGraph.intersectWithSummary( 152 | inbox.cg, state.unknownVersions, state.remoteVersion 153 | ) 154 | // console.log('->known', state.unknownVersions) 155 | } 156 | 157 | if (!causalGraph.lvEq(state.remoteVersion, inbox.cg.version)) { 158 | sendDelta(state.remoteVersion) 159 | // The version will always (& only) advance forward. 160 | state.remoteVersion = inbox.cg.version.slice() 161 | } 162 | } 163 | 164 | const finishPromise = resolvable() 165 | 166 | finished(sock, (err) => { 167 | console.log('Socket closed', sock.remoteAddress, sock.remotePort) 168 | dbListeners.delete(onVersionChanged) 169 | 170 | if (err) finishPromise.reject(err) 171 | else finishPromise.resolve() 172 | }) 173 | 174 | const handler = handle(sock, msg => { 175 | console.log('msg', msg[0], 'from', sock.remoteAddress, sock.remotePort) 176 | // console.dir(msg, {depth:null}) 177 | 178 | const type = msg[0] 179 | switch (type) { 180 | case 'known idx version': { 181 | if (state.state !== 'waitingForVersion') throw Error('Unexpected connection state') 182 | 183 | // When we get the known idx version, we always send a delta so the remote 184 | // knows they're up to date (even if they were already anyway). 185 | const summary = msg[1] 186 | const [sv, remainder] = causalGraph.intersectWithSummary(inbox.cg, summary) 187 | console.log('known idx version', sv) 188 | if (!causalGraph.lvEq(sv, inbox.cg.version)) { 189 | // We could always send the delta here to let the remote peer know they're 190 | // up to date, but they can figure that out by looking at the known idx version 191 | // we send on first connect. 192 | sendDelta(sv) 193 | } 194 | 195 | state = { 196 | state: 'established', 197 | remoteVersion: sv, 198 | unknownVersions: remainder 199 | } 200 | 201 | dbListeners.add(onVersionChanged) // Only matters the first time. 202 | break 203 | } 204 | 205 | case 'idx delta': { 206 | if (state.state !== 'established') throw Error('Invalid state') 207 | 208 | const delta = msg[1] 209 | // console.log('got delta') 210 | // console.dir(delta, {depth:null}) 211 | const updated = ss.mergeDelta(inbox, delta) 212 | // console.log('Merged data') 213 | // console.dir(db.values, {depth:null}) 214 | 215 | // console.log('== v', remoteVersion, delta.cg) 216 | state.remoteVersion = causalGraph.advanceVersionFromSerialized( 217 | inbox.cg, delta.cg, state.remoteVersion 218 | ) 219 | // TODO: Ideally, this shouldn't be necessary! But it is because the remoteVersion 220 | // gets updated as a result of versions *we* send. 221 | state.remoteVersion = causalGraph.findDominators(inbox.cg, state.remoteVersion) 222 | 223 | // Presumably the remote peer has just sent us all the data it has that we were 224 | // missing. I could call intersectWithSummary2 here, but this should be 225 | // sufficient. 226 | state.unknownVersions = null 227 | 228 | if (updated[0] !== updated[1]) { 229 | const keys = ss.modifiedKeysSince(inbox, updated[0]) 230 | for (const k of keys) { 231 | // NOTE: Version here might have duplicate entries! 232 | // const version = ss.get(inbox, k).flatMap(data => data.v) 233 | 234 | const doc = docs.get(k) 235 | const kRaw = causalGraph.lvToRaw(inbox.cg, k) 236 | const vs = doc ? 
causalGraph.summarizeVersion(doc.doc.cg) : {} 237 | console.log('Requesting updated info on doc', k, kRaw, 'since', vs) 238 | handler.write(['get doc', kRaw, vs]) 239 | } 240 | 241 | // Uhhh should this wait until we've got the requested changes? 242 | indexDidChange() 243 | } 244 | // TODO: Send ack?? 245 | break 246 | } 247 | 248 | case 'get doc': { 249 | // Get the changes to the given document at some point in time 250 | const kRaw = msg[1] 251 | const vs = msg[2] 252 | 253 | const k = causalGraph.rawToLV2(inbox.cg, kRaw) 254 | const doc = docs.get(k) 255 | if (doc == null) throw Error('Requested unknown doc??') 256 | 257 | const commonVersion = causalGraph.intersectWithSummary(doc.doc.cg, vs)[0] 258 | const partial = dt.serializePartialSince(doc.doc, commonVersion) 259 | 260 | handler.write(['doc delta', kRaw, partial]) 261 | 262 | break 263 | } 264 | 265 | case 'doc delta': { 266 | // Merge the changes to the specified doc 267 | const kRaw = msg[1] 268 | const partial = msg[2] 269 | 270 | const k = causalGraph.rawToLV2(inbox.cg, kRaw) 271 | 272 | let doc = docs.get(k) 273 | if (doc == null) { 274 | doc = { 275 | agent: createAgent(), 276 | doc: dt.createDb() 277 | } 278 | docs.set(k, doc) 279 | } 280 | 281 | dt.mergePartialSerialized(doc.doc, partial) 282 | console.log('doc', k, 'now has value', dt.get(doc.doc)) 283 | break 284 | } 285 | 286 | default: console.warn('Unknown message type:', type) 287 | } 288 | }) 289 | 290 | handler.write(['known idx version', causalGraph.summarizeVersion(inbox.cg)]) 291 | 292 | return finishPromise.promise 293 | } 294 | 295 | const serverOnPort = (port: number) => { 296 | const server = net.createServer(async sock => { 297 | console.log('got server socket connection', sock.remoteAddress, sock.remotePort) 298 | runProtocol(sock) 299 | // handler.write({oh: 'hai'}) 300 | }) 301 | 302 | server.listen(port, () => { 303 | console.log(`Server listening on port ${port}`) 304 | }) 305 | } 306 | 307 | const connect1 = (host: string, port: number) => { 308 | const sock = net.connect({port, host}, () => { 309 | console.log('connected!') 310 | runProtocol(sock) 311 | }) 312 | } 313 | 314 | const wait = (timeout: number) => new Promise((res) => setTimeout(res, timeout)) 315 | 316 | const connect = (host: string, port: number) => { 317 | ;(async () => { 318 | while (true) { 319 | console.log('Connecting to', host, port, '...') 320 | const socket = new net.Socket() 321 | const connectPromise = resolvable() 322 | socket.once('connect', connectPromise.resolve) 323 | socket.once('error', connectPromise.reject) 324 | socket.connect({port, host}) 325 | 326 | try { 327 | await connectPromise.promise 328 | socket.removeListener('error', connectPromise.reject) 329 | await runProtocol(socket) 330 | } catch (e: any) { 331 | console.warn('Could not connect:', e.message) 332 | } 333 | 334 | console.log('Reconnecting in 3 seconds...') 335 | await wait(3000) 336 | } 337 | })() 338 | } 339 | 340 | // ***** Command line argument passing 341 | for (let i = 2; i < process.argv.length; i++) { 342 | const command = process.argv[i] 343 | switch (command) { 344 | case '-l': { 345 | const port = +process.argv[++i] 346 | if (port === 0 || isNaN(port)) throw Error('Invalid port (usage -l )') 347 | 348 | serverOnPort(port) 349 | break 350 | } 351 | 352 | case '-c': { 353 | const host = process.argv[++i] 354 | if (host == null) throw Error('Missing host to connect to! 
(usage -c ') 355 | const port = +process.argv[++i] 356 | if (port === 0 || isNaN(port)) throw Error('Invalid port (usage -c )') 357 | 358 | connect(host, port) 359 | console.log('connect', host, port) 360 | break 361 | } 362 | 363 | case '-f': { 364 | const f = process.argv[++i] 365 | loadFromFile(f) 366 | 367 | break 368 | } 369 | 370 | default: { 371 | throw Error(`Unknown command line argument '${command}'`) 372 | } 373 | } 374 | // console.log(process.argv[i]) 375 | } 376 | 377 | // ***** REPL 378 | const r = repl.start({ 379 | prompt: '> ', 380 | useColors: true, 381 | terminal: true, 382 | // completer: true, 383 | 384 | }) 385 | r.context.inbox = inbox 386 | r.context.docs = docs 387 | r.context.ss = ss 388 | r.context.dt = dt 389 | 390 | r.context.getDoc = (key: LV) => { 391 | const doc = docs.get(key) 392 | if (doc == null) throw Error('Missing doc') 393 | return dt.get(doc.doc) 394 | } 395 | 396 | // r.context.i = (val: Primitive) => { 397 | // const version = agent() 398 | // const lv = ss.localInsert(inbox, version, val) 399 | // console.log(`Inserted ${version[0]}/${version[1]} (LV ${lv})`, val) 400 | 401 | // dbDidChange() 402 | // } 403 | 404 | r.context.i = (data: Primitive) => { 405 | // We'll reuse the version for the document name. It shows up as a key in 406 | // the inbox as well. 407 | const docKey = inboxAgent() 408 | 409 | const doc = dt.createDb() 410 | const docAgent = createAgent() 411 | dt.recursivelySetRoot(doc, docAgent, { 412 | type: 'unknown', 413 | data, 414 | }) 415 | 416 | const lv = ss.localInsert(inbox, docKey, { 417 | v: causalGraph.getRawVersion(doc.cg) 418 | }) 419 | 420 | docs.set(lv, { 421 | agent: docAgent, 422 | doc 423 | }) 424 | 425 | // console.dir(doc, {depth:null}) 426 | 427 | console.log(`Inserted ${docKey[0]}/${docKey[1]} (LV ${lv})`, data) 428 | 429 | indexDidChange() 430 | } 431 | 432 | r.context.s = (docKey: LV, val: Primitive) => { 433 | const doc = docs.get(docKey) 434 | if (doc == null) throw Error('Missing or invalid key') 435 | 436 | dt.recursivelySetRoot(doc.doc, doc.agent, {data: val}) 437 | console.log(dt.get(doc.doc)) 438 | 439 | const version = inboxAgent() 440 | const lv = ss.localSet(inbox, version, docKey, { 441 | v: causalGraph.getRawVersion(doc.doc.cg) 442 | }) 443 | 444 | console.log(`Set ${docKey} data`, val) 445 | indexDidChange() 446 | } 447 | 448 | r.context.get = (docKey: LV) => dt.get(docs.get(docKey)!.doc) 449 | 450 | r.context.print = () => { 451 | for (const [k, doc] of docs.entries()) { 452 | console.log(k, ':', dt.get(doc.doc)) 453 | } 454 | } 455 | 456 | r.once('exit', () => { 457 | process.exit(0) 458 | }) 459 | 460 | // r.context.i({oh: 'hai'}) -------------------------------------------------------------------------------- /js/src/types.ts: -------------------------------------------------------------------------------- 1 | import Map2 from "map2" 2 | 3 | export type RawVersion = [agent: string, seq: number] 4 | 5 | export const ROOT: RawVersion = ['ROOT', 0] 6 | 7 | /** Local version */ 8 | export type LV = number 9 | 10 | /** Local version range. Range is [start, end). 
*/ 11 | export type LVRange = [start: number, end: number] 12 | 13 | export const ROOT_LV: LV = -1 14 | 15 | export type Primitive = null 16 | | boolean 17 | | string 18 | | number 19 | | Primitive[] 20 | | {[k: string]: Primitive} 21 | 22 | export type CreateValue = {type: 'primitive', val: Primitive} 23 | | {type: 'crdt', crdtKind: 'map' | 'set' | 'register'} 24 | 25 | export type Action = 26 | { type: 'map', key: string, localParents: RawVersion[], val: CreateValue } 27 | | { type: 'registerSet', localParents: RawVersion[], val: CreateValue } 28 | | { type: 'setInsert', val: CreateValue } 29 | | { type: 'setDelete', target: RawVersion } 30 | 31 | export interface Operation { 32 | id: RawVersion, 33 | globalParents: RawVersion[], 34 | crdtId: RawVersion, 35 | action: Action, 36 | } 37 | 38 | export type DBValue = null 39 | | boolean 40 | | string 41 | | number 42 | | DBValue[] 43 | | {[k: string]: DBValue} // Map 44 | | Map2 // Set. 45 | 46 | 47 | /** Helper type for a list with at least 1 entry in it. */ 48 | export type AtLeast1<T> = [T, ...T[]] 49 | 50 | 51 | export type SnapRegisterValue = {type: 'primitive', val: Primitive} 52 | | {type: 'crdt', id: RawVersion} 53 | export type SnapMVRegister = [RawVersion, SnapRegisterValue][] 54 | export type SnapCRDTInfo = { 55 | type: 'map', 56 | registers: {[k: string]: SnapMVRegister}, 57 | } | { 58 | type: 'set', 59 | values: [string, number, SnapRegisterValue][], 60 | } | { 61 | type: 'register', 62 | value: SnapMVRegister, 63 | } 64 | 65 | export interface DBSnapshot { 66 | version: RawVersion[], 67 | crdts: [string, number, SnapCRDTInfo][] 68 | } 69 | 70 | export interface VersionSummary {[agent: string]: [number, number][]} 71 | -------------------------------------------------------------------------------- /js/src/utils.ts: -------------------------------------------------------------------------------- 1 | import { LV, RawVersion, VersionSummary } from "./types" 2 | 3 | export type AgentGenerator = () => RawVersion 4 | export function createAgent(name?: string): AgentGenerator { 5 | const agent = name ?? Math.random().toString(36).slice(2) 6 | let seq = 0 7 | return () => ([agent, seq++]) 8 | } 9 | 10 | type RateLimit = { 11 | flushSync(): void, 12 | (): void, 13 | } 14 | 15 | export function rateLimit(min_delay: number, fn: () => void): RateLimit { 16 | let next_call = 0 17 | let timer: NodeJS.Timeout | null = null 18 | 19 | const rl = () => { 20 | let now = Date.now() 21 | 22 | if (next_call <= now) { 23 | // Just call the function. 24 | next_call = now + min_delay 25 | 26 | if (timer != null) { 27 | clearTimeout(timer) 28 | timer = null 29 | } 30 | fn() 31 | } else { 32 | // Queue the function call. 33 | if (timer == null) { 34 | timer = setTimeout(() => { 35 | timer = null 36 | next_call = Date.now() + min_delay 37 | fn() 38 | }, next_call - now) 39 | } // Otherwise it's already queued. 40 | } 41 | } 42 | 43 | rl.flushSync = () => { 44 | if (timer != null) { 45 | clearTimeout(timer) 46 | timer = null 47 | fn() 48 | } 49 | } 50 | 51 | return rl 52 | } 53 | 54 | export const versionInSummary = (vs: VersionSummary, [agent, seq]: RawVersion): boolean => { 55 | const ranges = vs[agent] 56 | if (ranges == null) return false 57 | // This could be implemented using a binary search, but that's probably fine here.
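// For example, with vs = { seph: [[0, 10], [20, 30]] }: ['seph', 9] and ['seph', 25] are in the
// summary, ['seph', 10] is not (the [from, to) ranges are end-exclusive), and any version from
// an agent with no entry at all returns false via the check above.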
58 | return ranges.find(([from, to]) => seq >= from && seq < to) !== undefined 59 | } 60 | 61 | export const assertSorted = (v: LV[]) => { 62 | for (let i = 1; i < v.length; i++) { 63 | if (v[i-1] >= v[i]) throw Error('Version not sorted') 64 | } 65 | } -------------------------------------------------------------------------------- /js/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | "rootDir": "./src", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. 
*/ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "resolveJsonModule": true, /* Enable importing .json files. */ 39 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ 40 | 41 | /* JavaScript Support */ 42 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 43 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ 44 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 45 | 46 | /* Emit */ 47 | "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 48 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 49 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 50 | "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 51 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 52 | "outDir": "./dist", /* Specify an output folder for all emitted files. */ 53 | // "removeComments": true, /* Disable emitting comments. */ 54 | // "noEmit": true, /* Disable emitting files from a compilation. */ 55 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 56 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 57 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 58 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 59 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 60 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 61 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 62 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 63 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 64 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 65 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 66 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 67 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 68 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 69 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 70 | 71 | /* Interop Constraints */ 72 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 73 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. 
*/ 74 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 75 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 76 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 77 | 78 | /* Type Checking */ 79 | "strict": true, /* Enable all strict type-checking options. */ 80 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 81 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 82 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 83 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 84 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 85 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 86 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 87 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 88 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 89 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 90 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 91 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 92 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 93 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 94 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 95 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 96 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 97 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 98 | 99 | /* Completeness */ 100 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 101 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /js/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "@types/node@^18.11.4": 6 | version "18.11.4" 7 | resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.4.tgz#7017a52e18dfaad32f55eebd539993014441949c" 8 | integrity sha512-BxcJpBu8D3kv/GZkx/gSMz6VnTJREBj/4lbzYOQueUOELkt8WrO6zAcSPmp9uRPEW/d+lUO8QK0W2xnS1hEU0A== 9 | 10 | "@types/priorityqueuejs@^1.0.1": 11 | version "1.0.1" 12 | resolved "https://registry.yarnpkg.com/@types/priorityqueuejs/-/priorityqueuejs-1.0.1.tgz#6eaac3247a4c5cefc94482e5d87c3730b35f5053" 13 | integrity sha512-rt2GvuoXcYb+R4X8SF4jlTSXDWoUmkZf7OB8iTRRfE5dmqHn47rY8CRIEPDD5lY28cU86+xXYQ5RsXq/9nydvQ== 14 | 15 | binary-search@^1.3.6: 16 | version "1.3.6" 17 | resolved "https://registry.yarnpkg.com/binary-search/-/binary-search-1.3.6.tgz#e32426016a0c5092f0f3598836a1c7da3560565c" 18 | integrity sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA== 19 | 20 | map2@^1.1.2: 21 | version "1.1.2" 22 | resolved "https://registry.yarnpkg.com/map2/-/map2-1.1.2.tgz#6a4e5d896f7aacc6d2cd7fdbe0d17ad72f99d6f6" 23 | integrity sha512-Kdt76+hzoMwA+Kiih0xtvBuuMrNbdBSZfZ4oC2hLBwp2uk3b3FopFEc/NkWu43ba1Fj93vmjxJ2DC0rfaI9W+w== 24 | 25 | priorityqueuejs@^2.0.0: 26 | version "2.0.0" 27 | resolved "https://registry.yarnpkg.com/priorityqueuejs/-/priorityqueuejs-2.0.0.tgz#96064040edd847ee9dd3013d8e16297399a6bd4f" 28 | integrity sha512-19BMarhgpq3x4ccvVi8k2QpJZcymo/iFUcrhPd4V96kYGovOdTsWwy7fxChYi4QY+m2EnGBWSX9Buakz+tWNQQ== 29 | 30 | typescript@^4.8.4: 31 | version "4.8.4" 32 | resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.4.tgz#c464abca159669597be5f96b8943500b238e60e6" 33 | integrity sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ== 34 | -------------------------------------------------------------------------------- /src/cg_hacks.rs: -------------------------------------------------------------------------------- 1 | use diamond_types::{CausalGraph, DTRange, Frontier, HasLength, LV}; 2 | use diamond_types::causalgraph::agent_assignment::remote_ids::RemoteVersion; 3 | use diamond_types::causalgraph::agent_span::AgentSpan; 4 | use serde::{Deserialize, Serialize}; 5 | use smallvec::{SmallVec, smallvec}; 6 | use smartstring::alias::String as SmartString; 7 | 8 | #[derive(Debug, Clone, Serialize, Deserialize)] 9 | pub(crate) struct PartialCGEntry<'a> { 10 | agent: SmartString, 11 | seq: usize, 12 | len: usize, 13 | #[serde(borrow)] 14 | parents: SmallVec<[RemoteVersion<'a>; 2]>, 15 | } 16 | 17 | pub(crate) fn serialize_cg_from_version<'a>(cg: &'a CausalGraph, v: &[LV], cur_version: &[LV]) -> SmallVec<[PartialCGEntry<'a>; 4]> { 18 | let ranges_rev = cg.graph.diff_rev(v, cur_version); 19 | assert!(ranges_rev.0.is_empty()); 20 | 21 | // dbg!(&ranges_rev); 22 | let mut entries = smallvec![]; 23 | for r in ranges_rev.1.into_iter().rev() { 24 | // dbg!(r, &cg); 25 | for entry in cg.iter_range(r) { 26 | entries.push(PartialCGEntry { 27 | agent: cg.agent_assignment.get_agent_name(entry.span.agent).into(), 28 | seq: entry.span.seq_range.start, 29 | len: entry.len(), 30 | parents: entry.parents.iter().map(|p| cg.agent_assignment.local_to_remote_version(*p)).collect() 31 | }) 32 | } 33 | } 34 | entries 35 | } 36 | 37 | pub(crate) fn merge_partial_versions(cg: &mut CausalGraph, pe: &[PartialCGEntry]) -> DTRange { 38 | let start = cg.len(); 39 | 40 | for e in pe { 41 | let parents = e.parents 42 | .iter() 43 | .map(|rv| cg.agent_assignment.try_remote_to_local_version(*rv).unwrap()) 44 | .collect::(); 45 | 46 | let agent 
= cg.get_or_create_agent_id(&e.agent); 47 | let _v_span = cg.merge_and_assign(parents.as_ref(), AgentSpan { 48 | agent, 49 | seq_range: (e.seq .. e.seq + e.len).into() 50 | }); 51 | } 52 | 53 | (start .. cg.len()).into() 54 | } 55 | 56 | pub(crate) fn advance_frontier_from_serialized(frontier: &mut Frontier, pe: &[PartialCGEntry], cg: &CausalGraph) { 57 | for e in pe { 58 | if let Ok(last) = cg.agent_assignment.try_remote_to_local_version(RemoteVersion(&e.agent, e.seq + e.len - 1)) { 59 | frontier.0.push(last); 60 | } 61 | } 62 | 63 | *frontier = cg.graph.find_dominators(frontier.as_ref()); 64 | } 65 | 66 | #[cfg(test)] 67 | mod tests { 68 | use diamond_types::CausalGraph; 69 | use crate::cg_hacks::serialize_cg_from_version; 70 | 71 | #[test] 72 | fn foo() { 73 | let mut cg = CausalGraph::new(); 74 | cg.get_or_create_agent_id("seph"); 75 | cg.assign_local_op_with_parents(&[], 0, 10); 76 | let s = serialize_cg_from_version(&cg, &[5], &[9]); 77 | dbg!(serde_json::to_string(&s).unwrap()); 78 | } 79 | } -------------------------------------------------------------------------------- /src/database.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | use smartstring::alias::String as SmartString; 3 | use rand::distributions::Alphanumeric; 4 | use diamond_types::{AgentId, CRDTKind, CreateValue, Frontier, LV, ROOT_CRDT_ID}; 5 | use diamond_types::causalgraph::agent_assignment::remote_ids::RemoteVersionOwned; 6 | use diamond_types::{Branch, OpLog, SerializedOps}; 7 | use diamond_types::list::operation::TextOperation; 8 | use rand::Rng; 9 | use smallvec::SmallVec; 10 | use crate::stateset::{LVKey, StateSet}; 11 | 12 | #[derive(Debug, PartialEq, Eq, Clone, serde::Serialize, serde::Deserialize, Default)] 13 | pub struct InboxEntry { 14 | version: SmallVec<[RemoteVersionOwned; 2]>, 15 | kind: SmartString, 16 | } 17 | 18 | impl InboxEntry { 19 | 20 | } 21 | 22 | #[derive(Debug)] 23 | pub struct Database { 24 | pub(crate) inbox: StateSet, 25 | pub(crate) docs: BTreeMap, 26 | pub(crate) index_agent: AgentId, 27 | } 28 | 29 | impl Database { 30 | pub fn new() -> Self { 31 | let agent: SmartString = rand::thread_rng() 32 | .sample_iter(&Alphanumeric) 33 | .take(7) 34 | .map(char::from) 35 | .collect(); 36 | 37 | let mut ss = StateSet::new(); 38 | let agent = ss.cg.get_or_create_agent_id(&agent); 39 | 40 | Self { 41 | inbox: ss, 42 | docs: Default::default(), 43 | index_agent: agent 44 | } 45 | } 46 | 47 | pub fn insert_new_item(&mut self, kind: &str, doc: OpLog) -> LVKey { 48 | let id = self.inbox.local_insert(self.index_agent, InboxEntry { 49 | version: doc.cg.remote_frontier_owned(), 50 | kind: kind.into(), 51 | }); 52 | self.docs.insert(id, doc); 53 | id 54 | } 55 | 56 | pub fn create_post(&mut self) -> LVKey { 57 | // To avoid sync problems, we'll initialize the post entirely using a special SCHEMA user. 
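        // The schema built below is roughly { title: "Untitled", content: "yo check out this
        // sick post" }, with both fields created as text CRDTs so they can be edited afterwards.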
58 | let mut doc = OpLog::new(); 59 | let agent = doc.cg.agent_assignment.get_or_create_agent_id("SCHEMA"); 60 | let title = doc.local_map_set(agent, ROOT_CRDT_ID, "title", CreateValue::NewCRDT(CRDTKind::Text)); 61 | doc.local_text_op(agent, title, TextOperation::new_insert(0, "Untitled")); 62 | let content = doc.local_map_set(agent, ROOT_CRDT_ID, "content", CreateValue::NewCRDT(CRDTKind::Text)); 63 | doc.local_text_op(agent, content, TextOperation::new_insert(0, "yo check out this sick post")); 64 | // dbg!(&doc.cg.version); 65 | 66 | self.insert_new_item("post", doc) 67 | } 68 | 69 | pub fn doc_updated(&mut self, item: LVKey) { 70 | let Some(existing_value) = self.inbox.get_value(item) else { return; }; 71 | 72 | let Some(doc) = self.docs.get(&item) else { return; }; 73 | 74 | // TODO: Change to a borrowed frontier. 75 | let doc_rv = doc.cg.agent_assignment.local_to_remote_frontier_owned(doc.cg.version.as_ref()); 76 | if existing_value.version != doc_rv { 77 | let mut new_value = existing_value.clone(); 78 | new_value.version = doc_rv; 79 | self.inbox.local_set(self.index_agent, item, new_value); 80 | } 81 | // pair. 82 | // let old_val = self.inbox.resolve_pairs() 83 | // self.inbox.local_set(self.index_agent, Some(item)) 84 | } 85 | 86 | pub fn posts(&self) -> impl Iterator + '_ { 87 | self.inbox.values.iter().filter_map(|(doc_name, pairs)| { 88 | let val = &self.inbox.resolve_pairs(pairs.as_slice()).1; 89 | if val.kind == "post" { 90 | Some(*doc_name) 91 | } else { None } 92 | }) 93 | } 94 | 95 | pub fn agent_name(&self) -> &str { 96 | self.inbox.cg.agent_assignment.get_agent_name(self.index_agent) 97 | } 98 | 99 | pub fn get_doc_mut(&mut self, key: LVKey) -> Option<(&mut OpLog, AgentId)> { 100 | self.docs.get_mut(&key) 101 | .map(|doc| { 102 | let agent_name = self.inbox.cg.agent_assignment.get_agent_name(self.index_agent); 103 | let agent = doc.cg.get_or_create_agent_id(agent_name); 104 | (doc, agent) 105 | }) 106 | } 107 | 108 | pub fn changes_to_doc_since(&self, doc: LVKey, v: &[LV]) -> Option { 109 | let doc = self.docs.get(&doc)?; 110 | Some(doc.ops_since(v)) 111 | } 112 | 113 | pub fn changes_to_post_content_since(&self, doc: LVKey, v: &[LV]) -> Option<(Vec, Frontier)> { 114 | let doc = self.docs.get(&doc)?; 115 | 116 | let content = doc.text_at_path(&["content"]); 117 | Some(( 118 | doc.text_changes_since(content, v) 119 | .into_iter() 120 | .filter_map(|(_, op)| op) 121 | .collect(), 122 | doc.cg.version.clone() 123 | )) 124 | } 125 | 126 | pub fn post_content(&self, doc: LVKey) -> Option { 127 | let doc = self.docs.get(&doc)?; 128 | let content = doc.text_at_path(&["content"]); 129 | Some(doc.checkout_text(content).to_string()) 130 | } 131 | 132 | pub fn checkout(&self, doc: LVKey) -> Option { 133 | self.docs.get(&doc) 134 | .map(|oplog| oplog.checkout_tip()) 135 | } 136 | 137 | /// returns if there were updates 138 | pub fn update_branch(&self, doc: LVKey, branch: &mut Branch) -> bool { 139 | let oplog = self.docs.get(&doc).unwrap(); 140 | let merged_versions = branch.merge_changes_to_tip(oplog); 141 | !merged_versions.is_empty() 142 | } 143 | 144 | pub fn dbg_print_docs(&self) { 145 | for (local_name, doc) in self.docs.iter() { 146 | println!("doc {} -> version {:?}, data: {:?}", local_name, doc.cg.version, doc.checkout()); 147 | } 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | mod stateset; 2 | mod cg_hacks; 3 | pub mod 
/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod stateset; 2 | mod cg_hacks; 3 | pub mod database; 4 | pub(crate) mod protocol; 5 | 6 | use std::net::{Ipv4Addr, SocketAddr}; 7 | use std::sync::Arc; 8 | use std::time::Duration; 9 | use tokio::io; 10 | use tokio::net::{TcpListener, TcpStream}; 11 | use tokio::sync::broadcast::Sender; 12 | use tokio::sync::RwLock; 13 | use crate::database::Database; 14 | use crate::protocol::Protocol; 15 | 16 | pub fn connect(addr: Vec<SocketAddr>, handle: Arc<RwLock<Database>>, tx: Sender<usize>) { 17 | tokio::spawn( 18 | connect_internal(addr, handle, tx) 19 | ); 20 | } 21 | 22 | pub async fn connect_internal(addr: Vec<SocketAddr>, mut handle: Arc<RwLock<Database>>, tx: Sender<usize>) -> Result<(), io::Error> { 23 | loop { 24 | println!("Trying to connect to {:?} ...", addr); 25 | // Walk through the socket addresses trying to connect 26 | let mut socket = None; 27 | for a in &addr { 28 | dbg!(a); 29 | let s = TcpStream::connect(a).await; 30 | match s { 31 | Ok(s) => { 32 | socket = Some(s); 33 | break; 34 | } 35 | Err(err) => { 36 | eprintln!("Could not connect to {}: {}", a, err); 37 | } 38 | } 39 | }; 40 | println!("ok so far..."); 41 | 42 | if let Some(socket) = socket { 43 | let (tx2, rx2) = (tx.clone(), tx.subscribe()); 44 | if let Err(e) = Protocol::start(socket, &mut handle, (tx2, rx2)).await { 45 | println!("Could not connect: {:?}", e); 46 | } 47 | println!("Disconnected! :("); 48 | } else { 49 | eprintln!("Could not connect to requested peer"); 50 | } 51 | 52 | tokio::time::sleep(Duration::from_secs(3)).await; 53 | } 54 | 55 | // #[allow(unreachable_code)] 56 | // Ok::<(), io::Error>(()) 57 | } 58 | 59 | pub fn listen(port: u16, handle: Arc<RwLock<Database>>, tx: Sender<usize>) { 60 | tokio::spawn(async move { 61 | let listener = TcpListener::bind( 62 | (Ipv4Addr::new(0, 0, 0, 0), port) 63 | ).await?; 64 | 65 | loop { 66 | let (socket, addr) = listener.accept().await?; 67 | println!("{} connected", addr); 68 | let mut handle = handle.clone(); 69 | let tx = tx.clone(); 70 | tokio::spawn(async move { 71 | let (tx2, rx2) = (tx.clone(), tx.subscribe()); 72 | Protocol::start(socket, &mut handle, (tx2, rx2)).await?; 73 | println!("{} disconnected", addr); 74 | Ok::<(), io::Error>(()) 75 | }); 76 | } 77 | 78 | #[allow(unreachable_code)] 79 | Ok::<(), io::Error>(()) 80 | }); 81 | } 82 | --------------------------------------------------------------------------------
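A sketch of wiring two in-process replicas together over localhost with the connect/listen helpers above, e.g. as an extra file under examples/. It is illustrative only and leans on the reconstructed signatures (Vec<SocketAddr>, Arc<RwLock<Database>>, broadcast::Sender<usize>); the port number and timing are arbitrary.

    use std::net::{Ipv4Addr, SocketAddr};
    use std::sync::Arc;
    use std::time::Duration;
    use tokio::sync::{broadcast, RwLock};
    use replica::database::Database;
    use replica::{connect, listen};

    #[tokio::main]
    async fn main() {
        let db_a = Arc::new(RwLock::new(Database::new()));
        let db_b = Arc::new(RwLock::new(Database::new()));
        db_a.write().await.create_post();

        // The initial receivers are dropped straight away, mirroring main.rs;
        // each Protocol subscribes itself with tx.subscribe() when a connection starts.
        let (tx_a, _) = broadcast::channel::<usize>(16);
        let (tx_b, _) = broadcast::channel::<usize>(16);

        listen(4000, db_a.clone(), tx_a.clone());
        connect(vec![SocketAddr::from((Ipv4Addr::LOCALHOST, 4000))], db_b.clone(), tx_b.clone());

        // Give the peers a moment to exchange deltas (illustrative only).
        tokio::time::sleep(Duration::from_secs(1)).await;
        println!("posts on b: {:?}", db_b.read().await.posts().collect::<Vec<_>>());
    }
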
/src/main.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused)] 2 | 3 | use std::net::{Ipv4Addr, SocketAddr, ToSocketAddrs}; 4 | use std::sync::Arc; 5 | use std::time::Duration; 6 | use std::vec; 7 | use bpaf::{Bpaf, Parser, short}; 8 | use diamond_types::causalgraph::summary::{VersionSummary, VersionSummaryFlat}; 9 | use diamond_types::{AgentId, CreateValue, Frontier, Primitive, ROOT_CRDT_ID}; 10 | use rand::distributions::Alphanumeric; 11 | use rand::{Rng, RngCore}; 12 | use tokio::{io, select, signal}; 13 | use tokio::io::{AsyncBufReadExt, AsyncWrite, AsyncWriteExt, BufReader, Lines}; 14 | use tokio::net::{TcpListener, TcpStream}; 15 | use serde::{Deserialize, Serialize}; 16 | use smartstring::alias::String as SmartString; 17 | use tokio::net::tcp::{ReadHalf, WriteHalf}; 18 | use tokio::sync::{broadcast, RwLock}; 19 | use std::io::{stdout, Write}; 20 | use std::ops::Deref; 21 | use replica::{connect, listen}; 22 | use replica::database::Database; 23 | 24 | 25 | // #[tokio::main(flavor= "current_thread")] 26 | #[tokio::main] 27 | async fn main() { 28 | let mut db = Database::new(); 29 | 30 | db.create_post(); 31 | db.dbg_print_docs(); 32 | 33 | // let name = db.create_item(); 34 | // let (doc, agent) = db.get_doc_mut(name).unwrap(); 35 | // doc.local_map_set(agent, ROOT_CRDT_ID, "yo", CreateValue::Primitive(Primitive::I64(rand::thread_rng().next_u32() as i64))); 36 | 37 | let database = Arc::new(RwLock::new(db)); 38 | 39 | let opts: CmdOpts = cmd_opts().run(); 40 | dbg!(&opts); 41 | 42 | // let colors: Vec<Color> = vec![ 43 | // Color::Green, 44 | // Color::Cyan, 45 | // Color::Yellow, 46 | // Color::Magenta, 47 | // ]; 48 | // let mut i = 0; 49 | // let mut next_color = move || { 50 | // i += 1; 51 | // println!("COLOR {i}"); 52 | // colors[i % colors.len()] 53 | // }; 54 | 55 | let (tx, rx1) = tokio::sync::broadcast::channel(16); 56 | drop(rx1); 57 | 58 | for port in opts.listen_ports.iter().copied() { 59 | listen(port, database.clone(), tx.clone()); 60 | } 61 | 62 | for addr in opts.connect.iter().cloned() { 63 | connect(addr.collect(), database.clone(), tx.clone()); 64 | } 65 | 66 | if opts.listen_ports.is_empty() && opts.connect.is_empty() { 67 | eprintln!("Nothing to do!"); 68 | return; 69 | } 70 | 71 | if let Err(err) = signal::ctrl_c().await { 72 | eprintln!("Unable to listen to shutdown signal {}", err); 73 | } 74 | } 75 | 76 | fn parse_connect() -> impl Parser<Vec<vec::IntoIter<SocketAddr>>> { 77 | short('c') 78 | .long("connect") 79 | .argument("CONNECT") 80 | .map(|s: String| s.to_socket_addrs().unwrap()) 81 | .many() 82 | } 83 | 84 | #[derive(Debug, Clone, Bpaf)] 85 | #[bpaf(options, version)] 86 | struct CmdOpts { 87 | #[bpaf(short, long)] 88 | listen_ports: Vec<u16>, 89 | 90 | #[bpaf(external(parse_connect))] 91 | connect: Vec<vec::IntoIter<SocketAddr>>, 92 | } --------------------------------------------------------------------------------
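One thing to note in main.rs above: parse_connect() resolves addresses with to_socket_addrs().unwrap(), so a typo in --connect panics instead of producing a CLI error. If bpaf's fallible parse combinator is available (an assumption about the bpaf 0.7 API, not something this repo relies on), the same parser could surface the error instead:

    fn parse_connect() -> impl Parser<Vec<vec::IntoIter<SocketAddr>>> {
        short('c')
            .long("connect")
            .argument("CONNECT")
            // Report resolution failures as argument errors rather than panicking.
            .parse(|s: String| s.to_socket_addrs())
            .many()
    }
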
/src/protocol.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{BTreeMap, BTreeSet}; 2 | use diamond_types::Frontier; 3 | use diamond_types::causalgraph::summary::{VersionSummary, VersionSummaryFlat}; 4 | use tokio::net::TcpStream; 5 | use std::io; 6 | use std::sync::Arc; 7 | use tokio::sync::{broadcast, RwLock}; 8 | use tokio::io::{AsyncBufReadExt, AsyncWrite, AsyncWriteExt, BufReader}; 9 | use std::ops::{Deref, DerefMut}; 10 | use std::string::ParseError; 11 | use diamond_types::causalgraph::agent_assignment::remote_ids::RemoteVersion; 12 | use diamond_types::SerializedOps; 13 | use tokio::net::tcp::WriteHalf; 14 | use tokio::select; 15 | use crate::cg_hacks::advance_frontier_from_serialized; 16 | use crate::database::{Database, InboxEntry}; 17 | use crate::stateset::{LVKey, RemoteStateDelta}; 18 | use serde::{Serialize, Deserialize}; 19 | use smallvec::{SmallVec, smallvec}; 20 | 21 | #[derive(Debug, Clone, Serialize, Deserialize)] 22 | #[serde(tag = "type")] 23 | enum NetMessage<'a> { 24 | KnownIdx { vs: VersionSummaryFlat }, 25 | IdxDelta { 26 | #[serde(borrow)] 27 | idx_delta: Box<RemoteStateDelta<'a, InboxEntry>>, 28 | #[serde(borrow)] 29 | sub_deltas: SmallVec<[(RemoteVersion<'a>, SerializedOps<'a>); 2]>, 30 | }, 31 | SubscribeDocs { 32 | // Not sure if we should fetch, or fetch and subscribe or what here. 33 | #[serde(borrow)] 34 | docs: SmallVec<[(RemoteVersion<'a>, VersionSummary); 2]>, 35 | }, 36 | DocsDelta { 37 | #[serde(borrow)] 38 | deltas: SmallVec<[(RemoteVersion<'a>, SerializedOps<'a>); 2]>, 39 | } 40 | } 41 | 42 | type DatabaseHandle = Arc<RwLock<Database>>; 43 | 44 | async fn send_message<'a, W: AsyncWrite + Unpin>(stream: &mut W, msg: NetMessage<'a>) -> Result<(), io::Error> { 45 | let mut msg = serde_json::to_vec(&msg).unwrap(); 46 | msg.push(b'\n'); 47 | print!("WRITE {}", std::str::from_utf8(&msg).unwrap()); 48 | stream.write_all(&msg).await 49 | } 50 | 51 | #[derive(Debug)] 52 | struct ConnectionState { 53 | remote_frontier: Frontier, 54 | unknown_versions: Option<VersionSummaryFlat>, 55 | 56 | my_subscriptions: BTreeSet<LVKey>, 57 | remote_subscriptions: BTreeMap<LVKey, Frontier>, // Storing the last known version on the remote peer. 58 | } 59 | 60 | type IndexChannel = (broadcast::Sender<usize>, broadcast::Receiver<usize>); 61 | 62 | /// Protocol stores & represents the state for a connection to another peer. 63 | pub struct Protocol<'a> { 64 | // I could store the network socket here too, but it makes it much harder with the borrowck. 65 | database: &'a mut DatabaseHandle, 66 | notify: IndexChannel, 67 | state: Option<ConnectionState>, 68 | 69 | // stdout: StandardStream, 70 | // color: Color, 71 | } 72 | 73 | impl<'a> Protocol<'a> { 74 | // fn new(mut socket: &'a TcpStream, database: &'a mut DatabaseHandle, mut notify: IndexChannel) -> Self { 75 | fn new(database: &'a mut DatabaseHandle, notify: IndexChannel) -> Self { 76 | Self { 77 | database, 78 | notify, 79 | state: None, 80 | // stdout, 81 | // color, 82 | } 83 | } 84 | 85 | async fn send_delta<D: Deref<Target = Database>>(since_frontier: &mut Frontier, db: D, subs: Option<&mut BTreeMap<LVKey, Frontier>>, writer: &mut WriteHalf<'_>) -> Result<(), io::Error> { 86 | let idx_delta = db.inbox.delta_since(since_frontier.as_ref()); 87 | 88 | let mut sub_deltas = smallvec![]; 89 | if let Some(subs) = subs { 90 | for key in db.inbox.modified_keys_since_frontier(since_frontier.as_ref()) { 91 | if let Some(known_frontier) = subs.get_mut(&key) { 92 | println!("Getting doc {} ops since {:?}", key, known_frontier.as_ref()); 93 | // We might not have any new operations for the doc yet, or even know about it. 94 | let Some(doc) = db.docs.get(&key) else { continue; }; 95 | 96 | if known_frontier.as_ref() != doc.cg.version.as_ref() { 97 | let remote_name = db.inbox.cg.agent_assignment.local_to_remote_version(key); 98 | let ops = doc.ops_since(known_frontier.as_ref()); 99 | sub_deltas.push((remote_name, ops)); 100 | subs.insert(key, doc.cg.version.clone()); 101 | } 102 | } 103 | } 104 | } 105 | 106 | send_message(writer, NetMessage::IdxDelta { idx_delta: Box::new(idx_delta), sub_deltas }).await?; 107 | since_frontier.replace(db.inbox.cg.version.as_ref()); 108 | Ok(()) 109 | } 110 | 111 | fn apply_doc_updates<'b, D: DerefMut<Target = Database>, I: Iterator<Item = (RemoteVersion<'b>, SerializedOps<'b>)>>(db: &mut D, deltas_iter: I) -> Result<(), ParseError> { 112 | for (remote_name, changes) in deltas_iter { 113 | let local_name = db.inbox.cg.agent_assignment.remote_to_local_version(remote_name); 114 | 115 | let doc = db.docs.entry(local_name).or_default(); 116 | doc.merge_ops(changes).unwrap(); // TODO: Pass error correctly.
117 | 118 | // println!("doc {} -> version {:?}, data: {:?}", local_name, doc.cg.version, doc.checkout()); 119 | println!("updated doc {} ({:?}) to version {:?}", local_name, remote_name, doc.cg.version.as_ref()); 120 | } 121 | 122 | db.dbg_print_docs(); 123 | 124 | Ok(()) 125 | } 126 | 127 | // fn get_doc_updates_since>(db: D, v: &[LV]) -> SmallVec<[(RemoteVersion<'a>, SerializedOps<'a>); 2]> { 128 | // 129 | // } 130 | 131 | async fn handle_message(&mut self, msg: NetMessage<'_>, writer: &mut WriteHalf<'_>) -> Result<(), io::Error> { 132 | match msg { 133 | NetMessage::KnownIdx { vs } => { 134 | // dbg!(&vs); 135 | 136 | let db = self.database.read().await; 137 | let (mut remote_frontier, remainder) = db.inbox.cg.intersect_with_flat_summary(&vs, &[]); 138 | println!("remote frontier {:?}", remote_frontier); 139 | 140 | // dbg!(&remote_frontier, &remainder); 141 | 142 | if remote_frontier != db.inbox.cg.version { 143 | println!("Sending delta to {:?}", db.inbox.cg.version); 144 | Self::send_delta(&mut remote_frontier, db, None, writer).await?; 145 | } 146 | 147 | if let Some(r) = remainder.as_ref() { 148 | println!("Remainder {:?}", r); 149 | } 150 | 151 | self.state = Some(ConnectionState { 152 | remote_frontier, 153 | unknown_versions: remainder, 154 | my_subscriptions: Default::default(), 155 | remote_subscriptions: Default::default(), 156 | }); 157 | } 158 | NetMessage::IdxDelta { idx_delta: delta, sub_deltas } => { 159 | // dbg!(&delta); 160 | let mut db = self.database.write().await; 161 | 162 | let ops = delta.ops; 163 | let cg_delta = delta.cg; 164 | let diff = db.inbox.merge_delta(&cg_delta, ops); 165 | 166 | let state = self.state.as_mut().unwrap(); 167 | 168 | println!("remote frontier is {:?}", state.remote_frontier); 169 | advance_frontier_from_serialized(&mut state.remote_frontier, &cg_delta, &db.inbox.cg); 170 | println!("->mote frontier is {:?}", state.remote_frontier); 171 | state.unknown_versions = None; 172 | 173 | if !diff.is_empty() { 174 | let mut sub = smallvec![]; 175 | for key in db.inbox.modified_keys_since_v(diff.start) { 176 | if !state.my_subscriptions.contains(&key) { 177 | // Usually just one key. 178 | let remote_name = db.inbox.cg.agent_assignment.local_to_remote_version(key); 179 | // I'm going to take for granted that the version has changed, and we care about it. 180 | let local_vs = db.docs.get(&key) 181 | .map(|doc| { 182 | doc.cg.agent_assignment.summarize_versions() 183 | }) 184 | .unwrap_or_default(); 185 | 186 | println!("Subscribing to {} ({:?})", key, remote_name); 187 | sub.push((remote_name, local_vs)); 188 | 189 | // We should probably only do this after the sub message has been sent below? 190 | // Though if an error happens we'll bail anyway, so its fine for now ... 191 | state.my_subscriptions.insert(key); 192 | } 193 | } 194 | 195 | if !sub.is_empty() { 196 | send_message(writer, NetMessage::SubscribeDocs { docs: sub }).await?; 197 | } else { 198 | // I don't know why I need to do this, but its needed to make the borrowck happy. 199 | drop(sub); 200 | } 201 | 202 | Self::apply_doc_updates(&mut db, sub_deltas.into_iter()).unwrap(); 203 | 204 | // dbg!(diff); 205 | db.inbox.print_values(); 206 | drop(db); // TODO: Check performance of doing this. Should make read handles cheaper? 
207 | self.notify.0.send(diff.end).unwrap(); 208 | } else { 209 | assert!(sub_deltas.is_empty()); 210 | } 211 | } 212 | 213 | NetMessage::SubscribeDocs { docs } => { 214 | let db = self.database.read().await; 215 | let state = self.state.as_mut().unwrap(); 216 | 217 | let mut deltas = smallvec![]; 218 | for (remote_name, vs) in docs { 219 | // We should always have this document at this point. 220 | // TODO: if we don't, error rather than panic. 221 | let local_name = db.inbox.cg.agent_assignment.remote_to_local_version(remote_name); 222 | 223 | println!("Remote peer subscribed to {} ({:?})", local_name, remote_name); 224 | 225 | // I'll ignore the known version summary for now. We could store that to avoid 226 | // re-sending operations later. 227 | let since_version = if let Some(oplog) = db.docs.get(&local_name) { 228 | let since_version = oplog.cg.intersect_with_summary(&vs, &[]).0; 229 | 230 | // I'll still include an empty set of changes even if the remote peer is up to date, 231 | // so they know the subscription is ready. 232 | let ops = oplog.ops_since(since_version.as_ref()); 233 | deltas.push((remote_name, ops)); 234 | since_version 235 | } else { Frontier::root() }; 236 | 237 | state.remote_subscriptions.insert(local_name, since_version); 238 | } 239 | 240 | send_message(writer, NetMessage::DocsDelta { deltas }).await?; 241 | } 242 | 243 | NetMessage::DocsDelta { deltas } => { 244 | let mut db = self.database.write().await; 245 | Self::apply_doc_updates(&mut db, deltas.into_iter()).unwrap(); 246 | } 247 | } 248 | 249 | Ok(()) 250 | } 251 | 252 | async fn on_database_updated<'b>(&mut self, writer: &mut WriteHalf<'b>, db_len: usize) -> Result<(), io::Error> { 253 | if let Some(state) = self.state.as_mut() { 254 | if db_len > 0 && state.remote_frontier.0.last() == Some(&(db_len - 1)) { 255 | // This is a bit ghastly. I want to filter out changes that the remote peer already 256 | // has before we acquire a (maybe expensive?) database read handle. 257 | // println!("Skipping update because peer is already up to date!"); 258 | return Ok(()); 259 | } 260 | 261 | println!("Got broadcast message!"); 262 | let db = self.database.read().await; 263 | if let Some(uv) = state.unknown_versions.as_mut() { 264 | // If the remote peer has some versions we don't know about, first check to see if 265 | // it already knows about some of the versions we've updated locally. This prevents 266 | // a bug where the peer oversends data. 267 | let (f, v) = db.inbox.cg.intersect_with_flat_summary(uv, state.remote_frontier.as_ref()); 268 | // I could just handle this via destructuring, but it causes an intellij error. 269 | state.remote_frontier = f; 270 | state.unknown_versions = v; 271 | 272 | println!("Trimmed unknown versions to {:?}", state.unknown_versions); 273 | } 274 | 275 | if state.remote_frontier != db.inbox.cg.version { 276 | // TODO: Optimize this by calculating & serializing the delta once instead of 277 | // once per peer. 278 | let state = self.state.as_mut().unwrap(); 279 | println!("Send delta! 
{:?} -> {:?}", state.remote_frontier, db.inbox.cg.version); 280 | Self::send_delta(&mut state.remote_frontier, db, Some(&mut state.remote_subscriptions), writer).await?; 281 | } 282 | } 283 | 284 | Ok(()) 285 | } 286 | 287 | pub async fn run(&mut self, mut socket: TcpStream) -> Result<(), io::Error> { 288 | let (reader, mut writer) = socket.split(); 289 | 290 | let reader = BufReader::new(reader); 291 | let mut line_reader = reader.lines(); 292 | 293 | send_message(&mut writer, NetMessage::KnownIdx { 294 | vs: self.database.read().await.inbox.cg.agent_assignment.summarize_versions_flat() 295 | }).await?; 296 | 297 | loop { 298 | select! { 299 | line = line_reader.next_line() => { 300 | if let Some(line) = line? { 301 | println!("READ {line}"); 302 | 303 | let msg: NetMessage = serde_json::from_str(&line)?; 304 | self.handle_message(msg, &mut writer).await?; 305 | } else { 306 | // End of network stream. 307 | println!("End of network stream"); 308 | break; 309 | } 310 | } 311 | 312 | recv_result = self.notify.1.recv() => { 313 | match recv_result { 314 | Err(e) => { 315 | // This can happen if the peer isn't keeping up. 316 | println!("Message error {:?}", e); 317 | break; 318 | } 319 | Ok(db_len) => { 320 | self.on_database_updated(&mut writer, db_len).await?; 321 | } 322 | } 323 | } 324 | } 325 | } 326 | 327 | Ok(()) 328 | } 329 | 330 | pub async fn start(socket: TcpStream, database: &mut DatabaseHandle, notify: IndexChannel) -> Result<(), io::Error> { 331 | Protocol::new(database, notify).run(socket).await 332 | } 333 | } 334 | -------------------------------------------------------------------------------- /src/stateset.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::Ordering; 2 | use std::collections::btree_map::Entry; 3 | use std::collections::BTreeMap; 4 | use std::fmt::Debug; 5 | use diamond_types::{AgentId, CausalGraph, DTRange, LV}; 6 | use diamond_types::causalgraph::agent_assignment::remote_ids::RemoteVersion; 7 | use serde::{Deserialize, Serialize}; 8 | use serde::de::DeserializeOwned; 9 | use smallvec::{SmallVec, smallvec}; 10 | use crate::cg_hacks::{merge_partial_versions, PartialCGEntry, serialize_cg_from_version}; 11 | 12 | pub type LVKey = LV; 13 | 14 | type Pair = (LV, T); 15 | // type RawPair = (RemoteId, T); 16 | type RawPair<'a, T> = (RemoteVersion<'a>, T); 17 | 18 | // type RawVersionRef<'a> = (&'a str, usize); 19 | // type RawVersion = (SmartString, usize); 20 | // type RawVersion = RemoteId; 21 | 22 | // fn borrow_rv(rv: &RawVersion) -> RawVersionRef<'_> { 23 | // (rv.agent.as_str(), rv.seq) 24 | // } 25 | 26 | #[derive(Debug, Clone)] 27 | pub(crate) struct StateSet { 28 | pub(crate) values: BTreeMap; 1]>>, 29 | 30 | // Internal from version -> value at that version 31 | pub(crate) index: BTreeMap, 32 | 33 | // pub(crate) version: Frontier, 34 | pub(crate) cg: CausalGraph, 35 | } 36 | 37 | impl StateSet { 38 | pub fn new() -> Self { 39 | Self { 40 | values: Default::default(), 41 | index: Default::default(), 42 | // version: Default::default(), 43 | cg: Default::default() 44 | } 45 | } 46 | 47 | pub fn print_values(&self) where T: Debug { 48 | for (key, pairs) in &self.values { 49 | println!("{key}: {:?}", pairs); 50 | } 51 | } 52 | 53 | fn local_set_internal(&mut self, agent_id: AgentId, key: Option, value: T) -> LV { 54 | let v = self.cg.assign_local_op(agent_id, 1).start; 55 | 56 | let key = key.unwrap_or(v); 57 | let old_pairs = self.values.insert(key, smallvec![(v, value)]); 58 | 59 | if let 
/src/stateset.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::Ordering; 2 | use std::collections::btree_map::Entry; 3 | use std::collections::BTreeMap; 4 | use std::fmt::Debug; 5 | use diamond_types::{AgentId, CausalGraph, DTRange, LV}; 6 | use diamond_types::causalgraph::agent_assignment::remote_ids::RemoteVersion; 7 | use serde::{Deserialize, Serialize}; 8 | use serde::de::DeserializeOwned; 9 | use smallvec::{SmallVec, smallvec}; 10 | use crate::cg_hacks::{merge_partial_versions, PartialCGEntry, serialize_cg_from_version}; 11 | 12 | pub type LVKey = LV; 13 | 14 | type Pair<T> = (LV, T); 15 | // type RawPair<T> = (RemoteId, T); 16 | type RawPair<'a, T> = (RemoteVersion<'a>, T); 17 | 18 | // type RawVersionRef<'a> = (&'a str, usize); 19 | // type RawVersion = (SmartString, usize); 20 | // type RawVersion = RemoteId; 21 | 22 | // fn borrow_rv(rv: &RawVersion) -> RawVersionRef<'_> { 23 | // (rv.agent.as_str(), rv.seq) 24 | // } 25 | 26 | #[derive(Debug, Clone)] 27 | pub(crate) struct StateSet<T> { 28 | pub(crate) values: BTreeMap<LVKey, SmallVec<[Pair<T>; 1]>>, 29 | 30 | // Maps internal version (LV) -> the key that was modified at that version. 31 | pub(crate) index: BTreeMap<LV, LVKey>, 32 | 33 | // pub(crate) version: Frontier, 34 | pub(crate) cg: CausalGraph, 35 | } 36 | 37 | impl<T> StateSet<T> { 38 | pub fn new() -> Self { 39 | Self { 40 | values: Default::default(), 41 | index: Default::default(), 42 | // version: Default::default(), 43 | cg: Default::default() 44 | } 45 | } 46 | 47 | pub fn print_values(&self) where T: Debug { 48 | for (key, pairs) in &self.values { 49 | println!("{key}: {:?}", pairs); 50 | } 51 | } 52 | 53 | fn local_set_internal(&mut self, agent_id: AgentId, key: Option<LVKey>, value: T) -> LV { 54 | let v = self.cg.assign_local_op(agent_id, 1).start; 55 | 56 | let key = key.unwrap_or(v); 57 | let old_pairs = self.values.insert(key, smallvec![(v, value)]); 58 | 59 | if let Some(old_pairs) = old_pairs { 60 | for (v2, _) in old_pairs { 61 | self.index.remove(&v2); 62 | } 63 | } 64 | 65 | self.index.insert(v, key); 66 | 67 | v 68 | } 69 | 70 | pub fn is_empty(&self) -> bool { 71 | self.values.is_empty() 72 | } 73 | 74 | pub fn len(&self) -> usize { 75 | self.values.len() 76 | } 77 | 78 | pub fn local_insert(&mut self, agent_id: AgentId, value: T) -> LV { 79 | self.local_set_internal(agent_id, None, value) 80 | } 81 | 82 | pub fn local_set(&mut self, agent_id: AgentId, key: LV, value: T) -> LV { 83 | self.local_set_internal(agent_id, Some(key), value) 84 | } 85 | 86 | pub fn modified_keys_since_v(&self, since_v: LV) -> impl Iterator<Item = LVKey> + '_ { 87 | self.index.range(since_v..).map(|(_v, &key)| { 88 | key 89 | }) 90 | } 91 | 92 | pub fn modified_keys_since_frontier(&self, since: &[LV]) -> impl Iterator<Item = LVKey> + '_ { 93 | let diff = self.cg.graph.diff(since, self.cg.version.as_ref()).1; 94 | diff.into_iter().flat_map(|range| { 95 | self.index.range(range).map(|(_v, &key)| { 96 | key 97 | }) 98 | }) 99 | } 100 | 101 | // Could take &self here but we need to separate cg for the borrowck. 102 | fn raw_to_v(cg: &CausalGraph, rv: RemoteVersion) -> LV { 103 | cg.agent_assignment.remote_to_local_version(rv) 104 | } 105 | 106 | // fn add_index(&mut self, v: Time, key: DocName) { 107 | // todo!(); 108 | // } 109 | // fn remove_index(&mut self, v: Time) { 110 | // todo!(); 111 | // } 112 | 113 | /// The causal graph must be updated before this is called. 114 | fn merge_set(&mut self, key_raw: RemoteVersion<'_>, mut given_raw_pairs: SmallVec<[RawPair<T>; 2]>) 115 | // where T: Clone 116 | { 117 | let key = Self::raw_to_v(&self.cg, key_raw); 118 | 119 | match self.values.entry(key) { 120 | Entry::Vacant(e) => { 121 | // Just insert the received value. 122 | e.insert(given_raw_pairs.into_iter().map(|(rv, val)| { 123 | let lv = self.cg.agent_assignment.remote_to_local_version(rv); 124 | self.index.insert(lv, key); 125 | (lv, val) 126 | }).collect()); 127 | } 128 | Entry::Occupied(mut e) => { 129 | // Merge the new entry with our existing entry. Usually this will be a 1-1 swap, 130 | // but we need to handle cases of concurrent writes too. 131 | let val = e.get_mut(); 132 | if val.len() == 1 && given_raw_pairs.len() == 1 { 133 | let old_lv = val[0].0; 134 | let new_lv = self.cg.agent_assignment.remote_to_local_version(given_raw_pairs[0].0); 135 | 136 | if let Some(ord) = self.cg.graph.version_cmp(new_lv, old_lv) { 137 | if ord == Ordering::Greater { 138 | // Replace it. 139 | let pair = given_raw_pairs.remove(0); // This is weird. 140 | drop(given_raw_pairs); 141 | val[0] = (new_lv, pair.1); 142 | // val[0] = given_raw_pairs[0].1.clone(); 143 | self.index.remove(&old_lv); 144 | self.index.insert(new_lv, key); 145 | } // Else the new item is old. Ignore it! 146 | return; 147 | } // else they're concurrent. Fall through below. 148 | } 149 | 150 | // Slow mode. Find all the versions at play, figure out which ones to keep and 151 | // build the new pairs list from that. 152 | 153 | // TODO: Using an arena allocator for all this junk would be better. 154 | let old_versions: SmallVec<[LV; 2]> = val.iter().map(|(v, _)| *v).collect(); 155 | let new_versions: SmallVec<[LV; 2]> = given_raw_pairs.iter().map(|(rv, _)| ( 156 | self.cg.agent_assignment.remote_to_local_version(*rv) 157 | )).collect(); 158 | 159 | // TODO: Might also be better to just clone() the items in here instead of copying 160 | // the memory all over the place.
161 | let mut new_values: SmallVec<[Option<T>; 2]> = given_raw_pairs.into_iter() 162 | .map(|(_, val)| Some(val)) 163 | .collect(); 164 | 165 | let mut idx_changes: SmallVec<[(LV, bool); 2]> = smallvec![]; 166 | 167 | // dbg!(old_versions.iter().copied().chain(new_versions.iter().copied()).collect::<Vec<_>>()); 168 | self.cg.graph.find_dominators_full( 169 | old_versions.iter().copied().chain(new_versions.iter().copied()), 170 | |v, dominates| { 171 | // There's 3 cases here: 172 | // - It's in the old set (val) 173 | // - It's in the new set (new_versions) 174 | // - Or it's in both. 175 | if dominates && !old_versions.contains(&v) { 176 | // It's in new only and we need to add it. 177 | // self.add_index(v, key); 178 | idx_changes.push((v, true)); 179 | 180 | // let val = new_versions. 181 | let idx = new_versions.iter().position(|v2| *v2 == v) 182 | .unwrap(); 183 | val.push((v, new_values[idx].take().unwrap())); 184 | } else if !dominates && !new_versions.contains(&v) { 185 | // It's in old only, and it's been superseded. Remove it! 186 | idx_changes.push((v, false)); 187 | let idx = val.iter().position(|(v2, _)| *v2 == v) 188 | .unwrap(); 189 | val.swap_remove(idx); 190 | } 191 | } 192 | ); 193 | 194 | if val.len() >= 2 { 195 | val.sort_unstable_by_key(|(v, _)| *v); 196 | } 197 | 198 | for (v, is_add) in idx_changes { 199 | if is_add { 200 | self.index.insert(v, key); 201 | } else { 202 | self.index.remove(&v); 203 | } 204 | } 205 | } 206 | } 207 | } 208 | 209 | #[allow(unused)] 210 | pub fn dbg_check(&self) { 211 | let mut expected_idx_size = 0; 212 | 213 | for (key, pairs) in self.values.iter() { 214 | if pairs.len() >= 2 { 215 | let version: SmallVec<[LV; 2]> = pairs.iter().map(|(v, _)| *v).collect(); 216 | 217 | let dominators = self.cg.graph.find_dominators(&version); 218 | assert_eq!(version.as_slice(), dominators.as_ref()); 219 | } 220 | 221 | expected_idx_size += pairs.len(); 222 | 223 | // Each entry should show up in the index. 224 | for (v, _) in pairs.iter() { 225 | assert_eq!(self.index.get(v), Some(key)); 226 | } 227 | } 228 | 229 | self.cg.dbg_check(false); 230 | 231 | assert_eq!(expected_idx_size, self.index.len()); 232 | } 233 | 234 | pub(crate) fn resolve_pairs<'a>(&'a self, pairs: &'a [Pair<T>]) -> &Pair<T> { 235 | let len = pairs.len(); 236 | 237 | let mut iter = pairs.iter(); 238 | let first = iter.next().expect("Internal consistency violation - pairs list empty"); 239 | 240 | if len > 1 { 241 | let av = self.cg.agent_assignment.local_to_agent_version(first.0); 242 | 243 | let (_, result) = iter.fold((av, first), |(av, pair1), pair2| { 244 | let av2 = self.cg.agent_assignment.local_to_agent_version(pair2.0); 245 | if self.cg.agent_assignment.tie_break_agent_versions(av, av2) == Ordering::Greater { 246 | (av, pair1) 247 | } else { 248 | (av2, pair2) 249 | } 250 | }); 251 | result 252 | } else { 253 | first 254 | } 255 | } 256 | 257 | #[allow(unused)] 258 | fn get_values_ref(&self, key: LVKey) -> Option<impl Iterator<Item = &T>> { 259 | self.values.get(&key) 260 | .map(|pairs| pairs.iter().map(|(_, val)| val)) 261 | } 262 | 263 | pub fn get_value(&self, key: LVKey) -> Option<&T> { 264 | let pairs = self.values.get(&key)?; 265 | // Some(self.resolve_pairs(pairs)) 266 | Some(&self.resolve_pairs(pairs).1) 267 | } 268 | } 269 | 270 | // TODO: Get rid of CGDelta and replace with a serialized u8.
271 | type CGDelta<'a> = SmallVec<[PartialCGEntry<'a>; 4]>; 272 | type SSDelta<'a, T> = SmallVec<[(RemoteVersion<'a>, SmallVec<[RawPair<'a, T>; 2]>); 4]>; 273 | 274 | #[derive(Clone, Debug, Serialize, Deserialize)] 275 | pub struct RemoteStateDelta<'a, T> { 276 | #[serde(borrow)] 277 | pub(crate) cg: CGDelta<'a>, 278 | #[serde(borrow)] 279 | pub ops: SSDelta<'a, T> 280 | } 281 | 282 | impl<T: Clone> StateSet<T> { 283 | pub fn merge_delta(&mut self, cg_delta: &CGDelta, ops: SSDelta<T>) -> DTRange { 284 | let updated = merge_partial_versions(&mut self.cg, cg_delta); 285 | 286 | for (key, pairs) in ops { 287 | self.merge_set(key, pairs); 288 | } 289 | 290 | updated 291 | } 292 | 293 | pub fn delta_since(&self, v: &[LV]) -> RemoteStateDelta<T> { 294 | let cg_delta = serialize_cg_from_version(&self.cg, v, self.cg.version.as_ref()); 295 | 296 | // dbg!(&self.version); 297 | let ranges = self.cg.graph.diff(v, self.cg.version.as_ref()); 298 | assert!(ranges.0.is_empty()); 299 | let ranges_rev = ranges.1; 300 | 301 | // dbg!(&ranges); 302 | 303 | let mut docs: BTreeMap<LVKey, SmallVec<[Pair<T>; 2]>> = Default::default(); 304 | // let mut ops = smallvec![]; 305 | for r in ranges_rev { 306 | for (v, key) in self.index.range(r) { 307 | let pair = self.values.get(key) 308 | .unwrap() 309 | .iter() 310 | .find(|(v2, _)| *v2 == *v) 311 | .unwrap(); 312 | 313 | docs.entry(*key).or_default().push(pair.clone()); 314 | } 315 | } 316 | 317 | RemoteStateDelta { 318 | cg: cg_delta, 319 | ops: docs 320 | .into_iter() 321 | .map(|(name, pairs)| ( 322 | self.cg.agent_assignment.local_to_remote_version(name), 323 | pairs.into_iter().map(|(v, value)| (self.cg.agent_assignment.local_to_remote_version(v), value)) 324 | .collect() 325 | )) 326 | .collect() 327 | } 328 | } 329 | } 330 | 331 | impl<T: PartialEq> PartialEq for StateSet<T> { 332 | fn eq(&self, other: &Self) -> bool { 333 | // This isn't optimized or anything. 334 | if self.len() != other.len() { return false; } 335 | if self.cg != other.cg { return false; } 336 | 337 | for (local_key, local_pairs) in self.values.iter() { 338 | let remote_key = self.cg.agent_assignment.local_to_remote_version(*local_key); 339 | let other_key = other.cg.agent_assignment.remote_to_local_version(remote_key); 340 | 341 | let Some(other_pairs) = other.values.get(&other_key) else { return false; }; 342 | 343 | if local_pairs.len() != other_pairs.len() { return false; } 344 | 345 | for (v, t) in local_pairs.iter() { 346 | let rv = self.cg.agent_assignment.local_to_remote_version(*v); 347 | let other_v = other.cg.agent_assignment.remote_to_local_version(rv); 348 | 349 | let Some(other_t) = other_pairs.iter().find(|e| e.0 == other_v) else { return false; }; 350 | if t != &other_t.1 { return false; } 351 | } 352 | } 353 | 354 | true 355 | } 356 | } 357 | impl<T: Eq> Eq for StateSet<T> {} 358 | 359 | 360 | #[cfg(test)] 361 | mod fuzzer { 362 | use diamond_types::Frontier; 363 | use rand::prelude::*; 364 | use crate::stateset::StateSet; 365 | 366 | 367 | pub(crate) fn choose_2<'a, T>(arr: &'a mut [T], rng: &mut SmallRng) -> (usize, &'a mut T, usize, &'a mut T) { 368 | loop { 369 | // Then merge 2 branches at random 370 | let a_idx = rng.gen_range(0..arr.len()); 371 | let b_idx = rng.gen_range(0..arr.len()); 372 | 373 | if a_idx != b_idx { 374 | // Oh god this is awful. I can't take mutable references to two array items.
375 | let (a_idx, b_idx) = if a_idx < b_idx { (a_idx, b_idx) } else { (b_idx, a_idx) }; 376 | // a = StateSet::new(); 512 | let seph = ss.cg.get_or_create_agent_id("seph"); 513 | let mike = ss.cg.get_or_create_agent_id("mike"); 514 | ss.dbg_check(); 515 | 516 | ss.cg.assign_local_op_with_parents(&[], seph, 1).last(); 517 | ss.cg.version = Frontier::from_sorted(&[0]); 518 | ss.merge_set(RemoteVersion("seph", 0), smallvec![(("seph", 0).into(), "hi".into())]); 519 | ss.dbg_check(); 520 | assert!(ss.get_values_ref(0).unwrap().eq((&["hi"]).iter())); 521 | 522 | // Replacing it with the same value should do nothing. 523 | ss.merge_set(RemoteVersion("seph", 0), smallvec![(("seph", 0).into(), "hi".into())]); 524 | ss.dbg_check(); 525 | assert!(ss.get_values_ref(0).unwrap().eq((&["hi"]).iter())); 526 | 527 | // Now we'll supercede it 528 | let a = ss.cg.assign_local_op_with_parents(&[0], seph, 1).last(); 529 | ss.cg.version = Frontier::from_sorted(&[a]); 530 | ss.merge_set(RemoteVersion("seph", 0), smallvec![(("seph", 1).into(), "yo".into())]); 531 | ss.dbg_check(); 532 | assert!(ss.get_values_ref(0).unwrap().eq((&["yo"]).iter())); 533 | 534 | // And insert something concurrent... 535 | let b = ss.cg.assign_local_op_with_parents(&[], mike, 1).last(); 536 | ss.cg.version = Frontier::from_sorted(&[a, b]); 537 | ss.merge_set(RemoteVersion("seph", 0), smallvec![(("mike", 0).into(), "xxx".into())]); 538 | ss.dbg_check(); 539 | assert!(ss.get_values_ref(0).unwrap().eq((&["yo", "xxx"]).iter())); 540 | 541 | // dbg!(&ss); 542 | // And collapse the concurrent editing state 543 | // println!("\n\n------"); 544 | let c = ss.cg.assign_local_op_with_parents(&[a, b], seph, 1).last(); 545 | ss.cg.version = Frontier::from_sorted(&[c]); 546 | ss.merge_set(RemoteVersion("seph", 0), smallvec![(("seph", 2).into(), "m".into())]); 547 | ss.dbg_check(); 548 | // dbg!(ss.get_values_ref(0).unwrap().collect::>()); 549 | assert!(ss.get_values_ref(0).unwrap().eq((&["m"]).iter())); 550 | 551 | 552 | // dbg!(ss.delta_since(&[c])); 553 | } 554 | } 555 | --------------------------------------------------------------------------------
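Finally, a sketch of how delta_since and merge_delta compose into a whole-state sync, in the style of the stateset.rs tests above. It is illustrative only and leans on the reconstructed generic bounds (StateSet<T>, impl<T: Clone> ...); it is not a test from the repo.

    #[test]
    fn sync_via_delta_sketch() {
        // Replicate one StateSet into a fresh one, mirroring what the protocol
        // does over TCP with an IdxDelta message.
        let mut a: StateSet<String> = StateSet::new();
        let seph = a.cg.get_or_create_agent_id("seph");
        let key = a.local_insert(seph, "hello".into());

        let mut b: StateSet<String> = StateSet::new();
        let delta = a.delta_since(&[]); // &[] = everything since the root version
        b.merge_delta(&delta.cg, delta.ops);

        // LV keys are peer-local in general; they only line up here because b
        // started empty. The PartialEq impl compares via remote versions instead.
        assert_eq!(b.get_value(key), Some(&"hello".to_string()));
        assert_eq!(a, b);
        b.dbg_check();
    }
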