├── .gitignore ├── crdt ├── id.pony ├── bias.pony ├── causal.pony ├── _private │ ├── _dot.pony │ ├── eq.pony │ ├── dot_checklist.pony │ ├── dot_kernel_single.pony │ ├── dot_context.pony │ └── dot_kernel.pony ├── replicated.pony ├── _math.pony ├── _default_value_fn.pony ├── test │ ├── _test_tokens.pony │ ├── main.pony │ ├── prop_g_counter.pony │ ├── test_g_set.pony │ ├── test_ujson_node.pony │ ├── prop_pn_counter.pony │ ├── test_mv_reg.pony │ ├── test_t_reg.pony │ ├── prop_c_counter.pony │ ├── test_c_counter.pony │ ├── test_p2_set.pony │ ├── test_pn_counter.pony │ ├── test_t_set.pony │ ├── test_g_counter.pony │ ├── test_awor_set.pony │ ├── test_rwor_set.pony │ ├── test_c_keyspace.pony │ └── test_ujson.pony ├── ujson_value.pony ├── tokens.pony ├── convergent.pony ├── _ujson_eq.pony ├── _ujson_show.pony ├── ujson_node.pony ├── ujson_parse.pony ├── g_set.pony ├── c_counter.pony ├── mv_reg.pony ├── g_counter.pony ├── t_reg.pony ├── awor_set.pony ├── pn_counter.pony ├── p2_set.pony ├── c_keyspace.pony ├── rwor_set.pony ├── ujson.pony └── t_set.pony ├── bundle.json ├── Makefile ├── .circleci └── config.yml └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .deps 2 | bin 3 | -------------------------------------------------------------------------------- /crdt/id.pony: -------------------------------------------------------------------------------- 1 | type ID is U64 2 | -------------------------------------------------------------------------------- /crdt/bias.pony: -------------------------------------------------------------------------------- 1 | primitive BiasInsert 2 | primitive BiasDelete 3 | primitive BiasGreater 4 | primitive BiasLesser 5 | -------------------------------------------------------------------------------- /crdt/causal.pony: -------------------------------------------------------------------------------- 1 | interface Causal[A: Causal[A] ref] is (Convergent[A] & Replicated) 2 | new ref 
create(id: ID) 3 | fun ref clear[D: A ref = A](delta': D = recover D(0) end): D 4 | -------------------------------------------------------------------------------- /bundle.json: -------------------------------------------------------------------------------- 1 | { 2 | "deps": [ 3 | { "type": "github", "repo": "jemc/pony-jason" }, 4 | { 5 | "tag": "0.5.0", 6 | "type": "github", 7 | "repo": "mfelsche/ponycheck" 8 | } 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /crdt/_private/_dot.pony: -------------------------------------------------------------------------------- 1 | use ".." 2 | use "collections" 3 | 4 | type _Dot is (ID, U64) 5 | 6 | primitive _DotHashFn is HashFunction[_Dot] 7 | // TODO: better hash combine? 8 | fun hash(x: _Dot): USize => (0x3f * x._1.hash()) + x._2.hash() 9 | fun eq(x: _Dot, y: _Dot): Bool => (x._1 == y._1) and (x._2 == y._2) 10 | -------------------------------------------------------------------------------- /crdt/replicated.pony: -------------------------------------------------------------------------------- 1 | interface Replicated 2 | fun ref from_tokens(that: TokensIterator)? 3 | """ 4 | Deserialize an instance of this data structure from a stream of tokens. 5 | """ 6 | 7 | fun ref each_token(tokens: Tokens) 8 | """ 9 | Serialize the data structure, capturing each token into the given Tokens. 10 | """ 11 | -------------------------------------------------------------------------------- /crdt/_math.pony: -------------------------------------------------------------------------------- 1 | primitive _Math 2 | fun saturated_sum[T: (Integer[T] val & Unsigned)](x: T, y: T): T => 3 | """ 4 | summing two unsigned integers while avoiding overflow by returning 5 | the datatypes maximum value in this case. 
6 | """ 7 | (let sum: T, let overflow: Bool) = x.addc(y) 8 | if overflow then 9 | T.max_value() 10 | else 11 | sum 12 | end 13 | -------------------------------------------------------------------------------- /crdt/_default_value_fn.pony: -------------------------------------------------------------------------------- 1 | // TODO: Use Pony's future value-dependent types instead of this hack. 2 | interface val _DefaultValueFn[A] 3 | new val create() 4 | fun apply(): A 5 | 6 | primitive _DefaultValueString is _DefaultValueFn[String] 7 | new create() => None 8 | fun apply(): String => "" 9 | 10 | primitive _DefaultValueNumber[A: (Number & Real[A] val)] is _DefaultValueFn[A] 11 | new create() => None 12 | fun apply(): A => A.from[U8](0) 13 | -------------------------------------------------------------------------------- /crdt/_private/eq.pony: -------------------------------------------------------------------------------- 1 | 2 | interface EqFn[A] 3 | new val create() 4 | fun apply(a: A, a': A): Bool 5 | 6 | primitive Eq[A: Equatable[A] #read] 7 | fun apply(a: A, a': A): Bool => 8 | a.eq(a') 9 | 10 | primitive EqIs[A: Any #any] 11 | fun apply(a: A, a': A): Bool => 12 | a is a' 13 | 14 | primitive EqTuple2[A: Equatable[A] #read, B: Equatable[B] #read] 15 | fun apply(a: (A, B), a': (A, B)): Bool => 16 | a._1.eq(a'._1) and a._2.eq(a'._2) 17 | -------------------------------------------------------------------------------- /crdt/test/_test_tokens.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | primitive _TestTokensWellFormed 5 | fun apply(h: TestHelper, tokens: Tokens, loc: SourceLoc = __loc) => 6 | var expected: USize = 1 7 | var actual: USize = 0 8 | let iter = tokens.array.values() 9 | try 10 | while true do 11 | match iter.next()? 
12 | | let size: USize => expected = expected + size 13 | end 14 | actual = actual + 1 15 | end 16 | end 17 | h.assert_eq[USize](expected, actual, "token count", loc) 18 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | all: test 2 | .PHONY: all test clean lldb lldb-test ci ci-setup 3 | 4 | PONYC ?= $(shell which ponyc) 5 | 6 | PKG=crdt 7 | 8 | .deps: bundle.json 9 | stable fetch 10 | 11 | bin/test: $(shell find ${PKG} -name *.pony) 12 | mkdir -p bin 13 | stable env $(PONYC) --debug -o bin ${PKG}/test 14 | 15 | test: bin/test 16 | $^ 17 | 18 | clean: 19 | rm -rf bin 20 | 21 | lldb: 22 | stable env lldb -o run -- $(PONYC) --debug -o /tmp ${PKG}/test 23 | 24 | lldb-test: bin/test 25 | lldb -o run -- bin/test 26 | 27 | ci: test 28 | 29 | ci-setup: 30 | stable fetch 31 | -------------------------------------------------------------------------------- /crdt/ujson_value.pony: -------------------------------------------------------------------------------- 1 | use "collections" 2 | 3 | type UJSONValue is (None | Bool | I64 | F64 | String) 4 | 5 | primitive _UJSONValueHashFn is HashFunction[UJSONValue] 6 | fun hash(x': UJSONValue): USize => digestof x' 7 | fun eq(x': UJSONValue, y: UJSONValue): Bool => 8 | match x' 9 | | let x: None => y is None 10 | | let x: Bool => try x == (y as Bool) else false end 11 | | let x: I64 => try x == (y as I64) else false end 12 | | let x: F64 => try x == (y as F64) else false end 13 | | let x: String => try x == (y as String) else false end 14 | end 15 | -------------------------------------------------------------------------------- /crdt/tokens.pony: -------------------------------------------------------------------------------- 1 | interface ref _TokensSource 2 | fun ref each_token(tokens: Tokens) 3 | 4 | class Tokens 5 | embed array: Array[Any val] = array.create() 6 | 7 | new ref create() => None 8 | fun ref 
push(a: Any val) => array.push(a) 9 | fun ref from(s: _TokensSource) => s.each_token(this) 10 | fun iterator(): TokensIterator => _TokensIterator(array.values()) 11 | 12 | interface TokensIterator 13 | fun ref next[A: Any val](): A? 14 | 15 | class _TokensIterator 16 | let _iter: Iterator[Any val] 17 | 18 | new ref create(iter': Iterator[Any val]) => _iter = iter' 19 | fun ref next[A: Any val](): A? => _iter.next()? as A 20 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | vs-ponyc-release: 4 | docker: 5 | - image: ponylang/ponyc:release 6 | steps: 7 | - checkout 8 | - run: make ci-setup 9 | - run: make ci 10 | vs-ponyc-master: 11 | docker: 12 | - image: ponylang/ponyc:latest 13 | steps: 14 | - checkout 15 | - run: make ci-setup 16 | - run: make ci 17 | 18 | workflows: 19 | version: 2 20 | commit: 21 | jobs: 22 | - vs-ponyc-release 23 | nightly: 24 | triggers: 25 | - schedule: 26 | cron: "0 0 * * *" 27 | filters: 28 | branches: 29 | only: master 30 | jobs: 31 | - vs-ponyc-master 32 | -------------------------------------------------------------------------------- /crdt/convergent.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | 3 | interface Convergent[A: Convergent[A] #read] 4 | fun ref converge(that: box->A): Bool 5 | """ 6 | Converge from that data structure into this one, mutating this one. 7 | The other data structure may be either a delta-state or a complete state. 8 | Returns true if the convergence added new information to the data structure. 9 | """ 10 | 11 | new ref _create_in(ctx: DotContext) 12 | """ 13 | Create an instance of the data structure within the context (if applicable). 
14 | """ 15 | 16 | fun ref _converge_empty_in(ctx: DotContext box): Bool 17 | """ 18 | Converge an imaginary instance of a data structure with no information 19 | other than being within the given context. This saves on an allocation. 20 | """ 21 | 22 | fun is_empty(): Bool 23 | """ 24 | Return true if the data structure contains no information (bottom state). 25 | """ 26 | -------------------------------------------------------------------------------- /crdt/_private/dot_checklist.pony: -------------------------------------------------------------------------------- 1 | class DotChecklist 2 | """ 3 | This small class is used to integrate non-causal CRDTs with CKeyspace, 4 | where they expect to be used within a shared DotContext. 5 | 6 | The DotChecklist gives them a way to contribute to that causal history 7 | in a minimal way, where every write operation the CRDT results in a 8 | call to the `write` method of the checklist, which inserts a dot into history. 9 | Note that this isn't enough to actually make them causal CRDTs, though. 10 | 11 | Once integrated thus, the DotContext in a CKeyspace for a non-causal CRDT can 12 | be used to detect when there are local changes to any value, just like it 13 | already can do when used with causal CRDT values. This in turn allows for 14 | efficient anti-entropy mechanisms operating over the keyspace. 15 | """ 16 | let _ctx: DotContext 17 | new ref create(ctx': DotContext) => _ctx = ctx' 18 | fun ref write() => _ctx.next_dot() 19 | -------------------------------------------------------------------------------- /crdt/_ujson_eq.pony: -------------------------------------------------------------------------------- 1 | primitive _UJSONEq 2 | """ 3 | An "equality-testing" function that returns true if the path array on the left 4 | is an exact match of the path array on the right. 
5 | """ 6 | fun apply( 7 | a: (Array[String] val, UJSONValue), 8 | a': (Array[String] val, UJSONValue)) 9 | : Bool => 10 | _UJSONPathEq(a, a') and _UJSONValueHashFn.eq(a._2, a'._2) 11 | 12 | primitive _UJSONPathEq 13 | """ 14 | An "equality-testing" function that returns true if the path array on the left 15 | is an exact match of the path array on the right. 16 | """ 17 | fun apply( 18 | a: (Array[String] val, UJSONValue), 19 | a': (Array[String] val, UJSONValue)) 20 | : Bool => 21 | (a._1.size() == a'._1.size()) and _UJSONPathEqPrefix(a, a') 22 | 23 | primitive _UJSONPathEqPrefix 24 | """ 25 | An "equality-testing" function that returns true if the path array on the left 26 | is a prefix of the path array on the right. Note that this is not commutative, 27 | so the order of arguments matters. 28 | """ 29 | fun apply( 30 | a: (Array[String] val, UJSONValue), 31 | a': (Array[String] val, UJSONValue)) 32 | : Bool => 33 | try 34 | for (index, path_segment) in a._1.pairs() do 35 | if a'._1(index)? 
!= path_segment then return false end 36 | end 37 | true 38 | else 39 | false 40 | end 41 | -------------------------------------------------------------------------------- /crdt/test/main.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use "ponycheck" 3 | 4 | actor Main is TestList 5 | new create(env: Env) => PonyTest(env, this) 6 | 7 | fun tag tests(test: PonyTest) => 8 | // TODO add unit tests for edge cases of DotContext, DotKernel, etc 9 | 10 | test(TestGSet) 11 | test(TestGSetDelta) 12 | test(TestGSetTokens) 13 | 14 | test(TestP2Set) 15 | test(TestP2SetDelta) 16 | test(TestP2SetTokens) 17 | 18 | test(TestTSet) 19 | test(TestTSetDelta) 20 | test(TestTSetTokens) 21 | 22 | test(TestTReg) 23 | test(TestTRegDelta) 24 | test(TestTRegTokens) 25 | 26 | test(TestTLog) 27 | test(TestTLogDelta) 28 | test(TestTLogTokens) 29 | 30 | test(TestGCounter) 31 | test(TestGCounterDelta) 32 | test(TestGCounterTokens) 33 | test(TestGCounterMax) 34 | 35 | test(TestPNCounter) 36 | test(TestPNCounterDelta) 37 | test(TestPNCounterTokens) 38 | 39 | test(TestCCounter) 40 | test(TestCCounterDelta) 41 | test(TestCCounterTokens) 42 | 43 | test(TestAWORSet) 44 | test(TestAWORSetDelta) 45 | test(TestAWORSetTokens) 46 | 47 | test(TestRWORSet) 48 | test(TestRWORSetDelta) 49 | test(TestRWORSetTokens) 50 | 51 | test(TestMVReg) 52 | test(TestMVRegDelta) 53 | test(TestMVRegTokens) 54 | 55 | test(TestUJSON) 56 | test(TestUJSONDelta) 57 | test(TestUJSONTokens) 58 | test(TestUJSONNode) 59 | 60 | test(TestCKeyspace) 61 | test(TestCKeyspaceDelta) 62 | test(TestCKeyspaceTokens) 63 | 64 | test(Property1UnitTest[(USize, Array[_CmdOnReplica])](CCounterIncProperty)) 65 | test(Property1UnitTest[(USize, Array[_CmdOnReplica])](CCounterIncDecProperty)) 66 | test(Property1UnitTest[(USize, Array[_CmdOnReplica[U64]])](GCounterIncProperty)) 67 | test(Property1UnitTest[(USize, Array[_CmdOnReplica[_PNCounterCmd]])](PNCounterIncProperty)) 68 | 
test(Property1UnitTest[(USize, Array[_CmdOnReplica[_PNCounterCmd]])](PNCounterIncDecProperty)) 69 | -------------------------------------------------------------------------------- /crdt/_ujson_show.pony: -------------------------------------------------------------------------------- 1 | primitive _UJSONShow 2 | fun show_string(buf: String iso, s: String box): String iso^ => 3 | // TODO: proper escaping 4 | (consume buf).>push('"').>append(s.string()).>push('"') 5 | 6 | fun show_value(buf: String iso, value': UJSONValue): String iso^ => 7 | match value' 8 | | let value: None => (consume buf).>append("null") 9 | | let value: Bool => (consume buf).>append(value.string()) 10 | | let value: I64 => (consume buf).>append(value.string()) 11 | | let value: F64 => (consume buf).>append(value.string()) 12 | | let value: String => show_string(consume buf, value) 13 | end 14 | 15 | fun show_set( 16 | buf': String iso, 17 | iter': Iterator[UJSONValue], 18 | close_bracket: Bool = true) 19 | : String iso^ => 20 | var buf = consume buf' 21 | buf.push('[') 22 | for value in iter' do 23 | buf = show_value(consume buf, value) 24 | if iter'.has_next() then buf.push(',') end 25 | end 26 | if close_bracket then buf.push(']') end 27 | consume buf 28 | 29 | fun show_map( 30 | buf': String iso, 31 | iter': Iterator[(String, UJSONNode box)]) 32 | : String iso^ => 33 | var buf = consume buf' 34 | buf.push('{') 35 | for (key, node) in iter' do 36 | buf = show_string(consume buf, key) 37 | buf.push(':') 38 | buf = show_node(consume buf, node) 39 | if iter'.has_next() then buf.push(',') end 40 | end 41 | buf.push('}') 42 | consume buf 43 | 44 | fun show_node(buf': String iso, node': UJSONNode box): String iso^ => 45 | var buf = consume buf' 46 | if node'._next_size() == 0 then 47 | if node'._here_size() == 0 then 48 | buf 49 | elseif node'._here_size() == 1 then 50 | show_value(consume buf, try node'._here_values().next()? 
end) 51 | else 52 | show_set(consume buf, node'._here_values()) 53 | end 54 | else 55 | if node'._here_size() > 0 then 56 | buf = show_set(consume buf, node'._here_values(), false) 57 | buf.push(',') 58 | buf = show_map(consume buf, node'._next_pairs()) 59 | buf.push(']') 60 | buf 61 | else 62 | show_map(consume buf, node'._next_pairs()) 63 | end 64 | end -------------------------------------------------------------------------------- /crdt/ujson_node.pony: -------------------------------------------------------------------------------- 1 | use "collections" 2 | 3 | class _UJSONNodeBuilder 4 | let _path: Array[String] val 5 | let _root: UJSONNode 6 | 7 | new create(path': Array[String] val = [], root': UJSONNode = UJSONNode) => 8 | (_path, _root) = (path', root') 9 | 10 | fun root(): this->UJSONNode => _root 11 | 12 | fun ref collect(path': Array[String] val, value': UJSONValue) => 13 | if not _UJSONPathEqPrefix((_path, None), (path', None)) then return end 14 | let path_suffix = path'.trim(_path.size()) 15 | 16 | var node = _root 17 | for path_segment in path_suffix.values() do node = node(path_segment) end 18 | node.put(value') 19 | 20 | class UJSONNode is Equatable[UJSONNode] 21 | embed _here: HashSet[UJSONValue, _UJSONValueHashFn] = _here.create() 22 | embed _next: Map[String, UJSONNode] = _next.create() 23 | 24 | fun _here_size(): USize => _here.size() 25 | fun _next_size(): USize => _next.size() 26 | fun _here_values(): Iterator[UJSONValue]^ => _here.values() 27 | fun _next_pairs(): Iterator[(String, UJSONNode box)]^ => _next.pairs() 28 | 29 | new ref create() => None 30 | 31 | new ref from_string(s: String box, errs: Array[String] = [])? => 32 | UJSONParse._into(this, s, errs)? 33 | 34 | fun ref put(value': UJSONValue) => _here.set(value') 35 | 36 | fun ref apply(path_segment': String): UJSONNode => 37 | try _next(path_segment')? 
else 38 | let node = UJSONNode 39 | _next(path_segment') = node 40 | node 41 | end 42 | 43 | fun is_void(): Bool => (_here.size() == 0) and (_next.size() == 0) 44 | 45 | fun eq(that: UJSONNode box): Bool => 46 | if _here != that._here then return false end 47 | if _next.size() != that._next.size() then return false end 48 | for (key, node) in _next.pairs() do 49 | if try node != that._next(key)? else true end then return false end 50 | end 51 | true 52 | 53 | fun _flat_each( 54 | path: Array[String] val, 55 | fn: {ref(Array[String] val, UJSONValue)} ref) 56 | => 57 | for value in _here.values() do fn(path, value) end 58 | 59 | for (key, node) in _next.pairs() do 60 | node._flat_each(recover path.clone().>push(key) end, fn) 61 | end 62 | 63 | fun string(): String iso^ => _UJSONShow.show_node(recover String end, this) 64 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pony-crdt [![CircleCI](https://circleci.com/gh/jemc/pony-crdt.svg?style=shield)](https://circleci.com/gh/jemc/pony-crdt) 2 | 3 | Delta-State Convergent Replicated Data Types (ẟ-CRDTs) for the Pony language. 4 | 5 | CRDTs are a special class of data types that can be replicated in a highly-available, partition-tolerant distributed system to yield results that are eventually consistent. That is, with enough message passing, replicas will eventually converge to the same result, even faced with arbitrary partitions. 6 | 7 | In order to acheive commutativity and idempotence, these data structures all impose certain constraints that are often more limiting than typical localized data structures, but if you can model all the state in your application in terms of CRDTs, then you can get eventually consistent replication "for free". In practice, this kind of consideration is often a critical step to scaling a highly-available stateful service. 
8 | 9 | Delta-State CRDTs are special CRDTs that can produce delta-states as a by-product of each mutable operation, where the delta-state may be shipped and converged between peers instead of shipping and converging the entire state, and the convergence of the delta-states retains the same guarantees of eventual consistency. This approach has the benefit of reducing the size of the state that must be transported between peers, making CRDTs more practical for real-world applications. 10 | 11 | This package provides CRDTs which may be freely converged using either the full-state or delta-state approach, based on the needs of the application. Every mutable operation returns the corresponding delta-state, and at any time the full-state may be fetched from the data type. So, the application may use the full-state replication approach by ignoring the delta-state return values, or may replicate the delta-state return values in lieu of fetching and replicating the full-state. 12 | 13 | This implementation of ẟ-CRDTs is inspired and informed by the following prior work: 14 | * [This 2016 academic paper](https://arxiv.org/abs/1603.01529) (***Delta State Replicated Data Types*** – *Almeida et al. 2016*). 15 | * [This informal summary of the paper](https://blog.acolyer.org/2016/04/25/delta-state-replicated-data-types/). 16 | * [This C++ reference implementation of the paper](https://github.com/CBaquero/delta-enabled-crdts). 17 | -------------------------------------------------------------------------------- /crdt/test/prop_g_counter.pony: -------------------------------------------------------------------------------- 1 | 2 | use "ponycheck" 3 | use ".." 4 | use "collections" 5 | 6 | class GCounterIncProperty is Property1[(USize, Array[_CmdOnReplica[U64]])] 7 | """ 8 | verify that a set of CCounter replicas that are only incremented 9 | behave like a single U64 counter once completely converged. 
10 | """ 11 | 12 | fun name(): String => "crdt.prop.GCounter" 13 | 14 | fun gen(): Generator[(USize, Array[_CmdOnReplica[U64]])] => 15 | """ 16 | generate a random sequence of increment commands on random replicas 17 | """ 18 | Generators.usize(2, 10).flat_map[(USize, Array[_CmdOnReplica[U64]])]( 19 | {(num_replicas) => 20 | let cmds_gen = Generators.array_of[_CmdOnReplica[U64]]( 21 | Generators.map2[USize, U64, _CmdOnReplica[U64]]( 22 | Generators.usize(0, num_replicas-1), 23 | Generators.u64(), 24 | {(replica, inc) => 25 | _CmdOnReplica[U64](replica, inc) } 26 | ) 27 | ) 28 | Generators.zip2[USize, Array[_CmdOnReplica[U64]]]( 29 | Generators.unit[USize](num_replicas), cmds_gen) 30 | }) 31 | 32 | fun property(sample: (USize, Array[_CmdOnReplica[U64]]), h: PropertyHelper) ? => 33 | """ 34 | validate that an array of commands against random replicas 35 | converges to the same value as a U64 counter exposed to the same commands. 36 | """ 37 | (let num_replicas, let commands) = sample 38 | let replicas: Array[GCounter] = replicas.create(num_replicas) 39 | for x in Range[U64](0, num_replicas.u64()) do 40 | replicas.push(GCounter(x)) 41 | end 42 | 43 | var expected: U64 = 0 44 | let deltas = Array[GCounter](commands.size()) 45 | 46 | for command in commands.values() do 47 | let inc = command.cmd 48 | h.log("executing +" + inc.string(), true) 49 | 50 | deltas.push( 51 | replicas(command.replica)?.increment(inc)) 52 | (let sum, let overflowed) = expected.addc(inc) 53 | expected = if overflowed then U64.max_value() else sum end 54 | 55 | let observer = GCounter(U64.max_value()) 56 | for replica in replicas.values() do 57 | observer.converge(replica) 58 | end 59 | if not h.assert_eq[U64](observer.value(), expected) then return end 60 | end 61 | 62 | let delta_observer = GCounter(U64.max_value() - 1) 63 | for delta in deltas.values() do 64 | delta_observer.converge(delta) 65 | end 66 | h.assert_eq[U64](expected, delta_observer.value()) 67 | 68 | 
-------------------------------------------------------------------------------- /crdt/ujson_parse.pony: -------------------------------------------------------------------------------- 1 | use json = "jason" 2 | 3 | primitive UJSONParse 4 | fun _into(node': UJSONNode, source: String box, errs: Array[String])? => 5 | let builder = _UJSONNodeBuilder([], node') 6 | let parser = json.Parser 7 | let notify = _UJSONParserNotify({(path, value)(builder) => 8 | // TODO: Fix ponyc to allow iso clone for arrays with #share elements. 9 | let path' = recover Array[String](path.size()) end 10 | for segment in path.values() do path'.push(segment) end 11 | builder.collect(consume path', value) 12 | }) 13 | try 14 | parser.parse(source, notify)? 15 | else 16 | errs.push(parser.describe_error()) 17 | error 18 | end 19 | 20 | fun value(source: String box, errs: Array[String] = []): UJSONValue? => 21 | let register = Array[UJSONValue] 22 | let parser = json.Parser 23 | let notify = 24 | _UJSONParserNotifySingle({(value)(register) => register.push(value) }) 25 | try 26 | parser.parse(source, notify)? 27 | register.pop()? 28 | else 29 | errs.push(parser.describe_error()) 30 | error 31 | end 32 | 33 | fun node(source: String box, errs: Array[String] = []): UJSONNode? => 34 | let out = UJSONNode 35 | _into(out, source, errs)? 36 | out 37 | 38 | class _UJSONParserNotify is json.ParserNotify 39 | let _fn: {ref(Array[String] box, UJSONValue)} ref 40 | let _path: Array[String] = [] 41 | 42 | new ref create(fn': {ref(Array[String] box, UJSONValue)} ref) => _fn = fn' 43 | 44 | fun ref apply(parser: json.Parser, token: json.Token) => 45 | match token 46 | | json.TokenNull => _fn(_path, None) 47 | | json.TokenTrue => _fn(_path, true) 48 | | json.TokenFalse => _fn(_path, false) 49 | | json.TokenNumber => _fn(_path, parser.last_number) 50 | | json.TokenString => _fn(_path, parser.last_string) 51 | | json.TokenKey => _path.push(parser.last_string) 52 | | json.TokenPairPost => try _path.pop()? 
end 53 | end 54 | 55 | class _UJSONParserNotifySingle is json.ParserNotify 56 | let _fn: {ref(UJSONValue)} ref 57 | 58 | new ref create(fn': {ref(UJSONValue)} ref) => _fn = fn' 59 | 60 | fun ref apply(parser: json.Parser, token: json.Token) => 61 | match token 62 | | json.TokenNull => _fn(None) 63 | | json.TokenTrue => _fn(true) 64 | | json.TokenFalse => _fn(false) 65 | | json.TokenNumber => _fn(parser.last_number) 66 | | json.TokenString => _fn(parser.last_string) 67 | | json.TokenObjectStart => parser.abort() 68 | | json.TokenArrayStart => parser.abort() 69 | end 70 | -------------------------------------------------------------------------------- /crdt/test/test_g_set.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestGSet is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.GSet" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = GSet[String] 10 | let b = GSet[String] 11 | let c = GSet[String] 12 | 13 | a.set("apple") 14 | b.set("banana") 15 | c.set("currant") 16 | 17 | h.assert_eq[USize](a.size(), 1) 18 | h.assert_eq[USize](b.size(), 1) 19 | h.assert_eq[USize](c.size(), 1) 20 | h.assert_ne[GSet[String]](a, b) 21 | h.assert_ne[GSet[String]](b, c) 22 | h.assert_ne[GSet[String]](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[USize](a.size(), 3) 34 | h.assert_eq[USize](b.size(), 3) 35 | h.assert_eq[USize](c.size(), 3) 36 | h.assert_eq[GSet[String]](a, b) 37 | h.assert_eq[GSet[String]](b, c) 38 | h.assert_eq[GSet[String]](c, a) 39 | 40 | class TestGSetDelta is UnitTest 41 | new iso create() => None 42 | fun name(): String => "crdt.GSet (ẟ)" 43 | 44 | fun apply(h: TestHelper) => 45 | let a = GSet[String] 46 | let b = GSet[String] 47 | let 
c = GSet[String] 48 | 49 | let a_delta = a.set("apple") 50 | let b_delta = b.set("banana") 51 | let c_delta = c.set("currant") 52 | 53 | h.assert_eq[USize](a.size(), 1) 54 | h.assert_eq[USize](b.size(), 1) 55 | h.assert_eq[USize](c.size(), 1) 56 | h.assert_ne[GSet[String]](a, b) 57 | h.assert_ne[GSet[String]](b, c) 58 | h.assert_ne[GSet[String]](c, a) 59 | 60 | h.assert_false(a.converge(a_delta)) 61 | 62 | h.assert_true(a.converge(b_delta)) 63 | h.assert_true(a.converge(c_delta)) 64 | h.assert_true(b.converge(c_delta)) 65 | h.assert_true(b.converge(a_delta)) 66 | h.assert_true(c.converge(a_delta)) 67 | h.assert_true(c.converge(b_delta)) 68 | 69 | h.assert_eq[USize](a.size(), 3) 70 | h.assert_eq[USize](b.size(), 3) 71 | h.assert_eq[USize](c.size(), 3) 72 | h.assert_eq[GSet[String]](a, b) 73 | h.assert_eq[GSet[String]](b, c) 74 | h.assert_eq[GSet[String]](c, a) 75 | 76 | class TestGSetTokens is UnitTest 77 | new iso create() => None 78 | fun name(): String => "crdt.GSet (tokens)" 79 | 80 | fun apply(h: TestHelper) => 81 | let data = GSet[String] 82 | .> set("apple") 83 | .> set("banana") 84 | .> set("currant") 85 | 86 | let tokens = Tokens .> from(data) 87 | _TestTokensWellFormed(h, tokens) 88 | 89 | try 90 | h.assert_eq[GSet[String]]( 91 | data, 92 | data.create() .> from_tokens(tokens.iterator())? 93 | ) 94 | else 95 | h.fail("failed to parse token stream") 96 | end 97 | -------------------------------------------------------------------------------- /crdt/test/test_ujson_node.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 
3 | 4 | class TestUJSONNode is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.UJSONNode (parse/print)" 7 | 8 | fun apply(h: TestHelper) => 9 | /// 10 | // Keywords 11 | 12 | example(h, "true", "true") 13 | example(h, "false", "false") 14 | example(h, "null", "null") 15 | 16 | /// 17 | // Numbers 18 | 19 | example(h, "123", "123") 20 | example(h, "-123", "-123") 21 | example(h, "123.456", "123.456") 22 | example(h, "-123.456", "-123.456") 23 | example(h, "123e2", "12300") 24 | example(h, "-123e-2", "-1.23") 25 | example(h, "-123.456e2", "-12345.6") 26 | example(h, "123.456e-2", "1.23456") 27 | 28 | /// 29 | // Maps and Sets 30 | 31 | example(h, 32 | """{"fruit":"apple"}""", 33 | """{"fruit":"apple"}""") 34 | 35 | example(h, 36 | """["apple","banana","currant"]""", 37 | """["apple","banana","currant"]""") 38 | 39 | example(h, 40 | """{"fruit":["apple","banana","currant"],"edible":true}""", 41 | """{"fruit":["apple","banana","currant"],"edible":true}""") 42 | 43 | example(h, 44 | """{"n":{"e":{"s":{"t":true}}}}""", 45 | """{"n":{"e":{"s":{"t":true}}}}""") 46 | 47 | example(h, // a single-element set will not be rendered as a set 48 | """{"fruit":["apple"]}""", 49 | """{"fruit":"apple"}""") 50 | 51 | example(h, // an empty set will be pruned 52 | """{"fruit":"apple","empty":[]}""", 53 | """{"fruit":"apple"}""") 54 | 55 | example(h, // an empty map will be pruned 56 | """{"fruit":"apple","empty":{}}""", 57 | """{"fruit":"apple"}""") 58 | 59 | example(h, // duplicate elements in a set will be pruned 60 | """{"fruit":["apple","banana","apple"]}""", 61 | """{"fruit":["apple","banana"]}""") 62 | 63 | example(h, // duplicate keys in a map will be merged 64 | """{"fruit":"apple","fruit":"banana","edible":true}""", 65 | """{"fruit":["apple","banana"],"edible":true}""") 66 | 67 | example(h, // a set of maps will be merged 68 | """[{"fruit":"apple"},{"fruit":"banana"},{"edible":true}]""", 69 | """{"fruit":["apple","banana"],"edible":true}""") 70 | 71 | 
example(h, // a set of maps and non-maps will only merge the maps 72 | """[1,2,3,{"fruit":"apple"},{"fruit":"banana"},{"edible":true}]""", 73 | """[1,2,3,{"fruit":["apple","banana"],"edible":true}]""") 74 | 75 | example(h, // empty sets and maps within a set will be pruned 76 | """[1,2,3,[],{},[{}],[[[]]]]""", 77 | """[1,2,3]""") 78 | 79 | example(h, // nested sets will be merged 80 | """[1,2,3,[4,[5,[6]]],[[[7]]]]""", 81 | """[1,2,3,4,5,6,7]""") 82 | 83 | /// 84 | // Void (no data present at all) 85 | 86 | example(h, "", "") 87 | example(h, "{}", "") 88 | example(h, "[]", "") 89 | example(h, "[{}]", "") 90 | example(h, "[{},{},{}]", "") 91 | example(h, "[[],[],[]]", "") 92 | example(h, "[[[[[]]]]]", "") 93 | 94 | fun example( 95 | h: TestHelper, 96 | parse: String, 97 | print: String, 98 | loc: SourceLoc = __loc) 99 | => 100 | let errs = Array[String] 101 | try 102 | let actual = UJSONNode.from_string(parse, errs)?.string() 103 | h.assert_eq[String](consume actual, print, "", loc) 104 | else 105 | for err in errs.values() do h.log(err) end 106 | h.assert_no_error({()? => error }, "Couldn't parse: " + parse, loc) 107 | end 108 | -------------------------------------------------------------------------------- /crdt/test/prop_pn_counter.pony: -------------------------------------------------------------------------------- 1 | 2 | use "ponycheck" 3 | use ".." 
4 | use "collections" 5 | 6 | class val _PNCounterCmd is Stringable 7 | let u_cmd: {(U64): U64 } val 8 | let cc_cmd: {(PNCounter)} val 9 | let diff: U64 10 | let op: _CounterOp 11 | 12 | new val create(diff': U64, op': _CounterOp = _INC) => 13 | diff = diff' 14 | op = op' 15 | cc_cmd = {(cc) => if op is _INC then cc.increment(diff) else cc.decrement(diff) end } val 16 | u_cmd = {(t) => if op is _INC then t + diff else t - diff end } val 17 | 18 | fun string(): String iso^ => 19 | recover 20 | String() 21 | .>append(if op is _INC then "+" else "-" end + diff.string()) 22 | end 23 | 24 | trait PNCounterProperty is Property1[(USize, Array[_CmdOnReplica[_PNCounterCmd]])] 25 | fun property(sample: (USize, Array[_CmdOnReplica[_PNCounterCmd]]), h: PropertyHelper) ? => 26 | """ 27 | validate that an array of commands against random replicas 28 | converges to the same value as a U64 counter exposed to the same commands. 29 | """ 30 | (let num_replicas, let commands) = sample 31 | let replicas: Array[PNCounter] = replicas.create(num_replicas) 32 | for x in Range[U64](0, num_replicas.u64()) do 33 | replicas.push(PNCounter(x)) 34 | end 35 | 36 | var expected: U64 = 0 37 | for command in commands.values() do 38 | let cmd = command.cmd 39 | h.log("executing " + cmd.string(), true) 40 | 41 | cmd.cc_cmd(replicas(command.replica)?) 42 | expected = cmd.u_cmd(expected) 43 | 44 | let observer = PNCounter(U64.max_value()) 45 | for replica in replicas.values() do 46 | observer.converge(replica) 47 | end 48 | if not h.assert_eq[U64](observer.value(), expected) then return end 49 | end 50 | 51 | 52 | class PNCounterIncProperty is PNCounterProperty 53 | """ 54 | verify that a set of PNCounter replicas that are only incremented 55 | behave like a single U64 counter once completely converged. 
56 | """ 57 | 58 | fun name(): String => "crdt.prop.PNCounter.Inc" 59 | 60 | fun gen(): Generator[(USize, Array[_CmdOnReplica[_PNCounterCmd]])] => 61 | Generators.usize(2, 10).flat_map[(USize, Array[_CmdOnReplica[_PNCounterCmd]])]({ 62 | (num_replicas) => 63 | let cmds_gen = Generators.array_of[_CmdOnReplica[_PNCounterCmd]]( 64 | Generators.map2[USize, U64, _CmdOnReplica[_PNCounterCmd]]( 65 | Generators.usize(0, num_replicas-1), 66 | Generators.u64(), 67 | {(replica, inc) => _CmdOnReplica[_PNCounterCmd](replica, _PNCounterCmd(inc, _INC)) } 68 | ) 69 | ) 70 | Generators.zip2[USize, Array[_CmdOnReplica[_PNCounterCmd]]]( 71 | Generators.unit[USize](num_replicas), cmds_gen 72 | ) 73 | }) 74 | 75 | 76 | class PNCounterIncDecProperty is PNCounterProperty 77 | """ 78 | verify that a set of PNCounter replicas that are incremented and decremented 79 | behave like a single U64 counter once completely converged. 80 | """ 81 | 82 | fun name(): String => "crdt.prop.PNCounter" 83 | 84 | fun gen(): Generator[(USize, Array[_CmdOnReplica[_PNCounterCmd]])] => 85 | Generators.usize(2, 10).flat_map[(USize, Array[_CmdOnReplica[_PNCounterCmd]])]({ 86 | (num_replicas) => 87 | let cmds_gen = Generators.array_of[_CmdOnReplica[_PNCounterCmd]]( 88 | Generators.map2[USize, _PNCounterCmd, _CmdOnReplica[_PNCounterCmd]]( 89 | Generators.usize(0, num_replicas-1), 90 | Generators.u64().flat_map[_PNCounterCmd]({ 91 | (u) => 92 | Generators.one_of[_PNCounterCmd]([ 93 | _PNCounterCmd(u, _INC) 94 | _PNCounterCmd(u, _DEC) 95 | ]) 96 | }), 97 | {(replica, cmd) => _CmdOnReplica[_PNCounterCmd](replica, cmd) } 98 | ) 99 | ) 100 | Generators.zip2[USize, Array[_CmdOnReplica[_PNCounterCmd]]]( 101 | Generators.unit[USize](num_replicas), cmds_gen 102 | ) 103 | }) 104 | 105 | -------------------------------------------------------------------------------- /crdt/test/test_mv_reg.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 
3 | 4 | class TestMVReg is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.MVReg" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = MVReg[String]("a".hash64()) 10 | let b = MVReg[String]("b".hash64()) 11 | let c = MVReg[String]("c".hash64()) 12 | 13 | a.update("apple") 14 | b.update("banana") 15 | c.update("currant") 16 | 17 | h.assert_eq[USize](a.size(), 1) 18 | h.assert_eq[USize](b.size(), 1) 19 | h.assert_eq[USize](b.size(), 1) 20 | h.assert_ne[MVReg[String]](a, b) 21 | h.assert_ne[MVReg[String]](b, c) 22 | h.assert_ne[MVReg[String]](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[USize](a.size(), 3) 34 | h.assert_eq[USize](b.size(), 3) 35 | h.assert_eq[USize](b.size(), 3) 36 | h.assert_eq[MVReg[String]](a, b) 37 | h.assert_eq[MVReg[String]](b, c) 38 | h.assert_eq[MVReg[String]](c, a) 39 | 40 | c.update("currant") 41 | 42 | h.assert_true(a.converge(c)) 43 | h.assert_true(b.converge(c)) 44 | h.assert_false(a.converge(b)) 45 | 46 | h.assert_true(a.contains("currant")) 47 | h.assert_eq[USize](a.size(), 1) 48 | h.assert_eq[USize](b.size(), 1) 49 | h.assert_eq[USize](b.size(), 1) 50 | h.assert_eq[MVReg[String]](a, b) 51 | h.assert_eq[MVReg[String]](b, c) 52 | h.assert_eq[MVReg[String]](c, a) 53 | 54 | class TestMVRegDelta is UnitTest 55 | new iso create() => None 56 | fun name(): String => "crdt.MVReg (ẟ)" 57 | 58 | fun apply(h: TestHelper) => 59 | let a = MVReg[String]("a".hash64()) 60 | let b = MVReg[String]("b".hash64()) 61 | let c = MVReg[String]("c".hash64()) 62 | 63 | var a_delta = a.update("apple") 64 | var b_delta = b.update("banana") 65 | var c_delta = c.update("currant") 66 | 67 | h.assert_eq[USize](a.size(), 1) 68 | h.assert_eq[USize](b.size(), 1) 69 | h.assert_eq[USize](b.size(), 1) 70 | 
h.assert_ne[MVReg[String]](a, b) 71 | h.assert_ne[MVReg[String]](b, c) 72 | h.assert_ne[MVReg[String]](c, a) 73 | 74 | h.assert_false(a.converge(a_delta)) 75 | 76 | h.assert_true(a.converge(b_delta)) 77 | h.assert_true(a.converge(c_delta)) 78 | h.assert_true(b.converge(c_delta)) 79 | h.assert_true(b.converge(a_delta)) 80 | h.assert_true(c.converge(a_delta)) 81 | h.assert_true(c.converge(b_delta)) 82 | 83 | h.assert_eq[USize](a.size(), 3) 84 | h.assert_eq[USize](b.size(), 3) 85 | h.assert_eq[USize](b.size(), 3) 86 | h.assert_eq[MVReg[String]](a, b) 87 | h.assert_eq[MVReg[String]](b, c) 88 | h.assert_eq[MVReg[String]](c, a) 89 | 90 | c_delta = c.update("currant") 91 | 92 | h.assert_true(a.converge(c_delta)) 93 | h.assert_true(b.converge(c_delta)) 94 | h.assert_false(c.converge(c_delta)) 95 | 96 | h.assert_true(a.contains("currant")) 97 | h.assert_eq[USize](a.size(), 1) 98 | h.assert_eq[USize](b.size(), 1) 99 | h.assert_eq[USize](b.size(), 1) 100 | h.assert_eq[MVReg[String]](a, b) 101 | h.assert_eq[MVReg[String]](b, c) 102 | h.assert_eq[MVReg[String]](c, a) 103 | 104 | class TestMVRegTokens is UnitTest 105 | new iso create() => None 106 | fun name(): String => "crdt.MVReg (tokens)" 107 | 108 | fun apply(h: TestHelper) => 109 | let data = MVReg[String]("a".hash64()) 110 | let data' = MVReg[String]("b".hash64()) 111 | let data'' = MVReg[String]("c".hash64()) 112 | 113 | data.update("apple") 114 | data'.update("banana") 115 | data''.update("currant") 116 | 117 | data.converge(data') 118 | data.converge(data'') 119 | 120 | let tokens = Tokens .> from(data) 121 | _TestTokensWellFormed(h, tokens) 122 | 123 | try 124 | h.assert_eq[MVReg[String]]( 125 | data, 126 | data.create(0) .> from_tokens(tokens.iterator())? 
127 | ) 128 | else 129 | h.fail("failed to parse token stream") 130 | end 131 | -------------------------------------------------------------------------------- /crdt/test/test_t_reg.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestTReg is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.TReg" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = TRegString.>update("apple", 3) 10 | let b = TRegString.>update("banana", 2) 11 | let c = TRegString.>update("currant", 1) 12 | 13 | h.assert_eq[String](a.value(), "apple") 14 | h.assert_eq[String](b.value(), "banana") 15 | h.assert_eq[String](c.value(), "currant") 16 | h.assert_eq[U64](a.timestamp(), 3) 17 | h.assert_eq[U64](b.timestamp(), 2) 18 | h.assert_eq[U64](c.timestamp(), 1) 19 | 20 | h.assert_false(a.converge(b)) 21 | h.assert_false(a.converge(c)) 22 | h.assert_false(b.converge(c)) 23 | h.assert_true(b.converge(a)) 24 | h.assert_true(c.converge(a)) 25 | h.assert_false(c.converge(b)) 26 | 27 | h.assert_eq[String](a.value(), "apple") 28 | h.assert_eq[String](b.value(), "apple") 29 | h.assert_eq[String](c.value(), "apple") 30 | h.assert_eq[U64](a.timestamp(), 3) 31 | h.assert_eq[U64](b.timestamp(), 3) 32 | h.assert_eq[U64](c.timestamp(), 3) 33 | 34 | a.update("apple", 5) 35 | b.update("banana", 5) 36 | c.update("currant", 5) 37 | 38 | h.assert_eq[String](a.value(), "apple") 39 | h.assert_eq[String](b.value(), "banana") 40 | h.assert_eq[String](c.value(), "currant") 41 | h.assert_eq[U64](a.timestamp(), 5) 42 | h.assert_eq[U64](b.timestamp(), 5) 43 | h.assert_eq[U64](c.timestamp(), 5) 44 | 45 | h.assert_true(a.converge(b)) 46 | h.assert_true(a.converge(c)) 47 | h.assert_true(b.converge(c)) 48 | h.assert_false(b.converge(a)) 49 | h.assert_false(c.converge(a)) 50 | h.assert_false(c.converge(b)) 51 | 52 | h.assert_eq[String](a.value(), "currant") 53 | h.assert_eq[String](b.value(), "currant") 54 | 
h.assert_eq[String](c.value(), "currant") 55 | h.assert_eq[U64](a.timestamp(), 5) 56 | h.assert_eq[U64](b.timestamp(), 5) 57 | h.assert_eq[U64](c.timestamp(), 5) 58 | 59 | class TestTRegDelta is UnitTest 60 | new iso create() => None 61 | fun name(): String => "crdt.TReg (ẟ)" 62 | 63 | fun apply(h: TestHelper) => 64 | let a = TRegString.>update("apple", 3) 65 | let b = TRegString.>update("banana", 2) 66 | let c = TRegString.>update("currant", 1) 67 | 68 | h.assert_eq[String](a.value(), "apple") 69 | h.assert_eq[String](b.value(), "banana") 70 | h.assert_eq[String](c.value(), "currant") 71 | h.assert_eq[U64](a.timestamp(), 3) 72 | h.assert_eq[U64](b.timestamp(), 2) 73 | h.assert_eq[U64](c.timestamp(), 1) 74 | 75 | h.assert_false(a.converge(b)) 76 | h.assert_false(a.converge(c)) 77 | h.assert_false(b.converge(c)) 78 | h.assert_true(b.converge(a)) 79 | h.assert_true(c.converge(a)) 80 | h.assert_false(c.converge(b)) 81 | 82 | h.assert_eq[String](a.value(), "apple") 83 | h.assert_eq[String](b.value(), "apple") 84 | h.assert_eq[String](c.value(), "apple") 85 | h.assert_eq[U64](a.timestamp(), 3) 86 | h.assert_eq[U64](b.timestamp(), 3) 87 | h.assert_eq[U64](c.timestamp(), 3) 88 | 89 | var a_delta = a.update("apple", 5) 90 | var b_delta = b.update("banana", 5) 91 | var c_delta = c.update("currant", 5) 92 | 93 | h.assert_eq[String](a.value(), "apple") 94 | h.assert_eq[String](b.value(), "banana") 95 | h.assert_eq[String](c.value(), "currant") 96 | h.assert_eq[U64](a.timestamp(), 5) 97 | h.assert_eq[U64](b.timestamp(), 5) 98 | h.assert_eq[U64](c.timestamp(), 5) 99 | 100 | h.assert_true(a.converge(b_delta)) 101 | h.assert_true(a.converge(c_delta)) 102 | h.assert_true(b.converge(c_delta)) 103 | h.assert_false(b.converge(a_delta)) 104 | h.assert_false(c.converge(a_delta)) 105 | h.assert_false(c.converge(b_delta)) 106 | 107 | h.assert_eq[String](a.value(), "currant") 108 | h.assert_eq[String](b.value(), "currant") 109 | h.assert_eq[String](c.value(), "currant") 110 | 
h.assert_eq[U64](a.timestamp(), 5) 111 | h.assert_eq[U64](b.timestamp(), 5) 112 | h.assert_eq[U64](c.timestamp(), 5) 113 | 114 | class TestTRegTokens is UnitTest 115 | new iso create() => None 116 | fun name(): String => "crdt.TReg (tokens)" 117 | 118 | fun apply(h: TestHelper) => 119 | let data = TRegString .> update("apple", 5) 120 | 121 | let tokens = Tokens .> from(data) 122 | _TestTokensWellFormed(h, tokens) 123 | 124 | try 125 | h.assert_eq[TRegString]( 126 | data, 127 | data.create() .> from_tokens(tokens.iterator())? 128 | ) 129 | else 130 | h.fail("failed to parse token stream") 131 | end 132 | -------------------------------------------------------------------------------- /crdt/g_set.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type GSet[A: (Hashable val & Equatable[A])] is GHashSet[A, HashEq[A]] 5 | 6 | type GSetIs[A: Any val] is GHashSet[A, HashIs[A]] 7 | 8 | class ref GHashSet[A: Any val, H: HashFunction[A] val] 9 | is (Comparable[GHashSet[A, H]] & Convergent[GHashSet[A, H]] & Replicated) 10 | """ 11 | An unordered mutable grow-only set. That is, it only allows insertion. 12 | 13 | Because the set is unordered and elements can only be added (never deleted), 14 | the results are eventually consistent when converged. 15 | 16 | All mutator methods accept and return a convergent delta-state. 17 | """ 18 | embed _data: HashSet[A, H] = _data.create() 19 | let _checklist: (DotChecklist | None) 20 | 21 | new ref create() => 22 | _checklist = None 23 | 24 | new ref _create_in(ctx: DotContext) => 25 | _checklist = DotChecklist(ctx) 26 | 27 | fun ref _checklist_write() => 28 | match _checklist | let c: DotChecklist => c.write() end 29 | 30 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 31 | false 32 | 33 | fun is_empty(): Bool => 34 | """ 35 | Return true if the data structure contains no information (bottom state). 
36 | """ 37 | _data.size() == 0 38 | 39 | fun ref _data_set(value: A) => _data.set(value) 40 | 41 | fun size(): USize => 42 | """ 43 | Return the number of items in the set. 44 | """ 45 | _data.size() 46 | 47 | fun apply(value: val->A): val->A ? => 48 | """ 49 | Return the value if it's in the set, otherwise raise an error. 50 | """ 51 | _data(value)? 52 | 53 | fun contains(value: val->A): Bool => 54 | """ 55 | Check whether the set contains the given value. 56 | """ 57 | _data.contains(value) 58 | 59 | fun ref set[D: GHashSet[A, H] ref = GHashSet[A, H]]( 60 | value: A, 61 | delta: D = recover GHashSet[A, H] end) 62 | : D^ => 63 | """ 64 | Add a value to the set. 65 | Accepts and returns a convergent delta-state. 66 | """ 67 | _data.set(value) 68 | _checklist_write() 69 | delta._data_set(value) 70 | delta 71 | 72 | fun ref union[D: GHashSet[A, H] ref = GHashSet[A, H]]( 73 | that: Iterator[A], 74 | delta: D = recover GHashSet[A, H] end) 75 | : D^ => 76 | """ 77 | Add everything in the given iterator to the set. 78 | Accepts and returns a convergent delta-state. 79 | """ 80 | for value in that do 81 | _data.set(value) 82 | delta._data_set(value) 83 | end 84 | _checklist_write() 85 | delta 86 | 87 | fun ref converge(that: GHashSet[A, H] box): Bool => 88 | """ 89 | Converge from the given GSet into this one. 90 | For this data type, the convergence is a simple union. 91 | Returns true if the convergence added new information to the data structure. 92 | """ 93 | let orig_size = _data.size() 94 | union(that._data.values()) 95 | orig_size != _data.size() 96 | 97 | fun string(): String iso^ => 98 | """ 99 | Return a best effort at printing the set. If A is a Stringable, use the 100 | string representation of each value; otherwise print them as question marks. 
101 | """ 102 | let buf = recover String((size() * 3) + 1) end 103 | buf.push('%') 104 | buf.push('{') 105 | var first = true 106 | for value in values() do 107 | if first then first = false else buf .> push(';').push(' ') end 108 | iftype A <: Stringable val then 109 | buf.append(value.string()) 110 | else 111 | buf.push('?') 112 | end 113 | end 114 | buf.push('}') 115 | consume buf 116 | 117 | fun eq(that: GHashSet[A, H] box): Bool => _data.eq(that._data) 118 | fun ne(that: GHashSet[A, H] box): Bool => _data.ne(that._data) 119 | fun lt(that: GHashSet[A, H] box): Bool => _data.lt(that._data) 120 | fun le(that: GHashSet[A, H] box): Bool => _data.le(that._data) 121 | fun gt(that: GHashSet[A, H] box): Bool => _data.gt(that._data) 122 | fun ge(that: GHashSet[A, H] box): Bool => _data.ge(that._data) 123 | fun values(): Iterator[A]^ => _data.values() 124 | 125 | fun ref from_tokens(that: TokensIterator)? => 126 | """ 127 | Deserialize an instance of this data structure from a stream of tokens. 128 | """ 129 | var count = that.next[USize]()? 130 | // TODO: _data.reserve(count) 131 | while (count = count - 1) > 0 do 132 | _data.set(that.next[A]()?) 133 | end 134 | 135 | fun ref each_token(tokens: Tokens) => 136 | """ 137 | Serialize the data structure, capturing each token into the given Tokens. 138 | """ 139 | tokens.push(_data.size()) 140 | for value in _data.values() do tokens.push(value) end 141 | -------------------------------------------------------------------------------- /crdt/test/prop_c_counter.pony: -------------------------------------------------------------------------------- 1 | use "ponycheck" 2 | use ".." 
3 | use "collections" 4 | 5 | primitive _INC 6 | primitive _DEC 7 | type _CounterOp is (_DEC|_INC) 8 | 9 | class val _CCounterCmd is Stringable 10 | let u_cmd: {(U64): U64 } val 11 | let cc_cmd: {(CCounter): CCounter^ } val 12 | let diff: U64 13 | let op: _CounterOp 14 | 15 | new val create(diff': U64, op': _CounterOp = _INC) => 16 | diff = diff' 17 | op = op' 18 | cc_cmd = {(cc) => if op is _INC then cc.increment(diff) else cc.decrement(diff) end } val 19 | u_cmd = {(t) => if op is _INC then t + diff else t - diff end } val 20 | 21 | fun string(): String iso^ => 22 | recover 23 | String() 24 | .>append(if op is _INC then "+" else "-" end + diff.string()) 25 | end 26 | 27 | class _CmdOnReplica[T = _CCounterCmd] 28 | let replica: USize 29 | let cmd: T 30 | 31 | new create(r: USize, c: T) => 32 | replica = r 33 | cmd = consume c 34 | 35 | fun string(): String iso^ => 36 | let str = iftype T <: Stringable #read then cmd.string() else "cmd" end 37 | recover 38 | String() 39 | .>append("_Cmd(") 40 | .>append(str) 41 | .>append(",") 42 | .>append(replica.string()) 43 | .>append(")") 44 | end 45 | 46 | trait CCounterProperty is Property1[(USize, Array[_CmdOnReplica])] 47 | fun property(sample: (USize, Array[_CmdOnReplica]), h: PropertyHelper) ? => 48 | """ 49 | validate that an array of commands against random replicas 50 | converges to the same value as a U64 counter exposed to the same commands. 51 | """ 52 | (let num_replicas, let commands) = sample 53 | let replicas: Array[CCounter] = replicas.create(num_replicas) 54 | for x in Range[U64](0, num_replicas.u64()) do 55 | replicas.push(CCounter(x)) 56 | end 57 | 58 | var expected: U64 = 0 59 | let deltas = Array[CCounter](commands.size()) 60 | for command in commands.values() do 61 | let cmd = command.cmd 62 | h.log("executing " + cmd.string(), true) 63 | 64 | deltas.push( 65 | cmd.cc_cmd(replicas(command.replica)?) 
66 | ) 67 | expected = cmd.u_cmd(expected) 68 | 69 | let observer = CCounter(U64.max_value()) 70 | for replica in replicas.values() do 71 | observer.converge(replica) 72 | end 73 | if not h.assert_eq[U64](expected, observer.value()) then return end 74 | end 75 | let delta_observer = CCounter(U64.max_value() - 1) 76 | for delta in deltas.values() do 77 | delta_observer.converge(delta) 78 | end 79 | h.assert_eq[U64](expected, delta_observer.value()) 80 | 81 | 82 | class CCounterIncProperty is CCounterProperty 83 | """ 84 | verify that a set of CCounter replicas that are only incremented 85 | behave like a single U64 counter once completely converged. 86 | """ 87 | 88 | fun name(): String => "crdt.prop.CCounter.Inc" 89 | 90 | fun gen(): Generator[(USize, Array[_CmdOnReplica])] => 91 | Generators.usize(2, 10).flat_map[(USize, Array[_CmdOnReplica])]({ 92 | (num_replicas) => 93 | let cmds_gen = Generators.array_of[_CmdOnReplica]( 94 | Generators.map2[USize, U64, _CmdOnReplica]( 95 | Generators.usize(0, num_replicas-1), 96 | Generators.u64(), 97 | {(replica, inc) => _CmdOnReplica(replica, _CCounterCmd(inc, _INC)) } 98 | ) 99 | ) 100 | Generators.zip2[USize, Array[_CmdOnReplica]]( 101 | Generators.unit[USize](num_replicas), cmds_gen) 102 | }) 103 | 104 | 105 | class CCounterIncDecProperty is CCounterProperty 106 | """ 107 | verify that a set of CCounter replicas that are incremented and decremented 108 | behave like a single U64 counter once completely converged. 
109 | """ 110 | 111 | fun name(): String => "crdt.prop.CCounter" 112 | 113 | fun gen(): Generator[(USize, Array[_CmdOnReplica])] => 114 | Generators.usize(2, 10).flat_map[(USize, Array[_CmdOnReplica])]({ 115 | (num_replicas) => 116 | let cmds_gen = 117 | Generators.array_of[_CmdOnReplica]( 118 | Generators.map2[USize, _CCounterCmd, _CmdOnReplica]( 119 | Generators.usize(0, num_replicas-1), 120 | Generators.u64().flat_map[_CCounterCmd]({ 121 | (u) => 122 | Generators.one_of[_CCounterCmd]([ 123 | _CCounterCmd(u, _INC) 124 | _CCounterCmd(u, _DEC) 125 | ]) 126 | }), 127 | {(replica, cmd) => _CmdOnReplica(replica, cmd) } 128 | ) 129 | ) 130 | Generators.zip2[USize, Array[_CmdOnReplica]]( 131 | Generators.unit[USize](num_replicas), 132 | cmds_gen) 133 | }) 134 | 135 | -------------------------------------------------------------------------------- /crdt/test/test_c_counter.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestCCounter is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.CCounter" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = CCounter("a".hash64()) 10 | let b = CCounter("b".hash64()) 11 | let c = CCounter("c".hash64()) 12 | 13 | a.increment(1) 14 | b.decrement(2) 15 | c.increment(3) 16 | 17 | h.assert_eq[U64](a.value(), 1) 18 | h.assert_eq[U64](b.value(), -2) 19 | h.assert_eq[U64](c.value(), 3) 20 | h.assert_ne[CCounter](a, b) 21 | h.assert_ne[CCounter](b, c) 22 | h.assert_ne[CCounter](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[U64](a.value(), 2) 34 | h.assert_eq[U64](b.value(), 2) 35 | h.assert_eq[U64](c.value(), 2) 36 | h.assert_eq[CCounter](a, b) 37 | h.assert_eq[CCounter](b, c) 38 | h.assert_eq[CCounter](c, a) 39 | 40 
| a.increment(9) 41 | b.increment(8) 42 | c.decrement(7) 43 | 44 | h.assert_eq[U64](a.value(), 11) 45 | h.assert_eq[U64](b.value(), 10) 46 | h.assert_eq[U64](c.value(), -5) 47 | h.assert_ne[CCounter](a, b) 48 | h.assert_ne[CCounter](b, c) 49 | h.assert_ne[CCounter](c, a) 50 | 51 | h.assert_true(a.converge(b)) 52 | h.assert_true(a.converge(c)) 53 | h.assert_true(b.converge(c)) 54 | h.assert_true(b.converge(a)) 55 | h.assert_true(c.converge(a)) 56 | h.assert_false(c.converge(b)) 57 | 58 | h.assert_eq[U64](a.value(), 12) 59 | h.assert_eq[U64](b.value(), 12) 60 | h.assert_eq[U64](c.value(), 12) 61 | h.assert_eq[CCounter](a, b) 62 | h.assert_eq[CCounter](b, c) 63 | h.assert_eq[CCounter](c, a) 64 | 65 | class TestCCounterDelta is UnitTest 66 | new iso create() => None 67 | fun name(): String => "crdt.CCounter (ẟ)" 68 | 69 | fun apply(h: TestHelper) => 70 | let a = CCounter("a".hash64()) 71 | let b = CCounter("b".hash64()) 72 | let c = CCounter("c".hash64()) 73 | 74 | var a_delta = a.increment(1) 75 | var b_delta = b.decrement(2) 76 | var c_delta = c.increment(3) 77 | 78 | h.assert_eq[U64](a.value(), 1) 79 | h.assert_eq[U64](b.value(), -2) 80 | h.assert_eq[U64](c.value(), 3) 81 | h.assert_ne[CCounter](a, b) 82 | h.assert_ne[CCounter](b, c) 83 | h.assert_ne[CCounter](c, a) 84 | 85 | h.assert_false(a.converge(a_delta)) 86 | 87 | h.assert_true(a.converge(b_delta)) 88 | h.assert_true(a.converge(c_delta)) 89 | h.assert_true(b.converge(c_delta)) 90 | h.assert_true(b.converge(a_delta)) 91 | h.assert_true(c.converge(a_delta)) 92 | h.assert_true(c.converge(b_delta)) 93 | 94 | h.assert_eq[U64](a.value(), 2) 95 | h.assert_eq[U64](b.value(), 2) 96 | h.assert_eq[U64](c.value(), 2) 97 | h.assert_eq[CCounter](a, b) 98 | h.assert_eq[CCounter](b, c) 99 | h.assert_eq[CCounter](c, a) 100 | 101 | a_delta = a.increment(9) 102 | b_delta = b.increment(8) 103 | c_delta = c.decrement(7) 104 | 105 | h.assert_eq[U64](a.value(), 11) 106 | h.assert_eq[U64](b.value(), 10) 107 | 
h.assert_eq[U64](c.value(), -5) 108 | h.assert_ne[CCounter](a, b) 109 | h.assert_ne[CCounter](b, c) 110 | h.assert_ne[CCounter](c, a) 111 | 112 | h.assert_true(a.converge(b_delta)) 113 | h.assert_true(a.converge(c_delta)) 114 | h.assert_true(b.converge(c_delta)) 115 | h.assert_true(b.converge(a_delta)) 116 | h.assert_true(c.converge(a_delta)) 117 | h.assert_true(c.converge(b_delta)) 118 | 119 | h.assert_eq[U64](a.value(), 12) 120 | h.assert_eq[U64](b.value(), 12) 121 | h.assert_eq[U64](c.value(), 12) 122 | h.assert_eq[CCounter](a, b) 123 | h.assert_eq[CCounter](b, c) 124 | h.assert_eq[CCounter](c, a) 125 | 126 | class TestCCounterTokens is UnitTest 127 | new iso create() => None 128 | fun name(): String => "crdt.CCounter (tokens)" 129 | 130 | fun apply(h: TestHelper) => 131 | let data = CCounter[U8]("a".hash64()) 132 | let data' = CCounter[U8]("b".hash64()) 133 | let data'' = CCounter[U8]("c".hash64()) 134 | 135 | data.increment(4) 136 | data'.decrement(5) 137 | data''.increment(6) 138 | 139 | data.converge(data') 140 | data.converge(data'') 141 | 142 | let tokens = Tokens .> from(data) 143 | _TestTokensWellFormed(h, tokens) 144 | 145 | try 146 | h.assert_eq[CCounter[U8]]( 147 | data, 148 | data.create(0) .> from_tokens(tokens.iterator())? 149 | ) 150 | else 151 | h.fail("failed to parse token stream") 152 | end 153 | -------------------------------------------------------------------------------- /crdt/test/test_p2_set.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 

class TestP2Set is UnitTest
  """
  Exercises P2Set (two-phase set) convergence across three replicas.
  Fix: the third assertion of each size-check triple previously re-checked
  `b.size()` instead of `c.size()` (copy-paste bug), leaving replica c
  unverified.
  """
  new iso create() => None
  fun name(): String => "crdt.P2Set"

  fun apply(h: TestHelper) =>
    let a = P2Set[String]
    let b = P2Set[String]
    let c = P2Set[String]

    a.set("apple")
    b.set("banana")
    c.set("currant")

    h.assert_eq[USize](a.size(), 1)
    h.assert_eq[USize](b.size(), 1)
    h.assert_eq[USize](c.size(), 1) // was b.size() — now actually checks c
    h.assert_ne[P2Set[String]](a, b)
    h.assert_ne[P2Set[String]](b, c)
    h.assert_ne[P2Set[String]](c, a)

    // Converging with self adds no information.
    h.assert_false(a.converge(a))

    h.assert_true(a.converge(b))
    h.assert_true(a.converge(c))
    h.assert_true(b.converge(c))
    h.assert_true(b.converge(a))
    h.assert_true(c.converge(a))
    h.assert_false(c.converge(b))

    h.assert_eq[USize](a.size(), 3)
    h.assert_eq[USize](b.size(), 3)
    h.assert_eq[USize](c.size(), 3) // was b.size()
    h.assert_eq[P2Set[String]](a, b)
    h.assert_eq[P2Set[String]](b, c)
    h.assert_eq[P2Set[String]](c, a)

    // Removal on one replica propagates to all.
    c.unset("currant")
    h.assert_true(a.converge(c))
    h.assert_true(b.converge(c))

    h.assert_false(a.converge(b))

    h.assert_eq[USize](a.size(), 2)
    h.assert_eq[USize](b.size(), 2)
    h.assert_eq[USize](c.size(), 2) // was b.size()
    h.assert_eq[P2Set[String]](a, b)
    h.assert_eq[P2Set[String]](b, c)
    h.assert_eq[P2Set[String]](c, a)

    c.unset("banana")
    c.unset("apple")
    h.assert_true(a.converge(c))
    h.assert_true(b.converge(c))

    h.assert_false(a.converge(b))

    h.assert_eq[USize](a.size(), 0)
    h.assert_eq[USize](b.size(), 0)
    h.assert_eq[USize](c.size(), 0) // was b.size()
    h.assert_eq[P2Set[String]](a, b)
    h.assert_eq[P2Set[String]](b, c)
    h.assert_eq[P2Set[String]](c, a)

class TestP2SetDelta is UnitTest
  """
  Same scenario as TestP2Set, but converging via returned delta-states.
  """
  new iso create() => None
  fun name(): String => "crdt.P2Set (ẟ)"

  fun apply(h: TestHelper) =>
    let a = P2Set[String]
    let b = P2Set[String]
    let c = P2Set[String]

var a_delta = a.set("apple") 77 | var b_delta = b.set("banana") 78 | var c_delta = c.set("currant") 79 | 80 | h.assert_eq[USize](a.size(), 1) 81 | h.assert_eq[USize](b.size(), 1) 82 | h.assert_eq[USize](c.size(), 1) // was a duplicated b.size() check; c was never verified 83 | h.assert_ne[P2Set[String]](a, b) 84 | h.assert_ne[P2Set[String]](b, c) 85 | h.assert_ne[P2Set[String]](c, a) 86 | 87 | h.assert_false(a.converge(a_delta)) 88 | 89 | h.assert_true(a.converge(b_delta)) 90 | h.assert_true(a.converge(c_delta)) 91 | h.assert_true(b.converge(c_delta)) 92 | h.assert_true(b.converge(a_delta)) 93 | h.assert_true(c.converge(a_delta)) 94 | h.assert_true(c.converge(b_delta)) 95 | 96 | h.assert_eq[USize](a.size(), 3) 97 | h.assert_eq[USize](b.size(), 3) 98 | h.assert_eq[USize](c.size(), 3) // was a duplicated b.size() check; c was never verified 99 | h.assert_eq[P2Set[String]](a, b) 100 | h.assert_eq[P2Set[String]](b, c) 101 | h.assert_eq[P2Set[String]](c, a) 102 | 103 | c_delta = c.unset("currant") 104 | h.assert_true(a.converge(c_delta)) 105 | h.assert_true(b.converge(c_delta)) 106 | 107 | h.assert_false(c.converge(c_delta)) 108 | 109 | h.assert_eq[USize](a.size(), 2) 110 | h.assert_eq[USize](b.size(), 2) 111 | h.assert_eq[USize](c.size(), 2) // was a duplicated b.size() check; c was never verified 112 | h.assert_eq[P2Set[String]](a, b) 113 | h.assert_eq[P2Set[String]](b, c) 114 | h.assert_eq[P2Set[String]](c, a) 115 | 116 | c_delta = c.unset("banana") 117 | c_delta = c.unset("apple", consume c_delta) 118 | h.assert_true(a.converge(c_delta)) 119 | h.assert_true(b.converge(c_delta)) 120 | 121 | h.assert_false(c.converge(c_delta)) 122 | 123 | h.assert_eq[USize](a.size(), 0) 124 | h.assert_eq[USize](b.size(), 0) 125 | h.assert_eq[USize](c.size(), 0) // was a duplicated b.size() check; c was never verified 126 | h.assert_eq[P2Set[String]](a, b) 127 | h.assert_eq[P2Set[String]](b, c) 128 | h.assert_eq[P2Set[String]](c, a) 129 | 130 | class TestP2SetTokens is UnitTest 131 | new iso create() => None 132 | fun name(): String => "crdt.P2Set (tokens)" 133 | 134 | fun apply(h: TestHelper) => 135 | let data = P2Set[String] 136 | .> set("apple") 137 | .> set("banana") 138 | .> set("currant") 139 | .> 
set("dewberry") 140 | .> unset("dewberry") 141 | .> unset("apple") 142 | 143 | let tokens = Tokens .> from(data) 144 | _TestTokensWellFormed(h, tokens) 145 | 146 | try 147 | h.assert_eq[P2Set[String]]( 148 | data, 149 | data.create() .> from_tokens(tokens.iterator())? 150 | ) 151 | else 152 | h.fail("failed to parse token stream") 153 | end 154 | -------------------------------------------------------------------------------- /crdt/test/test_pn_counter.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestPNCounter is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.PNCounter" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = PNCounter("a".hash64()) 10 | let b = PNCounter("b".hash64()) 11 | let c = PNCounter("c".hash64()) 12 | 13 | a.increment(1) 14 | b.decrement(2) 15 | c.increment(3) 16 | 17 | h.assert_eq[U64](a.value(), 1) 18 | h.assert_eq[U64](b.value(), -2) 19 | h.assert_eq[U64](c.value(), 3) 20 | h.assert_ne[PNCounter](a, b) 21 | h.assert_ne[PNCounter](b, c) 22 | h.assert_ne[PNCounter](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[U64](a.value(), 2) 34 | h.assert_eq[U64](b.value(), 2) 35 | h.assert_eq[U64](c.value(), 2) 36 | h.assert_eq[PNCounter](a, b) 37 | h.assert_eq[PNCounter](b, c) 38 | h.assert_eq[PNCounter](c, a) 39 | 40 | a.increment(9) 41 | b.increment(8) 42 | c.decrement(7) 43 | 44 | h.assert_eq[U64](a.value(), 11) 45 | h.assert_eq[U64](b.value(), 10) 46 | h.assert_eq[U64](c.value(), -5) 47 | h.assert_ne[PNCounter](a, b) 48 | h.assert_ne[PNCounter](b, c) 49 | h.assert_ne[PNCounter](c, a) 50 | 51 | h.assert_true(a.converge(b)) 52 | h.assert_true(a.converge(c)) 53 | h.assert_true(b.converge(c)) 54 | 
h.assert_true(b.converge(a)) 55 | h.assert_true(c.converge(a)) 56 | h.assert_false(c.converge(b)) 57 | 58 | h.assert_eq[U64](a.value(), 12) 59 | h.assert_eq[U64](b.value(), 12) 60 | h.assert_eq[U64](c.value(), 12) 61 | h.assert_eq[PNCounter](a, b) 62 | h.assert_eq[PNCounter](b, c) 63 | h.assert_eq[PNCounter](c, a) 64 | 65 | class TestPNCounterDelta is UnitTest 66 | new iso create() => None 67 | fun name(): String => "crdt.PNCounter (ẟ)" 68 | 69 | fun apply(h: TestHelper) => 70 | let a = PNCounter("a".hash64()) 71 | let b = PNCounter("b".hash64()) 72 | let c = PNCounter("c".hash64()) 73 | 74 | var a_delta = a.increment(1) 75 | var b_delta = b.decrement(2) 76 | var c_delta = c.increment(3) 77 | 78 | h.assert_eq[U64](a.value(), 1) 79 | h.assert_eq[U64](b.value(), -2) 80 | h.assert_eq[U64](c.value(), 3) 81 | h.assert_ne[PNCounter](a, b) 82 | h.assert_ne[PNCounter](b, c) 83 | h.assert_ne[PNCounter](c, a) 84 | 85 | h.assert_false(a.converge(a_delta)) 86 | 87 | h.assert_true(a.converge(b_delta)) 88 | h.assert_true(a.converge(c_delta)) 89 | h.assert_true(b.converge(c_delta)) 90 | h.assert_true(b.converge(a_delta)) 91 | h.assert_true(c.converge(a_delta)) 92 | h.assert_true(c.converge(b_delta)) 93 | 94 | h.assert_eq[U64](a.value(), 2) 95 | h.assert_eq[U64](b.value(), 2) 96 | h.assert_eq[U64](c.value(), 2) 97 | h.assert_eq[PNCounter](a, b) 98 | h.assert_eq[PNCounter](b, c) 99 | h.assert_eq[PNCounter](c, a) 100 | 101 | a_delta = a.increment(9) 102 | b_delta = b.increment(8) 103 | c_delta = c.decrement(7) 104 | 105 | h.assert_eq[U64](a.value(), 11) 106 | h.assert_eq[U64](b.value(), 10) 107 | h.assert_eq[U64](c.value(), -5) 108 | h.assert_ne[PNCounter](a, b) 109 | h.assert_ne[PNCounter](b, c) 110 | h.assert_ne[PNCounter](c, a) 111 | 112 | h.assert_true(a.converge(b_delta)) 113 | h.assert_true(a.converge(c_delta)) 114 | h.assert_true(b.converge(c_delta)) 115 | h.assert_true(b.converge(a_delta)) 116 | h.assert_true(c.converge(a_delta)) 117 | h.assert_true(c.converge(b_delta)) 118 
| 119 | h.assert_eq[U64](a.value(), 12) 120 | h.assert_eq[U64](b.value(), 12) 121 | h.assert_eq[U64](c.value(), 12) 122 | h.assert_eq[PNCounter](a, b) 123 | h.assert_eq[PNCounter](b, c) 124 | h.assert_eq[PNCounter](c, a) 125 | 126 | class TestPNCounterTokens is UnitTest 127 | new iso create() => None 128 | fun name(): String => "crdt.PNCounter (tokens)" 129 | 130 | fun apply(h: TestHelper) => 131 | let data = PNCounter[U8]("a".hash64()) 132 | let data' = PNCounter[U8]("b".hash64()) 133 | let data'' = PNCounter[U8]("c".hash64()) 134 | 135 | data.increment(4) 136 | data'.decrement(5) 137 | data''.increment(6) 138 | 139 | data.converge(data') 140 | data.converge(data'') 141 | 142 | let tokens = Tokens .> from(data) 143 | _TestTokensWellFormed(h, tokens) 144 | 145 | try 146 | h.assert_eq[PNCounter[U8]]( 147 | data, 148 | data.create(0) .> from_tokens(tokens.iterator())? 149 | ) 150 | else 151 | h.fail("failed to parse token stream") 152 | end 153 | -------------------------------------------------------------------------------- /crdt/c_counter.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | class ref CCounter[A: (Integer[A] val & (Unsigned | Signed)) = U64] 5 | is (Comparable[CCounter[A]] & Causal[CCounter[A]]) 6 | """ 7 | A mutable counter, which can be both increased and decreased. 8 | 9 | This data type has the same general semantics as PNCounter, but instead of 10 | being modeled as two GCounters (positive and negative), is is built with the 11 | generic "dot kernel" abstraction used for tracking causality of updates. 12 | 13 | Each replica tracks its local value as a dot in the dot kernel. When updating 14 | the local value, the old dot is removed and a new dot with a happens-after 15 | relationship to the old dot (sequence number incremented) is added. 
16 | When converging, dots that have the same id are trimmed so that only the one 17 | with the latest sequence number will remain. 18 | 19 | The total value of the counter is the sum of the local values of all replicas. 20 | 21 | Because the dot kernel abstraction provides an eventually-consistent set 22 | of replica-associated values, and this data structure uses a commutative 23 | strategy for folding them into a result, that result is eventually consistent. 24 | 25 | All mutator methods accept and return a convergent delta-state. 26 | """ 27 | embed _kernel: DotKernelSingle[A] 28 | 29 | new ref create(id: ID) => 30 | """ 31 | Instantiate under the given unique replica id. 32 | """ 33 | _kernel = _kernel.create(id) 34 | 35 | new ref _create_in(ctx': DotContext) => 36 | _kernel = _kernel.create_in(ctx') 37 | 38 | fun _context(): this->DotContext => 39 | _kernel.context() 40 | 41 | fun is_empty(): Bool => 42 | """ 43 | Return true if there are no values ever recorded from any replica. 44 | This is true at creation, after calling the clear method, 45 | or after a converge that results in all values being cleared. 46 | """ 47 | _kernel.is_empty() 48 | 49 | fun apply(): A => 50 | """ 51 | Return the current value of the counter (the sum of all local values). 52 | """ 53 | value() 54 | 55 | fun value(): A => 56 | """ 57 | Return the current value of the counter (the sum of all local values). 58 | """ 59 | var sum: A = 0 60 | for v in _kernel.values() do sum = sum + v end 61 | sum 62 | 63 | fun ref increment[D: CCounter[A] ref = CCounter[A]]( 64 | value': A = 1, 65 | delta': D = recover CCounter[A](0) end) 66 | : D^ => 67 | """ 68 | Increment the counter by the given value. 69 | Accepts and returns a convergent delta-state. 
70 | """ 71 | _kernel.upsert(value', {(v, value') => v + value' }, delta'._kernel) 72 | delta' 73 | 74 | fun ref decrement[D: CCounter[A] ref = CCounter[A]]( 75 | value': A = 1, 76 | delta': D = recover CCounter[A](0) end) 77 | : D^ => 78 | """ 79 | Decrement the counter by the given value. 80 | Accepts and returns a convergent delta-state. 81 | """ 82 | _kernel.upsert(-value', {(v, value') => v + value' }, delta'._kernel) 83 | delta' 84 | 85 | fun ref clear[D: CCounter[A] ref = CCounter[A]]( 86 | delta': D = recover CCounter[A](0) end) 87 | : D^ => 88 | """ 89 | Remove all locally visible changes to the counter, resetting it to zero. 90 | Accepts and returns a convergent delta-state. 91 | """ 92 | _kernel.remove_all(delta'._kernel) 93 | delta' 94 | 95 | fun ref converge(that: CCounter[A] box): Bool => 96 | """ 97 | Converge from the given CCounter into this one. 98 | Returns true if the convergence added new information to the data structure. 99 | """ 100 | _kernel.converge(that._kernel) 101 | 102 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 103 | """ 104 | Optimize for the special case of converging from a peer with an empty map, 105 | taking only their DotContext as an argument for resolving disagreements. 106 | """ 107 | _kernel.converge_empty_in(ctx') 108 | 109 | fun string(): String iso^ => 110 | """ 111 | Print the value of the counter. 112 | """ 113 | value().string() 114 | 115 | fun eq(that: CCounter[A] box): Bool => value().eq(that.value()) 116 | fun ne(that: CCounter[A] box): Bool => value().ne(that.value()) 117 | fun lt(that: CCounter[A] box): Bool => value().lt(that.value()) 118 | fun le(that: CCounter[A] box): Bool => value().le(that.value()) 119 | fun gt(that: CCounter[A] box): Bool => value().gt(that.value()) 120 | fun ge(that: CCounter[A] box): Bool => value().ge(that.value()) 121 | 122 | fun ref from_tokens(that: TokensIterator)? => 123 | """ 124 | Deserialize an instance of this data structure from a stream of tokens. 
125 | """ 126 | _kernel.from_tokens(that)? 127 | 128 | fun ref each_token(tokens: Tokens) => 129 | """ 130 | Serialize the data structure, capturing each token into the given Tokens. 131 | """ 132 | _kernel.each_token(tokens) 133 | -------------------------------------------------------------------------------- /crdt/test/test_t_set.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestTSet is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.TSet" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = TSet[String] 10 | let b = TSet[String] 11 | let c = TSet[String] 12 | 13 | a.set("apple", 5) 14 | b.set("banana", 5) 15 | c.set("currant", 5) 16 | 17 | h.assert_eq[USize](a.size(), 1) 18 | h.assert_eq[USize](b.size(), 1) 19 | h.assert_eq[USize](c.size(), 1) 20 | h.assert_ne[TSet[String]](a, b) 21 | h.assert_ne[TSet[String]](b, c) 22 | h.assert_ne[TSet[String]](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[USize](a.size(), 3) 34 | h.assert_eq[USize](b.size(), 3) 35 | h.assert_eq[USize](c.size(), 3) 36 | h.assert_eq[TSet[String]](a, b) 37 | h.assert_eq[TSet[String]](b, c) 38 | h.assert_eq[TSet[String]](c, a) 39 | 40 | c.unset("currant", 6) 41 | h.assert_true(a.converge(c)) 42 | h.assert_true(b.converge(c)) 43 | 44 | h.assert_eq[USize](a.size(), 2) 45 | h.assert_eq[USize](b.size(), 2) 46 | h.assert_eq[USize](c.size(), 2) 47 | h.assert_eq[TSet[String]](a, b) 48 | h.assert_eq[TSet[String]](b, c) 49 | h.assert_eq[TSet[String]](c, a) 50 | 51 | c.unset("banana", 4) 52 | c.unset("apple", 5) 53 | h.assert_false(a.converge(c)) 54 | h.assert_false(b.converge(c)) 55 | 56 | h.assert_eq[USize](a.size(), 2) 57 | h.assert_eq[USize](b.size(), 2) 58 | 
h.assert_eq[USize](c.size(), 2) 59 | h.assert_eq[TSet[String]](a, b) 60 | h.assert_eq[TSet[String]](b, c) 61 | h.assert_eq[TSet[String]](c, a) 62 | 63 | c.unset("banana", 7) 64 | c.unset("apple", 8) 65 | h.assert_true(a.converge(c)) 66 | h.assert_true(b.converge(c)) 67 | 68 | h.assert_eq[USize](a.size(), 0) 69 | h.assert_eq[USize](b.size(), 0) 70 | h.assert_eq[USize](c.size(), 0) 71 | h.assert_eq[TSet[String]](a, b) 72 | h.assert_eq[TSet[String]](b, c) 73 | h.assert_eq[TSet[String]](c, a) 74 | 75 | class TestTSetDelta is UnitTest 76 | new iso create() => None 77 | fun name(): String => "crdt.TSet (ẟ)" 78 | 79 | fun apply(h: TestHelper) => 80 | let a = TSet[String] 81 | let b = TSet[String] 82 | let c = TSet[String] 83 | 84 | var a_delta = a.set("apple", 5) 85 | var b_delta = b.set("banana", 5) 86 | var c_delta = c.set("currant", 5) 87 | 88 | h.assert_eq[USize](a.size(), 1) 89 | h.assert_eq[USize](b.size(), 1) 90 | h.assert_eq[USize](c.size(), 1) 91 | h.assert_ne[TSet[String]](a, b) 92 | h.assert_ne[TSet[String]](b, c) 93 | h.assert_ne[TSet[String]](c, a) 94 | 95 | h.assert_false(a.converge(a_delta)) 96 | 97 | h.assert_true(a.converge(b_delta)) 98 | h.assert_true(a.converge(c_delta)) 99 | h.assert_true(b.converge(c_delta)) 100 | h.assert_true(b.converge(a_delta)) 101 | h.assert_true(c.converge(a_delta)) 102 | h.assert_true(c.converge(b_delta)) 103 | 104 | h.assert_eq[USize](a.size(), 3) 105 | h.assert_eq[USize](b.size(), 3) 106 | h.assert_eq[USize](c.size(), 3) 107 | h.assert_eq[TSet[String]](a, b) 108 | h.assert_eq[TSet[String]](b, c) 109 | h.assert_eq[TSet[String]](c, a) 110 | 111 | c_delta = c.unset("currant", 6) 112 | h.assert_true(a.converge(c_delta)) 113 | h.assert_true(b.converge(c_delta)) 114 | 115 | h.assert_eq[USize](a.size(), 2) 116 | h.assert_eq[USize](b.size(), 2) 117 | h.assert_eq[USize](c.size(), 2) 118 | h.assert_eq[TSet[String]](a, b) 119 | h.assert_eq[TSet[String]](b, c) 120 | h.assert_eq[TSet[String]](c, a) 121 | 122 | c_delta = c.unset("banana", 
4) 123 | c_delta = c.unset("apple", 5, consume c_delta) 124 | h.assert_false(a.converge(c_delta)) 125 | h.assert_false(b.converge(c_delta)) 126 | 127 | h.assert_eq[USize](a.size(), 2) 128 | h.assert_eq[USize](b.size(), 2) 129 | h.assert_eq[USize](c.size(), 2) 130 | h.assert_eq[TSet[String]](a, b) 131 | h.assert_eq[TSet[String]](b, c) 132 | h.assert_eq[TSet[String]](c, a) 133 | 134 | c_delta = c.unset("banana", 7) 135 | c_delta = c.unset("apple", 8, consume c_delta) 136 | h.assert_true(a.converge(c_delta)) 137 | h.assert_true(b.converge(c_delta)) 138 | 139 | h.assert_eq[USize](a.size(), 0) 140 | h.assert_eq[USize](b.size(), 0) 141 | h.assert_eq[USize](c.size(), 0) 142 | h.assert_eq[TSet[String]](a, b) 143 | h.assert_eq[TSet[String]](b, c) 144 | h.assert_eq[TSet[String]](c, a) 145 | 146 | class TestTSetTokens is UnitTest 147 | new iso create() => None 148 | fun name(): String => "crdt.TSet (tokens)" 149 | 150 | fun apply(h: TestHelper) => 151 | let data = TSet[String] 152 | .> set("apple", 5) 153 | .> set("banana", 5) 154 | .> set("currant", 5) 155 | .> set("dewberry", 5) 156 | .> unset("dewberry", 6) 157 | .> unset("apple", 6) 158 | 159 | let tokens = Tokens .> from(data) 160 | _TestTokensWellFormed(h, tokens) 161 | 162 | try 163 | h.assert_eq[TSet[String]]( 164 | data, 165 | data.create() .> from_tokens(tokens.iterator())? 
166 | ) 167 | else 168 | h.fail("failed to parse token stream") 169 | end 170 | -------------------------------------------------------------------------------- /crdt/mv_reg.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type MVReg[A: (Hashable val & Equatable[A])] 5 | is MVHashReg[A, HashEq[A]] 6 | 7 | type MVRegIs[A: (Hashable val & Equatable[A])] 8 | is MVHashReg[A, HashIs[A]] 9 | 10 | class ref MVHashReg[A: Equatable[A] val, H: HashFunction[A] val] 11 | is (Comparable[MVHashReg[A, H]] & Causal[MVHashReg[A, H]]) 12 | """ 13 | A mutable register that can hold one or more diverged values at once, using 14 | per-replica sequence numbers to track the causality of updates. 15 | 16 | In the case where two updates to the register have no causal relationship 17 | (they happened concurrently on different replicas), neither update overrides 18 | the other; both written values are retained and remain visible until a later 19 | update observes them and overrides them all ("multi-value" semantics). 20 | 21 | This data structure delegates causality tracking to the reusable "dot kernel" 22 | abstraction. Because that abstraction provides an eventually-consistent set 23 | of replica-associated values, and this data structure uses a commutative 24 | strategy for reading out the values, the result is eventually consistent. 25 | 26 | All mutator methods accept and return a convergent delta-state. 27 | """ 28 | embed _kernel: DotKernel[A] 29 | 30 | new ref create(id: ID) => 31 | """ 32 | Instantiate under the given unique replica id. 33 | """ 34 | _kernel = _kernel.create(id) 35 | 36 | new ref _create_in(ctx': DotContext) => 37 | _kernel = _kernel.create_in(ctx') 38 | 39 | fun _context(): this->DotContext => 40 | _kernel.context() 41 | 42 | fun is_empty(): Bool => 43 | """ 44 | Return true if there are no values ever recorded from any replica. 
45 | This is true at creation, after calling the clear method, 46 | or after a converge that results in all values being cleared. 47 | """ 48 | _kernel.is_empty() 49 | 50 | fun result(): HashSet[A, H] => 51 | """ 52 | Return the elements of the resulting logical set as a single flat set. 53 | Information about specific deletions is discarded, so that the case of a 54 | deleted element is indistinct from that of an element never inserted. 55 | """ 56 | var out = HashSet[A, H] 57 | for value in _kernel.values() do out.set(value) end 58 | out 59 | 60 | fun size(): USize => 61 | """ 62 | Return the number of items in the set. 63 | """ 64 | result().size() 65 | 66 | fun contains(value': A): Bool => 67 | """ 68 | Check whether the set contains the given value. 69 | """ 70 | for value in _kernel.values() do 71 | if value == value' then return true end 72 | end 73 | false 74 | 75 | fun ref update[D: MVHashReg[A, H] ref = MVHashReg[A, H]]( 76 | value': A, 77 | delta': D = recover MVHashReg[A, H](0) end) 78 | : D^ => 79 | """ 80 | Set the value of the register, overriding all currently visible values. 81 | After this function, the register will have a single value locally, at least 82 | until any concurrent updates are converged, adding more values into the set. 83 | Accepts and returns a convergent delta-state. 84 | """ 85 | _kernel.remove_all(delta'._kernel) 86 | _kernel.set(value', delta'._kernel) 87 | delta' 88 | 89 | fun ref clear[D: MVHashReg[A, H] ref = MVHashReg[A, H]]( 90 | delta': D = recover MVHashReg[A, H](0) end) 91 | : D^ => 92 | """ 93 | Remove all locally visible elements from the set. 94 | Accepts and returns a convergent delta-state. 95 | """ 96 | _kernel.remove_all(delta'._kernel) 97 | delta' 98 | 99 | fun ref converge(that: MVHashReg[A, H] box): Bool => 100 | """ 101 | Converge from the given MVReg into this one. 102 | Returns true if the convergence added new information to the data structure. 
103 | """ 104 | _kernel.converge(that._kernel) 105 | 106 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 107 | """ 108 | Optimize for the special case of converging from a peer with an empty map, 109 | taking only their DotContext as an argument for resolving disagreements. 110 | """ 111 | _kernel.converge_empty_in(ctx') 112 | 113 | fun string(): String iso^ => 114 | """ 115 | Return a best effort at printing the set. If A is a Stringable, use the 116 | string representation of each value; otherwise print them as question marks. 117 | """ 118 | let buf = recover String((size() * 3) + 1) end 119 | buf.push('%') 120 | buf.push('{') 121 | var first = true 122 | for value in values() do 123 | if first then first = false else buf .> push(';').push(' ') end 124 | iftype A <: Stringable then 125 | buf.append(value.string()) 126 | else 127 | buf.push('?') 128 | end 129 | end 130 | buf.push('}') 131 | consume buf 132 | 133 | // TODO: optimize comparison functions: 134 | fun eq(that: MVHashReg[A, H] box): Bool => result().eq(that.result()) 135 | fun ne(that: MVHashReg[A, H] box): Bool => result().ne(that.result()) 136 | fun lt(that: MVHashReg[A, H] box): Bool => result().lt(that.result()) 137 | fun le(that: MVHashReg[A, H] box): Bool => result().le(that.result()) 138 | fun gt(that: MVHashReg[A, H] box): Bool => result().gt(that.result()) 139 | fun ge(that: MVHashReg[A, H] box): Bool => result().ge(that.result()) 140 | fun values(): Iterator[A]^ => result().values() 141 | 142 | fun ref from_tokens(that: TokensIterator)? => 143 | """ 144 | Deserialize an instance of this data structure from a stream of tokens. 145 | """ 146 | _kernel.from_tokens(that)? 147 | 148 | fun ref each_token(tokens: Tokens) => 149 | """ 150 | Serialize the data structure, capturing each token into the given Tokens. 
151 | """ 152 | _kernel.each_token(tokens) 153 | -------------------------------------------------------------------------------- /crdt/g_counter.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | class ref GCounter[A: (Integer[A] val & Unsigned) = U64] 5 | is (Comparable[GCounter[A]] & Convergent[GCounter[A]] & Replicated) 6 | """ 7 | A mutable grow-only counter. That is, the value can only be increased. 8 | 9 | It is limited by the maximum value of the used unsigned integer datatype. 10 | Any operation that would lead to an overflow (if the maximum is the 11 | maximum value for the used unsigned integer type) will result in the value 12 | being set to the maximum. So once the maximum is reached, the GCounter will 13 | never change. 14 | 15 | This data type tracks the state seen from each replica, thus the size of the 16 | state will grow proportionally with the number of total replicas. New replicas 17 | may be added as peers at any time, provided that they use unique ids. 18 | Read-only replicas which never change state and only observe need not use 19 | unique ids, and should use an id of zero, by convention. 20 | 21 | The state of each replica represents the value incremented so far by that 22 | particular replica. This local value may only ever increase, never decrease. 23 | The total value of the counter is the sum of the local value of all replicas. 24 | 25 | When converging state from other replicas, we retain the maximum observed 26 | value from each replica id. Because a higher value always implies later 27 | logical time for that replica, and we only keep the highest value seen from 28 | each replica, we will always retain the latest value seen from each replica. 29 | 30 | Because the view of values from each other replica is eventually consistent, 31 | the summed value of the overall counter is also eventually consistent. 
32 | 33 | All mutator methods accept and return a convergent delta-state. 34 | """ 35 | var _id: ID 36 | embed _data: Map[ID, A] 37 | let _checklist: (DotChecklist | None) 38 | 39 | new ref create(id': ID) => 40 | """ 41 | Instantiate the GCounter under the given unique replica id. 42 | """ 43 | _id = id' 44 | _data = Map[ID, A] 45 | _checklist = None 46 | 47 | new ref _create_in(ctx: DotContext) => 48 | _id = ctx.id() 49 | _data = _data.create() 50 | _checklist = DotChecklist(ctx) 51 | 52 | fun ref _checklist_write() => 53 | match _checklist | let c: DotChecklist => c.write() end 54 | 55 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 56 | false 57 | 58 | fun is_empty(): Bool => 59 | """ 60 | Return true if the data structure contains no information (bottom state). 61 | """ 62 | _data.size() == 0 63 | 64 | fun apply(): A => 65 | """ 66 | Return the current value of the counter (the sum of all replica values). 67 | """ 68 | value() 69 | 70 | fun value(): A => 71 | """ 72 | Return the current value of the counter (the sum of all replica values). 73 | """ 74 | var sum = A(0) 75 | for v in _data.values() do sum = _Math.saturated_sum[A](sum, v) end 76 | sum 77 | 78 | fun ref _data_update(id': ID, value': A) => _data(id') = value' 79 | 80 | fun ref increment[D: GCounter[A] ref = GCounter[A]]( 81 | value': A = 1, 82 | delta': D = recover GCounter[A](0) end) 83 | : D^ => 84 | """ 85 | Increment the counter by the given value. 86 | Accepts and returns a convergent delta-state. 87 | """ 88 | let v' = _data.upsert(_id, value', {(x, y) => _Math.saturated_sum[A](x, y) }) 89 | _checklist_write() 90 | delta'._data_update(_id, v') 91 | consume delta' 92 | 93 | fun ref converge(that: GCounter[A] box): Bool => 94 | """ 95 | Converge from the given GCounter into this one. 96 | For each replica state, we select the maximum value seen so far (grow-only). 97 | Returns true if the convergence added new information to the data structure. 
98 | """ 99 | var changed = false 100 | for (id, value') in that._data.pairs() do 101 | // TODO: introduce a stateful upsert in ponyc Map? 102 | if try value' > _data(id)? else true end then 103 | _data(id) = value' 104 | changed = true 105 | end 106 | end 107 | changed 108 | 109 | fun string(): String iso^ => 110 | """ 111 | Return a best effort at printing the register. If A is Stringable, use 112 | the string representation of the value; otherwise print as a question mark. 113 | """ 114 | iftype A <: Stringable val then 115 | value().string() 116 | else 117 | "?".clone() 118 | end 119 | 120 | fun eq(that: GCounter[A] box): Bool => value().eq(that.value()) 121 | fun ne(that: GCounter[A] box): Bool => value().ne(that.value()) 122 | fun lt(that: GCounter[A] box): Bool => value().lt(that.value()) 123 | fun le(that: GCounter[A] box): Bool => value().le(that.value()) 124 | fun gt(that: GCounter[A] box): Bool => value().gt(that.value()) 125 | fun ge(that: GCounter[A] box): Bool => value().ge(that.value()) 126 | 127 | fun ref from_tokens(that: TokensIterator)? => 128 | """ 129 | Deserialize an instance of this data structure from a stream of tokens. 130 | """ 131 | var count = that.next[USize]()? 132 | 133 | if count < 1 then error end 134 | count = count - 1 135 | _id = that.next[ID]()? 136 | 137 | if (count % 2) != 0 then error end 138 | count = count / 2 139 | 140 | // TODO: _data.reserve(count) 141 | while (count = count - 1) > 0 do 142 | _data.update(that.next[ID]()?, that.next[A]()?) 143 | end 144 | 145 | fun ref each_token(tokens: Tokens) => 146 | """ 147 | Serialize the data structure, capturing each token into the given Tokens. 
148 | """ 149 | tokens.push(1 + (_data.size() * 2)) 150 | tokens.push(_id) 151 | for (id, v) in _data.pairs() do 152 | tokens.push(id) 153 | tokens.push(v) 154 | end 155 | -------------------------------------------------------------------------------- /crdt/t_reg.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | 3 | type TRegString[ 4 | T: (Integer[T] & Unsigned) = U64, 5 | B: (BiasGreater | BiasLesser) = BiasGreater] 6 | is TReg[String, _DefaultValueString, T, B] 7 | 8 | type TRegNumber[ 9 | A: (Number & Real[A] val), 10 | T: (Integer[T] & Unsigned) = U64, 11 | B: (BiasGreater | BiasLesser) = BiasGreater] 12 | is TReg[A, _DefaultValueNumber[A], T, B] 13 | 14 | class ref TReg[ 15 | A: Comparable[A] val, 16 | V: _DefaultValueFn[A] val, 17 | T: (Integer[T] & Unsigned) = U64, 18 | B: (BiasGreater | BiasLesser) = BiasGreater] 19 | is (Equatable[TReg[A, V, T, B]] & Convergent[TReg[A, V, T, B]] & Replicated) 20 | """ 21 | A mutable register with last-write-wins semantics for updating the value. 22 | That is, every update operation includes a logical timestamp (U64 by default, 23 | though it may be any unsigned integer type), and update operationss are 24 | overridden only by those with a higher logical timestamp. 25 | 26 | This implies that the timestamps must be correct (or at least logically so) 27 | in order for the last-write-wins semantics to hold true. 28 | 29 | If the logical timestamp is equal for two compared operations, the tie will 30 | be broken by the bias type parameter. BiasGreater implies that the greater of 31 | the two compared values will be chosen, while BiasLesser implies the opposite. 32 | The default bias is BiasGreater. 33 | 34 | Because there is an order-independent way of comparing both the timestamp and 35 | the value term of all update operations, all conflicts can be resolved in a 36 | commutative way; thus, the result is eventually consistent in all replicas. 
37 | The same bias must be used on all replicas for tie results to be consistent. 38 | 39 | All mutator methods accept and return a convergent delta-state. 40 | """ 41 | var _value: A = V() 42 | var _timestamp: T = T.from[U8](0) 43 | let _checklist: (DotChecklist | None) 44 | 45 | new ref create() => 46 | _checklist = None 47 | 48 | new ref _create_in(ctx: DotContext) => 49 | _checklist = DotChecklist(ctx) 50 | 51 | fun ref _checklist_write() => 52 | match _checklist | let c: DotChecklist => c.write() end 53 | 54 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 55 | false 56 | 57 | fun is_empty(): Bool => 58 | """ 59 | Return true if the data structure contains no information (bottom state). 60 | """ 61 | _timestamp == T.from[U8](0) 62 | 63 | fun apply(): A => 64 | """ 65 | Return the current value of the register. 66 | """ 67 | _value 68 | 69 | fun value(): A => 70 | """ 71 | Return the current value of the register. 72 | """ 73 | _value 74 | 75 | fun timestamp(): T => 76 | """ 77 | Return the latest timestamp of the register. 78 | """ 79 | _timestamp 80 | 81 | fun ref _update_no_delta(value': A, timestamp': T): Bool => 82 | if 83 | (timestamp' > _timestamp) or ( 84 | (timestamp' == _timestamp) 85 | and 86 | iftype B <: BiasGreater 87 | then value' > _value 88 | else value' < _value 89 | end 90 | ) 91 | then 92 | _value = value' 93 | _timestamp = timestamp' 94 | true 95 | else 96 | false 97 | end 98 | 99 | fun ref update[D: TReg[A, V, T, B] ref = TReg[A, V, T, B]]( 100 | value': A, 101 | timestamp': T, 102 | delta': D = D) 103 | : D^ => 104 | """ 105 | Update the value and timestamp of the register, provided that the given 106 | timestamp is newer than the current timestamp of the register. 107 | If the given timestamp is older, the update is ignored. 108 | Accepts and returns a convergent delta-state. 
109 | """ 110 | _update_no_delta(value', timestamp') 111 | _checklist_write() 112 | 113 | delta' .> _update_no_delta(value', timestamp') 114 | 115 | fun ref converge(that: TReg[A, V, T, B] box): Bool => 116 | """ 117 | Converge from the given TReg into this one. 118 | For this data type, the convergence is a simple update operation. 119 | Returns true if the convergence added new information to the data structure. 120 | """ 121 | _update_no_delta(that.value(), that.timestamp()) 122 | 123 | fun string(): String iso^ => 124 | """ 125 | Return a best effort at printing the log. If A and T are Stringable, use 126 | the string representation of them; otherwise print as question marks. 127 | """ 128 | let buf = recover String(8) end 129 | buf.push('%') 130 | buf.push('(') 131 | iftype A <: Stringable val then 132 | buf.append(value().string()) 133 | else 134 | buf.push('?') 135 | end 136 | buf .> push(',').push(' ') 137 | iftype T <: Stringable val then 138 | buf.append(timestamp().string()) 139 | else 140 | buf.push('?') 141 | end 142 | buf.push(')') 143 | consume buf 144 | 145 | fun eq(that: TReg[A, V, T, B] box): Bool => value().eq(that.value()) 146 | fun ne(that: TReg[A, V, T, B] box): Bool => value().ne(that.value()) 147 | fun lt(that: TReg[A, V, T, B] box): Bool => value().lt(that.value()) 148 | fun le(that: TReg[A, V, T, B] box): Bool => value().le(that.value()) 149 | fun gt(that: TReg[A, V, T, B] box): Bool => value().gt(that.value()) 150 | fun ge(that: TReg[A, V, T, B] box): Bool => value().ge(that.value()) 151 | 152 | fun ref from_tokens(that: TokensIterator)? => 153 | """ 154 | Deserialize an instance of this data structure from a stream of tokens. 155 | """ 156 | if that.next[USize]()? != 2 then error end 157 | _value = that.next[A]()? 158 | _timestamp = that.next[T]()? 159 | 160 | fun ref each_token(tokens: Tokens) => 161 | """ 162 | Serialize the data structure, capturing each token into the given Tokens. 
163 | """ 164 | tokens.push(USize(2)) 165 | tokens.push(_value) 166 | tokens.push(_timestamp) 167 | -------------------------------------------------------------------------------- /crdt/test/test_g_counter.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestGCounter is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.GCounter" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = GCounter("a".hash64()) 10 | let b = GCounter("b".hash64()) 11 | let c = GCounter("c".hash64()) 12 | 13 | a.increment(1) 14 | b.increment(2) 15 | c.increment(3) 16 | 17 | h.assert_eq[U64](a.value(), 1) 18 | h.assert_eq[U64](b.value(), 2) 19 | h.assert_eq[U64](c.value(), 3) 20 | h.assert_ne[GCounter](a, b) 21 | h.assert_ne[GCounter](b, c) 22 | h.assert_ne[GCounter](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[U64](a.value(), 6) 34 | h.assert_eq[U64](b.value(), 6) 35 | h.assert_eq[U64](c.value(), 6) 36 | h.assert_eq[GCounter](a, b) 37 | h.assert_eq[GCounter](b, c) 38 | h.assert_eq[GCounter](c, a) 39 | 40 | a.increment(9) 41 | b.increment(8) 42 | c.increment(7) 43 | 44 | h.assert_eq[U64](a.value(), 15) 45 | h.assert_eq[U64](b.value(), 14) 46 | h.assert_eq[U64](c.value(), 13) 47 | h.assert_ne[GCounter](a, b) 48 | h.assert_ne[GCounter](b, c) 49 | h.assert_ne[GCounter](c, a) 50 | 51 | h.assert_true(a.converge(b)) 52 | h.assert_true(a.converge(c)) 53 | h.assert_true(b.converge(c)) 54 | h.assert_true(b.converge(a)) 55 | h.assert_true(c.converge(a)) 56 | h.assert_false(c.converge(b)) 57 | 58 | h.assert_eq[U64](a.value(), 30) 59 | h.assert_eq[U64](b.value(), 30) 60 | h.assert_eq[U64](c.value(), 30) 61 | h.assert_eq[GCounter](a, b) 62 | h.assert_eq[GCounter](b, c) 63 | 
h.assert_eq[GCounter](c, a) 64 | 65 | class TestGCounterDelta is UnitTest 66 | new iso create() => None 67 | fun name(): String => "crdt.GCounter (ẟ)" 68 | 69 | fun apply(h: TestHelper) => 70 | let a = GCounter("a".hash64()) 71 | let b = GCounter("b".hash64()) 72 | let c = GCounter("c".hash64()) 73 | 74 | var a_delta = a.increment(1) 75 | var b_delta = b.increment(2) 76 | var c_delta = c.increment(3) 77 | 78 | h.assert_eq[U64](a.value(), 1) 79 | h.assert_eq[U64](b.value(), 2) 80 | h.assert_eq[U64](c.value(), 3) 81 | h.assert_ne[GCounter](a, b) 82 | h.assert_ne[GCounter](b, c) 83 | h.assert_ne[GCounter](c, a) 84 | 85 | h.assert_false(a.converge(a_delta)) 86 | 87 | h.assert_true(a.converge(b_delta)) 88 | h.assert_true(a.converge(c_delta)) 89 | h.assert_true(b.converge(c_delta)) 90 | h.assert_true(b.converge(a_delta)) 91 | h.assert_true(c.converge(a_delta)) 92 | h.assert_true(c.converge(b_delta)) 93 | 94 | h.assert_eq[U64](a.value(), 6) 95 | h.assert_eq[U64](b.value(), 6) 96 | h.assert_eq[U64](c.value(), 6) 97 | h.assert_eq[GCounter](a, b) 98 | h.assert_eq[GCounter](b, c) 99 | h.assert_eq[GCounter](c, a) 100 | 101 | a_delta = a.increment(9) 102 | b_delta = b.increment(8) 103 | c_delta = c.increment(7) 104 | 105 | h.assert_eq[U64](a.value(), 15) 106 | h.assert_eq[U64](b.value(), 14) 107 | h.assert_eq[U64](c.value(), 13) 108 | h.assert_ne[GCounter](a, b) 109 | h.assert_ne[GCounter](b, c) 110 | h.assert_ne[GCounter](c, a) 111 | 112 | h.assert_true(a.converge(b_delta)) 113 | h.assert_true(a.converge(c_delta)) 114 | h.assert_true(b.converge(c_delta)) 115 | h.assert_true(b.converge(a_delta)) 116 | h.assert_true(c.converge(a_delta)) 117 | h.assert_true(c.converge(b_delta)) 118 | 119 | h.assert_eq[U64](a.value(), 30) 120 | h.assert_eq[U64](b.value(), 30) 121 | h.assert_eq[U64](c.value(), 30) 122 | h.assert_eq[GCounter](a, b) 123 | h.assert_eq[GCounter](b, c) 124 | h.assert_eq[GCounter](c, a) 125 | 126 | class TestGCounterTokens is UnitTest 127 | new iso create() => None 128 | 
fun name(): String => "crdt.GCounter (tokens)" 129 | 130 | fun apply(h: TestHelper) => 131 | let data = GCounter[U8]("a".hash64()) 132 | let data' = GCounter[U8]("b".hash64()) 133 | let data'' = GCounter[U8]("c".hash64()) 134 | 135 | data.increment(4) 136 | data'.increment(5) 137 | data''.increment(6) 138 | 139 | data.converge(data') 140 | data.converge(data'') 141 | 142 | let tokens = Tokens .> from(data) 143 | _TestTokensWellFormed(h, tokens) 144 | 145 | try 146 | h.assert_eq[GCounter[U8]]( 147 | data, 148 | data.create(0) .> from_tokens(tokens.iterator())? 149 | ) 150 | else 151 | h.fail("failed to parse token stream") 152 | end 153 | 154 | class TestGCounterMax is UnitTest 155 | new iso create() => None 156 | fun name(): String => "crdt.GCounter (max)" 157 | fun apply(h: TestHelper) => 158 | let data = GCounter[U8]("a".hash64()) 159 | let data' = GCounter[U8]("b".hash64()) 160 | let data'' = GCounter[U8]("c".hash64()) 161 | 162 | data.increment(250) 163 | data'.increment(253) 164 | data''.increment(254) 165 | 166 | h.assert_true(data.converge(data')) 167 | h.assert_true(data.converge(data'')) 168 | h.assert_true(data'.converge(data)) 169 | h.assert_false(data'.converge(data'')) // data' == data'' 170 | h.assert_true(data''.converge(data)) 171 | h.assert_false(data''.converge(data')) // data'' == data' 172 | 173 | data.increment(7) 174 | data''.increment(1) 175 | 176 | h.assert_true(data''.converge(data)) 177 | h.assert_false(data''.converge(data')) // data'' > data' 178 | 179 | h.assert_eq[U8](data.value(), U8.max_value()) 180 | h.assert_eq[U8](data'.value(), U8.max_value()) 181 | h.assert_eq[U8](data''.value(), U8.max_value()) 182 | 183 | data.increment(42) 184 | h.assert_eq[U8](data.value(), U8.max_value()) 185 | 186 | -------------------------------------------------------------------------------- /crdt/test/test_awor_set.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 
3 | 4 | class TestAWORSet is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.AWORSet" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = AWORSet[String]("a".hash64()) 10 | let b = AWORSet[String]("b".hash64()) 11 | let c = AWORSet[String]("c".hash64()) 12 | 13 | a.set("apple") 14 | b.set("banana") 15 | c.set("currant") 16 | 17 | h.assert_eq[USize](a.size(), 1) 18 | h.assert_eq[USize](b.size(), 1) 19 | h.assert_eq[USize](b.size(), 1) 20 | h.assert_ne[AWORSet[String]](a, b) 21 | h.assert_ne[AWORSet[String]](b, c) 22 | h.assert_ne[AWORSet[String]](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[USize](a.size(), 3) 34 | h.assert_eq[USize](b.size(), 3) 35 | h.assert_eq[USize](b.size(), 3) 36 | h.assert_eq[AWORSet[String]](a, b) 37 | h.assert_eq[AWORSet[String]](b, c) 38 | h.assert_eq[AWORSet[String]](c, a) 39 | 40 | c.unset("currant") 41 | 42 | h.assert_true(a.converge(c)) 43 | h.assert_true(b.converge(c)) 44 | h.assert_false(a.converge(b)) 45 | 46 | h.assert_eq[USize](a.size(), 2) 47 | h.assert_eq[USize](b.size(), 2) 48 | h.assert_eq[USize](b.size(), 2) 49 | h.assert_eq[AWORSet[String]](a, b) 50 | h.assert_eq[AWORSet[String]](b, c) 51 | h.assert_eq[AWORSet[String]](c, a) 52 | 53 | c.unset("apple") 54 | c.unset("banana") 55 | c.set("currant") 56 | 57 | h.assert_true(a.converge(c)) 58 | h.assert_true(b.converge(c)) 59 | h.assert_false(a.converge(b)) 60 | 61 | h.assert_true(a.contains("currant")) 62 | h.assert_eq[USize](a.size(), 1) 63 | h.assert_eq[USize](b.size(), 1) 64 | h.assert_eq[USize](b.size(), 1) 65 | h.assert_eq[AWORSet[String]](a, b) 66 | h.assert_eq[AWORSet[String]](b, c) 67 | h.assert_eq[AWORSet[String]](c, a) 68 | 69 | a.set("dewberry") 70 | a.unset("dewberry") 71 | b.set("dewberry") 72 | 73 | 
h.assert_true(a.converge(b)) 74 | h.assert_false(a.converge(c)) 75 | h.assert_false(b.converge(c)) 76 | h.assert_true(b.converge(a)) 77 | h.assert_true(c.converge(a)) 78 | h.assert_false(c.converge(b)) 79 | 80 | h.assert_true(c.contains("dewberry")) // add wins 81 | h.assert_eq[USize](a.size(), 2) 82 | h.assert_eq[USize](b.size(), 2) 83 | h.assert_eq[USize](b.size(), 2) 84 | h.assert_eq[AWORSet[String]](a, b) 85 | h.assert_eq[AWORSet[String]](b, c) 86 | h.assert_eq[AWORSet[String]](c, a) 87 | 88 | class TestAWORSetDelta is UnitTest 89 | new iso create() => None 90 | fun name(): String => "crdt.AWORSet (ẟ)" 91 | 92 | fun apply(h: TestHelper) => 93 | let a = AWORSet[String]("a".hash64()) 94 | let b = AWORSet[String]("b".hash64()) 95 | let c = AWORSet[String]("c".hash64()) 96 | 97 | var a_delta = a.set("apple") 98 | var b_delta = b.set("banana") 99 | var c_delta = c.set("currant") 100 | 101 | h.assert_eq[USize](a.size(), 1) 102 | h.assert_eq[USize](b.size(), 1) 103 | h.assert_eq[USize](b.size(), 1) 104 | h.assert_ne[AWORSet[String]](a, b) 105 | h.assert_ne[AWORSet[String]](b, c) 106 | h.assert_ne[AWORSet[String]](c, a) 107 | 108 | h.assert_false(a.converge(a_delta)) 109 | 110 | h.assert_true(a.converge(b_delta)) 111 | h.assert_true(a.converge(c_delta)) 112 | h.assert_true(b.converge(c_delta)) 113 | h.assert_true(b.converge(a_delta)) 114 | h.assert_true(c.converge(a_delta)) 115 | h.assert_true(c.converge(b_delta)) 116 | 117 | h.assert_eq[USize](a.size(), 3) 118 | h.assert_eq[USize](b.size(), 3) 119 | h.assert_eq[USize](b.size(), 3) 120 | h.assert_eq[AWORSet[String]](a, b) 121 | h.assert_eq[AWORSet[String]](b, c) 122 | h.assert_eq[AWORSet[String]](c, a) 123 | 124 | c_delta = c.unset("currant") 125 | 126 | h.assert_true(a.converge(c_delta)) 127 | h.assert_true(b.converge(c_delta)) 128 | h.assert_false(c.converge(c_delta)) 129 | 130 | h.assert_eq[USize](a.size(), 2) 131 | h.assert_eq[USize](b.size(), 2) 132 | h.assert_eq[USize](b.size(), 2) 133 | 
h.assert_eq[AWORSet[String]](a, b) 134 | h.assert_eq[AWORSet[String]](b, c) 135 | h.assert_eq[AWORSet[String]](c, a) 136 | 137 | c_delta = c.unset("banana") 138 | c_delta = c.unset("apple", consume c_delta) 139 | c_delta = c.set("currant", consume c_delta) 140 | 141 | h.assert_true(a.converge(c_delta)) 142 | h.assert_true(b.converge(c_delta)) 143 | h.assert_false(c.converge(c_delta)) 144 | 145 | h.assert_true(a.contains("currant")) 146 | h.assert_eq[USize](a.size(), 1) 147 | h.assert_eq[USize](b.size(), 1) 148 | h.assert_eq[USize](b.size(), 1) 149 | h.assert_eq[AWORSet[String]](a, b) 150 | h.assert_eq[AWORSet[String]](b, c) 151 | h.assert_eq[AWORSet[String]](c, a) 152 | 153 | a_delta = a.set("dewberry") 154 | a_delta = a.unset("dewberry", consume a_delta) 155 | b_delta = b.set("dewberry") 156 | 157 | h.assert_true(a.converge(b_delta)) 158 | h.assert_true(b.converge(a_delta)) 159 | h.assert_true(c.converge(a_delta)) 160 | h.assert_true(c.converge(b_delta)) 161 | 162 | h.assert_true(a.contains("dewberry")) // add wins 163 | h.assert_eq[USize](a.size(), 2) 164 | h.assert_eq[USize](b.size(), 2) 165 | h.assert_eq[USize](b.size(), 2) 166 | h.assert_eq[AWORSet[String]](a, b) 167 | h.assert_eq[AWORSet[String]](b, c) 168 | h.assert_eq[AWORSet[String]](c, a) 169 | 170 | class TestAWORSetTokens is UnitTest 171 | new iso create() => None 172 | fun name(): String => "crdt.AWORSet (tokens)" 173 | 174 | fun apply(h: TestHelper) => 175 | let data = AWORSet[String]("a".hash64()) 176 | let data' = AWORSet[String]("b".hash64()) 177 | let data'' = AWORSet[String]("c".hash64()) 178 | 179 | data.set("apple") 180 | data'.unset("apple") 181 | data''.set("banana") 182 | 183 | data.converge(data') 184 | data.converge(data'') 185 | 186 | let tokens = Tokens .> from(data) 187 | _TestTokensWellFormed(h, tokens) 188 | 189 | try 190 | h.assert_eq[AWORSet[String]]( 191 | data, 192 | data.create(0) .> from_tokens(tokens.iterator())? 
193 | ) 194 | else 195 | h.fail("failed to parse token stream") 196 | end 197 | -------------------------------------------------------------------------------- /crdt/test/test_rwor_set.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestRWORSet is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.RWORSet" 7 | 8 | fun apply(h: TestHelper) => 9 | let a = RWORSet[String]("a".hash64()) 10 | let b = RWORSet[String]("b".hash64()) 11 | let c = RWORSet[String]("c".hash64()) 12 | 13 | a.set("apple") 14 | b.set("banana") 15 | c.set("currant") 16 | 17 | h.assert_eq[USize](a.size(), 1) 18 | h.assert_eq[USize](b.size(), 1) 19 | h.assert_eq[USize](b.size(), 1) 20 | h.assert_ne[RWORSet[String]](a, b) 21 | h.assert_ne[RWORSet[String]](b, c) 22 | h.assert_ne[RWORSet[String]](c, a) 23 | 24 | h.assert_false(a.converge(a)) 25 | 26 | h.assert_true(a.converge(b)) 27 | h.assert_true(a.converge(c)) 28 | h.assert_true(b.converge(c)) 29 | h.assert_true(b.converge(a)) 30 | h.assert_true(c.converge(a)) 31 | h.assert_false(c.converge(b)) 32 | 33 | h.assert_eq[USize](a.size(), 3) 34 | h.assert_eq[USize](b.size(), 3) 35 | h.assert_eq[USize](b.size(), 3) 36 | h.assert_eq[RWORSet[String]](a, b) 37 | h.assert_eq[RWORSet[String]](b, c) 38 | h.assert_eq[RWORSet[String]](c, a) 39 | 40 | c.unset("currant") 41 | 42 | h.assert_true(a.converge(c)) 43 | h.assert_true(b.converge(c)) 44 | h.assert_false(a.converge(b)) 45 | 46 | h.assert_eq[USize](a.size(), 2) 47 | h.assert_eq[USize](b.size(), 2) 48 | h.assert_eq[USize](b.size(), 2) 49 | h.assert_eq[RWORSet[String]](a, b) 50 | h.assert_eq[RWORSet[String]](b, c) 51 | h.assert_eq[RWORSet[String]](c, a) 52 | 53 | c.unset("apple") 54 | c.unset("banana") 55 | c.set("currant") 56 | 57 | h.assert_true(a.converge(c)) 58 | h.assert_true(b.converge(c)) 59 | h.assert_false(a.converge(b)) 60 | 61 | h.assert_true(a.contains("currant")) 62 | 
h.assert_eq[USize](a.size(), 1) 63 | h.assert_eq[USize](b.size(), 1) 64 | h.assert_eq[USize](b.size(), 1) 65 | h.assert_eq[RWORSet[String]](a, b) 66 | h.assert_eq[RWORSet[String]](b, c) 67 | h.assert_eq[RWORSet[String]](c, a) 68 | 69 | a.set("dewberry") 70 | a.unset("dewberry") 71 | b.set("dewberry") 72 | 73 | h.assert_true(a.converge(b)) 74 | h.assert_false(a.converge(c)) 75 | h.assert_false(b.converge(c)) 76 | h.assert_true(b.converge(a)) 77 | h.assert_true(c.converge(a)) 78 | h.assert_false(c.converge(b)) 79 | 80 | h.assert_false(c.contains("dewberry")) // remove wins 81 | h.assert_eq[USize](a.size(), 1) 82 | h.assert_eq[USize](b.size(), 1) 83 | h.assert_eq[USize](b.size(), 1) 84 | h.assert_eq[RWORSet[String]](a, b) 85 | h.assert_eq[RWORSet[String]](b, c) 86 | h.assert_eq[RWORSet[String]](c, a) 87 | 88 | class TestRWORSetDelta is UnitTest 89 | new iso create() => None 90 | fun name(): String => "crdt.RWORSet (ẟ)" 91 | 92 | fun apply(h: TestHelper) => 93 | let a = RWORSet[String]("a".hash64()) 94 | let b = RWORSet[String]("b".hash64()) 95 | let c = RWORSet[String]("c".hash64()) 96 | 97 | var a_delta = a.set("apple") 98 | var b_delta = b.set("banana") 99 | var c_delta = c.set("currant") 100 | 101 | h.assert_eq[USize](a.size(), 1) 102 | h.assert_eq[USize](b.size(), 1) 103 | h.assert_eq[USize](b.size(), 1) 104 | h.assert_ne[RWORSet[String]](a, b) 105 | h.assert_ne[RWORSet[String]](b, c) 106 | h.assert_ne[RWORSet[String]](c, a) 107 | 108 | h.assert_false(a.converge(a_delta)) 109 | 110 | h.assert_true(a.converge(b_delta)) 111 | h.assert_true(a.converge(c_delta)) 112 | h.assert_true(b.converge(c_delta)) 113 | h.assert_true(b.converge(a_delta)) 114 | h.assert_true(c.converge(a_delta)) 115 | h.assert_true(c.converge(b_delta)) 116 | 117 | h.assert_eq[USize](a.size(), 3) 118 | h.assert_eq[USize](b.size(), 3) 119 | h.assert_eq[USize](b.size(), 3) 120 | h.assert_eq[RWORSet[String]](a, b) 121 | h.assert_eq[RWORSet[String]](b, c) 122 | h.assert_eq[RWORSet[String]](c, a) 123 | 
124 | c_delta = c.unset("currant") 125 | 126 | h.assert_true(a.converge(c_delta)) 127 | h.assert_true(b.converge(c_delta)) 128 | h.assert_false(c.converge(c_delta)) 129 | 130 | h.assert_eq[USize](a.size(), 2) 131 | h.assert_eq[USize](b.size(), 2) 132 | h.assert_eq[USize](b.size(), 2) 133 | h.assert_eq[RWORSet[String]](a, b) 134 | h.assert_eq[RWORSet[String]](b, c) 135 | h.assert_eq[RWORSet[String]](c, a) 136 | 137 | c_delta = c.unset("banana") 138 | c_delta = c.unset("apple", consume c_delta) 139 | c_delta = c.set("currant", consume c_delta) 140 | 141 | h.assert_true(a.converge(c_delta)) 142 | h.assert_true(b.converge(c_delta)) 143 | h.assert_false(c.converge(c_delta)) 144 | 145 | h.assert_true(a.contains("currant")) 146 | h.assert_eq[USize](a.size(), 1) 147 | h.assert_eq[USize](b.size(), 1) 148 | h.assert_eq[USize](b.size(), 1) 149 | h.assert_eq[RWORSet[String]](a, b) 150 | h.assert_eq[RWORSet[String]](b, c) 151 | h.assert_eq[RWORSet[String]](c, a) 152 | 153 | a_delta = a.set("dewberry") 154 | a_delta = a.unset("dewberry", consume a_delta) 155 | b_delta = b.set("dewberry") 156 | 157 | h.assert_true(a.converge(b_delta)) 158 | h.assert_true(b.converge(a_delta)) 159 | h.assert_true(c.converge(a_delta)) 160 | h.assert_true(c.converge(b_delta)) 161 | 162 | h.assert_false(a.contains("dewberry")) // remove wins 163 | h.assert_eq[USize](a.size(), 1) 164 | h.assert_eq[USize](b.size(), 1) 165 | h.assert_eq[USize](b.size(), 1) 166 | h.assert_eq[RWORSet[String]](a, b) 167 | h.assert_eq[RWORSet[String]](b, c) 168 | h.assert_eq[RWORSet[String]](c, a) 169 | 170 | class TestRWORSetTokens is UnitTest 171 | new iso create() => None 172 | fun name(): String => "crdt.RWORSet (tokens)" 173 | 174 | fun apply(h: TestHelper) => 175 | let data = RWORSet[String]("a".hash64()) 176 | let data' = RWORSet[String]("b".hash64()) 177 | let data'' = RWORSet[String]("c".hash64()) 178 | 179 | data.set("apple") 180 | data'.unset("apple") 181 | data''.set("banana") 182 | 183 | data.converge(data') 184 
| data.converge(data'') 185 | 186 | let tokens = Tokens .> from(data) 187 | _TestTokensWellFormed(h, tokens) 188 | 189 | try 190 | h.assert_eq[RWORSet[String]]( 191 | data, 192 | data.create(0) .> from_tokens(tokens.iterator())? 193 | ) 194 | else 195 | h.fail("failed to parse token stream") 196 | end 197 | -------------------------------------------------------------------------------- /crdt/awor_set.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type AWORSet[A: (Hashable val & Equatable[A])] 5 | is AWORHashSet[A, HashEq[A]] 6 | 7 | type AWORSetIs[A: (Hashable val & Equatable[A])] 8 | is AWORHashSet[A, HashIs[A]] 9 | 10 | class ref AWORHashSet[A: Equatable[A] val, H: HashFunction[A] val] 11 | is (Comparable[AWORHashSet[A, H]] & Causal[AWORHashSet[A, H]]) 12 | """ 13 | An unordered mutable set that supports removing locally visible elements 14 | ("observed remove") using per-replica sequence numbers to track causality. 15 | 16 | In the case where an insertion and a deletion for the same element have 17 | no causal relationship (they happened concurrently on different replicas), 18 | the insertion will override the deletion ("add wins"). For a similar data 19 | structure with the opposite bias, see the "remove wins" variant (RWORSet). 20 | 21 | This data structure delegates causality tracking to the reusable "dot kernel" 22 | abstraction. Because that abstraction provides an eventually-consistent set 23 | of replica-associated values, and this data structure uses a commutative 24 | strategy for reading out the values, the result is eventually consistent. 25 | 26 | All mutator methods accept and return a convergent delta-state. 27 | """ 28 | embed _kernel: DotKernel[A] 29 | 30 | new ref create(id: ID) => 31 | """ 32 | Instantiate under the given unique replica id. 
33 | """ 34 | _kernel = _kernel.create(id) 35 | 36 | new ref _create_in(ctx': DotContext) => 37 | _kernel = _kernel.create_in(ctx') 38 | 39 | fun _context(): this->DotContext => 40 | _kernel.context() 41 | 42 | fun is_empty(): Bool => 43 | """ 44 | Return true if there are no values ever recorded from any replica. 45 | This is true at creation, after calling the clear method, 46 | or after a converge that results in all values being cleared. 47 | """ 48 | _kernel.is_empty() 49 | 50 | fun result(): HashSet[A, H] => 51 | """ 52 | Return the elements of the resulting logical set as a single flat set. 53 | Information about specific deletions is discarded, so that the case of a 54 | deleted element is indistinct from that of an element never inserted. 55 | """ 56 | var out = HashSet[A, H] 57 | for value in _kernel.values() do out.set(value) end 58 | out 59 | 60 | fun size(): USize => 61 | """ 62 | Return the number of items in the set. 63 | """ 64 | result().size() 65 | 66 | fun contains(value': A): Bool => 67 | """ 68 | Check whether the set contains the given value. 69 | """ 70 | for value in _kernel.values() do 71 | if value == value' then return true end 72 | end 73 | false 74 | 75 | fun ref set[D: AWORHashSet[A, H] ref = AWORHashSet[A, H]]( 76 | value': A, 77 | delta': D = recover AWORHashSet[A, H](0) end) 78 | : D^ => 79 | """ 80 | Add a value to the set. 81 | Accepts and returns a convergent delta-state. 82 | """ 83 | // As a memory optimization, first remove value' in any/all replicas. 84 | // The value only needs a dot in one replica - this one we're in now. 85 | _kernel.remove_value[Eq[A]](value', delta'._kernel) 86 | _kernel.set(value', delta'._kernel) 87 | delta' 88 | 89 | fun ref unset[D: AWORHashSet[A, H] ref = AWORHashSet[A, H]]( 90 | value': A, 91 | delta': D = recover AWORHashSet[A, H](0) end) 92 | : D^ => 93 | """ 94 | Remove a value from the set. 95 | Accepts and returns a convergent delta-state. 
96 | """ 97 | _kernel.remove_value[Eq[A]](value', delta'._kernel) 98 | delta' 99 | 100 | fun ref clear[D: AWORHashSet[A, H] ref = AWORHashSet[A, H]]( 101 | delta': D = recover AWORHashSet[A, H](0) end) 102 | : D^ => 103 | """ 104 | Remove all locally visible elements from the set. 105 | Accepts and returns a convergent delta-state. 106 | """ 107 | _kernel.remove_all(delta'._kernel) 108 | delta' 109 | 110 | fun ref union[D: AWORHashSet[A, H] ref = AWORHashSet[A, H]]( 111 | that': Iterator[A], 112 | delta': D = recover AWORHashSet[A, H](0) end) 113 | : D^ => 114 | """ 115 | Add everything in the given iterator to the set. 116 | Accepts and returns a convergent delta-state. 117 | """ 118 | for value' in that' do set(value', delta') end 119 | delta' 120 | 121 | fun ref converge(that: AWORHashSet[A, H] box): Bool => 122 | """ 123 | Converge from the given AWORSet into this one. 124 | Returns true if the convergence added new information to the data structure. 125 | """ 126 | _kernel.converge(that._kernel) 127 | 128 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 129 | """ 130 | Optimize for the special case of converging from a peer with an empty map, 131 | taking only their DotContext as an argument for resolving disagreements. 132 | """ 133 | _kernel.converge_empty_in(ctx') 134 | 135 | fun string(): String iso^ => 136 | """ 137 | Return a best effort at printing the set. If A is a Stringable, use the 138 | string representation of each value; otherwise print them as question marks. 
139 | """ 140 | let buf = recover String((size() * 3) + 1) end 141 | buf.push('%') 142 | buf.push('{') 143 | var first = true 144 | for value in values() do 145 | if first then first = false else buf .> push(';').push(' ') end 146 | iftype A <: Stringable then 147 | buf.append(value.string()) 148 | else 149 | buf.push('?') 150 | end 151 | end 152 | buf.push('}') 153 | consume buf 154 | 155 | // TODO: optimize comparison functions: 156 | fun eq(that: AWORHashSet[A, H] box): Bool => result().eq(that.result()) 157 | fun ne(that: AWORHashSet[A, H] box): Bool => result().ne(that.result()) 158 | fun lt(that: AWORHashSet[A, H] box): Bool => result().lt(that.result()) 159 | fun le(that: AWORHashSet[A, H] box): Bool => result().le(that.result()) 160 | fun gt(that: AWORHashSet[A, H] box): Bool => result().gt(that.result()) 161 | fun ge(that: AWORHashSet[A, H] box): Bool => result().ge(that.result()) 162 | fun values(): Iterator[A]^ => result().values() 163 | 164 | fun ref from_tokens(that: TokensIterator)? => 165 | """ 166 | Deserialize an instance of this data structure from a stream of tokens. 167 | """ 168 | _kernel.from_tokens(that)? 169 | 170 | fun ref each_token(tokens: Tokens) => 171 | """ 172 | Serialize the data structure, capturing each token into the given Tokens. 173 | """ 174 | _kernel.each_token(tokens) 175 | -------------------------------------------------------------------------------- /crdt/pn_counter.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | class ref PNCounter[A: (Integer[A] val & Unsigned) = U64] 5 | is (Comparable[PNCounter[A]] & Convergent[PNCounter[A]] & Replicated) 6 | """ 7 | A mutable counter, which can be both increased and decreased. 8 | 9 | This data type tracks the state seen from each replica, thus the size of the 10 | state will grow proportionally with the number of total replicas. 
New replicas 11 | may be added as peers at any time, provided that they use unique ids. 12 | Read-only replicas which never change state and only observe need not use 13 | unique ids, and should use an id of zero, by convention. 14 | 15 | The counter is implemented as a pair of grow-only counters, with one counter 16 | representing growth in the positive direction, and the other counter 17 | representing growth in the negative direction, with the total value of the 18 | counter being calculated from the difference in magnitude. 19 | 20 | Because the data type is composed of a pair of eventually consistent CRDTs, 21 | the calculated value of the overall counter is also eventually consistent. 22 | 23 | All mutator methods accept and return a convergent delta-state. 24 | """ 25 | var _id: ID 26 | embed _pos: Map[ID, A] 27 | embed _neg: Map[ID, A] 28 | let _checklist: (DotChecklist | None) 29 | 30 | new ref create(id': ID) => 31 | """ 32 | Instantiate the PNCounter under the given unique replica id. 33 | """ 34 | _id = id' 35 | _pos = _pos.create() 36 | _neg = _neg.create() 37 | _checklist = None 38 | 39 | new ref _create_in(ctx: DotContext) => 40 | _id = ctx.id() 41 | _pos = _pos.create() 42 | _neg = _neg.create() 43 | _checklist = DotChecklist(ctx) 44 | 45 | fun ref _checklist_write() => 46 | match _checklist | let c: DotChecklist => c.write() end 47 | 48 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 49 | false 50 | 51 | fun is_empty(): Bool => 52 | """ 53 | Return true if the data structure contains no information (bottom state). 54 | """ 55 | (_pos.size() == 0) and (_neg.size() == 0) 56 | 57 | fun apply(): A => 58 | """ 59 | Return the current value of the counter (the difference in magnitude). 60 | """ 61 | value() 62 | 63 | fun value(): A => 64 | """ 65 | Return the current value of the counter (the difference in magnitude). 
66 | """ 67 | var sum = A(0) 68 | for v in _pos.values() do sum = sum + v end 69 | for v in _neg.values() do sum = sum - v end 70 | sum 71 | 72 | fun ref _pos_update(id': ID, value': A) => _pos(id') = value' 73 | fun ref _neg_update(id': ID, value': A) => _neg(id') = value' 74 | 75 | fun ref increment[D: PNCounter[A] ref = PNCounter[A]]( 76 | value': A = 1, 77 | delta': D = recover PNCounter[A](0) end) 78 | : D^ => 79 | """ 80 | Increment the counter by the given value. 81 | Accepts and returns a convergent delta-state. 82 | """ 83 | let v' = _pos.upsert(_id, value', {(v: A, value': A): A => v + value' }) 84 | _checklist_write() 85 | delta'._pos_update(_id, v') 86 | consume delta' 87 | 88 | fun ref decrement[D: PNCounter[A] ref = PNCounter[A]]( 89 | value': A = 1, 90 | delta': D = recover PNCounter[A](0) end) 91 | : D^ => 92 | """ 93 | Decrement the counter by the given value. 94 | Accepts and returns a convergent delta-state. 95 | """ 96 | let v' = _neg.upsert(_id, value', {(v: A, value': A): A => v + value' }) 97 | _checklist_write() 98 | delta'._neg_update(_id, v') 99 | consume delta' 100 | 101 | fun ref converge(that: PNCounter[A] box): Bool => 102 | """ 103 | Converge from the given PNCounter into this one. 104 | We converge the positive and negative counters, pairwise. 105 | Returns true if the convergence added new information to the data structure. 106 | """ 107 | var changed = false 108 | for (id, value') in that._pos.pairs() do 109 | // TODO: introduce a stateful upsert in ponyc Map? 110 | if try value' > _pos(id)? else true end then 111 | _pos(id) = value' 112 | changed = true 113 | end 114 | end 115 | for (id, value') in that._neg.pairs() do 116 | // TODO: introduce a stateful upsert in ponyc Map? 117 | if try value' > _neg(id)? else true end then 118 | _neg(id) = value' 119 | changed = true 120 | end 121 | end 122 | changed 123 | 124 | fun string(): String iso^ => 125 | """ 126 | Return a best effort at printing the register. 
If A is Stringable, use 127 | the string representation of the value; otherwise print as a question mark. 128 | """ 129 | iftype A <: Stringable val then 130 | value().string() 131 | else 132 | "?".clone() 133 | end 134 | 135 | fun eq(that: PNCounter[A] box): Bool => value().eq(that.value()) 136 | fun ne(that: PNCounter[A] box): Bool => value().ne(that.value()) 137 | fun lt(that: PNCounter[A] box): Bool => value().lt(that.value()) 138 | fun le(that: PNCounter[A] box): Bool => value().le(that.value()) 139 | fun gt(that: PNCounter[A] box): Bool => value().gt(that.value()) 140 | fun ge(that: PNCounter[A] box): Bool => value().ge(that.value()) 141 | 142 | fun ref from_tokens(that: TokensIterator)? => 143 | """ 144 | Deserialize an instance of this data structure from a stream of tokens. 145 | """ 146 | if that.next[USize]()? != 3 then error end 147 | 148 | _id = that.next[ID]()? 149 | 150 | var pos_count = that.next[USize]()? 151 | if (pos_count % 2) != 0 then error end 152 | pos_count = pos_count / 2 153 | 154 | // TODO: _pos.reserve(pos_count) 155 | while (pos_count = pos_count - 1) > 0 do 156 | _pos.update(that.next[ID]()?, that.next[A]()?) 157 | end 158 | 159 | var neg_count = that.next[USize]()? 160 | if (neg_count % 2) != 0 then error end 161 | neg_count = neg_count / 2 162 | 163 | // TODO: _neg.reserve(neg_count) 164 | while (neg_count = neg_count - 1) > 0 do 165 | _neg.update(that.next[ID]()?, that.next[A]()?) 166 | end 167 | 168 | fun ref each_token(tokens: Tokens) => 169 | """ 170 | Serialize the data structure, capturing each token into the given Tokens. 
171 | """ 172 | tokens.push(USize(3)) 173 | 174 | tokens.push(_id) 175 | 176 | tokens.push(_pos.size() * 2) 177 | for (id, v) in _pos.pairs() do 178 | tokens.push(id) 179 | tokens.push(v) 180 | end 181 | 182 | tokens.push(_neg.size() * 2) 183 | for (id, v) in _neg.pairs() do 184 | tokens.push(id) 185 | tokens.push(v) 186 | end 187 | -------------------------------------------------------------------------------- /crdt/p2_set.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type P2Set[A: (Hashable val & Equatable[A])] is P2HashSet[A, HashEq[A]] 5 | 6 | type P2SetIs[A: Any val] is P2HashSet[A, HashIs[A]] 7 | 8 | class ref P2HashSet[A: Any val, H: HashFunction[A] val] 9 | is (Comparable[P2HashSet[A, H]] & Convergent[P2HashSet[A, H]] & Replicated) 10 | """ 11 | An unordered mutable two-phase set that supports one-time removal. 12 | That is, once an element has been deleted it may never be inserted again. 13 | In other words, the deletion is final, and may not be overridden. 14 | Any attempts to insert an already-deleted element will be silently ignored. 15 | 16 | This data structure is based on two grow-only sets (GSet); one for insertions 17 | and one for deletions. An element is present in the combined logical set if 18 | it is present in only the insertion set (not in the deletion set). 19 | 20 | Because the set is composed of two grow-only sets that are eventually 21 | consistent when converged, the overall result is also eventually consistent. 22 | 23 | All mutator methods accept and return a convergent delta-state. 
24 | """ 25 | embed _ins: HashSet[A, H] = _ins.create() 26 | embed _del: HashSet[A, H] = _del.create() 27 | let _checklist: (DotChecklist | None) 28 | 29 | new ref create() => 30 | _checklist = None 31 | 32 | new ref _create_in(ctx: DotContext) => 33 | _checklist = DotChecklist(ctx) 34 | 35 | fun ref _checklist_write() => 36 | match _checklist | let c: DotChecklist => c.write() end 37 | 38 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 39 | false 40 | 41 | fun is_empty(): Bool => 42 | """ 43 | Return true if the data structure contains no information (bottom state). 44 | """ 45 | (_ins.size() == 0) and (_del.size() == 0) 46 | 47 | fun ref _ins_set(value: A) => _ins.set(value) 48 | fun ref _del_set(value: A) => _del.set(value) 49 | 50 | fun size(): USize => 51 | """ 52 | Return the number of items in the set. 53 | """ 54 | result().size() 55 | 56 | fun apply(value: val->A): val->A ? => 57 | """ 58 | Return the value if it's in the set, otherwise raise an error. 59 | """ 60 | if _del.contains(value) then error else _ins(value)? end 61 | 62 | fun contains(value: val->A): Bool => 63 | """ 64 | Check whether the set contains the given value. 65 | """ 66 | _ins.contains(value) and not _del.contains(value) 67 | 68 | fun ref clear() => 69 | """ 70 | Remove all elements from the set. 71 | """ 72 | _del.union(_ins.values()) 73 | _ins.clear() // not strictly necessary, but reduces memory footprint 74 | 75 | fun ref set[D: P2HashSet[A, H] ref = P2HashSet[A, H]]( 76 | value: A, 77 | delta: D = recover P2HashSet[A, H] end) 78 | : D^ => 79 | """ 80 | Add a value to the set. 81 | Accepts and returns a convergent delta-state. 
82 | """ 83 | if not _del.contains(value) then 84 | _ins.set(value) 85 | _checklist_write() 86 | delta._ins_set(value) 87 | end 88 | consume delta 89 | 90 | fun ref unset[D: P2HashSet[A, H] ref = P2HashSet[A, H]]( 91 | value: A, 92 | delta: D = recover P2HashSet[A, H] end) 93 | : D^ => 94 | """ 95 | Remove a value from the set. 96 | Accepts and returns a convergent delta-state. 97 | """ 98 | // TODO: Reduce memory footprint by also removing from _ins set? 99 | _ins.unset(value) // not strictly necessary, but reduces memory footprint 100 | _del.set(value) 101 | _checklist_write() 102 | delta._del_set(value) 103 | consume delta 104 | 105 | fun ref union[D: P2HashSet[A, H] ref = P2HashSet[A, H]]( 106 | that: Iterator[A], 107 | delta: D = recover P2HashSet[A, H] end) 108 | : D^ => 109 | """ 110 | Add everything in the given iterator to the set. 111 | Accepts and returns a convergent delta-state. 112 | """ 113 | var delta' = consume delta 114 | for value in that do 115 | delta' = set[D](value, consume delta') 116 | end 117 | _checklist_write() 118 | consume delta' 119 | 120 | fun ref converge(that: P2HashSet[A, H] box): Bool => 121 | """ 122 | Converge from the given P2Set into this one. 123 | For this data type, the convergence is the union of both constituent sets. 124 | Returns true if the convergence added new information to the data structure. 125 | """ 126 | let orig_size = _ins.size() + _del.size() 127 | // TODO: deal with cases where we want _ins to be deleted. 128 | _ins.union(that._ins.values()) 129 | _del.union(that._del.values()) 130 | orig_size != (_ins.size() + _del.size()) 131 | 132 | fun result(): HashSet[A, H] => 133 | """ 134 | Return the elements of the resulting logical set as a single flat set. 135 | Information about specific deletions is discarded, so that the case of a 136 | deleted element is indistinct from that of an element never inserted. 
137 | """ 138 | _ins.without(_del) 139 | 140 | fun string(): String iso^ => 141 | """ 142 | Return a best effort at printing the set. If A is a Stringable, use the 143 | string representation of each value; otherwise print them as question marks. 144 | """ 145 | let buf = recover String((size() * 3) + 1) end 146 | buf.push('%') 147 | buf.push('{') 148 | var first = true 149 | for value in values() do 150 | if first then first = false else buf .> push(';').push(' ') end 151 | iftype A <: Stringable val then 152 | buf.append(value.string()) 153 | else 154 | buf.push('?') 155 | end 156 | end 157 | buf.push('}') 158 | consume buf 159 | 160 | // TODO: optimize comparison functions: 161 | fun eq(that: P2HashSet[A, H] box): Bool => result().eq(that.result()) 162 | fun ne(that: P2HashSet[A, H] box): Bool => result().ne(that.result()) 163 | fun lt(that: P2HashSet[A, H] box): Bool => result().lt(that.result()) 164 | fun le(that: P2HashSet[A, H] box): Bool => result().le(that.result()) 165 | fun gt(that: P2HashSet[A, H] box): Bool => result().gt(that.result()) 166 | fun ge(that: P2HashSet[A, H] box): Bool => result().ge(that.result()) 167 | fun values(): Iterator[A]^ => result().values() 168 | 169 | fun ref from_tokens(that: TokensIterator)? => 170 | """ 171 | Deserialize an instance of this data structure from a stream of tokens. 172 | """ 173 | if that.next[USize]()? != 2 then error end 174 | 175 | var ins_count = that.next[USize]()? 176 | // TODO: _ins.reserve(ins_count) 177 | while (ins_count = ins_count - 1) > 0 do 178 | _ins.set(that.next[A]()?) 179 | end 180 | 181 | var del_count = that.next[USize]()? 182 | // TODO: _del.reserve(del_count) 183 | while (del_count = del_count - 1) > 0 do 184 | _del.set(that.next[A]()?) 185 | end 186 | 187 | fun ref each_token(tokens: Tokens) => 188 | """ 189 | Serialize the data structure, capturing each token into the given Tokens. 
190 | """ 191 | tokens.push(USize(2)) 192 | 193 | tokens.push(_ins.size()) 194 | for value in _ins.values() do tokens.push(value) end 195 | 196 | tokens.push(_del.size()) 197 | for value in _del.values() do tokens.push(value) end 198 | -------------------------------------------------------------------------------- /crdt/test/test_c_keyspace.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | use "debug" 4 | 5 | class TestCKeyspace is UnitTest 6 | new iso create() => None 7 | fun name(): String => "crdt.CKeyspace" 8 | 9 | fun _compare_history( 10 | h: TestHelper, 11 | l: CKeyspace[String, CCounter], 12 | r: CKeyspace[String, CCounter], 13 | result_l: Bool, 14 | result_r: Bool, 15 | loc: SourceLoc = __loc)? 16 | => 17 | let tokens = Tokens 18 | r.each_token_of_history(tokens) 19 | 20 | (let result_l', let result_r') = 21 | l.compare_history_with_tokens(tokens.iterator())? 22 | 23 | h.assert_eq[Bool](result_l, result_l', "result_l", loc) 24 | h.assert_eq[Bool](result_r, result_r', "result_r", loc) 25 | 26 | fun apply(h: TestHelper)? => 27 | let a = CKeyspace[String, CCounter]("a".hash64()) 28 | let b = CKeyspace[String, CCounter]("b".hash64()) 29 | let c = CKeyspace[String, CCounter]("c".hash64()) 30 | 31 | a.at("apple").increment(4) 32 | b.at("apple").decrement(5) 33 | c.at("apple").increment(6) 34 | 35 | b.at("banana").decrement(7) 36 | c.at("currant").increment(8) 37 | 38 | h.assert_true(a.converge(b)) 39 | h.assert_true(a.converge(c)) 40 | h.assert_true(b.converge(a)) 41 | 42 | _compare_history(h, a, b, false, false)? 43 | _compare_history(h, a, c, true, false)? 44 | _compare_history(h, b, a, false, false)? 45 | _compare_history(h, b, c, true, false)? 46 | _compare_history(h, c, a, false, true)? 47 | _compare_history(h, c, b, false, true)? 
48 | 49 | h.assert_false(b.converge(c)) 50 | h.assert_true(c.converge(a)) 51 | h.assert_false(c.converge(b)) 52 | 53 | h.assert_eq[U64](a.at("apple").value(), 5) 54 | h.assert_eq[U64](a.at("banana").value(), -7) 55 | h.assert_eq[U64](a.at("currant").value(), 8) 56 | h.assert_eq[U64](try a("date")?.value() else 0xDEAD end, 0xDEAD) 57 | h.assert_eq[String](a.string(), b.string()) 58 | h.assert_eq[String](b.string(), c.string()) 59 | h.assert_eq[String](c.string(), a.string()) 60 | 61 | // TODO: Find a way to bring remove functionality back? 62 | // a.at("date").increment(9) 63 | // b.remove("date") 64 | // c.remove("currant") 65 | // a.remove("banana") 66 | // b.at("banana").increment(10) 67 | 68 | // h.assert_true(a.converge(b)) 69 | // h.assert_true(a.converge(c)) 70 | // h.assert_true(b.converge(c)) 71 | // h.assert_true(b.converge(a)) 72 | // h.assert_true(c.converge(a)) 73 | // h.assert_false(c.converge(b)) 74 | 75 | // h.assert_eq[U64](a.at("apple").value(), 5) 76 | // h.assert_eq[U64](a.at("banana").value(), 3) 77 | // h.assert_eq[U64](try a("currant")?.value() else 0xDEAD end, 0xDEAD) 78 | // h.assert_eq[U64](a.at("date").value(), 9) 79 | // h.assert_eq[String](a.string(), b.string()) 80 | // h.assert_eq[String](b.string(), c.string()) 81 | // h.assert_eq[String](c.string(), a.string()) 82 | 83 | // a.clear() 84 | 85 | // h.assert_eq[U64](try a("apple")?.value() else 0xDEAD end, 0xDEAD) 86 | // h.assert_eq[U64](try a("banana")?.value() else 0xDEAD end, 0xDEAD) 87 | // h.assert_eq[U64](try a("currant")?.value() else 0xDEAD end, 0xDEAD) 88 | // h.assert_eq[U64](try a("date")?.value() else 0xDEAD end, 0xDEAD) 89 | 90 | class TestCKeyspaceDelta is UnitTest 91 | new iso create() => None 92 | fun name(): String => "crdt.CKeyspace (ẟ)" 93 | 94 | fun apply(h: TestHelper) => 95 | let a = CKeyspace[String, CCounter]("a".hash64()) 96 | let b = CKeyspace[String, CCounter]("b".hash64()) 97 | let c = CKeyspace[String, CCounter]("c".hash64()) 98 | 99 | var a_delta = 
CKeyspace[String, CCounter](0) 100 | var b_delta = CKeyspace[String, CCounter](0) 101 | var c_delta = CKeyspace[String, CCounter](0) 102 | 103 | a.at("apple").increment(4, a_delta.at("apple")) 104 | b.at("apple").decrement(5, b_delta.at("apple")) 105 | c.at("apple").increment(6, c_delta.at("apple")) 106 | 107 | b.at("banana").decrement(7, b_delta.at("banana")) 108 | c.at("currant").increment(8, c_delta.at("currant")) 109 | 110 | h.assert_true(a.converge(b_delta)) 111 | h.assert_true(a.converge(c_delta)) 112 | h.assert_true(b.converge(c_delta)) 113 | h.assert_true(b.converge(a_delta)) 114 | h.assert_true(c.converge(a_delta)) 115 | h.assert_true(c.converge(b_delta)) 116 | 117 | h.assert_eq[U64](a.at("apple").value(), 5) 118 | h.assert_eq[U64](a.at("banana").value(), -7) 119 | h.assert_eq[U64](a.at("currant").value(), 8) 120 | h.assert_eq[U64](try a("date")?.value() else 0xDEAD end, 0xDEAD) 121 | h.assert_eq[String](a.string(), b.string()) 122 | h.assert_eq[String](b.string(), c.string()) 123 | h.assert_eq[String](c.string(), a.string()) 124 | 125 | // TODO: Find a way to bring remove functionality back? 
126 | // a_delta = CKeyspace[String, CCounter](0) 127 | // b_delta = CKeyspace[String, CCounter](0) 128 | // c_delta = CKeyspace[String, CCounter](0) 129 | 130 | // a.at("date").increment(9, a_delta.at("date")) 131 | // b.remove("date", b_delta) 132 | // c.remove("currant", c_delta) 133 | // a.remove("banana", a_delta) 134 | // b.at("banana").increment(10, b_delta.at("banana")) 135 | 136 | // h.assert_true(a.converge(b_delta)) 137 | // h.assert_true(a.converge(c_delta)) 138 | // h.assert_true(b.converge(c_delta)) 139 | // h.assert_true(b.converge(a_delta)) 140 | // h.assert_true(c.converge(a_delta)) 141 | // h.assert_true(c.converge(b_delta)) 142 | 143 | // h.assert_eq[U64](a.at("apple").value(), 5) 144 | // h.assert_eq[U64](a.at("banana").value(), 3) 145 | // h.assert_eq[U64](try a("currant")?.value() else 0xDEAD end, 0xDEAD) 146 | // h.assert_eq[U64](a.at("date").value(), 9) 147 | // h.assert_eq[String](a.string(), b.string()) 148 | // h.assert_eq[String](b.string(), c.string()) 149 | // h.assert_eq[String](c.string(), a.string()) 150 | 151 | // b_delta = CKeyspace[String, CCounter](0) 152 | 153 | // b.clear(b_delta) 154 | 155 | // h.assert_true(a.converge(b_delta)) 156 | // h.assert_false(b.converge(b_delta)) 157 | // h.assert_true(c.converge(b_delta)) 158 | 159 | // h.assert_eq[U64](try a("apple")?.value() else 0xDEAD end, 0xDEAD) 160 | // h.assert_eq[U64](try a("banana")?.value() else 0xDEAD end, 0xDEAD) 161 | // h.assert_eq[U64](try a("currant")?.value() else 0xDEAD end, 0xDEAD) 162 | // h.assert_eq[U64](try a("date")?.value() else 0xDEAD end, 0xDEAD) 163 | 164 | class TestCKeyspaceTokens is UnitTest 165 | new iso create() => None 166 | fun name(): String => "crdt.CKeyspace (tokens)" 167 | 168 | fun apply(h: TestHelper) => 169 | let data = CKeyspace[String, CCounter[U8]]("a".hash64()) 170 | let data' = CKeyspace[String, CCounter[U8]]("b".hash64()) 171 | let data'' = CKeyspace[String, CCounter[U8]]("c".hash64()) 172 | 173 | data.at("apple").increment(4) 174 | 
data'.at("apple").decrement(5) 175 | data''.at("apple").increment(6) 176 | 177 | data.converge(data') 178 | data.converge(data'') 179 | 180 | let tokens = Tokens .> from(data) 181 | _TestTokensWellFormed(h, tokens) 182 | 183 | try 184 | h.assert_eq[String]( 185 | data.string(), 186 | data.create(0) .> from_tokens(tokens.iterator())?.string() 187 | ) 188 | else 189 | h.fail("failed to parse token stream") 190 | end 191 | -------------------------------------------------------------------------------- /crdt/c_keyspace.pony: -------------------------------------------------------------------------------- 1 | use "collections" 2 | use "_private" 3 | 4 | type CKeyspace[ 5 | K: (Hashable & Equatable[K] val), 6 | V: (Convergent[V] ref & Replicated ref)] 7 | is HashCKeyspace[K, V, HashEq[K]] 8 | 9 | type CKeyspaceIs[ 10 | K: Any val, 11 | V: (Convergent[V] ref & Replicated ref)] 12 | is HashCKeyspace[K, V, HashIs[K]] 13 | 14 | class ref HashCKeyspace[ 15 | K: Any val, 16 | V: (Convergent[V] ref & Replicated ref), 17 | H: HashFunction[K] val] 18 | 19 | let _ctx: DotContext 20 | embed _map: HashMap[K, V, H] 21 | 22 | new ref create(id: ID) => (_ctx, _map) = (_ctx.create(id), _map.create()) 23 | new ref _create_in(ctx': DotContext) => (_ctx, _map) = (ctx', _map.create()) 24 | fun _context(): this->DotContext => _ctx 25 | fun is_empty(): Bool => _map.size() == 0 26 | fun size(): USize => _map.size() 27 | fun keys(): Iterator[this->K]^ => _map.keys() 28 | fun values(): Iterator[this->V]^ => _map.values() 29 | fun pairs(): Iterator[(this->K, this->V)]^ => _map.pairs() 30 | fun apply(k: box->K!): this->V? => _map(k)? 31 | 32 | fun ref at(k: box->K!): V => 33 | // TODO: add an optimized function in Pony's Map for this use case. 34 | try _map(k)? else 35 | let empty = V._create_in(_ctx) 36 | _map(k) = empty 37 | empty 38 | end 39 | 40 | // TODO: Find a way to bring remove functionality back? 
41 | // fun ref remove[D: HashCKeyspace[K, V, H] ref = HashCKeyspace[K, V, H] ref] 42 | // (k: K, delta': D = recover D(0) end): D 43 | // => 44 | // try 45 | // let v = _map.remove(k)?._2 46 | // delta'.at(k).converge(v.clear()) 47 | // end 48 | // consume delta' 49 | 50 | // fun ref clear[D: HashCKeyspace[K, V, H] ref = HashCKeyspace[K, V, H] ref] 51 | // (delta': D = recover D(0) end): D 52 | // => 53 | // for (k, v) in _map.pairs() do 54 | // delta'.at(k).converge(v.clear()) 55 | // end 56 | // _map.clear() 57 | // consume delta' 58 | 59 | fun ref converge(that: HashCKeyspace[K, V, H] box): Bool => 60 | var changed = false 61 | 62 | // Temporarily disable convergence of the shared context. 63 | // Each of the inner CRDTs will try to converge their context, 64 | // but we can't allow this when the context is shared, 65 | // because their converge logic rely on converging the context last. 66 | let converge_disabled = _ctx.set_converge_disabled(true) 67 | 68 | // TODO: Find a way to bring remove functionality back? 69 | // // For each entry that exists only here, and not in that keyspace, 70 | // // converge an imaginary empty instance into our local instance. 71 | // // This is how removals are propagated. 72 | // // TODO: Ouch! This seems very inefficient to do as described in the paper. 73 | // // How can we improve on this model, maybe sacrificing some failure modes? 74 | // for (k, v) in _map.pairs() do 75 | // if not that._map.contains(k) then 76 | // if v._converge_empty_in(that._ctx) then changed = true end 77 | // if v.is_empty() then try _map.remove(k)? end end 78 | // end 79 | // end 80 | 81 | // For each entry in the other map, converge locally. 82 | // If this results in an empty data structure, remove it to save memory. 83 | for (k, v) in that._map.pairs() do 84 | let local = at(k) 85 | if local.converge(v) then changed = true end 86 | if local.is_empty() then try _map.remove(k)? 
end end 87 | end 88 | 89 | // Re-enable converge for the context, then converge it. 90 | _ctx.set_converge_disabled(converge_disabled) 91 | if _ctx.converge(that._ctx) then changed = true end 92 | 93 | changed 94 | 95 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 96 | var changed = false 97 | 98 | // Temporarily disable convergence of the shared context. 99 | // Each of the inner CRDTs will try to converge their context, 100 | // but we can't allow this when the context is shared, 101 | // because their converge logic rely on converging the context last. 102 | let converge_disabled = _ctx.set_converge_disabled(true) 103 | 104 | // For each entry that exists only here, and not in that keyspace, 105 | // converge an imaginary empty instance into our local instance. 106 | // This is how removals are propagated. 107 | // TODO: This seems pretty inefficient... how can we improve on this model? 108 | for (k, v) in _map.pairs() do 109 | if v._converge_empty_in(ctx') then changed = true end 110 | if v.is_empty() then try _map.remove(k)? end end 111 | end 112 | 113 | // Re-enable converge for the context, then converge it. 114 | _ctx.set_converge_disabled(converge_disabled) 115 | if _ctx.converge(ctx') then changed = true end 116 | 117 | changed 118 | 119 | fun string(): String iso^ => 120 | """ 121 | Return a best effort at printing the map. If K and V are Stringable, use 122 | string representations of them; otherwise print them as question marks. 
123 | """ 124 | var buf = recover String((size() * 8) + 1) end 125 | buf.push('%') 126 | buf.push('{') 127 | var first = true 128 | for (k, v) in pairs() do 129 | if first then first = false else buf .> push(';').push(' ') end 130 | iftype K <: Stringable #read then 131 | buf.append(k.string()) 132 | else 133 | buf.push('?') 134 | end 135 | buf .> push(' ') .> push('=') .> push('>') .> push(' ') 136 | iftype V <: Stringable #read then 137 | buf.append(v.string()) 138 | else 139 | buf.push('?') 140 | end 141 | end 142 | buf.push('}') 143 | consume buf 144 | 145 | fun ref from_tokens(that: TokensIterator)? => 146 | """ 147 | Deserialize an instance of this data structure from a stream of tokens. 148 | """ 149 | if that.next[USize]()? != 2 then error end 150 | 151 | _ctx.from_tokens(that)? 152 | 153 | let converge_disabled = _ctx.set_converge_disabled(true) 154 | 155 | var count = that.next[USize]()? 156 | if (count % 2) != 0 then error end 157 | count = count / 2 158 | 159 | // TODO: _map.reserve(count) 160 | while (count = count - 1) > 0 do 161 | _map.update(that.next[K]()?, V._create_in(_ctx) .> from_tokens(that)?) 162 | end 163 | 164 | _ctx.set_converge_disabled(converge_disabled) 165 | 166 | fun ref each_token(tokens: Tokens) => 167 | """ 168 | Serialize the data structure, capturing each token into the given Tokens. 169 | """ 170 | tokens.push(USize(2)) 171 | 172 | _ctx.each_token(tokens) 173 | 174 | let converge_disabled = _ctx.set_converge_disabled(true) 175 | 176 | tokens.push(_map.size() * 2) 177 | for (k, v) in _map.pairs() do 178 | tokens.push(k) 179 | v.each_token(tokens) 180 | end 181 | 182 | _ctx.set_converge_disabled(converge_disabled) 183 | 184 | fun each_token_of_history(tokens: Tokens) => 185 | """ 186 | Serialize the causal history, capturing each token into the given Tokens. 187 | """ 188 | _ctx.each_token(tokens) 189 | 190 | fun compare_history_with_tokens(that: TokensIterator): (Bool, Bool)? 
=> 191 | """ 192 | Compare the causal context with that represented by the given token stream. 193 | Raises an error if the tokens couldn't be parsed as a causal context. 194 | Returns two boolean values, representing differences that are present. 195 | The first return value is true if this context has dots missing in that one. 196 | The other return value is true if that context has dots missing in this one. 197 | """ 198 | // TODO: consider comparing without allocating and deserializing the tokens. 199 | _ctx.compare(DotContext(0) .> from_tokens(that)?) 200 | -------------------------------------------------------------------------------- /crdt/rwor_set.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type RWORSet[A: (Hashable val & Equatable[A])] 5 | is RWORHashSet[A, HashEq[A]] 6 | 7 | type RWORSetIs[A: (Hashable val & Equatable[A])] 8 | is RWORHashSet[A, HashIs[A]] 9 | 10 | class ref RWORHashSet[A: Equatable[A] val, H: HashFunction[A] val] 11 | is (Comparable[RWORHashSet[A, H]] & Causal[RWORHashSet[A, H]]) 12 | """ 13 | An unordered mutable set that supports removing locally visible elements 14 | ("observed remove") using per-replica sequence numbers to track causality. 15 | 16 | In the case where an insertion and a deletion for the same element have 17 | no causal relationship (they happened concurrently on differen replicas), 18 | the deletion will override the insertion ("remove wins"). For a similar data 19 | structure with the opposite bias, see the "add wins" variant (AWORSet). 20 | 21 | This data structure delegates causality tracking to the reusable "dot kernel" 22 | abstraction. Because that abstraction provides an eventually-consistent set 23 | of replica-associated values, and this data structure uses a commutative 24 | strategy for reading out the values, the result is eventually consistent. 
25 | 26 | All mutator methods accept and return a convergent delta-state. 27 | """ 28 | embed _kernel: DotKernel[(A, Bool)] 29 | 30 | new ref create(id: ID) => 31 | """ 32 | Instantiate under the given unique replica id. 33 | """ 34 | _kernel = _kernel.create(id) 35 | 36 | new ref _create_in(ctx': DotContext) => 37 | _kernel = _kernel.create_in(ctx') 38 | 39 | fun _context(): this->DotContext => 40 | _kernel.context() 41 | 42 | fun is_empty(): Bool => 43 | """ 44 | Return true if there are no values ever recorded from any replica. 45 | This is true both at creation, after calling the clear method, 46 | or after a converge that results in all values being cleared. 47 | """ 48 | _kernel.is_empty() 49 | 50 | fun result(): HashSet[A, H] => 51 | """ 52 | Return the elements of the resulting logical set as a single flat set. 53 | Information about specific deletions is discarded, so that the case of a 54 | deleted element is indistinct from that of an element never inserted. 55 | """ 56 | // For each distinct value in the dot kernel, check the insert/delete tokens 57 | // to calculate a final boolean token, with deletes shadowing insertions. 58 | let tokens = HashMap[A, Bool, H] 59 | for (value, is_insert) in _kernel.values() do 60 | tokens(value) = is_insert and try tokens(value)? else true end 61 | end 62 | 63 | // Read the merged tokens' values into the output, counting only insertions. 64 | let out = HashSet[A, H] 65 | for (value, is_insert) in tokens.pairs() do 66 | if is_insert then out.set(value) end 67 | end 68 | 69 | out 70 | 71 | fun size(): USize => 72 | """ 73 | Return the number of items in the set. 74 | """ 75 | result().size() 76 | 77 | fun contains(value': A): Bool => 78 | """ 79 | Check whether the set contains the given value. 
80 | """ 81 | var inserted = false 82 | 83 | // For each instance of this value in the dot kernel, take the 84 | // insert/delete tokens into account, with deletions shado 85 | for (value, is_insert) in _kernel.values() do 86 | if value == value' then 87 | if is_insert 88 | then inserted = true 89 | else return false // if we see a deletion, it shadows all insertions 90 | end 91 | end 92 | end 93 | 94 | inserted 95 | 96 | fun ref set[D: RWORHashSet[A, H] ref = RWORHashSet[A, H]]( 97 | value': A, 98 | delta': D = recover RWORHashSet[A, H](0) end) 99 | : D^ => 100 | """ 101 | Add a value to the set. 102 | Accepts and returns a convergent delta-state. 103 | """ 104 | // Clear any locally visible insertions and deletions for this value, 105 | // then add an insertion token (true) for it. 106 | _kernel.remove_value[EqTuple2[A, Bool]]((value', true), delta'._kernel) 107 | _kernel.remove_value[EqTuple2[A, Bool]]((value', false), delta'._kernel) 108 | _kernel.set((value', true), delta'._kernel) 109 | delta' 110 | 111 | fun ref unset[D: RWORHashSet[A, H] ref = RWORHashSet[A, H]]( 112 | value': A, 113 | delta': D = recover RWORHashSet[A, H](0) end) 114 | : D^ => 115 | """ 116 | Remove a value from the set. 117 | Accepts and returns a convergent delta-state. 118 | """ 119 | // Clear any locally visible insertions and deletions for this value, 120 | // then add an deletion token (false) for it. 121 | _kernel.remove_value[EqTuple2[A, Bool]]((value', true), delta'._kernel) 122 | _kernel.remove_value[EqTuple2[A, Bool]]((value', false), delta'._kernel) 123 | _kernel.set((value', false), delta'._kernel) 124 | delta' 125 | 126 | fun ref clear[D: RWORHashSet[A, H] ref = RWORHashSet[A, H]]( 127 | delta': D = recover RWORHashSet[A, H](0) end) 128 | : D^ => 129 | """ 130 | Remove all locally visible elements from the set. 131 | Accepts and returns a convergent delta-state. 
132 | """ 133 | _kernel.remove_all(delta'._kernel) 134 | delta' 135 | 136 | fun ref union[D: RWORHashSet[A, H] ref = RWORHashSet[A, H]]( 137 | that': Iterator[A], 138 | delta': D = recover RWORHashSet[A, H](0) end) 139 | : D^ => 140 | """ 141 | Add everything in the given iterator to the set. 142 | Accepts and returns a convergent delta-state. 143 | """ 144 | for value' in that' do set(value', delta') end 145 | delta' 146 | 147 | fun ref converge(that: RWORHashSet[A, H] box): Bool => 148 | """ 149 | Converge from the given RWORSet into this one. 150 | Returns true if the convergence added new information to the data structure. 151 | """ 152 | _kernel.converge(that._kernel) 153 | 154 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 155 | """ 156 | Optimize for the special case of converging from a peer with an empty map, 157 | taking only their DotContext as an argument for resolving disagreements. 158 | """ 159 | _kernel.converge_empty_in(ctx') 160 | 161 | fun string(): String iso^ => 162 | """ 163 | Return a best effort at printing the set. If A is a Stringable, use the 164 | string representation of each value; otherwise print them as question marks. 
165 | """ 166 | let buf = recover String((size() * 3) + 1) end 167 | buf.push('%') 168 | buf.push('{') 169 | var first = true 170 | for value in values() do 171 | if first then first = false else buf .> push(';').push(' ') end 172 | iftype A <: Stringable then 173 | buf.append(value.string()) 174 | else 175 | buf.push('?') 176 | end 177 | end 178 | buf.push('}') 179 | consume buf 180 | 181 | // TODO: optimize comparison functions: 182 | fun eq(that: RWORHashSet[A, H] box): Bool => result().eq(that.result()) 183 | fun ne(that: RWORHashSet[A, H] box): Bool => result().ne(that.result()) 184 | fun lt(that: RWORHashSet[A, H] box): Bool => result().lt(that.result()) 185 | fun le(that: RWORHashSet[A, H] box): Bool => result().le(that.result()) 186 | fun gt(that: RWORHashSet[A, H] box): Bool => result().gt(that.result()) 187 | fun ge(that: RWORHashSet[A, H] box): Bool => result().ge(that.result()) 188 | fun values(): Iterator[A]^ => result().values() 189 | 190 | fun ref from_tokens(that: TokensIterator)? => 191 | """ 192 | Deserialize an instance of this data structure from a stream of tokens. 193 | """ 194 | _kernel.from_tokens(that)? 195 | 196 | fun ref each_token(tokens: Tokens) => 197 | """ 198 | Serialize the data structure, capturing each token into the given Tokens. 199 | """ 200 | _kernel.each_token(tokens) 201 | -------------------------------------------------------------------------------- /crdt/ujson.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | 3 | class ref UJSON is (Equatable[UJSON] & Causal[UJSON]) 4 | """ 5 | UJSON is a subset of JSON that contains only unordered data structures. 6 | In effect, UJSON data acts like multi-value registers (MVReg) inside 7 | nested observed-remove maps (ORMap), stored in a simple, efficient way. 8 | 9 | Like a multi-value register, concurrent writes to the same key in will appear 10 | as a set of values when those writes converge to being locally visible. 
11 | In serialized UJSON, this is represented with JSON's array notation, which 12 | is available for this semantics because ordered arrays are not supported. 13 | 14 | When nested keys are located under the same multi-value register, they 15 | appear as a nested map, using JSON's object notation. All such keys are 16 | treated as being part of the same map; thus, it is impossible to hold multiple 17 | distinct maps in the same multi-value register, because those maps become 18 | merged into the same map. To keep nested maps separate, they must be nested 19 | under different keys of the outer map rather than under the same key. 20 | 21 | UJSON output can be parsed by any standard JSON parser. It can also accept 22 | any valid JSON string as input (though information may be lost, in accordance 23 | with the constraints in the previous two paragraphs). 24 | 25 | This data structure delegates causality tracking to the reusable "dot kernel" 26 | abstraction. Because that abstraction provides an eventually-consistent set 27 | of replica-associated values, and this data structure uses a commutative 28 | strategy for reading out the values, the result is eventually consistent. 29 | 30 | All mutator methods accept and return a convergent delta-state. 31 | """ 32 | embed _kernel: DotKernel[(Array[String] val, UJSONValue)] 33 | 34 | // TODO: Fix ponyc to allow getting private fields from a lambda in this type, 35 | // then remove this workaround method. 36 | fun _get_kernel(): this->DotKernel[(Array[String] val, UJSONValue)] => _kernel 37 | 38 | new ref create(id: ID) => 39 | """ 40 | Instantiate under the given unique replica id. 41 | """ 42 | _kernel = _kernel.create(id) 43 | 44 | new ref _create_in(ctx': DotContext) => 45 | _kernel = _kernel.create_in(ctx') 46 | 47 | fun _context(): this->DotContext => 48 | _kernel.context() 49 | 50 | fun is_empty(): Bool => 51 | """ 52 | Return true if there are no values ever recorded from any replica. 
53 | This is true both at creation, after calling the clear method, 54 | or after a converge that results in all values being cleared. 55 | """ 56 | _kernel.is_empty() 57 | 58 | fun get(path': Array[String] val = []): UJSONNode => 59 | """ 60 | Get a UJSONNode representing all of the values at or under the given path. 61 | Call UJSONNode.is_void to check for the case of no values for this path. 62 | The result is optimized for printing as JSON with UJSONNode.string. 63 | """ 64 | let builder = _UJSONNodeBuilder(path') 65 | for (path, value) in _kernel.values() do builder.collect(path, value) end 66 | builder.root() 67 | 68 | fun ref put[D: UJSON ref = UJSON]( 69 | path': Array[String] val, 70 | node': UJSONNode, 71 | delta': D = recover UJSON(0) end) 72 | : D^ => 73 | """ 74 | Put a UJSONNode (all the values at and within it) at the given path. 75 | All locally visible values currently at or under that path will be removed. 76 | Accepts and returns a convergent delta-state. 77 | """ 78 | _kernel.remove_value[_UJSONPathEqPrefix]((path', None), delta'._kernel) 79 | node'._flat_each(path', {(path, value)(delta') => 80 | _kernel.set((path, value), delta'._get_kernel()) 81 | }) 82 | delta' 83 | 84 | fun ref update[D: UJSON ref = UJSON]( 85 | path': Array[String] val, 86 | value': UJSONValue, 87 | delta': D = recover UJSON(0) end) 88 | : D^ => 89 | """ 90 | Set a new value for the specified path. 91 | All locally visible values currently at or under that path will be removed. 92 | Accepts and returns a convergent delta-state. 93 | """ 94 | _kernel.remove_value[_UJSONPathEqPrefix]((path', None), delta'._kernel) 95 | _kernel.set((path', value'), delta'._kernel) 96 | delta' 97 | 98 | fun ref clear_at[D: UJSON ref = UJSON]( 99 | path': Array[String] val, 100 | delta': D = recover UJSON(0) end) 101 | : D^ => 102 | """ 103 | Remove all locally visible values currently at or under the given path. 104 | Accepts and returns a convergent delta-state. 
105 | """ 106 | _kernel.remove_value[_UJSONPathEqPrefix]((path', None), delta'._kernel) 107 | delta' 108 | 109 | fun ref insert[D: UJSON ref = UJSON]( 110 | path': Array[String] val, 111 | value': UJSONValue, 112 | delta': D = recover UJSON(0) end) 113 | : D^ => 114 | """ 115 | Add a new value at the specified path. 116 | Any other locally visible values at or under that path will be retained. 117 | Accepts and returns a convergent delta-state. 118 | """ 119 | _kernel.set((path', value'), delta'._kernel) 120 | delta' 121 | 122 | fun ref remove[D: UJSON ref = UJSON]( 123 | path': Array[String] val, 124 | value': UJSONValue, 125 | delta': D = recover UJSON(0) end) 126 | : D^ => 127 | """ 128 | Remove the specified value from the specified path, if it exists there. 129 | If that value is not locally visible at that path, nothing will happen. 130 | Any other locally visible values at or under that path will be retained. 131 | Accepts and returns a convergent delta-state. 132 | """ 133 | _kernel.remove_value[_UJSONEq]((path', value'), delta'._kernel) 134 | delta' 135 | 136 | fun ref clear[D: UJSON ref = UJSON](delta': D = recover UJSON(0) end): D^ => 137 | """ 138 | Remove all locally visible values, across all paths. 139 | Accepts and returns a convergent delta-state. 140 | """ 141 | _kernel.remove_all(delta'._kernel) 142 | delta' 143 | 144 | fun ref converge(that: UJSON box): Bool => 145 | """ 146 | Converge from the given AWORSet into this one. 147 | Returns true if the convergence added new information to the data structure. 148 | """ 149 | _kernel.converge(that._kernel) 150 | 151 | fun ref _converge_empty_in(ctx': DotContext box): Bool => 152 | """ 153 | Optimize for the special case of converging from a peer with an empty map, 154 | taking only their DotContext as an argument for resolving disagreements. 
155 | """ 156 | _kernel.converge_empty_in(ctx') 157 | 158 | fun string(): String iso^ => 159 | """ 160 | Return the values in the data type, printed as a JSON object/set. 161 | """ 162 | get().string() 163 | 164 | fun eq(that: UJSON box): Bool => get() == that.get() 165 | fun ne(that: UJSON box): Bool => not eq(that) 166 | 167 | fun ref from_tokens(that: TokensIterator)? => 168 | """ 169 | Deserialize an instance of this data structure from a stream of tokens. 170 | """ 171 | _kernel.from_tokens_map(that, {(that)? => 172 | var count = that.next[USize]()? 173 | 174 | if count < 1 then error end 175 | count = count - 1 176 | 177 | let path = recover trn Array[String](count) end 178 | while (count = count - 1) > 0 do 179 | path.push(that.next[String]()?) 180 | end 181 | 182 | let value = that.next[UJSONValue]()? 183 | 184 | (consume path, value) 185 | })? 186 | 187 | fun ref each_token(tokens: Tokens) => 188 | """ 189 | Serialize the data structure, capturing each token into the given Tokens. 190 | """ 191 | _kernel.each_token_map(tokens, {(tokens, a) => 192 | (let path, let value) = a 193 | tokens.push(path.size() + 1) 194 | for segment in path.values() do tokens.push(segment) end 195 | tokens.push(value) 196 | }) 197 | -------------------------------------------------------------------------------- /crdt/_private/dot_kernel_single.pony: -------------------------------------------------------------------------------- 1 | use ".." 2 | use "collections" 3 | 4 | class ref DotKernelSingle[A: Any val] is Replicated 5 | """ 6 | This class is a reusable abstraction meant for use inside other CRDTs. 7 | 8 | It is a variant of the DotKernel class which changes the indexing of the 9 | map of active values, such that at most one value per replica is retained. 10 | This simplifies the logic for data structures like CCounter which operate 11 | with this assumption. 12 | 13 | See the docs for the DotKernel class for more information. 
14 | """ 15 | let _ctx: DotContext 16 | embed _map: Map[ID, (U64, A)] 17 | 18 | new create(id': ID) => 19 | """ 20 | Instantiate under the given unique replica id. 21 | 22 | It will only be possible to add dotted values under this replica id, 23 | aside from converging it as external data with the `converge` function. 24 | """ 25 | _ctx = _ctx.create(id') 26 | _map = _map.create() 27 | 28 | new create_in(ctx': DotContext) => 29 | """ 30 | Instantiate under the given DotContext. 31 | """ 32 | _ctx = ctx' 33 | _map = _map.create() 34 | 35 | fun context(): this->DotContext => 36 | """ 37 | Get the underlying DotContext. 38 | """ 39 | _ctx 40 | 41 | fun is_empty(): Bool => 42 | """ 43 | Return true if there are no values ever recorded from any replica. 44 | This is true at creation, after calling the clear method, 45 | or after a converge that results in all values being cleared. 46 | """ 47 | _map.size() == 0 48 | 49 | fun values(): Iterator[A]^ => 50 | """ 51 | Return an iterator over the active values in this kernel. 52 | """ 53 | object is Iterator[A] 54 | let iter: Iterator[(U64, A)] = _map.values() 55 | fun ref has_next(): Bool => iter.has_next() 56 | fun ref next(): A? => iter.next()?._2 57 | end 58 | 59 | fun ref update[D: DotKernelSingle[A] ref = DotKernelSingle[A]]( 60 | value': A, 61 | delta': D = recover DotKernelSingle[A](0) end) 62 | : D^ => 63 | """ 64 | Update the value for this replica in the map of active values. 65 | The next-sequence-numbered dot for this replica will be used, so that the 66 | new value has a happens-after causal relationship with the previous value. 
67 | """ 68 | let dot = _ctx.next_dot() 69 | _map(dot._1) = (dot._2, value') 70 | delta'._map(dot._1) = (dot._2, value') 71 | delta'._ctx.set(dot) 72 | delta' 73 | 74 | fun ref upsert[D: DotKernelSingle[A] ref = DotKernelSingle[A]]( 75 | value': A, 76 | fn': {(A, A): A^} box, 77 | delta': D = recover DotKernelSingle[A](0) end) 78 | : D^ => 79 | """ 80 | Update the value for this replica in the map of active values, 81 | using a function to define the strategy for updating an existing value. 82 | The next-sequence-numbered dot for this replica will be used, so that the 83 | new value has a happens-after causal relationship with the previous value. 84 | """ 85 | let value = try fn'(_map(_ctx.id())?._2, value') else value' end 86 | update(value, delta') 87 | 88 | fun ref remove_value[ 89 | E: EqFn[A] val, 90 | D: DotKernelSingle[A] ref = DotKernelSingle[A]]( 91 | value': A, 92 | delta': D = recover DotKernelSingle[A](0) end) 93 | : D^ => 94 | """ 95 | Remove all dots with this value from the map of active values, using the 96 | given eq_fn for testing equality between pairs of values of type A. 97 | They will be retained in the causal context (if they were already present). 98 | 99 | This removes the dots and associated value while keeping reminders that 100 | we have seen them before, so that we can ignore them if we see them again. 101 | 102 | If the value was not present, this function silently does nothing. 103 | 104 | Accepts and returns a convergent delta-state. 105 | """ 106 | let removables: Array[ID] = [] 107 | for (id', (n, value)) in _map.pairs() do 108 | if E(value', value) then 109 | removables.push(id') 110 | let dot = (id', n) 111 | delta'._ctx.set(dot, false) // wait to compact until the end 112 | end 113 | end 114 | for id' in removables.values() do try _map.remove(id')? 
end end 115 | 116 | delta'._ctx.compact() // now we can compact just once 117 | delta' 118 | 119 | fun ref remove_all[D: DotKernelSingle[A] ref = DotKernelSingle[A]]( 120 | delta': D = recover DotKernelSingle[A](0) end) 121 | : D^ => 122 | """ 123 | Remove all dots currently present in the map of active values. 124 | They will be retained in the causal context. 125 | 126 | This removes the dots and associated values while keeping reminders that 127 | we have seen them before, so that we can ignore them if we see them again. 128 | 129 | Accepts and returns a convergent delta-state. 130 | """ 131 | for (id', (n, value)) in _map.pairs() do 132 | let dot = (id', n) 133 | delta'._ctx.set(dot, false) // wait to compact until the end 134 | end 135 | 136 | _map.clear() 137 | 138 | delta'._ctx.compact() // now we can compact just once 139 | delta' 140 | 141 | fun ref converge(that: DotKernelSingle[A] box): Bool => 142 | """ 143 | Catch up on active values and dot history from that kernel into this one, 144 | using the dot history as a context for understanding for which disagreements 145 | we are out of date, and for which disagreements the other is out of date. 146 | """ 147 | var changed = false 148 | // TODO: more efficient algorithm? 149 | 150 | // Active values that exist only in the other kernel and haven't yet been 151 | // seen in our history of dots should be added to our map of active values. 152 | for (id', (n, value)) in that._map.pairs() do 153 | let dot = (id', n) 154 | if (_map.get_or_else(id', (0, value))._1 < n) and (not _ctx.contains(dot)) then 155 | _map(id') = (n, value) 156 | changed = true 157 | end 158 | end 159 | 160 | // Active values that now exist only in our kernel but were already seen 161 | // by that kernel's history of dots should be removed from our map. 
162 | let removables: Array[ID] = [] 163 | for (id', (n, value)) in _map.pairs() do 164 | let dot = (id', n) 165 | if (not that._map.contains(id')) and that._ctx.contains(dot) then 166 | removables.push(id') 167 | changed = true 168 | end 169 | end 170 | for id' in removables.values() do try _map.remove(id')? end end 171 | 172 | // Finally, catch up on the entire history of dots that the other kernel 173 | // knows about, because we're now caught up on the fruits of that history. 174 | // It's important that we do this as the last step; both this local logic, 175 | // and some broader assumptions regarding sharing contexts rely on the 176 | // fact that the context is converged after the data. 177 | // Note that this call will be a no-op when the context is shared. 178 | if _ctx.converge(that._ctx) then 179 | changed = true 180 | end 181 | 182 | changed 183 | 184 | fun ref converge_empty_in(ctx': DotContext box): Bool => 185 | """ 186 | Optimize for the special case of converging from a peer with an empty map, 187 | taking only their DotContext as an argument for resolving disagreements. 188 | """ 189 | var changed = false 190 | 191 | // Active values that now exist only in our kernel but were already seen 192 | // by that kernel's history of dots should be removed from our map. 193 | let removables: Array[ID] = [] 194 | for (id', (n, value)) in _map.pairs() do 195 | let dot = (id', n) 196 | if ctx'.contains(dot) then 197 | removables.push(id') 198 | changed = true 199 | end 200 | end 201 | for id' in removables.values() do try _map.remove(id')? end end 202 | 203 | // Finally, catch up on the entire history of dots that the other kernel 204 | // knows about, because we're now caught up on the fruits of that history. 205 | // It's important that we do this as the last step; both this local logic, 206 | // and some broader assumptions regarding sharing contexts rely on the 207 | // fact that the context is converged after the data. 
208 | // Note that this call will be a no-op when the context is shared. 209 | if _ctx.converge(ctx') then 210 | changed = true 211 | end 212 | 213 | changed 214 | 215 | fun string(): String iso^ => 216 | """ 217 | Return a best effort at printing the data structure. 218 | This is intended for debugging purposes only. 219 | """ 220 | let out = recover String end 221 | out.append("(DotKernelSingle") 222 | for (id', (n, value)) in _map.pairs() do 223 | let dot = (id', n) 224 | out.>push(';').>push(' ').>push('(') 225 | out.append(dot._1.string()) 226 | out.>push(',').>push(' ') 227 | out.append(dot._2.string()) 228 | out.>push(')').>push(' ').>push('-').>push('>').>push(' ') 229 | iftype A <: Stringable #read 230 | then out.append(value.string()) 231 | else out.push('?') 232 | end 233 | end 234 | out.>push(';').>push(' ') 235 | out.append(_ctx.string()) 236 | out 237 | 238 | fun ref from_tokens(that: TokensIterator)? => 239 | """ 240 | Deserialize an instance of this data structure from a stream of tokens. 241 | """ 242 | if that.next[USize]()? != 2 then error end 243 | 244 | _ctx.from_tokens(that)? 245 | 246 | var count = that.next[USize]()? 247 | if (count % 3) != 0 then error end 248 | count = count / 3 249 | 250 | // TODO: _map.reserve(count) 251 | while (count = count - 1) > 0 do 252 | _map.update( 253 | that.next[ID]()?, 254 | (that.next[U64]()?, that.next[A]()?) 255 | ) 256 | end 257 | 258 | fun ref each_token(tokens: Tokens) => 259 | """ 260 | Serialize the data structure, capturing each token into the given Tokens. 261 | """ 262 | tokens.push(USize(2)) 263 | 264 | _ctx.each_token(tokens) 265 | 266 | tokens.push(_map.size() * 3) 267 | for (i, (n, v)) in _map.pairs() do 268 | tokens.push(i) 269 | tokens.push(n) 270 | tokens.push(v) 271 | end 272 | -------------------------------------------------------------------------------- /crdt/_private/dot_context.pony: -------------------------------------------------------------------------------- 1 | use ".." 
2 | use "collections" 3 | 4 | class ref DotContext is Replicated 5 | """ 6 | This data structure is used internally. 7 | There shouldn't really be a reason to use it outside of that context, 8 | and be aware that if you do, there are unsound patterns of use to avoid. 9 | See the rest of the docstrings in this file for more information. 10 | 11 | Represents the total set of "dots" received so far in known history, 12 | where each dot is a unique replica identifier and a sequence number. 13 | 14 | As a memory optimization, we represent that total set in two structures: 15 | The _complete history and the _dot_cloud. By compacting from _dot_cloud into 16 | the _complete history, we can avoid letting memory grow without bound, 17 | as long as any gaps in history eventually get filled by incoming dots. 18 | 19 | The _complete history represents the range of consecutive sequence numbers 20 | starting with zero that have already been observed for a given replica ID. 21 | Nothing new can be learned about this region of history. 22 | 23 | The _dot_cloud represents the set of arbitrary (ID, N) pairings which have 24 | not yet been absorbed into _complete because they are not consecutive. 25 | When enough dots are accumulated into the _dot_cloud to be consecutive 26 | with the current threshold of _complete history, they can compacted into it. 27 | """ 28 | var _id: ID 29 | embed _complete: Map[ID, U64] 30 | embed _dot_cloud: HashSet[_Dot, _DotHashFn] 31 | var _converge_disabled: Bool = false 32 | 33 | new ref create(id': ID) => 34 | """ 35 | Instantiate under the given unique replica id. 36 | 37 | It will only be possible to add dots values under this replica id, 38 | aside from converging it as external data with the `converge` function. 
39 | """ 40 | _id = id' 41 | _complete = _complete.create() 42 | _dot_cloud = _dot_cloud.create() 43 | 44 | fun clone(): DotContext => 45 | let that = create(_id) 46 | for (k, v) in _complete.pairs() do that._complete(k) = v end 47 | for d in _dot_cloud.values() do that._dot_cloud.set(d) end 48 | that 49 | 50 | fun id(): ID => 51 | """ 52 | Return the replica id used to instantiate this context. 53 | """ 54 | _id 55 | 56 | fun contains(dot: _Dot): Bool => 57 | """ 58 | Test if the given dot has been received yet in this causal history. 59 | """ 60 | (_complete.get_or_else(dot._1, 0) >= dot._2) or _dot_cloud.contains(dot) 61 | 62 | fun ref compact() => 63 | """ 64 | Reduce memory footprint by absorbing as many members of the _dot_cloud set 65 | as possible into the _complete. 66 | 67 | Dots which represent the next sequence number for a known ID are moved into 68 | the _complete by incrementing the sequence number for that ID. Every 69 | missing ID in the _complete is treated as zero, with the next sequence 70 | number expected being one. 71 | 72 | Dots that are already present or outdated in the _complete (those whose 73 | sequence numbers are less than or equal to the known number for the same ID) 74 | are discarded. 75 | 76 | All other dots are kept in the _dot_cloud. 77 | 78 | The compaction operation does not lose any information. 
79 | """ 80 | var keep_compacting = true 81 | while keep_compacting do 82 | keep_compacting = false 83 | 84 | let remove_dots = Array[_Dot] 85 | for dot in _dot_cloud.values() do 86 | (let id', let n) = dot 87 | let n' = _complete.get_or_else(id', 0) 88 | 89 | if n == (n' + 1) then // this dot has the next sequence number 90 | _complete(id') = n 91 | remove_dots.push(dot) 92 | keep_compacting = true 93 | elseif n <= n' then // this dot is present/outdated 94 | remove_dots.push(dot) 95 | end 96 | end 97 | _dot_cloud.remove(remove_dots.values()) 98 | end 99 | 100 | fun ref next_dot(): _Dot => 101 | """ 102 | Update _complete with the next sequence number for the local replica ID, 103 | also returning the resulting dot. 104 | 105 | This is only valid when there are no dots for it in _dot_cloud, so that's 106 | why it can only be used with the id of the local replica. 107 | 108 | WARNING: any `set` calls with `compact_now = false` must be followed 109 | by `compact` before calling this function. 110 | 111 | In the future, we want to consider refactoring this abstraction to make 112 | it more difficult to make a mistake that breaks these assumptions, using 113 | Pony idioms of having the type system prevent you from doing unsafe actions. 114 | """ 115 | // TODO: consider the refactor mentioned in the docstring. 116 | let n = _complete.upsert(_id, 1, {(n', _) => n' + 1 }) 117 | (_id, n) 118 | 119 | fun ref set(dot: _Dot, compact_now: Bool = true) => 120 | """ 121 | Add the given dot into the causal history represented here. 122 | 123 | If compact_now is set to false, auto-compaction will be skipped. 124 | This is useful for optimizing sites where `set` is called many times, but 125 | proceed with care, because operations like `next_dot` depend on compaction; 126 | make sure `compact` is called after any such optimized group of `set` calls. 
127 | """ 128 | _dot_cloud.set(dot) 129 | if compact_now then compact() end 130 | 131 | fun ref set_converge_disabled(value': Bool): Bool => 132 | """ 133 | Set the new value of the _converge_disabled field, returning the old value. 134 | 135 | While _converge_disabled is true, the following methods will be no-ops: 136 | converge, from_tokens, each_token. 137 | 138 | This is used in situations where the context is shared by many instances. 139 | """ 140 | _converge_disabled = value' 141 | 142 | fun ref converge(that: DotContext box): Bool => 143 | """ 144 | Add all dots from that causal history into this one. 145 | 146 | The consecutive ranges in _complete can be updated to the maximum range. 147 | The _dot_cloud can be updated by taking the union of the two sets. 148 | """ 149 | if _converge_disabled then return false end 150 | 151 | var changed = false 152 | 153 | for (id', n) in that._complete.pairs() do 154 | if n > _complete.get_or_else(id', 0) then 155 | _complete(id') = n 156 | changed = true 157 | end 158 | end 159 | 160 | for dot in that._dot_cloud.values() do 161 | if _complete.get_or_else(dot._1, 0) < dot._2 then 162 | if not _dot_cloud.contains(dot) then 163 | _dot_cloud.set(dot) 164 | changed = true 165 | end 166 | end 167 | end 168 | 169 | _dot_cloud.union(that._dot_cloud.values()) 170 | compact() 171 | changed 172 | 173 | fun compare(that: DotContext box): (Bool, Bool) => 174 | """ 175 | Compare the dots in this causal context with those in the other one. 176 | Returns two boolean values, representing differences that are present. 177 | The first return value is true if this context has dots missing in that one. 178 | The other return value is true if that context has dots missing in this one. 
179 | """ 180 | (_compare_unidir(this, that), _compare_unidir(that, this)) 181 | 182 | fun _compare_unidir(x: DotContext box, y: DotContext box): Bool => 183 | for (id', n) in x._complete.pairs() do 184 | if n > y._complete.get_or_else(id', 0) then 185 | return true 186 | end 187 | end 188 | 189 | for dot in x._dot_cloud.values() do 190 | if y._complete.get_or_else(dot._1, 0) < dot._2 then 191 | if not y._dot_cloud.contains(dot) then 192 | return true 193 | end 194 | end 195 | end 196 | 197 | false 198 | 199 | fun string(): String iso^ => 200 | """ 201 | Return a best effort at printing the data structure. 202 | This is intended for debugging purposes only. 203 | """ 204 | let out = recover String end 205 | out.append("(DotContext") 206 | for (id', n) in _complete.pairs() do 207 | out.>push(';').>push(' ') 208 | out.append(id'.string()) 209 | out.>push(' ').>push('<').>push('=').>push(' ') 210 | out.append(n.string()) 211 | end 212 | for (id', n) in _dot_cloud.values() do 213 | out.>push(';').>push(' ') 214 | out.append(id'.string()) 215 | out.>push(' ').>push('=').>push('=').>push(' ') 216 | out.append(n.string()) 217 | end 218 | out.push(')') 219 | out 220 | 221 | fun ref from_tokens(that: TokensIterator)? => 222 | """ 223 | Deserialize an instance of this data structure from a stream of tokens. 224 | """ 225 | if _converge_disabled then 226 | if that.next[USize]()? != 0 then error end 227 | return 228 | end 229 | 230 | if that.next[USize]()? != 3 then error end 231 | 232 | _id = that.next[ID]()? 233 | 234 | var complete_count = that.next[USize]()? 235 | if (complete_count % 2) != 0 then error end 236 | complete_count = complete_count / 2 237 | 238 | // TODO: _complete.reserve(complete_count) 239 | while (complete_count = complete_count - 1) > 0 do 240 | _complete.update( 241 | that.next[ID]()?, 242 | that.next[U64]()? 243 | ) 244 | end 245 | 246 | var dot_cloud_count = that.next[USize]()? 
247 | if (dot_cloud_count % 2) != 0 then error end 248 | dot_cloud_count = dot_cloud_count / 2 249 | 250 | // TODO: _dot_cloud.reserve(dot_cloud_count) 251 | while (dot_cloud_count = dot_cloud_count - 1) > 0 do 252 | _dot_cloud.set( 253 | (that.next[ID]()?, that.next[U64]()?) 254 | ) 255 | end 256 | 257 | fun each_token(tokens: Tokens) => 258 | """ 259 | Serialize the data structure, capturing each token into the given Tokens. 260 | """ 261 | if _converge_disabled then 262 | tokens.push(USize(0)) 263 | return 264 | end 265 | 266 | tokens.push(USize(3)) 267 | 268 | tokens.push(_id) 269 | 270 | tokens.push(_complete.size() * 2) 271 | for (id', n) in _complete.pairs() do 272 | tokens.push(id') 273 | tokens.push(n) 274 | end 275 | 276 | tokens.push(_dot_cloud.size() * 2) 277 | for (id', n) in _dot_cloud.values() do 278 | tokens.push(id') 279 | tokens.push(n) 280 | end 281 | -------------------------------------------------------------------------------- /crdt/test/test_ujson.pony: -------------------------------------------------------------------------------- 1 | use "ponytest" 2 | use ".." 3 | 4 | class TestUJSON is UnitTest 5 | new iso create() => None 6 | fun name(): String => "crdt.UJSON" 7 | 8 | fun apply(h: TestHelper) => 9 | """ 10 | This test implements a few of the examples depicted in the paper: 11 | A Conflict-Free Replicated JSON Datatype 12 | (Martin Kleppmann, Alastair R. Beresford) 13 | https://arxiv.org/abs/1608.03960 14 | 15 | Note that not all of the examples depicted there apply to the UJSON data 16 | type, because UJSON does not handle ordered lists. 17 | 18 | Because those examples don't exercise the full capabilities of UJSON, 19 | we also include some examples of our own after the "figure" examples. 20 | """ 21 | figure_1(h) 22 | figure_2(h) 23 | figure_5(h) 24 | add_wins(h) 25 | 26 | fun figure_1(h: TestHelper) => 27 | """ 28 | Concurrent assignment to the same register by different replicas. 
29 | """ 30 | let p = UJSON("p".hash64()) 31 | let q = UJSON("q".hash64()) 32 | 33 | // TODO: use update sugar after fixing ponyc to allow value' as param name. 34 | p.update(["key"], "A") 35 | 36 | h.assert_false(p.converge(q)) 37 | h.assert_true(q.converge(p)) 38 | 39 | var expected = """{"key":"A"}""" 40 | h.assert_eq[String](p.get().string(), expected) 41 | h.assert_eq[String](q.get().string(), expected) 42 | 43 | p.update(["key"], "B") 44 | q.update(["key"], "C") 45 | 46 | h.assert_true(p.converge(q)) 47 | h.assert_true(q.converge(p)) 48 | 49 | expected = """{"key":["B","C"]}""" 50 | h.assert_eq[String](p.get().string(), expected) 51 | h.assert_eq[String](q.get().string(), expected) 52 | 53 | fun figure_2(h: TestHelper) => 54 | """ 55 | Modifying a nested map while concurrently the entire map is overwritten. 56 | """ 57 | let p = UJSON("p".hash64()) 58 | let q = UJSON("q".hash64()) 59 | 60 | p.update(["colors"; "blue"], "#0000ff") 61 | 62 | h.assert_false(p.converge(q)) 63 | h.assert_true(q.converge(p)) 64 | 65 | var expected = """{"colors":{"blue":"#0000ff"}}""" 66 | h.assert_eq[String](p.get().string(), expected) 67 | h.assert_eq[String](q.get().string(), expected) 68 | 69 | p.update(["colors"; "red"], "#ff0000") 70 | 71 | expected = """{"colors":{"red":"#ff0000","blue":"#0000ff"}}""" 72 | h.assert_eq[String](p.get().string(), expected) 73 | 74 | q.clear_at(["colors"]) 75 | q.update(["colors"; "green"], "#00ff00") 76 | 77 | expected = """{"colors":{"green":"#00ff00"}}""" 78 | h.assert_eq[String](q.get().string(), expected) 79 | 80 | h.assert_true(p.converge(q)) 81 | h.assert_true(q.converge(p)) 82 | 83 | expected = """{"colors":{"red":"#ff0000","green":"#00ff00"}}""" 84 | h.assert_eq[String](p.get().string(), expected) 85 | h.assert_eq[String](q.get().string(), expected) 86 | 87 | fun figure_5(h: TestHelper) => 88 | """ 89 | Concurrently assigning values of different types to the same map key. 
90 | """ 91 | let p = UJSON("p".hash64()) 92 | let q = UJSON("q".hash64()) 93 | 94 | p.update(["a"; "x"], "y") 95 | 96 | var expected = """{"a":{"x":"y"}}""" 97 | h.assert_eq[String](p.get().string(), expected) 98 | 99 | q.update(["a"], "z") 100 | 101 | expected = """{"a":"z"}""" 102 | h.assert_eq[String](q.get().string(), expected) 103 | 104 | h.assert_true(p.converge(q)) 105 | h.assert_true(q.converge(p)) 106 | 107 | expected = """{"a":["z",{"x":"y"}]}""" 108 | h.assert_eq[String](p.get().string(), expected) 109 | h.assert_eq[String](q.get().string(), expected) 110 | 111 | // Add on some tests for accessing and printing nested values. 112 | expected = """["z",{"x":"y"}]""" 113 | h.assert_eq[String](p.get(["a"]).string(), expected) 114 | 115 | expected = """"y"""" 116 | h.assert_eq[String](p.get(["a"; "x"]).string(), expected) 117 | 118 | expected = """""" 119 | h.assert_eq[String](p.get(["a"; "bogus"]).string(), expected) 120 | 121 | fun add_wins(h: TestHelper) => 122 | """ 123 | Concurrent insertion and deletion the same element favors the insertion. 
124 | """ 125 | let p = UJSON("p".hash64()) 126 | let q = UJSON("q".hash64()) 127 | 128 | p.insert(["fruits"], "apple") 129 | 130 | h.assert_false(p.converge(q)) 131 | h.assert_true(q.converge(p)) 132 | 133 | var expected = """{"fruits":"apple"}""" 134 | h.assert_eq[String](p.get().string(), expected) 135 | h.assert_eq[String](q.get().string(), expected) 136 | 137 | p.insert(["fruits"], "dewberry") 138 | p.remove(["fruits"], "dewberry") 139 | q.insert(["fruits"], "dewberry") 140 | 141 | expected = """{"fruits":"apple"}""" 142 | h.assert_eq[String](p.get().string(), expected) 143 | 144 | expected = """{"fruits":["apple","dewberry"]}""" 145 | h.assert_eq[String](q.get().string(), expected) 146 | 147 | h.assert_true(p.converge(q)) 148 | h.assert_true(q.converge(p)) 149 | 150 | expected = """{"fruits":["apple","dewberry"]}""" 151 | h.assert_eq[String](p.get().string(), expected) 152 | h.assert_eq[String](q.get().string(), expected) 153 | 154 | class TestUJSONDelta is UnitTest 155 | new iso create() => None 156 | fun name(): String => "crdt.UJSON (ẟ)" 157 | 158 | fun apply(h: TestHelper) => 159 | """ 160 | See docstring for TestUJSON. 161 | """ 162 | figure_1(h) 163 | figure_2(h) 164 | figure_5(h) 165 | add_wins(h) 166 | 167 | fun figure_1(h: TestHelper) => 168 | """ 169 | Concurrent assignment to the same register by different replicas. 170 | """ 171 | let p = UJSON("p".hash64()) 172 | let q = UJSON("q".hash64()) 173 | 174 | // TODO: use update sugar after fixing ponyc to allow value' as param name. 
175 | var p_delta = p.update(["key"], "A") 176 | 177 | h.assert_false(p.converge(p_delta)) 178 | h.assert_true(q.converge(p_delta)) 179 | 180 | var expected = """{"key":"A"}""" 181 | h.assert_eq[String](p.get().string(), expected) 182 | h.assert_eq[String](q.get().string(), expected) 183 | 184 | p_delta = p.update(["key"], "B") 185 | var q_delta = q.update(["key"], "C") 186 | 187 | h.assert_true(p.converge(q_delta)) 188 | h.assert_true(q.converge(p_delta)) 189 | 190 | expected = """{"key":["B","C"]}""" 191 | h.assert_eq[String](p.get().string(), expected) 192 | h.assert_eq[String](q.get().string(), expected) 193 | 194 | fun figure_2(h: TestHelper) => 195 | """ 196 | Modifying a nested map while concurrently the entire map is overwritten. 197 | """ 198 | let p = UJSON("p".hash64()) 199 | let q = UJSON("q".hash64()) 200 | 201 | var p_delta = p.update(["colors"; "blue"], "#0000ff") 202 | 203 | h.assert_false(p.converge(p_delta)) 204 | h.assert_true(q.converge(p_delta)) 205 | 206 | var expected = """{"colors":{"blue":"#0000ff"}}""" 207 | h.assert_eq[String](p.get().string(), expected) 208 | h.assert_eq[String](q.get().string(), expected) 209 | 210 | p_delta = p.update(["colors"; "red"], "#ff0000") 211 | 212 | expected = """{"colors":{"red":"#ff0000","blue":"#0000ff"}}""" 213 | h.assert_eq[String](p.get().string(), expected) 214 | 215 | var q_delta = q.clear_at(["colors"]) 216 | q_delta = q.update(["colors"; "green"], "#00ff00", q_delta) 217 | 218 | expected = """{"colors":{"green":"#00ff00"}}""" 219 | h.assert_eq[String](q.get().string(), expected) 220 | 221 | h.assert_true(p.converge(q_delta)) 222 | h.assert_true(q.converge(p_delta)) 223 | 224 | expected = """{"colors":{"red":"#ff0000","green":"#00ff00"}}""" 225 | h.assert_eq[String](p.get().string(), expected) 226 | h.assert_eq[String](q.get().string(), expected) 227 | 228 | fun figure_5(h: TestHelper) => 229 | """ 230 | Concurrently assigning values of different types to the same map key. 
231 | """ 232 | let p = UJSON("p".hash64()) 233 | let q = UJSON("q".hash64()) 234 | 235 | var p_delta = p.update(["a"; "x"], "y") 236 | 237 | var expected = """{"a":{"x":"y"}}""" 238 | h.assert_eq[String](p.get().string(), expected) 239 | 240 | var q_delta = q.update(["a"], "z") 241 | 242 | expected = """{"a":"z"}""" 243 | h.assert_eq[String](q.get().string(), expected) 244 | 245 | h.assert_true(p.converge(q_delta)) 246 | h.assert_true(q.converge(p_delta)) 247 | 248 | expected = """{"a":["z",{"x":"y"}]}""" 249 | h.assert_eq[String](p.get().string(), expected) 250 | h.assert_eq[String](q.get().string(), expected) 251 | 252 | // Add on some tests for accessing and printing nested values. 253 | expected = """["z",{"x":"y"}]""" 254 | h.assert_eq[String](p.get(["a"]).string(), expected) 255 | 256 | expected = """"y"""" 257 | h.assert_eq[String](p.get(["a"; "x"]).string(), expected) 258 | 259 | expected = """""" 260 | h.assert_eq[String](p.get(["a"; "bogus"]).string(), expected) 261 | 262 | fun add_wins(h: TestHelper) => 263 | """ 264 | Concurrent insertion and deletion the same element favors the insertion. 
265 | """ 266 | let p = UJSON("p".hash64()) 267 | let q = UJSON("q".hash64()) 268 | 269 | var p_delta = p.insert(["fruits"], "apple") 270 | 271 | h.assert_false(p.converge(p_delta)) 272 | h.assert_true(q.converge(p_delta)) 273 | 274 | var expected = """{"fruits":"apple"}""" 275 | h.assert_eq[String](p.get().string(), expected) 276 | h.assert_eq[String](q.get().string(), expected) 277 | 278 | p_delta = p.insert(["fruits"], "dewberry") 279 | p_delta = p.remove(["fruits"], "dewberry", p_delta) 280 | var q_delta = q.insert(["fruits"], "dewberry") 281 | 282 | expected = """{"fruits":"apple"}""" 283 | h.assert_eq[String](p.get().string(), expected) 284 | 285 | expected = """{"fruits":["apple","dewberry"]}""" 286 | h.assert_eq[String](q.get().string(), expected) 287 | 288 | h.assert_true(p.converge(q_delta)) 289 | h.assert_true(q.converge(p_delta)) 290 | 291 | h.assert_eq[String](p.get().string(), """{"fruits":["apple","dewberry"]}""") 292 | h.assert_eq[String](q.get().string(), """{"fruits":["dewberry","apple"]}""") 293 | 294 | class TestUJSONTokens is UnitTest 295 | new iso create() => None 296 | fun name(): String => "crdt.UJSON (tokens)" 297 | 298 | fun apply(h: TestHelper) => 299 | let data = UJSON("a".hash64()) 300 | let data' = UJSON("b".hash64()) 301 | let data'' = UJSON("c".hash64()) 302 | 303 | data.insert(["fruits"], "apple") 304 | data'.remove(["fruits"], "apple") 305 | data''.insert(["fruits"], "banana") 306 | 307 | data.converge(data') 308 | data.converge(data'') 309 | 310 | let tokens = Tokens .> from(data) 311 | _TestTokensWellFormed(h, tokens) 312 | 313 | try 314 | h.assert_eq[UJSON]( 315 | data, 316 | data.create(0) .> from_tokens(tokens.iterator())? 317 | ) 318 | else 319 | h.fail("failed to parse token stream") 320 | end 321 | -------------------------------------------------------------------------------- /crdt/_private/dot_kernel.pony: -------------------------------------------------------------------------------- 1 | use ".." 
2 | use "collections" 3 | 4 | class ref DotKernel[A: Any val] is Replicated 5 | """ 6 | This class is a reusable abstraction meant for use inside other CRDTs. 7 | 8 | It contains a "dot context", which is used to track a logical remembrance 9 | of all changes we've generated and observed. Each is represented by a "dot", 10 | where the dot is a unique replica identifier and a sequence number. 11 | See docs for the DotContext type for more information on how this works. 12 | 13 | We also maintain a map of "active" values - those we wish to retain for 14 | inclusion in the calculation of the result value, using whatever semantics 15 | are appropriate for that calculation, based on the needs of the outer 16 | data structure that holds this kernel. For example, the CCounter calculates 17 | its result by summing all active values, while the AWORSet calculates its 18 | result by returning the active values as a set. Other data structures may 19 | use more exotic calculations. 20 | 21 | Each active value is associated with a "dot" - a point in causal history 22 | on a particular replica. Because we retain a remembrance of all dots we've 23 | ever seen in the "dot context", we can determine whether data we observe is 24 | new to us or outdated by checking if the dot is already in the dot context. 25 | 26 | Note that because active values are indexed by their dot (and not simply 27 | their replica id) it is possible to retain multiple active values per 28 | replica if the outer data structure doesn't take steps to prevent this. 29 | For some data structures, this is desirable; for others, it isn't. 30 | If you wish to always keep only the latest causal active value per replica, 31 | prefer using the DotKernelSingle class instead of this one. 32 | """ 33 | let _ctx: DotContext 34 | embed _map: HashMap[_Dot, A, _DotHashFn] 35 | 36 | new create(id': ID) => 37 | """ 38 | Instantiate under the given unique replica id. 
39 | 40 | It will only be possible to add dotted values under this replica id, 41 | aside from converging it as external data with the `converge` function. 42 | """ 43 | _ctx = _ctx.create(id') 44 | _map = _map.create() 45 | 46 | new create_in(ctx': DotContext) => 47 | """ 48 | Instantiate under the given DotContext. 49 | """ 50 | _ctx = ctx' 51 | _map = _map.create() 52 | 53 | fun context(): this->DotContext => 54 | """ 55 | Get the underlying DotContext. 56 | """ 57 | _ctx 58 | 59 | fun is_empty(): Bool => 60 | """ 61 | Return true if there are no values ever recorded from any replica. 62 | This is true at creation, after calling the clear method, 63 | or after a converge that results in all values being cleared. 64 | """ 65 | _map.size() == 0 66 | 67 | fun values(): Iterator[A]^ => 68 | """ 69 | Return an iterator over the active values in this kernel. 70 | """ 71 | _map.values() 72 | 73 | fun pairs(): Iterator[(_Dot, A)]^ => 74 | """ 75 | Return an iterator over the active values and their associated dots. 76 | """ 77 | _map.pairs() 78 | 79 | fun ref set[D: DotKernel[A] ref = DotKernel[A]]( 80 | value': A, 81 | delta': D = recover DotKernel[A](0) end) 82 | : D^ => 83 | """ 84 | Add the given value to the map of active values, under this replica id. 85 | The next-sequence-numbered dot for this replica will be used, so that the 86 | new value has a happens-after causal relationship with previous value(s). 87 | """ 88 | let dot = _ctx.next_dot() 89 | _map(dot) = value' 90 | delta'._map(dot) = value' 91 | delta'._ctx.set(dot) 92 | delta' 93 | 94 | fun ref remove_value[E: EqFn[A] val, D: DotKernel[A] ref = DotKernel[A]]( 95 | value': A, 96 | delta': D = recover DotKernel[A](0) end) 97 | : D^ => 98 | """ 99 | Remove all dots with this value from the map of active values, using the 100 | given eq_fn for testing equality between pairs of values of type A. 101 | They will be retained in the causal context (if they were already present). 
102 | 103 | This removes the dots and associated value while keeping reminders that 104 | we have seen them before, so that we can ignore them if we see them again. 105 | 106 | If the value was not present, this function silently does nothing. 107 | 108 | Accepts and returns a convergent delta-state. 109 | """ 110 | let removables: Array[_Dot] = [] 111 | for (dot, value) in _map.pairs() do 112 | if E(value', value) then 113 | removables.push(dot) 114 | delta'._ctx.set(dot, false) // wait to compact until the end 115 | end 116 | end 117 | for dot in removables.values() do try _map.remove(dot)? end end 118 | 119 | delta'._ctx.compact() // now we can compact just once 120 | delta' 121 | 122 | fun ref remove_all[D: DotKernel[A] ref = DotKernel[A]]( 123 | delta': D = recover DotKernel[A](0) end) 124 | : D^ => 125 | """ 126 | Remove all dots currently present in the map of active values. 127 | They will be retained in the causal context. 128 | 129 | This removes the dots and associated values while keeping reminders that 130 | we have seen them before, so that we can ignore them if we see them again. 131 | 132 | Accepts and returns a convergent delta-state. 133 | """ 134 | for dot in _map.keys() do 135 | delta'._ctx.set(dot, false) // wait to compact until the end 136 | end 137 | 138 | _map.clear() 139 | 140 | delta'._ctx.compact() // now we can compact just once 141 | delta' 142 | 143 | fun ref converge(that: DotKernel[A] box): Bool => 144 | """ 145 | Catch up on active values and dot history from that kernel into this one, 146 | using the dot history as a context for understanding for which disagreements 147 | we are out of date, and for which disagreements the other is out of date. 148 | """ 149 | var changed = false 150 | // TODO: more efficient algorithm? 151 | 152 | // Active values that exist only in the other kernel and haven't yet been 153 | // seen in our history of dots should be added to our map of active values. 
154 | for (dot, value) in that.pairs() do 155 | if (not _map.contains(dot)) and (not _ctx.contains(dot)) then 156 | _map(dot) = value 157 | changed = true 158 | end 159 | end 160 | 161 | // Active values that now exist only in our kernel but were already seen 162 | // by that kernel's history of dots should be removed from our map. 163 | let removables: Array[_Dot] = [] 164 | for dot in _map.keys() do 165 | if (not that._map.contains(dot)) and that._ctx.contains(dot) then 166 | removables.push(dot) 167 | changed = true 168 | end 169 | end 170 | for dot' in removables.values() do try _map.remove(dot')? end end 171 | 172 | // Finally, catch up on the entire history of dots that the other kernel 173 | // knows about, because we're now caught up on the fruits of that history. 174 | // It's important that we do this as the last step; both this local logic, 175 | // and some broader assumptions regarding sharing contexts rely on the 176 | // fact that the context is converged after the data. 177 | // Note that this call will be a no-op when the context is shared. 178 | if _ctx.converge(that._ctx) then 179 | changed = true 180 | end 181 | 182 | changed 183 | 184 | fun ref converge_empty_in(ctx': DotContext box): Bool => 185 | """ 186 | Optimize for the special case of converging from a peer with an empty map, 187 | taking only their DotContext as an argument for resolving disagreements. 188 | """ 189 | var changed = false 190 | 191 | // Active values that now exist only in our kernel but were already seen 192 | // by that kernel's history of dots should be removed from our map. 193 | let removables: Array[_Dot] = [] 194 | for dot in _map.keys() do 195 | if ctx'.contains(dot) then 196 | removables.push(dot) 197 | changed = true 198 | end 199 | end 200 | for dot' in removables.values() do try _map.remove(dot')? 
end end 201 | 202 | // Finally, catch up on the entire history of dots that the other kernel 203 | // knows about, because we're now caught up on the fruits of that history. 204 | // It's important that we do this as the last step; both this local logic, 205 | // and some broader assumptions regarding sharing contexts rely on the 206 | // fact that the context is converged after the data. 207 | // Note that this call will be a no-op when the context is shared. 208 | if _ctx.converge(ctx') then 209 | changed = true 210 | end 211 | 212 | changed 213 | 214 | fun string(): String iso^ => 215 | """ 216 | Return a best effort at printing the data structure. 217 | This is intended for debugging purposes only. 218 | """ 219 | let out = recover String end 220 | out.append("(DotKernel") 221 | for (dot, value) in _map.pairs() do 222 | out.>push(';').>push(' ').>push('(') 223 | out.append(dot._1.string()) 224 | out.>push(',').>push(' ') 225 | out.append(dot._2.string()) 226 | out.>push(')').>push(' ').>push('-').>push('>').>push(' ') 227 | iftype A <: Stringable #read 228 | then out.append(value.string()) 229 | else out.push('?') 230 | end 231 | end 232 | out.>push(';').>push(' ') 233 | out.append(_ctx.string()) 234 | out 235 | 236 | fun ref from_tokens(that: TokensIterator) ? => 237 | """ 238 | Deserialize an instance of this data structure from a stream of tokens. 239 | """ 240 | if that.next[USize]()? != 2 then error end 241 | 242 | _ctx.from_tokens(that)? 243 | 244 | var count = that.next[USize]()? 245 | if (count % 3) != 0 then error end 246 | count = count / 3 247 | 248 | // TODO: _map.reserve(count) 249 | while (count = count - 1) > 0 do 250 | _map.update((that.next[ID]()?, that.next[U64]()?), that.next[A]()?) 251 | end 252 | 253 | fun ref from_tokens_map( 254 | that: TokensIterator, 255 | a_fn: {(TokensIterator): A?} val) 256 | ? 
257 | => 258 | """ 259 | Deserialize an instance of this data structure from a stream of tokens, 260 | using a custom function for deserializing the B tokens as instance(s) of A. 261 | """ 262 | if that.next[USize]()? != 2 then error end 263 | 264 | _ctx.from_tokens(that)? 265 | 266 | var count = that.next[USize]()? 267 | if (count % 3) != 0 then error end 268 | count = count / 3 269 | 270 | // TODO: _map.reserve(count) 271 | while (count = count - 1) > 0 do 272 | _map.update((that.next[ID]()?, that.next[U64]()?), a_fn(that)?) 273 | end 274 | 275 | fun ref each_token(tokens: Tokens) => 276 | """ 277 | Serialize the data structure, capturing each token into the given Tokens. 278 | """ 279 | each_token_map(tokens, {(tokens, a) => tokens.push(a) }) 280 | 281 | fun ref each_token_map(tokens: Tokens, a_fn: {(Tokens, A)} val) => 282 | """ 283 | Serialize the data structure, capturing each token into the given Tokens. 284 | using a custom function for serializing the A type as one or more B tokens. 
285 | """ 286 | tokens.push(USize(2)) 287 | 288 | _ctx.each_token(tokens) 289 | 290 | tokens.push(_map.size() * 3) 291 | for ((i, n), v) in _map.pairs() do 292 | tokens.push(i) 293 | tokens.push(n) 294 | a_fn(tokens, v) 295 | end 296 | -------------------------------------------------------------------------------- /crdt/t_set.pony: -------------------------------------------------------------------------------- 1 | use "_private" 2 | use "collections" 3 | 4 | type TSet[ 5 | A: (Hashable val & Equatable[A]), 6 | T: Comparable[T] val = U64, 7 | B: (BiasInsert | BiasDelete) = BiasInsert] 8 | is THashSet[A, T, B, HashEq[A]] 9 | 10 | type TSetIs[ 11 | A: (Hashable val & Equatable[A]), 12 | T: Comparable[T] val = U64, 13 | B: (BiasInsert | BiasDelete) = BiasInsert] 14 | is THashSet[A, T, B, HashIs[A]] 15 | 16 | class ref THashSet[ 17 | A: Any val, 18 | T: Comparable[T] val, 19 | B: (BiasInsert | BiasDelete), 20 | H: HashFunction[A] val] 21 | is 22 | ( Comparable[THashSet[A, T, B, H]] 23 | & Convergent[THashSet[A, T, B, H]] 24 | & Replicated) 25 | """ 26 | A mutable set with last-write-wins semantics for insertion and deletion. 27 | That is, every insertion and deletion operation includes a logical timestamp 28 | (U64 by default, though it may be any Comparable immutable type), and 29 | operations are overridden only by those with a higher logical timestamp. 30 | 31 | This implies that the timestamps must be correct (or at least logically so) 32 | in order for the last-write-wins semantics to hold true. 33 | 34 | This data structure is conceptually composed of two grow-only sets (GSet); 35 | one for insertions and one for deletions. Both sets include the logical 36 | timestamp for each element. An element is present in the combined logical set 37 | if it is present in only the insertion set (not in the deletion set), or if 38 | the logical timestamp of the insertion is higher than that of the deletion. 
39 | 40 | The actual implementation is a bit more memory-optimized than a literal pair 41 | of GSets - it stores the data as a map with the elements as keys and each 42 | value being a 2-tuple with the highest logical timestamp so far and a boolean 43 | indicating whether that timestamp represents an insertion or a deletion. 44 | 45 | If the logical timestamp is equal for two compared operations, the tie will 46 | be broken by the bias type parameter. BiasInsert implies that inserts will 47 | override deletions in a tie, while BiasDelete implies the opposite. 48 | The default bias is BiasInsert. 49 | 50 | Because the set is composed of two grow-only sets that are eventually 51 | consistent when converged, the overall result is also eventually consistent. 52 | The same bias must be used on all replicas for tie results to be consistent. 53 | 54 | All mutator methods accept and return a convergent delta-state. 55 | """ 56 | embed _data: HashMap[A, (T, Bool), H] = _data.create() 57 | let _checklist: (DotChecklist | None) 58 | 59 | new ref create() => 60 | _checklist = None 61 | 62 | new ref _create_in(ctx: DotContext) => 63 | _checklist = DotChecklist(ctx) 64 | 65 | fun ref _checklist_write() => 66 | match _checklist | let c: DotChecklist => c.write() end 67 | 68 | fun ref _converge_empty_in(ctx: DotContext box): Bool => // ignore the context 69 | false 70 | 71 | fun is_empty(): Bool => 72 | """ 73 | Return true if the data structure contains no information (bottom state). 74 | """ 75 | _data.size() == 0 76 | 77 | fun size(): USize => 78 | """ 79 | Return the number of items in the set. 80 | """ 81 | result().size() 82 | 83 | fun apply(value: val->A): T ? => 84 | """ 85 | Return the logical timestamp if it's in the set, otherwise raise an error. 86 | """ 87 | (let timestamp, let present) = _data(value)? 88 | if not present then error end 89 | timestamp 90 | 91 | fun contains(value: val->A): Bool => 92 | """ 93 | Check whether the set contains the given value. 
94 | """ 95 | _data.contains(value) and (try _data(value)? else return false end)._2 96 | 97 | fun ref _set_no_delta(value: A, timestamp: T): Bool => 98 | try 99 | (let current_timestamp, let current_status) = _data(value)? 100 | if timestamp < current_timestamp then return false end 101 | iftype B <: BiasDelete then 102 | if (timestamp == current_timestamp) then return false end 103 | end 104 | if (timestamp == current_timestamp) and (current_status == true) then 105 | return false 106 | end 107 | end 108 | _data(value) = (timestamp, true) 109 | true 110 | 111 | fun ref _unset_no_delta(value: box->A!, timestamp: T): Bool => 112 | try 113 | (let current_timestamp, let current_status) = _data(value)? 114 | if timestamp < current_timestamp then return false end 115 | iftype B <: BiasInsert then 116 | if (timestamp == current_timestamp) then return false end 117 | end 118 | if (timestamp == current_timestamp) and (current_status == false) then 119 | return false 120 | end 121 | end 122 | _data(value) = (timestamp, false) 123 | true 124 | 125 | fun ref clear[D: THashSet[A, T, B, H] ref = THashSet[A, T, B, H]]( 126 | timestamp: T, 127 | delta: D = recover THashSet[A, T, B, H] end) 128 | : D^ => 129 | """ 130 | Remove all elements from the set. 131 | Accepts and returns a convergent delta-state. 132 | """ 133 | // TODO: save memory and have stronger consistency by setting a "cleared" 134 | // timestamp internally, removing all entries older than this timestamp, 135 | // and testing against that timestamp before receiving any new entries. 136 | // This timestamp could also be "raised" in a periodic garbage collection 137 | // to shrink the memory footprint of the state without losing information. 138 | // Note that this timestamp will need to be part of the replicated state. 
139 | // When this feature is added, it should be noted in the docstring for this 140 | // data type that the memory usage is not grow-only, which is a highly 141 | // desirable feature that we want to highlight wherever we can. 142 | for value in _data.keys() do 143 | _unset_no_delta(value, timestamp) 144 | delta._unset_no_delta(value, timestamp) 145 | end 146 | _checklist_write() 147 | consume delta 148 | 149 | fun ref set[D: THashSet[A, T, B, H] ref = THashSet[A, T, B, H]]( 150 | value: A, 151 | timestamp: T, 152 | delta: D = recover THashSet[A, T, B, H] end) 153 | : D^ => 154 | """ 155 | Add a value to the set. 156 | Accepts and returns a convergent delta-state. 157 | """ 158 | _set_no_delta(value, timestamp) 159 | _checklist_write() 160 | delta._set_no_delta(value, timestamp) 161 | consume delta 162 | 163 | fun ref unset[D: THashSet[A, T, B, H] ref = THashSet[A, T, B, H]]( 164 | value: box->A!, 165 | timestamp: T, 166 | delta: D = recover THashSet[A, T, B, H] end) 167 | : D^ => 168 | """ 169 | Remove a value from the set. 170 | Accepts and returns a convergent delta-state. 171 | """ 172 | _unset_no_delta(value, timestamp) 173 | _checklist_write() 174 | delta._unset_no_delta(value, timestamp) 175 | consume delta 176 | 177 | fun ref union[D: THashSet[A, T, B, H] ref = THashSet[A, T, B, H]]( 178 | that: Iterator[(A, T)], 179 | delta: D = recover THashSet[A, T, B, H] end) 180 | : D^ => 181 | """ 182 | Add everything in the given iterator to the set. 183 | Accepts and returns a convergent delta-state. 184 | """ 185 | for (value, timestamp) in that do 186 | _set_no_delta(value, timestamp) 187 | delta._set_no_delta(value, timestamp) 188 | end 189 | _checklist_write() 190 | consume delta 191 | 192 | fun ref converge(that: THashSet[A, T, B, H] box): Bool => 193 | """ 194 | Converge from the given TSet into this one. 195 | For this data type, the convergence is the union of both constituent sets. 
196 | Returns true if the convergence added new information to the data structure. 197 | """ 198 | var changed = false 199 | for (value, (timestamp, present)) in that._data.pairs() do 200 | let this_value_changed = 201 | if present 202 | then _set_no_delta(value, timestamp) 203 | else _unset_no_delta(value, timestamp) 204 | end 205 | changed = changed or this_value_changed 206 | end 207 | changed 208 | 209 | fun result(): HashSet[A, H] => 210 | """ 211 | Return the elements of the resulting logical set as a single flat set. 212 | Information about specific deletions is discarded, so that the case of a 213 | deleted element is indistinct from that of an element never inserted. 214 | """ 215 | var out = HashSet[A, H] 216 | for (value, (timestamp, present)) in _data.pairs() do 217 | if present then out.set(value) end 218 | end 219 | out 220 | 221 | fun map(): HashMap[A, T, H] => 222 | """ 223 | Return the elements of the resulting logical set as a single flat map, with 224 | the elements as keys and logical timestamps of the insertion as timestamps. 225 | Information about specific deletions is discarded, so that the case of a 226 | deleted element is indistinct from that of an element never inserted. 227 | """ 228 | var out = HashMap[A, T, H] 229 | for (value, (timestamp, present)) in _data.pairs() do 230 | if present then out(value) = timestamp end 231 | end 232 | out 233 | 234 | fun string(): String iso^ => 235 | """ 236 | Return a best effort at printing the set. If A is a Stringable box, use the 237 | string representation of each value; otherwise print them as question marks. 
238 | """ 239 | let buf = recover String((size() * 6) + 1) end 240 | buf.push('%') 241 | buf.push('{') 242 | var first = true 243 | for (value, timestamp) in pairs() do 244 | if first then first = false else buf .> push(';').push(' ') end 245 | iftype A <: Stringable val then 246 | buf.append(value.string()) 247 | else 248 | buf.push('?') 249 | end 250 | buf .> push(',').push(' ') 251 | iftype T <: Stringable val then 252 | buf.append(timestamp.string()) 253 | else 254 | buf.push('?') 255 | end 256 | end 257 | buf.push('}') 258 | consume buf 259 | 260 | // TODO: optimize comparison functions: 261 | fun eq(that: THashSet[A, T, B, H] box): Bool => result().eq(that.result()) 262 | fun ne(that: THashSet[A, T, B, H] box): Bool => result().ne(that.result()) 263 | fun lt(that: THashSet[A, T, B, H] box): Bool => result().lt(that.result()) 264 | fun le(that: THashSet[A, T, B, H] box): Bool => result().le(that.result()) 265 | fun gt(that: THashSet[A, T, B, H] box): Bool => result().gt(that.result()) 266 | fun ge(that: THashSet[A, T, B, H] box): Bool => result().ge(that.result()) 267 | fun values(): Iterator[A]^ => result().values() 268 | fun timestamps(): Iterator[T]^ => map().values() 269 | fun pairs(): Iterator[(A, T)]^ => map().pairs() 270 | 271 | fun ref from_tokens(that: TokensIterator)? => 272 | """ 273 | Deserialize an instance of this data structure from a stream of tokens. 274 | """ 275 | var count = that.next[USize]()? 276 | 277 | if (count % 3) != 0 then error end 278 | count = count / 3 279 | 280 | // TODO: _data.reserve(count) 281 | while (count = count - 1) > 0 do 282 | _data.update(that.next[A]()?, (that.next[T]()?, that.next[Bool]()?)) 283 | end 284 | 285 | fun ref each_token(tokens: Tokens) => 286 | """ 287 | Serialize the data structure, capturing each token into the given Tokens. 
288 | """ 289 | tokens.push(_data.size() * 3) 290 | for (k, (t, b)) in _data.pairs() do 291 | tokens.push(k) 292 | tokens.push(t) 293 | tokens.push(b) 294 | end 295 | --------------------------------------------------------------------------------