├── .gitignore ├── LICENSE ├── README.md ├── TODO.txt ├── efficient_ancestor.ML ├── evaluation ├── Dockerfile ├── build.sbt ├── data │ ├── crdt-generated.gnuplot │ ├── crdt-generated.pdf │ ├── crdt-optimised.gnuplot │ ├── crdt-optimised.pdf │ ├── leader-vs-crdt.gnuplot │ ├── leader-vs-crdt.pdf │ ├── logs-crdt-generated │ │ ├── interval_100000_ap_southeast_1.log.gz │ │ ├── interval_100000_eu_west_1.log.gz │ │ ├── interval_100000_us_west_1.log.gz │ │ ├── interval_10000_ap_southeast_1.log.gz │ │ ├── interval_10000_eu_west_1.log.gz │ │ ├── interval_10000_us_west_1.log.gz │ │ ├── interval_13333_ap_southeast_1.log.gz │ │ ├── interval_13333_eu_west_1.log.gz │ │ ├── interval_13333_us_west_1.log.gz │ │ ├── interval_20000_ap_southeast_1.log.gz │ │ ├── interval_20000_eu_west_1.log.gz │ │ ├── interval_20000_us_west_1.log.gz │ │ ├── interval_50000_ap_southeast_1.log.gz │ │ ├── interval_50000_eu_west_1.log.gz │ │ ├── interval_50000_us_west_1.log.gz │ │ ├── interval_5000_ap_southeast_1.log.gz │ │ ├── interval_5000_eu_west_1.log.gz │ │ ├── interval_5000_us_west_1.log.gz │ │ ├── interval_6250_ap_southeast_1.log.gz │ │ ├── interval_6250_eu_west_1.log.gz │ │ ├── interval_6250_us_west_1.log.gz │ │ ├── interval_7700_ap_southeast_1.log.gz │ │ ├── interval_7700_eu_west_1.log.gz │ │ ├── interval_7700_us_west_1.log.gz │ │ └── summary.data │ ├── logs-crdt-optimised │ │ ├── interval_100000_ap_southeast_1.log.gz │ │ ├── interval_100000_eu_west_1.log.gz │ │ ├── interval_100000_us_west_1.log.gz │ │ ├── interval_10000_ap_southeast_1.log.gz │ │ ├── interval_10000_eu_west_1.log.gz │ │ ├── interval_10000_us_west_1.log.gz │ │ ├── interval_1000_ap_southeast_1.log.gz │ │ ├── interval_1000_eu_west_1.log.gz │ │ ├── interval_1000_us_west_1.log.gz │ │ ├── interval_1333_ap_southeast_1.log.gz │ │ ├── interval_1333_eu_west_1.log.gz │ │ ├── interval_1333_us_west_1.log.gz │ │ ├── interval_20000_ap_southeast_1.log.gz │ │ ├── interval_20000_eu_west_1.log.gz │ │ ├── interval_20000_us_west_1.log.gz │ │ ├── interval_2000_ap_southeast_1.log.gz │ │ ├── interval_2000_eu_west_1.log.gz │ │ ├── interval_2000_us_west_1.log.gz │ │ ├── interval_50000_ap_southeast_1.log.gz │ │ ├── interval_50000_eu_west_1.log.gz │ │ ├── interval_50000_us_west_1.log.gz │ │ ├── interval_5000_ap_southeast_1.log.gz │ │ ├── interval_5000_eu_west_1.log.gz │ │ ├── interval_5000_us_west_1.log.gz │ │ ├── interval_500_ap_southeast_1.log.gz │ │ ├── interval_500_eu_west_1.log.gz │ │ ├── interval_500_us_west_1.log.gz │ │ ├── interval_625_ap_southeast_1.log.gz │ │ ├── interval_625_eu_west_1.log.gz │ │ ├── interval_625_us_west_1.log.gz │ │ ├── interval_770_ap_southeast_1.log.gz │ │ ├── interval_770_eu_west_1.log.gz │ │ ├── interval_770_us_west_1.log.gz │ │ └── summary.data │ ├── logs-leader-generated │ │ ├── interval_100000_ap_southeast_1.log.gz │ │ ├── interval_100000_eu_west_1.log.gz │ │ ├── interval_100000_us_west_1.log.gz │ │ ├── interval_10000_ap_southeast_1.log.gz │ │ ├── interval_10000_eu_west_1.log.gz │ │ ├── interval_10000_us_west_1.log.gz │ │ ├── interval_1000_ap_southeast_1.log.gz │ │ ├── interval_1000_eu_west_1.log.gz │ │ ├── interval_1000_us_west_1.log.gz │ │ ├── interval_100_ap_southeast_1.log.gz │ │ ├── interval_100_eu_west_1.log.gz │ │ ├── interval_100_us_west_1.log.gz │ │ ├── interval_20000_ap_southeast_1.log.gz │ │ ├── interval_20000_eu_west_1.log.gz │ │ ├── interval_20000_us_west_1.log.gz │ │ ├── interval_2000_ap_southeast_1.log.gz │ │ ├── interval_2000_eu_west_1.log.gz │ │ ├── interval_2000_us_west_1.log.gz │ │ ├── interval_200_ap_southeast_1.log.gz │ │ ├── 
interval_200_eu_west_1.log.gz │ │ ├── interval_200_us_west_1.log.gz │ │ ├── interval_50000_ap_southeast_1.log.gz │ │ ├── interval_50000_eu_west_1.log.gz │ │ ├── interval_50000_us_west_1.log.gz │ │ ├── interval_5000_ap_southeast_1.log.gz │ │ ├── interval_5000_eu_west_1.log.gz │ │ ├── interval_5000_us_west_1.log.gz │ │ ├── interval_500_ap_southeast_1.log.gz │ │ ├── interval_500_eu_west_1.log.gz │ │ ├── interval_500_us_west_1.log.gz │ │ └── summary.data │ ├── logs-leader-optimised │ │ ├── interval_100000_ap_southeast_1.log.gz │ │ ├── interval_100000_eu_west_1.log.gz │ │ ├── interval_100000_us_west_1.log.gz │ │ ├── interval_10000_ap_southeast_1.log.gz │ │ ├── interval_10000_eu_west_1.log.gz │ │ ├── interval_10000_us_west_1.log.gz │ │ ├── interval_1000_ap_southeast_1.log.gz │ │ ├── interval_1000_eu_west_1.log.gz │ │ ├── interval_1000_us_west_1.log.gz │ │ ├── interval_100_ap_southeast_1.log.gz │ │ ├── interval_100_eu_west_1.log.gz │ │ ├── interval_100_us_west_1.log.gz │ │ ├── interval_20000_ap_southeast_1.log.gz │ │ ├── interval_20000_eu_west_1.log.gz │ │ ├── interval_20000_us_west_1.log.gz │ │ ├── interval_2000_ap_southeast_1.log.gz │ │ ├── interval_2000_eu_west_1.log.gz │ │ ├── interval_2000_us_west_1.log.gz │ │ ├── interval_200_ap_southeast_1.log.gz │ │ ├── interval_200_eu_west_1.log.gz │ │ ├── interval_200_us_west_1.log.gz │ │ ├── interval_50000_ap_southeast_1.log.gz │ │ ├── interval_50000_eu_west_1.log.gz │ │ ├── interval_50000_us_west_1.log.gz │ │ ├── interval_5000_ap_southeast_1.log.gz │ │ ├── interval_5000_eu_west_1.log.gz │ │ ├── interval_5000_us_west_1.log.gz │ │ ├── interval_500_ap_southeast_1.log.gz │ │ ├── interval_500_eu_west_1.log.gz │ │ ├── interval_500_us_west_1.log.gz │ │ ├── interval_50_ap_southeast_1.log.gz │ │ ├── interval_50_eu_west_1.log.gz │ │ ├── interval_50_us_west_1.log.gz │ │ └── summary.data │ ├── process-crdt.awk │ ├── process-crdt.sh │ ├── process-leader.awk │ ├── process-leader.sh │ └── round_trip_times.awk ├── project │ └── build.properties ├── run-all.sh ├── run-test.sh └── src │ └── main │ └── scala │ ├── Main.scala │ ├── Move_Code.scala │ └── TestReplica.scala ├── experiment.sql ├── paper ├── ACM-Reference-Format.bst ├── IEEEtran.bst ├── IEEEtran.cls ├── Makefile ├── acmart.cls ├── crdt-generated.pdf ├── crdt-optimised.pdf ├── gdrive-error.png ├── isabelle.sty ├── isabellesym.sty ├── leader-vs-crdt.pdf ├── list-move.tex ├── move-op.tex ├── photo-alastair.jpg ├── photo-dominic.jpg ├── photo-martin.jpg ├── photo-victor.jpg ├── references.bib ├── review-response.tex ├── review-response2.tex ├── smiley.pdf └── smiley.svg ├── proof ├── Ancestor_LFP_Executable_Code.thy ├── Move.thy ├── Move_Acyclic.thy ├── Move_Code.thy ├── Move_Create.thy ├── Move_SEC.thy ├── ROOT ├── document │ └── root.tex └── proof.pdf └── tests.txt /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | evaluation/project/target 3 | evaluation/target 4 | /paper/comment.cut 5 | /paper/move-op.aux 6 | /paper/move-op.bbl 7 | /paper/move-op.blg 8 | /paper/move-op.log 9 | /paper/move-op.out 10 | /paper/move-op.pdf 11 | /paper/move-op.vtc 12 | /paper/list-move.aux 13 | /paper/list-move.bbl 14 | /paper/list-move.blg 15 | /paper/list-move.log 16 | /paper/list-move.out 17 | /paper/list-move.pdf 18 | /paper/review-response.aux 19 | /paper/review-response.log 20 | /paper/review-response.out 21 | /paper/review-response.pdf 22 | /paper/review-response2.aux 23 | /paper/review-response2.log 24 | /paper/review-response2.out 25 | /paper/review-response2.pdf 26 | 
/output 27 | /ancestor.ML 28 | /generated.SML 29 | /generated.ml 30 | /generated.scala 31 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2019-2021 Martin Kleppmann, Dominic P. Mulligan, Victor B. F. Gomes, 2 | and Alastair R. Beresford 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of 5 | this software and associated documentation files (the "Software"), to deal in 6 | the Software without restriction, including without limitation the rights to 7 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 8 | the Software, and to permit persons to whom the Software is furnished to do so, 9 | subject to the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in all 12 | copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 16 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 17 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 18 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 19 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | A highly-available move operation for replicated trees 2 | ====================================================== 3 | 4 | This repository contains work on move operations in conflict-free replicated data types (CRDTs), 5 | by Martin Kleppmann, Dominic P. Mulligan, Victor B. F. Gomes, and Alastair R. Beresford. 6 | 7 | For background and details, please see these two papers: 8 | 9 | * Martin Kleppmann, Dominic P. Mulligan, Victor B. F. Gomes, and Alastair R. Beresford. 10 | A highly-available move operation for replicated trees. 11 | IEEE Transactions on Parallel and Distributed Systems, 2021. 12 | ([PDF](https://martin.kleppmann.com/papers/move-op.pdf), 13 | [doi:10.1109/TPDS.2021.3118603](https://doi.org/10.1109/TPDS.2021.3118603)) 14 | * Martin Kleppmann. Moving elements in list CRDTs. 7th Workshop on Principles and Practice 15 | of Consistency for Distributed Data ([PaPoC](https://papoc-workshop.github.io/2020/)), April 2020. 16 | ([PDF](https://martin.kleppmann.com/papers/list-move-papoc20.pdf), 17 | [doi:10.1145/3380787.3393677](https://dl.acm.org/doi/abs/10.1145/3380787.3393677), 18 | [workshop presentation](https://martin.kleppmann.com/2020/04/27/papoc-list-move.html)) 19 | 20 | 21 | Proofs 22 | ------ 23 | 24 | The [Isabelle/HOL](http://isabelle.in.tum.de/) formalisation and proof of correctness can be found 25 | in the following files in the `proof` directory: 26 | 27 | * `proof.pdf` contains a PDF rendering of the whole proof. 28 | * `Move.thy` contains the definition of the move algorithm for trees, a proof that a tree node 29 | has at most one parent, and a proof that the move operation is commutative. 30 | * `Move_Acyclic.thy` contains a proof that the tree contains no cycles. 
31 | * `Move_SEC.thy` contains a proof that the algorithm provides Strong Eventual Consistency, 32 | as formalised in [our proof framework](https://dl.acm.org/doi/10.1145/3133933). 33 | * `Move_Code.thy` contains an alternative definition of the algorithm that is efficiently 34 | executable, and a proof that it is equivalent to the earlier, more abstract algorithm. 35 | * `Move_Create.thy` contains a proof that it is safe for node creation operations to use an 36 | optimised code path. 37 | 38 | To check the proofs, [download Isabelle](https://isabelle.in.tum.de/) and install it. 39 | The `Move_SEC` theory depends on the definition of Strong Eventual Consistency in the 40 | [Isabelle Archive of Formal Proofs](https://www.isa-afp.org/entries/CRDT.html). Download a 41 | [release](https://www.isa-afp.org/download.html) of the AFP and 42 | [configure your Isabelle installation to use it](https://www.isa-afp.org/using.html). 43 | 44 | You can either run the Isabelle GUI interactively, or you can run it from the command line. 45 | This is how you run it on macOS (adjust the path to your Isabelle installation): 46 | 47 | /Applications/Isabelle2019.app/Isabelle/bin/isabelle build -D . 48 | 49 | The Isabelle-generated Scala source is written to `Move_Code.scala`. To use it in the evaluation, 50 | copy that file to `evaluation/src/main/scala/`. 51 | 52 | 53 | Papers 54 | ------ 55 | 56 | There are two papers in the `paper` directory: 57 | 58 | * *A highly-available move operation for replicated trees* is in `paper/move-op.tex` 59 | * *Moving elements in list CRDTs* is in `paper/list-move.tex` 60 | 61 | To build PDFs of the papers with LaTeX: 62 | 63 | cd paper 64 | make move-op.pdf list-move.pdf 65 | 66 | 67 | Evaluation 68 | ---------- 69 | 70 | In `evaluation/src/main/scala/TestReplica.scala` there is a simple network server and client 71 | that is used to evaluate the performance of the algorithm. To build it, you need 72 | [sbt](https://www.scala-sbt.org/) installed; then you can run: 73 | 74 | cd evaluation 75 | sbt compile 76 | 77 | **Note:** annoyingly, the Isabelle-generated code contains classes whose names differ only in 78 | case. For this reason, it cannot be compiled and run on a case-insensitive filesystem (macOS, 79 | Windows): the class files generated by the compiler would clash. You need to build it on Linux 80 | instead. For people running other OSes, there is a Docker setup in `evaluation/Dockerfile` 81 | that installs sbt and compiles the source.
After 82 | [installing Docker](https://www.docker.com/get-started) you can run this: 83 | 84 | # Run this in the root directory of this repository, 85 | # not in the `evaluation` directory 86 | docker build -t move-op:latest evaluation 87 | docker run -it --rm move-op /bin/bash 88 | 89 | # Then run this inside the container to watch source files for changes: 90 | cd evaluation && sbt ~compile 91 | 92 | # Edit a file outside of the container, and then copy it into the 93 | # container to compile it, like this: 94 | docker cp evaluation/src/main/scala/TestReplica.scala 10c36574237a:/evaluation/src/main/scala 95 | # Replace 10c36574237a with the running container ID (see `docker ps`) 96 | 97 | To run on AWS, log into the AWS Management Console in 98 | [us-west-1](https://us-west-1.console.aws.amazon.com/console/home?region=us-west-1), 99 | [eu-west-1](https://eu-west-1.console.aws.amazon.com/console/home?region=eu-west-1), and 100 | [ap-southeast-1](https://ap-southeast-1.console.aws.amazon.com/console/home?region=ap-southeast-1) respectively. 101 | In each region, launch a `c5.large` instance running Ubuntu 18.04. 102 | (Hint: using the *request spot instance* feature can be considerably cheaper, 103 | but the user interface is a nightmare.) 104 | 105 | Configure their security groups so that you can log in via SSH (TCP port 22), 106 | and so that the three instances can talk to each other on TCP port 8080. 107 | 108 | Modify the script `evaluation/run-all.sh` to contain the IP addresses of your 109 | instances, and the location of the SSH private keys on your filesystem. 110 | Manually log in to each of the instances as shown in that script, and run the 111 | following one-off setup on each: 112 | 113 | sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y apt-transport-https gnupg wget unzip 114 | echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list 115 | sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823 116 | sudo apt-get update && sudo apt-get install -y openjdk-8-jdk-headless sbt 117 | git clone https://github.com/trvedata/move-op.git && cd move-op/evaluation && sbt compile 118 | 119 | Once that setup is done, you can run the script `evaluation/run-all.sh` from 120 | your local machine to perform a test run on all three instances concurrently. 121 | It takes one argument: the interval between successive move operations generated 122 | on each replica, in microseconds. 123 | 124 | The script logs into the instances by SSH, updates the configuration, runs the 125 | experiment, and copies the logs off the instances into `evaluation/data/logs/*.log.gz`. 126 | Each test run lasts for 10 minutes and then automatically shuts down. This 127 | repository contains the logs from our evaluation in the following directories: 128 | 129 | * `evaluation/data/logs-crdt-generated`: Running in CRDT mode, using the code 130 | extracted from Isabelle. 131 | * `evaluation/data/logs-crdt-optimised`: Running in CRDT mode, using the 132 | hand-optimised (not verified) implementation. 133 | * `evaluation/data/logs-leader-generated`: Running in state machine replication 134 | mode, using the code extracted from Isabelle. 135 | * `evaluation/data/logs-leader-optimised`: Running in state machine replication 136 | mode, using the hand-optimised (not verified) implementation. 
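For example, one data point could be reproduced roughly like this (a sketch only: the 5000 µs interval is just one of the settings used for the committed logs above, and the working directory and script argument are assumptions that may need adjusting to your setup; the processing scripts themselves are described in the next paragraph):

    # From the repository root: a single 10-minute run, generating one move
    # operation every 5000 µs on each of the three replicas
    evaluation/run-all.sh 5000

    # Recompute summary.data for one of the log directories listed above
    cd evaluation/data
    ./process-crdt.sh logs-crdt-generated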
137 | 138 | Those logs are then analysed by the scripts `evaluation/data/process-crdt.sh` 139 | (for mode USE_LEADER=false) and `evaluation/data/process-leader.sh` (for 140 | USE_LEADER=true). Pass a directory name to these scripts and they will write a 141 | file called `summary.data` in that directory. Those data files are then used 142 | to plot the graphs in the paper using Gnuplot. To refresh the graphs: 143 | 144 | gnuplot crdt-generated.gnuplot 145 | gnuplot crdt-optimised.gnuplot 146 | gnuplot leader-vs-crdt.gnuplot 147 | 148 | License 149 | ------- 150 | 151 | This project is made available under the terms of the 152 | [MIT License](https://opensource.org/licenses/MIT). 153 | -------------------------------------------------------------------------------- /TODO.txt: -------------------------------------------------------------------------------- 1 | - http://www.coda.cs.cmu.edu 2 | - https://www.cis.upenn.edu/~bcpierce/unison/ 3 | - Ficus: https://pdos.csail.mit.edu/6.824/papers/ficus.pdf 4 | - Section 6 in https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/tr-2007-75.pdf 5 | 6 | - Bayou 7 | - Norman Ramsey, El Csirmaz, et al. An algebraic approach to file synchronization. ACM SIGSOFT Software Engineering Notes, 26(5):175–185, 2001. 8 | - More references in Vinh Tao's paper 9 | 10 | - Move operation mentioned in: Mehdi Ahmed-Nacer, Pascal Urso, Valter Balegas, and Nuno Preguiça: “Concurrency Control and Awareness Support for Multi-synchronous Collaborative Editing,” at 9th IEEE International Conference on Collaborative Computing, 2013. 11 | -------------------------------------------------------------------------------- /efficient_ancestor.ML: -------------------------------------------------------------------------------- 1 | (* Test that words can handle numbers between 0 and 31 *) 2 | val _ = if 5 <= Word.wordSize then () else raise (Fail ("wordSize less than 5")); 3 | 4 | structure Uint32 : sig 5 | val set_bit : Word32.word -> IntInf.int -> bool -> Word32.word 6 | val shiftl : Word32.word -> IntInf.int -> Word32.word 7 | val shiftr : Word32.word -> IntInf.int -> Word32.word 8 | val shiftr_signed : Word32.word -> IntInf.int -> Word32.word 9 | val test_bit : Word32.word -> IntInf.int -> bool 10 | end = struct 11 | 12 | fun set_bit x n b = 13 | let val mask = Word32.<< (0wx1, Word.fromLargeInt (IntInf.toLarge n)) 14 | in if b then Word32.orb (x, mask) 15 | else Word32.andb (x, Word32.notb mask) 16 | end 17 | 18 | fun shiftl x n = 19 | Word32.<< (x, Word.fromLargeInt (IntInf.toLarge n)) 20 | 21 | fun shiftr x n = 22 | Word32.>> (x, Word.fromLargeInt (IntInf.toLarge n)) 23 | 24 | fun shiftr_signed x n = 25 | Word32.~>> (x, Word.fromLargeInt (IntInf.toLarge n)) 26 | 27 | fun test_bit x n = 28 | Word32.andb (x, Word32.<< (0wx1, Word.fromLargeInt (IntInf.toLarge n))) <> Word32.fromInt 0 29 | 30 | end; (* struct Uint32 *) 31 | 32 | 33 | structure STArray = struct 34 | 35 | datatype 'a Cell = Invalid | Value of 'a array; 36 | 37 | exception AccessedOldVersion; 38 | 39 | type 'a array = 'a Cell Unsynchronized.ref; 40 | 41 | fun fromList l = Unsynchronized.ref (Value (Array.fromList l)); 42 | fun array (size, v) = Unsynchronized.ref (Value (Array.array (size,v))); 43 | fun tabulate (size, f) = Unsynchronized.ref (Value (Array.tabulate(size, f))); 44 | fun sub (Unsynchronized.ref Invalid, idx) = raise AccessedOldVersion | 45 | sub (Unsynchronized.ref (Value a), idx) = Array.sub (a,idx); 46 | fun update (aref,idx,v) = 47 | case aref of 48 | (Unsynchronized.ref Invalid) => 
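(* STArray provides single-threaded (destructive) arrays: every update
   invalidates the reference it was given and returns a fresh one, so any
   later access through a stale reference raises AccessedOldVersion instead
   of silently observing the in-place write. *)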
raise AccessedOldVersion | 49 | (Unsynchronized.ref (Value a)) => ( 50 | aref := Invalid; 51 | Array.update (a,idx,v); 52 | Unsynchronized.ref (Value a) 53 | ); 54 | 55 | fun length (Unsynchronized.ref Invalid) = raise AccessedOldVersion | 56 | length (Unsynchronized.ref (Value a)) = Array.length a 57 | 58 | fun grow (aref, i, x) = case aref of 59 | (Unsynchronized.ref Invalid) => raise AccessedOldVersion | 60 | (Unsynchronized.ref (Value a)) => ( 61 | let val len=Array.length a; 62 | val na = Array.array (len+i,x) 63 | in 64 | aref := Invalid; 65 | Array.copy {src=a, dst=na, di=0}; 66 | Unsynchronized.ref (Value na) 67 | end 68 | ); 69 | 70 | fun shrink (aref, sz) = case aref of 71 | (Unsynchronized.ref Invalid) => raise AccessedOldVersion | 72 | (Unsynchronized.ref (Value a)) => ( 73 | if sz > Array.length a then 74 | raise Size 75 | else ( 76 | aref:=Invalid; 77 | Unsynchronized.ref (Value (Array.tabulate (sz,fn i => Array.sub (a,i)))) 78 | ) 79 | ); 80 | 81 | structure IsabelleMapping = struct 82 | type 'a ArrayType = 'a array; 83 | 84 | fun new_array (a:'a) (n:IntInf.int) = array (IntInf.toInt n, a); 85 | 86 | fun array_length (a:'a ArrayType) = IntInf.fromInt (length a); 87 | 88 | fun array_get (a:'a ArrayType) (i:IntInf.int) = sub (a, IntInf.toInt i); 89 | 90 | fun array_set (a:'a ArrayType) (i:IntInf.int) (e:'a) = update (a, IntInf.toInt i, e); 91 | 92 | fun array_of_list (xs:'a list) = fromList xs; 93 | 94 | fun array_grow (a:'a ArrayType) (i:IntInf.int) (x:'a) = grow (a, IntInf.toInt i, x); 95 | 96 | fun array_shrink (a:'a ArrayType) (sz:IntInf.int) = shrink (a,IntInf.toInt sz); 97 | 98 | end; 99 | 100 | end; 101 | 102 | structure FArray = struct 103 | datatype 'a Cell = Value of 'a Array.array | Upd of (int*'a*'a Cell Unsynchronized.ref); 104 | 105 | type 'a array = 'a Cell Unsynchronized.ref; 106 | 107 | fun array (size,v) = Unsynchronized.ref (Value (Array.array (size,v))); 108 | fun tabulate (size, f) = Unsynchronized.ref (Value (Array.tabulate(size, f))); 109 | fun fromList l = Unsynchronized.ref (Value (Array.fromList l)); 110 | 111 | fun sub (Unsynchronized.ref (Value a), idx) = Array.sub (a,idx) | 112 | sub (Unsynchronized.ref (Upd (i,v,cr)),idx) = 113 | if i=idx then v 114 | else sub (cr,idx); 115 | 116 | fun length (Unsynchronized.ref (Value a)) = Array.length a | 117 | length (Unsynchronized.ref (Upd (i,v,cr))) = length cr; 118 | 119 | fun realize_aux (aref, v) = 120 | case aref of 121 | (Unsynchronized.ref (Value a)) => ( 122 | let 123 | val len = Array.length a; 124 | val a' = Array.array (len,v); 125 | in 126 | Array.copy {src=a, dst=a', di=0}; 127 | Unsynchronized.ref (Value a') 128 | end 129 | ) | 130 | (Unsynchronized.ref (Upd (i,v,cr))) => ( 131 | let val res=realize_aux (cr,v) in 132 | case res of 133 | (Unsynchronized.ref (Value a)) => (Array.update (a,i,v); res) 134 | end 135 | ); 136 | 137 | fun realize aref = 138 | case aref of 139 | (Unsynchronized.ref (Value _)) => aref | 140 | (Unsynchronized.ref (Upd (i,v,cr))) => realize_aux(aref,v); 141 | 142 | fun update (aref,idx,v) = 143 | case aref of 144 | (Unsynchronized.ref (Value a)) => ( 145 | let val nref=Unsynchronized.ref (Value a) in 146 | aref := Upd (idx,Array.sub(a,idx),nref); 147 | Array.update (a,idx,v); 148 | nref 149 | end 150 | ) | 151 | (Unsynchronized.ref (Upd _)) => 152 | let val ra = realize_aux(aref,v) in 153 | case ra of 154 | (Unsynchronized.ref (Value a)) => Array.update (a,idx,v); 155 | ra 156 | end 157 | ; 158 | 159 | fun grow (aref, inc, x) = case aref of 160 | (Unsynchronized.ref 
(Value a)) => ( 161 | let val len=Array.length a; 162 | val na = Array.array (len+inc,x) 163 | in 164 | Array.copy {src=a, dst=na, di=0}; 165 | Unsynchronized.ref (Value na) 166 | end 167 | ) 168 | | (Unsynchronized.ref (Upd _)) => ( 169 | grow (realize aref, inc, x) 170 | ); 171 | 172 | fun shrink (aref, sz) = case aref of 173 | (Unsynchronized.ref (Value a)) => ( 174 | if sz > Array.length a then 175 | raise Size 176 | else ( 177 | Unsynchronized.ref (Value (Array.tabulate (sz,fn i => Array.sub (a,i)))) 178 | ) 179 | ) | 180 | (Unsynchronized.ref (Upd _)) => ( 181 | shrink (realize aref,sz) 182 | ); 183 | 184 | structure IsabelleMapping = struct 185 | type 'a ArrayType = 'a array; 186 | 187 | fun new_array (a:'a) (n:IntInf.int) = array (IntInf.toInt n, a); 188 | 189 | fun array_length (a:'a ArrayType) = IntInf.fromInt (length a); 190 | 191 | fun array_get (a:'a ArrayType) (i:IntInf.int) = sub (a, IntInf.toInt i); 192 | 193 | fun array_set (a:'a ArrayType) (i:IntInf.int) (e:'a) = update (a, IntInf.toInt i, e); 194 | 195 | fun array_of_list (xs:'a list) = fromList xs; 196 | 197 | fun array_grow (a:'a ArrayType) (i:IntInf.int) (x:'a) = grow (a, IntInf.toInt i, x); 198 | 199 | fun array_shrink (a:'a ArrayType) (sz:IntInf.int) = shrink (a,IntInf.toInt sz); 200 | 201 | fun array_get_oo (d:'a) (a:'a ArrayType) (i:IntInf.int) = 202 | sub (a,IntInf.toInt i) handle Subscript => d 203 | 204 | fun array_set_oo (d:(unit->'a ArrayType)) (a:'a ArrayType) (i:IntInf.int) (e:'a) = 205 | update (a, IntInf.toInt i, e) handle Subscript => d () 206 | 207 | end; 208 | end; 209 | 210 | 211 | 212 | 213 | structure Bits_Integer : sig 214 | val set_bit : IntInf.int -> IntInf.int -> bool -> IntInf.int 215 | val shiftl : IntInf.int -> IntInf.int -> IntInf.int 216 | val shiftr : IntInf.int -> IntInf.int -> IntInf.int 217 | val test_bit : IntInf.int -> IntInf.int -> bool 218 | end = struct 219 | 220 | val maxWord = IntInf.pow (2, Word.wordSize); 221 | 222 | fun set_bit x n b = 223 | if n < maxWord then 224 | if b then IntInf.orb (x, IntInf.<< (1, Word.fromLargeInt (IntInf.toLarge n))) 225 | else IntInf.andb (x, IntInf.notb (IntInf.<< (1, Word.fromLargeInt (IntInf.toLarge n)))) 226 | else raise (Fail ("Bit index too large: " ^ IntInf.toString n)); 227 | 228 | fun shiftl x n = 229 | if n < maxWord then IntInf.<< (x, Word.fromLargeInt (IntInf.toLarge n)) 230 | else raise (Fail ("Shift operand too large: " ^ IntInf.toString n)); 231 | 232 | fun shiftr x n = 233 | if n < maxWord then IntInf.~>> (x, Word.fromLargeInt (IntInf.toLarge n)) 234 | else raise (Fail ("Shift operand too large: " ^ IntInf.toString n)); 235 | 236 | fun test_bit x n = 237 | if n < maxWord then IntInf.andb (x, IntInf.<< (1, Word.fromLargeInt (IntInf.toLarge n))) <> 0 238 | else raise (Fail ("Bit index too large: " ^ IntInf.toString n)); 239 | 240 | end; (*struct Bits_Integer*) 241 | 242 | structure HOL : sig 243 | type 'a equal 244 | type 'a itself 245 | val eq : 'a equal -> 'a -> 'a -> bool 246 | end = struct 247 | 248 | type 'a equal = {equal : 'a -> 'a -> bool}; 249 | val equal = #equal : 'a equal -> 'a -> 'a -> bool; 250 | 251 | datatype 'a itself = Type; 252 | 253 | fun eq A_ a b = equal A_ a b; 254 | 255 | end; (*struct HOL*) 256 | 257 | structure Map : sig 258 | val map_of : 'a HOL.equal -> ('a * 'b) list -> 'a -> 'b option 259 | end = struct 260 | 261 | fun map_of A_ ((l, v) :: ps) k = 262 | (if HOL.eq A_ l k then SOME v else map_of A_ ps k) 263 | | map_of A_ [] k = NONE; 264 | 265 | end; (*struct Map*) 266 | 267 | structure Orderings : sig 268 
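(* Isabelle type classes (ord, preorder, order, linorder) exported as plain ML
   records; the Word32 linorder dictionary built in Uint32a below is what the
   red-black-tree lookup uses to compare hash values. *)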
| type 'a ord 269 | val less_eq : 'a ord -> 'a -> 'a -> bool 270 | val less : 'a ord -> 'a -> 'a -> bool 271 | type 'a preorder 272 | val ord_preorder : 'a preorder -> 'a ord 273 | type 'a order 274 | val preorder_order : 'a order -> 'a preorder 275 | type 'a linorder 276 | val order_linorder : 'a linorder -> 'a order 277 | end = struct 278 | 279 | type 'a ord = {less_eq : 'a -> 'a -> bool, less : 'a -> 'a -> bool}; 280 | val less_eq = #less_eq : 'a ord -> 'a -> 'a -> bool; 281 | val less = #less : 'a ord -> 'a -> 'a -> bool; 282 | 283 | type 'a preorder = {ord_preorder : 'a ord}; 284 | val ord_preorder = #ord_preorder : 'a preorder -> 'a ord; 285 | 286 | type 'a order = {preorder_order : 'a preorder}; 287 | val preorder_order = #preorder_order : 'a order -> 'a preorder; 288 | 289 | type 'a linorder = {order_linorder : 'a order}; 290 | val order_linorder = #order_linorder : 'a linorder -> 'a order; 291 | 292 | end; (*struct Orderings*) 293 | 294 | structure RBT_Impl : sig 295 | type color 296 | type ('a, 'b) rbt 297 | val rbt_lookup : 'a Orderings.ord -> ('a, 'b) rbt -> 'a -> 'b option 298 | end = struct 299 | 300 | datatype color = R | B; 301 | 302 | datatype ('a, 'b) rbt = Empty | 303 | Branch of color * ('a, 'b) rbt * 'a * 'b * ('a, 'b) rbt; 304 | 305 | fun rbt_lookup A_ Empty k = NONE 306 | | rbt_lookup A_ (Branch (uu, l, x, y, r)) k = 307 | (if Orderings.less A_ k x then rbt_lookup A_ l k 308 | else (if Orderings.less A_ x k then rbt_lookup A_ r k else SOME y)); 309 | 310 | end; (*struct RBT_Impl*) 311 | 312 | structure RBT : sig 313 | type ('b, 'a) rbt 314 | val lookup : 'a Orderings.linorder -> ('a, 'b) rbt -> 'a -> 'b option 315 | end = struct 316 | 317 | datatype ('b, 'a) rbt = RBT of ('b, 'a) RBT_Impl.rbt; 318 | 319 | fun impl_of B_ (RBT x) = x; 320 | 321 | fun lookup A_ x = 322 | RBT_Impl.rbt_lookup 323 | ((Orderings.ord_preorder o Orderings.preorder_order o 324 | Orderings.order_linorder) 325 | A_) 326 | (impl_of A_ x); 327 | 328 | end; (*struct RBT*) 329 | 330 | structure Arith : sig 331 | type nat 332 | end = struct 333 | 334 | datatype nat = Nat of IntInf.int; 335 | 336 | end; (*struct Arith*) 337 | 338 | structure HashCode : sig 339 | type 'a hashable 340 | val hashcode : 'a hashable -> 'a -> Word32.word 341 | val def_hashmap_size : 'a hashable -> 'a HOL.itself -> Arith.nat 342 | end = struct 343 | 344 | type 'a hashable = 345 | {hashcode : 'a -> Word32.word, def_hashmap_size : 'a HOL.itself -> Arith.nat}; 346 | val hashcode = #hashcode : 'a hashable -> 'a -> Word32.word; 347 | val def_hashmap_size = #def_hashmap_size : 348 | 'a hashable -> 'a HOL.itself -> Arith.nat; 349 | 350 | end; (*struct HashCode*) 351 | 352 | structure Assoc_List : sig 353 | type ('b, 'a) assoc_list 354 | val lookup : 'a HOL.equal -> ('a, 'b) assoc_list -> 'a -> 'b option 355 | end = struct 356 | 357 | datatype ('b, 'a) assoc_list = Assoc_List of ('b * 'a) list; 358 | 359 | fun impl_of (Assoc_List x) = x; 360 | 361 | fun lookup A_ al = Map.map_of A_ (impl_of al); 362 | 363 | end; (*struct Assoc_List*) 364 | 365 | structure Uint32a : sig 366 | val linorder_uint32 : Word32.word Orderings.linorder 367 | end = struct 368 | 369 | val ord_uint32 = 370 | {less_eq = (fn a => fn b => Word32.<= (a, b)), 371 | less = (fn a => fn b => Word32.< (a, b))} 372 | : Word32.word Orderings.ord; 373 | 374 | val preorder_uint32 = {ord_preorder = ord_uint32} : 375 | Word32.word Orderings.preorder; 376 | 377 | val order_uint32 = {preorder_order = preorder_uint32} : 378 | Word32.word Orderings.order; 379 | 380 | val 
linorder_uint32 = {order_linorder = order_uint32} : 381 | Word32.word Orderings.linorder; 382 | 383 | end; (*struct Uint32a*) 384 | 385 | structure HashMap_Impl : sig 386 | val lookup : 387 | 'a HOL.equal * 'a HashCode.hashable -> 388 | 'a -> (Word32.word, ('a, 'b) Assoc_List.assoc_list) RBT.rbt -> 'b option 389 | end = struct 390 | 391 | fun lookup (A1_, A2_) k m = 392 | (case RBT.lookup Uint32a.linorder_uint32 m (HashCode.hashcode A2_ k) 393 | of NONE => NONE | SOME lm => Assoc_List.lookup A1_ lm k); 394 | 395 | end; (*struct HashMap_Impl*) 396 | 397 | structure HashMap : sig 398 | type ('b, 'a) hashmap 399 | val hm_lookup : 400 | 'a HOL.equal * 'a HashCode.hashable -> 'a -> ('a, 'b) hashmap -> 'b option 401 | end = struct 402 | 403 | datatype ('b, 'a) hashmap = 404 | RBT_HM of (Word32.word, ('b, 'a) Assoc_List.assoc_list) RBT.rbt; 405 | 406 | fun impl_of_RBT_HM B_ (RBT_HM x) = x; 407 | 408 | fun hm_lookup (A1_, A2_) k hm = 409 | HashMap_Impl.lookup (A1_, A2_) k (impl_of_RBT_HM A2_ hm); 410 | 411 | end; (*struct HashMap*) 412 | 413 | structure Move : sig 414 | val efficient_ancestor : 415 | 'a HOL.equal * 'a HashCode.hashable -> 416 | ('a, ('b * 'a)) HashMap.hashmap -> 'a -> 'a -> bool 417 | end = struct 418 | 419 | fun efficient_ancestor (A1_, A2_) t p c = 420 | (case HashMap.hm_lookup (A1_, A2_) c t of NONE => false 421 | | SOME a => let 422 | val (_, aa) = a; 423 | in 424 | HOL.eq A1_ aa p orelse efficient_ancestor (A1_, A2_) t p aa 425 | end); 426 | 427 | end; (*struct Move*) 428 | -------------------------------------------------------------------------------- /evaluation/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:latest 2 | 3 | # YourKit setup instructions taken from https://www.yourkit.com/docs/java/help/docker.jsp 4 | RUN set -x \ 5 | && apt-get update \ 6 | && apt-get install -y apt-transport-https gnupg wget unzip \ 7 | && echo 'deb https://dl.bintray.com/sbt/debian /' >> /etc/apt/sources.list.d/sbt.list \ 8 | && apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823 \ 9 | && apt-get update \ 10 | && apt-get install -y openjdk-8-jdk-headless sbt 11 | 12 | # Uncomment the below to enable YourKit profiling 13 | #RUN set -x \ 14 | # && wget -q https://www.yourkit.com/download/docker/YourKit-JavaProfiler-2019.1-docker.zip -P /tmp/ \ 15 | # && unzip /tmp/YourKit-JavaProfiler-2019.1-docker.zip -d /usr/local \ 16 | # && rm /tmp/YourKit-JavaProfiler-2019.1-docker.zip 17 | #EXPOSE 10001 18 | 19 | # To profile inside the Docker container, uncomment the -agentpath option in build.sbt 20 | # and run the container with option `-p 10001:10001` 21 | 22 | # To connect to the port inside the container, start it like this: 23 | # docker run -p 8080:8080 24 | #EXPOSE 8080 25 | 26 | ADD . 
evaluation/ 27 | 28 | RUN set -x \ 29 | && cd evaluation \ 30 | && sbt compile 31 | -------------------------------------------------------------------------------- /evaluation/build.sbt: -------------------------------------------------------------------------------- 1 | scalaVersion := "2.12.8" 2 | name := "move-op" 3 | organization := "com.martinkl.crdts" 4 | version := "1.0" 5 | libraryDependencies += "io.dropwizard.metrics" % "metrics-core" % "4.1.0" 6 | libraryDependencies += "org.slf4j" % "slf4j-simple" % "1.7.26" 7 | 8 | // Uncomment this to enable profiling of application run with 'sbt run' 9 | //javaOptions in run += "-agentpath:/usr/local/YourKit-JavaProfiler-2019.1/bin/linux-x86-64/libyjpagent.so=port=10001,listen=all" 10 | -------------------------------------------------------------------------------- /evaluation/data/crdt-generated.gnuplot: -------------------------------------------------------------------------------- 1 | set encoding utf8 2 | set terminal postscript eps size 3,2.5 enhanced color 3 | set output '| ps2pdf -dEPSCrop - crdt-generated.pdf' 4 | set multiplot layout 2,1 5 | set key off 6 | set style line 1 linewidth 2.5 linecolor rgb '#0077BB' pointsize 1.3 pointtype 1 7 | set style line 2 linewidth 2.5 linecolor rgb '#33BBEE' pointsize 1.3 pointtype 2 8 | 9 | set size 1,0.6 10 | set origin 0,0.4 11 | set xrange [0:620] 12 | set yrange [0:1900] 13 | set ylabel 'Time [µs]' 14 | set lmargin 10 15 | set rmargin 3 16 | set label 'Time to apply remote operation' at 120,1700 17 | plot 'logs-crdt-generated/summary.data' using ($2+$9+$16):($7+$14+$21)/3:($6+$13+$20)/3:($8+$15+$22)/3 with errorlines linestyle 1 18 | 19 | set size 1,0.4 20 | set xrange [0:620] 21 | set yrange [0:100] 22 | set xlabel 'Move operations per second' 23 | set ylabel 'Time [µs]' 24 | set lmargin 10 25 | set rmargin 3 26 | set ytics 0,20 27 | set label 'Time to apply local operation' at 120,80 28 | plot 'logs-crdt-generated/summary.data' using ($2+$9+$16):($4+$11+$18)/3:($3+$10+$17)/3:($5+$12+$19)/3 with errorlines linestyle 2 29 | -------------------------------------------------------------------------------- /evaluation/data/crdt-generated.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/crdt-generated.pdf -------------------------------------------------------------------------------- /evaluation/data/crdt-optimised.gnuplot: -------------------------------------------------------------------------------- 1 | set encoding utf8 2 | set terminal postscript eps size 3,2.5 enhanced color 3 | set output '| ps2pdf -dEPSCrop - crdt-optimised.pdf' 4 | set multiplot layout 2,1 5 | set key off 6 | set style line 3 linewidth 2.5 linecolor rgb '#EE7733' pointsize 1.3 pointtype 5 7 | set style line 4 linewidth 2.5 linecolor rgb '#EE3377' pointsize 1.3 pointtype 9 8 | 9 | set size 1,0.6 10 | set origin 0,0.4 11 | set xrange [0:6200] 12 | set yrange [0:300] 13 | set ylabel 'Time [µs]' 14 | set lmargin 10 15 | set rmargin 3 16 | set label 'Time to apply remote operation' at 1200,270 17 | plot 'logs-crdt-optimised/summary.data' using ($2+$9+$16):($7+$14+$21)/3:($6+$13+$20)/3:($8+$15+$22)/3 with errorlines linestyle 3 18 | 19 | set size 1,0.4 20 | set xrange [0:6200] 21 | set yrange [0:14] 22 | set xlabel 'Move operations per second' 23 | set ylabel 'Time [µs]' 24 | set lmargin 10 25 | set rmargin 3 26 | set label 'Time to apply local operation' at 1200,11 27 | plot 
'logs-crdt-optimised/summary.data' using ($2+$9+$16):($4+$11+$18)/3:($3+$10+$17)/3:($5+$12+$19)/3 with errorlines linestyle 4 28 | -------------------------------------------------------------------------------- /evaluation/data/crdt-optimised.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/crdt-optimised.pdf -------------------------------------------------------------------------------- /evaluation/data/leader-vs-crdt.gnuplot: -------------------------------------------------------------------------------- 1 | set encoding utf8 2 | set terminal postscript eps size 6,2.5 enhanced color 3 | set output '| ps2pdf -dEPSCrop - leader-vs-crdt.pdf' 4 | set key off 5 | 6 | # Colour scheme from https://personal.sron.nl/~pault/data/colourschemes.pdf 7 | set style line 1 linewidth 2.5 linecolor rgb '#0077BB' pointsize 1.3 pointtype 1 8 | set style line 2 linewidth 2.5 linecolor rgb '#33BBEE' pointsize 1.3 pointtype 2 9 | set style line 3 linewidth 2.5 linecolor rgb '#EE7733' pointsize 1.3 pointtype 5 10 | set style line 4 linewidth 2.5 linecolor rgb '#EE3377' pointsize 1.3 pointtype 9 11 | set style line 5 linewidth 2.5 linecolor rgb '#009988' pointsize 1.3 pointtype 6 12 | set style line 6 linewidth 2.5 linecolor rgb '#CC3311' pointsize 1.3 pointtype 4 13 | 14 | set logscale 15 | set xlabel 'Move operations per second' 16 | set ylabel 'Median response time per operation [µs]' 17 | set label "CRDT remote operations\n(Isabelle-generated code)" at 30,3000 textcolor rgb '#0077BB' 18 | set arrow from 80,900 to 100,300 19 | set label "CRDT local operations\n(Isabelle-generated code)" at 300,250 textcolor rgb '#33BBEE' 20 | set arrow from 290,190 to 200,50 21 | set label "CRDT remote operations (optimised)" at 2100,13 textcolor rgb '#EE7733' 22 | set arrow from 2000,13 to 1500,30 23 | set label "CRDT local operations (optimised)" at 2100,5 textcolor rgb '#EE3377' 24 | set arrow from 2000,5 to 1500,2 25 | set label "Ireland to leader in California" at 60,50000 textcolor rgb '#009988' 26 | set arrow from 57,50000 to 42,110000 27 | set label "Singapore to leader in California" at 60,500000 textcolor rgb '#CC3311' 28 | set arrow from 57,500000 to 42,220000 29 | set label "Leader-based operations\n(Isabelle-generated code)" at 3000,50000 30 | set arrow from 12000,70000 to 14000,500000 31 | set label "Leader-based operations\n(optimised code)" at 4000,10000 32 | set arrow from 17000,13000 to 20000,100000 33 | 34 | plot 'logs-crdt-generated/summary.data' using ($2+$9+$16):($7+$14+$21)/3 with linespoints linestyle 1, \ 35 | 'logs-crdt-generated/summary.data' using ($2+$9+$16):($4+$11+$18)/3 with linespoints linestyle 2, \ 36 | 'logs-crdt-optimised/summary.data' using ($2+$9+$16):($7+$14+$21)/3 with linespoints linestyle 3, \ 37 | 'logs-crdt-optimised/summary.data' using ($2+$9+$16):($4+$11+$18)/3 with linespoints linestyle 4, \ 38 | 'logs-leader-generated/summary.data' using 2:5 with linespoints linestyle 5, \ 39 | 'logs-leader-generated/summary.data' using 2:9 with linespoints linestyle 6, \ 40 | 'logs-leader-optimised/summary.data' using 2:5 with linespoints linestyle 5, \ 41 | 'logs-leader-optimised/summary.data' using 2:9 with linespoints linestyle 6 42 | -------------------------------------------------------------------------------- /evaluation/data/leader-vs-crdt.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/leader-vs-crdt.pdf -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_100000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_100000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_100000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_100000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_100000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_100000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_10000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_10000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_10000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_10000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_10000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_10000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_13333_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_13333_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_13333_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_13333_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_13333_us_west_1.log.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_13333_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_20000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_20000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_20000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_20000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_20000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_20000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_50000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_50000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_50000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_50000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_50000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_50000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_5000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_5000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_5000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_5000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_5000_us_west_1.log.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_5000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_6250_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_6250_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_6250_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_6250_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_6250_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_6250_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_7700_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_7700_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_7700_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_7700_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/interval_7700_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-generated/interval_7700_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-generated/summary.data: -------------------------------------------------------------------------------- 1 | 100000 10.00 2.61 53.43 79.16 14.31 119.27 180.59 10.00 5.05 75.64 108.61 13.28 100.61 206.71 10.00 2.12 55.71 78.05 63.23 182.93 260.38 2 | 50000 20.00 1.97 50.33 72.74 12.96 152.48 249.94 20.00 4.19 55.03 80.65 168.89 265.56 311.70 20.00 2.34 46.58 63.53 62.70 160.60 367.87 3 | 20000 50.00 1.29 43.21 57.96 220.31 395.58 593.33 49.99 1.62 48.22 65.41 156.18 383.99 547.44 50.00 1.19 46.90 64.46 140.03 309.18 511.73 4 | 13333 74.99 1.04 40.24 52.11 278.53 504.58 642.33 74.99 3.17 48.05 64.28 231.78 491.99 769.71 74.99 3.34 43.81 57.60 101.31 491.38 600.70 5 | 10000 99.99 1.58 44.66 58.40 319.49 494.41 698.67 99.99 1.27 45.97 61.72 329.85 597.62 753.92 99.99 1.64 45.10 57.77 353.91 812.59 921.65 6 | 7700 129.85 2.30 42.48 58.02 198.22 567.48 1290.57 129.85 1.22 42.48 56.03 49.78 652.08 1308.39 129.86 1.27 
42.82 56.82 48.87 1311.23 1641.85 7 | 6250 159.98 1.91 37.62 46.47 41.58 694.77 1249.08 159.99 1.21 39.25 49.37 247.48 679.82 1186.00 159.98 1.32 40.22 53.42 47.24 1198.91 1688.09 8 | 5000 199.81 0.94 39.30 50.38 10.01 1043.91 2085.78 199.79 1.10 42.87 52.38 352.01 1078.31 2206.95 199.80 1.54 42.67 55.74 658.41 960.83 1094.86 9 | -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_100000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_100000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_100000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_100000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_100000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_100000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_10000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_10000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_10000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_10000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_10000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_10000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1000_eu_west_1.log.gz 
-------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1333_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1333_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1333_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1333_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_1333_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_1333_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_20000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_20000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_20000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_20000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_20000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_20000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_2000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_2000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_2000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_2000_eu_west_1.log.gz 
-------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_2000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_2000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_50000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_50000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_50000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_50000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_50000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_50000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_5000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_5000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_5000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_5000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_5000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_5000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_500_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_500_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_500_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_500_eu_west_1.log.gz 
-------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_500_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_500_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_625_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_625_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_625_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_625_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_625_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_625_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_770_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_770_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_770_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_770_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/interval_770_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-crdt-optimised/interval_770_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-crdt-optimised/summary.data: -------------------------------------------------------------------------------- 1 | 100000 10.00 0.85 6.17 13.07 7.00 12.86 21.68 10.00 0.85 7.18 14.89 9.09 20.68 32.35 10.00 0.60 4.72 10.20 9.19 15.09 24.10 2 | 50000 20.00 0.43 3.98 9.75 6.20 13.29 21.20 20.00 0.48 3.96 9.82 8.48 17.72 26.88 20.00 0.59 3.66 7.89 9.58 17.38 25.50 3 | 20000 50.00 0.31 3.42 8.04 6.76 17.70 27.68 50.00 0.59 4.99 12.21 6.68 18.14 31.25 50.00 0.36 3.48 8.53 14.54 26.11 35.76 4 | 10000 99.99 0.27 2.05 5.30 6.91 11.50 18.67 99.99 0.39 2.17 5.23 6.24 16.54 23.70 99.99 0.48 2.23 5.38 11.70 19.90 39.25 5 | 5000 199.98 0.42 1.91 3.56 9.49 27.17 47.16 199.98 0.38 1.99 4.46 8.95 27.29 46.41 199.99 0.35 
2.15 5.02 9.69 20.26 35.12 6 | 2000 499.96 0.33 1.44 2.59 2.44 49.55 84.94 488.58 0.26 1.44 2.67 3.32 45.38 95.69 486.23 0.33 1.61 3.26 2.49 30.96 53.50 7 | 1333 750.12 0.35 1.49 2.64 20.25 52.76 88.57 750.11 0.35 1.66 3.22 26.40 58.47 88.84 750.13 0.39 1.54 2.83 22.47 49.48 71.30 8 | 1000 999.90 0.35 1.41 2.66 5.33 56.61 87.21 999.91 0.35 1.50 2.71 20.27 63.88 91.75 999.93 0.35 1.42 2.71 27.65 75.80 109.75 9 | 770 1298.58 0.37 1.56 2.78 10.40 70.23 134.19 1296.64 0.32 1.54 2.95 5.24 62.29 110.17 1298.19 0.39 1.40 3.08 10.07 122.03 180.27 10 | 625 1599.88 0.28 1.21 2.38 33.56 106.47 189.98 1599.90 0.31 1.29 2.52 37.65 115.91 196.35 1599.75 0.31 1.27 2.50 36.94 98.59 148.01 11 | 500 1902.80 0.27 1.32 2.55 3.97 144.35 264.43 1885.77 0.30 1.30 2.69 1.77 119.67 271.00 1893.74 0.30 1.30 2.43 1.79 112.86 216.40 12 | -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_10000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_10000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_10000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_10000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_10000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_10000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_1000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_1000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_1000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_1000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_1000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_1000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_100_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_100_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_20000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_20000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_20000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_20000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_20000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_20000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_2000_ap_southeast_1.log.gz: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_2000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_2000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_2000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_2000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_2000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_200_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_200_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_200_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_200_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_200_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_200_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_50000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_50000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_50000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_50000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_50000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_50000_us_west_1.log.gz -------------------------------------------------------------------------------- 
/evaluation/data/logs-leader-generated/interval_5000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_5000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_5000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_5000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_5000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_5000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_500_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_500_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_500_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_500_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/interval_500_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-generated/interval_500_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-generated/summary.data: -------------------------------------------------------------------------------- 1 | 100000 20.00 10.00 136399.33 136470.33 136510.69 10.00 174793.35 174864.26 174901.22 2 | 50000 40.00 20.00 137500.43 138195.24 138235.23 20.00 175553.73 175618.00 175655.73 3 | 20000 99.99 50.00 137541.53 137600.11 137630.71 50.00 176089.08 176153.67 176182.13 4 | 10000 199.98 99.99 138115.00 138163.00 138193.11 99.99 174844.91 174902.90 174928.22 5 | 5000 399.96 199.98 135829.62 135881.25 135932.03 199.99 175466.33 176197.61 176221.77 6 | 2000 999.94 499.99 137454.83 137502.04 137529.37 499.97 174061.50 174114.17 174140.90 7 | 1000 1993.44 999.93 137588.72 137641.63 137664.47 1000.21 174023.43 175346.88 175379.63 8 | 500 4000.50 2000.01 137675.62 138319.51 138418.90 1999.19 173989.78 174039.54 219207.35 9 | 200 10011.75 5005.99 136598.47 136674.45 454976.44 5003.70 174771.53 174846.09 469897.43 10 | 100 14046.06 7002.70 137243.53 664468.99 930622.37 6934.09 174061.39 669059.41 1003325.51 11 | -------------------------------------------------------------------------------- 
/evaluation/data/logs-leader-optimised/interval_100000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_100000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_100000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_10000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_10000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_10000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_10000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_10000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_10000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_1000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_1000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_1000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_1000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_1000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_1000_us_west_1.log.gz 
-------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_100_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_100_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_100_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_100_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_20000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_20000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_20000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_20000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_20000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_20000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_2000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_2000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_2000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_2000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_2000_us_west_1.log.gz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_2000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_200_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_200_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_200_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_200_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_200_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_200_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50000_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_5000_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_5000_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_5000_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_5000_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_5000_us_west_1.log.gz: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_5000_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_500_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_500_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_500_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_500_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_500_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_500_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50_ap_southeast_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50_ap_southeast_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50_eu_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50_eu_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/interval_50_us_west_1.log.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/evaluation/data/logs-leader-optimised/interval_50_us_west_1.log.gz -------------------------------------------------------------------------------- /evaluation/data/logs-leader-optimised/summary.data: -------------------------------------------------------------------------------- 1 | 100000 20.00 10.00 138191.11 138212.21 138239.57 10.00 174186.34 174210.11 174286.08 2 | 50000 40.00 20.00 137525.80 137543.82 137559.30 20.00 174855.96 174887.05 174932.04 3 | 20000 99.99 50.00 137002.32 137020.36 137035.80 50.00 174009.49 174028.97 174065.36 4 | 10000 199.98 99.99 136480.73 137061.15 137077.87 99.99 174068.35 174088.70 174120.54 5 | 5000 399.96 199.98 136678.63 136697.84 136717.34 199.99 173999.41 174796.36 174823.64 6 | 2000 999.90 499.95 136865.11 136884.23 136904.54 499.96 173460.75 174764.19 174790.16 7 | 1000 1999.81 999.90 137078.06 137096.11 137116.32 999.93 173115.01 173129.21 173159.49 8 | 500 3999.63 1999.81 136998.11 137018.82 137063.73 1999.88 174026.40 174044.26 174081.39 9 | 200 
9999.03 4999.51 136538.99 136557.65 136580.68 4999.64 172636.82 172658.59 172696.59 10 | 100 19998.01 9999.10 136861.15 136890.38 138128.19 9999.28 174065.56 174099.50 174204.11 11 | 50 22402.56 11685.54 203403.10 404595.72 480686.38 10676.88 175495.24 486708.03 512816.79 12 | -------------------------------------------------------------------------------- /evaluation/data/process-crdt.awk: -------------------------------------------------------------------------------- 1 | /^ReplicaThread.local/,/95%/ { 2 | if ($1 == "1-minute") local_rate = $4 3 | if ($1 == "min") local_min = $3 4 | if ($1 == "median") local_median = $3 5 | if ($1 == "95%") local_p95 = $3 6 | } 7 | 8 | /^ReplicaThread.remote/,/95%/ { 9 | if ($1 == "min") remote_min = $3 10 | if ($1 == "median") remote_median = $3 11 | if ($1 == "95%") print local_rate "," local_min "," local_median "," local_p95 "," remote_min "," remote_median "," $3 12 | } 13 | -------------------------------------------------------------------------------- /evaluation/data/process-crdt.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | LOGDIR="${1?Please specify directory containing log files}" 5 | 6 | # Processes the log files produced by runs of the experiment in CRDT mode 7 | # (USE_LEADER = false), and extracts the numbers we're interested in, in 8 | # a format ready for Gnuplot. The output file contains the following columns: 9 | # 10 | # 1. Interval between generated ops (microseconds) 11 | # 2. Ops/sec generated by the us-west-1 replica 12 | # 3. Minimum time to apply a local operation in us-west-1 (microseconds) 13 | # 4. Median time to apply a local operation in us-west-1 (microseconds) 14 | # 5. 95th perc. time to apply a local operation in us-west-1 (microseconds) 15 | # 6. Minimum time to apply a remote operation in us-west-1 (microseconds) 16 | # 7. Median time to apply a remote operation in us-west-1 (microseconds) 17 | # 8. 95th perc. time to apply a remote operation in us-west-1 (microseconds) 18 | # 9. Ops/sec generated by the eu-west-1 replica 19 | # 10. Minimum time to apply a local operation in eu-west-1 (microseconds) 20 | # 11. Median time to apply a local operation in eu-west-1 (microseconds) 21 | # 12. 95th perc. time to apply a local operation in eu-west-1 (microseconds) 22 | # 13. Minimum time to apply a remote operation in eu-west-1 (microseconds) 23 | # 14. Median time to apply a remote operation in eu-west-1 (microseconds) 24 | # 15. 95th perc. time to apply a remote operation in eu-west-1 (microseconds) 25 | # 16. Ops/sec generated by the ap-southeast-1 replica 26 | # 17. Minimum time to apply a local operation in ap-southeast-1 (microseconds) 27 | # 18. Median time to apply a local operation in ap-southeast-1 (microseconds) 28 | # 19. 95th perc. time to apply a local operation in ap-southeast-1 (microseconds) 29 | # 20. Minimum time to apply a remote operation in ap-southeast-1 (microseconds) 30 | # 21. Median time to apply a remote operation in ap-southeast-1 (microseconds) 31 | # 22. 95th perc. 
time to apply a remote operation in ap-southeast-1 (microseconds) 32 | 33 | rm -f "$LOGDIR/summary.data" 34 | 35 | for interval in $(ls "$LOGDIR"/*.log.gz | sed -e 's/.*interval_//' -e 's/_.*//' | uniq | sort -rn); do 36 | us_times="$(cat "${LOGDIR}/interval_${interval}_us_west_1.log.gz" | gunzip | awk -f process-crdt.awk | tail -n 2 | head -n 1 | tr '\n' ',')" 37 | eu_times="$(cat "${LOGDIR}/interval_${interval}_eu_west_1.log.gz" | gunzip | awk -f process-crdt.awk | tail -n 2 | head -n 1 | tr '\n' ',')" 38 | ap_times="$(cat "${LOGDIR}/interval_${interval}_ap_southeast_1.log.gz" | gunzip | awk -f process-crdt.awk | tail -n 2 | head -n 1 | tr '\n' ',')" 39 | echo "${interval},${us_times}${eu_times}${ap_times}" | tr ',' '\t' >> "$LOGDIR/summary.data" 40 | done 41 | -------------------------------------------------------------------------------- /evaluation/data/process-leader.awk: -------------------------------------------------------------------------------- 1 | /^ClientThread.*requests/,/95%/ { 2 | if ($1 == "1-minute") request_rate = $4 3 | if ($1 == "min") rtt_min = $3 4 | if ($1 == "median") rtt_median = $3 5 | if ($1 == "95%") rtt_p95 = $3 6 | } 7 | 8 | /^ReplicaThread.remote/,/95%/ { 9 | if ($1 == "1-minute") total_rate = $4 10 | if ($1 == "95%") print total_rate "," request_rate "," rtt_min "," rtt_median "," rtt_p95 11 | } 12 | -------------------------------------------------------------------------------- /evaluation/data/process-leader.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | LOGDIR="${1?Please specify directory containing log files}" 5 | 6 | # Processes the log files produced by runs of the experiment in leader mode 7 | # (state machine replication, USE_LEADER = true), and extracts the numbers 8 | # we're interested in, in a format ready for Gnuplot. The output file contains 9 | # the following columns: 10 | # 11 | # 1. Interval between generated ops (microseconds) 12 | # 2. Operations/sec processed by all replicas 13 | # 3. Requests/sec made by eu-west-1 replica to the leader (us-west-1) 14 | # 4. Minimum round trip time from eu-west-1 to the leader (microseconds) 15 | # 5. Median round trip time from eu-west-1 to the leader (microseconds) 16 | # 6. 95th perc. round trip time from eu-west-1 to the leader (microseconds) 17 | # 7. Requests/sec made by ap-southeast-1 replica to the leader (us-west-1) 18 | # 8. Minimum round trip time from ap-southeast-1 to the leader (microseconds) 19 | # 9. Median round trip time from ap-southeast-1 to the leader (microseconds) 20 | # 10. 95th perc. 
round trip time from ap-southeast-1 to the leader (microseconds) 21 | 22 | rm -f "$LOGDIR/summary.data" 23 | 24 | for interval in $(ls "$LOGDIR"/*.log.gz | sed -e 's/.*interval_//' -e 's/_.*//' | uniq | sort -rn); do 25 | eu_times="$(cat "${LOGDIR}/interval_${interval}_eu_west_1.log.gz" | gunzip | awk -f process-leader.awk | tail -n 2 | head -n 1 | tr '\n' ',')" 26 | ap_times="$(cat "${LOGDIR}/interval_${interval}_ap_southeast_1.log.gz" | gunzip | awk -f process-leader.awk | tail -n 2 | head -n 1 | tr '\n' ',')" 27 | echo "${interval},${eu_times}${ap_times#*,}" | tr ',' '\t' >> "$LOGDIR/summary.data" 28 | done 29 | -------------------------------------------------------------------------------- /evaluation/data/round_trip_times.awk: -------------------------------------------------------------------------------- 1 | BEGIN { 2 | first_line = 1 3 | } 4 | 5 | /^-- Timers ---/ { 6 | first_ip = "" 7 | second_ip = "" 8 | } 9 | 10 | match($0, /^ClientThread\((.*)\)\.requests/, matches) { 11 | if (first_ip == "") first_ip = matches[1]; else second_ip = matches[1]; 12 | if (second_ip != "" && first_line) { 13 | print first_ip "," second_ip 14 | first_line = 0 15 | } 16 | } 17 | 18 | /^ClientThread\(.*\)\.requests/,/median/ { 19 | if ($1 == "median") { 20 | if (second_ip == "") { 21 | first_time = $3 22 | } else { 23 | print first_time "," $3 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /evaluation/project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.2.8 2 | -------------------------------------------------------------------------------- /evaluation/run-all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # Run this script with one argument: the interval between generated move 5 | # operations, in microseconds, on each replica. 6 | INTERVAL="${1?Please specify interval between operations (in microseconds)}" 7 | 8 | # If false, runs in CRDT mode. If true, runs in leader-based mode. 9 | USE_LEADER=false 10 | 11 | # If true, uses algorithm implementation extracted from Isabelle. 12 | # If false, uses a hand-written (not formally verified) implementation. 13 | USE_GENERATED_CODE=false 14 | 15 | # To run the experiments, log in to the AWS EC2 console and start up a c5.large 16 | # instance running Ubuntu 18.04 in each of the three regions us-west-1, eu-west-1, 17 | # and ap-southeast-1. Then fill in their IP addresses here: 18 | US_WEST_1=18.144.81.71 19 | EU_WEST_1=52.17.89.174 20 | AP_SOUTHEAST_1=18.141.237.55 21 | 22 | # Set up the security groups such that you can log in to the VMs by SSH (TCP port 22), 23 | # and that they can all connect to each other on TCP port 8080.
24 | # Then, to set up the VMs, log in to each by SSH and run the following: 25 | 26 | # sudo apt-get update && sudo apt-get upgrade -y && sudo apt-get install -y apt-transport-https gnupg wget unzip 27 | # echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list 28 | # sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823 29 | # sudo apt-get update && sudo apt-get install -y openjdk-8-jdk-headless sbt 30 | # git clone https://github.com/trvedata/move-op.git && cd move-op/evaluation && sbt compile 31 | 32 | # Once that setup is done, the following commands run the test script on the 33 | # three instances concurrently, and then copy the log files off the instances 34 | # into evaluation/data/logs/*.log.gz. Those logs are then analysed by the 35 | # scripts evaluation/data/process-crdt.sh (for USE_LEADER=false) and 36 | # evaluation/data/process-leader.sh (for USE_LEADER=true). 37 | 38 | if [ "$USE_LEADER" = "true" ]; then 39 | LOGDIR="data/logs-leader" 40 | # Define us-west-1 to be the leader 41 | ssh -i ~/.ec2/martin-aws-us-west-1.pem ubuntu@$US_WEST_1 \ 42 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 0 '' '' & 43 | ssh -i ~/.ec2/martin-aws-eu-west-1.pem ubuntu@$EU_WEST_1 \ 44 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 1 $US_WEST_1 '' & 45 | ssh -i ~/.ec2/martin-aws-ap-southeast-1.pem ubuntu@$AP_SOUTHEAST_1 \ 46 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 2 $US_WEST_1 '' & 47 | wait 48 | else 49 | LOGDIR="data/logs" 50 | ssh -i ~/.ec2/martin-aws-us-west-1.pem ubuntu@$US_WEST_1 \ 51 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 1 $EU_WEST_1 $AP_SOUTHEAST_1 & 52 | ssh -i ~/.ec2/martin-aws-eu-west-1.pem ubuntu@$EU_WEST_1 \ 53 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 2 $US_WEST_1 $AP_SOUTHEAST_1 & 54 | ssh -i ~/.ec2/martin-aws-ap-southeast-1.pem ubuntu@$AP_SOUTHEAST_1 \ 55 | /home/ubuntu/move-op/evaluation/run-test.sh $INTERVAL $USE_LEADER $USE_GENERATED_CODE 3 $US_WEST_1 $EU_WEST_1 & 56 | wait 57 | fi 58 | 59 | cd "$(dirname "$0")" 60 | mkdir -p "$LOGDIR" 61 | 62 | scp -i ~/.ec2/martin-aws-us-west-1.pem ubuntu@$US_WEST_1:/home/ubuntu/move-op/evaluation/$LOGDIR/interval_$INTERVAL.log $LOGDIR/interval_${INTERVAL}_us_west_1.log 63 | scp -i ~/.ec2/martin-aws-eu-west-1.pem ubuntu@$EU_WEST_1:/home/ubuntu/move-op/evaluation/$LOGDIR/interval_$INTERVAL.log $LOGDIR/interval_${INTERVAL}_eu_west_1.log 64 | scp -i ~/.ec2/martin-aws-ap-southeast-1.pem ubuntu@$AP_SOUTHEAST_1:/home/ubuntu/move-op/evaluation/$LOGDIR/interval_$INTERVAL.log $LOGDIR/interval_${INTERVAL}_ap_southeast_1.log 65 | 66 | gzip $LOGDIR/interval_${INTERVAL}_us_west_1.log $LOGDIR/interval_${INTERVAL}_eu_west_1.log $LOGDIR/interval_${INTERVAL}_ap_southeast_1.log 67 | -------------------------------------------------------------------------------- /evaluation/run-test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eo pipefail 3 | 4 | # This script executes on an EC2 VM. It runs a replica that generates 5 | # operations with a given inter-operation interval.
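# For reference, run-all.sh invokes this script over SSH with up to six arguments
# (unused remote IPs are passed as empty strings). A CRDT-mode invocation for
# replica 1 looks roughly like:
#
#   ./run-test.sh 10000 false false 1 52.17.89.174 18.141.237.55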
6 | 7 | INTERVAL="${1?Please specify interval}" 8 | USE_LEADER="${2?Please specify whether to use leader}" 9 | USE_GENERATED_CODE="${3?Please specify whether to use generated code}" 10 | REPLICA_ID="${4?Please specify replica ID}" 11 | REMOTE1="$5" 12 | REMOTE2="$6" 13 | 14 | if [ "$USE_LEADER" = "true" ]; then 15 | LOGDIR="data/logs-leader" 16 | else 17 | LOGDIR="data/logs" 18 | fi 19 | 20 | LOGFILE="$LOGDIR/interval_$INTERVAL.log" 21 | 22 | cd "$(dirname "$0")" 23 | 24 | sed -i~ -e "s/\\(val OPERATION_INTERVAL =.*\\)([0-9]*)/\\1($INTERVAL)/" \ 25 | -e "s/\\(val USE_LEADER = \\).*/\\1$USE_LEADER/" \ 26 | -e "s/\\(val USE_GENERATED_CODE = \\).*/\\1$USE_GENERATED_CODE/" \ 27 | src/main/scala/TestReplica.scala 28 | 29 | mkdir -p "$LOGDIR" 30 | 31 | if [ -f "$LOGFILE" ]; then 32 | echo "log file already exists" 33 | exit 1 34 | fi 35 | 36 | sbt --mem 3072 "runMain TestReplica $REPLICA_ID $REMOTE1 $REMOTE2" 2>&1 | tee "$LOGFILE" 37 | -------------------------------------------------------------------------------- /evaluation/src/main/scala/Main.scala: -------------------------------------------------------------------------------- 1 | import java.util.Random 2 | 3 | object Main extends App { 4 | val random = new Random() 5 | 6 | def operations(num: Int, actorId: BigInt) = { 7 | (0 to num).map { i => 8 | val parent = BigInt(random.nextInt(1000)) 9 | val child = BigInt(random.nextInt(1000)) 10 | generated.Move((BigInt(i), actorId), parent, "", child) 11 | } 12 | } 13 | 14 | var state: (List[generated.log_op[(BigInt, BigInt), BigInt, String]], generated.hashmap[BigInt, (String, BigInt)]) 15 | = (Nil, generated.hm_empty[BigInt, (String, BigInt)].apply(())) 16 | 17 | println("applying local ops...") 18 | val start1 = System.nanoTime() 19 | for (op <- operations(1000, BigInt(1))) { 20 | state = generated.integer_apply_op(op)(state) 21 | } 22 | println("elapsed time: %d ms" format ((System.nanoTime() - start1) / 1000000)) 23 | 24 | println("applying remote ops...") 25 | val start2 = System.nanoTime() 26 | for (op <- operations(1000, BigInt(2))) { 27 | state = generated.integer_apply_op(op)(state) 28 | } 29 | println("elapsed time: %d ms" format ((System.nanoTime() - start2) / 1000000)) 30 | } 31 | -------------------------------------------------------------------------------- /evaluation/src/main/scala/TestReplica.scala: -------------------------------------------------------------------------------- 1 | import com.codahale.metrics.{ ConsoleReporter, MetricRegistry, Timer, Gauge } 2 | import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream, InputStream, OutputStream } 3 | import java.net.{ ServerSocket, Socket } 4 | import java.util.concurrent.{ArrayBlockingQueue, ConcurrentHashMap, TimeUnit} 5 | import java.util.{ ArrayList, HashMap, Random } 6 | 7 | object TestReplica { 8 | 9 | // TCP port number for communication between replicas 10 | val PORT = 8080 11 | 12 | // Time interval between generated operations (= 1 / operation rate) 13 | val OPERATION_INTERVAL = TimeUnit.MICROSECONDS.toNanos(1000000) 14 | 15 | // If false, runs in CRDT mode. If true, uses leader-based replication. 16 | val USE_LEADER = false 17 | 18 | // In CRDT mode: if false, use hand-written implementation of the algorithm; 19 | // if true, use the code generated by Isabelle. 
20 | val USE_GENERATED_CODE = true 21 | 22 | // How long to run the test before shutting down 23 | val RUN_DURATION = TimeUnit.MINUTES.toNanos(10) 24 | 25 | // Backpressure kicks in if more than this number of requests in flight 26 | val MAX_PENDING_REQUESTS = if (USE_LEADER) 5000 else if (USE_GENERATED_CODE) 50 else 400 27 | 28 | def startDaemon[T <: Runnable](runnable: T): T = { 29 | val thread = new Thread(runnable) 30 | thread.setDaemon(true) 31 | thread.start() 32 | return runnable 33 | } 34 | 35 | def main(args: Array[String]): Unit = { 36 | if (args.length < 1) { 37 | throw new Exception("Usage: TestReplica replica-id remote-ip1 [remote-ip2 ...]") 38 | } 39 | 40 | implicit val metrics = new MetricRegistry() 41 | ConsoleReporter.forRegistry(metrics). 42 | convertDurationsTo(TimeUnit.MICROSECONDS). 43 | build().start(20, TimeUnit.SECONDS) 44 | 45 | val replicaId = args(0).toLong 46 | val replica = new ReplicaThread(replicaId, metrics) 47 | new Thread(replica).start() 48 | 49 | startDaemon(new AcceptThread(replica)) 50 | 51 | TimeUnit.SECONDS.sleep(10) // time for other servers to come up 52 | 53 | if (USE_LEADER) { 54 | // Leader mode: connect to only one IP, namely the leader 55 | if (replicaId > 0) { 56 | replica.addClient(startDaemon(new FollowerThread(args(1), replica, metrics))) 57 | } 58 | } else { 59 | // CRDT mode: connect to all of the remote IPs 60 | for (remoteIp <- args.drop(1)) { 61 | replica.addClient(startDaemon(new CRDTClientThread(remoteIp, metrics))) 62 | } 63 | } 64 | } 65 | } 66 | 67 | // Encoding/decoding objects <--> bytes 68 | object Protocol { 69 | case class Move(time: Long, replica: Long, parent: Long, child: Long) 70 | case class Ack(time: Long, replica: Long) 71 | 72 | def encodeMove(move: Move): Array[Byte] = { 73 | val bytes = new ByteArrayOutputStream(4 * 8) 74 | val data = new DataOutputStream(bytes) 75 | data.writeLong(move.time) 76 | data.writeLong(move.replica) 77 | data.writeLong(move.parent) 78 | data.writeLong(move.child) 79 | bytes.toByteArray() 80 | } 81 | 82 | def encodeAck(ack: Ack): Array[Byte] = { 83 | val bytes = new ByteArrayOutputStream(2 * 8) 84 | val data = new DataOutputStream(bytes) 85 | data.writeLong(ack.time) 86 | data.writeLong(ack.replica) 87 | bytes.toByteArray() 88 | } 89 | 90 | def decodeMove(bytes: Array[Byte]): Move = { 91 | val data = new DataInputStream(new ByteArrayInputStream(bytes)) 92 | val time = data.readLong() 93 | val replica = data.readLong() 94 | val parent = data.readLong() 95 | val child = data.readLong() 96 | Move(time, replica, parent, child) 97 | } 98 | 99 | def decodeAck(bytes: Array[Byte]): Ack = { 100 | val data = new DataInputStream(new ByteArrayInputStream(bytes)) 101 | val time = data.readLong() 102 | val replica = data.readLong() 103 | Ack(time, replica) 104 | } 105 | } 106 | 107 | // One log entry (in the handwritten implementation) 108 | class LogOp( 109 | val time: Long, 110 | val replica: Long, 111 | var old: Option[Tuple2[String, Long]], 112 | val parent: Long, 113 | val meta: String, 114 | val child: Long 115 | ) { 116 | override def toString() = s"LogOp($time, $replica, $old, $parent, $meta, $child)" 117 | } 118 | 119 | // Base class for ClientThread and ServerThread. Assumes that each incoming 120 | // message has a fixed size in bytes (given as recvFrameSize). 
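// (Frame sizes: an encoded Move is four longs, 4 * 8 = 32 bytes, and an encoded Ack is two
// longs, 2 * 8 = 16 bytes. These are the recvFrameSize values passed by the Connection
// subclasses below: CRDTClientThread expects Acks, while FollowerThread and ServerThread
// expect Moves.)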
121 | abstract class Connection(socket: Socket, recvFrameSize: Int) extends Runnable { 122 | socket.setTcpNoDelay(true) 123 | 124 | def send(bytes: Array[Byte]) { 125 | this.synchronized { 126 | socket.getOutputStream().write(bytes) 127 | } 128 | } 129 | 130 | // Called when a whole incoming message has been received 131 | def receive(bytes: Array[Byte]) 132 | 133 | // The run loop blocks waiting for bytes to be received. It waits for a message 134 | // (recvFrameSize bytes) to be received and then calls receive(). 135 | def run() { 136 | try { 137 | val recvBuf = new Array[Byte](recvFrameSize) 138 | val inputStream = socket.getInputStream() 139 | var bytesRead = 0 140 | while (true) { 141 | val ret = inputStream.read(recvBuf, bytesRead, recvFrameSize - bytesRead) 142 | if (ret <= 0) return 143 | bytesRead += ret 144 | if (bytesRead == recvFrameSize) { 145 | receive(recvBuf) 146 | bytesRead = 0 147 | } 148 | } 149 | } finally { 150 | println(s"Closing connection: ${this}") 151 | socket.close() 152 | } 153 | } 154 | } 155 | 156 | trait ClientThread { 157 | def send(move: Protocol.Move) 158 | def backpressure: Boolean 159 | } 160 | 161 | // Thread that handles the client side of a connection. It sends Move requests 162 | // to the server, and waits for Ack responses in reply. 163 | class CRDTClientThread(val remoteIp: String, metrics: MetricRegistry) 164 | extends Connection(new Socket(remoteIp, TestReplica.PORT), 2 * 8) with ClientThread { 165 | 166 | val timer = metrics.timer(s"ClientThread($remoteIp).requests") 167 | val requests = new ConcurrentHashMap[Protocol.Ack, Timer.Context]() 168 | 169 | def send(move: Protocol.Move) { 170 | val requestId = Protocol.Ack(move.time, move.replica) 171 | requests.putIfAbsent(requestId, timer.time()) 172 | this.send(Protocol.encodeMove(move)) 173 | } 174 | 175 | def receive(bytes: Array[Byte]) { 176 | val ack = Protocol.decodeAck(bytes) 177 | requests.remove(ack).stop() 178 | } 179 | 180 | // Returns false if we're happy to accept more requests, and true if we need 181 | // to hold off on enqueueing more requests for now. 182 | def backpressure: Boolean = { 183 | requests.size() >= TestReplica.MAX_PENDING_REQUESTS 184 | } 185 | } 186 | 187 | // Thread that handles the client side of a connection to a leader. It sends 188 | // Move requests to the leader, and waits for Move responses in reply. 189 | class FollowerThread(val leaderIp: String, replica: ReplicaThread, metrics: MetricRegistry) 190 | extends Connection(new Socket(leaderIp, TestReplica.PORT), 4 * 8) with ClientThread { 191 | 192 | val timer = metrics.timer(s"ClientThread($leaderIp).requests") 193 | val requests = new ConcurrentHashMap[Protocol.Ack, Timer.Context]() 194 | 195 | metrics.register("RequestsMapSize", new Gauge[Int] { 196 | def getValue: Int = requests.size 197 | }) 198 | 199 | def send(move: Protocol.Move) { 200 | val requestId = Protocol.Ack(move.time, move.replica) 201 | requests.putIfAbsent(requestId, timer.time()) 202 | this.send(Protocol.encodeMove(move)) 203 | } 204 | 205 | def receive(bytes: Array[Byte]) { 206 | val move = Protocol.decodeMove(bytes) 207 | val requestId = Protocol.Ack(move.time, move.replica) 208 | val timer = requests.remove(requestId) 209 | if (timer != null) timer.stop() 210 | replica.request(move, null) 211 | } 212 | 213 | // Returns false if we're happy to accept more requests, and true if we need 214 | // to hold off on enqueueing more requests for now. 
215 | def backpressure: Boolean = { 216 | requests.size() >= TestReplica.MAX_PENDING_REQUESTS 217 | } 218 | } 219 | 220 | // Thread that handles the server side of a connection. It waits for Move 221 | // requests from a client, and gets the replica to process them. When done, it 222 | // sends either an Ack response (in CRDT mode) or a Move response (in leader mode) 223 | // back to the client. 224 | class ServerThread(replica: ReplicaThread, socket: Socket) extends Connection(socket, 4 * 8) { 225 | replica.addServer(this) 226 | 227 | def send(ack: Protocol.Ack) { 228 | this.send(Protocol.encodeAck(ack)) 229 | } 230 | 231 | def send(move: Protocol.Move) { 232 | this.send(Protocol.encodeMove(move)) 233 | } 234 | 235 | def receive(bytes: Array[Byte]) { 236 | replica.request(Protocol.decodeMove(bytes), this) 237 | } 238 | } 239 | 240 | // Thread that accepts connections on a server socket, and spawns a new 241 | // ServerThread for each incoming connection. 242 | class AcceptThread(replica: ReplicaThread) extends Runnable { 243 | def run() { 244 | val server = new ServerSocket(TestReplica.PORT) 245 | while (true) { 246 | val socket = server.accept() 247 | println(s"Incoming connection: ${socket}") 248 | new Thread(new ServerThread(replica, socket)).start() 249 | } 250 | } 251 | } 252 | 253 | // This thread is the main execution loop of a replica. It manages the replica 254 | // state and calls into the Isabelle-generated code to update the state. 255 | class ReplicaThread(replicaId: Long, metrics: MetricRegistry) extends Runnable { 256 | val localTimer = metrics.timer("ReplicaThread.local") 257 | val remoteTimer = metrics.timer("ReplicaThread.remote") 258 | val backpressure = metrics.meter("ReplicaThread.backpressure") 259 | val undosRedos = metrics.histogram("ReplicaThread.undosRedos") 260 | val queue = new ArrayBlockingQueue[Tuple2[Protocol.Move, ServerThread]](64) 261 | val random = new Random() 262 | 263 | var clients: List[ClientThread] = Nil 264 | var servers: List[ServerThread] = Nil 265 | 266 | // For Lamport timestamps 267 | var counter: Long = 0 268 | 269 | // The current state of the replica (consisting of both log and tree). 270 | // Type comes from generated code, hence horrible. 271 | var state: (List[generated.log_op[(BigInt, BigInt), BigInt, String]], generated.hashmap[BigInt, (String, BigInt)]) 272 | = (Nil, generated.hm_empty[BigInt, (String, BigInt)].apply(())) 273 | 274 | // The current tree of the replica when using the handwritten 275 | // (non-Isabelle-generated) implementation. 276 | val tree: HashMap[Long, Tuple2[String, Long]] = new HashMap() 277 | 278 | // The operation log of the replica when using the handwritten 279 | // (non-Isabelle-generated) implementation. 280 | val log: ArrayList[LogOp] = new ArrayList() 281 | 282 | def addClient(client: ClientThread) { 283 | clients = client :: clients 284 | } 285 | 286 | def addServer(server: ServerThread) { 287 | servers = server :: servers 288 | } 289 | 290 | // Incoming request from a ServerThread. The calling object is passed in so 291 | // that we know where to send the response once we've processed the operation. 292 | def request(move: Protocol.Move, sender: ServerThread) { 293 | queue.put((move, sender)) 294 | } 295 | 296 | // Executes a remote operation. This is called on the replica thread. 
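// In CRDT mode the operation is applied and an Ack is returned to the sending peer.
// In leader mode the leader applies the operation and fans it out to every connected
// follower; the echo back to the originating follower doubles as its acknowledgement.
// On a follower, `sender` is null (the move arrived via FollowerThread.receive) and,
// in this test setup, `servers` is empty, so nothing further is sent.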
297 | private[this] def processRequest(move: Protocol.Move, sender: ServerThread) { 298 | val timer = remoteTimer.time() 299 | try { 300 | //println(s"Received: ${move}") 301 | applyMove(move) 302 | if (TestReplica.USE_LEADER) { 303 | // On the leader: replicate to all followers. On a follower: servers is empty. 304 | for (server <- servers) server.send(move) 305 | } else { 306 | sender.send(Protocol.Ack(move.time, move.replica)) 307 | } 308 | } finally { 309 | timer.stop() 310 | } 311 | } 312 | 313 | // Generates a new move operation. 314 | // In CRDT mode: applies it locally, and sends it to all of the clients. 315 | // In leader mode: does nothing if we are the leader (replicaId == 0), 316 | // otherwise generates a move operation and sends it to the leader. 317 | private[this] def generateMove() { 318 | if (TestReplica.USE_LEADER && replicaId == 0) return 319 | counter += 1 320 | val move = Protocol.Move(counter, replicaId, random.nextInt(1000), random.nextInt(1000)) 321 | //println(s"Generated: ${move}") 322 | if (!TestReplica.USE_LEADER) { 323 | val timer = localTimer.time() 324 | try { 325 | this.applyMove(move) 326 | } finally { 327 | timer.stop() 328 | } 329 | } 330 | for (client <- clients) client.send(move) 331 | } 332 | 333 | // Actually applies a move operation to the current state (calls into 334 | // Isabelle-generated code). Both local and remote operations. 335 | private[this] def applyMove(moveOp: Protocol.Move) { 336 | var move = moveOp 337 | if (TestReplica.USE_LEADER) { 338 | // Give operations consecutive timestamps in the order we received them from the leader 339 | counter += 1 340 | move = Protocol.Move(counter, replicaId, move.parent, move.child) 341 | } else if (move.time > counter) { 342 | // Lamport timestamp maintenance 343 | counter = move.time 344 | } 345 | 346 | if (!TestReplica.USE_GENERATED_CODE) { 347 | applyMoveOptimised(move) 348 | } else { 349 | val timestamp = (BigInt(move.time), BigInt(move.replica)) 350 | val operation = generated.Move(timestamp, BigInt(move.parent), "", BigInt(move.child)) 351 | state = generated.integer_apply_op(operation)(state) 352 | 353 | // Truncate the log from time to time 354 | if (counter % 100000 == 0) { 355 | state = (state._1.take(10000), state._2) 356 | } 357 | } 358 | } 359 | 360 | // Alternative (not formally verified) implementation of move algorithm. 
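// The log is kept sorted by (time, replica). Applying an operation therefore works by:
// (1) walking backwards from the end of the log, undoing every operation with a later
//     timestamp than the new one (restoring each child's previously recorded `old` state);
// (2) inserting the new operation at the position where the walk stopped; and
// (3) re-applying all operations from that position onwards, recording each child's prior
//     state in `old` and skipping any move that would make a node an ancestor of itself.
// The undosRedos histogram records how many existing entries had to be undone and redone.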
361 | private[this] def applyMoveOptimised(move: Protocol.Move) { 362 | var index = log.size 363 | var finished = false 364 | log.add(null) // make space for the new log entry 365 | 366 | // Go through log backwards and undo all ops with greater timestamp 367 | while (index > 0 && !finished) { 368 | val logOp = log.get(index - 1) 369 | if (logOp.time > move.time || (logOp.time == move.time && logOp.replica > move.replica)) { 370 | log.set(index, logOp) 371 | logOp.old match { 372 | case None => tree.remove(logOp.child) 373 | case Some(old) => tree.put(logOp.child, old) 374 | } 375 | index -= 1 376 | } else { 377 | finished = true 378 | } 379 | } 380 | 381 | // Insert the new entry into the log 382 | log.set(index, new LogOp(move.time, move.replica, None, move.parent, "", move.child)) 383 | undosRedos.update(log.size - index - 1) 384 | 385 | // Do/redo all log entries including the new one 386 | while (index < log.size) { 387 | val logOp = log.get(index) 388 | val previous = tree.get(logOp.child) 389 | if (previous == null) { 390 | logOp.old = None 391 | } else { 392 | logOp.old = Some(previous) 393 | } 394 | if (!isAncestor(logOp.child, logOp.parent) && logOp.child != logOp.parent) { 395 | tree.put(logOp.child, (logOp.meta, logOp.parent)) 396 | } 397 | index += 1 398 | } 399 | 400 | // Truncate the log from time to time 401 | if (log.size > 1000000) { 402 | log.subList(0, log.size - 10000).clear() 403 | } 404 | } 405 | 406 | // Returns true if `ancestor` is an ancestor of `child` in `this.tree` 407 | // (code not Isabelle-generated). 408 | private[this] def isAncestor(ancestor: Long, child: Long): Boolean = { 409 | var parent = tree.get(child) 410 | while (parent != null) { 411 | if (parent._2 == ancestor) return true 412 | parent = tree.get(parent._2) 413 | } 414 | return false 415 | } 416 | 417 | // The run loop does two things: it blocks waiting for incoming requests from 418 | other replicas on the blocking queue, and it also generates a new operation 419 | every OPERATION_INTERVAL (unless backpressure is applied). 
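// Each iteration polls the queue with a timeout that expires at the next generation tick.
// If the poll times out, the replica generates a new local move, unless some outgoing
// connection is above its pending-request limit, in which case it only records the
// backpressure event; either way nextTick advances by one OPERATION_INTERVAL. If a
// request arrives before the timeout, it is processed immediately and the tick is
// left unchanged.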
420 | def run() { 421 | TimeUnit.SECONDS.sleep(20) // time for all replicas to start up 422 | val startTime = System.nanoTime() 423 | var nextTick = startTime + TestReplica.OPERATION_INTERVAL 424 | while (System.nanoTime() < startTime + TestReplica.RUN_DURATION) { 425 | val request = queue.poll(nextTick - System.nanoTime(), TimeUnit.NANOSECONDS) 426 | if (request == null) { 427 | if (clients.exists(_.backpressure)) { 428 | backpressure.mark() 429 | } else { 430 | generateMove() 431 | } 432 | nextTick += TestReplica.OPERATION_INTERVAL 433 | } else { 434 | processRequest(request._1, request._2) 435 | } 436 | } 437 | } 438 | } 439 | -------------------------------------------------------------------------------- /experiment.sql: -------------------------------------------------------------------------------- 1 | CREATE SEQUENCE move_ops_seq; 2 | CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; 3 | 4 | CREATE TYPE unique_timestamp AS ( 5 | time timestamptz, 6 | counter bigint, 7 | id uuid 8 | ); 9 | 10 | CREATE TABLE move_ops ( 11 | time unique_timestamp PRIMARY KEY DEFAULT (now(), nextval('move_ops_seq'), uuid_generate_v4()), 12 | parent bigint NOT NULL, 13 | name text NOT NULL, 14 | child bigint NOT NULL, 15 | valid boolean NOT NULL DEFAULT 'true' 16 | ); 17 | 18 | CREATE INDEX ON move_ops (parent); 19 | CREATE INDEX ON move_ops (child); 20 | 21 | CREATE OR REPLACE VIEW tree AS 22 | SELECT m1.parent AS parent, m1.name AS name, m1.child AS child 23 | FROM move_ops AS m1 24 | LEFT JOIN move_ops AS m2 25 | ON m1.time < m2.time AND m1.child = m2.child AND m2.valid 26 | WHERE m1.valid AND m2 IS NULL; 27 | 28 | CREATE OR REPLACE FUNCTION ancestors_before(parent bigint, threshold unique_timestamp) 29 | RETURNS TABLE(ancestor bigint) AS $$ 30 | WITH RECURSIVE ancestors(ancestor) AS ( 31 | VALUES (parent) 32 | UNION 33 | SELECT m1.parent FROM ancestors 34 | JOIN move_ops AS m1 35 | ON m1.child = ancestor AND m1.valid AND m1.time < threshold 36 | LEFT JOIN move_ops AS m2 37 | ON m2.child = ancestor AND m2.valid AND m2.time < threshold AND m1.time < m2.time 38 | WHERE m2 IS NULL 39 | ) 40 | SELECT * from ancestors 41 | $$ LANGUAGE SQL; 42 | 43 | CREATE OR REPLACE FUNCTION process_move() RETURNS trigger AS $$ 44 | DECLARE 45 | move RECORD; 46 | BEGIN 47 | FOR move IN SELECT * FROM move_ops WHERE time >= NEW.time ORDER BY time FOR UPDATE LOOP 48 | IF move.child IN (SELECT ancestors_before(move.parent, move.time)) THEN 49 | RAISE NOTICE 'INVALID: %', move; 50 | IF move.valid THEN 51 | UPDATE move_ops SET valid = 'false' WHERE time = move.time; 52 | END IF; 53 | ELSE 54 | RAISE NOTICE 'valid: %', move; 55 | IF NOT move.valid THEN 56 | UPDATE move_ops SET valid = 'true' WHERE time = move.time; 57 | END IF; 58 | END IF; 59 | END LOOP; 60 | RETURN NULL; 61 | END; 62 | $$ LANGUAGE plpgsql; 63 | 64 | CREATE TRIGGER process_move AFTER INSERT ON move_ops 65 | FOR EACH ROW EXECUTE FUNCTION process_move(); 66 | 67 | CREATE OR REPLACE VIEW tree_view AS 68 | WITH RECURSIVE tree_view (node, path) AS ( 69 | VALUES(0::bigint, ARRAY[]::text[]) 70 | UNION ALL 71 | SELECT tree.child, tree_view.path || tree.name 72 | FROM tree_view JOIN tree ON tree_view.node = tree.parent 73 | ) 74 | SELECT CASE 75 | WHEN path = array[]::text[] THEN node::text 76 | ELSE repeat('|- ', array_length(path, 1)) || path[array_length(path, 1)] || ': ' || node::text 77 | END 78 | FROM tree_view ORDER BY path; 79 | -------------------------------------------------------------------------------- /paper/Makefile: 
-------------------------------------------------------------------------------- 1 | .SUFFIXES: .tex .bib .aux .bbl .dvi .ps .pdf .thy 2 | 3 | all: move-op.pdf 4 | 5 | %.pdf: %.bbl 6 | pdflatex $(@:.pdf=) 7 | pdflatex $(@:.pdf=) 8 | 9 | %.bbl: references.bib %.aux 10 | bibtex $(@:.bbl=) 11 | 12 | %.aux: *.tex 13 | pdflatex $(@:.aux=) 14 | 15 | %-arxiv.tex: %.tex %.bbl 16 | sed -e "/\\\\bibliography{references}/ r $(@:-arxiv.tex=.bbl)" -e '/\\bibliography{references}/ d' $(@:-arxiv.tex=.tex) > $@ 17 | 18 | clean: 19 | rm -f move-op.{log,aux,out,bbl,blg,dvi,ps,pdf} *-arxiv.tex comment.cut 20 | -------------------------------------------------------------------------------- /paper/crdt-generated.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/crdt-generated.pdf -------------------------------------------------------------------------------- /paper/crdt-optimised.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/crdt-optimised.pdf -------------------------------------------------------------------------------- /paper/gdrive-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/gdrive-error.png -------------------------------------------------------------------------------- /paper/isabelle.sty: -------------------------------------------------------------------------------- 1 | %% 2 | %% macros for Isabelle generated LaTeX output 3 | %% 4 | 5 | %%% Simple document preparation (based on theory token language and symbols) 6 | 7 | % isabelle environments 8 | 9 | \newcommand{\isabellecontext}{UNKNOWN} 10 | \newcommand{\setisabellecontext}[1]{\def\isabellecontext{#1}} 11 | 12 | \newcommand{\isastyle}{\UNDEF} 13 | \newcommand{\isastylett}{\UNDEF} 14 | \newcommand{\isastyleminor}{\UNDEF} 15 | \newcommand{\isastyleminortt}{\UNDEF} 16 | \newcommand{\isastylescript}{\UNDEF} 17 | \newcommand{\isastyletext}{\normalsize\rm} 18 | \newcommand{\isastyletxt}{\rm} 19 | \newcommand{\isastylecmt}{\rm} 20 | 21 | \newcommand{\isaspacing}{% 22 | \sfcode 42 1000 % . 23 | \sfcode 63 1000 % ? 24 | \sfcode 33 1000 % ! 
25 | \sfcode 58 1000 % : 26 | \sfcode 59 1000 % ; 27 | \sfcode 44 1000 % , 28 | } 29 | 30 | %symbol markup -- \emph achieves decent spacing via italic corrections 31 | \newcommand{\isamath}[1]{\emph{$#1$}} 32 | \newcommand{\isatext}[1]{\emph{#1}} 33 | \DeclareRobustCommand{\isascriptstyle}{\def\isamath##1{##1}\def\isatext##1{\mbox{\isaspacing\isastylescript##1}}} 34 | \newcommand{\isactrlsub}[1]{\emph{\isascriptstyle${}\sb{#1}$}} 35 | \newcommand{\isactrlsup}[1]{\emph{\isascriptstyle${}\sp{#1}$}} 36 | \DeclareRobustCommand{\isactrlbsub}{\emph\bgroup\math{}\sb\bgroup\mbox\bgroup\isaspacing\isastylescript} 37 | \DeclareRobustCommand{\isactrlesub}{\egroup\egroup\endmath\egroup} 38 | \DeclareRobustCommand{\isactrlbsup}{\emph\bgroup\math{}\sp\bgroup\mbox\bgroup\isaspacing\isastylescript} 39 | \DeclareRobustCommand{\isactrlesup}{\egroup\egroup\endmath\egroup} 40 | \newcommand{\isactrlbold}[1]{{\bfseries\upshape\boldmath#1}} 41 | 42 | \newcommand{\isaantiqcontrol}[1]{\isatt{{\char`\\}{\char`\<}{\char`\^}#1{\char`\>}}} 43 | \newenvironment{isaantiq}{{\isacharat\isacharbraceleft}}{{\isacharbraceright}} 44 | 45 | \newdimen\isa@parindent\newdimen\isa@parskip 46 | 47 | \newenvironment{isabellebody}{% 48 | \isamarkuptrue\par% 49 | \isa@parindent\parindent\parindent0pt% 50 | \isa@parskip\parskip\parskip0pt% 51 | \isaspacing\isastyle}{\par} 52 | 53 | \newenvironment{isabellebodytt}{% 54 | \isamarkuptrue\par% 55 | \isa@parindent\parindent\parindent0pt% 56 | \isa@parskip\parskip\parskip0pt% 57 | \isaspacing\isastylett}{\par} 58 | 59 | \newenvironment{isabelle} 60 | {\begin{trivlist}\begin{isabellebody}\item\relax} 61 | {\end{isabellebody}\end{trivlist}} 62 | 63 | \newenvironment{isabellett} 64 | {\begin{trivlist}\begin{isabellebodytt}\item\relax} 65 | {\end{isabellebodytt}\end{trivlist}} 66 | 67 | \newcommand{\isa}[1]{\emph{\isaspacing\isastyleminor #1}} 68 | \newcommand{\isatt}[1]{\emph{\isaspacing\isastyleminortt #1}} 69 | 70 | \newcommand{\isaindent}[1]{\hphantom{#1}} 71 | \newcommand{\isanewline}{\mbox{}\par\mbox{}} 72 | \newcommand{\isasep}{} 73 | \newcommand{\isadigit}[1]{#1} 74 | 75 | \newcommand{\isachardefaults}{% 76 | \def\isacharbell{\isamath{\bigbox}}%requires stmaryrd 77 | \chardef\isacharbang=`\!% 78 | \chardef\isachardoublequote=`\"% 79 | \chardef\isachardoublequoteopen=`\"% 80 | \chardef\isachardoublequoteclose=`\"% 81 | \chardef\isacharhash=`\#% 82 | \chardef\isachardollar=`\$% 83 | \chardef\isacharpercent=`\%% 84 | \chardef\isacharampersand=`\&% 85 | \chardef\isacharprime=`\'% 86 | \chardef\isacharparenleft=`\(% 87 | \chardef\isacharparenright=`\)% 88 | \chardef\isacharasterisk=`\*% 89 | \chardef\isacharplus=`\+% 90 | \chardef\isacharcomma=`\,% 91 | \chardef\isacharminus=`\-% 92 | \chardef\isachardot=`\.% 93 | \chardef\isacharslash=`\/% 94 | \chardef\isacharcolon=`\:% 95 | \chardef\isacharsemicolon=`\;% 96 | \chardef\isacharless=`\<% 97 | \chardef\isacharequal=`\=% 98 | \chardef\isachargreater=`\>% 99 | \chardef\isacharquery=`\?% 100 | \chardef\isacharat=`\@% 101 | \chardef\isacharbrackleft=`\[% 102 | \chardef\isacharbackslash=`\\% 103 | \chardef\isacharbrackright=`\]% 104 | \chardef\isacharcircum=`\^% 105 | \chardef\isacharunderscore=`\_% 106 | \def\isacharunderscorekeyword{\_}% 107 | \chardef\isacharbackquote=`\`% 108 | \chardef\isacharbackquoteopen=`\`% 109 | \chardef\isacharbackquoteclose=`\`% 110 | \chardef\isacharbraceleft=`\{% 111 | \chardef\isacharbar=`\|% 112 | \chardef\isacharbraceright=`\}% 113 | \chardef\isachartilde=`\~% 114 | 
\def\isacharverbatimopen{\isacharbraceleft\isacharasterisk}% 115 | \def\isacharverbatimclose{\isacharasterisk\isacharbraceright}% 116 | \def\isacartoucheopen{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}}% 117 | \def\isacartoucheclose{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}}% 118 | } 119 | 120 | 121 | % keyword and section markup 122 | 123 | \newcommand{\isakeyword}[1] 124 | {\emph{\bf\def\isachardot{.}\def\isacharunderscore{\isacharunderscorekeyword}% 125 | \def\isacharbraceleft{\{}\def\isacharbraceright{\}}#1}} 126 | \newcommand{\isacommand}[1]{\isakeyword{#1}} 127 | 128 | \newcommand{\isamarkupheader}[1]{\section{#1}} 129 | \newcommand{\isamarkupchapter}[1]{\chapter{#1}} 130 | \newcommand{\isamarkupsection}[1]{\section{#1}} 131 | \newcommand{\isamarkupsubsection}[1]{\subsection{#1}} 132 | \newcommand{\isamarkupsubsubsection}[1]{\subsubsection{#1}} 133 | \newcommand{\isamarkupparagraph}[1]{\paragraph{#1}} 134 | \newcommand{\isamarkupsubparagraph}[1]{\subparagraph{#1}} 135 | 136 | \newif\ifisamarkup 137 | \newcommand{\isabeginpar}{\par\ifisamarkup\relax\else\medskip\fi} 138 | \newcommand{\isaendpar}{\par\medskip} 139 | \newenvironment{isapar}{\parindent\isa@parindent\parskip\isa@parskip\isabeginpar}{\isaendpar} 140 | \newenvironment{isamarkuptext}{\par\isastyletext\begin{isapar}}{\end{isapar}} 141 | \newenvironment{isamarkuptxt}{\par\isastyletxt\begin{isapar}}{\end{isapar}} 142 | \newcommand{\isamarkupcmt}[1]{{\isastylecmt--- #1}} 143 | 144 | 145 | % styles 146 | 147 | \def\isabellestyle#1{\csname isabellestyle#1\endcsname} 148 | 149 | \newcommand{\isabellestyledefault}{% 150 | \def\isastyle{\small\tt\slshape}% 151 | \def\isastylett{\small\tt}% 152 | \def\isastyleminor{\small\tt\slshape}% 153 | \def\isastyleminortt{\small\tt}% 154 | \def\isastylescript{\footnotesize\tt\slshape}% 155 | \isachardefaults% 156 | } 157 | \isabellestyledefault 158 | 159 | \newcommand{\isabellestylett}{% 160 | \def\isastyle{\small\tt}% 161 | \def\isastylett{\small\tt}% 162 | \def\isastyleminor{\small\tt}% 163 | \def\isastyleminortt{\small\tt}% 164 | \def\isastylescript{\footnotesize\tt}% 165 | \isachardefaults% 166 | } 167 | 168 | \newcommand{\isabellestyleit}{% 169 | \def\isastyle{\it}% 170 | \def\isastylett{\tt}% 171 | \def\isastyleminor{\it}% 172 | \def\isastyleminortt{\tt}% 173 | \def\isastylescript{\footnotesize\it}% 174 | \isachardefaults% 175 | \def\isacharunderscorekeyword{\textunderscore}% 176 | \def\isacharbang{\isamath{!}}% 177 | \def\isachardoublequote{}% 178 | \def\isachardoublequoteopen{}% 179 | \def\isachardoublequoteclose{}% 180 | \def\isacharhash{\isamath{\#}}% 181 | \def\isachardollar{\isamath{\$}}% 182 | \def\isacharpercent{\isamath{\%}}% 183 | \def\isacharampersand{\isamath{\&}}% 184 | \def\isacharprime{\isamath{\mskip2mu{'}\mskip-2mu}}% 185 | \def\isacharparenleft{\isamath{(}}% 186 | \def\isacharparenright{\isamath{)}}% 187 | \def\isacharasterisk{\isamath{*}}% 188 | \def\isacharplus{\isamath{+}}% 189 | \def\isacharcomma{\isamath{\mathord,}}% 190 | \def\isacharminus{\isamath{-}}% 191 | \def\isachardot{\isamath{\mathord.}}% 192 | \def\isacharslash{\isamath{/}}% 193 | \def\isacharcolon{\isamath{\mathord:}}% 194 | \def\isacharsemicolon{\isamath{\mathord;}}% 195 | \def\isacharless{\isamath{<}}% 196 | \def\isacharequal{\isamath{=}}% 197 | \def\isachargreater{\isamath{>}}% 198 | \def\isacharat{\isamath{@}}% 199 | \def\isacharbrackleft{\isamath{[}}% 200 | \def\isacharbackslash{\isamath{\backslash}}% 201 | \def\isacharbrackright{\isamath{]}}% 202 | 
\def\isacharunderscore{\textunderscore}% 203 | \def\isacharbraceleft{\isamath{\{}}% 204 | \def\isacharbar{\isamath{\mid}}% 205 | \def\isacharbraceright{\isamath{\}}}% 206 | \def\isachartilde{\isamath{{}\sp{\sim}}}% 207 | \def\isacharbackquoteopen{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}}% 208 | \def\isacharbackquoteclose{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}}% 209 | \def\isacharverbatimopen{\isamath{\langle\!\langle}}% 210 | \def\isacharverbatimclose{\isamath{\rangle\!\rangle}}% 211 | } 212 | 213 | \newcommand{\isabellestyleliteral}{% 214 | \isabellestyleit% 215 | \def\isacharunderscore{\_}% 216 | \def\isacharunderscorekeyword{\_}% 217 | \chardef\isacharbackquoteopen=`\`% 218 | \chardef\isacharbackquoteclose=`\`% 219 | } 220 | 221 | \newcommand{\isabellestyleliteralunderscore}{% 222 | \isabellestyleliteral% 223 | \def\isacharunderscore{\textunderscore}% 224 | \def\isacharunderscorekeyword{\textunderscore}% 225 | } 226 | 227 | \newcommand{\isabellestylesl}{% 228 | \isabellestyleit% 229 | \def\isastyle{\small\sl}% 230 | \def\isastylett{\small\tt}% 231 | \def\isastyleminor{\sl}% 232 | \def\isastyleminortt{\tt}% 233 | \def\isastylescript{\footnotesize\sl}% 234 | } 235 | 236 | \newcommand{\isabellestylerm}{% 237 | \isabellestyleit% 238 | \def\isastyle{\rm}% 239 | \def\isastylett{\tt}% 240 | \def\isastyleminor{\rm}% 241 | \def\isastyleminortt{\tt}% 242 | \def\isastylescript{\footnotesize\rm}% 243 | \def\isacharunderscore{\textunderscore}% 244 | \def\isacharunderscorekeyword{\textunderscore}% 245 | } 246 | 247 | 248 | % tagged regions 249 | 250 | %plain TeX version of comment package -- much faster! 251 | \let\isafmtname\fmtname\def\fmtname{plain} 252 | \usepackage{comment} 253 | \let\fmtname\isafmtname 254 | 255 | \newcommand{\isafold}[1]{\emph{$\langle\mathord{\mathit{#1}}\rangle$}} 256 | 257 | \newcommand{\isakeeptag}[1]% 258 | {\includecomment{isadelim#1}\includecomment{isatag#1}\csarg\def{isafold#1}{}} 259 | \newcommand{\isadroptag}[1]% 260 | {\excludecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{}} 261 | \newcommand{\isafoldtag}[1]% 262 | {\includecomment{isadelim#1}\excludecomment{isatag#1}\csarg\def{isafold#1}{\isafold{#1}}} 263 | 264 | \isakeeptag{theory} 265 | \isakeeptag{proof} 266 | \isakeeptag{ML} 267 | \isakeeptag{visible} 268 | \isadroptag{invisible} 269 | 270 | \IfFileExists{isabelletags.sty}{\usepackage{isabelletags}}{} 271 | -------------------------------------------------------------------------------- /paper/isabellesym.sty: -------------------------------------------------------------------------------- 1 | %% 2 | %% definitions of standard Isabelle symbols 3 | %% 4 | 5 | \newcommand{\isasymzero}{\isamath{\mathbf{0}}} %requires amssymb 6 | \newcommand{\isasymone}{\isamath{\mathbf{1}}} %requires amssymb 7 | \newcommand{\isasymtwo}{\isamath{\mathbf{2}}} %requires amssymb 8 | \newcommand{\isasymthree}{\isamath{\mathbf{3}}} %requires amssymb 9 | \newcommand{\isasymfour}{\isamath{\mathbf{4}}} %requires amssymb 10 | \newcommand{\isasymfive}{\isamath{\mathbf{5}}} %requires amssymb 11 | \newcommand{\isasymsix}{\isamath{\mathbf{6}}} %requires amssymb 12 | \newcommand{\isasymseven}{\isamath{\mathbf{7}}} %requires amssymb 13 | \newcommand{\isasymeight}{\isamath{\mathbf{8}}} %requires amssymb 14 | \newcommand{\isasymnine}{\isamath{\mathbf{9}}} %requires amssymb 15 | \newcommand{\isasymA}{\isamath{\mathcal{A}}} 16 | \newcommand{\isasymB}{\isamath{\mathcal{B}}} 17 | \newcommand{\isasymC}{\isamath{\mathcal{C}}} 18 | 
\newcommand{\isasymD}{\isamath{\mathcal{D}}} 19 | \newcommand{\isasymE}{\isamath{\mathcal{E}}} 20 | \newcommand{\isasymF}{\isamath{\mathcal{F}}} 21 | \newcommand{\isasymG}{\isamath{\mathcal{G}}} 22 | \newcommand{\isasymH}{\isamath{\mathcal{H}}} 23 | \newcommand{\isasymI}{\isamath{\mathcal{I}}} 24 | \newcommand{\isasymJ}{\isamath{\mathcal{J}}} 25 | \newcommand{\isasymK}{\isamath{\mathcal{K}}} 26 | \newcommand{\isasymL}{\isamath{\mathcal{L}}} 27 | \newcommand{\isasymM}{\isamath{\mathcal{M}}} 28 | \newcommand{\isasymN}{\isamath{\mathcal{N}}} 29 | \newcommand{\isasymO}{\isamath{\mathcal{O}}} 30 | \newcommand{\isasymP}{\isamath{\mathcal{P}}} 31 | \newcommand{\isasymQ}{\isamath{\mathcal{Q}}} 32 | \newcommand{\isasymR}{\isamath{\mathcal{R}}} 33 | \newcommand{\isasymS}{\isamath{\mathcal{S}}} 34 | \newcommand{\isasymT}{\isamath{\mathcal{T}}} 35 | \newcommand{\isasymU}{\isamath{\mathcal{U}}} 36 | \newcommand{\isasymV}{\isamath{\mathcal{V}}} 37 | \newcommand{\isasymW}{\isamath{\mathcal{W}}} 38 | \newcommand{\isasymX}{\isamath{\mathcal{X}}} 39 | \newcommand{\isasymY}{\isamath{\mathcal{Y}}} 40 | \newcommand{\isasymZ}{\isamath{\mathcal{Z}}} 41 | \newcommand{\isasyma}{\isamath{\mathrm{a}}} 42 | \newcommand{\isasymb}{\isamath{\mathrm{b}}} 43 | \newcommand{\isasymc}{\isamath{\mathrm{c}}} 44 | \newcommand{\isasymd}{\isamath{\mathrm{d}}} 45 | \newcommand{\isasyme}{\isamath{\mathrm{e}}} 46 | \newcommand{\isasymf}{\isamath{\mathrm{f}}} 47 | \newcommand{\isasymg}{\isamath{\mathrm{g}}} 48 | \newcommand{\isasymh}{\isamath{\mathrm{h}}} 49 | \newcommand{\isasymi}{\isamath{\mathrm{i}}} 50 | \newcommand{\isasymj}{\isamath{\mathrm{j}}} 51 | \newcommand{\isasymk}{\isamath{\mathrm{k}}} 52 | \newcommand{\isasyml}{\isamath{\mathrm{l}}} 53 | \newcommand{\isasymm}{\isamath{\mathrm{m}}} 54 | \newcommand{\isasymn}{\isamath{\mathrm{n}}} 55 | \newcommand{\isasymo}{\isamath{\mathrm{o}}} 56 | \newcommand{\isasymp}{\isamath{\mathrm{p}}} 57 | \newcommand{\isasymq}{\isamath{\mathrm{q}}} 58 | \newcommand{\isasymr}{\isamath{\mathrm{r}}} 59 | \newcommand{\isasyms}{\isamath{\mathrm{s}}} 60 | \newcommand{\isasymt}{\isamath{\mathrm{t}}} 61 | \newcommand{\isasymu}{\isamath{\mathrm{u}}} 62 | \newcommand{\isasymv}{\isamath{\mathrm{v}}} 63 | \newcommand{\isasymw}{\isamath{\mathrm{w}}} 64 | \newcommand{\isasymx}{\isamath{\mathrm{x}}} 65 | \newcommand{\isasymy}{\isamath{\mathrm{y}}} 66 | \newcommand{\isasymz}{\isamath{\mathrm{z}}} 67 | \newcommand{\isasymAA}{\isamath{\mathfrak{A}}} %requires eufrak 68 | \newcommand{\isasymBB}{\isamath{\mathfrak{B}}} %requires eufrak 69 | \newcommand{\isasymCC}{\isamath{\mathfrak{C}}} %requires eufrak 70 | \newcommand{\isasymDD}{\isamath{\mathfrak{D}}} %requires eufrak 71 | \newcommand{\isasymEE}{\isamath{\mathfrak{E}}} %requires eufrak 72 | \newcommand{\isasymFF}{\isamath{\mathfrak{F}}} %requires eufrak 73 | \newcommand{\isasymGG}{\isamath{\mathfrak{G}}} %requires eufrak 74 | \newcommand{\isasymHH}{\isamath{\mathfrak{H}}} %requires eufrak 75 | \newcommand{\isasymII}{\isamath{\mathfrak{I}}} %requires eufrak 76 | \newcommand{\isasymJJ}{\isamath{\mathfrak{J}}} %requires eufrak 77 | \newcommand{\isasymKK}{\isamath{\mathfrak{K}}} %requires eufrak 78 | \newcommand{\isasymLL}{\isamath{\mathfrak{L}}} %requires eufrak 79 | \newcommand{\isasymMM}{\isamath{\mathfrak{M}}} %requires eufrak 80 | \newcommand{\isasymNN}{\isamath{\mathfrak{N}}} %requires eufrak 81 | \newcommand{\isasymOO}{\isamath{\mathfrak{O}}} %requires eufrak 82 | \newcommand{\isasymPP}{\isamath{\mathfrak{P}}} %requires eufrak 83 | 
\newcommand{\isasymQQ}{\isamath{\mathfrak{Q}}} %requires eufrak 84 | \newcommand{\isasymRR}{\isamath{\mathfrak{R}}} %requires eufrak 85 | \newcommand{\isasymSS}{\isamath{\mathfrak{S}}} %requires eufrak 86 | \newcommand{\isasymTT}{\isamath{\mathfrak{T}}} %requires eufrak 87 | \newcommand{\isasymUU}{\isamath{\mathfrak{U}}} %requires eufrak 88 | \newcommand{\isasymVV}{\isamath{\mathfrak{V}}} %requires eufrak 89 | \newcommand{\isasymWW}{\isamath{\mathfrak{W}}} %requires eufrak 90 | \newcommand{\isasymXX}{\isamath{\mathfrak{X}}} %requires eufrak 91 | \newcommand{\isasymYY}{\isamath{\mathfrak{Y}}} %requires eufrak 92 | \newcommand{\isasymZZ}{\isamath{\mathfrak{Z}}} %requires eufrak 93 | \newcommand{\isasymaa}{\isamath{\mathfrak{a}}} %requires eufrak 94 | \newcommand{\isasymbb}{\isamath{\mathfrak{b}}} %requires eufrak 95 | \newcommand{\isasymcc}{\isamath{\mathfrak{c}}} %requires eufrak 96 | \newcommand{\isasymdd}{\isamath{\mathfrak{d}}} %requires eufrak 97 | \newcommand{\isasymee}{\isamath{\mathfrak{e}}} %requires eufrak 98 | \newcommand{\isasymff}{\isamath{\mathfrak{f}}} %requires eufrak 99 | \newcommand{\isasymgg}{\isamath{\mathfrak{g}}} %requires eufrak 100 | \newcommand{\isasymhh}{\isamath{\mathfrak{h}}} %requires eufrak 101 | \newcommand{\isasymii}{\isamath{\mathfrak{i}}} %requires eufrak 102 | \newcommand{\isasymjj}{\isamath{\mathfrak{j}}} %requires eufrak 103 | \newcommand{\isasymkk}{\isamath{\mathfrak{k}}} %requires eufrak 104 | \newcommand{\isasymll}{\isamath{\mathfrak{l}}} %requires eufrak 105 | \newcommand{\isasymmm}{\isamath{\mathfrak{m}}} %requires eufrak 106 | \newcommand{\isasymnn}{\isamath{\mathfrak{n}}} %requires eufrak 107 | \newcommand{\isasymoo}{\isamath{\mathfrak{o}}} %requires eufrak 108 | \newcommand{\isasympp}{\isamath{\mathfrak{p}}} %requires eufrak 109 | \newcommand{\isasymqq}{\isamath{\mathfrak{q}}} %requires eufrak 110 | \newcommand{\isasymrr}{\isamath{\mathfrak{r}}} %requires eufrak 111 | \newcommand{\isasymss}{\isamath{\mathfrak{s}}} %requires eufrak 112 | \newcommand{\isasymtt}{\isamath{\mathfrak{t}}} %requires eufrak 113 | \newcommand{\isasymuu}{\isamath{\mathfrak{u}}} %requires eufrak 114 | \newcommand{\isasymvv}{\isamath{\mathfrak{v}}} %requires eufrak 115 | \newcommand{\isasymww}{\isamath{\mathfrak{w}}} %requires eufrak 116 | \newcommand{\isasymxx}{\isamath{\mathfrak{x}}} %requires eufrak 117 | \newcommand{\isasymyy}{\isamath{\mathfrak{y}}} %requires eufrak 118 | \newcommand{\isasymzz}{\isamath{\mathfrak{z}}} %requires eufrak 119 | \newcommand{\isasymalpha}{\isamath{\alpha}} 120 | \newcommand{\isasymbeta}{\isamath{\beta}} 121 | \newcommand{\isasymgamma}{\isamath{\gamma}} 122 | \newcommand{\isasymdelta}{\isamath{\delta}} 123 | \newcommand{\isasymepsilon}{\isamath{\varepsilon}} 124 | \newcommand{\isasymzeta}{\isamath{\zeta}} 125 | \newcommand{\isasymeta}{\isamath{\eta}} 126 | \newcommand{\isasymtheta}{\isamath{\vartheta}} 127 | \newcommand{\isasymiota}{\isamath{\iota}} 128 | \newcommand{\isasymkappa}{\isamath{\kappa}} 129 | \newcommand{\isasymlambda}{\isamath{\lambda}} 130 | \newcommand{\isasymmu}{\isamath{\mu}} 131 | \newcommand{\isasymnu}{\isamath{\nu}} 132 | \newcommand{\isasymxi}{\isamath{\xi}} 133 | \newcommand{\isasympi}{\isamath{\pi}} 134 | \newcommand{\isasymrho}{\isamath{\varrho}} 135 | \newcommand{\isasymsigma}{\isamath{\sigma}} 136 | \newcommand{\isasymtau}{\isamath{\tau}} 137 | \newcommand{\isasymupsilon}{\isamath{\upsilon}} 138 | \newcommand{\isasymphi}{\isamath{\varphi}} 139 | \newcommand{\isasymchi}{\isamath{\chi}} 140 | 
\newcommand{\isasympsi}{\isamath{\psi}} 141 | \newcommand{\isasymomega}{\isamath{\omega}} 142 | \newcommand{\isasymGamma}{\isamath{\Gamma}} 143 | \newcommand{\isasymDelta}{\isamath{\Delta}} 144 | \newcommand{\isasymTheta}{\isamath{\Theta}} 145 | \newcommand{\isasymLambda}{\isamath{\Lambda}} 146 | \newcommand{\isasymXi}{\isamath{\Xi}} 147 | \newcommand{\isasymPi}{\isamath{\Pi}} 148 | \newcommand{\isasymSigma}{\isamath{\Sigma}} 149 | \newcommand{\isasymUpsilon}{\isamath{\Upsilon}} 150 | \newcommand{\isasymPhi}{\isamath{\Phi}} 151 | \newcommand{\isasymPsi}{\isamath{\Psi}} 152 | \newcommand{\isasymOmega}{\isamath{\Omega}} 153 | \newcommand{\isasymbool}{\isamath{\mathrm{I}\mkern-3.8mu\mathrm{B}}} 154 | \newcommand{\isasymcomplex}{\isamath{\mathrm{C}\mkern-15mu{\phantom{\mathrm{t}}\vrule}\mkern9mu}} 155 | \newcommand{\isasymnat}{\isamath{\mathrm{I}\mkern-3.8mu\mathrm{N}}} 156 | \newcommand{\isasymrat}{\isamath{\mathrm{Q}\mkern-16mu{\phantom{\mathrm{t}}\vrule}\mkern10mu}} 157 | \newcommand{\isasymreal}{\isamath{\mathrm{I}\mkern-3.8mu\mathrm{R}}} 158 | \newcommand{\isasymint}{\isamath{\mathsf{Z}\mkern-7.5mu\mathsf{Z}}} 159 | \newcommand{\isasymleftarrow}{\isamath{\leftarrow}} 160 | \newcommand{\isasymrightarrow}{\isamath{\rightarrow}} 161 | \newcommand{\isasymlongleftarrow}{\isamath{\longleftarrow}} 162 | \newcommand{\isasymlongrightarrow}{\isamath{\longrightarrow}} 163 | \newcommand{\isasymlonglongleftarrow}{\isamath{\xleftarrow{\hphantom{AAA}}}} %requires amsmath 164 | \newcommand{\isasymlonglongrightarrow}{\isamath{\xrightarrow{\hphantom{AAA}}}} %requires amsmath 165 | \newcommand{\isasymlonglonglongleftarrow}{\isamath{\xleftarrow{\hphantom{AAAA}}}} %requires amsmath 166 | \newcommand{\isasymlonglonglongrightarrow}{\isamath{\xrightarrow{\hphantom{AAAA}}}} %requires amsmath 167 | \newcommand{\isasymLeftarrow}{\isamath{\Leftarrow}} 168 | \newcommand{\isasymRightarrow}{\isamath{\Rightarrow}} 169 | \newcommand{\isasymLongleftarrow}{\isamath{\Longleftarrow}} 170 | \newcommand{\isasymLongrightarrow}{\isamath{\Longrightarrow}} 171 | \newcommand{\isasymLleftarrow}{\isamath{\Lleftarrow}} %requires amssymb 172 | \newcommand{\isasymRrightarrow}{\isamath{\Rrightarrow}} %requires amssymb 173 | \newcommand{\isasymleftrightarrow}{\isamath{\leftrightarrow}} 174 | \newcommand{\isasymLeftrightarrow}{\isamath{\Leftrightarrow}} 175 | \newcommand{\isasymlongleftrightarrow}{\isamath{\longleftrightarrow}} 176 | \newcommand{\isasymLongleftrightarrow}{\isamath{\Longleftrightarrow}} 177 | \newcommand{\isasymmapsto}{\isamath{\mapsto}} 178 | \newcommand{\isasymlongmapsto}{\isamath{\longmapsto}} 179 | \newcommand{\isasymmidarrow}{\isamath{\relbar}} 180 | \newcommand{\isasymMidarrow}{\isamath{\Relbar}} 181 | \newcommand{\isasymhookleftarrow}{\isamath{\hookleftarrow}} 182 | \newcommand{\isasymhookrightarrow}{\isamath{\hookrightarrow}} 183 | \newcommand{\isasymleftharpoondown}{\isamath{\leftharpoondown}} 184 | \newcommand{\isasymrightharpoondown}{\isamath{\rightharpoondown}} 185 | \newcommand{\isasymleftharpoonup}{\isamath{\leftharpoonup}} 186 | \newcommand{\isasymrightharpoonup}{\isamath{\rightharpoonup}} 187 | \newcommand{\isasymrightleftharpoons}{\isamath{\rightleftharpoons}} 188 | \newcommand{\isasymleadsto}{\isamath{\leadsto}} %requires amssymb 189 | \newcommand{\isasymdownharpoonleft}{\isamath{\downharpoonleft}} %requires amssymb 190 | \newcommand{\isasymdownharpoonright}{\isamath{\downharpoonright}} %requires amssymb 191 | \newcommand{\isasymupharpoonleft}{\isamath{\upharpoonleft}} %requires amssymb 192 | 
\newcommand{\isasymupharpoonright}{\isamath{\upharpoonright}} %requires amssymb 193 | \newcommand{\isasymrestriction}{\isamath{\restriction}} %requires amssymb 194 | \newcommand{\isasymColon}{\isamath{\mathrel{::}}} 195 | \newcommand{\isasymup}{\isamath{\uparrow}} 196 | \newcommand{\isasymUp}{\isamath{\Uparrow}} 197 | \newcommand{\isasymdown}{\isamath{\downarrow}} 198 | \newcommand{\isasymDown}{\isamath{\Downarrow}} 199 | \newcommand{\isasymupdown}{\isamath{\updownarrow}} 200 | \newcommand{\isasymUpdown}{\isamath{\Updownarrow}} 201 | \newcommand{\isasymlangle}{\isamath{\langle}} 202 | \newcommand{\isasymrangle}{\isamath{\rangle}} 203 | \newcommand{\isasymlceil}{\isamath{\lceil}} 204 | \newcommand{\isasymrceil}{\isamath{\rceil}} 205 | \newcommand{\isasymlfloor}{\isamath{\lfloor}} 206 | \newcommand{\isasymrfloor}{\isamath{\rfloor}} 207 | \newcommand{\isasymlparr}{\isamath{\mathopen{(\mkern-3mu\mid}}} 208 | \newcommand{\isasymrparr}{\isamath{\mathclose{\mid\mkern-3mu)}}} 209 | \newcommand{\isasymlbrakk}{\isamath{\mathopen{\lbrack\mkern-3mu\lbrack}}} 210 | \newcommand{\isasymrbrakk}{\isamath{\mathclose{\rbrack\mkern-3mu\rbrack}}} 211 | \newcommand{\isasymlbrace}{\isamath{\mathopen{\lbrace\mkern-4.5mu\mid}}} 212 | \newcommand{\isasymrbrace}{\isamath{\mathclose{\mid\mkern-4.5mu\rbrace}}} 213 | \newcommand{\isasymguillemotleft}{\isatext{\flqq}} %requires babel 214 | \newcommand{\isasymguillemotright}{\isatext{\frqq}} %requires babel 215 | \newcommand{\isasymbottom}{\isamath{\bot}} 216 | \newcommand{\isasymtop}{\isamath{\top}} 217 | \newcommand{\isasymand}{\isamath{\wedge}} 218 | \newcommand{\isasymAnd}{\isamath{\bigwedge}} 219 | \newcommand{\isasymor}{\isamath{\vee}} 220 | \newcommand{\isasymOr}{\isamath{\bigvee}} 221 | \newcommand{\isasymforall}{\isamath{\forall\,}} 222 | \newcommand{\isasymexists}{\isamath{\exists\,}} 223 | \newcommand{\isasymnot}{\isamath{\neg}} 224 | \newcommand{\isasymnexists}{\isamath{\nexists\,}} %requires amssymb 225 | \newcommand{\isasymcircle}{\isamath{\ocircle}} %requires wasysym 226 | \newcommand{\isasymbox}{\isamath{\Box}} %requires amssymb 227 | \newcommand{\isasymdiamond}{\isamath{\Diamond}} %requires amssymb 228 | \newcommand{\isasymdiamondop}{\isamath{\diamond}} 229 | \newcommand{\isasymsurd}{\isamath{\surd}} 230 | \newcommand{\isasymturnstile}{\isamath{\vdash}} 231 | \newcommand{\isasymTurnstile}{\isamath{\models}} 232 | \newcommand{\isasymtturnstile}{\isamath{\vdash\!\!\!\vdash}} 233 | \newcommand{\isasymTTurnstile}{\isamath{\mid\!\models}} 234 | \newcommand{\isasymstileturn}{\isamath{\dashv}} 235 | \newcommand{\isasymle}{\isamath{\le}} 236 | \newcommand{\isasymge}{\isamath{\ge}} 237 | \newcommand{\isasymlless}{\isamath{\ll}} 238 | \newcommand{\isasymggreater}{\isamath{\gg}} 239 | \newcommand{\isasymlesssim}{\isamath{\lesssim}} %requires amssymb 240 | \newcommand{\isasymgreatersim}{\isamath{\gtrsim}} %requires amssymb 241 | \newcommand{\isasymlessapprox}{\isamath{\lessapprox}} %requires amssymb 242 | \newcommand{\isasymgreaterapprox}{\isamath{\gtrapprox}} %requires amssymb 243 | \newcommand{\isasymin}{\isamath{\in}} 244 | \newcommand{\isasymnotin}{\isamath{\notin}} 245 | \newcommand{\isasymsubset}{\isamath{\subset}} 246 | \newcommand{\isasymsupset}{\isamath{\supset}} 247 | \newcommand{\isasymsubseteq}{\isamath{\subseteq}} 248 | \newcommand{\isasymsupseteq}{\isamath{\supseteq}} 249 | \newcommand{\isasymsqsubset}{\isamath{\sqsubset}} %requires amssymb 250 | \newcommand{\isasymsqsupset}{\isamath{\sqsupset}} %requires amssymb 251 | 
\newcommand{\isasymsqsubseteq}{\isamath{\sqsubseteq}} 252 | \newcommand{\isasymsqsupseteq}{\isamath{\sqsupseteq}} 253 | \newcommand{\isasyminter}{\isamath{\cap}} 254 | \newcommand{\isasymInter}{\isamath{\bigcap\,}} 255 | \newcommand{\isasymunion}{\isamath{\cup}} 256 | \newcommand{\isasymUnion}{\isamath{\bigcup\,}} 257 | \newcommand{\isasymsqunion}{\isamath{\sqcup}} 258 | \newcommand{\isasymSqunion}{\isamath{\bigsqcup\,}} 259 | \newcommand{\isasymsqinter}{\isamath{\sqcap}} 260 | \newcommand{\isasymSqinter}{\isamath{\bigsqcap\,}} %requires stmaryrd 261 | \newcommand{\isasymsetminus}{\isamath{\setminus}} 262 | \newcommand{\isasympropto}{\isamath{\propto}} 263 | \newcommand{\isasymuplus}{\isamath{\uplus}} 264 | \newcommand{\isasymUplus}{\isamath{\biguplus\,}} 265 | \newcommand{\isasymnoteq}{\isamath{\not=}} 266 | \newcommand{\isasymsim}{\isamath{\sim}} 267 | \newcommand{\isasymdoteq}{\isamath{\doteq}} 268 | \newcommand{\isasymsimeq}{\isamath{\simeq}} 269 | \newcommand{\isasymapprox}{\isamath{\approx}} 270 | \newcommand{\isasymasymp}{\isamath{\asymp}} 271 | \newcommand{\isasymcong}{\isamath{\cong}} 272 | \newcommand{\isasymsmile}{\isamath{\smile}} 273 | \newcommand{\isasymequiv}{\isamath{\equiv}} 274 | \newcommand{\isasymfrown}{\isamath{\frown}} 275 | \newcommand{\isasymJoin}{\isamath{\Join}} %requires amssymb 276 | \newcommand{\isasymbowtie}{\isamath{\bowtie}} 277 | \newcommand{\isasymprec}{\isamath{\prec}} 278 | \newcommand{\isasymsucc}{\isamath{\succ}} 279 | \newcommand{\isasympreceq}{\isamath{\preceq}} 280 | \newcommand{\isasymsucceq}{\isamath{\succeq}} 281 | \newcommand{\isasymparallel}{\isamath{\parallel}} 282 | \newcommand{\isasymbar}{\isamath{\mid}} 283 | \newcommand{\isasymplusminus}{\isamath{\pm}} 284 | \newcommand{\isasymminusplus}{\isamath{\mp}} 285 | \newcommand{\isasymtimes}{\isamath{\times}} 286 | \newcommand{\isasymdiv}{\isamath{\div}} 287 | \newcommand{\isasymcdot}{\isamath{\cdot}} 288 | \newcommand{\isasymstar}{\isamath{\star}} 289 | \newcommand{\isasymbullet}{\boldmath\isamath{\mathchoice{\displaystyle{\cdot}}{\textstyle{\cdot}}{\scriptstyle{\bullet}}{\scriptscriptstyle{\bullet}}}} 290 | \newcommand{\isasymcirc}{\isamath{\circ}} 291 | \newcommand{\isasymdagger}{\isamath{\dagger}} 292 | \newcommand{\isasymddagger}{\isamath{\ddagger}} 293 | \newcommand{\isasymlhd}{\isamath{\lhd}} %requires amssymb 294 | \newcommand{\isasymrhd}{\isamath{\rhd}} %requires amssymb 295 | \newcommand{\isasymunlhd}{\isamath{\unlhd}} %requires amssymb 296 | \newcommand{\isasymunrhd}{\isamath{\unrhd}} %requires amssymb 297 | \newcommand{\isasymtriangleleft}{\isamath{\triangleleft}} 298 | \newcommand{\isasymtriangleright}{\isamath{\triangleright}} 299 | \newcommand{\isasymtriangle}{\isamath{\triangle}} 300 | \newcommand{\isasymtriangleq}{\isamath{\triangleq}} %requires amssymb 301 | \newcommand{\isasymoplus}{\isamath{\oplus}} 302 | \newcommand{\isasymOplus}{\isamath{\bigoplus\,}} 303 | \newcommand{\isasymotimes}{\isamath{\otimes}} 304 | \newcommand{\isasymOtimes}{\isamath{\bigotimes\,}} 305 | \newcommand{\isasymodot}{\isamath{\odot}} 306 | \newcommand{\isasymOdot}{\isamath{\bigodot\,}} 307 | \newcommand{\isasymominus}{\isamath{\ominus}} 308 | \newcommand{\isasymoslash}{\isamath{\oslash}} 309 | \newcommand{\isasymdots}{\isamath{\dots}} 310 | \newcommand{\isasymcdots}{\isamath{\cdots}} 311 | \newcommand{\isasymSum}{\isamath{\sum\,}} 312 | \newcommand{\isasymProd}{\isamath{\prod\,}} 313 | \newcommand{\isasymCoprod}{\isamath{\coprod\,}} 314 | \newcommand{\isasyminfinity}{\isamath{\infty}} 315 | 
\newcommand{\isasymintegral}{\isamath{\int\,}} 316 | \newcommand{\isasymointegral}{\isamath{\oint\,}} 317 | \newcommand{\isasymclubsuit}{\isamath{\clubsuit}} 318 | \newcommand{\isasymdiamondsuit}{\isamath{\diamondsuit}} 319 | \newcommand{\isasymheartsuit}{\isamath{\heartsuit}} 320 | \newcommand{\isasymspadesuit}{\isamath{\spadesuit}} 321 | \newcommand{\isasymaleph}{\isamath{\aleph}} 322 | \newcommand{\isasymemptyset}{\isamath{\emptyset}} 323 | \newcommand{\isasymnabla}{\isamath{\nabla}} 324 | \newcommand{\isasympartial}{\isamath{\partial}} 325 | \newcommand{\isasymRe}{\isamath{\Re}} 326 | \newcommand{\isasymIm}{\isamath{\Im}} 327 | \newcommand{\isasymflat}{\isamath{\flat}} 328 | \newcommand{\isasymnatural}{\isamath{\natural}} 329 | \newcommand{\isasymsharp}{\isamath{\sharp}} 330 | \newcommand{\isasymangle}{\isamath{\angle}} 331 | \newcommand{\isasymcopyright}{\isatext{\rm\copyright}} 332 | \newcommand{\isasymregistered}{\isatext{\rm\textregistered}} 333 | \newcommand{\isasyminverse}{\isamath{{}^{-1}}} 334 | \newcommand{\isasymonequarter}{\isatext{\rm\textonequarter}} %requires textcomp 335 | \newcommand{\isasymonehalf}{\isatext{\rm\textonehalf}} %requires textcomp 336 | \newcommand{\isasymthreequarters}{\isatext{\rm\textthreequarters}} %requires textcomp 337 | \newcommand{\isasymordfeminine}{\isatext{\rm\textordfeminine}} 338 | \newcommand{\isasymordmasculine}{\isatext{\rm\textordmasculine}} 339 | \newcommand{\isasymsection}{\isatext{\rm\S}} 340 | \newcommand{\isasymparagraph}{\isatext{\rm\P}} 341 | \newcommand{\isasymexclamdown}{\isatext{\rm\textexclamdown}} 342 | \newcommand{\isasymquestiondown}{\isatext{\rm\textquestiondown}} 343 | \newcommand{\isasymeuro}{\isatext{\euro}} %requires eurosym 344 | \newcommand{\isasympounds}{\isamath{\pounds}} 345 | \newcommand{\isasymyen}{\isatext{\yen}} %requires amssymb 346 | \newcommand{\isasymcent}{\isatext{\textcent}} %requires textcomp 347 | \newcommand{\isasymcurrency}{\isatext{\textcurrency}} %requires textcomp 348 | \newcommand{\isasymdegree}{\isatext{\rm\textdegree}} %requires textcomp 349 | \newcommand{\isasymhyphen}{\isatext{\rm-}} 350 | \newcommand{\isasymamalg}{\isamath{\amalg}} 351 | \newcommand{\isasymmho}{\isamath{\mho}} %requires amssymb 352 | \newcommand{\isasymlozenge}{\isamath{\lozenge}} %requires amssymb 353 | \newcommand{\isasymwp}{\isamath{\wp}} 354 | \newcommand{\isasymwrong}{\isamath{\wr}} 355 | \newcommand{\isasymacute}{\isatext{\'\relax}} 356 | \newcommand{\isasymindex}{\isatext{\i}} 357 | \newcommand{\isasymdieresis}{\isatext{\"\relax}} 358 | \newcommand{\isasymcedilla}{\isatext{\c\relax}} 359 | \newcommand{\isasymhungarumlaut}{\isatext{\H\relax}} 360 | \newcommand{\isasymmodule}{\isamath{\langle}\isakeyword{module}\isamath{\rangle}} 361 | \newcommand{\isasymsome}{\isamath{\epsilon\,}} 362 | \newcommand{\isasymhole}{\isatext{\rm\wasylozenge}} %requires wasysym 363 | \newcommand{\isasymbind}{\isamath{\mathbin{>\!\!\!>\mkern-6.7mu=}}} 364 | \newcommand{\isasymthen}{\isamath{\mathbin{>\!\!\!>}}} 365 | \newcommand{\isasymopen}{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\langle$}}} 366 | \newcommand{\isasymclose}{\isatext{\raise.3ex\hbox{$\scriptscriptstyle\rangle$}}} 367 | \newcommand{\isasymnewline}{\isatext{\fbox{$\hookleftarrow$}}} 368 | \newcommand{\isasymcomment}{\isatext{---}} 369 | \newcommand{\isasymproof}{\isamath{\,\langle\mathit{proof}\rangle}} 370 | \newcommand{\isactrlundefined}{\isakeyword{undefined}\ } 371 | \newcommand{\isactrlfile}{\isakeyword{file}\ } 372 | \newcommand{\isactrldir}{\isakeyword{dir}\ } 373 | 
-------------------------------------------------------------------------------- /paper/leader-vs-crdt.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/leader-vs-crdt.pdf -------------------------------------------------------------------------------- /paper/photo-alastair.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/photo-alastair.jpg -------------------------------------------------------------------------------- /paper/photo-dominic.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/photo-dominic.jpg -------------------------------------------------------------------------------- /paper/photo-martin.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/photo-martin.jpg -------------------------------------------------------------------------------- /paper/photo-victor.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/photo-victor.jpg -------------------------------------------------------------------------------- /paper/review-response2.tex: -------------------------------------------------------------------------------- 1 | \documentclass[10pt]{article} 2 | \usepackage[a4paper,margin=1in]{geometry} 3 | \usepackage{spverbatim} 4 | \sloppy 5 | 6 | \usepackage{xr} 7 | \externaldocument{move-op} 8 | 9 | \newcommand{\todo}[1]{\textcolor{red}{TODO: #1}} 10 | \newcommand{\authorcomment}[1]{\begin{quote}\textbf{Author comment:} #1\end{quote}} 11 | 12 | \begin{document} 13 | \title{Summary of changes to ``A highly-available move operation for replicated trees''} 14 | \author{Martin Kleppmann, Dominic P.\ Mulligan, Victor B.F.\ Gomes, Alastair R.\ Beresford} 15 | \date{} 16 | \maketitle 17 | 18 | We thank all reviewers for this second round of reviews and are happy to note that reviewers 2 and 3 both would like to see the paper accepted as-is with no further changes. 19 | In this revision we have therefore only made minor changes to address comments by reviewer 1, and added some minor clarifications. 20 | 21 | \section{Reviewer 1} 22 | 23 | \begin{spverbatim} 24 | Recommendation: Author Should Prepare A Major Revision For A Second Review 25 | 26 | Comments: 27 | What is the difference between the proposed algorithm and the data replication algorithms in the cloud? Please state their challenges in the introduction by mentioning references. 28 | A review of data replication based on meta-heuristics approach in cloud computing and data grid. doi.org/10.1007/s00500-020-04802-1. 29 | \end{spverbatim} 30 | \authorcomment{The cited paper is about using optimisation algorithms to decide what to replicate, how many replicas to create, where to locate them, when to access them, and other similar questions. 
31 | Whilst we agree that those questions are indeed important when deciding how to deploy real-world systems, we think that they are unrelated to our work, which is a largely theoretical work introducing (and thereafter proving correct) a novel distributed algorithm. 32 | In particular, our paper introduces techniques that ensure that all replicas converge to a consistent ``view'' of a distributed tree data structure. 33 | Our work can be deployed in any system that requires the use of a replicated tree, regardless of whether it is cloud-based, client-server, peer-to-peer, or uses any other topology---from our point-of-view we are completely ambivalent about such deployment questions as they exist at a level of abstraction \emph{below} that in which we are working. 34 | Note that, whilst we provide an empirical assessment of our algorithm, this assessment is merely intended to demonstrate that our algorithm is not \emph{ipso facto} unworkably inefficient. 35 | 36 | Simply put: there is no need for us to distinguish between cloud systems and other types of replicated system because from the point of view of our algorithm they are the same.} 37 | \begin{spverbatim} 38 | A brief survey on replica consistency in cloud environments. doi.org/10.1186/s13174-020-0122-y 39 | \end{spverbatim} 40 | \authorcomment{As we explain in {\S}1, {\S}4.2, and {\S}6, our algorithm provides a consistency model called ``strong eventual consistency'' [Shapiro et al., 2011]. 41 | The survey referenced above unfortunately overlooks this important consistency model. 42 | Strong eventual consistency is a strengthening of eventual consistency, is often combined with causal consistency, and it is one of the strongest consistency models that always provides ``availability'' in the sense of the CAP theorem. 43 | We believe our paper already adequately explains the consistency model, and the prior works [2,3,4] referenced in our paper explain it in further detail.} 44 | \begin{spverbatim} 45 | Explain the time complexity of the proposed method. 46 | \end{spverbatim} 47 | \authorcomment{At the request of the reviewer we have added a discussion of the time complexity of our algorithm to the beginning of {\S}5.1.} 48 | \begin{spverbatim} 49 | Unfortunately, there are no "quantitative" performance comparisons among related strategies. It’s also important if the author can summarize the advantages, and disadvantages of each strategy according to the performance metrics. 50 | \end{spverbatim} 51 | \authorcomment{Our evaluation ({\S}5) performs a quantitative performance comparison between two implementations of our algorithm and state machine replication. 52 | We explain the advantages and disadvantages of these approaches in detail in this section. 53 | In \S~5.2 we also compare our algorithm to a locking-based approach, and show that it would have three orders of magnitude lower throughput than our algorithm in our test setup (5.7 versus 5,700 ops/sec). 54 | 55 | We therefore believe that our paper already contains the quantitative comparison that the reviewer is asking for. 56 | If there is a particular performance comparison that is missing in the reviewer's opinion, we kindly request that they specify precisely what should be added.} 57 | \begin{spverbatim} 58 | 59 | Additional Questions: 60 | 1. 
Please explain how this manuscript advances this field of research and/or contributes something new to the literature.: Present Conflict-free Replicated Data Type for trees that allow move operations without any coordination between replicas. 61 | 62 | 2. Is the manuscript technically sound? Please explain your answer under Public Comments below.: Appears to be - but didn't check completely 63 | 64 | 1. Which category describes this manuscript?: Research/Technology 65 | 66 | 2. How relevant is this manuscript to the readers of this periodical? Please explain your rating under Public Comments below.: Relevant 67 | 68 | 1. Are the title, abstract, and keywords appropriate? Please explain under Public Comments below.: Yes 69 | 70 | 2. Does the manuscript contain sufficient and appropriate references? Please explain under Public Comments below.: Important references are missing; more references are needed 71 | \end{spverbatim} 72 | \authorcomment{If references are missing, please specify what they are.} 73 | \begin{spverbatim} 74 | 75 | 3. Does the introduction state the objectives of the manuscript in terms that encourage the reader to read on? Please explain your answer under Public Comments below.: Could be improved 76 | 77 | 4. How would you rate the organization of the manuscript? Is it focused? Is the length appropriate for the topic? Please explain under Public Comments below.: Satisfactory 78 | 79 | 5. Please rate the readability of the manuscript. Explain your rating under Public Comments below.: Readable - but requires some effort to understand 80 | 81 | 6. Should the supplemental material be included? (Click on the Supplementary Files icon to view files): Yes, as part of the main paper if accepted (cannot exceed the strict page limit) 82 | 83 | 7. If yes to 6, should it be accepted: After revisions. Please include explanation under Public Comments below. 84 | 85 | 8. Would you recommend adding the code/data associated with this paper to help address your concerns and/or strengthen the paper?: No 86 | 87 | Please rate the manuscript. Please explain your choice.: Good 88 | \end{spverbatim} 89 | 90 | 91 | \section{Reviewer 2} 92 | 93 | \begin{spverbatim} 94 | Recommendation: Accept With No Changes 95 | 96 | Comments: 97 | The authors have addressed adequately my comments. 98 | 99 | Additional Questions: 100 | 1. Please explain how this manuscript advances this field of research and/or contributes something new to the literature.: The paper presents a tree replicated data type that allows concurrent move operations without any coordination between replicas. The tree data type relies on an optimistic replication algorithm that ensures state convergence and that the tree structure properties are preserved at all times. Given the complex (inductive) structure of trees, a strong point of the work is the mechanized proof that provides certification for the correctness of the algorithm. 101 | 102 | 2. Is the manuscript technically sound? Please explain your answer under Public Comments below.: Yes 103 | 104 | 1. Which category describes this manuscript?: Research/Technology 105 | 106 | 2. How relevant is this manuscript to the readers of this periodical? Please explain your rating under Public Comments below.: Relevant 107 | 108 | 1. Are the title, abstract, and keywords appropriate? Please explain under Public Comments below.: Yes 109 | 110 | 2. Does the manuscript contain sufficient and appropriate references? 
Please explain under Public Comments below.: References are sufficient and appropriate 111 | 112 | 3. Does the introduction state the objectives of the manuscript in terms that encourage the reader to read on? Please explain your answer under Public Comments below.: Yes 113 | 114 | 4. How would you rate the organization of the manuscript? Is it focused? Is the length appropriate for the topic? Please explain under Public Comments below.: Satisfactory 115 | 116 | 5. Please rate the readability of the manuscript. Explain your rating under Public Comments below.: Easy to read 117 | 118 | 6. Should the supplemental material be included? (Click on the Supplementary Files icon to view files): Yes, as part of the digital library for this submission if accepted 119 | 120 | 7. If yes to 6, should it be accepted: As is 121 | 122 | 8. Would you recommend adding the code/data associated with this paper to help address your concerns and/or strengthen the paper?: No 123 | 124 | Please rate the manuscript. Please explain your choice.: Good 125 | \end{spverbatim} 126 | 127 | 128 | \section{Reviewer 3} 129 | 130 | \begin{spverbatim} 131 | Recommendation: Accept With No Changes 132 | 133 | Comments: 134 | I followed the list of revisions and I think that the authors did a great job in addressing the comments of the other reviewers. I think that the paper is great and should be accepted in the current form. 135 | 136 | Additional Questions: 137 | 1. Please explain how this manuscript advances this field of research and/or contributes something new to the literature.: The paper presents a new formally-verified implementation of a replicated tree data type, which is an essential ingredient of a wide range of distributed software like distributed file systems, editors, applications manipulating XML/JSON structures, etc. This implementation is highly-available (the latency of applying an operation is independent of the network’s latency) and eventually consistent (all replicas reach the same state when all updates have been propagated). These properties are essential in modern distributed services. The implementation solves existing issues in industrial applications like Google Drive and Dropbox, and contradicts claims about the existence of such an implementation in previous research articles. 138 | 139 | 2. Is the manuscript technically sound? Please explain your answer under Public Comments below.: Yes 140 | 141 | 1. Which category describes this manuscript?: Research/Technology 142 | 143 | 2. How relevant is this manuscript to the readers of this periodical? Please explain your rating under Public Comments below.: Very Relevant 144 | 145 | 1. Are the title, abstract, and keywords appropriate? Please explain under Public Comments below.: Yes 146 | 147 | 2. Does the manuscript contain sufficient and appropriate references? Please explain under Public Comments below.: References are sufficient and appropriate 148 | 149 | 3. Does the introduction state the objectives of the manuscript in terms that encourage the reader to read on? Please explain your answer under Public Comments below.: Yes 150 | 151 | 4. How would you rate the organization of the manuscript? Is it focused? Is the length appropriate for the topic? Please explain under Public Comments below.: Satisfactory 152 | 153 | 5. Please rate the readability of the manuscript. Explain your rating under Public Comments below.: Easy to read 154 | 155 | 6. Should the supplemental material be included? 
(Click on the Supplementary Files icon to view files): Yes, as part of the digital library for this submission if accepted 156 | 157 | 7. If yes to 6, should it be accepted: As is 158 | 159 | 8. Would you recommend adding the code/data associated with this paper to help address your concerns and/or strengthen the paper?: Yes 160 | 161 | Please rate the manuscript. Please explain your choice.: Excellent 162 | \end{spverbatim} 163 | 164 | \end{document} 165 | -------------------------------------------------------------------------------- /paper/smiley.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/paper/smiley.pdf -------------------------------------------------------------------------------- /proof/Ancestor_LFP_Executable_Code.thy: -------------------------------------------------------------------------------- 1 | theory Ancestor_LFP_Executable_Code 2 | imports Main "HOL-Library.While_Combinator" "HOL-Library.Code_Target_Int" 3 | begin 4 | 5 | inductive ancestor :: \('a \ 'a) set \ 'a \ 'a \ bool\ 6 | where \(p, c) \ S \ ancestor S p c\ | 7 | \(p, c) \ S \ ancestor S c g \ ancestor S p g\ 8 | 9 | lemma ancestor_trans: 10 | assumes \ancestor S p c\ 11 | and \ancestor S c g\ 12 | shows \ancestor S p g\ 13 | using assms by(induction rule: ancestor.induct) (force intro: ancestor.intros)+ 14 | 15 | definition ancestor' :: \('a::{finite} \ 'a) set \ 'a \ 'a \ bool\ 16 | where \ancestor' S p c \ (p, c) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 17 | 18 | lemma mono_ancestor_functorI [intro!, code_unfold]: 19 | shows \mono (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 20 | by(rule monoI, force) 21 | 22 | lemma ancestor_alt_def1: 23 | assumes \ancestor' S p c\ 24 | shows \ancestor S p c\ 25 | proof - 26 | have \lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T }) \ { (p, c). ancestor S p c }\ 27 | proof(rule lfp_induct) 28 | show \mono (\T. S \ {(p, g). \c. (p, c) \ S \ (c, g) \ T})\ 29 | by force 30 | next 31 | show \S \ {(p, g). \c. (p, c) \ S \ (c, g) \ lfp (\T. S \ {(p, g). \c. (p, c) \ S \ (c, g) \ T}) \ {(p, c). ancestor S p c}} \ {(p, c). ancestor S p c}\ 32 | by(force intro: ancestor.intros)+ 33 | qed 34 | from this have \{ (p, c). ancestor' S p c } \ { (p, c). ancestor S p c }\ 35 | by(auto simp add: ancestor'_def) 36 | from this and assms show \ancestor S p c\ 37 | by auto 38 | qed 39 | 40 | lemma ancestor_functor_refl [intro!]: 41 | assumes \(p, c) \ S\ 42 | shows \(p, c) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 43 | using assms by(subst lfp_unfold, auto) 44 | 45 | lemma ancestor_functor_trans [intro!]: 46 | fixes p :: \'a::{finite}\ 47 | assumes \(p, c) \ S\ and \(c, g) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 48 | shows \(p, g) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 49 | proof - 50 | have \S \ { (p, g). \c. (p, c) \ S \ (c, g) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T }) } = 51 | lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 52 | by(rule lfp_unfold[OF mono_ancestor_functorI, symmetric]) 53 | also have \(p, g) \ S \ { (p, g). \c. (p, c) \ S \ (c, g) \ lfp (\T. S \ { (p, g). \c. 
(p, c) \ S \ (c, g) \ T }) }\ 54 | using assms by clarsimp 55 | ultimately show \?thesis\ 56 | by auto 57 | qed 58 | 59 | lemma ancestor_alt_def2: 60 | assumes \ancestor S p c\ 61 | shows \ancestor' S p c\ 62 | using assms 63 | proof(induction rule: ancestor.induct) 64 | fix p c and S :: \('a \ 'a) set\ 65 | assume \(p, c) \ S\ 66 | from this show \ancestor' S p c\ 67 | by(force simp add: ancestor'_def) 68 | next 69 | fix p c g and S :: \('a \ 'a) set\ 70 | assume *: \(p, c) \ S\ and \ancestor' S c g\ 71 | from this also have **: \(c, g) \ lfp (\T. S \ { (p, g). \c. (p, c) \ S \ (c, g) \ T })\ 72 | by(clarsimp simp add: ancestor'_def) 73 | ultimately show \ancestor' S p g\ 74 | by(clarsimp simp add: ancestor'_def intro!: ancestor_functor_trans[OF * **]) 75 | qed 76 | 77 | lemma ancestor_alt_def [simp, code]: 78 | shows \ancestor S p c \ ancestor' S p c\ 79 | using ancestor_alt_def1 ancestor_alt_def2 by force 80 | 81 | declare lfp_while_lattice [code_unfold] 82 | declare finite_class.finite [code_unfold] 83 | 84 | section\Efficiency improvements\ 85 | 86 | lemma ancestor_unwind [code]: 87 | shows \ancestor Ss p c \ ((p, c) \ Ss \ (\(x, y)\Ss. p = x \ ancestor Ss y c))\ 88 | apply(rule iffI) 89 | apply(induction rule: ancestor.induct) 90 | apply force+ 91 | apply(erule disjE) 92 | apply(force intro!: ancestor.intros) 93 | apply(clarsimp simp add: ancestor.intros) 94 | done 95 | 96 | record person = 97 | name :: \String.literal\ 98 | age :: \int\ 99 | 100 | value \ 101 | let db = {(\name = String.implode ''Alan'', age = 34\, \name = String.implode ''Bill'', age = 18\), 102 | (\name = String.implode ''Bill'', age = 18\, \name = String.implode ''Charles'', age = 1\), 103 | (\name = String.implode ''Alan'', age = 34\, \name = String.implode ''Diane'', age = 17\), 104 | (\name = String.implode ''Diane'', age = 17\, \name = String.implode ''Elizabeth'', age = 0\)}; 105 | pr = \name = String.implode ''Alan'', age = 34\; 106 | cd = \name = String.implode ''Elizabeth'', age = 0\ 107 | in ancestor db pr cd\ 108 | 109 | definition support :: \('a \ 'a) set \ 'a set\ 110 | where \support Ss \ \(x, y) \ Ss. {x, y}\ 111 | 112 | definition has_descendent :: \('a \ 'a) set \ 'a \ bool\ 113 | where \has_descendent Ss p \ \s\support Ss. ancestor Ss p s\ 114 | 115 | definition has_ancestor :: \('a \ 'a) set \ 'a \ bool\ 116 | where \has_ancestor Ss c \ \a\support Ss. ancestor Ss a c\ 117 | 118 | definition have_common_ancestor :: \(person \ person) set \ person \ person \ bool\ 119 | where \have_common_ancestor Ss c1 c2 \ \a\support Ss. 
ancestor Ss a c1 \ ancestor Ss a c2\ 120 | 121 | value [code] \ 122 | let db = {(\name = String.implode ''Alan'', age = 34\, \name = String.implode ''Bill'', age = 18\), 123 | (\name = String.implode ''Bill'', age = 18\, \name = String.implode ''Charles'', age = 1\), 124 | (\name = String.implode ''Alan'', age = 34\, \name = String.implode ''Diane'', age = 17\), 125 | (\name = String.implode ''Diane'', age = 17\, \name = String.implode ''Elizabeth'', age = 0\)}; 126 | pr = \name = String.implode ''Diane'', age = 17\; 127 | cd = \name = String.implode ''Elizabeth'', age = 0\ 128 | in have_common_ancestor db \name = String.implode ''Bill'', age = 18\ \name = String.implode ''Diane'', age = 17\\ 129 | 130 | definition mk_person :: \String.literal \ int \ person\ 131 | where \mk_person n a \ \name = n, age = a\\ 132 | 133 | export_code has_ancestor has_descendent ancestor have_common_ancestor mk_person in SML 134 | module_name Ancestor file ancestor.ML 135 | 136 | end -------------------------------------------------------------------------------- /proof/Move_Acyclic.thy: -------------------------------------------------------------------------------- 1 | theory Move_Acyclic 2 | imports Move 3 | begin 4 | 5 | section \Tree invariant 2: no cycles\ 6 | 7 | definition acyclic :: \('n \ 'm \ 'n) set \ bool\ where 8 | \acyclic tree \ (\n. ancestor tree n n)\ 9 | 10 | lemma acyclic_empty [simp]: \acyclic {}\ 11 | by (meson acyclic_def ancestor_indcases empty_iff) 12 | 13 | lemma acyclicE [elim]: 14 | assumes \acyclic \\ 15 | and \(\n. ancestor \ n n) \ P\ 16 | shows \P\ 17 | using assms by (auto simp add: acyclic_def) 18 | 19 | lemma ancestor_empty_False [simp]: 20 | shows \ancestor {} p c = False\ 21 | by (meson ancestor_indcases emptyE) 22 | 23 | lemma ancestor_superset_closed: 24 | assumes \ancestor \ p c\ 25 | and \\ \ \\ 26 | shows \ancestor \ p c\ 27 | using assms by (induction rule: ancestor.induct) (auto intro: ancestor.intros) 28 | 29 | lemma acyclic_subset: 30 | assumes \acyclic T\ 31 | and \S \ T\ 32 | shows \acyclic S\ 33 | using assms ancestor_superset_closed by (metis acyclic_def) 34 | 35 | inductive path :: \('n \ 'm \ 'n) set \ 'n \ 'n \ ('n \ 'n) list \ bool\ where 36 | \\(b, x, e) \ T\ \ path T b e [(b, e)]\ | 37 | \\path T b m xs; (m, e) \ set xs; (m, x, e) \ T\ \ path T b e (xs @ [(m, e)])\ 38 | 39 | inductive_cases path_indcases: \path T b e xs\ 40 | 41 | lemma empty_path: 42 | shows \\ path T x y []\ 43 | using path_indcases by fastforce 44 | 45 | lemma singleton_path: 46 | assumes \path T b m [(p, c)]\ 47 | shows \b = p \ m = c\ 48 | using assms by (metis (no_types, lifting) butlast.simps(2) butlast_snoc empty_path 49 | list.inject path.cases prod.inject) 50 | 51 | lemma last_path: 52 | assumes \path T b e (xs @ [(p, c)])\ 53 | shows \e = c\ 54 | using assms path.cases by force 55 | 56 | lemma path_drop1: 57 | assumes \path T b e (xs @ [(a, e)])\ 58 | and \xs \ []\ 59 | shows \path T b a xs \ (a, e) \ set xs\ 60 | using assms path.cases by force 61 | 62 | lemma path_drop: 63 | assumes \path T b e (xs @ ys)\ 64 | and \xs \ []\ 65 | shows \\m. 
path T b m xs\ 66 | using assms proof(induction ys arbitrary: xs, force) 67 | case (Cons x ys) 68 | from this obtain m where IH: \path T b m (xs @ [x])\ 69 | by fastforce 70 | moreover obtain a e where \x = (a, e)\ 71 | by fastforce 72 | moreover from this have \m = e\ 73 | using IH last_path by fastforce 74 | ultimately show ?case 75 | using Cons.prems(2) path_drop1 by fastforce 76 | qed 77 | 78 | lemma fst_path: 79 | assumes \path T b e ((p, c) # xs)\ 80 | shows \b = p\ 81 | using assms proof(induction xs arbitrary: e rule: List.rev_induct) 82 | case Nil then show ?case 83 | by (simp add: singleton_path) 84 | next 85 | case (snoc x xs) 86 | then show ?case 87 | by (metis append_Cons list.distinct(1) path_drop) 88 | qed 89 | 90 | lemma path_split: 91 | assumes \path T m n xs\ 92 | and \(p, c) \ set xs\ 93 | shows \\ys zs. (ys = [] \ path T m p ys) \ (zs = [] \ path T c n zs) \ 94 | (xs = ys @ [(p, c)] @ zs) \ (p, c) \ set ys \ (p, c) \ set zs\ 95 | using assms proof(induction rule: path.induct, force) 96 | case step: (2 T b m xs e) 97 | then show ?case 98 | proof(cases \(p, c) = (m, e)\) 99 | case True 100 | then show ?thesis using step.hyps by force 101 | next 102 | case pc_xs: False (* (p, c) \ set xs *) 103 | then obtain ys zs where yszs: \(ys = [] \ path T b p ys) \ (zs = [] \ path T c m zs) \ 104 | xs = ys @ [(p, c)] @ zs \ (p, c) \ set ys \ (p, c) \ set zs\ 105 | using step.IH step.prems by auto 106 | have path_zs: \path T c e (zs @ [(m, e)])\ 107 | by (metis (no_types, lifting) Un_iff append_Cons last_path path.simps 108 | self_append_conv2 set_append step.hyps(1) step.hyps(2) step.hyps(3) yszs) 109 | then show ?thesis 110 | proof(cases \ys = []\) 111 | case True 112 | hence \\zsa. ([] = [] \ path T b p []) \ (zsa = [] \ path T c e zsa) \ 113 | (p, c) # zs @ [(m, e)] = [] @ (p, c) # zsa \ (p, c) \ set [] \ (p, c) \ set zsa\ 114 | using pc_xs path_zs yszs by auto 115 | then show ?thesis 116 | using yszs by force 117 | next 118 | case False 119 | hence \\zsa. (ys = [] \ path T b p ys) \ (zsa = [] \ path T c e zsa) \ 120 | ys @ (p, c) # zs @ [(m, e)] = ys @ (p, c) # zsa \ (p, c) \ set ys \ (p, c) \ set zsa\ 121 | using path_zs pc_xs yszs by auto 122 | then show ?thesis 123 | using yszs by force 124 | qed 125 | qed 126 | qed 127 | 128 | lemma anc_path: 129 | assumes \ancestor T p c\ 130 | shows \\xs. path T p c xs\ 131 | using assms proof(induction rule: ancestor.induct) 132 | case (1 parent meta child tree) 133 | then show ?case by (meson path.intros(1)) 134 | next 135 | case step: (2 parent meta child tree anc) 136 | then obtain xs where xs: \path tree anc parent xs\ 137 | by blast 138 | then show ?case 139 | proof(cases \(parent, child) \ set xs\) 140 | case True 141 | then show ?thesis 142 | by (metis step.hyps(1) xs append_Cons append_Nil fst_path path.intros path_split) 143 | next 144 | case False 145 | then show ?thesis 146 | by (meson path.intros(2) step.hyps(1) xs) 147 | qed 148 | qed 149 | 150 | lemma path_anc: 151 | assumes \path T p c xs\ 152 | shows \ancestor T p c\ 153 | using assms by (induction rule: path.induct, auto simp add: ancestor.intros) 154 | 155 | lemma anc_path_eq: 156 | shows \ancestor T p c \ (\xs. path T p c xs)\ 157 | by (meson anc_path path_anc) 158 | 159 | lemma acyclic_path_eq: 160 | shows \acyclic T \ (\n xs. 
path T n n xs)\ 161 | by (meson anc_path acyclic_def path_anc) 162 | 163 | 164 | lemma rem_edge_path: 165 | assumes \path T m n xs\ 166 | and \T = insert (p, x, c) S\ 167 | and \(p, c) \ set xs\ 168 | shows \path S m n xs\ 169 | using assms by (induction rule: path.induct, auto simp add: path.intros) 170 | 171 | lemma ancestor_transitive: 172 | assumes \ancestor \ n p\ and \ancestor \ m n\ 173 | shows \ancestor \ m p\ 174 | using assms by (induction rule: ancestor.induct) (auto intro: ancestor.intros) 175 | 176 | lemma cyclic_path_technical: 177 | assumes \path T m m xs\ 178 | and \T = insert (p, x, c) S\ 179 | and \\n. \ ancestor S n n\ 180 | and \c \ p\ 181 | shows \ancestor S c p\ 182 | proof(cases \(p, c) \ set xs\) 183 | case True 184 | then obtain ys zs where yszs: \(ys = [] \ path T m p ys) \ (zs = [] \ path T c m zs) \ 185 | xs = ys @ [(p, c)] @ zs \ (p, c) \ set ys \ (p, c) \ set zs\ 186 | using assms(1) path_split by force 187 | then show ?thesis 188 | proof(cases \ys = []\) 189 | case True 190 | then show ?thesis using assms by (metis append_Cons append_Nil fst_path path_anc 191 | rem_edge_path singleton_path yszs) 192 | next 193 | case False 194 | then show ?thesis using assms by (metis ancestor_transitive last_path path_anc 195 | rem_edge_path self_append_conv yszs) 196 | qed 197 | next 198 | case False 199 | then show ?thesis 200 | using assms by (metis path_anc rem_edge_path) 201 | qed 202 | 203 | lemma cyclic_ancestor: 204 | assumes \\ acyclic (S \ {(p, x, c)})\ 205 | and \acyclic S\ 206 | and \c \ p\ 207 | shows \ancestor S c p\ 208 | using assms anc_path acyclic_def cyclic_path_technical by fastforce 209 | 210 | lemma do_op_acyclic: 211 | assumes \acyclic tree1\ 212 | and \do_op (Move t newp m c, tree1) = (log_oper, tree2)\ 213 | shows \acyclic tree2\ 214 | proof(cases \ancestor tree1 c newp \ c = newp\) 215 | case True 216 | then show \acyclic tree2\ 217 | using assms by auto 218 | next 219 | case False 220 | hence A: \tree2 = {(p', m', c') \ tree1. c' \ c} \ {(newp, m, c)}\ 221 | using assms(2) by auto 222 | moreover have \{(p', m', c') \ tree1. c' \ c} \ tree1\ 223 | by blast 224 | moreover have \acyclic tree1\ 225 | using assms and acyclic_def by auto 226 | moreover have B: \acyclic {(p', m', c') \ tree1. c' \ c}\ 227 | using acyclic_subset calculation(2) calculation(3) by blast 228 | { 229 | assume \\ acyclic tree2\ 230 | hence \ancestor {(p', m', c') \ tree1. 
c' \ c} c newp\ 231 | using cyclic_ancestor False A B by force 232 | from this have \False\ 233 | using False ancestor_superset_closed calculation(2) by fastforce 234 | } 235 | from this show \acyclic tree2\ 236 | using acyclic_def by auto 237 | qed 238 | 239 | lemma do_op_acyclic_var: 240 | assumes \acyclic tree1\ 241 | and \do_op (oper, tree1) = (log_oper, tree2)\ 242 | shows \acyclic tree2\ 243 | using assms by (metis do_op_acyclic operation.exhaust_sel) 244 | 245 | lemma redo_op_acyclic_var: 246 | assumes \acyclic tree1\ 247 | and \redo_op (LogMove t oldp p m c) (log1, tree1) = (log2, tree2)\ 248 | shows \acyclic tree2\ 249 | using assms by (subst (asm) redo_op.simps) (rule do_op_acyclic, assumption, fastforce) 250 | 251 | corollary redo_op_acyclic: 252 | assumes \acyclic tree1\ 253 | and \redo_op logop (log1, tree1) = (log2, tree2)\ 254 | shows \acyclic tree2\ 255 | using assms by (cases logop) (metis redo_op_acyclic_var) 256 | 257 | inductive steps :: \(('t, 'n, 'm) log_op list \ ('n \ 'm \ 'n) set) list \ bool\ where 258 | \\do_op (oper, {}) = (logop, tree)\ \ steps [([logop], tree)]\ | 259 | \\steps (ss @ [(log, tree)]); do_op (oper, tree) = (logop, tree2)\ \ steps (ss @ [(log, tree), (logop # log, tree2)])\ 260 | 261 | inductive_cases steps_indcases [elim]: \steps ss\ 262 | inductive_cases steps_singleton_indcases [elim]: \steps [s]\ 263 | inductive_cases steps_snoc_indcases [elim]: \steps (ss@[s])\ 264 | 265 | lemma steps_empty [elim]: 266 | assumes \steps (ss @ [([], tree)])\ 267 | shows \False\ 268 | using assms by force 269 | 270 | lemma steps_snocI: 271 | assumes \steps (ss @ [(log, tree)])\ 272 | and \do_op (oper, tree) = (logop, tree2)\ 273 | and \suf = [(log, tree), (logop # log, tree2)]\ 274 | shows \steps (ss @ suf)\ 275 | using assms steps.intros(2) by blast 276 | 277 | lemma steps_unique_parent: 278 | assumes \steps ss\ 279 | and \ss = ss'@[(log, tree)]\ 280 | shows \unique_parent tree\ 281 | using assms by(induction arbitrary: ss' log tree rule: steps.induct) 282 | (clarsimp, metis do_op_unique_parent emptyE operation.exhaust_sel unique_parentI)+ 283 | 284 | 285 | lemma apply_op_steps_exist: 286 | assumes \apply_op oper (log1, tree1) = (log2, tree2)\ 287 | and \steps (ss@[(log1, tree1)])\ 288 | shows \\ss'. 
steps (ss'@[(log2,tree2)])\ 289 | using assms proof(induction log1 arbitrary: tree1 log2 tree2 ss) 290 | case Nil 291 | thus ?case using steps_empty by blast 292 | next 293 | case (Cons logop ops) 294 | { assume \move_time oper < log_time logop\ 295 | hence *:\apply_op oper (logop # ops, tree1) = 296 | redo_op logop (apply_op oper (ops, undo_op (logop, tree1)))\ 297 | by simp 298 | moreover { 299 | fix oper' 300 | assume asm: \do_op (oper', {}) = (logop, tree1)\ \ss = []\ \(logop # ops, tree1) = ([logop], tree1)\ 301 | hence undo: \undo_op (logop, tree1) = {}\ 302 | using asm Cons by (metis apply_ops_Nil apply_ops_unique_parent do_op.cases do_undo_op_inv old.prod.inject) 303 | obtain t oldp p m c where logmove: \logop = LogMove t oldp p m c\ 304 | using log_op.exhaust by blast 305 | obtain logop'' tree'' where do: \do_op (oper, {}) = (logop'', tree'')\ 306 | by fastforce 307 | hence redo: \redo_op logop ([logop''], tree'') = (log2, tree2)\ 308 | using Cons.prems(1) asm undo calculation by auto 309 | then obtain op2 where op2: \do_op (Move t p m c, tree'') = (op2, tree2)\ 310 | by (simp add: logmove) 311 | hence log2: \log2 = op2 # [logop'']\ 312 | using logmove redo by auto 313 | have \steps ([] @ [([logop''], tree''), (op2 # [logop''], tree2)])\ 314 | using do op2 by (fastforce intro: steps.intros) 315 | hence \steps ([([logop''], tree'')] @ [(log2, tree2)])\ 316 | by (simp add: log2) 317 | hence \\ss'. steps (ss' @ [(log2, tree2)])\ 318 | by fastforce 319 | } moreover { 320 | fix pre_ss tree' oper' 321 | assume asm: \steps (pre_ss @ [(ops, tree')])\ 322 | \do_op (oper', tree') = (logop, tree1)\ 323 | \ss = pre_ss @ [(ops, tree')]\ 324 | hence undo: \undo_op (logop, tree1) = tree'\ 325 | using do_undo_op_inv_var steps_unique_parent by metis 326 | obtain log'' tree'' where apply_op: \apply_op oper (ops, undo_op (logop, tree1)) = (log'', tree'')\ 327 | by (meson surj_pair) 328 | moreover have \steps (pre_ss @ [(ops, undo_op (logop, tree1))])\ 329 | by (simp add: undo asm) 330 | ultimately obtain ss' where ss': \steps (ss' @ [(log'', tree'')])\ 331 | using Cons.IH by blast 332 | obtain t oldp p m c where logmove: \logop = LogMove t oldp p m c\ 333 | using log_op.exhaust by blast 334 | hence redo: \redo_op logop (log'', tree'') = (log2, tree2)\ 335 | using Cons.prems(1) * apply_op by auto 336 | then obtain op2 where op2: \do_op (Move t p m c, tree'') = (op2, tree2)\ 337 | using logmove redo by auto 338 | hence log2: \log2 = op2 # log''\ 339 | using logmove redo by auto 340 | hence \steps (ss' @ [(log'', tree''), (op2 # log'', tree2)])\ 341 | using ss' op2 by (fastforce intro!: steps.intros) 342 | hence \steps ((ss' @ [(log'', tree'')]) @ [(log2, tree2)])\ 343 | by (simp add: log2) 344 | hence \\ss'. steps (ss' @ [(log2, tree2)])\ 345 | by blast 346 | } ultimately have \\ss'. steps (ss' @ [(log2, tree2)])\ 347 | using Cons by auto 348 | } moreover { 349 | assume \\ (move_time oper < log_time logop)\ 350 | hence \apply_op oper (logop # ops, tree1) = 351 | (let (op2, tree2) = do_op (oper, tree1) in (op2 # logop # ops, tree2))\ 352 | by simp 353 | moreover then obtain logop2 where \do_op (oper, tree1) = (logop2, tree2)\ 354 | by (metis (mono_tags, lifting) Cons.prems(1) case_prod_beta' prod.collapse snd_conv) 355 | moreover hence \steps (ss @ [(logop # ops, tree1), (logop2 # logop # ops, tree2)])\ 356 | using Cons.prems(2) steps_snocI by blast 357 | ultimately have \\ss'. 
steps (ss' @ [(log2, tree2)])\ 358 | using Cons by (metis (mono_tags) Cons_eq_appendI append_eq_appendI append_self_conv2 insert_Nil 359 | prod.sel(1) prod.sel(2) rotate1.simps(2) split_beta) 360 | } ultimately show ?case 361 | by auto 362 | qed 363 | 364 | 365 | lemma last_helper: 366 | assumes \last xs = x\ \xs \ []\ 367 | shows \\pre. xs = pre @ [x]\ 368 | using assms by (induction xs arbitrary: x rule: rev_induct; simp) 369 | 370 | lemma steps_exist: 371 | fixes log :: \('t::{linorder}, 'n, 'm) log_op list\ 372 | assumes \apply_ops ops = (log, tree)\ and \ops \ []\ 373 | shows \\ss. steps ss \ last ss = (log, tree)\ 374 | using assms proof(induction ops arbitrary: log tree rule: List.rev_induct, simp) 375 | case (snoc oper ops) 376 | then show ?case 377 | proof (cases ops) 378 | case Nil 379 | moreover obtain op2 tree2 where \do_op (oper, {}) = (op2, tree2)\ 380 | by fastforce 381 | moreover have \apply_ops (ops @ [oper]) = (let (op2, tree2) = do_op (oper, {}) in ([op2], tree2))\ 382 | by (metis apply_op.simps(1) apply_ops_Nil apply_ops_step calculation) 383 | moreover have \log = [op2]\ \tree = tree2\ 384 | using calculation(2) calculation(3) snoc.prems(1) by auto 385 | ultimately have \steps [(log, tree)]\ 386 | using steps.simps by auto 387 | then show ?thesis 388 | by force 389 | next 390 | case (Cons a list) 391 | 392 | obtain log1 tree1 where \apply_ops ops = (log1, tree1)\ 393 | by fastforce 394 | moreover from this obtain ss where \steps ss \ (last ss) = (log1, tree1) \ ss \ []\ 395 | using snoc.IH Cons by blast 396 | moreover then obtain pre_ss where \steps (pre_ss @ [(log1, tree1)]) \ 397 | using last_helper by fastforce 398 | moreover have \apply_op oper (log1, tree1) = (log, tree)\ 399 | using calculation(1) snoc.prems(1) by auto 400 | ultimately obtain ss' where \steps (ss' @ [(log, tree)])\ 401 | using apply_op_steps_exist by blast 402 | then show ?thesis 403 | by force 404 | qed 405 | qed 406 | 407 | lemma steps_remove1: 408 | assumes \steps (ss @ [s])\ 409 | shows \steps ss \ ss = []\ 410 | using assms steps.cases by fastforce 411 | 412 | lemma steps_singleton: 413 | assumes \steps [s]\ 414 | shows \\oper. 
let (logop, tree) = do_op (oper, {}) in s = ([logop], tree)\ 415 | using assms steps_singleton_indcases 416 | by (metis (mono_tags, lifting) case_prodI) 417 | 418 | lemma steps_acyclic: 419 | assumes \steps ss\ 420 | shows \acyclic (snd (last ss))\ 421 | using assms apply (induction rule: steps.induct; clarsimp) 422 | apply (metis acyclic_empty do_op_acyclic operation.exhaust_sel) 423 | using do_op_acyclic_var by auto 424 | 425 | theorem apply_ops_acyclic: 426 | fixes ops :: \('t::{linorder}, 'n, 'm) operation list\ 427 | assumes \apply_ops ops = (log, tree)\ 428 | shows \acyclic tree\ 429 | proof(cases \ops = []\) 430 | case True 431 | then show \acyclic tree\ 432 | using acyclic_def assms by fastforce 433 | next 434 | case False 435 | then obtain ss :: \(('t, 'n, 'm) log_op list \ ('n \ 'm \ 'n) set) list\ 436 | where \steps ss \ snd (last ss) = tree\ 437 | using assms steps_exist 438 | by (metis snd_conv) 439 | then show \acyclic tree\ 440 | using steps_acyclic by blast 441 | qed 442 | 443 | end -------------------------------------------------------------------------------- /proof/Move_SEC.thy: -------------------------------------------------------------------------------- 1 | theory Move_SEC 2 | imports Move CRDT.Network 3 | begin 4 | 5 | section \Strong eventual consistency\ 6 | 7 | definition apply_op' :: \('t::{linorder}, 'n, 'm) operation \ ('t, 'n, 'm) state \ ('t, 'n, 'm) state\ where 8 | \apply_op' x s \ case s of (log, tree) \ 9 | if unique_parent tree \ distinct (map log_time log @ [move_time x]) then 10 | Some (apply_op x s) 11 | else None\ 12 | 13 | fun valid_move_opers :: \('t, 'n, 'm) state \ 't \('t, 'n, 'm) operation \ bool\ where 14 | \valid_move_opers _ (i, Move t _ _ _) = (i = t)\ 15 | 16 | locale move = network_with_constrained_ops _ apply_op' \([], {})\ valid_move_opers 17 | begin 18 | 19 | lemma kleisli_apply_op' [iff]: 20 | shows \apply_op' (x :: ('t :: {linorder}, 'n, 'm) operation) \ apply_op' y = apply_op' y \ apply_op' x\ 21 | proof (unfold kleisli_def, rule ext, clarify) 22 | fix log :: \('t, 'n, 'm) log_op list\ and tree :: \('n \ 'm \ 'n) set\ 23 | { assume *: \unique_parent tree\ \distinct (map log_time log @ [move_time x])\ \distinct (map log_time log @ [move_time y])\ \move_time x \ move_time y\ 24 | obtain logx treex where 1: \apply_op x (log, tree) = (logx, treex)\ 25 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 26 | hence \set (map log_time logx) = {move_time x} \ set (map log_time log)\ 27 | using * by (cases x) (rule apply_op_timestampI2; force) 28 | moreover have \distinct (map log_time logx)\ 29 | using * 1 by (cases x) (rule apply_op_timestampI1; force) 30 | ultimately have 2: \distinct (map log_time logx @ [move_time y])\ 31 | using * by simp 32 | obtain logy treey where 3: \apply_op y (log, tree) = (logy, treey)\ 33 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 34 | hence \set (map log_time logy) = {move_time y} \ set (map log_time log)\ 35 | using * by (cases y) (rule apply_op_timestampI2; force) 36 | moreover have \distinct (map log_time logy)\ 37 | using * 3 by (cases y) (rule apply_op_timestampI1, force, force) 38 | ultimately have 4: \distinct (map log_time logy @ [move_time x])\ 39 | using * by simp 40 | have \unique_parent treex\ \unique_parent treey\ 41 | using * 1 3 apply_op_unique_parent by blast+ 42 | hence \apply_op' x (log, tree) \ apply_op' y = apply_op' y (log, tree) \ apply_op' x\ 43 | using * 1 2 3 4 by (cases x, cases y, clarsimp simp: apply_op'_def) (rule apply_op_commute2I; force) 44 | } 45 
| moreover { 46 | assume *: \unique_parent tree\ \distinct (map log_time log @ [move_time x])\ \distinct (map log_time log @ [move_time y])\ \move_time x = move_time y\ 47 | obtain logx treex where 1: \apply_op x (log, tree) = (logx, treex)\ 48 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 49 | hence \set (map log_time logx) = {move_time x} \ set (map log_time log)\ 50 | using * by (cases x) (rule apply_op_timestampI2; force) 51 | hence 2: \\ distinct (map log_time logx @ [move_time y])\ 52 | using * by simp 53 | obtain logy treey where 3: \apply_op y (log, tree) = (logy, treey)\ 54 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 55 | hence \ set (map log_time logy) = {move_time y} \ set (map log_time log)\ 56 | using * by (cases y) (rule apply_op_timestampI2; force) 57 | hence 4: \\ distinct (map log_time logy @ [move_time x])\ 58 | using * by simp 59 | have \apply_op' x (log, tree) \ apply_op' y = apply_op' y (log, tree) \ apply_op' x\ 60 | using * 1 2 3 4 by (clarsimp simp: apply_op'_def) 61 | } 62 | moreover { 63 | assume *: \unique_parent tree\ \\ distinct (map log_time log @ [move_time x])\ \distinct (map log_time log @ [move_time y])\ 64 | then have **: \move_time x \ set (map log_time log)\ 65 | by auto 66 | obtain log1 tree1 where \apply_op y (log, tree) = (log1, tree1)\ 67 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 68 | moreover hence \ set (map log_time log1) = {move_time y} \ set (map log_time log)\ 69 | using * by (cases y) (rule apply_op_timestampI2; force) 70 | hence \move_time x \ set (map log_time log1)\ 71 | using ** by blast 72 | moreover hence \\ distinct (map log_time log1 @ [move_time x])\ 73 | by simp 74 | ultimately have \apply_op' x (log, tree) \ apply_op' y = apply_op' y (log, tree) \ apply_op' x\ 75 | using * by (clarsimp simp: apply_op'_def) 76 | } 77 | moreover { 78 | assume *: \unique_parent tree\ \distinct (map log_time log @ [move_time x])\ \\ distinct (map log_time log @ [move_time y])\ 79 | then have **: \move_time y \ set (map log_time log)\ 80 | by auto 81 | obtain log1 tree1 where \apply_op x (log, tree) = (log1, tree1)\ 82 | using * by (clarsimp simp: apply_op'_def) (metis surj_pair) 83 | moreover hence \ set (map log_time log1) = {move_time x} \ set (map log_time log)\ 84 | using * by (cases x) (rule apply_op_timestampI2; force) 85 | hence \move_time y \ set (map log_time log1)\ 86 | using ** by blast 87 | moreover hence \\ distinct (map log_time log1 @ [move_time y])\ 88 | by simp 89 | ultimately have \apply_op' x (log, tree) \ apply_op' y = apply_op' y (log, tree) \ apply_op' x\ 90 | using * by (clarsimp simp: apply_op'_def) 91 | } 92 | ultimately show \apply_op' x (log, tree) \ apply_op' y = apply_op' y (log, tree) \ apply_op' x\ 93 | by (clarsimp simp: apply_op'_def) fastforce 94 | qed 95 | 96 | lemma concurrent_operations_commute: 97 | assumes \xs prefix of i\ 98 | shows \hb.concurrent_ops_commute (node_deliver_messages xs)\ 99 | using assms by (clarsimp simp add: hb.concurrent_ops_commute_def) (unfold interp_msg_def; simp) 100 | 101 | corollary apply_operations_Snoc2: 102 | \hb.apply_operations (xs @ [x]) s = (hb.apply_operations xs \ interp_msg x) s\ 103 | using hb.apply_operations_Snoc by auto 104 | 105 | lemma unique_parent_empty[simp]: 106 | shows \unique_parent {}\ 107 | by (auto simp: unique_parent_def) 108 | 109 | lemma log_tree_invariant: 110 | assumes \xs prefix of i\ \apply_operations xs = Some (log, tree)\ 111 | shows \distinct (map log_time log) \ unique_parent tree\ 112 | using assms 
proof (induct xs arbitrary: log tree rule: rev_induct, clarsimp) 113 | case (snoc x xs) 114 | hence \apply_operations xs \ None\ 115 | by (case_tac x; clarsimp simp: apply_operations_def node_deliver_messages_append kleisli_def) 116 | (metis (no_types, hide_lams) bind_eq_Some_conv surj_pair) 117 | then obtain log1 tree1 where *: \apply_operations xs = Some (log1, tree1)\ 118 | by auto 119 | moreover have \xs prefix of i\ 120 | using snoc.prems(1) by blast 121 | ultimately have **: \distinct (map log_time log1)\ \unique_parent tree1\ 122 | using snoc.hyps by blast+ 123 | show ?case 124 | proof (case_tac x) 125 | fix m assume \x = Broadcast m\ 126 | hence \apply_operations (xs @ [x]) = apply_operations xs\ 127 | by simp 128 | thus \distinct (map log_time log) \ unique_parent tree\ 129 | using \xs prefix of i\ snoc.hyps snoc.prems(2) by presburger 130 | next 131 | fix m assume 1: \x = Deliver m\ 132 | obtain t oper where 2: "m = (t, oper)" 133 | by force 134 | hence \interp_msg (t, oper) (log1, tree1) = Some (log, tree)\ 135 | using \apply_operations xs = Some (log1, tree1)\ snoc.prems(2) 1 2 by simp 136 | hence 4: \apply_op' oper (log1, tree1) = Some (log, tree)\ 137 | by (clarsimp simp: interp_msg_def apply_op'_def) 138 | hence \distinct ((map log_time log1) @ [move_time oper])\ 139 | by (clarsimp simp: apply_op'_def) (meson option.distinct(1)) 140 | moreover hence 5: \apply_op oper (log1, tree1) = (log, tree)\ 141 | using 4 ** by (clarsimp simp: apply_op'_def) 142 | ultimately have \distinct (map log_time log)\ 143 | by (case_tac oper, clarsimp) (rule apply_op_timestampI1, assumption, clarsimp) 144 | thus \distinct (map log_time log) \ unique_parent tree\ 145 | using ** 5 apply_op_unique_parent by blast 146 | qed 147 | qed 148 | 149 | definition indices :: "('id \ ('id, 'v, 'm) operation) event list \ 'id list" where 150 | \indices xs \ List.map_filter (\x. 
case x of Deliver (i, _) \ Some i | _ \ None) xs\ 151 | 152 | lemma indices_Nil [simp]: 153 | shows \indices [] = []\ 154 | by(auto simp: indices_def map_filter_def) 155 | 156 | lemma indices_append [simp]: 157 | shows \indices (xs@ys) = indices xs @ indices ys\ 158 | by(auto simp: indices_def map_filter_def) 159 | 160 | lemma indices_Broadcast_singleton [simp]: 161 | shows \indices [Broadcast b] = []\ 162 | by(auto simp: indices_def map_filter_def) 163 | 164 | lemma indices_Deliver_Insert [simp]: 165 | shows \indices [Deliver (i, x)] = [i]\ 166 | by(auto simp: indices_def map_filter_def) 167 | 168 | lemma idx_in_elem[intro]: 169 | assumes \Deliver (i, x) \ set xs\ 170 | shows \i \ set (indices xs)\ 171 | using assms by(induction xs, auto simp add: indices_def map_filter_def) 172 | 173 | lemma valid_move_oper_delivered: 174 | assumes \xs@[Deliver (t, oper)] prefix of i\ 175 | shows \move_time oper = t\ 176 | by (metis assms deliver_in_prefix_is_valid in_set_conv_decomp operation.set_cases(1) operation.set_sel(1) valid_move_opers.simps) 177 | 178 | find_theorems "apply_operations (?xs @ [?x])" 179 | 180 | lemma apply_opers_idx_elems: 181 | assumes \xs prefix of i\ \apply_operations xs = Some (log, tree)\ 182 | shows \set (map log_time log) = set (indices xs)\ 183 | using assms proof (induction xs arbitrary: log tree rule: rev_induct, force) 184 | case (snoc x xs) 185 | moreover have prefix: \xs prefix of i\ 186 | using snoc by force 187 | ultimately show ?case 188 | proof (cases x, force) 189 | case (Deliver m) 190 | then obtain t oper where m: \m = (t, oper)\ 191 | by fastforce 192 | from Deliver and snoc show ?thesis 193 | proof (cases \apply_operations xs\, force) 194 | case (Some st) 195 | then obtain log' tree' where st: \st = (log', tree')\ 196 | by (meson surj_pair) 197 | have set_indices: \log_time ` set log' = set (indices xs)\ 198 | using Some prefix snoc.IH st by auto 199 | hence *:\unique_parent tree' \ distinct (map log_time log')\ 200 | using st Some prefix by (simp add: log_tree_invariant) 201 | hence **: \apply_operations (xs @ [x]) = 202 | (if move_time oper \ set (indices xs) then Some (apply_op (snd (t, oper)) (log', tree')) 203 | else None)\ 204 | using Deliver Some st m set_indices by (auto simp: interp_msg_def apply_op'_def) 205 | hence ***: \move_time oper \ set (indices xs)\ 206 | using snoc.prems(2) by auto 207 | obtain t' p m c where oper: \oper = Move t' p m c\ 208 | using operation.exhaust by blast 209 | hence \t = t'\ 210 | using valid_move_oper_delivered Deliver m snoc.prems(1) by fastforce 211 | hence \apply_op (Move t p m c) (log', tree') = (log, tree)\ 212 | by (metis ** oper option.discI option.simps(1) prod.sel(2) snoc.prems(2)) 213 | hence \set (map log_time log) = {t} \ set (map log_time log')\ 214 | apply (rule apply_op_timestampI2) 215 | using Deliver * *** m set_indices snoc.prems(1) valid_move_oper_delivered by auto 216 | thus ?thesis 217 | using Deliver m set_indices by (clarsimp simp: interp_msg_def apply_op'_def) 218 | qed 219 | qed 220 | qed 221 | 222 | lemma indices_distinct_aux: 223 | assumes \xs @ [Deliver (a, b)] prefix of i\ 224 | shows \a \ set (indices xs)\ 225 | proof 226 | have 1: \xs prefix of i\ 227 | using assms by force 228 | assume \a \ set (indices xs)\ 229 | hence \\x. 
Deliver (a, x) \ set xs\ 230 | by (clarsimp simp: indices_def map_filter_def, case_tac x; force) 231 | then obtain c where 2: \Deliver (a, c) \ set xs\ 232 | by auto 233 | moreover then obtain j where \Broadcast (a, c) \ set (history j)\ 234 | using 1 delivery_has_a_cause prefix_elem_to_carriers by blast 235 | moreover obtain k where \Broadcast (a, b) \ set (history k)\ 236 | by (meson assms delivery_has_a_cause in_set_conv_decomp prefix_elem_to_carriers) 237 | ultimately have \b = c\ 238 | by (metis fst_conv network.msg_id_unique network_axioms old.prod.inject) 239 | hence \\ distinct (xs @ [Deliver (a, b)])\ 240 | by (simp add: 2) 241 | thus \False\ 242 | using assms prefix_distinct by blast 243 | qed 244 | 245 | 246 | lemma indices_distinct: 247 | assumes \xs prefix of i\ 248 | shows \distinct (indices xs)\ 249 | using assms proof (induct xs rule: rev_induct, clarsimp) 250 | case (snoc x xs) 251 | hence \xs prefix of i\ 252 | by force 253 | moreover hence \distinct (indices xs)\ 254 | by (simp add: snoc.hyps) 255 | ultimately show ?case 256 | using indices_distinct_aux snoc.prems by (case_tac x; force) 257 | qed 258 | 259 | lemma log_time_invariant: 260 | assumes \xs@[Deliver (t, oper)] prefix of i\ \apply_operations xs = Some (log, tree)\ 261 | shows \move_time oper \ set (map log_time log)\ 262 | proof - 263 | have \xs prefix of i\ 264 | using assms by force 265 | have \move_time oper = t\ 266 | using assms valid_move_oper_delivered by auto 267 | moreover have \indices (xs @ [Deliver (t, oper)]) = indices xs @ [t]\ 268 | by simp 269 | moreover have \distinct (indices (xs @ [Deliver (t, oper)]))\ 270 | using assms indices_distinct by blast 271 | ultimately show ?thesis 272 | using apply_opers_idx_elems assms indices_distinct_aux by blast 273 | qed 274 | 275 | lemma apply_operations_never_fails: 276 | assumes \xs prefix of i\ 277 | shows \apply_operations xs \ None\ 278 | using assms proof(induct xs rule: rev_induct, clarsimp) 279 | case (snoc x xs) 280 | hence \apply_operations xs \ None\ 281 | by blast 282 | then obtain log1 tree1 where *: \apply_operations xs = Some (log1, tree1)\ 283 | by auto 284 | moreover hence \distinct (map log_time log1) \ unique_parent tree1\ 285 | using log_tree_invariant snoc.prems by blast 286 | ultimately show ?case 287 | using log_time_invariant snoc.prems 288 | by (cases x; clarsimp simp: interp_msg_def) (clarsimp simp: apply_op'_def) 289 | qed 290 | 291 | sublocale sec: strong_eventual_consistency weak_hb hb interp_msg 292 | \\os. \xs i. xs prefix of i \ node_deliver_messages xs = os\ \([], {})\ 293 | proof (standard; clarsimp) 294 | fix xsa i 295 | assume \xsa prefix of i\ 296 | thus \hb.hb_consistent (node_deliver_messages xsa)\ 297 | by(auto simp add: hb_consistent_prefix) 298 | next 299 | fix xsa i 300 | assume \xsa prefix of i\ 301 | thus \distinct (node_deliver_messages xsa)\ 302 | by(auto simp add: node_deliver_messages_distinct) 303 | next 304 | fix xsa i 305 | assume \xsa prefix of i\ 306 | thus \hb.concurrent_ops_commute (node_deliver_messages xsa)\ 307 | by(auto simp add: concurrent_operations_commute) 308 | next 309 | fix xs a b state xsa x 310 | assume \hb.apply_operations xs ([], {}) = Some state\ 311 | \node_deliver_messages xsa = xs @ [(a, b)]\ 312 | \xsa prefix of x\ 313 | moreover hence \apply_operations xsa \ None\ 314 | using apply_operations_never_fails by blast 315 | ultimately show \\ab bb. 
interp_msg (a, b) state = Some (ab, bb)\ 316 | by (clarsimp simp: apply_operations_def kleisli_def) 317 | next 318 | fix xs a b xsa x 319 | assume \node_deliver_messages xsa = xs @ [(a, b)]\ 320 | and \xsa prefix of x\ 321 | thus \\xsa. (\x. xsa prefix of x) \ node_deliver_messages xsa = xs\ 322 | using drop_last_message by blast 323 | qed 324 | 325 | end 326 | 327 | end -------------------------------------------------------------------------------- /proof/ROOT: -------------------------------------------------------------------------------- 1 | chapter AFP 2 | session "CRDT-Tree" (AFP) = "HOL" + 3 | options [timeout = 3600, document = pdf, document_output = "output"] 4 | sessions 5 | CRDT 6 | Collections 7 | theories 8 | Move 9 | Move_Acyclic 10 | Move_SEC 11 | Move_Code 12 | Move_Create 13 | document_files 14 | "root.tex" 15 | -------------------------------------------------------------------------------- /proof/document/root.tex: -------------------------------------------------------------------------------- 1 | \documentclass[10pt]{article} 2 | \usepackage{isabelle,isabellesym} 3 | \usepackage[a4paper,portrait,margin=1in]{geometry} 4 | \usepackage{amsmath} 5 | \usepackage{amssymb} 6 | \usepackage{pdfsetup} 7 | 8 | \urlstyle{rm} 9 | \isabellestyle{tt} 10 | 11 | \begin{document} 12 | \title{A highly-available move operation for replicated trees (Proof Document)} 13 | \author{Martin Kleppmann, Dominic P. Mulligan, Victor B. F. Gomes, and Alastair R. Beresford} 14 | \date{} 15 | \maketitle 16 | \tableofcontents 17 | \parindent 0pt\parskip 0.5ex 18 | 19 | \input{session} % generated text of all theories 20 | \end{document} 21 | -------------------------------------------------------------------------------- /proof/proof.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/trvedata/move-op/6c23447c12a7862ff31b7fc2205f6c90fbdb9dc0/proof/proof.pdf -------------------------------------------------------------------------------- /tests.txt: -------------------------------------------------------------------------------- 1 | Comparing the concurrency behaviour of Dropbox, Microsoft OneDrive, and 2 | Google Drive. Say $ROOT is the root directory of the synced filesystem. 3 | 4 | 1. Create directories $ROOT/experiment/folder-{a,b,c} 5 | and files $ROOT/experiment/folder-{a,b,c}/{a,b,c}{1,2}.txt 6 | Let the devices sync up fully. 7 | 8 | 2. Take both devices offline. On one device, move folder-c to be a child 9 | of folder-a; on the other device, move folder-c to be a child of 10 | folder-b. Then take both devices online again and observe the 11 | outcome. 12 | 13 | Dropbox: duplicates the moved folder and all files within it (so we 14 | have both folder-a/folder-c/c{1,2}.txt and 15 | folder-b/folder-c/c{1,2}.txt). Before the next experiment I deleted 16 | folder-b/folder-c in order to have the same starting state. 17 | 18 | OneDrive: picks one of the two destination directories, in this case 19 | folder-a, so we have folder-a/folder-c/c{1,2}.txt. No duplication. 20 | 21 | GDrive: picks one of the two destination directories, in this case 22 | folder-a, so we have folder-a/folder-c/c{1,2}.txt. No duplication. 23 | 24 | In all cases, we now have experiment/folder-a/a{1,2}.txt, 25 | experiment/folder-a/folder-c/c{1,2}.txt, and 26 | experiment/folder-b/b{1,2}.txt. 27 | 28 | 3. Take both devices offline again. 
On one device, move folder-a to be a 29 | child of folder-b; on the other device, move folder-b to be a child 30 | of folder-a. Then take both devices online again and observe the 31 | outcome. 32 | 33 | Dropbox: duplicates the parent folder. In the merged state, we have 34 | folder-a/folder-b/b{1,2}.txt, folder-b/folder-a/a{1,2}.txt, and 35 | folder-b/folder-a/folder-c/c{1,2}.txt (but no folder-a/a{1,2}.txt). 36 | 37 | OneDrive: picks one of the two moves. In the merged state, we have 38 | folder-b/b{1,2}.txt, folder-b/folder-a/a{1,2}.txt, and 39 | folder-b/folder-a/folder-c/c{1,2}.txt. 40 | 41 | GDrive: fails to sync, replicas become permanently inconsistent. On 42 | the replica where I moved folder-b to be a child of folder-a, this 43 | file structure remains unchanged (i.e. we have folder-a/folder-b), 44 | and no error is shown. On the replica where I moved folder-a to be a 45 | child of folder-b, this file structure also remains unchanged (i.e. 46 | we have folder-b/folder-a), and the sync history shows an error 47 | "Can't sync 1 item", and viewing details brings up the error message: 48 | "Can't remove files with changes pending. Try again after sync is 49 | complete. Download error - My Drive/experiment/folder-b". See 50 | screenshots in ~/Dropbox/Screenshots/*2019-02-20*.png. Clicking 51 | "retry all" does not resolve the error (the client keeps trying again 52 | and again, never succeeding). After I manually move the folders into 53 | the locations to match the first replica, the error goes away. (A sketch of how a coordination-free move operation handles this scenario appears at the end of this document.) 54 | 55 | 4. Move everything back to the original structure of point 1. 56 | Take both devices offline. On one device, rename folder-c to 57 | folder-c1, and rename folder-a/a1.txt to folder-a/a1-1.txt. On the 58 | other device, rename folder-c to folder-c2, and rename 59 | folder-a/a1.txt to folder-a/a1-2.txt. Then take both devices online. 60 | 61 | Dropbox: duplicates both the folder and the file; for the duplicated 62 | folder, all files within it are also duplicated. Thus, we end up with 63 | folder-a/a{1-1,1-2,2}.txt, folder-b/b{1,2}.txt, folder-c1/c{1,2}.txt, 64 | and folder-c2/c{1,2}.txt. 65 | 66 | OneDrive: picks one of the two renames, in this case a1-2.txt and 67 | folder-c2. Thus, we end up with folder-a/a{1-2,2}.txt, 68 | folder-b/b{1,2}.txt, and folder-c2/c{1,2}.txt. 69 | 70 | GDrive: same as OneDrive. 71 | 72 | 5. Move everything back to the original structure of point 1. 73 | Additionally create files folder-b/b{3,4}.txt. Take both devices 74 | offline. On one device, rename folder-a/a1.txt to folder-a/a.txt, 75 | rename folder-b/b1.txt to folder-b/b.txt, edit the contents of 76 | folder-b/b2.txt, delete folder-b/b3.txt, and leave folder-b/b4.txt 77 | untouched. On the other device, rename folder-a/a2.txt to 78 | folder-a/a.txt, and delete folder-b. Then bring both devices online. 79 | 80 | Dropbox: folder-a/a.txt contains the contents of former a2.txt, and 81 | "folder-a/a (ecureuil.local's conflicted copy 2019-02-20).txt" 82 | contains the contents of former a1.txt. folder-b is restored on the 83 | device where it was deleted, containing folder-b/b2.txt (in its 84 | edited form) and folder-b/b.txt, but b3.txt and b4.txt are gone. 85 | 86 | OneDrive: folder-a/a.txt contains the contents of former a2.txt, and 87 | folder-a/a-écureuil.txt contains the contents of former a1.txt. 88 | folder-b is restored on the device where it was deleted, containing 89 | folder-b/b2.txt (in its edited form) and folder-b/b.txt, but b3.txt 90 | and b4.txt are gone.
91 | 92 | GDrive: folder-a/a.txt contains the contents of former a2.txt, and 93 | "folder-a/a (1).txt" contains the contents of former a1.txt. folder-b 94 | is deleted on both devices, and the edits to b2.txt are lost. 95 | 96 | 97 | [1] Vinh Tao, Marc Shapiro, and Vianney Rancurel: “Merging semantics for 98 | conflict updates in geo-distributed file systems,” at 8th ACM 99 | International Systems and Storage Conference (SYSTOR), 2015. 100 | https://pages.lip6.fr/Marc.Shapiro/papers/geodistr-FS-Systor-2015.pdf 101 | 102 | [2] Mehdi Ahmed-Nacer, Stéphane Martin, and Pascal Urso: “File system on 103 | CRDT,” INRIA, Technical Report no. RR-8027, Jul. 2012. 104 | https://hal.inria.fr/hal-00720681/ 105 | --------------------------------------------------------------------------------
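Editor's note: the following is a simplified sketch, in Scala (the language of the evaluation code in this repository), of how a coordination-free move operation can handle the concurrent moves in experiments 2 and 3 of tests.txt. It is not the repository's Move_Code.scala, nor the verified algorithm in proof/Move.thy: the verified algorithm additionally keeps an operation log and undoes/redoes logged operations so that replicas converge under any delivery order, whereas this sketch assumes all operations are already known and applied in timestamp order. Names such as MoveConflictSketch and applyAll are illustrative only. The one rule it shares with the verified do_op is the safety check: a move is ignored if it would make a node an ancestor of itself.

object MoveConflictSketch {
  // tree maps each child to its unique parent
  type Tree = Map[String, String]

  final case class Move(time: Int, newParent: String, child: String)

  // true if `anc` lies on the parent chain above `node`
  def isAncestor(tree: Tree, anc: String, node: String): Boolean =
    tree.get(node) match {
      case Some(p) => p == anc || isAncestor(tree, anc, p)
      case None    => false
    }

  // Ignore a move that would make a node an ancestor of itself;
  // otherwise reattach the child under its new parent.
  def applyMove(tree: Tree, op: Move): Tree =
    if (op.child == op.newParent || isAncestor(tree, op.child, op.newParent)) tree
    else tree + (op.child -> op.newParent)

  // Apply all moves in timestamp order (sketch only: the verified
  // algorithm handles out-of-order delivery via undo/redo).
  def applyAll(ops: Seq[Move]): Tree =
    ops.sortBy(_.time).foldLeft(Map.empty[String, String])(applyMove)

  def main(args: Array[String]): Unit = {
    val ops = Seq(
      Move(1, "experiment", "folder-a"),
      Move(2, "experiment", "folder-b"),
      Move(3, "folder-b", "folder-a"),  // replica 1: move folder-a under folder-b
      Move(4, "folder-a", "folder-b")   // replica 2: skipped, would create a cycle
    )
    // folder-a ends up under folder-b; folder-b stays under experiment
    println(applyAll(ops))
  }
}

Applied to experiment 3, both replicas end up with folder-a inside folder-b (the later-timestamped move is the one skipped), so in contrast to Dropbox's duplication and Google Drive's divergence observed above, every replica computes the same tree and every file appears exactly once.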
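A second sketch, also illustrative only, shows the idea behind proof/Ancestor_LFP_Executable_Code.thy above: the ancestor relation is defined as the least fixed point of the functor λT. S ∪ {(p, g) | ∃c. (p, c) ∈ S ∧ (c, g) ∈ T}, which on a finite relation can be computed by iterating the functor until it stops adding pairs. The theory obtains executable code through the while combinator; the Scala below merely mirrors that construction, and the names AncestorSketch and ancestors are hypothetical.

object AncestorSketch {
  type Edge = (String, String) // (parent, child)

  // Iterate T := S ∪ {(p, g) | (p, c) ∈ S and (c, g) ∈ T} until nothing
  // new is added; termination is guaranteed because all pairs are drawn
  // from a finite set.
  def ancestors(s: Set[Edge]): Set[Edge] = {
    def step(t: Set[Edge]): Set[Edge] =
      s ++ (for ((p, c) <- s; (c2, g) <- t if c == c2) yield (p, g))
    var current = Set.empty[Edge]
    var next = step(current)
    while (next != current) { current = next; next = step(current) }
    current
  }

  def main(args: Array[String]): Unit = {
    // Mirrors the family database in the theory's `value` example.
    val db = Set(("Alan", "Bill"), ("Bill", "Charles"),
                 ("Alan", "Diane"), ("Diane", "Elizabeth"))
    println(ancestors(db).contains(("Alan", "Elizabeth"))) // true
  }
}

For the family database in the theory's value example, the fixed point contains the pair (Alan, Elizabeth), i.e. Alan is an ancestor of Elizabeth via Diane, which is the query the theory evaluates with the generated code.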