├── .github └── workflows │ └── tests.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── biome.json ├── build.sh ├── cmd └── wasm │ └── main.go ├── go.mod ├── go.sum ├── helm └── tardis │ ├── .helmignore │ ├── Chart.yaml │ ├── templates │ ├── NOTES.txt │ ├── _helpers.tpl │ ├── deployment.yaml │ ├── hpa.yaml │ ├── ingress.yaml │ ├── service.yaml │ ├── serviceaccount.yaml │ └── tests │ │ └── test-connection.yaml │ └── values.yaml ├── img ├── sugiyama.png ├── tardis.png └── zherebko.png ├── index.html ├── jest.config.js ├── package.json ├── public ├── favicon.svg ├── gmsl.wasm └── wasm_exec.js ├── shims └── synapse │ ├── .dockerignore │ ├── Dockerfile │ ├── README.md │ ├── build.sh │ ├── requirements-freeze.txt │ ├── requirements.txt │ └── shim.py ├── src ├── auth_dag.ts ├── cache.ts ├── debugger.ts ├── event_list.ts ├── graph.ts ├── index.ts ├── preloaded_scenarios.ts ├── scenario.ts ├── state_resolver.test.ts └── state_resolver.ts ├── style.css ├── tsconfig.json └── yarn.lock /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | 8 | permissions: 9 | packages: read 10 | # Note: from https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs 11 | # > If you specify the access for any of these scopes, all of those that are not specified are set to none. 12 | 13 | jobs: 14 | check-signoff: 15 | if: "github.event_name == 'pull_request'" 16 | uses: "matrix-org/backend-meta/.github/workflows/sign-off.yml@v2" 17 | 18 | tests: 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - uses: actions/checkout@v3 23 | 24 | - uses: actions/setup-node@v4 25 | with: 26 | node-version: 20 27 | 28 | - name: Install 29 | run: yarn install 30 | 31 | - name: Build 32 | run: yarn build 33 | 34 | - name: Lint 35 | run: yarn lint ./src 36 | 37 | - name: Test 38 | run: yarn test 39 | 40 | - name: Docker Build 41 | run: docker build -t tardis . -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .zed/ 2 | dist/ 3 | node_modules/ 4 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:lts 2 | 3 | WORKDIR /app 4 | COPY yarn.lock package.json ./ 5 | RUN yarn install 6 | 7 | COPY . . 8 | EXPOSE 5173 9 | CMD ["yarn", "start", "--host", "0.0.0.0", "--port", "5173"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## TARDIS - Time Agnostic Room DAG Inspection Service 2 | 3 | TARDIS is a time-travelling debugger for Matrix room DAGs. It reads a plaintext file of events and 4 | graphically visualises the room using [d3-dag](https://github.com/erikbrinkman/d3-dag). 5 | It can then perform state resolution at any given event in the DAG via a shim server. 6 | TARDIS comes with a Synapse shim server, which needs to be run alongside 7 | TARDIS. See `shims/synapse`. 8 | 9 | ## Generates stuff like this: 10 | 11 | ![](img/tardis.png) 12 | 13 | ### To use: 14 | 15 | Requires Node 20+ for the global `crypto` object. 16 | ``` 17 | yarn install 18 | yarn run start 19 | ``` 20 | or: 21 | ``` 22 | docker build -t tardis . 23 | docker run --rm -p 5173:5173 tardis 24 | ``` 25 | 26 | ## Loading events 27 | 28 | ### ..via existing events in a database 29 | Provide a newline-delimited JSON file containing the events to render, in the full federation format (with `prev_events`, etc). 30 | To get such a file _for Synapse installations on Postgres_, run the following (assuming `matrix` is the name of your DB): 31 | ``` 32 | $ psql matrix 33 | matrix=> \t 34 | Tuples only is on. 35 | matrix=> \o the-file.ndjson 36 | matrix=> select jsonb_insert(json::JSONB, '{event_id}', ('"' || event_id || '"')::JSONB) from event_json where event_id in 37 | (select event_id from events where 38 | room_id='!THE_ROOM_ID' and 39 | stream_ordering < (select stream_ordering from events where event_id='$LATEST_EVENT_ID') and 40 | stream_ordering > (select stream_ordering from events where event_id='$EARLIEST_EVENT_ID') 41 | order by stream_ordering asc 42 | ); 43 | ``` 44 | 45 | For SQLite3, use `select '{"event_id":"' || event_id || '",' || substr(json, 2) from event_json`. 46 | 47 | You can drop the `stream_ordering` clauses if the room is small and you want to see the entire thing. 48 | 49 | It is important that the events are sorted in causal order. To do this with [jq](https://jqlang.github.io/jq/), run: 50 | ``` 51 | cat the-file.ndjson | jq -s 'sort_by(.depth)' > sorted-file.ndjson 52 | ``` 53 | The file created by these commands can be dropped **as-is** into TARDIS. 54 | 55 | ### ..via scenario JSON5 files 56 | Provide a JSON5 file which contains the scenario to run. See the `examples` directory for examples of 57 | the available keys. 58 | 59 | ## Developing 60 | 61 | 62 | ### Building WASM 63 | Sometimes we want to create mock scenarios which don't exist in any database. In that case we use placeholder event IDs. However, state 64 | resolution demands real event IDs, so TARDIS uses the same code paths as Dendrite (via WASM) to generate the correct event IDs.
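As a rough sketch of what that WASM exposes: `cmd/wasm/main.go` registers a single global function, `gmslEventIDForEvent(eventJSON, roomVersion)`, via the `Go` runtime shim in `public/wasm_exec.js`, returning the computed event ID or an empty string on error. The snippet below shows one way it could be loaded and called from TypeScript; the fetch path, the loader helper, and the mock event are illustrative assumptions, not necessarily how the code under `src/` actually wires it up. The build instructions continue below.
```
// Illustrative only: load gmsl.wasm and call the helper exported by cmd/wasm/main.go.
// `Go` is defined by public/wasm_exec.js, which must be loaded first.
declare const Go: new () => {
    importObject: WebAssembly.Imports;
    run(instance: WebAssembly.Instance): Promise<void>;
};
declare function gmslEventIDForEvent(eventJSON: string, roomVersion: string): string;

async function loadGmsl(): Promise<void> {
    const go = new Go();
    // Assumed path; adjust to wherever public/gmsl.wasm is served from.
    const { instance } = await WebAssembly.instantiateStreaming(fetch("gmsl.wasm"), go.importObject);
    // Deliberately not awaited: main() in cmd/wasm/main.go blocks on a channel forever, so run()
    // never resolves, but it registers gmslEventIDForEvent on the global scope before blocking.
    void go.run(instance);
}

async function example(): Promise<void> {
    await loadGmsl();
    // Placeholder event for illustration; real events also carry prev_events, auth_events, depth, etc.
    const mockEvent = {
        type: "m.room.member",
        state_key: "@alice:example.org",
        sender: "@alice:example.org",
        room_id: "!placeholder:example.org",
        content: { membership: "join" },
    };
    // Second argument is the room version; an empty string is returned if the event or version is invalid.
    console.log(gmslEventIDForEvent(JSON.stringify(mockEvent), "10"));
}
```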
65 | To build the wasm, you need [tinygo](https://tinygo.org/) installed and then: 66 | ``` 67 | GOOS=js GOARCH=wasm tinygo build -o ./public/gmsl.wasm --no-debug ./cmd/wasm/main.go 68 | ``` 69 | A working version is already supplied with this repository. 70 | -------------------------------------------------------------------------------- /biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/1.8.3/schema.json", 3 | "vcs": { 4 | "enabled": true, 5 | "clientKind": "git", 6 | "useIgnoreFile": true 7 | }, 8 | "organizeImports": { 9 | "enabled": true 10 | }, 11 | "formatter": { 12 | "enabled": true, 13 | "indentStyle": "space", 14 | "indentWidth": 4, 15 | "lineWidth": 120 16 | }, 17 | "linter": { 18 | "enabled": true, 19 | "rules": { 20 | "recommended": true, 21 | "style": { 22 | "noNonNullAssertion": "off" 23 | } 24 | } 25 | }, 26 | "files": { 27 | "maxSize": 2097152, 28 | "ignoreUnknown": true, 29 | "ignore": ["shims/*", "public/*"] 30 | }, 31 | "json": { 32 | "formatter": { 33 | "indentWidth": 2 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | yarn build # dumps to ./dist 3 | git checkout gh-pages # we gitignore ./dist so the directory is carried over to this branch 4 | git ls-files -z | xargs -0 rm -f # remove previous release 5 | cp -r ./dist/* . # dump dist files at the root 6 | git add assets/ index.html # add them 7 | git status # show the user so they can commit -------------------------------------------------------------------------------- /cmd/wasm/main.go: -------------------------------------------------------------------------------- 1 | // Includes functions useful for tardis, written in Go. 2 | // Compiled using TinyGo to keep .wasm file sizes small. 3 | package main 4 | 5 | import ( 6 | "crypto/sha256" 7 | "encoding/base64" 8 | "encoding/json" 9 | "fmt" 10 | "syscall/js" 11 | 12 | "github.com/matrix-org/gomatrixserverlib" 13 | "github.com/matrix-org/gomatrixserverlib/spec" 14 | ) 15 | 16 | // This function is exported to JS, and returns the event ID for the input event JSON and 17 | // room version using the same code paths as Dendrite. 
18 | func eventIDForEvent(this js.Value, args []js.Value) any { 19 | if len(args) != 2 { 20 | fmt.Println("eventIDForEvent: must be called with (event, roomVer)") 21 | return "" 22 | } 23 | eventJSON := args[0].String() 24 | roomVerStr := args[1].String() 25 | roomVersion := gomatrixserverlib.RoomVersion(roomVerStr) 26 | verImpl, err := gomatrixserverlib.GetRoomVersion(roomVersion) 27 | if err != nil { 28 | return "" 29 | } 30 | redactedJSON, err := verImpl.RedactEventJSON([]byte(eventJSON)) 31 | if err != nil { 32 | return "" 33 | } 34 | 35 | var event map[string]spec.RawJSON 36 | if err = json.Unmarshal(redactedJSON, &event); err != nil { 37 | return "" 38 | } 39 | 40 | delete(event, "signatures") 41 | delete(event, "unsigned") 42 | existingEventID := event["event_id"] 43 | delete(event, "event_id") 44 | 45 | hashableEventJSON, err := json.Marshal(event) 46 | if err != nil { 47 | return "" 48 | } 49 | 50 | hashableEventJSON, err = gomatrixserverlib.CanonicalJSON(hashableEventJSON) 51 | if err != nil { 52 | return "" 53 | } 54 | 55 | sha256Hash := sha256.Sum256(hashableEventJSON) 56 | var eventID string 57 | 58 | eventFormat := verImpl.EventFormat() 59 | eventIDFormat := verImpl.EventIDFormat() 60 | 61 | switch eventFormat { 62 | case gomatrixserverlib.EventFormatV1: 63 | if err = json.Unmarshal(existingEventID, &eventID); err != nil { 64 | return "" 65 | } 66 | case gomatrixserverlib.EventFormatV2: 67 | var encoder *base64.Encoding 68 | switch eventIDFormat { 69 | case gomatrixserverlib.EventIDFormatV2: 70 | encoder = base64.RawStdEncoding.WithPadding(base64.NoPadding) 71 | case gomatrixserverlib.EventIDFormatV3: 72 | encoder = base64.RawURLEncoding.WithPadding(base64.NoPadding) 73 | default: 74 | return "" 75 | } 76 | eventID = "$" + encoder.EncodeToString(sha256Hash[:]) 77 | default: 78 | return "" 79 | } 80 | 81 | return eventID 82 | } 83 | 84 | func main() { 85 | wait := make(chan struct{}, 0) 86 | js.Global().Set("gmslEventIDForEvent", js.FuncOf(eventIDForEvent)) 87 | <-wait 88 | } 89 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/matrix-org/tardis 2 | 3 | go 1.23.1 4 | 5 | require ( 6 | github.com/matrix-org/gomatrixserverlib v0.0.0-20240910190622-2c764912ce93 // indirect 7 | github.com/matrix-org/util v0.0.0-20200807132607-55161520e1d4 // indirect 8 | github.com/sirupsen/logrus v1.9.0 // indirect 9 | github.com/tidwall/gjson v1.14.3 // indirect 10 | github.com/tidwall/match v1.1.1 // indirect 11 | github.com/tidwall/pretty v1.2.0 // indirect 12 | github.com/tidwall/sjson v1.2.5 // indirect 13 | golang.org/x/crypto v0.17.0 // indirect 14 | golang.org/x/sys v0.15.0 // indirect 15 | ) 16 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 2 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 3 | github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= 4 | github.com/matrix-org/gomatrixserverlib v0.0.0-20240910190622-2c764912ce93 h1:FbyZ/xkeBVYHi2xfwAVaNmDhP+4HNbt9e6ucOR+jvBk= 5 | github.com/matrix-org/gomatrixserverlib v0.0.0-20240910190622-2c764912ce93/go.mod h1:HZGsVJ3bUE+DkZtufkH9H0mlsvbhEGK5CpX0Zlavylg= 6 | 
github.com/matrix-org/util v0.0.0-20200807132607-55161520e1d4 h1:eCEHXWDv9Rm335MSuB49mFUK44bwZPFSDde3ORE3syk= 7 | github.com/matrix-org/util v0.0.0-20200807132607-55161520e1d4/go.mod h1:vVQlW/emklohkZnOPwD3LrZUBqdfsbiyO3p1lNV8F6U= 8 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 9 | github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= 10 | github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= 11 | github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= 12 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 13 | github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 14 | github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= 15 | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 16 | github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 17 | github.com/tidwall/gjson v1.14.3 h1:9jvXn7olKEHU1S9vwoMGliaT8jq1vJ7IH/n9zD9Dnlw= 18 | github.com/tidwall/gjson v1.14.3/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 19 | github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= 20 | github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= 21 | github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= 22 | github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= 23 | github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= 24 | github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= 25 | golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= 26 | golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= 27 | golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 28 | golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 29 | golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= 30 | golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= 31 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 32 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 33 | -------------------------------------------------------------------------------- /helm/tardis/.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 
4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *.orig 18 | *~ 19 | # Various IDEs 20 | .project 21 | .idea/ 22 | *.tmproj 23 | .vscode/ 24 | -------------------------------------------------------------------------------- /helm/tardis/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: tardis 3 | description: A Helm chart for shim servers for tardis 4 | 5 | # A chart can be either an 'application' or a 'library' chart. 6 | # 7 | # Application charts are a collection of templates that can be packaged into versioned archives 8 | # to be deployed. 9 | # 10 | # Library charts provide useful utilities or functions for the chart developer. They're included as 11 | # a dependency of application charts to inject those utilities and functions into the rendering 12 | # pipeline. Library charts do not define any templates and therefore cannot be deployed. 13 | type: application 14 | 15 | # This is the chart version. This version number should be incremented each time you make changes 16 | # to the chart and its templates, including the app version. 17 | # Versions are expected to follow Semantic Versioning (https://semver.org/) 18 | version: 0.1.0 19 | 20 | # This is the version number of the application being deployed. This version number should be 21 | # incremented each time you make changes to the application. Versions are not expected to 22 | # follow Semantic Versioning. They should reflect the version the application is using. 23 | # It is recommended to use it with quotes. 24 | appVersion: "v0.0.1" 25 | -------------------------------------------------------------------------------- /helm/tardis/templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | 1. Get the application URL by running these commands: 2 | {{- if .Values.ingress.enabled }} 3 | {{- range $host := .Values.ingress.hosts }} 4 | {{- range .paths }} 5 | http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }} 6 | {{- end }} 7 | {{- end }} 8 | {{- else if contains "NodePort" .Values.service.type }} 9 | export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "tardis.fullname" . }}) 10 | export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") 11 | echo http://$NODE_IP:$NODE_PORT 12 | {{- else if contains "LoadBalancer" .Values.service.type }} 13 | NOTE: It may take a few minutes for the LoadBalancer IP to be available. 14 | You can watch its status by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "tardis.fullname" . }}' 15 | export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "tardis.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") 16 | echo http://$SERVICE_IP:{{ .Values.service.port }} 17 | {{- else if contains "ClusterIP" .Values.service.type }} 18 | export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "tardis.name" . 
}},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") 19 | export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") 20 | echo "Visit http://127.0.0.1:8080 to use your application" 21 | kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT 22 | {{- end }} 23 | -------------------------------------------------------------------------------- /helm/tardis/templates/_helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* 2 | Expand the name of the chart. 3 | */}} 4 | {{- define "tardis.name" -}} 5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} 6 | {{- end }} 7 | 8 | {{/* 9 | Create a default fully qualified app name. 10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 11 | If release name contains chart name it will be used as a full name. 12 | */}} 13 | {{- define "tardis.fullname" -}} 14 | {{- if .Values.fullnameOverride }} 15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} 16 | {{- else }} 17 | {{- $name := default .Chart.Name .Values.nameOverride }} 18 | {{- if contains $name .Release.Name }} 19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }} 20 | {{- else }} 21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} 22 | {{- end }} 23 | {{- end }} 24 | {{- end }} 25 | 26 | {{/* 27 | Create chart name and version as used by the chart label. 28 | */}} 29 | {{- define "tardis.chart" -}} 30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} 31 | {{- end }} 32 | 33 | {{/* 34 | Common labels 35 | */}} 36 | {{- define "tardis.labels" -}} 37 | helm.sh/chart: {{ include "tardis.chart" . }} 38 | {{ include "tardis.selectorLabels" . }} 39 | {{- if .Chart.AppVersion }} 40 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} 41 | {{- end }} 42 | app.kubernetes.io/managed-by: {{ .Release.Service }} 43 | {{- end }} 44 | 45 | {{/* 46 | Selector labels 47 | */}} 48 | {{- define "tardis.selectorLabels" -}} 49 | app.kubernetes.io/name: {{ include "tardis.name" . }} 50 | app.kubernetes.io/instance: {{ .Release.Name }} 51 | {{- end }} 52 | 53 | {{/* 54 | Create the name of the service account to use 55 | */}} 56 | {{- define "tardis.serviceAccountName" -}} 57 | {{- if .Values.serviceAccount.create }} 58 | {{- default (include "tardis.fullname" .) .Values.serviceAccount.name }} 59 | {{- else }} 60 | {{- default "default" .Values.serviceAccount.name }} 61 | {{- end }} 62 | {{- end }} 63 | -------------------------------------------------------------------------------- /helm/tardis/templates/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ include "tardis.fullname" . }} 5 | labels: 6 | {{- include "tardis.labels" . | nindent 4 }} 7 | spec: 8 | {{- if not .Values.autoscaling.enabled }} 9 | replicas: {{ .Values.replicaCount }} 10 | {{- end }} 11 | selector: 12 | matchLabels: 13 | {{- include "tardis.selectorLabels" . | nindent 6 }} 14 | template: 15 | metadata: 16 | {{- with .Values.podAnnotations }} 17 | annotations: 18 | {{- toYaml . | nindent 8 }} 19 | {{- end }} 20 | labels: 21 | {{- include "tardis.labels" . | nindent 8 }} 22 | {{- with .Values.podLabels }} 23 | {{- toYaml . 
| nindent 8 }} 24 | {{- end }} 25 | spec: 26 | {{- with .Values.imagePullSecrets }} 27 | imagePullSecrets: 28 | {{- toYaml . | nindent 8 }} 29 | {{- end }} 30 | serviceAccountName: {{ include "tardis.serviceAccountName" . }} 31 | securityContext: 32 | {{- toYaml .Values.podSecurityContext | nindent 8 }} 33 | containers: 34 | - name: {{ .Chart.Name }} 35 | securityContext: 36 | {{- toYaml .Values.securityContext | nindent 12 }} 37 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" 38 | imagePullPolicy: {{ .Values.image.pullPolicy }} 39 | ports: 40 | - name: http 41 | containerPort: 1234 42 | protocol: TCP 43 | livenessProbe: 44 | {{- toYaml .Values.livenessProbe | nindent 12 }} 45 | readinessProbe: 46 | {{- toYaml .Values.readinessProbe | nindent 12 }} 47 | resources: 48 | {{- toYaml .Values.resources | nindent 12 }} 49 | {{- with .Values.volumeMounts }} 50 | volumeMounts: 51 | {{- toYaml . | nindent 12 }} 52 | {{- end }} 53 | {{- with .Values.volumes }} 54 | volumes: 55 | {{- toYaml . | nindent 8 }} 56 | {{- end }} 57 | {{- with .Values.nodeSelector }} 58 | nodeSelector: 59 | {{- toYaml . | nindent 8 }} 60 | {{- end }} 61 | {{- with .Values.affinity }} 62 | affinity: 63 | {{- toYaml . | nindent 8 }} 64 | {{- end }} 65 | {{- with .Values.tolerations }} 66 | tolerations: 67 | {{- toYaml . | nindent 8 }} 68 | {{- end }} 69 | -------------------------------------------------------------------------------- /helm/tardis/templates/hpa.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.autoscaling.enabled }} 2 | apiVersion: autoscaling/v2 3 | kind: HorizontalPodAutoscaler 4 | metadata: 5 | name: {{ include "tardis.fullname" . }} 6 | labels: 7 | {{- include "tardis.labels" . | nindent 4 }} 8 | spec: 9 | scaleTargetRef: 10 | apiVersion: apps/v1 11 | kind: Deployment 12 | name: {{ include "tardis.fullname" . }} 13 | minReplicas: {{ .Values.autoscaling.minReplicas }} 14 | maxReplicas: {{ .Values.autoscaling.maxReplicas }} 15 | metrics: 16 | {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} 17 | - type: Resource 18 | resource: 19 | name: cpu 20 | target: 21 | type: Utilization 22 | averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} 23 | {{- end }} 24 | {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} 25 | - type: Resource 26 | resource: 27 | name: memory 28 | target: 29 | type: Utilization 30 | averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} 31 | {{- end }} 32 | {{- end }} 33 | -------------------------------------------------------------------------------- /helm/tardis/templates/ingress.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.ingress.enabled -}} 2 | {{- $fullName := include "tardis.fullname" . 
-}} 3 | {{- $svcPort := .Values.service.port -}} 4 | {{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} 5 | {{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }} 6 | {{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}} 7 | {{- end }} 8 | {{- end }} 9 | {{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} 10 | apiVersion: networking.k8s.io/v1 11 | {{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} 12 | apiVersion: networking.k8s.io/v1beta1 13 | {{- else -}} 14 | apiVersion: extensions/v1beta1 15 | {{- end }} 16 | kind: Ingress 17 | metadata: 18 | name: {{ $fullName }} 19 | labels: 20 | {{- include "tardis.labels" . | nindent 4 }} 21 | {{- with .Values.ingress.annotations }} 22 | annotations: 23 | {{- toYaml . | nindent 4 }} 24 | {{- end }} 25 | spec: 26 | {{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} 27 | ingressClassName: {{ .Values.ingress.className }} 28 | {{- end }} 29 | {{- if .Values.ingress.tls }} 30 | tls: 31 | {{- range .Values.ingress.tls }} 32 | - hosts: 33 | {{- range .hosts }} 34 | - {{ . | quote }} 35 | {{- end }} 36 | secretName: {{ .secretName }} 37 | {{- end }} 38 | {{- end }} 39 | rules: 40 | {{- range .Values.ingress.hosts }} 41 | - host: {{ .host | quote }} 42 | http: 43 | paths: 44 | {{- range .paths }} 45 | - path: {{ .path }} 46 | {{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }} 47 | pathType: {{ .pathType }} 48 | {{- end }} 49 | backend: 50 | {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} 51 | service: 52 | name: {{ $fullName }} 53 | port: 54 | number: {{ $svcPort }} 55 | {{- else }} 56 | serviceName: {{ $fullName }} 57 | servicePort: {{ $svcPort }} 58 | {{- end }} 59 | {{- end }} 60 | {{- end }} 61 | {{- end }} 62 | -------------------------------------------------------------------------------- /helm/tardis/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "tardis.fullname" . }} 5 | labels: 6 | {{- include "tardis.labels" . | nindent 4 }} 7 | spec: 8 | type: {{ .Values.service.type }} 9 | ports: 10 | - port: {{ .Values.service.port }} 11 | targetPort: http 12 | protocol: TCP 13 | name: http 14 | selector: 15 | {{- include "tardis.selectorLabels" . | nindent 4 }} 16 | -------------------------------------------------------------------------------- /helm/tardis/templates/serviceaccount.yaml: -------------------------------------------------------------------------------- 1 | {{- if .Values.serviceAccount.create -}} 2 | apiVersion: v1 3 | kind: ServiceAccount 4 | metadata: 5 | name: {{ include "tardis.serviceAccountName" . }} 6 | labels: 7 | {{- include "tardis.labels" . | nindent 4 }} 8 | {{- with .Values.serviceAccount.annotations }} 9 | annotations: 10 | {{- toYaml . | nindent 4 }} 11 | {{- end }} 12 | automountServiceAccountToken: {{ .Values.serviceAccount.automount }} 13 | {{- end }} 14 | -------------------------------------------------------------------------------- /helm/tardis/templates/tests/test-connection.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Pod 3 | metadata: 4 | name: "{{ include "tardis.fullname" . 
}}-test-connection" 5 | labels: 6 | {{- include "tardis.labels" . | nindent 4 }} 7 | annotations: 8 | "helm.sh/hook": test 9 | spec: 10 | containers: 11 | - name: wget 12 | image: busybox 13 | command: ['wget'] 14 | args: ['{{ include "tardis.fullname" . }}:{{ .Values.service.port }}'] 15 | restartPolicy: Never 16 | -------------------------------------------------------------------------------- /helm/tardis/values.yaml: -------------------------------------------------------------------------------- 1 | # Default values for tardis. 2 | # This is a YAML-formatted file. 3 | # Declare variables to be passed into your templates. 4 | 5 | replicaCount: 1 6 | 7 | image: 8 | repository: ghcr.io/matrix-org/tardis-synapse 9 | pullPolicy: IfNotPresent 10 | # Overrides the image tag whose default is the chart appVersion. 11 | tag: "" 12 | 13 | imagePullSecrets: [] 14 | nameOverride: "" 15 | fullnameOverride: "" 16 | 17 | serviceAccount: 18 | # Specifies whether a service account should be created 19 | create: true 20 | # Automatically mount a ServiceAccount's API credentials? 21 | automount: true 22 | # Annotations to add to the service account 23 | annotations: {} 24 | # The name of the service account to use. 25 | # If not set and create is true, a name is generated using the fullname template 26 | name: "" 27 | 28 | podAnnotations: {} 29 | podLabels: {} 30 | 31 | podSecurityContext: {} 32 | # fsGroup: 2000 33 | 34 | securityContext: {} 35 | # capabilities: 36 | # drop: 37 | # - ALL 38 | # readOnlyRootFilesystem: true 39 | # runAsNonRoot: true 40 | # runAsUser: 1000 41 | 42 | service: 43 | type: ClusterIP 44 | port: 80 45 | 46 | ingress: 47 | enabled: false 48 | className: "" 49 | annotations: {} 50 | # kubernetes.io/ingress.class: nginx 51 | # kubernetes.io/tls-acme: "true" 52 | hosts: 53 | - host: chart-example.local 54 | paths: 55 | - path: / 56 | pathType: ImplementationSpecific 57 | tls: [] 58 | # - secretName: chart-example-tls 59 | # hosts: 60 | # - chart-example.local 61 | 62 | resources: {} 63 | # We usually recommend not to specify default resources and to leave this as a conscious 64 | # choice for the user. This also increases chances charts run on environments with little 65 | # resources, such as Minikube. If you do want to specify resources, uncomment the following 66 | # lines, adjust them as necessary, and remove the curly braces after 'resources:'. 67 | # limits: 68 | # cpu: 100m 69 | # memory: 128Mi 70 | # requests: 71 | # cpu: 100m 72 | # memory: 128Mi 73 | 74 | livenessProbe: 75 | tcpSocket: 76 | port: http 77 | readinessProbe: 78 | tcpSocket: 79 | port: http 80 | 81 | autoscaling: 82 | enabled: false 83 | minReplicas: 1 84 | maxReplicas: 100 85 | targetCPUUtilizationPercentage: 80 86 | # targetMemoryUtilizationPercentage: 80 87 | 88 | # Additional volumes on the output Deployment definition. 89 | volumes: [] 90 | # - name: foo 91 | # secret: 92 | # secretName: mysecret 93 | # optional: false 94 | 95 | # Additional volumeMounts on the output Deployment definition. 
96 | volumeMounts: [] 97 | # - name: foo 98 | # mountPath: "/etc/foo" 99 | # readOnly: true 100 | 101 | nodeSelector: {} 102 | 103 | tolerations: [] 104 | 105 | affinity: {} 106 | -------------------------------------------------------------------------------- /img/sugiyama.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/tardis/1c10b1b2a6503f7814c102d16fa428d159862c60/img/sugiyama.png -------------------------------------------------------------------------------- /img/tardis.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/tardis/1c10b1b2a6503f7814c102d16fa428d159862c60/img/tardis.png -------------------------------------------------------------------------------- /img/zherebko.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/tardis/1c10b1b2a6503f7814c102d16fa428d159862c60/img/zherebko.png -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | TARDIS - Time Agnostic Room DAG Inspection Service 7 | 8 | 9 | 10 | 11 |
12 | 26 |
27 | [ Close ]
28 |
29 |         
30 |
31 |
32 | 33 |
34 | 35 | 47 | 48 | 49 | 50 | 51 | 52 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest').JestConfigWithTsJest} **/ 2 | export default { 3 | testEnvironment: "node", 4 | transform: { 5 | "^.+.tsx?$": ["ts-jest", {}], 6 | }, 7 | }; 8 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tardis", 3 | "version": "0.0.0", 4 | "description": "Time Agnostic Room DAG Inspection Service", 5 | "type": "module", 6 | "scripts": { 7 | "build": "vite build --base=/tardis", 8 | "start": "vite", 9 | "test": "jest ./src", 10 | "lint": "biome check", 11 | "format": "biome format" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+ssh://git@github.com/matrix-org/tardis.git" 16 | }, 17 | "author": "The Matrix.org Foundation C.I.C.", 18 | "license": "Apache-2.0", 19 | "bugs": { 20 | "url": "https://github.com/matrix-org/tardis/issues" 21 | }, 22 | "homepage": "https://github.com/matrix-org/tardis#readme", 23 | "dependencies": { 24 | "@types/d3": "6.7.0", 25 | "@types/jest": "^29.5.13", 26 | "d3": "^6.7.0", 27 | "jest": "^29.7.0", 28 | "json5": "^2.2.3", 29 | "ts-jest": "^29.2.5", 30 | "typescript": "^5.6.3" 31 | }, 32 | "devDependencies": { 33 | "@biomejs/biome": "1.9.2", 34 | "vite": "^5.4.8" 35 | }, 36 | "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e" 37 | } 38 | -------------------------------------------------------------------------------- /public/favicon.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /public/gmsl.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matrix-org/tardis/1c10b1b2a6503f7814c102d16fa428d159862c60/public/gmsl.wasm -------------------------------------------------------------------------------- /public/wasm_exec.js: -------------------------------------------------------------------------------- 1 | // Copyright 2018 The Go Authors. All rights reserved. 2 | // Use of this source code is governed by a BSD-style 3 | // license that can be found in the LICENSE file. 4 | // 5 | // This file has been modified for use by the TinyGo compiler. 6 | 7 | (() => { 8 | // Map multiple JavaScript environments to a single common API, 9 | // preferring web standards over Node.js API. 
10 | // 11 | // Environments considered: 12 | // - Browsers 13 | // - Node.js 14 | // - Electron 15 | // - Parcel 16 | 17 | if (typeof global !== "undefined") { 18 | // global already exists 19 | } else if (typeof window !== "undefined") { 20 | window.global = window; 21 | } else if (typeof self !== "undefined") { 22 | self.global = self; 23 | } else { 24 | throw new Error("cannot export Go (neither global, window nor self is defined)"); 25 | } 26 | 27 | if (!global.require && typeof require !== "undefined") { 28 | global.require = require; 29 | } 30 | 31 | if (!global.fs && global.require) { 32 | global.fs = require("node:fs"); 33 | } 34 | 35 | const enosys = () => { 36 | const err = new Error("not implemented"); 37 | err.code = "ENOSYS"; 38 | return err; 39 | }; 40 | 41 | if (!global.fs) { 42 | let outputBuf = ""; 43 | global.fs = { 44 | constants: { O_WRONLY: -1, O_RDWR: -1, O_CREAT: -1, O_TRUNC: -1, O_APPEND: -1, O_EXCL: -1 }, // unused 45 | writeSync(fd, buf) { 46 | outputBuf += decoder.decode(buf); 47 | const nl = outputBuf.lastIndexOf("\n"); 48 | if (nl != -1) { 49 | console.log(outputBuf.substr(0, nl)); 50 | outputBuf = outputBuf.substr(nl + 1); 51 | } 52 | return buf.length; 53 | }, 54 | write(fd, buf, offset, length, position, callback) { 55 | if (offset !== 0 || length !== buf.length || position !== null) { 56 | callback(enosys()); 57 | return; 58 | } 59 | const n = this.writeSync(fd, buf); 60 | callback(null, n); 61 | }, 62 | chmod(path, mode, callback) { 63 | callback(enosys()); 64 | }, 65 | chown(path, uid, gid, callback) { 66 | callback(enosys()); 67 | }, 68 | close(fd, callback) { 69 | callback(enosys()); 70 | }, 71 | fchmod(fd, mode, callback) { 72 | callback(enosys()); 73 | }, 74 | fchown(fd, uid, gid, callback) { 75 | callback(enosys()); 76 | }, 77 | fstat(fd, callback) { 78 | callback(enosys()); 79 | }, 80 | fsync(fd, callback) { 81 | callback(null); 82 | }, 83 | ftruncate(fd, length, callback) { 84 | callback(enosys()); 85 | }, 86 | lchown(path, uid, gid, callback) { 87 | callback(enosys()); 88 | }, 89 | link(path, link, callback) { 90 | callback(enosys()); 91 | }, 92 | lstat(path, callback) { 93 | callback(enosys()); 94 | }, 95 | mkdir(path, perm, callback) { 96 | callback(enosys()); 97 | }, 98 | open(path, flags, mode, callback) { 99 | callback(enosys()); 100 | }, 101 | read(fd, buffer, offset, length, position, callback) { 102 | callback(enosys()); 103 | }, 104 | readdir(path, callback) { 105 | callback(enosys()); 106 | }, 107 | readlink(path, callback) { 108 | callback(enosys()); 109 | }, 110 | rename(from, to, callback) { 111 | callback(enosys()); 112 | }, 113 | rmdir(path, callback) { 114 | callback(enosys()); 115 | }, 116 | stat(path, callback) { 117 | callback(enosys()); 118 | }, 119 | symlink(path, link, callback) { 120 | callback(enosys()); 121 | }, 122 | truncate(path, length, callback) { 123 | callback(enosys()); 124 | }, 125 | unlink(path, callback) { 126 | callback(enosys()); 127 | }, 128 | utimes(path, atime, mtime, callback) { 129 | callback(enosys()); 130 | }, 131 | }; 132 | } 133 | 134 | if (!global.process) { 135 | global.process = { 136 | getuid() { 137 | return -1; 138 | }, 139 | getgid() { 140 | return -1; 141 | }, 142 | geteuid() { 143 | return -1; 144 | }, 145 | getegid() { 146 | return -1; 147 | }, 148 | getgroups() { 149 | throw enosys(); 150 | }, 151 | pid: -1, 152 | ppid: -1, 153 | umask() { 154 | throw enosys(); 155 | }, 156 | cwd() { 157 | throw enosys(); 158 | }, 159 | chdir() { 160 | throw enosys(); 161 | }, 162 | }; 163 | } 164 | 165 | 
if (!global.crypto) { 166 | const nodeCrypto = require("node:crypto"); 167 | global.crypto = { 168 | getRandomValues(b) { 169 | nodeCrypto.randomFillSync(b); 170 | }, 171 | }; 172 | } 173 | 174 | if (!global.performance) { 175 | global.performance = { 176 | now() { 177 | const [sec, nsec] = process.hrtime(); 178 | return sec * 1000 + nsec / 1000000; 179 | }, 180 | }; 181 | } 182 | 183 | if (!global.TextEncoder) { 184 | global.TextEncoder = require("node:util").TextEncoder; 185 | } 186 | 187 | if (!global.TextDecoder) { 188 | global.TextDecoder = require("node:util").TextDecoder; 189 | } 190 | 191 | // End of polyfills for common API. 192 | 193 | const encoder = new TextEncoder("utf-8"); 194 | const decoder = new TextDecoder("utf-8"); 195 | const reinterpretBuf = new DataView(new ArrayBuffer(8)); 196 | var logLine = []; 197 | 198 | global.Go = class { 199 | constructor() { 200 | this._callbackTimeouts = new Map(); 201 | this._nextCallbackTimeoutID = 1; 202 | 203 | const mem = () => { 204 | // The buffer may change when requesting more memory. 205 | return new DataView(this._inst.exports.memory.buffer); 206 | }; 207 | 208 | const unboxValue = (v_ref) => { 209 | reinterpretBuf.setBigInt64(0, v_ref, true); 210 | const f = reinterpretBuf.getFloat64(0, true); 211 | if (f === 0) { 212 | return undefined; 213 | } 214 | if (!isNaN(f)) { 215 | return f; 216 | } 217 | 218 | const id = v_ref & 0xffffffffn; 219 | return this._values[id]; 220 | }; 221 | 222 | const loadValue = (addr) => { 223 | const v_ref = mem().getBigUint64(addr, true); 224 | return unboxValue(v_ref); 225 | }; 226 | 227 | const boxValue = (v) => { 228 | const nanHead = 0x7ff80000n; 229 | 230 | if (typeof v === "number") { 231 | if (isNaN(v)) { 232 | return nanHead << 32n; 233 | } 234 | if (v === 0) { 235 | return (nanHead << 32n) | 1n; 236 | } 237 | reinterpretBuf.setFloat64(0, v, true); 238 | return reinterpretBuf.getBigInt64(0, true); 239 | } 240 | 241 | switch (v) { 242 | case undefined: 243 | return 0n; 244 | case null: 245 | return (nanHead << 32n) | 2n; 246 | case true: 247 | return (nanHead << 32n) | 3n; 248 | case false: 249 | return (nanHead << 32n) | 4n; 250 | } 251 | 252 | let id = this._ids.get(v); 253 | if (id === undefined) { 254 | id = this._idPool.pop(); 255 | if (id === undefined) { 256 | id = BigInt(this._values.length); 257 | } 258 | this._values[id] = v; 259 | this._goRefCounts[id] = 0; 260 | this._ids.set(v, id); 261 | } 262 | this._goRefCounts[id]++; 263 | let typeFlag = 1n; 264 | switch (typeof v) { 265 | case "string": 266 | typeFlag = 2n; 267 | break; 268 | case "symbol": 269 | typeFlag = 3n; 270 | break; 271 | case "function": 272 | typeFlag = 4n; 273 | break; 274 | } 275 | return id | ((nanHead | typeFlag) << 32n); 276 | }; 277 | 278 | const storeValue = (addr, v) => { 279 | const v_ref = boxValue(v); 280 | mem().setBigUint64(addr, v_ref, true); 281 | }; 282 | 283 | const loadSlice = (array, len, cap) => { 284 | return new Uint8Array(this._inst.exports.memory.buffer, array, len); 285 | }; 286 | 287 | const loadSliceOfValues = (array, len, cap) => { 288 | const a = new Array(len); 289 | for (let i = 0; i < len; i++) { 290 | a[i] = loadValue(array + i * 8); 291 | } 292 | return a; 293 | }; 294 | 295 | const loadString = (ptr, len) => { 296 | return decoder.decode(new DataView(this._inst.exports.memory.buffer, ptr, len)); 297 | }; 298 | 299 | const timeOrigin = Date.now() - performance.now(); 300 | this.importObject = { 301 | wasi_snapshot_preview1: { 302 | // 
https://github.com/WebAssembly/WASI/blob/main/phases/snapshot/docs.md#fd_write 303 | fd_write: (fd, iovs_ptr, iovs_len, nwritten_ptr) => { 304 | let nwritten = 0; 305 | if (fd == 1) { 306 | for (let iovs_i = 0; iovs_i < iovs_len; iovs_i++) { 307 | const iov_ptr = iovs_ptr + iovs_i * 8; // assuming wasm32 308 | const ptr = mem().getUint32(iov_ptr + 0, true); 309 | const len = mem().getUint32(iov_ptr + 4, true); 310 | nwritten += len; 311 | for (let i = 0; i < len; i++) { 312 | const c = mem().getUint8(ptr + i); 313 | if (c == 13) { 314 | // CR 315 | // ignore 316 | } else if (c == 10) { 317 | // LF 318 | // write line 319 | const line = decoder.decode(new Uint8Array(logLine)); 320 | logLine = []; 321 | console.log(line); 322 | } else { 323 | logLine.push(c); 324 | } 325 | } 326 | } 327 | } else { 328 | console.error("invalid file descriptor:", fd); 329 | } 330 | mem().setUint32(nwritten_ptr, nwritten, true); 331 | return 0; 332 | }, 333 | fd_close: () => 0, // dummy 334 | fd_fdstat_get: () => 0, // dummy 335 | fd_seek: () => 0, // dummy 336 | proc_exit: (code) => { 337 | if (global.process) { 338 | // Node.js 339 | process.exit(code); 340 | } else { 341 | // Can't exit in a browser. 342 | throw "trying to exit with code " + code; 343 | } 344 | }, 345 | random_get: (bufPtr, bufLen) => { 346 | crypto.getRandomValues(loadSlice(bufPtr, bufLen)); 347 | return 0; 348 | }, 349 | }, 350 | gojs: { 351 | // func ticks() float64 352 | "runtime.ticks": () => { 353 | return timeOrigin + performance.now(); 354 | }, 355 | 356 | // func sleepTicks(timeout float64) 357 | "runtime.sleepTicks": (timeout) => { 358 | // Do not sleep, only reactivate scheduler after the given timeout. 359 | setTimeout(this._inst.exports.go_scheduler, timeout); 360 | }, 361 | 362 | // func finalizeRef(v ref) 363 | "syscall/js.finalizeRef": (v_ref) => { 364 | // Note: TinyGo does not support finalizers so this should never be 365 | // called. 
366 | // console.error('syscall/js.finalizeRef not implemented'); 367 | }, 368 | 369 | // func stringVal(value string) ref 370 | "syscall/js.stringVal": (value_ptr, value_len) => { 371 | const s = loadString(value_ptr, value_len); 372 | return boxValue(s); 373 | }, 374 | 375 | // func valueGet(v ref, p string) ref 376 | "syscall/js.valueGet": (v_ref, p_ptr, p_len) => { 377 | const prop = loadString(p_ptr, p_len); 378 | const v = unboxValue(v_ref); 379 | const result = Reflect.get(v, prop); 380 | return boxValue(result); 381 | }, 382 | 383 | // func valueSet(v ref, p string, x ref) 384 | "syscall/js.valueSet": (v_ref, p_ptr, p_len, x_ref) => { 385 | const v = unboxValue(v_ref); 386 | const p = loadString(p_ptr, p_len); 387 | const x = unboxValue(x_ref); 388 | Reflect.set(v, p, x); 389 | }, 390 | 391 | // func valueDelete(v ref, p string) 392 | "syscall/js.valueDelete": (v_ref, p_ptr, p_len) => { 393 | const v = unboxValue(v_ref); 394 | const p = loadString(p_ptr, p_len); 395 | Reflect.deleteProperty(v, p); 396 | }, 397 | 398 | // func valueIndex(v ref, i int) ref 399 | "syscall/js.valueIndex": (v_ref, i) => { 400 | return boxValue(Reflect.get(unboxValue(v_ref), i)); 401 | }, 402 | 403 | // valueSetIndex(v ref, i int, x ref) 404 | "syscall/js.valueSetIndex": (v_ref, i, x_ref) => { 405 | Reflect.set(unboxValue(v_ref), i, unboxValue(x_ref)); 406 | }, 407 | 408 | // func valueCall(v ref, m string, args []ref) (ref, bool) 409 | "syscall/js.valueCall": (ret_addr, v_ref, m_ptr, m_len, args_ptr, args_len, args_cap) => { 410 | const v = unboxValue(v_ref); 411 | const name = loadString(m_ptr, m_len); 412 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 413 | try { 414 | const m = Reflect.get(v, name); 415 | storeValue(ret_addr, Reflect.apply(m, v, args)); 416 | mem().setUint8(ret_addr + 8, 1); 417 | } catch (err) { 418 | storeValue(ret_addr, err); 419 | mem().setUint8(ret_addr + 8, 0); 420 | } 421 | }, 422 | 423 | // func valueInvoke(v ref, args []ref) (ref, bool) 424 | "syscall/js.valueInvoke": (ret_addr, v_ref, args_ptr, args_len, args_cap) => { 425 | try { 426 | const v = unboxValue(v_ref); 427 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 428 | storeValue(ret_addr, Reflect.apply(v, undefined, args)); 429 | mem().setUint8(ret_addr + 8, 1); 430 | } catch (err) { 431 | storeValue(ret_addr, err); 432 | mem().setUint8(ret_addr + 8, 0); 433 | } 434 | }, 435 | 436 | // func valueNew(v ref, args []ref) (ref, bool) 437 | "syscall/js.valueNew": (ret_addr, v_ref, args_ptr, args_len, args_cap) => { 438 | const v = unboxValue(v_ref); 439 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 440 | try { 441 | storeValue(ret_addr, Reflect.construct(v, args)); 442 | mem().setUint8(ret_addr + 8, 1); 443 | } catch (err) { 444 | storeValue(ret_addr, err); 445 | mem().setUint8(ret_addr + 8, 0); 446 | } 447 | }, 448 | 449 | // func valueLength(v ref) int 450 | "syscall/js.valueLength": (v_ref) => { 451 | return unboxValue(v_ref).length; 452 | }, 453 | 454 | // valuePrepareString(v ref) (ref, int) 455 | "syscall/js.valuePrepareString": (ret_addr, v_ref) => { 456 | const s = String(unboxValue(v_ref)); 457 | const str = encoder.encode(s); 458 | storeValue(ret_addr, str); 459 | mem().setInt32(ret_addr + 8, str.length, true); 460 | }, 461 | 462 | // valueLoadString(v ref, b []byte) 463 | "syscall/js.valueLoadString": (v_ref, slice_ptr, slice_len, slice_cap) => { 464 | const str = unboxValue(v_ref); 465 | loadSlice(slice_ptr, slice_len, slice_cap).set(str); 466 | }, 467 | 468 | // 
func valueInstanceOf(v ref, t ref) bool 469 | "syscall/js.valueInstanceOf": (v_ref, t_ref) => { 470 | return unboxValue(v_ref) instanceof unboxValue(t_ref); 471 | }, 472 | 473 | // func copyBytesToGo(dst []byte, src ref) (int, bool) 474 | "syscall/js.copyBytesToGo": (ret_addr, dest_addr, dest_len, dest_cap, src_ref) => { 475 | const num_bytes_copied_addr = ret_addr; 476 | const returned_status_addr = ret_addr + 4; // Address of returned boolean status variable 477 | 478 | const dst = loadSlice(dest_addr, dest_len); 479 | const src = unboxValue(src_ref); 480 | if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) { 481 | mem().setUint8(returned_status_addr, 0); // Return "not ok" status 482 | return; 483 | } 484 | const toCopy = src.subarray(0, dst.length); 485 | dst.set(toCopy); 486 | mem().setUint32(num_bytes_copied_addr, toCopy.length, true); 487 | mem().setUint8(returned_status_addr, 1); // Return "ok" status 488 | }, 489 | 490 | // copyBytesToJS(dst ref, src []byte) (int, bool) 491 | // Originally copied from upstream Go project, then modified: 492 | // https://github.com/golang/go/blob/3f995c3f3b43033013013e6c7ccc93a9b1411ca9/misc/wasm/wasm_exec.js#L404-L416 493 | "syscall/js.copyBytesToJS": (ret_addr, dst_ref, src_addr, src_len, src_cap) => { 494 | const num_bytes_copied_addr = ret_addr; 495 | const returned_status_addr = ret_addr + 4; // Address of returned boolean status variable 496 | 497 | const dst = unboxValue(dst_ref); 498 | const src = loadSlice(src_addr, src_len); 499 | if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) { 500 | mem().setUint8(returned_status_addr, 0); // Return "not ok" status 501 | return; 502 | } 503 | const toCopy = src.subarray(0, dst.length); 504 | dst.set(toCopy); 505 | mem().setUint32(num_bytes_copied_addr, toCopy.length, true); 506 | mem().setUint8(returned_status_addr, 1); // Return "ok" status 507 | }, 508 | }, 509 | }; 510 | 511 | // Go 1.20 uses 'env'. Go 1.21 uses 'gojs'. 512 | // For compatibility, we use both as long as Go 1.20 is supported. 513 | this.importObject.env = this.importObject.gojs; 514 | } 515 | 516 | async run(instance) { 517 | this._inst = instance; 518 | this._values = [ 519 | // JS values that Go currently has references to, indexed by reference id 520 | Number.NaN, 521 | 0, 522 | null, 523 | true, 524 | false, 525 | global, 526 | this, 527 | ]; 528 | this._goRefCounts = []; // number of references that Go has to a JS value, indexed by reference id 529 | this._ids = new Map(); // mapping from JS values to reference ids 530 | this._idPool = []; // unused ids that have been garbage collected 531 | this.exited = false; // whether the Go program has exited 532 | 533 | if (this._inst.exports._start) { 534 | this._inst.exports._start(); 535 | 536 | // TODO: wait until the program exists. 
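			// (editor's note, not in the upstream file: the Promise below never settles, so run()
			// deliberately stays pending for the lifetime of the page while the Go program keeps
			// executing via scheduled callbacks.)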
537 | await new Promise(() => {}); 538 | } else { 539 | this._inst.exports._initialize(); 540 | } 541 | } 542 | 543 | _resume() { 544 | if (this.exited) { 545 | throw new Error("Go program has already exited"); 546 | } 547 | this._inst.exports.resume(); 548 | if (this.exited) { 549 | this._resolveExitPromise(); 550 | } 551 | } 552 | 553 | _makeFuncWrapper(id) { 554 | const go = this; 555 | return function () { 556 | const event = { id: id, this: this, args: arguments }; 557 | go._pendingEvent = event; 558 | go._resume(); 559 | return event.result; 560 | }; 561 | } 562 | }; 563 | 564 | if ( 565 | global.require && 566 | global.require.main === module && 567 | global.process && 568 | global.process.versions && 569 | !global.process.versions.electron 570 | ) { 571 | if (process.argv.length != 3) { 572 | console.error("usage: go_js_wasm_exec [wasm binary] [arguments]"); 573 | process.exit(1); 574 | } 575 | 576 | const go = new Go(); 577 | WebAssembly.instantiate(fs.readFileSync(process.argv[2]), go.importObject) 578 | .then((result) => { 579 | return go.run(result.instance); 580 | }) 581 | .catch((err) => { 582 | console.error(err); 583 | process.exit(1); 584 | }); 585 | } 586 | })(); 587 | -------------------------------------------------------------------------------- /shims/synapse/.dockerignore: -------------------------------------------------------------------------------- 1 | venv -------------------------------------------------------------------------------- /shims/synapse/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12 2 | COPY requirements-freeze.txt . 3 | # don't use requirements.txt as it doesn't pin transitive deps 4 | # twisted 24.10 breaks synapse as synapse is using `_encodeName` from twisted 5 | # so stick with a frozen version which uses 24.7 6 | RUN pip install -r requirements-freeze.txt 7 | COPY shim.py shim.py 8 | EXPOSE 1234 9 | ARG COMMIT=unset 10 | ENV COMMIT=${COMMIT} 11 | CMD ["python", "shim.py"] -------------------------------------------------------------------------------- /shims/synapse/README.md: -------------------------------------------------------------------------------- 1 | ## Synapse Shim 2 | 3 | This is a shim for implementing Synapse's state resolution algorithms. It works by importing the relevant 4 | Synapse libraries and calling it according to the TARDIS WebSockets API. Because it imports libraries within 5 | Synapse, it may break between major Synapse releases. 6 | 7 | The easiest way to use this shim is to build the Dockerfile (otherwise use a venv): 8 | 9 | ``` 10 | python -m venv ./venv 11 | source ./venv/bin/activate 12 | pip install -r requirements.txt 13 | python shim.py 14 | ``` -------------------------------------------------------------------------------- /shims/synapse/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -eu 2 | 3 | COMMIT=$(git rev-parse --short HEAD) 4 | DOCKER_DEFAULT_PLATFORM=linux/amd64 docker build --build-arg "COMMIT=$COMMIT" -t "tardis-synapse:$COMMIT" . 
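As a hedged aside on the Synapse shim documented above: judging from `shim.py` (further below), the WebSocket messages exchanged with TARDIS look roughly like the sketch here. This is an illustration only, not an authoritative spec, and every concrete identifier (`a-uuid`, `!room:example.org`, `@alice:example.org`, `$member_event_id`) is made up.

```
// Rough sketch of the wire format handled by shim.py; all concrete values are illustrative.
const resolveStateRequest = {
    id: "a-uuid",                 // pairs the eventual response with this request
    type: "resolve_state",
    data: {
        room_id: "!room:example.org",
        room_version: "10",
        // one entry per fork; keys are JSON-stringified [type, state_key] tuples
        state: [{ '["m.room.member","@alice:example.org"]': "$member_event_id" }],
        event: { /* the event to auth against the resolved state, including event_id */ },
    },
};

// The shim replies with the same id and type, carrying data.result (the resolved state map)
// and data.error (auth failure text, empty on success). While resolving, it may issue its own
// "get_event" requests, which TARDIS answers with data.event set to the full event JSON.
```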
-------------------------------------------------------------------------------- /shims/synapse/requirements-freeze.txt: -------------------------------------------------------------------------------- 1 | annotated-types==0.7.0 2 | attrs==24.2.0 3 | Automat==24.8.1 4 | bcrypt==4.2.0 5 | bleach==6.1.0 6 | canonicaljson==2.0.0 7 | certifi==2024.8.30 8 | cffi==1.17.1 9 | charset-normalizer==3.4.0 10 | constantly==23.10.4 11 | cryptography==43.0.1 12 | hyperlink==21.0.0 13 | idna==3.10 14 | ijson==3.3.0 15 | immutabledict==4.2.0 16 | incremental==24.7.2 17 | Jinja2==3.1.4 18 | jsonschema==4.23.0 19 | jsonschema-specifications==2024.10.1 20 | MarkupSafe==3.0.1 21 | matrix-common==1.3.0 22 | matrix-synapse==1.117.0 23 | msgpack==1.1.0 24 | netaddr==1.3.0 25 | packaging==24.1 26 | phonenumbers==8.13.47 27 | pillow==11.0.0 28 | prometheus_client==0.21.0 29 | pyasn1==0.6.1 30 | pyasn1_modules==0.4.1 31 | pycparser==2.22 32 | pydantic==2.9.2 33 | pydantic_core==2.23.4 34 | pymacaroons==0.13.0 35 | PyNaCl==1.5.0 36 | pyOpenSSL==24.2.1 37 | python-multipart==0.0.12 38 | PyYAML==6.0.2 39 | referencing==0.35.1 40 | requests==2.32.3 41 | rpds-py==0.20.0 42 | semantic-version==2.10.0 43 | service-identity==24.1.0 44 | setuptools==75.2.0 45 | setuptools-rust==1.10.2 46 | signedjson==1.1.4 47 | six==1.16.0 48 | sortedcontainers==2.4.0 49 | treq==24.9.1 50 | Twisted==24.7.0 51 | typing_extensions==4.12.2 52 | unpaddedbase64==2.1.0 53 | urllib3==2.2.3 54 | webencodings==0.5.1 55 | websockets==13.1 56 | zope.interface==7.1.0 57 | -------------------------------------------------------------------------------- /shims/synapse/requirements.txt: -------------------------------------------------------------------------------- 1 | matrix-synapse==1.117.0 2 | websockets==13.1 3 | pydantic==2.9.2 -------------------------------------------------------------------------------- /shims/synapse/shim.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import asyncio 3 | import json 4 | import logging 5 | import os 6 | import uuid 7 | from typing import Collection, Dict, Iterable, List, Optional, Sequence, Set 8 | from pydantic import BaseModel 9 | from websockets.asyncio.server import serve 10 | 11 | from twisted.internet import defer 12 | from synapse.api.errors import AuthError 13 | from synapse.api.room_versions import RoomVersions, KNOWN_ROOM_VERSIONS 14 | from synapse.event_auth import ( 15 | check_state_independent_auth_rules, 16 | check_state_dependent_auth_rules, 17 | ) 18 | from synapse.storage.databases.main.events_worker import EventRedactBehaviour 19 | from synapse.state.v2 import resolve_events_with_store 20 | from synapse.events import EventBase, make_event_from_dict 21 | from synapse.types import StateMap 22 | 23 | logging.basicConfig(level=logging.DEBUG) 24 | 25 | 26 | class WebSocketMessage(BaseModel): 27 | type: str 28 | id: str 29 | error: Optional[str] = None 30 | data: dict 31 | 32 | 33 | class FakeClock: 34 | def sleep(self, msec: float) -> "defer.Deferred[None]": 35 | return defer.succeed(None) 36 | 37 | 38 | class EventSourceStore: 39 | def __init__(self, conn): 40 | self.conn = conn 41 | 42 | async def get_events( 43 | self, 44 | event_ids: Collection[str], 45 | redact_behaviour: EventRedactBehaviour, 46 | get_prev_content: bool = False, 47 | allow_rejected: bool = False, 48 | ) -> Dict[str, "EventBase"]: 49 | return await self.conn.get_events(event_ids) 50 | 51 | 52 | class Connection: 53 | event_map: Dict[str, EventBase] = {} 54 | 55 
| def __init__(self, ws): 56 | self.ws = ws 57 | self.outstanding_requests = {} 58 | self.room_ver = RoomVersions.V10 59 | 60 | def event_source_store(self): 61 | return EventSourceStore(self) 62 | 63 | # Array> 64 | async def resolve_state( 65 | self, 66 | id: str, 67 | room_id: str, 68 | room_ver_str: str, 69 | state_sets_wire_format: Sequence[Dict[str, str]], 70 | at_event_json: dict, 71 | ): 72 | print(f"resolve_state: {id} in {room_id} on version {room_ver_str}") 73 | if KNOWN_ROOM_VERSIONS.get(room_ver_str) is None: 74 | print(f" resolve_state: {id} WARNING: unknown room version {room_ver_str}") 75 | 76 | self.room_ver = KNOWN_ROOM_VERSIONS[room_ver_str] 77 | print(f"resolve_state {state_sets_wire_format} at event {at_event_json}") 78 | 79 | # map the wire format to a form synapse wants, notably this is converting the JSON stringified tuples 80 | # back into real tuples 81 | state_sets: Sequence[StateMap[str]] = [ 82 | {tuple(json.loads(k)): sswf[k] for k in sswf} 83 | for sswf in state_sets_wire_format 84 | ] 85 | r = await resolve_events_with_store( 86 | FakeClock(), 87 | room_id, 88 | self.room_ver, 89 | state_sets, 90 | event_map=None, 91 | state_res_store=self, 92 | ) 93 | # use the state to auth the new event 94 | err_str = "" 95 | if at_event_json.get("state_key") is not None: 96 | print(f"authing at_event {at_event_json["type"]}") 97 | try: 98 | at_event_json.pop("event_id") 99 | at_event = make_event_from_dict( 100 | at_event_json, room_version=self.room_ver 101 | ) 102 | await check_state_independent_auth_rules( 103 | self.event_source_store(), at_event 104 | ) 105 | curr_state_event_ids = iter(r.values()) 106 | curr_state_events = await self.get_events(curr_state_event_ids) 107 | check_state_dependent_auth_rules( 108 | at_event, iter(curr_state_events.values()) 109 | ) 110 | print(f"event {at_event} passes auth checks!") 111 | r[(at_event.type, at_event.state_key)] = at_event.event_id 112 | except AuthError as err: 113 | print(f"event {at_event} failed auth checks! {err}") 114 | err_str = f"{err}" 115 | 116 | print(f"resolve_state: {id} responding") 117 | # convert tuple keys to strings 118 | r = {json.dumps(k): v for k, v in r.items()} 119 | await self.ws.send( 120 | json.dumps( 121 | { 122 | "id": id, 123 | "type": "resolve_state", 124 | "data": { 125 | "result": r, 126 | "error": err_str, 127 | }, 128 | } 129 | ) 130 | ) 131 | return [] 132 | 133 | async def get_event(self, event_id: str) -> EventBase: 134 | id = str(uuid.uuid4()) 135 | print(f" get_event {event_id} -> {id}") 136 | loop = asyncio.get_running_loop() 137 | fut = loop.create_future() 138 | self.outstanding_requests[id] = fut 139 | await self.ws.send( 140 | json.dumps( 141 | { 142 | "id": id, 143 | "type": "get_event", 144 | "data": { 145 | "event_id": event_id, 146 | }, 147 | } 148 | ) 149 | ) 150 | await fut 151 | ev_dict = self.outstanding_requests[id].result() 152 | ev_dict.pop( 153 | "event_id" 154 | ) # wire format shouldn't have this, but tardis includes it. 155 | return make_event_from_dict(ev_dict, room_version=self.room_ver) 156 | 157 | async def get_events( 158 | self, event_ids: Collection[str], allow_rejected: bool = False 159 | ) -> Dict[str, EventBase]: 160 | """Get events from the database 161 | 162 | Args: 163 | event_ids: The event_ids of the events to fetch 164 | allow_rejected: If True return rejected events. 165 | 166 | Returns: 167 | Dict from event_id to event. 
168 | """ 169 | result = {} 170 | for event_id in event_ids: 171 | print(f" get_event {event_id}") 172 | ev = await self.get_event(event_id) 173 | print(f" get_event {event_id} obtained. type={ev["type"]}") 174 | result[event_id] = ev 175 | 176 | return result 177 | 178 | async def _get_auth_chain(self, event_ids: Iterable[str]) -> List[str]: 179 | """Gets the full auth chain for a set of events (including rejected 180 | events). 181 | 182 | Includes the given event IDs in the result. 183 | 184 | Note that: 185 | 1. All events must be state events. 186 | 2. For v1 rooms this may not have the full auth chain in the 187 | presence of rejected events 188 | 189 | Args: 190 | event_ids: The event IDs of the events to fetch the auth 191 | chain for. Must be state events. 192 | Returns: 193 | List of event IDs of the auth chain. 194 | """ 195 | 196 | # Simple DFS for auth chain 197 | result = set() 198 | stack = list(event_ids) 199 | while stack: 200 | event_id = stack.pop() 201 | if event_id in result: 202 | continue 203 | 204 | result.add(event_id) 205 | 206 | event = self.event_map.get(event_id, None) 207 | if event is None: 208 | event = await self.get_event(event_id) 209 | self.event_map[event_id] = event 210 | for aid in event.auth_event_ids(): 211 | stack.append(aid) 212 | 213 | return list(result) 214 | 215 | async def get_auth_chain_difference( 216 | self, room_id: str, auth_sets: List[Set[str]] 217 | ) -> Set[str]: 218 | chains = [frozenset(await self._get_auth_chain(a)) for a in auth_sets] 219 | common = set(chains[0]).intersection(*chains[1:]) 220 | return set(chains[0]).union(*chains[1:]) - common 221 | 222 | 223 | async def handler(websocket): 224 | c = Connection(websocket) 225 | async for message in websocket: 226 | try: 227 | wsm = WebSocketMessage(**json.loads(message)) 228 | print(f"RECV {wsm.type} {wsm.id}") 229 | if wsm.type == "get_event": # incoming response 230 | fut = c.outstanding_requests.get(wsm.id) 231 | if fut: 232 | fut.set_result(wsm.data["event"]) 233 | elif wsm.type == "resolve_state": # incoming request 234 | # we can't await and return the response here because resolve_state needs to 235 | # call get_event which needs more WS messages, so we cannot block the processing 236 | # of incoming WS messages. When resolve_state concludes, it will send the response, 237 | # hence why we pass in the id here so it can pair it up. 238 | asyncio.create_task( 239 | c.resolve_state( 240 | wsm.id, 241 | wsm.data["room_id"], 242 | wsm.data["room_version"], 243 | wsm.data["state"], 244 | wsm.data["event"], 245 | ) 246 | ) 247 | else: 248 | print(f"unknown type: {wsm.type}") 249 | except Exception as err: 250 | print(f"recv error {err}") 251 | 252 | 253 | async def main(): 254 | print(f"Running on commit {os.getenv("COMMIT")}", flush=True) 255 | print("Listening on 0.0.0.0:1234") 256 | async with serve(handler, "0.0.0.0", 1234): 257 | await asyncio.get_running_loop().create_future() # run forever 258 | 259 | 260 | if __name__ == "__main__": 261 | asyncio.run(main()) 262 | -------------------------------------------------------------------------------- /src/auth_dag.ts: -------------------------------------------------------------------------------- 1 | import type { Scenario } from "./scenario"; 2 | 3 | /** 4 | * Print debug statistics about the provided auth DAG room. 5 | * This currently prints to the console, but could be represented in a prettier format e.g grafana style. 6 | * - The maximum number of prev_auth_events on a single event. 
7 | * - The histogram and CDF of prev_auth_events counts (1,2,3,4,5,6,7,8,9,10,15,20,50) 8 | * - Whether the auth DAG is connected (all prev_auth_events are known) 9 | * @param scenario The scenario with events to analyse 10 | */ 11 | export function printAuthDagAnalysis(scenario: Scenario) { 12 | // we tag all buckets <= prev_auth_events.length for the CDF 13 | // we tag the exact bucket for the histogram 14 | const buckets = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 50, Number.POSITIVE_INFINITY]; 15 | const cdf = new Map(buckets.map((val) => [val, 0])); 16 | const histogram = new Map(buckets.map((val) => [val, 0])); 17 | let maxPrevAuthEvents = 0; 18 | const allEvents = new Set(); 19 | const allPrevAuthEvents = new Set(); 20 | for (const ev of scenario.events) { 21 | allEvents.add(ev.event_id); 22 | if (ev.prev_auth_events === undefined) { 23 | // every event must have this 24 | console.error(`printAuthDagAnalysis: event ${ev.event_id} has no prev_auth_events. Bailing.`); 25 | return; 26 | } 27 | if (ev.prev_auth_events.length > maxPrevAuthEvents) { 28 | maxPrevAuthEvents = ev.prev_auth_events.length; 29 | } 30 | if (ev.prev_auth_events.length === 0) { 31 | continue; // create event 32 | } 33 | for (const pae of ev.prev_auth_events) { 34 | allPrevAuthEvents.add(pae); 35 | } 36 | // snap ev.prev_auth_events.length to a bucket 37 | let highestBucket = ev.prev_auth_events.length; 38 | if (ev.prev_auth_events.length > 10) { 39 | // <= 10 can use the exact number 40 | if (ev.prev_auth_events.length <= 15) { 41 | highestBucket = 15; 42 | } else if (ev.prev_auth_events.length <= 20) { 43 | highestBucket = 20; 44 | } else if (ev.prev_auth_events.length <= 50) { 45 | highestBucket = 50; 46 | } else { 47 | highestBucket = Number.POSITIVE_INFINITY; 48 | } 49 | } 50 | histogram.set(highestBucket, (histogram.get(highestBucket) || 0) + 1); 51 | for (const bucket of buckets) { 52 | if (bucket > highestBucket) { 53 | break; // buckets are sorted so when we go beyond the highest val we can bail 54 | } 55 | cdf.set(bucket, (cdf.get(bucket) || 0) + 1); 56 | } 57 | } 58 | 59 | const s = ["Auth DAG Analysis:", `Max prev_auth_events: ${maxPrevAuthEvents}`, "Histogram:"]; 60 | histogram.forEach((val, key) => { 61 | s.push(`${key} ${val}`); 62 | }); 63 | s.push("CDF:"); 64 | cdf.forEach((val, key) => { 65 | s.push(`${key} ${val}`); 66 | }); 67 | // the graph is connected if we have the events for all known prev_auth_events, in other words 68 | // prev_auth_events is a subset of allEvents. isSubsetOf is a 2024 thing. 
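    // (added note: Set.prototype.isSubsetOf needs an ES2024-capable runtime; on older runtimes
    // an equivalent check would be:
    //   [...allPrevAuthEvents].every((id) => allEvents.has(id))
    // )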
69 | const isConnected = allPrevAuthEvents.isSubsetOf(allEvents); 70 | s.push(`Connected: ${isConnected}`); 71 | console.log(s.join("\n")); 72 | } 73 | -------------------------------------------------------------------------------- /src/cache.ts: -------------------------------------------------------------------------------- 1 | import type { EventID, MatrixEvent, StateKeyTuple } from "./state_resolver"; 2 | 3 | export class Cache { 4 | stateAtEvent: StateAtEvent; 5 | eventCache: EventCache; 6 | constructor() { 7 | this.stateAtEvent = new StateAtEvent(); 8 | this.eventCache = new EventCache(); 9 | } 10 | } 11 | 12 | export class StateAtEvent { 13 | // private as we may want to do funny shenanigans later one e.g cache the result in indexeddb 14 | private state: Record>; 15 | 16 | constructor() { 17 | this.state = {}; 18 | } 19 | 20 | setState(eventId: EventID, events: Record) { 21 | this.state[eventId] = events; 22 | console.log(`StateAtEvent ${eventId} is`, events); 23 | } 24 | 25 | getStateAsEventIds(eventId: EventID): Set { 26 | if (!this.state[eventId]) { 27 | return new Set(); 28 | } 29 | return new Set(Object.values(this.state[eventId])); 30 | } 31 | 32 | getState(eventId: EventID): Record { 33 | if (!this.state[eventId]) { 34 | return {}; 35 | } 36 | return JSON.parse(JSON.stringify(this.state[eventId])); 37 | } 38 | } 39 | 40 | export class EventCache { 41 | cache: Map; 42 | constructor() { 43 | // in-memory for now, but could be stored in idb or elsewhere. 44 | this.cache = new Map(); 45 | } 46 | store(ev: MatrixEvent) { 47 | this.cache.set(ev.event_id, ev); 48 | } 49 | get(eventId: string): MatrixEvent | undefined { 50 | return this.cache.get(eventId); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/debugger.ts: -------------------------------------------------------------------------------- 1 | import type { Cache } from "./cache"; 2 | import type { Scenario } from "./scenario"; 3 | import type { EventID, MatrixEvent, StateKeyTuple } from "./state_resolver"; 4 | 5 | // Debugger provides a mechanism for stepping through a scenario, exposing UI elements and calling out to resolve state. 6 | export class Debugger { 7 | private index: number; 8 | private eventIdOrdering: string[]; 9 | private currentEventId: string; 10 | 11 | constructor(readonly scenario: Scenario) { 12 | this.index = scenario.events.length - 1; // last event 13 | this.eventIdOrdering = scenario.events.map((ev) => ev.event_id); 14 | this.currentEventId = this.eventIdOrdering[this.index]; 15 | } 16 | 17 | next() { 18 | this.index++; 19 | if (this.index >= this.eventIdOrdering.length) { 20 | this.index = this.eventIdOrdering.length - 1; 21 | } 22 | this.currentEventId = this.eventIdOrdering[this.index]; 23 | } 24 | previous() { 25 | this.index--; 26 | if (this.index < 0) { 27 | this.index = 0; 28 | } 29 | this.currentEventId = this.eventIdOrdering[this.index]; 30 | } 31 | goTo(eventId: string) { 32 | for (let i = 0; i < this.eventIdOrdering.length; i++) { 33 | if (this.eventIdOrdering[i] === eventId) { 34 | this.index = i; 35 | this.currentEventId = this.eventIdOrdering[this.index]; 36 | break; 37 | } 38 | } 39 | } 40 | current(): string { 41 | return this.currentEventId; 42 | } 43 | eventsUpToCurrent(): string[] { 44 | const eventIds: string[] = []; 45 | for (let i = 0; i <= this.index; i++) { 46 | eventIds.push(this.eventIdOrdering[i]); 47 | } 48 | return eventIds; 49 | } 50 | 51 | // Perform state resolution at the current step. 
52 | // Stores results in the cache via cache.stateAtEvent.setState 53 | // Pulls events from the cache via cache.eventCache.get 54 | // Calls the callback to perform state resolution on a set of states. 55 | async resolve( 56 | cache: Cache, 57 | resolveState: ( 58 | roomId: string, 59 | roomVer: string, 60 | states: Array>, 61 | atEvent: MatrixEvent, 62 | ) => Promise>, 63 | ): Promise { 64 | // we don't just resolve the current step, but resolve all steps up to and including the current 65 | // step. If we've done it before then it will no-op. We need to do this as to work out the state 66 | // at event N we need to know the state at event N-1. 67 | for (const oldEventId of this.eventsUpToCurrent()) { 68 | await this.resolveEvent(oldEventId, cache, resolveState); 69 | } 70 | } 71 | 72 | private async resolveEvent( 73 | atEventId: string, 74 | cache: Cache, 75 | resolveState: ( 76 | roomId: string, 77 | roomVer: string, 78 | states: Array>, 79 | atEvent: MatrixEvent, 80 | ) => Promise>, 81 | ): Promise { 82 | if (cache.stateAtEvent.getStateAsEventIds(atEventId).size > 0) { 83 | return; // we've already worked out the state at this event. 84 | } 85 | const atEvent = cache.eventCache.get(atEventId)!; 86 | // we need to do state resolution. 87 | const states: Array> = []; 88 | for (const prevEventId of atEvent.prev_events) { 89 | const prevState = cache.stateAtEvent.getState(prevEventId); 90 | if (Object.keys(prevState).length === 0) { 91 | console.error( 92 | `WARN: we do not know the state at ${prevEventId} yet, so the state calculation for ${atEventId} may be wrong!`, 93 | ); 94 | } 95 | states.push(prevState); 96 | } 97 | console.log("performing state resolution for prev_events:", atEvent.prev_events); 98 | const theState = await resolveState(atEvent.room_id, this.scenario.roomVersion, states, atEvent); 99 | cache.stateAtEvent.setState(atEventId, theState); 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /src/event_list.ts: -------------------------------------------------------------------------------- 1 | import type { MatrixEvent } from "./state_resolver"; 2 | 3 | export class EventList { 4 | private highlightedEventId: string; 5 | private positionListener: (eventId: string) => void; 6 | private jsonListener: (eventId: string) => void; 7 | constructor( 8 | readonly container: HTMLElement, 9 | readonly template: HTMLTemplateElement, 10 | ) {} 11 | 12 | clear(): void { 13 | this.container.innerHTML = ""; 14 | } 15 | 16 | onEventClick(fn): void { 17 | this.positionListener = fn; 18 | } 19 | onEventJsonClick(fn): void { 20 | this.jsonListener = fn; 21 | } 22 | 23 | private onCellClick(ev) { 24 | this.onClick(ev, this.positionListener); 25 | } 26 | 27 | private onJsonClick(ev) { 28 | this.onClick(ev, this.jsonListener); 29 | } 30 | 31 | private onClick(ev, fn) { 32 | const row = ev.target?.parentElement; 33 | if (!row) { 34 | return; 35 | } 36 | const eventId = row.getAttribute("id"); 37 | if (eventId && fn) { 38 | fn(eventId); 39 | } 40 | } 41 | 42 | appendEvent(index: number, ev: MatrixEvent) { 43 | // https://developer.mozilla.org/en-US/docs/Web/HTML/Element/template#avoiding_documentfragment_pitfall 44 | const row = this.template.content.firstElementChild!.cloneNode(true) as HTMLDivElement; 45 | row.setAttribute("id", ev.event_id); 46 | const prefix = row.getElementsByClassName("eventlistrowprefix")[0]; 47 | prefix.textContent = String(index); 48 | row.addEventListener("click", this.onCellClick.bind(this)); 49 | const jsonButton = 
row.getElementsByClassName("eventlistrowjson")[0]; 50 | jsonButton.addEventListener("click", this.onJsonClick.bind(this)); 51 | row.getElementsByClassName("eventlistrowbody")[0].textContent = textRepresentation(ev); 52 | row.getElementsByClassName("eventlistroweventid")[0].textContent = ev.event_id.substr(0, 5); 53 | if (ev.state_key != null) { 54 | row.style.fontWeight = "600"; 55 | } 56 | this.container.appendChild(row); 57 | } 58 | 59 | highlight(eventId: string) { 60 | if (this.highlightedEventId) { 61 | const oldElement = document.getElementById(this.highlightedEventId); 62 | if (oldElement) { 63 | oldElement.style.backgroundColor = ""; 64 | } 65 | } 66 | document.getElementById(eventId)!.style.backgroundColor = "#6f8ea9"; 67 | this.highlightedEventId = eventId; 68 | } 69 | } 70 | 71 | export function textRepresentation(ev: MatrixEvent): string { 72 | let stateDescription = ""; 73 | let messageDescription = ""; 74 | if (ev.state_key != null) { 75 | switch (ev.type) { 76 | case "m.room.create": 77 | stateDescription = `by ${ev.content.creator}`; 78 | break; 79 | case "m.room.member": 80 | stateDescription = `${ev.state_key}=${ev.content.membership}`; 81 | break; 82 | case "m.room.join_rules": 83 | stateDescription = `(${ev.content.join_rule})`; 84 | break; 85 | case "m.room.history_visibility": 86 | stateDescription = `(${ev.content.history_visibility})`; 87 | break; 88 | case "m.room.name": 89 | stateDescription = `(${ev.content.name})`; 90 | break; 91 | case "m.room.topic": 92 | stateDescription = `(${ev.content.topic})`; 93 | break; 94 | default: 95 | if (ev.state_key !== "") { 96 | stateDescription = ev.state_key; 97 | } 98 | } 99 | } else { 100 | switch (ev.type) { 101 | case "m.reaction": 102 | messageDescription = ev.content["m.relates_to"]?.key; 103 | break; 104 | case "m.room.message": 105 | messageDescription = ev.content.body; 106 | break; 107 | } 108 | } 109 | return `${ev.type} ${stateDescription}${messageDescription}`; 110 | } 111 | -------------------------------------------------------------------------------- /src/graph.ts: -------------------------------------------------------------------------------- 1 | import * as d3 from "d3"; 2 | import { textRepresentation } from "./event_list"; 3 | import type { Scenario } from "./scenario"; 4 | import type { EventID, MatrixEvent } from "./state_resolver"; 5 | 6 | export interface RenderOptions { 7 | currentEventId: string; 8 | stateAtEvent?: Set; 9 | scenario?: Scenario; 10 | showAuthChain: boolean; 11 | showAuthDAG: boolean; 12 | } 13 | 14 | interface RenderableMatrixEvent extends MatrixEvent { 15 | prev_auth_events: Array; // until MatrixEvent knows about it 16 | authed_list: Array; // list of events which this one is authenticated by in an auth DAG 17 | auth_list: Array; // list of events which this one authenticates in an auth DAG 18 | next_events: Array; 19 | x: number; 20 | y: number; 21 | laneWidth: number; 22 | streamPosition: number; 23 | authLane: number; // which lane for auth events which point to this event, if any 24 | authLaneStart: number; // what's the oldest auth lane in play at this event (for layout) 25 | } 26 | 27 | // const edgesForEvent = (ev: RenderableMatrixEvent, opts: RenderOptions): string[] => { 28 | // if (opts.showAuthChain) { 29 | // return (ev.prev_events || []).concat(ev.auth_events || []); 30 | // } 31 | // return ev.prev_events; 32 | // }; 33 | 34 | const textualRepresentation = (ev: RenderableMatrixEvent, scenario?: Scenario) => { 35 | const eventId = ev.event_id; 36 | if 
(scenario?.annotations?.events?.[eventId]) { 37 | return `${scenario?.annotations?.events[eventId]}`; 38 | } 39 | const text = textRepresentation(ev); 40 | const collapse = ev._collapse ? `+${ev._collapse} more` : ""; 41 | return `${text} ${collapse}`; 42 | }; 43 | 44 | const redraw = (vis: HTMLDivElement, events: MatrixEvent[], opts: RenderOptions) => { 45 | // copy the events so we don't alter the caller's copy 46 | // biome-ignore lint/style/noParameterAssign: 47 | events = JSON.parse(JSON.stringify(events)); 48 | // sort events chronologically 49 | const data: Array = events; // .sort((a, b) => a.origin_server_ts - b.origin_server_ts); 50 | 51 | const eventsById: Map = new Map(); 52 | for (let i = 0; i < data.length; i++) { 53 | data[i].streamPosition = i; 54 | eventsById.set(data[i].event_id, data[i]); 55 | } 56 | 57 | // and insert potential placeholders for dangling edges. 58 | // we slice to do a shallow copy given we're inserting placeholders into data 59 | for (const d of data.slice()) { 60 | // order parents chronologically 61 | d.prev_events = d.prev_events.sort((a: string, b: string) => { 62 | return (eventsById.get(a)?.streamPosition || 0) - (eventsById.get(b)?.streamPosition || 0); 63 | }); 64 | // order auth events reverse chronologically 65 | d.auth_events = (d.auth_events || []).sort((a: string, b: string) => { 66 | return (eventsById.get(b)?.streamPosition || 0) - (eventsById.get(a)?.streamPosition || 0); 67 | }); 68 | // remove auth events that point to create events, as they are very duplicative. 69 | //d.auth_events = d.auth_events.filter(id => eventsById.get(id)?.type !== 'm.room.create'); 70 | 71 | for (const p of d.prev_events) { 72 | if (!eventsById.get(p)) { 73 | const placeholder = { 74 | event_id: p, 75 | type: "dangling", 76 | prev_events: [], 77 | next_events: [], 78 | content: {}, 79 | sender: "dangling", 80 | auth_events: [], 81 | room_id: "", 82 | origin_server_ts: 0, 83 | }; 84 | eventsById.set(p, placeholder); 85 | // insert the placeholder immediately before the event which refers to it 86 | const i = data.findIndex((ev) => ev.event_id === d.event_id); 87 | console.log("inserting placeholder prev_event at ", i); 88 | data.splice(i, 0, placeholder); 89 | } 90 | 91 | // update children on parents 92 | const parent = eventsById.get(p)!; 93 | if (!parent.next_events) parent.next_events = []; 94 | //console.log(`push ${d.event_id} onto ${parent.event_id}.next_events`); 95 | parent.next_events.push(d.event_id); 96 | } 97 | } 98 | 99 | // which lanes are in use for prev_events that point to a given event_id 100 | // so we know how to fill up the lanes. 101 | const lanes: Array = []; 102 | // the height at which a given lane ended (i.e. was terminated) 103 | const laneEnd: Array = []; 104 | 105 | // for balanced layout: 106 | const balanced = false; 107 | const laneWidth = 100; 108 | if (balanced) { 109 | lanes.length = laneWidth; 110 | laneEnd.length = laneWidth; 111 | } 112 | 113 | function getNextLane(after: number | null = null) { 114 | if (balanced) { 115 | // biome-ignore lint/style/noParameterAssign: 116 | if (after == null) after = 0; 117 | // finds the next empty lane 118 | // if (after >= lanes.length) return after; 119 | 120 | let foundAfter = false; 121 | for (let i = 0; i < lanes.length; i++) { 122 | // 0, -1, 1, -2, 2, -3, 3 etc. 
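                // (added worked example of the next line: i=0,1,2,3,4 maps to 0,-1,1,-2,2,
                // i.e. fanning out from the centre before the laneWidth/2 shift below)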
123 | let x = Math.ceil(i / 2) * (((i + 1) % 2) * 2 - 1); 124 | x += laneWidth / 2; 125 | if (after) { 126 | if (after === x) { 127 | foundAfter = true; 128 | continue; 129 | } 130 | if (!foundAfter) continue; 131 | } 132 | if (!lanes[x]) return x; 133 | } 134 | } else { 135 | const startingAt = after == null ? 0 : after + 1; 136 | // finds the next empty lane 137 | if (startingAt >= lanes.length) return startingAt; 138 | 139 | for (let i = startingAt; i < lanes.length; i++) { 140 | if (!lanes[i]) return i; 141 | } 142 | 143 | return lanes.length; 144 | } 145 | } 146 | 147 | data[0].x = 0; 148 | for (let i = 0; i < data.length; i++) { 149 | const d = data[i]; 150 | // console.log( 151 | // y, 152 | // d.event_id.slice(0, 5), 153 | // d.sender, 154 | // d.type, 155 | // lanes.map((id) => id?.substr(0, 5)).join(", "), 156 | // `p:${d.prev_events.map((id) => id.substr(0, 5)).join(", ")}`, 157 | // `n:${d.next_events?.map((id) => id.substr(0, 5)).join(", ")}`, 158 | // ); 159 | 160 | d.y = i; 161 | 162 | // if any of my parents has a lane, position me under it, preferring the oldest 163 | let foundLane = false; 164 | for (const p of d.prev_events!) { 165 | const parent = eventsById.get(p)!; 166 | if (lanes.findIndex((id) => id === parent.event_id) !== -1) { 167 | d.x = parent.x; 168 | foundLane = true; 169 | } 170 | break; 171 | } 172 | 173 | // otherwise, start a new lane 174 | if (!foundLane) { 175 | // don't re-use lanes if you have prev_events higher than the end of the lane 176 | // otherwise you'll overlap them. 177 | d.x = getNextLane(); 178 | if (d.prev_events && eventsById.get(d.prev_events[0])) { 179 | const oldestPrevEventY = eventsById.get(d.prev_events[0])!.y; 180 | while (laneEnd[d.x] !== undefined && oldestPrevEventY < laneEnd[d.x]) { 181 | d.x = getNextLane(d.x); 182 | } 183 | } 184 | } 185 | 186 | // if am not the oldest parent of any of my children, terminate this lane, 187 | // as they will never try to position themselves under me. 188 | let oldestParent = false; 189 | if (d.next_events) { 190 | for (const c of d.next_events) { 191 | const child = eventsById.get(c); 192 | if (child!.prev_events![0] === d.event_id) { 193 | oldestParent = true; 194 | break; 195 | } 196 | } 197 | } 198 | 199 | if (oldestParent) { 200 | // label this lane with my id for the benefit of whatever child 201 | // will go under it, to stop other nodes grabbing it 202 | lanes[d.x] = d.event_id; 203 | } else { 204 | //console.log(`terminating lane ${d.x}`); 205 | delete lanes[d.x]; 206 | laneEnd[d.x] = i; 207 | } 208 | } 209 | 210 | // the current list of authLanes on the go, so we know where to insert new ones. 211 | const authLanes: Array = []; 212 | 213 | function getNextAuthLane(y1: number, y2: number) { 214 | let rightHandEdge = 0; 215 | for (let y = y1; y <= y2; y++) { 216 | rightHandEdge = data[y].x > rightHandEdge ? data[y].x : rightHandEdge; 217 | // XXX: alternatively, we could push out beyond the prev-event laneWidth 218 | // to avoid crisscrossing the prev-event DAG with the auth DAG 219 | } 220 | rightHandEdge++; 221 | // XXX: ideally we'd ensure that the oldest lane keeps getting pushed out by newer ones 222 | // as we find them, rather than just appending like this. 223 | // So, we'd find the right slot based on comparing y1 with the y offsets of the 224 | // events for these lanes, and then shuffle the events over if needed. 225 | // however, this would be tricky when reusing lanes, as the order will break. 
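        // (added note: linear-probe rightwards until a free auth lane index is found)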
226 | while (authLanes[rightHandEdge] !== undefined) { 227 | rightHandEdge++; 228 | } 229 | return rightHandEdge; 230 | } 231 | 232 | // pass from bottom to top to figure out auth dag 233 | for (let i = data.length - 1; i >= 0; i--) { 234 | const d = data[i]; 235 | const authEvents = opts.showAuthChain ? d.auth_events : opts.showAuthDAG ? d.prev_auth_events : undefined; 236 | if (!authEvents) continue; 237 | 238 | if (opts.showAuthDAG) { 239 | // walk the DAG to the root to get authed & authing events 240 | const walk = (e) => { 241 | e.authed_list ||= []; 242 | e.authed_list.push(d.event_id); 243 | for (const id of e.prev_auth_events) { 244 | walk(eventsById.get(id)); 245 | } 246 | }; 247 | walk(d); 248 | } 249 | 250 | d.auth_list = []; 251 | for (const id of authEvents) { 252 | d.auth_list.push(id); 253 | const p = eventsById.get(id)!; 254 | if (!p.authLane) { 255 | const lane = getNextAuthLane(p.y, i); 256 | p.authLane = lane; 257 | authLanes[lane] = id; 258 | p.authLaneStart = authLanes.findIndex((lane) => lane !== undefined) - 1; 259 | } 260 | } 261 | // reclaim lanes once we've moved past their events 262 | if (d.authLane) { 263 | delete authLanes[d.authLane]; 264 | } 265 | } 266 | 267 | const balanceTwoWayForks = true; 268 | 269 | // another pass to figure out the right-hand edge 270 | let maxAuthLane = 0; 271 | let maxAuthLaneStart = 0; 272 | const edges: Array<{ x: number; y: number }> = []; 273 | data[0].laneWidth = 0; 274 | for (let i = 1; i < data.length; i++) { 275 | const p = data[i - 1]; 276 | const d = data[i]; 277 | if (p.authLane > maxAuthLane) { 278 | maxAuthLane = p.authLane; 279 | maxAuthLaneStart = p.authLaneStart; 280 | } 281 | while (edges.length > 0 && i > edges.at(-1)?.y) edges.pop(); 282 | if (p.next_events) { 283 | edges.push({ 284 | x: eventsById.get(p.next_events.at(-1)).x, 285 | y: eventsById.get(p.next_events.at(-1)).y, 286 | }); 287 | } 288 | edges.sort((a, b) => a.x - b.x); 289 | d.laneWidth = edges.at(-1)?.x; 290 | if (balanceTwoWayForks && d.laneWidth % 2) { 291 | // balance simple 2-way forks 292 | d.x -= 0.5; 293 | d.laneWidth -= 0.5; 294 | } 295 | } 296 | 297 | const margin = { 298 | top: 20, 299 | right: 20, 300 | bottom: 30, 301 | left: 230, 302 | }; 303 | 304 | let currTitle = opts.scenario?.annotations?.titles?.[opts.currentEventId]; 305 | if (!currTitle) { 306 | // ...fallback to the global title or nothing 307 | currTitle = opts.scenario?.annotations?.title || ""; 308 | } 309 | const lines = currTitle.split("\n"); 310 | const lineHeight = 20; 311 | 312 | // 313 | // Drawing operations below 314 | // 315 | const gx = 40; // horizontal grid spacing 316 | const gy = 25; // vertical grid spacing 317 | const r = 5; // node size 318 | 319 | const lineWidth = 2; 320 | const lineWidthHighlight = 3; 321 | 322 | const authLineWidth = 1; 323 | const authLineWidthHighlight = 3; 324 | 325 | const prevColor = "#f00"; 326 | const currColor = "#0a0"; 327 | const nextColor = "#00f"; 328 | const prevAuthColor = "#faa"; 329 | const nextAuthColor = "#aaf"; 330 | const authColor = "#888"; 331 | 332 | // empty vis div 333 | d3.select(vis).html(null); 334 | 335 | // determine width & height of parent element and subtract the margin 336 | const width = lanes.length * gx + 1140; 337 | const height = data.length * gy; 338 | 339 | // create svg and create a group inside that is moved by means of margin 340 | const svg = d3 341 | .select(vis) 342 | .append("svg") 343 | .attr("width", width + margin.left + margin.right) 344 | .attr("height", height + margin.top + 
margin.bottom + lineHeight * lines.length) 345 | .append("g") 346 | .attr("transform", `translate(${[margin.left, margin.top]})`); 347 | 348 | const node = svg 349 | .selectAll("g") 350 | .data(data) 351 | .enter() 352 | .append("g") 353 | .attr("class", (d) => `node-${d.event_id.slice(1, 5)}`) 354 | .on("mouseover", function (e, d) { 355 | const node = d3.select(this); 356 | node.attr("fill", currColor).attr("font-weight", "bold"); 357 | 358 | d3.selectAll(`.child-${d.event_id.slice(1, 5)}`) 359 | .raise() 360 | .attr("stroke", nextColor) 361 | .attr("stroke-width", lineWidthHighlight); 362 | d3.selectAll(`.parent-${d.event_id.slice(1, 5)}`) 363 | .raise() 364 | .attr("stroke", prevColor) 365 | .attr("stroke-width", lineWidthHighlight); 366 | 367 | d3.selectAll(`.authchild-${d.event_id.slice(1, 5)}`) 368 | .raise() 369 | .attr("stroke", nextAuthColor) 370 | .attr("stroke-width", authLineWidthHighlight); 371 | // .each(function() { 372 | // d3.select(this.parentNode).raise(); 373 | // }); 374 | d3.selectAll(`.authparent-${d.event_id.slice(1, 5)}`) 375 | .raise() 376 | .attr("stroke", prevAuthColor) 377 | .attr("stroke-width", authLineWidthHighlight); 378 | // .each(function() { 379 | // d3.select(this.parentNode).raise(); 380 | // }); 381 | 382 | for (const id of d.next_events || []) { 383 | d3.select(`.node-${id.slice(1, 5)}`).attr("fill", nextColor); 384 | } 385 | for (const id of d.prev_events!) { 386 | d3.select(`.node-${id.slice(1, 5)}`).attr("fill", prevColor); 387 | } 388 | }) 389 | .on("mouseout", function (e, d) { 390 | d3.select(this).attr("fill", null).attr("font-weight", null); 391 | 392 | for (const id of d.next_events || []) { 393 | d3.select(`.node-${id.slice(1, 5)}`).attr("fill", null); 394 | } 395 | for (const id of d.prev_events!) { 396 | d3.select(`.node-${id.slice(1, 5)}`).attr("fill", null); 397 | } 398 | 399 | d3.selectAll(`.child-${d.event_id.slice(1, 5)}`) 400 | .attr("stroke", "black") 401 | .attr("stroke-width", lineWidth); 402 | d3.selectAll(`.parent-${d.event_id.slice(1, 5)}`) 403 | .attr("stroke", "black") 404 | .attr("stroke-width", lineWidth); 405 | 406 | d3.selectAll(`.authchild-${d.event_id.slice(1, 5)}`) 407 | .attr("stroke", authColor) 408 | .attr("stroke-width", authLineWidth); 409 | d3.selectAll(`.authparent-${d.event_id.slice(1, 5)}`) 410 | .attr("stroke", authColor) 411 | .attr("stroke-width", authLineWidth); 412 | }); 413 | 414 | // draw data points 415 | node.append("circle") 416 | .attr("cx", (d) => d.x * gx) 417 | .attr("cy", (d) => d.y * gy) 418 | .attr("r", r) 419 | .style("fill", (d) => { 420 | if (opts.stateAtEvent?.has(d.event_id)) { 421 | return "#43ff00"; 422 | } 423 | return d.state_key != null ? "#4300ff" : "#111111"; 424 | }) 425 | .style("fill-opacity", "0.5") 426 | .style("stroke", (d) => { 427 | if (opts.stateAtEvent?.has(d.event_id)) { 428 | return "#43ff00"; 429 | } 430 | return d.state_key != null ? 
"#4300ff" : "#111111"; 431 | }); 432 | 433 | const nudgeOffset = 0; 434 | 435 | // next-events outlines 436 | if (!nudgeOffset) { 437 | node.append("path") 438 | .attr("d", (d) => { 439 | const path = d3.path(); 440 | if (d.next_events) { 441 | for (const child of d.next_events) { 442 | const c = eventsById.get(child); 443 | path.moveTo(d.x * gx, d.y * gy + r); 444 | path.arcTo(d.x * gx, (d.y + 0.5) * gy, c.x * gx, (d.y + 0.5) * gy, r); 445 | path.arcTo(c.x * gx, (d.y + 0.5) * gy, c.x * gx, c.y * gy - r, r); 446 | path.lineTo(c.x * gx, c.y * gy - r); 447 | } 448 | } 449 | 450 | return path; 451 | }) 452 | .attr("stroke", "white") 453 | .attr("stroke-width", lineWidth + 2) 454 | .attr("fill", "none"); 455 | } 456 | 457 | // links 458 | node.each((d, i, nodes) => { 459 | const n = d3.select(nodes[i]); 460 | 461 | if (d.next_events) { 462 | let childIndex = 0; 463 | for (const child of d.next_events) { 464 | const c = eventsById.get(child); 465 | if (!c) continue; 466 | 467 | const path = d3.path(); 468 | 469 | let nudge_x = 0; 470 | let nudge_y = 0; 471 | 472 | if (nudgeOffset) { 473 | // nudge horizontal up or down based on how many next_events there are from this node. 474 | nudge_y = 475 | d.next_events.length > 1 ? nudgeOffset * (childIndex - (d.next_events.length - 2) / 2) : 0; 476 | // nudge vertical left or right based on how many prev_events there are from this child. 477 | const childParentIndex = c.prev_events!.findIndex((id) => id === d.event_id); 478 | nudge_x = nudgeOffset * (childParentIndex - (c.prev_events!.length - 1) / 2); 479 | } 480 | 481 | path.moveTo(d.x * gx, d.y * gy + r + nudge_y); 482 | 483 | // path.lineTo(c.x * g, d.y * gy); 484 | // path.lineTo(c.x * g, c.y * gy); 485 | // path.quadraticCurveTo(c.x * gx, d.y * gy, c.x * gx, c.y * gy); 486 | 487 | // path.arcTo(c.x * gx, d.y * gy, c.x * gx, c.y * gy, gy/2); 488 | // path.lineTo(c.x * gx, c.y * gy); 489 | 490 | if (nudgeOffset) { 491 | path.lineTo(d.x * gx, (d.y + 0.5) * gy + nudge_y); 492 | path.lineTo(c.x * gx + nudge_x, (d.y + 0.5) * gy + nudge_y); 493 | } else { 494 | path.arcTo(d.x * gx, (d.y + 0.5) * gy, c.x * gx, (d.y + 0.5) * gy, r); 495 | path.arcTo(c.x * gx, (d.y + 0.5) * gy, c.x * gx, c.y * gy - r, r); 496 | } 497 | 498 | path.lineTo(c.x * gx + nudge_x, c.y * gy - r); 499 | 500 | // arrowhead - we draw one per link so that prev_event highlighting works 501 | path.moveTo(d.x * gx - r / 3, d.y * gy + r + r / 2); 502 | path.lineTo(d.x * gx, d.y * gy + r); 503 | path.lineTo(d.x * gx + r / 3, d.y * gy + r + r / 2); 504 | path.lineTo(d.x * gx - r / 3, d.y * gy + r + r / 2); 505 | path.lineTo(d.x * gx, d.y * gy + r); 506 | 507 | childIndex++; 508 | 509 | n.append("path") 510 | .attr("d", path.toString()) 511 | .attr("class", (d) => `child-${d.event_id.slice(1, 5)} parent-${c?.event_id.slice(1, 5)}`) 512 | .attr("stroke", "black") 513 | .attr("stroke-width", lineWidth) 514 | .attr("fill", "none"); 515 | } 516 | } 517 | }); 518 | 519 | // auth chains 520 | const agx = gx / 2; // tighter grid for auth events 521 | 522 | if (opts.showAuthChain || opts.showAuthDAG) { 523 | node.each((d, i, nodes) => { 524 | const n = d3.select(nodes[i]); 525 | 526 | const authEvents = opts.showAuthChain ? 
d.auth_events : d.prev_auth_events; 527 | if (authEvents) { 528 | for (const parent of authEvents) { 529 | const p = eventsById.get(parent); 530 | if (!p) continue; 531 | 532 | const path = d3.path(); 533 | 534 | const nudge_y = 0; 535 | const nudge_x = 0; 536 | 537 | // XXX: is authLaneStart going to be constant enough for this to work? 538 | const authOffset = p.authLaneStart * gx + (p.authLane - p.authLaneStart) * agx; 539 | 540 | path.moveTo(d.x * gx + r + nudge_x, d.y * gy + nudge_y); 541 | path.arcTo(authOffset, d.y * gy + nudge_y, authOffset, p.y * gy + nudge_y, r * 2); 542 | path.arcTo(authOffset, p.y * gy + nudge_y, p.x * gx + r + nudge_x, p.y * gy + nudge_y, r * 2); 543 | // path.lineTo(p.authLane * gx, d.y * gy + nudge); 544 | // path.lineTo(p.authLane * gx, p.y * gy + nudge); 545 | path.lineTo(p.x * gx + r + nudge_x, p.y * gy + nudge_y); 546 | 547 | // arrowhead 548 | path.moveTo(p.x * gx + nudge_x + r + r / 2, p.y * gy + nudge_y + r / 3); 549 | path.lineTo(p.x * gx + nudge_x + r, p.y * gy + nudge_y); 550 | path.lineTo(p.x * gx + nudge_x + r + r / 2, p.y * gy + nudge_y - r / 3); 551 | path.lineTo(p.x * gx + nudge_x + r + r / 2, p.y * gy + nudge_y + r / 3); 552 | path.lineTo(p.x * gx + nudge_x + r, p.y * gy + nudge_y); 553 | 554 | const classes = (d) => { 555 | if (opts.showAuthChain) { 556 | return `authchild-${p.event_id.slice(1, 5)} authparent-${d?.event_id.slice(1, 5)}`; 557 | } 558 | return `${d.authed_list.map((id) => `authparent-${id?.slice(1, 5)}`).join(" ")} ${d.auth_list.map((id) => `authchild-${id?.slice(1, 5)}`).join(" ")}`; 559 | }; 560 | 561 | n.append("path") 562 | .attr("d", path.toString()) 563 | .attr("class", classes) 564 | .attr("stroke", authColor) 565 | .attr("stroke-width", 1) 566 | // .attr("stroke-dasharray", `${lineWidth * 2},${lineWidth}`) 567 | .attr("fill", "none"); 568 | } 569 | } 570 | }); 571 | } 572 | 573 | /* 574 | // auth chain made out of arcs 575 | node.each((d, i, nodes) => { 576 | const n = d3.select(nodes[i]); 577 | 578 | if (d.auth_events) { 579 | for (const parent of d.auth_events) { 580 | const p = eventsById.get(parent); 581 | if (!p) continue; 582 | 583 | const path = d3.path(); 584 | 585 | path.moveTo(d.x * gx, d.y * gy); 586 | path.arcTo( 587 | ((d.x + p.x) * 0.5 * gx) + ((d.y - p.y) * 0.5 * gy), (p.y + d.y) * 0.5 * gy, 588 | p.x * gx, p.y * gy, 589 | (d.y - p.y) * 0.5 * (gx + gy)/2 590 | ); 591 | 592 | n.append("path") 593 | .attr("d", path.toString()) 594 | // .attr("class", (d) => `child-${d.event_id.slice(1, 5)} parent-${c?.event_id.slice(1, 5)}`) 595 | .attr("stroke", "#888") 596 | .attr("stroke-width", 1) 597 | .attr("stroke-dasharray", `${lineWidth * 2},${lineWidth}`) 598 | .attr("fill", "none"); 599 | } 600 | } 601 | }); 602 | */ 603 | 604 | const textOffset = (d) => 605 | opts.showAuthChain || opts.showAuthDAG 606 | ? maxAuthLaneStart * gx + (maxAuthLane - maxAuthLaneStart) * agx 607 | : (d.laneWidth ?? 
0) * gx; 608 | 609 | // Add event IDs on the right side 610 | node.append("text") 611 | .text((d) => { 612 | return d.event_id.substr(0, 5); 613 | }) 614 | .attr("x", (d) => textOffset(d) + agx) 615 | .attr("y", (d) => d.y * gy + 4); 616 | 617 | // Add descriptions alongside the event ID 618 | node.append("text") 619 | .text((d) => { 620 | return textualRepresentation(d, opts.scenario); 621 | }) 622 | .attr("class", (d) => "node-text") 623 | // .text( 624 | // (d) => 625 | // `${d.y} ${d.event_id.slice(0, 5)} ${d.sender} P:${d.prev_events.map((id) => id.slice(0, 5)).join(", ")} | N:${d.next_events?.map((id) => id.slice(0, 5)).join(", ")}`, 626 | // ) 627 | //.text(d => `${d.y} ${d.event_id.substr(0, 5)} ${d.sender} ${d.type} prev:${d.prev_events.map(id => id.substr(0, 5)).join(", ")}`) 628 | .attr("x", (d) => textOffset(d) + agx + 70) 629 | .attr("y", (d) => d.y * gy + 4); 630 | 631 | node.append("text") 632 | .text((d) => 633 | d.origin_server_ts 634 | ? new Date(d.origin_server_ts).toLocaleString(undefined, { 635 | day: "2-digit", 636 | month: "2-digit", 637 | year: "numeric", 638 | hour: "2-digit", 639 | minute: "2-digit", 640 | second: "2-digit", 641 | }) 642 | : "", 643 | ) 644 | .attr("x", -margin.left) 645 | .attr("y", (d) => d.y * gy + 4); 646 | 647 | // use the title for the current event 648 | const title = svg.append("text").attr("class", "node-text").attr("x", -margin.left).attr("y", height); 649 | 650 | for (let i = 0; i < lines.length; i++) { 651 | title 652 | .append("tspan") 653 | .attr("x", -margin.left) 654 | .attr("y", height + i * lineHeight) 655 | .text(lines[i]); 656 | } 657 | //title.text(currTitle); 658 | }; 659 | 660 | export { redraw }; 661 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { printAuthDagAnalysis } from "./auth_dag"; 2 | import { Cache } from "./cache"; 3 | import { Debugger } from "./debugger"; 4 | import { EventList } from "./event_list"; 5 | import { redraw } from "./graph"; 6 | import { mainlineForks, quickstartFile, reverseTopologicalPowerOrdering } from "./preloaded_scenarios"; 7 | import { type Scenario, type ScenarioFile, loadScenarioFromFile, loadScenarioFromScenarioFile } from "./scenario"; 8 | import { 9 | type DataGetEvent, 10 | type EventID, 11 | type MatrixEvent, 12 | type StateKeyTuple, 13 | StateResolver, 14 | StateResolverTransport, 15 | } from "./state_resolver"; 16 | 17 | const preloadedScenarios: Record = { 18 | "Quick Start": quickstartFile, 19 | "Mainline Ordering": mainlineForks, 20 | "Reverse Topological Power Ordering": reverseTopologicalPowerOrdering, 21 | }; 22 | 23 | const eventList = new EventList( 24 | document.getElementById("eventlist")!, 25 | document.getElementById("eventlisttemplate") as HTMLTemplateElement, 26 | ); 27 | 28 | class Dag { 29 | cache: Cache; 30 | createEventId: string | null; 31 | showAuthChain: boolean; 32 | showAuthDAG: boolean; 33 | showOutliers: boolean; 34 | collapse: boolean; 35 | shimUrl?: string; 36 | 37 | debugger: Debugger; 38 | 39 | renderEvents: Record; 40 | scenario?: Scenario; 41 | 42 | constructor(cache: Cache) { 43 | this.cache = cache; 44 | this.createEventId = null; 45 | this.showAuthChain = false; 46 | this.showAuthDAG = false; 47 | this.showOutliers = false; 48 | this.collapse = false; 49 | this.renderEvents = {}; 50 | } 51 | 52 | setShimUrl(u: string) { 53 | this.shimUrl = u; 54 | console.log("setShimUrl", u); 55 | } 56 | 57 | async 
loadFile(file: File) { 58 | const scenario = await loadScenarioFromFile(file); 59 | this.loadScenario(scenario); 60 | } 61 | 62 | loadScenario(scenario: Scenario) { 63 | for (const ev of scenario.events) { 64 | this.cache.eventCache.store(ev); 65 | if (ev.type === "m.room.create" && ev.state_key === "") { 66 | this.createEventId = ev.event_id; 67 | } 68 | } 69 | if (scenario.precalculatedStateAfter) { 70 | for (const preCalcEventId in scenario.precalculatedStateAfter) { 71 | const stateMap: Record = {}; 72 | for (const stateEventId of scenario.precalculatedStateAfter[preCalcEventId]) { 73 | const stateEvent = this.cache.eventCache.get(stateEventId); 74 | if (!stateEvent || stateEvent.state_key == null) { 75 | console.log( 76 | `precalculated_state_after for ${preCalcEventId} includes ${stateEventId} but it isn't a state event we know about. Skipping.`, 77 | ); 78 | continue; 79 | } 80 | stateMap[JSON.stringify([stateEvent.type, stateEvent.state_key])] = stateEvent.event_id; 81 | } 82 | this.cache.stateAtEvent.setState(preCalcEventId, stateMap); 83 | } 84 | } 85 | this.scenario = scenario; 86 | this.debugger = new Debugger(scenario); 87 | if (scenario.onLoadAtStart && scenario.events.length >= 2) { 88 | this.debugger.goTo(scenario.events[1].event_id); 89 | } 90 | eventList.clear(); 91 | let hasAuthDAGEvents = false; 92 | scenario.events.forEach((ev, i) => { 93 | eventList.appendEvent(i, ev); 94 | if (ev.prev_auth_events && ev.prev_auth_events.length > 0) { 95 | hasAuthDAGEvents = true; 96 | } 97 | }); 98 | eventList.highlight(this.debugger.current()); 99 | eventList.onEventClick((eventId: string) => { 100 | this.debugger.goTo(eventId); 101 | this.refresh(); 102 | eventList.highlight(dag.debugger.current()); 103 | }); 104 | eventList.onEventJsonClick((eventId: string) => { 105 | document.getElementById("eventdetails")!.textContent = JSON.stringify( 106 | this.cache.eventCache.get(eventId), 107 | null, 108 | 2, 109 | ); 110 | document.getElementById("infocontainer")!.style.display = "block"; 111 | }); 112 | if (hasAuthDAGEvents) { 113 | printAuthDagAnalysis(scenario); 114 | } 115 | this.refresh(); 116 | } 117 | setShowAuthChain(show: boolean) { 118 | this.showAuthChain = show; 119 | } 120 | setShowAuthDAG(show: boolean) { 121 | this.showAuthDAG = show; 122 | } 123 | setShowOutliers(show: boolean) { 124 | this.showOutliers = show; 125 | } 126 | setCollapse(col: boolean) { 127 | this.collapse = col; 128 | } 129 | async refresh() { 130 | let renderEvents = Object.create(null); 131 | for (const eventId of this.debugger.eventsUpToCurrent()) { 132 | renderEvents[eventId] = this.cache.eventCache.get(eventId); 133 | } 134 | if (this.collapse) { 135 | renderEvents = this.collapsifier(renderEvents); 136 | } 137 | const eventsArray: Array = []; 138 | for (const k in renderEvents) { 139 | eventsArray.push(renderEvents[k]); 140 | } 141 | redraw(document.getElementById("svgcontainer")! as HTMLDivElement, eventsArray, { 142 | currentEventId: this.debugger.current(), 143 | scenario: this.scenario, 144 | stateAtEvent: this.cache.stateAtEvent.getStateAsEventIds(this.debugger.current()), 145 | showAuthChain: this.showAuthChain, 146 | showAuthDAG: this.showAuthDAG, 147 | }); 148 | } 149 | // find the event(s) which aren't pointed to by anyone which has prev/auth events, as this is the 150 | // forward extremity, we do this by playing a deathmatch - everyone is eligible at first and 151 | // then we loop all the prev/auth events and remove from the set until only the ones not being 152 | // pointed at exist. 
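    // (added illustration: for a linear chain A <- B <- C, A and B are removed because B and C
    // point at them via prev_events, leaving C as the sole forward extremity.)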
153 | findForwardExtremities(events): Set { 154 | const s = new Set(); 155 | 156 | for (const id in events) { 157 | s.add(id); 158 | } 159 | for (const id in events) { 160 | const ev = events[id]; 161 | for (const k of ev.prev_events) { 162 | s.delete(k); 163 | } 164 | for (const k of ev.auth_events) { 165 | s.delete(k); 166 | } 167 | } 168 | return s; 169 | } 170 | 171 | // Removes events from this map for long linear sequences, instead replacing with a placeholder 172 | // "... N more ..." event. Forks are never replaced. 173 | collapsifier(eventsOrig: Record): Record { 174 | // take a copy of the events as we will be directly altering prev_events 175 | const events = JSON.parse(JSON.stringify(eventsOrig)) as Record; 176 | const latestEvents = this.findForwardExtremities(events); 177 | 178 | // this algorithm works in two phases: 179 | // - figure out the "interesting" events (events which merge or create forks, fwd/backwards extremities) 180 | // - figure out the "keep list" which is the set of interesting events + 1 event padding for all interesting events 181 | // we need the event padding so we can show forks sensibly, e.g consider: 182 | // A <-- keep as pointed to by >1 event 183 | // / \ 184 | // B C <-- these will be discarded as they have 1 prev_event and aren't a fwd extremity. 185 | // \ / 186 | // D <-- keep as >1 prev_event 187 | // | 188 | // E <-- keep as this is fwd extremity 189 | 190 | // work out the "interesting" events, which meet one of the criteria: 191 | // - Has 0 or 2+ prev_events (i.e not linear or is create/missing event) 192 | // - is a forward extremity 193 | // - is pointed to by >1 event (i.e "next_events") 194 | const interestingEvents = new Set(); 195 | for (const id of latestEvents) { 196 | interestingEvents.add(id); // is a forward extremity 197 | } 198 | const pointCount = Object.create(null); // event ID -> num events pointing to it 199 | for (const evId in events) { 200 | const ev = events[evId]; 201 | for (const pe of ev.prev_events) { 202 | const val = pointCount[pe] || 0; 203 | pointCount[pe] = val + 1; 204 | } 205 | if (ev.prev_events.length !== 1) { 206 | interestingEvents.add(ev.event_id); // Has 0 or 2+ prev_events (i.e not linear or is create/missing event) 207 | } 208 | } 209 | for (const id in pointCount) { 210 | if (pointCount[id] > 1) { 211 | interestingEvents.add(id); // is pointed to by >1 event (i.e "next_events") 212 | } 213 | } 214 | 215 | // make the keep list 216 | const keepList = new Set(); 217 | for (const evId in events) { 218 | if (interestingEvents.has(evId)) { 219 | keepList.add(evId); 220 | continue; 221 | } 222 | // we might have this id in the keep list, if: 223 | // - THIS event points to an interesting event (C -> A in the example above) 224 | // - ANY interesting event points to THIS event (D -> C in the example above) 225 | const ev = events[evId]; 226 | if (interestingEvents.has(ev.prev_events[0])) { 227 | keepList.add(evId); 228 | continue; 229 | } 230 | // slower O(n) loop 231 | for (const interestingId of interestingEvents) { 232 | const interestingEvent = events[interestingId]; 233 | if (!interestingEvent) { 234 | continue; 235 | } 236 | let added = false; 237 | for (const pe of interestingEvent.prev_events) { 238 | if (pe === evId) { 239 | keepList.add(evId); 240 | added = true; 241 | break; 242 | } 243 | } 244 | if (added) { 245 | break; 246 | } 247 | } 248 | } 249 | 250 | const queue = [] as Array<{ id: string; from: string }>; 251 | const seenQueue = new Set(); 252 | for (const id of latestEvents) { 253 | 
queue.push({ 254 | id: id, 255 | from: id, 256 | }); 257 | } 258 | 259 | while (queue.length > 0) { 260 | const data = queue.pop(); 261 | if (!data) { 262 | break; 263 | } 264 | const id = data.id; 265 | const ev = events[id]; 266 | if (seenQueue.has(id)) { 267 | continue; // don't infinite loop 268 | } 269 | seenQueue.add(id); 270 | if (!ev) { 271 | continue; 272 | } 273 | // continue walking.. 274 | for (const k of ev.prev_events) { 275 | queue.push({ 276 | id: k, 277 | from: data.id, 278 | }); 279 | } 280 | 281 | if (keepList.has(id)) { 282 | continue; 283 | } 284 | 285 | // at this point we know this event is uninteresting, so remove ourselves and fix up the graph as we go 286 | delete events[id]; 287 | const child = events[data.from]; 288 | // console.log("Delete ", id, "new: ", child.prev_events, " -> ", ev.prev_events); 289 | const newPrevEvents = [ev.prev_events[0]]; 290 | // the child may have interesting prev events, keep the ones in the keep list 291 | for (const pe in child.prev_events) { 292 | if (keepList.has(pe)) { 293 | newPrevEvents.push(pe); 294 | } 295 | } 296 | child.prev_events = newPrevEvents; 297 | child._collapse = child._collapse || 0; 298 | child._collapse += 1; 299 | events[data.from] = child; 300 | // anything in the queue referencing this id needs to be repointed to reference the child 301 | for (const q of queue) { 302 | if (q.from === id) { 303 | q.from = child.event_id; 304 | } 305 | } 306 | } 307 | console.log("collapsifier complete"); 308 | return events; 309 | } 310 | } 311 | const shimInputElement = document.getElementById("shimurl") as HTMLInputElement; 312 | let dag = new Dag(new Cache()); 313 | dag.setShimUrl(shimInputElement.value); // TODO: this is annoying in so many places.. 314 | const transport = new StateResolverTransport(); 315 | const resolver = new StateResolver(transport, (data: DataGetEvent): MatrixEvent => { 316 | return dag.cache.eventCache.get(data.event_id)!; 317 | }); 318 | 319 | document.getElementById("showauthevents")!.addEventListener("change", (ev) => { 320 | dag.setShowAuthChain((ev.target)!.checked); 321 | if ((ev.target)!.checked) { 322 | dag.setShowAuthDAG(false); 323 | } 324 | dag.refresh(); 325 | (document.getElementById("showauthevents"))!.checked = dag.showAuthChain; 326 | (document.getElementById("showauthdag"))!.checked = dag.showAuthDAG; 327 | }); 328 | 329 | document.getElementById("showauthdag")!.addEventListener("change", (ev) => { 330 | dag.setShowAuthDAG((ev.target)!.checked); 331 | if ((ev.target)!.checked) { 332 | dag.setShowAuthChain(false); 333 | } 334 | dag.refresh(); 335 | (document.getElementById("showauthevents"))!.checked = dag.showAuthChain; 336 | (document.getElementById("showauthdag"))!.checked = dag.showAuthDAG; 337 | }); 338 | 339 | document.getElementById("showoutliers")!.addEventListener("change", (ev) => { 340 | dag.setShowOutliers((ev.target)!.checked); 341 | dag.refresh(); 342 | }); 343 | (document.getElementById("showoutliers"))!.checked = dag.showOutliers; 344 | document.getElementById("collapse")!.addEventListener("change", (ev) => { 345 | dag.setCollapse((ev.target)!.checked); 346 | dag.refresh(); 347 | }); 348 | (document.getElementById("collapse"))!.checked = dag.collapse; 349 | (document.getElementById("jsonfile")).addEventListener( 350 | "change", 351 | async (ev) => { 352 | const files = (document.getElementById("jsonfile")).files; 353 | if (!files) { 354 | return; 355 | } 356 | dag = new Dag(new Cache()); 357 | // set it initially from the input value else we might resolve without ever 
calling setShimUrl 358 | dag.setShimUrl(shimInputElement.value); 359 | await dag.loadFile(files[0]); 360 | }, 361 | false, 362 | ); 363 | 364 | document.getElementById("closeinfocontainer")!.addEventListener("click", (ev) => { 365 | document.getElementById("infocontainer")!.style.display = "none"; 366 | }); 367 | document.getElementById("infocontainer")!.style.display = "none"; 368 | 369 | document.getElementById("stepfwd")!.addEventListener("click", async (ev) => { 370 | dag.debugger.next(); 371 | dag.refresh(); 372 | eventList.highlight(dag.debugger.current()); 373 | }); 374 | document.getElementById("stepbwd")!.addEventListener("click", async (ev) => { 375 | dag.debugger.previous(); 376 | dag.refresh(); 377 | eventList.highlight(dag.debugger.current()); 378 | }); 379 | 380 | shimInputElement.addEventListener("change", (ev) => { 381 | const newUrl = (ev.target)!.value; 382 | dag.setShimUrl(newUrl); 383 | globalThis.localStorage.setItem("shim_url", newUrl); 384 | }); 385 | // set placeholder from local storage 386 | const existingShimUrl = globalThis.localStorage.getItem("shim_url"); 387 | if (existingShimUrl) { 388 | console.log("setting shim url from local storage"); 389 | shimInputElement.value = existingShimUrl; 390 | dag.setShimUrl(existingShimUrl); 391 | } 392 | 393 | const loaderElement = document.getElementById("loader")!; 394 | const loaderMsgElement = document.getElementById("loader-status")!; 395 | 396 | const setLoaderMessage = (text: string) => { 397 | loaderMsgElement.innerText = text; 398 | }; 399 | 400 | document.getElementById("resolve")!.addEventListener("click", async (_) => { 401 | if (!dag.shimUrl) { 402 | console.error("you need to set a shim url to resolve state!"); 403 | return {}; 404 | } 405 | loaderElement.style.display = "block"; 406 | setLoaderMessage(`Connecting to ${dag.shimUrl}`); 407 | try { 408 | await transport.connect(dag.shimUrl, resolver); 409 | await dag.debugger.resolve( 410 | dag.cache, 411 | async ( 412 | roomId: string, 413 | roomVer: string, 414 | states: Array>, 415 | atEvent: MatrixEvent, 416 | ): Promise> => { 417 | try { 418 | setLoaderMessage(`Resolving state at event ${atEvent.event_id}`); 419 | const r = await resolver.resolveState(roomId, roomVer, states, atEvent); 420 | return r.state; 421 | } catch (err) { 422 | console.error("failed to resolve state:", err); 423 | setLoaderMessage(`Failed to resolve state at event ${atEvent.event_id} : ${err}`); 424 | throw err; 425 | } 426 | }, 427 | ); 428 | setLoaderMessage(""); 429 | } catch (err) { 430 | console.error("resolving state failed: ", err); 431 | } finally { 432 | transport.close(); 433 | } 434 | loaderElement.style.display = "none"; 435 | dag.refresh(); 436 | }); 437 | 438 | // pull in GMSL bits 439 | const go = new Go(); // Defined in wasm_exec.js 440 | WebAssembly.instantiateStreaming(fetch("gmsl.wasm"), go.importObject).then((obj) => { 441 | globalThis.wasm = obj.instance; 442 | go.run(globalThis.wasm); 443 | 444 | const loadPreloadedFile = (sf: ScenarioFile) => { 445 | // now load the tutorial scenario 446 | const tutorial = loadScenarioFromScenarioFile(sf); 447 | dag.loadScenario(tutorial); 448 | }; 449 | 450 | const select = document.getElementById("file-select"); 451 | if (select) { 452 | select.innerHTML = ""; 453 | Object.keys(preloadedScenarios).forEach((val, index) => { 454 | select[index] = new Option(val); 455 | }); 456 | select.addEventListener("change", (event) => { 457 | const sf = preloadedScenarios[event?.target?.value]; 458 | loadPreloadedFile(sf); 459 | }); 460 | } 
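    // Load the default quickstart scenario only after go.run(), because scenarios with
    // calculate_event_ids set rely on globalThis.gmslEventIDForEvent, which is provided by
    // the GMSL WASM module (see loadScenarioFromScenarioFile in scenario.ts).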
461 | loadPreloadedFile(quickstartFile); 462 | }); 463 | -------------------------------------------------------------------------------- /src/preloaded_scenarios.ts: -------------------------------------------------------------------------------- 1 | import type { ScenarioFile } from "./scenario"; 2 | 3 | const quickstartFile: ScenarioFile = { 4 | calculate_event_ids: true, 5 | on_load_at_start: true, 6 | events: [ 7 | { 8 | type: "m.room.create", 9 | state_key: "", 10 | sender: "@creator:tardis", 11 | auth_events: [], 12 | prev_events: [], 13 | content: { creator: "@creator:tardis" }, 14 | event_id: "$CREATE", 15 | }, 16 | { 17 | type: "m.room.member", 18 | state_key: "@creator:tardis", 19 | sender: "@creator:tardis", 20 | auth_events: ["$CREATE"], 21 | prev_events: ["$CREATE"], 22 | content: { membership: "join" }, 23 | event_id: "$JOIN", 24 | }, 25 | { 26 | type: "m.room.message", 27 | sender: "@creator:tardis", 28 | auth_events: ["$CREATE", "$JOIN"], 29 | prev_events: ["$JOIN"], 30 | content: { body: "A wild fork appears!" }, 31 | event_id: "$FORK1", 32 | }, 33 | { 34 | type: "m.room.message", 35 | sender: "@creator:tardis", 36 | auth_events: ["$CREATE", "$JOIN"], 37 | prev_events: ["$JOIN"], 38 | content: { body: "Another wild fork appears!" }, 39 | event_id: "$FORK2", 40 | }, 41 | { 42 | type: "m.room.message", 43 | sender: "@creator:tardis", 44 | auth_events: ["$CREATE", "$JOIN"], 45 | prev_events: ["$FORK1", "$FORK2"], 46 | content: { body: "Merged!" }, 47 | event_id: "$MERGE", 48 | }, 49 | { 50 | type: "m.room.message", 51 | sender: "@creator:tardis", 52 | auth_events: ["$CREATE", "$JOIN"], 53 | prev_events: ["$MERGE"], 54 | content: { body: "This event has precalculated state" }, 55 | event_id: "$PRESTATE", 56 | }, 57 | { 58 | type: "m.room.name", 59 | state_key: "", 60 | sender: "@creator:tardis", 61 | auth_events: ["$CREATE", "$JOIN"], 62 | prev_events: ["$PRESTATE"], 63 | content: { name: "State events are blue, messages are grey" }, 64 | event_id: "$MSG", 65 | }, 66 | { 67 | type: "m.room.message", 68 | sender: "@creator:tardis", 69 | auth_events: ["$CREATE", "$JOIN"], 70 | prev_events: ["$MSG"], 71 | content: { body: "Boring long chains..." }, 72 | event_id: "$MSG2", 73 | }, 74 | { 75 | type: "m.room.message", 76 | sender: "@creator:tardis", 77 | auth_events: ["$CREATE", "$JOIN"], 78 | prev_events: ["$MSG2"], 79 | content: { body: "...can be collapsed..." }, 80 | event_id: "$MSG3", 81 | }, 82 | { 83 | type: "m.room.message", 84 | sender: "@creator:tardis", 85 | auth_events: ["$CREATE", "$JOIN"], 86 | prev_events: ["$MSG3"], 87 | content: { body: "...by checking the collapse checkbox." }, 88 | event_id: "$MSG4", 89 | }, 90 | ], 91 | room_id: "!quickstart:tardis", 92 | room_version: "10", 93 | tardis_version: 1, 94 | annotations: { 95 | title: ["Welcome to TARDIS! Press the → button to continue."].join("\n"), 96 | titles: { 97 | $FORK1: "State events are highlighted in blue. Messages are highlighted in grey.", 98 | $FORK2: "The DAG can fork, which indicates some events were sent at the same time.", 99 | $MERGE: "The DAG can merge, which merges state from each fork together. This is state resolution.", 100 | $PRESTATE: "Green events indicate the state at this event. 
Message events will never be green.", 101 | $MSG: "Check the 'Auth Chain' box to show the `auth_events` for each event.", 102 | $MSG2: "Press the 'Resolve State' button to calculate which events are part of the current room state.", 103 | $MSG3: [ 104 | "As state resolution is iterative, it will resolve state for all earlier events as well.", 105 | "Click on an earlier event in the list to jump to that event.", 106 | ].join("\n"), 107 | $MSG4: [ 108 | "Now load a file or use one of the pre-loaded files to experiment with state resolution in Matrix!", 109 | ].join("\n"), 110 | }, 111 | events: { 112 | $MSG: "Blue nodes like this one are state events.", 113 | $MSG2: "Boring long chains...", 114 | $MSG3: "...can be collapsed...", 115 | $MSG4: "...by checking the collapse checkbox.", 116 | $PRESTATE: "This event has pre-calculated state (in green) after this event", 117 | }, 118 | }, 119 | precalculated_state_after: { 120 | $PRESTATE: ["$CREATE", "$JOIN"], 121 | }, 122 | }; 123 | 124 | const mainlineForks: ScenarioFile = { 125 | calculate_event_ids: true, 126 | on_load_at_start: true, 127 | events: [ 128 | { 129 | type: "m.room.create", 130 | state_key: "", 131 | sender: "@alice:tardis", 132 | auth_events: [], 133 | prev_events: [], 134 | content: { creator: "@alice:tardis" }, 135 | event_id: "$CREATE", 136 | }, 137 | { 138 | type: "m.room.member", 139 | state_key: "@alice:tardis", 140 | sender: "@alice:tardis", 141 | auth_events: ["$CREATE"], 142 | prev_events: ["$CREATE"], 143 | content: { membership: "join" }, 144 | event_id: "$ALICE", 145 | }, 146 | { 147 | type: "m.room.power_levels", 148 | state_key: "", 149 | sender: "@alice:tardis", 150 | auth_events: ["$CREATE", "$ALICE"], 151 | prev_events: ["$ALICE"], 152 | content: { users: { "@alice:tardis": 100 }, events: { "m.room.name": 50 }, users_default: 50 }, 153 | event_id: "$PL", 154 | }, 155 | { 156 | type: "m.room.join_rules", 157 | state_key: "", 158 | sender: "@alice:tardis", 159 | auth_events: ["$CREATE", "$ALICE", "$PL"], 160 | prev_events: ["$PL"], 161 | content: { join_rule: "public" }, 162 | event_id: "$JR", 163 | }, 164 | { 165 | type: "m.room.member", 166 | state_key: "@bob:tardis", 167 | sender: "@bob:tardis", 168 | auth_events: ["$CREATE", "$JR", "$PL"], 169 | prev_events: ["$JR"], 170 | content: { membership: "join" }, 171 | event_id: "$BOB", 172 | }, 173 | { 174 | type: "m.room.name", 175 | state_key: "", 176 | sender: "@alice:tardis", 177 | auth_events: ["$CREATE", "$ALICE", "$PL"], 178 | prev_events: ["$BOB"], 179 | content: { name: "Alice Room" }, 180 | event_id: "$ALICE_NAME", 181 | }, 182 | { 183 | type: "m.room.name", 184 | state_key: "", 185 | sender: "@bob:tardis", 186 | auth_events: ["$CREATE", "$BOB", "$PL"], 187 | prev_events: ["$BOB"], 188 | content: { name: "Bob Room" }, 189 | event_id: "$BOB_NAME", 190 | }, 191 | { 192 | type: "m.room.message", 193 | sender: "@alice:tardis", 194 | auth_events: ["$CREATE", "$ALICE", "$PL"], 195 | prev_events: ["$ALICE_NAME", "$BOB_NAME"], 196 | content: { body: "Bob wins." 
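            // Mainline ordering at this merge point: both room name events have the same mainline
            // depth, so Bob's later origin_server_ts wins (see the $MERGE1 annotation below).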
}, 197 | event_id: "$MERGE1", 198 | }, 199 | { 200 | type: "m.room.name", 201 | state_key: "", 202 | sender: "@alice:tardis", 203 | auth_events: ["$CREATE", "$ALICE", "$PL"], 204 | prev_events: ["$MERGE1"], 205 | content: { name: "Alice Room 2" }, 206 | origin_server_ts: 1704067281337, 207 | event_id: "$ALICE_NAME2", 208 | }, 209 | { 210 | type: "m.room.name", 211 | state_key: "", 212 | sender: "@bob:tardis", 213 | auth_events: ["$CREATE", "$BOB", "$PL"], 214 | prev_events: ["$MERGE1"], 215 | content: { name: "Bob Room 2" }, 216 | origin_server_ts: 1704067281337, 217 | event_id: "$BOB_NAME2", 218 | }, 219 | { 220 | type: "m.room.message", 221 | sender: "@alice:tardis", 222 | auth_events: ["$CREATE", "$ALICE", "$PL"], 223 | prev_events: ["$ALICE_NAME2", "$BOB_NAME2"], 224 | content: { body: "Alice wins." }, 225 | event_id: "$MERGE2", 226 | }, 227 | { 228 | type: "m.room.name", 229 | state_key: "", 230 | sender: "@bob:tardis", 231 | auth_events: ["$CREATE", "$BOB", "$PL"], 232 | prev_events: ["$MERGE2"], 233 | content: { name: "Bob Room 3" }, 234 | origin_server_ts: 1704077300300, 235 | event_id: "$BOB_NAME3", 236 | }, 237 | { 238 | type: "m.room.power_levels", 239 | state_key: "", 240 | sender: "@alice:tardis", 241 | auth_events: ["$CREATE", "$ALICE"], 242 | prev_events: ["$MERGE2"], 243 | content: { users: { "@alice:tardis": 100 }, events: { "m.room.name": 50 }, users_default: 50 }, 244 | event_id: "$PL2", 245 | }, 246 | { 247 | type: "m.room.name", 248 | state_key: "", 249 | sender: "@alice:tardis", 250 | auth_events: ["$CREATE", "$ALICE", "$PL2"], 251 | prev_events: ["$PL2"], 252 | content: { name: "Alice Room 3" }, 253 | origin_server_ts: 1704077299300, 254 | event_id: "$ALICE_NAME3", 255 | }, 256 | { 257 | type: "m.room.message", 258 | sender: "@alice:tardis", 259 | auth_events: ["$CREATE", "$ALICE", "$PL2"], 260 | prev_events: ["$ALICE_NAME3", "$BOB_NAME3"], 261 | content: { body: "Alice wins." }, 262 | event_id: "$MERGE3", 263 | }, 264 | ], 265 | room_id: "!mainline-fork:tardis", 266 | room_version: "10", 267 | tardis_version: 1, 268 | annotations: { 269 | title: "The winner follows this priority: Mainline depth THEN origin_server_ts THEN event ID", 270 | titles: { 271 | $ALICE: [ 272 | "Mainline Ordering:", 273 | "The conflicting state events in this example are all room name changes and hence do not restrict anyone's permissions.", 274 | "This makes it easier to explain. All of the merges in this example are due to 'mainline ordering'.", 275 | ].join("\n"), 276 | $MERGE1: [ 277 | "Bob wins because his event has a higher origin_server_ts. Both events have the same mainline depth.", 278 | ].join("\n"), 279 | $MERGE2: ["Both events have the same timestamp. 
Alice wins with a higher event ID (A < Z < a < z)."].join( 280 | "\n", 281 | ), 282 | $MERGE3: [ 283 | "Bob's event has a higher timestamp but Alice wins because her event happened AFTER a change to the power levels.", 284 | "This gives her event a higher 'mainline position' which is considered before the timestamp or event ID.", 285 | ].join("\n"), 286 | }, 287 | events: {}, 288 | }, 289 | }; 290 | 291 | const reverseTopologicalPowerOrdering: ScenarioFile = { 292 | calculate_event_ids: true, 293 | on_load_at_start: true, 294 | events: [ 295 | { 296 | type: "m.room.create", 297 | state_key: "", 298 | sender: "@alice:tardis", 299 | auth_events: [], 300 | prev_events: [], 301 | content: { creator: "@alice:tardis" }, 302 | event_id: "$CREATE", 303 | }, 304 | { 305 | type: "m.room.member", 306 | state_key: "@alice:tardis", 307 | sender: "@alice:tardis", 308 | auth_events: ["$CREATE"], 309 | prev_events: ["$CREATE"], 310 | content: { membership: "join" }, 311 | event_id: "$ALICE", 312 | }, 313 | { 314 | type: "m.room.power_levels", 315 | state_key: "", 316 | sender: "@alice:tardis", 317 | auth_events: ["$CREATE", "$ALICE"], 318 | prev_events: ["$ALICE"], 319 | content: { users: { "@alice:tardis": 100 }, events: { "m.room.join_rules": 50 }, users_default: 50 }, 320 | event_id: "$PL", 321 | }, 322 | { 323 | type: "m.room.join_rules", 324 | state_key: "", 325 | sender: "@alice:tardis", 326 | auth_events: ["$CREATE", "$ALICE", "$PL"], 327 | prev_events: ["$PL"], 328 | content: { join_rule: "public" }, 329 | event_id: "$JR", 330 | }, 331 | { 332 | type: "m.room.member", 333 | state_key: "@bob:tardis", 334 | sender: "@bob:tardis", 335 | auth_events: ["$CREATE", "$JR", "$PL"], 336 | prev_events: ["$JR"], 337 | content: { membership: "join" }, 338 | event_id: "$BOB", 339 | }, 340 | { 341 | type: "m.room.member", 342 | state_key: "@charlie:tardis", 343 | sender: "@charlie:tardis", 344 | auth_events: ["$CREATE", "$JR", "$PL"], 345 | prev_events: ["$BOB"], 346 | content: { membership: "join" }, 347 | event_id: "$CHARLIE", 348 | }, 349 | { 350 | type: "m.room.join_rules", 351 | state_key: "", 352 | sender: "@alice:tardis", 353 | auth_events: ["$CREATE", "$ALICE", "$PL"], 354 | prev_events: ["$CHARLIE"], 355 | content: { join_rule: "invite" }, 356 | event_id: "$ALICE_JR", 357 | }, 358 | { 359 | type: "m.room.join_rules", 360 | state_key: "", 361 | sender: "@bob:tardis", 362 | auth_events: ["$CREATE", "$BOB", "$PL"], 363 | prev_events: ["$CHARLIE"], 364 | content: { join_rule: "knock" }, 365 | event_id: "$BOB_JR", 366 | }, 367 | { 368 | type: "m.room.message", 369 | sender: "@alice:tardis", 370 | auth_events: ["$CREATE", "$ALICE", "$PL"], 371 | prev_events: ["$ALICE_JR", "$BOB_JR"], 372 | content: { body: "Bob wins." }, 373 | event_id: "$MERGE1", 374 | }, 375 | { 376 | type: "m.room.join_rules", 377 | state_key: "", 378 | sender: "@bob:tardis", 379 | auth_events: ["$CREATE", "$BOB", "$PL"], 380 | prev_events: ["$MERGE1"], 381 | content: { join_rule: "knock" }, 382 | event_id: "$BOB_JR2", 383 | }, 384 | { 385 | type: "m.room.join_rules", 386 | state_key: "", 387 | sender: "@charlie:tardis", 388 | auth_events: ["$CREATE", "$CHARLIE", "$PL"], 389 | prev_events: ["$MERGE1"], 390 | content: { join_rule: "public" }, 391 | event_id: "$CHARLIE_JR", 392 | }, 393 | { 394 | type: "m.room.message", 395 | sender: "@alice:tardis", 396 | auth_events: ["$CREATE", "$ALICE", "$PL"], 397 | prev_events: ["$BOB_JR2", "$CHARLIE_JR"], 398 | content: { body: "Charlie wins." 
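            // Power ordering at this merge point: Bob and Charlie have the same power level, so
            // Charlie's later origin_server_ts wins (see the $MERGE2 annotation below).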
}, 399 | event_id: "$MERGE2", 400 | }, 401 | { 402 | type: "m.room.join_rules", 403 | state_key: "", 404 | sender: "@bob:tardis", 405 | auth_events: ["$CREATE", "$BOB", "$PL"], 406 | prev_events: ["$MERGE2"], 407 | content: { join_rule: "knock" }, 408 | origin_server_ts: 1704077299001, 409 | event_id: "$BOB_JR3", 410 | }, 411 | { 412 | type: "m.room.join_rules", 413 | state_key: "", 414 | sender: "@charlie:tardis", 415 | auth_events: ["$CREATE", "$CHARLIE", "$PL"], 416 | prev_events: ["$MERGE2"], 417 | content: { join_rule: "invite" }, 418 | origin_server_ts: 1704077299001, 419 | event_id: "$CHARLIE_JR2", 420 | }, 421 | { 422 | type: "m.room.message", 423 | sender: "@alice:tardis", 424 | auth_events: ["$CREATE", "$ALICE", "$PL"], 425 | prev_events: ["$CHARLIE_JR2", "$BOB_JR3"], 426 | content: { body: "Bob wins." }, 427 | event_id: "$MERGE3", 428 | }, 429 | ], 430 | room_id: "!power-ordering:tardis", 431 | room_version: "10", 432 | tardis_version: 1, 433 | annotations: { 434 | title: "The winner follows this priority: Sender Power Level THEN origin_server_ts THEN event ID", 435 | titles: { 436 | $PL: "Join rules can be modified by ANYONE", 437 | $ALICE: [ 438 | "Reverse Topological Power Ordering:", 439 | "The conflicting state events in this example potentially restrict permissions because they are join rules.", 440 | "All of the merges in this example are due to this ordering.", 441 | ].join("\n"), 442 | $MERGE1: [ 443 | "Bob's 'knock' wins because he has a lower PL (50 vs 100).", 444 | "This seems undesirable at first, but this can be worded another way: Alice's 'invite' is APPLIED FIRST, and then Bob's.", 445 | "This order ensures if Alice revokes Bob's permissions, Alice wins.", 446 | ].join("\n"), 447 | $MERGE2: [ 448 | "Both Bob and Charlie have the same PL (50). Charlie's 'public' wins because his event has a higher origin_server_ts", 449 | ].join("\n"), 450 | $MERGE3: [ 451 | "Both events have the same timestamp. Bob's 'knock' wins because his event ID is greater than Charlie's (A < Z < a < z)", 452 | ].join("\n"), 453 | }, 454 | events: {}, 455 | }, 456 | }; 457 | 458 | export { quickstartFile, mainlineForks, reverseTopologicalPowerOrdering }; 459 | -------------------------------------------------------------------------------- /src/scenario.ts: -------------------------------------------------------------------------------- 1 | import JSON5 from "json5"; 2 | import type { EventID, MatrixEvent } from "./state_resolver"; 3 | 4 | export const DEFAULT_ROOM_VERSION = "10"; 5 | 6 | // ScenarioFile is the file format of .json5 files used with tardis. 7 | export interface ScenarioFile { 8 | // Required. The version of the file, always '1'. 9 | tardis_version: number; 10 | // Required. The events in this scenario, in the order they should be processed (typically topologically sorted) 11 | events: Array; 12 | // Optional. The room version these events are represented in. Default: DEFAULT_ROOM_VERSION. 13 | room_version: string; 14 | // Optional. If events are missing a room_id key, populate it from this field. For brevity. 15 | room_id: string; 16 | // Optional. If true, calculates the event_id field. 17 | calculate_event_ids: boolean; 18 | // Optional. Can force the "state after the event" to be these events. Useful for testing /state_ids responses. 19 | precalculated_state_after?: Record>; 20 | // Optional. If true, when this file is loaded it will start at the first event rather than the last event. Default: false. 
21 | // In general, starting at the beginning is only useful for tutorials. 22 | on_load_at_start: boolean; 23 | // Optional. Can set custom strings for nodes (events) or on the graph in general (title). Use '\n\' to get line breaks 24 | // both in the file and rendered. 25 | annotations?: { 26 | title?: string; 27 | titles: Record; 28 | events: Record; 29 | }; 30 | } 31 | 32 | export interface ScenarioEvent { 33 | // subset of MatrixEvent 34 | event_id: string; 35 | type: string; 36 | state_key?: string; 37 | // biome-ignore lint/suspicious/noExplicitAny: we don't know the values. 38 | content: Record; 39 | sender: string; 40 | prev_events: Array; 41 | auth_events: Array; 42 | origin_server_ts?: number; 43 | room_id?: string; 44 | } 45 | 46 | // Scenario is a loaded scenario for use with tardis. ScenarioFiles end up being represented as Scenarios. 47 | export interface Scenario { 48 | // The events in this scenario, in the order they should be processed (typically topologically sorted). 49 | events: Array; 50 | // The room version for these events 51 | roomVersion: string; 52 | // Pre-calculated state (useful for /state_ids responses, or for just demoing tardis without a shim!) 53 | precalculatedStateAfter?: Record>; 54 | // If true, when this scenario is loaded, start at the first event. 55 | onLoadAtStart: boolean; 56 | // Any annotations for the graph. 57 | annotations?: { 58 | title?: string; 59 | titles?: Record; 60 | events?: Record; 61 | }; 62 | } 63 | 64 | // loadScenarioFromFile loads a scenario file (.json5) or new-line delimited JSON / JSON array, which represents the events in the scenario, 65 | // in the order they should be processed. 66 | // Throws if there is malformed events or malformed data. Requires `globalThis.gmslEventIDForEvent` to exist (loaded via gmsl.wasm). 67 | export async function loadScenarioFromFile(f: File): Promise { 68 | // read the file 69 | const eventsOrScenario = await new Promise( 70 | (resolve: (value: Array | ScenarioFile) => void, reject) => { 71 | const reader = new FileReader(); 72 | reader.onload = (data) => { 73 | if (!data.target || !data.target.result) { 74 | return; 75 | } 76 | if (f.name.endsWith(".json5")) { 77 | // scenario file 78 | const sfJson = JSON5.parse(data.target.result as string); 79 | if (sfJson.on_load_at_start == null) { 80 | sfJson.on_load_at_start = false; 81 | } 82 | if (sfJson.calculate_event_ids == null) { 83 | sfJson.calculate_event_ids = false; 84 | } 85 | if (sfJson.room_version == null) { 86 | sfJson.room_version = DEFAULT_ROOM_VERSION; 87 | } 88 | resolve(sfJson as ScenarioFile); 89 | return; 90 | } 91 | const contents = data.target.result as string; 92 | if (contents.startsWith("[")) { 93 | // it's a json array, resolve as-is. 
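                // A hypothetical minimal payload for this branch:
                //   [{"type":"m.room.create","state_key":"","sender":"@a:tardis","event_id":"$1",
                //     "auth_events":[],"prev_events":[],"content":{}}]
                // i.e. a JSON array of ScenarioEvent objects; room_version, calculate_event_ids
                // and on_load_at_start defaults are filled in below when the ScenarioFile is built.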
94 | const j = JSON.parse(contents); 95 | resolve(j as Array); 96 | return; 97 | } 98 | // it's ndjson 99 | const ndjson = contents 100 | .split("\n") 101 | .filter((line) => { 102 | return line.trim().length > 0; 103 | }) 104 | .map((line) => { 105 | const j = JSON.parse(line); 106 | return j; 107 | }); 108 | resolve(ndjson as Array); 109 | }; 110 | reader.readAsText(f); 111 | }, 112 | ); 113 | // work out which file format we're dealing with and make a scenario file 114 | let scenarioFile: ScenarioFile; 115 | if (Array.isArray(eventsOrScenario)) { 116 | scenarioFile = { 117 | tardis_version: 1, 118 | room_version: DEFAULT_ROOM_VERSION, 119 | room_id: eventsOrScenario[0].room_id, 120 | calculate_event_ids: false, 121 | on_load_at_start: false, 122 | events: eventsOrScenario, 123 | }; 124 | } else { 125 | // it's a test scenario 126 | scenarioFile = eventsOrScenario; 127 | } 128 | return loadScenarioFromScenarioFile(scenarioFile); 129 | } 130 | 131 | export function loadScenarioFromScenarioFile(scenarioFile: ScenarioFile): Scenario { 132 | const scenario: Scenario = { 133 | events: [], 134 | roomVersion: scenarioFile.room_version, 135 | annotations: scenarioFile.annotations, 136 | precalculatedStateAfter: scenarioFile.precalculated_state_after, 137 | onLoadAtStart: scenarioFile.on_load_at_start, 138 | }; 139 | // validate and preprocess the scenario file into a valid scenario 140 | const fakeEventIdToRealEventId = new Map(); 141 | let time = new Date(2024, 0, 1).getTime(); 142 | for (const ev of scenarioFile.events) { 143 | if (!ev) { 144 | throw new Error("missing event"); 145 | } 146 | if (!ev.event_id) { 147 | throw new Error(`event is missing 'event_id', got ${JSON.stringify(ev)}`); 148 | } 149 | if (!ev.type) { 150 | throw new Error(`event is missing 'type' field, got ${JSON.stringify(ev)}`); 151 | } 152 | if (!ev.origin_server_ts) { 153 | ev.origin_server_ts = time; 154 | time += 1000; 155 | } else { 156 | time = ev.origin_server_ts + 1000; 157 | } 158 | if (!ev.room_id && scenarioFile.room_id) { 159 | ev.room_id = scenarioFile.room_id; 160 | } 161 | if (scenarioFile.calculate_event_ids) { 162 | const fakeEventId = ev.event_id; 163 | // replace any references in prev_events and auth_events 164 | for (const key of ["prev_events", "auth_events"]) { 165 | const replacement: Array = []; 166 | for (const eventIdToReplace of ev[key]) { 167 | const realEventId = fakeEventIdToRealEventId.get(eventIdToReplace); 168 | if (realEventId) { 169 | replacement.push(realEventId); 170 | } else { 171 | replacement.push(eventIdToReplace); 172 | } 173 | } 174 | ev[key] = replacement; 175 | } 176 | // replace the fake event ID AFTER we mutate the event to set prev/auth events 177 | const realEventId = globalThis.gmslEventIDForEvent(JSON.stringify(ev), scenarioFile.room_version); 178 | fakeEventIdToRealEventId.set(fakeEventId, realEventId); 179 | ev.event_id = realEventId; 180 | 181 | // also replace any references in annotations 182 | if (scenario.annotations?.events?.[fakeEventId]) { 183 | scenario.annotations.events[realEventId] = scenario.annotations.events[fakeEventId]; 184 | } 185 | if (scenario.annotations?.titles?.[fakeEventId]) { 186 | scenario.annotations.titles[realEventId] = scenario.annotations.titles[fakeEventId]; 187 | } 188 | } 189 | scenario.events.push(ev as MatrixEvent); 190 | } 191 | // also also replace any references in precalculatedStateAfter AFTER we've processed all events 192 | if (scenario.precalculatedStateAfter) { 193 | for (const fakeAtStateEventId in 
scenario.precalculatedStateAfter) { 194 | const realAtStateEventId = fakeEventIdToRealEventId.get(fakeAtStateEventId); 195 | if (!realAtStateEventId) { 196 | console.error( 197 | `precalculated_state_after references ${fakeAtStateEventId} but this does not exist in the events array. Skipping.`, 198 | ); 199 | continue; 200 | } 201 | const stateAtEvent: string[] = []; 202 | for (const fakeStateEventId of scenario.precalculatedStateAfter[fakeAtStateEventId]) { 203 | const e = fakeEventIdToRealEventId.get(fakeStateEventId); 204 | if (e) { 205 | stateAtEvent.push(e); 206 | } else { 207 | console.error( 208 | `precalculated_state_after for ${fakeAtStateEventId} references ${fakeStateEventId} but this does not exist in the events array. Skipping.`, 209 | ); 210 | } 211 | } 212 | scenario.precalculatedStateAfter[realAtStateEventId] = stateAtEvent; 213 | delete scenario.precalculatedStateAfter[fakeAtStateEventId]; 214 | } 215 | } 216 | console.log(scenario); 217 | return scenario; 218 | } 219 | -------------------------------------------------------------------------------- /src/state_resolver.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, it } from "@jest/globals"; 2 | import { type DataGetEvent, type DataResolveState, type MatrixEvent, StateResolver } from "./state_resolver"; 3 | 4 | const roomVer = "custom"; 5 | const roomId = "!foo"; 6 | 7 | describe("StateResolver", () => { 8 | describe("resolveState", () => { 9 | const eventMap: Record = { 10 | $foo: { 11 | type: "m.room.create", 12 | state_key: "", 13 | content: {}, 14 | auth_events: [], 15 | prev_events: [], 16 | prev_auth_events: [], 17 | event_id: "$foo", 18 | sender: "@alice", 19 | room_id: "!foo", 20 | origin_server_ts: 1, 21 | }, 22 | $foomember: { 23 | type: "m.room.member", 24 | state_key: "@alice", 25 | content: { membership: "join" }, 26 | auth_events: [], 27 | prev_events: [], 28 | prev_auth_events: [], 29 | event_id: "$foomember", 30 | sender: "@alice", 31 | room_id: "!foo", 32 | origin_server_ts: 1, 33 | }, 34 | $bar: { 35 | type: "m.room.create", 36 | state_key: "", 37 | content: {}, 38 | auth_events: [], 39 | prev_events: [], 40 | prev_auth_events: [], 41 | event_id: "$bar", 42 | sender: "@alice", 43 | room_id: "!foo", 44 | origin_server_ts: 1, 45 | }, 46 | }; 47 | const atFoo: MatrixEvent = { 48 | type: "foo", 49 | content: {}, 50 | sender: "@alice", 51 | auth_events: [], 52 | prev_events: [], 53 | prev_auth_events: [], 54 | room_id: "!foo", 55 | origin_server_ts: 2, 56 | event_id: "$atFoo", 57 | }; 58 | const atBar: MatrixEvent = { 59 | type: "bar", 60 | content: {}, 61 | sender: "@alice", 62 | auth_events: [], 63 | prev_events: [], 64 | prev_auth_events: [], 65 | room_id: "!bar", 66 | origin_server_ts: 2, 67 | event_id: "$atBar", 68 | }; 69 | 70 | it("pairs up requests and sends the right request shape", async () => { 71 | const outstandingRequests: Array<{ id: string; data: DataResolveState }> = []; 72 | const sr = new StateResolver( 73 | { 74 | sendResolveState: async (id: string, data: DataResolveState) => { 75 | outstandingRequests.push({ 76 | id: id, 77 | data: data, 78 | }); 79 | }, 80 | }, 81 | (data: DataGetEvent): MatrixEvent => { 82 | return eventMap[data.event_id]; 83 | }, 84 | ); 85 | // biome-ignore lint/complexity/useLiteralKeys: it reads much nicer in IDEs to use this form 86 | const fooState = [{ [`["m.room.create",""]`]: "$foo", [`["m.room.member","@alice"]`]: "$foomember" }]; 87 | const promiseFoo = sr.resolveState(roomId, roomVer, 
fooState, atFoo); 88 | let fooResolved = false; 89 | promiseFoo.then(() => { 90 | fooResolved = true; 91 | }); 92 | // biome-ignore lint/complexity/useLiteralKeys: it reads much nicer in IDEs to use this form 93 | const barState = [{ [`["m.room.create",""]`]: "$bar" }]; 94 | const promiseBar = sr.resolveState(roomId, roomVer, barState, atBar); 95 | let barResolved = false; 96 | promiseBar.then(() => { 97 | barResolved = true; 98 | }); 99 | expect(outstandingRequests.length).toEqual(2); 100 | const fooRequest = outstandingRequests[0]; 101 | expect(fooRequest.id).toBeDefined(); 102 | expect(fooRequest.data).toEqual({ 103 | room_id: "!foo", 104 | room_version: roomVer, 105 | state: fooState, 106 | event: atFoo, 107 | }); 108 | const barRequest = outstandingRequests[1]; 109 | expect(barRequest.id).toBeDefined(); 110 | expect(barRequest.data).toEqual({ 111 | room_id: "!foo", 112 | room_version: roomVer, 113 | state: barState, 114 | event: atBar, 115 | }); 116 | 117 | // neither promise should have resolved yet 118 | expect(fooResolved).toBe(false); 119 | expect(barResolved).toBe(false); 120 | 121 | // now resolve bar first even though it came last, to ensure we are pairing up based on the ID. 122 | sr.onResolveStateResponse(barRequest.id, { 123 | state: [], 124 | // biome-ignore lint/complexity/useLiteralKeys: it reads much nicer in IDEs to use this form 125 | result: { [`["m.room.create",""]`]: "$bar" }, 126 | room_id: "!foo", 127 | room_version: roomVer, 128 | event: atBar, 129 | }); 130 | const barResult = await promiseBar; 131 | expect(barResolved).toBe(true); 132 | expect(fooResolved).toBe(false); 133 | expect(barResult.state).toEqual({ 134 | // biome-ignore lint/complexity/useLiteralKeys: 135 | [`["m.room.create",""]`]: "$bar", 136 | }); 137 | 138 | sr.onResolveStateResponse(fooRequest.id, { 139 | state: [], 140 | // biome-ignore lint/complexity/useLiteralKeys: it reads much nicer in IDEs to use this form 141 | result: { [`["m.room.create",""]`]: "$foo" }, 142 | room_id: "!foo", 143 | room_version: roomVer, 144 | event: atFoo, 145 | }); 146 | const fooResult = await promiseFoo; 147 | expect(fooResolved).toBe(true); 148 | expect(fooResult.state).toEqual({ 149 | // biome-ignore lint/complexity/useLiteralKeys: 150 | [`["m.room.create",""]`]: "$foo", 151 | }); 152 | }); 153 | }); 154 | }); 155 | -------------------------------------------------------------------------------- /src/state_resolver.ts: -------------------------------------------------------------------------------- 1 | interface MatrixEvent { 2 | event_id: string; 3 | type: string; 4 | state_key?: string; 5 | // biome-ignore lint/suspicious/noExplicitAny: we don't know the values. 
6 | content: Record; 7 | sender: string; 8 | prev_events: Array; 9 | auth_events: Array; 10 | room_id: string; 11 | origin_server_ts: number; 12 | 13 | // only for auth dags 14 | prev_auth_events: Array | undefined; 15 | 16 | // TODO: fix metadata fields 17 | _collapse?: number; 18 | _backwards_extremity_key?: string; 19 | } 20 | 21 | enum MsgType { 22 | GetEvent = "get_event", 23 | ResolveState = "resolve_state", 24 | } 25 | 26 | interface WebSocketMessage { 27 | type: MsgType; 28 | id: string; 29 | error?: string; 30 | data: T; 31 | } 32 | 33 | export type StateKeyTuple = string; // JSON encoded array of 2 string elements [type, state_key] 34 | export type EventID = string; 35 | 36 | interface DataResolveState { 37 | room_id: string; 38 | room_version: string; 39 | state: Array>; 40 | event: MatrixEvent; 41 | result?: Record; 42 | error?: string; 43 | } 44 | interface DataGetEvent { 45 | event_id: string; 46 | event: MatrixEvent; 47 | } 48 | 49 | interface StateResolverReceiver { 50 | onGetEventRequest(data: DataGetEvent): MatrixEvent; 51 | onResolveStateResponse(id: string, data: DataResolveState): void; 52 | } 53 | 54 | interface StateResolverSender { 55 | sendResolveState(id: string, data: DataResolveState): Promise; 56 | } 57 | 58 | interface ResolvedState { 59 | state: Record; 60 | } 61 | 62 | class StateResolver implements StateResolverReceiver { 63 | inflightRequests: Map void>; 64 | constructor( 65 | readonly sender: StateResolverSender, 66 | readonly getEvent: (data: DataGetEvent) => MatrixEvent, 67 | ) { 68 | this.inflightRequests = new Map(); 69 | } 70 | 71 | onGetEventRequest(data: DataGetEvent): MatrixEvent { 72 | const ev = this.getEvent(data); 73 | if (!ev) { 74 | console.error(`WS: asked for event ${data.event_id} but didn't find it.`); 75 | } 76 | return ev; 77 | } 78 | 79 | onResolveStateResponse(id: string, data: DataResolveState) { 80 | const resolve = this.inflightRequests.get(id); 81 | if (!resolve) { 82 | console.error(`onResolveStateResponse: no request id for response! 
id=${id}`); 83 | return; 84 | } 85 | if (data.error) { 86 | console.error(id, data.error); 87 | } 88 | resolve(data); 89 | this.inflightRequests.delete(id); 90 | } 91 | 92 | async resolveState( 93 | roomId: string, 94 | roomVersion: string, 95 | states: Array>, 96 | atEvent: MatrixEvent, 97 | ): Promise { 98 | console.log("resolveState", states, atEvent); 99 | // make an id so we can pair it up when we get the response 100 | const id = globalThis.crypto.randomUUID(); 101 | const promise = new Promise((resolve, reject) => { 102 | this.inflightRequests.set(id, (resolvedData: DataResolveState) => { 103 | if (!resolvedData.result) { 104 | console.error("State resolved to the empty set"); 105 | resolve({ state: {} }); 106 | return; 107 | } 108 | if (resolvedData.error && resolvedData.error !== "") { 109 | reject(resolvedData.error); 110 | return; 111 | } 112 | resolve({ 113 | state: resolvedData.result, 114 | }); 115 | }); 116 | this.sender.sendResolveState(id, { 117 | state: states, 118 | room_id: roomId, 119 | room_version: roomVersion, 120 | event: atEvent, 121 | }); 122 | }); 123 | 124 | return promise; 125 | } 126 | } 127 | 128 | class StateResolverTransport implements StateResolverSender { 129 | ws: WebSocket; 130 | receiver: StateResolverReceiver; 131 | 132 | async sendResolveState(id: string, data: DataResolveState) { 133 | this.sendWs({ 134 | id: id, 135 | type: MsgType.ResolveState, 136 | data: data, 137 | }); 138 | } 139 | 140 | // WebSocket functions below 141 | 142 | async connect(url: string, receiver: StateResolverReceiver) { 143 | this.receiver = receiver; 144 | this.ws = new WebSocket(url); 145 | return new Promise((resolve) => { 146 | this.ws.addEventListener("open", () => { 147 | console.log("WS open"); 148 | resolve(); 149 | }); 150 | this.ws.addEventListener("error", this.onWsError.bind(this)); 151 | this.ws.addEventListener("close", this.onWsClose.bind(this)); 152 | this.ws.addEventListener("message", this.onWsMessage.bind(this)); 153 | }); 154 | } 155 | 156 | close() { 157 | this.ws.close(); 158 | } 159 | 160 | // biome-ignore lint/suspicious/noExplicitAny: 161 | sendWs(msg: WebSocketMessage) { 162 | console.log("send", msg); 163 | this.ws.send(JSON.stringify(msg)); 164 | } 165 | 166 | onWsClose(_: CloseEvent) {} 167 | onWsError(_: Event) {} 168 | onWsMessage(ev: MessageEvent) { 169 | // biome-ignore lint/suspicious/noExplicitAny: 170 | const msg = JSON.parse(ev.data) as WebSocketMessage; 171 | console.log("recv", msg); 172 | switch (msg.type) { 173 | case MsgType.GetEvent: { 174 | const data = msg.data as DataGetEvent; 175 | const response = this.receiver.onGetEventRequest(data); 176 | data.event = response; 177 | msg.data = data; 178 | this.sendWs(msg); 179 | break; 180 | } 181 | case MsgType.ResolveState: { 182 | const data = msg.data as DataResolveState; 183 | this.receiver.onResolveStateResponse(msg.id, data); 184 | break; 185 | } 186 | } 187 | } 188 | } 189 | 190 | export { StateResolver, StateResolverTransport, type DataGetEvent, type DataResolveState, type MatrixEvent }; 191 | -------------------------------------------------------------------------------- /style.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0px; 3 | } 4 | #page { 5 | display: grid; 6 | grid-template-areas: 7 | "header header" 8 | "eventlist svgcontainer" 9 | "eventlist svgcontainer"; 10 | grid-template-rows: min-content auto auto; 11 | /* Make the sidebar just enough wide for the content but the svgcontainer take the most space */ 12 
| grid-template-columns: 25% auto; 13 | height: 100%; 14 | } 15 | 16 | #header { 17 | grid-area: header; 18 | height: min-content; 19 | padding: 8px; 20 | background-color: #a6b8c7; 21 | border: 1px solid #00060b; 22 | margin: 5px; 23 | display: flex; 24 | gap: 8px; 25 | } 26 | 27 | #infocontainer { 28 | position: absolute; 29 | background-color: #eee; 30 | padding: 10px; 31 | width: min-content; 32 | max-width: 50%; /* don't let the popup occlude the entire graph */ 33 | overflow: scroll; 34 | } 35 | 36 | #closeinfocontainer { 37 | cursor: pointer; 38 | } 39 | 40 | #svgcontainer { 41 | grid-area: svgcontainer; 42 | /* Prevent the svg from filling more than the available space by allowing it to scroll in both directions */ 43 | overflow: scroll; 44 | } 45 | 46 | #eventlist { 47 | grid-area: eventlist; 48 | border: 1px solid; 49 | border-color: #00060b; 50 | margin: 5px; 51 | overflow-y: scroll; 52 | } 53 | 54 | .eventlistrow { 55 | display: flex; 56 | border-bottom: 1px solid; 57 | border-color: #00060b; 58 | cursor: pointer; 59 | } 60 | .eventlistrow:hover { 61 | background-color: #a6b8c7; 62 | } 63 | 64 | .eventlistrowprefix { 65 | padding: 5px; 66 | } 67 | .eventlistrowjson { 68 | padding: 5px; 69 | cursor: pointer; 70 | text-wrap: nowrap; 71 | } 72 | .eventlistroweventid { 73 | padding: 5px; 74 | text-wrap: nowrap; 75 | } 76 | 77 | .eventlistrowbody { 78 | overflow: scroll; 79 | white-space: nowrap; 80 | padding: 5px; 81 | flex-grow: 1; 82 | } 83 | 84 | .loader { 85 | border: 3px solid #f3f3f3; /* Light grey */ 86 | border-top: 3px solid #3498db; /* Blue */ 87 | border-radius: 50%; 88 | width: 16px; 89 | height: 16px; 90 | animation: spin 1s linear infinite; 91 | margin: 0 10 0 10; 92 | display: none; 93 | } 94 | 95 | @keyframes spin { 96 | 0% { 97 | transform: rotate(0deg); 98 | } 99 | 100% { 100 | transform: rotate(360deg); 101 | } 102 | } 103 | 104 | .node-text { 105 | font-family: Gill Sans, Calibri, sans-serif; 106 | } 107 | input { 108 | font-family: Gill Sans, Calibri, sans-serif; 109 | } 110 | div { 111 | font-family: Gill Sans, Calibri, sans-serif; 112 | } 113 | body { 114 | height: 100%; 115 | } 116 | .monospace { 117 | font-family: "Lucida Console", Courier, monospace; 118 | } 119 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "module": "ESNext", 6 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 7 | "skipLibCheck": true, 8 | /* Bundler mode */ 9 | "moduleResolution": "bundler", 10 | "allowImportingTsExtensions": true, 11 | "isolatedModules": true, 12 | "moduleDetection": "force", 13 | "noEmit": true, 14 | /* Linting */ 15 | "strict": true, 16 | "noUnusedLocals": true, 17 | "noUnusedParameters": true, 18 | "noFallthroughCasesInSwitch": true, 19 | "strictPropertyInitialization": false 20 | }, 21 | "include": ["src"] 22 | } 23 | --------------------------------------------------------------------------------