├── .github └── FUNDING.yml ├── .gitignore ├── .tool-versions ├── LICENSE ├── README.md ├── dev-env ├── .env ├── LICENSE ├── README.md ├── backend │ ├── excalidraw │ │ └── Dockerfile │ ├── livebook │ │ └── Dockerfile │ ├── postgresql │ │ ├── Dockerfile │ │ ├── app.env │ │ ├── init │ │ │ └── init-db.sql │ │ └── swai_app.env │ ├── watchtower │ │ ├── .env │ │ └── Dockerfile │ └── xoom │ │ ├── designer │ │ └── .env │ │ ├── schemata-db │ │ └── .env │ │ └── schemata │ │ └── .env ├── build-clique1.sh ├── clean-docker.sh ├── deploy-ex-esdb-clique1.yaml ├── ex-esdb-cluster.yaml ├── excalidraw.yml ├── livebook.yml ├── networks.yml ├── push.sh ├── rm-cont.sh ├── run-clique1.sh ├── run-tools.sh ├── swarm-down.sh ├── swarm-up.sh ├── watchtower.yml └── xoom-designer.yml ├── push.sh └── system ├── .credo.exs ├── .formatter.exs ├── ADR.md ├── CHANGELOG.md ├── Dockerfile ├── SUBSCRIPTIONS.md ├── check-ex-esdb.sh ├── config ├── config.exs ├── dev.exs ├── prod.exs ├── runtime.exs └── test.exs ├── examples ├── .keep └── park_fac │ ├── .keep │ └── readme.md ├── guides ├── getting_started.md └── testing.md ├── lib ├── app.ex ├── beam_campus │ ├── bit_flags.ex │ └── color_funcs.ex ├── commanded │ ├── adapter.ex │ └── mapper.ex ├── en_vars.ex ├── ex_esdb.ex ├── ex_esdb │ ├── aggregator.ex │ ├── cluster.ex │ ├── emitter.ex │ ├── event_projector.ex │ ├── event_store.ex │ ├── messages.ex │ ├── pubsub.ex │ ├── schema │ │ ├── event_record.ex │ │ ├── ex_esdb_schema.ex │ │ ├── new_event.ex │ │ ├── snapshot_record.ex │ │ └── subscripition_record.ex │ ├── snapshots.ex │ ├── store_info.ex │ ├── streams.ex │ ├── subscriptions.ex │ ├── system.ex │ └── version_formatter.ex ├── khepri │ └── conditions.ex ├── options.ex ├── repl.ex ├── repl │ ├── generator.ex │ └── monitor.ex └── themes.ex ├── mix.exs ├── mix.lock ├── priv └── protos │ └── ex_esdb.proto ├── protoc.exs ├── pub2hex.sh ├── rm-cont.sh ├── run-ex-esdb.sh ├── src ├── emitter_group.erl ├── erts_v.erl ├── ex_esdb_filter.erl └── 
ex_esdb_triggers.erl └── test ├── beam_campus ├── bit_flags │ ├── has_all_test.exs │ ├── has_any_test.exs │ ├── set_test.exs │ ├── toggle_test.exs │ ├── unset_test.exs │ └── untoggle_test.exs ├── bit_flags_test.exs └── color_funcs_test.exs ├── commanded ├── adapter │ ├── ack_event_test.exs │ ├── append_to_stream_test.exs │ ├── child_spec_test.exs │ ├── delete_snapshot_test.exs │ ├── delete_subscription_test.exs │ ├── read_snapshot_test.exs │ ├── record_snapshot_test.exs │ ├── stream_forward_test.exs │ ├── subscribe_test.exs │ ├── subscribe_to_test.exs │ └── unsubscribe_test.exs ├── adapter_test.exs └── mapper_test.exs ├── ex_esdb ├── aggregator │ ├── finalize_map_test.exs │ └── foldl_test.exs ├── aggregator_test.exs ├── cluster_test.exs ├── emitter_test.exs ├── event_store_test.exs ├── options_test.exs ├── projections_test.exs ├── registry_test.exs ├── snapshots │ ├── read_snapshot_test.exs │ └── record_snapshot_test.exs ├── snapshots_test.exs ├── store_info_test.exs ├── streams_test.exs ├── subscriptions_test.exs └── system_test.exs ├── support ├── storage.ex └── test_case.ex └── test_helper.exs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [rgfaber] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 12 | polar: # Replace with a single Polar username 13 | buy_me_a_coffee: beamologist 14 | thanks_dev: # Replace with a single 
thanks.dev username 15 | custom: ["https://paypal.me/rgfaber"] 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## IDE 2 | .idea/ 3 | .vscode/ 4 | .elixir_ls/ 5 | .history/ 6 | .lexical/ 7 | .elixir-tools/ 8 | .obsidian/ 9 | tmp/ 10 | 11 | 12 | ## Elixir 13 | /_build 14 | /cover 15 | /deps 16 | /.fetch 17 | /tmp 18 | /system/tmp 19 | /system/doc 20 | 21 | 22 | erl_crash.dump 23 | *.ez 24 | *.beam 25 | apis-*.tar.gz 26 | *.tar 27 | 28 | 29 | /config/*.secret.exs 30 | .elixir_ls/ 31 | 32 | 33 | ## Erlang 34 | .eunit 35 | *.o 36 | *.beam 37 | *.plt 38 | erl_crash.dump 39 | .concrete/DEV_MODE 40 | 41 | # rebar 2.x 42 | .rebar 43 | rel/example_project 44 | ebin/*.beam 45 | deps 46 | 47 | # rebar 3 48 | .rebar3 49 | _build/ 50 | _checkouts/ 51 | 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | erlang 27.3.4 2 | elixir 1.17.3-otp-27 3 | rust 1.87.0 4 | rebar 3.24.0 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 BEAM Campus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ExESDB - A BEAM-native Event Store 2 | 3 | `ExESDB` is a BEAM-native Event Store, built on top of the [khepri](https://github.com/rabbitmq/khepri) and [ra](https://github.com/rabbitmq/ra) subsystems. 4 | 5 | ## Motivation 6 | 7 | One of the arguments for BEAM development is that it comes "batteries included". Be it caching, storage, pub/sub, observability etc... the Erlang ecosystem always has the option to avoid external dependencies. 8 | 9 | For Event Sourcing use cases however, the Event Store is often a separate service. 10 | 11 | This project is an attempt at addressing this point, by building further upon the work of the `rabbitmq/khepri` and `rabbitmq/ra` subsystems. 
12 | 13 | ## Contents 14 | 15 | - [Getting Started](system/guides/getting_started.md) 16 | 17 | ## Releases 18 | 19 | - [On Hex](https://hex.pm/packages/ex_esdb) 20 | - [Release Documentation](https://hexdocs.pm/ex_esdb/index.html) 21 | -------------------------------------------------------------------------------- /dev-env/.env: -------------------------------------------------------------------------------- 1 | NATS_SURVEYOR_SERVERS=nats://a:a@liftbridge:4222 2 | NATS_SURVEYOR_CREDS=./SYS.creds 3 | NATS_SURVEYOR_SERVER_COUNT=1 4 | 5 | PROMETHEUS_STORAGE=./storage/prometheus 6 | 7 | SURVEYOR_DOCKER_TAG=latest 8 | PROMETHEUS_DOCKER_TAG=latest 9 | GRAFANA_DOCKER_TAG=latest 10 | 11 | NATS_URI=nats-streaming 12 | NATS_URIS=nats-streaming:4222 13 | NATS_HOST=nats-streaming 14 | NATS_PORT=4222 15 | 16 | EVENTSTORE_URI=tcp://eventstore:1113 17 | 18 | COUCH_LOCAL_HOST=couch 19 | COUCH_LOCAL_USER=root 20 | COUCH_LOCAL_PWD=dev 21 | -------------------------------------------------------------------------------- /dev-env/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 R.G. Lefever 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /dev-env/README.md: -------------------------------------------------------------------------------- 1 | # Compose Dev Stack 2 | 3 | - A stack of back-end services for local development purposes using docker compose. 4 | -------------------------------------------------------------------------------- /dev-env/backend/excalidraw/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM excalidraw/excalidraw:latest 2 | -------------------------------------------------------------------------------- /dev-env/backend/livebook/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/livebook-dev/livebook 2 | 3 | -------------------------------------------------------------------------------- /dev-env/backend/postgresql/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:12.19-bullseye 2 | 3 | COPY init/init-db.sql /docker-entrypoint-initdb.d/ -------------------------------------------------------------------------------- /dev-env/backend/postgresql/app.env: -------------------------------------------------------------------------------- 1 | POSTGRES_PASSWORD=dev 2 | POSTGRES_USER=dev 3 | POSTGRES_DB=macula_dev -------------------------------------------------------------------------------- /dev-env/backend/postgresql/init/init-db.sql: -------------------------------------------------------------------------------- 1 | -- For roles, you can use a similar approach: 2 | DO $$ BEGIN IF NOT EXISTS ( 3 | SELECT 4 | FROM pg_roles 5 | WHERE rolname = 'admin' 6 | ) 
THEN CREATE ROLE admin WITH LOGIN PASSWORD 'admin' SUPERUSER; 7 | END IF; 8 | END $$; 9 | 10 | DO $$ BEGIN IF NOT EXISTS ( 11 | SELECT 12 | FROM pg_roles 13 | WHERE rolname = 'dev' 14 | ) THEN CREATE ROLE dev WITH LOGIN PASSWORD 'dev' SUPERUSER; 15 | END IF; 16 | END $$; 17 | 18 | DO $$ BEGIN IF NOT EXISTS ( 19 | SELECT 20 | FROM pg_roles 21 | WHERE rolname = 'swai_dev' 22 | ) THEN CREATE ROLE swai_dev WITH LOGIN PASSWORD 'swai_dev' SUPERUSER; 23 | END IF; 24 | END $$; 25 | 26 | DO $$ BEGIN IF NOT EXISTS ( 27 | SELECT 28 | FROM pg_roles 29 | WHERE rolname = 'logatron_dev' 30 | ) THEN CREATE ROLE logatron_dev WITH LOGIN PASSWORD 'logatron_dev' SUPERUSER; 31 | END IF; 32 | END $$; 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /dev-env/backend/postgresql/swai_app.env: -------------------------------------------------------------------------------- 1 | POSTGRES_PASSWORD=swai_dev 2 | POSTGRES_USER=swai_dev 3 | POSTGRES_DB=swai_dev 4 | -------------------------------------------------------------------------------- /dev-env/backend/watchtower/.env: -------------------------------------------------------------------------------- 1 | ## WatchTower Settings 2 | #REPO_USER=logatron-cid 3 | #REPO_PASS=rl 4 | DOCKER_HOST=https://registry.macula.io 5 | #WATCHTOWER_NOTIFICATIONS=slack 6 | #WATCHTOWER_NOTIFICATION_SLACK_HOOK_URL=https://hooks.slack.com/services/ 7 | -------------------------------------------------------------------------------- /dev-env/backend/watchtower/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM v2tec/watchtower -------------------------------------------------------------------------------- /dev-env/backend/xoom/designer/.env: -------------------------------------------------------------------------------- 1 | VLINGO_XOOM_DESIGNER_SERVER_PORT=19090 2 | VLINGO_XOOM_DESIGNER_ENV=CONTAINER 3 | 
SCHEMATA_SERVICE_NAME=xoom-schemata 4 | SCHEMATA_SERVICE_PORT=9019 5 | -------------------------------------------------------------------------------- /dev-env/backend/xoom/schemata-db/.env: -------------------------------------------------------------------------------- 1 | POSTGRES_DB=xoom_schemata 2 | POSTGRES_USER=xoom_test 3 | POSTGRES_PASSWORD=vlingo123 -------------------------------------------------------------------------------- /dev-env/backend/xoom/schemata/.env: -------------------------------------------------------------------------------- 1 | XOOM_SCHEMATA_PORT=9019 2 | XOOM_ENV=env 3 | XOOM_SCHEMATA_DB_URL=jdbc:postgresql://schemata-db/ 4 | -------------------------------------------------------------------------------- /dev-env/build-clique1.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | docker-compose -f ex-esdb-clique1.yaml \ 4 | build \ 5 | --no-cache \ 6 | --progress=plain 7 | -------------------------------------------------------------------------------- /dev-env/clean-docker.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | docker container stop $(docker container list -qa) 3 | docker rmi -f $(docker images -qa) 4 | if [ "$1" = "-v" ]; 5 | then 6 | docker system prune -f --volumes 7 | fi -------------------------------------------------------------------------------- /dev-env/deploy-ex-esdb-clique1.yaml: -------------------------------------------------------------------------------- 1 | networks: 2 | esdb-net: 3 | name: esdb-net 4 | driver: overlay 5 | 6 | volumes: 7 | esdb0-data: 8 | driver: local 9 | driver_opts: 10 | type: none 11 | o: bind 12 | device: /volume/ex-esdb/data0 13 | esdb1-data: 14 | driver: local 15 | driver_opts: 16 | type: none 17 | o: bind 18 | device: /volume/ex-esdb/data1 19 | esdb2-data: 20 | driver: local 21 | driver_opts: 22 | type: none 23 | o: bind 24 | device: /volume/ex-esdb/data2 25 | esdb3-data: 26 | driver: local 27 | driver_opts: 28 | type: none 29 | o: bind 30 | device: /volume/ex-esdb/data3 31 | esdb4-data: 32 | driver: local 33 | driver_opts: 34 | type: none 35 | o: bind 36 | device: /volume/ex-esdb/data4 37 | 38 | services: 39 | ex-esdb0: 40 | image: local/ex-esdb 41 | hostname: node0 42 | container_name: ex-esdb0 43 | volumes: 44 | - esdb0-data:/data 45 | networks: 46 | - esdb-net 47 | environment: 48 | EX_ESDB_STORE_ID: "reg_gh" 49 | EX_ESDB_COOKIE: "reg_gh_clique1" 50 | EX_ESDB_SEED_NODES: "ex_esdb@node0" 51 | stop_grace_period: 30s 52 | deploy: 53 | replicas: 1 54 | restart_policy: 55 | condition: on-failure 56 | window: 120s 57 | max_attempts: 3 58 | delay: 10s 59 | 60 | ex-esdb1: 61 | image: local/ex-esdb 62 | hostname: node1 63 | container_name: ex-esdb1 64 | volumes: 65 | - esdb1-data:/data 66 | networks: 67 | - esdb-net 68 | environment: 69 | EX_ESDB_STORE_ID: "reg_gh" 70 | EX_ESDB_COOKIE: "reg_gh_clique1" 71 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node2,ex_esdb@node3,ex_esdb@node4" 72 | stop_grace_period: 30s 73 | deploy: 74 | replicas: 1 75 | restart_policy: 76 | condition: on-failure 77 | window: 120s 78 | 
max_attempts: 3 79 | delay: 10s 80 | 81 | ex-esdb2: 82 | image: local/ex-esdb 83 | hostname: node2 84 | container_name: ex-esdb2 85 | volumes: 86 | - esdb2-data:/data 87 | networks: 88 | - esdb-net 89 | environment: 90 | EX_ESDB_STORE_ID: "reg_gh" 91 | EX_ESDB_COOKIE: "reg_gh_clique1" 92 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node3,ex_esdb@node4" 93 | stop_grace_period: 30s 94 | deploy: 95 | replicas: 1 96 | restart_policy: 97 | condition: on-failure 98 | window: 120s 99 | max_attempts: 3 100 | delay: 10s 101 | 102 | ex-esdb3: 103 | image: local/ex-esdb 104 | hostname: node3 105 | container_name: ex-esdb3 106 | volumes: 107 | - esdb3-data:/data 108 | networks: 109 | - esdb-net 110 | environment: 111 | EX_ESDB_STORE_ID: "reg_gh" 112 | EX_ESDB_COOKIE: "reg_gh_clique1" 113 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node2,ex_esdb@node4" 114 | stop_grace_period: 30s 115 | deploy: 116 | replicas: 1 117 | restart_policy: 118 | condition: on-failure 119 | window: 120s 120 | max_attempts: 3 121 | delay: 10s 122 | 123 | ex-esdb4: 124 | image: local/ex-esdb 125 | hostname: node4 126 | container_name: ex-esdb4 127 | volumes: 128 | - esdb4-data:/data 129 | networks: 130 | - esdb-net 131 | environment: 132 | EX_ESDB_STORE_ID: "reg_gh" 133 | EX_ESDB_COOKIE: "reg_gh_clique1" 134 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node2,ex_esdb@node3" 135 | stop_grace_period: 30s 136 | deploy: 137 | replicas: 1 138 | restart_policy: 139 | condition: on-failure 140 | window: 120s 141 | max_attempts: 3 142 | delay: 10s 143 | -------------------------------------------------------------------------------- /dev-env/ex-esdb-cluster.yaml: -------------------------------------------------------------------------------- 1 | networks: 2 | ex-esdb-net: 3 | name: ex-esdb-net 4 | driver: bridge 5 | 6 | volumes: 7 | ex-esdb0-data: 8 | driver: local 9 | driver_opts: 10 | type: none 11 | o: bind 12 | device: /volume/ex-esdb/data0 13 | 14 | ex-esdb1-data: 
15 | driver: local 16 | driver_opts: 17 | type: none 18 | o: bind 19 | device: /volume/ex-esdb/data1 20 | 21 | ex-esdb2-data: 22 | driver: local 23 | driver_opts: 24 | type: none 25 | o: bind 26 | device: /volume/ex-esdb/data2 27 | 28 | ex-esdb3-data: 29 | driver: local 30 | driver_opts: 31 | type: none 32 | o: bind 33 | device: /volume/ex-esdb/data3 34 | 35 | ex-esdb4-data: 36 | driver: local 37 | driver_opts: 38 | type: none 39 | o: bind 40 | device: /volume/ex-esdb/data4 41 | 42 | services: 43 | ex-esdb0: 44 | image: local/ex-esdb 45 | build: 46 | context: ../system 47 | hostname: node0 48 | container_name: ex-esdb0 49 | networks: 50 | - ex-esdb-net 51 | volumes: 52 | - ex-esdb0-data:/data 53 | stop_grace_period: 10s 54 | environment: 55 | EX_ESDB_STORE_ID: "reg_gh" 56 | EX_ESDB_COOKIE: "reg_greenhouse_cookie" 57 | EX_ESDB_SEED_NODES: "ex_esdb@node1,ex_esdb@node2,ex_esdb@node3,ex_esdb@node4" 58 | 59 | ex-esdb1: 60 | image: local/ex-esdb 61 | build: 62 | context: ../system 63 | hostname: node1 64 | container_name: ex-esdb1 65 | networks: 66 | - ex-esdb-net 67 | volumes: 68 | - ex-esdb1-data:/data 69 | environment: 70 | EX_ESDB_STORE_ID: "reg_gh" 71 | EX_ESDB_COOKIE: "reg_greenhouse_cookie" 72 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node2,ex_esdb@node3,ex_esdb@node4" 73 | stop_grace_period: 10s 74 | depends_on: 75 | ex-esdb0: 76 | condition: service_healthy 77 | 78 | ex-esdb2: 79 | image: local/ex-esdb 80 | build: 81 | context: ../system 82 | hostname: node2 83 | container_name: ex-esdb2 84 | networks: 85 | - ex-esdb-net 86 | volumes: 87 | - ex-esdb2-data:/data 88 | environment: 89 | EX_ESDB_STORE_ID: "reg_gh" 90 | EX_ESDB_COOKIE: "reg_greenhouse_cookie" 91 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node3,ex_esdb@node4" 92 | stop_grace_period: 10s 93 | depends_on: 94 | ex-esdb1: 95 | condition: service_healthy 96 | 97 | ex-esdb3: 98 | image: local/ex-esdb 99 | build: 100 | context: ../system 101 | hostname: node3 102 | container_name: ex-esdb3 
103 | networks: 104 | - ex-esdb-net 105 | volumes: 106 | - ex-esdb3-data:/data 107 | environment: 108 | EX_ESDB_STORE_ID: "reg_gh" 109 | EX_ESDB_COOKIE: "reg_greenhouse_cookie" 110 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node2,ex_esdb@node4" 111 | stop_grace_period: 10s 112 | depends_on: 113 | ex-esdb2: 114 | condition: service_healthy 115 | 116 | ex-esdb4: 117 | image: local/ex-esdb 118 | build: 119 | context: ../system 120 | hostname: node4 121 | container_name: ex-esdb4 122 | networks: 123 | - ex-esdb-net 124 | volumes: 125 | - ex-esdb4-data:/data 126 | environment: 127 | EX_ESDB_STORE_ID: "reg_gh" 128 | EX_ESDB_COOKIE: "reg_greenhouse_cookie" 129 | EX_ESDB_SEED_NODES: "ex_esdb@node0,ex_esdb@node1,ex_esdb@node2,ex_esdb@node3" 130 | stop_grace_period: 10s 131 | depends_on: 132 | ex-esdb3: 133 | condition: service_healthy 134 | -------------------------------------------------------------------------------- /dev-env/excalidraw.yml: -------------------------------------------------------------------------------- 1 | volumes: 2 | excalidraw: 3 | driver: local 4 | driver_opts: 5 | type: none 6 | device: /volume/excalidraw/data 7 | o: bind 8 | 9 | services: 10 | excalidraw: 11 | image: excalidraw/excalidraw:latest 12 | container_name: excalidraw 13 | ports: 14 | - 80:80 15 | networks: 16 | - dev-net 17 | volumes: 18 | - excalidraw:/excalidraw 19 | # environment: 20 | # - EXCALIDRAW_BASE_URL=http://localhost:3000 21 | # - EXCALIDRAW_PORT=3000 22 | # - EXCALIDRAW_HOST=localhost 23 | -------------------------------------------------------------------------------- /dev-env/livebook.yml: -------------------------------------------------------------------------------- 1 | services: 2 | livebook: 3 | image: local/livebook 4 | build: 5 | context: ./backend/livebook 6 | container_name: livebook 7 | hostname: livebook 8 | networks: 9 | - dev-net 10 | ports: 11 | - 8080:8080 12 | - 8081:8081 13 | environment: 14 | - LIVEBOOK_PASSWORD 15 | 
-------------------------------------------------------------------------------- /dev-env/networks.yml: -------------------------------------------------------------------------------- 1 | networks: 2 | dev-net: 3 | name: dev-net 4 | driver: bridge 5 | -------------------------------------------------------------------------------- /dev-env/push.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | clear 3 | echo '-----------------------------------' 4 | echo pushing version "$1" to main branch 5 | echo '-----------------------------------' 6 | git add . 7 | git commit -a -m "$1" 8 | git push 9 | -------------------------------------------------------------------------------- /dev-env/rm-cont.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | docker container stop $(docker container list -qa) 3 | docker container rm -f $(docker container list -qa) -------------------------------------------------------------------------------- /dev-env/run-clique1.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | ## CLEAR ALL DATA 4 | # echo "Clearing all data" 5 | # sudo rm -rf /volume 6 | docker-compose -f ex-esdb-cluster.yaml \ 7 | down 8 | 9 | ## CACHES 10 | echo "Removing caches folder" 11 | sudo rm -rf /volume/caches 12 | echo "Creating caches folder" 13 | sudo mkdir -p /volume/caches 14 | # ExESDB 15 | echo "removing ExESDB data folders" 16 | sudo rm -rf /volume/ex-esdb 17 | echo "creating ExESDB data folders" 18 | sudo mkdir -p \ 19 | /volume/ex-esdb/data0 \ 20 | /volume/ex-esdb/data1 \ 21 | /volume/ex-esdb/data2 \ 22 | /volume/ex-esdb/data3 \ 23 | /volume/ex-esdb/data4 24 | 25 | sudo chown "$USER" -R /volume/ 26 | 27 | docker-compose -f ex-esdb-cluster.yaml \ 28 | up \ 29 | --remove-orphans \ 30 | --build \ 31 | -d 32 | -------------------------------------------------------------------------------- /dev-env/run-tools.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | ## CLEAR ALL DATA 4 | # echo "Clearing all data" 5 | # sudo rm -rf /volume 6 | 7 | ## EXCALIDRAW 8 | echo "Creating excalidraw folder" 9 | sudo mkdir -p /volume/excalidraw/data 10 | 11 | sudo chown "$USER" -R /volume/ 12 | 13 | docker-compose -f livebook.yml \ 14 | -f excalidraw.yml \ 15 | -f networks.yml \ 16 | down 17 | 18 | docker-compose -f livebook.yml \ 19 | -f excalidraw.yml \ 20 | -f networks.yml \ 21 | up --remove-orphans --build -d 22 | -------------------------------------------------------------------------------- /dev-env/swarm-down.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker swarm leave --force 4 | -------------------------------------------------------------------------------- /dev-env/swarm-up.sh: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | ## CLEAR ALL DATA 4 | # echo "Clearing all data" 5 | # sudo rm -rf /volume 6 | 7 | ## CACHES 8 | echo "Recreating caches folder" 9 | rm -rf /volume/caches 10 | sudo mkdir -p /volume/caches 11 | # ExESDB 12 | echo "Recreating ExESDB data folders" 13 | sudo rm -rf /volume/ex-esdb 14 | sudo mkdir -p \ 15 | /volume/ex-esdb/data0 \ 16 | /volume/ex-esdb/data1 \ 17 | /volume/ex-esdb/data2 \ 18 | /volume/ex-esdb/data3 \ 19 | /volume/ex-esdb/data4 20 | 21 | sudo chown "$USER" -R /volume/ 22 | 23 | docker swarm leave --force 24 | 25 | docker network rm esdb-net 26 | 27 | docker build -t local/ex-esdb ../system 28 | 29 | docker swarm init 30 | 31 | docker stack rm ex-esdb 32 | 33 | docker stack deploy \ 34 | -c deploy-ex-esdb-clique1.yaml \ 35 | -d \ 36 | ex-esdb 37 | -------------------------------------------------------------------------------- /dev-env/watchtower.yml: -------------------------------------------------------------------------------- 1 | services: 2 | watchtower-svc: 3 | image: local/watchtower 4 | build: 5 | context: ./backend/watchtower 6 | networks: 7 | - dev-net 8 | restart: always 9 | container_name: watchtower 10 | command: --interval 30 11 | # environment: 12 | # DOCKER_HOST: "https://docker.io" 13 | # REPO_USER: "${LOGATRON_CID_USER}" 14 | # REPO_PASS: "${LOGATRON_CID_PWD}" 15 | volumes: 16 | - /var/run/docker.sock:/var/run/docker.sock 17 | -------------------------------------------------------------------------------- /dev-env/xoom-designer.yml: -------------------------------------------------------------------------------- 1 | services: 2 | designer: 3 | image: vlingo/xoom-designer:1.11.0 4 | container_name: xoom-designer 5 | networks: 6 | - dev-net 7 | ports: 8 | - 19090:19090 9 | env_file: 10 | - ./backend/xoom/designer/.env 11 | 12 | schemata: 13 | image: vlingo/xoom-schemata:1.11.0 14 | container_name: xoom-schemata 15 | networks: 16 | - dev-net 17 | ports: 18 | - 9019:9019 19 | env_file: 20 | - ./backend/xoom/schemata/.env 
21 | depends_on: 22 | - schemata-db 23 | 24 | schemata-db: 25 | image: postgres:latest 26 | container_name: xoom-schemata-db 27 | restart: always 28 | networks: 29 | - dev-net 30 | ports: 31 | - 5432:5432 32 | env_file: 33 | - ./backend/xoom/schemata-db/.env 34 | -------------------------------------------------------------------------------- /push.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | clear 3 | echo '------------------------------------' 4 | echo pushing version "$1" to master branch 5 | echo '------------------------------------' 6 | git add . 7 | git commit -m "$1" -a 8 | git push 9 | -------------------------------------------------------------------------------- /system/.credo.exs: -------------------------------------------------------------------------------- 1 | # This file contains the configuration for Credo and you are probably reading 2 | # this after creating it with `mix credo.gen.config`. 3 | # 4 | # If you find anything wrong or unclear in this file, please report an 5 | # issue on GitHub: https://github.com/rrrene/credo/issues 6 | # 7 | %{ 8 | # 9 | # You can have as many configs as you like in the `configs:` field. 10 | configs: [ 11 | %{ 12 | # 13 | # Run any config using `mix credo -C `. If no config name is given 14 | # "default" is used. 15 | # 16 | name: "default", 17 | # 18 | # These are the files included in the analysis: 19 | files: %{ 20 | # 21 | # You can give explicit globs or simply directories. 22 | # In the latter case `**/*.{ex,exs}` will be used. 
23 | # 24 | included: [ 25 | "lib/", 26 | "src/", 27 | "test/", 28 | "web/", 29 | "apps/*/lib/", 30 | "apps/*/src/", 31 | "apps/*/test/", 32 | "apps/*/web/" 33 | ], 34 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 35 | }, 36 | # 37 | # Load and configure plugins here: 38 | # 39 | plugins: [], 40 | # 41 | # If you create your own checks, you must specify the source files for 42 | # them here, so they can be loaded by Credo before running the analysis. 43 | # 44 | requires: [], 45 | # 46 | # If you want to enforce a style guide and need a more traditional linting 47 | # experience, you can change `strict` to `true` below: 48 | # 49 | strict: false, 50 | # 51 | # To modify the timeout for parsing files, change this value: 52 | # 53 | parse_timeout: 5000, 54 | # 55 | # If you want to use uncolored output by default, you can change `color` 56 | # to `false` below: 57 | # 58 | color: true, 59 | # 60 | # You can customize the parameters of any check by adding a second element 61 | # to the tuple. 62 | # 63 | # To disable a check put `false` as second element: 64 | # 65 | # {Credo.Check.Design.DuplicatedCode, false} 66 | # 67 | checks: %{ 68 | enabled: [ 69 | # 70 | ## Consistency Checks 71 | # 72 | {Credo.Check.Consistency.ExceptionNames, []}, 73 | {Credo.Check.Consistency.LineEndings, []}, 74 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 75 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 76 | {Credo.Check.Consistency.SpaceInParentheses, []}, 77 | {Credo.Check.Consistency.TabsOrSpaces, []}, 78 | 79 | # 80 | ## Design Checks 81 | # 82 | # You can customize the priority of any check 83 | # Priority values are: `low, normal, high, higher` 84 | # 85 | {Credo.Check.Design.AliasUsage, 86 | [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]}, 87 | {Credo.Check.Design.TagFIXME, []}, 88 | # You can also customize the exit_status of each check. 
89 | # If you don't want TODO comments to cause `mix credo` to fail, just 90 | # set this value to 0 (zero). 91 | # 92 | {Credo.Check.Design.TagTODO, [exit_status: 2]}, 93 | 94 | # 95 | ## Readability Checks 96 | # 97 | {Credo.Check.Readability.AliasOrder, []}, 98 | {Credo.Check.Readability.FunctionNames, []}, 99 | {Credo.Check.Readability.LargeNumbers, []}, 100 | {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]}, 101 | {Credo.Check.Readability.ModuleAttributeNames, []}, 102 | {Credo.Check.Readability.ModuleDoc, []}, 103 | {Credo.Check.Readability.ModuleNames, []}, 104 | {Credo.Check.Readability.ParenthesesInCondition, []}, 105 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, 106 | {Credo.Check.Readability.PipeIntoAnonymousFunctions, []}, 107 | {Credo.Check.Readability.PredicateFunctionNames, []}, 108 | {Credo.Check.Readability.PreferImplicitTry, []}, 109 | {Credo.Check.Readability.RedundantBlankLines, []}, 110 | {Credo.Check.Readability.Semicolons, []}, 111 | {Credo.Check.Readability.SpaceAfterCommas, []}, 112 | {Credo.Check.Readability.StringSigils, []}, 113 | {Credo.Check.Readability.TrailingBlankLine, []}, 114 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 115 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 116 | {Credo.Check.Readability.VariableNames, []}, 117 | {Credo.Check.Readability.WithSingleClause, []}, 118 | 119 | # 120 | ## Refactoring Opportunities 121 | # 122 | {Credo.Check.Refactor.Apply, []}, 123 | {Credo.Check.Refactor.CondStatements, []}, 124 | {Credo.Check.Refactor.CyclomaticComplexity, []}, 125 | {Credo.Check.Refactor.FilterCount, []}, 126 | {Credo.Check.Refactor.FilterFilter, []}, 127 | {Credo.Check.Refactor.FunctionArity, []}, 128 | {Credo.Check.Refactor.LongQuoteBlocks, []}, 129 | {Credo.Check.Refactor.MapJoin, []}, 130 | {Credo.Check.Refactor.MatchInCondition, []}, 131 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 132 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 133 | 
{Credo.Check.Refactor.Nesting, []}, 134 | {Credo.Check.Refactor.RedundantWithClauseResult, []}, 135 | {Credo.Check.Refactor.RejectReject, []}, 136 | {Credo.Check.Refactor.UnlessWithElse, []}, 137 | {Credo.Check.Refactor.WithClauses, []}, 138 | 139 | # 140 | ## Warnings 141 | # 142 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []}, 143 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 144 | {Credo.Check.Warning.Dbg, []}, 145 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 146 | {Credo.Check.Warning.IExPry, []}, 147 | {Credo.Check.Warning.IoInspect, []}, 148 | {Credo.Check.Warning.MissedMetadataKeyInLoggerConfig, []}, 149 | {Credo.Check.Warning.OperationOnSameValues, []}, 150 | {Credo.Check.Warning.OperationWithConstantResult, []}, 151 | {Credo.Check.Warning.RaiseInsideRescue, []}, 152 | {Credo.Check.Warning.SpecWithStruct, []}, 153 | {Credo.Check.Warning.UnsafeExec, []}, 154 | {Credo.Check.Warning.UnusedEnumOperation, []}, 155 | {Credo.Check.Warning.UnusedFileOperation, []}, 156 | {Credo.Check.Warning.UnusedKeywordOperation, []}, 157 | {Credo.Check.Warning.UnusedListOperation, []}, 158 | {Credo.Check.Warning.UnusedPathOperation, []}, 159 | {Credo.Check.Warning.UnusedRegexOperation, []}, 160 | {Credo.Check.Warning.UnusedStringOperation, []}, 161 | {Credo.Check.Warning.UnusedTupleOperation, []}, 162 | {Credo.Check.Warning.WrongTestFileExtension, []} 163 | ], 164 | disabled: [ 165 | # 166 | # Checks scheduled for next check update (opt-in for now) 167 | {Credo.Check.Refactor.UtcNowTruncate, []}, 168 | 169 | # 170 | # Controversial and experimental checks (opt-in, just move the check to `:enabled` 171 | # and be sure to use `mix credo --strict` to see low priority checks) 172 | # 173 | {Credo.Check.Consistency.MultiAliasImportRequireUse, []}, 174 | {Credo.Check.Consistency.UnusedVariableNames, []}, 175 | {Credo.Check.Design.DuplicatedCode, []}, 176 | {Credo.Check.Design.SkipTestWithoutComment, []}, 177 | {Credo.Check.Readability.AliasAs, []}, 178 
| {Credo.Check.Readability.BlockPipe, []}, 179 | {Credo.Check.Readability.ImplTrue, []}, 180 | {Credo.Check.Readability.MultiAlias, []}, 181 | {Credo.Check.Readability.NestedFunctionCalls, []}, 182 | {Credo.Check.Readability.OneArityFunctionInPipe, []}, 183 | {Credo.Check.Readability.OnePipePerLine, []}, 184 | {Credo.Check.Readability.SeparateAliasRequire, []}, 185 | {Credo.Check.Readability.SingleFunctionToBlockPipe, []}, 186 | {Credo.Check.Readability.SinglePipe, []}, 187 | {Credo.Check.Readability.Specs, []}, 188 | {Credo.Check.Readability.StrictModuleLayout, []}, 189 | {Credo.Check.Readability.WithCustomTaggedTuple, []}, 190 | {Credo.Check.Refactor.ABCSize, []}, 191 | {Credo.Check.Refactor.AppendSingleItem, []}, 192 | {Credo.Check.Refactor.DoubleBooleanNegation, []}, 193 | {Credo.Check.Refactor.FilterReject, []}, 194 | {Credo.Check.Refactor.IoPuts, []}, 195 | {Credo.Check.Refactor.MapMap, []}, 196 | {Credo.Check.Refactor.ModuleDependencies, []}, 197 | {Credo.Check.Refactor.NegatedIsNil, []}, 198 | {Credo.Check.Refactor.PassAsyncInTestCases, []}, 199 | {Credo.Check.Refactor.PipeChainStart, []}, 200 | {Credo.Check.Refactor.RejectFilter, []}, 201 | {Credo.Check.Refactor.VariableRebinding, []}, 202 | {Credo.Check.Warning.LazyLogging, []}, 203 | {Credo.Check.Warning.LeakyEnvironment, []}, 204 | {Credo.Check.Warning.MapGetUnsafePass, []}, 205 | {Credo.Check.Warning.MixEnv, []}, 206 | {Credo.Check.Warning.UnsafeToAtom, []} 207 | 208 | # {Credo.Check.Refactor.MapInto, []}, 209 | 210 | # 211 | # Custom checks can be created using `mix credo.gen.check`. 
212 | # 213 | ] 214 | } 215 | } 216 | ] 217 | } 218 | -------------------------------------------------------------------------------- /system/.formatter.exs: -------------------------------------------------------------------------------- 1 | 2 | [ 3 | plugins: [ 4 | Phoenix.LiveView.HTMLFormatter, # Existing 5 | ], 6 | inputs: [ 7 | "mix.exs", 8 | "config/*.exs", 9 | "{lib,test}/**/*.{ex,exs}", # Explicit Elixir pattern 10 | "src/**/*.{erl,hrl}" # Add Erlang file patterns 11 | ], 12 | subdirectories: ["./*"] 13 | ] 14 | 15 | -------------------------------------------------------------------------------- /system/ADR.md: -------------------------------------------------------------------------------- 1 | # Architecture Decision Records 2 | 3 | ## 2025.05.24 4 | 5 | ### Emitter pools must run on the Ra leader. 6 | 7 | Given that Khepri triggers are executed on the Ra leader, emitter pools must run on the Ra leader. 8 | In order to achieve this, it is necessary that a separate process exists that monitors the Cluster and subscribes to Ra's leadership changes, by starting relevant emitter pools on the new leader. 9 | 10 | ## 2025.05.02 11 | 12 | ### Triggers will obtain Emitter Pids from :pg (Process Groups) 13 | 14 | Experimentation with :khepri's trigger model has shown that it is advised to use as few dependencies as possible, when defining the trigger functions. This is because :khepri uses :horus to decompile such functions and build a separate module. For this reason, we limit the dependencies for these trigger functions to :pg (Process Groups), which is an :erlang native module that allows for inter-process communication. 15 | 16 | ## 2025.04.16 17 | 18 | ### Subscriptions will be managed via a registry. 19 | 20 | `subscribe_to` and `unsubscribe` will interact with the :subscriptions branch of the store, but instead of storing the pid of the subscriber, the subscriber will be stored in a registry. 
21 | 22 | ## 2025.04.13 23 | 24 | ### Each Store will contain separate branches for Streams, Subscriptions, and Projections 25 | 26 | - `:streams` will be used to store the events that are being read from and written to the store. 27 | - `:subscriptions` will be used to store the subscription information of so-called `Persistent Subscriptions`. 28 | - `:projections` can best be thought of as stored procedures that are used to transform the events in the `Streams` into a different format or to enrich the streams with secondary or derived events, to name a few possible use cases. 29 | 30 | Thus: 31 | 32 | ```mono 33 | :khepri 34 | | 35 | +-:manage_orders 36 | | 37 | +-:streams 38 | | 39 | +-:subscriptions 40 | | 41 | +-:projections 42 | ``` 43 | -------------------------------------------------------------------------------- /system/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## version 0.0.9-alpha 4 | 5 | ### 2025.05.04 6 | 7 | #### Added support for Subscriptions 8 | 9 | - `ExESDB.Subscriptions` module 10 | - `func_registrations.exs` file 11 | - emitter trigger in `khepri` now only uses the `erlang`-native :pg library (process groups) 12 | 13 | #### Added support for Commanded 14 | 15 | - `ExESDB.Commanded.Adapter` module 16 | - `ExESDB.Commanded.Mapper` module 17 | 18 | ## version 0.0.8-alpha 19 | 20 | ### 2025.04.13 21 | 22 | - Added `ExESDB.EventStore.stream_forward/4` function 23 | - Added `BeamCampus.ColorFuncs` module 24 | - Added `ExESDB.Commanded.Adapter` module 25 | - Refactored `ExESDB.EventStreamReader` and `ExESDB.EventStreamWriter` modules: 26 | - Streams are now read and written using the `ExESDB.Streams` module 27 | - Removed `ExESDB.EventStreamReader` module 28 | - Removed `ExESDB.EventStreamWriter` module 29 | 30 | ## version 0.0.7-alpha 31 | 32 | ## version 0.0.1-alpha 33 | 34 | ### 2025.03.25 35 | 36 | - Initial release 37 | 
-------------------------------------------------------------------------------- /system/Dockerfile: -------------------------------------------------------------------------------- 1 | ################# Variables ################ 2 | #ARG ELIXIR_VERSION=1.17.2 3 | #ARG OTP_VERSION=27.0.1 4 | #ARG OS_VERSION=bullseye-20240722-slim 5 | # ARG ELIXIR_VERSION=1.17.3 6 | # ARG OTP_VERSION=27.1.2 7 | # ARG OS_VERSION=bullseye-20241111-slim 8 | # ARG OS_TYPE=debian 9 | 10 | ARG ELIXIR_VERSION=1.18.3 11 | ARG OTP_VERSION=27.3 12 | ARG OS_VERSION=bullseye-20250224-slim 13 | ARG OS_TYPE=debian 14 | 15 | ARG BUILDER_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-${OS_TYPE}-${OS_VERSION}" 16 | ARG RUNNER_IMAGE="${OS_TYPE}:${OS_VERSION}" 17 | ########################################## 18 | ################# BUILDER ################ 19 | ########################################## 20 | FROM ${BUILDER_IMAGE} AS builder 21 | 22 | RUN apt-get update -y && \ 23 | apt-get upgrade -y --autoremove && \ 24 | apt-get clean 25 | 26 | # && \ 27 | # rm -f /var/lib/apt/lists/*_* 28 | 29 | RUN apt-get install -y pkg-config openssl curl build-essential git 30 | # prepare build dir 31 | WORKDIR /build_space 32 | 33 | # install hex + rebar 34 | RUN mix local.hex --force && \ 35 | mix local.rebar --force 36 | 37 | # set build ENV 38 | ARG MIX_ENV=prod 39 | ENV MIX_ENV=prod 40 | 41 | ENV OTPROOT=/usr/lib/erlang 42 | ENV ERL_LIBS=/usr/lib/erlang/lib 43 | 44 | # copy sources 45 | COPY lib/ lib/ 46 | COPY src/ src/ 47 | COPY priv/ priv/ 48 | 49 | COPY config/config.exs config/prod.exs config/runtime.exs config/ 50 | COPY mix.exs ./ 51 | #COPY mix.exs mix.lock ./ 52 | # install mix dependencies 53 | RUN MIX_ENV="prod" mix do deps.get --only "prod", deps.update --all, deps.compile 54 | # build assets 55 | RUN MIX_ENV="prod" mix do compile, release ex_esdb 56 | ## Release 57 | 58 | ################################### 59 | ########### RUNTIME ############### 60 | 
################################### 61 | 62 | FROM ${RUNNER_IMAGE} AS ex_esdb 63 | 64 | ENV HOME=/system 65 | ENV MIX_ENV="prod" 66 | ENV PATH="${PATH}" 67 | 68 | ARG EX_ESDB_COOKIE 69 | ARG EX_ESDB_DATA_DIR 70 | ARG EX_ESDB_STORE_ID 71 | 72 | ENV ERTS_VERSION=erts-15.2.3 73 | 74 | RUN echo "!!!!!!!! Building runner image...!!!!!!!" && sleep 2 75 | 76 | RUN apt-get update -y && \ 77 | apt-get upgrade -y --autoremove && \ 78 | apt-get clean && \ 79 | apt-get install -y pkg-config openssl libncurses5 locales ca-certificates curl systemd htop && \ 80 | rm -f /var/lib/apt/lists/*_* 81 | # 82 | # Set the locale 83 | #RUN locale-gen en_US.UTF-8 84 | RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen 85 | # 86 | ENV LANG=en_US.UTF-8 87 | ENV LANGUAGE=en_US:en 88 | ENV LC_ALL=en_US.UTF-8 89 | 90 | WORKDIR /system 91 | 92 | RUN mkdir /data 93 | 94 | RUN chown -R nobody /data 95 | 96 | COPY --from=builder --chown=nobody /build_space/_build/prod/rel/ex_esdb/ ./ 97 | 98 | COPY run-ex-esdb.sh . 99 | COPY check-ex-esdb.sh . 100 | 101 | RUN chmod +x run-ex-esdb.sh check-ex-esdb.sh 102 | 103 | ENV PATH="${PATH}:/system/${ERTS_VERSION}/bin" 104 | 105 | # RUN chown nobody /system 106 | 107 | VOLUME /data 108 | 109 | EXPOSE 4369 9000-9100 110 | 111 | RUN echo $EX_ESDB_COOKIE > ~/.erlang.cookie 112 | RUN chmod 400 ~/.erlang.cookie 113 | 114 | HEALTHCHECK --interval=10s --timeout=3s \ 115 | CMD ["./check-ex-esdb.sh"] 116 | 117 | CMD ["./run-ex-esdb.sh"] 118 | -------------------------------------------------------------------------------- /system/SUBSCRIPTIONS.md: -------------------------------------------------------------------------------- 1 | # Subscriptions Mechanism 2 | 3 | ## The Subscriptions Process 4 | 5 | - Subscriptions are based on actions that are performed on the event store. 6 | - Khepri supports Trigger functions to be defined in the store. 7 | - Such Triggers are guarded by a Event filter. 
8 | - At this moment, Khepri only supports triggers at the level of Path changes (create, update, delete). 9 | 10 | -- GIVEN: a Khepri Store 11 | -- WHEN: a Subscription is registered 12 | -- THEN: 13 | --- an `EmitterGroup` is created, that will spawn a number of `EmitterWorker` processes. 14 | 15 | --- 16 | -------------------------------------------------------------------------------- /system/check-ex-esdb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Checking if ex_esdb with store id [$EX_ESDB_STORE_ID] is up on [$(hostname)]...for clique [$EX_ESDB_COOKIE]" 4 | 5 | epmd -names | grep -q ex_esdb && 6 | /system/bin/ex_esdb eval "if :khepri_cluster.is_store_running(:${EX_ESDB_STORE_ID}) do :init.stop(0) else :init.stop(1) end" 7 | # erl -noshell \ 8 | # -name "health@$(hostname)" \ 9 | # -setcookie "$EX_ESDB_COOKIE" \ 10 | # -eval 'case khepri_cluster:is_store_running($EX_ESDB_STORE_ID) of true -> halt(0); _ -> halt(1) end.' 
11 | -------------------------------------------------------------------------------- /system/config/config.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :logger, :console, 4 | format: "$time $metadata[$level] $message\n", 5 | metadata: [:mfa] 6 | 7 | import_config "#{Mix.env()}.exs" 8 | -------------------------------------------------------------------------------- /system/config/dev.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | alias ExESDB.EnVars, as: EnVars 4 | 5 | config :khepri, 6 | log_level: :info, 7 | logger: true 8 | 9 | config :ra, 10 | log_level: :info, 11 | logger: true 12 | 13 | config :logger, :console, 14 | format: "$time ($metadata) [$level] $message\n", 15 | metadata: [:mfa], 16 | level: :info 17 | 18 | config :ex_esdb, :logger, level: :debug 19 | 20 | config :ex_esdb, :khepri, 21 | data_dir: "tmp/reg_gh", 22 | store_id: :reg_gh, 23 | timeout: 10_000, 24 | db_type: :single, 25 | seed_nodes: [], 26 | pub_sub: :ex_esdb_pubsub 27 | -------------------------------------------------------------------------------- /system/config/prod.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :khepri, 4 | log_level: :information, 5 | logger: true 6 | 7 | config :ra, 8 | log_level: :information, 9 | logger: true 10 | 11 | config :logger, :console, 12 | format: "$time ($metadata) [$level] $message\n", 13 | metadata: [:mfa], 14 | level: :debug 15 | 16 | config :ex_esdb, 17 | logger: true, 18 | log_level: :debug 19 | 20 | config :ex_esdb, :khepri, 21 | data_dir: "/data", 22 | store_id: :reg_gh, 23 | timeout: 2_000, 24 | db_type: :cluster, 25 | seed_nodes: [], 26 | pub_sub: :ex_esdb_pubsub 27 | -------------------------------------------------------------------------------- /system/config/runtime.exs: 
-------------------------------------------------------------------------------- 1 | import Config 2 | 3 | alias ExESDB.EnVars, as: EnVars 4 | import ExESDB.Options 5 | 6 | config :ex_esdb, :khepri, 7 | data_dir: data_dir(), 8 | store_id: store_id(), 9 | timeout: timeout(), 10 | db_type: db_type(), 11 | seed_nodes: seed_nodes(), 12 | pub_sub: pub_sub() 13 | -------------------------------------------------------------------------------- /system/config/test.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :ex_unit, 4 | capture_log: false, 5 | assert_receive_timeout: 5_000, 6 | refute_receive_timeout: 1_000, 7 | exclude: [:skip], 8 | logger: true 9 | 10 | config :ex_esdb, :khepri, 11 | data_dir: "tmp/ex_esdb_store", 12 | store_id: :ex_test_store, 13 | timeout: 1_000, 14 | db_type: :single, 15 | seed_nodes: [], 16 | pub_sub: :ex_esdb_pub_sub 17 | -------------------------------------------------------------------------------- /system/examples/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/beam-campus/ex-esdb/035fd162d64dfc63dae47c8d8a9ad1b011263b5e/system/examples/.keep -------------------------------------------------------------------------------- /system/examples/park_fac/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/beam-campus/ex-esdb/035fd162d64dfc63dae47c8d8a9ad1b011263b5e/system/examples/park_fac/.keep -------------------------------------------------------------------------------- /system/examples/park_fac/readme.md: -------------------------------------------------------------------------------- 1 | # ExESDB Parking Facility Example 2 | 3 | This is an example of how to use ExESDB to manage the processes in a parking facility. 
4 | 5 | The following processes will be included in this example: 6 | 7 | - Arrive-to-Service (Vehicle) 8 | - Service-to-Depart (Vehicle) 9 | - Pay-to-Unlock (Person) 10 | - Leave-to-Visit (Person) 11 | - Enter-to-Interact (Person) 12 | 13 | ## Processes 14 | 15 | ### Arrive-to-Service 16 | 17 | ### Service-to-Depart 18 | 19 | ### Pay-to-Unlock 20 | 21 | ### Leave-to-Visit 22 | 23 | ### Enter-to-Interact 24 | -------------------------------------------------------------------------------- /system/guides/getting_started.md: -------------------------------------------------------------------------------- 1 | # Getting Started with ExESDB 2 | 3 | ## Introduction 4 | 5 | Event Sourcing with CQRS is a technique for building applications that are based on an immutable log of events, which makes it ideal for building concurrent, distributed systems. 6 | 7 | Though it is gaining popularity, the options for storing these events are limited and require specialized services like Kurrent (aka Greg's EventStore) or AxonIQ. 8 | 9 | One of the strong points of the BEAM is that it comes 'batteries included': there are BEAM-native libraries for many common tasks, like: storage, pub/sub, caching, logging, telemetry, etc. 10 | 11 | `ExESDB` is an attempt to create a BEAM-native Event Store written in Elixir, building further upon the [Khepri](https://github.com/rabbitmq/khepri) library, which in turn builds upon the [Ra](https://github.com/rabbitmq/ra) library. 12 | 13 | ## Status 14 | 15 | **This is a work in progress** 16 | 17 | The project is in an early stage of development, and is not ready for production use. 18 | 19 | Source code is available on [GitHub](https://github.com/beam-campus/ex-esdb). 20 | 21 | ## Installation 22 | 23 | In your `mix.exs` file: 24 | 25 | ```elixir 26 | def deps do 27 | [ 28 | {:ex_esdb, "~> 0.0.8-alpha"} 29 | ] 30 | end 31 | ``` 32 | 33 | ## Configuration 34 | 35 | 1. 
in your `config/config.exs` file: 36 | 37 | ```elixir 38 | config :ex_esdb, :khepri, 39 | # the directory where the khepri store will be created 40 | data_dir: "/data", 41 | # the id of the khepri store. 42 | store_id: :ex_esdb_store, 43 | # the type of database setup to use 44 | db_type: :single, 45 | # a global timeout in milliseconds 46 | timeout: 10_000, 47 | # a list of seed nodes to connect to 48 | seed_nodes: [], 49 | # the name of the pub/sub module to use 50 | pub_sub: :ex_esdb_pub_sub 51 | 52 | ``` 53 | 54 | 2. from the ENVIRONMENT: 55 | 56 | ```bash 57 | 58 | EX_ESDB_DATA_DIR="/data" 59 | EX_ESDB_STORE_ID=ex_esdb_store 60 | EX_ESDB_DB_TYPE=single 61 | EX_ESDB_TIMEOUT=10000 62 | EX_ESDB_SEED_NODES="" 63 | EX_ESDB_PUB_SUB=ex_esdb_pub_sub 64 | 65 | ``` 66 | 67 | ## Usage 68 | 69 | ```elixir 70 | defmodule MyApp.Application do 71 | use Application 72 | 73 | @impl true 74 | def start(_type, _args) do 75 | opts = ExESDB.Options.app_env() 76 | children = [ 77 | {ExESDB.System, opts}, 78 | ] 79 | 80 | opts = [strategy: :one_for_one, name: MyApp.Supervisor] 81 | Supervisor.start_link(children, opts) 82 | end 83 | 84 | end 85 | ``` 86 | -------------------------------------------------------------------------------- /system/guides/testing.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/beam-campus/ex-esdb/035fd162d64dfc63dae47c8d8a9ad1b011263b5e/system/guides/testing.md -------------------------------------------------------------------------------- /system/lib/app.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.App do 2 | @moduledoc """ 3 | This module is used to start the ExESDB system. 
4 | """ 5 | use Application, 6 | otp_app: :ex_esdb 7 | 8 | alias ExESDB.Options, as: Options 9 | alias ExESDB.Themes, as: Themes 10 | 11 | require Logger 12 | require Phoenix.PubSub 13 | 14 | @impl true 15 | def start(_type, _args) do 16 | opts = Options.app_env() 17 | 18 | children = [ 19 | {ExESDB.System, opts} 20 | ] 21 | 22 | Logger.warning("#{Themes.app(self())} is UP.") 23 | 24 | opts = [strategy: :one_for_one, name: ExESDB.Supervisor] 25 | Supervisor.start_link(children, opts) 26 | end 27 | 28 | @impl true 29 | def stop(state) do 30 | ExESDB.System.stop(:normal) 31 | Logger.warning("STOPPING APP #{inspect(state, pretty: true)}") 32 | end 33 | end 34 | -------------------------------------------------------------------------------- /system/lib/beam_campus/bit_flags.ex: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags do 2 | @moduledoc """ 3 | This module is used to manipulate bitwise flags. 4 | 5 | Inspired by: [Flags in C#](https://stackoverflow.com/questions/8447/what-does-the-flags-enum-attribute-mean-in-c) 6 | 7 | Event sourced systems often rely on flags to indicate the state of the aggregate at any given time. 8 | In essence, an event sourced aggregate is a finite state machine and this state is 9 | often represented as a set of flags, to be used as a shorthand for the state of the aggregate. 10 | 11 | In this module, we define a set of functions that can be used to manipulate these flags. 12 | """ 13 | import Bitwise 14 | 15 | @doc """ 16 | ### Description 17 | 18 | Returns the bitwise OR of two flags. 
19 | In other words, it sets the bit that corresopnds to the flag 20 | 21 | - GIVEN: original_state is `0b00100100` (integer: 36) 22 | - WHEN the flag to be set is `0b01000000` (integer: 64) 23 | - THEN the result is `0b01100100` (integer: 100) 24 | 25 | ### Parameters 26 | 27 | - target: the integer to be set 28 | - flag: the integer to be set 29 | ### Output 30 | 31 | The integer representation of the state after setting the flag 32 | 33 | ### Examples 34 | 35 | iex> BitFlags.set(36, 64) 36 | 100 37 | """ 38 | def set(target, flag) 39 | when is_integer(target) and 40 | is_integer(flag), 41 | do: target ||| flag 42 | 43 | @doc """ 44 | ### Description 45 | 46 | Returns the bitwise AND of two flags. 47 | In other words, it unsets the bit that corresopnds to the flag 48 | 49 | - GIVEN: original_state is `0b01100100` (integer: 100) 50 | - WHEN the flag to be unset is `0b01000000` (integer: 64) 51 | - THEN the result is `0b00100100` (integer: 36) 52 | 53 | ### Parameters 54 | 55 | - `target`: the integer representation of the state 56 | - `flag` : the integer representation of the flag 57 | 58 | ### Output 59 | 60 | The integer representation of the state after unsetting the flag 61 | 62 | ### Examples 63 | 64 | iex> BitFlags.unset(100, 64) 65 | 36 66 | """ 67 | def unset(target, flag) 68 | when is_integer(target) and 69 | is_integer(flag), 70 | do: target &&& bnot(flag) 71 | 72 | @doc """ 73 | Returns the bitwise OR of multiple flags against a given state. 
74 | In other words, it sets the bits that corresopnds to the flags 75 | GIVEN: original_state is `0b00100100` (integer: 36) 76 | WHEN the flags to be set are `[0b01000000, 0b10000000]` (integers: 64, 128) 77 | THEN the result is `0b11100100` (integer: 228) 78 | 79 | Example: 80 | iex> BitFlags.set_all(36, [64, 128]) 81 | 228 82 | """ 83 | def set_all(target, flags) do 84 | Enum.reduce(flags, target, fn flag, acc -> 85 | acc ||| flag 86 | end) 87 | end 88 | 89 | @doc """ 90 | Returns the bitwise AND of multiple flags against a given state. 91 | In other words, it unsets the bits that corresopnds to the flags 92 | GIVEN: original_state is `0b11100100` (integer: 228) 93 | WHEN the flags to be unset are `[0b01000000, 0b10000000]` (integers: 64, 128) 94 | THEN the result is `0b00100100` (integer: 36) 95 | 96 | Example: 97 | iex> BitFlags.unset_all(228, [64, 128]) 98 | 36 99 | """ 100 | def unset_all(target, flags) do 101 | Enum.reduce(flags, target, fn flag, acc -> 102 | acc &&& bnot(flag) 103 | end) 104 | end 105 | 106 | @doc """ 107 | Returs true if the given flag is set in the target state. 108 | In other words, it returns true if the bit that corresponds to the flag is set. 109 | GIVEN: original_state is `0b01100100` (integer: 100) 110 | WHEN the flag to be checked is `0b01000000` (integer: 64) 111 | THEN the result is `true` 112 | 113 | Example: 114 | iex> BitFlags.has?(100, 64) 115 | true 116 | """ 117 | def has?(target, flag), do: (target &&& flag) == flag 118 | 119 | @doc """ 120 | Returns true if the given flag is NOT set in the target state. 121 | In other words, it returns true if the bit that corresponds to the flag is NOT set. 
122 | GIVEN: original_state is `0b01100100` (integer: 100) 123 | WHEN the flag to be checked is `0b01000000` (integer: 64) 124 | THEN the result is `false` 125 | AND WHEN the flag to be checked is `0b00000100` (integer: 8) 126 | THEN the result is `true` 127 | 128 | Example: 129 | iex> BitFlags.has_not?(100, 64) 130 | false 131 | iex> BitFlags.has_not?(100, 8) 132 | true 133 | """ 134 | def has_not?(target, flag), do: (target &&& flag) != flag 135 | 136 | @doc """ 137 | Returns true if ALL the flags are set in the target state. 138 | In other words, it returns true if ALL the bits that correspond to the flags are set. 139 | GIVEN: original_state is `0b01100100` (integer: 100) 140 | WHEN the flags to be checked are `[0b01000000, 0b10000000]` (integers: 64, 128) 141 | THEN the result is `true` 142 | AND WHEN the flags to be checked are `[0b01000000, 0b00000100]` (integers: 64, 8) 143 | THEN the result is `false` 144 | 145 | Example: 146 | iex> BitFlags.has_all?(100, [64, 128]) 147 | true 148 | iex> BitFlags.has_all?(100, [64, 8]) 149 | false 150 | """ 151 | def has_all?(status, flags) do 152 | flags |> Enum.all?(fn flag -> has?(status, flag) end) 153 | end 154 | 155 | @doc """ 156 | Returns true if any of the flags are set in the target state. 157 | In other words, it returns true if any of the bits that correspond to the flags are set. 158 | GIVEN: original_state is `0b01100100` (integer: 100) 159 | WHEN the flags to be checked are `[0b01000000, 0b10000000]` (integers: 64, 128) 160 | THEN the result is `true` 161 | 162 | Example: 163 | iex> BitFlags.has_any?(100, [64, 128]) 164 | true 165 | """ 166 | def has_any?(status, flags) do 167 | flags |> Enum.any?(fn flag -> has?(status, flag) end) 168 | end 169 | 170 | @doc """ 171 | Returns a list of flag descriptions that are set in the target state. 
172 | GIVEN: original_state is `0b01100100` (integer: 100) 173 | AND the flag_map is: 174 | %{ 175 | 0 => "None", 176 | 1 => "Ready", 177 | 2 => "In Progress", 178 | 4 => "Completed", 179 | 8 => "Cancelled", 180 | 16 => "Failed", 181 | 32 => "Archived", 182 | 64 => "Ready to Archive", 183 | 128 => "Ready to Publish", 184 | 256 => "Published", 185 | 512 => "Unpublished", 186 | } 187 | WHEN the target state is `0b01100100` (integer: 100) 188 | THEN the result is `["Completed", "Archived", "Ready to Archive"]` 189 | 190 | Example: 191 | iex> descriptions = 192 | ...> %{ 193 | ...> 0 => "None", 194 | ...> 1 => "Ready", 195 | ...> 2 => "In Progress", 196 | ...> 4 => "Completed", 197 | ...> 8 => "Cancelled", 198 | ...> 16 => "Failed", 199 | ...> 32 => "Archived", 200 | ...> 64 => "Ready to Archive", 201 | ...> 128 => "Ready to Publish", 202 | ...> 256 => "Published", 203 | ...> 512 => "Unpublished", 204 | ...> } 205 | iex> BitFlags.to_list(100, descriptions) 206 | ["Completed", "Archived", "Ready to Archive"] 207 | """ 208 | def to_list(0, flag_map), do: [flag_map[0]] 209 | 210 | def to_list(n, flag_map) when n > 0 do 211 | # Extract keys (powers of 2) from the map and sort them 212 | keys = Map.keys(flag_map) |> Enum.sort() 213 | # Iterate through the keys and collect the flags that are set in `n` 214 | flags = 215 | Enum.reduce(keys, [], fn key, acc -> 216 | if Bitwise.band(n, key) != 0 do 217 | [flag_map[key] | acc] 218 | else 219 | acc 220 | end 221 | end) 222 | 223 | # Since we collected the flags in reverse order, reverse the list before returning 224 | Enum.reverse(flags) 225 | end 226 | 227 | @doc """ 228 | Returns the highest flag in the target state. 
229 | GIVEN: target_state is `0b01100100` (integer: 100) 230 | AND the flag_map is: 231 | %{ 232 | 0 => "None", 233 | 1 => "Ready", 234 | 2 => "In Progress", 235 | 4 => "Completed", 236 | 8 => "Cancelled", 237 | 16 => "Failed", 238 | 32 => "Archived", 239 | 64 => "Ready to Archive", 240 | 128 => "Ready to Publish", 241 | } 242 | THEN the result is `Ready to Archive` 243 | 244 | Example: 245 | iex> descriptions = 246 | ...> %{ 247 | ...> 0 => "None", 248 | ...> 1 => "Ready", 249 | ...> 2 => "In Progress", 250 | ...> 4 => "Completed", 251 | ...> 8 => "Cancelled", 252 | ...> 16 => "Failed", 253 | ...> 32 => "Archived", 254 | ...> 64 => "Ready to Archive", 255 | ...> 128 => "Ready to Publish", 256 | ...> } 257 | %{ 258 | 0 => "None", 259 | 1 => "Ready", 260 | 2 => "In Progress", 261 | 4 => "Completed", 262 | 8 => "Cancelled", 263 | 16 => "Failed", 264 | 32 => "Archived", 265 | 64 => "Ready to Archive", 266 | 128 => "Ready to Publish", 267 | } 268 | iex> BitFlags.highest(100, descriptions) 269 | "Ready to Archive" 270 | """ 271 | def highest(n, flag_map) do 272 | [head | _] = 273 | to_list(n, flag_map) 274 | |> Enum.reverse() 275 | 276 | head 277 | end 278 | 279 | @doc """ 280 | Returns the lowest flag in the bit flag map. 
281 | GIVEN: target_state is `0b01100100` (integer: 100) 282 | AND the flag_map is: 283 | %{ 284 | 0 => "None", 285 | 1 => "Ready", 286 | 2 => "In Progress", 287 | 4 => "Completed", 288 | 8 => "Cancelled", 289 | 16 => "Failed", 290 | 32 => "Archived", 291 | 64 => "Ready to Archive", 292 | 128 => "Ready to Publish", 293 | } 294 | THEN the result is `Ready` 295 | 296 | Example: 297 | iex> descriptions = 298 | ...> %{ 299 | ...> 0 => "None", 300 | ...> 1 => "Ready", 301 | ...> 2 => "In Progress", 302 | ...> 4 => "Completed", 303 | ...> 8 => "Cancelled", 304 | ...> 16 => "Failed", 305 | ...> 32 => "Archived", 306 | ...> 64 => "Ready to Archive", 307 | ...> 128 => "Ready to Publish", 308 | ...> } 309 | %{ 310 | 0 => "None", 311 | 1 => "Ready", 312 | 2 => "In Progress", 313 | 4 => "Completed", 314 | 8 => "Cancelled", 315 | 16 => "Failed", 316 | 32 => "Archived", 317 | 64 => "Ready to Archive", 318 | 128 => "Ready to Publish", 319 | } 320 | iex> BitFlags.lowest(100, descriptions) 321 | "Ready" 322 | """ 323 | def lowest(n, flag_map) do 324 | [head | _] = 325 | to_list(n, flag_map) 326 | 327 | head 328 | end 329 | 330 | @doc """ 331 | Returns a string representation of the bit flags. 
332 | GIVEN: target_state is `0b01100100` (integer: 100) 333 | AND the flag_map is: 334 | %{ 335 | 0 => "None", 336 | 1 => "Ready", 337 | 2 => "In Progress", 338 | 4 => "Completed", 339 | 8 => "Cancelled", 340 | 16 => "Failed", 341 | 32 => "Archived", 342 | 64 => "Ready to Archive", 343 | 128 => "Ready to Publish", 344 | } 345 | THEN the result is `"Completed, Archived, Ready to Archive"` 346 | 347 | Example: 348 | iex> descriptions = 349 | ...> %{ 350 | ...> 0 => "None", 351 | ...> 1 => "Ready", 352 | ...> 2 => "In Progress", 353 | ...> 4 => "Completed", 354 | ...> 8 => "Cancelled", 355 | ...> 16 => "Failed", 356 | ...> 32 => "Archived", 357 | ...> 64 => "Ready to Archive", 358 | ...> 128 => "Ready to Publish", 359 | ...> } 360 | iex> BitFlags.to_string(100, descriptions) 361 | "Completed, Archived, Ready to Archive" 362 | """ 363 | def to_string(n, flag_map) do 364 | to_list(n, flag_map) 365 | |> Enum.join(", ") 366 | end 367 | 368 | defp decompose(0, _, acc), do: Enum.reverse(acc) 369 | 370 | defp decompose(target, power, acc) do 371 | if Bitwise.band(target, power) != 0 do 372 | decompose(target - power, power <<< 1, [power | acc]) 373 | else 374 | decompose(target, power <<< 1, acc) 375 | end 376 | end 377 | 378 | def decompose(target) when target > 0 do 379 | decompose(target, 1, []) 380 | end 381 | end 382 | -------------------------------------------------------------------------------- /system/lib/beam_campus/color_funcs.ex: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.ColorFuncs do 2 | @moduledoc """ 3 | This module is used to manipulate colors. 4 | It offers a set of functions that can be used to 5 | change the color of text in the terminal, using ANSI escape codes. 6 | It covers all color combinations and effects supported by ANSI. 
7 | """ 8 | 9 | # Reset 10 | def reset, do: "\e[0m" 11 | 12 | # Basic colors (0-7) and bright colors (8-15) 13 | @colors [ 14 | black: 0, 15 | red: 1, 16 | green: 2, 17 | yellow: 3, 18 | blue: 4, 19 | magenta: 5, 20 | cyan: 6, 21 | white: 7, 22 | bright_black: 8, 23 | bright_red: 9, 24 | bright_green: 10, 25 | bright_yellow: 11, 26 | bright_blue: 12, 27 | bright_magenta: 13, 28 | bright_cyan: 14, 29 | bright_white: 15 30 | ] 31 | 32 | # Text effects 33 | @effects [ 34 | bold: 1, 35 | dim: 2, 36 | italic: 3, 37 | underline: 4, 38 | blink: 5, 39 | rapid_blink: 6, 40 | reverse: 7, 41 | hidden: 8, 42 | strikethrough: 9 43 | ] 44 | 45 | # Convert lists to maps for easy lookup 46 | @colors_map Enum.into(@colors, %{}) 47 | @effects_map Enum.into(@effects, %{}) 48 | 49 | def tui(fg_color, bg_color, effects \\ []) do 50 | fg_code = Map.get(@colors_map, fg_color) 51 | bg_code = Map.get(@colors_map, bg_color) 52 | 53 | effect_codes = 54 | effects 55 | |> Enum.map_join(";", &Map.get(@effects_map, &1)) 56 | 57 | "\e[38;5;#{fg_code};48;5;#{bg_code};#{effect_codes}m" 58 | end 59 | 60 | # Generate all color combinations 61 | contents = 62 | for {fg_name, fg_code} <- @colors, {bg_name, bg_code} <- @colors do 63 | f_name = String.to_atom("#{fg_name}_on_#{bg_name}") 64 | f_body = "\e[38;5;#{fg_code};48;5;#{bg_code}m" 65 | 66 | IO.puts("Declaring function #{f_name}") 67 | 68 | quote bind_quoted: [f_name: f_name, f_body: f_body] do 69 | def unquote(f_name)(), do: unquote(f_body) 70 | end 71 | end 72 | 73 | Module.eval_quoted(__MODULE__, contents, []) 74 | # Code.eval_quoted(contents, []) 75 | end 76 | -------------------------------------------------------------------------------- /system/lib/commanded/adapter.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter do 2 | @moduledoc """ 3 | An adapter for Commanded to use ExESDB as the event store. 
for reference, see: https://hexdocs.pm/commanded/Commanded.EventStore.Adapter.html
  """
  @behaviour Commanded.EventStore.Adapter

  require Logger
  alias ExESDB.EventStore, as: Store

  alias ExESDB.Snapshots, as: Snapshots
  alias ExESDB.Streams, as: Streams
  alias ExESDB.Subscriptions, as: Subscriptions

  alias ExESDB.Commanded.Mapper, as: Mapper

  @type adapter_meta :: map()
  @type application :: Commanded.Application.t()
  @type config :: Keyword.t()
  @type stream_uuid :: String.t()
  @type start_from :: :origin | :current | integer
  @type expected_version :: :any_version | :no_stream | :stream_exists | non_neg_integer
  @type subscription_name :: String.t()
  @type subscription :: any
  @type subscriber :: pid
  @type source_uuid :: String.t()
  @type error :: term

  @doc """
  Acknowledge receipt and successful processing of an event.

  Currently a logged no-op: `:ok` is returned unconditionally.
  """
  @impl Commanded.EventStore.Adapter
  def ack_event(meta, pid, event) do
    message =
      "ack_event/3 is not implemented for #{inspect(meta)}, #{inspect(pid)}, #{inspect(event)}"

    Logger.warning(message)

    :ok
  end

  @doc """
  Append one or more events to a stream atomically.
  """
  @spec append_to_stream(
          adapter_meta :: map(),
          stream_uuid :: String.t(),
          expected_version :: integer(),
          events :: list(Commanded.EventStore.EventData.t()),
          opts :: Keyword.t()
        ) ::
          :ok | {:error, :wrong_expected_version} | {:error, term()}
  @impl Commanded.EventStore.Adapter
  def append_to_stream(%{store_id: store}, stream_uuid, expected_version, events, _opts) do
    # Translate Commanded event data into the store's NewEvent records
    # before handing them to the Streams subsystem.
    converted_events = Enum.map(events, &Mapper.to_new_event/1)

    Streams.append_events(store, stream_uuid, expected_version, converted_events)
  end

  @doc """
  Return a child spec defining all processes required by the event store.
65 | """ 66 | @spec child_spec( 67 | application(), 68 | Keyword.t() 69 | ) :: 70 | {:ok, [:supervisor.child_spec() | {Module.t(), term} | Module.t()], adapter_meta} 71 | @impl Commanded.EventStore.Adapter 72 | def child_spec(application, opts) do 73 | meta = 74 | opts 75 | |> Keyword.put(:application, application) 76 | |> Map.new() 77 | 78 | {:ok, [ExESDB.System.child_spec(opts)], meta} 79 | end 80 | 81 | @doc """ 82 | Delete a snapshot of the current state of the event store. 83 | """ 84 | @spec delete_snapshot( 85 | adapter_meta :: adapter_meta, 86 | source_uuid :: source_uuid 87 | ) :: :ok | {:error, error} 88 | @impl Commanded.EventStore.Adapter 89 | def delete_snapshot(%{store_id: store}, source_uuid) do 90 | case store 91 | |> Snapshots.delete_snapshot(source_uuid) do 92 | {:ok, _} -> :ok 93 | {:error, reason} -> {:error, reason} 94 | end 95 | end 96 | 97 | @doc """ 98 | Delete a subscription. 99 | """ 100 | @spec delete_subscription( 101 | adapter_meta :: adapter_meta, 102 | arg2 :: stream_uuid, 103 | subscription_name :: subscription_name 104 | ) :: :ok | {:error, error} 105 | @impl Commanded.EventStore.Adapter 106 | def delete_subscription(%{store_id: store}, "$all", subscription_name) do 107 | case store 108 | |> Subscriptions.delete_subscription("$all", subscription_name) do 109 | {:ok, _} -> :ok 110 | {:error, reason} -> {:error, reason} 111 | end 112 | end 113 | 114 | @impl Commanded.EventStore.Adapter 115 | def delete_subscription(%{store_id: store}, stream_uuid, subscription_name) do 116 | case store 117 | |> Subscriptions.delete_subscription(stream_uuid, subscription_name) do 118 | {:ok, _} -> :ok 119 | {:error, reason} -> {:error, reason} 120 | end 121 | end 122 | 123 | @impl Commanded.EventStore.Adapter 124 | def read_snapshot(%{store_id: store}, source_uuid) do 125 | case store 126 | |> Snapshots.read_snapshot(source_uuid) do 127 | {:ok, snapshot_record} -> 128 | {:ok, Mapper.to_snapshot_data(snapshot_record)} 129 | 130 | {:error, reason} -> 131 
| {:error, reason} 132 | end 133 | end 134 | 135 | @doc """ 136 | Record a snapshot of the current state of the event store. 137 | """ 138 | @spec record_snapshot( 139 | adapter_meta :: adapter_meta, 140 | snapshot_data :: any 141 | ) :: :ok | {:error, error} 142 | @impl Commanded.EventStore.Adapter 143 | def record_snapshot(%{store_id: store}, snapshot_data) do 144 | record = Mapper.to_snapshot_record(snapshot_data) 145 | 146 | store 147 | |> Snapshots.record_snapshot(record) 148 | end 149 | 150 | @doc """ 151 | Streams events from the given stream, in the order in which they were 152 | originally written. 153 | """ 154 | @spec stream_forward( 155 | adapter_meta :: adapter_meta, 156 | stream_uuid :: stream_uuid, 157 | start_version :: non_neg_integer, 158 | read_batch_size :: non_neg_integer 159 | ) :: 160 | Enumerable.t() 161 | | {:error, :stream_not_found} 162 | | {:error, error} 163 | @impl Commanded.EventStore.Adapter 164 | def stream_forward(adapter_meta, stream_uuid, start_version, read_batch_size) do 165 | store = Map.get(adapter_meta, :store_id) 166 | 167 | case store 168 | |> Streams.stream_forward(stream_uuid, start_version, read_batch_size) do 169 | {:ok, stream} -> 170 | stream 171 | |> Stream.map(&Mapper.to_recorded_event/1) 172 | 173 | {:error, :stream_not_found} -> 174 | {:error, :stream_not_found} 175 | 176 | {:error, reason} -> 177 | {:error, reason} 178 | end 179 | end 180 | 181 | @doc """ 182 | Create a transient subscription to a single event stream. 183 | 184 | The event store will publish any events appended to the given stream to the 185 | `subscriber` process as an `{:events, events}` message. 186 | 187 | The subscriber does not need to acknowledge receipt of the events. 
188 | """ 189 | @spec subscribe( 190 | adapter_meta :: adapter_meta, 191 | stream :: String.t() 192 | ) :: 193 | :ok | {:error, error} 194 | 195 | @impl Commanded.EventStore.Adapter 196 | def subscribe(adapter_meta, stream) do 197 | Logger.warning( 198 | "subscribe/2 is not implemented for #{inspect(adapter_meta)}, #{inspect(stream)}" 199 | ) 200 | 201 | store = Map.get(adapter_meta, :store_id) 202 | 203 | store 204 | |> Subscriptions.subscribe(stream) 205 | end 206 | 207 | @doc """ 208 | Create a persistent subscription to an event stream. 209 | """ 210 | @spec subscribe_to( 211 | adapter_meta :: adapter_meta, 212 | stream :: String.t(), 213 | subscription_name :: String.t(), 214 | subscriber :: pid, 215 | start_from :: :origin | :current | non_neg_integer, 216 | opts :: Keyword.t() 217 | ) :: 218 | {:ok, subscription} 219 | | {:error, :subscription_already_exists} 220 | | {:error, error} 221 | 222 | @impl Commanded.EventStore.Adapter 223 | def subscribe_to(adapter_meta, stream, subscription_name, subscriber, start_from, opts) do 224 | Logger.warning( 225 | "subscribe_to/7 is ROUGHLY implemented for #{inspect(adapter_meta)}, #{inspect(stream)}, #{inspect(subscription_name)}, #{inspect(subscriber)}, #{inspect(start_from)}, #{inspect(opts)}" 226 | ) 227 | 228 | store = Map.get(adapter_meta, :store_id) 229 | 230 | store 231 | |> Subscriptions.subscribe_to(stream, subscription_name, subscriber, start_from, opts) 232 | 233 | {:error, :not_implemented} 234 | end 235 | 236 | @impl Commanded.EventStore.Adapter 237 | def unsubscribe(adapter_meta, subscription_name) do 238 | Logger.warning( 239 | "unsubscribe/3 is not implemented for #{inspect(adapter_meta)}, #{inspect(subscription_name)}" 240 | ) 241 | 242 | {:error, :not_implemented} 243 | end 244 | end 245 | -------------------------------------------------------------------------------- /system/lib/commanded/mapper.ex: -------------------------------------------------------------------------------- 1 | defmodule 
ExESDB.Commanded.Mapper do
  @moduledoc """
  A mapper for Commanded to use ExESDB as the event store.
  """
  alias Commanded.EventStore.EventData, as: EventData
  alias Commanded.EventStore.RecordedEvent, as: RecordedEvent
  alias Commanded.EventStore.SnapshotData, as: SnapshotData

  alias ExESDB.EventRecord, as: EventRecord
  alias ExESDB.NewEvent, as: NewEvent
  alias ExESDB.Schema.SnapshotRecord, as: SnapshotRecord

  require UUIDv7

  @doc """
  Converts a Commanded EventData struct to an ExESDB NewEvent struct.
  """
  @spec to_new_event(EventData.t()) :: NewEvent.t()
  def to_new_event(event_data)
      when is_struct(event_data, EventData),
      do: %NewEvent{
        # Fresh, time-ordered identifier for the new event.
        event_id: UUIDv7.generate(),
        event_type: event_data.event_type,
        # NOTE(review): content types are hard-coded to 1; presumably this
        # encodes a fixed serialization format — confirm against ex_esdb.proto.
        data_content_type: 1,
        metadata_content_type: 1,
        data: event_data.data,
        metadata: %{
          correlation_id: event_data.correlation_id,
          causation_id: event_data.causation_id,
          # Unknown until the event is actually appended to a stream.
          stream_version: nil
        }
      }

  @doc """
  Converts an ExESDB EventRecord struct to a Commanded RecordedEvent struct.
  """
  @spec to_recorded_event(EventRecord.t()) :: RecordedEvent.t()
  def to_recorded_event(
        %{
          metadata: %{
            stream_version: stream_version,
            correlation_id: correlation_id,
            causation_id: causation_id
          }
        } = event_record
      )
      when is_struct(event_record, EventRecord),
      do: %RecordedEvent{
        event_id: event_record.event_id,
        event_number: event_record.event_number,
        event_type: event_record.event_type,
        data: event_record.data,
        metadata: event_record.metadata,
        # ExESDB names the field `created`; Commanded expects `created_at`.
        created_at: event_record.created,
        stream_id: event_record.event_stream_id,
        stream_version: stream_version,
        correlation_id: correlation_id,
        causation_id: causation_id
      }

  @doc """
  Converts a Commanded SnapshotData struct to an ExESDB SnapshotRecord struct.
63 | """ 64 | @spec to_snapshot_record(SnapshotData.t()) :: SnapshotRecord.t() 65 | def to_snapshot_record(snapshot_data) 66 | when is_struct(snapshot_data, SnapshotData), 67 | do: %SnapshotRecord{ 68 | source_uuid: snapshot_data.source_uuid, 69 | source_version: snapshot_data.source_version, 70 | source_type: snapshot_data.source_type, 71 | data: snapshot_data.data, 72 | metadata: snapshot_data.metadata, 73 | created_at: snapshot_data.created_at, 74 | created_epoch: DateTime.to_unix(snapshot_data.created_at, :millisecond) 75 | } 76 | 77 | @doc """ 78 | Converts an ExESDB SnapshotRecord struct to a Commanded SnapshotData struct. 79 | """ 80 | def to_snapshot_data(snapshot_record) 81 | when is_struct(snapshot_record, SnapshotRecord), 82 | do: %SnapshotData{ 83 | source_uuid: snapshot_record.source_uuid, 84 | source_version: snapshot_record.source_version, 85 | source_type: snapshot_record.source_type, 86 | data: snapshot_record.data, 87 | metadata: snapshot_record.metadata, 88 | created_at: snapshot_record.created_at 89 | } 90 | end 91 | -------------------------------------------------------------------------------- /system/lib/en_vars.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.EnVars do 2 | @moduledoc """ 3 | This module contains the environment variables that are used by ExESDB 4 | """ 5 | @doc """ 6 | Returns the data directory. default: `/data` 7 | """ 8 | def data_dir, do: "EX_ESDB_DATA_DIR" 9 | @doc """ 10 | Returns the khepri store id. default: `ex_esdb_store` 11 | """ 12 | def store_id, do: "EX_ESDB_STORE_ID" 13 | @doc """ 14 | Returns the db type. `single` or `cluster`. default: `single` 15 | """ 16 | def db_type, do: "EX_ESDB_DB_TYPE" 17 | @doc """ 18 | Returns the timeout in milliseconds. default: `10_000` 19 | """ 20 | def timeout, do: "EX_ESDB_TIMEOUT" 21 | @doc """ 22 | Returns the seed nodes. 
default: `nil`
  """
  def seed_nodes, do: "EX_ESDB_SEED_NODES"
  @doc """
  Returns the name of the pub/sub. default: `ex_esdb_pub_sub`
  """
  def pub_sub, do: "EX_ESDB_PUB_SUB"
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb.ex:
--------------------------------------------------------------------------------
defmodule ExESDB do
  @moduledoc """
  ExESDB is a wrapper around the khepri library.
  Its intention is to provide an interface to khepri,
  with a focus on event sourcing.
  """
end

--------------------------------------------------------------------------------
/system/lib/ex_esdb/aggregator.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.Aggregator do
  @moduledoc """
  Aggregates events from an event stream using tagged rules:
  GIVEN: an Event of roughly this format:
  %{
    event_id: "1234567890",
    event_type: "user.birthday_celebrated:v1",
    stream_id: "celebrate-user-birthday-john",
    version: 1,
    data: %{
      name: "John",
      age: {:sum, 1},
      venue: {:overwrite, "New York"}
    },
    timestamp: ~U[2022-01-01 12:00:00Z],
    epoch: 1641013200,
    metadata: %{
      source_id: "1234567890"
    }
  }
  """

  @doc """
  Folds a list of events into a single map.
25 | """ 26 | def foldl(sorted_events, state \\ %{}) do 27 | # Perform left fold (reduce) 28 | sorted_events 29 | |> Enum.reduce(state, fn evt, acc -> apply_evt(acc, evt) end) 30 | end 31 | 32 | defp get_current_num(map, key) do 33 | case map[key] do 34 | {:sum, value} -> value 35 | value when is_number(value) -> value 36 | nil -> 0 37 | 38 | _ -> 0 39 | end 40 | end 41 | 42 | defp apply_evt(state, event) do 43 | # Process each key-value pair in the event 44 | Map.keys(event) 45 | |> Enum.reduce(state, fn key, acc_map -> 46 | value = event[key] 47 | case value do 48 | {:sum, num} when is_number(num) -> 49 | current = get_current_num(acc_map, key) 50 | Map.put(acc_map, key, {:sum, current + num}) 51 | {:overwrite, new_value} -> 52 | acc_map 53 | |> Map.put(key, new_value) 54 | _ -> 55 | Map.put(acc_map, key, value) 56 | end 57 | end) 58 | end 59 | 60 | def finalize_map(tagged_map) do 61 | Map.new(tagged_map, fn 62 | {key, {:sum, value}} -> {key, value} 63 | {key, {:overwrite, value}} -> {key, value} 64 | {key, value} -> {key, value} 65 | end) 66 | end 67 | end 68 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/cluster.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Cluster do 2 | @moduledoc false 3 | use GenServer 4 | 5 | require Logger 6 | 7 | alias ExESDB.Themes, as: Themes 8 | 9 | alias ExESDB.Options, as: Opts 10 | 11 | # defp ping?(node) do 12 | # case :net_adm.ping(node) do 13 | # :pong -> true 14 | # _ -> false 15 | # end 16 | # end 17 | 18 | defp join(store) do 19 | Opts.seed_nodes() 20 | |> Enum.map(fn seed -> 21 | Logger.debug("#{Themes.cluster(node())} => Joining: #{inspect(seed)}") 22 | 23 | store 24 | |> :khepri_cluster.join(seed) 25 | end) 26 | end 27 | 28 | defp leave(store) do 29 | case store |> :khepri_cluster.reset() do 30 | :ok -> 31 | Logger.warning("#{Themes.cluster(node())} => Left cluster") 32 | :ok 33 | 34 | {:error, reason} -> 35 
        Logger.error(
          "#{Themes.cluster(node())} => Failed to leave cluster. reason: #{inspect(reason)}"
        )

        {:error, reason}
    end
  end

  # Logs current khepri cluster membership.
  # NOTE(review): the first clause only matches a bare {:error, reason}; a
  # successful `{:ok, members}` reply falls through to the catch-all and is
  # logged as the whole tuple — confirm that is intended.
  defp members(store) do
    case store
         |> :khepri_cluster.members() do
      {:error, reason} ->
        Logger.error(
          "#{Themes.cluster(node())} => Failed to get store members. reason: #{inspect(reason)}"
        )

      members ->
        Logger.debug("#{Themes.cluster(node())} => members: #{inspect(members, pretty: true)}")
    end
  end

  @impl true
  # One-shot: join the configured seed nodes (scheduled from init/1).
  def handle_info(:join, state) do
    state[:store_id]
    |> join()

    {:noreply, state}
  end

  @impl true
  # Periodic: log membership, then reschedule every 2 * timeout.
  def handle_info(:members, state) do
    state[:store_id]
    |> members()

    Process.send_after(self(), :members, 2 * state[:timeout])
    {:noreply, state}
  end

  @impl true
  # Periodic: poll :ra_leaderboard for the current leader and remember it in
  # state so leadership changes can be detected on the next poll.
  def handle_info(:check_leader, state) do
    timeout = state[:timeout]

    current_leader =
      state
      |> Keyword.get(:current_leader)

    IO.puts("ℹ️ℹ️ LEADER: #{inspect(current_leader)}) ℹ️ℹ️")

    store = state[:store_id]

    {_, leader_node} =
      :ra_leaderboard.lookup_leader(store)

    new_state =
      state
      |> Keyword.put(:current_leader, leader_node)

    if node() == leader_node && current_leader != leader_node do
      IO.puts("⚠️⚠️ LEADER change: [#{inspect(current_leader)}] => [#{inspect(leader_node)}] ⚠️⚠️")
    end

    if node() == leader_node do
      IO.puts("✅✅ I am LEADER! ✅✅")
    end

    Process.send_after(self(), :check_leader, 2 * timeout)
    {:noreply, new_state}
  end

  @impl true
  def handle_info({:leader_changed, old_leader, new_leader}, state) do
    Logger.alert("!! LEADER has changed: [#{inspect(old_leader)}] ~> [#{inspect(new_leader)}] !!")
    {:noreply, state}
  end

  @impl true
  # A monitored process went down: leave the cluster.
  def handle_info({:DOWN, _ref, :process, pid, reason}, state) do
    state[:store_id]
    |> leave()

    Logger.warning("#{Themes.cluster(pid)} going down with reason: #{inspect(reason)}")
    {:noreply, state}
  end

  @impl true
  # Trapped exit (see Process.flag(:trap_exit, true) in init/1): leave the cluster.
  def handle_info({:EXIT, pid, reason}, state) do
    Logger.warning("#{Themes.cluster(pid)} exited with reason: #{inspect(reason)}")

    state[:store_id]
    |> leave()

    {:noreply, state}
  end

  @impl true
  # Catch-all: ignore any other message.
  def handle_info(_, state) do
    {:noreply, state}
  end

  ############# PLUMBING #############
  @impl true
  def terminate(reason, state) do
    Logger.warning("#{Themes.cluster(self())} terminating with reason: #{inspect(reason)}")

    state[:store_id]
    |> leave()

    :ok
  end

  @impl true
  # Schedules the initial :join, the recurring :members poll and the
  # recurring :check_leader poll, staggered by multiples of `timeout`.
  def init(config) do
    timeout = config[:timeout] || 1000
    state = Keyword.put(config, :timeout, timeout)
    Logger.warning("#{Themes.cluster(self())} is UP")
    Process.flag(:trap_exit, true)
    Process.send_after(self(), :join, timeout)
    Process.send_after(self(), :members, 2 * timeout)
    Process.send_after(self(), :check_leader, 5 * timeout)
    {:ok, state}
  end

  def start_link(opts),
    do:
      GenServer.start_link(
        __MODULE__,
        opts,
        name: __MODULE__
      )

  def child_spec(opts),
    do: %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      restart: :permanent,
      shutdown: 10_000,
      type: :worker
    }
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/emitter.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.Emitters do
@moduledoc """ 3 | As part of the ExESDB.System, ExESDB.Emitters is responsible for managing the 4 | lifetime of the Emitter processes. 5 | """ 6 | 7 | alias ExESDB.Options 8 | 9 | # use DynamicSupervisor 10 | # @impl DynamicSupervisor 11 | # def init(_) do 12 | # DynamicSupervisor.init(strategy: :one_for_one) 13 | # end 14 | # 15 | 16 | def start_all_emitter(store, pool_size \\ 3) do 17 | filter = :ex_esdb_filter.by_stream("$all") 18 | start_emitter(store, "$all", pool_size, filter) 19 | end 20 | 21 | def start_stream_emitter(store, stream, pool_size \\ 3) do 22 | filter = :ex_esdb_filter.by_stream(stream) 23 | start_emitter(store, stream, pool_size, filter) 24 | end 25 | 26 | def start_type_emitter(store, type, pool_size \\ 3) do 27 | filter = :ex_esdb_filter.by_event_type(type) 28 | start_emitter(store, type, pool_size, filter) 29 | end 30 | 31 | def start_custom_emitter(store, id, pattern, pool_size \\ 3) do 32 | filter = :ex_esdb_filter.by_event_pattern(pattern) 33 | start_emitter(store, id, pool_size, filter) 34 | end 35 | 36 | defp start_emitter(store, id, pool_size, filter) do 37 | pubsub = Options.pub_sub() 38 | 39 | DynamicSupervisor.start_child( 40 | {:via, PartitionSupervisor, {ExESDB.EmitterPools, self()}}, 41 | {ExESDB.EmitterPool, {store, id, pubsub, pool_size, filter}} 42 | ) 43 | end 44 | end 45 | 46 | defmodule ExESDB.EmitterPool do 47 | @moduledoc """ 48 | As part of the ExESDB.System, 49 | """ 50 | use Supervisor 51 | 52 | require Logger 53 | require ExESDB.Themes, as: Themes 54 | 55 | def start_link({store, id, pubsub, pool_size, filter}) do 56 | Supervisor.start_link(__MODULE__, {store, id, pubsub, pool_size, filter}, 57 | name: :"#{store}:#{id}_emitter_pool" 58 | ) 59 | end 60 | 61 | @impl Supervisor 62 | def init({store, id, pubsub, pool_size, filter}) do 63 | scheduler_id = :erlang.system_info(:scheduler_id) 64 | 65 | emitters = 66 | :ex_esdb_triggers.setup_emitters(store, id, filter, pool_size) 67 | 68 | children = 69 | for emitter <- 
            emitters do
        Supervisor.child_spec({ExESDB.EmitterWorker, {store, id, pubsub, emitter}},
          id: emitter
        )
      end

    Logger.warning("
    Starting Children: \n#{inspect(children)}
    ")

    Logger.warning("#{Themes.emitter_pool(self())} is UP on scheduler #{inspect(scheduler_id)}")
    Supervisor.init(children, strategy: :one_for_one)
  end
end

defmodule ExESDB.EmitterWorker do
  @moduledoc """
  As part of the ExESDB.System,
  the EmitterWorker is responsible for managing the communication
  between the Event Store and the PubSub mechanism.
  """
  use GenServer

  alias Phoenix.PubSub, as: PubSub

  require ExESDB.Themes, as: Themes

  require Logger

  # Re-publishes a store event onto the Phoenix.PubSub topic as
  # {:event_emitted, event}.
  defp emit(pub_sub, topic, event),
    do:
      pub_sub
      |> PubSub.broadcast(topic, {:event_emitted, event})

  @impl GenServer
  def init({store, id, pubsub}) do
    scheduler_id = :erlang.system_info(:scheduler_id)
    topic = :emitter_group.topic(store, id)
    # Join the emitter group so this worker receives the store's
    # :broadcast / :forward_to_local messages for the topic.
    :ok = :emitter_group.join(store, id, self())

    Logger.warning(
      "#{Themes.emitter_worker(self())} for #{inspect(topic, pretty: true)} is UP on scheduler #{inspect(scheduler_id)}"
    )

    # The GenServer state is simply the pubsub name.
    {:ok, pubsub}
  end

  def start_link({store, id, pubsub, emitter}),
    do:
      GenServer.start_link(
        __MODULE__,
        {store, id, pubsub},
        name: emitter
      )

  @impl true
  def handle_info({:broadcast, topic, event}, pubsub) do
    pubsub
    |> emit(topic, event)

    Logger.warning(
      "#{Themes.emitter_worker(self())} BROADCAST #{inspect(event, pretty: true)} => #{inspect(topic, pretty: true)}"
    )

    {:noreply, pubsub}
  end

  @impl true
  def handle_info({:forward_to_local, topic, event}, pubsub) do
    pubsub
    |> emit(topic, event)

    Logger.warning(
      "#{Themes.emitter_worker(self())} FORWARD_TO_LOCAL #{inspect(event, pretty: true)} => #{inspect(topic, pretty: true)}"
    )

    {:noreply, pubsub}
  end

  @impl true
  # Ignore any other message.
  def handle_info(_, pubsub) do
    {:noreply, pubsub}
  end
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/event_projector.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.EventProjector do
  @moduledoc """
  This module contains the event projector functionality
  """
  use GenServer

  alias ExESDB.Themes, as: Themes
  alias Phoenix.PubSub, as: PubSub

  require Logger

  @impl true
  # Events arriving via PubSub are currently only logged.
  def handle_info({:event, event}, state) do
    Logger.info("#{Themes.projector(self())} => Received event: #{inspect(event, pretty: true)}")
    {:noreply, state}
  end

  @impl true
  def handle_info({:DOWN, _ref, :process, _pid, _reason}, state) do
    Logger.info("#{Themes.projector(self())} => Projector DOWN")
    {:noreply, state}
  end

  @impl true
  def handle_info(msg, state) do
    Logger.info("#{Themes.projector(self())} => Unknown message: #{inspect(msg, pretty: true)}")
    {:noreply, state}
  end

  ##### PLUMBING #####
  @impl true
  # Subscribes to the pub/sub topic named after the store id.
  def init(opts) do
    Logger.info("#{Themes.projector(self())} is UP.")
    opts[:pub_sub]
    |> PubSub.subscribe(to_string(opts[:store_id]))
    {:ok, opts}
  end

  def start_link(opts) do
    GenServer.start_link(
      __MODULE__,
      opts,
      name: __MODULE__
    )
  end

  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      type: :worker,
      restart: :permanent,
      shutdown: 5000,
    }
  end

end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/event_store.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.EventStore do
  @moduledoc """
  A GenServer wrapper around :khepri to act as a distributed event store.
  Inspired by EventStoreDB's API.
  """
  use GenServer

  require Logger

  alias ExESDB.Themes, as: Themes

  # Boots the khepri store described by the options keyword list.
  defp start_khepri(opts) do
    store = opts[:store_id]
    timeout = opts[:timeout]
    data_dir = opts[:data_dir]
    :khepri.start(data_dir, store, timeout)
  end

  # Client API
  @doc """
  Get the current state of the store.
  ## Returns

  - `{:ok, state}` if successful.
  - `{:error, reason}` if unsuccessful.

  """
  def get_state(),
    do:
      GenServer.call(
        __MODULE__,
        {:get_state}
      )

  ## CALLBACKS
  @impl true
  def handle_call({:get_state}, _from, state) do
    {:reply, {:ok, state}, state}
  end

  #### PLUMBING
  def child_spec(opts) do
    %{
      id: __MODULE__,
      start: {__MODULE__, :start_link, [opts]},
      restart: :permanent,
      shutdown: 10_000,
      type: :worker
    }
  end

  def start_link(opts),
    do:
      GenServer.start_link(
        __MODULE__,
        opts,
        name: __MODULE__
      )

  # Server Callbacks
  @impl true
  def init(opts) do
    Logger.warning("#{Themes.store(self())} is UP.")
    Process.flag(:trap_exit, true)

    case start_khepri(opts) do
      {:ok, store} ->
        Logger.debug("Started store: #{inspect(store)}")
        {:ok, [config: opts, store: store]}

      # The fallback clause binds the whole failure term (e.g. {:error, r}).
      reason ->
        Logger.error("Failed to start khepri. reason: #{inspect(reason)}")

        # FIX: `{:error, keyword}` is not a valid GenServer.init/1 return value
        # and would crash the starter with {:bad_return_value, ...};
        # stop cleanly with the failure reason instead.
        {:stop, reason}
    end
  end
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/messages.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.Messages do
  @moduledoc """
  Messages are based on official proto file
  """
  use Protobuf,
    from: Path.expand("../../priv/protos/ex_esdb.proto", __DIR__),
    use_package_names: true
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/pubsub.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.PubSub do
  @moduledoc """
  Provides functions for working with event store pub/sub.
  """
  use Horde.Registry
  require Logger
  alias ExESDB.Themes, as: Themes

  def start_link(opts) do
    Horde.Registry.start_link(
      __MODULE__,
      [keys: :unique] ++ opts,
      name: __MODULE__
    )
  end

  @impl true
  def init(init_arg) do
    Logger.warning("#{Themes.pubsub(self())} is UP")

    # Seed the registry membership with this node and all connected nodes.
    [members: members()]
    |> Keyword.merge(init_arg)
    |> Horde.Registry.init()
  end

  defp members do
    [Node.self() | Node.list()]
    |> Enum.map(fn node -> {__MODULE__, node} end)
  end

  def register(name, value) do
    Horde.Registry.register(__MODULE__, name, value)
  end
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/schema/event_record.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.EventRecord do
  @moduledoc false

  use Protobuf, protoc_gen_elixir_version: "0.14.1", syntax: :proto2

  field(:event_stream_id, 1, required: true, type: :string, json_name: "eventStreamId")
  field(:event_number, 2, required: true, type:
:int64, json_name: "eventNumber")
  field(:event_id, 3, required: true, type: :bytes, json_name: "eventId")
  field(:event_type, 4, required: true, type: :string, json_name: "eventType")
  field(:data_content_type, 5, required: true, type: :int32, json_name: "dataContentType")

  field(:metadata_content_type, 6,
    required: true,
    type: :int32,
    json_name: "metadataContentType"
  )

  field(:data, 7, required: true, type: :bytes)
  field(:metadata, 8, optional: true, type: :bytes)
  field(:created, 9, optional: true, type: :int64)
  field(:created_epoch, 10, optional: true, type: :int64, json_name: "createdEpoch")
end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/schema/new_event.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.NewEvent do
  @moduledoc false

  # Proto2 message for a not-yet-appended event: same shape as EventRecord
  # but without the store-assigned stream id, event number and timestamps.
  use Protobuf, protoc_gen_elixir_version: "0.14.1", syntax: :proto2

  field(:event_id, 1, required: true, type: :bytes, json_name: "eventId")
  field(:event_type, 2, required: true, type: :string, json_name: "eventType")
  field(:data_content_type, 3, required: true, type: :int32, json_name: "dataContentType")

  field(:metadata_content_type, 4,
    required: true,
    type: :int32,
    json_name: "metadataContentType"
  )

  field(:data, 5, required: true, type: :bytes)
  field(:metadata, 6, optional: true, type: :bytes)

end
--------------------------------------------------------------------------------
/system/lib/ex_esdb/schema/snapshot_record.ex:
--------------------------------------------------------------------------------
defmodule ExESDB.Schema.SnapshotRecord do
  @moduledoc """
  A snapshot record
  """
  @type t :: %ExESDB.Schema.SnapshotRecord{
          source_uuid: String.t(),
          source_version: non_neg_integer,
          source_type: String.t(),
          data: binary,
metadata: binary, 11 | created_at: DateTime.t(), 12 | created_epoch: non_neg_integer 13 | } 14 | 15 | defstruct [ 16 | :source_uuid, 17 | :source_version, 18 | :source_type, 19 | :data, 20 | :metadata, 21 | :created_at, 22 | :created_epoch 23 | ] 24 | end 25 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/schema/subscripition_record.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Schema.SubscriptionRecord do 2 | @moduledoc false 3 | use Protobuf, protoc_gen_elixir_version: "0.14.1", syntax: :proto2 4 | 5 | field(:subscription_group_name, 1, 6 | required: true, 7 | type: :string, 8 | json_name: "subscriptionGroupName" 9 | ) 10 | 11 | field(:event_stream_id, 2, required: true, type: :string, json_name: "eventStreamId") 12 | field(:resolve_link_tos, 3, required: true, type: :bool, json_name: "resolveLinkTos") 13 | field(:start_from, 4, required: true, type: :int64, json_name: "startFrom") 14 | 15 | field(:message_timeout_milliseconds, 5, 16 | required: true, 17 | type: :int32, 18 | json_name: "messageTimeoutMilliseconds" 19 | ) 20 | 21 | field(:record_statistics, 6, required: true, type: :bool, json_name: "recordStatistics") 22 | field(:live_buffer_size, 7, required: true, type: :int32, json_name: "liveBufferSize") 23 | field(:read_batch_size, 8, required: true, type: :int32, json_name: "readBatchSize") 24 | field(:buffer_size, 9, required: true, type: :int32, json_name: "bufferSize") 25 | field(:max_retry_count, 10, required: true, type: :int32, json_name: "maxRetryCount") 26 | field(:prefer_round_robin, 11, required: true, type: :bool, json_name: "preferRoundRobin") 27 | 28 | field(:checkpoint_after_time, 12, 29 | required: true, 30 | type: :int32, 31 | json_name: "checkpointAfterTime" 32 | ) 33 | 34 | field(:checkpoint_max_count, 13, required: true, type: :int32, json_name: "checkpointMaxCount") 35 | field(:checkpoint_min_count, 14, required: true, type: 
:int32, json_name: "checkpointMinCount") 36 | field(:subscriber_max_count, 15, required: true, type: :int32, json_name: "subscriberMaxCount") 37 | 38 | field(:named_consumer_strategy, 16, 39 | optional: true, 40 | type: :string, 41 | json_name: "namedConsumerStrategy" 42 | ) 43 | end 44 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/snapshots.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Snapshots do 2 | @moduledoc """ 3 | Provides functions for working with snapshots 4 | """ 5 | alias ExESDB.Schema.SnapshotRecord, as: SnapshotRecord 6 | 7 | @doc """ 8 | Delete a snapshot of the current state of the event store. 9 | """ 10 | @spec delete_snapshot( 11 | store :: any, 12 | source_uuid :: any 13 | ) :: :ok | {:error, any} 14 | def delete_snapshot(store, source_uuid) do 15 | case store 16 | |> :khepri.delete!([:snapshots, source_uuid]) do 17 | {:ok, _} -> :ok 18 | {:error, reason} -> {:error, reason} 19 | end 20 | end 21 | 22 | @doc """ 23 | Read a snapshot of the current state of the event store. 24 | """ 25 | @spec read_snapshot( 26 | store :: any, 27 | source_uuid :: any 28 | ) :: {:ok, SnapshotRecord.t()} | {:error, any} 29 | def read_snapshot(store, source_uuid) do 30 | case store 31 | |> :khepri.get!([:snapshots, source_uuid]) do 32 | {:ok, snapshot_record} -> {:ok, snapshot_record} 33 | {:error, reason} -> {:error, reason} 34 | end 35 | end 36 | 37 | @doc """ 38 | Record a snapshot of the current state of the event store. 
39 | """ 40 | @spec record_snapshot( 41 | store :: any, 42 | snapshot_record :: any 43 | ) :: :ok | {:error, any} 44 | def record_snapshot(store, %{source_uuid: source_uuid} = snapshot_record) 45 | when is_struct(snapshot_record, SnapshotRecord) do 46 | case store 47 | |> :khepri.put!([:snapshots, source_uuid], snapshot_record) do 48 | {:ok, _} -> :ok 49 | {:error, reason} -> {:error, reason} 50 | end 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/store_info.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.StoreInfo do 2 | @moduledoc """ 3 | This module provides functions to get information about the EXESDB event store. 4 | """ 5 | 6 | import ExESDB.Khepri.Conditions 7 | 8 | def get_streams_raw(store), 9 | do: 10 | store 11 | |> :khepri.fold( 12 | [:streams, if_node_exists(exists: true)], 13 | fn path, props, acc -> acc ++ [{path, props}] end, 14 | [], 15 | %{props_to_return: [:child_list_length]} 16 | ) 17 | 18 | @doc """ 19 | Returns the list of streams in the store. 
20 | """ 21 | def get_streams!(store) do 22 | case store 23 | |> get_streams_raw() do 24 | {:ok, streams} -> 25 | streams 26 | |> Stream.map(fn {[:streams, stream_name], %{child_list_length: nbr_of_events}} -> 27 | {stream_name, nbr_of_events} 28 | end) 29 | |> Enum.to_list() 30 | 31 | result -> 32 | result 33 | end 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/streams.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Streams do 2 | @moduledoc """ 3 | Provides functions for working with streams 4 | """ 5 | 6 | import ExESDB.Khepri.Conditions 7 | 8 | alias ExESDB.StoreInfo, as: ESInfo 9 | 10 | defp handle_transaction_result({:ok, {:commit, result}}), do: {:ok, result} 11 | defp handle_transaction_result({:ok, {:abort, reason}}), do: {:error, reason} 12 | defp handle_transaction_result({:error, reason}), do: {:error, reason} 13 | 14 | @doc """ 15 | Returns events from a stream, in a forward direction, as an Elixir Stream 16 | ## Parameters 17 | # 18 | # - `store` is the name of the store. 19 | # - `stream_id` is the name of the stream. 20 | # - `start_version` is the version of the first event to return. 21 | # - `count` is the number of events to return. 22 | # 23 | ## Returns 24 | # 25 | # - `{:ok, events}` if successful. 26 | # - `{:error, reason}` if unsuccessful. 
27 | """ 28 | @spec stream_events( 29 | store :: atom(), 30 | stream_id :: any(), 31 | start_version :: integer(), 32 | count :: integer() 33 | ) :: {:ok, Enumerable.t()} | {:error, term()} 34 | def stream_events(store, stream_id, start_version, count) do 35 | event_stream = 36 | start_version..(start_version + count - 1) 37 | |> Stream.map(fn version -> 38 | padded_version = ExESDB.VersionFormatter.pad_version(version, 6) 39 | 40 | store 41 | |> :khepri.get!([:streams, stream_id, padded_version]) 42 | end) 43 | |> Stream.reject(&is_nil/1) 44 | 45 | {:ok, event_stream} 46 | end 47 | 48 | @doc """ 49 | Returns a list of all stream ids in the store. 50 | ## Parameters 51 | # - `store` is the name of the store. 52 | ## Returns 53 | # - the list of stream ids (raises on khepri errors, since `:khepri.get!/2` is the bang variant). 54 | """ 55 | @spec get_streams(store :: atom()) :: list() 56 | def get_streams(store) do 57 | store 58 | |> :khepri.get!([:streams]) 59 | |> Enum.reduce([], fn {stream_id, _stream}, acc -> [stream_id | acc] end) 60 | end 61 | 62 | def stream_exists?(store, stream_id) do 63 | store 64 | |> :khepri.exists([:streams, stream_id]) 65 | end 66 | 67 | @doc """ 68 | Read events from a stream, in a forward direction. 69 | """ 70 | @spec stream_forward( 71 | store :: atom(), 72 | stream_id :: any(), 73 | start_version :: integer(), 74 | count :: integer() 75 | ) :: {:ok, list()} | {:error, term()} 76 | def stream_forward(store, stream_id, start_version, count) do 77 | try do 78 | case store 79 | |> stream_exists?(stream_id) do 80 | true -> 81 | store 82 | |> stream_events(stream_id, start_version, count) 83 | 84 | false -> 85 | {:error, :stream_not_found} 86 | end 87 | rescue 88 | e -> {:error, e} 89 | end 90 | end 91 | 92 | @doc """ 93 | Append events to a stream using a transaction.
94 | """ 95 | @spec append_events_tx( 96 | store :: atom(), 97 | stream_id :: any(), 98 | events :: list() 99 | ) :: {:ok, integer()} | {:error, term()} 100 | def append_events_tx(store, stream_id, events) do 101 | case store 102 | |> :khepri.transaction(fn -> 103 | actual_version = 104 | store 105 | # local get_version!/2 below; ExESDB.StoreInfo does not define get_version!/2 106 | |> get_version!(stream_id) 107 | 108 | store 109 | |> append_events(stream_id, actual_version, events) 110 | end) 111 | |> handle_transaction_result() do 112 | {:ok, new_version} -> 113 | {:ok, new_version} 114 | 115 | {:error, reason} -> 116 | {:error, reason} 117 | end 118 | end 119 | 120 | @doc """ 121 | Append events to a stream. 122 | """ 123 | @spec append_events( 124 | store :: atom(), 125 | stream_id :: any(), 126 | expected_version :: integer(), 127 | events :: list() 128 | ) :: {:ok, integer()} | {:error, term()} 129 | def append_events(store, stream_id, expected_version, events) do 130 | current_version = 131 | store 132 | |> get_version!(stream_id) 133 | 134 | if current_version == expected_version do 135 | new_version = 136 | events 137 | |> Enum.reduce( 138 | current_version, 139 | fn event, version -> 140 | new_version = version + 1 141 | padded_version = ExESDB.VersionFormatter.pad_version(new_version, 6) 142 | 143 | now = 144 | DateTime.utc_now() 145 | 146 | created = now 147 | 148 | created_epoch = 149 | now 150 | |> DateTime.to_unix(:microsecond) 151 | 152 | recorded_event = 153 | event 154 | |> to_event_record( 155 | stream_id, 156 | new_version, 157 | created, 158 | created_epoch 159 | ) 160 | 161 | store 162 | |> :khepri.put!([:streams, stream_id, padded_version], recorded_event) 163 | 164 | new_version 165 | end 166 | ) 167 | 168 | {:ok, new_version} 169 | else 170 | {:error, :wrong_expected_version} 171 | end 172 | end 173 | 174 | @doc """ 175 | Returns the version of the stream. 176 | ## Parameters 177 | - `store` is the name of the store. 178 | - `stream_id` is the name of the stream.
178 | 179 | ## Returns 180 | - `{:ok, version}` if successful. 181 | - `{:error, reason}` if unsuccessful. 182 | """ 183 | def get_version!(store, stream_id) do 184 | case store 185 | |> :khepri.count([ 186 | :streams, 187 | stream_id, 188 | if_node_exists(exists: true) 189 | ]) do 190 | {:ok, version} -> version 191 | _ -> 0 192 | end 193 | end 194 | 195 | defp to_event_record( 196 | %ExESDB.NewEvent{} = new_event, 197 | stream_id, 198 | version, 199 | created, 200 | created_epoch 201 | ), 202 | do: %ExESDB.EventRecord{ 203 | event_stream_id: stream_id, 204 | event_number: version, 205 | event_id: new_event.event_id, 206 | event_type: new_event.event_type, 207 | data_content_type: new_event.data_content_type, 208 | metadata_content_type: new_event.metadata_content_type, 209 | data: new_event.data, 210 | metadata: new_event.metadata, 211 | created: created, 212 | created_epoch: created_epoch 213 | } 214 | end 215 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/subscriptions.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Subscriptions do 2 | @moduledoc """ 3 | Provides functions for working with event store subscriptions. 4 | """ 5 | @type store :: atom() 6 | @type stream :: String.t() 7 | @type subscription_name :: String.t() 8 | @type error :: term 9 | 10 | use GenServer 11 | 12 | alias ExESDB.Emitters 13 | require Logger 14 | 15 | @doc """ 16 | Create a transient subscription for a specific stream or for all streams. 
17 | """ 18 | @spec subscribe( 19 | store :: store, 20 | stream :: stream 21 | ) :: :ok | {:error, error} 22 | def subscribe(store, stream \\ "$all") do 23 | store 24 | |> Emitters.start_stream_emitter(stream) 25 | 26 | case store 27 | |> :khepri.put( 28 | [:subscriptions, stream], 29 | %{ 30 | subscriber: self(), 31 | start_from: 0, 32 | opts: [] 33 | } 34 | ) do 35 | :ok -> 36 | ExESDB.Emitters.start_stream_emitter(store, stream) 37 | :ok 38 | 39 | {:error, reason} -> 40 | {:error, reason} 41 | end 42 | end 43 | 44 | @spec subscribe_to( 45 | store :: store, 46 | stream :: String.t(), 47 | subscription_name :: subscription_name, 48 | subscriber :: pid, 49 | start_from :: integer, 50 | opts :: Keyword.t() 51 | ) :: :ok | {:error, error} 52 | def subscribe_to( 53 | store, 54 | stream, 55 | subscription_name, 56 | subscriber, 57 | start_from \\ 0, 58 | opts \\ [] 59 | ) do 60 | case store 61 | |> :khepri.put( 62 | [:subscriptions, stream, subscription_name], 63 | %{ 64 | subscriber: subscriber, 65 | start_from: start_from, 66 | opts: opts 67 | } 68 | ) do 69 | :ok -> 70 | store 71 | |> Emitters.start_stream_emitter(stream) 72 | 73 | :ok 74 | 75 | {:error, reason} -> 76 | {:error, reason} 77 | end 78 | end 79 | 80 | def unsubscribe(store, subscription_name) do 81 | store 82 | |> :khepri.delete!([:subscriptions, subscription_name]) 83 | end 84 | 85 | @doc """ 86 | Delete a subscription. 
87 | """ 88 | @spec delete_subscription( 89 | store :: any, 90 | subscription_name :: subscription_name, 91 | stream :: stream 92 | ) :: :ok | {:error, error} 93 | def delete_subscription(store, subscription_name, stream \\ "$all") do 94 | store 95 | # path segment order must match the write in subscribe_to/6: [:subscriptions, stream, subscription_name] 96 | |> :khepri.delete!([:subscriptions, stream, subscription_name]) 97 | end 98 | 99 | def all(store) do 100 | case store 101 | |> :khepri.get([:subscriptions]) do 102 | {:ok, result} -> 103 | result 104 | |> Enum.map(fn item -> item end) 105 | 106 | {:error, reason} -> 107 | {:error, reason} 108 | end 109 | end 110 | 111 | def by_name(store, subscription_name) do 112 | case store 113 | |> :khepri.get([:subscriptions, subscription_name]) do 114 | {:ok, result} -> 115 | result 116 | |> Enum.map(fn item -> item end) 117 | 118 | {:error, reason} -> 119 | {:error, reason} 120 | end 121 | end 122 | 123 | ####### PLUMBING ####### 124 | def child_spec(opts) do 125 | %{ 126 | id: __MODULE__, 127 | start: {__MODULE__, :start_link, [opts]}, 128 | type: :worker, 129 | restart: :permanent, 130 | shutdown: 5000 131 | } 132 | end 133 | 134 | def start_link(opts) do 135 | GenServer.start_link( 136 | __MODULE__, 137 | opts, 138 | name: __MODULE__ 139 | ) 140 | end 141 | 142 | @impl true 143 | def init(opts) do 144 | {:ok, opts} 145 | end 146 | end 147 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/system.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.System do 2 | @moduledoc """ 3 | This module is the top level supervisor for the ExESDB system.
4 | It is responsible for supervising: 5 | - The PubSub mechanism 6 | - the Event Store 7 | - the Cluster 8 | """ 9 | use Supervisor 10 | 11 | alias ExESDB.Themes, as: Themes 12 | 13 | require Logger 14 | require Phoenix.PubSub 15 | 16 | @impl true 17 | def init(opts) do 18 | Logger.warning("#{Themes.system(self())} is UP") 19 | 20 | children = [ 21 | add_pub_sub(opts), 22 | {ExESDB.EventStore, opts}, 23 | {ExESDB.Cluster, opts}, 24 | {PartitionSupervisor, child_spec: DynamicSupervisor, name: ExESDB.EmitterPools} 25 | ] 26 | 27 | :os.set_signal(:sigterm, :handle) 28 | :os.set_signal(:sigquit, :handle) 29 | 30 | spawn(fn -> handle_os_signal() end) 31 | 32 | Supervisor.init( 33 | children, 34 | strategy: :one_for_one 35 | ) 36 | end 37 | 38 | defp add_pub_sub(opts) do 39 | pub_sub = Keyword.get(opts, :pub_sub) 40 | 41 | case pub_sub do 42 | nil -> 43 | add_pub_sub([pub_sub: :native] ++ opts) 44 | 45 | :native -> 46 | {ExESDB.PubSub, opts} 47 | 48 | _ -> 49 | {Phoenix.PubSub, name: pub_sub} 50 | end 51 | end 52 | 53 | defp handle_os_signal do 54 | receive do 55 | {:signal, :sigterm} -> 56 | Logger.warning("SIGTERM received. Stopping ExESDB") 57 | stop(:sigterm) 58 | 59 | {:signal, :sigquit} -> 60 | Logger.warning("SIGQUIT received. 
Stopping ExESDB") 61 | stop(:sigquit) 62 | 63 | msg -> 64 | IO.puts("Unknown signal: #{inspect(msg)}") 65 | Logger.warning("Received unknown signal: #{inspect(msg)}") 66 | end 67 | 68 | handle_os_signal() 69 | end 70 | 71 | def stop(_reason \\ :normal) do 72 | Process.sleep(2_000) 73 | Application.stop(:ex_esdb) 74 | end 75 | 76 | def start_link(opts), 77 | do: 78 | Supervisor.start_link( 79 | __MODULE__, 80 | opts, 81 | name: __MODULE__ 82 | ) 83 | 84 | def start(opts) do 85 | case start_link(opts) do 86 | {:ok, pid} -> pid 87 | {:error, {:already_started, pid}} -> pid 88 | {:error, reason} -> raise "failed to start eventstores supervisor: #{inspect(reason)}" 89 | end 90 | end 91 | 92 | def child_spec(opts) do 93 | %{ 94 | id: __MODULE__, 95 | start: {__MODULE__, :start_link, [opts]}, 96 | type: :supervisor 97 | } 98 | end 99 | end 100 | -------------------------------------------------------------------------------- /system/lib/ex_esdb/version_formatter.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.VersionFormatter do 2 | @moduledoc false 3 | 4 | ## Examples 5 | # iex > version_to_integer("0042") 6 | # 42 7 | 8 | # iex > version_to_integer("123") 9 | # 123 10 | 11 | # iex > version_to_integer("000") 12 | # 0 13 | 14 | def version_to_integer(padded_version) when is_binary(padded_version) do 15 | padded_version 16 | |> String.trim_leading("0") 17 | |> case do 18 | # Handle all-zero case 19 | "" -> 0 20 | num_str -> String.to_integer(num_str) 21 | end 22 | end 23 | 24 | # Original padding function remains unchanged 25 | def pad_version(version, length) when is_integer(version) and length > 0 do 26 | version 27 | |> Integer.to_string() 28 | |> String.pad_leading(length, "0") 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /system/lib/khepri/conditions.ex: -------------------------------------------------------------------------------- 1 | defmodule 
ExESDB.Khepri.Conditions do 2 | @moduledoc false 3 | import Record 4 | 5 | @khepri_hrl "khepri/include/khepri.hrl" 6 | defrecord :if_name_matches, 7 | extract(:if_name_matches, from_lib: @khepri_hrl) 8 | 9 | defrecord :if_path_matches, 10 | extract(:if_path_matches, from_lib: @khepri_hrl) 11 | 12 | defrecord :if_has_payload, 13 | extract(:if_has_payload, from_lib: @khepri_hrl) 14 | 15 | defrecord :if_has_data, 16 | extract(:if_has_data, from_lib: @khepri_hrl) 17 | 18 | defrecord :if_has_sproc, 19 | extract(:if_has_sproc, from_lib: @khepri_hrl) 20 | 21 | defrecord :if_data_matches, 22 | extract(:if_data_matches, from_lib: @khepri_hrl) 23 | 24 | defrecord :if_node_exists, 25 | extract(:if_node_exists, from_lib: @khepri_hrl) 26 | 27 | defrecord :if_payload_version, 28 | extract(:if_payload_version, from_lib: @khepri_hrl) 29 | 30 | defrecord :if_child_list_version, 31 | extract(:if_child_list_version, from_lib: @khepri_hrl) 32 | 33 | defrecord :if_child_list_length, 34 | extract(:if_child_list_length, from_lib: @khepri_hrl) 35 | 36 | defrecord :if_not, 37 | extract(:if_not, from_lib: @khepri_hrl) 38 | 39 | defrecord :if_all, 40 | extract(:if_all, from_lib: @khepri_hrl) 41 | 42 | defrecord :if_any, 43 | extract(:if_any, from_lib: @khepri_hrl) 44 | 45 | 46 | end 47 | 48 | -------------------------------------------------------------------------------- /system/lib/options.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Options do 2 | @moduledoc """ 3 | This module contains the options helper functions for ExESDB 4 | """ 5 | alias ExESDB.EnVars, as: EnVars 6 | 7 | @data_dir EnVars.data_dir() 8 | @store_id EnVars.store_id() 9 | @timeout EnVars.timeout() 10 | @db_type EnVars.db_type() 11 | @seed_nodes EnVars.seed_nodes() 12 | @pub_sub EnVars.pub_sub() 13 | 14 | def sys_env(key), do: System.get_env(key) 15 | def app_env, do: Application.get_env(:ex_esdb, :khepri) 16 | def app_env(key), do: Keyword.get(app_env(), key) 
17 | 18 | def data_dir do 19 | case sys_env(@data_dir) do 20 | nil -> app_env(:data_dir) || "/data" 21 | data_dir -> data_dir 22 | end 23 | end 24 | 25 | def store_id do 26 | case sys_env(@store_id) do 27 | nil -> app_env(:store_id) || :ex_esdb_store 28 | store_id -> to_unique_atom(store_id) 29 | end 30 | end 31 | 32 | def timeout do 33 | case sys_env(@timeout) do 34 | nil -> app_env(:timeout) || 10_000 35 | timeout -> String.to_integer(timeout) 36 | end 37 | end 38 | 39 | def db_type do 40 | case sys_env(@db_type) do 41 | nil -> app_env(:db_type) || :single 42 | db_type -> String.to_atom(db_type) 43 | end 44 | end 45 | 46 | def seed_nodes do 47 | case sys_env(@seed_nodes) do 48 | nil -> app_env(:seeds) || [node()] 49 | seeds -> to_atoms_list(seeds) 50 | end 51 | end 52 | 53 | def pub_sub do 54 | case sys_env(@pub_sub) do 55 | nil -> app_env(:pub_sub) || :native 56 | pub_sub -> to_unique_atom(pub_sub) 57 | end 58 | end 59 | 60 | defp to_atoms_list(seeds) do 61 | seeds 62 | |> String.split(",") 63 | |> Enum.map(&clean_node/1) 64 | |> Enum.map(&to_unique_atom/1) 65 | end 66 | 67 | defp clean_node(node), 68 | do: 69 | String.trim(node) 70 | |> String.downcase() 71 | |> String.replace(" ", "") 72 | |> String.replace(",", "") 73 | |> String.replace(".", "") 74 | |> String.replace(":", "") 75 | 76 | defp to_unique_atom(candidate) do 77 | try do 78 | String.to_existing_atom(candidate) 79 | rescue 80 | _ -> String.to_atom(candidate) 81 | end 82 | end 83 | end 84 | -------------------------------------------------------------------------------- /system/lib/repl.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Repl do 2 | @moduledoc """ 3 | This module is to interact with the ExESDB.system, 4 | running a store called "reg_gh" (Regulate Greenhouse) 5 | 6 | """ 7 | alias ExESDB.Repl.EventGenerator, as: ESGen 8 | alias ExESDB.Repl.EventStreamMonitor, as: ESMonitor 9 | 10 | alias ExESDB.StoreInfo, as: ESInfo 11 | alias 
ExESDB.Streams, as: ESStreams 12 | alias ExESDB.Subscriptions, as: ESSubscriptions 13 | alias ExESDB.System, as: ESSystem 14 | 15 | require Logger 16 | 17 | @store :reg_gh 18 | @greenhouse1 :greenhouse1 19 | @greenhouse2 :greenhouse2 20 | @greenhouse3 :greenhouse3 21 | @greenhouse4 :greenhouse4 22 | @greenhouse5 :greenhouse5 23 | 24 | def store, do: @store 25 | def stream1, do: @greenhouse1 26 | def stream2, do: @greenhouse2 27 | def stream3, do: @greenhouse3 28 | def stream4, do: @greenhouse4 29 | def stream5, do: @greenhouse5 30 | 31 | def get_opts, do: ExESDB.Options.app_env() 32 | 33 | def get_streams, 34 | do: ESInfo.get_streams_raw(@store) 35 | 36 | def get_subscriptions, do: ESSubscriptions.all(@store) 37 | 38 | def start_monitor do 39 | opts = get_opts() 40 | 41 | case ESMonitor.start_link(opts) do 42 | {:ok, pid} -> 43 | IO.puts("Monitor started with pid #{inspect(pid)}") 44 | 45 | {:error, {:already_started, pid}} -> 46 | IO.puts("Monitor already started with pid #{inspect(pid)}") 47 | 48 | {:error, reason} -> 49 | raise "Failed to start monitor. Reason: #{inspect(reason)}" 50 | end 51 | end 52 | 53 | @doc """ 54 | Append events to a stream. 
55 | """ 56 | @spec append( 57 | stream :: atom(), 58 | nbr_of_events :: integer() 59 | ) :: {:ok, list(), integer()} | {:error, term()} 60 | def append(stream, nbr_of_events) do 61 | version = ESInfo.get_version!(@store, stream) 62 | 63 | events = ESGen.generate_events(version, nbr_of_events) 64 | 65 | case @store 66 | |> ESStreams.append_events(stream, version, events) do 67 | {:ok, new_version} -> 68 | {:ok, result} = 69 | @store 70 | |> ESStreams.stream_forward(stream, 1, new_version) 71 | 72 | {:ok, result, result |> Enum.count()} 73 | 74 | {:error, reason} -> 75 | {:error, reason} 76 | end 77 | end 78 | 79 | def all(stream) do 80 | case @store 81 | |> ESInfo.get_version!(stream) do 82 | 0 -> 83 | nil 84 | 85 | version -> 86 | {:ok, events} = 87 | @store 88 | |> ESStreams.stream_forward(stream, 1, version) 89 | 90 | events 91 | end 92 | end 93 | 94 | def start_system do 95 | opts = get_opts() 96 | 97 | case ESSystem.start_link(opts) do 98 | {:ok, pid} -> 99 | IO.puts("System started with pid #{inspect(pid)}") 100 | pid 101 | 102 | {:error, {:already_started, pid}} -> 103 | IO.puts("System already started with pid #{inspect(pid)}") 104 | pid 105 | 106 | {:error, reason} -> 107 | raise "Failed to start system. 
Reason: #{inspect(reason)}" 108 | end 109 | end 110 | 111 | def test_initialized_v1_emitter(stream) do 112 | pubsub = 113 | get_opts() 114 | |> Keyword.get(:pub_sub) 115 | 116 | pubsub 117 | |> Phoenix.PubSub.subscribe("reg_gh:initialized:v1") 118 | 119 | ExESDB.Emitters.start_type_emitter(@store, "initialized:v1") 120 | 121 | append(stream, 2) 122 | 123 | self() |> Process.info(:messages) 124 | end 125 | 126 | def test_all_emitter(stream) do 127 | pubsub = 128 | get_opts() 129 | |> Keyword.get(:pub_sub) 130 | 131 | pubsub 132 | |> Phoenix.PubSub.subscribe("reg_gh:$all") 133 | 134 | ExESDB.Emitters.start_all_emitter(@store) 135 | 136 | append(stream, 2) 137 | 138 | self() |> Process.info(:messages) 139 | end 140 | end 141 | -------------------------------------------------------------------------------- /system/lib/repl/generator.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Repl.EventGenerator do 2 | @moduledoc false 3 | 4 | require UUIDv7 5 | require UUID 6 | 7 | @initialized_v1 "initialized:v1" 8 | 9 | @temperature_measured_v1 "temperature_measured:v1" 10 | @humidity_measured_v1 "humidity_measured:v1" 11 | @light_measured_v1 "light_measured:v1" 12 | 13 | @fan_activated_v1 "fan_activated:v1" 14 | @fan_deactivated_v1 "fan_deactivated:v1" 15 | @light_activated_v1 "light_activated:v1" 16 | @light_deactivated_v1 "light_deactivated:v1" 17 | @heater_activated_v1 "heater_activated:v1" 18 | @heater_deactivated_v1 "heater_deactivated:v1" 19 | @sprinkler_activated_v1 "sprinkler_activated:v1" 20 | @sprinkler_deactivated_v1 "sprinkler_deactivated:v1" 21 | 22 | @desired_temperature_set_v1 "desired_temperature_set:v1" 23 | @desired_humidity_set_v1 "desired_humidity_set:v1" 24 | @desired_light_set_v1 "desired_light_set:v1" 25 | 26 | @event_types [ 27 | @initialized_v1, 28 | @temperature_measured_v1, 29 | @humidity_measured_v1, 30 | @light_measured_v1, 31 | @desired_temperature_set_v1, 32 | @desired_humidity_set_v1, 33 
| @desired_light_set_v1, 34 | @light_activated_v1, 35 | @light_deactivated_v1, 36 | @fan_activated_v1, 37 | @fan_deactivated_v1, 38 | @heater_activated_v1, 39 | @heater_deactivated_v1, 40 | @sprinkler_activated_v1, 41 | @sprinkler_deactivated_v1 42 | ] 43 | 44 | @operators [ 45 | "John", 46 | "Paul", 47 | "George", 48 | "Ringo" 49 | ] 50 | 51 | # Example content type values 52 | @content_types [1, 2, 3] 53 | 54 | defp random_operator, do: Enum.random(@operators) 55 | defp random_temperature, do: 5 + :rand.uniform(30) 56 | defp random_humidity, do: :rand.uniform(100) 57 | defp random_light, do: :rand.uniform(100) 58 | defp random_intensity, do: :rand.uniform(100) 59 | 60 | defp initialized, 61 | do: %ExESDB.NewEvent{ 62 | event_id: generate_uuid(), 63 | event_type: @initialized_v1, 64 | data_content_type: 1, 65 | metadata_content_type: 1, 66 | data: %{ 67 | temperature: random_temperature(), 68 | humidity: random_humidity(), 69 | light: random_light() 70 | } 71 | } 72 | 73 | def generate_events(start_from, count) when is_integer(count) and count > 0 do 74 | case start_from do 75 | 0 -> 76 | [initialized() | generate_events(count - 1)] 77 | 78 | _ -> 79 | generate_events(count) 80 | end 81 | end 82 | 83 | def generate_events(0), do: [] 84 | 85 | def generate_events(count) 86 | when is_integer(count) and count > 0, 87 | do: 88 | 1..count 89 | |> Enum.map(&generate_event/1) 90 | 91 | defp generate_event(_) do 92 | event_id = generate_uuid() 93 | event_type = Enum.random(@event_types) 94 | 95 | %ExESDB.NewEvent{ 96 | event_id: event_id, 97 | event_type: event_type, 98 | data_content_type: Enum.random(@content_types), 99 | metadata_content_type: Enum.random(@content_types), 100 | data: random_payload(event_type), 101 | metadata: generate_optional_metadata() 102 | } 103 | end 104 | 105 | defp generate_uuid, do: UUIDv7.generate() 106 | 107 | defp random_payload(@temperature_measured_v1), 108 | do: %{ 109 | temperature: random_temperature() 110 | } 111 | 112 | defp 
random_payload(@humidity_measured_v1), 113 | do: %{ 114 | humidity: random_humidity() 115 | } 116 | 117 | defp random_payload(@light_measured_v1), 118 | do: %{ 119 | light: random_light() 120 | } 121 | 122 | defp random_payload(@desired_temperature_set_v1), 123 | do: %{ 124 | temperature: random_temperature(), 125 | operator: random_operator() 126 | } 127 | 128 | defp random_payload(@desired_humidity_set_v1), 129 | do: %{ 130 | humidity: random_humidity(), 131 | operator: random_operator() 132 | } 133 | 134 | defp random_payload(@desired_light_set_v1), 135 | do: %{ 136 | light: random_light(), 137 | operator: random_operator() 138 | } 139 | 140 | defp random_payload(_), 141 | do: %{ 142 | intensity: random_intensity() 143 | } 144 | 145 | defp generate_optional_metadata do 146 | # 70% chance of metadata 147 | %{ 148 | causation_id: generate_uuid(), 149 | correlation_id: generate_uuid() 150 | } 151 | end 152 | end 153 | -------------------------------------------------------------------------------- /system/lib/repl/monitor.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Repl.EventStreamMonitor do 2 | @moduledoc false 3 | use GenServer 4 | require Logger 5 | require ExESDB.Subscriptions, as: Subscriptions 6 | 7 | alias ExESDB.Themes, as: Themes 8 | alias Phoenix.PubSub, as: PubSub 9 | 10 | def start do 11 | opts = ExESDB.Options.app_env() 12 | 13 | case start_link(opts) do 14 | {:ok, pid} -> 15 | Logger.info("Monitor started with pid #{inspect(pid)}") 16 | pid 17 | 18 | {:error, {:already_started, pid}} -> 19 | IO.puts("Monitor already started with pid #{inspect(pid)}") 20 | pid 21 | 22 | {:error, reason} -> 23 | raise "Failed to start monitor. 
Reason: #{inspect(reason)}" 24 | end 25 | end 26 | 27 | defp subscribe_all(store, opts) do 28 | topic = :erlang.atom_to_binary(store) 29 | pub_sub = opts[:pub_sub] 30 | 31 | case store 32 | |> Subscriptions.subscribe_to("$all", "all_to_pg", pub_sub, 0, opts) do 33 | :ok -> 34 | Logger.info( 35 | "#{Themes.monitor(self())} => Subscribed to Topic #{inspect(topic, pretty: true)}" 36 | ) 37 | 38 | msg -> 39 | msg 40 | end 41 | end 42 | 43 | defp subscribe(opts) do 44 | store = opts[:store_id] 45 | topic = :erlang.atom_to_binary(store, :utf8) 46 | pub_sub = opts[:pub_sub] 47 | 48 | case pub_sub 49 | |> PubSub.subscribe(topic) do 50 | :ok -> 51 | subscribe_all(store, opts) 52 | 53 | error -> 54 | Logger.error( 55 | "#{Themes.monitor(self())} => Failed to subscribe to Topic #{inspect(topic, pretty: true)}. Reason: #{inspect(error)}" 56 | ) 57 | end 58 | end 59 | 60 | @impl true 61 | def handle_info({:event_emitted, event}, state) do 62 | Logger.warning("#{Themes.monitor(self())} => Seen event #{inspect(event)}") 63 | {:noreply, state} 64 | end 65 | 66 | @impl true 67 | def handle_info(unknown, state) do 68 | IO.puts("Unknown message #{inspect(unknown)}") 69 | {:noreply, state} 70 | end 71 | 72 | @impl true 73 | def init(opts) do 74 | Logger.info( 75 | "#{Themes.monitor(self())} => Starting monitor for #{inspect(opts[:store_id], pretty: true)}" 76 | ) 77 | 78 | subscribe(opts) 79 | 80 | {:ok, opts} 81 | end 82 | 83 | def start_link(args) do 84 | GenServer.start_link( 85 | __MODULE__, 86 | args, 87 | name: __MODULE__ 88 | ) 89 | end 90 | 91 | def child_spec(opts) do 92 | %{ 93 | id: __MODULE__, 94 | start: {__MODULE__, :start_link, [opts]}, 95 | restart: :permanent, 96 | shutdown: 5000, 97 | type: :worker 98 | } 99 | end 100 | end 101 | -------------------------------------------------------------------------------- /system/lib/themes.ex: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Themes do 2 | @moduledoc false 3 | 
alias BeamCampus.ColorFuncs, as: CF 4 | 5 | def app(pid), 6 | do: "ESDB_APP [#{CF.black_on_blue()}#{inspect(pid)}#{CF.reset()}]" 7 | 8 | def system(pid), 9 | do: "ESDB_SYSTEM [#{CF.black_on_magenta()}#{inspect(pid)}#{CF.reset()}]" 10 | 11 | def store(pid), 12 | do: "ESDB_STORE [#{CF.black_on_green()}#{inspect(pid)}#{CF.reset()}]" 13 | 14 | def cluster(pid), 15 | do: "ESDB_CLUSTER [#{CF.yellow_on_red()}#{inspect(pid)}#{CF.reset()}]" 16 | 17 | def projector(pid), 18 | do: "ESDB_PROJECTOR [#{CF.black_on_white()}#{inspect(pid)}#{CF.reset()}]" 19 | 20 | def monitor(pid), 21 | do: "ESDB_MONITOR [#{CF.yellow_on_magenta()}#{inspect(pid)}#{CF.reset()}]" 22 | 23 | def emitter_pool(pid), 24 | do: "ESDB_EMITTER_POOL [#{CF.black_on_yellow()}#{inspect(pid)}#{CF.reset()}]" 25 | 26 | def emitter_worker(pid), 27 | do: "ESDB_EMITTER_WORKER [#{CF.yellow_on_black()}#{inspect(pid)}#{CF.reset()}]" 28 | 29 | def pubsub(pid), 30 | do: "ESDB_PUBSUB [#{CF.black_on_cyan()}#{inspect(pid)}#{CF.reset()}]" 31 | 32 | def subscriptions(msg), 33 | do: "ESDB_SUBSCRIPTIONS [#{CF.black_on_white()}#{msg}#{CF.reset()}]" 34 | end 35 | -------------------------------------------------------------------------------- /system/mix.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.MixProject do 2 | @moduledoc false 3 | use Mix.Project 4 | 5 | @app_name :ex_esdb 6 | @elixir_version "~> 1.17" 7 | @version "0.0.9-alpha" 8 | @source_url "https://codeberg.org/beam-campus/ex-esdb" 9 | # @homepage_url "https://github.com/beam-campus/ex-esdb" 10 | @docs_url "https://hexdocs.pm/ex_esdb" 11 | # @package_url "https://hex.pm/packages/ex_esdb" 12 | # @issues_url "https://github.com/beam-campus/ex-esdb/issues" 13 | @description "ExESDB is a reincarnation of rabbitmq/khepri, specialized for use as a BEAM-native event store." 
14 | 15 | def project do 16 | [ 17 | app: @app_name, 18 | version: @version, 19 | deps: deps(), 20 | elixir: @elixir_version, 21 | elixirc_paths: elixirc_paths(Mix.env()), 22 | erl_opts: erl_opts(), 23 | erlc_paths: erlc_paths(Mix.env()), 24 | consolidate_protocols: Mix.env() != :test, 25 | description: @description, 26 | docs: docs(), 27 | package: package(), 28 | releases: releases(), 29 | start_permanent: Mix.env() == :prod, 30 | test_coverage: [tool: coverage_tool()], 31 | preferred_cli_env: [coveralls: :test] 32 | ] 33 | end 34 | 35 | defp releases, 36 | do: [ 37 | ex_esdb: [ 38 | include_erts: true, 39 | include_executables_for: [:unix], 40 | steps: [:assemble, :tar], 41 | applications: [ 42 | runtime_tools: :permanent, 43 | logger: :permanent, 44 | os_mon: :permanent 45 | ] 46 | ] 47 | ] 48 | 49 | # Run "mix help compile.app" to learn about applications. 50 | def application, 51 | do: [ 52 | mod: {ExESDB.App, []}, 53 | extra_applications: [ 54 | :logger, 55 | :eex, 56 | :os_mon, 57 | :runtime_tools, 58 | :khepri, 59 | :gen_retry 60 | ] 61 | ] 62 | 63 | defp erlc_paths(_), 64 | do: [ 65 | "src" 66 | ] 67 | 68 | def erl_opts, 69 | do: [ 70 | {:i, "deps/khepri/include"} 71 | ] 72 | 73 | defp elixirc_paths(:test), 74 | do: [ 75 | "lib", 76 | "test/support" 77 | ] 78 | 79 | defp elixirc_paths(_), do: ["lib"] 80 | 81 | defp deps do 82 | [ 83 | {:dialyze, "~> 0.2.0", only: [:dev]}, 84 | {:dialyxir, "~> 1.0", only: [:dev], runtime: false}, 85 | {:makeup_html, ">= 0.0.0", only: :dev, runtime: false}, 86 | {:ex_doc, "~> 0.37", only: [:dev], runtime: false}, 87 | {:mix_test_watch, "~> 1.1", only: [:dev, :test], runtime: false}, 88 | {:credo, "~> 1.7", only: [:dev, :test], runtime: false}, 89 | {:meck, "~> 0.9", only: [:test], runtime: false}, 90 | {:eunit_formatters, "~> 0.5", only: [:test], runtime: false}, 91 | {:mox, "~> 1.0", only: [:test], runtime: false}, 92 | {:jason, "~> 1.4", optional: true}, 93 | {:horde, "~> 0.9"}, 94 | {:phoenix_pubsub, "~> 2.1"}, 95 | 
{:khepri, "~> 0.17"}, 96 | {:protobuf, "~> 0.14"}, 97 | {:gen_retry, "~> 1.4"}, 98 | {:uuidv7, "~> 1.0"}, 99 | {:elixir_uuid, "~> 1.2"}, 100 | {:commanded, "~> 1.4"} 101 | ] 102 | end 103 | 104 | defp coverage_tool do 105 | # Optional coverage configuration 106 | {:cover, [output: "_build/cover"]} 107 | end 108 | 109 | defp docs do 110 | [ 111 | main: "readme", 112 | canonical: @docs_url, 113 | source_ref: "v#{@version}", 114 | extra_section: "guides", 115 | extras: [ 116 | "ADR.md", 117 | "CHANGELOG.md", 118 | "guides/getting_started.md": [ 119 | filename: "getting-started", 120 | title: "Getting Started" 121 | ], 122 | "guides/testing.md": [ 123 | filename: "testing", 124 | title: "Testing" 125 | ], 126 | "../README.md": [ 127 | filename: "readme", 128 | title: "Read Me" 129 | ] 130 | ] 131 | ] 132 | end 133 | 134 | defp package do 135 | [ 136 | name: @app_name, 137 | description: @description, 138 | version: @version, 139 | # files: [ 140 | # "lib", 141 | # "src", 142 | # "priv", 143 | # "mix.exs", 144 | # "../README*", 145 | # "../LICENSE*" 146 | # ], 147 | maintainers: ["rgfaber"], 148 | # organization: "beam-campus", 149 | licenses: ["MIT"], 150 | links: %{ 151 | "Codeberg" => @source_url 152 | }, 153 | source_url: @source_url 154 | ] 155 | end 156 | end 157 | -------------------------------------------------------------------------------- /system/mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "aten": {:hex, :aten, "0.6.0", "7a57b275a6daf515ac3683fb9853e280b4d0dcdd74292fd66ac4a01c8694f8c7", [:rebar3], [], "hexpm", "5f39a164206ae3f211ef5880b1f7819415686436e3229d30b6a058564fbaa168"}, 3 | "backoff": {:hex, :backoff, "1.1.6", "83b72ed2108ba1ee8f7d1c22e0b4a00cfe3593a67dbc792799e8cce9f42f796b", [:rebar3], [], "hexpm", "cf0cfff8995fb20562f822e5cc47d8ccf664c5ecdc26a684cbe85c225f9d7c39"}, 4 | "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", 
"dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, 5 | "commanded": {:hex, :commanded, "1.4.8", "a35b3894126a90a47400e670fead1ce2227c3bedfbd1ee15a78a088ae62bf251", [:mix], [{:backoff, "~> 1.1", [hex: :backoff, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_registry, "~> 0.2", [hex: :telemetry_registry, repo: "hexpm", optional: false]}], "hexpm", "bfbfb93dc017f4c225a64ac78cc0c09d7d97e16f71dd8b97b4fcc6e59ac01caa"}, 6 | "credo": {:hex, :credo, "1.7.12", "9e3c20463de4b5f3f23721527fcaf16722ec815e70ff6c60b86412c695d426c1", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8493d45c656c5427d9c729235b99d498bd133421f3e0a683e5c1b561471291e5"}, 7 | "db_connection": {:hex, :db_connection, "2.7.0", "b99faa9291bb09892c7da373bb82cba59aefa9b36300f6145c5f201c7adf48ec", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dcf08f31b2701f857dfc787fbad78223d61a32204f217f15e881dd93e4bdd3ff"}, 8 | "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, 9 | "delta_crdt": {:hex, :delta_crdt, "0.6.5", "c7bb8c2c7e60f59e46557ab4e0224f67ba22f04c02826e273738f3dcc4767adc", [:mix], [{:merkle_map, "~> 0.2.0", [hex: :merkle_map, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c6ae23a525d30f96494186dd11bf19ed9ae21d9fe2c1f1b217d492a7cc7294ae"}, 10 | "dialyxir": {:hex, :dialyxir, "1.4.5", 
"ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, 11 | "dialyze": {:hex, :dialyze, "0.2.1", "9fb71767f96649020d769db7cbd7290059daff23707d6e851e206b1fdfa92f9d", [:mix], [], "hexpm", "f485181fa53229356621261a384963cb47511cccf1454e82ca4fde53274fcd48"}, 12 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 13 | "ecto": {:hex, :ecto, "3.12.5", "4a312960ce612e17337e7cefcf9be45b95a3be6b36b6f94dfb3d8c361d631866", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6eb18e80bef8bb57e17f5a7f068a1719fbda384d40fc37acb8eb8aeca493b6ea"}, 14 | "ecto_sql": {:hex, :ecto_sql, "3.12.1", "c0d0d60e85d9ff4631f12bafa454bc392ce8b9ec83531a412c12a0d415a3a4d0", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aff5b958a899762c5f09028c847569f7dfb9cc9d63bdb8133bff8a5546de6bf5"}, 15 | "elixir_uuid": {:hex, :elixir_uuid, "1.2.1", "dce506597acb7e6b0daeaff52ff6a9043f5919a4c3315abb4143f0b00378c097", [:mix], [], "hexpm", "f7eba2ea6c3555cea09706492716b0d87397b88946e6380898c2889d68585752"}, 16 | "erlex": {:hex, :erlex, "0.2.7", 
"810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, 17 | "eunit_formatters": {:hex, :eunit_formatters, "0.6.0", "34f03685a64e75e603bae1fb73303bcfc19886f671a97daf85cedd4088a92e5f", [:rebar3], [], "hexpm", "28425f5708fed6fa0bbc1b2b114df097b4450a69cedc396cb25cc4d8706622f8"}, 18 | "ex_doc": {:hex, :ex_doc, "0.38.1", "bae0a0bd5b5925b1caef4987e3470902d072d03347114ffe03a55dbe206dd4c2", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "754636236d191b895e1e4de2ebb504c057fe1995fdfdd92e9d75c4b05633008b"}, 19 | "exconstructor": {:hex, :exconstructor, "1.2.13", "7021eed1450202dcbcd1ef021d6aacf7351854ff9d7964f166931567f9dfa9fb", [:mix], [], "hexpm", "69d3f0251a07bb7c5ef85bde22a1eee577dfbb49852d77fb7ad7b937035aeef2"}, 20 | "file_system": {:hex, :file_system, "1.1.0", "08d232062284546c6c34426997dd7ef6ec9f8bbd090eb91780283c9016840e8f", [:mix], [], "hexpm", "bfcf81244f416871f2a2e15c1b515287faa5db9c6bcf290222206d120b3d43f6"}, 21 | "gen_batch_server": {:hex, :gen_batch_server, "0.8.9", "1c6bc0f530bf8c17e8b4acc20c2cc369ffa5bee2b46de01e21410745f24b1bc9", [:rebar3], [], "hexpm", "c8581fe4a4b6bccf91e53ce6a8c7e6c27c8c591bab5408b160166463f5579c22"}, 22 | "gen_retry": {:hex, :gen_retry, "1.4.0", "682fadcb7ebf629f8dc4d84b4e62e03dce1baa526141133a19a8e7cee5ef7e1b", [:mix], [{:exconstructor, "~> 1.0", [hex: :exconstructor, repo: "hexpm", optional: false]}], "hexpm", "c15311afe0770f5fc58f4391b2ff3362fd25f1e107d02ff63592d36f30eaaae4"}, 23 | "horde": {:hex, :horde, "0.9.0", 
"522342bd7149aeed453c97692a8bca9cf7c9368c5a489afd802e575dc8df54a6", [:mix], [{:delta_crdt, "~> 0.6.2", [hex: :delta_crdt, repo: "hexpm", optional: false]}, {:libring, "~> 1.4", [hex: :libring, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 0.5.0 or ~> 1.0", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "fae11e5bc9c980038607d0c3338cdf7f97124a5d5382fd4b6fb6beaab8e214fe"}, 24 | "horus": {:hex, :horus, "0.3.1", "a5274c96e15924c28413752617b06050e4b08c04628b88209aff9ea076f2bcb5", [:mix, :rebar3], [], "hexpm", "d564d30ebc274f0d92c3d44a336d0b892f000be159912ae4e6838701e85495ec"}, 25 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 26 | "khepri": {:hex, :khepri, "0.17.1", "b3f09238df2240f29392cc894091c80711cb2ff4430e44b2828e75893c6484dd", [:mix, :rebar3], [{:horus, "0.3.1", [hex: :horus, repo: "hexpm", optional: false]}, {:ra, "2.16.7", [hex: :ra, repo: "hexpm", optional: false]}], "hexpm", "a030f3f675e88b8727af33a6d01441a54a7b1d9f1106ec33fbcae26119db0843"}, 27 | "libring": {:hex, :libring, "1.7.0", "4f245d2f1476cd7ed8f03740f6431acba815401e40299208c7f5c640e1883bda", [:mix], [], "hexpm", "070e3593cb572e04f2c8470dd0c119bc1817a7a0a7f88229f43cf0345268ec42"}, 28 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 29 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, 
{:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 30 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 31 | "makeup_html": {:hex, :makeup_html, "0.2.0", "9f810da8d43d625ccd3f7ea25997e588fa541d80e0a8c6b895157ad5c7e9ca13", [:mix], [{:makeup, "~> 1.2", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "0856f7beb9a6a642ab1307e06d990fe39f0ba58690d0b8e662aa2e027ba331b2"}, 32 | "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"}, 33 | "merkle_map": {:hex, :merkle_map, "0.2.1", "01a88c87a6b9fb594c67c17ebaf047ee55ffa34e74297aa583ed87148006c4c8", [:mix], [], "hexpm", "fed4d143a5c8166eee4fa2b49564f3c4eace9cb252f0a82c1613bba905b2d04d"}, 34 | "mix_test_watch": {:hex, :mix_test_watch, "1.2.0", "1f9acd9e1104f62f280e30fc2243ae5e6d8ddc2f7f4dc9bceb454b9a41c82b42", [:mix], [{:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "278dc955c20b3fb9a3168b5c2493c2e5cffad133548d307e0a50c7f2cfbf34f6"}, 35 | "mox": {:hex, :mox, "1.2.0", "a2cd96b4b80a3883e3100a221e8adc1b98e4c3a332a8fc434c39526babafd5b3", [:mix], [{:nimble_ownership, "~> 1.0", [hex: :nimble_ownership, repo: "hexpm", optional: false]}], "hexpm", "c7b92b3cc69ee24a7eeeaf944cd7be22013c52fcb580c1f33f50845ec821089a"}, 36 | "nimble_ownership": {:hex, :nimble_ownership, "1.0.1", "f69fae0cdd451b1614364013544e66e4f5d25f36a2056a9698b793305c5aa3a6", [:mix], [], "hexpm", "3825e461025464f519f3f3e4a1f9b68c47dc151369611629ad08b636b73bb22d"}, 37 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", 
"8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 38 | "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, 39 | "protobuf": {:hex, :protobuf, "0.14.1", "9ac0582170df27669ccb2ef6cb0a3d55020d58896edbba330f20d0748881530a", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "39a9d49d346e3ed597e5ae3168a43d9603870fc159419617f584cdf6071f0e25"}, 40 | "ra": {:hex, :ra, "2.16.7", "1582982cc88c2fa4d6afae6dcf3e80e6ab4058a8beeda82a9a374216329b7b29", [:rebar3], [{:aten, "0.6.0", [hex: :aten, repo: "hexpm", optional: false]}, {:gen_batch_server, "0.8.9", [hex: :gen_batch_server, repo: "hexpm", optional: false]}, {:seshat, "0.6.0", [hex: :seshat, repo: "hexpm", optional: false]}], "hexpm", "46f81f5eb98015f4574cef523fa49b3bac3035e9e9b8001d4c2d312ed94b61bd"}, 41 | "seshat": {:hex, :seshat, "0.6.0", "3172eb1d7a2a4f66108cd6933a4e465aff80f84aa90ed83f047b92f636123ccd", [:rebar3], [], "hexpm", "7cef700f92831dd7cae6a6dd223ccc55ac88ecce0631ee9ab0f2b5fb70e79b90"}, 42 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, 43 | "telemetry_poller": {:hex, :telemetry_poller, "1.2.0", "ba82e333215aed9dd2096f93bd1d13ae89d249f82760fcada0850ba33bac154b", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7216e21a6c326eb9aa44328028c34e9fd348fb53667ca837be59d0aa2a0156e8"}, 44 | "telemetry_registry": {:hex, :telemetry_registry, "0.3.2", "701576890320be6428189bff963e865e8f23e0ff3615eade8f78662be0fc003c", [:mix, :rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"e7ed191eb1d115a3034af8e1e35e4e63d5348851d556646d46ca3d1b4e16bab9"}, 45 | "uuidv7": {:hex, :uuidv7, "1.0.0", "659179b2e248b98f96e7e988b882d369c055b6ae7a836237ccca52cd4d0f6988", [:mix], [{:ecto, "~> 3.12", [hex: :ecto, repo: "hexpm", optional: false]}], "hexpm", "0ecd337108456f7d8b1a9a54ef435443d3f8c10a5b685bd866ef9e396b444cbc"}, 46 | } 47 | -------------------------------------------------------------------------------- /system/priv/protos/ex_esdb.proto: -------------------------------------------------------------------------------- 1 | enum OperationResult 2 | { 3 | success = 0; 4 | prepare_timeout = 1; 5 | commit_timeout = 2; 6 | forward_timeout = 3; 7 | wrong_expected_version = 4; 8 | stream_deleted = 5; 9 | invalid_transaction = 6; 10 | access_denied = 7; 11 | } 12 | 13 | message NewEvent { 14 | required bytes event_id = 1; 15 | required string event_type = 2; 16 | required int32 data_content_type = 3; 17 | required int32 metadata_content_type = 4; 18 | required bytes data = 5; 19 | optional bytes metadata = 6; 20 | } 21 | 22 | message EventRecord { 23 | required string event_stream_id = 1; 24 | required int64 event_number = 2; 25 | required bytes event_id = 3; 26 | required string event_type = 4; 27 | required int32 data_content_type = 5; 28 | required int32 metadata_content_type = 6; 29 | required bytes data = 7; 30 | optional bytes metadata = 8; 31 | optional int64 created = 9; 32 | optional int64 created_epoch = 10; 33 | } 34 | 35 | message ResolvedIndexedEvent { 36 | required EventRecord event = 1; 37 | optional EventRecord link = 2; 38 | } 39 | 40 | message ResolvedEvent { 41 | required EventRecord event = 1; 42 | optional EventRecord link = 2; 43 | required int64 commit_position = 3; 44 | required int64 prepare_position = 4; 45 | } 46 | 47 | message WriteEvents { 48 | required string event_stream_id = 1; 49 | required int64 expected_version = 2; 50 | repeated NewEvent events = 3; 51 | required bool require_master = 4; 52 | } 53 | 54 | message 
WriteEventsCompleted { 55 | required OperationResult result = 1; 56 | optional string message = 2; 57 | required int64 first_event_number = 3; 58 | required int64 last_event_number = 4; 59 | optional int64 prepare_position = 5; 60 | optional int64 commit_position = 6; 61 | optional int64 current_version = 7; 62 | } 63 | 64 | message DeleteStream { 65 | required string event_stream_id = 1; 66 | required int64 expected_version = 2; 67 | required bool require_master = 3; 68 | optional bool hard_delete = 4; 69 | } 70 | 71 | message DeleteStreamCompleted { 72 | required OperationResult result = 1; 73 | optional string message = 2; 74 | optional int64 prepare_position = 3; 75 | optional int64 commit_position = 4; 76 | } 77 | 78 | message TransactionStart { 79 | required string event_stream_id = 1; 80 | required int64 expected_version = 2; 81 | required bool require_master = 3; 82 | } 83 | 84 | message TransactionStartCompleted { 85 | required int64 transaction_id = 1; 86 | required OperationResult result = 2; 87 | optional string message = 3; 88 | } 89 | 90 | message TransactionWrite { 91 | required int64 transaction_id = 1; 92 | repeated NewEvent events = 2; 93 | required bool require_master = 3; 94 | } 95 | 96 | message TransactionWriteCompleted { 97 | required int64 transaction_id = 1; 98 | required OperationResult result = 2; 99 | optional string message = 3; 100 | } 101 | 102 | message TransactionCommit { 103 | required int64 transaction_id = 1; 104 | required bool require_master = 2; 105 | } 106 | 107 | message TransactionCommitCompleted { 108 | required int64 transaction_id = 1; 109 | required OperationResult result = 2; 110 | optional string message = 3; 111 | required int64 first_event_number = 4; 112 | required int64 last_event_number = 5; 113 | optional int64 prepare_position = 6; 114 | optional int64 commit_position = 7; 115 | } 116 | 117 | message ReadEvent { 118 | required string event_stream_id = 1; 119 | required int64 event_number = 2; 120 | required 
bool resolve_link_tos = 3; 121 | required bool require_master = 4; 122 | } 123 | 124 | message ReadEventCompleted { 125 | 126 | enum ReadEventResult { 127 | success = 0; 128 | not_found = 1; 129 | no_stream = 2; 130 | stream_deleted = 3; 131 | error = 4; 132 | access_denied = 5; 133 | } 134 | 135 | required ReadEventResult result = 1; 136 | required ResolvedIndexedEvent event = 2; 137 | 138 | optional string io_error = 3; 139 | } 140 | 141 | message ReadStreamEvents { 142 | required string event_stream_id = 1; 143 | required int64 from_event_number = 2; 144 | required int32 max_count = 3; 145 | required bool resolve_link_tos = 4; 146 | required bool require_master = 5; 147 | } 148 | 149 | message ReadStreamEventsBackward { 150 | required string event_stream_id = 1; 151 | required int64 from_event_number = 2; 152 | required int32 max_count = 3; 153 | required bool resolve_link_tos = 4; 154 | required bool require_master = 5; 155 | } 156 | 157 | message ReadStreamEventsCompleted { 158 | 159 | enum ReadStreamResult { 160 | success = 0; 161 | no_stream = 1; 162 | stream_deleted = 2; 163 | not_modified = 3; 164 | error = 4; 165 | access_denied = 5; 166 | } 167 | 168 | repeated ResolvedIndexedEvent events = 1; 169 | required ReadStreamResult result = 2; 170 | required int64 next_event_number = 3; 171 | required int64 last_event_number = 4; 172 | required bool is_end_of_stream = 5; 173 | required int64 last_commit_position = 6; 174 | 175 | optional string io_error = 7; 176 | } 177 | 178 | message ReadAllEvents { 179 | required int64 commit_position = 1; 180 | required int64 prepare_position = 2; 181 | required int32 max_count = 3; 182 | required bool resolve_link_tos = 4; 183 | required bool require_master = 5; 184 | } 185 | 186 | message ReadAllEventsCompleted { 187 | 188 | enum ReadAllResult { 189 | success = 0; 190 | not_modified = 1; 191 | error = 2; 192 | access_denied = 3; 193 | } 194 | 195 | required int64 commit_position = 1; 196 | required int64 prepare_position 
= 2; 197 | repeated ResolvedEvent events = 3; 198 | required int64 next_commit_position = 4; 199 | required int64 next_prepare_position = 5; 200 | 201 | optional ReadAllResult result = 6 [default = success]; 202 | optional string io_error = 7; 203 | } 204 | 205 | message CreatePersistentSubscription { 206 | required string subscription_group_name = 1; 207 | required string event_stream_id = 2; 208 | required bool resolve_link_tos = 3; 209 | required int64 start_from = 4; 210 | required int32 message_timeout_milliseconds = 5; 211 | required bool record_statistics = 6; 212 | required int32 live_buffer_size = 7; 213 | required int32 read_batch_size = 8; 214 | required int32 buffer_size = 9; 215 | required int32 max_retry_count = 10; 216 | required bool prefer_round_robin = 11; 217 | required int32 checkpoint_after_time = 12; 218 | required int32 checkpoint_max_count = 13; 219 | required int32 checkpoint_min_count = 14; 220 | required int32 subscriber_max_count = 15; 221 | optional string named_consumer_strategy = 16; 222 | } 223 | 224 | message DeletePersistentSubscription { 225 | required string subscription_group_name = 1; 226 | required string event_stream_id = 2; 227 | } 228 | 229 | message UpdatePersistentSubscription { 230 | required string subscription_group_name = 1; 231 | required string event_stream_id = 2; 232 | required bool resolve_link_tos = 3; 233 | required int64 start_from = 4; 234 | required int32 message_timeout_milliseconds = 5; 235 | required bool record_statistics = 6; 236 | required int32 live_buffer_size = 7; 237 | required int32 read_batch_size = 8; 238 | required int32 buffer_size = 9; 239 | required int32 max_retry_count = 10; 240 | required bool prefer_round_robin = 11; 241 | required int32 checkpoint_after_time = 12; 242 | required int32 checkpoint_max_count = 13; 243 | required int32 checkpoint_min_count = 14; 244 | required int32 subscriber_max_count = 15; 245 | optional string named_consumer_strategy = 16; 246 | } 247 | 248 | message 
UpdatePersistentSubscriptionCompleted { 249 | enum UpdatePersistentSubscriptionResult { 250 | success = 0; 251 | does_not_exist = 1; 252 | fail = 2; 253 | access_denied=3; 254 | } 255 | required UpdatePersistentSubscriptionResult result = 1 [default = success]; 256 | optional string reason = 2; 257 | } 258 | 259 | message CreatePersistentSubscriptionCompleted { 260 | enum CreatePersistentSubscriptionResult { 261 | success = 0; 262 | already_exists = 1; 263 | fail = 2; 264 | access_denied=3; 265 | } 266 | required CreatePersistentSubscriptionResult result = 1 [default = success]; 267 | optional string reason = 2; 268 | } 269 | 270 | message DeletePersistentSubscriptionCompleted { 271 | enum DeletePersistentSubscriptionResult { 272 | success = 0; 273 | does_not_exist = 1; 274 | fail = 2; 275 | access_denied = 3; 276 | } 277 | required DeletePersistentSubscriptionResult result = 1 [default = success]; 278 | optional string reason = 2; 279 | } 280 | 281 | message ConnectToPersistentSubscription { 282 | required string subscription_id = 1; 283 | required string event_stream_id = 2; 284 | required int32 allowed_in_flight_messages = 3; 285 | 286 | } 287 | 288 | message PersistentSubscriptionAckEvents { 289 | required string subscription_id = 1; 290 | repeated bytes processed_event_ids = 2; 291 | } 292 | 293 | message PersistentSubscriptionNakEvents { 294 | enum NakAction { 295 | unknown = 0; 296 | park = 1; 297 | retry = 2; 298 | skip = 3; 299 | stop = 4; 300 | } 301 | 302 | required string subscription_id = 1; 303 | repeated bytes processed_event_ids = 2; 304 | optional string message = 3; 305 | required NakAction action = 4 [default = unknown]; 306 | } 307 | 308 | message PersistentSubscriptionConfirmation { 309 | required int64 last_commit_position = 1; 310 | required string subscription_id = 2; 311 | optional int64 last_event_number = 3; 312 | } 313 | 314 | message PersistentSubscriptionStreamEventAppeared { 315 | required ResolvedIndexedEvent event = 1; 316 | 
optional int32 retryCount = 2; 317 | } 318 | 319 | message SubscribeToStream { 320 | required string event_stream_id = 1; 321 | required bool resolve_link_tos = 2; 322 | } 323 | 324 | message SubscriptionConfirmation { 325 | required int64 last_commit_position = 1; 326 | optional int64 last_event_number = 2; 327 | } 328 | 329 | message StreamEventAppeared { 330 | required ResolvedEvent event = 1; 331 | } 332 | 333 | message UnsubscribeFromStream { 334 | } 335 | 336 | message SubscriptionDropped { 337 | 338 | enum SubscriptionDropReason { 339 | unsubscribed = 0; 340 | access_denied = 1; 341 | not_found=2; 342 | persistent_subscription_deleted=3; 343 | subscriber_max_count_reached=4; 344 | } 345 | 346 | optional SubscriptionDropReason reason = 1 [default = unsubscribed]; 347 | } 348 | 349 | message NotHandled { 350 | 351 | enum NotHandledReason { 352 | not_ready = 0; 353 | too_busy = 1; 354 | not_master = 2; 355 | } 356 | 357 | required NotHandledReason reason = 1; 358 | optional bytes additional_info = 2; 359 | 360 | message MasterInfo { 361 | required string external_tcp_address = 1; 362 | required int32 external_tcp_port = 2; 363 | required string external_http_address = 3; 364 | required int32 external_http_port = 4; 365 | optional string external_secure_tcp_address = 5; 366 | optional int32 external_secure_tcp_port = 6; 367 | } 368 | } 369 | 370 | message ScavengeDatabase { 371 | } 372 | 373 | message ScavengeDatabaseCompleted { 374 | 375 | enum ScavengeResult { 376 | success = 0; 377 | in_progress = 1; 378 | failed = 2; 379 | } 380 | 381 | required ScavengeResult result = 1; 382 | optional string error = 2; 383 | required int32 total_time_ms = 3; 384 | required int64 total_space_saved = 4; 385 | } 386 | 387 | message IdentifyClient { 388 | required int32 version = 1; 389 | optional string connection_name = 2; 390 | } 391 | 392 | message ClientIdentified { 393 | } 394 | -------------------------------------------------------------------------------- 
/system/protoc.exs: --------------------------------------------------------------------------------
defmodule Protoc do
  @moduledoc "One-off task that regenerates Elixir modules from the .proto definitions."
  use Mix.Task

  # Runs protoc over every .proto file under priv/protos and writes the
  # generated modules to lib/generated.
  def run(_args) do
    File.mkdir_p!("lib/generated")

    # BUG FIX: System.cmd/3 performs no shell glob expansion, so the literal
    # string "priv/protos/*.proto" was handed to protoc and the command could
    # never succeed.  Expand the pattern ourselves before invoking protoc.
    protos = Path.wildcard("priv/protos/*.proto")

    {result, exit_code} =
      System.cmd(
        "protoc",
        ["--proto_path=priv/protos", "--elixir_out=lib/generated" | protos],
        stderr_to_stdout: true
      )

    IO.puts(result)

    # Surface failures instead of silently discarding the exit status.
    if exit_code != 0 do
      Mix.raise("protoc exited with status #{exit_code}")
    end
  end
end
-------------------------------------------------------------------------------- /system/pub2hex.sh: --------------------------------------------------------------------------------
#! /bin/bash

rm -rf ./doc

mix hex.build

mix docs

mix hex.user auth

mix hex.publish
-------------------------------------------------------------------------------- /system/rm-cont.sh: --------------------------------------------------------------------------------
#! /bin/bash
docker container stop $(docker container list -qa)
docker container rm -f $(docker container list -qa)
-------------------------------------------------------------------------------- /system/run-ex-esdb.sh: --------------------------------------------------------------------------------
#! /bin/bash

echo "PATH=${PATH}"
echo "EX_ESDB_SEED_NODES=${EX_ESDB_SEED_NODES}"
# NOTE(review): this echoes the Erlang cookie (a shared secret) into container
# logs — confirm this is acceptable outside of local development.
echo "EX_ESDB_COOKIE=${EX_ESDB_COOKIE}"

echo "stored COOKIE:"
cat ~/.erlang.cookie

sleep 10

exec /system/bin/ex_esdb start
-------------------------------------------------------------------------------- /system/src/emitter_group.erl: --------------------------------------------------------------------------------
-module(emitter_group).

-export([join/3, members/2, broadcast/3, group_key/2, topic/2, emitter_name/2,
         emitter_name/3, persist_emitters/3]).

-spec join(Store :: atom(), Id :: string(), PidOrPids :: pid() | [pid()]) -> ok.
%% Joins one or more emitter processes to the pg group for Store/Id.
join(Store, Id, PidOrPids) when is_atom(Store) ->
    Group = group_key(Store, Id),
    logger:warning("JOINING ~p", [Group]),
    ok = pg:join('Elixir.Phoenix.PubSub', Group, PidOrPids),
    ok.

%% Returns the pids currently joined to the Store/Id emitter group.
-spec members(Store :: atom(), Id :: string()) -> [pid()].
members(Store, Id) when is_atom(Store) ->
    Group = group_key(Store, Id),
    pg:get_members('Elixir.Phoenix.PubSub', Group).

%% Fans Event out to every emitter process in the Store/Id group.
%%
%% BUG FIX: the previous implementation piped the member *list* through a
%% random_emitter/1 helper that called tuple_size/1, which crashed with
%% badarg on any non-empty group (and its {error, {no_such_group, _}} case
%% clause could never match a pid list).  The subsequent lists:foreach/2
%% over all members shows the intent was a fan-out, so we iterate the
%% member list directly and treat the empty group as the error case.
-spec broadcast(Store :: atom(), Id :: string(), Event :: map()) -> ok | {error, term()}.
broadcast(Store, Id, Event) when is_atom(Store) ->
    Topic = topic(Store, Id),
    case members(Store, Id) of
        [] ->
            logger:error("NO_GROUP [~p]~n", [Topic]),
            {error, no_such_group};
        Members ->
            lists:foreach(fun(Pid) ->
                             Message =
                                 if node(Pid) =:= node() ->
                                        forward_to_local_msg(Topic, Event);
                                    true ->
                                        broadcast_msg(Topic, Event)
                                 end,
                             Pid ! Message
                          end,
                          Members)
    end.

%% Message wrapper for an emitter running on the local node.
-spec forward_to_local_msg(Topic :: binary(), Event :: map()) -> tuple().
forward_to_local_msg(Topic, Event) ->
    {forward_to_local, Topic, Event}.

%% Message wrapper for an emitter running on a remote node.
-spec broadcast_msg(Topic :: binary(), Event :: map()) -> tuple().
broadcast_msg(Topic, Event) ->
    {broadcast, Topic, Event}.

%% Key under which the group is registered in pg; also used as the
%% persistent_term key by persist_emitters/3.
group_key(Store, Id) ->
    {Store, Id, emitters}.

%% Topic string "<store>:<id>"; the "$all" pseudo-stream keeps its literal name.
-spec topic(Store :: atom(), Id :: string()) -> binary().
topic(Store, <<"$all">>) ->
    iolist_to_binary(io_lib:format("~s:$all", [Store]));
topic(Store, Id) ->
    iolist_to_binary(io_lib:format("~s:~s", [Store, Id])).

-spec emitter_name(Store :: atom(), Id :: string()) -> atom().
%% Name for the base emitter of Store/Id: '<store>_<id>_emitter'.
emitter_name(Store, Id) ->
    Formatted = io_lib:format("~s_~s_emitter", [Store, Id]),
    list_to_atom(lists:flatten(Formatted)).

%% Name for the Nth pooled emitter: '<store>_<id>_emitter_<N>'.
-spec emitter_name(Store :: atom(), Id :: string(), Number :: integer()) -> atom().
emitter_name(Store, Id, N) ->
    Formatted = io_lib:format("~s_~s_emitter_~p", [Store, Id, N]),
    list_to_atom(lists:flatten(Formatted)).

%% Builds the pool of emitter names (the base name plus PoolSize numbered
%% ones), caches them as a tuple in persistent_term under group_key/2, and
%% returns the full list.
-spec persist_emitters(Store :: atom(), Id :: string(), PoolSize :: integer()) -> list().
persist_emitters(Store, Id, PoolSize) ->
    Pool = [emitter_name(Store, Id, N) || N <- lists:seq(1, PoolSize)],
    AllEmitters = [emitter_name(Store, Id) | Pool],
    persistent_term:put(group_key(Store, Id), list_to_tuple(AllEmitters)),
    AllEmitters.

%% Reads the cached emitter names back out of persistent_term as a list.
-spec retrieve_emitters(Store :: atom(), Id :: string()) -> list().
retrieve_emitters(Store, Id) ->
    tuple_to_list(persistent_term:get(group_key(Store, Id))).
-------------------------------------------------------------------------------- /system/src/erts_v.erl: --------------------------------------------------------------------------------
-module(erts_v).

-export([parse_transform/2]).

%% Debug parse transform: prints the module AST, then returns it unchanged.
%%
%% BUG FIX: the previous version returned the result of io:format/2 (the
%% atom 'ok') instead of the forms list, which violates the parse_transform
%% contract and would break compilation of any module using this transform.
parse_transform(AST, _Opts) ->
    io:format("~p~n", [AST]),
    AST.
-------------------------------------------------------------------------------- /system/src/ex_esdb_filter.erl: --------------------------------------------------------------------------------
-module(ex_esdb_filter).

-export([by_stream/1, by_event_type/1, by_event_pattern/1]).

-include_lib("../deps/khepri/include/khepri.hrl").

%% Conditions matching any data-carrying node whose payload matches Pattern.
-spec event_pattern_conditions(EventPattern :: map()) -> khepri_condition:conditions().
event_pattern_conditions(Pattern) ->
    [#if_path_matches{regex = any},
     #if_has_data{has_data = true},
     #if_data_matches{pattern = Pattern}].
12 | 13 | -spec all_events() -> khepri_condition:conditions(). 14 | all_events() -> 15 | [#if_path_matches{regex = any}, #if_has_data{has_data = true}]. 16 | 17 | -spec by_stream(Stream :: string()) -> khepri:filter() | {error, term()}. 18 | by_stream(<<"$all">>) -> 19 | khepri_evf:tree([streams, 20 | #if_path_matches{regex = any}, 21 | #if_all{conditions = all_events()}], 22 | #{on_actions => [create]}); 23 | by_stream(Stream) -> 24 | List = binary_to_list(Stream), 25 | case string:chr(List, $$) of 26 | 0 -> 27 | {error, invalid_stream}; 28 | DollarPos -> 29 | StreamUuid = string:substr(List, DollarPos + 1), 30 | khepri_evf:tree([streams, list_to_binary(StreamUuid), #if_all{conditions = all_events()}], 31 | #{on_actions => [create]}) 32 | end. 33 | 34 | -spec by_event_type(EventType :: string()) -> khepri:filter(). 35 | by_event_type(EventType) -> 36 | by_event_pattern(#{event_type => EventType}). 37 | 38 | -spec by_event_pattern(EventPattern :: map()) -> khepri:filter(). 39 | by_event_pattern(EventPattern) -> 40 | khepri_evf:tree([streams, 41 | #if_path_matches{regex = any}, 42 | #if_all{conditions = event_pattern_conditions(EventPattern)}], 43 | #{on_actions => [create]}). 44 | -------------------------------------------------------------------------------- /system/src/ex_esdb_triggers.erl: -------------------------------------------------------------------------------- 1 | -module(ex_esdb_triggers). 2 | 3 | -export([setup_emitters/4, get_on_new_event/2]). 4 | 5 | -spec get_on_new_event(Store :: khepri:store(), Id :: string()) -> ok | {error, term()}. 6 | get_on_new_event(Store, Id) when is_atom(Store) -> 7 | Topic = emitter_group:topic(Store, Id), 8 | khepri:get(Store, [procs, on_new_event, Topic]). 9 | 10 | -spec put_on_new_event(Store :: khepri:store(), Id :: string()) -> ok | {error, term()}. 
11 | put_on_new_event(Store, Id) when is_atom(Store) -> 12 | Topic = emitter_group:topic(Store, Id), 13 | case khepri:exists(Store, [procs, on_new_event, Topic]) of 14 | true -> 15 | ok; 16 | false -> 17 | ok = 18 | khepri:put(Store, 19 | [procs, on_new_event, Topic], 20 | fun(Props) -> 21 | case maps:get(path, Props, undefined) of 22 | undefined -> ok; 23 | Path -> 24 | case get_event(Store, Path) of 25 | {ok, undefined} -> ok; 26 | {ok, Event} -> 27 | emitter_group:broadcast(Store, Id, Event), 28 | ok; 29 | {error, Reason} -> 30 | io:format("Broadcasting failed for path ~p to ~p ~n Reason: ~p~n", 31 | [Path, Topic, Reason]), 32 | ok 33 | end 34 | end 35 | end), 36 | ok 37 | end. 38 | 39 | -spec register_on_new_event(Store :: khepri:store(), 40 | Id :: string(), 41 | Filter :: khepri:filter()) -> 42 | ok | {error, term()}. 43 | register_on_new_event(Store, Id, Filter) -> 44 | Topic = emitter_group:topic(Store, Id), 45 | PropOpts = 46 | #{expect_specific_node => false, 47 | props_to_return => 48 | [payload, payload_version, child_list_version, child_list_length, child_names], 49 | include_root_props => true}, 50 | logger:warning("Registering [procs, on_new_event, ~s] FILTER: ~p~n", [Topic, Filter]), 51 | khepri:register_trigger(Store, Id, Filter, [procs, on_new_event, Topic], PropOpts). 52 | 53 | -spec setup_new_event_trigger(Store :: khepri:store(), 54 | Id :: string(), 55 | Filter :: khepri:filter()) -> 56 | ok | {error, term()}. 57 | setup_new_event_trigger(Store, Id, Filter) -> 58 | ok = put_on_new_event(Store, Id), 59 | ok = register_on_new_event(Store, Id, Filter), 60 | ok. 61 | 62 | -spec setup_emitters(Store :: khepri:store(), 63 | Id :: string(), 64 | Filter :: khepri:filter(), 65 | PoolSize :: integer()) -> 66 | list(). 67 | setup_emitters(Store, Id, Filter, PoolSize) -> 68 | ok = setup_new_event_trigger(Store, Id, Filter), 69 | Emitters = emitter_group:persist_emitters(Store, Id, PoolSize), 70 | Emitters. 
71 | 72 | -spec get_event(Store :: khepri:store(), Path :: khepri_path:path()) -> 73 | {ok, khepri:props()} | {error, term()}. 74 | get_event(Store, Path) -> 75 | khepri:get(Store, Path). 76 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/has_all_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.HasAllTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 4 | """ 5 | use ExUnit.Case 6 | 7 | describe "has_all/2" do 8 | @tag :bc_bitflags 9 | test "GIVEN: original_state is `0b00100100` (integer: 36) 10 | WHEN the flags to be checked are `0b01000000` (integer: 64) and `0b10000000` (integer: 128) 11 | THEN the result is `true`" do 12 | assert BitFlags.has_all(36, [64, 128]) 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/has_any_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.HasAnyTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 4 | """ 5 | use ExUnit.Case 6 | 7 | describe "has_any/2" do 8 | @tag :bc_bitflags 9 | test "GIVEN: original_state is `0b00100100` (integer: 36) 10 | WHEN the flags to be checked are `0b01000000` (integer: 64) and `0b10000000` (integer: 128) 11 | THEN the result is `true`" do 12 | assert BitFlags.has_any(36, [64, 128]) 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/set_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.SetTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 
4 | """ 5 | use ExUnit.Case 6 | 7 | describe "set/2" do 8 | @tag :bc_bitflags 9 | test "GIVEN: original_state is `0b00100100` (integer: 36) 10 | WHEN the flag to be set is `0b01000000` (integer: 64) 11 | THEN the result is `0b01100100` (integer: 100)" do 12 | assert BitFlags.set(36, 64) == 100 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/toggle_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.ToggleTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 4 | """ 5 | use ExUnit.Case 6 | 7 | describe "toggle/2" do 8 | @tag :bc_bitflags 9 | test "GIVEN: original_state is `0b00100100` (integer: 36) 10 | WHEN the flag to be toggled is `0b01000000` (integer: 64) 11 | THEN the result is `0b00100100` (integer: 36)" do 12 | assert BitFlags.toggle(36, 64) == 36 13 | end 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/unset_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.UnsetTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 
4 | """ 5 | use ExUnit.Case 6 | 7 | alias BeamCampus.BitFlags, as: BitFlags 8 | 9 | describe "unset/2" do 10 | @tag :bc_bitflags 11 | test "GIVEN: original_state is `0b00100100` (integer: 36) 12 | WHEN the flag to be unset is `0b01000000` (integer: 64) 13 | THEN the result is `0b00010100` (integer: 36)" do 14 | assert BitFlags.unset(36, 64) == 36 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags/untoggle_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlags.UntoggleTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 4 | """ 5 | use ExUnit.Case 6 | alias BeamCampus.BitFlags, as: BitFlags 7 | 8 | describe "untoggle/2" do 9 | @tag :bc_bitflags 10 | test "GIVEN: original_state is `0b00100100` (integer: 36) 11 | WHEN the flag to be untoggled is `0b01000000` (integer: 64) 12 | THEN the result is `0b00100100` (integer: 36)" do 13 | assert BitFlags.untoggle(36, 64) == 36 14 | end 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /system/test/beam_campus/bit_flags_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.BitFlagsTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.BitFlags module. 
4 | """ 5 | use ExUnit.Case 6 | 7 | @tag :bc_bitflags 8 | @tag :bc_docs 9 | doctest BeamCampus.BitFlags 10 | 11 | describe "set/2" do 12 | @tag :bc_bitflags 13 | test "GIVEN: original_state is `0b00100100` (integer: 36) 14 | WHEN the flag to be set is `0b01000000` (integer: 64) 15 | THEN the result is `0b01100100` (integer: 100)" do 16 | assert BitFlags.set(36, 64) == 100 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /system/test/beam_campus/color_funcs_test.exs: -------------------------------------------------------------------------------- 1 | defmodule BeamCampus.ColorFuncsTest do 2 | @moduledoc """ 3 | This module contains tests for the BeamCampus.ColorFuncs module. 4 | """ 5 | use ExUnit.Case 6 | @tag :bc_color_funcs 7 | @tag :bc_docs 8 | doctest BeamCampus.ColorFuncs 9 | 10 | describe "black_on_white/0" do 11 | @tag :bc_color_funcs 12 | test "GIVEN we start with a black background and a white foreground 13 | WHEN we call color_0_on_0 14 | THEN the result is a black foreground on a white background" do 15 | assert BeamCampus.ColorFuncs.black_on_white() == "\e[30m\e[47m" 16 | end 17 | 18 | @tag :bc_color_funcs 19 | test "GIVEN we start with a black background and a white foreground 20 | WHEN we call red_on_black() 21 | THEN the result is a red foreground on a black background" do 22 | assert BeamCampus.ColorFuncs.red_on_black() == "\e[31m\e[40m" 23 | end 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/ack_event_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.AckEventTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | doctest ExESDB.Commanded.Adapter 7 | 8 | describe "ack_event/3" do 9 | test "logs a warning and returns :ok" do 10 | # Arrange 11 | meta = %{some: "meta"} 12 | pid = self() 13 | event = 
%{event_id: "123", event_type: "SomeEvent", data: %{}} 14 | 15 | # Act 16 | result = Adapter.ack_event(meta, pid, event) 17 | 18 | # Assert 19 | assert result == :ok 20 | assert_receive {:log, :warning, "ack_event/3 is not implemented for " <> _} 21 | end 22 | end 23 | 24 | # Helper function to capture log messages 25 | setup do 26 | :meck.new(Logger, [:passthrough]) 27 | 28 | :meck.expect(Logger, :warning, fn message -> 29 | send(self(), {:log, :warning, message}) 30 | :ok 31 | end) 32 | 33 | on_exit(fn -> 34 | :meck.unload(Logger) 35 | end) 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/append_to_stream_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.AppendToStreamTest do 2 | @moduledoc false 3 | use ExUnit.Case 4 | 5 | alias ExESDB.Commanded.Adapter 6 | 7 | describe "append_to_stream/5" do 8 | @tag :ex_esdb_commanded 9 | test "logs a warning and returns {:error, :not_implemented}" do 10 | # Arrange 11 | adapter_meta = %{some: "meta"} 12 | stream_uuid = "some-stream" 13 | expected_version = 1 14 | events = [%{event_id: "123", event_type: "SomeEvent", data: %{}}] 15 | opts = [] 16 | 17 | # Act 18 | result = Adapter.append_to_stream(adapter_meta, stream_uuid, expected_version, events, opts) 19 | 20 | # Assert 21 | assert result == {:error, :not_implemented} 22 | assert_receive {:log, :warning, "append_to_stream/5 is not implemented for " <> _} 23 | end 24 | end 25 | 26 | # Helper function to capture log messages 27 | setup do 28 | :meck.new(Logger, [:passthrough]) 29 | 30 | :meck.expect(Logger, :warning, fn message -> 31 | send(self(), {:log, :warning, message}) 32 | :ok 33 | end) 34 | 35 | on_exit(fn -> 36 | :meck.unload(Logger) 37 | end) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/child_spec_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.ChildSpecTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "child_spec/2" do 7 | test "logs a warning and returns {:ok, [nil]}" do 8 | # Arrange 9 | application = :some_app 10 | opts = [] 11 | 12 | # Act 13 | result = Adapter.child_spec(application, opts) 14 | 15 | # Assert 16 | assert result == {:ok, [nil]} 17 | assert_receive {:log, :warning, "child_spec/2 is not implemented for " <> _} 18 | end 19 | end 20 | 21 | # Helper function to capture log messages 22 | setup do 23 | :meck.new(Logger, [:passthrough]) 24 | 25 | :meck.expect(Logger, :warning, fn message -> 26 | send(self(), {:log, :warning, message}) 27 | :ok 28 | end) 29 | 30 | on_exit(fn -> 31 | :meck.unload(Logger) 32 | end) 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/delete_snapshot_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.DeleteSnapshotTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "delete_snapshot/2" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | source_uuid = "some-source" 11 | 12 | # Act 13 | result = Adapter.delete_snapshot(adapter_meta, source_uuid) 14 | 15 | # Assert 16 | assert result == {:error, :not_implemented} 17 | assert_receive {:log, :warning, "delete_snapshot/4 is not implemented for " <> _} 18 | end 19 | end 20 | 21 | # Helper function to capture log messages 22 | setup do 23 | :meck.new(Logger, [:passthrough]) 24 | 25 | :meck.expect(Logger, :warning, fn message -> 26 | send(self(), {:log, :warning, message}) 27 | :ok 28 | end) 29 | 30 | on_exit(fn -> 31 | :meck.unload(Logger) 32 | end) 33 | end 34 | end 35 | 
-------------------------------------------------------------------------------- /system/test/commanded/adapter/delete_subscription_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.DeleteSubscriptionTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "delete_subscription/3" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | arg2 = "some-arg" 11 | subscription_name = "some-subscription" 12 | 13 | # Act 14 | result = Adapter.delete_subscription(adapter_meta, arg2, subscription_name) 15 | 16 | # Assert 17 | assert result == {:error, :not_implemented} 18 | assert_receive {:log, :warning, "delete_subscription/4 is not implemented for " <> _} 19 | end 20 | end 21 | 22 | # Helper function to capture log messages 23 | setup do 24 | :meck.new(Logger, [:passthrough]) 25 | 26 | :meck.expect(Logger, :warning, fn message -> 27 | send(self(), {:log, :warning, message}) 28 | :ok 29 | end) 30 | 31 | on_exit(fn -> 32 | :meck.unload(Logger) 33 | end) 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/read_snapshot_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.ReadSnapshotTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "read_snapshot/2" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | stream_uuid = "some-stream" 11 | 12 | # Act 13 | result = Adapter.read_snapshot(adapter_meta, stream_uuid) 14 | 15 | # Assert 16 | assert result == {:error, :not_implemented} 17 | assert_receive {:log, :warning, "read_snapshot/5 is not implemented for " <> _} 18 | end 19 | end 20 | 21 | # Helper function to capture log messages 22 | setup do 23 
| :meck.new(Logger, [:passthrough]) 24 | 25 | :meck.expect(Logger, :warning, fn message -> 26 | send(self(), {:log, :warning, message}) 27 | :ok 28 | end) 29 | 30 | on_exit(fn -> 31 | :meck.unload(Logger) 32 | end) 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/record_snapshot_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.RecordSnapshotTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "record_snapshot/2" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | snapshot_data = %{some: "data"} 11 | 12 | # Act 13 | result = Adapter.record_snapshot(adapter_meta, snapshot_data) 14 | 15 | # Assert 16 | assert result == {:error, :not_implemented} 17 | assert_receive {:log, :warning, "record_snapshot/3 is not implemented for " <> _} 18 | end 19 | end 20 | 21 | # Helper function to capture log messages 22 | setup do 23 | :meck.new(Logger, [:passthrough]) 24 | 25 | :meck.expect(Logger, :warning, fn message -> 26 | send(self(), {:log, :warning, message}) 27 | :ok 28 | end) 29 | 30 | on_exit(fn -> 31 | :meck.unload(Logger) 32 | end) 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/stream_forward_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.StreamForwardTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "stream_forward/4" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | stream_uuid = "some-stream" 11 | start_version = 0 12 | read_batch_size = 100 13 | 14 | # Act 15 | result = Adapter.stream_forward(adapter_meta, stream_uuid, 
start_version, read_batch_size) 16 | 17 | # Assert 18 | assert result == {:error, :not_implemented} 19 | assert_receive {:log, :warning, "stream_forward/5 is not implemented for " <> _} 20 | end 21 | end 22 | 23 | # Helper function to capture log messages 24 | setup do 25 | :meck.new(Logger, [:passthrough]) 26 | 27 | :meck.expect(Logger, :warning, fn message -> 28 | send(self(), {:log, :warning, message}) 29 | :ok 30 | end) 31 | 32 | on_exit(fn -> 33 | :meck.unload(Logger) 34 | end) 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/subscribe_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.SubscribeTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "subscribe/2" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # Arrange 9 | adapter_meta = %{some: "meta"} 10 | arg2 = "some-arg" 11 | 12 | # Act 13 | result = Adapter.subscribe(adapter_meta, arg2) 14 | 15 | # Assert 16 | assert result == {:error, :not_implemented} 17 | assert_receive {:log, :warning, "subscribe/2 is not implemented for " <> _} 18 | end 19 | end 20 | 21 | # Helper function to capture log messages 22 | setup do 23 | :meck.new(Logger, [:passthrough]) 24 | 25 | :meck.expect(Logger, :warning, fn message -> 26 | send(self(), {:log, :warning, message}) 27 | :ok 28 | end) 29 | 30 | on_exit(fn -> 31 | :meck.unload(Logger) 32 | end) 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/subscribe_to_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.SubscribeToTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | describe "subscribe_to/6" do 7 | test "logs a warning and returns {:error, :not_implemented}" do 8 | # 
Arrange 9 | adapter_meta = %{some: "meta"} 10 | arg2 = "some-arg" 11 | subscription_name = "some-subscription" 12 | subscriber = self() 13 | start_from = :origin 14 | opts = [] 15 | 16 | # Act 17 | result = 18 | Adapter.subscribe_to(adapter_meta, arg2, subscription_name, subscriber, start_from, opts) 19 | 20 | # Assert 21 | assert result == {:error, :not_implemented} 22 | assert_receive {:log, :warning, "subscribe_to/7 is not implemented for " <> _} 23 | end 24 | end 25 | 26 | # Helper function to capture log messages 27 | setup do 28 | :meck.new(Logger, [:passthrough]) 29 | 30 | :meck.expect(Logger, :warning, fn message -> 31 | send(self(), {:log, :warning, message}) 32 | :ok 33 | end) 34 | 35 | on_exit(fn -> 36 | :meck.unload(Logger) 37 | end) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /system/test/commanded/adapter/unsubscribe_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.Adapter.UnsubscribeTest do 2 | @moduledoc false 3 | use ExUnit.Case 4 | 5 | alias ExESDB.Commanded.Adapter 6 | 7 | doctest ExESDB.Commanded.Adapter 8 | 9 | describe "unsubscribe/2" do 10 | @tag :ex_esdb_commanded 11 | test "logs a warning and returns {:error, :not_implemented}" do 12 | # Arrange 13 | adapter_meta = %{some: "meta"} 14 | subscription_name = "some-subscription" 15 | 16 | # Act 17 | result = Adapter.unsubscribe(adapter_meta, subscription_name) 18 | 19 | # Assert 20 | assert result == {:error, :not_implemented} 21 | assert_receive {:log, :warning, "unsubscribe/3 is not implemented for " <> _} 22 | end 23 | end 24 | 25 | # Helper function to capture log messages 26 | setup do 27 | :meck.new(Logger, [:passthrough]) 28 | 29 | :meck.expect(Logger, :warning, fn message -> 30 | send(self(), {:log, :warning, message}) 31 | :ok 32 | end) 33 | 34 | on_exit(fn -> 35 | :meck.unload(Logger) 36 | end) 37 | end 38 | end 39 | 
-------------------------------------------------------------------------------- /system/test/commanded/adapter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.AdapterTest do 2 | use ExUnit.Case 3 | 4 | alias ExESDB.Commanded.Adapter 5 | 6 | doctest ExESDB.Commanded.Adapter 7 | 8 | describe "ack_event/3" do 9 | test "logs a warning and returns :ok" do 10 | # Arrange 11 | meta = %{some: "meta"} 12 | pid = self() 13 | event = %{event_id: "123", event_type: "SomeEvent", data: %{}} 14 | 15 | # Act 16 | result = Adapter.ack_event(meta, pid, event) 17 | 18 | # Assert 19 | assert result == :ok 20 | assert_receive {:log, :warning, "ack_event/3 is not implemented for " <> _} 21 | end 22 | end 23 | 24 | describe "append_to_stream/5" do 25 | test "logs a warning and returns {:error, :not_implemented}" do 26 | # Arrange 27 | adapter_meta = %{some: "meta"} 28 | stream_uuid = "some-stream" 29 | expected_version = 1 30 | events = [%{event_id: "123", event_type: "SomeEvent", data: %{}}] 31 | opts = [] 32 | 33 | # Act 34 | result = Adapter.append_to_stream(adapter_meta, stream_uuid, expected_version, events, opts) 35 | 36 | # Assert 37 | assert result == {:error, :not_implemented} 38 | assert_receive {:log, :warning, "append_to_stream/5 is not implemented for " <> _} 39 | end 40 | end 41 | 42 | describe "child_spec/2" do 43 | test "logs a warning and returns {:ok, [nil]}" do 44 | # Arrange 45 | application = :some_app 46 | opts = [] 47 | 48 | # Act 49 | result = Adapter.child_spec(application, opts) 50 | 51 | # Assert 52 | assert result == {:ok, [nil]} 53 | assert_receive {:log, :warning, "child_spec/2 is not implemented for " <> _} 54 | end 55 | end 56 | 57 | describe "delete_snapshot/2" do 58 | test "logs a warning and returns {:error, :not_implemented}" do 59 | # Arrange 60 | adapter_meta = %{some: "meta"} 61 | source_uuid = "some-source" 62 | 63 | # Act 64 | result = Adapter.delete_snapshot(adapter_meta, 
source_uuid) 65 | 66 | # Assert 67 | assert result == {:error, :not_implemented} 68 | assert_receive {:log, :warning, "delete_snapshot/4 is not implemented for " <> _} 69 | end 70 | end 71 | 72 | describe "delete_subscription/3" do 73 | test "logs a warning and returns {:error, :not_implemented}" do 74 | # Arrange 75 | adapter_meta = %{some: "meta"} 76 | arg2 = "some-arg" 77 | subscription_name = "some-subscription" 78 | 79 | # Act 80 | result = Adapter.delete_subscription(adapter_meta, arg2, subscription_name) 81 | 82 | # Assert 83 | assert result == {:error, :not_implemented} 84 | assert_receive {:log, :warning, "delete_subscription/4 is not implemented for " <> _} 85 | end 86 | end 87 | 88 | describe "read_snapshot/2" do 89 | test "logs a warning and returns {:error, :not_implemented}" do 90 | # Arrange 91 | adapter_meta = %{some: "meta"} 92 | stream_uuid = "some-stream" 93 | 94 | # Act 95 | result = Adapter.read_snapshot(adapter_meta, stream_uuid) 96 | 97 | # Assert 98 | assert result == {:error, :not_implemented} 99 | assert_receive {:log, :warning, "read_snapshot/5 is not implemented for " <> _} 100 | end 101 | end 102 | 103 | describe "record_snapshot/2" do 104 | test "logs a warning and returns {:error, :not_implemented}" do 105 | # Arrange 106 | adapter_meta = %{some: "meta"} 107 | snapshot_data = %{some: "data"} 108 | 109 | # Act 110 | result = Adapter.record_snapshot(adapter_meta, snapshot_data) 111 | 112 | # Assert 113 | assert result == {:error, :not_implemented} 114 | assert_receive {:log, :warning, "record_snapshot/3 is not implemented for " <> _} 115 | end 116 | end 117 | 118 | describe "stream_forward/4" do 119 | test "logs a warning and returns {:error, :not_implemented}" do 120 | # Arrange 121 | adapter_meta = %{some: "meta"} 122 | stream_uuid = "some-stream" 123 | start_version = 0 124 | read_batch_size = 100 125 | 126 | # Act 127 | result = Adapter.stream_forward(adapter_meta, stream_uuid, start_version, read_batch_size) 128 | 129 | # Assert 
130 | assert result == {:error, :not_implemented} 131 | assert_receive {:log, :warning, "stream_forward/5 is not implemented for " <> _} 132 | end 133 | end 134 | 135 | describe "subscribe/2" do 136 | test "logs a warning and returns {:error, :not_implemented}" do 137 | # Arrange 138 | adapter_meta = %{some: "meta"} 139 | arg2 = "some-arg" 140 | 141 | # Act 142 | result = Adapter.subscribe(adapter_meta, arg2) 143 | 144 | # Assert 145 | assert result == {:error, :not_implemented} 146 | assert_receive {:log, :warning, "subscribe/2 is not implemented for " <> _} 147 | end 148 | end 149 | 150 | describe "subscribe_to/6" do 151 | test "logs a warning and returns {:error, :not_implemented}" do 152 | # Arrange 153 | adapter_meta = %{some: "meta"} 154 | arg2 = "some-arg" 155 | subscription_name = "some-subscription" 156 | subscriber = self() 157 | start_from = :origin 158 | opts = [] 159 | 160 | # Act 161 | result = Adapter.subscribe_to(adapter_meta, arg2, subscription_name, subscriber, start_from, opts) 162 | 163 | # Assert 164 | assert result == {:error, :not_implemented} 165 | assert_receive {:log, :warning, "subscribe_to/7 is not implemented for " <> _} 166 | end 167 | end 168 | 169 | describe "unsubscribe/2" do 170 | test "logs a warning and returns {:error, :not_implemented}" do 171 | # Arrange 172 | adapter_meta = %{some: "meta"} 173 | subscription_name = "some-subscription" 174 | 175 | # Act 176 | result = Adapter.unsubscribe(adapter_meta, subscription_name) 177 | 178 | # Assert 179 | assert result == {:error, :not_implemented} 180 | assert_receive {:log, :warning, "unsubscribe/3 is not implemented for " <> _} 181 | end 182 | end 183 | 184 | # Helper function to capture log messages 185 | setup do 186 | :meck.new(Logger, [:passthrough]) 187 | :meck.expect(Logger, :warning, fn message -> 188 | send(self(), {:log, :warning, message}) 189 | :ok 190 | end) 191 | 192 | on_exit(fn -> 193 | :meck.unload(Logger) 194 | end) 195 | end 196 | end 197 | 
-------------------------------------------------------------------------------- /system/test/commanded/mapper_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Commanded.MapperTest do 2 | @moduledoc false 3 | 4 | use ExUnit.Case, async: true 5 | alias Commanded.EventStore.EventData 6 | alias ExESDB.Commanded.Mapper 7 | alias ExESDB.NewEvent 8 | require UUIDv7 9 | 10 | @doctest ExESDB.Commanded.Mapper 11 | 12 | describe "to_new_event/1" do 13 | 14 | @tag :mapper 15 | test "GIVEN: an event_data structure 16 | WHEN: to_new_event is called 17 | THEN: a new event is returned" do 18 | # GIVEN 19 | event_data = %EventData{ 20 | event_type: "user_registered", 21 | data: %{email: "test@example.com"}, 22 | metadata: nil, 23 | correlation_id: "corr-123", 24 | causation_id: "cause-456" 25 | } 26 | # WHEN 27 | result = Mapper.to_new_event(event_data) 28 | # THEN 29 | assert %NewEvent{} = result 30 | assert byte_size(result.event_id) == 36 31 | assert result.event_type == "user_registered" 32 | assert result.data_content_type == 1 33 | assert result.metadata_content_type == 1 34 | assert result.data == %{email: "test@example.com"} 35 | assert result.metadata.correlation_id == "corr-123" 36 | assert result.metadata.causation_id == "cause-456" 37 | end 38 | 39 | @tag :mapper 40 | test "GIVEN: an event_data structure 41 | WHEN: to_new_event is called 42 | THEN: the event_id is a UUIDv7 string" do 43 | original_uuid = UUIDv7.generate() 44 | # GIVEN 45 | event_data = %EventData{ 46 | event_type: "tested", 47 | data: "data", 48 | correlation_id: "corr-123", 49 | causation_id: "cause-456" 50 | } 51 | # WHEN 52 | result = Mapper.to_new_event(event_data) 53 | # THEN 54 | refute result.event_id == original_uuid 55 | assert result.event_type == "tested" 56 | assert result.data == "data" 57 | end 58 | 59 | @tag :mapper 60 | test "GIVEN: an event_data structure 61 | WHEN: to_new_event is called 62 | THEN: the event_type is a 
string" do 63 | # GIVEN 64 | event_data = %EventData{ 65 | event_type: "empty_event", 66 | data: nil, 67 | correlation_id: nil, 68 | causation_id: nil 69 | } 70 | # WHEN 71 | result = Mapper.to_new_event(event_data) 72 | # THEN 73 | assert result.event_type == "empty_event" 74 | assert result.data == nil 75 | assert result.metadata.correlation_id == nil 76 | assert result.metadata.causation_id == nil 77 | end 78 | end 79 | end 80 | -------------------------------------------------------------------------------- /system/test/ex_esdb/aggregator/finalize_map_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Aggregator.FinalizeMapTest do 2 | @moduledoc false 3 | use ExUnit.Case 4 | alias ExESDB.Aggregator 5 | 6 | describe "finalize_map/1" do 7 | @tag :ex_esdb_aggregator 8 | test "unwraps tagged values in map" do 9 | tagged_map = %{ 10 | name: "John", 11 | counter: {:sum, 15}, 12 | status: {:overwrite, "active"} 13 | } 14 | 15 | result = Aggregator.finalize_map(tagged_map) 16 | 17 | assert result == %{ 18 | name: "John", 19 | counter: 15, 20 | status: "active" 21 | } 22 | end 23 | 24 | @tag :ex_esdb_aggregator 25 | test "handles empty map" do 26 | result = Aggregator.finalize_map(%{}) 27 | assert result == %{} 28 | end 29 | 30 | @tag :ex_esdb_aggregator 31 | test "preserves untagged values" do 32 | tagged_map = %{ 33 | name: "John", 34 | age: 30, 35 | counter: {:sum, 15} 36 | } 37 | 38 | result = Aggregator.finalize_map(tagged_map) 39 | 40 | assert result == %{ 41 | name: "John", 42 | age: 30, 43 | counter: 15 44 | } 45 | end 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /system/test/ex_esdb/aggregator/foldl_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ExESDB.Aggregator.FoldlTest do 2 | @moduledoc false 3 | use ExUnit.Case 4 | 5 | alias ExESDB.Aggregator 6 | 7 | describe "foldl/2" do 8 | @tag 
defmodule ExESDB.Aggregator.FoldlTest do
  @moduledoc false
  use ExUnit.Case

  alias ExESDB.Aggregator

  describe "foldl/2" do
    # NOTE(review): this test was tagged `:bc_aggregator` while every sibling
    # aggregator test uses `:ex_esdb_aggregator`, and the remaining tests in
    # this module carried no tag at all. Tags are aligned so that
    # `mix test --only ex_esdb_aggregator` exercises the whole suite.
    @tag :ex_esdb_aggregator
    test "handles empty event list" do
      assert Aggregator.foldl([]) == %{}
    end

    @tag :ex_esdb_aggregator
    test "applies a single event to an empty state" do
      events = [%{name: "John", age: 30}]
      assert Aggregator.foldl(events) == %{name: "John", age: 30}
    end

    @tag :ex_esdb_aggregator
    test "applies multiple events sequentially" do
      events = [
        %{name: "John", age: 30},
        %{location: "New York"}
      ]

      assert Aggregator.foldl(events) == %{name: "John", age: 30, location: "New York"}
    end

    @tag :ex_esdb_aggregator
    test "handles sum operations" do
      events = [
        %{counter: {:sum, 5}},
        %{counter: {:sum, 10}}
      ]

      # Sums stay tagged until finalize_map/1 unwraps them.
      assert Aggregator.foldl(events) == %{counter: {:sum, 15}}
    end

    @tag :ex_esdb_aggregator
    test "handles overwrite operations" do
      events = [
        %{name: "John"},
        %{name: {:overwrite, "Jane"}}
      ]

      assert Aggregator.foldl(events) == %{name: "Jane"}
    end

    @tag :ex_esdb_aggregator
    test "handles mixed operations" do
      events = [
        %{name: "John", counter: 5},
        %{counter: {:sum, 10}, status: "active"},
        %{name: {:overwrite, "Jane"}}
      ]

      assert Aggregator.foldl(events) ==
               %{name: "Jane", counter: {:sum, 15}, status: "active"}
    end

    @tag :ex_esdb_aggregator
    test "handles initial state" do
      events = [
        %{counter: {:sum, 10}}
      ]

      initial_state = %{name: "John", counter: 5}
      assert Aggregator.foldl(events, initial_state) == %{name: "John", counter: {:sum, 15}}
    end
  end

  describe "integration tests" do
    @tag :ex_esdb_aggregator
    test "complete workflow with folding and finalizing" do
      events = [
        %{name: "John", counter: 5},
        %{counter: {:sum, 10}, active: true},
        %{name: {:overwrite, "Jane"}, location: "New York"}
      ]

      final_result =
        events
        |> Aggregator.foldl()
        |> Aggregator.finalize_map()

      assert final_result == %{
               name: "Jane",
               counter: 15,
               active: true,
               location: "New York"
             }
    end
  end
end
defmodule ExESDB.EventStoreTest do
  use ExUnit.Case

  doctest ExESDB.EventStore

  alias ExESDB.EventStore
  alias ExESDB.Options

  require Logger

  # Each test gets a freshly supervised EventStore; the data dir is wiped on
  # exit so on-disk state cannot leak between tests.
  setup do
    opts = Options.app_env()

    start_supervised!({EventStore, opts})

    on_exit(fn ->
      File.rm_rf!(opts[:data_dir])
    end)

    opts
  end

  describe "GIVEN a valid set of options" do
    test "WHEN the EventStore is started
          THEN the EventStore is started and the pid is returned" do
      opts = Options.app_env()

      # setup/0 already supervises an EventStore, so a direct start_link here
      # may legitimately report :already_started — either shape proves a live
      # store process. The original `{:ok, res} = start_link(opts)` match
      # would crash on the :already_started tuple.
      case EventStore.start_link(opts) do
        {:ok, pid} ->
          Logger.warning("EventStore pid: #{inspect(pid, pretty: true)}")
          assert is_pid(pid)

        {:error, {:already_started, pid}} ->
          assert is_pid(pid)

        other ->
          flunk("unexpected start_link result: #{inspect(other)}")
      end
    end
  end

  describe "append_to_stream/3" do
    test "appends events to a new stream" do
      stream_name = "test_stream"
      events = [%{type: "TestEvent", data: "test data"}]

      assert {:ok, 1} =
               Options.store_id()
               |> EventStore.append_to_stream(stream_name, 0, events)

      res =
        Options.store_id()
        |> EventStore.read_stream_forward(stream_name, 0, 1)

      Logger.warning("stream: #{inspect(res, pretty: true)}")
    end

    test "appends events to an existing stream" do
      # was: `Config.fetch_env!(:node_app)` — `Config` is Elixir's
      # compile-time configuration DSL and is not callable at runtime; the
      # store id is resolved through Options, as in the other tests here.
      store_id = Options.store_id()
      stream_name = "test_stream"
      events1 = [%{type: "TestEvent1", data: "test data 1"}]
      events2 = [%{type: "TestEvent2", data: "test data 2"}]

      assert {:ok, 1} = EventStore.append_to_stream(store_id, stream_name, 0, events1)
      assert {:ok, 2} = EventStore.append_to_stream(store_id, stream_name, 1, events2)
    end

    # NOTE(review): the skipped tests below still call the old API without a
    # leading store id (3-arg form) — update them to the 4-arg form used by
    # the live tests above before removing :skip.
    @tag :skip
    test "returns error when expected version is incorrect" do
      stream_name = "test_stream"
      events = [%{type: "TestEvent", data: "test data"}]

      assert {:error, :wrong_expected_version} =
               EventStore.append_to_stream(stream_name, 1, events)
    end
  end

  describe "read_stream_forward/3" do
    @tag :skip
    test "reads events from a stream" do
      stream_name = "test_stream"

      events = [
        %{type: "TestEvent1", data: "test data 1"},
        %{type: "TestEvent2", data: "test data 2"},
        %{type: "TestEvent3", data: "test data 3"}
      ]

      EventStore.append_to_stream(stream_name, 0, events)

      assert {:ok, read_events} = EventStore.read_stream_forward(stream_name, 1, 2)
      assert length(read_events) == 2
      assert Enum.at(read_events, 0) == Enum.at(events, 0)
      assert Enum.at(read_events, 1) == Enum.at(events, 1)
    end

    @tag :skip
    test "returns empty list when reading beyond stream end" do
      stream_name = "test_stream"
      events = [%{type: "TestEvent", data: "test data"}]

      EventStore.append_to_stream(stream_name, 0, events)

      assert {:ok, []} = EventStore.read_stream_forward(stream_name, 2, 2)
    end
  end

  describe "stream_version/1" do
    @tag :skip
    test "returns the current version of a stream" do
      stream_name = "test_stream"

      events = [
        %{type: "TestEvent1", data: "test data 1"},
        %{type: "TestEvent2", data: "test data 2"}
      ]

      EventStore.append_to_stream(stream_name, 0, events)

      assert {:ok, 2} = EventStore.stream_version(stream_name)
    end

    @tag :skip
    test "returns 0 for a non-existent stream" do
      stream_name = "non_existent_stream"

      assert {:ok, 0} = EventStore.stream_version(stream_name)
    end
  end
end
defmodule ExESDB.OptionsTest do
  # NOTE(review): was `async: true`, but these tests mutate VM-global state
  # (System env vars and application config); running them concurrently with
  # other suites is racy, so they now run synchronously.
  use ExUnit.Case

  # was: `@doctest ExESDB.Options` — a bare module attribute that ExUnit
  # silently ignores; the `doctest/1` macro is what registers doctests.
  doctest ExESDB.Options

  import ExESDB.Options

  alias ExESDB.EnVars

  # Snapshot the :khepri config and scrub every env var these tests touch,
  # so no case can observe another case's leftovers.
  setup do
    original = Application.get_env(:ex_esdb, :khepri)

    on_exit(fn ->
      Application.put_env(:ex_esdb, :khepri, original)

      [EnVars.data_dir(), EnVars.store_id(), EnVars.timeout(), EnVars.seed_nodes()]
      |> Enum.each(&System.delete_env/1)
    end)

    :ok
  end

  describe "opts/1" do
    setup do
      Application.put_env(:ex_esdb, :khepri, %{test_key: "test_value"})
      :ok
    end

    test "returns config value for given key" do
      assert app_env(:test_key) == "test_value"
    end
  end

  describe "data_dir/0" do
    test "returns env var when set" do
      System.put_env(EnVars.data_dir(), "/custom_data")
      assert data_dir() == "/custom_data"
    end

    test "returns config value when env not set" do
      System.delete_env(EnVars.data_dir())
      Application.put_env(:ex_esdb, :khepri, %{data_dir: "/config_data"})
      assert data_dir() == "/config_data"
    end

    test "returns default when neither set" do
      System.delete_env(EnVars.data_dir())
      Application.put_env(:ex_esdb, :khepri, %{})
      assert data_dir() == "/data"
    end
  end

  describe "store_id/0" do
    test "converts env string to atom" do
      System.put_env(EnVars.store_id(), "custom_store")
      assert store_id() == :custom_store
    end

    test "returns config atom when env not set" do
      System.delete_env(EnVars.store_id())
      Application.put_env(:ex_esdb, :khepri, %{store_id: :config_store})
      assert store_id() == :config_store
    end

    test "returns default atom when neither set" do
      # Explicitly clear both sources: the original relied on test ordering
      # to leave the env var unset and the config free of :store_id.
      System.delete_env(EnVars.store_id())
      Application.put_env(:ex_esdb, :khepri, %{})
      assert store_id() == :ex_store
    end
  end

  describe "timeout/0" do
    test "converts env string to integer" do
      System.put_env(EnVars.timeout(), "5000")
      assert timeout() == 5000
    end

    test "returns config value when env not set" do
      System.delete_env(EnVars.timeout())
      Application.put_env(:ex_esdb, :khepri, %{timeout: 7500})
      assert timeout() == 7500
    end

    test "returns default when neither set" do
      System.delete_env(EnVars.timeout())
      Application.put_env(:ex_esdb, :khepri, %{})
      assert timeout() == 10_000
    end
  end

  describe "seed_nodes/0" do
    test "parses env string into atoms" do
      # Separators and dashes/spaces are normalized to underscores.
      System.put_env(EnVars.seed_nodes(), "node1, node-2, node three")
      assert seed_nodes() == [:node1, :node_2, :node_three]
    end

    test "returns config list when env not set" do
      System.delete_env(EnVars.seed_nodes())
      # NOTE(review): the key here is `seeds_nodes` (not `seed_nodes`) —
      # confirm it matches the key ExESDB.Options actually reads.
      Application.put_env(:ex_esdb, :khepri, %{seeds_nodes: [:n1, :n2]})
      assert seed_nodes() == [:n1, :n2]
    end

    test "returns default list when neither set" do
      System.delete_env(EnVars.seed_nodes())
      Application.put_env(:ex_esdb, :khepri, %{})
      assert seed_nodes() == [node()]
    end
  end
end
# was: `defmodule ExEsdb.Snapshots.RecordSnapshotTest` — casing aligned with
# the `ExESDB` prefix used by every other module in this suite.
defmodule ExESDB.Snapshots.RecordSnapshotTest do
  @moduledoc false
  use ExUnit.Case, async: true

  alias ExESDB.Schema.SnapshotRecord
  alias ExESDB.Snapshots

  describe "record_snapshot/2" do
    @tag :ex_esdb_snapshots
    test "GIVEN a store
          WHEN record_snapshot is called with a snapshot record
          THEN it returns :ok" do
      # NOTE(review): the visible support module is ExESDB.TestSupport.Storage
      # (test/support/storage.ex) — confirm TestSupport.Store exists elsewhere
      # or these calls should target Storage.store/0.
      store = ExESDB.TestSupport.Store.store()
      source_uuid = UUIDv7.generate()
      utc_now = DateTime.utc_now()

      snapshot_record = %SnapshotRecord{
        source_uuid: source_uuid,
        source_version: 1,
        source_type: "test",
        data: %{name: "John"},
        metadata: %{},
        created_at: utc_now,
        created_epoch: DateTime.to_unix(utc_now, :millisecond)
      }

      assert Snapshots.record_snapshot(store, snapshot_record) == :ok
    end
  end
end
defmodule ExESDB.SystemTest do
  use ExUnit.Case

  doctest ExESDB.System

  require Logger

  # was: `alias ExESDB.Options, as: Opttions` (typo) — the test body calls
  # `Options.app_env()`, which could not resolve under the misspelled alias.
  # The unused `alias ExESDB.EventStore` was dropped (compiler warning).
  alias ExESDB.Options
  alias ExESDB.System, as: ESDBSystem

  @tag :skip
  test "that the ExESDB System starts the EventStore" do
    opts = Options.app_env()
    res = ESDBSystem.start(opts)
    Logger.debug("System pid: #{inspect(res, pretty: true)}")
  end
end
defmodule ExESDB.TestSupport.Storage do
  @moduledoc false

  alias ExESDB.Options

  require Logger

  # Blocks until ExESDB.System reports a successful start, giving up after
  # `timeout` milliseconds. Returns :ok on success or {:error, :timeout}.
  def wait_for_event_store(timeout \\ 5_000) when is_integer(timeout) do
    probe = Task.async(&esdb_check/0)

    case Task.yield(probe, timeout) || Task.shutdown(probe) do
      {:ok, outcome} -> outcome
      nil -> {:error, :timeout}
    end
  end

  # Retries System.start/1 once per second until it succeeds; the caller's
  # Task.shutdown/1 bounds the otherwise-unbounded retry loop.
  defp esdb_check do
    case ExESDB.System.start(Options.app_env()) do
      {:ok, _} ->
        :timer.sleep(1_000)
        :ok

      _ ->
        :timer.sleep(1_000)
        esdb_check()
    end
  end

  # The configured store id doubles as the store handle in tests.
  def store, do: Options.store_id()
end