├── .gitignore
├── README.md
├── config
│   ├── config.exs
│   ├── dev.exs
│   └── test.exs
├── lib
│   ├── migrator.ex
│   └── migrator
│       └── tasks
│           ├── remove_empty_streams.ex
│           ├── reset_stream_sequence.ex
│           ├── set_last_seen_event.ex
│           ├── stream_all_events.ex
│           ├── table_copier.ex
│           └── write_events.ex
├── mix.exs
├── mix.lock
└── test
    ├── aggregate_events_test.exs
    ├── migrate_serialization_format_test.exs
    ├── remove_event_test.exs
    ├── support
    │   ├── event_factory.ex
    │   ├── json_serializer.ex
    │   ├── reader.ex
    │   └── storage_case.ex
    ├── test_helper.exs
    └── upgrade_event_test.exs

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/_build
/cover
/deps
/doc
/.fetch
erl_crash.dump
*.ez

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# EventStore migrator

Copy & transform migration strategy for [eventstore](https://github.com/slashdotdash/eventstore).

> Copy and transformation replays every event into a new store, transforming each event along the way. With this technique the old event store stays intact; a new store is created alongside it.

## Usage

`EventStore.Migrator` copies an [event store](https://github.com/slashdotdash/eventstore) PostgreSQL database from a source to a target.

It allows you to modify the events during the copy: you can transform, remove, and aggregate events, and alter their serialization format.

The migrator exposes a single `migrate` function. It expects an anonymous function that receives an event stream; you can use any of the functions in Elixir's [Stream](https://hexdocs.pm/elixir/Stream.html) module to mutate that stream. The modified events are appended to the target event store database.
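The simplest migration is an identity copy that leaves every event untouched:

```elixir
# A no-op migration: stream every event from the source store
# into the target store without modification.
EventStore.Migrator.migrate(fn stream -> stream end)
```

More useful migrations transform the stream, as in the following examples.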
### Remove an unwanted event

```elixir
EventStore.Migrator.migrate(fn stream ->
  Stream.reject(
    stream,
    fn event_data -> event_data.event_type == "UnwantedEvent" end
  )
end)
```

### Upgrade an event

```elixir
defmodule OriginalEvent, do: defstruct [uuid: nil]
defmodule UpgradedEvent, do: defstruct [uuid: nil, additional: nil]

EventStore.Migrator.migrate(fn stream ->
  Stream.map(
    stream,
    fn event ->
      case event.data do
        %OriginalEvent{uuid: uuid} ->
          %EventStore.RecordedEvent{event |
            event_type: "UpgradedEvent",
            data: %UpgradedEvent{uuid: uuid, additional: "upgraded #{uuid}"},
          }

        _ -> event
      end
    end
  )
end)
```

### Aggregate multiple events into one event

```elixir
defmodule SingleEvent, do: defstruct [uuid: nil, group: nil]
defmodule AggregatedEvent, do: defstruct [uuids: [], group: nil]

defmodule Aggregator do
  # aggregate multiple single events for the same group into one aggregated event
  def aggregate([%{data: %SingleEvent{}}] = events), do: events
  def aggregate([%{data: %SingleEvent{group: group}} = source | _] = events) do
    [
      %EventStore.RecordedEvent{source |
        data: %AggregatedEvent{
          uuids: Enum.map(events, fn event -> event.data.uuid end),
          group: group,
        },
        event_type: "AggregatedEvent",
      },
    ]
  end
  def aggregate(events), do: events
end

EventStore.Migrator.migrate(fn stream ->
  stream
  |> Stream.chunk_by(fn event -> {event.stream_id, event.event_type} end)
  |> Stream.map(fn events -> Aggregator.aggregate(events) end)
  |> Stream.flat_map(fn events -> events end)
end)
```

### Migrate serialization format

Configure the source and target serializers in the environment configuration file (e.g. `config/dev.exs`).

```elixir
config :eventstore, EventStore.Storage,
  serializer: JsonSerializer,
  # ...

config :eventstore_migrator, EventStore.Migrator,
  serializer: AlternateSerializer,
  # ...
```

Then run the migration without changing the event stream.

```elixir
EventStore.Migrator.migrate(source_config, target_config, fn stream -> stream end)
```

### Remarks

Streams are composable, so you can combine multiple transforms in a single migration, as shown below.
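For example, a single migration can remove unwanted events and upgrade old ones in one pass. This is a sketch reusing the hypothetical `UnwantedEvent`, `OriginalEvent`, and `UpgradedEvent` modules from the examples above:

```elixir
EventStore.Migrator.migrate(fn stream ->
  stream
  # drop events we no longer want to keep
  |> Stream.reject(fn event -> event.event_type == "UnwantedEvent" end)
  # upgrade any remaining events of the old type
  |> Stream.map(fn event ->
    case event.data do
      %OriginalEvent{uuid: uuid} ->
        %EventStore.RecordedEvent{event |
          event_type: "UpgradedEvent",
          data: %UpgradedEvent{uuid: uuid, additional: "upgraded #{uuid}"},
        }

      _ -> event
    end
  end)
end)
```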

--------------------------------------------------------------------------------
/config/config.exs:
--------------------------------------------------------------------------------
use Mix.Config

import_config "#{Mix.env}.exs"

--------------------------------------------------------------------------------
/config/dev.exs:
--------------------------------------------------------------------------------
use Mix.Config

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

config :eventstore, EventStore.Storage,
  username: "postgres",
  password: "postgres",
  database: "eventstore_dev",
  hostname: "localhost",
  pool_size: 10

--------------------------------------------------------------------------------
/config/test.exs:
--------------------------------------------------------------------------------
use Mix.Config

# Print only warnings and errors during test
config :logger, :console, level: :warn, format: "[$level] $message\n"

config :ex_unit, capture_log: true

config :eventstore, EventStore.Storage,
  serializer: EventStore.Migrator.JsonSerializer,
  username: "postgres",
  password: "postgres",
  database: "eventstore_test",
  hostname: "localhost",
  pool_size: 1

config :eventstore_migrator, EventStore.Migrator,
  serializer: EventStore.Migrator.JsonSerializer,
  username: "postgres",
  password: "postgres",
  database: "eventstore_migrator_test",
  hostname: "localhost",
  pool_size: 1

--------------------------------------------------------------------------------
/lib/migrator.ex:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator do
  @moduledoc """
  Migrate an event store using a copy & transform strategy.
  """

  alias EventStore.Migrator.Tasks.{
    RemoveEmptyStreams,
    ResetTableSequence,
    SetLastSeenEvent,
    StreamAllEvents,
    TableCopier,
    WriteEvents,
  }

  defmodule Config do
    defstruct [config: nil, conn: nil, serializer: nil]
  end

  alias EventStore.Migrator.Config

  @doc """
  Migrate the event store using the given migrator function.
  """
  def migrate(migrator) when is_function(migrator) do
    source_config = Application.get_env(:eventstore, EventStore.Storage)
    target_config = Application.get_env(:eventstore_migrator, EventStore.Migrator)

    migrate(source_config, target_config, migrator)
  end

  @doc """
  Migrate the event store using the given migrator function and the given source and target event store configurations.
  """
  def migrate(source, target, migrator) when is_function(migrator) do
    source_config = parse_config(source)
    target_config = parse_config(target)

    do_migrate(source_config, target_config, migrator)
  end

  defp do_migrate(%Config{} = source, %Config{} = target, migrator) when is_function(migrator) do
    # create the event store schema in the target database
    EventStore.Storage.Initializer.run!(target.conn)

    # copy the unchanged tables verbatim, then migrate the events themselves
    copy_table(source, target, "snapshots")
    copy_table(source, target, "streams")
    copy_table(source, target, "subscriptions")

    migrate_events(migrator, target)

    # tidy up data made stale by removed or renumbered events
    RemoveEmptyStreams.execute(target.conn)
    SetLastSeenEvent.execute(target.conn)
    ResetTableSequence.execute(target.conn, "streams", "stream_id")
    ResetTableSequence.execute(target.conn, "subscriptions", "subscription_id")
"subscription_id") 54 | end 55 | 56 | defp parse_config(config) do 57 | {:ok, conn} = Postgrex.start_link(config) 58 | 59 | %Config{ 60 | config: config, 61 | conn: conn, 62 | serializer: config[:serializer] 63 | } 64 | end 65 | 66 | defp copy_table(%Config{conn: source}, %Config{conn: target}, table) do 67 | TableCopier.execute(source, target, table) 68 | end 69 | 70 | # migrate events between source and target using a stream passed to a migrator function that can modify any events 71 | defp migrate_events(migrator, %Config{} = target) do 72 | StreamAllEvents.execute() 73 | |> migrator.() 74 | |> WriteEvents.execute(target) 75 | |> Stream.run 76 | end 77 | end 78 | -------------------------------------------------------------------------------- /lib/migrator/tasks/remove_empty_streams.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.Tasks.RemoveEmptyStreams do 2 | def execute(conn) do 3 | Postgrex.query!(conn, remove_empty_streams(), []) 4 | end 5 | 6 | defp remove_empty_streams do 7 | """ 8 | DELETE FROM streams s 9 | WHERE NOT EXISTS 10 | (SELECT stream_id FROM events e 11 | WHERE e.stream_id = s.stream_id 12 | LIMIT 1); 13 | """ 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /lib/migrator/tasks/reset_stream_sequence.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.Tasks.ResetTableSequence do 2 | def execute(conn, table, primary_key) do 3 | Postgrex.query!(conn, reset_stream_sequence(table, primary_key), []) 4 | end 5 | 6 | defp reset_stream_sequence(table, primary_key) do 7 | """ 8 | SELECT setval('#{table}_#{primary_key}_seq', COALESCE((SELECT MAX(#{primary_key}) + 1 FROM #{table}), 1), false); 9 | """ 10 | end 11 | end 12 | -------------------------------------------------------------------------------- /lib/migrator/tasks/set_last_seen_event.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.Tasks.SetLastSeenEvent do 2 | def execute(conn) do 3 | Postgrex.query!(conn, set_all_stream_subscription_last_seen_event_id(), []) 4 | Postgrex.query!(conn, set_single_stream_subscription_last_seen_event_id(), []) 5 | Postgrex.query!(conn, set_snapshots_last_seen_event_id(), []) 6 | end 7 | 8 | defp set_all_stream_subscription_last_seen_event_id do 9 | """ 10 | UPDATE subscriptions s 11 | SET last_seen_event_id = 12 | (SELECT COALESCE(MAX(e.event_id), 0) 13 | FROM events e) 14 | WHERE s.stream_uuid = '$all'; 15 | """ 16 | end 17 | 18 | defp set_single_stream_subscription_last_seen_event_id do 19 | """ 20 | UPDATE subscriptions 21 | SET last_seen_stream_version = 22 | (SELECT COALESCE(MAX(events.event_id), 0) 23 | FROM events 24 | WHERE events.stream_id = 25 | (SELECT streams.stream_id 26 | FROM streams 27 | WHERE streams.stream_uuid = subscriptions.stream_uuid)) 28 | WHERE subscriptions.stream_uuid <> '$all'; 29 | """ 30 | end 31 | 32 | defp set_snapshots_last_seen_event_id do 33 | """ 34 | UPDATE snapshots s 35 | SET source_version = 36 | (SELECT COALESCE(MAX(e.event_id), 0) 37 | FROM events e); 38 | """ 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/migrator/tasks/stream_all_events.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.Tasks.StreamAllEvents do 2 | def execute() do 3 | 
    EventStore.stream_all_forward()
  end
end

--------------------------------------------------------------------------------
/lib/migrator/tasks/table_copier.ex:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.Tasks.TableCopier do
  def execute(source, target, table) do
    Postgrex.transaction(source, fn source_conn ->
      Postgrex.transaction(target, fn target_conn ->
        copy(source_conn, target_conn, table)
      end)
    end)
  end

  # pipe a PostgreSQL `COPY TO` stream on the source directly into a `COPY FROM` stream on the target
  defp copy(source_conn, target_conn, table) do
    query = Postgrex.prepare!(source_conn, "", "COPY #{table} TO STDOUT")
    source_stream = Postgrex.stream(source_conn, query, [])
    result_to_iodata = fn %Postgrex.Result{rows: rows} -> rows end

    target_stream = Postgrex.stream(target_conn, "COPY #{table} FROM STDIN", [])

    Enum.into(source_stream, target_stream, result_to_iodata)
  end
end

--------------------------------------------------------------------------------
/lib/migrator/tasks/write_events.ex:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.Tasks.WriteEvents do
  defmodule State do
    defstruct [
      conn: nil,
      serializer: nil,
      next_event_id: 1,
      stream_versions: %{},
    ]
  end

  alias EventStore.Migrator.Tasks.WriteEvents.State

  def execute(events, %{conn: conn, serializer: serializer} = config, write_batch_size \\ 1_000) do
    events
    |> Stream.transform(%State{conn: conn, serializer: serializer}, &map_to_recorded_event/2)
    |> Stream.chunk(write_batch_size, write_batch_size, [])
    |> Stream.each(&append_event_batch(&1, config))
  end

  # renumber each event, tracking the next event id and per-stream version,
  # since the migrator function may have removed, added, or aggregated events
  defp map_to_recorded_event(event, %State{next_event_id: next_event_id, serializer: serializer, stream_versions: stream_versions} = state) do
    stream_version = Map.get(stream_versions, event.stream_id, 0) + 1

    recorded_event = %EventStore.RecordedEvent{
      event_id: next_event_id,
      stream_id: event.stream_id,
      stream_version: stream_version,
      correlation_id: event.correlation_id,
      event_type: event.event_type,
      data: serializer.serialize(event.data),
      metadata: serializer.serialize(event.metadata),
      created_at: event.created_at,
    }

    state = %State{state |
      next_event_id: next_event_id + 1,
      stream_versions: Map.put(stream_versions, event.stream_id, stream_version)
    }

    {[recorded_event], state}
  end

  defp append_event_batch(events, %{conn: conn}) do
    expected_count = length(events)

    {:ok, ^expected_count} = execute_using_multirow_value_insert(conn, events)
  end

  defp execute_using_multirow_value_insert(conn, events) do
    statement = build_insert_statement(events)
    parameters = build_insert_parameters(events)

    conn
    |> Postgrex.query(statement, parameters)
    |> handle_response()
  end

  defp build_insert_statement(events) do
    EventStore.Sql.Statements.create_events(length(events))
  end

  defp build_insert_parameters(events) do
    events
    |> Enum.flat_map(fn event ->
      [
        event.event_id,
        event.stream_id,
        event.stream_version,
        event.correlation_id,
        event.event_type,
        event.data,
        event.metadata,
        event.created_at,
      ]
    end)
  end

  defp handle_response({:ok, %Postgrex.Result{num_rows: rows}}), do: {:ok, rows}
  defp handle_response({:error, reason}), do: {:error, reason}
end

--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.Mixfile do
  use Mix.Project

  def project do
    [
      app: :eventstore_migrator,
      version: "0.1.1",
      elixir: "~> 1.4",
      elixirc_paths: elixirc_paths(Mix.env),
      description: description(),
      package: package(),
      build_embedded: Mix.env == :prod,
      start_permanent: Mix.env == :prod,
      deps: deps(),
    ]
  end

  def application do
    [
      extra_applications: [:logger]
    ]
  end

  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  defp deps do
    [
      {:eventstore, "~> 0.8"},
      {:mix_test_watch, "~> 0.2", only: :dev},
      {:poison, "~> 3.0", only: [:test]},
      {:postgrex, "~> 0.13"},
      {:uuid, "~> 1.1", only: :test}
    ]
  end

  defp description do
    """
    Copy & transform migration strategy for EventStore.
    """
  end

  defp package do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE*"],
      maintainers: ["Ben Smith"],
      licenses: ["MIT"],
      links: %{"GitHub" => "https://github.com/slashdotdash/eventstore-migrator",
               "Docs" => "https://hexdocs.pm/eventstore-migrator/"}
    ]
  end
end

--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
%{"connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []},
  "db_connection": {:hex, :db_connection, "1.1.0", "b2b88db6d7d12f99997b584d09fad98e560b817a20dab6a526830e339f54cdb3", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, optional: true]}]},
  "decimal": {:hex, :decimal, "1.3.1", "157b3cedb2bfcb5359372a7766dd7a41091ad34578296e951f58a946fcab49c6", [:mix], []},
  "eventstore": {:hex, :eventstore, "0.8.0", "677c8897d97b263b4358370bea3be4220df814f483fcd281a45e4db19fb3dee0", [:mix], [{:fsm, "~> 0.3", [hex: :fsm, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: false]}, {:postgrex, "~> 0.13", [hex: :postgrex, optional: false]}]},
  "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], []},
  "fsm": {:hex, :fsm, "0.3.0", "d00e0a3c68f8cf8feb24ce3a732164638ec652c48ce416b66d4e375b6ee415eb", [:mix], []},
  "mix_test_watch": {:hex, :mix_test_watch, "0.2.6", "9fcc2b1b89d1594c4a8300959c19d50da2f0ff13642c8f681692a6e507f92cab", [:mix], [{:fs, "~> 0.9.1", [hex: :fs, optional: false]}]},
  "poison": {:hex, :poison, "3.0.0", "625ebd64d33ae2e65201c2c14d6c85c27cc8b68f2d0dd37828fde9c6920dd131", [:mix], []},
  "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], []},
  "postgrex": {:hex, :postgrex, "0.13.0", "e101ab47d0725955c5c8830ae8812412992e02e4bd9db09e17abb0a5d82d09c7", [:mix], [{:connection, "~> 1.0", [hex: :connection, optional: false]}, {:db_connection, "~> 1.1", [hex: :db_connection, optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, optional: false]}]},
  "uuid": {:hex, :uuid, "1.1.6", "4927232f244e69c6e255643014c2d639dad5b8313dc2a6976ee1c3724e6ca60d", [:mix], []}}

--------------------------------------------------------------------------------
/test/aggregate_events_test.exs:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.AggregateEventsTest do
  use EventStore.Migrator.StorageCase

  alias EventStore.Migrator.EventFactory

  defmodule SingleEvent, do: defstruct [uuid: nil, group: nil]
  defmodule AggregatedEvent, do: defstruct [uuids: [], group: nil]

  describe "combine events" do
    setup [:append_events, :migrate]

    test "should remove individual events and replace with aggregated event" do
      {:ok, events} = EventStore.Migrator.Reader.read_migrated_events()

      assert length(events) == 4
      assert pluck(events, :event_id) == [1, 2, 3, 4]
      assert pluck(events, :stream_version) == [1, 1, 1, 1]
      assert pluck(events, :event_type) == [
        "#{__MODULE__}.AggregatedEvent",
        "#{__MODULE__}.SingleEvent",
        "#{__MODULE__}.SingleEvent",
        "#{__MODULE__}.AggregatedEvent",
      ]
      assert Enum.at(events, 0).data == "{\"uuids\":[1,2,3],\"group\":\"A\"}"
    end

    test "should copy stream", context do
      {:ok, stream_id, stream_version} = EventStore.Migrator.Reader.stream_info(context[:stream1_uuid])

      assert stream_id == 1
      assert stream_version == 1
    end
  end

  defp migrate(context) do
    EventStore.Migrator.migrate(fn stream ->
      stream
      |> Stream.chunk_by(fn event -> {event.stream_id, event.event_type} end)
      |> Stream.map(fn events -> aggregate(events) end)
      |> Stream.flat_map(fn events -> events end)
    end)

    context
  end

  # aggregate multiple single events for the same group into one aggregated event
  defp aggregate([%{data: %SingleEvent{}}] = events), do: events
  defp aggregate([%{data: %SingleEvent{group: group}} = source | _] = events) do
    [
      %EventStore.RecordedEvent{source |
        data: %AggregatedEvent{
          uuids: Enum.map(events, fn event -> event.data.uuid end),
          group: group,
        },
        event_type: "#{__MODULE__}.AggregatedEvent",
      },
    ]
  end
  defp aggregate(events), do: events

  defp append_events(_context) do
    stream1_uuid = UUID.uuid4()
    stream2_uuid = UUID.uuid4()
    stream3_uuid = UUID.uuid4()
    stream4_uuid = UUID.uuid4()

    EventStore.append_to_stream(stream1_uuid, 0, EventFactory.to_event_data([
      %SingleEvent{uuid: 1, group: "A"},
      %SingleEvent{uuid: 2, group: "A"},
      %SingleEvent{uuid: 3, group: "A"}
    ]))

    EventStore.append_to_stream(stream2_uuid, 0, EventFactory.to_event_data([
      %SingleEvent{uuid: 4, group: "B"},
    ]))

    EventStore.append_to_stream(stream3_uuid, 0, EventFactory.to_event_data([
      %SingleEvent{uuid: 5, group: "C"},
    ]))

    EventStore.append_to_stream(stream4_uuid, 0, EventFactory.to_event_data([
      %SingleEvent{uuid: 6, group: "D"},
      %SingleEvent{uuid: 7, group: "D"},
    ]))

    [
      stream1_uuid: stream1_uuid,
      stream2_uuid: stream2_uuid,
      stream3_uuid: stream3_uuid,
      stream4_uuid: stream4_uuid,
    ]
  end

  def pluck(enumerable, field) do
    Enum.map(enumerable, &Map.get(&1, field))
  end
end

--------------------------------------------------------------------------------
/test/migrate_serialization_format_test.exs:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.MigrateSerializationFormatTest do
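  # verifies that a migration can re-serialize every event into a different format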
  use EventStore.Migrator.StorageCase

  alias EventStore.Migrator.EventFactory

  defmodule AnEvent, do: defstruct [uuid: nil]
  defmodule AnotherEvent, do: defstruct [uuid: nil]

  describe "migrate serialization format" do
    setup [:append_events, :migrate]

    test "should migrate all events using new serialization format" do
      {:ok, events} = EventStore.Migrator.Reader.read_migrated_events()

      assert length(events) == 3
      assert pluck(events, :event_id) == [1, 2, 3]
      assert pluck(events, :stream_version) == [1, 2, 3]
      assert pluck(events, :event_type) == [
        "#{__MODULE__}.AnEvent",
        "#{__MODULE__}.AnotherEvent",
        "#{__MODULE__}.AnotherEvent"
      ]
      assert Enum.at(events, 0).data == :erlang.term_to_binary(%AnEvent{uuid: 1})
    end

    test "should copy stream", context do
      {:ok, stream_id, stream_version} = EventStore.Migrator.Reader.stream_info(context[:stream_uuid])

      assert stream_id == 1
      assert stream_version == 3
    end
  end

  defp migrate(context) do
    source_config = Application.get_env(:eventstore, EventStore.Storage)
    target_config = Application.get_env(:eventstore_migrator, EventStore.Migrator)

    # switch serializer from JSON to Erlang term format
    target_config = Keyword.put(target_config, :serializer, EventStore.TermSerializer)

    EventStore.Migrator.migrate(source_config, target_config, fn stream -> stream end)

    context
  end

  defp append_events(_context) do
    stream_uuid = UUID.uuid4()

    events = EventFactory.to_event_data([
      %AnEvent{uuid: 1},
      %AnotherEvent{uuid: 2},
      %AnotherEvent{uuid: 3}
    ])

    EventStore.append_to_stream(stream_uuid, 0, events)

    [stream_uuid: stream_uuid]
  end

  def pluck(enumerable, field) do
    Enum.map(enumerable, &Map.get(&1, field))
  end
end

--------------------------------------------------------------------------------
/test/remove_event_test.exs:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.RemoveEventTest do
  use EventStore.Migrator.StorageCase

  alias EventStore.Migrator.EventFactory

  defmodule WantedEvent, do: defstruct [uuid: nil]
  defmodule UnwantedEvent, do: defstruct [uuid: nil]

  defmodule ASnapshot, do: defstruct [uuid: nil]

  describe "remove an event" do
    setup [:append_events, :migrate]

    test "should remove only unwanted events" do
      {:ok, events} = EventStore.Migrator.Reader.read_migrated_events()

      assert length(events) == 2
      assert pluck(events, :event_id) == [1, 2]
      assert pluck(events, :stream_version) == [1, 2]
    end

    test "should copy stream", context do
      {:ok, stream_id, stream_version} = EventStore.Migrator.Reader.stream_info(context[:stream_uuid])

      assert stream_id == 1
      assert stream_version == 2
    end

    test "should reset stream id sequence" do
      %{rows: [[next_value]]} = Postgrex.query!(conn(), "SELECT nextval('streams_stream_id_seq');", [])
      assert next_value == 2
    end
  end

  describe "remove all events from a stream" do
    setup [:append_unwanted_events_to_single_stream, :migrate]

    test "should remove all unwanted events" do
      {:ok, events} = EventStore.Migrator.Reader.read_migrated_events()

      assert length(events) == 0
    end

test "should remove stream", context do 45 | {:ok, stream_id, stream_version} = EventStore.Migrator.Reader.stream_info(context[:stream_uuid]) 46 | 47 | assert stream_id == nil 48 | assert stream_version == 0 49 | end 50 | end 51 | 52 | describe "remove an event with all stream subscription" do 53 | setup [:append_events, :create_all_stream_subscription, :migrate] 54 | 55 | test "should copy subscription", context do 56 | {:ok, [subscription]} = EventStore.Migrator.Reader.subscriptions() 57 | 58 | assert subscription.stream_uuid == "$all" 59 | assert subscription.subscription_name == context[:subscription_name] 60 | assert subscription.last_seen_event_id == 2 61 | assert subscription.last_seen_stream_version == nil 62 | end 63 | 64 | test "should reset subscription id sequence" do 65 | %{rows: [[next_value]]} = Postgrex.query!(conn(), "SELECT nextval('subscriptions_subscription_id_seq');", []) 66 | assert next_value == 2 67 | end 68 | end 69 | 70 | describe "remove an event with single stream subscription" do 71 | setup [:append_events, :create_single_stream_subscription, :migrate] 72 | 73 | test "should copy subscription", context do 74 | {:ok, [subscription]} = EventStore.Migrator.Reader.subscriptions() 75 | 76 | assert subscription.stream_uuid == context[:stream_uuid] 77 | assert subscription.subscription_name == context[:subscription_name] 78 | assert subscription.last_seen_event_id == nil 79 | assert subscription.last_seen_stream_version == 2 80 | end 81 | end 82 | 83 | describe "remove an event with a snapshot" do 84 | setup [:append_events, :create_snapshot, :migrate] 85 | 86 | test "should copy snapsot", context do 87 | {:ok, snapshot} = EventStore.Migrator.Reader.read_snapshot(context[:stream_uuid]) 88 | 89 | assert snapshot.source_uuid == context[:stream_uuid] 90 | assert snapshot.source_version == 2 91 | end 92 | end 93 | 94 | defp migrate(context) do 95 | EventStore.Migrator.migrate(fn stream -> 96 | Stream.reject( 97 | stream, 98 | fn (event_data) -> event_data.event_type == "#{__MODULE__}.UnwantedEvent" end 99 | ) 100 | end) 101 | 102 | context 103 | end 104 | 105 | defp append_events(_context) do 106 | stream_uuid = UUID.uuid4() 107 | 108 | events = EventFactory.to_event_data([ 109 | %WantedEvent{uuid: 1}, 110 | %UnwantedEvent{uuid: 2}, 111 | %WantedEvent{uuid: 3} 112 | ]) 113 | 114 | EventStore.append_to_stream(stream_uuid, 0, events) 115 | 116 | [stream_uuid: stream_uuid] 117 | end 118 | 119 | defp append_unwanted_events_to_single_stream(_context) do 120 | stream_uuid = UUID.uuid4() 121 | 122 | events = EventFactory.to_event_data([ 123 | %UnwantedEvent{uuid: 1}, 124 | %UnwantedEvent{uuid: 2}, 125 | %UnwantedEvent{uuid: 3} 126 | ]) 127 | 128 | EventStore.append_to_stream(stream_uuid, 0, events) 129 | 130 | [stream_uuid: stream_uuid] 131 | end 132 | 133 | defp create_all_stream_subscription(_context) do 134 | subscription_name = "test-all-subscription" 135 | 136 | EventStore.subscribe_to_all_streams(subscription_name, self(), 0) 137 | 138 | [subscription_name: subscription_name] 139 | end 140 | 141 | defp create_single_stream_subscription(context) do 142 | subscription_name = "test-single-subscription" 143 | 144 | EventStore.subscribe_to_stream(context[:stream_uuid], subscription_name, self(), 0) 145 | 146 | [subscription_name: subscription_name] 147 | end 148 | 149 | defp create_snapshot(context) do 150 | EventStore.record_snapshot(%EventStore.Snapshots.SnapshotData{ 151 | source_uuid: context[:stream_uuid], 152 | source_version: 0, 153 | source_type: 
"Elixir.EventStore.RemoveEventTest.ASnapshot", 154 | data: "{\"uuid\":1}", 155 | metadata: "{}", 156 | created_at: DateTime.utc_now |> DateTime.to_naive, 157 | }) 158 | 159 | [] 160 | end 161 | 162 | defp conn do 163 | storage_config = Application.get_env(:eventstore_migrator, EventStore.Migrator) 164 | 165 | {:ok, conn} = Postgrex.start_link(storage_config) 166 | 167 | conn 168 | end 169 | 170 | def pluck(enumerable, field) do 171 | Enum.map(enumerable, &Map.get(&1, field)) 172 | end 173 | end 174 | -------------------------------------------------------------------------------- /test/support/event_factory.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.EventFactory do 2 | def to_event_data(events) do 3 | correlation_id = UUID.uuid4() 4 | 5 | Enum.map(events, fn event -> to_event_data(event, correlation_id) end) 6 | end 7 | 8 | def to_event_data(event, correlation_id) do 9 | %EventStore.EventData{ 10 | correlation_id: correlation_id, 11 | event_type: Atom.to_string(event.__struct__), 12 | data: event, 13 | metadata: %{}, 14 | } 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /test/support/json_serializer.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.JsonSerializer do 2 | @moduledoc """ 3 | A serializer that uses the JSON format. 4 | """ 5 | 6 | @behaviour EventStore.Serializer 7 | 8 | @doc """ 9 | Serialize given term to JSON binary data. 10 | """ 11 | def serialize(term) do 12 | Poison.encode!(term) 13 | end 14 | 15 | @doc """ 16 | Deserialize given JSON binary data to the expected type. 17 | """ 18 | def deserialize(binary, config) do 19 | type = case Keyword.get(config, :type, nil) do 20 | nil -> [] 21 | type -> type |> String.to_existing_atom |> struct 22 | end 23 | Poison.decode!(binary, as: type) 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /test/support/reader.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.Reader do 2 | def read_migrated_events do 3 | EventStore.Storage.Reader.read_all_forward(conn(), 0, 1_000) 4 | end 5 | 6 | def stream_info(stream_uuid) do 7 | EventStore.Storage.Stream.stream_info(conn(), stream_uuid) 8 | end 9 | 10 | def subscriptions do 11 | EventStore.Storage.Subscription.subscriptions(conn()) 12 | end 13 | 14 | def read_snapshot(source_uuid) do 15 | EventStore.Storage.Snapshot.read_snapshot(conn(), source_uuid) 16 | end 17 | 18 | defp conn do 19 | storage_config = Application.get_env(:eventstore_migrator, EventStore.Migrator) 20 | 21 | {:ok, conn} = Postgrex.start_link(storage_config) 22 | 23 | conn 24 | end 25 | end 26 | -------------------------------------------------------------------------------- /test/support/storage_case.ex: -------------------------------------------------------------------------------- 1 | defmodule EventStore.Migrator.StorageCase do 2 | use ExUnit.CaseTemplate 3 | 4 | setup do 5 | source_config = Application.get_env(:eventstore, EventStore.Storage) 6 | target_config = Application.get_env(:eventstore_migrator, EventStore.Migrator) 7 | 8 | Application.stop(:eventstore) 9 | 10 | reset_storage(source_config) 11 | create_target_storage(target_config) 12 | drop_tables(target_config) 13 | 14 | Application.ensure_all_started(:eventstore) 15 | 16 | :ok 17 | end 18 | 19 | defp reset_storage(storage_config) do 20 | 
    {:ok, conn} = Postgrex.start_link(storage_config)

    EventStore.Storage.Initializer.reset!(conn)
  end

  defp create_target_storage(storage_config) do
    System.cmd("createdb", [storage_config[:database]], stderr_to_stdout: true)
  end

  defp drop_tables(storage_config) do
    {:ok, conn} = Postgrex.start_link(storage_config)

    EventStore.Storage.Initializer.reset!(conn)

    # drop events before streams to satisfy the foreign key between the two tables
    drop_table(conn, "snapshots")
    drop_table(conn, "subscriptions")
    drop_table(conn, "events")
    drop_table(conn, "streams")
  end

  defp drop_table(conn, table) do
    Postgrex.query!(conn, "DROP TABLE IF EXISTS #{table}", [])
  end
end

--------------------------------------------------------------------------------
/test/test_helper.exs:
--------------------------------------------------------------------------------
ExUnit.start()

--------------------------------------------------------------------------------
/test/upgrade_event_test.exs:
--------------------------------------------------------------------------------
defmodule EventStore.Migrator.UpgradeEventTest do
  use EventStore.Migrator.StorageCase

  alias EventStore.Migrator.EventFactory

  defmodule OriginalEvent, do: defstruct [uuid: nil]
  defmodule UpgradedEvent, do: defstruct [uuid: nil, additional: nil]
  defmodule AnotherEvent, do: defstruct [uuid: nil]

  describe "upgrade an event" do
    setup [:append_events, :migrate]

    test "should upgrade only matching events" do
      {:ok, events} = EventStore.Migrator.Reader.read_migrated_events()

      assert length(events) == 3
      assert pluck(events, :event_id) == [1, 2, 3]
      assert pluck(events, :stream_version) == [1, 2, 3]
      assert pluck(events, :event_type) == [
        "#{__MODULE__}.AnotherEvent",
        "#{__MODULE__}.UpgradedEvent",
        "#{__MODULE__}.AnotherEvent"
      ]
      assert Enum.at(events, 1).data == String.trim("""
      {\"uuid\":2,\"additional\":\"upgraded\"}
      """)
    end

    test "should copy stream", context do
      {:ok, stream_id, stream_version} = EventStore.Migrator.Reader.stream_info(context[:stream_uuid])

      assert stream_id == 1
      assert stream_version == 3
    end
  end

  defp migrate(context) do
    EventStore.Migrator.migrate(fn stream ->
      Stream.map(
        stream,
        fn event ->
          case event.data do
            %OriginalEvent{uuid: uuid} ->
              %EventStore.RecordedEvent{event |
                event_type: "#{__MODULE__}.UpgradedEvent",
                data: %UpgradedEvent{uuid: uuid, additional: "upgraded"},
              }

            _ -> event
          end
        end
      )
    end)

    context
  end

  defp append_events(_context) do
    stream_uuid = UUID.uuid4()

    events = EventFactory.to_event_data([
      %AnotherEvent{uuid: 1},
      %OriginalEvent{uuid: 2},
      %AnotherEvent{uuid: 3}
    ])

    EventStore.append_to_stream(stream_uuid, 0, events)

    [stream_uuid: stream_uuid]
  end

  def pluck(enumerable, field) do
    Enum.map(enumerable, &Map.get(&1, field))
  end
end

--------------------------------------------------------------------------------