├── .formatter.exs ├── .github └── workflows │ └── test-elixir.yaml ├── .gitignore ├── README.md ├── config ├── config.exs ├── dev.exs ├── prod.exs ├── prod.secret.exs └── test.exs ├── lib ├── crush.ex ├── crush │ ├── application.ex │ ├── cluster.ex │ ├── differ.ex │ ├── persister.ex │ ├── store.ex │ └── utils.ex ├── crush_web.ex └── crush_web │ ├── caching_body_reader.ex │ ├── channels │ └── user_socket.ex │ ├── controllers │ └── api_controller.ex │ ├── endpoint.ex │ ├── router.ex │ ├── telemetry.ex │ └── views │ ├── error_helpers.ex │ └── error_view.ex ├── mix.exs ├── mix.lock └── test ├── crush └── store_test.exs ├── crush_web ├── controllers │ └── api_controller_test.exs └── views │ └── error_view_test.exs ├── support ├── channel_case.ex └── conn_case.ex └── test_helper.exs /.formatter.exs: -------------------------------------------------------------------------------- 1 | [ 2 | import_deps: [:phoenix], 3 | inputs: ["*.{ex,exs}", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /.github/workflows/test-elixir.yaml: -------------------------------------------------------------------------------- 1 | name: Elixir tests 2 | on: push 3 | 4 | jobs: 5 | test: 6 | runs-on: ubuntu-latest 7 | steps: 8 | - uses: actions/checkout@v2 9 | - uses: erlef/setup-beam@v1 10 | with: 11 | otp-version: 23.1.2 12 | elixir-version: 1.11.3 13 | - run: env MIX_ENV=test mix deps.get 14 | - run: env MIX_ENV=test mix compile --warnings-as-errors 15 | - run: | 16 | epmd -daemon 17 | env MIX_ENV=test mix test -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /_build/ 2 | /cover/ 3 | /deps/ 4 | /doc/ 5 | /.fetch 6 | erl_crash.dump 7 | *.ez 8 | crush-*.tar 9 | /priv/static/ 10 | /assets/node_modules 11 | /*.sh 12 | /*@* 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # crush 2 | 3 | A time-traveling, replicated, eventually-consistent key-value store. 4 | 5 | ## Warning 6 | 7 | **Operations are currently NOT atomic!** Writing to a key concurrently can and 8 | likely **will** cause issues. You have been warned! 9 | 10 | ## API 11 | 12 | ### `GET /:key` 13 | 14 | Returns the key and its revisions. The returned value looks like: 15 | 16 | ```Javascript 17 | [current_value, revisions] 18 | ``` 19 | where `revisions` is a possibly-empty list. 20 | 21 | If the key does not exist, an empty list is returned: 22 | 23 | ```Javascript 24 | [] 25 | ``` 26 | 27 | #### Query parameters 28 | 29 | - `revisions`: The number of revisions to return. Set to `all` for all 30 | revisions. If this value is not specified, or is not a number or the literal 31 | string `"all"`, an empty list of revisions will be returned. 32 | - `patch`: Whether or not to apply patches. By default, the returned revisions 33 | are a set of patches that can be used to revert to each previous state. If 34 | `patch` is set to `true`, the patches will be applied, and the computed 35 | values will be returned as the list of revisions. Defaults to `false`. 36 | 37 | ### `GET /:key/info` 38 | 39 | Returns info about the given key. 40 | 41 | ```js 42 | // ▶ curl localhost:7654/test/info 43 | {"key":"test","revision_count":11} 44 | ``` 45 | 46 | ### `PUT /:key` 47 | 48 | Sets the value at the given key. The request body is the value that is set. 49 | Returns the value that was set. 50 | 51 | ### `DELETE /:key` 52 | 53 | Deletes the key from the store. 
Returns the following JSON: 54 | ```js 55 | { 56 | "status": "ok", 57 | } 58 | ``` 59 | 60 | ### Forks 61 | 62 | All of these routes take a `:fork` parameter, like this: 63 | 64 | ``` 65 | GET /:key/:fork 66 | PUT /:key/:fork 67 | DELETE /:key/:fork 68 | GET /:key/:fork/info 69 | ``` 70 | 71 | If you don't provide a fork, the default fork is used. 72 | 73 | ### Forking keys 74 | 75 | You can fork a key to operate on a copy of its data without affecting the 76 | original. Likewise, you can merge a fork into a target fork. 77 | 78 | #### `POST /:key/:fork/fork/:target` 79 | 80 | ```js 81 | // On success 82 | { 83 | "status": "ok", 84 | } 85 | 86 | // On failure 87 | { 88 | "status": "error", 89 | "error": "not_found", 90 | } 91 | ``` 92 | 93 | #### `POST /:key/:fork/merge/:target` 94 | 95 | Blindly merges the fork's data into the target. **This is a destructive 96 | operation.** You are on your own for rollbacks. 97 | 98 | **This *overwrites* the target fork with the provided fork!** This does NOT 99 | do some sort of smart merge! The originating fork's value OVERWRITES the target 100 | fork's value directly, and has the relevant patch added to its history. 
use Mix.Config

config :crush, CrushWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "q06p4npNdXspi42YkYDdpSHvlBYiyJG5MKeA9nPbM9brPRIuEMQPWDcr41JH8sE4",
  render_errors: [view: CrushWeb.ErrorView, accepts: ~w(json), layout: false],
  pubsub_server: Crush.PubSub,
  live_view: [signing_salt: "NiUSI7dK"]

config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

config :phoenix, :json_library, Jason

# Optional shared secret for libcluster's gossip strategy; set GOSSIP_AUTH to
# keep strange nodes from gossiping their way into the cluster.
gossip_config =
  if System.get_env("GOSSIP_AUTH") do
    [secret: System.get_env("GOSSIP_AUTH")]
  else
    []
  end

# libcluster topology. The keyword key is only a label for this topology;
# renamed from `fumetsu_gossip` (a leftover from another project) to match
# this application.
gossip_topology =
  [
    crush_gossip: [
      strategy: Cluster.Strategy.Gossip,
      config: gossip_config,
    ]
  ]

# Erlang distribution cookie
cookie =
  if Mix.env() == :prod do
    System.get_env("COOKIE") || raise """
    \n
    ### ERROR ###

    You did not provide a cookie! This is REALLY DANGEROUS. You MUST provide a
    cookie, via the `COOKIE` environment variable, for crush to run!

    ## Why?

    crush enables Erlang distribution by default, to allow for automagic
    cluster formation. However, this means that, without a cookie, anyone can
    connect to your cluster and do all sorts of evil.

    See https://erlang.org/doc/reference_manual/distributed.html#security for
    more info.
    """
  else
    System.get_env("COOKIE") || "a"
  end

config :crush,
  cookie: cookie,
  topology: gossip_topology

import_config "#{Mix.env()}.exs"
45 | config :phoenix, :stacktrace_depth, 20 46 | config :phoenix, :logger, false 47 | 48 | # Initialize plugs at runtime for faster development compilation 49 | config :phoenix, :plug_init_mode, :runtime 50 | -------------------------------------------------------------------------------- /config/prod.exs: -------------------------------------------------------------------------------- 1 | use Mix.Config 2 | 3 | # For production, don't forget to configure the url host 4 | # to something meaningful, Phoenix uses this information 5 | # when generating URLs. 6 | # 7 | # Note we also include the path to a cache manifest 8 | # containing the digested version of static files. This 9 | # manifest is generated by the `mix phx.digest` task, 10 | # which you should run after static files are built and 11 | # before starting your production server. 12 | config :crush, CrushWeb.Endpoint, 13 | url: [host: "example.com", port: String.to_integer(System.get_env("PORT") || "80")], 14 | cache_static_manifest: "priv/static/cache_manifest.json" 15 | 16 | # Do not print debug messages in production 17 | config :logger, level: :info 18 | 19 | # ## SSL Support 20 | # 21 | # To get SSL working, you will need to add the `https` key 22 | # to the previous section and set your `:url` port to 443: 23 | # 24 | # config :crush, CrushWeb.Endpoint, 25 | # ... 26 | # url: [host: "example.com", port: 443], 27 | # https: [ 28 | # port: 443, 29 | # cipher_suite: :strong, 30 | # keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"), 31 | # certfile: System.get_env("SOME_APP_SSL_CERT_PATH"), 32 | # transport_options: [socket_opts: [:inet6]] 33 | # ] 34 | # 35 | # The `cipher_suite` is set to `:strong` to support only the 36 | # latest and more secure SSL ciphers. This means old browsers 37 | # and clients may not be supported. You can set it to 38 | # `:compatible` for wider support. 
39 | # 40 | # `:keyfile` and `:certfile` expect an absolute path to the key 41 | # and cert in disk or a relative path inside priv, for example 42 | # "priv/ssl/server.key". For all supported SSL configuration 43 | # options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1 44 | # 45 | # We also recommend setting `force_ssl` in your endpoint, ensuring 46 | # no data is ever sent via http, always redirecting to https: 47 | # 48 | # config :crush, CrushWeb.Endpoint, 49 | # force_ssl: [hsts: true] 50 | # 51 | # Check `Plug.SSL` for all available options in `force_ssl`. 52 | 53 | # Finally import the config/prod.secret.exs which loads secrets 54 | # and configuration from environment variables. 55 | import_config "prod.secret.exs" 56 | -------------------------------------------------------------------------------- /config/prod.secret.exs: -------------------------------------------------------------------------------- 1 | # In this file, we load production configuration and secrets 2 | # from environment variables. You can also hardcode secrets, 3 | # although such is generally not recommended and you have to 4 | # remember to add this file to your .gitignore. 5 | use Mix.Config 6 | 7 | secret_key_base = 8 | System.get_env("SECRET_KEY_BASE") || 9 | raise """ 10 | environment variable SECRET_KEY_BASE is missing. 11 | You can generate one by calling: mix phx.gen.secret 12 | """ 13 | 14 | config :crush, CrushWeb.Endpoint, 15 | http: [ 16 | port: String.to_integer(System.get_env("PORT") || "4000"), 17 | transport_options: [socket_opts: [:inet6]] 18 | ], 19 | secret_key_base: secret_key_base 20 | 21 | # ## Using releases (Elixir v1.9+) 22 | # 23 | # If you are doing OTP releases, you need to instruct Phoenix 24 | # to start each relevant endpoint: 25 | # 26 | # config :crush, CrushWeb.Endpoint, server: true 27 | # 28 | # Then you can assemble a release by calling `mix release`. 29 | # See `mix help release` for more information. 
30 | -------------------------------------------------------------------------------- /config/test.exs: -------------------------------------------------------------------------------- 1 | use Mix.Config 2 | 3 | # We don't run a server during test. If one is required, 4 | # you can enable the server option below. 5 | config :crush, CrushWeb.Endpoint, 6 | http: [port: 4002], 7 | server: false 8 | 9 | # Print only warnings and errors during test 10 | config :logger, level: :warn 11 | -------------------------------------------------------------------------------- /lib/crush.ex: -------------------------------------------------------------------------------- 1 | defmodule Crush do 2 | @moduledoc """ 3 | Crush keeps the contexts that define your domain 4 | and business logic. 5 | 6 | Contexts are also responsible for managing your data, regardless 7 | if it comes from the database, an external API or others. 8 | """ 9 | end 10 | -------------------------------------------------------------------------------- /lib/crush/application.ex: -------------------------------------------------------------------------------- 1 | defmodule Crush.Application do 2 | @moduledoc false 3 | 4 | use Application 5 | alias Crush.Utils 6 | require Logger 7 | 8 | def start(_type, _args) do 9 | cookie = :crush |> Application.get_env(:cookie) |> String.to_atom 10 | topology = Application.get_env :crush, :topology 11 | 12 | if Node.alive?(), do: Node.stop() 13 | node_name = 14 | 32 15 | |> Utils.random_string 16 | |> String.to_atom 17 | 18 | Logger.info "[CRUSH] [APP] node: booting @ #{node_name}" 19 | 20 | Node.start node_name, :shortnames 21 | Node.set_cookie Node.self(), cookie 22 | 23 | children = [ 24 | {Task.Supervisor, name: Crush.Tasker}, 25 | Crush.Cluster, 26 | {Cluster.Supervisor, [topology, [name: Crush.ClusterSupervisor]]}, 27 | CrushWeb.Telemetry, 28 | {Phoenix.PubSub, name: Crush.PubSub}, 29 | CrushWeb.Endpoint, 30 | ] 31 | 32 | opts = [strategy: :one_for_one, name: 
defmodule Crush.Cluster do
  @moduledoc """
  Owns the `DeltaCrdt` process that holds all stored data, and keeps its
  set of neighbour CRDTs in sync with the Erlang cluster topology.

  The CRDT pid is published through a public ETS table so that any process
  — including remote nodes, via the `Crush.Tasker` task supervisor — can
  reach it without serializing through this GenServer.
  """

  use GenServer
  require Logger

  @table :cluster_crdt
  # How long to wait for each remote node to report its CRDT pid before
  # giving up on it for this round of neighbour updates.
  @neighbour_timeout 5_000

  def start_link(_) do
    GenServer.start_link __MODULE__, 0, name: __MODULE__
  end

  def init(_) do
    :net_kernel.monitor_nodes true
    Logger.debug "[CRUSH] [CLUSTER] boot: node: monitor up"
    {:ok, crdt} = DeltaCrdt.start_link DeltaCrdt.AWLWWMap, storage_module: Crush.Persister
    :ets.new @table, [:named_table, :public, :set, read_concurrency: true]
    :ets.insert @table, {:crdt, crdt}
    {:ok, crdt}
  end

  # Recompute the CRDT's neighbour set whenever a node joins or leaves.
  def handle_info({msg, _node}, crdt) when msg in [:nodeup, :nodedown] do
    Logger.info "[CRUSH] [CLUSTER] topology: crdt: neighbours updating..."

    # Ask every connected node for its CRDT pid. Unreachable or slow nodes
    # are skipped rather than crashing this process: `async_nolink` plus
    # `Task.yield_many/2` means a remote task dying (e.g. the node went away
    # between `Node.list/0` and the RPC) cannot take us down with it. That
    # matters because a crash here would re-run init/1 and replace the CRDT.
    neighbours =
      Node.list()
      |> Enum.map(fn node ->
        Task.Supervisor.async_nolink {Crush.Tasker, node}, fn ->
          __MODULE__.get_crdt()
        end
      end)
      |> Task.yield_many(@neighbour_timeout)
      |> Enum.flat_map(fn
        {_task, {:ok, crdt_pid}} ->
          [crdt_pid]

        {task, _timeout_or_exit} ->
          # Didn't answer in time or exited: drop it from this round.
          Task.shutdown task, :brutal_kill
          []
      end)

    :ok = DeltaCrdt.set_neighbours crdt, neighbours
    Logger.info "[CRUSH] [CLUSTER] topology: crdt: neighbours updated"
    {:noreply, crdt}
  end

  # Ignore any other message (e.g. late :DOWN/:EXIT notifications from the
  # tasks above) instead of crashing the server.
  def handle_info(_msg, crdt), do: {:noreply, crdt}

  @spec get_crdt :: pid()
  def get_crdt do
    :ok = spin_on_table()
    [{:crdt, crdt}] = :ets.lookup @table, :crdt
    crdt
  end

  # Busy-wait until the ETS table exists. Only relevant in the tiny window
  # between this process starting and init/1 inserting the pid.
  defp spin_on_table do
    case :ets.whereis(@table) do
      :undefined ->
        # This should effectively never really spin
        :timer.sleep 5
        spin_on_table()

      _ -> :ok
    end
  end

  @spec write(String.t(), any()) :: :ok
  def write(k, v) do
    DeltaCrdt.mutate get_crdt(), :add, [k, v]
  end

  @spec read(String.t()) :: any()
  def read(k) do
    get_crdt() |> DeltaCrdt.read |> Map.get(k)
  end

  @spec delete(String.t()) :: :ok
  def delete(k) do
    DeltaCrdt.mutate get_crdt(), :remove, [k]
  end

  @spec keys() :: [binary()]
  def keys do
    get_crdt() |> DeltaCrdt.read |> Map.keys
  end
end
defmodule Crush.Store do
  @moduledoc """
  Key-value storage on top of the cluster-replicated CRDT.

  Every key lives inside a named fork (the default fork is `"default"`).
  Values are stored together with a list of reverse patches (see
  `Crush.Differ`) that allow recovering previous revisions of the value.
  """

  use TypedStruct
  alias Crush.{Cluster, Differ}

  @default_fork "default"

  # Defined before Item so the `Ancestor` alias is actually in scope for the
  # `ancestors` field type below; previously `Ancestor.t()` referenced a
  # not-yet-defined module.
  typedstruct module: Ancestor do
    field :fork, String.t()
    field :rev, non_neg_integer()
  end

  typedstruct module: Item do
    field :value, binary()
    field :patches, [binary()]
    field :fork, String.t(), default: "default"
    field :ancestors, [Crush.Store.Ancestor.t()], default: []
    field :rev, non_neg_integer(), default: 0
  end

  @doc """
  Fetch the item stored at `key` in `fork`, or `nil` if absent.

  `revisions` controls how many patches are included in the result: `0` for
  none, a positive integer for that many, or `:all`. When `patch?` is true
  the patches are applied in sequence, so the returned `patches` list holds
  the computed previous values rather than the raw diffs.
  """
  @spec get(String.t(), String.t(), :all | non_neg_integer(), boolean()) :: nil | Crush.Store.Item.t()
  def get(fork, key, revisions \\ 0, patch? \\ true) do
    case Cluster.read(to_key(fork, key)) do
      nil -> nil
      value -> extract_value_with_revisions value, revisions, patch?
    end
  end

  # No stored patches: nothing to take or apply.
  defp extract_value_with_revisions(%Item{patches: []} = item, _, _) do
    item
  end

  # Zero revisions requested: strip the stored patches from the result.
  defp extract_value_with_revisions(%Item{} = item, 0, _) do
    %{item | patches: []}
  end

  defp extract_value_with_revisions(%Item{value: value, patches: patches} = item, :all, true) do
    %{item | patches: reduce_revisions(value, patches)}
  end

  defp extract_value_with_revisions(%Item{} = item, :all, false) do
    item
  end

  defp extract_value_with_revisions(%Item{value: value, patches: patches} = item, revs, true) do
    requested_patches = Enum.take patches, revs
    %{item | patches: reduce_revisions(value, requested_patches)}
  end

  defp extract_value_with_revisions(%Item{patches: patches} = item, revs, false) do
    %{item | patches: Enum.take(patches, revs)}
  end

  # Apply each patch to the previously computed revision, starting from the
  # current value. Accumulates the revisions in reverse and flips once at
  # the end; the previous `revisions ++ [revision]` plus
  # `Enum.at(revisions, -1)` per step was O(n^2) in the number of patches.
  defp reduce_revisions(value, patches) do
    {_last, acc} =
      Enum.reduce patches, {value, []}, fn patch, {last, acc} ->
        revision = Differ.patch last, patch
        {revision, [revision | acc]}
      end

    Enum.reverse acc
  end

  @doc """
  Set `key` in `fork` to `incoming_value`, pushing a reverse patch for the
  previous value (if any) onto the revision history. Returns the value set.
  """
  @spec set(String.t(), String.t(), binary()) :: binary()
  def set(fork, key, incoming_value) do
    case get(fork, key, :all, false) do
      nil ->
        # Record the fork on the item itself; previously a brand-new item
        # always carried the default fork name even when written to another
        # fork, which broke merge/3's `fork: ^target_fork` assertion.
        :ok = Cluster.write to_key(fork, key), %Item{value: incoming_value, patches: [], fork: fork}
        incoming_value

      %Item{value: value, patches: patches, rev: rev} = item ->
        # Diff required to move from stored value to incoming value
        next_patch = Differ.diff incoming_value, value

        # Write all patches and new value
        :ok = Cluster.write to_key(fork, key), %{
          item
          | value: incoming_value,
            patches: [next_patch | patches],
            rev: rev + 1,
        }

        incoming_value
    end
  end

  @doc """
  Copy `item` to fork `target` under `key`, recording the originating fork
  and revision as an ancestor.
  """
  @spec fork(String.t(), String.t(), __MODULE__.Item.t()) :: :ok
  def fork(key, target, %Item{fork: fork, ancestors: ancestors, rev: rev} = item) do
    # Move the item to the target fork, and prepend previous fork to ancestors
    new_item = %{item | fork: target, ancestors: [%Ancestor{fork: fork, rev: rev} | ancestors]}
    Cluster.write to_key(target, key), new_item
  end

  @doc """
  Overwrite `key` in `target_fork` with the value from `source_fork`,
  pushing a reverse patch so the overwrite shows up in the target's history.
  Raises (MatchError) if either side is missing — callers are expected to
  check existence first.
  """
  @spec merge(String.t(), String.t(), String.t()) :: :ok
  def merge(key, source_fork, target_fork) do
    # Both fork fields are pinned: the stored items must actually belong to
    # the forks they were fetched from.
    %Item{
      value: source_value,
      fork: ^source_fork,
      rev: source_rev,
    } = get source_fork, key, :all, false

    %Item{
      value: target_value,
      fork: ^target_fork,
      patches: patches,
      ancestors: ancestors,
      rev: target_rev,
    } = target = get target_fork, key, :all, false

    next_patch = Differ.diff source_value, target_value
    merged_item = %{
      target
      | value: source_value,
        patches: [next_patch | patches],
        ancestors: [%Ancestor{fork: source_fork, rev: source_rev} | ancestors],
        rev: target_rev + 1,
    }

    Cluster.write to_key(target_fork, key), merged_item
  end

  @spec del(String.t(), String.t()) :: :ok
  def del(fork, key) do
    Cluster.delete to_key(fork, key)
    :ok
  end

  # Storage keys are namespaced by fork: "fork:key".
  defp to_key(fork, key), do: fork <> ":" <> key

  @spec default_fork() :: String.t()
  def default_fork, do: @default_fork
end
Base.url_encode64(padding: false) 6 | end 7 | end 8 | -------------------------------------------------------------------------------- /lib/crush_web.ex: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb do 2 | @moduledoc """ 3 | The entrypoint for defining your web interface, such 4 | as controllers, views, channels and so on. 5 | 6 | This can be used in your application as: 7 | 8 | use CrushWeb, :controller 9 | use CrushWeb, :view 10 | 11 | The definitions below will be executed for every view, 12 | controller, etc, so keep them short and clean, focused 13 | on imports, uses and aliases. 14 | 15 | Do NOT define functions inside the quoted expressions 16 | below. Instead, define any helper function in modules 17 | and import those modules here. 18 | """ 19 | 20 | def controller do 21 | quote do 22 | use Phoenix.Controller, namespace: CrushWeb 23 | 24 | import Plug.Conn 25 | alias CrushWeb.Router.Helpers, as: Routes 26 | end 27 | end 28 | 29 | def view do 30 | quote do 31 | use Phoenix.View, 32 | root: "lib/crush_web/templates", 33 | namespace: CrushWeb 34 | 35 | # Import convenience functions from controllers 36 | import Phoenix.Controller, 37 | only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1] 38 | 39 | # Include shared imports and aliases for views 40 | unquote(view_helpers()) 41 | end 42 | end 43 | 44 | def router do 45 | quote do 46 | use Phoenix.Router 47 | 48 | import Plug.Conn 49 | import Phoenix.Controller 50 | end 51 | end 52 | 53 | def channel do 54 | quote do 55 | use Phoenix.Channel 56 | end 57 | end 58 | 59 | defp view_helpers do 60 | quote do 61 | # Import basic rendering functionality (render, render_layout, etc) 62 | import Phoenix.View 63 | 64 | import CrushWeb.ErrorHelpers 65 | alias CrushWeb.Router.Helpers, as: Routes 66 | end 67 | end 68 | 69 | @doc """ 70 | When used, dispatch to the appropriate controller/view/etc. 
defmodule CrushWeb.CachingBodyReader do
  @moduledoc false
  # https://hexdocs.pm/plug/Plug.Parsers.html#module-custom-body-reader
  @behaviour Plug

  require Logger

  def init(opts), do: opts

  # As a plug: drain the request body so it ends up cached in
  # conn.assigns[:raw_body].
  def call(conn, opts) do
    {:ok, _body, conn} = read_body(conn, opts)
    conn
  end

  # Read the complete request body and stash it under :raw_body, unless a
  # non-empty body was already cached there by an earlier read.
  def read_body(conn, opts) do
    {:ok, body, conn} = read_full_body(conn, opts)

    conn =
      case conn.assigns[:raw_body] do
        cached when cached != nil and cached != "" -> conn
        _ -> %{conn | assigns: Map.put(conn.assigns, :raw_body, body)}
      end

    {:ok, body, conn}
  end

  # Drain the body chunk by chunk, collecting iodata and binarizing once at
  # the end.
  defp read_full_body(conn, opts, acc \\ []) do
    case Plug.Conn.read_body(conn, opts) do
      {:ok, chunk, conn} ->
        {:ok, IO.iodata_to_binary([acc, chunk]), conn}

      {:more, chunk, conn} ->
        read_full_body conn, opts, [acc, chunk]
    end
  end
end
defmodule CrushWeb.ApiController do
  @moduledoc """
  HTTP API for the key-value store: read/write/delete keys, inspect
  revision info, list keys, and fork/merge keys across forks.
  """

  use CrushWeb, :controller
  alias Crush.{Cluster, Store}
  alias Crush.Store.Item

  @doc """
  GET /:key and GET /:key/:fork — responds with `[value, revisions]`, or
  `[]` when the key does not exist. `revisions` and `patch` query params
  are described in the README.
  """
  def get(conn, %{"key" => key} = params) do
    fork = params["fork"] || Store.default_fork()
    rev_count = parse_rev_count params["revisions"]
    patch? = params["patch"] == "true"

    case Store.get(fork, key, rev_count, patch?) do
      %Item{value: value, patches: patches} ->
        json conn, [value_to_json(value), patches_to_json(patches)]

      nil -> json conn, []
    end
  end

  # "all" -> :all; a non-negative integer parses to itself; anything else
  # (missing, unparseable, or negative) means zero revisions. Negative
  # values previously leaked through to Enum.take/2, which takes from the
  # *end* of the patch list for negative counts.
  defp parse_rev_count("all"), do: :all
  defp parse_rev_count(nil), do: 0
  defp parse_rev_count(value) do
    case Integer.parse(value) do
      {integer, _} when integer >= 0 -> integer
      _ -> 0
    end
  end

  # Stored values/patches may contain raw binaries, which can't be emitted
  # as JSON directly; binaries are base64-encoded and tuples flattened to
  # lists. Top-level bare atoms pass through untouched (Jason renders them
  # as strings), matching the original behavior.
  defp value_to_json(part) when is_tuple(part) do
    part
    |> Tuple.to_list
    |> Enum.map(&element_to_json/1)
  end
  defp value_to_json(part) when is_list(part) do
    Enum.map part, &element_to_json/1
  end
  defp value_to_json(part) when is_binary(part), do: Base.encode64 part
  defp value_to_json(part), do: part

  # Shared element encoding for tuple and list containers (previously
  # duplicated inline in both clauses).
  defp element_to_json(x) when is_binary(x), do: Base.encode64 x
  defp element_to_json(x) when is_atom(x), do: Atom.to_string x
  defp element_to_json(x), do: value_to_json x

  defp patches_to_json([]), do: []
  defp patches_to_json(patches) do
    Enum.map patches, &value_to_json/1
  end

  @doc """
  PUT /:key and PUT /:key/:fork — store the raw request body (cached by
  CrushWeb.CachingBodyReader) as the key's new value.
  """
  def set(conn, %{"key" => key} = params) do
    fork = params["fork"] || Store.default_fork()
    body = conn.assigns.raw_body
    Store.set(fork, key, body)
    json conn, %{status: :ok}
  end

  @doc """
  DELETE /:key and DELETE /:key/:fork.
  """
  def del(conn, %{"key" => key} = params) do
    fork = params["fork"] || Store.default_fork()
    :ok = Store.del(fork, key)
    json conn, %{status: :ok}
  end

  @doc """
  GET /:key/info and GET /:key/:fork/info — report the key's revision count
  (0 when the key does not exist).
  """
  def key_info(conn, %{"key" => key} = params) do
    fork = params["fork"] || Store.default_fork()
    revision_count =
      case Store.get(fork, key, :all, false) do
        %Item{value: _, patches: patches} -> length(patches)
        nil -> 0
      end

    info = %{
      key: key,
      revision_count: revision_count,
    }

    json conn, info
  end

  @doc """
  POST /:key/:fork/fork/:target — copy the key from `fork` into `target`.
  404 with error `not_found` if the source key does not exist.
  """
  def fork(conn, %{"key" => key, "fork" => fork, "target" => target}) do
    item =
      case Store.get(fork, key, :all, false) do
        %Item{} = item -> item
        _ -> nil
      end

    if item do
      :ok = Store.fork key, target, item
      json conn, %{status: :ok}
    else
      conn
      |> put_status(:not_found)
      |> json(%{status: :error, error: :not_found})
    end
  end

  @doc """
  POST /:key/:fork/merge/:target — overwrite the target fork's value with
  the source fork's value (destructive; see README).
  """
  def merge(conn, %{"key" => key, "fork" => fork, "target" => target}) do
    source? =
      case Store.get(fork, key, 0, false) do
        %Item{} -> true
        _ -> false
      end

    target? =
      case Store.get(target, key, :all, false) do
        %Item{} -> true
        _ -> false
      end

    cond do
      not source? ->
        # NOTE(review): the README documents this error as "fork_not_found";
        # the code has always emitted :source_not_found. Keeping the wire
        # format as-is — reconcile the README or this atom deliberately.
        conn
        |> put_status(404)
        |> json(%{status: :error, error: :source_not_found})

      not target? ->
        conn
        |> put_status(404)
        |> json(%{status: :error, error: :target_not_found})

      true ->
        :ok = Store.merge key, fork, target
        json conn, %{status: :ok}
    end
  end

  @doc """
  GET /keys — list all storage keys, optionally filtered by `prefix`
  (prefix matching is applied within the requested fork's namespace).
  """
  def keys(conn, %{"prefix" => prefix} = params) do
    prefix = (params["fork"] || Store.default_fork()) <> ":" <> prefix
    keys =
      Enum.filter(Cluster.keys(), fn key ->
        String.starts_with? key, prefix
      end)

    json conn, keys
  end

  def keys(conn, _) do
    json conn, Cluster.keys()
  end
end
do 11 |     plug Phoenix.CodeReloader
12 |   end
13 | 
14 |   plug Plug.RequestId
15 |   plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
16 | 
17 |   plug CrushWeb.CachingBodyReader
18 | 
19 |   plug Plug.Parsers,
20 |     parsers: [:urlencoded, :multipart],
21 |     pass: ["*/*"],
22 |     json_decoder: Phoenix.json_library()
23 | 
24 |   plug Plug.MethodOverride
25 |   plug Plug.Head
26 |   plug CrushWeb.Router
27 | end
28 | 
--------------------------------------------------------------------------------
/lib/crush_web/router.ex:
--------------------------------------------------------------------------------
 1 | defmodule CrushWeb.Router do
 2 |   use CrushWeb, :router
 3 | 
 4 |   pipeline :api do
 5 |     # plug :accepts, ["json"]
 6 |   end
 7 | 
 8 |   scope "/", CrushWeb do
 9 |     pipe_through :api
10 |     get "/keys", ApiController, :keys
11 | 
12 |     # Phoenix dispatches to the first route that matches, top-down. The
13 |     # routes with the static "info" segment must be declared BEFORE the
14 |     # wildcard "/:key/:fork" routes: otherwise GET /:key/info is captured
15 |     # by "/:key/:fork" with fork == "info" and :key_info is unreachable
16 |     # (the README documents GET /:key/info as a supported endpoint).
17 |     get "/:key/info", ApiController, :key_info
18 |     get "/:key/:fork/info", ApiController, :key_info
19 | 
20 |     get "/:key", ApiController, :get
21 |     put "/:key", ApiController, :set
22 |     delete "/:key", ApiController, :del
23 |     get "/:key/:fork", ApiController, :get
24 |     put "/:key/:fork", ApiController, :set
25 |     delete "/:key/:fork", ApiController, :del
26 | 
27 |     post "/:key/:fork/fork/:target", ApiController, :fork
28 |     post "/:key/:fork/merge/:target", ApiController, :merge
29 |   end
30 | end
31 | 
--------------------------------------------------------------------------------
/lib/crush_web/telemetry.ex:
--------------------------------------------------------------------------------
 1 | defmodule CrushWeb.Telemetry do
 2 |   use Supervisor
 3 |   import Telemetry.Metrics
 4 | 
 5 |   def start_link(arg) do
 6 |     Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
 7 |   end
 8 | 
 9 |   @impl true
10 |   def init(_arg) do
11 |     children = [
12 |       # Telemetry poller will execute the given period measurements
13 |       # every 10_000ms.
Learn more here: https://hexdocs.pm/telemetry_metrics 14 | {:telemetry_poller, measurements: periodic_measurements(), period: 10_000} 15 | # Add reporters as children of your supervision tree. 16 | # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()} 17 | ] 18 | 19 | Supervisor.init(children, strategy: :one_for_one) 20 | end 21 | 22 | def metrics do 23 | [ 24 | # Phoenix Metrics 25 | summary("phoenix.endpoint.stop.duration", 26 | unit: {:native, :millisecond} 27 | ), 28 | summary("phoenix.router_dispatch.stop.duration", 29 | tags: [:route], 30 | unit: {:native, :millisecond} 31 | ), 32 | 33 | # VM Metrics 34 | summary("vm.memory.total", unit: {:byte, :kilobyte}), 35 | summary("vm.total_run_queue_lengths.total"), 36 | summary("vm.total_run_queue_lengths.cpu"), 37 | summary("vm.total_run_queue_lengths.io") 38 | ] 39 | end 40 | 41 | defp periodic_measurements do 42 | [ 43 | # A module, function and arguments to be invoked periodically. 44 | # This function must call :telemetry.execute/3 and a metric must be added above. 45 | # {CrushWeb, :count_users, []} 46 | ] 47 | end 48 | end 49 | -------------------------------------------------------------------------------- /lib/crush_web/views/error_helpers.ex: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ErrorHelpers do 2 | @moduledoc """ 3 | Conveniences for translating and building error messages. 4 | """ 5 | 6 | @doc """ 7 | Translates an error message. 8 | """ 9 | def translate_error({msg, opts}) do 10 | # Because the error messages we show in our forms and APIs 11 | # are defined inside Ecto, we need to translate them dynamically. 
12 | Enum.reduce(opts, msg, fn {key, value}, acc -> 13 | String.replace(acc, "%{#{key}}", to_string(value)) 14 | end) 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /lib/crush_web/views/error_view.ex: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ErrorView do 2 | use CrushWeb, :view 3 | 4 | # If you want to customize a particular status code 5 | # for a certain format, you may uncomment below. 6 | # def render("500.json", _assigns) do 7 | # %{errors: %{detail: "Internal Server Error"}} 8 | # end 9 | 10 | # By default, Phoenix returns the status message from 11 | # the template name. For example, "404.json" becomes 12 | # "Not Found". 13 | def template_not_found(template, _assigns) do 14 | %{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}} 15 | end 16 | end 17 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Crush.MixProject do 2 | use Mix.Project 3 | 4 | def project do 5 | [ 6 | app: :crush, 7 | version: "0.1.0", 8 | elixir: "~> 1.7", 9 | elixirc_paths: elixirc_paths(Mix.env()), 10 | compilers: [:phoenix] ++ Mix.compilers(), 11 | start_permanent: Mix.env() == :prod, 12 | aliases: aliases(), 13 | deps: deps() 14 | ] 15 | end 16 | 17 | # Configuration for the OTP application. 18 | # 19 | # Type `mix help compile.app` for more information. 20 | def application do 21 | [ 22 | mod: {Crush.Application, []}, 23 | extra_applications: [:logger, :runtime_tools] 24 | ] 25 | end 26 | 27 | # Specifies which paths to compile per environment. 28 | defp elixirc_paths(:test), do: ["lib", "test/support"] 29 | defp elixirc_paths(_), do: ["lib"] 30 | 31 | # Specifies your project dependencies. 32 | # 33 | # Type `mix help deps` for examples and options. 
34 | defp deps do 35 | [ 36 | {:phoenix, "~> 1.5.7"}, 37 | {:telemetry_metrics, "~> 0.4"}, 38 | {:telemetry_poller, "~> 0.4"}, 39 | {:jason, "~> 1.0"}, 40 | {:plug_cowboy, "~> 2.0"}, 41 | {:libcluster, github: "svrdlans/libcluster", ref: "6f57e1f"}, 42 | {:delta_crdt, "~> 0.5.10"}, 43 | {:differ, "~> 0.1.1"}, 44 | {:typed_struct, "~> 0.2.1"}, 45 | ] 46 | end 47 | 48 | # Aliases are shortcuts or tasks specific to the current project. 49 | # For example, to install project dependencies and perform other setup tasks, run: 50 | # 51 | # $ mix setup 52 | # 53 | # See the documentation for `Mix` for more info on aliases. 54 | defp aliases do 55 | [ 56 | setup: ["deps.get"] 57 | ] 58 | end 59 | end 60 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"}, 3 | "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.3.1", "ebd1a1d7aff97f27c66654e78ece187abdc646992714164380d8a041eda16754", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a6efd3366130eab84ca372cbd4a7d3c3a97bdfcfb4911233b035d117063f0af"}, 4 | "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"}, 5 | "delta_crdt": {:hex, :delta_crdt, "0.5.10", "e866f8d1b89bee497a98b9793e9ba0ea514112a1c41a0c30dcde3463d4984d14", [:mix], [{:merkle_map, "~> 0.2.0", [hex: :merkle_map, repo: "hexpm", optional: false]}, 
{:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ed5c685df9528788d7c056762c23f75358f3cadd4779698188a55ccae24d087a"}, 6 | "differ": {:hex, :differ, "0.1.1", "581a90ced623e5f3949d115959251f200062538274ec624484f3373af62d824e", [:mix], [], "hexpm", "f1f9d3dd4509a5c1e505c9556e6b0d80f20db2826a06c4bd6a044f77424c0db3"}, 7 | "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"}, 8 | "libcluster": {:git, "https://github.com/svrdlans/libcluster.git", "6f57e1f3ed80ebbeb353290c7fd79e45dc8c946c", [ref: "6f57e1f"]}, 9 | "merkle_map": {:hex, :merkle_map, "0.2.1", "01a88c87a6b9fb594c67c17ebaf047ee55ffa34e74297aa583ed87148006c4c8", [:mix], [], "hexpm", "fed4d143a5c8166eee4fa2b49564f3c4eace9cb252f0a82c1613bba905b2d04d"}, 10 | "mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"}, 11 | "phoenix": {:hex, :phoenix, "1.5.9", "a6368d36cfd59d917b37c44386e01315bc89f7609a10a45a22f47c007edf2597", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7e4bce20a67c012f1fbb0af90e5da49fa7bf0d34e3a067795703b74aef75427d"}, 12 | "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.0.0", 
"a1ae76717bb168cdeb10ec9d92d1480fec99e3080f011402c0a2d68d47395ffb", [:mix], [], "hexpm", "c52d948c4f261577b9c6fa804be91884b381a7f8f18450c5045975435350f771"}, 13 | "plug": {:hex, :plug, "1.11.1", "f2992bac66fdae679453c9e86134a4201f6f43a687d8ff1cd1b2862d53c80259", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "23524e4fefbb587c11f0833b3910bfb414bf2e2534d61928e920f54e3a1b881f"}, 14 | "plug_cowboy": {:hex, :plug_cowboy, "2.5.0", "51c998f788c4e68fc9f947a5eba8c215fbb1d63a520f7604134cab0270ea6513", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5b2c8925a5e2587446f33810a58c01e66b3c345652eeec809b76ba007acde71a"}, 15 | "plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"}, 16 | "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, 17 | "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"}, 18 | "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.0", "da9d49ee7e6bb1c259d36ce6539cd45ae14d81247a2b0c90edf55e2b50507f7b", [:mix], [{:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5cfe67ad464b243835512aa44321cee91faed6ea868d7fb761d7016e02915c3d"}, 
19 | "telemetry_poller": {:hex, :telemetry_poller, "0.5.1", "21071cc2e536810bac5628b935521ff3e28f0303e770951158c73eaaa01e962a", [:rebar3], [{:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4cab72069210bc6e7a080cec9afffad1b33370149ed5d379b81c7c5f0c663fd4"}, 20 | "typed_struct": {:hex, :typed_struct, "0.2.1", "e1993414c371f09ff25231393b6430bd89d780e2a499ae3b2d2b00852f593d97", [:mix], [], "hexpm", "8f5218c35ec38262f627b2c522542f1eae41f625f92649c0af701a6fab2e11b3"}, 21 | } 22 | -------------------------------------------------------------------------------- /test/crush/store_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Crush.StoreTest do 2 | use ExUnit.Case, async: false 3 | alias Crush.Store 4 | alias Crush.Store.{Ancestor, Item} 5 | 6 | @key "test" 7 | @value "test 1" 8 | @value_2 "test 2" 9 | 10 | @fork "test-fork" 11 | @default_fork Store.default_fork() 12 | 13 | setup do 14 | on_exit fn -> 15 | :ok = Store.del @default_fork, @key 16 | :ok = Store.del @fork, @key 17 | end 18 | end 19 | 20 | test "it works" do 21 | assert @value == Store.set @default_fork, @key, @value 22 | assert %Item{ 23 | value: @value, 24 | patches: [], 25 | rev: 0, 26 | } == Store.get @default_fork, @key, :all 27 | end 28 | 29 | test "fetching revisions works" do 30 | Store.set @default_fork, @key, @value 31 | Store.set @default_fork, @key, @value_2 32 | 33 | assert %Item{ 34 | value: @value_2, 35 | patches: [ 36 | [eq: "test ", del: "2", ins: "1"] 37 | ], 38 | rev: 1, 39 | } == Store.get @default_fork, @key, :all, false 40 | end 41 | 42 | test "patching revisions works" do 43 | Store.set @default_fork, @key, @value 44 | Store.set @default_fork, @key, @value_2 45 | Store.set @default_fork, @key, @value 46 | 47 | assert %Item{ 48 | value: @value, 49 | patches: [@value_2, @value], 50 | rev: 2, 51 | } == Store.get @default_fork, @key, :all, true 52 | end 53 | 54 | test "forking works" do 55 | 
Store.set @default_fork, @key, @value 56 | 57 | item = Store.get @default_fork, @key 58 | assert :ok == Store.fork @key, @fork, item 59 | 60 | default = @default_fork 61 | assert match? %Item{ 62 | value: @value, 63 | fork: @fork, 64 | ancestors: [%Ancestor{fork: ^default, rev: 0}], 65 | }, Store.get(@fork, @key) 66 | end 67 | 68 | test "merging works" do 69 | Store.set @default_fork, @key, @value 70 | 71 | item = Store.get @default_fork, @key 72 | assert :ok == Store.fork @key, @fork, item 73 | 74 | Store.set @fork, @key, @value_2 75 | 76 | assert :ok == Store.merge @key, @fork, @default_fork 77 | 78 | item = Store.get @default_fork, @key, :all, false 79 | default = @default_fork 80 | assert match? %Item{ 81 | fork: ^default, 82 | ancestors: [%Ancestor{fork: @fork, rev: 1}], 83 | patches: [_], 84 | value: @value_2, 85 | rev: 1, 86 | }, item 87 | end 88 | end 89 | -------------------------------------------------------------------------------- /test/crush_web/controllers/api_controller_test.exs: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ApiControllerTest do 2 | use CrushWeb.ConnCase, async: false 3 | alias Crush.Store 4 | alias CrushWeb.Router.Helpers, as: Routes 5 | alias Plug.Conn 6 | 7 | @key "test" 8 | @value "value" 9 | @value_2 "value 2" 10 | @fork "test-fork" 11 | @default_fork Store.default_fork() 12 | 13 | setup do 14 | %{conn: build_conn()} 15 | 16 | on_exit fn -> 17 | :ok = Store.del @default_fork, @key 18 | :ok = Store.del @fork, @key 19 | end 20 | end 21 | 22 | test "that fetching a non-existent key returns empty list", %{conn: conn} do 23 | res = 24 | conn 25 | |> get(Routes.api_path(conn, :get, @key)) 26 | |> json_response(200) 27 | 28 | assert [] == res 29 | end 30 | 31 | test "that fetching a key works", %{conn: conn} do 32 | res = 33 | conn 34 | |> put_req_header("content-type", "text/plain") 35 | |> Conn.assign(:raw_body, @value) 36 | |> put(Routes.api_path(conn, :set, @key)) 37 | |> 
response(200) 38 | 39 | assert res == "{\"status\":\"ok\"}" 40 | 41 | res = 42 | conn 43 | |> get(Routes.api_path(conn, :get, @key)) 44 | |> json_response(200) 45 | 46 | assert [Base.encode64(@value), []] == res 47 | end 48 | 49 | test "that forking a key works", %{conn: conn} do 50 | conn 51 | |> put_req_header("content-type", "text/plain") 52 | |> Conn.assign(:raw_body, @value) 53 | |> put(Routes.api_path(conn, :set, @key)) 54 | |> response(200) 55 | 56 | %{"status" => status} = 57 | conn 58 | |> put_req_header("content-type", "text/plain") 59 | |> post(Routes.api_path(conn, :fork, @key, @default_fork, @fork)) 60 | |> json_response(200) 61 | 62 | assert "ok" == status 63 | end 64 | 65 | test "that forking and merging a key works", %{conn: conn} do 66 | conn 67 | |> put_req_header("content-type", "text/plain") 68 | |> Conn.assign(:raw_body, @value) 69 | |> put(Routes.api_path(conn, :set, @key)) 70 | |> response(200) 71 | 72 | conn 73 | |> put_req_header("content-type", "text/plain") 74 | |> post(Routes.api_path(conn, :fork, @key, @default_fork, @fork)) 75 | |> json_response(200) 76 | 77 | conn 78 | |> put_req_header("content-type", "text/plain") 79 | |> Conn.assign(:raw_body, @value_2) 80 | |> put(Routes.api_path(conn, :set, @key, @fork)) 81 | |> response(200) 82 | 83 | conn 84 | |> put_req_header("content-type", "text/plain") 85 | |> post(Routes.api_path(conn, :merge, @key, @fork, @default_fork)) 86 | |> json_response(200) 87 | 88 | res = 89 | conn 90 | |> put_req_header("content-type", "text/plain") 91 | |> get(Routes.api_path(conn, :get, @key), revisions: "all") 92 | |> json_response(200) 93 | 94 | assert [Base.encode64(@value_2), [[["eq", Base.encode64(@value)], ["del", Base.encode64(" 2")]]]] == res 95 | end 96 | end 97 | -------------------------------------------------------------------------------- /test/crush_web/views/error_view_test.exs: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ErrorViewTest 
do 2 | use CrushWeb.ConnCase, async: true 3 | 4 | # Bring render/3 and render_to_string/3 for testing custom views 5 | import Phoenix.View 6 | 7 | test "renders 404.json" do 8 | assert render(CrushWeb.ErrorView, "404.json", []) == %{errors: %{detail: "Not Found"}} 9 | end 10 | 11 | test "renders 500.json" do 12 | assert render(CrushWeb.ErrorView, "500.json", []) == 13 | %{errors: %{detail: "Internal Server Error"}} 14 | end 15 | end 16 | -------------------------------------------------------------------------------- /test/support/channel_case.ex: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ChannelCase do 2 | @moduledoc """ 3 | This module defines the test case to be used by 4 | channel tests. 5 | 6 | Such tests rely on `Phoenix.ChannelTest` and also 7 | import other functionality to make it easier 8 | to build common data structures and query the data layer. 9 | 10 | Finally, if the test case interacts with the database, 11 | we enable the SQL sandbox, so changes done to the database 12 | are reverted at the end of every test. If you are using 13 | PostgreSQL, you can even run database tests asynchronously 14 | by setting `use CrushWeb.ChannelCase, async: true`, although 15 | this option is not recommended for other databases. 16 | """ 17 | 18 | use ExUnit.CaseTemplate 19 | 20 | using do 21 | quote do 22 | # Import conveniences for testing with channels 23 | import Phoenix.ChannelTest 24 | import CrushWeb.ChannelCase 25 | 26 | # The default endpoint for testing 27 | @endpoint CrushWeb.Endpoint 28 | end 29 | end 30 | 31 | setup _tags do 32 | :ok 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /test/support/conn_case.ex: -------------------------------------------------------------------------------- 1 | defmodule CrushWeb.ConnCase do 2 | @moduledoc """ 3 | This module defines the test case to be used by 4 | tests that require setting up a connection. 
5 | 6 | Such tests rely on `Phoenix.ConnTest` and also 7 | import other functionality to make it easier 8 | to build common data structures and query the data layer. 9 | 10 | Finally, if the test case interacts with the database, 11 | we enable the SQL sandbox, so changes done to the database 12 | are reverted at the end of every test. If you are using 13 | PostgreSQL, you can even run database tests asynchronously 14 | by setting `use CrushWeb.ConnCase, async: true`, although 15 | this option is not recommended for other databases. 16 | """ 17 | 18 | use ExUnit.CaseTemplate 19 | 20 | using do 21 | quote do 22 | # Import conveniences for testing with connections 23 | import Plug.Conn 24 | import Phoenix.ConnTest 25 | import CrushWeb.ConnCase 26 | 27 | alias CrushWeb.Router.Helpers, as: Routes 28 | 29 | # The default endpoint for testing 30 | @endpoint CrushWeb.Endpoint 31 | end 32 | end 33 | 34 | setup _tags do 35 | {:ok, conn: Phoenix.ConnTest.build_conn()} 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | --------------------------------------------------------------------------------