├── test └── test_helper.exs ├── .formatter.exs ├── config └── config.exs ├── .gitignore ├── mix.exs ├── README.md ├── mix.lock └── lib ├── cockroachdb.ex └── cockroachdb └── connection.ex /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | # This file is responsible for configuring your application 2 | # and its dependencies with the aid of the Mix.Config module. 3 | use Mix.Config 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 
23 | ecto_cockroachdb-*.tar 24 | 25 | # Ignore ElixirLs 26 | .elixir_ls 27 | 28 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule EctoCockroachDB.MixProject do 2 | use Mix.Project 3 | 4 | @version "1.0.0" 5 | 6 | def project do 7 | [ 8 | app: :ecto_cockroachdb, 9 | version: @version, 10 | elixir: "~> 1.5", 11 | start_permanent: Mix.env() == :prod, 12 | deps: deps(), 13 | name: "CockroachDB Ecto Adaptor", 14 | description: "CockroachDB adaptor for Ecto", 15 | source_url: "https://github.com/jumpn/ecto_cockroachdb", 16 | docs: docs(), 17 | package: package() 18 | ] 19 | end 20 | 21 | # Run "mix help compile.app" to learn about applications. 22 | def application do 23 | [ 24 | extra_applications: [:logger] 25 | ] 26 | end 27 | 28 | defp docs do 29 | [ 30 | source_ref: "v#{@version}", 31 | main: "readme", 32 | extras: ["README.md"] 33 | ] 34 | end 35 | 36 | defp deps do 37 | [ 38 | {:ex_doc, "~> 0.20", only: :dev}, 39 | {:ecto, "~> 3.1"}, 40 | {:ecto_sql, "~> 3.1"}, 41 | {:postgrex, ">= 0.14.3"} 42 | ] 43 | end 44 | 45 | defp package do 46 | [ 47 | maintainers: ["Christian Meunier"], 48 | licenses: ["MIT"], 49 | links: %{"Github" => "https://github.com/jumpn/ecto_cockroachdb"} 50 | ] 51 | end 52 | end 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deprecated / No longer needed ! 2 | 3 | This fork is no longer necessary given CockroachDB 20.1+ supports timestamp with optional precision. 
4 | You can now use the Postgresql Adaptor bundled with `ecto_sql`, the only special thing you need to do is to disable migration locking in your repo config with: 5 | `migration_lock: nil` 6 | 7 | 8 | 9 | # CockroachDB Ecto Adaptor 10 | 11 | This is a very small fork of the official Postgresql Adaptor bundled with `ecto_sql`. 12 | 13 | The main reason for this fork is Ecto 3.x no longer keeps microseconds information for `:time`, `:naive_datetime` and `:utc_datetime`. 14 | 15 | The net effect is it uses timestamp(0) when creating fields with such types and the migration table used by Ecto is using one of those types. 16 | 17 | The problem is CockroachDB does not support timestamp with precision unless it's the default precision (6). 18 | 19 | This adaptor uses the default precision for those types. 20 | It also disables the migration lock given locking is not supported by CockroachDB either. 21 | 22 | For more information, check the [github issue](https://github.com/cockroachdb/cockroach/issues/32098) 23 | ## Installation 24 | 25 | The package can be installed 26 | by adding `ecto_cockroachdb` to your list of dependencies in `mix.exs`: 27 | 28 | ```elixir 29 | def deps do 30 | [ 31 | {:ecto_cockroachdb, "~> 1.0.0"} 32 | ] 33 | end 34 | ``` 35 | 36 | ## Usage 37 | 38 | ```elixir 39 | # In your application code 40 | defmodule Sample.Repo do 41 | use Ecto.Repo, 42 | otp_app: :my_app, 43 | adapter: Ecto.Adapters.CockroachDB 44 | end 45 | ``` 46 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"}, 3 | "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], 
"hexpm"}, 4 | "decimal": {:hex, :decimal, "1.7.0", "30d6b52c88541f9a66637359ddf85016df9eb266170d53105f02e4a67e00c5aa", [:mix], [], "hexpm"}, 5 | "earmark": {:hex, :earmark, "1.3.2", "b840562ea3d67795ffbb5bd88940b1bed0ed9fa32834915125ea7d02e35888a5", [:mix], [], "hexpm"}, 6 | "ecto": {:hex, :ecto, "3.1.4", "69d852da7a9f04ede725855a35ede48d158ca11a404fe94f8b2fb3b2162cd3c9", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, 7 | "ecto_sql": {:hex, :ecto_sql, "3.1.2", "529908c0ab6aac9465a429fd13552b0cca86ee338c19bec61a4d2df2d6c9ab31", [:mix], [{:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.9.1", [hex: :mariaex, repo: "hexpm", optional: true]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.14.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"}, 8 | "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, 9 | "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, 10 | "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"}, 11 | "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"}, 12 | "postgrex": {:hex, :postgrex, 
"0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"}, 13 | "telemetry": {:hex, :telemetry, "0.4.0", "8339bee3fa8b91cb84d14c2935f8ecf399ccd87301ad6da6b71c09553834b2ab", [:rebar3], [], "hexpm"}, 14 | } 15 | -------------------------------------------------------------------------------- /lib/cockroachdb.ex: -------------------------------------------------------------------------------- 1 | defmodule Ecto.Adapters.CockroachDB do 2 | @moduledoc """ 3 | Adapter module for CockroachDB. 4 | 5 | It uses `Postgrex` for communicating to the database. 6 | 7 | ## Features 8 | 9 | * Full query support (including joins, preloads and associations) 10 | * Support for transactions 11 | * Support for data migrations 12 | * Support for ecto.create and ecto.drop operations 13 | * Support for transactional tests via `Ecto.Adapters.SQL` 14 | 15 | ## Options 16 | 17 | Postgres options split in different categories described 18 | below. All options can be given via the repository 19 | configuration: 20 | 21 | config :your_app, YourApp.Repo, 22 | ... 23 | 24 | ### Connection options 25 | 26 | * `:hostname` - Server hostname 27 | * `:socket_dir` - Connect to Postgres via UNIX sockets in the given directory 28 | The socket name is derived based on the port. This is the preferred method 29 | for configuring sockets and it takes precedence over the hostname. If you are 30 | connecting to a socket outside of the Postgres convention, use `:socket` instead; 31 | * `:socket` - Connect to Postgres via UNIX sockets in the given path. 
32 | This option takes precedence over the `:hostname` and `:socket_dir` 33 | * `:username` - Username 34 | * `:password` - User password 35 | * `:port` - Server port (default: 5432) 36 | * `:database` - the database to connect to 37 | * `:maintenance_database` - Specifies the name of the database to connect to when 38 | creating or dropping the database. Defaults to `"postgres"` 39 | * `:pool` - The connection pool module, defaults to `DBConnection.ConnectionPool` 40 | * `:ssl` - Set to true if ssl should be used (default: false) 41 | * `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs 42 | * `:parameters` - Keyword list of connection parameters 43 | * `:connect_timeout` - The timeout for establishing new connections (default: 5000) 44 | * `:prepare` - How to prepare queries, either `:named` to use named queries 45 | or `:unnamed` to force unnamed queries (default: `:named`) 46 | * `:socket_options` - Specifies socket configuration 47 | 48 | The `:socket_options` are particularly useful when configuring the size 49 | of both send and receive buffers. For example, when Ecto starts with a 50 | pool of 20 connections, the memory usage may quickly grow from 20MB to 51 | 50MB based on the operating system default values for TCP buffers. It is 52 | advised to stick with the operating system defaults but they can be 53 | tweaked if desired: 54 | 55 | socket_options: [recbuf: 8192, sndbuf: 8192] 56 | 57 | We also recommend developers to consult the `Postgrex.start_link/1` 58 | documentation for a complete listing of all supported options. 
59 | 60 | ### Storage options 61 | 62 | * `:encoding` - the database encoding (default: "UTF8") 63 | * `:template` - the template to create the database from 64 | * `:lc_collate` - the collation order 65 | * `:lc_ctype` - the character classification 66 | * `:dump_path` - where to place dumped structures 67 | 68 | ### After connect callback 69 | 70 | If you want to execute a callback as soon as connection is established 71 | to the database, you can use the `:after_connect` configuration. For 72 | example, in your repository configuration you can add: 73 | 74 | after_connect: {Postgrex, :query!, ["SET search_path TO global_prefix", []]} 75 | 76 | You can also specify your own module that will receive the Postgrex 77 | connection as argument. 78 | 79 | ## Extensions 80 | 81 | Both PostgreSQL and its adapter for Elixir, Postgrex, support an 82 | extension system. If you want to use custom extensions for Postgrex 83 | alongside Ecto, you must define a type module with your extensions. 84 | Create a new file anywhere in your application with the following: 85 | 86 | Postgrex.Types.define(MyApp.PostgresTypes, 87 | [MyExtension.Foo, MyExtensionBar] ++ Ecto.Adapters.Postgres.extensions()) 88 | 89 | Once your type module is defined, you can configure the repository to use it: 90 | 91 | config :my_app, MyApp.Repo, types: MyApp.PostgresTypes 92 | 93 | """ 94 | 95 | # Inherit all behaviour from Ecto.Adapters.SQL 96 | use Ecto.Adapters.SQL, 97 | driver: :postgrex, 98 | migration_lock: nil 99 | 100 | # And provide a custom storage implementation 101 | @behaviour Ecto.Adapter.Storage 102 | @behaviour Ecto.Adapter.Structure 103 | 104 | @default_maintenance_database "postgres" 105 | 106 | @doc """ 107 | All Ecto extensions for Postgrex. 
108 | """ 109 | def extensions do 110 | [] 111 | end 112 | 113 | # Support arrays in place of IN 114 | @impl true 115 | def dumpers({:embed, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)] 116 | def dumpers({:map, _} = type, _), do: [&Ecto.Adapters.SQL.dump_embed(type, &1)] 117 | def dumpers({:in, sub}, {:in, sub}), do: [{:array, sub}] 118 | def dumpers(:binary_id, type), do: [type, Ecto.UUID] 119 | def dumpers(_, type), do: [type] 120 | 121 | ## Storage API 122 | 123 | @impl true 124 | def storage_up(opts) do 125 | database = 126 | Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration" 127 | 128 | encoding = opts[:encoding] || "UTF8" 129 | maintenance_database = Keyword.get(opts, :maintenance_database, @default_maintenance_database) 130 | opts = Keyword.put(opts, :database, maintenance_database) 131 | 132 | command = 133 | ~s(CREATE DATABASE "#{database}" ENCODING '#{encoding}') 134 | |> concat_if(opts[:template], &"TEMPLATE=#{&1}") 135 | |> concat_if(opts[:lc_ctype], &"LC_CTYPE='#{&1}'") 136 | |> concat_if(opts[:lc_collate], &"LC_COLLATE='#{&1}'") 137 | 138 | case run_query(command, opts) do 139 | {:ok, _} -> 140 | :ok 141 | 142 | {:error, %{postgres: %{code: :duplicate_database}}} -> 143 | {:error, :already_up} 144 | 145 | {:error, error} -> 146 | {:error, Exception.message(error)} 147 | end 148 | end 149 | 150 | defp concat_if(content, nil, _fun), do: content 151 | defp concat_if(content, value, fun), do: content <> " " <> fun.(value) 152 | 153 | @impl true 154 | def storage_down(opts) do 155 | database = 156 | Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration" 157 | 158 | command = "DROP DATABASE \"#{database}\"" 159 | maintenance_database = Keyword.get(opts, :maintenance_database, @default_maintenance_database) 160 | opts = Keyword.put(opts, :database, maintenance_database) 161 | 162 | case run_query(command, opts) do 163 | {:ok, _} -> 164 | :ok 165 | 166 | {:error, %{postgres: 
%{code: :invalid_catalog_name}}} -> 167 | {:error, :already_down} 168 | 169 | {:error, error} -> 170 | {:error, Exception.message(error)} 171 | end 172 | end 173 | 174 | @impl true 175 | def supports_ddl_transaction? do 176 | true 177 | end 178 | 179 | @impl true 180 | def structure_dump(default, config) do 181 | table = config[:migration_source] || "schema_migrations" 182 | 183 | with {:ok, versions} <- select_versions(table, config), 184 | {:ok, path} <- pg_dump(default, config), 185 | do: append_versions(table, versions, path) 186 | end 187 | 188 | defp select_versions(table, config) do 189 | case run_query(~s[SELECT version FROM public."#{table}" ORDER BY version], config) do 190 | {:ok, %{rows: rows}} -> {:ok, Enum.map(rows, &hd/1)} 191 | {:error, %{postgres: %{code: :undefined_table}}} -> {:ok, []} 192 | {:error, _} = error -> error 193 | end 194 | end 195 | 196 | defp pg_dump(default, config) do 197 | path = config[:dump_path] || Path.join(default, "structure.sql") 198 | File.mkdir_p!(Path.dirname(path)) 199 | 200 | case run_with_cmd("pg_dump", config, [ 201 | "--file", 202 | path, 203 | "--schema-only", 204 | "--no-acl", 205 | "--no-owner", 206 | config[:database] 207 | ]) do 208 | {_output, 0} -> 209 | {:ok, path} 210 | 211 | {output, _} -> 212 | {:error, output} 213 | end 214 | end 215 | 216 | defp append_versions(_table, [], path) do 217 | {:ok, path} 218 | end 219 | 220 | defp append_versions(table, versions, path) do 221 | sql = 222 | ~s[INSERT INTO public."#{table}" (version) VALUES ] <> 223 | Enum.map_join(versions, ", ", &"(#{&1})") <> ~s[;\n\n] 224 | 225 | File.open!(path, [:append], fn file -> 226 | IO.write(file, sql) 227 | end) 228 | 229 | {:ok, path} 230 | end 231 | 232 | @impl true 233 | def structure_load(default, config) do 234 | path = config[:dump_path] || Path.join(default, "structure.sql") 235 | 236 | args = [ 237 | "--quiet", 238 | "--file", 239 | path, 240 | "-vON_ERROR_STOP=1", 241 | "--single-transaction", 242 | config[:database] 243 
| ] 244 | 245 | case run_with_cmd("psql", config, args) do 246 | {_output, 0} -> {:ok, path} 247 | {output, _} -> {:error, output} 248 | end 249 | end 250 | 251 | ## Helpers 252 | 253 | defp run_query(sql, opts) do 254 | {:ok, _} = Application.ensure_all_started(:postgrex) 255 | 256 | opts = 257 | opts 258 | |> Keyword.drop([:name, :log, :pool, :pool_size]) 259 | |> Keyword.put(:backoff_type, :stop) 260 | |> Keyword.put(:max_restarts, 0) 261 | 262 | {:ok, pid} = Task.Supervisor.start_link() 263 | 264 | task = 265 | Task.Supervisor.async_nolink(pid, fn -> 266 | {:ok, conn} = Postgrex.start_link(opts) 267 | 268 | value = Postgrex.query(conn, sql, [], opts) 269 | GenServer.stop(conn) 270 | value 271 | end) 272 | 273 | timeout = Keyword.get(opts, :timeout, 15_000) 274 | 275 | case Task.yield(task, timeout) || Task.shutdown(task) do 276 | {:ok, {:ok, result}} -> 277 | {:ok, result} 278 | 279 | {:ok, {:error, error}} -> 280 | {:error, error} 281 | 282 | {:exit, {%{__struct__: struct} = error, _}} 283 | when struct in [Postgrex.Error, DBConnection.Error] -> 284 | {:error, error} 285 | 286 | {:exit, reason} -> 287 | {:error, RuntimeError.exception(Exception.format_exit(reason))} 288 | 289 | nil -> 290 | {:error, RuntimeError.exception("command timed out")} 291 | end 292 | end 293 | 294 | defp run_with_cmd(cmd, opts, opt_args) do 295 | unless System.find_executable(cmd) do 296 | raise "could not find executable `#{cmd}` in path, " <> 297 | "please guarantee it is available before running ecto commands" 298 | end 299 | 300 | env = [{"PGCONNECT_TIMEOUT", "10"}] 301 | 302 | env = 303 | if password = opts[:password] do 304 | [{"PGPASSWORD", password} | env] 305 | else 306 | env 307 | end 308 | 309 | args = [] 310 | args = if username = opts[:username], do: ["-U", username | args], else: args 311 | args = if port = opts[:port], do: ["-p", to_string(port) | args], else: args 312 | 313 | host = opts[:hostname] || System.get_env("PGHOST") || "localhost" 314 | args = ["--host", host 
| args] 315 | args = args ++ opt_args 316 | System.cmd(cmd, args, env: env, stderr_to_stdout: true) 317 | end 318 | end 319 | -------------------------------------------------------------------------------- /lib/cockroachdb/connection.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Postgrex) do 2 | defmodule Ecto.Adapters.CockroachDB.Connection do 3 | @moduledoc false 4 | 5 | @default_port 26257 6 | @behaviour Ecto.Adapters.SQL.Connection 7 | 8 | ## Module and Options 9 | 10 | @impl true 11 | def child_spec(opts) do 12 | opts 13 | |> Keyword.put_new(:port, @default_port) 14 | |> Postgrex.child_spec() 15 | end 16 | 17 | @impl true 18 | def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, constraint: constraint}}), 19 | do: [unique: constraint] 20 | def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, constraint: constraint}}), 21 | do: [foreign_key: constraint] 22 | def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, constraint: constraint}}), 23 | do: [exclusion: constraint] 24 | def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, constraint: constraint}}), 25 | do: [check: constraint] 26 | 27 | # Postgres 9.2 and earlier does not provide the constraint field 28 | @impl true 29 | def to_constraints(%Postgrex.Error{postgres: %{code: :unique_violation, message: message}}) do 30 | case :binary.split(message, " unique constraint ") do 31 | [_, quoted] -> [unique: strip_quotes(quoted)] 32 | _ -> [] 33 | end 34 | end 35 | def to_constraints(%Postgrex.Error{postgres: %{code: :foreign_key_violation, message: message}}) do 36 | case :binary.split(message, " foreign key constraint ") do 37 | [_, quoted] -> 38 | [quoted | _] = :binary.split(quoted, " on table ") 39 | [foreign_key: strip_quotes(quoted)] 40 | _ -> 41 | [] 42 | end 43 | end 44 | def to_constraints(%Postgrex.Error{postgres: %{code: :exclusion_violation, message: 
message}}) do 45 | case :binary.split(message, " exclusion constraint ") do 46 | [_, quoted] -> [exclusion: strip_quotes(quoted)] 47 | _ -> [] 48 | end 49 | end 50 | def to_constraints(%Postgrex.Error{postgres: %{code: :check_violation, message: message}}) do 51 | case :binary.split(message, " check constraint ") do 52 | [_, quoted] -> [check: strip_quotes(quoted)] 53 | _ -> [] 54 | end 55 | end 56 | 57 | def to_constraints(_), 58 | do: [] 59 | 60 | defp strip_quotes(quoted) do 61 | size = byte_size(quoted) - 2 62 | <<_, unquoted::binary-size(size), _>> = quoted 63 | unquoted 64 | end 65 | 66 | ## Query 67 | 68 | @impl true 69 | def prepare_execute(conn, name, sql, params, opts) do 70 | Postgrex.prepare_execute(conn, name, sql, params, opts) 71 | end 72 | 73 | @impl true 74 | def query(conn, sql, params, opts) do 75 | Postgrex.query(conn, sql, params, opts) 76 | end 77 | 78 | @impl true 79 | def execute(conn, %{ref: ref} = query, params, opts) do 80 | case Postgrex.execute(conn, query, params, opts) do 81 | {:ok, %{ref: ^ref}, result} -> 82 | {:ok, result} 83 | 84 | {:ok, _, _} = ok -> 85 | ok 86 | 87 | {:error, %Postgrex.QueryError{} = err} -> 88 | {:reset, err} 89 | 90 | {:error, %Postgrex.Error{postgres: %{code: :feature_not_supported}} = err} -> 91 | {:reset, err} 92 | 93 | {:error, _} = error -> 94 | error 95 | end 96 | end 97 | 98 | @impl true 99 | def stream(conn, sql, params, opts) do 100 | Postgrex.stream(conn, sql, params, opts) 101 | end 102 | 103 | alias Ecto.Query.{BooleanExpr, JoinExpr, QueryExpr} 104 | 105 | @impl true 106 | def all(query) do 107 | sources = create_names(query) 108 | {select_distinct, order_by_distinct} = distinct(query.distinct, sources, query) 109 | 110 | from = from(query, sources) 111 | select = select(query, select_distinct, sources) 112 | join = join(query, sources) 113 | where = where(query, sources) 114 | group_by = group_by(query, sources) 115 | having = having(query, sources) 116 | window = window(query, sources) 117 | 
combinations = combinations(query) 118 | order_by = order_by(query, order_by_distinct, sources) 119 | limit = limit(query, sources) 120 | offset = offset(query, sources) 121 | lock = lock(query.lock) 122 | 123 | [select, from, join, where, group_by, having, window, combinations, order_by, limit, offset | lock] 124 | end 125 | 126 | @impl true 127 | def update_all(%{from: %{source: source}} = query, prefix \\ nil) do 128 | sources = create_names(query) 129 | {from, name} = get_source(query, sources, 0, source) 130 | 131 | prefix = prefix || ["UPDATE ", from, " AS ", name | " SET "] 132 | fields = update_fields(query, sources) 133 | {join, wheres} = using_join(query, :update_all, "FROM", sources) 134 | where = where(%{query | wheres: wheres ++ query.wheres}, sources) 135 | 136 | [prefix, fields, join, where | returning(query, sources)] 137 | end 138 | 139 | @impl true 140 | def delete_all(%{from: from} = query) do 141 | sources = create_names(query) 142 | {from, name} = get_source(query, sources, 0, from) 143 | 144 | {join, wheres} = using_join(query, :delete_all, "USING", sources) 145 | where = where(%{query | wheres: wheres ++ query.wheres}, sources) 146 | 147 | ["DELETE FROM ", from, " AS ", name, join, where | returning(query, sources)] 148 | end 149 | 150 | @impl true 151 | def insert(prefix, table, header, rows, on_conflict, returning) do 152 | values = 153 | if header == [] do 154 | [" VALUES " | intersperse_map(rows, ?,, fn _ -> "(DEFAULT)" end)] 155 | else 156 | [?\s, ?(, intersperse_map(header, ?,, "e_name/1), ") VALUES " | insert_all(rows, 1)] 157 | end 158 | 159 | ["INSERT INTO ", quote_table(prefix, table), insert_as(on_conflict), 160 | values, on_conflict(on_conflict, header) | returning(returning)] 161 | end 162 | 163 | defp insert_as({%{sources: sources}, _, _}) do 164 | {_expr, name, _schema} = create_name(sources, 0) 165 | [" AS " | name] 166 | end 167 | defp insert_as({_, _, _}) do 168 | [] 169 | end 170 | 171 | defp on_conflict({:raise, _, []}, 
_header), 172 | do: [] 173 | defp on_conflict({:nothing, _, targets}, _header), 174 | do: [" ON CONFLICT ", conflict_target(targets) | "DO NOTHING"] 175 | defp on_conflict({fields, _, targets}, _header) when is_list(fields), 176 | do: [" ON CONFLICT ", conflict_target(targets), "DO " | replace(fields)] 177 | defp on_conflict({query, _, targets}, _header), 178 | do: [" ON CONFLICT ", conflict_target(targets), "DO " | update_all(query, "UPDATE SET ")] 179 | 180 | defp conflict_target({:constraint, constraint}), 181 | do: ["ON CONSTRAINT ", quote_name(constraint), ?\s] 182 | defp conflict_target({:unsafe_fragment, fragment}), 183 | do: [fragment, ?\s] 184 | defp conflict_target([]), 185 | do: [] 186 | defp conflict_target(targets), 187 | do: [?(, intersperse_map(targets, ?,, "e_name/1), ?), ?\s] 188 | 189 | defp replace(fields) do 190 | ["UPDATE SET " | 191 | intersperse_map(fields, ?,, fn field -> 192 | quoted = quote_name(field) 193 | [quoted, " = ", "EXCLUDED." | quoted] 194 | end)] 195 | end 196 | 197 | defp insert_all(rows, counter) do 198 | intersperse_reduce(rows, ?,, counter, fn row, counter -> 199 | {row, counter} = insert_each(row, counter) 200 | {[?(, row, ?)], counter} 201 | end) 202 | |> elem(0) 203 | end 204 | 205 | defp insert_each(values, counter) do 206 | intersperse_reduce(values, ?,, counter, fn 207 | nil, counter -> 208 | {"DEFAULT", counter} 209 | 210 | {%Ecto.Query{} = query, params_counter}, counter -> 211 | {[?(, all(query), ?)], counter + params_counter} 212 | 213 | _, counter -> 214 | {[?$ | Integer.to_string(counter)], counter + 1} 215 | end) 216 | end 217 | 218 | @impl true 219 | def update(prefix, table, fields, filters, returning) do 220 | {fields, count} = intersperse_reduce(fields, ", ", 1, fn field, acc -> 221 | {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1} 222 | end) 223 | 224 | {filters, _count} = intersperse_reduce(filters, " AND ", count, fn 225 | {field, nil}, acc -> 226 | {[quote_name(field), " IS NULL"], acc} 
227 | 228 | {field, _value}, acc -> 229 | {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1} 230 | end) 231 | 232 | ["UPDATE ", quote_table(prefix, table), " SET ", 233 | fields, " WHERE ", filters | returning(returning)] 234 | end 235 | 236 | @impl true 237 | def delete(prefix, table, filters, returning) do 238 | {filters, _} = intersperse_reduce(filters, " AND ", 1, fn 239 | {field, nil}, acc -> 240 | {[quote_name(field), " IS NULL"], acc} 241 | 242 | {field, _value}, acc -> 243 | {[quote_name(field), " = $" | Integer.to_string(acc)], acc + 1} 244 | end) 245 | 246 | ["DELETE FROM ", quote_table(prefix, table), " WHERE ", filters | returning(returning)] 247 | end 248 | 249 | ## Query generation 250 | 251 | binary_ops = 252 | [==: " = ", !=: " != ", <=: " <= ", >=: " >= ", <: " < ", >: " > ", 253 | +: " + ", -: " - ", *: " * ", /: " / ", 254 | and: " AND ", or: " OR ", ilike: " ILIKE ", like: " LIKE "] 255 | 256 | @binary_ops Keyword.keys(binary_ops) 257 | 258 | Enum.map(binary_ops, fn {op, str} -> 259 | defp handle_call(unquote(op), 2), do: {:binary_op, unquote(str)} 260 | end) 261 | 262 | defp handle_call(fun, _arity), do: {:fun, Atom.to_string(fun)} 263 | 264 | defp select(%{select: %{fields: fields}} = query, select_distinct, sources) do 265 | ["SELECT", select_distinct, ?\s | select_fields(fields, sources, query)] 266 | end 267 | 268 | defp select_fields([], _sources, _query), 269 | do: "TRUE" 270 | defp select_fields(fields, sources, query) do 271 | intersperse_map(fields, ", ", fn 272 | {key, value} -> 273 | [expr(value, sources, query), " AS " | quote_name(key)] 274 | value -> 275 | expr(value, sources, query) 276 | end) 277 | end 278 | 279 | defp distinct(nil, _, _), do: {[], []} 280 | defp distinct(%QueryExpr{expr: []}, _, _), do: {[], []} 281 | defp distinct(%QueryExpr{expr: true}, _, _), do: {" DISTINCT", []} 282 | defp distinct(%QueryExpr{expr: false}, _, _), do: {[], []} 283 | defp distinct(%QueryExpr{expr: exprs}, sources, query) do 284 
| {[" DISTINCT ON (", 285 | intersperse_map(exprs, ", ", fn {_, expr} -> expr(expr, sources, query) end), ?)], 286 | exprs} 287 | end 288 | 289 | defp from(%{from: %{hints: [_ | _]}} = query, _sources) do 290 | error!(query, "table hints are not supported by PostgreSQL") 291 | end 292 | 293 | defp from(%{from: %{source: source}} = query, sources) do 294 | {from, name} = get_source(query, sources, 0, source) 295 | [" FROM ", from, " AS " | name] 296 | end 297 | 298 | defp update_fields(%{updates: updates} = query, sources) do 299 | for(%{expr: expr} <- updates, 300 | {op, kw} <- expr, 301 | {key, value} <- kw, 302 | do: update_op(op, key, value, sources, query)) |> Enum.intersperse(", ") 303 | end 304 | 305 | defp update_op(:set, key, value, sources, query) do 306 | [quote_name(key), " = " | expr(value, sources, query)] 307 | end 308 | 309 | defp update_op(:inc, key, value, sources, query) do 310 | [quote_name(key), " = ", quote_qualified_name(key, sources, 0), " + " | 311 | expr(value, sources, query)] 312 | end 313 | 314 | defp update_op(:push, key, value, sources, query) do 315 | [quote_name(key), " = array_append(", quote_qualified_name(key, sources, 0), 316 | ", ", expr(value, sources, query), ?)] 317 | end 318 | 319 | defp update_op(:pull, key, value, sources, query) do 320 | [quote_name(key), " = array_remove(", quote_qualified_name(key, sources, 0), 321 | ", ", expr(value, sources, query), ?)] 322 | end 323 | 324 | defp update_op(command, _key, _value, _sources, query) do 325 | error!(query, "unknown update operation #{inspect command} for PostgreSQL") 326 | end 327 | 328 | defp using_join(%{joins: []}, _kind, _prefix, _sources), do: {[], []} 329 | defp using_join(%{joins: joins} = query, kind, prefix, sources) do 330 | froms = 331 | intersperse_map(joins, ", ", fn 332 | %JoinExpr{qual: :inner, ix: ix, source: source} -> 333 | {join, name} = get_source(query, sources, ix, source) 334 | [join, " AS " | name] 335 | %JoinExpr{qual: qual} -> 336 | error!(query, 
"PostgreSQL supports only inner joins on #{kind}, got: `#{qual}`") 337 | end) 338 | 339 | wheres = 340 | for %JoinExpr{on: %QueryExpr{expr: value} = expr} <- joins, 341 | value != true, 342 | do: expr |> Map.put(:__struct__, BooleanExpr) |> Map.put(:op, :and) 343 | 344 | {[?\s, prefix, ?\s | froms], wheres} 345 | end 346 | 347 | defp join(%{joins: []}, _sources), do: [] 348 | defp join(%{joins: joins} = query, sources) do 349 | [?\s | intersperse_map(joins, ?\s, fn 350 | %JoinExpr{on: %QueryExpr{expr: expr}, qual: qual, ix: ix, source: source, hints: hints} -> 351 | if hints != [] do 352 | error!(query, "table hints are not supported by PostgreSQL") 353 | end 354 | 355 | {join, name} = get_source(query, sources, ix, source) 356 | [join_qual(qual), join, " AS ", name | join_on(qual, expr, sources, query)] 357 | end)] 358 | end 359 | 360 | defp join_on(:cross, true, _sources, _query), do: [] 361 | defp join_on(_qual, expr, sources, query), do: [" ON " | expr(expr, sources, query)] 362 | 363 | defp join_qual(:inner), do: "INNER JOIN " 364 | defp join_qual(:inner_lateral), do: "INNER JOIN LATERAL " 365 | defp join_qual(:left), do: "LEFT OUTER JOIN " 366 | defp join_qual(:left_lateral), do: "LEFT OUTER JOIN LATERAL " 367 | defp join_qual(:right), do: "RIGHT OUTER JOIN " 368 | defp join_qual(:full), do: "FULL OUTER JOIN " 369 | defp join_qual(:cross), do: "CROSS JOIN " 370 | 371 | defp where(%{wheres: wheres} = query, sources) do 372 | boolean(" WHERE ", wheres, sources, query) 373 | end 374 | 375 | defp having(%{havings: havings} = query, sources) do 376 | boolean(" HAVING ", havings, sources, query) 377 | end 378 | 379 | defp group_by(%{group_bys: []}, _sources), do: [] 380 | defp group_by(%{group_bys: group_bys} = query, sources) do 381 | [" GROUP BY " | 382 | intersperse_map(group_bys, ", ", fn 383 | %QueryExpr{expr: expr} -> 384 | intersperse_map(expr, ", ", &expr(&1, sources, query)) 385 | end)] 386 | end 387 | 388 | defp window(%{windows: []}, _sources), do: [] 
# --- WINDOW / ORDER BY / expression rendering / DDL entry points -------------
# NOTE(review): recovered from a line-numbered dump ("NNN | " residue stripped,
# line breaks restored). Two corruptions were repaired and are flagged inline.

defp window(%{windows: windows} = query, sources) do
  [" WINDOW " |
   intersperse_map(windows, ", ", fn {name, %{expr: kw}} ->
     [quote_name(name), " AS " | window_exprs(kw, sources, query)]
   end)]
end

defp window_exprs(kw, sources, query) do
  [?(, intersperse_map(kw, ?\s, &window_expr(&1, sources, query)), ?)]
end

defp window_expr({:partition_by, fields}, sources, query) do
  ["PARTITION BY " | intersperse_map(fields, ", ", &expr(&1, sources, query))]
end

defp window_expr({:order_by, fields}, sources, query) do
  ["ORDER BY " | intersperse_map(fields, ", ", &order_by_expr(&1, sources, query))]
end

defp window_expr({:frame, {:fragment, _, _} = fragment}, sources, query) do
  expr(fragment, sources, query)
end

# ORDER BY: distinct expressions (from DISTINCT ON) are prepended so the
# generated SQL satisfies PostgreSQL's ordering requirement.
defp order_by(%{order_bys: []}, _distinct, _sources), do: []
defp order_by(%{order_bys: order_bys} = query, distinct, sources) do
  order_bys = Enum.flat_map(order_bys, & &1.expr)
  [" ORDER BY " |
   intersperse_map(distinct ++ order_bys, ", ", &order_by_expr(&1, sources, query))]
end

defp order_by_expr({dir, expr}, sources, query) do
  str = expr(expr, sources, query)

  case dir do
    :asc -> str
    :asc_nulls_last -> [str | " ASC NULLS LAST"]
    :asc_nulls_first -> [str | " ASC NULLS FIRST"]
    :desc -> [str | " DESC"]
    :desc_nulls_last -> [str | " DESC NULLS LAST"]
    :desc_nulls_first -> [str | " DESC NULLS FIRST"]
  end
end

defp limit(%{limit: nil}, _sources), do: []
defp limit(%{limit: %QueryExpr{expr: expr}} = query, sources) do
  [" LIMIT " | expr(expr, sources, query)]
end

defp offset(%{offset: nil}, _sources), do: []
defp offset(%{offset: %QueryExpr{expr: expr}} = query, sources) do
  [" OFFSET " | expr(expr, sources, query)]
end

# UNION/EXCEPT/INTERSECT combinators; each sub-query is parenthesized.
defp combinations(%{combinations: combinations}) do
  Enum.map(combinations, fn
    {:union, query} -> [" UNION (", all(query), ")"]
    {:union_all, query} -> [" UNION ALL (", all(query), ")"]
    {:except, query} -> [" EXCEPT (", all(query), ")"]
    {:except_all, query} -> [" EXCEPT ALL (", all(query), ")"]
    {:intersect, query} -> [" INTERSECT (", all(query), ")"]
    {:intersect_all, query} -> [" INTERSECT ALL (", all(query), ")"]
  end)
end

defp lock(nil), do: []
defp lock(lock_clause), do: [?\s | lock_clause]

# Folds a list of boolean expressions into one WHERE/HAVING clause, adding
# parentheses only when the combining operator (AND/OR) changes.
defp boolean(_name, [], _sources, _query), do: []
defp boolean(name, [%{expr: expr, op: op} | query_exprs], sources, query) do
  [name |
   Enum.reduce(query_exprs, {op, paren_expr(expr, sources, query)}, fn
     %BooleanExpr{expr: expr, op: op}, {op, acc} ->
       {op, [acc, operator_to_boolean(op), paren_expr(expr, sources, query)]}
     %BooleanExpr{expr: expr, op: op}, {_, acc} ->
       {op, [?(, acc, ?), operator_to_boolean(op), paren_expr(expr, sources, query)]}
   end) |> elem(1)]
end

defp operator_to_boolean(:and), do: " AND "
defp operator_to_boolean(:or), do: " OR "

# Wraps a fragment in parens when it starts with SELECT (sub-query position).
defp parens_for_select([first_expr | _] = expr) do
  if is_binary(first_expr) and String.starts_with?(first_expr, ["SELECT", "select"]) do
    [?(, expr, ?)]
  else
    expr
  end
end

defp paren_expr(expr, sources, query) do
  [?(, expr(expr, sources, query), ?)]
end

# --- expr/3: Ecto AST -> SQL iodata ------------------------------------------

# Positional parameter; Ecto indexes from 0, PostgreSQL's $n from 1.
defp expr({:^, [], [ix]}, _sources, _query) do
  [?$ | Integer.to_string(ix + 1)]
end

defp expr({{:., _, [{:&, _, [idx]}, field]}, _, []}, sources, _query) when is_atom(field) do
  quote_qualified_name(field, sources, idx)
end

defp expr({:&, _, [idx]}, sources, _query) do
  {_, source, _} = elem(sources, idx)
  source
end

# `x in []` is always false.
defp expr({:in, _, [_left, []]}, _sources, _query) do
  "false"
end

defp expr({:in, _, [left, right]}, sources, query) when is_list(right) do
  args = intersperse_map(right, ?,, &expr(&1, sources, query))
  [expr(left, sources, query), " IN (", args, ?)]
end

# `x in ^list` becomes `x = ANY($n)` so the list binds as a single array param.
defp expr({:in, _, [left, {:^, _, [ix, _]}]}, sources, query) do
  [expr(left, sources, query), " = ANY($", Integer.to_string(ix + 1), ?)]
end

defp expr({:in, _, [left, right]}, sources, query) do
  [expr(left, sources, query), " = ANY(", expr(right, sources, query), ?)]
end

defp expr({:is_nil, _, [arg]}, sources, query) do
  [expr(arg, sources, query) | " IS NULL"]
end

defp expr({:not, _, [expr]}, sources, query) do
  ["NOT (", expr(expr, sources, query), ?)]
end

defp expr(%Ecto.SubQuery{query: query}, _sources, _query) do
  [?(, all(query), ?)]
end

defp expr({:fragment, _, [kw]}, _sources, query) when is_list(kw) or tuple_size(kw) == 3 do
  error!(query, "PostgreSQL adapter does not support keyword or interpolated fragments")
end

defp expr({:fragment, _, parts}, sources, query) do
  Enum.map(parts, fn
    {:raw, part} -> part
    {:expr, expr} -> expr(expr, sources, query)
  end)
  |> parens_for_select
end

defp expr({:datetime_add, _, [datetime, count, interval]}, sources, query) do
  [expr(datetime, sources, query), "::timestamp + ",
   interval(count, interval, sources, query)]
end

defp expr({:date_add, _, [date, count, interval]}, sources, query) do
  [?(, expr(date, sources, query), "::date + ",
   interval(count, interval, sources, query) | ")::date"]
end

defp expr({:filter, _, [agg, filter]}, sources, query) do
  aggregate = expr(agg, sources, query)
  [aggregate, " FILTER (WHERE ", expr(filter, sources, query), ?)]
end

defp expr({:over, _, [agg, name]}, sources, query) when is_atom(name) do
  aggregate = expr(agg, sources, query)
  [aggregate, " OVER " | quote_name(name)]
end

defp expr({:over, _, [agg, kw]}, sources, query) do
  aggregate = expr(agg, sources, query)
  [aggregate, " OVER ", window_exprs(kw, sources, query)]
end

defp expr({:{}, _, elems}, sources, query) do
  [?(, intersperse_map(elems, ?,, &expr(&1, sources, query)), ?)]
end

defp expr({:count, _, []}, _sources, _query), do: "count(*)"

# Generic function call; `:distinct` on the last arg renders as DISTINCT.
defp expr({fun, _, args}, sources, query) when is_atom(fun) and is_list(args) do
  {modifier, args} =
    case args do
      [rest, :distinct] -> {"DISTINCT ", [rest]}
      _ -> {[], args}
    end

  case handle_call(fun, length(args)) do
    {:binary_op, op} ->
      [left, right] = args
      [op_to_binary(left, sources, query), op | op_to_binary(right, sources, query)]
    {:fun, fun} ->
      [fun, ?(, modifier, intersperse_map(args, ", ", &expr(&1, sources, query)), ?)]
  end
end

defp expr(list, sources, query) when is_list(list) do
  ["ARRAY[", intersperse_map(list, ?,, &expr(&1, sources, query)), ?]]
end

defp expr(%Decimal{} = decimal, _sources, _query) do
  Decimal.to_string(decimal, :normal)
end

defp expr(%Ecto.Query.Tagged{value: binary, type: :binary}, _sources, _query)
     when is_binary(binary) do
  ["'\\x", Base.encode16(binary, case: :lower) | "'::bytea"]
end

defp expr(%Ecto.Query.Tagged{value: other, type: type}, sources, query) do
  [expr(other, sources, query), ?:, ?: | tagged_to_db(type)]
end

defp expr(nil, _sources, _query), do: "NULL"
defp expr(true, _sources, _query), do: "TRUE"
defp expr(false, _sources, _query), do: "FALSE"

defp expr(literal, _sources, _query) when is_binary(literal) do
  [?\', escape_string(literal), ?\']
end

defp expr(literal, _sources, _query) when is_integer(literal) do
  Integer.to_string(literal)
end

defp expr(literal, _sources, _query) when is_float(literal) do
  [Float.to_string(literal) | "::float"]
end

defp tagged_to_db({:array, type}), do: [tagged_to_db(type), ?[, ?]]
# Always use the largest possible type for integers
defp tagged_to_db(:id), do: "bigint"
defp tagged_to_db(:integer), do: "bigint"
defp tagged_to_db(type), do: ecto_to_db(type)

defp interval(count, interval, _sources, _query) when is_integer(count) do
  ["interval '", String.Chars.Integer.to_string(count), ?\s, interval, ?\']
end

defp interval(count, interval, _sources, _query) when is_float(count) do
  count = :erlang.float_to_binary(count, [:compact, decimals: 16])
  ["interval '", count, ?\s, interval, ?\']
end

defp interval(count, interval, sources, query) do
  [?(, expr(count, sources, query), "::numeric * ",
   interval(1, interval, sources, query), ?)]
end

# Parenthesize nested binary operators to preserve precedence.
defp op_to_binary({op, _, [_, _]} = expr, sources, query) when op in @binary_ops do
  paren_expr(expr, sources, query)
end

defp op_to_binary(expr, sources, query) do
  expr(expr, sources, query)
end

defp returning(%{select: nil}, _sources),
  do: []
defp returning(%{select: %{fields: fields}} = query, sources),
  do: [" RETURNING " | select_fields(fields, sources, query)]

defp returning([]),
  do: []
# NOTE(review): the dump showed `"e_name/1` here — an HTML-entity
# unescape artifact of `&quote_name/1`; restored.
defp returning(returning),
  do: [" RETURNING " | intersperse_map(returning, ", ", &quote_name/1)]

# Builds the (expr, alias, schema) tuple per source; aliases are the table's
# first letter plus position, e.g. "u0".
defp create_names(%{sources: sources}) do
  create_names(sources, 0, tuple_size(sources)) |> List.to_tuple()
end

defp create_names(sources, pos, limit) when pos < limit do
  [create_name(sources, pos) | create_names(sources, pos + 1, limit)]
end

defp create_names(_sources, pos, pos) do
  []
end

defp create_name(sources, pos) do
  case elem(sources, pos) do
    {:fragment, _, _} ->
      {nil, [?f | Integer.to_string(pos)], nil}

    {table, schema, prefix} ->
      name = [create_alias(table) | Integer.to_string(pos)]
      {quote_table(prefix, table), name, schema}

    %Ecto.SubQuery{} ->
      {nil, [?s | Integer.to_string(pos)], nil}
  end
end

# NOTE(review): the dump corrupted the binary patterns here to bare `<>`;
# reconstructed as <<first, _rest::binary>> / <<first>> to match the visible
# guards on `first` — confirm against upstream history.
defp create_alias(<<first, _rest::binary>>) when first in ?a..?z when first in ?A..?Z do
  <<first>>
end
defp create_alias(_) do
  "t"
end

# DDL

alias Ecto.Migration.{Table, Index, Reference, Constraint}

@creates [:create, :create_if_not_exists]
@drops [:drop, :drop_if_exists]

@impl true
def execute_ddl({command, %Table{} = table, columns}) when command in @creates do
  table_name = quote_table(table.prefix, table.name)
  query = ["CREATE TABLE ",
           if_do(command == :create_if_not_exists, "IF NOT EXISTS "),
           table_name, ?\s, ?(,
           column_definitions(table, columns), pk_definition(columns, ", "), ?),
           options_expr(table.options)]

  [query] ++
    comments_on("TABLE", table_name, table.comment) ++
    comments_for_columns(table_name, columns)
end

def execute_ddl({command, %Table{} = table}) when command in @drops do
  [["DROP TABLE ", if_do(command == :drop_if_exists, "IF EXISTS "),
    quote_table(table.prefix, table.name)]]
end

def execute_ddl({:alter, %Table{} = table, changes}) do
  table_name = quote_table(table.prefix, table.name)
  query = ["ALTER TABLE ", table_name, ?\s,
           column_changes(table, changes), pk_definition(changes, ", ADD ")]

  [query] ++
    comments_on("TABLE", table_name, table.comment) ++
    comments_for_columns(table_name, changes)
end

def execute_ddl({:create, %Index{} = index}) do
  fields = intersperse_map(index.columns, ", ", &index_expr/1)

  queries = [["CREATE ",
              if_do(index.unique, "UNIQUE "),
              "INDEX ",
              if_do(index.concurrently, "CONCURRENTLY "),
              quote_name(index.name),
              " ON ",
              quote_table(index.prefix, index.table),
              if_do(index.using, [" USING " , to_string(index.using)]),
              ?\s, ?(, fields, ?),
              if_do(index.where, [" WHERE ", to_string(index.where)])]]

  queries ++ comments_on("INDEX", quote_name(index.name), index.comment)
end

def execute_ddl({:create_if_not_exists, %Index{} = index}) do
  if index.concurrently do
    raise ArgumentError,
          "concurrent index and create_if_not_exists is not supported by the Postgres adapter"
  end

  # Emulated with a DO block because older PostgreSQL lacks
  # CREATE INDEX IF NOT EXISTS.
  [["DO $$ BEGIN ",
    execute_ddl({:create, index}), ";",
    "EXCEPTION WHEN duplicate_table THEN END; $$;"]]
end

def execute_ddl({command, %Index{} = index}) when command in @drops do
  [["DROP INDEX ",
    if_do(index.concurrently, "CONCURRENTLY "),
    if_do(command == :drop_if_exists, "IF EXISTS "),
    quote_table(index.prefix, index.name)]]
end

def execute_ddl({:rename, %Table{} = current_table, %Table{} = new_table}) do
  [["ALTER TABLE ", quote_table(current_table.prefix, current_table.name),
    " RENAME TO ", quote_table(nil, new_table.name)]]
end

def execute_ddl({:rename, %Table{} = table, current_column, new_column}) do
  [["ALTER TABLE ", quote_table(table.prefix, table.name), " RENAME ",
    quote_name(current_column), " TO ", quote_name(new_column)]]
end

def execute_ddl({:create, %Constraint{} = constraint}) do
  table_name = quote_table(constraint.prefix, constraint.table)
  queries = [["ALTER TABLE ", table_name,
              " ADD ", new_constraint_expr(constraint)]]

  queries ++ comments_on("CONSTRAINT", constraint.name, constraint.comment, table_name)
end

def execute_ddl({:drop, %Constraint{} = constraint}) do
  [["ALTER TABLE ", quote_table(constraint.prefix, constraint.table),
    " DROP CONSTRAINT ", quote_name(constraint.name)]]
end

def execute_ddl({:drop_if_exists, %Constraint{} = constraint}) do
  [["ALTER TABLE ", quote_table(constraint.prefix, constraint.table),
    " DROP CONSTRAINT IF EXISTS ", quote_name(constraint.name)]]
end

def execute_ddl(string) when is_binary(string), do: [string]

def execute_ddl(keyword) when is_list(keyword),
  do: error!(nil, "PostgreSQL adapter does not support keyword lists in execute")

@impl true
def ddl_logs(%Postgrex.Result{} = result) do
  %{messages: messages} = result

  for message <- messages do
    %{message: message, severity: severity} = message

    {ddl_log_level(severity), message, []}
  end
end

@impl true
def table_exists_query(table) do
  {"SELECT true FROM information_schema.tables WHERE table_name = $1 AND table_schema = current_schema() LIMIT 1", [table]}
end

# From https://www.postgresql.org/docs/9.3/static/protocol-error-fields.html.
# --- DDL column/constraint helpers and shared module helpers -----------------
# NOTE(review): recovered from a line-numbered dump ("NNN | " residue stripped,
# line breaks restored). One HTML-entity corruption repaired, flagged inline.

# Maps PostgreSQL message severities to Logger levels.
defp ddl_log_level("DEBUG"), do: :debug
defp ddl_log_level("LOG"), do: :info
defp ddl_log_level("INFO"), do: :info
defp ddl_log_level("NOTICE"), do: :info
defp ddl_log_level("WARNING"), do: :warn
defp ddl_log_level("ERROR"), do: :error
defp ddl_log_level("FATAL"), do: :error
defp ddl_log_level("PANIC"), do: :error
defp ddl_log_level(_severity), do: :info

# Builds the PRIMARY KEY (...) clause from column definitions marked
# `primary_key: true`; empty when there are none.
# NOTE(review): the dump showed `"e_name/1` here — an HTML-entity
# unescape artifact of `&quote_name/1`; restored.
defp pk_definition(columns, prefix) do
  pks =
    for {_, name, _, opts} <- columns,
        opts[:primary_key],
        do: name

  case pks do
    [] -> []
    _ -> [prefix, "PRIMARY KEY (", intersperse_map(pks, ", ", &quote_name/1), ")"]
  end
end

defp comments_on(_object, _name, nil), do: []
defp comments_on(object, name, comment) do
  [["COMMENT ON ", object, ?\s, name, " IS ", single_quote(comment)]]
end

defp comments_on(_object, _name, nil, _table_name), do: []
defp comments_on(object, name, comment, table_name) do
  [["COMMENT ON ", object, ?\s, quote_name(name), " ON ", table_name,
    " IS ", single_quote(comment)]]
end

defp comments_for_columns(table_name, columns) do
  Enum.flat_map(columns, fn
    {_operation, column_name, _column_type, opts} ->
      column_name = [table_name, ?. | quote_name(column_name)]
      comments_on("COLUMN", column_name, opts[:comment])
    _ -> []
  end)
end

defp column_definitions(table, columns) do
  intersperse_map(columns, ", ", &column_definition(table, &1))
end

defp column_definition(table, {:add, name, %Reference{} = ref, opts}) do
  [quote_name(name), ?\s, reference_column_type(ref.type, opts),
   column_options(ref.type, opts), reference_expr(ref, table, name)]
end

defp column_definition(_table, {:add, name, type, opts}) do
  [quote_name(name), ?\s, column_type(type, opts), column_options(type, opts)]
end

defp column_changes(table, columns) do
  intersperse_map(columns, ", ", &column_change(table, &1))
end

defp column_change(table, {:add, name, %Reference{} = ref, opts}) do
  ["ADD COLUMN ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
   column_options(ref.type, opts), reference_expr(ref, table, name)]
end

defp column_change(_table, {:add, name, type, opts}) do
  ["ADD COLUMN ", quote_name(name), ?\s, column_type(type, opts),
   column_options(type, opts)]
end

defp column_change(table, {:add_if_not_exists, name, %Reference{} = ref, opts}) do
  ["ADD COLUMN IF NOT EXISTS ", quote_name(name), ?\s, reference_column_type(ref.type, opts),
   column_options(ref.type, opts), reference_expr(ref, table, name)]
end

defp column_change(_table, {:add_if_not_exists, name, type, opts}) do
  ["ADD COLUMN IF NOT EXISTS ", quote_name(name), ?\s, column_type(type, opts),
   column_options(type, opts)]
end

# Modifying a reference drops the old FK constraint (when `:from` names one),
# alters the type, then re-adds the constraint plus NULL/DEFAULT changes.
defp column_change(table, {:modify, name, %Reference{} = ref, opts}) do
  [drop_constraint_expr(opts[:from], table, name), "ALTER COLUMN ", quote_name(name), " TYPE ", reference_column_type(ref.type, opts),
   constraint_expr(ref, table, name), modify_null(name, opts), modify_default(name, ref.type, opts)]
end

defp column_change(table, {:modify, name, type, opts}) do
  [drop_constraint_expr(opts[:from], table, name), "ALTER COLUMN ", quote_name(name), " TYPE ",
   column_type(type, opts), modify_null(name, opts), modify_default(name, type, opts)]
end

defp column_change(_table, {:remove, name}), do: ["DROP COLUMN ", quote_name(name)]
defp column_change(table, {:remove, name, %Reference{} = ref, _opts}) do
  [drop_constraint_expr(ref, table, name), "DROP COLUMN ", quote_name(name)]
end
defp column_change(_table, {:remove, name, _type, _opts}), do: ["DROP COLUMN ", quote_name(name)]

defp column_change(table, {:remove_if_exists, name, %Reference{} = ref}) do
  [drop_constraint_if_exists_expr(ref, table, name), "DROP COLUMN IF EXISTS ", quote_name(name)]
end
defp column_change(_table, {:remove_if_exists, name, _type}), do: ["DROP COLUMN IF EXISTS ", quote_name(name)]

defp modify_null(name, opts) do
  case Keyword.get(opts, :null) do
    true -> [", ALTER COLUMN ", quote_name(name), " DROP NOT NULL"]
    false -> [", ALTER COLUMN ", quote_name(name), " SET NOT NULL"]
    nil -> []
  end
end

defp modify_default(name, type, opts) do
  case Keyword.fetch(opts, :default) do
    {:ok, val} -> [", ALTER COLUMN ", quote_name(name), " SET", default_expr({:ok, val}, type)]
    :error -> []
  end
end

defp column_options(type, opts) do
  default = Keyword.fetch(opts, :default)
  null = Keyword.get(opts, :null)
  [default_expr(default, type), null_expr(null)]
end

defp null_expr(false), do: " NOT NULL"
defp null_expr(true), do: " NULL"
defp null_expr(_), do: []

defp new_constraint_expr(%Constraint{check: check} = constraint) when is_binary(check) do
  ["CONSTRAINT ", quote_name(constraint.name), " CHECK (", check, ")"]
end
defp new_constraint_expr(%Constraint{exclude: exclude} = constraint) when is_binary(exclude) do
  ["CONSTRAINT ", quote_name(constraint.name), " EXCLUDE USING ", exclude]
end

defp default_expr({:ok, nil}, _type), do: " DEFAULT NULL"
defp default_expr({:ok, literal}, type), do: [" DEFAULT ", default_type(literal, type)]
defp default_expr(:error, _), do: []

defp default_type(list, {:array, inner} = type) when is_list(list) do
  ["ARRAY[", Enum.map(list, &default_type(&1, inner)) |> Enum.intersperse(?,), "]::", ecto_to_db(type)]
end
defp default_type(literal, _type) when is_binary(literal) do
  # Defaults are interpolated into the DDL string, so null bytes or invalid
  # UTF-8 cannot be expressed safely and must be rejected.
  if :binary.match(literal, <<0>>) == :nomatch and String.valid?(literal) do
    [?', escape_string(literal), ?']
  else
    encoded = "\\x" <> Base.encode16(literal, case: :lower)
    raise ArgumentError, "default values are interpolated as UTF-8 strings and cannot contain null bytes. " <>
          "`#{inspect literal}` is invalid. If you want to write it as a binary, use \"#{encoded}\", " <>
          "otherwise refer to PostgreSQL documentation for instructions on how to escape this SQL type"
  end
end
defp default_type(literal, _type) when is_number(literal), do: to_string(literal)
defp default_type(literal, _type) when is_boolean(literal), do: to_string(literal)
defp default_type(%{} = map, :map) do
  library = Application.get_env(:postgrex, :json_library, Jason)
  default = IO.iodata_to_binary(library.encode_to_iodata!(map))
  [single_quote(default)]
end
defp default_type({:fragment, expr}, _type),
  do: [expr]
defp default_type(expr, type),
  do: raise(ArgumentError, "unknown default `#{inspect expr}` for type `#{inspect type}`. " <>
        ":default may be a string, number, boolean, list of strings, list of integers, map (when type is Map), or a fragment(...)")

defp index_expr(literal) when is_binary(literal),
  do: literal
defp index_expr(literal),
  do: quote_name(literal)

defp options_expr(nil),
  do: []
defp options_expr(keyword) when is_list(keyword),
  do: error!(nil, "PostgreSQL adapter does not support keyword lists in :options")
defp options_expr(options),
  do: [?\s, options]

defp column_type({:array, type}, opts),
  do: [column_type(type, opts), "[]"]

defp column_type(type, _opts) when type in ~w(time utc_datetime naive_datetime)a,
  do: ecto_to_db(type)

# The *_usec types accept an optional :precision, rendered as e.g. time(6).
defp column_type(type, opts) when type in ~w(time_usec utc_datetime_usec naive_datetime_usec)a do
  precision = Keyword.get(opts, :precision)
  type_name = ecto_to_db(type)

  if precision do
    [type_name, ?(, to_string(precision), ?)]
  else
    type_name
  end
end

defp column_type(type, opts) do
  size = Keyword.get(opts, :size)
  precision = Keyword.get(opts, :precision)
  scale = Keyword.get(opts, :scale)
  type_name = ecto_to_db(type)

  cond do
    size -> [type_name, ?(, to_string(size), ?)]
    precision -> [type_name, ?(, to_string(precision), ?,, to_string(scale || 0), ?)]
    type == :string -> [type_name, "(255)"]
    true -> type_name
  end
end

defp reference_expr(%Reference{} = ref, table, name),
  do: [" CONSTRAINT ", reference_name(ref, table, name), " REFERENCES ",
       quote_table(ref.prefix || table.prefix, ref.table), ?(, quote_name(ref.column), ?),
       reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]

defp constraint_expr(%Reference{} = ref, table, name),
  do: [", ADD CONSTRAINT ", reference_name(ref, table, name), ?\s,
       "FOREIGN KEY (", quote_name(name), ") REFERENCES ",
       quote_table(ref.prefix || table.prefix, ref.table), ?(, quote_name(ref.column), ?),
       reference_on_delete(ref.on_delete), reference_on_update(ref.on_update)]

defp drop_constraint_expr(%Reference{} = ref, table, name),
  do: ["DROP CONSTRAINT ", reference_name(ref, table, name), ", "]
defp drop_constraint_expr(_, _, _),
  do: []

defp drop_constraint_if_exists_expr(%Reference{} = ref, table, name),
  do: ["DROP CONSTRAINT IF EXISTS ", reference_name(ref, table, name), ", "]
defp drop_constraint_if_exists_expr(_, _, _),
  do: []

defp reference_name(%Reference{name: nil}, table, column),
  do: quote_name("#{table.name}_#{column}_fkey")
defp reference_name(%Reference{name: name}, _table, _column),
  do: quote_name(name)

defp reference_column_type(:serial, _opts), do: "integer"
defp reference_column_type(:bigserial, _opts), do: "bigint"
defp reference_column_type(type, opts), do: column_type(type, opts)

defp reference_on_delete(:nilify_all), do: " ON DELETE SET NULL"
defp reference_on_delete(:delete_all), do: " ON DELETE CASCADE"
defp reference_on_delete(:restrict), do: " ON DELETE RESTRICT"
defp reference_on_delete(_), do: []

defp reference_on_update(:nilify_all), do: " ON UPDATE SET NULL"
defp reference_on_update(:update_all), do: " ON UPDATE CASCADE"
defp reference_on_update(:restrict), do: " ON UPDATE RESTRICT"
defp reference_on_update(_), do: []

## Helpers

defp get_source(query, sources, ix, source) do
  {expr, name, _schema} = elem(sources, ix)
  {expr || expr(source, sources, query), name}
end

defp quote_qualified_name(name, sources, ix) do
  {_, source, _} = elem(sources, ix)
  [source, ?. | quote_name(name)]
end

defp quote_name(name) when is_atom(name) do
  quote_name(Atom.to_string(name))
end
defp quote_name(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad field name #{inspect name}")
  end
  [?", name, ?"]
end

defp quote_table(nil, name), do: quote_table(name)
defp quote_table(prefix, name), do: [quote_table(prefix), ?., quote_table(name)]

defp quote_table(name) when is_atom(name),
  do: quote_table(Atom.to_string(name))
defp quote_table(name) do
  if String.contains?(name, "\"") do
    error!(nil, "bad table name #{inspect name}")
  end
  [?", name, ?"]
end

defp single_quote(value), do: [?', escape_string(value), ?']

# Joins mapped elements with a separator as iodata (no intermediate lists).
defp intersperse_map(list, separator, mapper, acc \\ [])
defp intersperse_map([], _separator, _mapper, acc),
  do: acc
defp intersperse_map([elem], _separator, mapper, acc),
  do: [acc | mapper.(elem)]
defp intersperse_map([elem | rest], separator, mapper, acc),
  do: intersperse_map(rest, separator, mapper, [acc, mapper.(elem), separator])

# Like intersperse_map/4 but threads a user accumulator through the reducer.
defp intersperse_reduce(list, separator, user_acc, reducer, acc \\ [])
defp intersperse_reduce([], _separator, user_acc, _reducer, acc),
  do: {acc, user_acc}
defp intersperse_reduce([elem], _separator, user_acc, reducer, acc) do
  {elem, user_acc} = reducer.(elem, user_acc)
  {[acc | elem], user_acc}
end
defp intersperse_reduce([elem | rest], separator, user_acc, reducer, acc) do
  {elem, user_acc} = reducer.(elem, user_acc)
  intersperse_reduce(rest, separator, user_acc, reducer, [acc, elem, separator])
end

defp if_do(condition, value) do
  if condition, do: value, else: []
end

# SQL-escapes single quotes by doubling them.
defp escape_string(value) when is_binary(value) do
  :binary.replace(value, "'", "''", [:global])
end

# Ecto type -> database type name. Note: unlike upstream ecto_sql, the
# *_usec datetime types map to plain "timestamp"/"time" here (the
# CockroachDB-specific change this fork exists for).
defp ecto_to_db({:array, t}), do: [ecto_to_db(t), ?[, ?]]
defp ecto_to_db(:id), do: "integer"
defp ecto_to_db(:serial), do: "serial"
defp ecto_to_db(:bigserial), do: "bigserial"
defp ecto_to_db(:binary_id), do: "uuid"
defp ecto_to_db(:string), do: "varchar"
defp ecto_to_db(:binary), do: "bytea"
defp ecto_to_db(:map), do: Application.fetch_env!(:ecto_sql, :postgres_map_type)
defp ecto_to_db({:map, _}), do: Application.fetch_env!(:ecto_sql, :postgres_map_type)
defp ecto_to_db(:time_usec), do: "time"
defp ecto_to_db(:utc_datetime), do: "timestamp"
defp ecto_to_db(:utc_datetime_usec), do: "timestamp"
defp ecto_to_db(:naive_datetime), do: "timestamp"
defp ecto_to_db(:naive_datetime_usec), do: "timestamp"
defp ecto_to_db(other), do: Atom.to_string(other)

# Raises ArgumentError when there is no query context, Ecto.QueryError otherwise.
defp error!(nil, message) do
  raise ArgumentError, message
end
defp error!(query, message) do
  raise Ecto.QueryError, query: query, message: message
end
end
end
--------------------------------------------------------------------------------