├── .formatter.exs ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── README.md ├── lib ├── ecto_extract_migrations.ex ├── ecto_extract_migrations │ ├── commands │ │ ├── alter_sequence.ex │ │ ├── alter_table.ex │ │ ├── comment.ex │ │ ├── create_extension.ex │ │ ├── create_function.ex │ │ ├── create_index.ex │ │ ├── create_schema.ex │ │ ├── create_sequence.ex │ │ ├── create_table.ex │ │ ├── create_trigger.ex │ │ ├── create_type.ex │ │ ├── create_view.ex │ │ └── whitespace.ex │ ├── execute.ex │ ├── parsers │ │ ├── alter_sequence.ex │ │ ├── alter_table.ex │ │ ├── comment.ex │ │ ├── common.ex │ │ ├── create_extension.ex │ │ ├── create_function.ex │ │ ├── create_index.ex │ │ ├── create_schema.ex │ │ ├── create_sequence.ex │ │ ├── create_table.ex │ │ ├── create_trigger.ex │ │ ├── create_type.ex │ │ ├── create_view.ex │ │ └── whitespace.ex │ └── reference.ex └── mix │ └── tasks │ └── ecto_extract_migrations.ex ├── mix.exs ├── mix.lock ├── priv └── templates │ ├── execute_sql.eex │ └── multi_statement.eex └── test ├── alter_sequence.exs ├── alter_table_test.exs ├── comment_test.exs ├── create_extension_test.exs ├── create_function_test.exs ├── create_index_test.exs ├── create_schema_test.exs ├── create_sequence_test.exs ├── create_table_test.exs ├── create_trigger_test.exs ├── create_type_test.exs ├── create_view_test.exs ├── ecto_extract_migrations_test.exs ├── table_test.exs └── test_helper.exs /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 4 | ] 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 
5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | ecto_extract_migrations-*.tar 24 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [Unreleased] 8 | 9 | ## [0.2.0] - 2020-11-01 10 | ### Added 11 | - Improved docs, bump ex_doc version, thanks to @kianmeng 12 | - Handle missing sql schema objects 13 | 14 | ## [0.1.1] - 2020-10-31 15 | ### Added 16 | - Improved docs 17 | 18 | ## [0.1.0] - 2020-09-19 19 | ### Added 20 | - Make initial release to hex 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ecto_extract_migrations 2 | 3 | [![Module Version](https://img.shields.io/hexpm/v/ecto_extract_migrations.svg)](https://hex.pm/packages/ecto_extract_migrations) 4 | [![Hex Docs](https://img.shields.io/badge/hex-docs-lightgreen.svg)](https://hexdocs.pm/ecto_extract_migrations/) 5 | [![Total Download](https://img.shields.io/hexpm/dt/ecto_extract_migrations.svg)](https://hex.pm/packages/ecto_extract_migrations) 6 | [![License](https://img.shields.io/hexpm/l/ecto_extract_migrations.svg)](https://hex.pm/packages/ecto_extract_migrations) 7 | [![Last Updated](https://img.shields.io/github/last-commit/cogini/ecto_extract_migrations.svg)](https://github.com/cogini/ecto_extract_migrations/commits/master) 8 | 9 | Mix task to generate Ecto migrations from a Postgres schema SQL file. 10 | 11 | This lets you take an existing project and move it into Elixir 12 | with a proper development workflow. 13 | 14 | ## Usage 15 | 16 | 1. 
Generate a schema-only dump of the database to SQL: 17 | 18 | ```shell 19 | pg_dump --schema-only --no-owner postgres://dbuser:dbpassword@localhost/dbname > dbname.schema.sql 20 | ``` 21 | 22 | 2. Generate migrations from the SQL file: 23 | 24 | ```shell 25 | mix ecto.extract.migrations --sql-file dbname.schema.sql 26 | ``` 27 | 28 | or, from outside the target project: 29 | 30 | ```shell 31 | mix ecto.extract.migrations --sql-file dbname.schema.sql --repo "MyProject.Repo" --migrations-path ../myproject/priv/repo/migrations 32 | ``` 33 | 34 | 3. Create a test database, run migrations to create the schema, then 35 | export it and verify that it matches the original database: 36 | 37 | ```shell 38 | createuser --encrypted --pwprompt dbuser 39 | dropdb dbname_migrations 40 | createdb -Odbuser -Eutf8 dbname_migrations 41 | 42 | mix ecto.migrate --log-sql 43 | 44 | pg_dump --schema-only --no-owner postgres://dbuser@localhost/dbname_migrations > dbname_migrations.sql 45 | 46 | cat dbname.schema.sql | grep -v -E '^--|^$' > old.sql 47 | cat dbname_migrations.sql | grep -v -E '^--|^$' > new.sql 48 | diff -wu old.sql new.sql 49 | ``` 50 | 51 | ## Details 52 | 53 | This was written to migrate a legacy database with hundreds of tables and 54 | objects. 55 | 56 | The parser uses [NimbleParsec](https://github.com/dashbitco/nimble_parsec), and 57 | is based on the SQL grammar, so it is precise (unlike regex) and reasonably 58 | complete. It doesn't support every esoteric option, just what we needed, but 59 | that was quite a lot. Patches are welcome. 
60 | 61 | Supports: 62 | 63 | * `ALTER SEQUENCE` 64 | * `ALTER TABLE` 65 | * `CREATE EXTENSION` 66 | * `CREATE FUNCTION` 67 | * `CREATE INDEX` 68 | * `CREATE SCHEMA` 69 | * `CREATE SEQUENCE` 70 | * `CREATE TABLE` 71 | * `CREATE TRIGGER` 72 | * `CREATE TYPE` 73 | * `CREATE VIEW` 74 | 75 | ## Installation 76 | 77 | Add `ecto_extract_migrations` to your list of dependencies in `mix.exs`: 78 | 79 | ```elixir 80 | def deps do 81 | [ 82 | {:ecto_extract_migrations, "~> 0.1.0"} 83 | ] 84 | end 85 | ``` 86 | 87 | ## Resources 88 | 89 | Here are some useful resources for NimbleParsec: 90 | 91 | * https://stefan.lapers.be/posts/elixir-writing-an-expression-parser-with-nimble-parsec/ 92 | * https://github.com/slapers/ex_sel 93 | 94 | ## Alternatives 95 | 96 | * [ecto_generator](https://github.com/pmarreck/ecto_generator) generates Ecto schemas by querying 97 | the database [information schema](https://www.postgresql.org/docs/13/information-schema.html) 98 | * [ex_abnf](https://github.com/marcelog/ex_abnf) generates a parser based on an 99 | [ABNF grammar](https://en.wikipedia.org/wiki/Augmented_Backus%E2%80%93Naur_form). 100 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations do 2 | @moduledoc """ 3 | The main entry point is lib/mix/tasks/ecto_extract_migrations.ex. 4 | This module mainly has common library functions. 
5 | """ 6 | 7 | @app :ecto_extract_migrations 8 | 9 | require EEx 10 | 11 | @template_execute_sql """ 12 | execute( 13 | \"\"\" 14 | <%= Regex.replace(~r/^/m, sql, " ") %> 15 | \"\"\") 16 | """ 17 | 18 | @template_execute_sql_updown """ 19 | execute( 20 | \"\"\" 21 | <%= Regex.replace(~r/^/m, up_sql, " ") %> 22 | \"\"\", 23 | \"\"\" 24 | <%= Regex.replace(~r/^/m, down_sql, " ") %> 25 | \"\"\" 26 | ) 27 | """ 28 | 29 | # defmodule ParseError do 30 | # defexception message: "default message" 31 | # end 32 | 33 | @doc "Parse line from SQL file (Stream.transform function)" 34 | @spec parse({binary, integer}, nil | {module, map}) :: {list, nil | {module, map}} 35 | 36 | # First line of SQL statement 37 | def parse({line, line_num}, nil) do 38 | module_parse({line, line_num}) 39 | end 40 | 41 | # In multi-line SQL statement 42 | def parse({line, line_num}, {module, state}) do 43 | # Mix.shell().info("#{line_num}> #{line} #{inspect state}") 44 | case module.parse(line, state) do 45 | {:ok, value} -> 46 | # Parsing succeeded 47 | sql = state.sql <> line 48 | {[%{module: module, type: module.type(), line_num: line_num, sql: sql, data: value}], nil} 49 | {:continue, new_state} -> 50 | # Keep reading lines 51 | # This assumes that we will ultimately succeed, probably overly optimistic. 52 | # The alternative is to stop when e.g. 
we hit a line ending with ";" 53 | {[], {module, new_state}} 54 | {:error, _reason} -> 55 | {[], nil} 56 | end 57 | end 58 | 59 | @spec module_parse({binary, integer}) :: {list, nil | {module, map}} 60 | def module_parse(value) do 61 | modules = [ 62 | EctoExtractMigrations.Commands.Whitespace, 63 | EctoExtractMigrations.Commands.Comment, 64 | 65 | EctoExtractMigrations.Commands.CreateExtension, 66 | EctoExtractMigrations.Commands.CreateSchema, 67 | EctoExtractMigrations.Commands.CreateIndex, 68 | EctoExtractMigrations.Commands.CreateTrigger, 69 | EctoExtractMigrations.Commands.CreateFunction, 70 | 71 | EctoExtractMigrations.Commands.AlterTable, 72 | EctoExtractMigrations.Commands.AlterSequence, 73 | 74 | EctoExtractMigrations.Commands.CreateTable, 75 | EctoExtractMigrations.Commands.CreateSequence, 76 | EctoExtractMigrations.Commands.CreateType, 77 | EctoExtractMigrations.Commands.CreateView, 78 | ] 79 | module_parse(value, modules) 80 | end 81 | 82 | @spec module_parse({binary, integer}, list(module)) :: {list, nil | {module, map}} 83 | def module_parse({line, line_num}, []) do 84 | # No parser matched line 85 | Mix.shell().info("UNKNOWN #{line_num}> #{String.trim_trailing(line)}") 86 | {[], nil} 87 | end 88 | def module_parse({line, line_num}, [module | rest]) do 89 | case module.match(line) do 90 | {:ok, value} -> 91 | # Parsing succeeded 92 | {[%{module: module, type: module.type(), line_num: line_num, sql: line, data: value}], nil} 93 | {:continue, state} -> 94 | # Matched multi-line statement, keep going 95 | {[], {module, state}} 96 | {:error, _reason} -> 97 | # Try next parser 98 | module_parse({line, line_num}, rest) 99 | end 100 | end 101 | 102 | EEx.function_from_string(:def, :eval_template_execute_sql, @template_execute_sql, [:sql]) 103 | 104 | EEx.function_from_string(:def, :eval_template_execute_sql, @template_execute_sql_updown, [:up_sql, :down_sql]) 105 | 106 | @doc "Expand template file to path under priv/templates" 107 | @spec 
template_path(Path.t()) :: Path.t() 108 | def template_path(file) do 109 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 110 | Path.join(template_dir, file) 111 | end 112 | 113 | @doc "Evaluate template file from priv/templates with bindings" 114 | @spec eval_template_file(Path.t(), Keyword.t()) :: {:ok, binary} | {:error, term} 115 | def eval_template_file(template_file, bindings \\ []) do 116 | path = template_path(template_file) 117 | {:ok, EEx.eval_file(path, bindings, trim: true)} 118 | rescue 119 | e -> 120 | {:error, {:template, e}} 121 | end 122 | 123 | @doc "Evaluate template file with bindings" 124 | @spec eval_template(Path.t(), Keyword.t()) :: {:ok, binary} | {:error, term} 125 | def eval_template(template_file, bindings \\ []) do 126 | {:ok, EEx.eval_file(template_file, bindings, trim: true)} 127 | rescue 128 | e -> 129 | {:error, {:template, e}} 130 | end 131 | 132 | @doc "Convert SQL name to Elixir module name" 133 | @spec sql_name_to_module(binary | list(binary)) :: binary 134 | def sql_name_to_module(name) when is_binary(name), do: Macro.camelize(name) 135 | def sql_name_to_module(["public", name]), do: Macro.camelize(name) 136 | def sql_name_to_module([schema, name]) do 137 | "#{Macro.camelize(schema)}.#{Macro.camelize(name)}" 138 | end 139 | 140 | @doc "Convert schema qualified name in list format to binary" 141 | @spec object_name(binary | list(binary)) :: binary 142 | def object_name(name) when is_binary(name), do: name 143 | def object_name(["public", name]), do: name 144 | def object_name([schema, name]), do: "#{schema}.#{name}" 145 | 146 | @doc "Convert NimbleParsec result tuple into simple ok/error tuple" 147 | @spec parsec_result(tuple) :: {:ok, term} | {:error, term} 148 | def parsec_result(result) do 149 | case result do 150 | {:ok, [acc], "", _, _line, _offset} -> 151 | {:ok, acc} 152 | 153 | {:ok, _, rest, _, _line, _offset} -> 154 | {:error, "could not parse: " <> rest} 155 | 156 | {:error, reason, _rest, _context, _line, _offset} -> 
157 | {:error, reason} 158 | end 159 | end 160 | end 161 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/alter_sequence.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.AlterSequence do 2 | @moduledoc "Handle ALTER SEQUENCE." 3 | 4 | def type, do: :alter_sequence 5 | 6 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.AlterSequence 7 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.AlterSequence 8 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.AlterSequence 9 | end 10 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/alter_table.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.AlterTable do 2 | @moduledoc "Handle ALTER TABLE." 3 | 4 | def type, do: :alter_table 5 | 6 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.AlterTable 7 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.AlterTable 8 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.AlterTable 9 | end 10 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/comment.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.Comment do 2 | @moduledoc "Handle SQL comments." 
3 | 4 | def type, do: :comment 5 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.Comment 6 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.Comment 7 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.Comment 8 | end 9 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_extension.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateExtension do 2 | @moduledoc "Handle CREATE EXTENSION." 3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_extension 7 | 8 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateExtension 9 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateExtension 10 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateExtension 11 | 12 | @spec file_name(map, Keyword.t) :: binary 13 | def file_name(data, bindings) 14 | def file_name(%{name: [schema, name]}, _bindings), do: "extension_#{schema}_#{name}.exs" 15 | def file_name(%{name: name}, _bindings), do: "extension_#{name}.exs" 16 | 17 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 18 | def migration(data, bindings) do 19 | %{schema: schema, name: name} = data 20 | 21 | module_name = Enum.join([ 22 | bindings[:repo], 23 | "Migrations", 24 | "Extension", 25 | Macro.camelize(schema), 26 | Macro.camelize(name) 27 | ], ".") 28 | 29 | bindings = Keyword.merge(bindings, [ 30 | module_name: module_name, 31 | up_sql: data[:sql], 32 | down_sql: "DROP EXTENSION IF EXISTS #{name}" 33 | ]) 34 | 35 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 36 | template_path = Path.join(template_dir, "execute_sql.eex") 37 | EctoExtractMigrations.eval_template(template_path, bindings) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- 
/lib/ecto_extract_migrations/commands/create_function.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateFunction do 2 | @moduledoc "Handle CREATE FUNCTION." 3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_function 7 | 8 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateFunction 9 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateFunction 10 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateFunction 11 | 12 | @spec file_name(map, Keyword.t) :: binary 13 | def file_name(data, bindings) 14 | def file_name(%{name: [schema, name]}, _bindings), do: "function_#{schema}_#{name}.exs" 15 | def file_name(%{name: name}, _bindings), do: "function_#{name}.exs" 16 | 17 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 18 | def migration(data, bindings) do 19 | [schema, name] = data.name 20 | 21 | module_name = Enum.join([ 22 | bindings[:repo], 23 | "Migrations", 24 | "Function", 25 | Macro.camelize(schema), 26 | Macro.camelize(name) 27 | ], ".") 28 | 29 | bindings = Keyword.merge(bindings, [ 30 | module_name: module_name, 31 | up_sql: data[:sql], 32 | down_sql: "DROP FUNCTION IF EXISTS #{schema}.#{name}" 33 | ]) 34 | 35 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 36 | template_path = Path.join(template_dir, "execute_sql.eex") 37 | EctoExtractMigrations.eval_template(template_path, bindings) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_index.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateIndex do 2 | @moduledoc "Handle CREATE INDEX." 
3 | 4 | def type, do: :create_index 5 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateIndex 6 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateIndex 7 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateIndex 8 | 9 | @spec file_name(map, Keyword.t) :: binary 10 | def file_name(data, bindings) 11 | def file_name(%{name: name}, _bindings), do: "index_#{name}.exs" 12 | 13 | # %{key: [:member_id], name: "t_eligibility_member_id_idx", table_name: ["bnd", "t_eligibility"], using: "btree"} 14 | # CREATE INDEX t_eligibility_member_id_idx ON bnd.t_eligibility USING btree (member_id); 15 | 16 | def migration(data, bindings) do 17 | module_name = module_name(data, bindings) 18 | # table_name = table_name(data) 19 | [_prefix, table_name] = data.table_name 20 | 21 | # :name - the name of the index. Defaults to "#{table}_#{column}_index". 22 | # :unique - indicates whether the index should be unique. Defaults to false. 23 | # :concurrently - indicates whether the index should be created/dropped concurrently. 24 | # :using - configures the index type. 25 | # :prefix - specify an optional prefix for the index. 26 | # :where - specify conditions for a partial index. 27 | # :include - specify fields for a covering index. This is not supported by all databases. For more information on PostgreSQL support, please read the official docs. 
28 | 29 | opts = [ 30 | name: data[:name], 31 | unique: data[:unique], 32 | concurrently: data[:concurrently], 33 | using: data[:using], 34 | prefix: table_opt_prefix(data), 35 | where: data[:where], 36 | include: data[:include], 37 | ] 38 | |> Enum.reject(fn {_key, value} -> value == nil end) 39 | 40 | ast = quote do 41 | defmodule unquote(module_name) do 42 | use Ecto.Migration 43 | 44 | def change do 45 | create index(unquote(table_name), unquote(data.key), unquote(opts)) 46 | end 47 | end 48 | end 49 | {:ok, Macro.to_string(ast)} 50 | end 51 | 52 | def module_name(%{name: name}, bindings) do 53 | [bindings[:repo], "migrations", "index"] ++ [name] 54 | |> Enum.map(&Macro.camelize/1) 55 | |> Module.concat() 56 | end 57 | def module_name(%{table_name: table_name, key: key}, bindings) do 58 | [bindings[:repo], "migrations", "index"] ++ table_name ++ [key] 59 | |> Enum.map(&Macro.camelize/1) 60 | |> Module.concat() 61 | end 62 | 63 | # Get schema prefix if it is not public 64 | defp table_opt_prefix(%{table_name: ["public", _table]}), do: nil 65 | defp table_opt_prefix(%{table_name: [schema, _table]}), do: schema 66 | defp table_opt_prefix(%{table_name: value}) when is_binary(value), do: nil 67 | 68 | end 69 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_schema.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateSchema do 2 | @moduledoc "Handle CREATE SCHEMA." 
3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_schema 7 | 8 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateSchema 9 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateSchema 10 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateSchema 11 | 12 | @spec file_name(map, Keyword.t) :: binary 13 | def file_name(data, _bindings), do: "schema_#{data.name}.exs" 14 | 15 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 16 | def migration(data, bindings) do 17 | name = data.name 18 | 19 | repo = bindings[:repo] 20 | module_name = Enum.join([ 21 | repo, 22 | "Migrations", 23 | "Schema", 24 | Macro.camelize(name) 25 | ], ".") 26 | 27 | bindings = Keyword.merge(bindings, [ 28 | module_name: module_name, 29 | up_sql: data[:sql], 30 | down_sql: "DROP SCHEMA IF EXISTS #{name}" 31 | ]) 32 | 33 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 34 | template_path = Path.join(template_dir, "execute_sql.eex") 35 | EctoExtractMigrations.eval_template(template_path, bindings) 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_sequence.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateSequence do 2 | @moduledoc "Handle CREATE SEQUENCE." 
3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_sequence 7 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateSequence 8 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateSequence 9 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateSequence 10 | 11 | @spec file_name(map, Keyword.t) :: binary 12 | def file_name(data, bindings) 13 | def file_name(%{name: [schema, name]}, _bindings), do: "sequence_#{schema}_#{name}.exs" 14 | def file_name(%{name: name}, _bindings), do: "sequence_#{name}.exs" 15 | 16 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 17 | def migration(data, bindings) do 18 | [schema, name] = data.name 19 | 20 | module_name = Enum.join([ 21 | bindings[:repo], 22 | "Migrations", 23 | "Sequence", 24 | Macro.camelize(schema), 25 | Macro.camelize(name) 26 | ], ".") 27 | 28 | bindings = Keyword.merge(bindings, [ 29 | module_name: module_name, 30 | up_sql: data[:sql], 31 | down_sql: "DROP SEQUENCE IF EXISTS #{schema}.#{name}" 32 | ]) 33 | 34 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 35 | template_path = Path.join(template_dir, "execute_sql.eex") 36 | EctoExtractMigrations.eval_template(template_path, bindings) 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_table.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateTable do 2 | @moduledoc "Handle CREATE TABLE." 
3 | 4 | def type, do: :create_table 5 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateTable 6 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateTable 7 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateTable 8 | 9 | @spec file_name(map, Keyword.t) :: binary 10 | def file_name(data, bindings) 11 | def file_name(%{name: [schema, name]}, _bindings), do: "table_#{schema}_#{name}.exs" 12 | def file_name(%{name: name}, _bindings), do: "table_#{name}.exs" 13 | 14 | @doc "Create module name based on data" 15 | def module_name(%{name: name}, bindings) when is_list(name) do 16 | [bindings[:repo], "migrations", "table"] ++ name 17 | |> Enum.map(&Macro.camelize/1) 18 | |> Module.concat() 19 | end 20 | 21 | def migration(data, bindings) do 22 | module_name = module_name(data, bindings) 23 | [_schema, table] = data.name 24 | 25 | opts = [ 26 | prefix: table_opt_prefix(data), 27 | primary_key: table_opt_generate_primary_key(data), 28 | ] 29 | |> Enum.reject(fn {_key, value} -> value == nil end) 30 | 31 | # https://elixirforum.com/t/how-can-i-insert-an-ast-as-a-function-body/1227 32 | 33 | columns_ast = Enum.map(data.columns, &column_ast/1) 34 | 35 | ast = quote do 36 | defmodule unquote(module_name) do 37 | use Ecto.Migration 38 | 39 | def change do 40 | create table(unquote(table), unquote(opts)) do 41 | unquote_splicing(columns_ast) 42 | end 43 | end 44 | end 45 | end 46 | {:ok, Macro.to_string(ast)} 47 | end 48 | 49 | @doc "Set prefix opt if schema is not public" 50 | def table_opt_prefix(%{name: value}) when is_binary(value), do: nil 51 | def table_opt_prefix(%{name: ["public", _table]}), do: nil 52 | def table_opt_prefix(%{name: [schema, _table]}), do: schema 53 | 54 | @doc "Set primary_key opt if migration should generate a primary key id column" 55 | def table_opt_generate_primary_key(data) do 56 | if Enum.any?(data.columns, &has_pk/1) do 57 | false 58 | else 59 | # Default is true 60 | nil 61 | end 62 | end 63 | 64 
| def column_ast(column) do 65 | column = munge_column(column) 66 | column_name = String.to_atom(column.name) 67 | column_type = column_type(column.type) 68 | 69 | keys = [:primary_key, :default, :null, :size, :precision, :scale] 70 | opts = for key <- keys, Map.has_key?(column, key) do 71 | {key, column[key]} 72 | end 73 | |> Enum.reject(fn {_key, value} -> value == nil end) 74 | |> Enum.map(&column_value/1) 75 | 76 | quote do 77 | add unquote(column_name), unquote(column_type), unquote(opts) 78 | end 79 | end 80 | 81 | def munge_column(%{type: type, is_array: true} = value) do 82 | value = %{value | type: {:array, type}} 83 | Map.drop(value, [:is_array]) 84 | end 85 | def munge_column(value), do: value 86 | 87 | def column_type(value) when is_list(value), do: String.to_atom(Enum.join(value, ".")) 88 | def column_type(value), do: value 89 | 90 | def column_value({key, {:fragment, value}}) do 91 | ast = quote do 92 | fragment(unquote(value)) 93 | end 94 | {key, ast} 95 | end 96 | def column_value(value), do: value 97 | 98 | def has_pk(value) when is_list(value), do: Enum.any?(value, &has_pk/1) 99 | def has_pk(%{name: "id"}), do: true 100 | def has_pk(%{name: "rowid"}), do: true 101 | def has_pk(%{primary_key: true}), do: true 102 | def has_pk(_), do: false 103 | 104 | def starts_with_number(<<first, _rest::binary>>) when first >= ?0 and first <= ?9, do: true 105 | def starts_with_number(_), do: false 106 | end 107 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_trigger.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateTrigger do 2 | @moduledoc "Handle CREATE TRIGGER."
3 | 4 | def type, do: :create_trigger 5 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateTrigger 6 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateTrigger 7 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateTrigger 8 | 9 | # https://www.postgresql.org/docs/current/sql-droptrigger.html 10 | 11 | # %{name: "chat_message_update"} 12 | # CREATE TRIGGER chat_message_update BEFORE UPDATE ON chat.message FOR EACH ROW EXECUTE PROCEDURE public.chat_update_timestamp(); 13 | 14 | @spec file_name(map, Keyword.t) :: binary 15 | def file_name(data, bindings) 16 | def file_name(%{name: [schema, name]}, _bindings), do: "trigger_#{schema}_#{name}.exs" 17 | def file_name(%{name: name}, _bindings), do: "trigger_#{name}.exs" 18 | 19 | def migration(data, bindings) do 20 | module_name = module_name(data, bindings) 21 | EctoExtractMigrations.Execute.create_migration(module_name, data.sql) 22 | end 23 | 24 | @doc "Create module name based on data" 25 | def module_name(data, bindings) do 26 | [bindings[:repo], "migrations", "create_trigger"] ++ List.wrap(data.name) 27 | |> Enum.map(&Macro.camelize/1) 28 | |> Module.concat() 29 | end 30 | end 31 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_type.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateType do 2 | @moduledoc "Handle CREATE TYPE."
3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_type 7 | 8 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateType 9 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateType 10 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateType 11 | 12 | @spec file_name(map, Keyword.t) :: binary 13 | def file_name(data, bindings) 14 | def file_name(%{name: [schema, name]}, _bindings), do: "type_#{schema}_#{name}.exs" 15 | def file_name(%{name: name}, _bindings), do: "type_#{name}.exs" 16 | 17 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 18 | def migration(data, bindings) do 19 | [schema, name] = data.name 20 | 21 | module_name = Enum.join([ 22 | bindings[:repo], 23 | "Migrations", 24 | "Type", 25 | Macro.camelize(schema), 26 | Macro.camelize(name) 27 | ], ".") 28 | 29 | bindings = Keyword.merge(bindings, [ 30 | module_name: module_name, 31 | up_sql: data[:sql], 32 | down_sql: "DROP TYPE IF EXISTS #{schema}.#{name}" 33 | ]) 34 | 35 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 36 | template_path = Path.join(template_dir, "execute_sql.eex") 37 | EctoExtractMigrations.eval_template(template_path, bindings) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/create_view.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.CreateView do 2 | @moduledoc "Handle CREATE VIEW." 
3 | 4 | @app :ecto_extract_migrations 5 | 6 | def type, do: :create_view 7 | 8 | defdelegate parse(sql), to: EctoExtractMigrations.Parsers.CreateView 9 | defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.CreateView 10 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.CreateView 11 | 12 | @spec file_name(map, Keyword.t) :: binary 13 | def file_name(data, bindings) 14 | def file_name(%{name: [schema, name]}, _bindings), do: "view_#{schema}_#{name}.exs" 15 | def file_name(%{name: name}, _bindings), do: "view_#{name}.exs" 16 | 17 | @spec migration(map, Keyword.t) :: {:ok, binary} | {:error, term} 18 | def migration(data, bindings) do 19 | [schema, name] = data.name 20 | module_name = Enum.join([ 21 | bindings[:repo], 22 | "Migrations", 23 | "View", 24 | Macro.camelize(schema), 25 | Macro.camelize(name) 26 | ], ".") 27 | 28 | bindings = Keyword.merge(bindings, [ 29 | module_name: module_name, 30 | up_sql: data[:sql], 31 | down_sql: "DROP VIEW IF EXISTS #{schema}.#{name}" 32 | ]) 33 | 34 | template_dir = Application.app_dir(@app, ["priv", "templates"]) 35 | template_path = Path.join(template_dir, "execute_sql.eex") 36 | EctoExtractMigrations.eval_template(template_path, bindings) 37 | end 38 | end 39 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/commands/whitespace.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Commands.Whitespace do 2 | @moduledoc "Handle SQL whitespace lines." 
3 | 4 | def type, do: :whitespace 5 | # defdelegate parse(sql), to: EctoExtractMigrations.Parsers.Whitespace 6 | # defdelegate parse(sql, state), to: EctoExtractMigrations.Parsers.Whitespace 7 | defdelegate match(sql), to: EctoExtractMigrations.Parsers.Whitespace 8 | end 9 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/execute.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Execute do 2 | @moduledoc "Create migration which uses execute to run raw SQL" 3 | 4 | def create_migration(module_name, up_sql, down_sql) do 5 | ast = quote do 6 | defmodule unquote(module_name) do 7 | use Ecto.Migration 8 | 9 | def change do 10 | execute(unquote(up_sql), unquote(down_sql)) 11 | end 12 | end 13 | end 14 | {:ok, Macro.to_string(ast)} 15 | end 16 | 17 | def create_migration(module_name, up_sql) do 18 | ast = quote do 19 | defmodule unquote(module_name) do 20 | use Ecto.Migration 21 | 22 | def up do 23 | execute(unquote(up_sql)) 24 | end 25 | end 26 | end 27 | {:ok, Macro.to_string(ast)} 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/alter_sequence.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.AlterSequence do 2 | @moduledoc "Parser for ALTER SEQUENCE." 
3 | 4 | import NimbleParsec 5 | 6 | alias EctoExtractMigrations.Parsers.Common 7 | 8 | # https://www.postgresql.org/docs/current/sql-altersequence.html 9 | 10 | # ALTER SEQUENCE [ IF EXISTS ] name 11 | # [ AS data_type ] 12 | # [ INCREMENT [ BY ] increment ] 13 | # [ MINVALUE minvalue | NO MINVALUE ] [ MAXVALUE maxvalue | NO MAXVALUE ] 14 | # [ START [ WITH ] start ] 15 | # [ RESTART [ [ WITH ] restart ] ] 16 | # [ CACHE cache ] [ [ NO ] CYCLE ] 17 | # [ OWNED BY { table_name.column_name | NONE } ] 18 | # ALTER SEQUENCE [ IF EXISTS ] name OWNER TO { new_owner | CURRENT_USER | SESSION_USER } 19 | # ALTER SEQUENCE [ IF EXISTS ] name RENAME TO new_name 20 | # ALTER SEQUENCE [ IF EXISTS ] name SET SCHEMA new_schema 21 | 22 | # %{ 23 | # data: %{owned_by: [table: ["chat", "assignment"], column: "id"], sequence: ["chat", "assignment_id_seq"]}, 24 | # line_num: 409, 25 | # module: EctoExtractMigrations.Commands.AlterSequence, 26 | # sql: "ALTER SEQUENCE chat.assignment_id_seq OWNED BY chat.assignment.id;\n", 27 | # type: :alter_sequence 28 | # } 29 | 30 | whitespace = Common.whitespace() 31 | name = Common.name() 32 | 33 | schema_name = name 34 | bare_sequence_name = name |> unwrap_and_tag(:sequence) 35 | schema_qualified_sequence_name = 36 | schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:sequence) 37 | 38 | sequence_name = choice([schema_qualified_sequence_name, bare_sequence_name]) 39 | 40 | bare_table_name = name |> unwrap_and_tag(:table) 41 | schema_qualified_table_name = 42 | schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:table) 43 | 44 | table_name = choice([schema_qualified_table_name, bare_table_name]) 45 | 46 | column_name = 47 | name 48 | |> unwrap_and_tag(:column) 49 | 50 | table_column = 51 | table_name 52 | |> ignore(ascii_char([?.])) 53 | |> concat(column_name) 54 | 55 | if_exists = 56 | ignore(whitespace) 57 | |> string("IF EXISTS") 58 | 59 | owned_by = 60 | ignore(whitespace) 61 | |> ignore(string("OWNED BY")) 62 | |> 
ignore(whitespace) 63 | |> choice([table_column, string("NONE")]) 64 | |> tag(:owned_by) 65 | 66 | alter_sequence = 67 | ignore(optional(whitespace)) 68 | |> ignore(string("ALTER SEQUENCE")) 69 | |> ignore(optional(if_exists)) 70 | |> ignore(whitespace) 71 | |> concat(sequence_name) 72 | |> concat(owned_by) 73 | |> ignore(ascii_char([?;])) |> label(";") 74 | |> ignore(optional(whitespace)) 75 | |> reduce({Enum, :into, [%{}]}) 76 | 77 | match_alter_sequence = 78 | optional(whitespace) 79 | |> string("ALTER SEQUENCE") 80 | 81 | defparsec :parsec_parse, alter_sequence 82 | defparsec :parsec_match, match_alter_sequence 83 | 84 | def parse(line), do: parse(line, %{sql: ""}) 85 | 86 | def parse(line, %{sql: lines} = state) do 87 | sql = lines <> line 88 | case parsec_parse(sql) do 89 | {:ok, [value], _, _, _, _} -> 90 | {:ok, value} 91 | {:error, reason, _, _, _, _} -> 92 | {:continue, Map.merge(state, %{sql: sql, error: reason})} 93 | end 94 | end 95 | 96 | def match(line) do 97 | case parsec_match(line) do 98 | {:ok, _, _, _, _, _} -> 99 | case parsec_parse(line) do 100 | {:ok, [value], _, _, _, _} -> 101 | {:ok, value} 102 | {:error, reason, _, _, _, _} -> 103 | {:continue, %{sql: line, error: reason}} 104 | end 105 | {:error, reason, _, _, _, _} -> 106 | {:error, reason} 107 | end 108 | end 109 | 110 | end 111 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/alter_table.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.AlterTable do 2 | @moduledoc "Parser for ALTER TABLE." 
3 | 4 | import NimbleParsec 5 | 6 | alias EctoExtractMigrations.Parsers.Common 7 | 8 | # https://www.postgresql.org/docs/current/sql-altertable.html 9 | 10 | whitespace = Common.whitespace() 11 | name = Common.name() 12 | 13 | schema_name = name 14 | bare_table_name = name |> unwrap_and_tag(:table) 15 | schema_qualified_table_name = 16 | schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:table) 17 | 18 | table_name = choice([schema_qualified_table_name, bare_table_name]) 19 | 20 | if_exists = 21 | ignore(whitespace) 22 | |> string("IF EXISTS") 23 | 24 | only = 25 | ignore(whitespace) 26 | |> string("ONLY") 27 | 28 | table_constraint_name = 29 | name |> unwrap_and_tag(:constraint_name) |> label("constraint_name") 30 | 31 | table_constraint_primary_key = 32 | ignore(whitespace) 33 | |> string("PRIMARY KEY") |> replace(:primary_key) |> unwrap_and_tag(:type) 34 | |> ignore(whitespace) 35 | |> concat(Common.column_list(:primary_key)) 36 | |> label("PRIMARY KEY") 37 | 38 | table_constraint_unique = 39 | ignore(whitespace) 40 | |> string("UNIQUE") |> replace(:unique) |> unwrap_and_tag(:type) 41 | |> ignore(whitespace) 42 | |> concat(Common.column_list(:columns)) 43 | |> label("UNIQUE") 44 | 45 | on_delete = 46 | ignore(whitespace) 47 | |> ignore(string("ON DELETE")) 48 | |> ignore(whitespace) 49 | |> choice([ 50 | string("CASCADE") |> replace(:cascade), 51 | string("RESTRICT") |> replace(:restrict), 52 | string("SET NULL") |> replace(:set_null) 53 | ]) 54 | |> unwrap_and_tag(:on_delete) 55 | |> label("ON DELETE") 56 | 57 | on_update = 58 | ignore(whitespace) 59 | |> ignore(string("ON UPDATE")) 60 | |> ignore(whitespace) 61 | |> choice([ 62 | string("CASCADE") |> replace(:cascade), 63 | string("RESTRICT") |> replace(:restrict), 64 | string("SET NULL") |> replace(:set_null) 65 | ]) 66 | |> unwrap_and_tag(:on_update) 67 | |> label("ON UPDATE") 68 | 69 | table_constraint_foreign_key = 70 | ignore(whitespace) 71 | |> string("FOREIGN KEY") |> replace(:foreign_key) 
|> unwrap_and_tag(:type) 72 | |> ignore(whitespace) 73 | |> concat(Common.column_list(:columns)) 74 | |> ignore(whitespace) 75 | |> ignore(string("REFERENCES")) 76 | |> ignore(whitespace) 77 | |> concat(Common.table_name(:references_table)) 78 | |> concat(Common.column_list(:references_columns)) 79 | |> times(choice([on_delete, on_update]), min: 0) 80 | 81 | # table_constraint 82 | # 83 | # [ CONSTRAINT constraint_name ] 84 | # { CHECK ( expression ) [ NO INHERIT ] | 85 | # UNIQUE ( column_name [, ... ] ) index_parameters | 86 | # PRIMARY KEY ( column_name [, ... ] ) index_parameters | 87 | # EXCLUDE [ USING index_method ] ( exclude_element WITH operator [, ... ] ) index_parameters [ WHERE ( predicate ) ] | 88 | # FOREIGN KEY ( column_name [, ... ] ) REFERENCES reftable [ ( refcolumn [, ... ] ) ] 89 | # [ MATCH FULL | MATCH PARTIAL | MATCH SIMPLE ] [ ON DELETE referential_action ] [ ON UPDATE referential_action ] } 90 | # [ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ] 91 | 92 | add_table_constraint = 93 | string("ADD CONSTRAINT") |> replace(:add_table_constraint) |> unwrap_and_tag(:action) 94 | |> ignore(whitespace) 95 | |> concat(table_constraint_name) 96 | |> choice([table_constraint_primary_key, table_constraint_foreign_key, table_constraint_unique]) 97 | 98 | column_name = name 99 | 100 | alter_column = 101 | ignore(string("ALTER COLUMN")) 102 | |> ignore(whitespace) 103 | |> concat(column_name) |> unwrap_and_tag(:column) 104 | |> ignore(whitespace) 105 | 106 | # ALTER TABLE ONLY chat.assignment ALTER COLUMN id SET DEFAULT nextval('chat.assignment_id_seq'::regclass); 107 | 108 | # This assumes that the default is a sequence 109 | default = 110 | utf8_string([{:not, ?;}], min: 1) 111 | |> unwrap_and_tag(:fragment) 112 | |> unwrap_and_tag(:default) 113 | 114 | set_default = 115 | ignore(string("SET DEFAULT")) 116 | |> ignore(whitespace) 117 | |> replace(:set_default) |> unwrap_and_tag(:action) 118 | |> concat(default) 119 | 120 | 
action = 121 | ignore(whitespace) 122 | # |> times(add_table_constraint, min: 1) 123 | |> choice([add_table_constraint, alter_column |> concat(set_default)]) 124 | |> ignore(optional(ascii_char([?,]))) 125 | 126 | alter_table = 127 | ignore(string("ALTER TABLE")) 128 | |> ignore(optional(if_exists)) 129 | |> ignore(optional(only)) 130 | |> ignore(whitespace) 131 | |> concat(table_name) 132 | |> times(action, min: 1) 133 | |> ignore(ascii_char([?;])) 134 | |> ignore(optional(whitespace)) 135 | |> reduce({Enum, :into, [%{}]}) 136 | 137 | match_alter_table = 138 | ignore(string("ALTER TABLE")) 139 | 140 | defparsec :parsec_parse, alter_table 141 | defparsec :parsec_match, match_alter_table 142 | 143 | def parse(line), do: parse(line, %{sql: ""}) 144 | 145 | def parse(line, %{sql: lines} = state) do 146 | sql = lines <> line 147 | case parsec_parse(sql) do 148 | {:ok, [value], _, _, _, _} -> 149 | {:ok, value} 150 | {:error, reason, _, _, _, _} -> 151 | {:continue, Map.merge(state, %{sql: sql, error: reason})} 152 | end 153 | end 154 | 155 | def match(line) do 156 | case parsec_match(line) do 157 | {:ok, _, _, _, _, _} -> 158 | case parsec_parse(line) do 159 | {:ok, [value], _, _, _, _} -> 160 | {:ok, value} 161 | {:error, reason, _, _, _, _} -> 162 | {:continue, %{sql: line, error: reason}} 163 | end 164 | {:error, reason, _, _, _, _} -> 165 | {:error, reason} 166 | end 167 | end 168 | end 169 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/comment.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.Comment do 2 | @moduledoc "Parser for SQL comments." 
3 | 4 | import NimbleParsec 5 | 6 | alias EctoExtractMigrations.Parsers.Common 7 | 8 | whitespace = Common.whitespace() 9 | 10 | empty_comment = 11 | string("--\n") 12 | |> replace("") 13 | |> unwrap_and_tag(:comment) 14 | 15 | comment_text = 16 | ignore(string("--")) 17 | |> ignore(optional(whitespace)) 18 | |> utf8_string([{:not, ?\n}], min: 1) 19 | |> unwrap_and_tag(:comment) 20 | 21 | comment = 22 | ignore(optional(whitespace)) 23 | |> choice([empty_comment, comment_text]) 24 | |> reduce({Enum, :into, [%{}]}) 25 | 26 | defparsec :parsec_parse, comment 27 | defparsec :parsec_match, comment 28 | 29 | def parse(line), do: parse(line, %{sql: ""}) 30 | 31 | def parse(line, %{sql: lines} = state) do 32 | sql = lines <> line 33 | case parsec_parse(sql) do 34 | {:ok, [value], _, _, _, _} -> 35 | {:ok, value} 36 | {:error, reason, _, _, _, _} -> 37 | {:continue, Map.merge(state, %{sql: sql, error: reason})} 38 | end 39 | end 40 | 41 | def match(line) do 42 | case parsec_match(line) do 43 | {:ok, _, _, _, _, _} -> 44 | case parsec_parse(line) do 45 | {:ok, [value], _, _, _, _} -> 46 | {:ok, value} 47 | {:error, reason, _, _, _, _} -> 48 | {:continue, %{sql: line, error: reason}} 49 | end 50 | {:error, reason, _, _, _, _} -> 51 | {:error, reason} 52 | end 53 | end 54 | 55 | end 56 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/common.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.Common do 2 | @moduledoc "Utility functions for parsers." 
3 | 4 | import NimbleParsec 5 | 6 | def whitespace do 7 | ascii_char([32, ?\t, ?\n]) |> times(min: 1) |> label("whitespace") 8 | end 9 | 10 | # https://www.postgresql.org/docs/current/sql-syntax-lexical.html 11 | 12 | def identifier do 13 | # utf8_string([], min: 1) 14 | utf8_string([?a..?z, ?A..?Z, ?0..?9, ?_], min: 1) |> label("identifier") 15 | # utf8_string([{:not, ?.}], min: 1) 16 | end 17 | 18 | def quoted_identifier do 19 | ignore(ascii_char([?"])) 20 | |> concat(utf8_string([?a..?z, ?A..?Z, ?0..?9, ?_, 32], min: 1)) 21 | |> ignore(ascii_char([?"])) 22 | |> label("quoted identifier") 23 | end 24 | 25 | def name do 26 | choice([quoted_identifier(), identifier()]) |> label("name") 27 | end 28 | 29 | def schema_name do 30 | name() 31 | end 32 | 33 | def schema_qualified_table_name() do 34 | schema_name() |> ignore(ascii_char([?.])) |> concat(name()) 35 | end 36 | 37 | def table_name do 38 | choice([schema_qualified_table_name(), name()]) 39 | end 40 | 41 | def column_name do 42 | choice([quoted_identifier(), identifier()]) 43 | end 44 | 45 | def convert_type(value, acc) do 46 | [String.downcase(value) |> String.to_existing_atom() | acc] 47 | end 48 | 49 | # https://www.postgresql.org/docs/current/datatype.html 50 | def data_type() do 51 | choice(Enum.map([ 52 | "bigint", 53 | "bigserial", 54 | {"bit", :size}, 55 | {"bit varying", :size}, 56 | "boolean", 57 | "box", 58 | "bytea", 59 | {"character varying", :size}, 60 | {"character", :size}, 61 | "cidr", 62 | "circle", 63 | "date", 64 | "double precision", 65 | "inet", 66 | "integer", 67 | "jsonb", 68 | "json", 69 | "line", 70 | "lseg", 71 | "macaddr8", 72 | "macaddr", 73 | "money", 74 | {"numeric", [:precision, :scale]}, 75 | {"decimal", [:precision, :scale]}, 76 | "path", 77 | "pg_lsn", 78 | "point", 79 | "polygon", 80 | "real", 81 | "smallint", 82 | "smallserial", 83 | "serial", 84 | "text", 85 | "timestamp without time zone", 86 | "timestamp with time zone", 87 | "timestamp", 88 | "time without time zone", 89 
| "time with time zone", 90 | "time", 91 | "tsquery", 92 | "tsvector", 93 | "txid_snapshot", 94 | "uuid", 95 | "xml", 96 | ], &atom_type/1)) 97 | end 98 | 99 | def atom_type({name, :size}) do 100 | uc = String.upcase(name) 101 | a = String.to_atom(name) 102 | 103 | choice([string(name), string(uc)]) 104 | |> replace(a) 105 | |> unwrap_and_tag(:type) 106 | |> optional( 107 | ignore(ascii_char([?(])) 108 | |> integer(min: 1) 109 | |> ignore(ascii_char([?)])) 110 | |> unwrap_and_tag(:size) 111 | ) 112 | end 113 | def atom_type({name, [:precision, :scale]}) do 114 | uc = String.upcase(name) 115 | a = String.to_atom(name) 116 | 117 | precision = 118 | integer(min: 1) |> unwrap_and_tag(:precision) 119 | 120 | scale = 121 | integer(min: 1) |> unwrap_and_tag(:scale) 122 | 123 | precision_scale = 124 | ignore(ascii_char([?(])) 125 | |> concat(precision) 126 | |> ignore(ascii_char([?,])) 127 | |> concat(scale) 128 | |> ignore(ascii_char([?)])) 129 | 130 | just_precision = 131 | ignore(ascii_char([?(])) 132 | |> concat(precision) 133 | |> ignore(ascii_char([?)])) 134 | 135 | choice([string(name), string(uc)]) 136 | |> replace(a) 137 | |> unwrap_and_tag(:type) 138 | |> optional(choice([precision_scale, just_precision])) 139 | # |> optional( 140 | # ignore(ascii_char([?(])) 141 | # |> integer(min: 1) 142 | # |> ignore(ascii_char([?,])) 143 | # |> integer(min: 1) 144 | # |> ignore(ascii_char([?)])) 145 | # |> tag(:size) 146 | # ) 147 | end 148 | def atom_type(name) do 149 | uc = String.upcase(name) 150 | a = String.to_atom(name) 151 | choice([string(name), string(uc)]) |> replace(a) |> unwrap_and_tag(:type) 152 | end 153 | 154 | def column_list(tag_name) do 155 | ignore(ascii_char([?(])) 156 | |> times(name() |> ignore(optional(ascii_char([?,]))) |> ignore(optional(whitespace())), min: 1) 157 | |> ignore(ascii_char([?)])) 158 | |> tag(tag_name) 159 | end 160 | 161 | def table_name(tag_name) do 162 | bare = name() |> unwrap_and_tag(tag_name) 163 | schema = name() |> 
ignore(ascii_char([?.])) |> concat(name()) |> tag(tag_name) 164 | choice([schema, bare]) 165 | end 166 | 167 | # ignore_surrounding_whitespace = fn p -> 168 | # ignore(optional(whitespace)) 169 | # |> concat(p) 170 | # |> ignore(optional(whitespace)) 171 | # end 172 | # ignore_surrounding_whitespace.() 173 | 174 | lparen = ascii_char([?(]) |> label("(") 175 | rparen = ascii_char([?)]) |> label(")") 176 | 177 | expression = 178 | utf8_string([?a..?z, ?A..?Z, ?0..?9, ?_, ?=, ?>, ?<, ?\s, ?', ?-, ?:], min: 1) 179 | |> post_traverse(:wrap_parens) 180 | |> label("expression") 181 | 182 | defcombinatorp(:expr, 183 | ignore(lparen) 184 | |> choice([parsec(:expr), expression]) 185 | |> ignore(rparen) 186 | |> label("expr") 187 | ) 188 | 189 | def wrap_parens(_rest, acc, context, _line, _offset) do 190 | values = Enum.map(acc, &("(" <> &1 <> ")")) 191 | {values, context} 192 | end 193 | 194 | end 195 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/create_extension.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.CreateExtension do 2 | @moduledoc "Parser for CREATE EXTENSION." 
3 | 4 | import NimbleParsec 5 | 6 | alias EctoExtractMigrations.Parsers.Common 7 | 8 | # https://www.postgresql.org/docs/current/sql-createextension.html 9 | # https://www.postgresql.org/docs/current/sql-dropextension.html 10 | 11 | whitespace = Common.whitespace() 12 | name = Common.name() 13 | 14 | if_not_exists = 15 | ignore(optional(whitespace)) 16 | |> string("IF NOT EXISTS") 17 | 18 | with_ = 19 | ignore(optional(whitespace)) 20 | |> ignore(string("WITH")) 21 | |> label("WITH") 22 | 23 | schema = 24 | ignore(optional(whitespace)) 25 | |> ignore(string("SCHEMA")) 26 | |> ignore(whitespace) 27 | |> concat(name) |> unwrap_and_tag(:schema) 28 | |> label("SCHEMA") 29 | 30 | version = 31 | ignore(optional(whitespace)) 32 | |> ignore(string("VERSION")) 33 | |> ignore(whitespace) 34 | |> concat(name) |> unwrap_and_tag(:version) 35 | |> label("VERSION") 36 | 37 | old_version = 38 | ignore(optional(whitespace)) 39 | |> ignore(string("FROM")) 40 | |> ignore(whitespace) 41 | |> concat(name) |> unwrap_and_tag(:old_version) 42 | |> label("FROM") 43 | 44 | cascade = 45 | ignore(optional(whitespace)) 46 | |> ignore(string("CASCADE")) 47 | |> label("CASCADE") 48 | 49 | create_extension = 50 | ignore(string("CREATE EXTENSION")) 51 | |> ignore(optional(if_not_exists)) 52 | |> ignore(whitespace) 53 | |> concat(name) |> unwrap_and_tag(:name) 54 | |> optional(with_) 55 | |> optional(schema) 56 | |> optional(version) 57 | |> optional(old_version) 58 | |> optional(cascade) 59 | |> ignore(ascii_char([?;])) 60 | |> ignore(optional(whitespace)) 61 | |> reduce({Enum, :into, [%{}]}) 62 | 63 | defparsec :parsec_parse, create_extension 64 | defparsec :parsec_match, create_extension 65 | 66 | def parse(line), do: parse(line, %{sql: ""}) 67 | 68 | def parse(line, %{sql: lines} = state) do 69 | sql = lines <> line 70 | case parsec_parse(sql) do 71 | {:ok, [value], _, _, _, _} -> 72 | {:ok, value} 73 | {:error, reason, _, _, _, _} -> 74 | {:continue, Map.merge(state, %{sql: sql, error: 
defmodule EctoExtractMigrations.Parsers.CreateFunction do
  @moduledoc "Parser for CREATE FUNCTION."

  import NimbleParsec

  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createfunction.html
  #
  # CREATE [ OR REPLACE ] FUNCTION
  #     name ( [ [ argmode ] [ argname ] argtype [ { DEFAULT | = } default_expr ] [, ...] ] )
  #     [ RETURNS rettype ]
  #     { LANGUAGE lang_name
  #     | IMMUTABLE | STABLE | VOLATILE | [ NOT ] LEAKPROOF
  #     | AS 'definition'
  #     | ... } ...
  #
  # Example:
  #
  # CREATE FUNCTION public.cast_to_decimal(text, numeric) RETURNS numeric
  #     LANGUAGE plpgsql IMMUTABLE
  #     AS $_$
  #     begin
  #         return cast($1 as decimal);
  #     exception
  #         when invalid_text_representation then
  #             return $2;
  #     end;
  #     $_$;

  ws = Common.whitespace()
  ident = Common.name()

  # Function name: either bare or qualified with a schema prefix.
  plain_name = ident |> unwrap_and_tag(:name)

  qualified_name =
    ident
    |> ignore(ascii_char([?.]))
    |> concat(ident)
    |> tag(:name)

  function_name = choice([qualified_name, plain_name])

  lparen = ascii_char([?(]) |> label("(")
  rparen = ascii_char([?)]) |> label(")")

  or_replace =
    ignore(ws)
    |> ignore(string("OR REPLACE"))

  # The argument list is skipped wholesale: everything up to the closing
  # paren. NOTE: this cannot handle nested parens inside default expressions.
  arg_list = utf8_string([{:not, ?)}], min: 0)

  returns =
    ignore(ws)
    |> ignore(string("RETURNS"))
    |> ignore(ws)
    |> concat(ident)

  language =
    ignore(ws)
    |> ignore(string("LANGUAGE"))
    |> ignore(ws)
    |> concat(ident)

  volatility =
    ignore(ws)
    |> choice([
      string("IMMUTABLE"),
      string("STABLE"),
      string("VOLATILE"),
      choice([string("NOT LEAKPROOF"), string("LEAKPROOF")])
    ])

  # AS $delimiter$ -- captures the dollar-quoting token so parse/2 can
  # recognize where the function body ends.
  as_clause =
    ignore(ws)
    |> ignore(string("AS"))
    |> ignore(ws)
    |> ascii_string([{:not, ?\s}, {:not, ?\n}], min: 1)
    |> unwrap_and_tag(:delimiter)

  create_function =
    ignore(string("CREATE"))
    |> ignore(optional(or_replace))
    |> ignore(ws)
    |> ignore(string("FUNCTION"))
    |> ignore(ws)
    |> concat(function_name)
    |> ignore(optional(ws))
    |> ignore(lparen)
    |> ignore(arg_list)
    |> ignore(rparen)
    |> ignore(returns)
    |> ignore(optional(language))
    |> ignore(optional(volatility))
    |> concat(as_clause)
    |> reduce({Enum, :into, [%{}]})

  # Prefix-only matcher: cheap test for whether a line starts a statement.
  match_create_function =
    ignore(string("CREATE"))
    |> ignore(optional(or_replace))
    |> ignore(ws)
    |> ignore(string("FUNCTION"))

  defparsec :parsec_parse, create_function
  defparsec :parsec_match, match_create_function

  @doc "Parse a line with a fresh accumulator state."
  def parse(line), do: parse(line, %{sql: ""})

  @doc """
  Parse a line, accumulating partial SQL across calls.

  After the statement header has been parsed, body lines are buffered until
  the closing dollar-quote delimiter (followed by `;`) appears on a line of
  its own. Returns `{:ok, map}` when complete, `{:continue, state}` otherwise.
  """
  def parse(line, %{sql: buffered, delimiter: delimiter, data: data} = state) do
    terminator = delimiter <> ";\n"

    if line == terminator do
      {:ok, data}
    else
      {:continue, Map.merge(state, %{sql: buffered <> line})}
    end
  end

  def parse(line, %{sql: buffered} = state) do
    sql = buffered <> line

    case parsec_parse(sql) do
      {:ok, [value], _, _, _, _} ->
        if String.ends_with?(sql, value.delimiter <> ";\n") do
          # The whole statement arrived at once, not line by line.
          {:ok, value}
        else
          # Header parsed; keep reading body lines up to the delimiter.
          {:continue, Map.merge(state, %{sql: sql, data: value, delimiter: value.delimiter})}
        end

      {:error, _reason, _, _, _, _} = error ->
        {:continue, Map.merge(state, %{sql: sql, error: error})}
    end
  end

  @doc "Check whether a line begins a CREATE FUNCTION statement."
  def match(line) do
    with {:ok, _, _, _, _, _} <- parsec_match(line) do
      case parsec_parse(line) do
        {:ok, [value], _, _, _, _} ->
          {:ok, value}

        {:error, _reason, _, _, _, _} = error ->
          {:continue, %{sql: line, error: error}}
      end
    else
      {:error, reason, _, _, _, _} ->
        {:error, reason}
    end
  end

end
defmodule EctoExtractMigrations.Parsers.CreateIndex do
  @moduledoc "Parser for CREATE INDEX."

  import NimbleParsec

  require EctoExtractMigrations.Parsers.Common
  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createindex.html

  # CREATE [ UNIQUE ] INDEX [ CONCURRENTLY ] [ [ IF NOT EXISTS ] name ] ON [ ONLY ] table_name [ USING method ]
  #     ( { column_name | ( expression ) } [ COLLATE collation ] [ opclass ] [ ASC | DESC ] [ NULLS { FIRST | LAST } ] [, ...] )
  #     [ INCLUDE ( column_name [, ...] ) ]
  #     [ WITH ( storage_parameter = value [, ... ] ) ]
  #     [ TABLESPACE tablespace_name ]
  #     [ WHERE predicate ]

  whitespace = Common.whitespace()
  identifier = Common.identifier()
  name = Common.name()

  table_name =
    Common.table_name(:table_name)

  index_name =
    Common.table_name(:name)

  column_name = Common.column_name() |> post_traverse(:to_atom) |> label("column_name")

  # NOTE(review): String.to_atom/1 creates atoms dynamically from parsed SQL;
  # atoms are never garbage collected. This is acceptable for trusted schema
  # dumps, but this parser must not be run on untrusted input.
  defp to_atom(_rest, acc, context, _line, _offset) do
    atoms = Enum.map(acc, &String.to_atom/1)
    {atoms, context}
  end

  # post_traverse helper: re-wrap a captured expression in parens (the
  # surrounding `( )` were consumed by the :expr combinator).
  defp wrap_parens(_rest, acc, context, _line, _offset) do
    values = Enum.map(acc, &("(" <> &1 <> ")"))
    {values, context}
  end

  # post_traverse helper: strip the outer parens from a WHERE predicate.
  defp unwrap_parens_traverse(_rest, acc, context, _line, _offset) do
    values = Enum.map(acc, &unwrap_parens/1)
    {values, context}
  end

  @doc "Strip one leading `(` and one trailing `)` from a string."
  def unwrap_parens(value) do
    String.replace_prefix(value, "(", "") |> String.replace_suffix(")", "")
  end

  unique =
    ignore(optional(whitespace))
    |> string("UNIQUE")
    |> replace(true)
    |> unwrap_and_tag(:unique)

  concurrently =
    ignore(optional(whitespace))
    |> string("CONCURRENTLY")
    |> replace(true)
    |> unwrap_and_tag(:concurrently)

  if_not_exists =
    ignore(optional(whitespace))
    |> string("IF NOT EXISTS")

  only =
    ignore(optional(whitespace))
    |> string("ONLY")

  using =
    ignore(whitespace)
    |> ignore(string("USING"))
    |> ignore(whitespace)
    |> concat(Common.name())
    |> unwrap_and_tag(:using)

  lparen = ascii_char([?(]) |> label("(")
  rparen = ascii_char([?)]) |> label(")")

  expression =
    utf8_string([?a..?z, ?A..?Z, ?0..?9, ?_, ?=, ?>, ?<, ?\s, ?', ?-, ?:], min: 1)
    |> post_traverse(:wrap_parens)
    |> label("expression")

  sql_expression =
    utf8_string([{:not, ?;}], min: 1)
    |> label("sql_expression")

  # Recursive combinator for (possibly nested) parenthesized expressions.
  defcombinatorp(:expr,
    ignore(lparen)
    |> choice([parsec(:expr), expression])
    |> ignore(rparen)
    |> label("expr")
  )

  column =
    choice([column_name, parsec(:expr)])

  key =
    ignore(lparen)
    |> times(column |> ignore(optional(ascii_char([?,]))) |> ignore(optional(whitespace)), min: 1)
    |> ignore(rparen)
    |> tag(:key)
    |> label("key")

  include =
    ignore(optional(whitespace))
    |> ignore(string("INCLUDE"))
    |> ignore(whitespace)
    |> ignore(lparen)
    |> times(column |> ignore(optional(ascii_char([?,]))) |> ignore(optional(whitespace)), min: 1)
    |> ignore(rparen)
    |> tag(:include)
    |> label("include")

  equal_sign = ascii_char([?=]) |> label("=")
  semicolon = ascii_char([?;]) |> label(";")

  storage_parameter =
    identifier
    |> concat(whitespace)
    |> concat(equal_sign)
    |> concat(whitespace)
    |> concat(identifier)
    |> label("storage_parameter")

  with_ =
    ignore(optional(whitespace))
    |> ignore(string("WITH"))
    |> ignore(whitespace)
    |> ignore(lparen)
    |> times(storage_parameter |> ignore(optional(ascii_char([?,]))) |> ignore(optional(whitespace)), min: 1)
    |> ignore(rparen)
    |> tag(:with)
    |> label("with")

  tablespace =
    ignore(optional(whitespace))
    |> ignore(string("TABLESPACE"))
    |> ignore(whitespace)
    |> concat(name)
    |> unwrap_and_tag(:tablespace)
    |> label("tablespace")

  where =
    ignore(optional(whitespace))
    |> ignore(string("WHERE"))
    |> ignore(whitespace)
    |> concat(sql_expression) |> post_traverse(:unwrap_parens_traverse)
    |> unwrap_and_tag(:where)
    |> label("where")

  create_index =
    ignore(string("CREATE"))
    |> optional(unique)
    |> ignore(whitespace)
    |> ignore(string("INDEX"))
    |> optional(concurrently)
    |> ignore(optional(if_not_exists))
    |> ignore(whitespace)
    |> concat(index_name)
    |> ignore(whitespace)
    |> ignore(string("ON"))
    |> ignore(optional(only))
    |> ignore(whitespace)
    |> concat(table_name)
    |> optional(using)
    |> ignore(whitespace)
    |> concat(key)
    |> optional(include)
    |> optional(with_)
    |> optional(tablespace)
    |> optional(where)
    |> ignore(semicolon)
    |> ignore(optional(whitespace))
    |> reduce({Enum, :into, [%{}]})

  defparsec :parsec_parse, create_index
  defparsec :parsec_match, create_index

  # Corresponding Ecto migration options:
  #
  # :name - the name of the index. Defaults to "#{table}_#{column}_index".
  # :unique - indicates whether the index should be unique. Defaults to false.
  # :concurrently - indicates whether the index should be created/dropped concurrently.
  # :using - configures the index type.
  # :prefix - specify an optional prefix for the index.
  # :where - specify conditions for a partial index.
  # :include - specify fields for a covering index. This is not supported by all databases. For more information on PostgreSQL support, please read the official docs.

  @doc "Parse a line with a fresh accumulator state."
  def parse(line), do: parse(line, %{sql: ""})

  @doc """
  Parse a line, accumulating partial SQL across calls.

  Returns `{:ok, map}` on a complete statement, otherwise `{:continue, state}`
  so the caller can feed the next line.
  """
  def parse(line, %{sql: lines} = state) do
    sql = lines <> line
    case parsec_parse(sql) do
      {:ok, [value], _, _, _, _} ->
        {:ok, value}
      {:error, reason, _, _, _, _} ->
        {:continue, Map.merge(state, %{sql: sql, error: reason})}
    end
  end

  @doc "Check whether a line is a complete CREATE INDEX statement."
  def match(line) do
    case parsec_match(line) do
      {:ok, _, _, _, _, _} ->
        case parsec_parse(line) do
          {:ok, [value], _, _, _, _} ->
            {:ok, value}
          {:error, reason, _, _, _, _} ->
            {:continue, %{sql: line, error: reason}}
        end
      {:error, reason, _, _, _, _} ->
        {:error, reason}
    end
  end

end
defmodule EctoExtractMigrations.Parsers.CreateSchema do
  @moduledoc "Parser for CREATE SCHEMA."

  import NimbleParsec

  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createschema.html
  #
  # Example: CREATE SCHEMA foo;

  ws = Common.whitespace()
  schema_name = Common.name()

  create_schema =
    ignore(string("CREATE SCHEMA"))
    |> ignore(ws)
    |> concat(schema_name)
    |> unwrap_and_tag(:name)
    |> ignore(ascii_char([?;]))
    |> ignore(optional(ws))
    |> reduce({Enum, :into, [%{}]})

  defparsec :parsec_parse, create_schema
  defparsec :parsec_match, create_schema

  @doc "Parse a line with a fresh accumulator state."
  def parse(line), do: parse(line, %{sql: ""})

  @doc """
  Parse a line, accumulating partial SQL across calls.

  Returns `{:ok, %{name: schema}}` on a complete statement, otherwise
  `{:continue, state}` so the caller can feed the next line.
  """
  def parse(line, %{sql: buffered} = state) do
    statement = buffered <> line

    case parsec_parse(statement) do
      {:ok, [value], _, _, _, _} ->
        {:ok, value}

      {:error, reason, _, _, _, _} ->
        {:continue, Map.merge(state, %{sql: statement, error: reason})}
    end
  end

  @doc "Check whether a line is a complete CREATE SCHEMA statement."
  def match(line) do
    with {:ok, _, _, _, _, _} <- parsec_match(line) do
      case parsec_parse(line) do
        {:ok, [value], _, _, _, _} ->
          {:ok, value}

        {:error, reason, _, _, _, _} ->
          {:continue, %{sql: line, error: reason}}
      end
    else
      {:error, reason, _, _, _, _} ->
        {:error, reason}
    end
  end

end
defmodule EctoExtractMigrations.Parsers.CreateSequence do
  @moduledoc "Parser for CREATE SEQUENCE."

  import NimbleParsec

  require EctoExtractMigrations.Parsers.Common
  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createsequence.html

  whitespace = Common.whitespace()

  sequence_name =
    Common.table_name(:name)

  temporary =
    ignore(optional(whitespace))
    |> choice([string("TEMPORARY"), string("TEMP")])
    |> replace(true)
    |> unwrap_and_tag(:temporary)

  if_not_exists =
    ignore(optional(whitespace))
    |> string("IF NOT EXISTS")

  # AS data_type
  # NOTE(review): "bigint" is mapped to :integer while "smallint" keeps
  # :smallint -- looks like a copy-paste slip (:bigint expected), but the
  # downstream command module may rely on :integer; confirm before changing.
  data_type =
    ignore(optional(whitespace))
    |> ignore(string("AS"))
    |> ignore(whitespace)
    |> choice([
      string("smallint") |> replace(:smallint),
      string("integer") |> replace(:integer),
      string("bigint") |> replace(:integer)])
    |> unwrap_and_tag(:data_type)

  # INCREMENT [ BY ] n
  increment =
    ignore(optional(whitespace))
    |> ignore(string("INCREMENT"))
    |> ignore(optional(whitespace))
    |> ignore(optional(string("BY")))
    |> ignore(optional(whitespace))
    |> integer(min: 1)
    |> unwrap_and_tag(:increment)

  minvalue =
    ignore(optional(whitespace))
    |> ignore(string("MINVALUE"))
    |> ignore(whitespace)
    |> integer(min: 1)
    |> unwrap_and_tag(:minvalue)

  no_minvalue =
    ignore(optional(whitespace))
    |> string("NO MINVALUE") |> replace(false)
    |> unwrap_and_tag(:minvalue)

  maxvalue =
    ignore(optional(whitespace))
    |> ignore(string("MAXVALUE"))
    |> ignore(whitespace)
    |> integer(min: 1)
    |> unwrap_and_tag(:maxvalue)

  no_maxvalue =
    ignore(optional(whitespace))
    |> string("NO MAXVALUE") |> replace(false)
    |> unwrap_and_tag(:maxvalue)

  # START [ WITH ] n
  start =
    ignore(optional(whitespace))
    |> ignore(string("START"))
    |> ignore(optional(whitespace))
    |> ignore(optional(string("WITH")))
    |> ignore(optional(whitespace))
    |> integer(min: 1)
    |> unwrap_and_tag(:start)

  cache =
    ignore(optional(whitespace))
    |> ignore(string("CACHE"))
    |> ignore(whitespace)
    |> integer(min: 1)
    |> unwrap_and_tag(:cache)

  # "NO CYCLE" must be tried before "CYCLE" (prefix overlap).
  cycle =
    ignore(optional(whitespace))
    |> choice([string("NO CYCLE") |> replace(false), string("CYCLE") |> replace(true)])
    |> unwrap_and_tag(:cycle)

  # OWNED BY table.column | NONE
  # Fix: the "OWNED BY" keyword itself is now ignored; previously it was left
  # in the parse result, so unwrap_and_tag(:owned_by) received two values
  # (the keyword and the owner) and raised whenever OWNED BY was present.
  owned_by =
    ignore(optional(whitespace))
    |> ignore(string("OWNED BY"))
    |> ignore(whitespace)
    |> choice([Common.table_name(), string("NONE") |> replace(:none)])
    |> unwrap_and_tag(:owned_by)

  create_sequence =
    ignore(string("CREATE"))
    |> optional(temporary)
    |> ignore(whitespace)
    |> ignore(string("SEQUENCE"))
    |> ignore(optional(if_not_exists))
    |> ignore(whitespace)
    |> concat(sequence_name)
    |> optional(data_type)
    |> ignore(whitespace)
    |> optional(start)
    |> optional(increment)
    |> optional(choice([minvalue, no_minvalue]))
    |> optional(choice([maxvalue, no_maxvalue]))
    |> optional(cache)
    |> optional(cycle)
    |> optional(owned_by)
    |> ignore(ascii_char([?;]))
    |> ignore(optional(whitespace))
    |> reduce({Enum, :into, [%{}]})

  # Prefix-only matcher: cheap test for whether a line starts a statement.
  match_create_sequence =
    ignore(string("CREATE"))
    |> optional(temporary)
    |> ignore(whitespace)
    |> ignore(string("SEQUENCE"))

  defparsec :parsec_parse, create_sequence
  defparsec :parsec_match, match_create_sequence

  @doc "Parse a line with a fresh accumulator state."
  def parse(line), do: parse(line, %{sql: ""})

  @doc """
  Parse a line, accumulating partial SQL across calls.

  Returns `{:ok, map}` on a complete statement, otherwise `{:continue, state}`
  so the caller can feed the next line.
  """
  def parse(line, %{sql: lines} = state) do
    sql = lines <> line
    case parsec_parse(sql) do
      {:ok, [value], _, _, _, _} ->
        {:ok, value}
      {:error, reason, _, _, _, _} ->
        {:continue, Map.merge(state, %{sql: sql, error: reason})}
    end
  end

  @doc "Check whether a line begins a CREATE SEQUENCE statement."
  def match(line) do
    case parsec_match(line) do
      {:ok, _, _, _, _, _} ->
        case parsec_parse(line) do
          {:ok, [value], _, _, _, _} ->
            {:ok, value}
          {:error, reason, _, _, _, _} ->
            {:continue, %{sql: line, error: reason}}
        end
      {:error, reason, _, _, _, _} ->
        {:error, reason}
    end
  end

end
defmodule EctoExtractMigrations.Parsers.CreateTable do
  @moduledoc "Parser for CREATE TABLE."

  import NimbleParsec

  require EctoExtractMigrations.Parsers.Common
  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createtable.html

  # TODO
  #
  # Parse CONSTRAINTS with new expression parser
  # Column options are not in order, use choice
  # e.g. public.login_log

  whitespace = Common.whitespace()
  name = Common.name()

  global =
    choice([string("GLOBAL"), string("LOCAL")])
    |> concat(whitespace)

  temporary =
    choice([string("TEMPORARY"), string("TEMP")])
    |> concat(whitespace)

  unlogged =
    string("UNLOGGED") |> ignore(whitespace)

  if_not_exists =
    string("IF NOT EXISTS") |> ignore(whitespace)

  # Table name: either bare or qualified with a schema prefix.
  schema_name = name
  bare_table_name = name |> unwrap_and_tag(:name)
  schema_qualified_table_name =
    schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:name)

  table_name = choice([schema_qualified_table_name, bare_table_name])


  # [ CONSTRAINT constraint_name ]
  # { CHECK ( expression ) [ NO INHERIT ] |
  #   UNIQUE ( column_name [, ... ] ) index_parameters |
  #   PRIMARY KEY ( column_name [, ... ] ) index_parameters |
  #   EXCLUDE [ USING index_method ] ( exclude_element WITH operator [, ... ] ) index_parameters [ WHERE ( predicate ) ] |
  #   FOREIGN KEY ( column_name [, ... ] ) REFERENCES reftable [ ( refcolumn [, ... ] ) ]
  #   [ MATCH FULL | MATCH PARTIAL | MATCH SIMPLE ] [ ON DELETE referential_action ] [ ON UPDATE referential_action ] }
  # [ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]

  # CONSTRAINT case_coupon_current_uses_check CHECK ((current_uses >= 0))

  lparen = ascii_char([?(]) |> label("(")
  rparen = ascii_char([?)]) |> label(")")

  # post_traverse helper: re-wrap a captured expression in parens (the
  # surrounding `( )` were consumed by the :expr combinator).
  def wrap_parens(_rest, acc, context, _line, _offset) do
    values = Enum.map(acc, &("(" <> &1 <> ")"))
    {values, context}
  end

  expression =
    utf8_string([?a..?z, ?A..?Z, ?0..?9, ?_, ?=, ?>, ?<, ?\s, ?', ?-, ?:], min: 1)
    |> post_traverse(:wrap_parens)
    |> label("expression")

  # Recursive combinator for (possibly nested) parenthesized expressions.
  defcombinatorp(:expr,
    ignore(lparen)
    |> choice([parsec(:expr), expression])
    |> ignore(rparen)
    |> label("expr")
  )

  table_constraint_check =
    ignore(string("CHECK"))
    |> ignore(whitespace)
    |> parsec(:expr)
    |> unwrap_and_tag(:check)
    |> optional(string("NO INHERIT"))
    |> label("CHECK")

  table_constraint_name =
    name |> unwrap_and_tag(:name) |> label("constraint_name")

  table_constraint =
    string("CONSTRAINT") |> replace(:constraint) |> unwrap_and_tag(:type)
    |> ignore(whitespace)
    |> concat(table_constraint_name)
    |> ignore(whitespace)
    |> concat(table_constraint_check)

  column_name = name

  # Map a type name string to an existing atom (lowercased).
  def convert_type(value, acc) do
    [String.downcase(value) |> String.to_existing_atom() | acc]
  end

  # interval [ fields ] [ (p) ]
  # time [ (p) ] [ without time zone ]
  # time [ (p) ] with time zone timetz
  # timestamp [ (p) ] [ without time zone ]
  # timestamp [ (p) ] with time zone timestamptz
  # https://www.postgresql.org/docs/current/datatype.html
  #
  # Longer names must come before their prefixes (e.g. "character varying"
  # before "character") so choice/1 matches greedily.
  data_type =
    choice(Enum.map([
      "bigint",
      "bigserial",
      {"bit", :size},
      {"bit varying", :size},
      "boolean",
      "box",
      "bytea",
      {"character varying", :size},
      {"character", :size},
      "cidr",
      "circle",
      "date",
      "double precision",
      "inet",
      "integer",
      "jsonb",
      "json",
      "line",
      "lseg",
      "macaddr8",
      "macaddr",
      "money",
      {"numeric", [:precision, :scale]},
      {"decimal", [:precision, :scale]},
      "path",
      "pg_lsn",
      "point",
      "polygon",
      "real",
      "smallint",
      "smallserial",
      "serial",
      "text",
      "timestamp without time zone",
      "timestamp with time zone",
      "timestamp",
      "time without time zone",
      "time with time zone",
      "time",
      "tsquery",
      "tsvector",
      "txid_snapshot",
      "uuid",
      "xml",
    ], &Common.atom_type/1))

  user_defined_type_bare =
    name |> unwrap_and_tag(:type)

  user_defined_type_schema_qualified =
    schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:type)

  user_defined_type =
    choice([user_defined_type_schema_qualified, user_defined_type_bare])

  collation =
    ignore(whitespace)
    |> ignore(string("COLLATE"))
    |> ignore(whitespace)
    |> concat(name)
    |> unwrap_and_tag(:collation)
    |> label("collation")

  # column_constraint
  #
  # [ CONSTRAINT constraint_name ]
  # { NOT NULL |
  #   NULL |
  #   CHECK ( expression ) [ NO INHERIT ] |
  #   DEFAULT default_expr |
  #   GENERATED ALWAYS AS ( generation_expr ) STORED |
  #   GENERATED { ALWAYS | BY DEFAULT } AS IDENTITY [ ( sequence_options ) ] |
  #   UNIQUE index_parameters |
  #   PRIMARY KEY index_parameters |
  #   REFERENCES reftable [ ( refcolumn ) ] [ MATCH FULL | MATCH PARTIAL | MATCH SIMPLE ]
  #   [ ON DELETE referential_action ] [ ON UPDATE referential_action ] }
  # [ DEFERRABLE | NOT DEFERRABLE ] [ INITIALLY DEFERRED | INITIALLY IMMEDIATE ]

  null =
    ignore(whitespace)
    |> choice([
      string("NULL") |> replace(true),
      string("NOT NULL") |> replace(false)
    ])
    |> unwrap_and_tag(:null)

  primary_key =
    ignore(whitespace)
    |> string("PRIMARY KEY")
    |> replace(true)
    |> unwrap_and_tag(:primary_key)

  default =
    ignore(whitespace)
    |> ignore(string("DEFAULT"))
    |> ignore(whitespace)
    |> choice([
      string("now()") |> unwrap_and_tag(:fragment),
      string("timezone('UTC'::text, now())") |> unwrap_and_tag(:fragment),
      string("NULL::character varying") |> unwrap_and_tag(:fragment), # redundant
      string("CURRENT_TIMESTAMP") |> unwrap_and_tag(:fragment),
      string("''")
      |> ignore(
        optional(
          choice([
            string("::character varying"),
            string("::character"),
            string("::text"),
          ])
        )
        |> optional(string("[]"))
      )
      |> replace(""),
      ignore(ascii_char([?']))
      |> utf8_string([{:not, ?'}], min: 1)
      |> ignore(ascii_char([?']))
      |> ignore(
        optional(
          choice([
            string("::character varying"),
            string("::character"),
            string("::text"),
            string("::bytea"),
            string("::jsonb"),
            string("::json"),
            string("::integer"),
          ])
        )
        |> optional(string("[]"))
      ),
      integer(min: 1),
      choice([
        string("true") |> replace(true),
        string("TRUE") |> replace(true),
        string("false") |> replace(false),
        string("FALSE") |> replace(false),
      ])
    ]) |> unwrap_and_tag(:default) |> label("default")

  match =
    optional(whitespace)
    |> choice([
      string("MATCH FULL"),
      string("MATCH PARTIAL"),
      string("MATCH SIMPLE")
    ])
    |> label("match")

  on_delete =
    ignore(whitespace)
    |> ignore(string("ON DELETE"))
    |> ignore(whitespace)
    |> choice([
      string("CASCADE") |> replace(:cascade),
      string("RESTRICT") |> replace(:restrict),
      string("SET NULL") |> replace(:set_null)
    ])
    |> unwrap_and_tag(:on_delete)
    |> label("ON DELETE")

  on_update =
    ignore(whitespace)
    |> ignore(string("ON UPDATE"))
    |> ignore(whitespace)
    |> choice([
      string("CASCADE") |> replace(:cascade),
      string("RESTRICT") |> replace(:restrict),
      string("SET NULL") |> replace(:set_null)
    ])
    |> unwrap_and_tag(:on_update)
    |> label("ON UPDATE")

  references =
    ignore(whitespace)
    |> ignore(string("REFERENCES"))
    |> ignore(whitespace)
    |> concat(Common.table_name(:references_table))
    |> ignore(optional(whitespace))
    |> concat(Common.column_list(:references_column))
    |> optional(match)
    |> times(choice([on_delete, on_update]), min: 0)

  column_constraint_name =
    ignore(whitespace)
    |> string("CONSTRAINT")
    |> concat(name)
    |> unwrap_and_tag(:constraint_name)
    |> label("constraint_name")

  column_constraint =
    optional(column_constraint_name)
    |> times(choice([null, default, primary_key, references]), min: 0)

  column_definition =
    column_name |> unwrap_and_tag(:name)
    |> ignore(whitespace)
    |> choice([data_type, user_defined_type])
    |> optional(string("[]") |> replace(true) |> unwrap_and_tag(:is_array))
    |> optional(collation)
    |> optional(column_constraint)

  column_spec =
    ignore(times(whitespace, min: 0))
    |> choice([table_constraint, column_definition])
    |> ignore(optional(ascii_char([?,]))) |> label(",")
    |> reduce({Enum, :into, [%{}]})

  create_table =
    ignore(string("CREATE")) |> ignore(whitespace)
    |> ignore(optional(global))
    |> ignore(optional(temporary))
    |> ignore(optional(unlogged))
    |> ignore(string("TABLE")) |> ignore(whitespace)
    |> ignore(optional(if_not_exists))
    |> concat(table_name)
    |> ignore(whitespace)
    |> ignore(ascii_char([?(]))
    |> ignore(optional(whitespace))
    |> times(column_spec, min: 0)
    |> ignore(times(whitespace, min: 0))
    |> ignore(string(");")) |> label(";")
    |> ignore(optional(whitespace))

  # Prefix-only matcher: cheap test for whether a line starts a statement.
  match_create_table =
    ignore(string("CREATE")) |> ignore(whitespace)
    |> ignore(optional(global))
    |> ignore(optional(temporary))
    |> ignore(optional(unlogged))
    |> ignore(string("TABLE"))

  defparsec :parsec_table_constraint, table_constraint
  defparsec :parsec_table_name, table_name
  defparsec :parsec_create_table, create_table
  defparsec :parsec_column, column_spec

  defparsec :parsec_parse, create_table
  defparsec :parsec_match, match_create_table

  # Split the raw parse result into table attributes, column definitions and
  # table constraints; :constraints is omitted when empty.
  defp postprocess_value(value) do
    {attrs, columns} = Enum.reduce(value, {%{}, []}, &split_attrs_columns/2)
    columns = Enum.reverse(columns)

    {constraints, columns} = Enum.split_with(columns, &is_constraint/1)
    attrs = Map.merge(attrs, %{columns: columns, constraints: constraints})
    attrs = if constraints == [] do
      Map.drop(attrs, [:constraints])
    else
      attrs
    end

    attrs
  end

  @doc "Parse a line with a fresh accumulator state."
  def parse(line), do: parse(line, %{sql: ""})

  @doc """
  Parse a line, accumulating partial SQL across calls.

  Returns `{:ok, map}` on a complete statement, otherwise `{:continue, state}`
  so the caller can feed the next line.
  """
  def parse(line, %{sql: lines} = state) do
    sql = lines <> line
    case parsec_parse(sql) do
      {:ok, value, _, _, _, _} ->
        {:ok, postprocess_value(value)}
      {:error, _, _, _, _, _} = error ->
        {:continue, Map.merge(state, %{sql: sql, error: error})}
    end
  end

  @doc "Check whether a line begins a CREATE TABLE statement."
  def match(line) do
    case parsec_match(line) do
      {:ok, _, _, _, _, _} ->
        case parsec_parse(line) do
          {:ok, value, _, _, _, _} ->
            {:ok, postprocess_value(value)}
          {:error, _, _, _, _, _} = error ->
            {:continue, %{sql: line, error: error}}
        end
      {:error, reason, _, _, _, _} ->
        {:error, reason}
    end
  end

  def parse_column(sql), do: value(parsec_column(sql))
  def parse_table_constraint(sql), do: value(parsec_table_constraint(sql))

  # Whether definition is a constraint
  def is_constraint(%{type: :constraint}), do: true
  def is_constraint(_), do: false

  # Separate table attributes from column definitions
  def split_attrs_columns(value, {m, l}) when is_map(value), do: {m, [value | l]}
  def split_attrs_columns({key, value}, {m, l}), do: {Map.put(m, key, value), l}

  # Column attribute reference for migrations (this text was previously
  # mis-attached as the @doc of parse_table_name/1):
  #
  # * :primary_key - when true, marks this field as the primary key.
  #   If multiple fields are marked, a composite primary key will be created.
  # * :default - the column's default value. It can be a string, number, empty
  #   list, list of strings, list of numbers, or a fragment generated by
  #   fragment/1.
  # * :null - when false, the column does not allow null values.
  # * :size - the size of the type (for example, the number of characters).
  #   The default is no size, except for :string, which defaults to 255.
  # * :precision - the precision for a numeric type. Required when :scale is
  #   specified.
  # * :scale - the scale of a numeric type. Defaults to 0.

  @doc "Parse a table name, bare or schema-qualified."
  def parse_table_name(name), do: value(parsec_table_name(name))

  # Convert parsec result tuple to something simpler
  def value({:ok, value, _, _, _, _}), do: {:ok, value}
  def value(result), do: result

end
] ] 24 | # DELETE 25 | # TRUNCATE 26 | 27 | # CREATE TRIGGER chat_message_update BEFORE UPDATE ON chat.message FOR EACH ROW EXECUTE PROCEDURE public.chat_update_timestamp(); 28 | 29 | whitespace = Common.whitespace() 30 | name = Common.name() 31 | 32 | constraint = 33 | ignore(optional(whitespace)) 34 | |> ignore(string("CONSTRAINT")) 35 | 36 | trigger_name = 37 | name |> unwrap_and_tag(:name) 38 | 39 | # when_ = 40 | # choice([ 41 | # string("BEFORE"), 42 | # string("AFTER"), 43 | # string("INSTEAD OF") 44 | # ]) 45 | 46 | # event = 47 | # choice([ 48 | # string("INSERT"), 49 | # string("UPDATE"), 50 | # string("DELETE"), 51 | # string("TRUNCATE") 52 | # ]) 53 | 54 | # table_name = 55 | # Common.table_name(:table_name) 56 | 57 | semicolon = ascii_char([?;]) |> label(";") 58 | 59 | create_trigger = 60 | ignore(string("CREATE")) 61 | |> optional(constraint) 62 | |> ignore(whitespace) 63 | |> ignore(string("TRIGGER")) 64 | |> ignore(whitespace) 65 | |> concat(trigger_name) 66 | # |> ignore(when_) 67 | # |> ignore(whitespace) 68 | # |> ignore(event) 69 | # |> ignore(whitespace) 70 | # |> ignore(string("ON")) 71 | # |> ignore(whitespace) 72 | # |> concat(table_name) 73 | # |> ignore(optional(choice([ 74 | # string("FOR EACH ROW"), 75 | # string("FOR EACH STATEMENT") 76 | # ]) 77 | |> ignore(utf8_string([{:not, ?;}], min: 1)) 78 | |> ignore(semicolon) 79 | |> ignore(optional(whitespace)) 80 | |> reduce({Enum, :into, [%{}]}) 81 | 82 | defparsec :parsec_parse, create_trigger 83 | defparsec :parsec_match, create_trigger 84 | 85 | def parse(line), do: parse(line, %{sql: ""}) 86 | 87 | def parse(line, %{sql: lines} = state) do 88 | sql = lines <> line 89 | case parsec_parse(sql) do 90 | {:ok, [value], _, _, _, _} -> 91 | {:ok, value} 92 | {:error, reason, _, _, _, _} -> 93 | {:continue, Map.merge(state, %{sql: sql, error: reason})} 94 | end 95 | end 96 | 97 | def match(line) do 98 | case parsec_match(line) do 99 | {:ok, _, _, _, _, _} -> 100 | case parsec_parse(line) do 101 
| {:ok, [value], _, _, _, _} -> 102 | {:ok, value} 103 | {:error, reason, _, _, _, _} -> 104 | {:continue, %{sql: line, error: reason}} 105 | end 106 | {:error, reason, _, _, _, _} -> 107 | {:error, reason} 108 | end 109 | end 110 | 111 | end 112 | -------------------------------------------------------------------------------- /lib/ecto_extract_migrations/parsers/create_type.ex: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.Parsers.CreateType do 2 | @moduledoc "Parser for CREATE TYPE." 3 | 4 | import NimbleParsec 5 | 6 | alias EctoExtractMigrations.Parsers.Common 7 | 8 | # https://www.postgresql.org/docs/current/sql-createtype.html 9 | # CREATE TYPE public.case_payment_status AS ENUM ( 10 | # 'paid', 11 | # 'unpaid', 12 | # 'partial' 13 | # ); 14 | 15 | whitespace = Common.whitespace() 16 | name = Common.name() 17 | 18 | schema_name = name 19 | bare_table_name = name |> unwrap_and_tag(:name) 20 | schema_qualified_table_name = 21 | schema_name |> ignore(ascii_char([?.])) |> concat(name) |> tag(:name) 22 | 23 | type_name = choice([schema_qualified_table_name, bare_table_name]) 24 | 25 | label_name = 26 | ignore(ascii_char([?'])) 27 | |> utf8_string([{:not, ?'}], min: 1) 28 | |> ignore(ascii_char([?'])) 29 | 30 | labels = 31 | ignore(ascii_char([?(])) 32 | |> times( 33 | ignore(optional(whitespace)) 34 | |> concat(label_name) 35 | |> ignore(optional(ascii_char([?,]))) 36 | |> ignore(optional(whitespace)), min: 1) 37 | |> ignore(ascii_char([?)])) 38 | |> ignore(optional(whitespace)) 39 | |> tag(:labels) 40 | 41 | create_type = 42 | ignore(string("CREATE TYPE")) 43 | |> ignore(whitespace) 44 | |> concat(type_name) 45 | |> ignore(whitespace) 46 | |> ignore(string("AS ENUM")) 47 | |> ignore(whitespace) 48 | |> concat(labels) 49 | |> ignore(ascii_char([?;])) 50 | |> ignore(optional(whitespace)) 51 | |> reduce({Enum, :into, [%{}]}) 52 | 53 | match_create_type = 54 | ignore(string("CREATE TYPE")) 55 | 56 
defmodule EctoExtractMigrations.Parsers.CreateView do
  @moduledoc "Parser for CREATE VIEW."

  import NimbleParsec

  alias EctoExtractMigrations.Parsers.Common

  # https://www.postgresql.org/docs/current/sql-createview.html

  whitespace = Common.whitespace()
  identifier = Common.name()

  # A view name is either schema-qualified ("public.foo") or bare ("foo").
  qualified_name =
    identifier
    |> ignore(ascii_char([?.]))
    |> concat(identifier)
    |> tag(:name)

  bare_name = identifier |> unwrap_and_tag(:name)

  view_name = choice([qualified_name, bare_name])

  # The SELECT body of the view: everything up to the terminating semicolon.
  select_body = utf8_string([{:not, ?;}], min: 1)

  create_view =
    ignore(string("CREATE VIEW"))
    |> ignore(whitespace)
    |> concat(view_name)
    |> ignore(whitespace)
    |> ignore(string("AS"))
    |> ignore(whitespace)
    |> ignore(select_body)
    |> ignore(ascii_char([?;]))
    |> ignore(optional(whitespace))
    |> reduce({Enum, :into, [%{}]})

  # Cheap prefix matcher used to detect the start of a statement.
  match_create_view = ignore(string("CREATE VIEW"))

  defparsec :parsec_parse, create_view
  defparsec :parsec_match, match_create_view

  # Start parsing with an empty SQL accumulator.
  def parse(line), do: parse(line, %{sql: ""})

  # Append `line` to the accumulated SQL and attempt a full parse.
  # Returns {:ok, value} on success, or {:continue, state} so the caller
  # can feed the next line of a multi-line statement.
  def parse(line, %{sql: acc} = state) do
    sql = acc <> line

    case parsec_parse(sql) do
      {:ok, [value], _, _, _, _} -> {:ok, value}
      {:error, reason, _, _, _, _} -> {:continue, Map.merge(state, %{sql: sql, error: reason})}
    end
  end

  # Check whether `line` begins a CREATE VIEW statement, then attempt a full
  # parse: {:ok, value} for a complete single-line statement, {:continue,
  # state} when more lines are needed, {:error, reason} on no match.
  def match(line) do
    with {:ok, _, _, _, _, _} <- parsec_match(line) do
      case parsec_parse(line) do
        {:ok, [value], _, _, _, _} -> {:ok, value}
        {:error, reason, _, _, _, _} -> {:continue, %{sql: line, error: reason}}
      end
    else
      {:error, reason, _, _, _, _} -> {:error, reason}
    end
  end
end
defmodule EctoExtractMigrations.Reference do
  @moduledoc "Convert parsed foreign-key data into Ecto `references/2` calls."

  # Example input:
  # %{action: :add_table_constraint, column: ["user_id"],
  #   constraint_name: "assignment_care_taker_id_fkey",
  #   references_column: ["id"], references_table: ["chat", "user"],
  #   table: ["chat", "assignment"], type: :foreign_key}

  @doc "Convert alter table data structure into Ecto migration references()"
  def column_reference(data) do
    # references_table is expected to be schema-qualified: [schema, table]
    [_schema, table] = data[:references_table]

    # Collect only the keys present in the parsed data, in a stable order.
    keys = [:constraint_name, :references_column, :references_table, :on_delete, :on_update]
    opts = for key <- keys, Map.has_key?(data, key), do: {key, data[key]}

    opts =
      opts
      |> Enum.map(&map_value/1)
      |> Enum.reject(fn {_key, value} -> value == nil end)

    ast =
      quote do
        references(unquote(table), unquote(opts))
      end

    Macro.to_string(ast)
  end

  # Map parsed SQL attributes to Ecto references/2 options.
  defp map_value({:constraint_name, value}), do: {:name, value}
  defp map_value({:references_column, [value]}), do: {:column, value}
  # "public" is the default schema, so no :prefix option is emitted.
  defp map_value({:references_table, ["public", _table]}), do: {:prefix, nil}
  defp map_value({:references_table, [schema, _table]}), do: {:prefix, schema}
  defp map_value({:on_delete, :cascade}), do: {:on_delete, :delete_all}
  defp map_value({:on_delete, :restrict}), do: {:on_delete, :restrict}
  defp map_value({:on_delete, :set_null}), do: {:on_delete, :nilify_all}
  defp map_value({:on_delete, _}), do: {:on_delete, nil}
  # BUG FIX: ON UPDATE CASCADE maps to :update_all, not :delete_all — the
  # previous value was a copy-paste from the :on_delete clause. Ecto's
  # references/2 accepts :nothing | :update_all | :nilify_all | :restrict
  # for the :on_update option.
  defp map_value({:on_update, :cascade}), do: {:on_update, :update_all}
  defp map_value({:on_update, :restrict}), do: {:on_update, :restrict}
  defp map_value({:on_update, :set_null}), do: {:on_update, :nilify_all}
  defp map_value({:on_update, _}), do: {:on_update, nil}
end
53 | # end 54 | 55 | # Group results by type 56 | by_type = Enum.group_by(results, &(&1.type)) 57 | # Mix.shell().info("types: #{inspect Map.keys(by_type)}") 58 | 59 | # Collect ALTER SEQUENCE statements 60 | as_objects = Enum.group_by(by_type[:alter_sequence] || [], &alter_sequence_type/1) 61 | 62 | # Collect ALTER TABLE statements 63 | at_objects = Enum.group_by(by_type[:alter_table] || [], &alter_table_type/1) 64 | 65 | # Collect table primary keys from ALTER TABLE statements 66 | primary_keys = 67 | for %{data: data} <- at_objects[:primary_key] || [], into: %{} do 68 | {data.table, data.primary_key} 69 | end 70 | 71 | # Collect table defaults from ALTER TABLE statements 72 | # column_defaults = 73 | # for result <- at_objects[:default], reduce: %{} do 74 | # acc -> 75 | # %{table: table, column: column, default: default} = result.data 76 | # value = acc[table] || %{} 77 | # Map.put(acc, table, Map.put(value, column, default)) 78 | # end 79 | 80 | # Collect table foreign key constraints from ALTER TABLE statements 81 | # foreign_keys = 82 | # for result <- at_objects[:foreign_key], reduce: %{} do 83 | # acc -> 84 | # data = result.data 85 | # column_reference = Reference.column_reference(data) 86 | # Mix.shell().info("foreign_key> #{inspect result}\n#{inspect column_reference}") 87 | # %{table: table, columns: columns} = data 88 | # value = acc[table] || %{} 89 | # column = List.first(columns) 90 | # Map.put(acc, table, Map.put(value, column, data)) 91 | # end 92 | 93 | # Base bindings for templates 94 | bindings = [ 95 | repo: repo, 96 | ] 97 | 98 | # Create extensions, schemas and types 99 | phase_1 = 100 | for object_type <- [:create_extension, :create_schema, :create_type, :create_function], 101 | object <- by_type[object_type] || [] do 102 | %{module: module, sql: sql, data: data, line_num: line_num} = object 103 | 104 | Mix.shell().info("SQL #{line_num} #{object_type}\n#{inspect data}") 105 | Mix.shell().info(sql) 106 | 107 | data = Map.put(data, :sql, 
sql) 108 | {:ok, migration} = module.migration(data, bindings) 109 | file_name = module.file_name(data, bindings) 110 | 111 | Mix.shell().info(file_name) 112 | Mix.shell().info(migration) 113 | 114 | {file_name, migration} 115 | end 116 | 117 | # Create sequences, merging multiple into one migration 118 | statements = for %{data: data, sql: sql} <- by_type[:create_sequence] do 119 | name = EctoExtractMigrations.object_name(data.name) 120 | down_sql = "DROP SEQUENCE IF EXISTS #{name}" 121 | EctoExtractMigrations.eval_template_execute_sql(sql, down_sql) 122 | end 123 | call_bindings = Keyword.merge([ 124 | module_name: Enum.join([repo, "Migrations.Sequences"], "."), statements: statements], bindings) 125 | {:ok, migration} = EctoExtractMigrations.eval_template_file("multi_statement.eex", call_bindings) 126 | file_name = "sequences.exs" 127 | Mix.shell().info(file_name) 128 | create_sequences_migration = [{file_name, migration}] 129 | 130 | # Create tables 131 | object_type = :create_table 132 | create_table_migrations = 133 | for %{module: module, sql: sql, data: data, line_num: line_num} <- by_type[object_type], 134 | # Skip schema_migrations table as it is created by ecto.migrate itself 135 | data.name != ["public", "schema_migrations"] do 136 | 137 | data = 138 | data 139 | |> Map.put(:sql, sql) 140 | |> table_set_pk(primary_keys[data.name]) 141 | # |> table_set_default(column_defaults[data.name]) 142 | 143 | Mix.shell().info("\nSQL #{line_num} #{object_type}\n#{inspect data}") 144 | Mix.shell().info(sql) 145 | 146 | {:ok, migration} = module.migration(data, bindings) 147 | file_name = module.file_name(data, bindings) 148 | 149 | Mix.shell().info(file_name) 150 | Mix.shell().info(migration) 151 | 152 | {file_name, migration} 153 | end 154 | 155 | # Create ALTER SEQUENCE OWNED BY associating sequence with table primary key 156 | # data: %{owned_by: [table: ["chat", "assignment"], column: "id"], sequence: ["chat", "assignment_id_seq"]}, 157 | statements = for %{sql: 
sql} <- as_objects[:owned_by], do: EctoExtractMigrations.eval_template_execute_sql(sql) 158 | call_bindings = Keyword.merge([ 159 | module_name: Enum.join([repo, "Migrations.AlterSequences"], "."), 160 | statements: statements 161 | ], bindings) 162 | {:ok, migration} = EctoExtractMigrations.eval_template_file("multi_statement.eex", call_bindings) 163 | file_name = "alter_sequences_owned_by.exs" 164 | Mix.shell().info(file_name) 165 | alter_sequences_owned_by_migration = [{file_name, migration}] 166 | 167 | # Create views, triggers, and indexes 168 | phase_3 = 169 | for object_type <- [:create_view, :create_trigger, :create_index], object <- by_type[object_type] || [] do 170 | %{module: module, sql: sql, data: data, line_num: line_num} = object 171 | 172 | Mix.shell().info("\nSQL #{line_num} #{object_type}\n#{inspect data}") 173 | Mix.shell().info(sql) 174 | 175 | data = Map.put(data, :sql, sql) 176 | {:ok, migration} = module.migration(data, bindings) 177 | file_name = module.file_name(data, bindings) 178 | 179 | Mix.shell().info(file_name) 180 | Mix.shell().info(migration) 181 | 182 | {file_name, migration} 183 | end 184 | 185 | # Create foreign keys and unique constraints 186 | # phase_4 = 187 | # for object_type <- [:foreign_key, :unique], object <- at_objects[object_type] do 188 | # %{sql: sql, data: data, line_num: line_num} = object 189 | # 190 | # Mix.shell().info("SQL #{line_num} #{object_type}\n#{inspect data}") 191 | # Mix.shell().info(sql) 192 | # 193 | # data = Map.put(data, :sql, sql) 194 | # module = migration_module(object_type) 195 | # {:ok, migration} = module.migration(data, bindings) 196 | # file_name = module.file_name(data, bindings) 197 | # 198 | # Mix.shell().info(file_name) 199 | # Mix.shell().info(migration) 200 | # 201 | # {file_name, migration} 202 | # end 203 | 204 | # Mix.shell().info("alter table types: #{inspect Map.keys(at_objects)}") 205 | 206 | # Create ALTER TABLE 207 | statements = 208 | for action <- [:default, :foreign_key, 
:unique], %{sql: sql} <- at_objects[action] do 209 | EctoExtractMigrations.eval_template_execute_sql(sql) 210 | end 211 | call_bindings = Keyword.merge([statements: statements, 212 | module_name: Enum.join([repo, "Migrations.AlterTables"], ".")], bindings) 213 | {:ok, migration} = EctoExtractMigrations.eval_template_file("multi_statement.eex", call_bindings) 214 | file_name = "alter_tables.exs" 215 | Mix.shell().info(file_name) 216 | alter_tables = [{file_name, migration}] 217 | 218 | # Generate ALTER TABLE CHECK constraints from CREATE TABLE constraints 219 | statements = 220 | for %{table: table, constraints: constraints} <- Enum.flat_map(results, &get_table_constraints/1), 221 | %{check: check, name: constraint_name} <- constraints do 222 | table_name = Enum.join(table, ".") 223 | sql = "ALTER TABLE #{table_name} ADD CONSTRAINT #{constraint_name} CHECK #{check}" 224 | # Could also generate for Ecto constraint(table, constraint_name, check: check) 225 | EctoExtractMigrations.eval_template_execute_sql(sql) 226 | end 227 | call_bindings = Keyword.merge([statements: statements, 228 | module_name: Enum.join([repo, "Migrations.AlterTable.CheckConstraints"], ".")], bindings) 229 | {:ok, migration} = EctoExtractMigrations.eval_template_file("multi_statement.eex", call_bindings) 230 | file_name = "alter_table_check_constraints.exs" 231 | Mix.shell().info(file_name) 232 | alter_table_check_contraints = [{file_name, migration}] 233 | 234 | # Write migrations to file 235 | files = List.flatten([ 236 | phase_1, 237 | create_sequences_migration, 238 | create_table_migrations, 239 | alter_sequences_owned_by_migration, 240 | phase_3, 241 | alter_tables, 242 | alter_table_check_contraints, 243 | ]) 244 | for {{file_name, migration}, index} <- Enum.with_index(files, 1) do 245 | path = Path.join(migrations_path, "#{to_prefix(index)}_#{file_name}") 246 | Mix.shell().info("#{path}") 247 | :ok = File.write(path, migration) 248 | end 249 | end 250 | 251 | # def 
migration_module(:foreign_key), do: EctoExtractMigrations.Migrations.ForeignKey 252 | # def migration_module(:unique), do: EctoExtractMigrations.Migrations.Unique 253 | 254 | # Get constraint type 255 | # ALTER TABLE ADD CONSTRAINT PRIMARY KEY 256 | def alter_table_type(%{data: %{action: :add_table_constraint, type: :primary_key}}), do: :primary_key 257 | # ALTER TABLE ADD CONSTRAINT FOREIGN KEY 258 | def alter_table_type(%{data: %{action: :add_table_constraint, type: :foreign_key}}), do: :foreign_key 259 | # ALTER TABLE ALTER COLUMN id SET DEFAULT 260 | def alter_table_type(%{data: %{action: :set_default}}), do: :default 261 | # ALTER TABLE ADD CONSTRAINT UNIQUE 262 | def alter_table_type(%{data: %{action: :add_table_constraint, type: :unique}}), do: :unique 263 | 264 | # Get alter sequence type 265 | # ALTER SEQUENCE chat.assignment_id_seq OWNED BY chat.assignment.id; 266 | def alter_sequence_type(%{data: %{owned_by: _}}), do: :owned_by 267 | 268 | # Set primary_key: true on column if it is part of table primary key 269 | def table_set_pk(data, nil), do: data 270 | def table_set_pk(data, pk) do 271 | Mix.shell().info("setting pk: #{inspect data.name} #{inspect pk}") 272 | columns = data[:columns] 273 | # Mix.shell().info("setting pk columns: #{inspect columns}") 274 | columns = Enum.map(columns, &(column_set_pk(&1, pk))) 275 | # Mix.shell().info("setting pk columns: #{inspect columns}") 276 | %{data | columns: columns} 277 | end 278 | 279 | def column_set_pk(column, pk) do 280 | if column.name in pk do 281 | Mix.shell().info("setting pk column: #{inspect column}") 282 | Map.put(column, :primary_key, true) 283 | else 284 | column 285 | end 286 | end 287 | 288 | # Set default on column based on alter table 289 | def table_set_default(data, nil), do: data 290 | def table_set_default(data, defaults) do 291 | Mix.shell().info("setting default: #{inspect data.name} #{inspect defaults}") 292 | columns = Enum.map(data[:columns], &(column_set_default(&1, defaults))) 293 | 
%{data | columns: columns} 294 | end 295 | 296 | def column_set_default(data, defaults) do 297 | case Map.fetch(defaults, data.name) do 298 | {:ok, default} -> 299 | Map.put(data, :default, default) 300 | :error -> 301 | data 302 | end 303 | end 304 | 305 | def get_table_constraints(%{type: :create_table, data: %{name: name, constraints: constraints}}) do 306 | [%{table: name, constraints: constraints}] 307 | end 308 | def get_table_constraints(_), do: [] 309 | 310 | # Format numeric index as string with leading zeroes for filenames 311 | @spec to_prefix(integer) :: binary 312 | defp to_prefix(index) do 313 | to_string(:io_lib.format('~4..0b', [index])) 314 | end 315 | end 316 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule EctoExtractMigrations.MixProject do 2 | use Mix.Project 3 | 4 | @github "https://github.com/cogini/ecto_extract_migrations" 5 | 6 | def project do 7 | [ 8 | app: :ecto_extract_migrations, 9 | version: "0.2.0", 10 | elixir: "~> 1.10", 11 | build_embedded: Mix.env() == :prod, 12 | start_permanent: Mix.env() == :prod, 13 | description: description(), 14 | package: package(), 15 | docs: docs(), 16 | deps: deps(), 17 | dialyzer: [ 18 | plt_add_apps: [:mix, :eex] 19 | # plt_add_deps: true, 20 | # flags: ["-Werror_handling", "-Wrace_conditions"], 21 | # flags: ["-Wunmatched_returns", :error_handling, :race_conditions, :underspecs], 22 | # ignore_warnings: "dialyzer.ignore-warnings" 23 | ] 24 | ] 25 | end 26 | 27 | def application do 28 | [ 29 | extra_applications: [:logger, :eex] 30 | ] 31 | end 32 | 33 | defp deps do 34 | [ 35 | {:nimble_parsec, "~> 0.6"}, 36 | {:dialyxir, "~> 0.5.1", only: [:dev, :test], runtime: false}, 37 | {:ex_doc, "~> 0.23", only: :dev, runtime: false} 38 | ] 39 | end 40 | 41 | defp description do 42 | "Mix task to generate Ecto migrations from SQL schema file" 43 | end 44 | 45 | defp 
package do 46 | [ 47 | maintainers: ["Jake Morrison"], 48 | licenses: ["Apache 2.0"], 49 | links: %{"GitHub" => @github} 50 | ] 51 | end 52 | 53 | defp docs do 54 | [ 55 | main: "readme", 56 | extras: ["README.md", "CHANGELOG.md"], 57 | source_url: @github, 58 | skip_undefined_reference_warnings_on: ["CHANGELOG.md"] 59 | ] 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "dialyxir": {:hex, :dialyxir, "0.5.1", "b331b091720fd93e878137add264bac4f644e1ddae07a70bf7062c7862c4b952", [:mix], [], "hexpm", "6c32a70ed5d452c6650916555b1f96c79af5fc4bf286997f8b15f213de786f73"}, 3 | "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, 4 | "ex_doc": {:hex, :ex_doc, "0.23.0", "a069bc9b0bf8efe323ecde8c0d62afc13d308b1fa3d228b65bca5cf8703a529d", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm", "f5e2c4702468b2fd11b10d39416ddadd2fcdd173ba2a0285ebd92c39827a5a16"}, 5 | "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"}, 6 | "makeup_elixir": {:hex, :makeup_elixir, "0.14.1", "4f0e96847c63c17841d42c08107405a005a2680eb9c7ccadfd757bd31dabccfb", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f2438b1a80eaec9ede832b5c41cd4f373b38fd7aa33e3b22d9db79e640cbde11"}, 7 | "nimble_parsec": {:hex, :nimble_parsec, "0.6.0", "32111b3bf39137144abd7ba1cce0914533b2d16ef35e8abc5ec8be6122944263", 
[:mix], [], "hexpm", "27eac315a94909d4dc68bc07a4a83e06c8379237c5ea528a9acff4ca1c873c52"}, 8 | } 9 | -------------------------------------------------------------------------------- /priv/templates/execute_sql.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= module_name %> do 2 | use Ecto.Migration 3 | 4 | def up do 5 | execute( 6 | """ 7 | <%= Regex.replace(~r/^/m, up_sql, " ") %> 8 | """) 9 | end 10 | 11 | <%= if down_sql do %> 12 | def down do 13 | execute( 14 | """ 15 | <%= Regex.replace(~r/^/m, down_sql, " ") %> 16 | 17 | """) 18 | end 19 | <% end %> 20 | end 21 | -------------------------------------------------------------------------------- /priv/templates/multi_statement.eex: -------------------------------------------------------------------------------- 1 | defmodule <%= module_name %> do 2 | use Ecto.Migration 3 | 4 | def up do 5 | <%= for statement <- statements do %> 6 | <%= statement %> 7 | <% end %> 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /test/alter_sequence.exs: -------------------------------------------------------------------------------- 1 | defmodule AlterSequenceTest do 2 | use ExUnit.Case 3 | 4 | alias EctoExtractMigrations.Parsers.AlterSequence 5 | 6 | test "parse" do 7 | expected = %{ 8 | owned_by: [table: ["public", "app_version"], column: "id"], sequence: ["public", "app_version_id_seq"] 9 | } 10 | assert {:ok, expected} == AlterSequence.parse(""" 11 | ALTER SEQUENCE public.app_version_id_seq OWNED BY public.app_version.id; 12 | """) 13 | end 14 | 15 | describe "match/1" do 16 | test "one line" do 17 | expected = %{ 18 | owned_by: [table: ["public", "data_table_2593"], column: "rowid"], sequence: ["public", "data_table_2593_rowid_seq"] 19 | } 20 | assert {:ok, expected} == AlterSequence.match(""" 21 | ALTER SEQUENCE public.data_table_2593_rowid_seq OWNED BY public.data_table_2593.rowid; 22 | """) 23 | end 24 | test 
"multiline" do 25 | expected = %{ 26 | owned_by: [table: ["public", "data_table_2593"], column: "rowid"], sequence: ["public", "data_table_2593_rowid_seq"] 27 | } 28 | assert {:ok, expected} == AlterSequence.match(""" 29 | ALTER SEQUENCE public.data_table_2593_rowid_seq 30 | OWNED BY public.data_table_2593.rowid; 31 | """) 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /test/alter_table_test.exs: -------------------------------------------------------------------------------- 1 | defmodule AlterTableTest do 2 | use ExUnit.Case 3 | 4 | alias EctoExtractMigrations.Parsers.AlterTable 5 | 6 | test "primary_key" do 7 | expected = %{ 8 | action: :add_table_constraint, 9 | constraint_name: "assignment_pkey", 10 | primary_key: ["id"], 11 | table: ["chat", "assignment"], 12 | type: :primary_key, 13 | } 14 | assert {:ok, expected} == AlterTable.parse(""" 15 | ALTER TABLE ONLY chat.assignment 16 | ADD CONSTRAINT assignment_pkey PRIMARY KEY (id); 17 | """) 18 | 19 | expected = %{ 20 | action: :add_table_constraint, 21 | constraint_name: "message_pkey", 22 | primary_key: ["id"], 23 | table: ["chat", "message"], 24 | type: :primary_key, 25 | } 26 | assert {:ok, expected} == AlterTable.parse(""" 27 | ALTER TABLE ONLY chat.message 28 | ADD CONSTRAINT message_pkey PRIMARY KEY (id); 29 | """) 30 | 31 | expected = %{ 32 | action: :add_table_constraint, 33 | constraint_name: "message_upload_pkey", 34 | primary_key: ["uuid"], 35 | table: ["chat", "message_upload"], 36 | type: :primary_key, 37 | } 38 | assert {:ok, expected} == AlterTable.parse(""" 39 | ALTER TABLE ONLY chat.message_upload 40 | ADD CONSTRAINT message_upload_pkey PRIMARY KEY (uuid); 41 | """) 42 | 43 | expected = %{ 44 | action: :add_table_constraint, 45 | constraint_name: "pending_chunk_pkey", 46 | primary_key: ["uuid", "chunk"], 47 | table: ["chat", "pending_chunk"], 48 | type: :primary_key, 49 | } 50 | assert {:ok, expected} == AlterTable.parse(""" 51 | 
ALTER TABLE ONLY chat.pending_chunk 52 | ADD CONSTRAINT pending_chunk_pkey PRIMARY KEY (uuid, chunk); 53 | """) 54 | end 55 | 56 | test "unique" do 57 | expected = %{ 58 | action: :add_table_constraint, 59 | constraint_name: "session_token_key", 60 | table: ["chat", "session"], 61 | columns: ["token"], 62 | type: :unique, 63 | } 64 | assert {:ok, expected} == AlterTable.parse(""" 65 | ALTER TABLE ONLY chat.session 66 | ADD CONSTRAINT session_token_key UNIQUE (token); 67 | """) 68 | 69 | expected = %{ 70 | action: :add_table_constraint, 71 | constraint_name: "access_case_facility_case_id_key", 72 | table: ["public", "access_case_facility"], 73 | type: :unique, 74 | columns: ["case_id", "facility_id"] 75 | } 76 | assert {:ok, expected} == AlterTable.parse(""" 77 | ALTER TABLE ONLY public.access_case_facility 78 | ADD CONSTRAINT access_case_facility_case_id_key UNIQUE (case_id, facility_id); 79 | """) 80 | end 81 | 82 | test "column constraint" do 83 | expected = %{ 84 | action: :set_default, 85 | table: ["chat", "assignment"], 86 | column: "id", 87 | default: {:fragment, "nextval('chat.assignment_id_seq'::regclass)"} 88 | } 89 | assert {:ok, expected} == AlterTable.parse(""" 90 | ALTER TABLE ONLY chat.assignment 91 | ALTER COLUMN id SET DEFAULT nextval('chat.assignment_id_seq'::regclass); 92 | """) 93 | end 94 | 95 | test "foreign key" do 96 | expected = %{ 97 | action: :add_table_constraint, 98 | columns: ["user_id"], 99 | table: ["chat", "assignment"], 100 | constraint_name: "assignment_care_taker_id_fkey", 101 | references_columns: ["id"], 102 | references_table: ["chat", "user"], 103 | type: :foreign_key, 104 | } 105 | assert {:ok, expected} == AlterTable.parse(""" 106 | ALTER TABLE ONLY chat.assignment 107 | ADD CONSTRAINT assignment_care_taker_id_fkey FOREIGN KEY (user_id) REFERENCES chat."user"(id); 108 | """) 109 | 110 | expected = %{ 111 | action: :add_table_constraint, 112 | columns: ["facility_id"], 113 | constraint_name: 
"access_case_facility_facility_id_fkey", 114 | references_columns: ["id"], 115 | references_table: ["public", "facility"], 116 | table: ["public", "access_case_facility"], 117 | on_delete: :cascade, 118 | type: :foreign_key 119 | } 120 | assert {:ok, expected} == AlterTable.parse(""" 121 | ALTER TABLE ONLY public.access_case_facility 122 | ADD CONSTRAINT access_case_facility_facility_id_fkey FOREIGN KEY (facility_id) REFERENCES public.facility(id) ON DELETE CASCADE; 123 | """) 124 | 125 | expected = %{ 126 | action: :add_table_constraint, 127 | columns: ["cache_table_id"], 128 | constraint_name: "cache_table_row_cache_table_id_fkey", 129 | references_columns: ["id"], 130 | references_table: ["public", "cache_table"], 131 | table: ["public", "cache_table_row"], 132 | on_update: :cascade, 133 | on_delete: :cascade, 134 | type: :foreign_key, 135 | } 136 | assert {:ok, expected} == AlterTable.parse(""" 137 | ALTER TABLE ONLY public.cache_table_row 138 | ADD CONSTRAINT cache_table_row_cache_table_id_fkey FOREIGN KEY (cache_table_id) REFERENCES public.cache_table(id) ON UPDATE CASCADE ON DELETE CASCADE; 139 | """) 140 | 141 | expected = %{ 142 | action: :add_table_constraint, 143 | columns: ["creator_id"], 144 | constraint_name: "report_query_creator_id_fkey", 145 | references_columns: ["id"], 146 | references_table: ["public", "click_user"], 147 | table: ["public", "report_query"], 148 | on_delete: :set_null, 149 | type: :foreign_key, 150 | } 151 | assert {:ok, expected} == AlterTable.parse(""" 152 | ALTER TABLE ONLY public.report_query 153 | ADD CONSTRAINT report_query_creator_id_fkey FOREIGN KEY (creator_id) REFERENCES public.click_user(id) ON DELETE SET NULL; 154 | """) 155 | end 156 | 157 | def value({:ok, value, "", _, _, _}), do: value 158 | def value({:error, value, _, _, _, _}), do: value 159 | end 160 | -------------------------------------------------------------------------------- /test/comment_test.exs: 
# ===========================================================================
# /test/comment_test.exs
# ===========================================================================
defmodule CommentTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.Comment

  # A SQL line comment parses to its text regardless of leading whitespace
  # or a missing space after the "--" marker.
  test "parse" do
    for input <- ["-- Some text\n", " -- Some text\n", " --Some text\n"] do
      assert Comment.parse(input) == {:ok, %{comment: "Some text"}}
    end
  end
end

# ===========================================================================
# /test/create_extension_test.exs
# ===========================================================================
defmodule CreateExtensionTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateExtension

  # Extension name and target schema are both extracted.
  test "parse" do
    result =
      CreateExtension.parse("CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;")

    assert result == {:ok, %{name: "pg_stat_statements", schema: "public"}}
  end
end

# ===========================================================================
# /test/create_function_test.exs
# ===========================================================================
defmodule CreateFunctionTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateFunction

  # Only the qualified function name and the dollar-quote delimiter are
  # extracted; the PL/pgSQL body itself is not interpreted.
  # NOTE(review): leading whitespace inside the SQL fixtures was
  # reconstructed from a mangled source — confirm the parser is
  # whitespace-insensitive or restore the original layout.
  describe "parse" do
    test "create function" do
      sql = """
      CREATE FUNCTION public.cast_to_decimal(text, numeric) RETURNS numeric
          LANGUAGE plpgsql IMMUTABLE
          AS $_$
      begin
        return cast($1 as decimal);
      exception
        when invalid_text_representation then
          return $2;
      end;
      $_$;
      """

      # Custom "$_$" delimiter is reported back to the caller.
      assert CreateFunction.parse(sql) ==
               {:ok, %{name: ["public", "cast_to_decimal"], delimiter: "$_$"}}

      sql = """
      CREATE FUNCTION public.ensure_access_case_facility(arg_case_id integer, arg_facility_id integer) RETURNS void
          LANGUAGE plpgsql
          AS $$
      DECLARE
      BEGIN
          INSERT INTO access_case_facility (case_id, facility_id)
          SELECT arg_case_id, arg_facility_id
          WHERE NOT EXISTS (
              SELECT 1 FROM access_case_facility
              WHERE case_id = arg_case_id
              AND facility_id = arg_facility_id);
      END;
      $$;
      """

      assert CreateFunction.parse(sql) ==
               {:ok, %{name: ["public", "ensure_access_case_facility"], delimiter: "$$"}}
    end
  end
end

# ===========================================================================
# /test/create_index_test.exs
# ===========================================================================
defmodule CreateIndexTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateIndex

  test "create_index" do
    # Simple single-column btree index: key comes back as an atom list.
    sql = """
    CREATE INDEX t_eligibility_member_id_idx ON bnd.t_eligibility USING btree (member_id);
    """

    assert CreateIndex.parse(sql) ==
             {:ok,
              %{
                key: [:member_id],
                name: "t_eligibility_member_id_idx",
                table_name: ["bnd", "t_eligibility"],
                using: "btree"
              }}

    # Expression index: the key is preserved as a raw SQL fragment string.
    sql = """
    CREATE INDEX jpatient_refill_idx ON public.patient USING btree (((patient_fields ->> 'refill'::text)));
    """

    assert CreateIndex.parse(sql) ==
             {:ok,
              %{
                key: ["(patient_fields ->> 'refill'::text)"],
                name: "jpatient_refill_idx",
                table_name: ["public", "patient"],
                using: "btree"
              }}

    # Partial unique index: both the unique flag and the WHERE clause are kept.
    sql = """
    CREATE UNIQUE INDEX patient_facility_id_email_idx ON public.patient USING btree (facility_id, email) WHERE ((facility_id IS NOT NULL) AND (email IS NOT NULL));
    """

    assert CreateIndex.parse(sql) ==
             {:ok,
              %{
                key: [:facility_id, :email],
                name: "patient_facility_id_email_idx",
                table_name: ["public", "patient"],
                using: "btree",
                unique: true,
                where: "(facility_id IS NOT NULL) AND (email IS NOT NULL)"
              }}
  end
end
# ===========================================================================
# /test/create_schema_test.exs
# ===========================================================================
defmodule CreateSchemaTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateSchema

  # Schema names may appear bare or double-quoted; the parser strips quotes.
  test "parse" do
    assert {:ok, %{name: "foo"}} == CreateSchema.parse("CREATE SCHEMA foo;")
    assert {:ok, %{name: "foo"}} == CreateSchema.parse("CREATE SCHEMA \"foo\";")
  end
end

# ===========================================================================
# /test/create_sequence_test.exs
# ===========================================================================
defmodule CreateSequenceTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateSequence

  # NOTE(review): leading whitespace inside the SQL fixtures in this file and
  # below was reconstructed from a mangled source; confirm the parsers are
  # whitespace-insensitive or restore the original layout.
  test "create_sequence" do
    sql = """
    CREATE SEQUENCE chat.assignment_id_seq
        START WITH 1
        INCREMENT BY 1
        NO MINVALUE
        NO MAXVALUE
        CACHE 1;
    """
    # "NO MINVALUE" / "NO MAXVALUE" parse to `false`, not nil.
    expected = %{
      name: ["chat", "assignment_id_seq"],
      cache: 1,
      increment: 1,
      maxvalue: false,
      minvalue: false,
      start: 1,
    }
    assert {:ok, expected} == CreateSequence.parse(sql)
  end
end

# ===========================================================================
# /test/create_table_test.exs
# ===========================================================================
defmodule CreateTableTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateTable

  # match/1 drives incremental parsing: a complete statement yields
  # {:ok, result}; an incomplete one yields {:continue, state} so the caller
  # can feed in more lines.
  test "match" do
    assert {:ok, %{columns: [], name: "device"}} == CreateTable.match("CREATE TABLE device ();")
    assert {:continue, %{sql: "CREATE TABLE device"}} = CreateTable.match("CREATE TABLE device")
  end

  # Table names may be bare, double-quoted, or schema-qualified; qualified
  # names come back as [schema, table] lists.
  test "parse_table_name" do
    assert {:ok, [{:name, "foo"}]} == CreateTable.parse_table_name("foo")
    assert {:ok, [{:name, "foo"}]} == CreateTable.parse_table_name("\"foo\"")
    # NOTE(review): duplicate of the previous assertion.
    assert {:ok, [{:name, "foo"}]} == CreateTable.parse_table_name("\"foo\"")
    assert {:ok, [{:name, ["public", "foo"]}]} == CreateTable.parse_table_name("public.foo")
    assert {:ok, [{:name, ["public", "foo"]}]} == CreateTable.parse_table_name("public.\"foo\"")
  end

  test "parse_create_table" do
    assert {:ok, %{columns: [], name: "device"}} == CreateTable.parse("CREATE TABLE device ();")
    assert {:ok, %{columns: [], name: ["public", "data_table_974"]}} == CreateTable.parse("CREATE TABLE public.data_table_974 ();")
    assert {:ok, %{columns: [], name: ["public", "data_table__tamil__form"]}} == CreateTable.parse("CREATE TABLE public.data_table__tamil__form ();")
    assert {:ok, %{columns: [], name: ["public", "device"]}} == CreateTable.parse("CREATE TABLE public.device ();")
  end

  # parse_column/1 handles a single column definition: defaults (with
  # ::casts), NOT NULL, varchar sizes, numeric precision/scale, quoted
  # identifiers, inline PRIMARY KEY, and REFERENCES clauses.
  test "column" do
    input = "contact character varying(64) DEFAULT 'email'::character varying NOT NULL"
    expected = [%{default: "email", name: "contact", size: 64, type: :"character varying", null: false}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "name character varying(128) DEFAULT ''::character varying NOT NULL"
    expected = [%{default: "", name: "name", null: false, size: 128, type: :"character varying"}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "is_active boolean DEFAULT true NOT NULL"
    expected = [%{default: true, name: "is_active", null: false, type: :boolean}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "is_student boolean DEFAULT false NOT NULL"
    expected = [%{default: false, name: "is_student", null: false, type: :boolean}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    # Quoted identifier with embedded spaces.
    input = ~s|"PRIM CHRONIC COND" character varying(50)|
    expected = [%{name: "PRIM CHRONIC COND", type: :"character varying", size: 50}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    # Trailing comma (as inside a CREATE TABLE body) is accepted.
    expected = [%{name: "uid", null: false, primary_key: true, type: :bytea}]
    input = "uid BYTEA NOT NULL PRIMARY KEY,"
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "isPersistent BOOLEAN NOT NULL DEFAULT FALSE"
    expected = [%{name: "isPersistent", null: false, type: :boolean, default: false}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "size INTEGER NOT NULL DEFAULT 0,"
    expected = [%{default: 0, name: "size", null: false, type: :integer}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "member_id character varying(50) NOT NULL"
    expected = [%{name: "member_id", null: false, size: 50, type: :"character varying"}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "mbi character varying(50)"
    expected = [%{name: "mbi", size: 50, type: :"character varying"}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "admit_risk numeric(18,2)"
    expected = [%{name: "admit_risk", precision: 18, scale: 2, type: :numeric}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "admit_risk numeric(18)"
    expected = [%{name: "admit_risk", precision: 18, type: :numeric}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "admit_risk numeric"
    expected = [%{name: "admit_risk", type: :numeric}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    input = "avatar_id INTEGER REFERENCES warp_avatar(id) ON DELETE CASCADE"
    expected = [%{name: "avatar_id", type: :integer, on_delete: :cascade, references_column: ["id"], references_table: "warp_avatar"}]
    assert {:ok, expected} == CreateTable.parse_column(input)

    # Multi-line text DEFAULT containing JSON with embedded newlines and
    # backslash-n escapes.
    # NOTE(review): exact interior whitespace of this fixture could not be
    # recovered from the mangled source — verify against the original file.
    input = """
    customization_options text DEFAULT '{"logo": "", "email_body": "

    Dear %(title)s %(surname)s:

    \n\n

    \n Your case %(shortname)s for patient %(patient_id)s has been submitted.
    \n You can view your case at %(case_url)s.\n

    ", "use_default_config": "false"}'::text NOT NULL",
    """
    expected = [%{default: "{\"logo\": \"\", \"email_body\": \"

Dear %(title)s %(surname)s:

\n\n

\n Your case %(shortname)s for patient %(patient_id)s has been submitted.
\n You can view your case at %(case_url)s.\n

\", \"use_default_config\": \"false\"}", name: "customization_options", null: false, type: :text}]
    assert {:ok, expected} == CreateTable.parse_column(input)
  end

  # Named table-level CHECK constraints parse to keyword data.
  test "table constraint check" do
    expected = [{:type, :constraint}, {:name, "case_coupon_current_uses_check"}, {:check, "(current_uses >= 0)"}]
    input = "CONSTRAINT case_coupon_current_uses_check CHECK ((current_uses >= 0))"
    assert {:ok, expected} == CreateTable.parse_table_constraint(input)

    expected = [{:type, :constraint}, {:name, "case_coupon_discount_percentage_check"}, {:check, "(discount_percentage >= 0)"}]
    input = "CONSTRAINT case_coupon_discount_percentage_check CHECK ((discount_percentage >= 0))"
    assert {:ok, expected} == CreateTable.parse_table_constraint(input)

    expected = [{:type, :constraint}, {:name, "case_coupon_max_uses_check"}, {:check, "(max_uses >= 0)"}]
    input = "CONSTRAINT case_coupon_max_uses_check CHECK ((max_uses >= 0))"
    assert {:ok, expected} == CreateTable.parse_table_constraint(input)
  end

  # Whole-table parse: inline PRIMARY KEY, DEFAULT, and REFERENCES columns.
  test "parse_session" do
    sql = """
    CREATE TABLE session (
        uid BYTEA NOT NULL PRIMARY KEY,
        isPersistent BOOLEAN NOT NULL DEFAULT FALSE,
        touched INTEGER,
        avatar_id INTEGER REFERENCES warp_avatar(id) ON DELETE CASCADE
    );
    """
    expected = %{
      name: "session",
      columns: [
        %{name: "uid", null: false, primary_key: true, type: :bytea},
        %{default: false, name: "isPersistent", null: false, type: :boolean},
        %{name: "touched", type: :integer},
        %{name: "avatar_id", on_delete: :cascade, references_column: ["id"], references_table: "warp_avatar", type: :integer},
      ]
    }
    assert {:ok, expected} == CreateTable.parse(sql)
  end

  # NOTE(review): fixture has a trailing comma after the last column
  # ("age integer,") — the parser appears to tolerate it; confirm this is
  # deliberate.
  test "parse_varchar" do
    sql = """
    CREATE TABLE bnd.t_eligibility (
        rowid integer,
        member_id character varying(50) NOT NULL,
        mbi character varying(50),
        dob timestamp without time zone,
        age integer,
    );
    """
    expected = %{
      name: ["bnd", "t_eligibility"],
      columns: [
        %{name: "rowid", type: :integer},
        %{name: "member_id", type: :"character varying", size: 50, null: false},
        %{name: "mbi", type: :"character varying", size: 50},
        %{name: "dob", type: :"timestamp without time zone"},
        %{name: "age", type: :integer},
      ]
    }
    assert {:ok, expected} == CreateTable.parse(sql)
  end

  # Columns and table-level constraints are returned in separate keys.
  test "create table with constraints" do
    sql = """
    CREATE TABLE public.case_coupon (
        id integer NOT NULL,
        facility_id integer NOT NULL,
        code character varying(64) NOT NULL,
        discount_percentage integer NOT NULL,
        discount_amount integer NOT NULL,
        max_uses integer NOT NULL,
        current_uses integer NOT NULL,
        start_date timestamp without time zone,
        end_date timestamp without time zone,
        CONSTRAINT case_coupon_current_uses_check CHECK ((current_uses >= 0)),
        CONSTRAINT case_coupon_discount_percentage_check CHECK ((discount_percentage >= 0)),
        CONSTRAINT case_coupon_discount_percentage_check1 CHECK ((discount_percentage >= 0)),
        CONSTRAINT case_coupon_max_uses_check CHECK ((max_uses >= 0))
    );
    """
    expected = %{
      columns: [
        %{name: "id", type: :integer, null: false},
        %{name: "facility_id", null: false, type: :integer},
        %{name: "code", size: 64, type: :"character varying", null: false},
        %{name: "discount_percentage", type: :integer, null: false},
        %{name: "discount_amount", type: :integer, null: false},
        %{name: "max_uses", null: false, type: :integer},
        %{name: "current_uses", null: false, type: :integer},
        %{name: "start_date", type: :"timestamp without time zone"},
        %{name: "end_date", type: :"timestamp without time zone"}
      ],
      name: ["public", "case_coupon"],
      constraints: [
        %{check: "(current_uses >= 0)", name: "case_coupon_current_uses_check", type: :constraint},
        %{check: "(discount_percentage >= 0)", name: "case_coupon_discount_percentage_check", type: :constraint},
        %{check: "(discount_percentage >= 0)", name: "case_coupon_discount_percentage_check1", type: :constraint},
        %{check: "(max_uses >= 0)", name: "case_coupon_max_uses_check", type: :constraint}
      ]
    }
    assert {:ok, expected} == CreateTable.parse(sql)
  end

  # Exercises the Commands module directly (not the aliased Parsers module).
  test "starts_with_number" do
    assert EctoExtractMigrations.Commands.CreateTable.starts_with_number("10")
    assert EctoExtractMigrations.Commands.CreateTable.starts_with_number("01")
    refute EctoExtractMigrations.Commands.CreateTable.starts_with_number("fish")
  end

  # Helpers to unwrap raw NimbleParsec result tuples when debugging.
  def value({:ok, [value], "", _, _, _}), do: value
  def value({:error, value, _, _, _, _}), do: value
end

# ===========================================================================
# /test/create_trigger_test.exs
# ===========================================================================
defmodule CreateTriggerTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateTrigger

  # Only the trigger name is extracted from a CREATE TRIGGER statement.
  test "create_trigger" do
    sql = """
    CREATE TRIGGER chat_message_update BEFORE UPDATE ON chat.message FOR EACH ROW EXECUTE PROCEDURE public.chat_update_timestamp();
    """
    expected = %{name: "chat_message_update"}
    assert {:ok, expected} == CreateTrigger.parse(sql)
  end
end

# ===========================================================================
# /test/create_type_test.exs
# ===========================================================================
defmodule CreateTypeTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateType

  # Enum types: qualified name plus the label list, in declaration order.
  test "create type" do
    expected = %{
      labels: ["paid", "unpaid", "partial"],
      name: ["public", "case_payment_status"]
    }
    assert {:ok, expected} == CreateType.parse("""
    CREATE TYPE public.case_payment_status AS ENUM (
        'paid',
        'unpaid',
        'partial'
    );
    """)
  end
end

# ===========================================================================
# /test/create_view_test.exs
# ===========================================================================
defmodule CreateViewTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Parsers.CreateView

  # Only the view name is extracted; the SELECT body is not interpreted.
  # NOTE(review): the first fixture has a trailing comma before FROM
  # ("u.surname,") — it is carried over from the original fixture as-is.
  test "create view" do
    expected = %{
      name: ["chat", "raw_assignment"]
    }
    assert {:ok, expected} == CreateView.parse("""
    CREATE VIEW chat.raw_assignment AS
    WITH users AS (
        SELECT u.id,
            u.warp_avatar_id,
            u.is_active,
            u.is_student,
            u.surname,
        FROM (public.click_user u
            JOIN public.warp_avatar a ON ((a.id = u.warp_avatar_id)))
    )
    SELECT DISTINCT p.id AS patient_id,
        first_value(c.id) OVER w AS care_taker_id,
        first_value(cf.facility_id) OVER w AS facility_id,
        first_value(cf.role_name) OVER w AS role_name
    FROM (((users c
        JOIN public.facility_user cf ON ((cf.user_id = c.id)))
        JOIN public.facility_user pf ON ((pf.facility_id = cf.facility_id)))
        JOIN users p ON ((p.id = pf.user_id)))
    WHERE (((cf.role_name)::text <> 'patient'::text) AND ((pf.role_name)::text = 'patient'::text))
    WINDOW w AS (PARTITION BY p.id ORDER BY cf.created_at);
    """)

    expected = %{
      name: ["chat", "internal_assignment"]
    }
    assert {:ok, expected} == CreateView.parse("""
    CREATE VIEW chat.internal_assignment AS
    SELECT raw_assignment.patient_id AS assignee_id,
        raw_assignment.care_taker_id AS user_id,
        raw_assignment.facility_id,
        raw_assignment.role_name
    FROM chat.raw_assignment
    WHERE (NOT (raw_assignment.patient_id IN ( SELECT raw_assignment_1.care_taker_id
        FROM chat.raw_assignment raw_assignment_1)));
    """)
  end
end
# ===========================================================================
# /test/ecto_extract_migrations_test.exs
# ===========================================================================
defmodule EctoExtractMigrationsTest do
  use ExUnit.Case

  import EctoExtractMigrations

  # Schema-qualified SQL names map to Elixir module names; the default
  # "public" schema prefix is dropped.
  test "sql_name_to_module/1" do
    assert "Mytable" == sql_name_to_module("mytable")
    assert "Myschema.Mytable" == sql_name_to_module(["myschema", "mytable"])
    assert "Mytable" == sql_name_to_module(["public", "mytable"])
  end

  describe "parse" do
    # BUG FIX: this fixture, pipeline, and assertion previously sat directly
    # in the `describe` body with no `test` macro, so the assertion ran at
    # compile time and was never registered as a test (a failure would abort
    # compilation instead of being reported as a test failure). Wrapping it
    # in `test` makes it a real, individually reportable test case.
    test "extracts function names from a multi-statement SQL dump" do
      # pg_dump-style fixture: five CREATE FUNCTION statements separated by
      # comment headers and blank lines.
      # NOTE(review): interior indentation was reconstructed from a mangled
      # source; the parser is only expected to extract the function names.
      data = """
      --
      -- Name: cast_to_decimal(text, numeric); Type: FUNCTION; Schema: public; Owner: -
      --

      CREATE FUNCTION public.cast_to_decimal(text, numeric) RETURNS numeric
          LANGUAGE plpgsql IMMUTABLE
          AS $_$
      begin
        return cast($1 as decimal);
      exception
        when invalid_text_representation then
          return $2;
      end;
      $_$;


      --
      -- Name: chat_update_timestamp(); Type: FUNCTION; Schema: public; Owner: -
      --

      CREATE FUNCTION public.chat_update_timestamp() RETURNS trigger
          LANGUAGE plpgsql
          AS $$
      BEGIN
        NEW.updated_at = NOW() AT TIME ZONE 'UTC';
        RETURN NEW;
      END;
      $$;


      --
      -- Name: create_warp_session(); Type: FUNCTION; Schema: public; Owner: -
      --

      CREATE FUNCTION public.create_warp_session() RETURNS void
          LANGUAGE plpgsql
          AS $$
      BEGIN

      IF EXISTS (
          SELECT *
          FROM pg_catalog.pg_tables
          WHERE tablename = 'warp_session'
      ) THEN
          RAISE NOTICE 'Table "warp_session" already exists.';
      ELSE
          CREATE TABLE warp_session (
              uid BYTEA NOT NULL PRIMARY KEY,
              isPersistent BOOLEAN NOT NULL DEFAULT FALSE,
              touched INTEGER,
              avatar_id INTEGER REFERENCES warp_avatar(id) ON DELETE CASCADE);
      END IF;

      END;
      $$;


      --
      -- Name: ensure_access_case_facility(integer, integer); Type: FUNCTION; Schema: public; Owner: -
      --

      CREATE FUNCTION public.ensure_access_case_facility(arg_case_id integer, arg_facility_id integer) RETURNS void
          LANGUAGE plpgsql
          AS $$
      DECLARE
      BEGIN
          INSERT INTO access_case_facility (case_id, facility_id)
          SELECT arg_case_id, arg_facility_id
          WHERE NOT EXISTS (
              SELECT 1 FROM access_case_facility
              WHERE case_id = arg_case_id
              AND facility_id = arg_facility_id);
      END;
      $$;


      --
      -- Name: ensure_access_case_user(integer, integer, bytea); Type: FUNCTION; Schema: public; Owner: -
      --

      CREATE FUNCTION public.ensure_access_case_user(arg_case_id integer, arg_user_id integer, arg_role_name bytea) RETURNS void
          LANGUAGE plpgsql
          AS $$
      DECLARE
      BEGIN
          INSERT INTO access_case_user (case_id, user_id, role_name)
          SELECT arg_case_id, arg_user_id, arg_role_name
          WHERE NOT EXISTS (
              SELECT 1 FROM access_case_user
              WHERE case_id = arg_case_id
              AND user_id = arg_user_id);
      END;
      $$;
      """

      # Feed the dump through the streaming parser line by line (each line
      # keeps its trailing newline, as the parser expects), drop whitespace
      # and comment tokens, and collect the parsed object names.
      results =
        data
        |> String.split("\n")
        |> Enum.map(&(&1 <> "\n"))
        |> Stream.with_index()
        |> Stream.transform(nil, &EctoExtractMigrations.parse/2)
        |> Stream.reject(&(&1.type in [:whitespace, :comment]))
        |> Enum.to_list()
        |> Enum.map(&(&1.data.name))

      expected = [
        ["public", "cast_to_decimal"],
        ["public", "chat_update_timestamp"],
        ["public", "create_warp_session"],
        ["public", "ensure_access_case_facility"],
        ["public", "ensure_access_case_user"]
      ]

      assert results == expected
    end
  end
end

# ===========================================================================
# /test/table_test.exs
# ===========================================================================
defmodule TableTest do
  use ExUnit.Case

  alias EctoExtractMigrations.Commands.CreateTable

  # has_pk/1 detects whether any column carries an inline primary key flag.
  test "has_pk" do
    columns = [
      %{name: "id", null: false, primary_key: true, type: :integer},
      %{name: "questionnaire_id", null: false, type: :integer},
      %{name: "response_id", null: false, type: :integer}
    ]
    assert CreateTable.has_pk(columns)
  end

  # Kept from the original file: disabled until the Table formatting API
  # is stable again.
  # test "format_column" do
  #   assert "      add :uid, :bytea, primary_key: true, null: false\n" == Table.format_column(%{name: "uid", null: false, primary_key: true, type: :bytea})
  #   assert "      add :avatar_id, :integer\n" == Table.format_column(%{name: "avatar_id", type: :integer})
  # end
end

# ===========================================================================
# /test/test_helper.exs
# ===========================================================================
ExUnit.start()