├── .github
│   └── workflows
│       └── build.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── Cargo.toml
├── LICENSE.md
├── Makefile
├── README.md
├── cliff.toml
├── deny.toml
├── fixtures
│   └── migrations
│       └── 20230402025956_init.sql
└── src
    ├── lib.rs
    └── pg.rs
/.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: build 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | build-rust: 13 | strategy: 14 | matrix: 15 | platform: [ubuntu-latest] 16 | runs-on: ${{ matrix.platform }} 17 | services: 18 | postgres: 19 | image: postgres:14.5 20 | env: 21 | POSTGRES_USER: postgres 22 | POSTGRES_PASSWORD: postgres 23 | POSTGRES_DB: test 24 | ports: 25 | - 5432:5432 26 | # Set health checks to wait until postgres has started 27 | options: >- 28 | --health-cmd pg_isready 29 | --health-interval 10s 30 | --health-timeout 5s 31 | --health-retries 5 32 | steps: 33 | - uses: actions/checkout@v3 34 | with: 35 | fetch-depth: 0 36 | - name: Install Rust 37 | run: rustup toolchain install stable --component llvm-tools-preview 38 | - name: Install cargo-llvm-cov 39 | uses: taiki-e/install-action@cargo-llvm-cov 40 | - name: Install nextest 41 | uses: taiki-e/install-action@nextest 42 | - uses: Swatinem/rust-cache@v1 43 | - name: Check code format 44 | run: cargo fmt -- --check 45 | - name: Check the package for errors 46 | run: cargo check --all 47 | - name: Lint rust sources 48 | run: cargo clippy --all-targets --all-features --tests --benches -- -D warnings 49 | - name: Execute rust tests 50 | run: cargo nextest run --all-features 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /Cargo.lock 3 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | fail_fast: false 2 | repos: 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: v4.3.0 5 | hooks: 6 | - id: check-byte-order-marker 7 | - id: check-case-conflict 8 | - id: check-merge-conflict 9 | - id: check-symlinks 10 | - id: check-yaml 11 | - id: end-of-file-fixer 12 | - id: mixed-line-ending 13 | - id: trailing-whitespace 14 | - repo: https://github.com/psf/black 15 | rev: 22.10.0 16 | hooks: 17 | - id: black 18 | - repo: local 19 | hooks: 20 | - id: cargo-fmt 21 | name: cargo fmt 22 | description: Format files with rustfmt. 23 | entry: bash -c 'cargo fmt -- --check' 24 | language: rust 25 | files: \.rs$ 26 | args: [] 27 | - id: cargo-deny 28 | name: cargo deny check 29 | description: Check cargo dependencies 30 | entry: bash -c 'cargo deny check' 31 | language: rust 32 | files: \.rs$ 33 | args: [] 34 | - id: typos 35 | name: typos 36 | description: Check for typos 37 | entry: bash -c 'typos' 38 | language: rust 39 | files: \.*$ 40 | pass_filenames: false 41 | - id: cargo-check 42 | name: cargo check 43 | description: Check the package for errors.
44 | entry: bash -c 'cargo check --all' 45 | language: rust 46 | files: \.rs$ 47 | pass_filenames: false 48 | - id: cargo-clippy 49 | name: cargo clippy 50 | description: Lint rust sources 51 | entry: bash -c 'cargo clippy --all-targets --all-features --tests --benches -- -D warnings' 52 | language: rust 53 | files: \.rs$ 54 | pass_filenames: false 55 | - id: cargo-test 56 | name: cargo test 57 | description: Run unit tests for the project 58 | entry: bash -c 'cargo nextest run --all-features' 59 | language: rust 60 | files: \.rs$ 61 | pass_filenames: false 62 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | ## [0.2.1] - 2023-04-02 6 | 7 | [985b498](985b498dadfbcfbceedd2e760e8a59ba5677fce9)...[5c5926b](5c5926b013ec4456e74c853e9dcb0a5e873d81f0) 8 | 9 | ### Bug Fixes 10 | 11 | - Fix gh ([cc12dab](cc12dab59e1b6db8c6a4110e4536d06328cf8207) - 2023-04-02 by Tyr Chen) 12 | 13 | ### Miscellaneous Tasks 14 | 15 | - Better documentation ([5c5926b](5c5926b013ec4456e74c853e9dcb0a5e873d81f0) - 2023-04-02 by Tyr Chen) 16 | 17 | ## [0.2.0] - 2023-04-02 18 | 19 | ### Features 20 | 21 | - Init project. Use chatgpt suggested SQL to retrieve database schema info ([9e3ff9b](9e3ff9b727a7d33c83fb7cdf530a5e966752ce26) - 2023-04-02 by Tyr Chen) 22 | 23 | ### Miscellaneous Tasks 24 | 25 | - Add readme ([985b498](985b498dadfbcfbceedd2e760e8a59ba5677fce9) - 2023-04-02 by Tyr Chen) 26 | 27 | 28 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "db-schema" 3 | version = "0.2.1" 4 | edition = "2021" 5 | license = "MIT" 6 | documentation = "https://docs.rs/db-schema" 7 | repository = "https://github.com/tyrchen/db-schema" 8 | homepage = "https://github.com/tyrchen/db-schema" 9 | description = """ 10 | A simple library to retrieve schema information from a database.
11 | """ 12 | readme = "README.md" 13 | categories = ["development-tools"] 14 | keywords = ["sqlx", "postgres", "database", "schema"] 15 | 16 | [features] 17 | default = ["db-postgres"] 18 | db-all = ["db-postgres", "db-mysql", "db-sqlite"] 19 | db-postgres = ["sqlx/postgres", "paste"] 20 | db-mysql = ["sqlx/mysql", "paste"] 21 | db-sqlite = ["sqlx/sqlite", "paste"] 22 | 23 | [dependencies] 24 | paste = { version = "1.0.12", optional = true } 25 | sqlx = { version = "0.6.3", features = ["runtime-tokio-rustls"], optional = true } 26 | 27 | [dev-dependencies] 28 | anyhow = "1.0.70" 29 | sqlx = { version = "0.6.3", features = ["postgres", "runtime-tokio-rustls"] } 30 | sqlx-db-tester = "0.3.6" 31 | tokio = { version = "1.27.0", features = ["rt", "rt-multi-thread", "macros"] } 32 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | Copyright <2023> 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | build: 2 | @cargo build 3 | 4 | cov: 5 | @cargo llvm-cov nextest --all-features --workspace --lcov --output-path coverage/lcov-$(shell date +%F).info 6 | 7 | test: 8 | @cargo nextest run --all-features 9 | 10 | release: 11 | @cargo release tag --execute 12 | @git cliff -o CHANGELOG.md 13 | @git commit -a -m "Update CHANGELOG.md" || true 14 | @git push origin master 15 | @cargo release push --execute 16 | 17 | .PHONY: build cov test release 18 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # README 2 | 3 | This crate provides a set of functions to generate SQL statements for various PostgreSQL schema objects, such as tables, views, materialized views, functions, triggers, and indexes. The generated SQL statements can be useful for schema introspection, documentation, or migration purposes. 4 | 5 | ## Features 6 | 7 | The module provides a `PgSchema` struct that accepts a namespace (schema name) as input and exposes methods for generating SQL statements for the following schema objects: 8 | 9 | - Enums 10 | - Composite types 11 | - Tables 12 | - Views 13 | - Materialized views 14 | - Functions 15 | - Triggers 16 | - Indexes 17 | 18 | ## Usage 19 | 20 | 1. 
Create an instance of `PgSchema` with the desired namespace (schema name). 21 | 22 | 23 | 24 | ```rust 25 | use db_schema::PgSchema; 26 | 27 | let schema = PgSchema::new("your_schema_name"); 28 | ``` 29 | 30 | 2. Use the available methods to generate introspection queries for the desired schema objects. Each method returns a SQL query that, when executed against a database, yields the corresponding `CREATE` statements. 31 | 32 | 33 | 34 | ```rust 35 | // Get the SQL statements for all enums in the schema 36 | let enums_sql = schema.enums(); 37 | 38 | // Get the SQL statements for all composite types in the schema 39 | let types_sql = schema.types(); 40 | 41 | // Get the SQL statements for all tables in the schema 42 | let tables_sql = schema.tables(); 43 | 44 | // Get the SQL statements for all views in the schema 45 | let views_sql = schema.views(); 46 | 47 | // Get the SQL statements for all materialized views in the schema 48 | let mviews_sql = schema.mviews(); 49 | 50 | // Get the SQL statements for all functions in the schema 51 | let functions_sql = schema.functions(); 52 | 53 | // Get the SQL statements for all triggers in the schema 54 | let triggers_sql = schema.triggers(); 55 | 56 | // Get the SQL statements for all indexes in the schema 57 | let indexes_sql = schema.indexes(); 58 | ``` 59 | 60 | You can also use the `get_*` methods, which run these queries for you. They accept a `PgPool` instance as input and return the generated statements as a `Result<Vec<String>, sqlx::Error>`. 61 | 62 | ## Example 63 | 64 | Here's an example of how to retrieve the SQL statements for all schema objects in a given namespace (schema name): 65 | 66 | 67 | 68 | ```rust 69 | use db_schema::PgSchema; 70 | use sqlx::PgPool; 71 | 72 | async fn generate_sql_statements_for_schema(pool: &PgPool) -> Result<(), sqlx::Error> { 73 | let schema = PgSchema::new("your_schema_name"); 74 | 75 | let enums = schema.get_enums(pool).await?; 76 | let types = schema.get_types(pool).await?; 77 | let tables = schema.get_tables(pool).await?; 78 | let views = schema.get_views(pool).await?; 79 | let mviews = schema.get_mviews(pool).await?; 80 | let functions = schema.get_functions(pool).await?; 81 | let triggers = schema.get_triggers(pool).await?; 82 | let indexes = schema.get_indexes(pool).await?; 83 | 84 | println!("Enums: {:?}", enums); 85 | println!("Types: {:?}", types); 86 | println!("Tables: {:?}", tables); 87 | println!("Views: {:?}", views); 88 | println!("Materialized Views: {:?}", mviews); 89 | println!("Functions: {:?}", functions); 90 | println!("Triggers: {:?}", triggers); 91 | println!("Indexes: {:?}", indexes); 92 | 93 | Ok(()) 94 | } 95 | ``` 96 | 97 | ## Tests 98 | 99 | The code also includes tests to validate the functionality of the `PgSchema` struct. They require a running PostgreSQL server; CI provides one as a service container (see `build.yml`), and the sketch below shows one way to start an equivalent server locally.
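A minimal sketch for starting a matching server with Docker, assuming Docker is available; the image tag and credentials mirror the CI service container in `build.yml`, and the container name `db-schema-pg` is only illustrative:

```sh
# Run PostgreSQL 14.5 with the same settings as the CI service container.
docker run -d \
  --name db-schema-pg \
  -e POSTGRES_USER=postgres \
  -e POSTGRES_PASSWORD=postgres \
  -e POSTGRES_DB=test \
  -p 5432:5432 \
  postgres:14.5
```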
To run the tests, execute the following command: 100 | 101 | 102 | 103 | ```sh 104 | cargo nextest run 105 | ``` 106 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | [changelog] 2 | # changelog header 3 | header = """ 4 | # Changelog\n 5 | All notable changes to this project will be documented in this file.\n 6 | """ 7 | # template for the changelog body 8 | # https://tera.netlify.app/docs/#introduction 9 | body = """ 10 | {% if version %}\ 11 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 12 | {% else %}\ 13 | ## [unreleased] 14 | {% endif %}\ 15 | {% if previous %}\ 16 | {% if previous.commit_id %} 17 | [{{ previous.commit_id | truncate(length=7, end="") }}]({{ previous.commit_id }})...\ 18 | [{{ commit_id | truncate(length=7, end="") }}]({{ commit_id }}) 19 | {% endif %}\ 20 | {% endif %}\ 21 | {% for group, commits in commits | group_by(attribute="group") %} 22 | ### {{ group | upper_first }} 23 | {% for commit in commits %} 24 | - {{ commit.message | upper_first }} ([{{ commit.id | truncate(length=7, end="") }}]({{ commit.id }}) - {{ commit.author.timestamp | date }} by {{ commit.author.name }})\ 25 | {% for footer in commit.footers -%} 26 | , {{ footer.token }}{{ footer.separator }}{{ footer.value }}\ 27 | {% endfor %}\ 28 | {% endfor %} 29 | {% endfor %}\n 30 | """ 31 | # remove the leading and trailing whitespace from the template 32 | trim = true 33 | # changelog footer 34 | footer = """ 35 | 36 | """ 37 | 38 | [git] 39 | # parse the commits based on https://www.conventionalcommits.org 40 | conventional_commits = true 41 | # filter out the commits that are not conventional 42 | filter_unconventional = true 43 | # process each line of a commit as an individual commit 44 | split_commits = false 45 | # regex for parsing and grouping commits 46 | commit_parsers = [ 47 | { message = "^feat", group = "Features"}, 48 | { message = "^fix", group = "Bug Fixes"}, 49 | { message = "^doc", group = "Documentation"}, 50 | { message = "^perf", group = "Performance"}, 51 | { message = "^refactor", group = "Refactor"}, 52 | { message = "^style", group = "Styling"}, 53 | { message = "^test", group = "Testing"}, 54 | { message = "^chore\\(release\\): prepare for", skip = true}, 55 | { message = "^chore", group = "Miscellaneous Tasks"}, 56 | { body = ".*security", group = "Security"}, 57 | ] 58 | # protect breaking changes from being skipped due to matching a skipping commit_parser 59 | protect_breaking_commits = false 60 | # filter out the commits that are not matched by commit parsers 61 | filter_commits = false 62 | # glob pattern for matching git tags 63 | tag_pattern = "v[0-9]*" 64 | # regex for skipping tags 65 | skip_tags = "v0.1.0-beta.1" 66 | # regex for ignoring tags 67 | ignore_tags = "" 68 | # sort the tags chronologically 69 | date_order = false 70 | # sort the commits inside sections by oldest/newest order 71 | sort_commits = "oldest" 72 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | # This template contains all of the possible sections and their default values 2 | 3 | # Note that all fields that take a lint level have these possible values: 4 | # * deny - An error will be produced and the check will fail 5 | # * warn - A warning will be produced, but the check will not fail 6
| # * allow - No warning or error will be produced, though in some cases a note 7 | # will be 8 | 9 | # The values provided in this template are the default values that will be used 10 | # when any section or field is not specified in your own configuration 11 | 12 | # If 1 or more target triples (and optionally, target_features) are specified, 13 | # only the specified targets will be checked when running `cargo deny check`. 14 | # This means, if a particular package is only ever used as a target specific 15 | # dependency, such as, for example, the `nix` crate only being used via the 16 | # `target_family = "unix"` configuration, that only having windows targets in 17 | # this list would mean the nix crate, as well as any of its exclusive 18 | # dependencies not shared by any other crates, would be ignored, as the target 19 | # list here is effectively saying which targets you are building for. 20 | targets = [ 21 | # The triple can be any string, but only the target triples built in to 22 | # rustc (as of 1.40) can be checked against actual config expressions 23 | #{ triple = "x86_64-unknown-linux-musl" }, 24 | # You can also specify which target_features you promise are enabled for a 25 | # particular target. target_features are currently not validated against 26 | # the actual valid features supported by the target architecture. 27 | #{ triple = "wasm32-unknown-unknown", features = ["atomics"] }, 28 | ] 29 | 30 | # This section is considered when running `cargo deny check advisories` 31 | # More documentation for the advisories section can be found here: 32 | # https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html 33 | [advisories] 34 | # The path where the advisory database is cloned/fetched into 35 | db-path = "~/.cargo/advisory-db" 36 | # The url(s) of the advisory databases to use 37 | db-urls = ["https://github.com/rustsec/advisory-db"] 38 | # The lint level for security vulnerabilities 39 | vulnerability = "deny" 40 | # The lint level for unmaintained crates 41 | unmaintained = "warn" 42 | # The lint level for crates that have been yanked from their source registry 43 | yanked = "warn" 44 | # The lint level for crates with security notices. Note that as of 45 | # 2019-12-17 there are no security notice advisories in 46 | # https://github.com/rustsec/advisory-db 47 | notice = "warn" 48 | # A list of advisory IDs to ignore. Note that ignored advisories will still 49 | # output a note when they are encountered. 50 | ignore = [ 51 | #"RUSTSEC-0000-0000", 52 | ] 53 | # Threshold for security vulnerabilities, any vulnerability with a CVSS score 54 | # lower than the range specified will be ignored. Note that ignored advisories 55 | # will still output a note when they are encountered. 56 | # * None - CVSS Score 0.0 57 | # * Low - CVSS Score 0.1 - 3.9 58 | # * Medium - CVSS Score 4.0 - 6.9 59 | # * High - CVSS Score 7.0 - 8.9 60 | # * Critical - CVSS Score 9.0 - 10.0 61 | #severity-threshold = 62 | 63 | # This section is considered when running `cargo deny check licenses` 64 | # More documentation for the licenses section can be found here: 65 | # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html 66 | [licenses] 67 | # The lint level for crates which do not have a detectable license 68 | unlicensed = "allow" 69 | # List of explicitly allowed licenses 70 | # See https://spdx.org/licenses/ for list of possible licenses 71 | # [possible values: any SPDX 3.7 short identifier (+ optional exception)]. 
72 | allow = [ 73 | "MIT", 74 | "Apache-2.0", 75 | "Unicode-DFS-2016", 76 | "ISC", 77 | "BSD-3-Clause", 78 | ] 79 | # List of explicitly disallowed licenses 80 | # See https://spdx.org/licenses/ for list of possible licenses 81 | # [possible values: any SPDX 3.7 short identifier (+ optional exception)]. 82 | deny = [ 83 | #"Nokia", 84 | ] 85 | # Lint level for licenses considered copyleft 86 | copyleft = "warn" 87 | # Blanket approval or denial for OSI-approved or FSF Free/Libre licenses 88 | # * both - The license will be approved if it is both OSI-approved *AND* FSF 89 | # * either - The license will be approved if it is either OSI-approved *OR* FSF 90 | # * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF 91 | # * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved 92 | # * neither - This predicate is ignored and the default lint level is used 93 | allow-osi-fsf-free = "neither" 94 | # Lint level used when no other predicates are matched 95 | # 1. License isn't in the allow or deny lists 96 | # 2. License isn't copyleft 97 | # 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither" 98 | default = "deny" 99 | # The confidence threshold for detecting a license from license text. 100 | # The higher the value, the more closely the license text must be to the 101 | # canonical license text of a valid SPDX license file. 102 | # [possible values: any between 0.0 and 1.0]. 103 | confidence-threshold = 0.8 104 | # Allow 1 or more licenses on a per-crate basis, so that particular licenses 105 | # aren't accepted for every possible crate as with the normal allow list 106 | exceptions = [ 107 | # Each entry is the crate and version constraint, and its specific allow 108 | # list 109 | #{ allow = ["Zlib"], name = "adler32", version = "*" }, 110 | ] 111 | 112 | # Some crates don't have (easily) machine readable licensing information, 113 | # adding a clarification entry for it allows you to manually specify the 114 | # licensing information 115 | #[[licenses.clarify]] 116 | # The name of the crate the clarification applies to 117 | #name = "ring" 118 | # The optional version constraint for the crate 119 | #version = "*" 120 | # The SPDX expression for the license requirements of the crate 121 | #expression = "MIT AND ISC AND OpenSSL" 122 | # One or more files in the crate's source used as the "source of truth" for 123 | # the license expression. If the contents match, the clarification will be used 124 | # when running the license check, otherwise the clarification will be ignored 125 | # and the crate will be checked normally, which may produce warnings or errors 126 | # depending on the rest of your configuration 127 | #license-files = [ 128 | # Each entry is a crate relative path, and the (opaque) hash of its contents 129 | #{ path = "LICENSE", hash = 0xbd0eed23 } 130 | #] 131 | 132 | [licenses.private] 133 | # If true, ignores workspace crates that aren't published, or are only 134 | # published to private registries 135 | ignore = false 136 | # One or more private registries that you might publish crates to, if a crate 137 | # is only published to private registries, and ignore is true, the crate will 138 | # not have its license(s) checked 139 | registries = [ 140 | #"https://sekretz.com/registry 141 | ] 142 | 143 | # This section is considered when running `cargo deny check bans`. 
144 | # More documentation about the 'bans' section can be found here: 145 | # https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html 146 | [bans] 147 | # Lint level for when multiple versions of the same crate are detected 148 | multiple-versions = "warn" 149 | # Lint level for when a crate version requirement is `*` 150 | wildcards = "allow" 151 | # The graph highlighting used when creating dotgraphs for crates 152 | # with multiple versions 153 | # * lowest-version - The path to the lowest versioned duplicate is highlighted 154 | # * simplest-path - The path to the version with the fewest edges is highlighted 155 | # * all - Both lowest-version and simplest-path are used 156 | highlight = "all" 157 | # List of crates that are allowed. Use with care! 158 | allow = [ 159 | #{ name = "ansi_term", version = "=0.11.0" }, 160 | ] 161 | # List of crates to deny 162 | deny = [ 163 | # Each entry the name of a crate and a version range. If version is 164 | # not specified, all versions will be matched. 165 | #{ name = "ansi_term", version = "=0.11.0" }, 166 | # 167 | # Wrapper crates can optionally be specified to allow the crate when it 168 | # is a direct dependency of the otherwise banned crate 169 | #{ name = "ansi_term", version = "=0.11.0", wrappers = [] }, 170 | ] 171 | # Certain crates/versions that will be skipped when doing duplicate detection. 172 | skip = [ 173 | #{ name = "ansi_term", version = "=0.11.0" }, 174 | ] 175 | # Similarly to `skip` allows you to skip certain crates during duplicate 176 | # detection. Unlike skip, it also includes the entire tree of transitive 177 | # dependencies starting at the specified crate, up to a certain depth, which is 178 | # by default infinite 179 | skip-tree = [ 180 | #{ name = "ansi_term", version = "=0.11.0", depth = 20 }, 181 | ] 182 | 183 | # This section is considered when running `cargo deny check sources`. 184 | # More documentation about the 'sources' section can be found here: 185 | # https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html 186 | [sources] 187 | # Lint level for what to happen when a crate from a crate registry that is not 188 | # in the allow list is encountered 189 | unknown-registry = "warn" 190 | # Lint level for what to happen when a crate from a git repository that is not 191 | # in the allow list is encountered 192 | unknown-git = "warn" 193 | # List of URLs for allowed crate registries. Defaults to the crates.io index 194 | # if not specified. If it is specified but empty, no registries are allowed. 
195 | allow-registry = ["https://github.com/rust-lang/crates.io-index"] 196 | # List of URLs for allowed Git repositories 197 | allow-git = [] 198 | 199 | [sources.allow-org] 200 | # 1 or more github.com organizations to allow git sources for 201 | github = [] 202 | # 1 or more gitlab.com organizations to allow git sources for 203 | gitlab = [] 204 | # 1 or more bitbucket.org organizations to allow git sources for 205 | bitbucket = [] 206 | -------------------------------------------------------------------------------- /fixtures/migrations/20230402025956_init.sql: -------------------------------------------------------------------------------- 1 | --- The following test tables are created by ChatGPT 2 | CREATE SCHEMA gpt; 3 | 4 | CREATE TYPE gpt.login_method AS ENUM ( 5 | 'email', 6 | 'google', 7 | 'github' 8 | ); 9 | 10 | CREATE TYPE gpt.post_status AS ENUM ( 11 | 'draft', 12 | 'published', 13 | 'archived' 14 | ); 15 | 16 | CREATE TYPE gpt.address AS ( 17 | street varchar ( 255), 18 | city VARCHAR(100), 19 | state CHAR(2), 20 | postal_code CHAR(5)); 21 | 22 | CREATE TABLE gpt.users ( 23 | id serial PRIMARY KEY, 24 | username varchar(50) UNIQUE NOT NULL, 25 | email varchar(255) UNIQUE NOT NULL, 26 | first_name varchar(50), 27 | last_name varchar(50), 28 | created_at timestamptz NOT NULL DEFAULT NOW(), 29 | updated_at timestamptz NOT NULL DEFAULT NOW() 30 | ); 31 | 32 | CREATE TABLE gpt.logins ( 33 | id serial PRIMARY KEY, 34 | user_id integer NOT NULL REFERENCES gpt.users (id) ON DELETE CASCADE, 35 | method gpt.login_method NOT NULL, 36 | identifier varchar(255) NOT NULL, 37 | created_at timestamptz NOT NULL DEFAULT NOW(), 38 | updated_at timestamptz NOT NULL DEFAULT NOW(), 39 | UNIQUE (method, identifier) 40 | ); 41 | 42 | CREATE TABLE gpt.posts ( 43 | id serial PRIMARY KEY, 44 | user_id integer NOT NULL REFERENCES gpt.users (id) ON DELETE CASCADE, 45 | title varchar(255) NOT NULL, 46 | content text NOT NULL, 47 | status gpt.post_status NOT NULL DEFAULT 'draft', 48 | published_at timestamptz, 49 | created_at timestamptz NOT NULL DEFAULT NOW(), 50 | updated_at timestamptz NOT NULL DEFAULT NOW() 51 | ); 52 | 53 | CREATE TABLE gpt.comments ( 54 | id serial PRIMARY KEY, 55 | post_id integer NOT NULL REFERENCES gpt.posts (id) ON DELETE CASCADE, 56 | user_id integer NOT NULL REFERENCES gpt.users (id) ON DELETE CASCADE, 57 | content text NOT NULL, 58 | created_at timestamptz NOT NULL DEFAULT NOW(), 59 | updated_at timestamptz NOT NULL DEFAULT NOW() 60 | ); 61 | 62 | -- create indexes 63 | CREATE INDEX user_posts ON gpt.posts (user_id, created_at DESC); 64 | 65 | -- create views 66 | CREATE VIEW gpt.posts_with_comments AS 67 | SELECT 68 | p.id, 69 | p.user_id, 70 | p.title, 71 | p.content, 72 | p.status, 73 | p.published_at, 74 | p.created_at, 75 | p.updated_at, 76 | json_agg(json_build_object('id', c.id, 'user_id', c.user_id, 'content', c.content, 'created_at', c.created_at, 'updated_at', c.updated_at)) AS comments 77 | FROM 78 | gpt.posts p 79 | LEFT JOIN gpt.comments c ON c.post_id = p.id 80 | GROUP BY 81 | p.id; 82 | 83 | CREATE MATERIALIZED VIEW gpt.users_with_posts AS 84 | SELECT 85 | u.id, 86 | u.username, 87 | u.email, 88 | u.first_name, 89 | u.last_name, 90 | u.created_at, 91 | u.updated_at, 92 | json_agg(json_build_object('id', p.id, 'title', p.title, 'content', p.content, 'status', p.status, 'published_at', p.published_at, 'created_at', p.created_at, 'updated_at', p.updated_at)) AS posts 93 | FROM 94 | gpt.users u 95 | LEFT JOIN gpt.posts p ON p.user_id = u.id 96 | GROUP BY 97 | u.id; 
98 | 99 | -- create function 100 | CREATE OR REPLACE FUNCTION gpt.refresh_users_with_posts () 101 | RETURNS TRIGGER 102 | AS $$ 103 | BEGIN 104 | REFRESH MATERIALIZED VIEW gpt.users_with_posts; 105 | RETURN NULL; 106 | END; 107 | $$ 108 | LANGUAGE plpgsql; 109 | 110 | -- create triggers 111 | CREATE TRIGGER refresh_users_with_posts 112 | AFTER INSERT OR UPDATE OR DELETE ON gpt.posts 113 | FOR EACH STATEMENT 114 | EXECUTE PROCEDURE gpt.refresh_users_with_posts (); 115 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A module for working with PostgreSQL database schemas. 2 | //! 3 | //! This module provides a `PgSchema` struct that allows you to generate SQL statements 4 | //! for various schema-related tasks, such as creating tables, views, and functions. 5 | //! 6 | //! Example usage: 7 | //! ```ignore 8 | //! use db_schema::PgSchema; 9 | //! 10 | //! let schema = PgSchema::new("my_schema"); 11 | //! let pool = get_pg_pool(); // Function to get a connection pool 12 | //! let tables_sql = schema.get_tables(&pool).await.unwrap(); 13 | //! ``` 14 | 15 | #![warn( 16 | clippy::all, 17 | clippy::dbg_macro, 18 | clippy::todo, 19 | clippy::empty_enum, 20 | clippy::enum_glob_use, 21 | clippy::mem_forget, 22 | clippy::unused_self, 23 | clippy::filter_map_next, 24 | clippy::needless_continue, 25 | clippy::needless_borrow, 26 | clippy::match_wildcard_for_single_variants, 27 | clippy::if_let_mutex, 28 | clippy::mismatched_target_os, 29 | clippy::await_holding_lock, 30 | clippy::match_on_vec_items, 31 | clippy::imprecise_flops, 32 | clippy::suboptimal_flops, 33 | clippy::lossy_float_literal, 34 | clippy::rest_pat_in_fully_bound_structs, 35 | clippy::fn_params_excessive_bools, 36 | clippy::exit, 37 | clippy::inefficient_to_string, 38 | clippy::linkedlist, 39 | clippy::macro_use_imports, 40 | clippy::option_option, 41 | clippy::verbose_file_reads, 42 | clippy::unnested_or_patterns, 43 | clippy::str_to_string, 44 | rust_2018_idioms, 45 | future_incompatible, 46 | nonstandard_style, 47 | missing_debug_implementations, 48 | missing_docs 49 | )] 50 | #![deny(unreachable_pub, private_in_public)] 51 | #![allow(elided_lifetimes_in_paths, clippy::type_complexity)] 52 | #![forbid(unsafe_code)] 53 | #![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg))] 54 | #![cfg_attr(test, allow(clippy::float_cmp))] 55 | 56 | mod pg; 57 | pub use pg::PgSchema; 58 | -------------------------------------------------------------------------------- /src/pg.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "db-postgres")] 2 | use paste::paste; 3 | #[cfg(feature = "db-postgres")] 4 | use sqlx::PgPool; 5 | 6 | /// A struct representing a PostgreSQL schema. 7 | #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] 8 | pub struct PgSchema { 9 | namespace: String, 10 | } 11 | 12 | impl PgSchema { 13 | /// Create a new `PgSchema` instance. 14 | pub fn new(namespace: impl Into<String>) -> Self { 15 | Self { 16 | namespace: namespace.into(), 17 | } 18 | } 19 | 20 | /// Generates a SQL statement for creating all enum types in the schema. 21 | pub fn enums(&self) -> String { 22 | format!("SELECT 23 | 'CREATE TYPE ' || n.nspname || '.'
|| t.typname || ' AS ENUM (' || string_agg(quote_literal(e.enumlabel), ', ') || ');' AS sql 24 | FROM 25 | pg_catalog.pg_type t 26 | JOIN pg_catalog.pg_namespace n ON t.typnamespace = n.oid 27 | JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid 28 | WHERE 29 | n.nspname = '{}' 30 | AND t.typtype = 'e' 31 | GROUP BY 32 | n.nspname, t.typname;", self.namespace) 33 | } 34 | 35 | /// Generates a SQL statement for creating all composite types in the schema. 36 | pub fn types(&self) -> String { 37 | format!("SELECT 38 | 'CREATE TYPE ' || n.nspname || '.' || t.typname || ' AS (' || string_agg(a.attname || ' ' || pg_catalog.format_type(a.atttypid, a.atttypmod), ', ') || ');' AS sql 39 | FROM 40 | pg_catalog.pg_type t 41 | JOIN pg_catalog.pg_namespace n ON t.typnamespace = n.oid 42 | JOIN pg_catalog.pg_class c ON t.typrelid = c.oid 43 | JOIN pg_catalog.pg_attribute a ON t.typrelid = a.attrelid 44 | WHERE 45 | n.nspname = '{}' 46 | AND t.typtype = 'c' 47 | AND c.relkind = 'c' 48 | GROUP BY 49 | n.nspname, t.typname;", self.namespace) 50 | } 51 | 52 | /// Generates a SQL statement for creating all tables in the schema. 53 | pub fn tables(&self) -> String { 54 | format!("WITH table_columns AS ( 55 | SELECT 56 | n.nspname AS schema_name, 57 | c.relname AS table_name, 58 | a.attname AS column_name, 59 | pg_catalog.format_type(a.atttypid, a.atttypmod) AS column_type, 60 | a.attnotnull AS is_not_null, 61 | a.attnum AS column_position 62 | FROM 63 | pg_catalog.pg_attribute a 64 | JOIN pg_catalog.pg_class c ON a.attrelid = c.oid 65 | JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid 66 | WHERE 67 | a.attnum > 0 68 | AND NOT a.attisdropped 69 | AND n.nspname = '{0}' 70 | AND c.relkind = 'r' 71 | ), 72 | table_constraints AS ( 73 | SELECT 74 | tc.constraint_name, 75 | tc.table_schema, 76 | tc.table_name, 77 | kcu.column_name, 78 | tc.constraint_type 79 | FROM 80 | information_schema.table_constraints tc 81 | JOIN information_schema.key_column_usage kcu 82 | ON tc.constraint_catalog = kcu.constraint_catalog 83 | AND tc.constraint_schema = kcu.constraint_schema 84 | AND tc.constraint_name = kcu.constraint_name 85 | WHERE 86 | tc.constraint_type IN ('PRIMARY KEY', 'FOREIGN KEY', 'UNIQUE') 87 | AND tc.table_schema = '{0}' 88 | ), 89 | formatted_columns AS ( 90 | SELECT 91 | tc.schema_name, 92 | tc.table_name, 93 | tc.column_name, 94 | tc.column_type, 95 | tc.is_not_null, 96 | tc.column_position, 97 | STRING_AGG( 98 | tcs.constraint_type || ' (' || tc.column_name || ')', 99 | ', ' 100 | ORDER BY tcs.constraint_type DESC 101 | ) AS column_constraints 102 | FROM 103 | table_columns tc 104 | LEFT JOIN table_constraints tcs 105 | ON tc.schema_name = tcs.table_schema 106 | AND tc.table_name = tcs.table_name 107 | AND tc.column_name = tcs.column_name 108 | GROUP BY 109 | tc.schema_name, 110 | tc.table_name, 111 | tc.column_name, 112 | tc.column_type, 113 | tc.is_not_null, 114 | tc.column_position 115 | ), 116 | create_table_statements AS ( 117 | SELECT 118 | fc.schema_name, 119 | fc.table_name, 120 | STRING_AGG( 121 | fc.column_name || ' ' || fc.column_type || (CASE WHEN fc.is_not_null THEN ' NOT NULL' ELSE '' END) || COALESCE(' ' || fc.column_constraints, ''), 122 | ', ' 123 | ORDER BY fc.column_position 124 | ) AS formatted_columns 125 | FROM 126 | formatted_columns fc 127 | GROUP BY 128 | fc.schema_name, 129 | fc.table_name 130 | ) 131 | SELECT 132 | 'CREATE TABLE ' || schema_name || '.' 
|| table_name || ' (' || formatted_columns || ');' AS sql 133 | FROM 134 | create_table_statements;", self.namespace) 135 | } 136 | 137 | /// Generates a SQL statement for creating all views in the schema. 138 | pub fn views(&self) -> String { 139 | format!( 140 | "SELECT 141 | 'CREATE VIEW ' || n.nspname || '.' || c.relname || ' AS ' || pg_get_viewdef(c.oid) AS sql 142 | FROM 143 | pg_catalog.pg_class c 144 | JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid 145 | WHERE 146 | c.relkind = 'v' -- Select views 147 | AND n.nspname = '{}';", 148 | self.namespace 149 | ) 150 | } 151 | 152 | /// Generates a SQL statement for creating all materialized views in the schema. 153 | pub fn mviews(&self) -> String { 154 | format!("SELECT 155 | 'CREATE MATERIALIZED VIEW ' || n.nspname || '.' || c.relname || ' AS ' || pg_get_viewdef(c.oid) AS sql 156 | FROM 157 | pg_catalog.pg_class c 158 | JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid 159 | WHERE 160 | c.relkind = 'm' -- Select materialized views 161 | AND n.nspname = '{}';", self.namespace) 162 | } 163 | 164 | /// Generates a SQL statement for creating all functions in the schema. 165 | pub fn functions(&self) -> String { 166 | format!("SELECT 167 | 'CREATE OR REPLACE FUNCTION ' || n.nspname || '.' || p.proname || '(' || pg_get_function_arguments(p.oid) || ') RETURNS ' 168 | || pg_get_function_result(p.oid) || ' AS $function_body$ ' || pg_get_functiondef(p.oid) || '$function_body$ LANGUAGE ' || l.lanname || ';' AS sql 169 | FROM 170 | pg_catalog.pg_proc p 171 | JOIN pg_catalog.pg_namespace n ON p.pronamespace = n.oid 172 | JOIN pg_catalog.pg_language l ON p.prolang = l.oid 173 | WHERE 174 | n.nspname = '{}' 175 | AND p.prokind = 'f';", self.namespace) 176 | } 177 | 178 | /// Generates a SQL statement for creating all triggers in the schema. 179 | pub fn triggers(&self) -> String { 180 | format!( 181 | "SELECT 182 | 'CREATE TRIGGER ' || t.tgname 183 | || ' ' || CASE 184 | WHEN t.tgtype & 2 > 0 THEN 'BEFORE' 185 | WHEN t.tgtype & 64 > 0 THEN 'INSTEAD OF' 186 | ELSE 'AFTER' 187 | END 188 | || ' ' || CASE 189 | WHEN t.tgtype & 4 > 0 THEN 'INSERT' 190 | WHEN t.tgtype & 8 > 0 THEN 'DELETE' 191 | WHEN t.tgtype & 16 > 0 THEN 'UPDATE' 192 | END 193 | || ' ON ' || n.nspname || '.' || c.relname 194 | || ' FOR EACH ' || CASE WHEN t.tgtype & 1 > 0 THEN 'ROW' ELSE 'STATEMENT' END 195 | || ' EXECUTE FUNCTION ' || np.nspname || '.' || p.proname || '();' AS sql 196 | FROM 197 | pg_catalog.pg_trigger t 198 | JOIN pg_catalog.pg_class c ON t.tgrelid = c.oid 199 | JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid 200 | JOIN pg_catalog.pg_proc p ON t.tgfoid = p.oid 201 | JOIN pg_catalog.pg_namespace np ON p.pronamespace = np.oid 202 | WHERE 203 | n.nspname = '{}' 204 | AND NOT t.tgisinternal;", 205 | self.namespace 206 | ) 207 | } 208 | 209 | /// Generates a SQL statement for creating all indexes in the schema. 210 | pub fn indexes(&self) -> String { 211 | format!("SELECT indexdef || ';' AS sql FROM pg_indexes WHERE schemaname = '{}' ORDER BY tablename, indexname;", self.namespace) 212 | } 213 | } 214 | 215 | #[cfg(feature = "db-postgres")] 216 | #[derive(sqlx::FromRow)] 217 | struct SchemaRet { 218 | sql: String, 219 | } 220 | 221 | #[cfg(feature = "db-postgres")] 222 | macro_rules! gen_fn { 223 | ($($name:ident),*) => { 224 | $( 225 | paste! { 226 | /// Async function that executes the `$name` query and fetches the generated SQL statements.
/// 228 | /// Example usage: 229 | /// ```ignore 230 | /// use db_schema::PgSchema; 231 | /// 232 | /// let schema = PgSchema::new("my_schema"); 233 | /// let pool = get_pg_pool(); // Function to get a connection pool 234 | /// let sqls = schema.[<get_ $name>](&pool).await.unwrap(); 235 | /// ``` 236 | pub async fn [<get_ $name>](&self, pool: &PgPool) -> Result<Vec<String>, sqlx::Error> { 237 | let sql = self.$name(); 238 | let ret: Vec<SchemaRet> = sqlx::query_as(&sql).fetch_all(pool).await?; 239 | Ok(ret.into_iter().map(|r| r.sql).collect()) 240 | } 241 | } 242 | )* 243 | }; 244 | } 245 | 246 | #[cfg(feature = "db-postgres")] 247 | impl PgSchema { 248 | gen_fn!(enums, types, tables, views, mviews, functions, triggers, indexes); 249 | } 250 | 251 | #[cfg(feature = "db-postgres")] 252 | #[cfg(test)] 253 | mod tests { 254 | use super::*; 255 | use anyhow::Result; 256 | use sqlx_db_tester::TestPg; 257 | 258 | #[tokio::test] 259 | async fn get_tables_should_work() -> Result<()> { 260 | let schema = PgSchema::new("gpt"); 261 | let tdb = TestPg::default(); 262 | let pool = tdb.get_pool().await; 263 | let items = schema.get_tables(&pool).await?; 264 | assert_eq!(items.len(), 4); 265 | assert_eq!( 266 | items[0], 267 | "CREATE TABLE gpt.comments (id integer NOT NULL PRIMARY KEY (id), post_id integer NOT NULL FOREIGN KEY (post_id), user_id integer NOT NULL FOREIGN KEY (user_id), content text NOT NULL, created_at timestamp with time zone NOT NULL, updated_at timestamp with time zone NOT NULL);" 268 | ); 269 | 270 | Ok(()) 271 | } 272 | 273 | #[tokio::test] 274 | async fn get_enums_should_work() -> Result<()> { 275 | let schema = PgSchema::new("gpt"); 276 | let tdb = TestPg::default(); 277 | let pool = tdb.get_pool().await; 278 | let items = schema.get_enums(&pool).await?; 279 | assert_eq!(items.len(), 2); 280 | assert_eq!( 281 | items[0], 282 | "CREATE TYPE gpt.login_method AS ENUM ('email', 'google', 'github');" 283 | ); 284 | 285 | Ok(()) 286 | } 287 | 288 | #[tokio::test] 289 | async fn get_types_should_work() -> Result<()> { 290 | let schema = PgSchema::new("gpt"); 291 | let tdb = TestPg::default(); 292 | let pool = tdb.get_pool().await; 293 | let items = schema.get_types(&pool).await?; 294 | assert_eq!(items.len(), 1); 295 | assert_eq!( 296 | items[0], 297 | "CREATE TYPE gpt.address AS (street character varying(255), city character varying(100), state character(2), postal_code character(5));" 298 | ); 299 | 300 | Ok(()) 301 | } 302 | 303 | #[tokio::test] 304 | async fn get_views_should_work() -> Result<()> { 305 | let schema = PgSchema::new("gpt"); 306 | let tdb = TestPg::default(); 307 | let pool = tdb.get_pool().await; 308 | let items = schema.get_views(&pool).await?; 309 | assert_eq!(items.len(), 1); 310 | assert_eq!( 311 | items[0], 312 | "CREATE VIEW gpt.posts_with_comments AS SELECT p.id,\n p.user_id,\n p.title,\n p.content,\n p.status,\n p.published_at,\n p.created_at,\n p.updated_at,\n json_agg(json_build_object('id', c.id, 'user_id', c.user_id, 'content', c.content, 'created_at', c.created_at, 'updated_at', c.updated_at)) AS comments\n FROM (gpt.posts p\n LEFT JOIN gpt.comments c ON ((c.post_id = p.id)))\n GROUP BY p.id;" 313 | ); 314 | 315 | Ok(()) 316 | } 317 | 318 | #[tokio::test] 319 | async fn get_mviews_should_work() -> Result<()> { 320 | let schema = PgSchema::new("gpt"); 321 | let tdb = TestPg::default(); 322 | let pool = tdb.get_pool().await; 323 | let items = schema.get_mviews(&pool).await?; 324 | assert_eq!(items.len(), 1); 325 | assert_eq!( 326 | items[0], 327 | "CREATE MATERIALIZED VIEW gpt.users_with_posts AS SELECT
u.id,\n u.username,\n u.email,\n u.first_name,\n u.last_name,\n u.created_at,\n u.updated_at,\n json_agg(json_build_object('id', p.id, 'title', p.title, 'content', p.content, 'status', p.status, 'published_at', p.published_at, 'created_at', p.created_at, 'updated_at', p.updated_at)) AS posts\n FROM (gpt.users u\n LEFT JOIN gpt.posts p ON ((p.user_id = u.id)))\n GROUP BY u.id;" 328 | ); 329 | 330 | Ok(()) 331 | } 332 | 333 | #[tokio::test] 334 | async fn get_functions_should_work() -> Result<()> { 335 | let schema = PgSchema::new("gpt"); 336 | let tdb = TestPg::default(); 337 | let pool = tdb.get_pool().await; 338 | let items = schema.get_functions(&pool).await?; 339 | assert_eq!(items.len(), 1); 340 | assert_eq!( 341 | items[0], 342 | "CREATE OR REPLACE FUNCTION gpt.refresh_users_with_posts() RETURNS trigger AS $function_body$ CREATE OR REPLACE FUNCTION gpt.refresh_users_with_posts()\n RETURNS trigger\n LANGUAGE plpgsql\nAS $function$\nBEGIN\n REFRESH MATERIALIZED VIEW gpt.users_with_posts;\n RETURN NULL;\nEND;\n$function$\n$function_body$ LANGUAGE plpgsql;" 343 | ); 344 | 345 | Ok(()) 346 | } 347 | 348 | #[tokio::test] 349 | async fn get_triggers_should_work() -> Result<()> { 350 | let schema = PgSchema::new("gpt"); 351 | let tdb = TestPg::default(); 352 | let pool = tdb.get_pool().await; 353 | let items = schema.get_triggers(&pool).await?; 354 | assert_eq!(items.len(), 1); 355 | assert_eq!( 356 | items[0], 357 | "CREATE TRIGGER refresh_users_with_posts AFTER INSERT ON gpt.posts FOR EACH STATEMENT EXECUTE FUNCTION gpt.refresh_users_with_posts();" 358 | ); 359 | 360 | Ok(()) 361 | } 362 | 363 | #[tokio::test] 364 | async fn get_indexes_should_work() -> Result<()> { 365 | let schema = PgSchema::new("gpt"); 366 | let tdb = TestPg::default(); 367 | let pool = tdb.get_pool().await; 368 | let items = schema.get_indexes(&pool).await?; 369 | assert_eq!(items.len(), 8); 370 | assert_eq!( 371 | items[0], 372 | "CREATE UNIQUE INDEX comments_pkey ON gpt.comments USING btree (id);" 373 | ); 374 | 375 | Ok(()) 376 | } 377 | } 378 | --------------------------------------------------------------------------------