├── .editorconfig ├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md ├── TROUBLESHOOTING.md ├── dbt_project.yml ├── docs ├── catalog.json ├── graph_summary.json ├── index.html ├── manifest.json ├── run_results.json └── semantic_manifest.json ├── generate_docs.sh ├── integration_tests ├── README.md ├── data │ └── tpc_h_seeds │ │ ├── customer.csv │ │ ├── lineitem.csv │ │ ├── orders.csv │ │ ├── part.csv │ │ ├── partsupp.csv │ │ ├── seeds.yml │ │ └── supplier.csv ├── dbt_project.yml ├── macros │ └── clone_table.sql ├── models │ ├── dim_customers.sql │ ├── dim_customers_view.sql │ ├── dim_duplicate_orders.sql │ ├── dim_missing_orders.sql │ ├── dim_orders.sql │ ├── dim_orders_null_keys.sql │ ├── dim_part.sql │ ├── dim_part_supplier.sql │ ├── dim_part_supplier_missing_con.sql │ ├── fact_order_line.sql │ ├── fact_order_line_longcol.sql │ ├── fact_order_line_missing_orders.sql │ ├── schema.yml │ └── sources.yml ├── packages.yml ├── profiles.yml └── tests │ └── singlular_test.sql └── macros ├── create_constraints.sql ├── default__test_constraints.sql ├── macros.yml ├── oracle__create_constraints.sql ├── postgres__create_constraints.sql ├── redshift__create_constraints.sql ├── snowflake__create_constraints.sql └── vertica__create_constraints.sql /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 4 6 | charset = utf-8 7 | trim_trailing_whitespace = true 8 | insert_final_newline = true 9 | 10 | [*.{yml,yaml}] 11 | indent_size = 2 12 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | *.sh -linguist-detectable 4 | *.sql linguist-detectable 5 | *.sql linguist-language=SQL 6 | docs/* linguist-documentation 7 | integration_tests/* linguist-documentation 8 
| -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | target/ 3 | dbt_packages/ 4 | logs/ 5 | .DS_Store 6 | .gitconfig 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dbt Constraints Package 2 | 3 | This package generates database constraints based on the tests in a dbt project. It is currently compatible with Snowflake, PostgreSQL, Oracle, Redshift, and Vertica only. 4 | 5 | ## How the dbt Constraints Package differs from dbt's Model Contracts feature 6 | 7 | This package focuses on automatically generating constraints based on the tests already in a user's dbt project. In most cases, merely adding the dbt Constraints package is all that is needed to generate constraints. dbt's recent [model contracts feature](https://docs.getdbt.com/docs/collaborate/govern/model-contracts) allows users to explicitly document constraints for models in yml. This package and the core feature are 100% compatible with one another and the dbt Constraints package will skip generating constraints already created by a model contract. 
However, the dbt Constraints package will also generate constraints for any tests that are not documented as model contracts. As described in the next section, dbt Constraints is also designed to provide join elimination on Snowflake. 8 | 9 | ## Why data engineers should add referential integrity constraints 10 | 11 | The primary reason to add constraints to your database tables is that many tools including [DBeaver](https://dbeaver.io) and [Oracle SQL Developer Data Modeler](https://community.snowflake.com/s/article/How-To-Customizing-Oracle-SQL-Developer-Data-Modeler-SDDM-to-Support-Snowflake-Variant) can correctly reverse-engineer data model diagrams if there are primary keys, unique keys, and foreign keys on tables. Most BI tools will also add joins automatically between tables when you import tables that have foreign keys. This can both save time and avoid mistakes. 12 | 13 | In addition, although Snowflake doesn't enforce most constraints, the [query optimizer can consider primary key, unique key, and foreign key constraints](https://docs.snowflake.com/en/sql-reference/constraints-properties.html?#extended-constraint-properties) during query rewrite if the constraint is set to RELY. Since dbt can test that the data in the table complies with the constraints, this package creates constraints on Snowflake with the RELY property to improve query performance. Some database query optimizers also consider not null constraints when building an execution plan. 14 | 15 | Many databases including [Snowflake](https://docs.snowflake.com/en/user-guide/join-elimination.html), PostgreSQL, Oracle, SQL Server, MySQL, and DB2 can use referential integrity constraints to perform "[Join Elimination](https://blog.jooq.org/join-elimination-an-essential-optimiser-feature-for-advanced-sql-usage/)" to remove tables from an execution plan. This commonly occurs when you query a subset of columns from a view and some of the tables in the view are unnecessary. 
In addition, on databases that do not support join elimination, some [BI and visualization tools will also rewrite their queries](https://docs.snowflake.com/en/user-guide/table-considerations.html#referential-integrity-constraints) based on constraint information, producing the same effect. 16 | 17 | Finally, although most columnar databases including Snowflake do not use or need indexes, most row-oriented databases including PostgreSQL and Oracle require indexes on their primary key columns in order to perform efficient joins between tables. A primary key or unique key constraint is typically enforced on databases using such indexes. Having dbt create the unique indexes automatically can slightly reduce the degree of performance tuning necessary for row-oriented databases. Row-oriented databases frequently also need indexes on foreign key columns but [that is something best added manually](https://docs.getdbt.com/reference/resource-configs/postgres-configs#indexes). 18 | 19 | ## Please note 20 | 21 | When you add this package, dbt will automatically begin to create __unique keys__ for all your existing `unique` and `dbt_utils.unique_combination_of_columns` tests, __foreign keys__ for existing `relationship` tests, and __not null constraints__ for `not_null` tests. The package also provides three new tests (`primary_key`, `unique_key`, and `foreign_key`) that are a bit more flexible than the standard dbt tests. These tests can be used inline, out-of-line, and can support multiple columns when used in the `tests:` section of a model. The `primary_key` test will also cause a not null constraint to be created on each column. 22 | 23 | ### Disabling automatic constraint generation 24 | 25 | The `dbt_constraints_enabled` variable can be set to `false` in your project to disable automatic constraint generation. By default dbt Constraints only creates constraints on models. To allow constraints on sources, you can set `dbt_constraints_sources_enabled` to `true`. 
The package will verify that you have sufficient database privileges to create constraints on sources. 26 | 27 | ```yml 28 | vars: 29 | # The package can be temporarily disabled using this variable 30 | dbt_constraints_enabled: true 31 | 32 | # The package can also add constraints on sources if you have sufficient privileges 33 | dbt_constraints_sources_enabled: false 34 | 35 | # You can also be specific on which constraints are enabled for sources 36 | # You must also enable dbt_constraints_sources_enabled above 37 | dbt_constraints_sources_pk_enabled: true 38 | dbt_constraints_sources_uk_enabled: true 39 | dbt_constraints_sources_fk_enabled: true 40 | dbt_constraints_sources_nn_enabled: true 41 | ``` 42 | 43 | ## Installation 44 | 45 | 1. Add this package to your `packages.yml` following [these instructions](https://docs.getdbt.com/docs/building-a-dbt-project/package-management/). Please check [this link for the latest released version](https://github.com/Snowflake-Labs/dbt_constraints/releases/latest). 46 | 47 | ```yml 48 | packages: 49 | - package: Snowflake-Labs/dbt_constraints 50 | version: [">=1.0.0", "<1.1.0"] 51 | # for the latest version tag. 52 | # You can also pull the latest changes from Github with the following: 53 | # - git: "https://github.com/Snowflake-Labs/dbt_constraints.git" 54 | # revision: main 55 | ``` 56 | 57 | 2. Run `dbt deps`. 58 | 59 | 3. Optionally add `primary_key`, `unique_key`, or `foreign_key` tests to your model like the following examples. 
60 | 61 | ```yml 62 | - name: DIM_ORDER_LINES 63 | columns: 64 | # Single column inline constraints 65 | - name: OL_PK 66 | tests: 67 | - dbt_constraints.primary_key 68 | - name: OL_UK 69 | tests: 70 | - dbt_constraints.unique_key 71 | - name: OL_CUSTKEY 72 | tests: 73 | - dbt_constraints.foreign_key: 74 | pk_table_name: ref('DIM_CUSTOMERS') 75 | pk_column_name: C_CUSTKEY 76 | tests: 77 | # Single column constraints 78 | - dbt_constraints.primary_key: 79 | column_name: OL_PK 80 | - dbt_constraints.unique_key: 81 | column_name: OL_ORDERKEY 82 | - dbt_constraints.foreign_key: 83 | fk_column_name: OL_CUSTKEY 84 | pk_table_name: ref('DIM_CUSTOMERS') 85 | pk_column_name: C_CUSTKEY 86 | # Multiple column constraints 87 | - dbt_constraints.primary_key: 88 | column_names: 89 | - OL_PK_COLUMN_1 90 | - OL_PK_COLUMN_2 91 | - dbt_constraints.unique_key: 92 | column_names: 93 | - OL_UK_COLUMN_1 94 | - OL_UK_COLUMN_2 95 | - dbt_constraints.foreign_key: 96 | fk_column_names: 97 | - OL_FK_COLUMN_1 98 | - OL_FK_COLUMN_2 99 | pk_table_name: ref('DIM_CUSTOMERS') 100 | pk_column_names: 101 | - C_PK_COLUMN_1 102 | - C_PK_COLUMN_2 103 | ``` 104 | 105 | ### Dependencies and Requirements 106 | 107 | * The package's macros depend on the results and graph object schemas of dbt >=1.0.0 108 | 109 | * The package currently only includes macros for creating constraints in Snowflake, PostgreSQL, Oracle, Redshift, and Vertica. To add support for other databases, it is necessary to implement the following eight macros with the appropriate DDL & SQL for your database. Pull requests to contribute support for other databases are welcome. See the __create_constraints.sql files as examples. 
110 | 111 | ```sql 112 | __create_primary_key(table_model, column_names, verify_permissions, quote_columns=false, constraint_name=none, lookup_cache=none) 113 | __create_unique_key(table_model, column_names, verify_permissions, quote_columns=false, constraint_name=none, lookup_cache=none) 114 | __create_foreign_key(pk_model, pk_column_names, fk_model, fk_column_names, verify_permissions, quote_columns=false, constraint_name=none, lookup_cache=none) 115 | __create_not_null(pk_model, pk_column_names, fk_model, fk_column_names, verify_permissions, quote_columns=false, lookup_cache=none) 116 | __unique_constraint_exists(table_relation, column_names, lookup_cache=none) 117 | __foreign_key_exists(table_relation, column_names, lookup_cache=none) 118 | __have_references_priv(table_relation, verify_permissions, lookup_cache=none) 119 | __have_ownership_priv(table_relation, verify_permissions, lookup_cache=none) 120 | ``` 121 | 122 | ## RELY and NORELY Properties 123 | 124 | Version 1.0.0 introduces the ability to create constraints with the RELY and NORELY properties on Snowflake. Executed tests with zero failures are created with the `RELY` property. Tests with any failures will generate `NORELY` constraints and constraints will be altered to `RELY` or `NORELY` based on subsequent executions of the test. When the `always_create_constraint` feature is enabled, it is now also possible to create `NORELY` constraints using `dbt run` and then have those constraints become RELY constraints using `dbt test`. 125 | 126 | ## Determining the Constraints to Generate 127 | 128 | Version 1.0.0 introduces a more advanced set of criteria for selecting tests to turn into constraints. 
129 | 130 | * The test must be one of the following: `primary_key`, `unique_key`, `unique_combination_of_columns`, `unique`, `foreign_key`, `relationships`, or `not_null` 131 | * The test executed and had zero failures (RELY) or the database has support for NORELY constraints 132 | * The test executed (RELY/NORELY), we need the primary/unique key constraint for a foreign key, or we have the `always_create_constraint` parameter turned on. 133 | * If you are using `build`, `run`, or `test` for only part of a project using the `--select` parameter, either the test or its model was selected to run, or the test is a primary/unique key that is needed for a selected foreign key. If a primary/unique key is created for a foreign key, and its test was not executed, the primary/unique key will be created as a NORELY constraint. 134 | * All models involved in a constraint must **not** be a view or ephemeral materialization. Version 1.0.0 now allows custom materializations. 135 | * If source constraints are enabled, the source must be a table. You must also have the `OWNERSHIP` table privilege to add a constraint. For foreign keys you also need the `REFERENCES` privilege on the parent table with the primary or unique key. The package will identify when you lack these privileges on Snowflake and PostgreSQL. Oracle does not provide an easy way to look up your effective privileges so it has an exception handler and will display Oracle's error messages. 136 | * All columns on constraints must be individual column names, not expressions. You can reference columns on a model that come from an expression. 137 | * `primary_key`, `unique_key`, and `foreign_key` tests are considered first and duplicate constraints are skipped. One exception is that you will get an error if you add two different `primary_key` tests to the same model. 138 | * Foreign keys require that the parent table have a primary key or unique key on the referenced columns. 
Unique keys generated from standard `unique` tests are sufficient. 139 | * The order of columns on a foreign key test must match between the FK columns and PK columns 140 | * Referential constraints must apply to all the rows in a table so any tests with a `config: where:`, `config: warn_if:`, or `config: fail_calc:` property will be set as `NORELY` when creating constraints. 141 | 142 | Additional notes: 143 | * The `foreign_key` test will ignore any rows with a null column, even if only one of two columns in a compound key is null. If you also want to ensure FK columns are not null, you should add standard `not_null` tests to your model which will add not null constraints to the table. 144 | * You may need to manually drop a primary key constraint from a table if you change the columns in the constraint. This is not necessary for table materializations or if you do a full-refresh of an incremental model. 145 | 146 | ## Advanced: `always_create_constraint` Property 147 | 148 | There is an advanced option for Snowflake users to force a constraint to be generated even when the test was not executed. When this setting is in effect, constraints on Snowflake will have the `NORELY` property until the associated test is executed with zero failures. Snowflake does not support `NORELY` for not null constraints so those constraints will still be skipped. You activate this feature in your dbt_project.yml under the `models:` or `tests:` sections. You can set it to be true for your entire project or you can specify specific folders that should use this feature. You can also set this in a specific model's header. 149 | 150 | __[Caveat Emptor](https://en.wikipedia.org/wiki/Caveat_emptor):__ 151 | 152 | * You will get an error if you try to force constraints to be generated that are enforced by your database. On Snowflake that is only a not_null constraint but on databases like Oracle, all the generated constraints are enforced. 
This is why, at present, only the Snowflake macros implement this feature. 153 | * This feature can still cause unexpected query results on Snowflake due to [join elimination](https://docs.snowflake.com/en/user-guide/join-elimination). Although executing tests on Snowflake will correctly set the `RELY` or `NORELY` property based on whether the tests pass and fail, activating this feature and **skipping the execution of tests** will not cause a `RELY` constraint to become a `NORELY` constraint. A `RELY` constraint only becomes a `NORELY` constraint **if a test is executed** and has failures. If you create a `RELY` constraint by running `dbt build` and subsequently only execute `dbt run` without eventually following up with `dbt test`, you could have constraints that still have the `RELY` property but now have referential integrity issues. Snowflake users are encouraged to frequently or always execute their tests so that the `RELY` property is kept up to date. 154 | 155 | These are examples from a dbt_project.yml using the feature in models or tests: 156 | 157 | ```yml 158 | models: 159 | your_project_name: 160 | +always_create_constraint: true 161 | tests: 162 | your_project_name: 163 | +always_create_constraint: true 164 | ``` 165 | 166 | This is an example from a model schema.yml using the feature. Setting the property in the `config:` section of a test does not work so you should set it in the model's `config:` section. 167 | 168 | ```yml 169 | version: 2 170 | 171 | models: 172 | - name: your_model_name 173 | config: 174 | always_create_constraint: true 175 | ``` 176 | 177 | This is an example of activating the feature in the header of a model: 178 | ```jinja 179 | {{ config(always_create_constraint = true) }} 180 | ``` 181 | 182 | ## Primary Maintainers 183 | 184 | * Dan Flippo ([@sfc-gh-dflippo](https://github.com/sfc-gh-dflippo)) 185 | 186 | This is a community-developed package, not an official Snowflake offering. It comes with no support or warranty. 
However, feel free to raise a github issue if you find a bug or would like a new feature. 187 | 188 | ## Legal 189 | 190 | Licensed under the Apache License, Version 2.0 (the "License"); you may not use this package except in compliance with the License. You may obtain a copy of the License at: [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) 191 | 192 | Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 193 | -------------------------------------------------------------------------------- /TROUBLESHOOTING.md: -------------------------------------------------------------------------------- 1 | # Troubleshooting dbt Constraints 2 | 3 | As it executes, dbt Constraints will log a number of messages when it cannot create a constraint. 4 | A list of the messages is provided below with additional details on how to address the message. 5 | 6 | ## Messages that are displayed on the front-end of the command line interface and in the dbt log 7 | 8 | ``` 9 | Skipping primary/unique key because a physical column name was not found on the table: {TABLE NAME} {TABLE COLUMNS} 10 | Skipping foreign key because a physical column was not found on the pk table: {PK TABLE NAME} {PK TABLE COLUMNS} 11 | Skipping foreign key because a physical column was not found on the fk table: {FK TABLE NAME} {FK TABLE COLUMNS} 12 | ``` 13 | - These error messages typically occur when a column is misspelled or if the test uses an expression instead of a column name. 14 | - One solution can be adding the expression as an additional column in your model so that you can reference it in your constraint. 
15 | 16 | 17 | ``` 18 | "Skipping foreign key because a we couldn't find the child table: model={FK TABLE NAME} or source={PK TABLE NAME} 19 | ``` 20 | - This is only expected to occur when a foreign key constraint is made with a source and dbt Constraints can't parse the reference to a source table. 21 | - The package is looking for something that looks like: `source("source_name", "table_name")` or `source('source_name', 'table_name')` 22 | - You may need to replace any dynamic variables with strings for the source name or table name. 23 | 24 | 25 | ``` 26 | Skipping {CONSTRAINT NAME} because of insufficient privileges: {FK TABLE NAME} referencing {PK TABLE NAME} 27 | Skipping {CONSTRAINT NAME} because of insufficient privileges: {TABLE NAME} 28 | ``` 29 | - You must have OWNERSHIP on the child FK table and you must have REFERENCES on the parent PK table. 30 | - For primary keys and unique keys, you need ownership on the table. 31 | - These errors most frequently apply to sources. 32 | - This can also indicate that one of your tables is actually a view 33 | 34 | 35 | ``` 36 | Skipping {CONSTRAINT NAME} because a PK/UK was not found on the PK table: {PK TABLE NAME} {PK TABLE COLUMNS} 37 | ``` 38 | - You either need to manually create a primary key/unique key or you need to add a test to the parent table and allow the package to create the constraint. 39 | - The package creates constraints in the order of primary keys, unique keys, foreign keys to allow parent constraints to be referenced by foreign keys. 
40 | 41 | 42 | ## Messages that are only displayed in the dbt log: 43 | 44 | ``` 45 | Skipping {CONSTRAINT NAME} because PK/UK already exists: {TABLE NAME} {TABLE COLUMNS} 46 | Skipping {CONSTRAINT NAME} because FK already exists: {FK TABLE NAME} {FK TABLE COLUMNS} 47 | ``` 48 | - Indicates duplicate constraints or that a constraint was already added to an incremental / snapshot table on a previous run 49 | - Typically not an issue 50 | -------------------------------------------------------------------------------- /dbt_project.yml: -------------------------------------------------------------------------------- 1 | 2 | name: 'dbt_constraints' 3 | version: '1.0.4' 4 | config-version: 2 5 | 6 | # These macros depend on the results and graph objects in dbt >=0.19.0 7 | # and have been tested with dbt >=1.0.0 8 | # v1.0.3 adds usage of the attached_node attribute added in dbt 1.5.x 9 | require-dbt-version: ">=1.5.0" 10 | 11 | profile: 'dbt_constraints' 12 | 13 | # All tests and macros in this package are found in the macro path 14 | macro-paths: ["macros"] 15 | clean-targets: # directories to be removed by `dbt clean` 16 | - "logs" 17 | - "target" 18 | - "dbt_packages" 19 | 20 | # This package adds an automatic hook to create constraints 21 | on-run-end: 22 | - "{{ dbt_constraints.create_constraints() }}" 23 | 24 | # Global variables 25 | vars: 26 | # The package can be temporarily disabled using this variable 27 | dbt_constraints_enabled: true 28 | 29 | # The package can also add constraints on sources if you have sufficient privileges 30 | dbt_constraints_sources_enabled: false 31 | 32 | # You can also be specific on which constraints are enabled for sources 33 | # You must also enable dbt_constraints_sources_enabled above 34 | dbt_constraints_sources_pk_enabled: true 35 | dbt_constraints_sources_uk_enabled: true 36 | dbt_constraints_sources_fk_enabled: true 37 | dbt_constraints_sources_nn_enabled: true 38 |
-------------------------------------------------------------------------------- /docs/catalog.json: -------------------------------------------------------------------------------- 1 | {"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.8.5", "generated_at": "2024-08-19T18:34:12.783856Z", "invocation_id": "6248ccf8-48d0-470c-b010-2db8eb51c482", "env": {}}, "nodes": {}, "sources": {}, "errors": null} -------------------------------------------------------------------------------- /docs/graph_summary.json: -------------------------------------------------------------------------------- 1 | {"_invocation_id": "6248ccf8-48d0-470c-b010-2db8eb51c482", "linked": {"0": {"name": "operation.dbt_constraints.dbt_constraints-on-run-end-0", "type": "operation"}}} -------------------------------------------------------------------------------- /docs/run_results.json: -------------------------------------------------------------------------------- 1 | {"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v6.json", "dbt_version": "1.8.5", "generated_at": "2024-08-19T18:34:12.756239Z", "invocation_id": "6248ccf8-48d0-470c-b010-2db8eb51c482", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-19T18:34:12.748983Z", "completed_at": "2024-08-19T18:34:12.754902Z"}, {"name": "execute", "started_at": "2024-08-19T18:34:12.755039Z", "completed_at": "2024-08-19T18:34:12.755046Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.006446123123168945, "adapter_response": {}, "message": null, "failures": null, "unique_id": "operation.dbt_constraints.dbt_constraints-on-run-end-0", "compiled": true, "compiled_code": "", "relation_name": null}], "elapsed_time": 0.00990605354309082, "args": {"show_resource_report": false, "log_level": "info", "populate_cache": true, "warn_error_options": {"include": [], "exclude": []}, "version_check": true, "use_colors_file": true, 
"exclude": [], "quiet": false, "printer_width": 80, "defer": false, "source_freshness_run_project_hooks": false, "write_json": true, "require_explicit_package_overrides_for_builtin_materializations": true, "macro_debugging": false, "log_format_file": "debug", "profiles_dir": "/Users/dflippo/.dbt", "static_parser": true, "invocation_command": "dbt docs generate", "strict_mode": false, "log_path": "/Users/dflippo/Documents/GitHub/Snowflake-Labs/dbt_constraints/logs", "partial_parse": true, "project_dir": "/Users/dflippo/Documents/GitHub/Snowflake-Labs/dbt_constraints", "enable_legacy_logger": false, "send_anonymous_usage_stats": true, "print": true, "empty_catalog": false, "indirect_selection": "eager", "log_level_file": "debug", "vars": {}, "log_file_max_bytes": 10485760, "log_format": "default", "favor_state": false, "introspect": true, "use_colors": true, "partial_parse_file_diff": true, "require_resource_names_without_spaces": false, "compile": true, "cache_selected_only": false, "select": [], "static": false, "which": "generate"}} -------------------------------------------------------------------------------- /docs/semantic_manifest.json: -------------------------------------------------------------------------------- 1 | {"semantic_models": [], "metrics": [], "project_configuration": {"time_spine_table_configurations": [], "metadata": null, "dsi_package_version": {"major_version": "0", "minor_version": "5", "patch_version": "1"}}, "saved_queries": []} -------------------------------------------------------------------------------- /generate_docs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | dbt clean 4 | dbt deps 5 | dbt compile 6 | dbt docs generate 7 | cp ./target/*.json ./docs/ 8 | cp ./target/*.html ./docs/ 9 | -------------------------------------------------------------------------------- /integration_tests/README.md: 
-------------------------------------------------------------------------------- 1 | # dbt Constraints Integration Tests 2 | This set of models and associated tests is designed to test all the supported tests as well as most unsupported scenarios. 3 | 4 | ## Environment 5 | A set of TPC-H test data has been included as seeds to test the validity of primary keys, unique keys, and foreign keys. 6 | 7 | ## Running the tests 8 | 1. Set up a `dbt_constraints` profile in your ~/.dbt/profiles.yml to a Snowflake or PostgreSQL schema you can create tables and views in. 9 | 2. Execute `dbt seed` 10 | 3. Execute `dbt build` 11 | 12 | A successful execution of the project should result in the following messages about constraints being created: 13 | ``` 14 | Creating primary key: fact_order_line_missing_orders_l_linenumber_l_orderkey_PK 15 | Creating primary key: dim_customers_c_custkey_PK 16 | Creating primary key: dim_part_p_partkey_PK 17 | Creating primary key: fact_order_line_l_linenumber_l_orderkey_PK 18 | Creating unique key: dim_customers_c_custkey_HASH_UK 19 | Creating unique key: dim_customers_c_custkey_seq_UK 20 | Creating unique key: dim_part_p_partkey_HASH_UK 21 | Creating unique key: dim_part_p_partkey_seq_UK 22 | Creating unique key: dim_part_supplier_ps_partkey_ps_suppkey_UK 23 | Creating unique key: dim_orders_null_keys_o_orderkey_HASH_UK 24 | Creating unique key: fact_order_line_missing_orders_integration_id_UK 25 | Creating unique key: dim_orders_o_orderkey_UK 26 | Creating unique key: fact_order_line_integration_id_UK 27 | Creating foreign key: dim_orders_o_custkey_FK referencing dim_customers ['c_custkey'] 28 | Skipping fact_order_line_missing_orders_l_partkey_l_suppkey_FK because a PK/UK was not found on the PK table: "DFLIPPO_DEV"."DBT_DEMO"."dim_part_supplier_missing_con" ['ps_partkey', 'ps_suppkey'] 29 | Creating foreign key: fact_order_line_l_partkey_l_suppkey_FK referencing dim_part_supplier ['ps_partkey', 'ps_suppkey'] 30 | Creating foreign key: 
dim_orders_null_keys_o_custkey_FK referencing dim_customers ['c_custkey'] 31 | Creating foreign key: fact_order_line_l_orderkey_FK referencing dim_orders ['o_orderkey'] 32 | ``` 33 | 34 | Also, 4 errors should be reported by models that have been designed to test failures with messages like the following: 35 | ``` 36 | Completed with 4 warnings: 37 | Warning in test dbt_constraints_unique_key_dim_duplicate_orders_o_orderkey_HASH (models/schema.yml) 38 | Got 938 results, configured to warn if != 0 39 | Warning in test dbt_constraints_primary_key_dim_duplicate_orders_o_orderkey (models/schema.yml) 40 | Got 938 results, configured to warn if != 0 41 | Warning in test dbt_constraints_foreign_key_fact_order_line_missing_orders_l_orderkey__o_orderkey__ref_dim_missing_orders_ (models/schema.yml) 42 | Got 484 results, configured to warn if != 0 43 | Warning in test dbt_constraints_primary_key_dim_orders_null_keys_o_orderkey (models/schema.yml) 44 | Got 1 result, configured to warn if != 0 45 | ``` 46 | -------------------------------------------------------------------------------- /integration_tests/data/tpc_h_seeds/customer.csv: -------------------------------------------------------------------------------- 1 | c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment 2 | 309,"Customer#000000309","6Jg4ECVS2u7i,E",21,"31-231-377-9535",8824.78,"FURNITURE","lyly. furiously enticing instructions haggle. carefull" 3 | 344,"Customer#000000344","Zasc8,E0VVY",2,"12-810-788-6699",-544.95,"FURNITURE","le according to the regular instruction" 4 | 434,"Customer#000000434","6LGAf2hv4MB5MJhfvNsg",3,"13-325-443-1474",2940.46,"MACHINERY","lly final Tiresias. blithely regular ideas nag stealthily about the furiously " 5 | 557,"Customer#000000557","Nt6FUuDR7v",15,"25-390-153-6699",9559.04,"BUILDING","furiously pending dolphins use. carefully unusual ideas must have to are carefully. 
express instructions a" 6 | 702,"Customer#000000702","BDKtDAva8rBuCWXT6jXb2JJY7YoRS",3,"13-549-296-5659",4782.05,"FURNITURE"," silent accounts. regular, regul" 7 | 938,"Customer#000000938","wrq9S3rEW8zXUVCXpa7uKi",12,"22-157-321-7590",2584.52,"BUILDING"," the quickly special accounts are regular patt" 8 | 1230,"Customer#000001230","Pr7yxcRne6NiloD1oR,d28rwVFRnOoTWeYq9",23,"33-786-129-3407",4787.85,"MACHINERY","ackages cajole furiously quickly pending packages. ironic foxes" 9 | 1440,"Customer#000001440","k3LXBO5QJrG94TBG77adB1HjqQkleDyUf2c",7,"17-619-730-9883",1236.36,"BUILDING","xpress, even accounts integrate. ironic, special requests doze. carefully express instructions doze furio" 10 | 1460,"Customer#000001460","AEgBZGLmuMqe7Gqh1",20,"30-151-388-7118",9680.51,"BUILDING","accounts. ironic packages cajole furiously; quickly pending requests lose quickly carefully bold deposits. " 11 | 1509,"Customer#000001509","LQY2i,MHY8czRV2Ize",9,"19-226-262-5083",328.44,"BUILDING","tipliers serve quickly furiously express excuses. furiously unusual deposits slee" 12 | 1553,"Customer#000001553","zS2t71h5ssFkRFiB4EvNtWPqjexC1FaO1MeNutf",1,"11-879-323-7032",5853.10,"AUTOMOBILE","he slyly unusual packages cajole slyly ab" 13 | 1784,"Customer#000001784","Zs8QpbcHZfcVJ6oujM8g69J",15,"25-605-903-3007",5458.37,"BUILDING","n frays. blithely ironic theodolites haggle carefully. blithel" 14 | 2219,"Customer#000002219","YCA0PAGuo6mb4x6v8",9,"19-180-438-6369",4949.15,"BUILDING","usly regular theodolites cajole carefully requests. specia" 15 | 2686,"Customer#000002686","swYLHCMgDINfhdDnv4HP",0,"10-248-197-2553",1711.00,"BUILDING","ular foxes use carefully carefully careful ideas: theodolit" 16 | 2871,"Customer#000002871"," OvEjEkqvRv7yUNJ,oJyG92",15,"25-239-287-1986",-937.80,"MACHINERY"," quickly: even, unusual deposits above the even, even packages cajo" 17 | 2932,"Customer#000002932","rqQebdXGH CZb7jHnAq",2,"12-618-105-2545",2411.88,"MACHINERY","nstructions wake. 
carefully regular accounts after the furiously regular ideas haggle carefully after the " 18 | 3534,"Customer#000003534","APJYwAtJiOb",14,"24-592-878-9397",7506.30,"MACHINERY","ove the quickly regular requests wake slyly silent, ironic deposits. slyly ironic t" 19 | 3756,"Customer#000003756","ZJ4xpQc9yXcG7",9,"19-442-197-7340",-584.62,"MACHINERY","rding to the carefully even accoun" 20 | 3759,"Customer#000003759","xeB84,YEJIV5kfSoLqAmFpkl",14,"24-512-945-7377",3813.94,"FURNITURE","g requests. carefully regular escapades cajole blithely alongside of the blithely pending requests. blith" 21 | 4464,"Customer#000004464","VtXsGUXebZHlm3Tx5pvSn8QPk9PyHYtvD",24,"34-471-291-3741",5282.83,"FURNITURE","bold accounts. blithely slow deposits grow again" 22 | 4589,"Customer#000004589","vauu3Jmh3SGS 2X6Qnw8w5PhI6Lf5xfT",7,"17-126-750-1273",1464.72,"BUILDING","al accounts haggle quickly against t" 23 | 4702,"Customer#000004702"," 9,aQrOAdoQ",16,"26-497-211-6072",7887.48,"AUTOMOBILE","ecial accounts use around the regular, final instructions. slyly even" 24 | 4807,"Customer#000004807","BwJ5vmkJTo",10,"20-630-551-4752",7493.01,"HOUSEHOLD"," pinto beans sleep carefully even packages. quickly express ideas nag furiously according to the car" 25 | 4931,"Customer#000004931","NvsrTXOtoKvKMX1iODF,jWN0m9xJYF2kZ",6,"16-799-824-2703",5126.04,"FURNITURE","ing to the slyly unusual decoys. furiously final pinto beans sleep blithely final frays. express, specia" 26 | 5443,"Customer#000005443","sMR2JfbLYKYGjR1ntSGsAVYL3GbujLIA8r9",17,"27-449-986-8978",464.28,"BUILDING","nding requests. carefully regular deposits ought to haggle idl" 27 | 5697,"Customer#000005697","6UGZlt0tT0cZv9E8f1GY7UFhHwMTmkZLd",6,"16-361-163-6576",2018.75,"HOUSEHOLD"," blithely final requests. silent, final pinto beans acros" 28 | 6093,"Customer#000006093","BOuFVeQZ,E",8,"18-806-815-4057",9343.42,"AUTOMOBILE","gedly regular pinto beans. express pinto beans nag slyly. 
regular ideas wake thi" 29 | 6104,"Customer#000006104","jcKuAbr5WFuVxd2xt,4KqyvQ7kz",15,"25-250-924-1873",4010.86,"MACHINERY"," final packages sleep above the slyly bold" 30 | 6166,"Customer#000006166","df2AQz9BJgmAj XoIOiyoUd",14,"24-891-533-8945",1444.98,"HOUSEHOLD","ng packages affix even, final pi" 31 | 6444,"Customer#000006444","7wOibQuMiSsum0zAfieMfvToucnh",15,"25-268-268-9763",6469.44,"HOUSEHOLD","sits use among the fluffily even foxes. furiously regular dependenc" 32 | 6491,"Customer#000006491","J3,jSVfCkxq91fHjycRrqx1mQ,sGEFBT7iCtgu",6,"16-977-273-4547",5239.92,"AUTOMOBILE","al foxes sleep fluffily along the" 33 | 6752,"Customer#000006752","80HtfjhjeYU",17,"27-487-194-4578",1487.49,"AUTOMOBILE","regular accounts. pinto beans sleep express foxes. special instructions wake. quickl" 34 | 7072,"Customer#000007072","bjG qDtPBl",2,"12-680-261-1512",-808.33,"MACHINERY","nusual sentiments wake furiously regular pinto beans. quickly unusual accounts according to" 35 | 7136,"Customer#000007136",",6v6tw,vjitbpRZ1CceZGLnh2hFFRv7hZXczs",22,"32-611-951-7924",6621.96,"AUTOMOBILE"," carefully pending pinto beans use slyly silent theodolites." 36 | 7262,"Customer#000007262","Ux6wzi5Ii38V1Y 6g40ng0rsi,8rSANyjTY",13,"23-518-394-7541",3105.57,"FURNITURE","atelets are blithely quickly bold foxes. fluffily ironic dependencies after th" 37 | 7325,"Customer#000007325","UXaIo8cWmZd2GhR,",2,"12-297-366-7293",4938.76,"BUILDING","asymptotes. fluffily silent instruct" 38 | 7326,"Customer#000007326","D4MS1W OSryPx9sb3EgatfZbo0Sa6L5",8,"18-110-403-5989",8762.35,"HOUSEHOLD","ptotes sleep carefully. blithely ev" 39 | 7351,"Customer#000007351","MsxxHCs6nn6K,tOFOAca",20,"30-632-744-2601",9100.21,"MACHINERY","ts after the packages unwind within the carefully unusual requests. 
thinly reg" 40 | 7468,"Customer#000007468","WQvCnP26r5eF8gebp3iK",19,"29-472-187-8744",4484.55,"MACHINERY","iously unusual dependencies sleep furiousl" 41 | 7737,"Customer#000007737","y1qx2j64UiK7ez7Q",20,"30-860-713-7460",8587.65,"BUILDING","ar patterns cajole fluffily about the " 42 | 7816,"Customer#000007816","Rmbwyp2r6LDigBB",18,"28-502-487-1552",4846.14,"HOUSEHOLD","hely pending requests. blithely bold sentiments a" 43 | 8024,"Customer#000008024","aCpv,4NUZeAg6TDPMDOzzhJpdym07uUMpI1XK",13,"23-183-376-7621",7195.96,"BUILDING"," use slyly blithely unusual pinto " 44 | 8035,"Customer#000008035","lUMu8B421btF27XItqct",2,"12-958-729-3024",3340.35,"AUTOMOBILE","even packages. carefully final instructions around the express deposits sleep " 45 | 8124,"Customer#000008124","zSCfdVsDJ1i0W",12,"22-537-136-6734",6877.44,"AUTOMOBILE"," boost quickly pending requests. slyly bold requests sleep carefully final pinto beans" 46 | 8553,"Customer#000008553","84cv0hbbpZ5qW55UqUOnis3HYfRSWWwh",14,"24-165-142-8209",1440.09,"BUILDING"," tithes cajole carefully about the furiously express pinto beans. deposits haggle along the special excuses." 47 | 8558,"Customer#000008558","dTVh46wUXYu3rw3TWCwSqqqnoygbpHwDxr7t",3,"13-445-798-5258",5257.28,"BUILDING","inal requests can boost carefully slyly even accounts. quickly final instructions" 48 | 8663,"Customer#000008663","FfLrrigsqljcPMiPP71onJ",12,"22-379-841-6296",4905.03,"BUILDING","kages-- waters cajole. fluffily ironic dependenci" 49 | 8774,"Customer#000008774","J1i6bdCYCua,Z",11,"21-281-662-2959",8816.66,"HOUSEHOLD"," packages wake furiously according to the pinto beans. regular platelets a" 50 | 8864,"Customer#000008864","SPurzCp8X5mmU1NLeqWfDXlAVhxzB a,OU84F",12,"22-424-266-4789",9842.21,"MACHINERY"," unwind slyly along the sheaves. ironic, ironic plat" 51 | 8907,"Customer#000008907","RMKewKq9A0yF9crdWZYjB",15,"25-805-119-8822",5617.33,"HOUSEHOLD","ar packages. unusual accounts shall sleep carefully. 
special, unusual excuses b" 52 | 8974,"Customer#000008974","DjDVodtTJc",9,"19-506-351-6742",-236.86,"FURNITURE"," around the silently unusual foxes. ruthless foxes haggle slyly. carefully express courts are carefully slyly " 53 | 9297,"Customer#000009297","hAgfMNMCbu",9,"19-737-733-5909",4099.45,"HOUSEHOLD","r pinto beans affix blithely across the dependencies. excuses snooze idly carefully pending theo" 54 | 9362,"Customer#000009362","c088haKCIHf0eu0wtrK,,Yvk0tAan7bGIs",14,"24-216-725-2882",8240.04,"AUTOMOBILE","around the ideas sleep slyly special, regular asymptotes. ironic dolphins haggle" 55 | 9553,"Customer#000009553","rFQIBB mCf",20,"30-136-567-5068",7636.81,"HOUSEHOLD","y even packages boost slyly carefully special deposits. furiously ironic foxes " 56 | 9919,"Customer#000009919","xGgcWYphLgXYNFPl0CJiDF35eF4G5iqjiNfX",22,"32-418-583-9198",8541.10,"HOUSEHOLD","onic ideas sleep fluffily blithely silent deposits. blit" 57 | 10389,"Customer#000010389",",YPJmrjZnzKst",11,"21-312-257-7638",9580.25,"AUTOMOBILE"," accounts wake carefully near t" 58 | 10935,"Customer#000010935","xh6,8XXt8N8kIMdymXNTYG07akJi5SBL",12,"22-685-815-4771",3907.57,"MACHINERY","e the daringly ironic accounts. slyly fina" 59 | 10993,"Customer#000010993","B9LOpPbBEAlbAFeBi09gGjfGHhmP0f y,NvA",20,"30-667-927-5332",5274.30,"HOUSEHOLD","ely final pinto beans use after the regular deposits. dolphins believe fluffily. blith" 60 | 11000,"Customer#000011000","y zS8kDauQj3uU42RK2gyxiy1AC",14,"24-327-101-9961",3504.34,"BUILDING"," blithely final accounts shall nag" 61 | 11497,"Customer#000011497","5FPoFqnjHsq BhRVjyC8rKl",12,"22-966-587-1201",2398.29,"HOUSEHOLD","sly after the regular requests-- asymptotes use quickly furiously silent requests. pinto beans haggle slyly" 62 | 11501,"Customer#000011501"," Y,K,8QY5mVM8LTb ChE6mbIhovjZJIP",16,"26-139-910-9281",8948.49,"BUILDING","ng instructions. 
quickly regular mult" 63 | 11657,"Customer#000011657","rbwxxXwm9AbNwSVTol28kCQZsmIAMUEvBA",20,"30-154-153-3612",1427.94,"HOUSEHOLD","e foxes. carefully bold dolphins play blithely express packages. even forges haggle. fluffily silent id" 64 | 11746,"Customer#000011746","5Sae3n546ZPjpqF6dvHUKlCRhG7UktM",2,"12-533-662-3156",9573.90,"AUTOMOBILE"," never regular excuses detect carefully. blithely final foxes sublate. carefully r" 65 | 11829,"Customer#000011829","pMccUheZIN8SjK66tI8NeRNodIJVgFyWWJ0Wrc",19,"29-462-962-8972",245.68,"MACHINERY"," thin platelets. unusual asymptotes mold carefully f" 66 | 11861,"Customer#000011861","Vp0CVj3jrp9w3DfgTGK",19,"29-515-186-8796",8818.04,"FURNITURE"," express requests. carefully regular deposits nag. regular, final hockey players above the qui" 67 | 12021,"Customer#000012021","mzLU71LdYg6RVAWDoj0zBM y xRoCOI",14,"24-808-154-7686",2179.09,"BUILDING","s. unusual deposits are carefully. even, even foxes cajole quickly after the slyly silent foxes." 68 | 12036,"Customer#000012036","NSxcYryYiAoBnum9gr7",2,"12-284-892-8505",7433.26,"FURNITURE","uriously ironic pinto beans. ironic foxes impress. quiet packages use along the carefully ironic deposits. silen" 69 | 12135,"Customer#000012135","MNzoRQaBvGfV wRoyRX QhVlDM4vWhm1rtswKXyN",24,"34-663-737-8414",8259.49,"AUTOMOBILE","ial epitaphs cajole slyly. special deposits sleep s" 70 | 12335,"Customer#000012335","NPhzhcnVt72pr90ep8TGg2m xlU",13,"23-299-451-6323",7256.23,"HOUSEHOLD","theodolites. regular requests since the ironic ideas integrate carefully alongside of the bold asymp" 71 | 12386,"Customer#000012386","nPgFQwGzBIMVO",23,"33-520-498-8052",9233.76,"MACHINERY","r asymptotes boost among the regular, special dependencies. furiously express requ" 72 | 12567,"Customer#000012567","EOfUAsu1Bs8,sd5hl3SfC",8,"18-986-887-2424",9873.38,"HOUSEHOLD","ncies. regular, regular asymptotes sleep slyly. 
slyly regular packages wake regularly " 73 | 12621,"Customer#000012621","eZZ2nxV3CgUeVP1c",16,"26-119-952-2722",9396.13,"MACHINERY","iously bold requests. quickly furious requests are carefully furiousl" 74 | 12998,"Customer#000012998","E9h12yGRZquuu9LMSiy85OL9BJINIdT",8,"18-197-233-3807",4366.97,"MACHINERY","ding requests after the slyly regular id" 75 | 13146,"Customer#000013146","SRPw QEKoDzE",20,"30-238-761-4130",6191.07,"AUTOMOBILE","its until the unusual, ironic dependencies cajole" 76 | 13441,"Customer#000013441","btvkpn gk,",2,"12-768-309-2246",-274.62,"FURNITURE","ithin the furiously ironic ideas detect blithely around the furiously" 77 | 13499,"Customer#000013499","XXSnB0XyB70IRwrRcwYgqobj7xvPhYyd0LiUZv7X",17,"27-472-606-5057",4652.73,"MACHINERY","carefully pending packages. slow accounts engage sl" 78 | 13517,"Customer#000013517","soc0gBZzOyhM9IisaCwfAr cYOb6OKRs3tjxd",24,"34-760-264-7033",595.54,"BUILDING","thlessly silent packages must haggle quickly regular foxes. regular, ironi" 79 | 13715,"Customer#000013715","pUgTeaHbtp1CSDZ",1,"11-658-368-2145",8829.75,"MACHINERY","about the requests. unusual deposits use. unusual requests n" 80 | 13955,"Customer#000013955","we IGUbr4bRQb,R8yBZaON",10,"20-614-548-2418",1451.06,"FURNITURE","ites sleep carefully after the blithely idle as" 81 | 14038,"Customer#000014038","jNTOVXgw5ghq",0,"10-823-855-7512",-517.93,"AUTOMOBILE","theodolites wake ironic orbits." 82 | 14240,"Customer#000014240","ghnwVvQNZ81",8,"18-839-189-5783",4713.07,"MACHINERY","eas above the special ideas sleep" 83 | 14392,"Customer#000014392","GUjSMVFbbPN9JA fM9",24,"34-683-288-7472",6580.91,"MACHINERY","y busy ideas. 
final accounts along the furiously bold requests affix furiously agai" 84 | 14570,"Customer#000014570","DKx82aKtldR,6P,yYC",12,"22-838-171-5872",8867.23,"BUILDING","le after the fluffily bold excuses" 85 | 14687,"Customer#000014687","ad5G4p30 dtKR9EPUB6rHSki ZaGB8QF9j ub",1,"11-195-955-6338",6560.56,"MACHINERY","ross the slyly unusual pinto beans. slyly even pinto beans x-ray carefully bold theodolites. pending theodolit" 86 | 15014,"Customer#000015014","0vP9D58TQFEIgBF5Z0KAbtICtTE",14,"24-860-916-6204",1019.38,"HOUSEHOLD","l asymptotes cajole across the express instructions. unusual, expres" 87 | 15066,"Customer#000015066","RG, sfEhgSdgz9jVEN8MSvqJWtoyLpWl0lpkB",14,"24-664-183-6543",7155.93,"MACHINERY","uctions cajole furiously against the slyly even accounts. quickly final packa" 88 | 15085,"Customer#000015085","oebN1,0FxvMtCw,SIUreRwTh1W0Qf,r45GlkP",23,"33-653-221-9281",6717.44,"FURNITURE","ccounts. permanent deposits are idly about the unusual, bold instructions. even, express asympto" 89 | 15108,"Customer#000015108","HKJ GCuFKVH5aXV9op",12,"22-700-705-6057",5854.69,"BUILDING","lyly even accounts promise carefully along the special requests. slyly even theodolites wake" 90 | 15136,"Customer#000015136","izOKXj5RaJ0sD0yv6gWjbpj1IuIPbMB,d,n",21,"31-505-256-9915",7737.01,"FURNITURE","y final theodolites. silent, pending requests sleep alongside of the slyly final theodolites. blithely sile" 91 | 15312,"Customer#000015312","v,G 5JFDsA vEW2FEtdZDd A1l3P8Lhvltq3p2",6,"16-110-782-7095",5816.13,"AUTOMOBILE","ifts. 
quickly final accounts according t" 92 | 15489,"Customer#000015489","STzSerbAMw6snqX0xg1L",24,"34-249-700-7423",6837.16,"AUTOMOBILE"," slyly furious packages use blithely" 93 | 15588,"Customer#000015588","Kgv32 Cx6yjupBv7dyGsPwvzHwcv",3,"13-856-158-9233",7284.53,"BUILDING","s: furiously even requests against the blithely ironic " 94 | 15615,"Customer#000015615","YIpvgIcMqBX8ZI,04tp1ho1",20,"30-101-121-9042",1659.42,"BUILDING","gle blithely ruthlessly brave foxes. fluffily final instructions nag fluffily quiet requests. q" 95 | 15665,"Customer#000015665","IA0GlBQGChIG3",1,"11-423-976-1492",3892.30,"HOUSEHOLD","ctions. blithely ironic instructions snooze. blithely regular dependencies cajole " 96 | 15934,"Customer#000015934","AWXyMVj1Kh82f1jBBQiEAmEVh1LB104g7CA",13,"23-403-634-9511",3204.25,"MACHINERY","eposits nag. bold theodolites haggle. ruthless dolphins wake furiously carefully pending theodolites. blithely " 97 | 15938,"Customer#000015938","ii63yePCCrVY0",2,"12-368-959-2413",2019.70,"BUILDING","ost finally bold excuses. furiously regular excuses alongside of the dependen" 98 | 15961,"Customer#000015961","45G2qhcWZOqDfFhN VJH",6,"16-307-662-3578",1860.26,"HOUSEHOLD","boost: blithely even packages according to the fluffily regular instructions wake car" 99 | 16021,"Customer#000016021","nyh580gi0x7I ThFF0KvqU0oNBWwj0vQ,H4gBPK4",13,"23-612-248-6034",8246.18,"MACHINERY","ctions wake furiously. special hockey players wake across the regular courts. accounts above the " 100 | 16206,"Customer#000016206","DRYKbcnisxle4qbBYabFppPGrsBPcgItm9n",24,"34-776-490-7753",4918.45,"HOUSEHOLD","es haggle furiously final ideas. forges sleep requests. unusual pinto beans accor" 101 | 16672,"Customer#000016672","qR5fUZni9FYIjTHE362",5,"15-473-809-6937",7826.99,"FURNITURE","arefully final packages. slyly quiet accounts haggle above the ironic, ironic requests. 
blithely ironic asymp" 102 | -------------------------------------------------------------------------------- /integration_tests/data/tpc_h_seeds/seeds.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | seeds: 4 | 5 | - name: part 6 | columns: 7 | - name: p_partkey 8 | description: "The primary key for this table" 9 | tests: 10 | - unique 11 | - not_null 12 | 13 | - name: partsupp 14 | columns: 15 | - name: ps_partkey 16 | description: "Part of compound primary key for this table" 17 | tests: 18 | - not_null 19 | - relationships: 20 | to: ref('part') 21 | field: p_partkey 22 | - name: ps_suppkey 23 | description: "Part of compound primary key for this table" 24 | tests: 25 | - not_null 26 | - dbt_constraints.foreign_key: 27 | pk_table_name: ref('supplier') 28 | pk_column_name: s_suppkey 29 | tests: 30 | # This is a higher performance way to test compound PK/UK 31 | - dbt_constraints.unique_key: 32 | column_names: 33 | - ps_partkey 34 | - ps_suppkey 35 | # How to validate a compound primary key natively 36 | - unique: 37 | column_name: "coalesce(cast(ps_partkey as varchar(100)), '') || '~' || coalesce(cast(ps_suppkey as varchar(100)), '')" 38 | 39 | - name: supplier 40 | columns: 41 | - name: s_suppkey 42 | description: "The primary key for this table" 43 | tests: 44 | - unique 45 | - not_null 46 | - name: s_nationkey 47 | tests: 48 | - not_null 49 | 50 | - name: orders 51 | columns: 52 | - name: o_orderkey 53 | description: "The primary key for this table" 54 | tests: 55 | - unique 56 | - not_null 57 | - name: o_custkey 58 | tests: 59 | - not_null 60 | - relationships: 61 | to: ref('customer') 62 | field: c_custkey 63 | 64 | - name: customer 65 | columns: 66 | - name: c_custkey 67 | description: "The primary key for dim_customers" 68 | tests: 69 | - dbt_constraints.primary_key 70 | - name: c_name 71 | description: "Customer Name" 72 | tests: 73 | - not_null 74 | - name: c_nationkey 75 | tests: 76 | - 
not_null 77 | 78 | - name: lineitem 79 | columns: 80 | - name: l_orderkey 81 | tests: 82 | - not_null 83 | - relationships: 84 | to: ref('orders') 85 | field: o_orderkey 86 | - name: l_linenumber 87 | tests: 88 | - not_null 89 | tests: 90 | # This is a higher performance way to test compound PK/UK 91 | - dbt_constraints.unique_key: 92 | column_names: 93 | - l_orderkey 94 | - l_linenumber 95 | # How to validate a compound primary key natively 96 | - unique: 97 | column_name: "coalesce(cast(l_orderkey as varchar(100)), '') || '~' || coalesce(cast(l_linenumber as varchar(100)), '')" 98 | # How to validate a compound foreign key 99 | - relationships: 100 | column_name: "coalesce(cast(l_partkey as varchar(100)), '') || '~' || coalesce(cast(l_suppkey as varchar(100)), '')" 101 | to: ref('partsupp') 102 | field: "coalesce(cast(ps_partkey as varchar(100)), '') || '~' || coalesce(cast(ps_suppkey as varchar(100)), '')" 103 | -------------------------------------------------------------------------------- /integration_tests/dbt_project.yml: -------------------------------------------------------------------------------- 1 | 2 | # Name your project! Project names should contain only lowercase characters 3 | # and underscores. A good package name should reflect your organization's 4 | # name or the intended use of these models 5 | name: 'dbt_constraints_integration_tests' 6 | version: '1.0.0' 7 | config-version: 2 8 | 9 | 10 | # This setting configures which "profile" dbt uses for this project. 11 | profile: 'dbt_constraints' 12 | 13 | # These configurations specify where dbt should look for different types of files. 14 | # The `source-paths` config, for example, states that models in this project can be 15 | # found in the "models/" directory. You probably won't need to change these! 
16 | model-paths: ["models"] 17 | analysis-paths: ["analysis"] 18 | test-paths: ["tests"] 19 | seed-paths: ["data"] 20 | macro-paths: ["macros"] 21 | snapshot-paths: ["snapshots"] 22 | target-path: "target" # directory which will store compiled SQL files 23 | clean-targets: # directories to be removed by `dbt clean` 24 | - "target" 25 | - "dbt_packages" 26 | - "dbt_modules" 27 | - "logs" 28 | 29 | 30 | 31 | # Global variables 32 | vars: 33 | # The package can be temporarily disabled using this variable 34 | dbt_constraints_enabled: true 35 | 36 | # The package can also add constraints on sources if you have sufficient privileges 37 | dbt_constraints_sources_enabled: true 38 | 39 | # You can also be specific on which constraints are enabled for sources 40 | # You must also enable dbt_constraints_sources_enabled above 41 | dbt_constraints_sources_pk_enabled: true 42 | dbt_constraints_sources_uk_enabled: true 43 | dbt_constraints_sources_fk_enabled: true 44 | 45 | on-run-start: 46 | - "drop table if exists dim_orders" 47 | 48 | models: 49 | +materialized: table 50 | 51 | seeds: 52 | +quote_columns: false 53 | +post-hook: "{{ clone_table('source_') }}" 54 | #+full_refresh: false 55 | 56 | tests: 57 | dbt_constraints_integration_tests: 58 | +always_create_constraint: true 59 | # These configuration settings disable running tests or just constraints by path 60 | # +enabled: false 61 | #+dbt_constraints_enabled: false 62 | -------------------------------------------------------------------------------- /integration_tests/macros/clone_table.sql: -------------------------------------------------------------------------------- 1 | {%- macro clone_table(new_prefix) -%} 2 | {{ return(adapter.dispatch('clone_table')(new_prefix)) }} 3 | {%- endmacro -%} 4 | 5 | 6 | {%- macro snowflake__clone_table(new_prefix) -%} 7 | {%- set table_clone = api.Relation.create( 8 | database = this.database, 9 | schema = this.schema, 10 | identifier = new_prefix ~ this.identifier ) -%} 11 | 12 | 
{%- set clone_statement -%} 13 | create or replace table {{table_clone}} clone {{this}} 14 | {%- endset -%} 15 | {%- do log("Creating table clone: " ~ table_clone, info=false) -%} 16 | {%- do run_query(clone_statement) -%} 17 | 18 | {%- endmacro -%} 19 | 20 | 21 | {%- macro postgres__clone_table(new_prefix) -%} 22 | {%- set table_clone = api.Relation.create( 23 | database = this.database, 24 | schema = this.schema, 25 | identifier = new_prefix ~ this.identifier ) -%} 26 | 27 | {%- set clone_statement -%} 28 | drop table if exists {{table_clone}} 29 | {%- endset -%} 30 | {%- do log("Drop table if exists: " ~ table_clone, info=false) -%} 31 | 32 | {%- set clone_statement -%} 33 | create table {{table_clone}} as select * from {{this}} 34 | {%- endset -%} 35 | {%- do log("Creating table clone: " ~ table_clone, info=false) -%} 36 | {%- do run_query(clone_statement) -%} 37 | 38 | {%- endmacro -%} 39 | 40 | 41 | {%- macro oracle__clone_table(new_prefix) -%} 42 | {%- set table_clone = api.Relation.create( 43 | database = this.database, 44 | schema = this.schema, 45 | identifier = new_prefix ~ this.identifier ) -%} 46 | 47 | {%- set clone_statement -%} 48 | DECLARE 49 | tbl_count number; 50 | sql_stmt long; 51 | 52 | BEGIN 53 | SELECT COUNT(*) INTO tbl_count 54 | FROM dba_tables 55 | WHERE owner = '{{table_clone.schema}}' 56 | AND table_name = '{{table_clone.identifier}}'; 57 | 58 | IF(tbl_count <> 0) 59 | THEN 60 | sql_stmt:='DROP TABLE {{table_clone}}'; 61 | EXECUTE IMMEDIATE sql_stmt; 62 | END IF; 63 | END; 64 | {%- endset -%} 65 | {%- do log("Drop table if exists: " ~ table_clone, info=false) -%} 66 | 67 | {%- set clone_statement -%} 68 | create table {{table_clone}} as select * from {{this}} 69 | {%- endset -%} 70 | {%- do log("Creating table clone: " ~ table_clone, info=false) -%} 71 | {%- do run_query(clone_statement) -%} 72 | 73 | {%- endmacro -%} 74 | 75 | 76 | -------------------------------------------------------------------------------- 
/integration_tests/models/dim_customers.sql: -------------------------------------------------------------------------------- 1 | /* 2 | All Customers 3 | Additional unique keys generated by sequence and hash 4 | */ 5 | SELECT C.*, 6 | DENSE_RANK() over (order by c_custkey) as c_custkey_seq 7 | FROM {{ ref('customer') }} C 8 | -------------------------------------------------------------------------------- /integration_tests/models/dim_customers_view.sql: -------------------------------------------------------------------------------- 1 | {{ config(materialized = 'view') }} 2 | /* 3 | All Customers 4 | */ 5 | SELECT * 6 | FROM {{ ref('dim_customers') }} 7 | -------------------------------------------------------------------------------- /integration_tests/models/dim_duplicate_orders.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Create an artificial duplication of the orders 4 | */ 5 | 6 | SELECT O.* 7 | FROM {{ ref('dim_orders') }} O 8 | UNION ALL 9 | SELECT O.* 10 | FROM {{ ref('dim_orders') }} O 11 | -------------------------------------------------------------------------------- /integration_tests/models/dim_missing_orders.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Create an artificial subset of the orders 4 | */ 5 | 6 | SELECT * 7 | FROM 8 | {{ ref('dim_orders') }} 9 | 10 | -- This line will cause a FK violation 11 | WHERE MOD(o_orderkey, 2) = 0 12 | -------------------------------------------------------------------------------- /integration_tests/models/dim_orders.sql: -------------------------------------------------------------------------------- 1 | /* 2 | All Orders 3 | Additional unique keys generated by sequence and hash 4 | */ 5 | SELECT 6 | O.*, 7 | DENSE_RANK() over (order by o_orderkey) as o_orderkey_seq 8 | FROM 9 | {{ ref('orders') }} O 10 | -------------------------------------------------------------------------------- 
/integration_tests/models/dim_orders_null_keys.sql: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Simulate missing PK values 4 | */ 5 | 6 | SELECT 7 | CASE WHEN MOD(o_orderkey, 10) = 0 THEN o_orderkey ELSE NULL END AS o_orderkey, 8 | o_orderkey_seq, 9 | o_custkey, 10 | o_orderstatus, 11 | o_totalprice, 12 | o_orderdate, 13 | o_order_priority, 14 | o_clerk, 15 | o_shippriority, 16 | o_comment 17 | FROM 18 | {{ ref('dim_orders') }} 19 | -------------------------------------------------------------------------------- /integration_tests/models/dim_part.sql: -------------------------------------------------------------------------------- 1 | /* 2 | All Parts 3 | Additional unique keys generated by sequence and hash 4 | */ 5 | {{ config(always_create_constraint = true) }} 6 | 7 | SELECT 8 | P.*, 9 | DENSE_RANK() over (order by p_partkey) as p_partkey_seq 10 | FROM {{ ref('part') }} P 11 | -------------------------------------------------------------------------------- /integration_tests/models/dim_part_supplier.sql: -------------------------------------------------------------------------------- 1 | /* 2 | All Part Suppliers 3 | */ 4 | SELECT PS.* 5 | FROM {{ source('tpc_h', 'source_partsupp') }} PS 6 | -------------------------------------------------------------------------------- /integration_tests/models/dim_part_supplier_missing_con.sql: -------------------------------------------------------------------------------- 1 | /* 2 | All Part Suppliers 3 | */ 4 | SELECT PS.* 5 | FROM {{ source('tpc_h', 'source_partsupp') }} PS 6 | -------------------------------------------------------------------------------- /integration_tests/models/fact_order_line.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Simulate an incremental load of orders 3 | */ 4 | 5 | {{ config( 6 | materialized='incremental', 7 | unique_key='integration_id', 8 | on_schema_change='append_new_columns' 9 | ) 
10 | }} 11 | 12 | SELECT 13 | lineitem.*, 14 | cast(TO_CHAR(o_orderdate, 'YYYYMMDD') AS INTEGER) AS o_orderdate_key, 15 | coalesce(cast(l_orderkey as varchar(100)), '') || '~' || coalesce(cast(l_linenumber as varchar(100)), '') AS integration_id 16 | FROM {{ ref('lineitem') }} lineitem 17 | JOIN {{ ref('orders') }} orders ON l_orderkey = o_orderkey 18 | 19 | {% if is_incremental() -%} 20 | 21 | -- this filter will only be applied on an incremental run 22 | WHERE l_orderkey >= 23 | ( SELECT coalesce(MAX(l_orderkey), -1) FROM {{ this }} ) 24 | 25 | {% endif -%} 26 | -------------------------------------------------------------------------------- /integration_tests/models/fact_order_line_longcol.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Test 3 | dim_order_copy____________ 4 | */ 5 | SELECT 6 | l_orderkey as l_____________________orderkey, 7 | l_linenumber as l___________________linenumber, 8 | l_partkey as l______________________partkey, 9 | l_suppkey l______________________suppkey, 10 | integration_id as l_______________integration_id 11 | FROM 12 | {{ ref('fact_order_line') }} O 13 | -------------------------------------------------------------------------------- /integration_tests/models/fact_order_line_missing_orders.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Simulate an incremental load of orders 3 | */ 4 | 5 | SELECT 6 | lineitem.*, 7 | coalesce(cast(l_orderkey as varchar(100)), '') || '~' || coalesce(cast(l_linenumber as varchar(100)), '') AS integration_id 8 | FROM {{ ref('lineitem') }} lineitem 9 | -------------------------------------------------------------------------------- /integration_tests/models/schema.yml: -------------------------------------------------------------------------------- 1 | 2 | version: 2 3 | 4 | models: 5 | 6 | # In the first 2 models we are deliberately creating a bunch of duplicate PK/UK to test 7 | # that DBT_CONSTRAINTS 
doesn't try to create constraints multiple times. 8 | # This also shows how to apply PK/UK tests inline, with one column 9 | # and with multiple columns 10 | - name: dim_part 11 | columns: 12 | - name: p_partkey 13 | description: "Primary key for this table" 14 | tests: 15 | - dbt_constraints.primary_key 16 | - unique 17 | - not_null 18 | - name: p_partkey_seq 19 | description: "Unique sequence number key" 20 | tests: 21 | - dbt_constraints.unique_key 22 | - unique 23 | tests: 24 | - dbt_constraints.unique_key: 25 | column_name: p_partkey 26 | 27 | - name: dim_customers 28 | # Test support for alias 29 | config: 30 | alias: dim_customer 31 | description: "Customer dimension" 32 | columns: 33 | - name: c_custkey 34 | description: "The primary key for dim_customers" 35 | tests: 36 | - dbt_constraints.unique_key 37 | - unique 38 | - not_null 39 | - name: c_custkey_seq 40 | description: "sequence key for dim_customers" 41 | tests: 42 | - dbt_constraints.unique_key 43 | - unique 44 | - not_null 45 | tests: 46 | - dbt_constraints.primary_key: 47 | column_name: c_custkey 48 | 49 | - name: dim_customers_view 50 | description: "Test that we run tests but don't create constraints on views" 51 | columns: 52 | - name: c_custkey 53 | tests: 54 | - dbt_constraints.primary_key 55 | - name: c_custkey_seq 56 | tests: 57 | - dbt_constraints.unique_key 58 | 59 | 60 | # Here we are deliberately creating a bunch of duplicate FK to test 61 | # that DBT_CONSTRAINTS doesn't try to create constraints multiple times. 
62 | # This also shows how to apply PK/UK tests inline, with one column 63 | # and with multiple columns 64 | - name: dim_orders 65 | description: "All Orders" 66 | columns: 67 | - name: o_orderkey 68 | description: "The primary key for this table" 69 | tests: 70 | - unique 71 | - not_null 72 | - name: o_orderkey_seq 73 | description: "Sequence key based on orderkey" 74 | tests: 75 | # We are adding a where config to test that the constraint is skipped 76 | - unique: 77 | config: 78 | where: "1 = 1" 79 | - not_null 80 | - name: o_custkey 81 | tests: 82 | - relationships: 83 | to: ref('dim_customers') 84 | field: c_custkey 85 | - dbt_constraints.foreign_key: 86 | pk_table_name: ref('dim_customers') 87 | pk_column_name: c_custkey 88 | - dbt_constraints.foreign_key: 89 | pk_table_name: ref('dim_customers') 90 | pk_column_names: 91 | - c_custkey 92 | tests: 93 | - dbt_constraints.foreign_key: 94 | fk_column_name: o_custkey 95 | pk_table_name: ref('dim_customers') 96 | pk_column_name: c_custkey 97 | - dbt_constraints.foreign_key: 98 | fk_column_names: 99 | - o_custkey 100 | pk_table_name: ref('dim_customers') 101 | pk_column_names: 102 | - c_custkey 103 | 104 | - name: fact_order_line 105 | description: "Fact Order Lines" 106 | columns: 107 | - name: l_orderkey 108 | description: "FK to dim_orders and first key in PK" 109 | tests: 110 | - relationships: 111 | to: ref('dim_orders') 112 | field: o_orderkey 113 | - name: l_partkey 114 | description: "1st column in FK to dim_part_supplier. Testing behavior when one key is nullable and the second is not." 115 | - name: l_suppkey 116 | description: "2nd column in FK to dim_part_supplier. Testing behavior when one key is nullable and the second is not." 
117 | tests: 118 | - not_null 119 | - name: l_linenumber 120 | description: "Order Line Number and second key in PK" 121 | - name: integration_id 122 | description: "Concatenation of PK colums for the unique and not_null tests" 123 | tests: 124 | - unique 125 | - not_null 126 | tests: 127 | # Demonstration that the primary_key test can accept multiple columns 128 | - dbt_constraints.primary_key: 129 | column_names: 130 | - l_orderkey 131 | - l_linenumber 132 | # Test multi-column FK 133 | - dbt_constraints.foreign_key: 134 | fk_column_names: 135 | - l_partkey 136 | - l_suppkey 137 | pk_table_name: ref('dim_part_supplier') 138 | pk_column_names: 139 | - ps_partkey 140 | - ps_suppkey 141 | 142 | - name: dim_duplicate_orders 143 | description: "Test that we do not try to create PK/UK on failed tests" 144 | columns: 145 | - name: o_orderkey 146 | description: "The primary key for this table" 147 | - name: o_orderkey_seq 148 | description: "duplicate seq column to test UK" 149 | tests: 150 | # This constraint should be skipped because it has failures 151 | - dbt_constraints.primary_key: 152 | column_name: o_orderkey 153 | config: 154 | severity: warn 155 | 156 | - dbt_constraints.unique_key: 157 | column_name: o_orderkey 158 | config: 159 | warn_if: ">= 5000" 160 | error_if: ">= 10000" 161 | 162 | - dbt_constraints.unique_key: 163 | column_name: o_orderkey_seq 164 | config: 165 | severity: warn 166 | 167 | - name: fact_order_line_missing_orders 168 | description: "Test that we do not create FK on failed tests" 169 | columns: 170 | - name: l_orderkey 171 | description: "FK to dim_orders and first key in PK" 172 | - name: l_linenumber 173 | description: "Order Line Number and second key in PK" 174 | - name: integration_id 175 | description: "Test whether we still create this valid UK" 176 | tests: 177 | - unique 178 | - not_null 179 | tests: 180 | - dbt_constraints.foreign_key: 181 | fk_column_name: l_orderkey 182 | pk_table_name: ref('dim_missing_orders') 183 | 
pk_column_name: o_orderkey 184 | config: 185 | severity: warn 186 | # Test that we still create this valid primary key 187 | - dbt_constraints.primary_key: 188 | column_names: 189 | - l_orderkey 190 | - l_linenumber 191 | # Test that we do not create a FK because the PK/UK is missing 192 | - dbt_constraints.foreign_key: 193 | fk_column_names: 194 | - l_partkey 195 | - l_suppkey 196 | pk_table_name: ref('dim_part_supplier_missing_con') 197 | pk_column_names: 198 | - ps_partkey 199 | - ps_suppkey 200 | 201 | - name: dim_orders_null_keys 202 | description: "All Orders" 203 | config: 204 | always_create_constraint: true 205 | columns: 206 | - name: o_custkey 207 | tests: 208 | # test that we still create this valid foreign key 209 | - dbt_constraints.foreign_key: 210 | pk_table_name: ref('dim_customers') 211 | pk_column_name: c_custkey 212 | tests: 213 | # test that we do not create this PK with missing values 214 | - dbt_constraints.primary_key: 215 | column_name: o_orderkey 216 | config: 217 | severity: warn 218 | 219 | # test that we still create this valid unique key 220 | - dbt_constraints.unique_key: 221 | column_name: o_orderkey_seq 222 | 223 | 224 | - name: dim_part_supplier 225 | description: "Multi column UK" 226 | columns: 227 | - name: ps_suppkey 228 | description: "Part of compound primary key for this table" 229 | tests: 230 | - not_null 231 | # Testing FK to a source 232 | - relationships: 233 | to: source('tpc_h', 'source_supplier') 234 | field: s_suppkey 235 | # Testing FK to a seed 236 | - relationships: 237 | to: ref('supplier') 238 | field: s_suppkey 239 | tests: 240 | - dbt_constraints.primary_key: 241 | column_names: 242 | - ps_partkey 243 | - ps_suppkey 244 | 245 | - name: dim_part_supplier_missing_con 246 | description: "Table is missing constraints to test FK won't be generated to it" 247 | 248 | - name: fact_order_line_longcol 249 | description: "Fact Order Lines with long column names" 250 | columns: 251 | - name: 
l_____________________orderkey 252 | description: "FK to dim_orders and first key in PK" 253 | tests: 254 | - relationships: 255 | to: ref('dim_orders') 256 | field: o_orderkey 257 | - name: l___________________linenumber 258 | description: "Order Line Number and second key in PK" 259 | - name: l_______________integration_id 260 | description: "Concatenation of PK colums for the unique and not_null tests" 261 | tests: 262 | - unique 263 | - not_null 264 | tests: 265 | # Demonstration that the primary_key test can accept multiple columns 266 | - dbt_constraints.primary_key: 267 | column_names: 268 | - l_____________________orderkey 269 | - l___________________linenumber 270 | # Test multi-column FK 271 | - dbt_constraints.foreign_key: 272 | fk_column_names: 273 | - l______________________partkey 274 | - l______________________suppkey 275 | pk_table_name: ref('dim_part_supplier') 276 | pk_column_names: 277 | - ps_partkey 278 | - ps_suppkey 279 | -------------------------------------------------------------------------------- /integration_tests/models/sources.yml: -------------------------------------------------------------------------------- 1 | 2 | version: 2 3 | 4 | # We are creating sources that match our seed tables to test source 5 | # tests do not result in constraints 6 | sources: 7 | - name: tpc_h 8 | schema: "{{ target.schema }}" 9 | tables: 10 | - name: source_part 11 | columns: 12 | - name: p_partkey 13 | description: "The primary key for this table" 14 | tests: 15 | - unique 16 | - not_null 17 | - relationships: 18 | to: ref('part') 19 | field: p_partkey 20 | 21 | - name: source_partsupp 22 | columns: 23 | - name: ps_partkey 24 | description: "Part of compound primary key for this table" 25 | tests: 26 | - not_null 27 | - relationships: 28 | to: source('tpc_h', 'source_part') 29 | field: p_partkey 30 | - relationships: 31 | to: ref('partsupp') 32 | field: ps_partkey 33 | - relationships: 34 | to: ref('part') 35 | field: p_partkey 36 | - name: ps_suppkey 
37 | description: "Part of compound primary key for this table" 38 | tests: 39 | - not_null 40 | - relationships: 41 | to: source('tpc_h', 'source_supplier') 42 | field: s_suppkey 43 | - relationships: 44 | to: ref('partsupp') 45 | field: ps_suppkey 46 | - relationships: 47 | to: ref('supplier') 48 | field: s_suppkey 49 | tests: 50 | # This is a higher performance way to test compound PK/UK 51 | - dbt_utils.unique_combination_of_columns: 52 | combination_of_columns: 53 | - "ps_partkey" 54 | - "ps_suppkey" 55 | # How to validate a compound primary key natively 56 | - unique: 57 | column_name: "coalesce(cast(ps_partkey as varchar(100)), '') || '~' || coalesce(cast(ps_suppkey as varchar(100)), '')" 58 | 59 | - name: "source_supplier" 60 | columns: 61 | - name: "s_suppkey" 62 | description: "The primary key for this table" 63 | tests: 64 | - unique 65 | - not_null 66 | - relationships: 67 | to: ref('supplier') 68 | field: s_suppkey 69 | - name: s_nationkey 70 | tests: 71 | - not_null 72 | 73 | - name: source_orders 74 | columns: 75 | - name: o_orderkey 76 | description: "The primary key for this table" 77 | tests: 78 | - unique 79 | - not_null 80 | - relationships: 81 | to: ref('orders') 82 | field: o_orderkey 83 | - name: o_custkey 84 | tests: 85 | - not_null 86 | - relationships: 87 | to: source('tpc_h', 'source_customer') 88 | field: c_custkey 89 | 90 | - name: source_customer 91 | columns: 92 | - name: c_custkey 93 | description: "The primary key for dim_customers" 94 | tests: 95 | - unique 96 | - not_null 97 | - relationships: 98 | to: ref('customer') 99 | field: c_custkey 100 | - name: c_name 101 | description: "Customer Name" 102 | tests: 103 | - not_null 104 | - name: c_nationkey 105 | tests: 106 | - not_null 107 | 108 | - name: source_lineitem 109 | columns: 110 | - name: l_orderkey 111 | tests: 112 | - not_null 113 | - relationships: 114 | to: source('tpc_h', 'source_orders') 115 | field: o_orderkey 116 | - name: l_linenumber 117 | tests: 118 | - not_null 119 
| tests: 120 | # This is a higher performance way to test compound PK/UK 121 | - dbt_utils.unique_combination_of_columns: 122 | combination_of_columns: 123 | - l_orderkey 124 | - l_linenumber 125 | # How to validate a compound primary key natively 126 | - unique: 127 | column_name: "coalesce(cast(l_orderkey as varchar(100)), '') || '~' || coalesce(cast(l_linenumber as varchar(100)), '')" 128 | 129 | # How to validate a compound foreign key 130 | - relationships: 131 | column_name: "coalesce(cast(l_partkey as varchar(100)), '') || '~' || coalesce(cast(l_suppkey as varchar(100)), '')" 132 | to: source('tpc_h', 'source_partsupp') 133 | field: "coalesce(cast(ps_partkey as varchar(100)), '') || '~' || coalesce(cast(ps_suppkey as varchar(100)), '')" 134 | 135 | # multi-column FK 136 | - dbt_constraints.foreign_key: 137 | fk_column_names: 138 | - l_orderkey 139 | - l_linenumber 140 | pk_table_name: ref('lineitem') 141 | pk_column_names: 142 | - l_orderkey 143 | - l_linenumber 144 | -------------------------------------------------------------------------------- /integration_tests/packages.yml: -------------------------------------------------------------------------------- 1 | packages: 2 | 3 | # We will test unique keys based on dbt_utils.unique_combination_of_columns 4 | - package: dbt-labs/dbt_utils 5 | version: [">=1.3.0"] 6 | 7 | - local: ../ 8 | -------------------------------------------------------------------------------- /integration_tests/profiles.yml: -------------------------------------------------------------------------------- 1 | dbt_constraints: 2 | target: snowflake 3 | outputs: 4 | snowflake: 5 | type: "snowflake" 6 | client_session_keep_alive: False 7 | account: "{{ env_var('SNOWFLAKE_ACCOUNT') }}" 8 | user: "{{ env_var('SNOWFLAKE_USER') }}" 9 | # The DBT_ENV_SECRET_ prefix prevents the variable being included in logs 10 | private_key: "{{ env_var('DBT_ENV_SECRET_PRIVATE_KEY') }}" 11 | role: "{{ env_var('SNOWFLAKE_ROLE') }}" 12 | database: "{{ 
env_var('SNOWFLAKE_DATABASE') }}" 13 | warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE') }}" 14 | schema: "{{ env_var('SNOWFLAKE_SCHEMA') }}" 15 | threads: "{{ env_var('SNOWFLAKE_THREADS', '8') | as_number }}" 16 | query_tag: dbt_constraints 17 | oracle: 18 | database: XEPDB1 19 | host: localhost 20 | pass: dbt_user2 21 | port: 1521 22 | protocol: tcp 23 | schema: DBT_USER 24 | service: XEPDB1 25 | threads: 4 26 | type: oracle 27 | user: DBT_USER 28 | postgres: 29 | dbname: postgres 30 | host: localhost 31 | password: Snowflake 32 | port: 5432 33 | schema: dbt_demo 34 | threads: 1 35 | type: postgres 36 | user: postgres 37 | -------------------------------------------------------------------------------- /integration_tests/tests/singlular_test.sql: -------------------------------------------------------------------------------- 1 | SELECT * 2 | FROM {{ ref('dim_part') }} 3 | WHERE 1 = 2 4 | -------------------------------------------------------------------------------- /macros/create_constraints.sql: -------------------------------------------------------------------------------- 1 | {#- Define three tests for PK, UK, and FK that can be overridden by DB implementations. 2 | These tests have overloaded parameter names to be as flexible as possible. 
-#} 3 | 4 | {%- test primary_key(model, 5 | column_name=none, column_names=[], 6 | quote_columns=false, constraint_name=none) -%} 7 | 8 | {%- if column_names|count == 0 and column_name -%} 9 | {%- do column_names.append(column_name) -%} 10 | {%- endif -%} 11 | 12 | {{ return(adapter.dispatch('test_primary_key', 'dbt_constraints')(model, column_names, quote_columns)) }} 13 | 14 | {%- endtest -%} 15 | 16 | 17 | {%- test unique_key(model, 18 | column_name=none, column_names=[], 19 | quote_columns=false, constraint_name=none) -%} 20 | 21 | {%- if column_names|count == 0 and column_name -%} 22 | {%- do column_names.append(column_name) -%} 23 | {%- endif -%} 24 | 25 | {{ return(adapter.dispatch('test_unique_key', 'dbt_constraints')(model, column_names, quote_columns)) }} 26 | 27 | {%- endtest -%} 28 | 29 | 30 | {%- test foreign_key(model, 31 | column_name=none, fk_column_name=none, fk_column_names=[], 32 | pk_table_name=none, to=none, 33 | pk_column_name=none, pk_column_names=[], field=none, 34 | quote_columns=false, constraint_name=none) -%} 35 | 36 | {%- if pk_column_names|count == 0 and (pk_column_name or field) -%} 37 | {%- do pk_column_names.append( (pk_column_name or field) ) -%} 38 | {%- endif -%} 39 | {%- if fk_column_names|count == 0 and (fk_column_name or column_name) -%} 40 | {%- do fk_column_names.append( (fk_column_name or column_name) ) -%} 41 | {%- endif -%} 42 | {%- set pk_table_name = pk_table_name or to -%} 43 | 44 | {{ return(adapter.dispatch('test_foreign_key', 'dbt_constraints')(model, fk_column_names, pk_table_name, pk_column_names, quote_columns)) }} 45 | 46 | {%- endtest -%} 47 | 48 | 49 | 50 | 51 | {#- Define three create macros for PK, UK, and FK that can be overridden by DB implementations -#} 52 | 53 | {%- macro create_primary_key(table_model, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 54 | {{ return(adapter.dispatch('create_primary_key', 'dbt_constraints')(table_model, column_names, 
verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause)) }} 55 | {%- endmacro -%} 56 | 57 | 58 | {%- macro create_unique_key(table_model, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 59 | {{ return(adapter.dispatch('create_unique_key', 'dbt_constraints')(table_model, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause)) }} 60 | {%- endmacro -%} 61 | 62 | 63 | {%- macro create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 64 | {{ return(adapter.dispatch('create_foreign_key', 'dbt_constraints')(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause)) }} 65 | {%- endmacro -%} 66 | 67 | 68 | {%- macro create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 69 | {{ return(adapter.dispatch('create_not_null', 'dbt_constraints')(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause)) }} 70 | {%- endmacro -%} 71 | 72 | 73 | {#- Define two macros for detecting if PK, UK, and FK exist that can be overridden by DB implementations -#} 74 | 75 | {%- macro unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 76 | {{ return(adapter.dispatch('unique_constraint_exists', 'dbt_constraints')(table_relation, column_names, lookup_cache) ) }} 77 | {%- endmacro -%} 78 | 79 | {%- macro foreign_key_exists(table_relation, column_names, lookup_cache) -%} 80 | {{ return(adapter.dispatch('foreign_key_exists', 'dbt_constraints')(table_relation, column_names, lookup_cache)) }} 81 | {%- endmacro -%} 82 | 83 | 84 | {#- Define two macros for detecting if we have sufficient privileges that can be overridden by DB implementations -#} 85 | 86 | {%- macro 
have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 87 | {{ return(adapter.dispatch('have_references_priv', 'dbt_constraints')(table_relation, verify_permissions, lookup_cache) ) }} 88 | {%- endmacro -%} 89 | 90 | {%- macro have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 91 | {{ return(adapter.dispatch('have_ownership_priv', 'dbt_constraints')(table_relation, verify_permissions, lookup_cache)) }} 92 | {%- endmacro -%} 93 | 94 | 95 | {#- Define macro for whether a DB implementation has implemented logic for RELY and NORELY constraints -#} 96 | 97 | {%- macro adapter_supports_rely_norely(test_name) -%} 98 | {{ return(adapter.dispatch('adapter_supports_rely_norely', 'dbt_constraints')(test_name)) }} 99 | {%- endmacro -%} 100 | 101 | {#- By default, we assume DB implementations have NOT implemented logic for RELY and NORELY constraints -#} 102 | {%- macro default__adapter_supports_rely_norely(test_name) -%} 103 | {{ return(false) }} 104 | {%- endmacro -%} 105 | 106 | 107 | {#- Override dbt's truncate_relation macro to allow us to create adapter specific versions that drop constraints -#} 108 | 109 | {% macro truncate_relation(relation) -%} 110 | {{ return(adapter.dispatch('truncate_relation')(relation)) }} 111 | {% endmacro %} 112 | 113 | {#- Override dbt's drop_relation macro to allow us to create adapter specific versions that drop constraints -#} 114 | 115 | {% macro drop_relation(relation) -%} 116 | {{ return(adapter.dispatch('drop_relation')(relation)) }} 117 | {% endmacro %} 118 | 119 | 120 | 121 | {#- This macro should be added to on-run-end to create constraints 122 | after all the models and tests have completed. You can pass a 123 | list of the tests that you want considered for constraints and 124 | a flag for whether columns should be quoted. The first macro 125 | primarily controls the order that constraints are created. 
-#} 126 | {%- macro create_constraints( 127 | constraint_types=[ 128 | 'primary_key', 129 | 'unique_key', 130 | 'unique_combination_of_columns', 131 | 'unique', 132 | 'foreign_key', 133 | 'relationships', 134 | 'not_null'], 135 | quote_columns=false) -%} 136 | {%- if execute and var('dbt_constraints_enabled', false) and results -%} 137 | {%- do log("Running dbt Constraints", info=true) -%} 138 | 139 | {%- set lookup_cache = { 140 | "table_columns": { }, 141 | "table_privileges": { }, 142 | "unique_keys": { }, 143 | "not_null_col": { }, 144 | "foreign_keys": { } } -%} 145 | 146 | {%- if 'not_null' in constraint_types -%} 147 | {%- do dbt_constraints.create_constraints_by_type(['not_null'], quote_columns, lookup_cache) -%} 148 | {%- endif -%} 149 | {%- if 'primary_key' in constraint_types -%} 150 | {%- do dbt_constraints.create_constraints_by_type(['primary_key'], quote_columns, lookup_cache) -%} 151 | {%- endif -%} 152 | {%- if 'unique_key' in constraint_types -%} 153 | {%- do dbt_constraints.create_constraints_by_type(['unique_key'], quote_columns, lookup_cache) -%} 154 | {%- endif -%} 155 | {%- if 'unique_combination_of_columns' in constraint_types -%} 156 | {%- do dbt_constraints.create_constraints_by_type(['unique_combination_of_columns'], quote_columns, lookup_cache) -%} 157 | {%- endif -%} 158 | {%- if 'unique' in constraint_types -%} 159 | {%- do dbt_constraints.create_constraints_by_type(['unique'], quote_columns, lookup_cache) -%} 160 | {%- endif -%} 161 | {%- if 'foreign_key' in constraint_types -%} 162 | {%- do dbt_constraints.create_constraints_by_type(['foreign_key'], quote_columns, lookup_cache) -%} 163 | {%- endif -%} 164 | {%- if 'relationships' in constraint_types -%} 165 | {%- do dbt_constraints.create_constraints_by_type(['relationships'], quote_columns, lookup_cache) -%} 166 | {%- endif -%} 167 | 168 | {%- do log("Finished dbt Constraints", info=true) -%} 169 | {%- endif -%} 170 | 171 | {%- endmacro -%} 172 | 173 | 174 | {#- This macro checks if 
a test or its model is selected -#} 175 | {%- macro test_selected(test_model) -%} 176 | 177 | {%- if test_model.unique_id in selected_resources -%} 178 | {{ return("TEST_SELECTED") }} 179 | {%- endif -%} 180 | {%- if test_model.attached_node in selected_resources -%} 181 | {{ return("MODEL_SELECTED") }} 182 | {%- endif -%} 183 | 184 | {#- Check if a PK/UK should be created because it is referenced by a selected FK -#} 185 | {%- if test_model.test_metadata.name in ("primary_key", "unique_key", "unique_combination_of_columns", "unique") -%} 186 | {%- set pk_test_args = test_model.test_metadata.kwargs -%} 187 | {%- set pk_test_columns = [] -%} 188 | {%- if pk_test_args.column_names -%} 189 | {%- set pk_test_columns = pk_test_args.column_names -%} 190 | {%- elif pk_test_args.combination_of_columns -%} 191 | {%- set pk_test_columns = pk_test_args.combination_of_columns -%} 192 | {%- elif pk_test_args.column_name -%} 193 | {%- set pk_test_columns = [pk_test_args.column_name] -%} 194 | {%- endif -%} 195 | {%- for fk_model in graph.nodes.values() | selectattr("resource_type", "equalto", "test") 196 | if fk_model.test_metadata 197 | and fk_model.test_metadata.name in ("foreign_key", "relationships") 198 | and test_model.attached_node in fk_model.depends_on.nodes 199 | and ( (fk_model.unique_id and fk_model.unique_id in selected_resources) 200 | or (fk_model.attached_node and fk_model.attached_node in selected_resources) ) -%} 201 | {%- set fk_test_args = fk_model.test_metadata.kwargs -%} 202 | {%- set fk_test_columns = [] -%} 203 | {%- if fk_test_args.pk_column_names -%} 204 | {%- set fk_test_columns = fk_test_args.pk_column_names -%} 205 | {%- elif fk_test_args.pk_column_name -%} 206 | {%- set fk_test_columns = [fk_test_args.pk_column_name] -%} 207 | {%- elif fk_test_args.field -%} 208 | {%- set fk_test_columns = [fk_test_args.field] -%} 209 | {%- endif -%} 210 | {%- if column_list_matches(pk_test_columns, fk_test_columns) -%} 211 | {{ return("PK_UK_FOR_SELECTED_FK") 
}} 212 | {%- endif -%} 213 | {%- endfor -%} 214 | {%- endif -%} 215 | 216 | {{ return(none) }} 217 | {%- endmacro -%} 218 | 219 | 220 | {#- This macro that checks if a test has results and whether there were errors -#} 221 | {%- macro lookup_should_rely(test_model) -%} 222 | {%- if test_model.config.where 223 | or test_model.config.warn_if != "!= 0" 224 | or test_model.config.fail_calc != "count(*)" -%} 225 | {#- Set NORELY if there is a condition on the test -#} 226 | {{ return('NORELY') }} 227 | {%- endif -%} 228 | 229 | {%- for res in results 230 | if res.node.config.materialized == "test" 231 | and res.node.unique_id == test_model.unique_id -%} 232 | {%- if res.failures == None -%} 233 | {#- Set '' if we do not know if there is a test failure -#} 234 | {{ return('') }} 235 | {%- elif res.failures > 0 -%} 236 | {#- Set NORELY if there is a test failure -#} 237 | {{ return('NORELY') }} 238 | {%- elif res.failures == 0 -%} 239 | {#- Set RELY if there are 0 failures -#} 240 | {{ return('RELY') }} 241 | {%- endif -%} 242 | {%- endfor -%} 243 | {{ return('') }} 244 | {%- endmacro -%} 245 | 246 | 247 | {#- This macro that checks if a test or its model has always_create_constraint set -#} 248 | {%- macro should_always_create_constraint(test_model) -%} 249 | {%- if test_model.config.get("always_create_constraint", false) == true -%} 250 | {{ return(true) }} 251 | {%- endif -%} 252 | {%- for table_node in test_model.depends_on.nodes -%} 253 | {%- for node in graph.nodes.values() | selectattr("unique_id", "equalto", table_node) 254 | if node.config.get("always_create_constraint", false) == true -%} 255 | {{ return(true) }} 256 | {%- endfor -%} 257 | {%- endfor -%} 258 | 259 | {{ return(false) }} 260 | {%- endmacro -%} 261 | 262 | 263 | {#- This macro is called internally and passed which constraint types to create. 
-#}
{%- macro create_constraints_by_type(constraint_types, quote_columns, lookup_cache) -%}

    {#- Loop through the metadata and find all tests that match the constraint_types and have all the fields we check for tests -#}
    {%- for test_model in graph.nodes.values() | selectattr("resource_type", "equalto", "test")
        if test_model.test_metadata
        and test_model.test_metadata.kwargs
        and test_model.test_metadata.name
        and test_model.test_metadata.name is in( constraint_types )
        and test_model.unique_id
        and test_model.attached_node
        and test_model.depends_on
        and test_model.depends_on.nodes
        and test_model.config
        and test_model.config.enabled
        and test_model.config.get("dbt_constraints_enabled", true) -%}

        {%- set test_parameters = test_model.test_metadata.kwargs -%}
        {%- set test_name = test_model.test_metadata.name -%}
        {%- set selected = dbt_constraints.test_selected(test_model) -%}

        {#- We can shortcut additional tests if the constraint was not selected -#}
        {%- if selected is not none -%}
            {#- rely_clause will be RELY if a test passed, NORELY if it failed, and '' if it was skipped -#}
            {%- set rely_clause = dbt_constraints.lookup_should_rely(test_model) -%}
            {%- set always_create_constraint = dbt_constraints.should_always_create_constraint(test_model) -%}
        {%- else -%}
            {%- set rely_clause = '' -%}
            {%- set always_create_constraint = false -%}
        {%- endif -%}

        {#- Create constraints that:
            - Either the test or its model was selected to run, including PK/UK for FK
            - Passed the test (RELY) or the database supports NORELY constraints
            - We ran the test (RELY/NORELY) or we need the constraint for a FK
              or we have the always_create_constraint parameter turned on -#}
        {%- if selected is not none
            and ( rely_clause == 'RELY'
                or dbt_constraints.adapter_supports_rely_norely(test_name) == true )
            and ( rely_clause in('RELY', 'NORELY')
                or selected == "PK_UK_FOR_SELECTED_FK"
                or always_create_constraint == true ) -%}

            {% set ns = namespace(verify_permissions=false) %}
            {%- set table_models = [] -%}

            {#- Find the table models that are referenced by this test. -#}
            {%- for table_node in test_model.depends_on.nodes -%}
                {%- for node in graph.nodes.values() | selectattr("unique_id", "equalto", table_node)
                    if node.config
                    and node.config.get("materialized", "other") not in ("view", "ephemeral", "dynamic_table")
                    and ( node.resource_type in ("model", "snapshot", "seed")
                        or ( node.resource_type == "source" and var('dbt_constraints_sources_enabled', false)
                            and ( ( var('dbt_constraints_sources_pk_enabled', false) and test_name in("primary_key") )
                                or ( var('dbt_constraints_sources_uk_enabled', false) and test_name in("unique_key", "unique_combination_of_columns", "unique") )
                                or ( var('dbt_constraints_sources_fk_enabled', false) and test_name in("foreign_key", "relationships") )
                                or ( var('dbt_constraints_sources_nn_enabled', false) and test_name in("not_null") ) )
                        ) ) -%}

                    {%- do node.update({'alias': node.alias or node.name }) -%}
                    {#- Append to our list of models for this test -#}
                    {%- do table_models.append(node) -%}
                    {%- if node.resource_type == "source"
                        or node.config.get("materialized", "other") not in ("table", "incremental", "snapshot", "seed") -%}
                        {#- If we are using sources or custom materializations, we will need to verify permissions -#}
                        {%- set ns.verify_permissions = true -%}
                    {%- endif -%}

                {% endfor %}
            {% endfor %}

            {#- We only create PK/UK if there is one model referenced by the test
                and if all the columns exist as physical columns on the table -#}
            {%- if 1 == table_models|count
                and test_name in("primary_key", "unique_key", "unique_combination_of_columns", "unique") -%}

                {# Attempt to identify a parameter we can use for the column names #}
                {%- set column_names = [] -%}
                {%- if test_parameters.column_names -%}
                    {%- set column_names = test_parameters.column_names -%}
                {%- elif test_parameters.combination_of_columns -%}
                    {%- set column_names = test_parameters.combination_of_columns -%}
                {%- elif test_parameters.column_name -%}
                    {%- set column_names = [test_parameters.column_name] -%}
                {%- else -%}
                    {{ exceptions.raise_compiler_error(
                    "`column_names` or `column_name` parameter missing for primary/unique key constraint on table: '" ~ table_models[0].name
                    ) }}
                {%- endif -%}

                {%- set table_relation = adapter.get_relation(
                    database=table_models[0].database,
                    schema=table_models[0].schema,
                    identifier=table_models[0].alias ) -%}
                {%- if table_relation and table_relation.is_table -%}
                    {%- if dbt_constraints.table_columns_all_exist(table_relation, column_names, lookup_cache) -%}
                        {%- if test_name == "primary_key" -%}
                            {%- if dbt_constraints.adapter_supports_rely_norely("not_null") == true -%}
                                {%- do dbt_constraints.create_not_null(table_relation, column_names, ns.verify_permissions, quote_columns, lookup_cache, rely_clause) -%}
                            {%- endif -%}
                            {%- do dbt_constraints.create_primary_key(table_relation, column_names, ns.verify_permissions, quote_columns, test_parameters.constraint_name, lookup_cache, rely_clause) -%}
                        {%- else -%}
                            {%- do dbt_constraints.create_unique_key(table_relation, column_names, ns.verify_permissions, quote_columns, test_parameters.constraint_name, lookup_cache, rely_clause) -%}
                        {%- endif -%}
                    {%- else -%}
                        {%- do log("Skipping primary/unique key because a physical column name was not found on the table: " ~ table_models[0].name ~ " " ~ column_names, info=true) -%}
                    {%- endif -%}
                {%- else -%}
                    {%- do log("Skipping primary/unique key because the table was not found in the database: " ~ table_models[0].name, info=true) -%}
                {%- endif -%}

            {#- We only create FK if there are two models referenced by the test
                and if all the columns exist as physical columns on the tables -#}
            {%- elif 2 == table_models|count
                and test_name in( "foreign_key", "relationships") -%}

                {%- set fk_model = table_models | selectattr("unique_id", "equalto", test_model.attached_node) | first -%}
                {%- set pk_model = table_models | rejectattr("unique_id", "equalto", test_model.attached_node) | first -%}

                {%- if fk_model and pk_model -%}

                    {%- set fk_table_relation = adapter.get_relation(
                        database=fk_model.database,
                        schema=fk_model.schema,
                        identifier=fk_model.alias) -%}

                    {%- set pk_table_relation = adapter.get_relation(
                        database=pk_model.database,
                        schema=pk_model.schema,
                        identifier=pk_model.alias) -%}

                    {%- if fk_table_relation and pk_table_relation and fk_table_relation.is_table and pk_table_relation.is_table -%}
                        {# Attempt to identify parameters we can use for the column names #}
                        {%- set pk_column_names = [] -%}
                        {%- if test_parameters.pk_column_names -%}
                            {%- set pk_column_names = test_parameters.pk_column_names -%}
                        {%- elif test_parameters.field -%}
                            {%- set pk_column_names = [test_parameters.field] -%}
                        {%- elif test_parameters.pk_column_name -%}
                            {%- set pk_column_names = [test_parameters.pk_column_name] -%}
                        {%- else -%}
                            {{ exceptions.raise_compiler_error(
                            "`pk_column_names`, `pk_column_name`, or `field` parameter missing for foreign key constraint on table: '" ~ fk_model.name ~ " " ~ test_parameters
                            ) }}
                        {%- endif -%}

                        {%- set fk_column_names = [] -%}
                        {%- if test_parameters.fk_column_names -%}
                            {%- set fk_column_names = test_parameters.fk_column_names -%}
                        {%- elif test_parameters.column_name -%}
                            {%- set fk_column_names = [test_parameters.column_name] -%}
                        {%- elif test_parameters.fk_column_name -%}
                            {%- set fk_column_names = [test_parameters.fk_column_name] -%}
                        {%- else -%}
                            {{ exceptions.raise_compiler_error(
                            "`fk_column_names`, `fk_column_name`, or `column_name` parameter missing for foreign key constraint on table: '" ~ fk_model.name ~ " " ~ test_parameters
                            ) }}
                        {%- endif -%}

                        {%- if not dbt_constraints.table_columns_all_exist(pk_table_relation, pk_column_names, lookup_cache) -%}
                            {%- do log("Skipping foreign key because a physical column was not found on the pk table: " ~ pk_model.name ~ " " ~ pk_column_names, info=true) -%}
                        {%- elif not dbt_constraints.table_columns_all_exist(fk_table_relation, fk_column_names, lookup_cache) -%}
                            {%- do log("Skipping foreign key because a physical column was not found on the fk table: " ~ fk_model.name ~ " " ~ fk_column_names, info=true) -%}
                        {%- else -%}
                            {%- do dbt_constraints.create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, ns.verify_permissions, quote_columns, test_parameters.constraint_name, lookup_cache, rely_clause) -%}
                        {%- endif -%}
                    {%- else -%}
                        {%- if fk_model == None or not fk_table_relation.is_table -%}
                            {%- do log("Skipping foreign key to " ~ pk_model.alias ~ " because the child table was not found in the database: " ~ fk_model.alias, info=true) -%}
                        {%- endif -%}
                        {#- Fixed: test the relation (pk_table_relation), not the graph node (pk_model),
                            mirroring the fk_table_relation check above -#}
                        {%- if pk_model == None or not pk_table_relation.is_table -%}
                            {%- do log("Skipping foreign key on " ~ fk_model.alias ~ " because the parent table was not found in the database: " ~ pk_model.alias, info=true) -%}
                        {%- endif -%}
                    {%- endif -%}

                {%- else -%}
                    {%- do log("Skipping foreign key because we couldn't find the child table: model=" ~ test_model.attached_node ~ " or source", info=true) -%}
                {%- endif -%}

            {#- We only create NN if there is one model referenced by the test
                and if all the columns exist as physical columns on the table -#}
            {%- elif 1 == table_models|count
                and test_name in("not_null") -%}

                {# Attempt to identify a parameter we can use for the column names #}
                {%- set column_names = [] -%}
                {%- if test_parameters.column_names -%}
                    {%- set column_names = test_parameters.column_names -%}
                {%- elif test_parameters.combination_of_columns -%}
                    {%- set column_names = test_parameters.combination_of_columns -%}
                {%- elif test_parameters.column_name -%}
                    {%- set column_names = [test_parameters.column_name] -%}
                {%- else -%}
                    {{ exceptions.raise_compiler_error(
                    "`column_names` or `column_name` parameter missing for not null constraint on table: '" ~ table_models[0].name
                    ) }}
                {%- endif -%}

                {%- set table_relation = adapter.get_relation(
                    database=table_models[0].database,
                    schema=table_models[0].schema,
                    identifier=table_models[0].alias ) -%}

                {%- if table_relation and table_relation.is_table -%}
                    {%- if dbt_constraints.table_columns_all_exist(table_relation, column_names, lookup_cache) -%}
                        {%- do dbt_constraints.create_not_null(table_relation, column_names, ns.verify_permissions, quote_columns, lookup_cache, rely_clause) -%}
                    {%- else -%}
                        {%- do log("Skipping not null constraint because a physical column name was not found on the table: " ~ table_models[0].name ~ " " ~ column_names, info=true) -%}
                    {%- endif -%}
                {%- else -%}
                    {%- do log("Skipping not null constraint because the table was not found in the database: " ~ table_models[0].name, info=true) -%}
                {%- endif -%}

            {%- endif -%}
        {%- endif -%}


    {%- endfor -%}

{%- endmacro -%}



{# This macro tests that all the column names passed
to the macro can be found on the table, ignoring case #}
{%- macro table_columns_all_exist(table_relation, column_list, lookup_cache) -%}
    {%- set tab_column_list = dbt_constraints.lookup_table_columns(table_relation, lookup_cache) -%}

    {#- Fail fast on the first requested column that is not a physical column -#}
    {%- for column in column_list|map('upper') if column not in tab_column_list -%}
        {{ return(false) }}
    {%- endfor -%}
    {{ return(true) }}

{%- endmacro -%}


{#- Dispatch to the adapter-specific column lookup -#}
{%- macro lookup_table_columns(table_relation, lookup_cache) -%}
    {{ return(adapter.dispatch('lookup_table_columns', 'dbt_constraints')(table_relation, lookup_cache)) }}
{%- endmacro -%}


{#- Default implementation: fetch the table's column names (upper-cased) once and cache them -#}
{%- macro default__lookup_table_columns(table_relation, lookup_cache) -%}
    {%- if table_relation not in lookup_cache.table_columns -%}
        {%- set table_columns = adapter.get_columns_in_relation(table_relation) -%}

        {%- set tab_column_list = [] -%}
        {%- for column in table_columns -%}
            {#- Use do (not an output expression) so no stray "None" text is rendered -#}
            {%- do tab_column_list.append(column.name|upper) -%}
        {%- endfor -%}
        {%- do lookup_cache.table_columns.update({ table_relation: tab_column_list }) -%}
    {%- endif -%}
    {{ return(lookup_cache.table_columns[table_relation]) }}
{%- endmacro -%}


{# This macro allows us to compare two sets of columns to see if they are the same, ignoring case #}
{%- macro column_list_matches(listA, listB) -%}
    {# Test if A is empty or the lists are not the same size #}
    {%- if listA | count > 0 and listA | count == listB | count -%}
        {#- Upper-case B once instead of on every iteration -#}
        {%- set list_b_upper = listB | map('upper') | list -%}
        {# Fail if there are any columns in A that are not in B #}
        {%- for valueFromA in listA|map('upper') -%}
            {%- if valueFromA not in list_b_upper -%}
                {{ return(false) }}
            {%- endif -%}
        {% endfor %}
        {# Since we know the count is the same, A must equal B #}
        {{ return(true) }}
    {%- else -%}
        {{ return(false) }}
    {%- endif -%}
{%- endmacro -%}
--------------------------------------------------------------------------------
/macros/default__test_constraints.sql:
--------------------------------------------------------------------------------
{#- Test if the primary key is valid -#}
{%- macro default__test_primary_key(model, column_names, quote_columns=false) -%}
{#
NOTE: This test is designed to implement the "primary key" as specified in ANSI SQL 92 which states the following:
    "A unique constraint is satisfied if and only if no two rows in
    a table have the same non-null values in the unique columns. In
    addition, if the unique constraint was defined with PRIMARY KEY,
    then it requires that none of the values in the specified column or
    columns be the null value."
#}

{%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) %}

{#- This test will return for any duplicates and if any of the key columns is null -#}
with validation_errors as (
    select
        {{columns_csv}}, count(*) as row_count
    from {{model}}
    group by {{columns_csv}}
    having count(*) > 1
    {% for column in column_names -%}
        or {{column}} is null
    {% endfor %}
)

select *
from validation_errors

{%- endmacro -%}



{#- Test if the unique key is valid -#}
{%- macro default__test_unique_key(model, column_names, quote_columns=false) -%}
{#
NOTE: This test is designed to implement the "unique constraint" as specified in ANSI SQL 92 which states the following:
    "A unique constraint is satisfied if and only if no two rows in
    a table have the same non-null values in the unique columns."
#}

{%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) %}

{#- This test will return any duplicates -#}
with validation_errors as (
    select
        {{columns_csv}}
    from {{model}}
    group by {{columns_csv}}
    having count(*) > 1
)

select *
from validation_errors

{%- endmacro -%}



{#- Test if the foreign key is valid -#}
{%- macro default__test_foreign_key(model, fk_column_names, pk_table_name, pk_column_names, quote_columns=false) -%}
{#
NOTE: This test is designed to implement the "referential constraint" as specified in ANSI SQL 92 which states the following:
    "A referential constraint is satisfied if one of the following con-
    ditions is true, depending on the specified in the
    :

    - If no was specified then, for each row R1 of the
      referencing table, either at least one of the values of the
      referencing columns in R1 shall be a null value, or the value of
      each referencing column in R1 shall be equal to the value of the
      corresponding referenced column in some row of the referenced
      table."

The implications of this standard is that if one column is NULL in a compound foreign key, the other column
does NOT need to match a row in a referenced unique key. This is implemented by first excluding any
rows from the test that have a NULL value in any of the columns.
#}

{%- set fk_columns_list=dbt_constraints.get_quoted_column_list(fk_column_names, quote_columns) %}
{%- set pk_columns_list=dbt_constraints.get_quoted_column_list(pk_column_names, quote_columns) %}
{%- set fk_columns_csv=dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) %}
{%- set pk_columns_csv=dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) %}
{%- set join_conditions = [] -%}
{%- for x in range(fk_columns_list|count) -%}
    {#- do mutates the list in place; the old `set x = x.append(...)` idiom relied on
        loop-scoped rebinding and rendered nothing only by accident -#}
    {%- do join_conditions.append( 'parent.' ~ pk_columns_list[x] ~ ' = child.' ~ fk_columns_list[x] ) -%}
{%- endfor -%}

{#- This test will return if all the columns are not null
    and the values are not found in the referenced PK table #}
with child as (
    select
        {{fk_columns_csv}}
    from {{model}}
    where 1=1
    {% for column in fk_columns_list -%}
        and {{column}} is not null
    {% endfor %}
),

parent as (
    select
        {{pk_columns_csv}}
    from {{pk_table_name}}
),

validation_errors as (
    select
        child.*
    from child
    left join parent
        on {{join_conditions | join(' and ')}}

    where parent.{{pk_columns_list | first}} is null
)

select *
from validation_errors

{%- endmacro -%}


{#- Returns column_names either as-is or wrapped in the adapter's quote character -#}
{%- macro get_quoted_column_list(column_array, quote_columns=false) -%}

    {%- if not quote_columns -%}
        {%- set column_list=column_array -%}
    {%- elif quote_columns -%}
        {%- set column_list=[] -%}
        {%- for column in column_array -%}
            {%- do column_list.append( adapter.quote(column) ) -%}
        {%- endfor -%}
    {%- else -%}
        {#- Fixed: the message previously referenced an undefined `quote` variable -#}
        {{ exceptions.raise_compiler_error(
            "`quote_columns` argument must be one of [True, False] Got: '" ~ quote_columns ~"'.'"
        ) }}
    {%- endif -%}

    {{ return(column_list) }}

{%- endmacro -%}



{#- Returns column_names as a comma-separated string, optionally quoted -#}
{%- macro get_quoted_column_csv(column_array, quote_columns=false) -%}

    {%- set column_list = dbt_constraints.get_quoted_column_list(column_array, quote_columns) -%}
    {%- set columns_csv=column_list | join(', ') -%}
    {{ return(columns_csv) }}

{%- endmacro -%}
--------------------------------------------------------------------------------
/macros/macros.yml:
--------------------------------------------------------------------------------
version: 2

macros:
  # Primary macro
  - name: create_constraints
    description: Primary macro automatically called `on-run-end` to generate primary keys, unique keys, and foreign keys. The `dbt_constraints_enabled` variable can be set to `false` in your project to disable this macro.
    arguments:
      - name: constraint_types
        type: array of constraint types
        description: Accepts a list of tests to consider for constraint creation and whether columns should be quoted. By default it will create all the constraint types. Valid values are ['primary_key', 'unique_key', 'unique_combination_of_columns', 'unique', 'foreign_key', 'relationships', 'not_null']
      - name: quote_columns
        type: boolean
        description: Whether to wrap column names in double quotes. By default this is set to false.
    docs:
      show: true

  # The following five macros must be implemented for the current adapter or dbt will halt

  - name: create_primary_key
    description: Calls the adapter-specific version of the macro to create a primary key
    arguments:
      - name: table_relation
        type: relation
        description: Accepts the relation of the table that will have the constraint
      - name: column_names
        type: array of column names
        description: An array of text column names to include in the constraint
      - name: quote_columns
        type: boolean
        description: Whether to wrap column names in double quotes. By default this is set to false.
      - name: constraint_name
        type: string
        description: Name of the constraint. If not specified, a constraint name will be generated.
    docs:
      show: true

  - name: create_unique_key
    description: Calls the adapter-specific version of the macro to create a unique key
    arguments:
      - name: table_relation
        type: relation
        description: Accepts the relation of the table that will have the constraint
      - name: column_names
        type: array of column names
        description: An array of text column names to include in the constraint
      - name: quote_columns
        type: boolean
        description: Whether to wrap column names in double quotes. By default this is set to false.
      - name: constraint_name
        type: string
        description: Name of the constraint. If not specified, a constraint name will be generated.
    docs:
      show: true

  - name: create_foreign_key
    description: Calls the adapter-specific version of the macro to create a foreign key
    arguments:
      - name: pk_table_relation
        type: relation
        description: Accepts the relation of the parent table that has a PK or UK
      - name: pk_column_names
        type: array of column names
        description: An array of text column names to include in the FK reference
      - name: fk_table_relation
        type: relation
        description: Accepts the relation of the table that will have the constraint
      - name: fk_column_names
        type: array of column names
        description: An array of text column names to include in the constraint
      - name: quote_columns
        type: boolean
        description: Whether to wrap column names in double quotes. By default this is set to false.
      - name: constraint_name
        type: string
        description: Name of the constraint. If not specified, a constraint name will be generated.
79 | docs: 80 | show: true 81 | 82 | - name: unique_constraint_exists 83 | description: Calls the adapter-specific version of the macro to check if a PK or UK already exists 84 | arguments: 85 | - name: table_relation 86 | type: relation 87 | description: Accepts the relation of the table to check 88 | - name: column_names 89 | type: array of column names 90 | description: An array of text column names the constraint must contain 91 | docs: 92 | show: true 93 | 94 | - name: foreign_key_exists 95 | description: Calls the adapter-specific version of the macro to check if a foreign key already exists 96 | arguments: 97 | - name: table_relation 98 | type: relation 99 | description: Accepts the relation of the table to check 100 | - name: column_names 101 | type: array of column names 102 | description: An array of text column names the constraint must contain 103 | docs: 104 | show: true 105 | -------------------------------------------------------------------------------- /macros/oracle__create_constraints.sql: -------------------------------------------------------------------------------- 1 | {# Oracle specific implementation to create a primary key #} 2 | {%- macro oracle__create_primary_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 3 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_PK") | upper -%} 4 | 5 | {%- if constraint_name|length > 30 %} 6 | {%- set constraint_name_query %} 7 | select 'PK_' || ora_hash( '{{ constraint_name }}' ) as "constraint_name" from dual 8 | {%- endset -%} 9 | {%- set results = run_query(constraint_name_query) -%} 10 | {%- set constraint_name = results.columns[0].values()[0] -%} 11 | {% endif %} 12 | 13 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 14 | 15 | {#- Check that the table does not already have this PK/UK -#} 16 | {%- if not 
dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 17 | 18 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 19 | 20 | {%- set query -%} 21 | BEGIN 22 | EXECUTE IMMEDIATE 'ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} PRIMARY KEY ( {{columns_csv}} )'; 23 | EXCEPTION 24 | WHEN OTHERS THEN 25 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 26 | DBMS_OUTPUT.PUT_LINE('Unable to create constraint: ' || SQLERRM); 27 | END; 28 | {%- endset -%} 29 | {%- do log("Creating primary key: " ~ constraint_name, info=true) -%} 30 | {%- do run_query(query) -%} 31 | 32 | {%- else -%} 33 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 34 | {%- endif -%} 35 | 36 | {%- else -%} 37 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 38 | {%- endif -%} 39 | 40 | {%- endmacro -%} 41 | 42 | 43 | 44 | {# Oracle specific implementation to create a unique key #} 45 | {%- macro oracle__create_unique_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 46 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_UK") | upper -%} 47 | 48 | {%- if constraint_name|length > 30 %} 49 | {%- set constraint_name_query %} 50 | select 'UK_' || ora_hash( '{{ constraint_name }}' ) as "constraint_name" from dual 51 | {%- endset -%} 52 | {%- set results = run_query(constraint_name_query) -%} 53 | {%- set constraint_name = results.columns[0].values()[0] -%} 54 | {% endif %} 55 | 56 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 57 | 58 | {#- Check that the table does not already have this PK/UK -#} 59 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) 
-%} 60 | 61 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 62 | 63 | {%- set query -%} 64 | BEGIN 65 | EXECUTE IMMEDIATE 'ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} UNIQUE ( {{columns_csv}} )'; 66 | EXCEPTION 67 | WHEN OTHERS THEN 68 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 69 | DBMS_OUTPUT.PUT_LINE('Unable to create constraint: ' || SQLERRM); 70 | END; 71 | {%- endset -%} 72 | {%- do log("Creating unique key: " ~ constraint_name, info=true) -%} 73 | {%- do run_query(query) -%} 74 | 75 | {%- else -%} 76 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 77 | {%- endif -%} 78 | 79 | {%- else -%} 80 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 81 | {%- endif -%} 82 | 83 | {%- endmacro -%} 84 | 85 | 86 | 87 | {# Oracle specific implementation to create a foreign key #} 88 | {%- macro oracle__create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 89 | {%- set constraint_name = (constraint_name or fk_table_relation.identifier ~ "_" ~ fk_column_names|join('_') ~ "_FK") | upper -%} 90 | 91 | {%- if constraint_name|length > 30 %} 92 | {%- set constraint_name_query %} 93 | select 'FK_' || ora_hash( '{{ constraint_name }}' ) as "constraint_name" from dual 94 | {%- endset -%} 95 | {%- set results = run_query(constraint_name_query) -%} 96 | {%- set constraint_name = results.columns[0].values()[0] -%} 97 | {% endif %} 98 | 99 | {%- set fk_columns_csv = dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) -%} 100 | {%- set pk_columns_csv = dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) -%} 101 | {#- Check that the PK table has a PK or UK -#} 102 | {%- if 
dbt_constraints.unique_constraint_exists(pk_table_relation, pk_column_names, lookup_cache) -%} 103 | {#- Check if the table already has this foreign key -#} 104 | {%- if not dbt_constraints.foreign_key_exists(fk_table_relation, fk_column_names) -%} 105 | 106 | {%- if dbt_constraints.have_ownership_priv(fk_table_relation, verify_permissions, lookup_cache) and dbt_constraints.have_references_priv(pk_table_relation, verify_permissions) -%} 107 | 108 | {%- set query -%} 109 | BEGIN 110 | EXECUTE IMMEDIATE 'ALTER TABLE {{fk_table_relation}} ADD CONSTRAINT {{constraint_name}} FOREIGN KEY ( {{fk_columns_csv}} ) REFERENCES {{pk_table_relation}} ( {{pk_columns_csv}} )'; 111 | EXCEPTION 112 | WHEN OTHERS THEN 113 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 114 | DBMS_OUTPUT.PUT_LINE('Unable to create constraint: ' || SQLERRM); 115 | END; 116 | {%- endset -%} 117 | {%- do log("Creating foreign key: " ~ constraint_name ~ " referencing " ~ pk_table_relation.identifier ~ " " ~ pk_column_names, info=true) -%} 118 | {%- do run_query(query) -%} 119 | 120 | {%- else -%} 121 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ fk_table_relation ~ " referencing " ~ pk_table_relation, info=true) -%} 122 | {%- endif -%} 123 | 124 | {%- else -%} 125 | {%- do log("Skipping " ~ constraint_name ~ " because FK already exists: " ~ fk_table_relation ~ " " ~ fk_column_names, info=false) -%} 126 | {%- endif -%} 127 | {%- else -%} 128 | {%- do log("Skipping " ~ constraint_name ~ " because a PK/UK was not found on the PK table: " ~ pk_table_relation ~ " " ~ pk_column_names, info=true) -%} 129 | {%- endif -%} 130 | 131 | {%- endmacro -%} 132 | 133 | {# Oracle specific implementation to create a not null constraint #} 134 | {%- macro oracle__create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 135 | {%- set columns_list = dbt_constraints.get_quoted_column_list(column_names, quote_columns) -%} 136 | 137 | {%- 
if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 138 | 139 | {%- set modify_statements= [] -%} 140 | {%- for column in columns_list -%} 141 | {%- set modify_statements = modify_statements.append( column ~ " NOT NULL" ) -%} 142 | {%- endfor -%} 143 | {%- set modify_statement_csv = modify_statements | join(", ") -%} 144 | {%- set query -%} 145 | BEGIN 146 | EXECUTE IMMEDIATE 'ALTER TABLE {{table_relation}} MODIFY ( {{ modify_statement_csv }} )'; 147 | EXCEPTION 148 | WHEN OTHERS THEN 149 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 150 | DBMS_OUTPUT.PUT_LINE('Unable to create constraint: ' || SQLERRM); 151 | END; 152 | {%- endset -%} 153 | {%- do log("Creating not null constraint for: " ~ columns_list | join(", ") ~ " in " ~ table_relation, info=true) -%} 154 | {%- do run_query(query) -%} 155 | 156 | {%- else -%} 157 | {%- do log("Skipping not null constraint for " ~ columns_list | join(", ") ~ " in " ~ table_relation ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 158 | {%- endif -%} 159 | {%- endmacro -%} 160 | {#- This macro is used in create macros to avoid duplicate PK/UK constraints 161 | and to skip FK where no PK/UK constraint exists on the parent table -#} 162 | {%- macro oracle__unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 163 | {%- set lookup_query -%} 164 | select 165 | cols.constraint_name as "constraint_name", 166 | upper(cols.column_name) as "column_name" 167 | from 168 | all_constraints cons 169 | join all_cons_columns cols on cons.constraint_name = cols.constraint_name 170 | and cons.owner = cols.owner 171 | where 172 | cons.constraint_type in ( 'P', 'U' ) 173 | and upper(cons.owner) = upper('{{table_relation.schema}}') 174 | and upper(cons.table_name) = upper('{{table_relation.identifier}}') 175 | order by 1, 2 176 | {%- endset -%} 177 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 178 | {%- set constraint_list = run_query(lookup_query) -%} 179 | {%- if 
constraint_list.columns["column_name"].values() | count > 0 -%} 180 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 181 | {%- if dbt_constraints.column_list_matches(constraint.columns["column_name"].values(), column_names ) -%} 182 | {%- do log("Found PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 183 | {{ return(true) }} 184 | {%- endif -%} 185 | {% endfor %} 186 | {%- endif -%} 187 | 188 | {#- If we get this far then the table does not have either constraint -#} 189 | {%- do log("No PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 190 | {{ return(false) }} 191 | {%- endmacro -%} 192 | 193 | 194 | 195 | {#- This macro is used in create macros to avoid duplicate FK constraints -#} 196 | {%- macro oracle__foreign_key_exists(table_relation, column_names, lookup_cache) -%} 197 | {%- set lookup_query -%} 198 | select 199 | cols.constraint_name as "fk_name", 200 | upper(cols.column_name) as "fk_column_name" 201 | from 202 | all_constraints cons 203 | join all_cons_columns cols on cons.constraint_name = cols.constraint_name 204 | and cons.owner = cols.owner 205 | where 206 | cons.constraint_type in ( 'R' ) 207 | and upper(cons.owner) = upper('{{table_relation.schema}}') 208 | and upper(cons.table_name) = upper('{{table_relation.identifier}}') 209 | order by 1, 2 210 | {%- endset -%} 211 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 212 | {%- set constraint_list = run_query(lookup_query) -%} 213 | {%- if constraint_list.columns["fk_column_name"].values() | count > 0 -%} 214 | {%- for constraint in constraint_list.group_by("fk_name") -%} 215 | {%- if dbt_constraints.column_list_matches(constraint.columns["fk_column_name"].values(), column_names ) -%} 216 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 217 | {{ return(true) }} 218 | {%- endif -%} 219 | {% endfor %} 220 | {%- endif -%} 221 | 222 | {#- If we get this far then the table does not have this constraint 
-#} 223 | {%- do log("No FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 224 | {{ return(false) }} 225 | {%- endmacro -%} 226 | 227 | {#- Oracle lacks a simple way to verify privileges so we will instead use an exception handler -#} 228 | {%- macro oracle__have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 229 | {{ return(true) }} 230 | {%- endmacro -%} 231 | 232 | {#- Oracle lacks a simple way to verify privileges so we will instead use an exception handler -#} 233 | {%- macro oracle__have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 234 | {{ return(true) }} 235 | {%- endmacro -%} 236 | 237 | {% macro oracle__drop_referential_constraints(relation) -%} 238 | {%- call statement('drop_constraint_cascade') -%} 239 | BEGIN 240 | FOR REC IN ( 241 | SELECT owner, table_name, constraint_name 242 | FROM all_constraints cons 243 | WHERE cons.constraint_type IN ('P', 'U', 'R') 244 | AND upper(cons.owner) = '{{relation.schema|upper}}' 245 | AND upper(cons.table_name) = '{{relation.identifier|upper}}' 246 | ORDER BY 1 247 | ) LOOP 248 | BEGIN 249 | EXECUTE IMMEDIATE 'ALTER TABLE "'||REC.OWNER||'"."'||REC.TABLE_NAME||'" DROP CONSTRAINT "'||REC.CONSTRAINT_NAME||'" CASCADE'; 250 | EXCEPTION 251 | WHEN OTHERS THEN 252 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 253 | DBMS_OUTPUT.PUT_LINE('Unable to drop constraint: ' || SQLERRM); 254 | END; 255 | END LOOP; 256 | END; 257 | {%- endcall -%} 258 | 259 | {% endmacro %} 260 | 261 | {#- Oracle will error if you try to truncate tables with FK constraints or tables with PK/UK constraints 262 | referenced by FK so we will drop all constraints before truncating tables -#} 263 | {% macro oracle__truncate_relation(relation) -%} 264 | {%- do log("Truncating table " ~ relation, info=true) -%} 265 | {{ oracle__drop_referential_constraints(relation) }} 266 | {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }} 267 | {% endmacro %} 268 | 269 | {#- Oracle will error if 
you try to drop tables with FK constraints or tables with PK/UK constraints 270 | referenced by FK so we will drop all constraints before dropping tables -#} 271 | {% macro oracle__drop_relation(relation) -%} 272 | {%- do log("Dropping table " ~ relation, info=true) -%} 273 | {%- call statement('drop_constraint_cascade') -%} 274 | BEGIN 275 | FOR REC IN ( 276 | SELECT owner, table_name, constraint_name 277 | FROM all_constraints cons 278 | WHERE cons.constraint_type IN ('P', 'U', 'R') 279 | AND upper(cons.owner) = '{{relation.schema|upper}}' 280 | AND upper(cons.table_name) = '{{relation.identifier|upper}}' 281 | ORDER BY 1 282 | ) LOOP 283 | BEGIN 284 | EXECUTE IMMEDIATE 'ALTER TABLE "'||REC.OWNER||'"."'||REC.TABLE_NAME||'" DROP CONSTRAINT "'||REC.CONSTRAINT_NAME||'" CASCADE'; 285 | EXCEPTION 286 | WHEN OTHERS THEN 287 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 288 | DBMS_OUTPUT.PUT_LINE('Unable to drop constraint: ' || SQLERRM); 289 | END; 290 | END LOOP; 291 | FOR REC IN ( 292 | SELECT owner, table_name 293 | FROM all_tables 294 | WHERE upper(owner) = '{{relation.schema|upper}}' 295 | AND upper(table_name) = '{{relation.identifier|upper}}' 296 | ORDER BY 1 297 | ) LOOP 298 | BEGIN 299 | EXECUTE IMMEDIATE 'DROP TABLE "'||REC.OWNER||'"."'||REC.TABLE_NAME||'" CASCADE CONSTRAINTS'; 300 | EXCEPTION 301 | WHEN OTHERS THEN 302 | DBMS_OUTPUT.ENABLE(BUFFER_SIZE => NULL); 303 | DBMS_OUTPUT.PUT_LINE('Unable to drop table: ' || SQLERRM); 304 | END; 305 | END LOOP; 306 | END; 307 | {%- endcall -%} 308 | {% endmacro %} 309 | -------------------------------------------------------------------------------- /macros/postgres__create_constraints.sql: -------------------------------------------------------------------------------- 1 | {# PostgreSQL specific implementation to create a primary key #} 2 | {%- macro postgres__create_primary_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 3 | {%- set constraint_name = 
(constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_PK") | upper -%} 4 | 5 | {%- if constraint_name|length > 63 %} 6 | {%- set constraint_name_query %} 7 | select 'PK_' || md5( '{{ constraint_name }}' )::varchar as "constraint_name" 8 | {%- endset -%} 9 | {%- set results = run_query(constraint_name_query) -%} 10 | {%- set constraint_name = results.columns[0].values()[0] -%} 11 | {% endif %} 12 | 13 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 14 | 15 | {#- Check that the table does not already have this PK/UK -#} 16 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 17 | 18 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 19 | 20 | {%- do log("Creating primary key: " ~ constraint_name, info=true) -%} 21 | {%- call statement('add_pk', fetch_result=False, auto_begin=True) -%} 22 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} PRIMARY KEY ( {{columns_csv}} ) 23 | {%- endcall -%} 24 | {{ adapter.commit() }} 25 | 26 | {%- else -%} 27 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 28 | {%- endif -%} 29 | 30 | {%- else -%} 31 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 32 | {%- endif -%} 33 | 34 | {%- endmacro -%} 35 | 36 | 37 | 38 | {# PostgreSQL specific implementation to create a unique key #} 39 | {%- macro postgres__create_unique_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 40 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_UK") | upper -%} 41 | 42 | {%- if constraint_name|length > 63 %} 43 | {%- set constraint_name_query %} 44 | select 'UK_' || md5( '{{ constraint_name }}' 
)::varchar as "constraint_name" 45 | {%- endset -%} 46 | {%- set results = run_query(constraint_name_query) -%} 47 | {%- set constraint_name = results.columns[0].values()[0] -%} 48 | {% endif %} 49 | 50 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 51 | 52 | {#- Check that the table does not already have this PK/UK -#} 53 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 54 | 55 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 56 | 57 | {%- do log("Creating unique key: " ~ constraint_name, info=true) -%} 58 | {%- call statement('add_uk', fetch_result=False, auto_begin=True) -%} 59 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} UNIQUE ( {{columns_csv}} ) 60 | {%- endcall -%} 61 | {{ adapter.commit() }} 62 | 63 | {%- else -%} 64 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 65 | {%- endif -%} 66 | 67 | {%- else -%} 68 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 69 | {%- endif -%} 70 | 71 | {%- endmacro -%} 72 | 73 | {# PostgreSQL specific implementation to create a not null constraint #} 74 | {%- macro postgres__create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 75 | {%- set columns_list = dbt_constraints.get_quoted_column_list(column_names, quote_columns) -%} 76 | 77 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 78 | 79 | {%- set modify_statements= [] -%} 80 | {%- for column in columns_list -%} 81 | {%- set modify_statements = modify_statements.append( "ALTER COLUMN " ~ column ~ " SET NOT NULL" ) -%} 82 | {%- endfor -%} 83 | {%- set modify_statement_csv = modify_statements | join(", ") -%} 84 | {%- do log("Creating not null 
constraint for: " ~ columns_list | join(", ") ~ " in " ~ table_relation, info=true) -%} 85 | {%- call statement('add_nn', fetch_result=False, auto_begin=True) -%} 86 | ALTER TABLE {{table_relation}} {{ modify_statement_csv }}; 87 | {%- endcall -%} 88 | {{ adapter.commit() }} 89 | 90 | {%- else -%} 91 | {%- do log("Skipping not null constraint for " ~ columns_list | join(", ") ~ " in " ~ table_relation ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 92 | {%- endif -%} 93 | {%- endmacro -%} 94 | 95 | {# PostgreSQL specific implementation to create a foreign key #} 96 | {%- macro postgres__create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 97 | {%- set constraint_name = (constraint_name or fk_table_relation.identifier ~ "_" ~ fk_column_names|join('_') ~ "_FK") | upper -%} 98 | 99 | {%- if constraint_name|length > 63 %} 100 | {%- set constraint_name_query %} 101 | select 'FK_' || md5( '{{ constraint_name }}' )::varchar as "constraint_name" 102 | {%- endset -%} 103 | {%- set results = run_query(constraint_name_query) -%} 104 | {%- set constraint_name = results.columns[0].values()[0] -%} 105 | {% endif %} 106 | 107 | {%- set fk_columns_csv = dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) -%} 108 | {%- set pk_columns_csv = dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) -%} 109 | {#- Check that the PK table has a PK or UK -#} 110 | {%- if dbt_constraints.unique_constraint_exists(pk_table_relation, pk_column_names, lookup_cache) -%} 111 | {#- Check if the table already has this foreign key -#} 112 | {%- if not dbt_constraints.foreign_key_exists(fk_table_relation, fk_column_names, lookup_cache) -%} 113 | 114 | {%- if dbt_constraints.have_ownership_priv(fk_table_relation, verify_permissions, lookup_cache) and dbt_constraints.have_references_priv(pk_table_relation, verify_permissions, lookup_cache) 
-%} 115 | 116 | {%- do log("Creating foreign key: " ~ constraint_name ~ " referencing " ~ pk_table_relation.identifier ~ " " ~ pk_column_names, info=true) -%} 117 | {%- call statement('add_fk', fetch_result=False, auto_begin=True) -%} 118 | ALTER TABLE {{fk_table_relation}} ADD CONSTRAINT {{constraint_name}} FOREIGN KEY ( {{fk_columns_csv}} ) REFERENCES {{pk_table_relation}} ( {{pk_columns_csv}} ) ON DELETE NO ACTION DEFERRABLE INITIALLY DEFERRED 119 | {%- endcall -%} 120 | {{ adapter.commit() }} 121 | 122 | {%- else -%} 123 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ fk_table_relation ~ " referencing " ~ pk_table_relation, info=true) -%} 124 | {%- endif -%} 125 | 126 | {%- else -%} 127 | {%- do log("Skipping " ~ constraint_name ~ " because FK already exists: " ~ fk_table_relation ~ " " ~ fk_column_names, info=false) -%} 128 | {%- endif -%} 129 | {%- else -%} 130 | {%- do log("Skipping " ~ constraint_name ~ " because a PK/UK was not found on the PK table: " ~ pk_table_relation ~ " " ~ pk_column_names, info=true) -%} 131 | {%- endif -%} 132 | 133 | {%- endmacro -%} 134 | 135 | 136 | 137 | {#- This macro is used in create macros to avoid duplicate PK/UK constraints 138 | and to skip FK where no PK/UK constraint exists on the parent table -#} 139 | {%- macro postgres__unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 140 | {%- set lookup_query -%} 141 | select c.oid as constraint_name 142 | , upper(col.attname) as column_name 143 | from pg_constraint c 144 | cross join lateral unnest(c.conkey) as con(conkey) 145 | join pg_class tbl on tbl.oid = c.conrelid 146 | join pg_namespace ns on ns.oid = tbl.relnamespace 147 | join pg_attribute col on (col.attrelid = tbl.oid 148 | and col.attnum = con.conkey) 149 | where c.contype in ('p', 'u') 150 | and ns.nspname ilike '{{table_relation.schema}}' 151 | and tbl.relname ilike '{{table_relation.identifier}}' 152 | order by constraint_name 153 | {%- endset -%} 154 | 
{%- do log("Lookup: " ~ lookup_query, info=false) -%} 155 | {%- set constraint_list = run_query(lookup_query) -%} 156 | {%- if constraint_list.columns["column_name"].values() | count > 0 -%} 157 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 158 | {%- if dbt_constraints.column_list_matches(constraint.columns["column_name"].values(), column_names ) -%} 159 | {%- do log("Found PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 160 | {{ return(true) }} 161 | {%- endif -%} 162 | {% endfor %} 163 | {%- endif -%} 164 | 165 | {#- If we get this far then the table does not have either constraint -#} 166 | {%- do log("No PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 167 | {{ return(false) }} 168 | {%- endmacro -%} 169 | 170 | 171 | 172 | {#- This macro is used in create macros to avoid duplicate FK constraints -#} 173 | {%- macro postgres__foreign_key_exists(table_relation, column_names, lookup_cache) -%} 174 | {%- set lookup_query -%} 175 | select c.oid as fk_name 176 | , upper(col.attname) as fk_column_name 177 | from pg_constraint c 178 | cross join lateral unnest(c.conkey) as con(conkey) 179 | join pg_class tbl on tbl.oid = c.conrelid 180 | join pg_namespace ns on ns.oid = tbl.relnamespace 181 | join pg_attribute col on (col.attrelid = tbl.oid 182 | and col.attnum = con.conkey) 183 | where c.contype in ('f') 184 | and ns.nspname ilike '{{table_relation.schema}}' 185 | and tbl.relname ilike '{{table_relation.identifier}}' 186 | order by fk_name 187 | {%- endset -%} 188 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 189 | {%- set constraint_list = run_query(lookup_query) -%} 190 | {%- if constraint_list.columns["fk_column_name"].values() | count > 0 -%} 191 | {%- for constraint in constraint_list.group_by("fk_name") -%} 192 | {%- if dbt_constraints.column_list_matches(constraint.columns["fk_column_name"].values(), column_names ) -%} 193 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ 
column_names, info=false) -%} 194 | {{ return(true) }} 195 | {%- endif -%} 196 | {% endfor %} 197 | {%- endif -%} 198 | 199 | {#- If we get this far then the table does not have this constraint -#} 200 | {%- do log("No FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 201 | {{ return(false) }} 202 | {%- endmacro -%} 203 | 204 | 205 | {%- macro postgres__have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 206 | {%- if verify_permissions is sameas true -%} 207 | 208 | {%- set lookup_query -%} 209 | select case when count(*) > 0 then 'y' else 'n' end as "have_references" 210 | from information_schema.table_privileges t 211 | join information_schema.enabled_roles er on t.grantee = er.role_name 212 | where upper(t.table_schema) = upper('{{table_relation.schema}}') 213 | and upper(t.table_name) = upper('{{table_relation.identifier}}') 214 | {%- endset -%} 215 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 216 | {%- set results = run_query(lookup_query) -%} 217 | {%- if "y" in( results.columns["have_references"].values() ) -%} 218 | {{ return(true) }} 219 | {%- endif -%} 220 | 221 | {{ return(false) }} 222 | {%- else -%} 223 | {{ return(true) }} 224 | {%- endif -%} 225 | {%- endmacro -%} 226 | 227 | 228 | {%- macro postgres__have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 229 | {%- if verify_permissions is sameas true -%} 230 | 231 | {%- set lookup_query -%} 232 | select case when count(*) > 0 then 'y' else 'n' end as "have_ownership" 233 | from pg_catalog.pg_tables t 234 | join information_schema.enabled_roles er on t.tableowner = er.role_name 235 | where upper(t.schemaname) = upper('{{table_relation.schema}}') 236 | and upper(t.tablename) = upper('{{table_relation.identifier}}') 237 | {%- endset -%} 238 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 239 | {%- set results = run_query(lookup_query) -%} 240 | {%- if "y" in( results.columns["have_ownership"].values() ) -%} 241 | {{ 
return(true) }} 242 | {%- endif -%} 243 | 244 | {{ return(false) }} 245 | {%- else -%} 246 | {{ return(true) }} 247 | {%- endif -%} 248 | {%- endmacro -%} 249 | 250 | 251 | {% macro postgres__drop_referential_constraints(relation) -%} 252 | {%- set lookup_query -%} 253 | select constraint_name 254 | from information_schema.table_constraints 255 | where table_schema = '{{relation.schema}}' 256 | and table_name='{{relation.identifier}}' 257 | and constraint_type in ('FOREIGN KEY', 'PRIMARY KEY', 'UNIQUE') 258 | {%- endset -%} 259 | {%- set constraint_list = run_query(lookup_query) -%} 260 | 261 | {%- for constraint_name in constraint_list.columns["constraint_name"].values() -%} 262 | {%- do log("Dropping constraint: " ~ constraint_name ~ " from table " ~ relation, info=false) -%} 263 | {%- call statement('drop_constraint_cascade', fetch_result=False, auto_begin=True) -%} 264 | ALTER TABLE {{relation}} DROP CONSTRAINT IF EXISTS "{{constraint_name}}" CASCADE 265 | {%- endcall -%} 266 | {{ adapter.commit() }} 267 | {% endfor %} 268 | 269 | {% endmacro %} 270 | 271 | {#- PostgreSQL will error if you try to truncate tables with FK constraints or tables with PK/UK constraints 272 | referenced by FK so we will drop all constraints before truncating tables -#} 273 | {% macro postgres__truncate_relation(relation) -%} 274 | {{ postgres__drop_referential_constraints(relation) }} 275 | {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }} 276 | {% endmacro %} 277 | 278 | {#- PostgreSQL will get deadlocks if you try to drop tables with FK constraints or tables with PK/UK constraints 279 | referenced by FK so we will drop all constraints before dropping tables -#} 280 | {% macro postgres__drop_relation(relation) -%} 281 | {{ postgres__drop_referential_constraints(relation) }} 282 | {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }} 283 | {% endmacro %} 284 | -------------------------------------------------------------------------------- 
/macros/redshift__create_constraints.sql: -------------------------------------------------------------------------------- 1 | {# Redshift specific implementation to create a primary key #} 2 | {%- macro redshift__create_primary_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 3 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_PK") | upper -%} 4 | 5 | {%- if constraint_name|length > 127 %} 6 | {%- set constraint_name_query %} 7 | select 'PK_' || md5( '{{ constraint_name }}' )::varchar as "constraint_name" 8 | {%- endset -%} 9 | {%- set results = run_query(constraint_name_query) -%} 10 | {%- set constraint_name = results.columns[0].values()[0] -%} 11 | {% endif %} 12 | 13 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 14 | 15 | {#- Check that the table does not already have this PK/UK -#} 16 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 17 | 18 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 19 | 20 | {%- do log("Creating primary key: " ~ constraint_name, info=true) -%} 21 | {%- set query -%} 22 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} PRIMARY KEY ( {{columns_csv}} ) 23 | {%- endset -%} 24 | {%- do run_query(query) -%} 25 | 26 | {%- else -%} 27 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 28 | {%- endif -%} 29 | 30 | {%- else -%} 31 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 32 | {%- endif -%} 33 | 34 | {%- endmacro -%} 35 | 36 | 37 | 38 | {# Redshift specific implementation to create a unique key #} 39 | {%- macro redshift__create_unique_key(table_relation, column_names, verify_permissions, 
quote_columns, constraint_name, lookup_cache, rely_clause) -%} 40 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_UK") | upper -%} 41 | 42 | {%- if constraint_name|length > 127 %} 43 | {%- set constraint_name_query %} 44 | select 'UK_' || md5( '{{ constraint_name }}' )::varchar as "constraint_name" 45 | {%- endset -%} 46 | {%- set results = run_query(constraint_name_query) -%} 47 | {%- set constraint_name = results.columns[0].values()[0] -%} 48 | {% endif %} 49 | 50 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 51 | 52 | {#- Check that the table does not already have this PK/UK -#} 53 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 54 | 55 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 56 | 57 | {%- do log("Creating unique key: " ~ constraint_name, info=true) -%} 58 | {%- set query -%} 59 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} UNIQUE ( {{columns_csv}} ) 60 | {%- endset -%} 61 | {%- do run_query(query) -%} 62 | 63 | {%- else -%} 64 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=false) -%} 65 | {%- endif -%} 66 | 67 | {%- else -%} 68 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 69 | {%- endif -%} 70 | 71 | {%- endmacro -%} 72 | 73 | {# Redshift specific implementation to create a not null constraint #} 74 | {%- macro redshift__create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 75 | {%- set columns_list = dbt_constraints.get_quoted_column_list(column_names, quote_columns) -%} 76 | 77 | {%- do log("Skipping not null constraint for " ~ columns_list | join(", ") ~ " in " ~ table_relation ~ " because ALTER COLUMN SET NOT 
NULL is not supported", info=true) -%} 78 | {%- endmacro -%} 79 | 80 | {# Redshift specific implementation to create a foreign key #} 81 | {%- macro redshift__create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 82 | {%- set constraint_name = (constraint_name or fk_table_relation.identifier ~ "_" ~ fk_column_names|join('_') ~ "_FK") | upper -%} 83 | 84 | {%- if constraint_name|length > 127 %} 85 | {%- set constraint_name_query %} 86 | select 'FK_' || md5( '{{ constraint_name }}' )::varchar as "constraint_name" 87 | {%- endset -%} 88 | {%- set results = run_query(constraint_name_query) -%} 89 | {%- set constraint_name = results.columns[0].values()[0] -%} 90 | {% endif %} 91 | 92 | {%- set fk_columns_csv = dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) -%} 93 | {%- set pk_columns_csv = dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) -%} 94 | {#- Check that the PK table has a PK or UK -#} 95 | {%- if dbt_constraints.unique_constraint_exists(pk_table_relation, pk_column_names, lookup_cache) -%} 96 | {#- Check if the table already has this foreign key -#} 97 | {%- if not dbt_constraints.foreign_key_exists(fk_table_relation, fk_column_names, lookup_cache) -%} 98 | 99 | {%- if dbt_constraints.have_ownership_priv(fk_table_relation, verify_permissions, lookup_cache) and dbt_constraints.have_references_priv(pk_table_relation, verify_permissions, lookup_cache) -%} 100 | 101 | {%- do log("Creating foreign key: " ~ constraint_name ~ " referencing " ~ pk_table_relation.identifier ~ " " ~ pk_column_names, info=true) -%} 102 | {%- set query -%} 103 | --Note: ON DELETE not supported in Redshift 104 | ALTER TABLE {{fk_table_relation}} ADD CONSTRAINT {{constraint_name}} FOREIGN KEY ( {{fk_columns_csv}} ) REFERENCES {{pk_table_relation}} ( {{pk_columns_csv}} ) --ON DELETE NO ACTION DEFERRABLE INITIALLY DEFERRED 105 | {%- endset -%} 106 
| {%- do run_query(query) -%} 107 | 108 | {%- else -%} 109 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ fk_table_relation ~ " referencing " ~ pk_table_relation, info=true) -%} 110 | {%- endif -%} 111 | 112 | {%- else -%} 113 | {%- do log("Skipping " ~ constraint_name ~ " because FK already exists: " ~ fk_table_relation ~ " " ~ fk_column_names, info=false) -%} 114 | {%- endif -%} 115 | {%- else -%} 116 | {%- do log("Skipping " ~ constraint_name ~ " because a PK/UK was not found on the PK table: " ~ pk_table_relation ~ " " ~ pk_column_names, info=true) -%} 117 | {%- endif -%} 118 | 119 | {%- endmacro -%} 120 | 121 | 122 | 123 | {#- This macro is used in create macros to avoid duplicate PK/UK constraints 124 | and to skip FK where no PK/UK constraint exists on the parent table -#} 125 | {%- macro redshift__unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 126 | {%- set lookup_query -%} 127 | SELECT 128 | kc.constraint_name 129 | , lower(kc.column_name) as column_name 130 | FROM information_schema.key_column_usage kc 131 | JOIN information_schema.table_constraints tc 132 | ON kc.table_name = tc.table_name 133 | AND kc.table_schema = tc.table_schema 134 | AND kc.constraint_name = tc.constraint_name 135 | WHERE tc.constraint_type in ('PRIMARY KEY', 'UNIQUE') 136 | AND kc.table_schema ilike '{{table_relation.schema}}' 137 | AND kc.table_name ilike '{{table_relation.identifier}}' 138 | order by kc.constraint_name 139 | {%- endset -%} 140 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 141 | {%- set constraint_list = run_query(lookup_query) -%} 142 | {%- if constraint_list.columns["column_name"].values() | count > 0 -%} 143 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 144 | {%- if dbt_constraints.column_list_matches(constraint.columns["column_name"].values(), column_names ) -%} 145 | {%- do log("Found PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 146 | 
{{ return(true) }} 147 | {%- endif -%} 148 | {% endfor %} 149 | {%- endif -%} 150 | 151 | {#- If we get this far then the table does not have either constraint -#} 152 | {%- do log("No PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 153 | {{ return(false) }} 154 | {%- endmacro -%} 155 | 156 | 157 | 158 | {#- This macro is used in create macros to avoid duplicate FK constraints -#} 159 | {%- macro redshift__foreign_key_exists(table_relation, column_names, lookup_cache) -%} 160 | {%- set lookup_query -%} 161 | SELECT 162 | kc.constraint_name fk_name 163 | , lower(kc.column_name) as fk_column_name 164 | FROM information_schema.key_column_usage kc 165 | JOIN information_schema.table_constraints tc 166 | ON kc.table_name = tc.table_name 167 | AND kc.table_schema = tc.table_schema 168 | AND kc.constraint_name = tc.constraint_name 169 | WHERE tc.constraint_type='FOREIGN KEY' 170 | AND kc.table_schema ilike '{{table_relation.schema}}' 171 | AND kc.table_name ilike '{{table_relation.identifier}}' 172 | order by kc.constraint_name 173 | {%- endset -%} 174 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 175 | {%- set constraint_list = run_query(lookup_query) -%} 176 | {%- if constraint_list.columns["fk_column_name"].values() | count > 0 -%} 177 | {%- for constraint in constraint_list.group_by("fk_name") -%} 178 | {%- if dbt_constraints.column_list_matches(constraint.columns["fk_column_name"].values(), column_names ) -%} 179 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 180 | {{ return(true) }} 181 | {%- endif -%} 182 | {% endfor %} 183 | {%- endif -%} 184 | 185 | {#- If we get this far then the table does not have this constraint -#} 186 | {%- do log("No FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 187 | {{ return(false) }} 188 | {%- endmacro -%} 189 | 190 | 191 | {%- macro redshift__have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 192 | {%- if verify_permissions 
is sameas true -%} 193 | 194 | {%- set lookup_query -%} 195 | select case when count(*) > 0 then 'y' else 'n' end as "have_references" 196 | from information_schema.table_privileges t 197 | join information_schema.enabled_roles er on t.grantee = er.role_name 198 | where upper(t.table_schema) = upper('{{table_relation.schema}}') 199 | and upper(t.table_name) = upper('{{table_relation.identifier}}') 200 | {%- endset -%} 201 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 202 | {%- set results = run_query(lookup_query) -%} 203 | {%- if "y" in( results.columns["have_references"].values() ) -%} 204 | {{ return(true) }} 205 | {%- endif -%} 206 | 207 | {{ return(false) }} 208 | {%- else -%} 209 | {{ return(true) }} 210 | {%- endif -%} 211 | {%- endmacro -%} 212 | 213 | 214 | {%- macro redshift__have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 215 | {%- if verify_permissions is sameas true -%} 216 | 217 | {%- set lookup_query -%} 218 | select case when count(*) > 0 then 'y' else 'n' end as "have_ownership" 219 | from pg_catalog.pg_tables t 220 | join information_schema.enabled_roles er on t.tableowner = er.role_name 221 | where upper(t.schemaname) = upper('{{table_relation.schema}}') 222 | and upper(t.tablename) = upper('{{table_relation.identifier}}') 223 | {%- endset -%} 224 | {%- do log("Lookup: " ~ lookup_query, info=false) -%} 225 | {%- set results = run_query(lookup_query) -%} 226 | {%- if "y" in( results.columns["have_ownership"].values() ) -%} 227 | {{ return(true) }} 228 | {%- endif -%} 229 | 230 | {{ return(false) }} 231 | {%- else -%} 232 | {{ return(true) }} 233 | {%- endif -%} 234 | {%- endmacro -%} 235 | 236 | 237 | {% macro redshift__drop_referential_constraints(relation) -%} 238 | {%- set lookup_query -%} 239 | select constraint_name 240 | from information_schema.table_constraints 241 | where table_schema = '{{relation.schema}}' 242 | and table_name='{{relation.identifier}}' 243 | and constraint_type in ('FOREIGN KEY', 
'PRIMARY KEY', 'UNIQUE') 244 | {%- endset -%} 245 | {%- set constraint_list = run_query(lookup_query) -%} 246 | 247 | {%- for constraint_name in constraint_list.columns["constraint_name"].values() -%} 248 | {%- do log("Dropping constraint: " ~ constraint_name ~ " from table " ~ relation, info=false) -%} 249 | {%- set query -%} 250 | ALTER TABLE {{relation}} DROP CONSTRAINT "{{constraint_name}}" CASCADE 251 | {%- endset -%} 252 | {%- do run_query(query) -%} 253 | {% endfor %} 254 | 255 | {% endmacro %} 256 | 257 | {#- Redshift will error if you try to truncate tables with FK constraints or tables with PK/UK constraints 258 | referenced by FK so we will drop all constraints before truncating tables -#} 259 | {% macro redshift__truncate_relation(relation) -%} 260 | {{ redshift__drop_referential_constraints(relation) }} 261 | {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }} 262 | {% endmacro %} 263 | 264 | {#- Redshift will get deadlocks if you try to drop tables with FK constraints or tables with PK/UK constraints 265 | referenced by FK so we will drop all constraints before dropping tables -#} 266 | {% macro redshift__drop_relation(relation) -%} 267 | {{ redshift__drop_referential_constraints(relation) }} 268 | {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }} 269 | {% endmacro %} 270 | -------------------------------------------------------------------------------- /macros/snowflake__create_constraints.sql: -------------------------------------------------------------------------------- 1 | {#- Snowflake supports RELY and NORELY constraints for PK, UK, FK but not not_null -#} 2 | {%- macro snowflake__adapter_supports_rely_norely(test_name) -%} 3 | {%- if test_name in ( 4 | 'primary_key', 5 | 'unique_key', 6 | 'unique_combination_of_columns', 7 | 'unique', 8 | 'foreign_key', 9 | 'relationships') -%} 10 | {{ return(true) }} 11 | {%- else -%} 12 | {{ return(false) }} 13 | {%- endif -%} 14 | {%- endmacro -%} 15 | 16 | 17 | {# Snowflake 
specific implementation to create a primary key #} 18 | {%- macro snowflake__create_primary_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 19 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_PK") | upper -%} 20 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 21 | 22 | {#- Check that the table does not already have this PK/UK -#} 23 | {%- set existing_constraint = dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 24 | {%- if constraint_name == existing_constraint -%} 25 | {%- do dbt_constraints.set_rely_norely(table_relation, constraint_name, lookup_cache.unique_keys[table_relation][constraint_name].rely, rely_clause) -%} 26 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint_name: 27 | { "constraint_name": constraint_name, 28 | "columns": column_names, 29 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 30 | {%- elif none == existing_constraint -%} 31 | 32 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 33 | 34 | {%- set rely_clause = 'NORELY' if rely_clause == '' else rely_clause -%} 35 | {%- set query -%} 36 | ALTER TABLE {{ table_relation }} ADD CONSTRAINT {{ constraint_name }} PRIMARY KEY ( {{ columns_csv }} ) {{ rely_clause }} 37 | {%- endset -%} 38 | {%- do log("Creating primary key: " ~ constraint_name ~ " " ~ rely_clause, info=true) -%} 39 | {%- do run_query(query) -%} 40 | {#- Add this constraint to the lookup cache -#} 41 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint_name: 42 | { "constraint_name": constraint_name, 43 | "columns": column_names, 44 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 45 | {%- else -%} 46 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, 
info=true) -%} 47 | {%- endif -%} 48 | 49 | {%- else -%} 50 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 51 | {%- endif -%} 52 | 53 | {%- endmacro -%} 54 | 55 | 56 | 57 | 58 | {# Snowflake specific implementation to create a unique key #} 59 | {%- macro snowflake__create_unique_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 60 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_UK") | upper -%} 61 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 62 | 63 | {#- Check that the table does not already have this PK/UK -#} 64 | {%- set existing_constraint = dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 65 | {%- if constraint_name == existing_constraint -%} 66 | {%- do dbt_constraints.set_rely_norely(table_relation, constraint_name, lookup_cache.unique_keys[table_relation][constraint_name].rely, rely_clause) -%} 67 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint_name: 68 | { "constraint_name": constraint_name, 69 | "columns": column_names, 70 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 71 | {%- elif none == existing_constraint -%} 72 | 73 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 74 | 75 | {%- set rely_clause = 'NORELY' if rely_clause == '' else rely_clause -%} 76 | {%- set query -%} 77 | ALTER TABLE {{ table_relation }} ADD CONSTRAINT {{ constraint_name }} UNIQUE ( {{ columns_csv }} ) {{ rely_clause }} 78 | {%- endset -%} 79 | {%- do log("Creating unique key: " ~ constraint_name ~ " " ~ rely_clause, info=true) -%} 80 | {%- do run_query(query) -%} 81 | {#- Add this constraint to the lookup cache -#} 82 | {%- do lookup_cache.unique_keys.update({table_relation: 
{constraint_name: 83 | { "constraint_name": constraint_name, 84 | "columns": column_names, 85 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 86 | 87 | {%- else -%} 88 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 89 | {%- endif -%} 90 | 91 | {%- else -%} 92 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 93 | {%- endif -%} 94 | 95 | {%- endmacro -%} 96 | 97 | 98 | 99 | {# Snowflake specific implementation to create a foreign key #} 100 | {%- macro snowflake__create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 101 | {%- set constraint_name = (constraint_name or fk_table_relation.identifier ~ "_" ~ fk_column_names|join('_') ~ "_FK") | upper -%} 102 | {%- set fk_columns_csv = dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) -%} 103 | {%- set pk_columns_csv = dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) -%} 104 | 105 | {#- Check that the PK table has a PK or UK -#} 106 | {%- if none != dbt_constraints.unique_constraint_exists(pk_table_relation, pk_column_names, lookup_cache) -%} 107 | {#- Check if the table already has this foreign key -#} 108 | {%- set existing_constraint = dbt_constraints.foreign_key_exists(fk_table_relation, fk_column_names, lookup_cache) -%} 109 | {%- if constraint_name == existing_constraint -%} 110 | {%- do dbt_constraints.set_rely_norely(fk_table_relation, constraint_name, lookup_cache.foreign_keys[fk_table_relation][constraint_name].rely, rely_clause) -%} 111 | {%- do lookup_cache.foreign_keys.update({fk_table_relation: {constraint_name: 112 | {"constraint_name": constraint_name, 113 | "columns": fk_column_names, 114 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 115 | {%- elif none == 
existing_constraint -%} 116 | 117 | {%- if dbt_constraints.have_ownership_priv(fk_table_relation, verify_permissions, lookup_cache) and dbt_constraints.have_references_priv(pk_table_relation, verify_permissions, lookup_cache) -%} 118 | 119 | {%- set rely_clause = 'NORELY' if rely_clause == '' else rely_clause -%} 120 | {%- set query -%} 121 | ALTER TABLE {{ fk_table_relation }} ADD CONSTRAINT {{ constraint_name }} FOREIGN KEY ( {{ fk_columns_csv }} ) REFERENCES {{ pk_table_relation }} ( {{ pk_columns_csv }} ) {{ rely_clause }} 122 | {%- endset -%} 123 | {%- do log("Creating foreign key: " ~ constraint_name ~ " referencing " ~ pk_table_relation.identifier ~ " " ~ pk_column_names ~ " " ~ rely_clause, info=true) -%} 124 | {%- do run_query(query) -%} 125 | {#- Add this constraint to the lookup cache -#} 126 | {%- do lookup_cache.foreign_keys.update({fk_table_relation: {constraint_name: 127 | {"constraint_name": constraint_name, 128 | "columns": fk_column_names, 129 | "rely": "true" if rely_clause == "RELY" else "false" } } }) -%} 130 | 131 | {%- else -%} 132 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ fk_table_relation ~ " referencing " ~ pk_table_relation, info=true) -%} 133 | {%- endif -%} 134 | 135 | {%- else -%} 136 | {%- do log("Skipping " ~ constraint_name ~ " because FK already exists: " ~ fk_table_relation ~ " " ~ fk_column_names, info=false) -%} 137 | {%- endif -%} 138 | {%- else -%} 139 | {%- do log("Skipping " ~ constraint_name ~ " because a PK/UK was not found on the PK table: " ~ pk_table_relation ~ " " ~ pk_column_names, info=true) -%} 140 | {%- endif -%} 141 | 142 | {%- endmacro -%} 143 | 144 | 145 | 146 | {# Snowflake specific implementation to create a not null constraint #} 147 | {%- macro snowflake__create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 148 | {%- if not rely_clause == 'RELY' -%} 149 | {%- do log("Skipping not null constraint for " ~ 
column_names | join(", ") ~ " in " ~ table_relation ~ " because Snowflake does not support NORELY for not null constraints.", info=true) -%} 150 | {{ return(false) }} 151 | {%- endif -%} 152 | 153 | {%- set existing_not_null_col = lookup_cache.not_null_col[table_relation] -%} 154 | 155 | {%- set columns_to_change = [] -%} 156 | {%- for column_name in column_names if column_name not in existing_not_null_col -%} 157 | {%- do columns_to_change.append(column_name) -%} 158 | {%- do existing_not_null_col.append(column_name) -%} 159 | {%- endfor -%} 160 | {%- if columns_to_change|count > 0 -%} 161 | {%- set columns_list = dbt_constraints.get_quoted_column_list(columns_to_change, quote_columns) -%} 162 | 163 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 164 | 165 | {%- set modify_statements= [] -%} 166 | {%- for column in columns_list -%} 167 | {%- set modify_statements = modify_statements.append( "COLUMN " ~ column ~ " SET NOT NULL" ) -%} 168 | {%- endfor -%} 169 | {%- set modify_statement_csv = modify_statements | join(", ") -%} 170 | {%- set query -%} 171 | ALTER TABLE {{ table_relation }} MODIFY {{ modify_statement_csv }}; 172 | {%- endset -%} 173 | {%- do log("Creating not null constraint for: " ~ columns_to_change | join(", ") ~ " in " ~ table_relation ~ " " ~ rely_clause, info=true) -%} 174 | {%- do run_query(query) -%} 175 | {#- Add this constraint to the lookup cache -#} 176 | {%- set constraint_key = table_relation.identifier ~ "_" ~ columns_to_change|join('_') ~ "_NN" -%} 177 | {%- do lookup_cache.not_null_col.update({table_relation: existing_not_null_col }) -%} 178 | 179 | {%- else -%} 180 | {%- do log("Skipping not null constraint for " ~ columns_to_change | join(", ") ~ " in " ~ table_relation ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 181 | {%- endif -%} 182 | {%- else -%} 183 | {%- do log("Skipping not null constraint for " ~ column_names | join(", ") ~ " in " ~ 
table_relation ~ " because all columns are already not null", info=false) -%} 184 | {%- endif -%} 185 | 186 | {%- endmacro -%} 187 | 188 | 189 | {#- This macro alters constraints to use RELY or NORELY based on failed and passed tests -#} 190 | {%- macro set_rely_norely(table_relation, constraint_name, constraint_rely, rely_clause) -%} 191 | {%- if ( rely_clause == 'NORELY' and constraint_rely == 'true' ) 192 | or ( rely_clause == 'RELY' and constraint_rely == 'false' ) -%} 193 | {%- set query -%} 194 | ALTER TABLE {{ table_relation }} MODIFY CONSTRAINT {{ constraint_name }} {{ rely_clause }} 195 | {%- endset -%} 196 | {%- do log("Updating constraint: " ~ constraint_name ~ " " ~ rely_clause, info=true) -%} 197 | {%- do run_query(query) -%} 198 | {%- endif -%} 199 | {%- endmacro -%} 200 | 201 | 202 | {#- This macro is used in create macros to avoid duplicate PK/UK constraints 203 | and to skip FK where no PK/UK constraint exists on the parent table -#} 204 | {%- macro snowflake__unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 205 | {#- Check if we can find this constraint in the lookup cache -#} 206 | {%- if table_relation in lookup_cache.unique_keys -%} 207 | {%- set cached_unique_keys = lookup_cache.unique_keys[table_relation] -%} 208 | {%- for cached_val in cached_unique_keys.values() -%} 209 | {%- if dbt_constraints.column_list_matches(cached_val.columns, column_names ) -%} 210 | {%- do log("Found UK key: " ~ table_relation ~ " " ~ cached_val.columns ~ " " ~ cached_val.rely, info=false) -%} 211 | {{ return(cached_val.constraint_name) }} 212 | {%- endif -%} 213 | {% endfor %} 214 | {%- endif -%} 215 | 216 | {%- set lookup_query -%} 217 | SHOW UNIQUE KEYS IN TABLE {{ table_relation }} 218 | {%- endset -%} 219 | {%- set constraint_list = run_query(lookup_query) -%} 220 | {%- if constraint_list.columns["column_name"].values() | count > 0 -%} 221 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 222 | {%- set 
existing_constraint_name = (constraint.columns["constraint_name"].values() | first) -%} 223 | {%- set existing_columns = constraint.columns["column_name"].values() -%} 224 | {%- set existing_rely = (constraint.columns["rely"].values() | first) -%} 225 | {#- Add this constraint to the lookup cache -#} 226 | {%- do lookup_cache.unique_keys.update({table_relation: {existing_constraint_name: 227 | { "constraint_name": existing_constraint_name, 228 | "columns": existing_columns, 229 | "rely": existing_rely } } }) -%} 230 | {%- if dbt_constraints.column_list_matches(existing_columns, column_names ) -%} 231 | {%- do log("Found UK key: " ~ existing_constraint_name ~ " " ~ table_relation ~ " " ~ column_names ~ " " ~ existing_rely, info=false) -%} 232 | {{ return(existing_constraint_name) }} 233 | {%- endif -%} 234 | {% endfor %} 235 | {%- endif -%} 236 | 237 | {%- set lookup_query -%} 238 | SHOW PRIMARY KEYS IN TABLE {{ table_relation }} 239 | {%- endset -%} 240 | {%- set constraint_list = run_query(lookup_query) -%} 241 | {%- if constraint_list.columns["column_name"].values() | count > 0 -%} 242 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 243 | {%- set existing_constraint_name = (constraint.columns["constraint_name"].values() | first) -%} 244 | {%- set existing_columns = constraint.columns["column_name"].values() -%} 245 | {%- set existing_rely = (constraint.columns["rely"].values() | first) -%} 246 | {#- Add this constraint to the lookup cache -#} 247 | {%- do lookup_cache.unique_keys.update({table_relation: {existing_constraint_name: 248 | { "constraint_name": existing_constraint_name, 249 | "columns": existing_columns, 250 | "rely": existing_rely } } }) -%} 251 | {%- if dbt_constraints.column_list_matches(existing_columns, column_names ) -%} 252 | {%- do log("Found PK key: " ~ existing_constraint_name ~ " " ~ table_relation ~ " " ~ column_names ~ " " ~ existing_rely, info=false) -%} 253 | {{ return(existing_constraint_name) }} 254 | {%- endif 
-%} 255 | {% endfor %} 256 | {%- endif -%} 257 | 258 | {#- If we get this far then the table does not have either constraint -#} 259 | {%- do log("No PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 260 | {{ return(none) }} 261 | {%- endmacro -%} 262 | 263 | 264 | 265 | {#- This macro is used in create macros to avoid duplicate FK constraints -#} 266 | {%- macro snowflake__foreign_key_exists(table_relation, column_names, lookup_cache) -%} 267 | 268 | {#- Check if we can find this constraint in the lookup cache -#} 269 | {%- if table_relation in lookup_cache.foreign_keys -%} 270 | {%- set cached_foreign_keys = lookup_cache.foreign_keys[table_relation] -%} 271 | {%- for cached_val in cached_foreign_keys.values() -%} 272 | {%- if dbt_constraints.column_list_matches(cached_val.columns, column_names ) -%} 273 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ cached_val.constraint_name ~ " " ~ column_names ~ " " ~ cached_val.rely, info=false) -%} 274 | {{ return(cached_val.constraint_name) }} 275 | {%- endif -%} 276 | {% endfor %} 277 | {%- endif -%} 278 | 279 | {%- set lookup_query -%} 280 | SHOW IMPORTED KEYS IN TABLE {{ table_relation }} 281 | {%- endset -%} 282 | {%- set constraint_list = run_query(lookup_query) -%} 283 | {%- if constraint_list.columns["fk_column_name"].values() | count > 0 -%} 284 | {%- for constraint in constraint_list.group_by("fk_name") -%} 285 | {%- set existing_constraint_name = (constraint.columns["fk_name"].values() | first) -%} 286 | {%- set existing_columns = constraint.columns["fk_column_name"].values() -%} 287 | {%- set existing_rely = (constraint.columns["rely"].values() | first) -%} 288 | {#- Add this constraint to the lookup cache -#} 289 | {%- do lookup_cache.foreign_keys.update({table_relation: {existing_constraint_name: 290 | { "constraint_name": existing_constraint_name, 291 | "columns": existing_columns, 292 | "rely": existing_rely } } }) -%} 293 | {%- if 
dbt_constraints.column_list_matches(existing_columns, column_names ) -%} 294 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ existing_constraint_name ~ " " ~ column_names ~ " " ~ existing_rely, info=false) -%} 295 | {{ return(existing_constraint_name) }} 296 | {%- endif -%} 297 | {% endfor %} 298 | {%- endif -%} 299 | 300 | {#- If we get this far then the table does not have this constraint -#} 301 | {%- do log("No FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 302 | {{ return(none) }} 303 | {%- endmacro -%} 304 | 305 | 306 | 307 | {%- macro snowflake__have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 308 | {%- if verify_permissions is sameas true -%} 309 | 310 | {%- set table_privileges = snowflake__lookup_table_privileges(table_relation, lookup_cache) -%} 311 | {%- if "REFERENCES" in table_privileges or "OWNERSHIP" in table_privileges -%} 312 | {{ return(true) }} 313 | {%- else -%} 314 | {{ return(false) }} 315 | {%- endif -%} 316 | 317 | {%- else -%} 318 | {{ return(true) }} 319 | {%- endif -%} 320 | {%- endmacro -%} 321 | 322 | 323 | 324 | {%- macro snowflake__have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 325 | {%- if verify_permissions is sameas true -%} 326 | 327 | {%- set table_privileges = snowflake__lookup_table_privileges(table_relation, lookup_cache) -%} 328 | {%- if "OWNERSHIP" in table_privileges -%} 329 | {{ return(true) }} 330 | {%- else -%} 331 | {{ return(false) }} 332 | {%- endif -%} 333 | 334 | {%- else -%} 335 | {{ return(true) }} 336 | {%- endif -%} 337 | {%- endmacro -%} 338 | 339 | 340 | 341 | {%- macro snowflake__lookup_table_privileges(table_relation, lookup_cache) -%} 342 | 343 | {%- if table_relation.database not in lookup_cache.table_privileges -%} 344 | {%- do log("Caching privileges for database: " ~ table_relation.database, info=false) -%} 345 | 346 | {%- set lookup_query -%} 347 | select distinct 348 | upper(tp.table_schema) as "table_schema", 349 | 
upper(tp.table_name) as "table_name", 350 | tp.privilege_type as "privilege_type" 351 | from {{ table_relation.database }}.information_schema.table_privileges tp 352 | where (is_role_in_session(tp.grantee) or is_database_role_in_session(tp.grantee)) 353 | and tp.privilege_type in ('OWNERSHIP', 'REFERENCES') 354 | {%- endset -%} 355 | {%- set privilege_list = run_query(lookup_query) -%} 356 | {%- do lookup_cache.table_privileges.update({ table_relation.database: privilege_list }) -%} 357 | {%- endif -%} 358 | 359 | {%- set tab_priv_list = [] -%} 360 | {%- set schema_name = table_relation.schema|upper -%} 361 | {%- set table_name = table_relation.identifier|upper -%} 362 | {%- for row in lookup_cache.table_privileges[table_relation.database].rows -%} 363 | {%- if row["table_schema"] == schema_name and row["table_name"] == table_name -%} 364 | {%- do tab_priv_list.append(row["privilege_type"]) -%} 365 | {%- endif -%} 366 | {%- endfor -%} 367 | {{ return(tab_priv_list) }} 368 | 369 | {%- endmacro -%} 370 | 371 | 372 | 373 | {%- macro snowflake__lookup_table_columns(table_relation, lookup_cache) -%} 374 | 375 | {%- if table_relation not in lookup_cache.table_columns -%} 376 | {%- set lookup_query -%} 377 | SHOW COLUMNS IN TABLE {{ table_relation }} 378 | {%- endset -%} 379 | {%- set results = run_query(lookup_query) -%} 380 | 381 | {%- set not_null_col = [] -%} 382 | {%- set upper_column_list = [] -%} 383 | {%- for row in results.rows -%} 384 | {%- do upper_column_list.append(row["column_name"]|upper) -%} 385 | {%- if row['null?'] == 'false' -%} 386 | {%- do not_null_col.append(row["column_name"]|upper) -%} 387 | {%- endif -%} 388 | {%- endfor -%} 389 | {%- do lookup_cache.table_columns.update({ table_relation: upper_column_list }) -%} 390 | {%- do lookup_cache.not_null_col.update({ table_relation: not_null_col }) -%} 391 | {%- endif -%} 392 | {{ return(lookup_cache.table_columns[table_relation]) }} 393 | 394 | {%- endmacro -%} 395 | 
-------------------------------------------------------------------------------- /macros/vertica__create_constraints.sql: -------------------------------------------------------------------------------- 1 | {# Vertica specific implementation to create a primary key #} 2 | {%- macro vertica__create_primary_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 3 | {%- set constraint_name = (constraint_name or table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_PK") | upper -%} 4 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 5 | 6 | {#- Check that the table does not already have this PK/UK -#} 7 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 8 | 9 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 10 | 11 | {%- set query -%} 12 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} PRIMARY KEY ( {{columns_csv}} ) 13 | {%- endset -%} 14 | {%- do log("Creating primary key: " ~ constraint_name, info=true) -%} 15 | {%- do run_query(query) -%} 16 | {#- Add this constraint to the lookup cache -#} 17 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint_name: column_names} }) -%} 18 | 19 | {%- else -%} 20 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 21 | {%- endif -%} 22 | 23 | {%- else -%} 24 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 25 | {%- endif -%} 26 | {%- endmacro -%} 27 | 28 | 29 | 30 | {# Vertica specific implementation to create a unique key #} 31 | {%- macro vertica__create_unique_key(table_relation, column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 32 | {%- set constraint_name = (constraint_name or 
table_relation.identifier ~ "_" ~ column_names|join('_') ~ "_UK") | upper -%} 33 | {%- set columns_csv = dbt_constraints.get_quoted_column_csv(column_names, quote_columns) -%} 34 | 35 | {#- Check that the table does not already have this PK/UK -#} 36 | {%- if not dbt_constraints.unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 37 | 38 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 39 | 40 | {%- set query -%} 41 | ALTER TABLE {{table_relation}} ADD CONSTRAINT {{constraint_name}} UNIQUE ( {{columns_csv}} ) 42 | {%- endset -%} 43 | {%- do log("Creating unique key: " ~ constraint_name, info=true) -%} 44 | {%- do run_query(query) -%} 45 | {#- Add this constraint to the lookup cache -#} 46 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint_name: column_names} }) -%} 47 | 48 | {%- else -%} 49 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 50 | {%- endif -%} 51 | 52 | {%- else -%} 53 | {%- do log("Skipping " ~ constraint_name ~ " because PK/UK already exists: " ~ table_relation ~ " " ~ column_names, info=false) -%} 54 | {%- endif -%} 55 | {%- endmacro -%} 56 | 57 | 58 | 59 | {# Vertica specific implementation to create a foreign key #} 60 | {%- macro vertica__create_foreign_key(pk_table_relation, pk_column_names, fk_table_relation, fk_column_names, verify_permissions, quote_columns, constraint_name, lookup_cache, rely_clause) -%} 61 | {%- set constraint_name = (constraint_name or fk_table_relation.identifier ~ "_" ~ fk_column_names|join('_') ~ "_FK") | upper -%} 62 | {%- set fk_columns_csv = dbt_constraints.get_quoted_column_csv(fk_column_names, quote_columns) -%} 63 | {%- set pk_columns_csv = dbt_constraints.get_quoted_column_csv(pk_column_names, quote_columns) -%} 64 | {#- Check that the PK table has a PK or UK -#} 65 | {%- if dbt_constraints.unique_constraint_exists(pk_table_relation, pk_column_names, 
lookup_cache) -%} 66 | {#- Check if the table already has this foreign key -#} 67 | {%- if not dbt_constraints.foreign_key_exists(fk_table_relation, fk_column_names, lookup_cache) -%} 68 | 69 | {%- if dbt_constraints.have_ownership_priv(fk_table_relation, verify_permissions, lookup_cache) and dbt_constraints.have_references_priv(pk_table_relation, verify_permissions, lookup_cache) -%} 70 | 71 | {%- set query -%} 72 | ALTER TABLE {{fk_table_relation}} ADD CONSTRAINT {{constraint_name}} FOREIGN KEY ( {{fk_columns_csv}} ) REFERENCES {{pk_table_relation}} ( {{pk_columns_csv}} ) 73 | {%- endset -%} 74 | {%- do log("Creating foreign key: " ~ constraint_name ~ " referencing " ~ pk_table_relation.identifier ~ " " ~ pk_column_names, info=true) -%} 75 | {%- do run_query(query) -%} 76 | {#- Add this constraint to the lookup cache -#} 77 | {%- do lookup_cache.foreign_keys.update({fk_table_relation: {constraint_name: fk_column_names} }) -%} 78 | 79 | {%- else -%} 80 | {%- do log("Skipping " ~ constraint_name ~ " because of insufficient privileges: " ~ fk_table_relation ~ " referencing " ~ pk_table_relation, info=true) -%} 81 | {%- endif -%} 82 | 83 | {%- else -%} 84 | {%- do log("Skipping " ~ constraint_name ~ " because FK already exists: " ~ fk_table_relation ~ " " ~ fk_column_names, info=false) -%} 85 | {%- endif -%} 86 | {%- else -%} 87 | {%- do log("Skipping " ~ constraint_name ~ " because a PK/UK was not found on the PK table: " ~ pk_table_relation ~ " " ~ pk_column_names, info=true) -%} 88 | {%- endif -%} 89 | {%- endmacro -%} 90 | 91 | 92 | 93 | {# Vertica specific implementation to create a not null constraint #} 94 | {%- macro vertica__create_not_null(table_relation, column_names, verify_permissions, quote_columns, lookup_cache, rely_clause) -%} 95 | 96 | {%- set existing_not_null_col = lookup_cache.not_null_col[table_relation] -%} 97 | 98 | {%- set columns_to_change = [] -%} 99 | {%- for column_name in column_names if column_name|upper not in existing_not_null_col -%} 
100 | {%- do columns_to_change.append(column_name) -%} 101 | {%- do existing_not_null_col.append(column_name) -%} 102 | {%- endfor -%} 103 | {%- if columns_to_change|count > 0 -%} 104 | {%- set columns_list = dbt_constraints.get_quoted_column_list(columns_to_change, quote_columns) -%} 105 | 106 | {%- if dbt_constraints.have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 107 | {%- set modify_statements= [] -%} 108 | {%- for column in columns_list -%} 109 | {%- set modify_statements = modify_statements.append( "COLUMN " ~ column ~ " SET NOT NULL" ) -%} 110 | {%- endfor -%} 111 | {%- set modify_statement_csv = modify_statements | join(", ") -%} 112 | {%- set query -%} 113 | ALTER TABLE {{table_relation}} ALTER {{ modify_statement_csv }}; 114 | {%- endset -%} 115 | {%- do log("Creating not null constraint for: " ~ columns_to_change | join(", ") ~ " in " ~ table_relation, info=true) -%} 116 | {%- do run_query(query) -%} 117 | {#- Add this constraint to the lookup cache -#} 118 | {%- set constraint_key = table_relation.identifier ~ "_" ~ columns_to_change|join('_') ~ "_NN" -%} 119 | {%- do lookup_cache.not_null_col.update({table_relation: existing_not_null_col }) -%} 120 | {%- else -%} 121 | {%- do log("Skipping not null constraint for " ~ columns_to_change | join(", ") ~ " in " ~ table_relation ~ " because of insufficient privileges: " ~ table_relation, info=true) -%} 122 | {%- endif -%} 123 | {%- else -%} 124 | {%- do log("Skipping not null constraint for " ~ column_names | join(", ") ~ " in " ~ table_relation ~ " because all columns are already not null", info=false) -%} 125 | {%- endif -%} 126 | {%- endmacro -%} 127 | 128 | 129 | 130 | {#- This macro is used in create macros to avoid duplicate PK/UK constraints 131 | and to skip FK where no PK/UK constraint exists on the parent table -#} 132 | {%- macro vertica__unique_constraint_exists(table_relation, column_names, lookup_cache) -%} 133 | 134 | {#- Check if we can find this constraint in the 
lookup cache -#} 135 | {%- if table_relation in lookup_cache.unique_keys -%} 136 | {%- set cached_unique_keys = lookup_cache.unique_keys[table_relation] -%} 137 | {%- for cached_columns in cached_unique_keys.values() -%} 138 | {%- if dbt_constraints.column_list_matches(cached_columns, column_names ) -%} 139 | {%- do log("Found UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 140 | {{ return(true) }} 141 | {%- endif -%} 142 | {% endfor %} 143 | {%- endif -%} 144 | 145 | {%- set lookup_query -%} 146 | select constraint_name, column_name 147 | from constraint_columns 148 | where 149 | table_schema ilike '{{table_relation.schema}}' 150 | and table_name ilike '{{table_relation.identifier}}' 151 | and constraint_type in ('u') 152 | order by constraint_name 153 | {%- endset -%} 154 | {%- set constraint_list = run_query(lookup_query) -%} 155 | {%- if constraint_list.columns["column_name"].values() | count > 0 -%} 156 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 157 | {#- Add this constraint to the lookup cache -#} 158 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint.key_name: constraint.columns["column_name"].values()} }) -%} 159 | {% endfor %} 160 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 161 | {%- if dbt_constraints.column_list_matches(constraint.columns["column_name"].values(), column_names ) -%} 162 | {%- do log("Found UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 163 | {{ return(true) }} 164 | {%- endif -%} 165 | {% endfor %} 166 | {%- endif -%} 167 | 168 | {%- set lookup_query -%} 169 | select constraint_name, column_name 170 | from constraint_columns 171 | where 172 | table_schema ilike '{{table_relation.schema}}' 173 | and table_name ilike '{{table_relation.identifier}}' 174 | and constraint_type in ('p') 175 | order by constraint_name 176 | {%- endset -%} 177 | {%- set constraint_list = run_query(lookup_query) -%} 178 | {%- if 
constraint_list.columns["column_name"].values() | count > 0 -%} 179 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 180 | {#- Add this constraint to the lookup cache -#} 181 | {%- do lookup_cache.unique_keys.update({table_relation: {constraint.key_name: constraint.columns["column_name"].values()} }) -%} 182 | {% endfor %} 183 | {%- for constraint in constraint_list.group_by("constraint_name") -%} 184 | {%- if dbt_constraints.column_list_matches(constraint.columns["column_name"].values(), column_names ) -%} 185 | {%- do log("Found PK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 186 | {{ return(true) }} 187 | {%- endif -%} 188 | {% endfor %} 189 | {%- endif -%} 190 | 191 | {#- If we get this far then the table does not have either constraint -#} 192 | {%- do log("No PK/UK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 193 | {{ return(false) }} 194 | {%- endmacro -%} 195 | 196 | 197 | 198 | {#- This macro is used in create macros to avoid duplicate FK constraints -#} 199 | {%- macro vertica__foreign_key_exists(table_relation, column_names, lookup_cache) -%} 200 | 201 | {#- Check if we can find this constraint in the lookup cache -#} 202 | {%- if table_relation in lookup_cache.foreign_keys -%} 203 | {%- set cached_foreign_keys = lookup_cache.foreign_keys[table_relation] -%} 204 | {%- for cached_columns in cached_foreign_keys.values() -%} 205 | {%- if dbt_constraints.column_list_matches(cached_columns, column_names ) -%} 206 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 207 | {{ return(true) }} 208 | {%- endif -%} 209 | {% endfor %} 210 | {%- endif -%} 211 | 212 | {%- set lookup_query -%} 213 | select constraint_name as fk_name, column_name as fk_column_name 214 | from constraint_columns 215 | where 216 | table_schema ilike '{{table_relation.schema}}' 217 | and table_name ilike '{{table_relation.identifier}}' 218 | and constraint_type in ('f') 219 | order by constraint_name 
220 | {%- endset -%} 221 | {%- set constraint_list = run_query(lookup_query) -%} 222 | {%- if constraint_list.columns["fk_column_name"].values() | count > 0 -%} 223 | {%- for constraint in constraint_list.group_by("fk_name") -%} 224 | {#- Add this constraint to the lookup cache -#} 225 | {%- do lookup_cache.foreign_keys.update({table_relation: {constraint.key_name: constraint.columns["fk_column_name"].values()} }) -%} 226 | {% endfor %} 227 | {%- for constraint in constraint_list.group_by("fk_name") -%} 228 | {%- if dbt_constraints.column_list_matches(constraint.columns["fk_column_name"].values(), column_names ) -%} 229 | {%- do log("Found FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 230 | {{ return(true) }} 231 | {%- endif -%} 232 | {% endfor %} 233 | {%- endif -%} 234 | 235 | {#- If we get this far then the table does not have this constraint -#} 236 | {%- do log("No FK key: " ~ table_relation ~ " " ~ column_names, info=false) -%} 237 | {{ return(false) }} 238 | {%- endmacro -%} 239 | 240 | 241 | 242 | {%- macro vertica__have_references_priv(table_relation, verify_permissions, lookup_cache) -%} 243 | {%- if verify_permissions is sameas true -%} 244 | 245 | {%- set table_privileges = vertica__lookup_table_privileges(table_relation, lookup_cache) -%} 246 | {%- if "REFERENCES" in table_privileges or "OWNERSHIP" in table_privileges -%} 247 | {{ return(true) }} 248 | {%- else -%} 249 | {{ return(false) }} 250 | {%- endif -%} 251 | 252 | {%- else -%} 253 | {{ return(true) }} 254 | {%- endif -%} 255 | {%- endmacro -%} 256 | 257 | 258 | 259 | {%- macro vertica__have_ownership_priv(table_relation, verify_permissions, lookup_cache) -%} 260 | {%- if verify_permissions is sameas true -%} 261 | 262 | {%- set table_privileges = vertica__lookup_table_privileges(table_relation, lookup_cache) -%} 263 | {%- if "OWNERSHIP" in table_privileges -%} 264 | {{ return(true) }} 265 | {%- else -%} 266 | {{ return(false) }} 267 | {%- endif -%} 268 | 269 | {%- else -%} 
        {{ return(true) }}
    {%- endif -%}
{%- endmacro -%}



{#- Returns the list of privilege types ('OWNERSHIP'/'REFERENCES') the current
    session's roles hold on the given table. The underlying query is run once
    per database and cached in lookup_cache.table_privileges; the per-table
    filtering happens in Jinja against the cached result. -#}
{%- macro vertica__lookup_table_privileges(table_relation, lookup_cache) -%}

    {%- if table_relation.database not in lookup_cache.table_privileges -%}
        {%- set lookup_query -%}
        select distinct
            upper(tp.table_schema) as "table_schema",
            upper(tp.table_name) as "table_name",
            tp.privilege_type as "privilege_type"
        from {{table_relation.database}}.information_schema.table_privileges tp
        where is_role_in_session(tp.grantee)
        and tp.privilege_type in ('OWNERSHIP', 'REFERENCES')
        {%- endset -%}
        {%- do log("Caching privileges for database: " ~ table_relation.database, info=false) -%}
        {%- set privilege_list = run_query(lookup_query) -%}
        {%- do lookup_cache.table_privileges.update({ table_relation.database: privilege_list }) -%}
    {%- endif -%}

    {#- Filter the cached database-wide result down to this one table.
        Schema/identifier are upper-cased to match the upper() in the query. -#}
    {%- set tab_priv_list = [] -%}
    {%- set schema_name = table_relation.schema|upper -%}
    {%- set table_name = table_relation.identifier|upper -%}
    {%- for row in lookup_cache.table_privileges[table_relation.database].rows -%}
        {%- if row["table_schema"] == schema_name and row["table_name"] == table_name -%}
            {%- do tab_priv_list.append(row["privilege_type"]) -%}
        {%- endif -%}
    {%- endfor -%}
    {{ return(tab_priv_list) }}

{%- endmacro -%}



{#- Returns the upper-cased column names of the table. Also populates
    lookup_cache.not_null_col with the table's NOT NULL columns as a side
    effect, so callers can check nullability without a second query. -#}
{%- macro vertica__lookup_table_columns(table_relation, lookup_cache) -%}

    {%- if table_relation not in lookup_cache.table_columns -%}
        {%- set lookup_query -%}
        select column_name, is_nullable
        from columns
        where table_schema ilike '{{table_relation.schema}}'
        and table_name ilike '{{table_relation.identifier}}'
        {%- endset -%}
        {%- set results = run_query(lookup_query) -%}
        {%- set not_null_col = [] -%}
        {%- set upper_column_list = [] -%}
        {%- for row in results.rows -%}
            {%- do upper_column_list.append(row["column_name"]|upper) -%}
            {#- NOTE(review): assumes is_nullable is returned as a boolean
                False rather than a string like 'f' — confirm against the
                agate typing of Vertica's v_catalog.columns results -#}
            {%- if row['is_nullable'] == False -%}
                {%- do not_null_col.append(row["column_name"]|upper) -%}
            {%- endif -%}
        {%- endfor -%}
        {%- do lookup_cache.table_columns.update({ table_relation: upper_column_list }) -%}

        {%- do lookup_cache.not_null_col.update({ table_relation: not_null_col }) -%}
    {%- endif -%}
    {{ return(lookup_cache.table_columns[table_relation]) }}

{%- endmacro -%}



{#- Vertica has no CREATE INDEX support, so index creation is a logged no-op -#}
{%- macro vertica__get_create_index_sql(table_relation, lookup_cache) -%}

    {%- do log("Skipping creation of indexes, they are not supported by Vertica", info=true) -%}

{%- endmacro -%}