├── .changes ├── 0.0.0.md ├── 1.9.0.md ├── 1.9.0 │ ├── Breaking Changes-20241024-172250.yaml │ ├── Dependencies-20241218-155103.yaml │ ├── Features-20241202-132727.yaml │ ├── Features-20241218-151645.yaml │ ├── Features-20241218-153349.yaml │ ├── Under the Hood-20241024-172431.yaml │ └── Under the Hood-20241105-083613.yaml ├── 1.9.1.md ├── 1.9.1 │ └── Fixes-20250325-161315.yaml ├── 1.9.2.md ├── 1.9.2 │ ├── Dependencies-20250528-191726.yaml │ ├── Dependencies-20250530-143839.yaml │ ├── Dependencies-20250530-145103.yaml │ ├── Features-20250310-155741.yaml │ ├── Features-20250602-212247.yaml │ ├── Fixes-20250220-121852.yaml │ └── Fixes-20250312-000346.yaml ├── header.tpl.md └── unreleased │ └── .gitkeep ├── .changie.yaml ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── config.yml │ └── feature_request.yml ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── bot-changelog.yml │ ├── changelog-existence.yml │ ├── ci.yml │ ├── release.yml │ ├── security.yml │ └── version-bump.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE.txt ├── Makefile ├── README.md ├── assets └── images │ ├── Starburst_Logo_Black+Blue.svg │ ├── Starburst_Logo_White+Blue.svg │ ├── dbt-signature_tm.svg │ ├── dbt-signature_tm_light.svg │ ├── trino-logo-dk-bg.svg │ └── trino-logo-w-bk.svg ├── dbt ├── adapters │ └── trino │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── column.py │ │ ├── connections.py │ │ ├── impl.py │ │ └── relation.py └── include │ └── trino │ ├── __init__.py │ ├── dbt_project.yml │ ├── macros │ ├── adapters.sql │ ├── apply_grants.sql │ ├── catalog.sql │ ├── materializations │ │ ├── incremental.sql │ │ ├── materialized_view.sql │ │ ├── seeds │ │ │ └── helpers.sql │ │ ├── snapshot.sql │ │ ├── table.sql │ │ └── view.sql │ └── utils │ │ ├── any_value.sql │ │ ├── array_append.sql │ │ ├── array_concat.sql │ │ ├── array_construct.sql │ │ ├── bool_or.sql │ │ ├── datatypes.sql │ │ ├── date_spine.sql │ │ ├── date_trunc.sql │ │ ├── dateadd.sql │ │ ├── datediff.sql │ │ ├── hash.sql │ │ ├── listagg.sql │ │ ├── right.sql │ │ ├── safe_cast.sql │ │ ├── split_part.sql │ │ └── timestamps.sql │ └── sample_profiles.yml ├── dev_requirements.txt ├── docker-compose-starburst.yml ├── docker-compose-trino.yml ├── docker ├── init_starburst.bash ├── init_trino.bash ├── remove_starburst.bash ├── remove_trino.bash ├── starburst │ ├── catalog │ │ ├── delta.properties │ │ ├── hive.properties │ │ ├── iceberg.properties │ │ ├── memory.properties │ │ ├── postgresql.properties │ │ └── tpch.properties │ └── etc │ │ ├── config.properties │ │ ├── jvm.config │ │ └── node.properties └── trino │ ├── catalog │ ├── delta.properties │ ├── hive.properties │ ├── iceberg.properties │ ├── memory.properties │ ├── postgresql.properties │ └── tpch.properties │ └── etc │ ├── config.properties │ ├── jvm.config │ └── node.properties ├── mypy.ini ├── pytest.ini ├── setup.py ├── tests ├── conftest.py ├── functional │ └── adapter │ │ ├── behavior_flags │ │ └── test_require_certificate_validation.py │ │ ├── column_types │ │ ├── fixtures.py │ │ └── test_column_types.py │ │ ├── constraints │ │ ├── fixtures.py │ │ └── test_constraints.py │ │ ├── dbt_clone │ │ └── test_dbt_clone.py │ │ ├── dbt_debug │ │ └── test_dbt_debug.py │ │ ├── dbt_show │ │ └── test_dbt_show.py │ │ ├── empty │ │ └── test_empty.py │ │ ├── fixture_datediff.py │ │ ├── hooks │ │ ├── data │ │ │ ├── seed_model.sql │ │ │ └── seed_run.sql │ │ ├── test_hooks_delete.py │ │ ├── test_model_hooks.py │ │ └── test_run_hooks.py │ 
│ ├── materialization │ │ ├── fixtures.py │ │ ├── test_incremental_delete_insert.py │ │ ├── test_incremental_merge.py │ │ ├── test_incremental_microbatch.py │ │ ├── test_incremental_predicates.py │ │ ├── test_incremental_schema.py │ │ ├── test_incremental_views_enabled.py │ │ ├── test_materialized_view.py │ │ ├── test_on_table_exists.py │ │ ├── test_prepared_statements.py │ │ ├── test_snapshot.py │ │ └── test_view_security.py │ │ ├── materialized_view_tests │ │ ├── test_materialized_view_dbt_core.py │ │ └── utils.py │ │ ├── persist_docs │ │ ├── fixtures.py │ │ └── test_persist_docs.py │ │ ├── show │ │ ├── fixtures.py │ │ └── test_show.py │ │ ├── simple_seed │ │ ├── seed_bom.csv │ │ ├── seeds.py │ │ └── test_seed.py │ │ ├── store_failures │ │ ├── fixtures.py │ │ └── test_store_failures.py │ │ ├── test_basic.py │ │ ├── test_caching.py │ │ ├── test_changing_relation_type.py │ │ ├── test_concurrency.py │ │ ├── test_custom_schema.py │ │ ├── test_ephemeral.py │ │ ├── test_get_incremental_tmp_relation_type_macro.py │ │ ├── test_grants.py │ │ ├── test_query_comments.py │ │ ├── test_quote_policy.py │ │ ├── test_seeds_column_types_overrides.py │ │ ├── test_session_property.py │ │ ├── test_simple_copy.py │ │ ├── test_simple_snapshot.py │ │ ├── test_sql_status_output.py │ │ ├── test_table_properties.py │ │ ├── unit_testing │ │ └── test_unit_testing.py │ │ └── utils │ │ ├── fixture_date_spine.py │ │ ├── fixture_get_intervals_between.py │ │ ├── test_data_types.py │ │ ├── test_date_spine.py │ │ ├── test_get_intervals_between.py │ │ ├── test_timestamps.py │ │ └── test_utils.py └── unit │ ├── __init__.py │ ├── test_adapter.py │ └── utils.py └── tox.ini /.changes/0.0.0.md: -------------------------------------------------------------------------------- 1 | ## Previous Releases 2 | 3 | For information on prior major and minor releases, see their changelogs: 4 | 5 | * [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md) 6 | * [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md) 7 | * [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md) 8 | * [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md) 9 | * [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md) 10 | * [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md) 11 | * [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md) 12 | * [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md) 13 | * [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md) 14 | -------------------------------------------------------------------------------- /.changes/1.9.0.md: -------------------------------------------------------------------------------- 1 | ## dbt-trino 1.9.0 - December 20, 2024 2 | ### Breaking Changes 3 | - Drop support for Python 3.8 ([#439](https://github.com/starburstdata/dbt-trino/pull/439)) 4 | ### Features 5 | - Microbatch incremental strategy ([#453](https://github.com/starburstdata/dbt-trino/pull/453)) 6 | - Allow configuring of snapshot column names ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 7 | - Enable setting current value of dbt_valid_to ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 8 | ### Under the Hood 9 | - Add tests against Python 3.13 ([#439](https://github.com/starburstdata/dbt-trino/pull/439)) 10 | - Update trino__get_columns_in_relation to use information_schema.columns 
([#443](https://github.com/starburstdata/dbt-trino/issues/443), [#444](https://github.com/starburstdata/dbt-trino/pull/444)) 11 | ### Dependencies 12 | - Update dependencies ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 13 | 14 | ### Contributors 15 | - [@damian3031](https://github.com/damian3031) ([#439](https://github.com/starburstdata/dbt-trino/pull/439), [#453](https://github.com/starburstdata/dbt-trino/pull/453), [#462](https://github.com/starburstdata/dbt-trino/pull/462), [#462](https://github.com/starburstdata/dbt-trino/pull/462), [#439](https://github.com/starburstdata/dbt-trino/pull/439), [#462](https://github.com/starburstdata/dbt-trino/pull/462)) 16 | - [@posulliv](https://github.com/posulliv) ([#444](https://github.com/starburstdata/dbt-trino/pull/444)) 17 | -------------------------------------------------------------------------------- /.changes/1.9.0/Breaking Changes-20241024-172250.yaml: -------------------------------------------------------------------------------- 1 | kind: Breaking Changes 2 | body: Drop support for Python 3.8 3 | time: 2024-10-24T17:22:50.717059+02:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "439" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Dependencies-20241218-155103.yaml: -------------------------------------------------------------------------------- 1 | kind: Dependencies 2 | body: Update dependencies 3 | time: 2024-12-18T15:51:03.624898+01:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "462" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Features-20241202-132727.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: Microbatch incremental strategy 3 | time: 2024-12-02T13:27:27.845398+01:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "453" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Features-20241218-151645.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: Allow configuring of snapshot column names 3 | time: 2024-12-18T15:16:45.941759+01:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "462" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Features-20241218-153349.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: Enable setting current value of dbt_valid_to 3 | time: 2024-12-18T15:33:49.520897+01:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "462" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Under the Hood-20241024-172431.yaml: -------------------------------------------------------------------------------- 1 | kind: Under the Hood 2 | body: Add tests against Python 3.13 3 | time: 2024-10-24T17:24:31.976133+02:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "439" 8 | -------------------------------------------------------------------------------- /.changes/1.9.0/Under the Hood-20241105-083613.yaml: -------------------------------------------------------------------------------- 1 | kind: Under the Hood 2 | body: Update trino__get_columns_in_relation to use information_schema.columns 3 | time: 2024-11-05T08:36:13.788945-05:00 4 | custom: 5 | Author: posulliv 6 | Issue: "443" 7 | PR: "444" 8 | 
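The "Under the Hood" entry above points trino__get_columns_in_relation at information_schema.columns. A minimal sketch of that kind of metadata query, with hypothetical schema and table names (the actual macro lives in dbt/include/trino/macros/adapters.sql, which is not part of this excerpt):

    select column_name, data_type
    from information_schema.columns
    where table_schema = 'analytics'
      and table_name = 'orders'
    order by ordinal_position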
-------------------------------------------------------------------------------- /.changes/1.9.1.md: -------------------------------------------------------------------------------- 1 | ## dbt-trino 1.9.1 - March 26, 2025 2 | ### Fixes 3 | - Avoid treating VARBINARY and JSON as a string types ([#437](https://github.com/starburstdata/dbt-trino/issues/437), [#475](https://github.com/starburstdata/dbt-trino/pull/475)) 4 | 5 | ### Contributors 6 | - [@damian3031](https://github.com/damian3031) ([#475](https://github.com/starburstdata/dbt-trino/pull/475)) 7 | -------------------------------------------------------------------------------- /.changes/1.9.1/Fixes-20250325-161315.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: Avoid treating VARBINARY and JSON as a string types 3 | time: 2025-03-25T16:13:15.227499+01:00 4 | custom: 5 | Author: damian3031 6 | Issue: "437" 7 | PR: "475" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2.md: -------------------------------------------------------------------------------- 1 | ## dbt-trino 1.9.2 - June 03, 2025 2 | ### Features 3 | - To allow to set grace period for mv ([#472](https://github.com/starburstdata/dbt-trino/pull/472)) 4 | - table materialization on_table_exists=skip option ([#479](https://github.com/starburstdata/dbt-trino/issues/479), [#481](https://github.com/starburstdata/dbt-trino/pull/481)) 5 | ### Fixes 6 | - Add __dbt_tmp suffix to specified location for temporary tables ([#467](https://github.com/starburstdata/dbt-trino/issues/467), [#468](https://github.com/starburstdata/dbt-trino/pull/468)) 7 | - Fix handling of composite unique_key in incremental models ([#465](https://github.com/starburstdata/dbt-trino/issues/465), [#473](https://github.com/starburstdata/dbt-trino/pull/473)) 8 | ### Dependencies 9 | - Update dbt-adapters to 1.15.1 ([#483](https://github.com/starburstdata/dbt-trino/pull/483)) 10 | - Bump dbt-tests-adapter to 1.15.1 ([#484](https://github.com/starburstdata/dbt-trino/pull/484)) 11 | - Bump dbt-common to 1.25.0 ([#484](https://github.com/starburstdata/dbt-trino/pull/484)) 12 | 13 | ### Contributors 14 | - [@AlexandrKhabarov](https://github.com/AlexandrKhabarov) ([#472](https://github.com/starburstdata/dbt-trino/pull/472)) 15 | - [@choyrim](https://github.com/choyrim) ([#481](https://github.com/starburstdata/dbt-trino/pull/481)) 16 | - [@damian3031](https://github.com/damian3031) ([#483](https://github.com/starburstdata/dbt-trino/pull/483), [#484](https://github.com/starburstdata/dbt-trino/pull/484), [#484](https://github.com/starburstdata/dbt-trino/pull/484)) 17 | - [@yakovlevvs](https://github.com/yakovlevvs) ([#468](https://github.com/starburstdata/dbt-trino/pull/468), [#473](https://github.com/starburstdata/dbt-trino/pull/473)) 18 | -------------------------------------------------------------------------------- /.changes/1.9.2/Dependencies-20250528-191726.yaml: -------------------------------------------------------------------------------- 1 | kind: Dependencies 2 | body: Update dbt-adapters to 1.15.1 3 | time: 2025-05-28T19:17:26.738442+02:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "483" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Dependencies-20250530-143839.yaml: -------------------------------------------------------------------------------- 1 | kind: Dependencies 2 | body: Bump dbt-tests-adapter to 1.15.1 3 | time: 
2025-05-30T14:38:39.707293+02:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "484" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Dependencies-20250530-145103.yaml: -------------------------------------------------------------------------------- 1 | kind: Dependencies 2 | body: Bump dbt-common to 1.25.0 3 | time: 2025-05-30T14:51:03.097835+02:00 4 | custom: 5 | Author: damian3031 6 | Issue: "" 7 | PR: "484" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Features-20250310-155741.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: To allow to set grace period for mv 3 | time: 2025-03-10T15:57:41.485966118+03:00 4 | custom: 5 | Author: AlexandrKhabarov 6 | Issue: "" 7 | PR: "472" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Features-20250602-212247.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: table materialization on_table_exists=skip option 3 | time: 2025-06-02T21:22:47.837474-04:00 4 | custom: 5 | Author: choyrim 6 | Issue: "479" 7 | PR: "481" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Fixes-20250220-121852.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: Add __dbt_tmp suffix to specified location for temporary tables 3 | time: 2025-02-20T12:18:52.200467772+03:00 4 | custom: 5 | Author: yakovlevvs 6 | Issue: "467" 7 | PR: "468" 8 | -------------------------------------------------------------------------------- /.changes/1.9.2/Fixes-20250312-000346.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: Fix handling of composite unique_key in incremental models 3 | time: 2025-03-12T00:03:46.198336572+03:00 4 | custom: 5 | Author: yakovlevvs 6 | Issue: "465" 7 | PR: "473" 8 | -------------------------------------------------------------------------------- /.changes/header.tpl.md: -------------------------------------------------------------------------------- 1 | # dbt-trino Changelog 2 | 3 | - This file provides a full account of all changes to `dbt-trino` 4 | - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. 5 | - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. 6 | - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry) 7 | -------------------------------------------------------------------------------- /.changes/unreleased/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/starburstdata/dbt-trino/8463e99cfbb4fea4b9dbac9f071ed81e42d0120e/.changes/unreleased/.gitkeep -------------------------------------------------------------------------------- /.changie.yaml: -------------------------------------------------------------------------------- 1 | changesDir: .changes 2 | unreleasedDir: unreleased 3 | headerPath: header.tpl.md 4 | versionHeaderPath: "" 5 | changelogPath: CHANGELOG.md 6 | versionExt: md 7 | versionFormat: '## dbt-trino {{.Version}} - {{.Time.Format "January 02, 2006"}}' 8 | kindFormat: '### {{.Kind}}' 9 | changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' 10 | 11 | kinds: 12 | - label: Breaking Changes 13 | - label: Features 14 | - label: Fixes 15 | - label: Under the Hood 16 | - label: Dependencies 17 | changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' 18 | - label: Security 19 | changeFormat: '- {{.Body}} ({{if ne .Custom.Issue ""}}[#{{.Custom.Issue}}](https://github.com/starburstdata/dbt-trino/issues/{{.Custom.Issue}}), {{end}}[#{{.Custom.PR}}](https://github.com/starburstdata/dbt-trino/pull/{{.Custom.PR}}))' 20 | 21 | newlines: 22 | beforeChangelogHeader: 1 23 | 24 | custom: 25 | - key: Author 26 | label: GitHub Username(s) (separated by a single space if multiple) 27 | type: string 28 | minLength: 3 29 | - key: Issue 30 | label: GitHub Issue Number 31 | type: int 32 | minInt: 1 33 | optional: true 34 | - key: PR 35 | label: GitHub Pull Request Number 36 | type: int 37 | minInt: 1 38 | 39 | footerFormat: | 40 | {{- $contributorDict := dict }} 41 | {{- range $change := .Changes }} 42 | {{- $authorList := splitList " " $change.Custom.Author }} 43 | {{- /* loop through all authors for a PR */}} 44 | {{- range $author := $authorList }} 45 | {{- $authorLower := lower $author }} 46 | {{- $prLink := $change.Kind }} 47 | {{- $prLink = "[#pr](https://github.com/starburstdata/dbt-trino/pull/pr)" | replace "pr" $change.Custom.PR }} 48 | {{- /* check if this contributor has other PRs associated with them already */}} 49 | {{- if hasKey $contributorDict $author }} 50 | {{- $prList := get $contributorDict $author }} 51 | {{- $prList = append $prList $prLink }} 52 | {{- $contributorDict := set $contributorDict $author $prList }} 53 | {{- else }} 54 | {{- $prList := list $prLink }} 55 | {{- $contributorDict := set $contributorDict $author $prList }} 56 | {{- end }} 57 | {{- end}} 58 | {{- end }} 59 | {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} 60 | {{- if $contributorDict}} 61 | ### Contributors 62 | {{- range $k,$v := $contributorDict }} 63 | - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) 64 | {{- end }} 65 | {{- end }} 66 | 
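To make the templates above concrete: changeFormat turns each YAML file under .changes/<version>/ into one changelog bullet, emitting an issue link only when Custom.Issue is non-empty. The 1.9.0 "Under the Hood" entry with Issue "443" and PR "444" therefore renders as

    - Update trino__get_columns_in_relation to use information_schema.columns ([#443](https://github.com/starburstdata/dbt-trino/issues/443), [#444](https://github.com/starburstdata/dbt-trino/pull/444))

exactly as it appears in .changes/1.9.0.md, while entries with an empty Issue (for example PR "439") render with the PR link alone. footerFormat then groups the PR links by author to produce the Contributors section.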
-------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | select = 3 | E 4 | W 5 | F 6 | ignore = 7 | W503, 8 | W504, 9 | E203, 10 | E741, 11 | E501, 12 | exclude = test 13 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | description: Report a bug or an issue you've found with dbt-trino 4 | labels: bug 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: Expected behavior 9 | description: What do you think should have happened 10 | placeholder: > 11 | A clear and concise description of what you expected to happen. 12 | validations: 13 | required: true 14 | - type: textarea 15 | attributes: 16 | label: Actual behavior 17 | description: Describe what actually happened 18 | placeholder: > 19 | A clear and concise description of what actually happened. 20 | validations: 21 | required: true 22 | - type: textarea 23 | attributes: 24 | label: Steps To Reproduce 25 | description: This will help us reproduce your issue 26 | placeholder: > 27 | In as much detail as possible, please provide steps to reproduce the issue. 28 | Sample code that triggers the issue, relevant server settings, etc is all very helpful here. 29 | validations: 30 | required: true 31 | - type: textarea 32 | attributes: 33 | label: Log output/Screenshots 34 | description: What do you think went wrong? 35 | placeholder: > 36 | If applicable, add log output and/or screenshots to help explain your problem. 37 | - type: input 38 | attributes: 39 | label: Operating System 40 | description: What Operating System are you using? 41 | placeholder: "You can get it via `cat /etc/os-release` for example" 42 | validations: 43 | required: true 44 | - type: input 45 | attributes: 46 | label: dbt version 47 | description: "Execute `dbt --version`" 48 | placeholder: Which version of dbt are you using? 49 | validations: 50 | required: true 51 | - type: input 52 | attributes: 53 | label: Trino Server version 54 | description: "Run `SELECT VERSION();` on your Trino server" 55 | placeholder: Which Trino server version are you using? 56 | validations: 57 | required: true 58 | - type: input 59 | attributes: 60 | label: Python version 61 | description: "You can get it via executing `python --version`" 62 | placeholder: What Python version are you using? 63 | validations: 64 | required: true 65 | - type: checkboxes 66 | attributes: 67 | label: Are you willing to submit PR? 68 | description: > 69 | This is absolutely not required, but we are happy to guide you in the contribution process 70 | especially if you already have a good understanding of how to implement the feature. 71 | options: 72 | - label: Yes I am willing to submit a PR! 73 | - type: markdown 74 | attributes: 75 | value: "Thanks for completing our form!" 76 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | contact_links: 3 | - name: Ask a question or get help around `dbt-trino` on Slack 4 | url: https://getdbt.slack.com/channels/db-presto-trino 5 | about: Get help and share your experiences around `dbt-trino` with the `dbt` Slack community. 
6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | description: Suggest an idea for dbt-trino 4 | labels: enhancement 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: Describe the feature 9 | description: What would you like to happen? 10 | placeholder: > 11 | A clear and concise description of what you want to happen 12 | and what problem it would solve. 13 | validations: 14 | required: true 15 | - type: textarea 16 | attributes: 17 | label: Describe alternatives you've considered 18 | description: What did you try to make it happen? 19 | placeholder: > 20 | A clear and concise description of any alternative solutions or features you've considered. 21 | - type: textarea 22 | attributes: 23 | label: Who will benefit? 24 | placeholder: > 25 | What kind of use case will this feature be useful for? Please be specific and provide examples; this will help us prioritize properly. 26 | - type: checkboxes 27 | attributes: 28 | label: Are you willing to submit PR? 29 | description: > 30 | This is absolutely not required, but we are happy to guide you in the contribution process 31 | especially if you already have a good understanding of how to implement the feature. 32 | options: 33 | - label: Yes I am willing to submit a PR! 34 | - type: markdown 35 | attributes: 36 | value: "Thanks for completing our form!" 37 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # python dependencies 4 | - package-ecosystem: "pip" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | rebase-strategy: "disabled" 9 | labels: 10 | - "Skip Changelog" 11 | - "dependencies" 12 | - package-ecosystem: "github-actions" 13 | directory: "/" 14 | schedule: 15 | interval: "weekly" 16 | rebase-strategy: "disabled" 17 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Overview 2 | 10 | 11 | ## Checklist 12 | 13 | - [ ] I have run this code in development and it appears to resolve the stated issue 14 | - [ ] This PR includes tests, or tests are not required/relevant for this PR 15 | - [ ] `README.md` updated and added information about my change 16 | - [ ] I have run `changie new` to [create a changelog entry](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#Adding-CHANGELOG-Entry) 17 | -------------------------------------------------------------------------------- /.github/workflows/bot-changelog.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # When bots create a PR, this action will add a corresponding changie yaml file to that 3 | # PR when a specific label is added. 4 | # 5 | # The file is created off a template: 6 | # 7 | # kind: 8 | # body: 9 | # time: 10 | # custom: 11 | # Author: 12 | # Issue: 4904 13 | # PR: 14 | # 15 | # **why?** 16 | # Automate changelog generation for more visibility with automated bot PRs. 17 | # 18 | # **when?** 19 | # Once a PR is created, a label should be added to the PR before or after creation. You can also 20 | # manually trigger this by adding the appropriate label at any time.
21 | # 22 | # **how to add another bot?** 23 | # Add the label and changie kind to the include matrix. That's it! 24 | # 25 | 26 | name: Bot Changelog 27 | 28 | on: 29 | pull_request: 30 | # catch when the PR is opened with the label or when the label is added 31 | types: [opened, labeled] 32 | 33 | permissions: 34 | contents: write 35 | pull-requests: read 36 | 37 | jobs: 38 | generate_changelog: 39 | runs-on: ubuntu-latest 40 | 41 | steps: 42 | - name: Check out the repository 43 | uses: actions/checkout@v4 44 | with: 45 | fetch-depth: 2 46 | 47 | - name: Create and commit changelog on bot PR 48 | id: bot_changelog 49 | uses: emmyoop/changie_bot@v1.0 50 | with: 51 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 52 | commit_author_name: "starburstdata-automation" 53 | commit_author_email: "automation@starburstdata.com" 54 | commit_message: ${{ github.event.pull_request.title }} 55 | changie_kind: "Dependencies" 56 | label: "dependencies" 57 | custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ''\n PR: ${{ github.event.pull_request.number }}" 58 | -------------------------------------------------------------------------------- /.github/workflows/changelog-existence.yml: -------------------------------------------------------------------------------- 1 | 2 | 3 | # **what?** 4 | # Checks that a file has been committed under the /.changes directory 5 | # as a new CHANGELOG entry. Cannot check for a specific filename as 6 | # it is dynamically generated by change type and timestamp. 7 | # This workflow should not require any secrets since it runs for PRs 8 | # from forked repos. 9 | # By default, secrets are not passed to workflows running from 10 | # a forked repo. 11 | 12 | # **why?** 13 | # Ensure code change gets reflected in the CHANGELOG. 14 | 15 | # **when?** 16 | # This will run for all PRs going into master. It will 17 | # run when they are opened, reopened, when any label is added or removed 18 | # and when new code is pushed to the branch. The action will then get 19 | # skipped if the 'Skip Changelog' label is present in any of the labels. 20 | 21 | name: Check Changelog Entry 22 | 23 | on: 24 | pull_request: 25 | types: [opened, reopened, labeled, unlabeled, synchronize] 26 | workflow_dispatch: 27 | 28 | defaults: 29 | run: 30 | shell: bash 31 | 32 | permissions: 33 | contents: read 34 | pull-requests: write 35 | 36 | jobs: 37 | changelog: 38 | uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main 39 | with: 40 | changelog_comment: 'Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry).'
41 | skip_label: 'Skip Changelog' 42 | secrets: inherit 43 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: dbt-trino tests 2 | on: 3 | push: 4 | branches: 5 | - master 6 | - "*.*.latest" 7 | paths-ignore: 8 | - "**/*.md" 9 | pull_request: 10 | branches: 11 | - master 12 | - "*.*.latest" 13 | paths-ignore: 14 | - "**/*.md" 15 | 16 | jobs: 17 | checks: 18 | runs-on: ubuntu-latest 19 | steps: 20 | - name: "Checkout the source code" 21 | uses: actions/checkout@v4 22 | 23 | - name: "Install Python" 24 | uses: actions/setup-python@v5 25 | 26 | - name: "Install dev requirements" 27 | run: pip install -r dev_requirements.txt 28 | 29 | - name: "Run pre-commit checks" 30 | run: pre-commit run --all-files 31 | test: 32 | runs-on: ubuntu-latest 33 | strategy: 34 | fail-fast: false 35 | matrix: 36 | engine: 37 | - "trino" 38 | - "starburst" 39 | - "starburst_galaxy" 40 | python: 41 | - "3.9" 42 | - "3.10" 43 | - "3.11" 44 | - "3.12" 45 | - "3.13" 46 | isStarburstBranch: 47 | - ${{ (github.event_name == 'pull_request' && contains(github.event.pull_request.head.repo.full_name, 'starburstdata')) || github.event_name != 'pull_request' }} 48 | exclude: 49 | - engine: "starburst_galaxy" 50 | python: "3.13" 51 | isStarburstBranch: false 52 | - engine: "starburst_galaxy" 53 | python: "3.12" 54 | - engine: "starburst_galaxy" 55 | python: "3.11" 56 | - engine: "starburst_galaxy" 57 | python: "3.10" 58 | - engine: "starburst_galaxy" 59 | python: "3.9" 60 | 61 | steps: 62 | - name: Checkout 63 | uses: actions/checkout@v4 64 | 65 | - name: Setup Python 66 | uses: actions/setup-python@v5 67 | with: 68 | python-version: ${{ matrix.python }} 69 | 70 | - name: Run dbt-trino tests against ${{ matrix.engine }} on python ${{ matrix.python }} 71 | env: 72 | DBT_TESTS_STARBURST_GALAXY_HOST: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_HOST }} 73 | DBT_TESTS_STARBURST_GALAXY_USER: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_USER }} 74 | DBT_TESTS_STARBURST_GALAXY_PASSWORD: ${{ secrets.DBT_TESTS_STARBURST_GALAXY_PASSWORD }} 75 | run: | 76 | if [[ ${{ matrix.engine }} == "trino" || ${{ matrix.engine }} == "starburst" ]]; then 77 | make dbt-${{ matrix.engine }}-tests 78 | elif [[ ${{ matrix.engine }} == "starburst_galaxy" ]]; then 79 | python -m pip install -e . 
-r dev_requirements.txt 80 | python -m pytest tests/functional --profile starburst_galaxy 81 | fi 82 | 83 | - name: Remove container on failure 84 | if: failure() 85 | run: ./docker/remove_${{ matrix.engine }}.bash || true 86 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: dbt-trino release 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | test: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v4 12 | 13 | - name: Setup Python 14 | uses: actions/setup-python@v5 15 | with: 16 | python-version: "3.13" 17 | 18 | - name: Test release 19 | run: | 20 | python3 -m venv env 21 | source env/bin/activate 22 | pip install -r dev_requirements.txt 23 | pip install twine wheel setuptools 24 | python setup.py sdist bdist_wheel 25 | pip install dist/dbt_trino-*.tar.gz 26 | pip install dist/dbt_trino-*-py3-none-any.whl 27 | twine check dist/dbt_trino-*-py3-none-any.whl dist/dbt_trino-*.tar.gz 28 | 29 | github-release: 30 | name: GitHub release 31 | runs-on: ubuntu-latest 32 | needs: test 33 | steps: 34 | - name: Checkout 35 | uses: actions/checkout@v4 36 | 37 | - name: Setup Python 38 | uses: actions/setup-python@v5 39 | with: 40 | python-version: "3.13" 41 | 42 | - name: Get dbt-trino version 43 | run: echo "version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = "\(.*\)\"/\1/p')" >> $GITHUB_ENV 44 | 45 | # Need to set an output variable because env variables can't be taken as input 46 | # This is needed for the next step with releasing to GitHub 47 | - name: Find release type 48 | id: release_type 49 | env: 50 | IS_PRERELEASE: ${{ contains(env.version_number, 'rc') || contains(env.version_number, 'b') }} 51 | run: | 52 | echo "isPrerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT 53 | 54 | - name: Create GitHub release 55 | uses: actions/create-release@v1 56 | env: 57 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token 58 | with: 59 | tag_name: v${{env.version_number}} 60 | release_name: v${{env.version_number}} 61 | prerelease: ${{ steps.release_type.outputs.isPrerelease }} 62 | body: | 63 | [Release notes](https://github.com/starburstdata/dbt-trino/blob/master/CHANGELOG.md) 64 | ```sh 65 | $ pip install dbt-trino==${{env.version_number}} 66 | ``` 67 | 68 | pypi-release: 69 | name: Pypi release 70 | runs-on: ubuntu-latest 71 | needs: github-release 72 | environment: PypiProd 73 | permissions: 74 | id-token: write 75 | steps: 76 | - name: Checkout 77 | uses: actions/checkout@v4 78 | 79 | - name: Setup Python 80 | uses: actions/setup-python@v5 81 | with: 82 | python-version: "3.13" 83 | 84 | - name: Get dbt-trino version 85 | run: echo "version_number=$(cat dbt/adapters/trino/__version__.py | sed -n 's/version = "\(.*\)\"/\1/p')" >> $GITHUB_ENV 86 | 87 | - name: Release to pypi 88 | run: | 89 | python3 -m venv env 90 | source env/bin/activate 91 | pip install -r dev_requirements.txt 92 | pip install twine wheel setuptools 93 | python setup.py sdist bdist_wheel 94 | twine upload --non-interactive dist/dbt_trino-${{env.version_number}}-py3-none-any.whl dist/dbt_trino-${{env.version_number}}.tar.gz 95 | -------------------------------------------------------------------------------- /.github/workflows/security.yml: -------------------------------------------------------------------------------- 1 | name: Veracode SCA 2 | 3 | on: 
4 | workflow_dispatch: 5 | 6 | jobs: 7 | veracode-sca-task: 8 | runs-on: ubuntu-latest 9 | name: Scan repository for Issues 10 | 11 | steps: 12 | - name: Checkout 13 | uses: actions/checkout@v4 14 | - name: Run Veracode SCA 15 | env: 16 | SRCCLR_API_TOKEN: ${{ secrets.SRCCLR_API_TOKEN }} 17 | uses: veracode/veracode-sca@v1.09 18 | 19 | with: 20 | github_token: ${{ secrets.GITHUB_TOKEN }} 21 | create-issues: true 22 | min-cvss-for-issue: 1 23 | fail-on-cvss: 11 24 | -------------------------------------------------------------------------------- /.github/workflows/version-bump.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # This workflow will take the new version number to bump to. With that 3 | # it will run versionbump to update the version number everywhere in the 4 | # code base and then run changie to create the corresponding changelog. 5 | # A PR will be created with the changes that can be reviewed before committing. 6 | 7 | # **why?** 8 | # This is to aid in releasing dbt-trino and making sure we have updated 9 | # the version in all places and generated the changelog. 10 | 11 | # **when?** 12 | # This is triggered manually 13 | 14 | name: Version Bump 15 | 16 | on: 17 | workflow_dispatch: 18 | inputs: 19 | version_number: 20 | description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)' 21 | required: true 22 | 23 | jobs: 24 | bump: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - name: "[DEBUG] Print Variables" 28 | run: | 29 | echo "all variables defined as inputs" 30 | echo The version_number: ${{ github.event.inputs.version_number }} 31 | 32 | - name: Check out the repository 33 | uses: actions/checkout@v4 34 | 35 | - uses: actions/setup-python@v5 36 | with: 37 | python-version: "3.8" 38 | 39 | - name: Install brew 40 | run: | 41 | echo "/home/linuxbrew/.linuxbrew/bin:/home/linuxbrew/.linuxbrew/sbin" >> $GITHUB_PATH 42 | 43 | - name: Install python dependencies 44 | run: | 45 | python3 -m venv env 46 | source env/bin/activate 47 | pip install --upgrade pip 48 | 49 | - name: Audit Version and Parse Into Parts 50 | id: semver 51 | uses: dbt-labs/actions/parse-semver@v1 52 | with: 53 | version: ${{ github.event.inputs.version_number }} 54 | 55 | - name: Set branch value 56 | id: variables 57 | run: | 58 | echo "BRANCH_NAME=prep-release/${{ github.event.inputs.version_number }}_$GITHUB_RUN_ID" >> $GITHUB_OUTPUT 59 | 60 | - name: Create PR branch 61 | run: | 62 | git checkout -b ${{ steps.variables.outputs.BRANCH_NAME }} 63 | git push origin ${{ steps.variables.outputs.BRANCH_NAME }} 64 | git branch --set-upstream-to=origin/${{ steps.variables.outputs.BRANCH_NAME }} ${{ steps.variables.outputs.BRANCH_NAME }} 65 | 66 | - name: Bump version 67 | run: | 68 | echo -en "version = \"${{ github.event.inputs.version_number }}\"\n" > dbt/adapters/trino/__version__.py 69 | git status 70 | 71 | - name: Run changie 72 | run: | 73 | brew tap miniscruff/changie https://github.com/miniscruff/changie 74 | brew install changie 75 | if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]] 76 | then 77 | changie batch ${{ steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' --prerelease '${{ steps.semver.outputs.pre-release }}' 78 | else 79 | if [[ -d ".changes/${{ steps.semver.outputs.base-version }}" ]] 80 | then 81 | changie batch ${{ steps.semver.outputs.base-version }} --include '${{ steps.semver.outputs.base-version }}' --remove-prereleases 82 | else 83 | changie batch ${{ 
steps.semver.outputs.base-version }} --move-dir '${{ steps.semver.outputs.base-version }}' 84 | fi 85 | fi 86 | changie merge 87 | git status 88 | 89 | - name: Commit version bump to branch 90 | uses: EndBug/add-and-commit@v9 91 | with: 92 | author_name: 'Github Build Bot' 93 | author_email: 'automation@starburstdata.com' 94 | message: 'Bumping version to ${{ github.event.inputs.version_number }} and generate CHANGELOG' 95 | branch: '${{ steps.variables.outputs.BRANCH_NAME }}' 96 | push: 'origin origin/${{ steps.variables.outputs.BRANCH_NAME }}' 97 | 98 | - name: Create Pull Request 99 | uses: peter-evans/create-pull-request@v7 100 | with: 101 | author: 'Github Build Bot ' 102 | base: ${{github.ref}} 103 | title: 'Bumping version to ${{ github.event.inputs.version_number }} and generate changelog' 104 | branch: '${{ steps.variables.outputs.BRANCH_NAME }}' 105 | labels: | 106 | Skip Changelog 107 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info 2 | env/ 3 | __pycache__/ 4 | .tox/ 5 | .idea/ 6 | build/ 7 | dist/ 8 | dbt-integration-tests 9 | docker/dbt/.user.yml 10 | .DS_Store 11 | .vscode/ 12 | logs/ 13 | .venv/ 14 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # Configuration for pre-commit hooks (see https://pre-commit.com/). 2 | # Eventually the hooks described here will be run as tests before merging each PR. 3 | 4 | # TODO: remove global exclusion of tests when testing overhaul is complete 5 | exclude: ^test/ 6 | 7 | repos: 8 | - repo: https://github.com/pre-commit/pre-commit-hooks 9 | rev: v4.4.0 10 | hooks: 11 | - id: check-yaml 12 | args: [--unsafe] 13 | - id: check-json 14 | - id: end-of-file-fixer 15 | - id: trailing-whitespace 16 | exclude_types: 17 | - "markdown" 18 | - id: check-case-conflict 19 | - repo: https://github.com/dbt-labs/pre-commit-hooks 20 | rev: v0.1.0a1 21 | hooks: 22 | - id: dbt-core-in-adapters-check 23 | - repo: https://github.com/psf/black 24 | rev: 23.3.0 25 | hooks: 26 | - id: black 27 | args: 28 | - "--line-length=99" 29 | - "--target-version=py38" 30 | - id: black 31 | alias: black-check 32 | stages: [manual] 33 | args: 34 | - "--line-length=99" 35 | - "--target-version=py38" 36 | - "--check" 37 | - "--diff" 38 | - repo: https://github.com/pycqa/isort 39 | rev: 5.12.0 40 | hooks: 41 | - id: isort 42 | args: [ "--profile", "black", "--filter-files" ] 43 | - repo: https://github.com/pycqa/flake8 44 | rev: 7.1.2 45 | hooks: 46 | - id: flake8 47 | - id: flake8 48 | alias: flake8-check 49 | stages: [manual] 50 | - repo: https://github.com/pre-commit/mirrors-mypy 51 | rev: v1.2.0 52 | hooks: 53 | - id: mypy 54 | # N.B.: Mypy is... a bit fragile. 55 | # 56 | # By using `language: system` we run this hook in the local 57 | # environment instead of a pre-commit isolated one. This is needed 58 | # to ensure mypy correctly parses the project. 59 | 60 | # It may cause trouble in that it adds environmental variables out 61 | # of our control to the mix. Unfortunately, there's nothing we can 62 | # do about it, per pre-commit's author. 63 | # See https://github.com/pre-commit/pre-commit/issues/730 for details.
64 | args: [ --show-error-codes, --ignore-missing-imports ] 65 | files: ^dbt/adapters/.* 66 | language: system 67 | - id: mypy 68 | alias: mypy-check 69 | stages: [ manual ] 70 | args: [ --show-error-codes, --pretty, --ignore-missing-imports ] 71 | files: ^dbt/adapters 72 | language: system 73 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # dbt-trino Changelog 2 | 3 | - This file provides a full account of all changes to `dbt-trino` 4 | - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. 5 | - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. 6 | - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/starburstdata/dbt-trino/blob/master/CONTRIBUTING.md#adding-changelog-entry) 7 | ## dbt-trino 1.9.2 - June 03, 2025 8 | ### Features 9 | - To allow to set grace period for mv ([#472](https://github.com/starburstdata/dbt-trino/pull/472)) 10 | - table materialization on_table_exists=skip option ([#479](https://github.com/starburstdata/dbt-trino/issues/479), [#481](https://github.com/starburstdata/dbt-trino/pull/481)) 11 | ### Fixes 12 | - Add __dbt_tmp suffix to specified location for temporary tables ([#467](https://github.com/starburstdata/dbt-trino/issues/467), [#468](https://github.com/starburstdata/dbt-trino/pull/468)) 13 | - Fix handling of composite unique_key in incremental models ([#465](https://github.com/starburstdata/dbt-trino/issues/465), [#473](https://github.com/starburstdata/dbt-trino/pull/473)) 14 | ### Dependencies 15 | - Update dbt-adapters to 1.15.1 ([#483](https://github.com/starburstdata/dbt-trino/pull/483)) 16 | - Bump dbt-tests-adapter to 1.15.1 ([#484](https://github.com/starburstdata/dbt-trino/pull/484)) 17 | - Bump dbt-common to 1.25.0 ([#484](https://github.com/starburstdata/dbt-trino/pull/484)) 18 | 19 | ### Contributors 20 | - [@AlexandrKhabarov](https://github.com/AlexandrKhabarov) ([#472](https://github.com/starburstdata/dbt-trino/pull/472)) 21 | - [@choyrim](https://github.com/choyrim) ([#481](https://github.com/starburstdata/dbt-trino/pull/481)) 22 | - [@damian3031](https://github.com/damian3031) ([#483](https://github.com/starburstdata/dbt-trino/pull/483), [#484](https://github.com/starburstdata/dbt-trino/pull/484), [#484](https://github.com/starburstdata/dbt-trino/pull/484)) 23 | - [@yakovlevvs](https://github.com/yakovlevvs) ([#468](https://github.com/starburstdata/dbt-trino/pull/468), [#473](https://github.com/starburstdata/dbt-trino/pull/473)) 24 | ## dbt-trino 1.9.1 - March 26, 2025 25 | ### Fixes 26 | - Avoid treating VARBINARY and JSON as a string types ([#437](https://github.com/starburstdata/dbt-trino/issues/437), [#475](https://github.com/starburstdata/dbt-trino/pull/475)) 27 | 28 | ### Contributors 29 | - [@damian3031](https://github.com/damian3031) ([#475](https://github.com/starburstdata/dbt-trino/pull/475)) 30 | ## dbt-trino 1.9.0 - December 20, 2024 31 | ### Breaking Changes 32 | - Drop support for Python 3.8 ([#439](https://github.com/starburstdata/dbt-trino/pull/439)) 33 | ### Features 34 | - Microbatch incremental strategy ([#453](https://github.com/starburstdata/dbt-trino/pull/453)) 35 | - Allow 
configuring of snapshot column names ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 36 | - Enable setting current value of dbt_valid_to ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 37 | ### Under the Hood 38 | - Add tests against Python 3.13 ([#439](https://github.com/starburstdata/dbt-trino/pull/439)) 39 | - Update trino__get_columns_in_relation to use information_schema.columns ([#443](https://github.com/starburstdata/dbt-trino/issues/443), [#444](https://github.com/starburstdata/dbt-trino/pull/444)) 40 | ### Dependencies 41 | - Update dependencies ([#462](https://github.com/starburstdata/dbt-trino/pull/462)) 42 | 43 | ### Contributors 44 | - [@damian3031](https://github.com/damian3031) ([#439](https://github.com/starburstdata/dbt-trino/pull/439), [#453](https://github.com/starburstdata/dbt-trino/pull/453), [#462](https://github.com/starburstdata/dbt-trino/pull/462), [#462](https://github.com/starburstdata/dbt-trino/pull/462), [#439](https://github.com/starburstdata/dbt-trino/pull/439), [#462](https://github.com/starburstdata/dbt-trino/pull/462)) 45 | - [@posulliv](https://github.com/posulliv) ([#444](https://github.com/starburstdata/dbt-trino/pull/444)) 46 | ## Previous Releases 47 | 48 | For information on prior major and minor releases, see their changelogs: 49 | 50 | * [1.8](https://github.com/starburstdata/dbt-trino/blob/1.8.latest/CHANGELOG.md) 51 | * [1.7](https://github.com/starburstdata/dbt-trino/blob/1.7.latest/CHANGELOG.md) 52 | * [1.6](https://github.com/starburstdata/dbt-trino/blob/1.6.latest/CHANGELOG.md) 53 | * [1.5](https://github.com/starburstdata/dbt-trino/blob/1.5.latest/CHANGELOG.md) 54 | * [1.4](https://github.com/starburstdata/dbt-trino/blob/1.4.latest/CHANGELOG.md) 55 | * [1.3](https://github.com/starburstdata/dbt-trino/blob/1.3.latest/CHANGELOG.md) 56 | * [1.2](https://github.com/starburstdata/dbt-trino/blob/1.2.latest/CHANGELOG.md) 57 | * [1.1](https://github.com/starburstdata/dbt-trino/blob/1.1.latest/CHANGELOG.md) 58 | * [1.0 and earlier](https://github.com/starburstdata/dbt-trino/blob/1.0.latest/CHANGELOG.md) 59 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .EXPORT_ALL_VARIABLES: 2 | 3 | DBT_TEST_USER_1=user1 4 | DBT_TEST_USER_2=user2 5 | DBT_TEST_USER_3=user3 6 | 7 | start-trino: 8 | docker network create dbt-net || true 9 | ./docker/init_trino.bash 10 | 11 | dbt-trino-tests: start-trino 12 | pip install -e . -r dev_requirements.txt 13 | tox -r 14 | 15 | start-starburst: 16 | docker network create dbt-net || true 17 | ./docker/init_starburst.bash 18 | 19 | dbt-starburst-tests: start-starburst 20 | pip install -e . 
-r dev_requirements.txt 21 | tox -r 22 | 23 | dev: 24 | pre-commit install 25 | -------------------------------------------------------------------------------- /assets/images/dbt-signature_tm.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /assets/images/dbt-signature_tm_light.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /assets/images/trino-logo-dk-bg.svg: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /dbt/adapters/trino/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.base import AdapterPlugin 2 | 3 | from dbt.adapters.trino.column import TrinoColumn # noqa 4 | from dbt.adapters.trino.connections import TrinoConnectionManager # noqa 5 | from dbt.adapters.trino.connections import TrinoCredentialsFactory 6 | from dbt.adapters.trino.relation import TrinoRelation # noqa 7 | 8 | from dbt.adapters.trino.impl import TrinoAdapter # isort: split 9 | from dbt.include import trino 10 | 11 | Plugin = AdapterPlugin( 12 | adapter=TrinoAdapter, # type: ignore 13 | credentials=TrinoCredentialsFactory, # type: ignore 14 | include_path=trino.PACKAGE_PATH, 15 | ) 16 | -------------------------------------------------------------------------------- /dbt/adapters/trino/__version__.py: -------------------------------------------------------------------------------- 1 | version = "1.9.2" 2 | -------------------------------------------------------------------------------- /dbt/adapters/trino/column.py: -------------------------------------------------------------------------------- 1 | import re 2 | from dataclasses import dataclass 3 | from typing import ClassVar, Dict 4 | 5 | from dbt.adapters.base.column import Column 6 | from dbt_common.exceptions import DbtRuntimeError 7 | 8 | # Taken from the MAX_LENGTH variable in 9 | # https://github.com/trinodb/trino/blob/master/core/trino-spi/src/main/java/io/trino/spi/type/VarcharType.java 10 | TRINO_VARCHAR_MAX_LENGTH = 2147483646 11 | 12 | 13 | @dataclass 14 | class TrinoColumn(Column): 15 | TYPE_LABELS: ClassVar[Dict[str, str]] = { 16 | "STRING": "VARCHAR", 17 | "FLOAT": "DOUBLE", 18 | } 19 | 20 | @property 21 | def data_type(self): 22 | # when varchar has no defined size, default to unbound varchar 23 | # the super().data_type defaults to varchar(256) 24 | if self.dtype.lower() == "varchar" and self.char_size is None: 25 | return self.dtype 26 | 27 | return super().data_type 28 | 29 | def is_string(self) -> bool: 30 | return self.dtype.lower() in ["varchar", "char"] 31 | 32 | def is_float(self) -> bool: 33 | return self.dtype.lower() in [ 34 | "real", 35 | "double precision", 36 | "double", 37 | ] 38 | 39 | def is_integer(self) -> bool: 40 | return self.dtype.lower() in [ 41 | "tinyint", 42 | "smallint", 43 | "integer", 44 | "int", 45 | "bigint", 46 | ] 47 | 48 | def is_numeric(self) -> bool: 49 | return self.dtype.lower() == "decimal" 50 | 51 | @classmethod 52 | def string_type(cls, size: int) -> str: 53 | return "varchar({})".format(size) 54 | 55 | def string_size(self) -> int: 56 | # override the string_size function to handle the unbound varchar case 57 | if self.dtype.lower() == 
"varchar" and self.char_size is None: 58 | return TRINO_VARCHAR_MAX_LENGTH 59 | 60 | return super().string_size() 61 | 62 | @classmethod 63 | def from_description(cls, name: str, raw_data_type: str) -> "Column": 64 | # Most of the Trino data types specify a type and not a precision/scale/charsize 65 | if not raw_data_type.lower().startswith(("varchar", "char", "decimal")): 66 | return cls(name, raw_data_type) 67 | # Trino data types that do specify a precision/scale/charsize: 68 | match = re.match( 69 | r"(?P[^(]+)(?P\([^)]+\))?(?P[\w ]+)?", raw_data_type 70 | ) 71 | if match is None: 72 | raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"') 73 | data_type = match.group("type") 74 | size_info = match.group("size") 75 | data_type_suffix = match.group("type_suffix") 76 | if data_type_suffix: 77 | data_type += data_type_suffix 78 | char_size = None 79 | numeric_precision = None 80 | numeric_scale = None 81 | if size_info is not None: 82 | # strip out the parentheses 83 | size_info = size_info[1:-1] 84 | parts = size_info.split(",") 85 | if len(parts) == 1: 86 | try: 87 | char_size = int(parts[0]) 88 | except ValueError: 89 | raise DbtRuntimeError( 90 | f'Could not interpret data_type "{raw_data_type}": ' 91 | f'could not convert "{parts[0]}" to an integer' 92 | ) 93 | elif len(parts) == 2: 94 | try: 95 | numeric_precision = int(parts[0]) 96 | except ValueError: 97 | raise DbtRuntimeError( 98 | f'Could not interpret data_type "{raw_data_type}": ' 99 | f'could not convert "{parts[0]}" to an integer' 100 | ) 101 | try: 102 | numeric_scale = int(parts[1]) 103 | except ValueError: 104 | raise DbtRuntimeError( 105 | f'Could not interpret data_type "{raw_data_type}": ' 106 | f'could not convert "{parts[1]}" to an integer' 107 | ) 108 | 109 | return cls(name, data_type, char_size, numeric_precision, numeric_scale) 110 | -------------------------------------------------------------------------------- /dbt/adapters/trino/impl.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Dict, List, Optional 3 | 4 | import agate 5 | from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport 6 | from dbt.adapters.capability import ( 7 | Capability, 8 | CapabilityDict, 9 | CapabilitySupport, 10 | Support, 11 | ) 12 | from dbt.adapters.sql import SQLAdapter 13 | from dbt_common.behavior_flags import BehaviorFlag 14 | from dbt_common.contracts.constraints import ConstraintType 15 | from dbt_common.exceptions import DbtDatabaseError 16 | 17 | from dbt.adapters.trino import TrinoColumn, TrinoConnectionManager, TrinoRelation 18 | 19 | 20 | @dataclass 21 | class TrinoConfig(AdapterConfig): 22 | properties: Optional[Dict[str, str]] = None 23 | view_security: Optional[str] = "definer" 24 | 25 | 26 | class TrinoAdapter(SQLAdapter): 27 | Relation = TrinoRelation 28 | Column = TrinoColumn 29 | ConnectionManager = TrinoConnectionManager 30 | AdapterSpecificConfigs = TrinoConfig 31 | 32 | CONSTRAINT_SUPPORT = { 33 | ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, 34 | ConstraintType.not_null: ConstraintSupport.ENFORCED, 35 | ConstraintType.unique: ConstraintSupport.NOT_SUPPORTED, 36 | ConstraintType.primary_key: ConstraintSupport.NOT_SUPPORTED, 37 | ConstraintType.foreign_key: ConstraintSupport.NOT_SUPPORTED, 38 | } 39 | 40 | _capabilities: CapabilityDict = CapabilityDict( 41 | { 42 | Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full), 43 | # No information about last 
table modification in information_schema.tables 44 | Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Unsupported), 45 | Capability.TableLastModifiedMetadataBatch: CapabilitySupport( 46 | support=Support.Unsupported 47 | ), 48 | } 49 | ) 50 | 51 | def __init__(self, config, mp_context) -> None: 52 | super().__init__(config, mp_context) 53 | self.connections = self.ConnectionManager(config, mp_context, self.behavior) 54 | 55 | @property 56 | def _behavior_flags(self) -> List[BehaviorFlag]: 57 | return [ 58 | { # type: ignore 59 | "name": "require_certificate_validation", 60 | "default": False, 61 | "description": ( 62 | "SSL certificate validation is disabled by default. " 63 | "It is legacy behavior which will be changed in future releases. " 64 | "It is strongly advised to enable `require_certificate_validation` flag " 65 | "or explicitly set `cert` configuration to `True` for security reasons. " 66 | "You may receive an error after that if your SSL setup is incorrect." 67 | ), 68 | } 69 | ] 70 | 71 | @classmethod 72 | def date_function(cls): 73 | return "datenow()" 74 | 75 | @classmethod 76 | def convert_text_type(cls, agate_table, col_idx): 77 | return "VARCHAR" 78 | 79 | @classmethod 80 | def convert_number_type(cls, agate_table, col_idx): 81 | decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) 82 | return "DOUBLE" if decimals else "INTEGER" 83 | 84 | @classmethod 85 | def convert_datetime_type(cls, agate_table, col_idx): 86 | return "TIMESTAMP" 87 | 88 | @classmethod 89 | def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: 90 | return "DATE" 91 | 92 | def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: 93 | return f"{add_to} + interval '{number}' {interval}" 94 | 95 | def get_columns_in_relation(self, relation): 96 | try: 97 | return super().get_columns_in_relation(relation) 98 | except DbtDatabaseError as exc: 99 | if "does not exist" in str(exc): 100 | return [] 101 | else: 102 | raise 103 | 104 | def valid_incremental_strategies(self): 105 | return ["append", "merge", "delete+insert", "microbatch"] 106 | -------------------------------------------------------------------------------- /dbt/adapters/trino/relation.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | 3 | from dbt.adapters.base.relation import BaseRelation, EventTimeFilter, Policy 4 | from dbt.adapters.contracts.relation import ComponentName 5 | 6 | 7 | @dataclass(frozen=True, eq=False, repr=False) 8 | class TrinoRelation(BaseRelation): 9 | quote_policy: Policy = field(default_factory=lambda: Policy()) 10 | require_alias: bool = False 11 | 12 | # Overridden as Trino converts relation identifiers to lowercase 13 | def _is_exactish_match(self, field: ComponentName, value: str) -> bool: 14 | return self.path.get_lowered_part(field) == value.lower() 15 | 16 | # Overridden because Trino cannot compare a TIMESTAMP column with a VARCHAR literal. 
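    # (Illustration with hypothetical values: for an EventTimeFilter with
    #  field_name="created_at", start=2025-01-01 00:00:00 and end=2025-01-02 00:00:00,
    #  the method below renders
    #    created_at >= TIMESTAMP '2025-01-01 00:00:00' and created_at < TIMESTAMP '2025-01-02 00:00:00'
    #  so both sides of each comparison are timestamps.)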
17 | def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str: 18 | """ 19 | Returns "" if start and end are both None 20 | """ 21 | filter = "" 22 | if event_time_filter.start and event_time_filter.end: 23 | filter = f"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}' and {event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'" 24 | elif event_time_filter.start: 25 | filter = f"{event_time_filter.field_name} >= TIMESTAMP '{event_time_filter.start}'" 26 | elif event_time_filter.end: 27 | filter = f"{event_time_filter.field_name} < TIMESTAMP '{event_time_filter.end}'" 28 | 29 | return filter 30 | -------------------------------------------------------------------------------- /dbt/include/trino/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | PACKAGE_PATH = os.path.dirname(__file__) 4 | -------------------------------------------------------------------------------- /dbt/include/trino/dbt_project.yml: -------------------------------------------------------------------------------- 1 | name: dbt_trino 2 | version: 1.0 3 | config-version: 2 4 | 5 | macro-paths: ["macros"] 6 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/apply_grants.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__get_show_grant_sql(relation) -%} 2 | select 3 | grantee, 4 | lower(privilege_type) as privilege_type 5 | from information_schema.table_privileges 6 | where table_catalog = '{{ relation.database }}' 7 | and table_schema = '{{ relation.schema }}' 8 | and table_name = '{{ relation.identifier }}' 9 | {%- endmacro %} 10 | 11 | {% macro trino__copy_grants() %} 12 | {# 13 | -- This macro should return true or false depending on the answer to 14 | -- following question: 15 | -- when an object is fully replaced on your database, do grants copy over? 16 | -- e.g. on Postgres this is never true, 17 | -- on Spark this is different for views vs. non-Delta tables vs. Delta tables, 18 | -- on Snowflake it depends on the user-supplied copy_grants configuration. 19 | -- true by default, which means “play it safe”: grants MIGHT have copied over, 20 | -- so dbt will run an extra query to check them + calculate diffs. 21 | #} 22 | {{ return(False) }} 23 | {% endmacro %} 24 | 25 | {%- macro trino__get_grant_sql(relation, privilege, grantees) -%} 26 | grant {{ privilege }} on {{ relation }} to {{ adapter.quote(grantees[0]) }} 27 | {%- endmacro %} 28 | 29 | {%- macro trino__support_multiple_grantees_per_dcl_statement() -%} 30 | {# 31 | -- This macro should return true or false depending on the answer to 32 | -- following question: 33 | -- does this database support grant {privilege} to user_a, user_b, ...? 34 | -- or do user_a + user_b need their own separate grant statements? 
35 | #} 36 | {{ return(False) }} 37 | {%- endmacro -%} 38 | 39 | {% macro trino__call_dcl_statements(dcl_statement_list) %} 40 | {% for dcl_statement in dcl_statement_list %} 41 | {% call statement('grant_or_revoke') %} 42 | {{ dcl_statement }} 43 | {% endcall %} 44 | {% endfor %} 45 | {% endmacro %} 46 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/materializations/materialized_view.sql: -------------------------------------------------------------------------------- 1 | {%- macro trino__get_create_materialized_view_as_sql(target_relation, sql) -%} 2 | create materialized view {{ target_relation }} 3 | {%- set grace_period = config.get('grace_period') %} 4 | {%- if grace_period is not none %} 5 | grace period {{ grace_period }} 6 | {%- endif %} 7 | {{ properties() }} 8 | as 9 | {{ sql }} 10 | ; 11 | {%- endmacro -%} 12 | 13 | 14 | {% macro trino__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} 15 | {{- trino__get_create_materialized_view_as_sql(intermediate_relation, sql) }} 16 | 17 | {% if existing_relation is not none %} 18 | {{ log("Found a " ~ existing_relation.type ~ " with same name. Will drop it", info=true) }} 19 | alter {{ existing_relation.type|replace("_", " ") }} {{ existing_relation }} rename to {{ backup_relation }}; 20 | {% endif %} 21 | 22 | alter materialized view {{ intermediate_relation }} rename to {{ relation }}; 23 | 24 | {% endmacro %} 25 | 26 | 27 | {#-- Applying materialized view configuration changes via alter is not supported. --#} 28 | {#-- Return None, so `refresh_materialized_view` macro is invoked even --#} 29 | {#-- if materialized view configuration changes are made. --#} 30 | {#-- After configuration change, full refresh needs to be performed on mv. 
--#}
31 | {% macro trino__get_materialized_view_configuration_changes(existing_relation, new_config) %}
32 |     {% do return(None) %}
33 | {% endmacro %}
34 | 
35 | 
36 | {%- macro trino__refresh_materialized_view(relation) -%}
37 |     refresh materialized view {{ relation }}
38 | {%- endmacro -%}
39 | 


--------------------------------------------------------------------------------
/dbt/include/trino/macros/materializations/seeds/helpers.sql:
--------------------------------------------------------------------------------
 1 | {% macro trino__get_batch_size() %}
 2 |     {{ return(1000) }}
 3 | {% endmacro %}
 4 | 
 5 | 
 6 | {% macro create_bindings(row, types) %}
 7 |     {% set values = [] %}
 8 |     {% set re = modules.re %}
 9 | 
10 |     {%- for item in row -%}
11 |         {%- set type = types[loop.index0] -%}
12 |         {%- set match_type = re.match("(\w+)(\(.*\))?", type) -%}
13 |         {%- if item is not none and item is string and 'interval' in match_type.group(1) -%}
14 |             {%- do values.append((none, match_type.group(1).upper() ~ " " ~ item)) -%}
15 |         {%- elif item is not none and item is string and 'varchar' not in type.lower() -%}
16 |             {%- do values.append((none, match_type.group(1).upper() ~ " '" ~ item ~ "'")) -%}
17 |         {%- elif item is not none and 'varchar' in type.lower() -%}
18 |             {%- do values.append((get_binding_char(), item|string())) -%}
19 |         {%- else -%}
20 |             {%- do values.append((get_binding_char(), item)) -%}
21 |         {% endif -%}
22 |     {%- endfor -%}
23 |     {{ return(values) }}
24 | {% endmacro %}
25 | 
26 | 
27 | {#
28 | We need to override the default__load_csv_rows macro as Trino requires values to be typed according to the column type,
29 | as in the following example:
30 | 
31 | create table "memory"."default"."string_type" ("varchar_example" varchar,"varchar_n_example" varchar(10),"char_example" char,"char_n_example" char(10),"varbinary_example" varbinary,"json_example" json)
32 | 
33 | insert into "memory"."default"."string_type" ("varchar_example", "varchar_n_example", "char_example", "char_n_example", "varbinary_example", "json_example") values
34 | ('test','abc',CHAR 'd',CHAR 'ghi',VARBINARY '65683F',JSON '{"k1":1,"k2":23,"k3":456}'),(NULL,NULL,NULL,NULL,NULL,NULL)
35 | 
36 | Usually seed row values go through agate_table's data type detection and come through as Python types; in this case typing is
37 | handled by using bindings in `ConnectionWrapper.execute`. However, dbt also allows you to override the data types of the created table
38 | by setting `column_types`; that case is handled here, where we have the type information of the seed table.
39 | #} 40 | 41 | {% macro trino__load_csv_rows(model, agate_table) %} 42 | {% set column_override = model['config'].get('column_types', {}) %} 43 | {% set types = [] %} 44 | 45 | {%- for col_name in agate_table.column_names -%} 46 | {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%} 47 | {%- set type = column_override.get(col_name, inferred_type) -%} 48 | {%- do types.append(type) -%} 49 | {%- endfor -%} 50 | 51 | {% set batch_size = get_batch_size() %} 52 | 53 | {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} 54 | {% set bindings = [] %} 55 | 56 | {% set statements = [] %} 57 | 58 | {% for chunk in agate_table.rows | batch(batch_size) %} 59 | {% set bindings = [] %} 60 | 61 | {% set sql %} 62 | insert into {{ this.render() }} ({{ cols_sql }}) values 63 | {% for row in chunk -%} 64 | ({%- for tuple in create_bindings(row, types) -%} 65 | {%- if tuple.0 is not none -%} 66 | {{ tuple.0 }} 67 | {%- do bindings.append(tuple.1) -%} 68 | {%- else -%} 69 | {{ tuple.1 }} 70 | {%- endif -%} 71 | {%- if not loop.last%},{%- endif %} 72 | {%- endfor -%}) 73 | {%- if not loop.last%},{%- endif %} 74 | {%- endfor %} 75 | {% endset %} 76 | 77 | {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} 78 | 79 | {% if loop.index0 == 0 %} 80 | {% do statements.append(sql) %} 81 | {% endif %} 82 | {% endfor %} 83 | 84 | {# Return SQL so we can render it out into the compiled files #} 85 | {{ return(statements[0]) }} 86 | {% endmacro %} 87 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/materializations/snapshot.sql: -------------------------------------------------------------------------------- 1 | {% materialization snapshot, adapter='trino' %} 2 | {% if config.get('properties') %} 3 | {% if config.get('properties').get('location') %} 4 | {%- do exceptions.raise_compiler_error("Specifying 'location' property in snapshots is not supported.") -%} 5 | {% endif %} 6 | {% endif %} 7 | {{ return(materialization_snapshot_default()) }} 8 | {% endmaterialization %} 9 | 10 | {% macro trino__snapshot_hash_arguments(args) -%} 11 | lower(to_hex(md5(to_utf8(concat({%- for arg in args -%} 12 | coalesce(cast({{ arg }} as varchar), ''){% if not loop.last %}, '|',{% endif -%} 13 | {%- endfor -%} 14 | ))))) 15 | {%- endmacro %} 16 | 17 | {% macro trino__post_snapshot(staging_relation) %} 18 | -- Clean up the snapshot temp table 19 | {% do drop_relation(staging_relation) %} 20 | {% endmacro %} 21 | 22 | {% macro trino__snapshot_merge_sql(target, source, insert_cols) -%} 23 | {%- set insert_cols_csv = insert_cols | join(', ') -%} 24 | 25 | {%- set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() -%} 26 | 27 | merge into {{ target.render() }} as DBT_INTERNAL_DEST 28 | using {{ source }} as DBT_INTERNAL_SOURCE 29 | on DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }} = DBT_INTERNAL_DEST.{{ columns.dbt_scd_id }} 30 | 31 | when matched 32 | {% if config.get("dbt_valid_to_current") %} 33 | and (DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or 34 | DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null) 35 | {% else %} 36 | and DBT_INTERNAL_DEST.{{ columns.dbt_valid_to }} is null 37 | {% endif %} 38 | and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') 39 | then update 40 | set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }} 41 | 42 | when not matched 43 | and 
DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'
44 |     then insert ({{ insert_cols_csv }})
45 |     values ({% for insert_col in insert_cols -%}
46 |                 DBT_INTERNAL_SOURCE.{{ insert_col }}
47 |                 {%- if not loop.last %}, {% endif %}
48 |             {%- endfor %})
49 | 
50 | {% endmacro %}
51 | 


--------------------------------------------------------------------------------
/dbt/include/trino/macros/materializations/table.sql:
--------------------------------------------------------------------------------
 1 | {% materialization table, adapter = 'trino' %}
 2 |   {%- set on_table_exists = config.get('on_table_exists', 'rename') -%}
 3 |   {% if on_table_exists not in ['rename', 'drop', 'replace', 'skip'] %}
 4 |     {%- set log_message = 'Invalid value for on_table_exists (%s) specified. Setting default value (%s).' % (on_table_exists, 'rename') -%}
 5 |     {% do log(log_message) %}
 6 |     {%- set on_table_exists = 'rename' -%}
 7 |   {% endif %}
 8 | 
 9 |   {%- set existing_relation = load_cached_relation(this) -%}
10 |   {%- set target_relation = this.incorporate(type='table') %}
11 | 
12 |   {% if on_table_exists == 'rename' %}
13 |     {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}
14 |     -- the intermediate_relation should not already exist in the database; get_relation
15 |     -- will return None in that case. Otherwise, we get a relation that we can drop
16 |     -- later, before we try to use this name for the current operation
17 |     {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}
18 | 
19 |     {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}
20 |     {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}
21 |     -- as above, the backup_relation should not already exist
22 |     {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}
23 | 
24 |     -- drop the temp relations if they exist already in the database
25 |     {{ drop_relation_if_exists(preexisting_intermediate_relation) }}
26 |     {{ drop_relation_if_exists(preexisting_backup_relation) }}
27 |   {% endif %}
28 | 
29 |   {{ run_hooks(pre_hooks) }}
30 | 
31 |   -- grab the current table's grants config for comparison later on
32 |   {% set grant_config = config.get('grants') %}
33 | 
34 |   {#-- Create table with given `on_table_exists` mode #}
35 |   {% do on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) %}
36 | 
37 |   {% do persist_docs(target_relation, model) %}
38 | 
39 |   {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}
40 |   {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}
41 | 
42 |   {{ run_hooks(post_hooks) }}
43 | 
44 |   {{ return({'relations': [target_relation]}) }}
45 | {% endmaterialization %}
46 | 
47 | 
48 | {% macro on_table_exists_logic(on_table_exists, existing_relation, intermediate_relation, backup_relation, target_relation) -%}
49 |   {#-- Create table with given `on_table_exists` mode #}
50 |   {% if on_table_exists == 'rename' %}
51 | 
52 |     {#-- table does not exist #}
53 |     {% if existing_relation is none %}
54 |       {% call statement('main') -%}
55 |         {{ create_table_as(False, target_relation, sql) }}
56 |       {%- endcall %}
57 | 
58 |     {#-- table does exist #}
59 |     {% else %}
60 |       {#-- build model #}
61 |       {% call statement('main') -%}
62 |         {{ create_table_as(False, intermediate_relation, sql) }}
63 |       {%- endcall %}
64 | 
65 |       {#-- cleanup #}
66 |       {{ adapter.rename_relation(existing_relation, backup_relation) }}
67 | 
{{ adapter.rename_relation(intermediate_relation, target_relation) }} 68 | 69 | {#-- finally, drop the existing/backup relation after the commit #} 70 | {{ drop_relation_if_exists(backup_relation) }} 71 | {% endif %} 72 | 73 | {% elif on_table_exists == 'drop' %} 74 | {#-- cleanup #} 75 | {%- if existing_relation is not none -%} 76 | {{ adapter.drop_relation(existing_relation) }} 77 | {%- endif -%} 78 | 79 | {#-- build model #} 80 | {% call statement('main') -%} 81 | {{ create_table_as(False, target_relation, sql) }} 82 | {%- endcall %} 83 | 84 | {% elif on_table_exists == 'replace' %} 85 | {#-- build model #} 86 | {% call statement('main') -%} 87 | {{ create_table_as(False, target_relation, sql, 'replace') }} 88 | {%- endcall %} 89 | 90 | {% elif on_table_exists == 'skip' %} 91 | {#-- build model #} 92 | {% call statement('main') -%} 93 | {{ create_table_as(False, target_relation, sql, 'skip') }} 94 | {%- endcall %} 95 | 96 | {% endif %} 97 | {% endmacro %} 98 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/materializations/view.sql: -------------------------------------------------------------------------------- 1 | {% materialization view, adapter='trino' -%} 2 | {% set to_return = create_or_replace_view() %} 3 | {% set target_relation = this.incorporate(type='view') %} 4 | 5 | {% do persist_docs(target_relation, model) %} 6 | 7 | {% do return(to_return) %} 8 | {%- endmaterialization %} 9 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/any_value.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__any_value(expression) -%} 2 | min({{ expression }}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/array_append.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__array_append(array, new_element) -%} 2 | {{ array_concat(array, array_construct([new_element])) }} 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/array_concat.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__array_concat(array_1, array_2) -%} 2 | concat({{ array_1 }}, {{ array_2 }}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/array_construct.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__array_construct(inputs, data_type) -%} 2 | {%- if not inputs -%} 3 | null 4 | {%- else -%} 5 | array[ {{ inputs|join(' , ') }} ] 6 | {%- endif -%} 7 | {%- endmacro %} 8 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/bool_or.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__bool_or(expression) -%} 2 | bool_or({{ expression }}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/datatypes.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__type_float() -%} 2 | double 3 | {%- endmacro %} 4 | 5 | {% macro trino__type_string() -%} 6 | varchar 7 | {%- endmacro %} 8 | 9 | {% macro 
trino__type_numeric() -%} 10 | decimal(28, 6) 11 | {%- endmacro %} 12 | 13 | {%- macro trino__type_int() -%} 14 | integer 15 | {%- endmacro -%} 16 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/date_spine.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__date_spine(datepart, start_date, end_date) %} 2 | 3 | 4 | {# call as follows: 5 | 6 | date_spine( 7 | "day", 8 | "to_date('01/01/2016', 'mm/dd/yyyy')", 9 | "dbt.dateadd(week, 1, current_date)" 10 | ) #} 11 | 12 | 13 | with rawdata as ( 14 | 15 | {{dbt.generate_series( 16 | dbt.get_intervals_between(start_date, end_date, datepart) 17 | )}} 18 | 19 | ), 20 | 21 | all_periods as ( 22 | 23 | select ( 24 | {{ 25 | dbt.dateadd( 26 | datepart, 27 | "row_number() over (order by 1) - 1", 28 | "cast(" ~ start_date ~ " as date)" 29 | ) 30 | }} 31 | ) as date_{{datepart}} 32 | from rawdata 33 | 34 | ), 35 | 36 | filtered as ( 37 | 38 | select * 39 | from all_periods 40 | where date_{{datepart}} <= cast({{ end_date }} as date) 41 | 42 | ) 43 | 44 | select * from filtered 45 | 46 | {% endmacro %} 47 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/date_trunc.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__date_trunc(datepart, date) -%} 2 | date_trunc('{{datepart}}', {{date}}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/dateadd.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__dateadd(datepart, interval, from_date_or_timestamp) -%} 2 | date_add('{{ datepart }}', {{ interval }}, {{ from_date_or_timestamp }}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/datediff.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__datediff(first_date, second_date, datepart) -%} 2 | {%- if datepart == 'year' -%} 3 | (year(CAST({{ second_date }} AS TIMESTAMP)) - year(CAST({{ first_date }} AS TIMESTAMP))) 4 | {%- elif datepart == 'quarter' -%} 5 | ({{ datediff(first_date, second_date, 'year') }} * 4) + quarter(CAST({{ second_date }} AS TIMESTAMP)) - quarter(CAST({{ first_date }} AS TIMESTAMP)) 6 | {%- elif datepart == 'month' -%} 7 | ({{ datediff(first_date, second_date, 'year') }} * 12) + month(CAST({{ second_date }} AS TIMESTAMP)) - month(CAST({{ first_date }} AS TIMESTAMP)) 8 | {%- elif datepart == 'day' -%} 9 | ((to_milliseconds((CAST(CAST({{ second_date }} AS TIMESTAMP) AS DATE) - CAST(CAST({{ first_date }} AS TIMESTAMP) AS DATE)))) / 86400000) 10 | {%- elif datepart == 'week' -%} 11 | ({{ datediff(first_date, second_date, 'day') }} / 7 + case 12 | when dow(CAST({{first_date}} AS TIMESTAMP)) <= dow(CAST({{second_date}} AS TIMESTAMP)) then 13 | case when {{first_date}} <= {{second_date}} then 0 else -1 end 14 | else 15 | case when {{first_date}} <= {{second_date}} then 1 else 0 end 16 | end) 17 | {%- elif datepart == 'hour' -%} 18 | ({{ datediff(first_date, second_date, 'day') }} * 24 + hour(CAST({{ second_date }} AS TIMESTAMP)) - hour(CAST({{ first_date }} AS TIMESTAMP))) 19 | {%- elif datepart == 'minute' -%} 20 | ({{ datediff(first_date, second_date, 'hour') }} * 60 + minute(CAST({{ second_date }} AS TIMESTAMP)) - minute(CAST({{ 
first_date }} AS TIMESTAMP))) 21 | {%- elif datepart == 'second' -%} 22 | ({{ datediff(first_date, second_date, 'minute') }} * 60 + second(CAST({{ second_date }} AS TIMESTAMP)) - second(CAST({{ first_date }} AS TIMESTAMP))) 23 | {%- elif datepart == 'millisecond' -%} 24 | (to_milliseconds((CAST({{ second_date }} AS TIMESTAMP) - CAST({{ first_date }} AS TIMESTAMP)))) 25 | {%- else -%} 26 | {% if execute %}{{ exceptions.raise_compiler_error("Unsupported datepart for macro datediff in Trino: {!r}".format(datepart)) }}{% endif %} 27 | {%- endif -%} 28 | {%- endmacro %} 29 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/hash.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__hash(field) -%} 2 | lower(to_hex(md5(to_utf8(cast({{field}} as varchar))))) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/listagg.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} 2 | {% set collect_list %} array_agg({{ measure }} {% if order_by_clause -%}{{ order_by_clause }}{%- endif %}) {% endset %} 3 | {% set limited %} slice({{ collect_list }}, 1, {{ limit_num }}) {% endset %} 4 | {% set collected = limited if limit_num else collect_list %} 5 | {% set final %} array_join({{ collected }}, {{ delimiter_text }}) {% endset %} 6 | {% do return(final) %} 7 | {%- endmacro %} 8 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/right.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__right(string_text, length_expression) %} 2 | case when {{ length_expression }} = 0 3 | then '' 4 | else 5 | substr({{ string_text }}, -1 * ({{ length_expression }})) 6 | end 7 | {%- endmacro -%} 8 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/safe_cast.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__safe_cast(field, type) -%} 2 | try_cast({{field}} as {{type}}) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/split_part.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__split_part(string_text, delimiter_text, part_number) %} 2 | {% if part_number >= 0 %} 3 | {{ dbt.default__split_part(string_text, delimiter_text, part_number) }} 4 | {% else %} 5 | {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }} 6 | {% endif %} 7 | {% endmacro %} 8 | -------------------------------------------------------------------------------- /dbt/include/trino/macros/utils/timestamps.sql: -------------------------------------------------------------------------------- 1 | {% macro trino__current_timestamp() -%} 2 | current_timestamp 3 | {%- endmacro %} 4 | 5 | {% macro trino__snapshot_string_as_time(timestamp) %} 6 | {%- set result = "timestamp '" ~ timestamp ~ "'" -%} 7 | {{ return(result) }} 8 | {% endmacro %} 9 | -------------------------------------------------------------------------------- /dbt/include/trino/sample_profiles.yml: -------------------------------------------------------------------------------- 1 | default: 2 | outputs: 3 | 
4 | dev: 5 | type: trino 6 | method: none # optional, one of {none | ldap | kerberos} 7 | user: [dev_user] 8 | password: [password] # required if method is ldap or kerberos 9 | database: [database name] 10 | host: [hostname] 11 | port: [port number] 12 | schema: [dev_schema] 13 | threads: [1 or more] 14 | 15 | prod: 16 | type: trino 17 | method: none # optional, one of {none | ldap | kerberos} 18 | user: [prod_user] 19 | password: [prod_password] # required if method is ldap or kerberos 20 | database: [database name] 21 | host: [hostname] 22 | port: [port number] 23 | schema: [prod_schema] 24 | threads: [1 or more] 25 | 26 | target: dev 27 | -------------------------------------------------------------------------------- /dev_requirements.txt: -------------------------------------------------------------------------------- 1 | dbt-tests-adapter~=1.15.1 2 | mypy==1.13.0 # patch updates have historically introduced breaking changes 3 | pre-commit~=4.0 4 | pytest~=8.3 5 | tox~=4.23 6 | -------------------------------------------------------------------------------- /docker-compose-starburst.yml: -------------------------------------------------------------------------------- 1 | services: 2 | trino: 3 | ports: 4 | - "8080:8080" 5 | image: "starburstdata/starburst-enterprise:468-e.6" 6 | volumes: 7 | - ./docker/starburst/etc:/etc/starburst 8 | - ./docker/starburst/catalog:/etc/starburst/catalog 9 | environment: 10 | - _JAVA_OPTIONS=-Dfile.encoding=UTF-8 11 | 12 | postgres: 13 | ports: 14 | - "5432:5432" 15 | image: postgres:11 16 | environment: 17 | POSTGRES_USER: dbt-trino 18 | POSTGRES_PASSWORD: dbt-trino 19 | 20 | metastore_db: 21 | image: postgres:11 22 | hostname: metastore_db 23 | environment: 24 | POSTGRES_USER: hive 25 | POSTGRES_PASSWORD: hive 26 | POSTGRES_DB: metastore 27 | 28 | hive-metastore: 29 | hostname: hive-metastore 30 | image: 'starburstdata/hive:3.1.2-e.18' 31 | ports: 32 | - '9083:9083' # Metastore Thrift 33 | environment: 34 | HIVE_METASTORE_DRIVER: org.postgresql.Driver 35 | HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore 36 | HIVE_METASTORE_USER: hive 37 | HIVE_METASTORE_PASSWORD: hive 38 | HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/ 39 | S3_ENDPOINT: http://minio:9000 40 | S3_ACCESS_KEY: minio 41 | S3_SECRET_KEY: minio123 42 | S3_PATH_STYLE_ACCESS: "true" 43 | REGION: "" 44 | GOOGLE_CLOUD_KEY_FILE_PATH: "" 45 | AZURE_ADL_CLIENT_ID: "" 46 | AZURE_ADL_CREDENTIAL: "" 47 | AZURE_ADL_REFRESH_URL: "" 48 | AZURE_ABFS_STORAGE_ACCOUNT: "" 49 | AZURE_ABFS_ACCESS_KEY: "" 50 | AZURE_WASB_STORAGE_ACCOUNT: "" 51 | AZURE_ABFS_OAUTH: "" 52 | AZURE_ABFS_OAUTH_TOKEN_PROVIDER: "" 53 | AZURE_ABFS_OAUTH_CLIENT_ID: "" 54 | AZURE_ABFS_OAUTH_SECRET: "" 55 | AZURE_ABFS_OAUTH_ENDPOINT: "" 56 | AZURE_WASB_ACCESS_KEY: "" 57 | HIVE_METASTORE_USERS_IN_ADMIN_ROLE: "admin" 58 | depends_on: 59 | - metastore_db 60 | 61 | minio: 62 | hostname: minio 63 | image: 'minio/minio:RELEASE.2022-05-26T05-48-41Z' 64 | container_name: minio 65 | ports: 66 | - '9000:9000' 67 | - '9001:9001' 68 | environment: 69 | MINIO_ACCESS_KEY: minio 70 | MINIO_SECRET_KEY: minio123 71 | command: server /data --console-address ":9001" 72 | 73 | # This job will create the "datalake" bucket on Minio 74 | mc-job: 75 | image: 'minio/mc:RELEASE.2022-05-09T04-08-26Z' 76 | entrypoint: | 77 | /bin/bash -c " 78 | sleep 5; 79 | /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123; 80 | /usr/bin/mc mb --quiet myminio/datalake 81 | " 82 | depends_on: 83 | - minio 84 | 85 | networks: 86 | 
default: 87 | name: dbt-net 88 | external: true 89 | -------------------------------------------------------------------------------- /docker-compose-trino.yml: -------------------------------------------------------------------------------- 1 | services: 2 | trino: 3 | ports: 4 | - "8080:8080" 5 | image: "trinodb/trino:475" 6 | volumes: 7 | - ./docker/trino/etc:/usr/lib/trino/etc:ro 8 | - ./docker/trino/catalog:/etc/trino/catalog 9 | 10 | postgres: 11 | ports: 12 | - "5432:5432" 13 | image: postgres:11 14 | container_name: postgres 15 | environment: 16 | POSTGRES_USER: dbt-trino 17 | POSTGRES_PASSWORD: dbt-trino 18 | 19 | metastore_db: 20 | image: postgres:11 21 | hostname: metastore_db 22 | environment: 23 | POSTGRES_USER: hive 24 | POSTGRES_PASSWORD: hive 25 | POSTGRES_DB: metastore 26 | 27 | hive-metastore: 28 | hostname: hive-metastore 29 | image: 'starburstdata/hive:3.1.2-e.18' 30 | ports: 31 | - '9083:9083' # Metastore Thrift 32 | environment: 33 | HIVE_METASTORE_DRIVER: org.postgresql.Driver 34 | HIVE_METASTORE_JDBC_URL: jdbc:postgresql://metastore_db:5432/metastore 35 | HIVE_METASTORE_USER: hive 36 | HIVE_METASTORE_PASSWORD: hive 37 | HIVE_METASTORE_WAREHOUSE_DIR: s3://datalake/ 38 | S3_ENDPOINT: http://minio:9000 39 | S3_ACCESS_KEY: minio 40 | S3_SECRET_KEY: minio123 41 | S3_PATH_STYLE_ACCESS: "true" 42 | REGION: "" 43 | GOOGLE_CLOUD_KEY_FILE_PATH: "" 44 | AZURE_ADL_CLIENT_ID: "" 45 | AZURE_ADL_CREDENTIAL: "" 46 | AZURE_ADL_REFRESH_URL: "" 47 | AZURE_ABFS_STORAGE_ACCOUNT: "" 48 | AZURE_ABFS_ACCESS_KEY: "" 49 | AZURE_WASB_STORAGE_ACCOUNT: "" 50 | AZURE_ABFS_OAUTH: "" 51 | AZURE_ABFS_OAUTH_TOKEN_PROVIDER: "" 52 | AZURE_ABFS_OAUTH_CLIENT_ID: "" 53 | AZURE_ABFS_OAUTH_SECRET: "" 54 | AZURE_ABFS_OAUTH_ENDPOINT: "" 55 | AZURE_WASB_ACCESS_KEY: "" 56 | HIVE_METASTORE_USERS_IN_ADMIN_ROLE: "admin" 57 | depends_on: 58 | - metastore_db 59 | 60 | minio: 61 | hostname: minio 62 | image: 'minio/minio:RELEASE.2022-05-26T05-48-41Z' 63 | container_name: minio 64 | ports: 65 | - '9000:9000' 66 | - '9001:9001' 67 | environment: 68 | MINIO_ACCESS_KEY: minio 69 | MINIO_SECRET_KEY: minio123 70 | command: server /data --console-address ":9001" 71 | 72 | # This job will create the "datalake" bucket on Minio 73 | mc-job: 74 | image: 'minio/mc:RELEASE.2022-05-09T04-08-26Z' 75 | entrypoint: | 76 | /bin/bash -c " 77 | sleep 5; 78 | /usr/bin/mc config --quiet host add myminio http://minio:9000 minio minio123; 79 | /usr/bin/mc mb --quiet myminio/datalake 80 | " 81 | depends_on: 82 | - minio 83 | 84 | networks: 85 | default: 86 | name: dbt-net 87 | external: true 88 | -------------------------------------------------------------------------------- /docker/init_starburst.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # move to wherever we are so docker things work 4 | cd "$(dirname "${BASH_SOURCE[0]}")" 5 | cd .. 6 | 7 | set -exo pipefail 8 | 9 | docker compose -f docker-compose-starburst.yml build 10 | docker compose -f docker-compose-starburst.yml up -d --quiet-pull 11 | timeout 5m bash -c -- 'while ! docker compose -f docker-compose-starburst.yml logs trino 2>&1 | tail -n 1 | grep "SERVER STARTED"; do sleep 2; done' 12 | -------------------------------------------------------------------------------- /docker/init_trino.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # move to wherever we are so docker things work 4 | cd "$(dirname "${BASH_SOURCE[0]}")" 5 | cd .. 
6 | 7 | set -exo pipefail 8 | 9 | docker compose -f docker-compose-trino.yml build 10 | docker compose -f docker-compose-trino.yml up -d --quiet-pull 11 | timeout 5m bash -c -- 'while ! docker compose -f docker-compose-trino.yml logs trino 2>&1 | tail -n 1 | grep "SERVER STARTED"; do sleep 2; done' 12 | -------------------------------------------------------------------------------- /docker/remove_starburst.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # move to wherever we are so docker things work 4 | cd "$(dirname "${BASH_SOURCE[0]}")" 5 | cd .. 6 | docker compose -f docker-compose-starburst.yml down 7 | -------------------------------------------------------------------------------- /docker/remove_trino.bash: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # move to wherever we are so docker things work 4 | cd "$(dirname "${BASH_SOURCE[0]}")" 5 | cd .. 6 | docker compose -f docker-compose-trino.yml down 7 | -------------------------------------------------------------------------------- /docker/starburst/catalog/delta.properties: -------------------------------------------------------------------------------- 1 | connector.name=delta-lake 2 | delta.enable-non-concurrent-writes=true 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | hive.metastore.uri=thrift://hive-metastore:9083 8 | s3.aws-access-key=minio 9 | s3.aws-secret-key=minio123 10 | hive.metastore-cache-ttl=0s 11 | hive.metastore-refresh-interval=5s 12 | delta.security=allow-all 13 | -------------------------------------------------------------------------------- /docker/starburst/catalog/hive.properties: -------------------------------------------------------------------------------- 1 | connector.name=hive 2 | hive.metastore.uri=thrift://hive-metastore:9083 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | s3.aws-access-key=minio 8 | s3.aws-secret-key=minio123 9 | hive.metastore-cache-ttl=0s 10 | hive.metastore-refresh-interval=5s 11 | hive.security=sql-standard 12 | -------------------------------------------------------------------------------- /docker/starburst/catalog/iceberg.properties: -------------------------------------------------------------------------------- 1 | connector.name=iceberg 2 | hive.metastore.uri=thrift://hive-metastore:9083 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | s3.aws-access-key=minio 8 | s3.aws-secret-key=minio123 9 | hive.metastore-cache-ttl=0s 10 | hive.metastore-refresh-interval=5s 11 | iceberg.unique-table-location=true 12 | -------------------------------------------------------------------------------- /docker/starburst/catalog/memory.properties: -------------------------------------------------------------------------------- 1 | connector.name=memory 2 | memory.max-data-per-node=128MB 3 | -------------------------------------------------------------------------------- /docker/starburst/catalog/postgresql.properties: -------------------------------------------------------------------------------- 1 | connector.name=postgresql 2 | connection-url=jdbc:postgresql://postgres:5432/dbt-trino 3 | connection-user=dbt-trino 4 | connection-password=dbt-trino 5 | -------------------------------------------------------------------------------- 
/docker/starburst/catalog/tpch.properties: -------------------------------------------------------------------------------- 1 | connector.name=tpch 2 | -------------------------------------------------------------------------------- /docker/starburst/etc/config.properties: -------------------------------------------------------------------------------- 1 | coordinator=true 2 | node-scheduler.include-coordinator=true 3 | http-server.http.port=8080 4 | discovery.uri=http://localhost:8080 5 | -------------------------------------------------------------------------------- /docker/starburst/etc/jvm.config: -------------------------------------------------------------------------------- 1 | -server 2 | -XX:InitialRAMPercentage=80 3 | -XX:MaxRAMPercentage=80 4 | -XX:G1HeapRegionSize=32M 5 | -XX:+ExplicitGCInvokesConcurrent 6 | -XX:+HeapDumpOnOutOfMemoryError 7 | -XX:+ExitOnOutOfMemoryError 8 | -XX:-OmitStackTraceInFastThrow 9 | -XX:ReservedCodeCacheSize=256M 10 | -XX:PerMethodRecompilationCutoff=10000 11 | -XX:PerBytecodeRecompilationCutoff=10000 12 | -Djdk.attach.allowAttachSelf=true 13 | -Djdk.nio.maxCachedBufferSize=2000000 14 | -------------------------------------------------------------------------------- /docker/starburst/etc/node.properties: -------------------------------------------------------------------------------- 1 | node.environment=docker 2 | node.data-dir=/data/starburst 3 | -------------------------------------------------------------------------------- /docker/trino/catalog/delta.properties: -------------------------------------------------------------------------------- 1 | connector.name=delta-lake 2 | delta.enable-non-concurrent-writes=true 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | hive.metastore.uri=thrift://hive-metastore:9083 8 | s3.aws-access-key=minio 9 | s3.aws-secret-key=minio123 10 | hive.metastore-cache-ttl=0s 11 | hive.metastore-refresh-interval=5s 12 | -------------------------------------------------------------------------------- /docker/trino/catalog/hive.properties: -------------------------------------------------------------------------------- 1 | connector.name=hive 2 | hive.metastore.uri=thrift://hive-metastore:9083 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | s3.aws-access-key=minio 8 | s3.aws-secret-key=minio123 9 | hive.metastore-cache-ttl=0s 10 | hive.metastore-refresh-interval=5s 11 | hive.security=sql-standard 12 | -------------------------------------------------------------------------------- /docker/trino/catalog/iceberg.properties: -------------------------------------------------------------------------------- 1 | connector.name=iceberg 2 | hive.metastore.uri=thrift://hive-metastore:9083 3 | fs.native-s3.enabled=true 4 | s3.region=us-east-1 5 | s3.endpoint=http://minio:9000 6 | s3.path-style-access=true 7 | s3.aws-access-key=minio 8 | s3.aws-secret-key=minio123 9 | hive.metastore-cache-ttl=0s 10 | hive.metastore-refresh-interval=5s 11 | -------------------------------------------------------------------------------- /docker/trino/catalog/memory.properties: -------------------------------------------------------------------------------- 1 | connector.name=memory 2 | memory.max-data-per-node=128MB 3 | -------------------------------------------------------------------------------- /docker/trino/catalog/postgresql.properties: 
-------------------------------------------------------------------------------- 1 | connector.name=postgresql 2 | connection-url=jdbc:postgresql://postgres:5432/dbt-trino 3 | connection-user=dbt-trino 4 | connection-password=dbt-trino 5 | -------------------------------------------------------------------------------- /docker/trino/catalog/tpch.properties: -------------------------------------------------------------------------------- 1 | connector.name=tpch 2 | -------------------------------------------------------------------------------- /docker/trino/etc/config.properties: -------------------------------------------------------------------------------- 1 | coordinator=true 2 | node-scheduler.include-coordinator=true 3 | http-server.http.port=8080 4 | discovery.uri=http://localhost:8080 5 | -------------------------------------------------------------------------------- /docker/trino/etc/jvm.config: -------------------------------------------------------------------------------- 1 | -server 2 | -XX:InitialRAMPercentage=80 3 | -XX:MaxRAMPercentage=80 4 | -XX:G1HeapRegionSize=32M 5 | -XX:+ExplicitGCInvokesConcurrent 6 | -XX:+HeapDumpOnOutOfMemoryError 7 | -XX:+ExitOnOutOfMemoryError 8 | -XX:-OmitStackTraceInFastThrow 9 | -XX:ReservedCodeCacheSize=256M 10 | -XX:PerMethodRecompilationCutoff=10000 11 | -XX:PerBytecodeRecompilationCutoff=10000 12 | -Djdk.attach.allowAttachSelf=true 13 | -Djdk.nio.maxCachedBufferSize=2000000 14 | -------------------------------------------------------------------------------- /docker/trino/etc/node.properties: -------------------------------------------------------------------------------- 1 | node.environment=docker 2 | node.data-dir=/data/trino 3 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | namespace_packages = True 3 | explicit_package_bases = True 4 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning 4 | ignore:unclosed file .*:ResourceWarning 5 | testpaths = 6 | tests/unit 7 | tests/functional 8 | markers = 9 | delta 10 | iceberg 11 | hive 12 | postgresql 13 | prepared_statements_disabled 14 | skip_profile(profile) 15 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import re 4 | import sys 5 | 6 | # require python 3.9 or newer 7 | if sys.version_info < (3, 9): 8 | print("Error: dbt does not support this version of Python.") 9 | print("Please upgrade to Python 3.9 or higher.") 10 | sys.exit(1) 11 | 12 | 13 | # require version of setuptools that supports find_namespace_packages 14 | from setuptools import setup 15 | 16 | try: 17 | from setuptools import find_namespace_packages 18 | except ImportError: 19 | # the user has a downlevel version of setuptools. 
 20 |     print("Error: dbt requires setuptools v40.1.0 or higher.")
 21 |     print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
 22 |     sys.exit(1)
 23 | 
 24 | this_directory = os.path.abspath(os.path.dirname(__file__))
 25 | with open(os.path.join(this_directory, "README.md")) as f:
 26 |     long_description = f.read()
 27 | 
 28 | 
 29 | package_name = "dbt-trino"
 30 | 
 31 | 
 32 | # get this package's version from dbt/adapters/trino/__version__.py
 33 | def _get_plugin_version_dict():
 34 |     _version_path = os.path.join(this_directory, "dbt", "adapters", "trino", "__version__.py")
 35 |     _semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
 36 |     _pre = r"""((?P<prekind>a|b|rc)(?P<pre>
\d+))?"""
 37 |     _version_pattern = rf"""version\s*=\s*["']{_semver}{_pre}["']"""
 38 |     with open(_version_path) as f:
 39 |         match = re.search(_version_pattern, f.read().strip())
 40 |         if match is None:
 41 |             raise ValueError(f"invalid version at {_version_path}")
 42 |         return match.groupdict()
 43 | 
 44 | 
 45 | def _dbt_trino_version():
 46 |     parts = _get_plugin_version_dict()
 47 |     trino_version = "{major}.{minor}.{patch}".format(**parts)
 48 |     if parts["prekind"] and parts["pre"]:
 49 |         trino_version += parts["prekind"] + parts["pre"]
 50 |     return trino_version
 51 | 
 52 | 
 53 | package_version = _dbt_trino_version()
 54 | description = """The trino adapter plugin for dbt (data build tool)"""
 55 | 
 56 | setup(
 57 |     name=package_name,
 58 |     version=package_version,
 59 |     description=description,
 60 |     long_description=long_description,
 61 |     long_description_content_type="text/markdown",
 62 |     platforms="any",
 63 |     license="Apache License 2.0",
 64 |     license_files=("LICENSE.txt",),
 65 |     author="Starburst Data",
 66 |     author_email="info@starburstdata.com",
 67 |     url="https://github.com/starburstdata/dbt-trino",
 68 |     packages=find_namespace_packages(include=["dbt", "dbt.*"]),
 69 |     package_data={
 70 |         "dbt": [
 71 |             "include/trino/dbt_project.yml",
 72 |             "include/trino/sample_profiles.yml",
 73 |             "include/trino/macros/*.sql",
 74 |             "include/trino/macros/*/*.sql",
 75 |             "include/trino/macros/*/*/*.sql",
 76 |         ]
 77 |     },
 78 |     install_requires=[
 79 |         "dbt-common>=1.25.0,<2.0",
 80 |         "dbt-adapters~=1.15.1",
 81 |         "trino~=0.331",
 82 |         # add dbt-core to ensure backwards compatibility of installation; this is not a functional dependency
 83 |         "dbt-core>=1.8.0",
 84 |     ],
 85 |     zip_safe=False,
 86 |     classifiers=[
 87 |         "Development Status :: 5 - Production/Stable",
 88 |         "License :: OSI Approved :: Apache Software License",
 89 |         "Operating System :: Microsoft :: Windows",
 90 |         "Operating System :: MacOS :: MacOS X",
 91 |         "Operating System :: POSIX :: Linux",
 92 |         "Programming Language :: Python :: 3.9",
 93 |         "Programming Language :: Python :: 3.10",
 94 |         "Programming Language :: Python :: 3.11",
 95 |         "Programming Language :: Python :: 3.12",
 96 |         "Programming Language :: Python :: 3.13",
 97 |     ],
 98 |     python_requires=">=3.9",
 99 | )
100 | 
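
A minimal, self-contained sketch of what the version-parsing helpers above do; the version string "1.9.2rc1" is an arbitrary example value, not a pinned release:

```python
# Sketch of the parsing performed by _get_plugin_version_dict() and
# _dbt_trino_version() above; "1.9.2rc1" is only an example value.
import re

_semver = r"""(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"""
_pre = r"""((?P<prekind>a|b|rc)(?P<pre>\d+))?"""
pattern = rf"""version\s*=\s*["']{_semver}{_pre}["']"""

parts = re.search(pattern, 'version = "1.9.2rc1"').groupdict()
# parts == {"major": "1", "minor": "9", "patch": "2", "prekind": "rc", "pre": "1"}
version = "{major}.{minor}.{patch}".format(**parts)
if parts["prekind"] and parts["pre"]:
    version += parts["prekind"] + parts["pre"]
assert version == "1.9.2rc1"
```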


--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
  1 | import os
  2 | 
  3 | import pytest
  4 | import trino
  5 | 
  6 | # Import the functional fixtures as a plugin
  7 | # Note: fixtures with session scope need to be local
  8 | 
  9 | pytest_plugins = ["dbt.tests.fixtures.project"]
 10 | 
 11 | 
 12 | def pytest_addoption(parser):
 13 |     parser.addoption("--profile", action="store", default="trino_starburst", type=str)
 14 | 
 15 | 
 16 | # The profile dictionary, used to write out profiles.yml
 17 | @pytest.fixture(scope="class")
 18 | def dbt_profile_target(request):
 19 |     profile_type = request.config.getoption("--profile")
 20 |     if profile_type == "trino_starburst":
 21 |         target = get_trino_starburst_target()
 22 |     elif profile_type == "starburst_galaxy":
 23 |         target = get_galaxy_target()
 24 |     else:
 25 |         raise ValueError(f"Invalid profile type '{profile_type}'")
 26 | 
 27 |     prepared_statements_disabled = request.node.get_closest_marker("prepared_statements_disabled")
 28 |     if prepared_statements_disabled:
 29 |         target.update({"prepared_statements_enabled": False})
 30 | 
 31 |     postgresql = request.node.get_closest_marker("postgresql")
 32 |     iceberg = request.node.get_closest_marker("iceberg")
 33 |     delta = request.node.get_closest_marker("delta")
 34 |     hive = request.node.get_closest_marker("hive")
 35 | 
 36 |     if sum(bool(x) for x in (postgresql, iceberg, delta)) > 1:
 37 |         raise ValueError("Only one of postgresql, iceberg, delta can be specified as a marker")
 38 | 
 39 |     if postgresql:
 40 |         target.update({"catalog": "postgresql"})
 41 | 
 42 |     if delta:
 43 |         target.update({"catalog": "delta"})
 44 | 
 45 |     if iceberg:
 46 |         target.update({"catalog": "iceberg"})
 47 | 
 48 |     if hive:
 49 |         target.update({"catalog": "hive"})
 50 | 
 51 |     return target
 52 | 
 53 | 
 54 | def get_trino_starburst_target():
 55 |     return {
 56 |         "type": "trino",
 57 |         "method": "none",
 58 |         "threads": 4,
 59 |         "host": "localhost",
 60 |         "port": 8080,
 61 |         "user": "admin",
 62 |         "password": "",
 63 |         "roles": {
 64 |             "hive": "admin",
 65 |         },
 66 |         "catalog": "memory",
 67 |         "schema": "default",
 68 |         "timezone": "UTC",
 69 |     }
 70 | 
 71 | 
 72 | def get_galaxy_target():
 73 |     return {
 74 |         "type": "trino",
 75 |         "method": "ldap",
 76 |         "threads": 4,
 77 |         "retries": 5,
 78 |         "host": os.environ.get("DBT_TESTS_STARBURST_GALAXY_HOST"),
 79 |         "port": 443,
 80 |         "user": os.environ.get("DBT_TESTS_STARBURST_GALAXY_USER"),
 81 |         "password": os.environ.get("DBT_TESTS_STARBURST_GALAXY_PASSWORD"),
 82 |         "catalog": "iceberg",
 83 |         "schema": "default",
 84 |         "timezone": "UTC",
 85 |     }
 86 | 
 87 | 
 88 | @pytest.fixture(autouse=True)
 89 | def skip_by_profile_type(request):
 90 |     profile_type = request.config.getoption("--profile")
 91 |     if request.node.get_closest_marker("skip_profile"):
 92 |         for skip_profile_type in request.node.get_closest_marker("skip_profile").args:
 93 |             if skip_profile_type == profile_type:
 94 |                 pytest.skip(f"skipped on {profile_type} profile")
 95 | 
 96 | 
 97 | @pytest.fixture(scope="class")
 98 | def trino_connection(dbt_profile_target):
 99 |     if dbt_profile_target["method"] == "ldap":
100 |         return trino.dbapi.connect(
101 |             host=dbt_profile_target["host"],
102 |             port=dbt_profile_target["port"],
103 |             auth=trino.auth.BasicAuthentication(
104 |                 dbt_profile_target["user"], dbt_profile_target["password"]
105 |             ),
106 |             catalog=dbt_profile_target["catalog"],
107 |             schema=dbt_profile_target["schema"],
108 |             http_scheme="https",
109 |         )
110 |     else:
111 |         return trino.dbapi.connect(
112 |             host=dbt_profile_target["host"],
113 |             port=dbt_profile_target["port"],
114 |             user=dbt_profile_target["user"],
115 |             catalog=dbt_profile_target["catalog"],
116 |             schema=dbt_profile_target["schema"],
117 |         )
118 | 
119 | 
120 | def get_engine_type(trino_connection):
121 |     conn = trino_connection
122 |     if "galaxy.starburst.io" in conn.host:
123 |         return "starburst_galaxy"
124 |     cur = conn.cursor()
125 |     cur.execute("SELECT version()")
126 |     version = cur.fetchone()
127 |     if "-e" in version[0]:
128 |         return "starburst_enterprise"
129 |     else:
130 |         return "trino"
131 | 
132 | 
133 | @pytest.fixture(autouse=True)
134 | def skip_by_engine_type(request, trino_connection):
135 |     engine_type = get_engine_type(trino_connection)
136 |     if request.node.get_closest_marker("skip_engine"):
137 |         for skip_engine_type in request.node.get_closest_marker("skip_engine").args:
138 |             if skip_engine_type == engine_type:
139 |                 pytest.skip(f"skipped on {engine_type} engine")
140 | 
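
For orientation, a hypothetical test module showing how the pieces above compose: the `project` fixture comes from the `dbt.tests.fixtures.project` plugin, and the `iceberg` marker (declared in pytest.ini) is what makes `dbt_profile_target` switch the target catalog. The model and class names here are invented for this sketch.

```python
# Hypothetical module, not part of the suite: the marker below is read by
# dbt_profile_target() in conftest.py, which sets catalog to "iceberg".
import pytest
from dbt.tests.util import run_dbt

example_model_sql = "select 1 as id"  # invented one-line model


@pytest.mark.iceberg
class TestIcebergSmoke:
    @pytest.fixture(scope="class")
    def models(self):
        return {"example_model.sql": example_model_sql}

    def test_run(self, project):
        results = run_dbt(["run"])
        assert len(results) == 1
```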


--------------------------------------------------------------------------------
/tests/functional/adapter/behavior_flags/test_require_certificate_validation.py:
--------------------------------------------------------------------------------
 1 | import warnings
 2 | 
 3 | import pytest
 4 | from dbt.tests.util import run_dbt, run_dbt_and_capture
 5 | from urllib3.exceptions import InsecureRequestWarning
 6 | 
 7 | 
 8 | class TestRequireCertificateValidationDefault:
 9 |     @pytest.fixture(scope="class")
10 |     def project_config_update(self):
11 |         return {"flags": {}}
12 | 
13 |     def test_cert_default_value(self, project):
14 |         assert project.adapter.connections.profile.credentials.cert is None
15 | 
16 |     def test_require_certificate_validation_logs(self, project):
17 |         dbt_args = ["show", "--inline", "select 1"]
18 |         _, logs = run_dbt_and_capture(dbt_args)
19 |         assert "It is strongly advised to enable `require_certificate_validation` flag" in logs
20 | 
21 |     @pytest.mark.skip_profile("trino_starburst")
22 |     def test_require_certificate_validation_insecure_request_warning(self, project):
23 |         with warnings.catch_warnings(record=True) as w:
24 |             dbt_args = ["show", "--inline", "select 1"]
25 |             run_dbt(dbt_args)
26 | 
27 |             # Check if any InsecureRequestWarning was raised
28 |             assert any(
29 |                 issubclass(warning.category, InsecureRequestWarning) for warning in w
30 |             ), "InsecureRequestWarning was not raised"
31 | 
32 | 
33 | class TestRequireCertificateValidationFalse:
34 |     @pytest.fixture(scope="class")
35 |     def project_config_update(self):
36 |         return {"flags": {"require_certificate_validation": False}}
37 | 
38 |     def test_cert_default_value(self, project):
39 |         assert project.adapter.connections.profile.credentials.cert is None
40 | 
41 |     def test_require_certificate_validation_logs(self, project):
42 |         dbt_args = ["show", "--inline", "select 1"]
43 |         _, logs = run_dbt_and_capture(dbt_args)
44 |         assert "It is strongly advised to enable `require_certificate_validation` flag" in logs
45 | 
46 |     @pytest.mark.skip_profile("trino_starburst")
47 |     def test_require_certificate_validation_insecure_request_warning(self, project):
48 |         with warnings.catch_warnings(record=True) as w:
49 |             dbt_args = ["show", "--inline", "select 1"]
50 |             run_dbt(dbt_args)
51 | 
52 |             # Check if any InsecureRequestWarning was raised
53 |             assert any(
54 |                 issubclass(warning.category, InsecureRequestWarning) for warning in w
55 |             ), "InsecureRequestWarning was not raised"
56 | 
57 | 
58 | class TestRequireCertificateValidationTrue:
59 |     @pytest.fixture(scope="class")
60 |     def project_config_update(self):
61 |         return {"flags": {"require_certificate_validation": True}}
62 | 
63 |     def test_cert_default_value(self, project):
64 |         assert project.adapter.connections.profile.credentials.cert is True
65 | 
66 |     def test_require_certificate_validation_logs(self, project):
67 |         dbt_args = ["show", "--inline", "select 1"]
68 |         _, logs = run_dbt_and_capture(dbt_args)
69 |         assert "It is strongly advised to enable `require_certificate_validation` flag" not in logs
70 | 
71 |     @pytest.mark.skip_profile("trino_starburst")
72 |     def test_require_certificate_validation_insecure_request_warning(self, project):
73 |         with warnings.catch_warnings(record=True) as w:
74 |             dbt_args = ["show", "--inline", "select 1"]
75 |             run_dbt(dbt_args)
76 | 
77 |             # Check that no InsecureRequestWarning was raised
78 |             assert not any(
79 |                 issubclass(warning.category, InsecureRequestWarning) for warning in w
80 |             ), "InsecureRequestWarning was unexpectedly raised"
81 | 
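
One caveat about the capture pattern used in all three classes above: `warnings.catch_warnings(record=True)` does not reset the warning filters, so a warning already emitted once at the same location earlier in the process can be deduplicated and never recorded. A minimal sketch of the more defensive form, using a stand-in `UserWarning` since it does not invoke dbt:

```python
# Minimal sketch: resetting the filter inside the context defeats the
# once-per-location deduplication, so the recorded list is reliable.
# UserWarning stands in for the InsecureRequestWarning raised by urllib3.
import warnings

with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")
    warnings.warn("insecure request", UserWarning)  # stand-in for the dbt run
    assert any(issubclass(x.category, UserWarning) for x in w)
```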


--------------------------------------------------------------------------------
/tests/functional/adapter/column_types/fixtures.py:
--------------------------------------------------------------------------------
 1 | model_sql = """
 2 | select
 3 |     cast(0 as tinyint) as tinyint_col,
 4 |     cast(1 as smallint) as smallint_col,
 5 |     cast(2 as integer) as integer_col,
 6 |     cast(2 as int) as int_col,
 7 |     cast(3 as bigint) as bigint_col,
 8 |     cast(4.0 as real) as real_col,
 9 |     cast(5.0 as double) as double_col,
10 |     cast(5.5 as double precision) as double_precision_col,
11 |     cast(6.0 as decimal) as decimal_col,
12 |     cast('7' as char) as char_col,
13 |     cast('8' as varchar(20)) as varchar_col
14 | """
15 | 
16 | schema_yml = """
17 | version: 2
18 | models:
19 |   - name: model
20 |     tests:
21 |       - is_type:
22 |           column_map:
23 |             tinyint_col: ['integer', 'number']
24 |             smallint_col: ['integer', 'number']
25 |             integer_col: ['integer', 'number']
26 |             int_col: ['integer', 'number']
27 |             bigint_col: ['integer', 'number']
28 |             real_col: ['float', 'number']
29 |             double_col: ['float', 'number']
30 |             double_precision_col: ['float', 'number']
31 |             decimal_col: ['numeric', 'number']
32 |             char_col: ['string', 'not number']
33 |             varchar_col: ['string', 'not number']
34 | """
35 | 
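
To tie this fixture back to `TrinoColumn.from_description` in column.py above: a small illustrative snippet (not part of the suite) showing what the parser extracts for the fixture's `varchar(20)` column and for `decimal(28, 6)`, the adapter's default numeric type from datatypes.sql.

```python
# Illustrative only: the attribute values follow directly from the parsing
# logic in dbt/adapters/trino/column.py above.
from dbt.adapters.trino import TrinoColumn

varchar_col = TrinoColumn.from_description("varchar_col", "varchar(20)")
print(varchar_col.dtype, varchar_col.char_size)  # varchar 20

decimal_col = TrinoColumn.from_description("decimal_col", "decimal(28, 6)")
print(decimal_col.dtype, decimal_col.numeric_precision, decimal_col.numeric_scale)
# decimal 28 6
```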


--------------------------------------------------------------------------------
/tests/functional/adapter/column_types/test_column_types.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes
 3 | 
 4 | from tests.functional.adapter.column_types.fixtures import model_sql, schema_yml
 5 | 
 6 | 
 7 | class TestTrinoColumnTypes(BaseColumnTypes):
 8 |     @pytest.fixture(scope="class")
 9 |     def models(self):
10 |         return {"model.sql": model_sql, "schema.yml": schema_yml}
11 | 
12 |     def test_run_and_test(self, project):
13 |         self.run_and_test()
14 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/constraints/fixtures.py:
--------------------------------------------------------------------------------
  1 | trino_model_contract_sql_header_sql = """
  2 | {{
  3 |   config(
  4 |     materialized = "table"
  5 |   )
  6 | }}
  7 | 
  8 | {% call set_sql_header(config) %}
  9 | set time zone 'Asia/Kolkata';
 10 | {%- endcall %}
 11 | select current_timezone() as column_name
 12 | """
 13 | 
 14 | trino_model_incremental_contract_sql_header_sql = """
 15 | {{
 16 |   config(
 17 |     materialized = "incremental",
 18 |     on_schema_change="append_new_columns"
 19 |   )
 20 | }}
 21 | 
 22 | {% call set_sql_header(config) %}
 23 | set time zone 'Asia/Kolkata';
 24 | {%- endcall %}
 25 | select current_timezone() as column_name
 26 | """
 27 | 
 28 | trino_model_schema_yml = """
 29 | version: 2
 30 | models:
 31 |   - name: my_model
 32 |     config:
 33 |       contract:
 34 |         enforced: true
 35 |     columns:
 36 |       - name: id
 37 |         quote: true
 38 |         data_type: integer
 39 |         description: hello
 40 |         constraints:
 41 |           - type: not_null
 42 |           - type: check
 43 |             expression: (id > 0)
 44 |         tests:
 45 |           - unique
 46 |       - name: color
 47 |         data_type: varchar
 48 |       - name: date_day
 49 |         data_type: varchar
 50 |   - name: my_model_error
 51 |     config:
 52 |       contract:
 53 |         enforced: true
 54 |     columns:
 55 |       - name: id
 56 |         data_type: integer
 57 |         description: hello
 58 |         constraints:
 59 |           - type: not_null
 60 |           - type: check
 61 |             expression: (id > 0)
 62 |         tests:
 63 |           - unique
 64 |       - name: color
 65 |         data_type: varchar
 66 |       - name: date_day
 67 |         data_type: varchar
 68 |   - name: my_model_wrong_order
 69 |     config:
 70 |       contract:
 71 |         enforced: true
 72 |     columns:
 73 |       - name: id
 74 |         data_type: integer
 75 |         description: hello
 76 |         constraints:
 77 |           - type: not_null
 78 |           - type: check
 79 |             expression: (id > 0)
 80 |         tests:
 81 |           - unique
 82 |       - name: color
 83 |         data_type: varchar
 84 |       - name: date_day
 85 |         data_type: varchar
 86 |   - name: my_model_wrong_name
 87 |     config:
 88 |       contract:
 89 |         enforced: true
 90 |     columns:
 91 |       - name: id
 92 |         data_type: integer
 93 |         description: hello
 94 |         constraints:
 95 |           - type: not_null
 96 |           - type: check
 97 |             expression: (id > 0)
 98 |         tests:
 99 |           - unique
100 |       - name: color
101 |         data_type: varchar
102 |       - name: date_day
103 |         data_type: varchar
104 | """
105 | 
106 | trino_constrained_model_schema_yml = """
107 | version: 2
108 | models:
109 |   - name: my_model
110 |     config:
111 |       contract:
112 |         enforced: true
113 |     constraints:
114 |       - type: check
115 |         expression: (id > 0)
116 |       - type: primary_key
117 |         columns: [ id ]
118 |       - type: unique
119 |         columns: [ color, date_day ]
120 |         name: strange_uniqueness_requirement
121 |     columns:
122 |       - name: id
123 |         quote: true
124 |         data_type: integer
125 |         description: hello
126 |         constraints:
127 |           - type: not_null
128 |         tests:
129 |           - unique
130 |       - name: color
131 |         data_type: varchar
132 |       - name: date_day
133 |         data_type: varchar
134 | """
135 | 
136 | trino_model_quoted_column_schema_yml = """
137 | version: 2
138 | models:
139 |   - name: my_model
140 |     config:
141 |       contract:
142 |         enforced: true
143 |       materialized: table
144 |     constraints:
145 |       - type: check
146 |         # this one is on the user
147 |         expression: ("from" = 'blue')
148 |         columns: [ '"from"' ]
149 |     columns:
150 |       - name: id
151 |         data_type: integer
152 |         description: hello
153 |         constraints:
154 |           - type: not_null
155 |         tests:
156 |           - unique
157 |       - name: from  # reserved word
158 |         quote: true
159 |         data_type: varchar
160 |         constraints:
161 |           - type: not_null
162 |       - name: date_day
163 |         data_type: varchar
164 | """
165 | 
166 | trino_model_contract_header_schema_yml = """
167 | version: 2
168 | models:
169 |   - name: my_model_contract_sql_header
170 |     config:
171 |       contract:
172 |         enforced: true
173 |     columns:
174 |       - name: column_name
175 |         data_type: varchar
176 | """
177 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/dbt_clone/test_dbt_clone.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.dbt_clone.fixtures import (
 3 |     custom_can_clone_tables_false_macros_sql,
 4 |     get_schema_name_sql,
 5 |     infinite_macros_sql,
 6 |     macros_sql,
 7 | )
 8 | from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseCloneNotPossible
 9 | 
10 | iceberg_macro_override_sql = """
11 | {% macro trino__current_timestamp() -%}
12 |     current_timestamp(6)
13 | {%- endmacro %}
14 | """
15 | 
16 | 
17 | class TestTrinoCloneNotPossible(BaseCloneNotPossible):
18 |     @pytest.fixture(scope="class")
19 |     def macros(self):
20 |         return {
21 |             "macros.sql": macros_sql,
22 |             "my_can_clone_tables.sql": custom_can_clone_tables_false_macros_sql,
23 |             "infinite_macros.sql": infinite_macros_sql,
24 |             "get_schema_name.sql": get_schema_name_sql,
25 |             "iceberg.sql": iceberg_macro_override_sql,
26 |         }
27 | 
28 |     # TODO: the method below should probably be implemented in the base class (on the dbt-core side)
29 |     @pytest.fixture(autouse=True)
30 |     def clean_up(self, project):
31 |         yield
32 |         with project.adapter.connection_named("__test"):
33 |             relation = project.adapter.Relation.create(
34 |                 database=project.database, schema=f"{project.test_schema}_seeds"
35 |             )
36 |             project.adapter.drop_schema(relation)
37 | 
38 |             relation = project.adapter.Relation.create(
39 |                 database=project.database, schema=project.test_schema
40 |             )
41 |             project.adapter.drop_schema(relation)
42 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/dbt_debug/test_dbt_debug.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.dbt_debug.test_dbt_debug import (
 3 |     BaseDebug,
 4 |     BaseDebugProfileVariable,
 5 | )
 6 | from dbt.tests.util import run_dbt
 7 | 
 8 | 
 9 | class TestDebugTrino(BaseDebug):
10 |     # TODO: the teardown method below should probably be implemented in the base class (on the dbt-core side)
11 |     @pytest.fixture(scope="function", autouse=True)
12 |     def teardown_method(self, project):
13 |         yield
14 |         project.run_sql(f"drop schema if exists {project.test_schema}")
15 | 
16 |     def test_ok_trino(self, project):
17 |         run_dbt(["debug"])
18 |         assert "ERROR" not in self.capsys.readouterr().out
19 | 
20 | 
21 | class TestDebugProfileVariableTrino(BaseDebugProfileVariable):
22 |     # TODO: the teardown method below should probably be implemented in the base class (on the dbt-core side)
23 |     @pytest.fixture(scope="function", autouse=True)
24 |     def teardown_method(self, project):
25 |         yield
26 |         project.run_sql(f"drop schema if exists {project.test_schema}")
27 | 
28 |     def test_ok_trino(self, project):
29 |         run_dbt(["debug"])
30 |         assert "ERROR" not in self.capsys.readouterr().out
31 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/dbt_show/test_dbt_show.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.dbt_show.test_dbt_show import BaseShowLimit, BaseShowSqlHeader
 2 | 
 3 | 
 4 | class TestTrinoShowSqlHeader(BaseShowSqlHeader):
 5 |     pass
 6 | 
 7 | 
 8 | class TestTrinoShowLimit(BaseShowLimit):
 9 |     pass
10 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/empty/test_empty.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.empty.test_empty import (
 2 |     BaseTestEmpty,
 3 |     BaseTestEmptyInlineSourceRef,
 4 | )
 5 | 
 6 | 
 7 | class TestTrinoEmpty(BaseTestEmpty):
 8 |     pass
 9 | 
10 | 
11 | class TestTrinoEmptyInlineSourceRef(BaseTestEmptyInlineSourceRef):
12 |     pass
13 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/fixture_datediff.py:
--------------------------------------------------------------------------------
 1 | seeds__data_datediff_csv = """first_date,second_date,datepart,result
 2 | 2018-01-01 01:00:00,2018-01-02 01:00:00,day,1
 3 | 2018-01-01 01:00:00,2018-02-01 01:00:00,month,1
 4 | 2018-01-01 01:00:00,2019-01-01 01:00:00,year,1
 5 | 2018-01-01 01:00:00,2018-01-01 02:00:00,hour,1
 6 | 2018-01-01 01:00:00,2018-01-01 02:01:00,minute,61
 7 | 2018-01-01 01:00:00,2018-01-01 02:00:01,second,3601
 8 | 2019-12-31 00:00:00,2019-12-27 00:00:00,week,-1
 9 | 2019-12-31 00:00:00,2019-12-30 00:00:00,week,0
10 | 2019-12-31 00:00:00,2020-01-02 00:00:00,week,0
11 | 2019-12-31 00:00:00,2020-01-06 02:00:00,week,1
12 | ,2018-01-01 02:00:00,hour,
13 | 2018-01-01 02:00:00,,hour,
14 | """
15 | 
16 | 
17 | models__test_datediff_sql = """
18 | with data as (
19 | 
20 |     select * from {{ ref('data_datediff') }}
21 | 
22 | )
23 | 
24 | select
25 | 
26 |     case
27 |         when datepart = 'second' then {{ datediff('first_date', 'second_date', 'second') }}
28 |         when datepart = 'minute' then {{ datediff('first_date', 'second_date', 'minute') }}
29 |         when datepart = 'hour' then {{ datediff('first_date', 'second_date', 'hour') }}
30 |         when datepart = 'day' then {{ datediff('first_date', 'second_date', 'day') }}
31 |         when datepart = 'week' then {{ datediff('first_date', 'second_date', 'week') }}
32 |         when datepart = 'month' then {{ datediff('first_date', 'second_date', 'month') }}
33 |         when datepart = 'year' then {{ datediff('first_date', 'second_date', 'year') }}
34 |         else null
35 |     end as actual,
36 |     result as expected
37 | 
38 | from data
39 | 
40 | -- Also test correct casting of literal values.
41 | 
42 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "millisecond") }} as actual, 1 as expected
43 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "second") }} as actual, 1 as expected
44 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "minute") }} as actual, 1 as expected
45 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "hour") }} as actual, 1 as expected
46 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "day") }} as actual, 1 as expected
47 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-03 00:00:00.000000'", "week") }} as actual, 1 as expected
48 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "month") }} as actual, 1 as expected
49 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "quarter") }} as actual, 1 as expected
50 | union all select {{ datediff("'1999-12-31 23:59:59.999000'", "'2000-01-01 00:00:00.000000'", "year") }} as actual, 1 as expected
51 | """
52 | 
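
For orientation, a sketch of the Trino SQL a datediff call above is expected to compile down to, assuming the adapter delegates to Trino's built-in date_diff function (the exact casts emitted by the shipped macro may differ):

    datediff_compiled_sketch = """
    select date_diff('day',
                     cast('2018-01-01 01:00:00' as timestamp),
                     cast('2018-01-02 01:00:00' as timestamp))  -- returns 1, matching the first seed row
    """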


--------------------------------------------------------------------------------
/tests/functional/adapter/hooks/data/seed_model.sql:
--------------------------------------------------------------------------------
 1 | drop table if exists {schema}.on_model_hook;
 2 | 
 3 | create table {schema}.on_model_hook (
 4 |     test_state       VARCHAR, -- start|end
 5 |     target_dbname    VARCHAR,
 6 |     target_host      VARCHAR,
 7 |     target_name      VARCHAR,
 8 |     target_schema    VARCHAR,
 9 |     target_type      VARCHAR,
10 |     target_user      VARCHAR,
11 |     target_pass      VARCHAR,
12 |     target_threads   INTEGER,
13 |     run_started_at   VARCHAR,
14 |     invocation_id    VARCHAR,
15 |     thread_id        VARCHAR
16 | );
17 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/hooks/data/seed_run.sql:
--------------------------------------------------------------------------------
 1 | drop table if exists {schema}.on_run_hook;
 2 | 
 3 | create table {schema}.on_run_hook (
 4 |     test_state       VARCHAR, -- start|end
 5 |     target_dbname    VARCHAR,
 6 |     target_host      VARCHAR,
 7 |     target_name      VARCHAR,
 8 |     target_schema    VARCHAR,
 9 |     target_type      VARCHAR,
10 |     target_user      VARCHAR,
11 |     target_pass      VARCHAR,
12 |     target_threads   INTEGER,
13 |     run_started_at   VARCHAR,
14 |     invocation_id    VARCHAR,
15 |     thread_id        VARCHAR
16 | );
17 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/hooks/test_hooks_delete.py:
--------------------------------------------------------------------------------
1 | # Test hooks that run DELETE statements
 2 | import pytest
 3 | from dbt.tests.util import run_dbt, run_sql_with_adapter
 4 | 
 5 | seed = """
 6 | id,name,some_date
 7 | 1,Easton,1981-05-20
 8 | 2,Lillian,1978-09-03
 9 | 3,Jeremiah,1982-03-11
10 | 4,Nolan,1976-05-06
11 | 5,Hannah,1982-06-23
12 | 6,Eleanor,1991-08-10
13 | 7,Lily,1971-03-29
14 | 8,Jonathan,1988-02-26
15 | 9,Adrian,1994-02-09
16 | 10,Nora,1976-03-01
17 | """.lstrip()
18 | 
19 | model = """
20 |   {{ config(
21 |         materialized="table",
22 |         on_table_exists = 'drop'
23 |      )
24 |   }}
25 |   select * from {{ ref('seed') }}
26 | """
27 | 
28 | 
29 | class BaseTestHooksDelete:
30 |     @pytest.fixture(scope="class")
31 |     def seeds(self):
32 |         return {
33 |             "seed.csv": seed,
34 |         }
35 | 
36 |     @pytest.fixture(scope="class")
37 |     def models(self):
38 |         return {
39 |             "model.sql": model,
40 |         }
41 | 
42 |     @pytest.fixture(scope="class")
43 |     def project_config_update(self):
44 |         return {
45 |             "models": {
46 |                 "pre-hook": "DELETE FROM seed WHERE name IN ('Jeremiah','Eleanor');",
47 |                 "post-hook": "DELETE FROM seed WHERE name IN ('Nolan','Jonathan','Nora');",
48 |             }
49 |         }
50 | 
51 |     def test_pre_and_post_run_hooks(self, project, dbt_profile_target):
52 |         # Run seed
53 |         results = run_dbt(["seed"], expect_pass=True)
54 |         assert len(results) == 1
55 | 
56 |         # Check if table has all rows
57 |         sql_seed = "SELECT COUNT(*) from seed"
58 |         query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch="all")
59 |         assert query_results[0][0] == 10
60 | 
61 |         # Run model, hooks should run DELETE statements
62 |         results = run_dbt(["run"], expect_pass=True)
63 |         assert len(results) == 1
64 | 
65 |         # 2 rows were deleted in pre-hook
66 |         sql_model = "SELECT COUNT(*) from model"
67 |         query_results = run_sql_with_adapter(project.adapter, sql_model, fetch="all")
68 |         assert query_results[0][0] == 8
69 | 
70 |         # 2 rows were deleted in pre-hook, and 3 in post-hook
71 |         query_results = run_sql_with_adapter(project.adapter, sql_seed, fetch="all")
72 |         assert query_results[0][0] == 5
73 | 
74 | 
75 | @pytest.mark.delta
76 | class TestBaseTestHooksDeleteDelta(BaseTestHooksDelete):
77 |     pass
78 | 
79 | 
80 | @pytest.mark.iceberg
81 | class TestBaseTestHooksDeleteIceberg(BaseTestHooksDelete):
82 |     pass
83 | 
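
The asserted counts follow directly from the hook ordering; a sketch of the effective statement sequence for a single `dbt run` against the 10-row seed (schema qualification omitted):

    # pre-hook:  DELETE FROM seed WHERE name IN ('Jeremiah','Eleanor')        -> seed: 10 - 2 = 8 rows
    # model:     CREATE TABLE model AS SELECT * FROM seed                     -> model: 8 rows
    # post-hook: DELETE FROM seed WHERE name IN ('Nolan','Jonathan','Nora')   -> seed: 8 - 3 = 5 rows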


--------------------------------------------------------------------------------
/tests/functional/adapter/hooks/test_model_hooks.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.hooks import test_model_hooks as core_base
 3 | 
 4 | 
 5 | class TestTrinoPrePostModelHooks(core_base.TestPrePostModelHooks):
 6 |     def check_hooks(self, state, project, host, count=1):
 7 |         self.get_ctx_vars(state, count=count, project=project)
 8 | 
 9 | 
10 | class TestTrinoPrePostModelHooksUnderscores(core_base.TestPrePostModelHooksUnderscores):
11 |     def check_hooks(self, state, project, host, count=1):
12 |         self.get_ctx_vars(state, count=count, project=project)
13 | 
14 | 
15 | class TestTrinoHookRefs(core_base.TestHookRefs):
16 |     def check_hooks(self, state, project, host, count=1):
17 |         self.get_ctx_vars(state, count=count, project=project)
18 | 
19 | 
20 | @pytest.mark.iceberg
21 | class TestTrinoPrePostModelHooksOnSeeds(core_base.TestPrePostModelHooksOnSeeds):
22 |     def check_hooks(self, state, project, host, count=1):
23 |         self.get_ctx_vars(state, count=count, project=project)
24 | 
25 |     @pytest.fixture(scope="class")
26 |     def project_config_update(self):
27 |         return {
28 |             "seed-paths": ["seeds"],
29 |             "models": {},
30 |             "seeds": {
31 |                 "+post-hook": [
32 |                     "alter table {{ this }} add column new_col int",
33 |                     "update {{ this }} set new_col = 1 where 1=1",
34 |                 ],
35 |                 "quote_columns": True,
36 |             },
37 |         }
38 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/hooks/test_run_hooks.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.hooks.test_run_hooks import (
 3 |     BaseAfterRunHooks,
 4 |     BasePrePostRunHooks,
 5 | )
 6 | 
 7 | 
 8 | class TestPrePostRunHooksTrino(BasePrePostRunHooks):
 9 |     @pytest.fixture(scope="class")
10 |     def project_config_update(self):
11 |         return {
12 |             # The create and drop table statements here validate that these hooks run
13 |             # in the same order that they are defined. Drop before create is an error.
14 |             # Also check that the table does not exist below.
15 |             "on-run-start": [
16 |                 "{{ custom_run_hook('start', target, run_started_at, invocation_id) }}",
17 |                 "create table {{ target.schema }}.start_hook_order_test ( id int )",
18 |                 "drop table {{ target.schema }}.start_hook_order_test",
19 |                 "{{ log(env_var('TERM_TEST'), info=True) }}",
20 |             ],
21 |             "on-run-end": [
22 |                 "{{ custom_run_hook('end', target, run_started_at, invocation_id) }}",
23 |                 "create table {{ target.schema }}.end_hook_order_test ( id int )",
24 |                 "drop table {{ target.schema }}.end_hook_order_test",
25 |                 "create table {{ target.schema }}.schemas ( schema varchar )",
26 |                 "insert into {{ target.schema }}.schemas (schema) values {% for schema in schemas %}( '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}",
27 |                 "create table {{ target.schema }}.db_schemas ( db varchar, schema varchar )",
28 |                 "insert into {{ target.schema }}.db_schemas (db, schema) values {% for db, schema in database_schemas %}('{{ db }}', '{{ schema }}' ){% if not loop.last %},{% endif %}{% endfor %}",
29 |             ],
30 |             "seeds": {
31 |                 "quote_columns": False,
32 |             },
33 |         }
34 | 
35 |     def check_hooks(self, state, project, host):
36 |         ctx = self.get_ctx_vars(state, project)
37 | 
38 |         assert ctx["test_state"] == state
39 |         assert ctx["target_dbname"] == ""
40 |         assert ctx["target_host"] == host
41 |         assert ctx["target_name"] == "default"
42 |         assert ctx["target_schema"] == project.test_schema
43 |         assert ctx["target_threads"] == 4
44 |         assert ctx["target_type"] == project.adapter_type
45 |         assert "admin" in ctx["target_user"]
46 |         assert ctx["target_pass"] == ""
47 | 
48 |         assert (
49 |             ctx["run_started_at"] is not None and len(ctx["run_started_at"]) > 0
50 |         ), "run_started_at was not set"
51 |         assert (
52 |             ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0
53 |         ), "invocation_id was not set"
54 | 
55 | 
56 | class TestAfterRunHooksTrino(BaseAfterRunHooks):
57 |     pass
58 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_incremental_merge.py:
--------------------------------------------------------------------------------
  1 | import pytest
  2 | from dbt.tests.adapter.incremental.test_incremental_unique_id import (
  3 |     BaseIncrementalUniqueKey,
  4 |     models__duplicated_unary_unique_key_list_sql,
  5 |     models__empty_str_unique_key_sql,
  6 |     models__empty_unique_key_list_sql,
  7 |     models__no_unique_key_sql,
  8 |     models__nontyped_trinary_unique_key_list_sql,
  9 |     models__not_found_unique_key_list_sql,
 10 |     models__not_found_unique_key_sql,
 11 |     models__str_unique_key_sql,
 12 |     models__trinary_unique_key_list_sql,
 13 |     models__unary_unique_key_list_sql,
 14 |     seeds__seed_csv,
 15 | )
 16 | 
 17 | seeds__duplicate_insert_sql = """
 18 | -- Insert statement which when applied to seed.csv triggers the inplace
 19 | --   overwrite strategy of incremental models. Seed and incremental model
 20 | --   diverge.
 21 | 
 22 | -- insert new row, which should not be in incremental model
 23 | --  with primary or first three columns unique
 24 | insert into {schema}.seed
 25 |     (state, county, city, last_visit_date)
 26 | values ('CT','Hartford','Hartford',DATE '2022-02-14');
 27 | 
 28 | """
 29 | 
 30 | seeds__add_new_rows_sql = """
 31 | -- Insert statement which when applied to seed.csv sees incremental model
 32 | --   grow in size while not (necessarily) diverging from the seed itself.
 33 | 
 34 | -- insert two new rows, both of which should be in incremental model
 35 | --   with any unique columns
 36 | insert into {schema}.seed
 37 |     (state, county, city, last_visit_date)
 38 | values ('WA','King','Seattle',DATE '2022-02-01');
 39 | 
 40 | insert into {schema}.seed
 41 |     (state, county, city, last_visit_date)
 42 | values ('CA','Los Angeles','Los Angeles',DATE '2022-02-01');
 43 | 
 44 | """
 45 | 
 46 | models__expected__one_str__overwrite_sql = """
 47 | {{
 48 |     config(
 49 |         materialized='table'
 50 |     )
 51 | }}
 52 | 
 53 | select
 54 |     'CT' as state,
 55 |     'Hartford' as county,
 56 |     'Hartford' as city,
 57 |     cast('2022-02-14' as date) as last_visit_date
 58 | union all
 59 | select 'MA','Suffolk','Boston',DATE '2020-02-12'
 60 | union all
 61 | select 'NJ','Mercer','Trenton',DATE '2022-01-01'
 62 | union all
 63 | select 'NY','Kings','Brooklyn',DATE '2021-04-02'
 64 | union all
 65 | select 'NY','New York','Manhattan',DATE '2021-04-01'
 66 | union all
 67 | select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
 68 | union all
 69 | select 'CO','Denver',null,DATE '2021-06-18'
 70 | 
 71 | """
 72 | 
 73 | models__expected__unique_key_list__inplace_overwrite_sql = """
 74 | {{
 75 |     config(
 76 |         materialized='table'
 77 |     )
 78 | }}
 79 | 
 80 | select
 81 |     'CT' as state,
 82 |     'Hartford' as county,
 83 |     'Hartford' as city,
 84 |     cast('2022-02-14' as date) as last_visit_date
 85 | union all
 86 | select 'MA','Suffolk','Boston',DATE '2020-02-12'
 87 | union all
 88 | select 'NJ','Mercer','Trenton',DATE '2022-01-01'
 89 | union all
 90 | select 'NY','Kings','Brooklyn',DATE '2021-04-02'
 91 | union all
 92 | select 'NY','New York','Manhattan',DATE '2021-04-01'
 93 | union all
 94 | select 'PA','Philadelphia','Philadelphia',DATE '2021-05-21'
 95 | union all
 96 | select 'CO','Denver',null,DATE '2021-06-18'
 97 | 
 98 | """
 99 | 
100 | 
101 | class TrinoIncrementalUniqueKey(BaseIncrementalUniqueKey):
102 |     @pytest.fixture(scope="class")
103 |     def seeds(self):
104 |         return {
105 |             "duplicate_insert.sql": seeds__duplicate_insert_sql,
106 |             "seed.csv": seeds__seed_csv,
107 |             "add_new_rows.sql": seeds__add_new_rows_sql,
108 |         }
109 | 
110 |     @pytest.fixture(scope="class")
111 |     def models(self):
112 |         return {
113 |             "trinary_unique_key_list.sql": models__trinary_unique_key_list_sql,
114 |             "nontyped_trinary_unique_key_list.sql": models__nontyped_trinary_unique_key_list_sql,
115 |             "unary_unique_key_list.sql": models__unary_unique_key_list_sql,
116 |             "not_found_unique_key.sql": models__not_found_unique_key_sql,
117 |             "empty_unique_key_list.sql": models__empty_unique_key_list_sql,
118 |             "no_unique_key.sql": models__no_unique_key_sql,
119 |             "empty_str_unique_key.sql": models__empty_str_unique_key_sql,
120 |             "str_unique_key.sql": models__str_unique_key_sql,
121 |             "duplicated_unary_unique_key_list.sql": models__duplicated_unary_unique_key_list_sql,
122 |             "not_found_unique_key_list.sql": models__not_found_unique_key_list_sql,
123 |             "expected": {
124 |                 "one_str__overwrite.sql": models__expected__one_str__overwrite_sql,
125 |                 "unique_key_list__inplace_overwrite.sql": models__expected__unique_key_list__inplace_overwrite_sql,
126 |             },
127 |         }
128 | 
129 | 
130 | @pytest.mark.iceberg
131 | class TestIcebergIncrementalMerge(TrinoIncrementalUniqueKey):
132 |     @pytest.fixture(scope="class")
133 |     def project_config_update(self):
134 |         return {
135 |             "name": "incremental",
136 |             "models": {"+incremental_strategy": "merge"},
137 |             "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
138 |         }
139 | 
140 | 
141 | @pytest.mark.delta
142 | class TestDeltaIncrementalMerge(TrinoIncrementalUniqueKey):
143 |     @pytest.fixture(scope="class")
144 |     def project_config_update(self):
145 |         return {
146 |             "name": "incremental",
147 |             "models": {
148 |                 "+on_table_exists": "drop",
149 |                 "+incremental_strategy": "merge",
150 |             },
151 |             "seeds": {"incremental": {"seed": {"+column_types": {"some_date": "date"}}}},
152 |         }
153 | 
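
A sketch of the general shape of the MERGE statement the merge strategy above produces on Trino; the relation names and column lists are abbreviated for illustration, and this is not the adapter's verbatim output:

    merge_sql_sketch = """
    merge into {schema}.str_unique_key as dbt_internal_dest
    using str_unique_key__dbt_tmp as dbt_internal_source
    on dbt_internal_source.state = dbt_internal_dest.state
    when matched then update set
        county = dbt_internal_source.county, city = dbt_internal_source.city
    when not matched then insert (state, county, city, last_visit_date)
        values (dbt_internal_source.state, dbt_internal_source.county,
                dbt_internal_source.city, dbt_internal_source.last_visit_date)
    """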


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_incremental_microbatch.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from dbt.tests.adapter.incremental.test_incremental_microbatch import BaseMicrobatch
3 | 
4 | 
5 | @pytest.mark.iceberg
6 | class TestTrinoMicrobatchIceberg(BaseMicrobatch):
7 |     pass
8 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_incremental_predicates.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.incremental.test_incremental_predicates import (
 3 |     BaseIncrementalPredicates,
 4 | )
 5 | 
 6 | 
 7 | @pytest.mark.iceberg
 8 | class TestIcebergPredicatesDeleteInsertTrino(BaseIncrementalPredicates):
 9 |     @pytest.fixture(scope="class")
10 |     def project_config_update(self):
11 |         return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}
12 | 
13 | 
14 | @pytest.mark.delta
15 | class TestDeltaPredicatesDeleteInsertTrino(BaseIncrementalPredicates):
16 |     @pytest.fixture(scope="class")
17 |     def project_config_update(self):
18 |         return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}
19 | 
20 | 
21 | @pytest.mark.iceberg
22 | class TestIcebergIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):
23 |     @pytest.fixture(scope="class")
24 |     def project_config_update(self):
25 |         return {
26 |             "models": {
27 |                 "+incremental_predicates": ["dbt_internal_dest.id != 2"],
28 |                 "+incremental_strategy": "merge",
29 |             }
30 |         }
31 | 
32 | 
33 | @pytest.mark.delta
34 | class TestDeltaIncrementalPredicatesMergeTrino(BaseIncrementalPredicates):
35 |     @pytest.fixture(scope="class")
36 |     def project_config_update(self):
37 |         return {
38 |             "models": {
39 |                 "+incremental_predicates": ["dbt_internal_dest.id != 2"],
40 |                 "+incremental_strategy": "merge",
41 |             }
42 |         }
43 | 
44 | 
45 | @pytest.mark.iceberg
46 | class TestIcebergPredicatesMergeTrino(BaseIncrementalPredicates):
47 |     @pytest.fixture(scope="class")
48 |     def project_config_update(self):
49 |         return {
50 |             "models": {
51 |                 "+predicates": ["dbt_internal_dest.id != 2"],
52 |                 "+incremental_strategy": "merge",
53 |             }
54 |         }
55 | 
56 | 
57 | @pytest.mark.delta
58 | class TestDeltaPredicatesMergeTrino(BaseIncrementalPredicates):
59 |     @pytest.fixture(scope="class")
60 |     def project_config_update(self):
61 |         return {
62 |             "models": {
63 |                 "+predicates": ["dbt_internal_dest.id != 2"],
64 |                 "+incremental_strategy": "merge",
65 |             }
66 |         }
67 | 
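
The predicate entries above are appended to the generated merge join condition, which is why the merge variants reference the dbt_internal_dest alias; an assumed (not verbatim) sketch of the resulting condition:

    # assumed shape of the join condition after predicate injection:
    #   on dbt_internal_source.id = dbt_internal_dest.id
    #      and dbt_internal_dest.id != 2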


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_incremental_views_enabled.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.util import run_dbt, run_dbt_and_capture
 3 | 
 4 | from tests.functional.adapter.materialization.fixtures import model_sql, seed_csv
 5 | 
 6 | 
 7 | class BaseViewsEnabled:
 8 |     # everything that goes in the "seeds" directory
 9 |     @pytest.fixture(scope="class")
10 |     def seeds(self):
11 |         return {
12 |             "seed.csv": seed_csv,
13 |         }
14 | 
15 |     # everything that goes in the "models" directory
16 |     @pytest.fixture(scope="class")
17 |     def models(self):
18 |         return {
19 |             "materialization.sql": model_sql,
20 |         }
21 | 
22 | 
23 | class TestViewsEnabledTrue(BaseViewsEnabled):
24 |     """
25 |     Testing without the views_enabled config specified, which defaults to views_enabled = True
26 |     """
27 | 
28 |     @pytest.fixture(scope="class")
29 |     def project_config_update(self):
30 |         return {
31 |             "name": "views_enabled_true",
32 |             "models": {"+materialized": "incremental"},
33 |             "seeds": {
34 |                 "+column_types": {"some_date": "timestamp(6)"},
35 |             },
36 |         }
37 | 
38 |     # The actual sequence of dbt commands and assertions
39 |     # pytest will take care of all "setup" + "teardown"
40 |     def test_run_seed_test(self, project):
41 |         # seed seeds
42 |         results = run_dbt(["seed"], expect_pass=True)
43 |         assert len(results) == 1
44 | 
45 |         results = run_dbt(["run"], expect_pass=True)
46 |         assert len(results) == 1
47 | 
48 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
49 |         assert len(results) == 1
50 |         assert (
51 |             f'''create or replace view
52 |     "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'''
53 |             in logs
54 |         )
55 | 
56 | 
57 | class TestViewsEnabledFalse(BaseViewsEnabled):
58 |     """
59 |     Testing the views_enabled = False configuration for incremental materialization
60 |     """
61 | 
62 |     @pytest.fixture(scope="class")
63 |     def project_config_update(self):
64 |         return {
65 |             "name": "views_enabled_false",
66 |             "models": {"+materialized": "incremental", "+views_enabled": False},
67 |             "seeds": {
68 |                 "+column_types": {"some_date": "timestamp(6)"},
69 |             },
70 |         }
71 | 
72 |     # The actual sequence of dbt commands and assertions
73 |     # pytest will take care of all "setup" + "teardown"
74 |     def test_run_seed_test(self, project):
75 |         # seed seeds
76 |         results = run_dbt(["seed"], expect_pass=True)
77 |         assert len(results) == 1
78 | 
79 |         results = run_dbt(["run"], expect_pass=True)
80 |         assert len(results) == 1
81 | 
82 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
83 |         assert len(results) == 1
84 |         assert (
85 |             f'create table "{project.database}"."{project.test_schema}"."materialization__dbt_tmp"'
86 |             in logs
87 |         )
88 | 
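
Both tests assert on the temporary-relation DDL in the debug logs: by default the incremental materialization stages new rows through a temporary view, while disabling views_enabled stages them through a temporary table. A sketch of toggling the flag per model instead of project-wide, assuming the config is also honored at the model level:

    views_disabled_model_sql = """
    {{ config(materialized='incremental', views_enabled=false) }}
    select * from {{ ref('seed') }}
    """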


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_prepared_statements.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.util import check_relations_equal, run_dbt
 3 | 
 4 | from tests.functional.adapter.materialization.fixtures import (
 5 |     model_sql,
 6 |     profile_yml,
 7 |     seed_csv,
 8 | )
 9 | 
10 | 
11 | class PreparedStatementsBase:
12 |     """
13 |     Testing the prepared_statements_enabled profile configuration using the dbt
14 |     seed, run, and test commands, and validating data load correctness.
15 |     """
16 | 
17 |     # configuration in dbt_project.yml
18 |     @pytest.fixture(scope="class")
19 |     def project_config_update(self):
20 |         return {
21 |             "name": "test_prepared_statements",
22 |             "seeds": {
23 |                 "+column_types": {"some_date": "timestamp(6)"},
24 |             },
25 |         }
26 | 
27 |     # everything that goes in the "seeds" directory
28 |     @pytest.fixture(scope="class")
29 |     def seeds(self):
30 |         return {
31 |             "seed.csv": seed_csv,
32 |         }
33 | 
34 |     # everything that goes in the "models" directory
35 |     @pytest.fixture(scope="class")
36 |     def models(self):
37 |         return {
38 |             "materialization.sql": model_sql,
39 |             "materialization.yml": profile_yml,
40 |         }
41 | 
42 |     def retrieve_num_prepared_statements(self, trino_connection):
43 |         cur = trino_connection.cursor()
44 |         cur.execute("select query from system.runtime.queries order by query_id desc limit 3")
45 |         result = cur.fetchall()
46 |         return len(list(filter(lambda rec: "EXECUTE" in rec[0], result)))
47 | 
48 |     # The actual sequence of dbt commands and assertions
49 |     # pytest will take care of all "setup" + "teardown"
50 |     def run_seed_with_prepared_statements(
51 |         self, project, trino_connection, expected_num_prepared_statements
52 |     ):
53 |         # seed seeds
54 |         results = run_dbt(["seed"], expect_pass=True)
55 |         assert len(results) == 1
56 | 
57 |         # Check if the seed command is using prepared statements
58 |         assert (
59 |             self.retrieve_num_prepared_statements(trino_connection)
60 |             == expected_num_prepared_statements
61 |         )
62 | 
63 |         # run models
64 |         results = run_dbt(["run"], expect_pass=True)
65 |         assert len(results) == 1
66 |         # test tests
67 |         results = run_dbt(["test"], expect_pass=True)
68 |         assert len(results) == 3
69 | 
70 |         # check if the data was loaded correctly
71 |         check_relations_equal(project.adapter, ["seed", "materialization"])
72 | 
73 | 
74 | @pytest.mark.prepared_statements_disabled
75 | @pytest.mark.skip_profile("starburst_galaxy")
76 | class TestPreparedStatementsDisabled(PreparedStatementsBase):
77 |     def test_run_seed_with_prepared_statements_disabled(self, project, trino_connection):
78 |         self.run_seed_with_prepared_statements(project, trino_connection, 0)
79 | 
80 | 
81 | @pytest.mark.skip_profile("starburst_galaxy")
82 | class TestPreparedStatementsEnabled(PreparedStatementsBase):
83 |     def test_run_seed_with_prepared_statements_enabled(self, project, trino_connection):
84 |         self.run_seed_with_prepared_statements(project, trino_connection, 1)
85 | 
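
A standalone sketch of the check performed by retrieve_num_prepared_statements above, issued through the trino Python client directly; host, port, and user are placeholder connection details:

    import trino

    conn = trino.dbapi.connect(host="localhost", port=8080, user="admin")  # placeholder target
    cur = conn.cursor()
    cur.execute("select query from system.runtime.queries order by query_id desc limit 3")
    prepared = [row for row in cur.fetchall() if "EXECUTE" in row[0]]
    print(f"{len(prepared)} of the last 3 queries used prepared statements")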


--------------------------------------------------------------------------------
/tests/functional/adapter/materialization/test_view_security.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.util import check_relations_equal, run_dbt
 3 | 
 4 | from tests.functional.adapter.materialization.fixtures import (
 5 |     model_sql,
 6 |     profile_yml,
 7 |     seed_csv,
 8 | )
 9 | 
10 | 
11 | class TestViewSecurity:
12 |     """
13 |     Testing the view_security = 'invoker' configuration for view materialization,
14 |     using the dbt seed, run, and test commands, and validating data load correctness.
15 |     """
16 | 
17 |     # configuration in dbt_project.yml
18 |     @pytest.fixture(scope="class")
19 |     def project_config_update(self):
20 |         return {
21 |             "name": "view_security",
22 |             "models": {"+materialized": "view", "+view_security": "invoker"},
23 |             "seeds": {
24 |                 "+column_types": {"some_date": "timestamp(6)"},
25 |             },
26 |         }
27 | 
28 |     # everything that goes in the "seeds" directory
29 |     @pytest.fixture(scope="class")
30 |     def seeds(self):
31 |         return {
32 |             "seed.csv": seed_csv,
33 |         }
34 | 
35 |     # everything that goes in the "models" directory
36 |     @pytest.fixture(scope="class")
37 |     def models(self):
38 |         return {
39 |             "materialization.sql": model_sql,
40 |             "materialization.yml": profile_yml,
41 |         }
42 | 
43 |     # The actual sequence of dbt commands and assertions
44 |     # pytest will take care of all "setup" + "teardown"
45 |     def test_run_seed_test(self, project):
46 |         # seed seeds
47 |         results = run_dbt(["seed"], expect_pass=True)
48 |         assert len(results) == 1
49 |         # run models
50 |         results = run_dbt(["run"], expect_pass=True)
51 |         assert len(results) == 1
52 |         # test tests
53 |         results = run_dbt(["test"], expect_pass=True)
54 |         assert len(results) == 3
55 | 
56 |         # check if the data was loaded correctly
57 |         check_relations_equal(project.adapter, ["seed", "materialization"])
58 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/materialized_view_tests/test_materialized_view_dbt_core.py:
--------------------------------------------------------------------------------
 1 | from typing import Optional, Tuple
 2 | 
 3 | import pytest
 4 | from dbt.adapters.base.relation import BaseRelation
 5 | from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic
 6 | from dbt.tests.util import get_model_file, run_dbt, run_sql_with_adapter, set_model_file
 7 | 
 8 | from tests.functional.adapter.materialized_view_tests.utils import query_relation_type
 9 | 
10 | 
11 | @pytest.mark.iceberg
12 | class TestTrinoMaterializedViewsBasic(MaterializedViewBasic):
13 |     @staticmethod
14 |     def insert_record(project, table: BaseRelation, record: Tuple[int, int]):
15 |         my_id, value = record
16 |         project.run_sql(f"insert into {table} (id, value) values ({my_id}, {value})")
17 | 
18 |     @staticmethod
19 |     def refresh_materialized_view(project, materialized_view: BaseRelation):
20 |         sql = f"refresh materialized view {materialized_view}"
21 |         project.run_sql(sql)
22 | 
23 |     @staticmethod
24 |     def query_row_count(project, relation: BaseRelation) -> int:
25 |         sql = f"select count(*) from {relation}"
26 |         return project.run_sql(sql, fetch="one")[0]
27 | 
28 |     @staticmethod
29 |     def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
30 |         return query_relation_type(project, relation)
31 | 
32 |     # TODO: remove the `setup` fixture once CASCADE is supported in the Iceberg, Delta, and Hive connectors
33 |     @pytest.fixture(scope="function", autouse=True)
34 |     def setup(self, project, my_materialized_view):
35 |         run_dbt(["seed"])
36 |         run_dbt(["run", "--models", my_materialized_view.identifier, "--full-refresh"])
37 | 
38 |         # the tests touch these files, store their contents in memory
39 |         initial_model = get_model_file(project, my_materialized_view)
40 | 
41 |         yield
42 | 
43 |         # and then reset them after the test runs
44 |         set_model_file(project, my_materialized_view, initial_model)
45 | 
46 |         # Drop materialized views first, then drop schema
47 |         sql = "select * from system.metadata.materialized_views"
48 |         results = run_sql_with_adapter(project.adapter, sql, fetch="all")
49 |         for mv in results:
50 |             project.run_sql(f"drop materialized view {mv[0]}.{mv[1]}.{mv[2]}")
51 | 
52 |         relation = project.adapter.Relation.create(
53 |             database=project.database, schema=project.test_schema
54 |         )
55 |         project.adapter.drop_schema(relation)
56 | 
57 |     @pytest.mark.skip(
58 |         reason="""
59 |     on iceberg:
60 |     If the data is outdated, the materialized view behaves like a normal view,
61 |     and the data is queried directly from the base tables.
62 |     https://trino.io/docs/current/connector/iceberg.html#materialized-views
63 |     """
64 |     )
65 |     def test_materialized_view_only_updates_after_refresh(self):
66 |         pass
67 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/materialized_view_tests/utils.py:
--------------------------------------------------------------------------------
 1 | from typing import Optional
 2 | 
 3 | from dbt.adapters.base.relation import BaseRelation
 4 | 
 5 | from dbt.adapters.trino.relation import TrinoRelation
 6 | 
 7 | 
 8 | def query_relation_type(project, relation: BaseRelation) -> Optional[str]:
 9 |     assert isinstance(relation, TrinoRelation)
10 |     sql = f"""
11 |     select
12 |       case when mv.name is not null then 'materialized_view'
13 |            when t.table_type = 'BASE TABLE' then 'table'
14 |            when t.table_type = 'VIEW' then 'view'
15 |            else t.table_type
16 |       end as table_type
17 |     from {relation.information_schema()}.tables t
18 |     left join system.metadata.materialized_views mv
19 |           on mv.catalog_name = t.table_catalog and mv.schema_name = t.table_schema and mv.name = t.table_name
20 |     where t.table_schema = '{relation.schema.lower()}'
21 |           and (mv.catalog_name is null or mv.catalog_name = '{relation.database.lower()}')
22 |           and (mv.schema_name is null or mv.schema_name = '{relation.schema.lower()}')
23 |           and t.table_name = '{relation.identifier.lower()}'
24 |     """
25 |     results = project.run_sql(sql, fetch="all")
26 |     if len(results) == 0:
27 |         return None
28 |     elif len(results) > 1:
29 |         raise ValueError(f"More than one instance of {relation.name} found!")
30 |     else:
31 |         return results[0][0]
32 | 
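
A sketch of how the helper above is typically consumed from a test, mirroring the relation-construction patterns elsewhere in this suite; the identifier is hypothetical:

    # inside a test method that receives the `project` fixture:
    relation = project.adapter.Relation.create(
        database=project.database,
        schema=project.test_schema,
        identifier="my_materialized_view",  # hypothetical relation name
    )
    assert query_relation_type(project, relation) == "materialized_view"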


--------------------------------------------------------------------------------
/tests/functional/adapter/persist_docs/fixtures.py:
--------------------------------------------------------------------------------
  1 | seed_csv = """
  2 | id,name,date
  3 | 1,Easton,1981-05-20 06:46:51
  4 | 2,Lillian,1978-09-03 18:10:33
  5 | 3,Jeremiah,1982-03-11 03:59:51
  6 | 4,Nolan,1976-05-06 20:21:35
  7 | """.lstrip()
  8 | 
  9 | table_model = """
 10 | {{config(materialized = "table")}}
 11 | select * from {{ ref('seed') }}
 12 | """
 13 | 
 14 | view_model = """
 15 | {{config(materialized = "view")}}
 16 | select * from {{ ref('seed') }}
 17 | """
 18 | 
 19 | incremental_model = """
 20 | {{config(materialized = "incremental")}}
 21 | select * from {{ ref('seed') }}
 22 | """
 23 | 
 24 | table_profile_yml = """
 25 | version: 2
 26 | models:
 27 |   - name: table_model
 28 |     description: |
 29 |       Table model description "with double quotes"
 30 |       and with 'single  quotes' as well as other;
 31 |       '''abc123'''
 32 |       reserved -- characters
 33 |       --
 34 |       /* comment */
 35 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
 36 |     columns:
 37 |       - name: id
 38 |         description: |
 39 |           id Column description "with double quotes"
 40 |           and with 'single  quotes' as well as other;
 41 |           '''abc123'''
 42 |           reserved -- characters
 43 |           --
 44 |           /* comment */
 45 |           Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
 46 |         tests:
 47 |           - unique
 48 |           - not_null
 49 |       - name: name
 50 |         description: |
 51 |           Fancy column description
 52 |         tests:
 53 |           - not_null
 54 | seeds:
 55 |   - name: seed
 56 |     description: |
 57 |       Seed model description "with double quotes"
 58 |       and with 'single  quotes' as well as other;
 59 |       '''abc123'''
 60 |       reserved -- characters
 61 |       --
 62 |       /* comment */
 63 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
 64 |     columns:
 65 |       - name: id
 66 |         description: |
 67 |           id Column description "with double quotes"
 68 |           and with 'single  quotes' as well as other;
 69 |           '''abc123'''
 70 |           reserved -- characters
 71 |           --
 72 |           /* comment */
 73 |           Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
 74 |       - name: name
 75 |         description: |
 76 |           Fancy column description
 77 |         tests:
 78 |           - not_null
 79 | """
 80 | 
 81 | 
 82 | view_profile_yml = """
 83 | version: 2
 84 | models:
 85 |   - name: view_model
 86 |     description: |
 87 |       Table model description "with double quotes"
 88 |       and with 'single  quotes' as well as other;
 89 |       '''abc123'''
 90 |       reserved -- characters
 91 |       --
 92 |       /* comment */
 93 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
 94 |     columns:
 95 |       - name: id
 96 |         tests:
 97 |           - unique
 98 |           - not_null
 99 |         description: ID Column description
100 |       - name: name
101 |         tests:
102 |           - not_null
103 |         description: Name description
104 | seeds:
105 |   - name: seed
106 |     description: |
107 |       Seed model description "with double quotes"
108 |       and with 'single  quotes' as well as other;
109 |       '''abc123'''
110 |       reserved -- characters
111 |       --
112 |       /* comment */
113 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
114 |     columns:
115 |       - name: id
116 |         description: |
117 |           id Column description "with double quotes"
118 |           and with 'single  quotes' as well as other;
119 |           '''abc123'''
120 |           reserved -- characters
121 |           --
122 |           /* comment */
123 |           Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
124 |       - name: name
125 |         description: |
126 |           Fancy column description
127 |         tests:
128 |           - not_null
129 | """
130 | 
131 | incremental_profile_yml = """
132 | version: 2
133 | models:
134 |   - name: incremental_model
135 |     description: |
136 |       Table model description "with double quotes"
137 |       and with 'single  quotes' as well as other;
138 |       '''abc123'''
139 |       reserved -- characters
140 |       --
141 |       /* comment */
142 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
143 |     columns:
144 |       - name: id
145 |         description: |
146 |           id Column description "with double quotes"
147 |           and with 'single  quotes' as well as other;
148 |           '''abc123'''
149 |           reserved -- characters
150 |           --
151 |           /* comment */
152 |           Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
153 |         tests:
154 |           - unique
155 |           - not_null
156 |       - name: name
157 |         description: |
158 |           Fancy column description
159 |         tests:
160 |           - not_null
161 | seeds:
162 |   - name: seed
163 |     description: |
164 |       Seed model description "with double quotes"
165 |       and with 'single  quotes' as well as other;
166 |       '''abc123'''
167 |       reserved -- characters
168 |       --
169 |       /* comment */
170 |       Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
171 |     columns:
172 |       - name: id
173 |         description: |
174 |           id Column description "with double quotes"
175 |           and with 'single  quotes' as well as other;
176 |           '''abc123'''
177 |           reserved -- characters
178 |           --
179 |           /* comment */
180 |           Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting
181 |       - name: name
182 |         description: |
183 |           Fancy column description
184 |         tests:
185 |           - not_null
186 | """
187 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/show/fixtures.py:
--------------------------------------------------------------------------------
 1 | models__sample_model = """
 2 | select * from {{ ref('sample_seed') }}
 3 | """
 4 | 
 5 | models__second_model = """
 6 | select
 7 |     sample_num as col_one,
 8 |     sample_bool as col_two,
 9 |     42 as answer
10 | from {{ ref('sample_model') }}
11 | """
12 | 
13 | models__sql_header = """
14 | {% call set_sql_header(config) %}
15 | set time zone 'Asia/Kolkata';
16 | {%- endcall %}
17 | select current_timezone() as timezone
18 | """
19 | 
20 | private_model_yml = """
21 | groups:
22 |   - name: my_cool_group
23 |     owner: {name: me}
24 | 
25 | models:
26 |   - name: private_model
27 |     access: private
28 |     config:
29 |       group: my_cool_group
30 | """
31 | 
32 | 
33 | schema_yml = """
34 | models:
35 |   - name: sample_model
36 |     latest_version: 1
37 | 
38 |     # declare the versions, and fully specify them
39 |     versions:
40 |       - v: 2
41 |         config:
42 |           materialized: table
43 |         columns:
44 |           - name: sample_num
45 |             data_type: int
46 |           - name: sample_bool
47 |             data_type: boolean
48 |           - name: answer
49 |             data_type: int
50 | 
51 |       - v: 1
52 |         config:
53 |           materialized: table
54 |           contract: {enforced: true}
55 |         columns:
56 |           - name: sample_num
57 |             data_type: int
58 |           - name: sample_bool
59 |             data_type: boolean
60 | """
61 | 
62 | models__ephemeral_model = """
63 | {{ config(materialized = 'ephemeral') }}
64 | select
65 |     coalesce(sample_num, 0) + 10 as col_deci
66 | from {{ ref('sample_model') }}
67 | """
68 | 
69 | models__second_ephemeral_model = """
70 | {{ config(materialized = 'ephemeral') }}
71 | select
72 |     col_deci + 100 as col_hundo
73 | from {{ ref('ephemeral_model') }}
74 | """
75 | 
76 | seeds__sample_seed = """sample_num,sample_bool
77 | 1,true
78 | 2,false
79 | 3,true
80 | 4,false
81 | 5,true
82 | 6,false
83 | 7,true
84 | """
85 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/simple_seed/test_seed.py:
--------------------------------------------------------------------------------
  1 | from pathlib import Path
  2 | 
  3 | import pytest
  4 | from dbt.tests.adapter.simple_seed.test_seed import (
  5 |     TestBasicSeedTests as CoreTestBasicSeedTests,
  6 | )
  7 | from dbt.tests.adapter.simple_seed.test_seed import (
  8 |     TestSeedConfigFullRefreshOff as CoreTestSeedConfigFullRefreshOff,
  9 | )
 10 | from dbt.tests.adapter.simple_seed.test_seed import (
 11 |     TestSeedConfigFullRefreshOn as CoreTestSeedConfigFullRefreshOn,
 12 | )
 13 | from dbt.tests.adapter.simple_seed.test_seed import (
 14 |     TestSeedCustomSchema as CoreTestSeedCustomSchema,
 15 | )
 16 | from dbt.tests.adapter.simple_seed.test_seed import (
 17 |     TestSeedParsing as CoreTestSeedParsing,
 18 | )
 19 | from dbt.tests.adapter.simple_seed.test_seed import (
 20 |     TestSeedSpecificFormats as CoreTestSeedSpecificFormats,
 21 | )
 22 | from dbt.tests.adapter.simple_seed.test_seed import (
 23 |     TestSeedWithEmptyDelimiter as CoreTestSeedWithEmptyDelimiter,
 24 | )
 25 | from dbt.tests.adapter.simple_seed.test_seed import (
 26 |     TestSeedWithUniqueDelimiter as CoreTestSeedWithUniqueDelimiter,
 27 | )
 28 | from dbt.tests.adapter.simple_seed.test_seed import (
 29 |     TestSeedWithWrongDelimiter as CoreTestSeedWithWrongDelimiter,
 30 | )
 31 | from dbt.tests.adapter.simple_seed.test_seed import (
 32 |     TestSimpleSeedEnabledViaConfig as CoreTestSimpleSeedEnabledViaConfig,
 33 | )
 34 | from dbt.tests.adapter.simple_seed.test_seed import (
 35 |     TestSimpleSeedWithBOM as CoreTestSimpleSeedWithBOM,
 36 | )
 37 | from dbt.tests.util import copy_file, run_dbt
 38 | 
 39 | from tests.functional.adapter.simple_seed.seeds import (
 40 |     trino_seeds__expected_sql_create_table,
 41 |     trino_seeds__expected_sql_insert_into,
 42 | )
 43 | 
 44 | 
 45 | class TrinoSetUpFixture:
 46 |     @pytest.fixture(scope="class", autouse=True)
 47 |     def setUp(self, project):
 48 |         """Create a table to ensure the seeds and models used in tests build correctly"""
 49 |         project.run_sql(trino_seeds__expected_sql_create_table)
 50 |         project.run_sql(trino_seeds__expected_sql_insert_into)
 51 | 
 52 | 
 53 | class TestTrinoBasicSeedTests(TrinoSetUpFixture, CoreTestBasicSeedTests):
 54 |     # TODO Trino currently does not support DROP TABLE CASCADE.
 55 |     #  Dropping seed won't drop downstream models automatically.
 56 |     @pytest.mark.skip
 57 |     def test_simple_seed_full_refresh_flag(self, project):
 58 |         pass
 59 | 
 60 | 
 61 | # TODO Trino currently does not support DROP TABLE CASCADE.
 62 | #  Dropping seed won't drop downstream models automatically.
 63 | @pytest.mark.skip
 64 | class TestTrinoSeedConfigFullRefreshOn(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOn):
 65 |     pass
 66 | 
 67 | 
 68 | class TestTrinoSeedConfigFullRefreshOff(TrinoSetUpFixture, CoreTestSeedConfigFullRefreshOff):
 69 |     pass
 70 | 
 71 | 
 72 | class TestTrinoSeedCustomSchema(TrinoSetUpFixture, CoreTestSeedCustomSchema):
 73 |     pass
 74 | 
 75 | 
 76 | class TestTrinoSeedWithUniqueDelimiter(TrinoSetUpFixture, CoreTestSeedWithUniqueDelimiter):
 77 |     pass
 78 | 
 79 | 
 80 | class TestTrinoSeedWithWrongDelimiter(TrinoSetUpFixture, CoreTestSeedWithWrongDelimiter):
 81 |     def test_seed_with_wrong_delimiter(self, project):
 82 |         """Testing failure of running dbt seed with a wrongly configured delimiter"""
 83 |         seed_result = run_dbt(["seed"], expect_pass=False)
 84 |         assert "syntax_error" in seed_result.results[0].message.lower()
 85 | 
 86 | 
 87 | class TestTrinoSeedWithEmptyDelimiter(TrinoSetUpFixture, CoreTestSeedWithEmptyDelimiter):
 88 |     pass
 89 | 
 90 | 
 91 | class TestTrinoSimpleSeedEnabledViaConfig(CoreTestSimpleSeedEnabledViaConfig):
 92 |     pass
 93 | 
 94 | 
 95 | class TestTrinoSeedParsing(TrinoSetUpFixture, CoreTestSeedParsing):
 96 |     pass
 97 | 
 98 | 
 99 | class TestTrinoSimpleSeedWithBOM(CoreTestSimpleSeedWithBOM):
100 |     @pytest.fixture(scope="class", autouse=True)
101 |     def setUp(self, project):
102 |         """Create a table to ensure seeds and models used in tests build correctly."""
103 |         project.run_sql(trino_seeds__expected_sql_create_table)
104 |         project.run_sql(trino_seeds__expected_sql_insert_into)
105 |         copy_file(
106 |             project.test_dir,
107 |             "seed_bom.csv",
108 |             project.project_root / Path("seeds") / "seed_bom.csv",
109 |             "",
110 |         )
111 | 
112 | 
113 | class TestTrinoSeedSpecificFormats(CoreTestSeedSpecificFormats):
114 |     pass
115 | 
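Note: `TrinoSetUpFixture` is a mixin providing a class-scoped, autouse fixture that pre-creates and populates the expected table before the inherited suites run. The skipped full-refresh cases rely on `DROP TABLE ... CASCADE` semantics, which Trino does not provide, so dropping a seed leaves its downstream models in place.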


--------------------------------------------------------------------------------
/tests/functional/adapter/store_failures/fixtures.py:
--------------------------------------------------------------------------------
 1 | seed_csv = """
 2 | id,value
 3 | 1,1
 4 | 2,2
 5 | 3,3
 6 | 4,4
 7 | """.lstrip()
 8 | 
 9 | table_model = """
10 | select * from {{ ref('seed') }}
11 | """
12 | 
13 | table_profile_yml = """
14 | version: 2
15 | models:
16 |   - name: table_model
17 |     columns:
18 |       - name: id
19 |         tests:
20 |           - unique
21 |           - not_null
22 |       - name: value
23 |         quote: true
24 |         tests:
25 |           - not_null
26 |           - accepted_values:
27 |               values:
28 |                 - 1
29 |                 - 2
30 |                 - 3
31 |                 - 4
32 |               quote: false
33 | 
34 | seeds:
35 |   - name: seed
36 |     columns:
37 |       - name: id
38 |       - name: value
39 |         tests:
40 |           - not_null
41 | """
42 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/store_failures/test_store_failures.py:
--------------------------------------------------------------------------------
  1 | import pytest
  2 | from dbt.tests.adapter.store_test_failures_tests import basic
  3 | from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import (
  4 |     TestStoreTestFailures,
  5 | )
  6 | from dbt.tests.util import run_dbt
  7 | 
  8 | from tests.functional.adapter.store_failures.fixtures import (
  9 |     seed_csv,
 10 |     table_model,
 11 |     table_profile_yml,
 12 | )
 13 | 
 14 | 
 15 | class TestStoreFailuresTable:
 16 |     @property
 17 |     def schema(self):
 18 |         return "default"
 19 | 
 20 |     # everything that goes in the "seeds" directory
 21 |     @pytest.fixture(scope="class")
 22 |     def seeds(self):
 23 |         return {
 24 |             "seed.csv": seed_csv,
 25 |         }
 26 | 
 27 |     @pytest.fixture(scope="class")
 28 |     def project_config_update(self):
 29 |         return {
 30 |             "name": "store_failures_tests",
 31 |             "quoting": {
 32 |                 "database": False,
 33 |                 "schema": False,
 34 |                 "identifier": True,
 35 |             },
 36 |             "models": {
 37 |                 "+materialized": "table",
 38 |             },
 39 |             "tests": {
 40 |                 "+store_failures": True,
 41 |             },
 42 |         }
 43 | 
 44 |     # everything that goes in the "models" directory
 45 |     @pytest.fixture(scope="class")
 46 |     def models(self):
 47 |         return {
 48 |             "table_model.sql": table_model,
 49 |             "table_store_failures.yml": table_profile_yml,
 50 |         }
 51 | 
 52 |     @pytest.fixture(autouse=True)
 53 |     def teardown_method(self, project):
 54 |         yield
 55 |         with project.adapter.connection_named("__test"):
 56 |             relation = project.adapter.Relation.create(
 57 |                 database=project.database, schema=f"{project.test_schema}_dbt_test__audit"
 58 |             )
 59 |             project.adapter.drop_schema(relation)
 60 | 
 61 |     def test_run_seed_test(self, project):
 62 |         # load the seed
 63 |         results = run_dbt(["seed"], expect_pass=True)
 64 |         assert len(results) == 1
 65 |         results = run_dbt(["run"], expect_pass=True)
 66 |         assert len(results) == 1
 67 |         # run the tests
 68 |         results = run_dbt(["test"], expect_pass=True)
 69 |         assert len(results) == 5
 70 |         # run the tests a second time
 71 |         results = run_dbt(["test"], expect_pass=True)
 72 |         assert len(results) == 5
 73 | 
 74 | 
 75 | class TestTrinoTestStoreTestFailures(TestStoreTestFailures):
 76 |     pass
 77 | 
 78 | 
 79 | class TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions):
 80 |     pass
 81 | 
 82 | 
 83 | class TestStoreTestFailuresAsProjectLevelOff(basic.StoreTestFailuresAsProjectLevelOff):
 84 |     pass
 85 | 
 86 | 
 87 | class TestStoreTestFailuresAsProjectLevelView(basic.StoreTestFailuresAsProjectLevelView):
 88 |     pass
 89 | 
 90 | 
 91 | class TestStoreTestFailuresAsGeneric(basic.StoreTestFailuresAsGeneric):
 92 |     pass
 93 | 
 94 | 
 95 | class TestStoreTestFailuresAsProjectLevelEphemeral(basic.StoreTestFailuresAsProjectLevelEphemeral):
 96 |     pass
 97 | 
 98 | 
 99 | class TestStoreTestFailuresAsExceptions(basic.StoreTestFailuresAsExceptions):
100 |     pass
101 | 
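Note: with `store_failures` enabled, dbt writes failing rows into a dedicated `<test_schema>_dbt_test__audit` schema, which is why the autouse `teardown_method` fixture drops exactly that schema after each test. The expected count of 5 matches the tests declared in the fixture YAML: `unique` and `not_null` on `id` plus `not_null` and `accepted_values` on `value` for the model, and `not_null` on the seed's `value` column.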


--------------------------------------------------------------------------------
/tests/functional/adapter/test_caching.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.caching.test_caching import (
 2 |     BaseCachingLowercaseModel,
 3 |     BaseCachingSelectedSchemaOnly,
 4 |     BaseCachingUppercaseModel,
 5 | )
 6 | 
 7 | 
 8 | class TestCachingLowerCaseModel(BaseCachingLowercaseModel):
 9 |     pass
10 | 
11 | 
12 | class TestCachingUppercaseModel(BaseCachingUppercaseModel):
13 |     pass
14 | 
15 | 
16 | class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly):
17 |     pass
18 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/test_changing_relation_type.py:
--------------------------------------------------------------------------------
1 | from dbt.tests.adapter.relations.test_changing_relation_type import (
2 |     BaseChangeRelationTypeValidator,
3 | )
4 | 
5 | 
6 | class TestTrinoChangeRelationTypes(BaseChangeRelationTypeValidator):
7 |     pass
8 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/test_concurrency.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.concurrency.test_concurrency import (
 2 |     BaseConcurrency,
 3 |     seeds__update_csv,
 4 | )
 5 | from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file
 6 | 
 7 | 
 8 | class TestConcurrencyTrino(BaseConcurrency):
 9 |     def test_concurrency(self, project):
10 |         run_dbt(["seed", "--select", "seed"])
11 |         results = run_dbt(["run"], expect_pass=False)
12 |         assert len(results) == 7
13 |         check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"])
14 |         check_relations_equal(project.adapter, ["SEED", "DEP"])
15 |         check_relations_equal(project.adapter, ["SEED", "TABLE_A"])
16 |         check_relations_equal(project.adapter, ["SEED", "TABLE_B"])
17 | 
18 |         rm_file(project.project_root, "seeds", "seed.csv")
19 |         write_file(seeds__update_csv, project.project_root + "/seeds", "seed.csv")
20 |         results = run_dbt(["run"], expect_pass=False)
21 |         assert len(results) == 7
22 |         check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"])
23 |         check_relations_equal(project.adapter, ["SEED", "DEP"])
24 |         check_relations_equal(project.adapter, ["SEED", "TABLE_A"])
25 |         check_relations_equal(project.adapter, ["SEED", "TABLE_B"])
26 | 
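Note: `expect_pass=False` mirrors the upstream `BaseConcurrency` project, which deliberately includes a failing model among its seven; the `check_relations_equal` calls then verify that the models expected to succeed still built correctly while running concurrently.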


--------------------------------------------------------------------------------
/tests/functional/adapter/test_custom_schema.py:
--------------------------------------------------------------------------------
  1 | from abc import ABC, abstractmethod
  2 | 
  3 | import pytest
  4 | from dbt.tests.util import run_dbt, run_sql_with_adapter
  5 | 
  6 | seed_csv = """
  7 | id,name,date
  8 | 1,Easton,1981-05-20 06:46:51
  9 | 2,Lillian,1978-09-03 18:10:33
 10 | 3,Jeremiah,1982-03-11 03:59:51
 11 | 4,Nolan,1976-05-06 20:21:35
 12 | """.lstrip()
 13 | 
 14 | 
 15 | class CustomSchemaBase(ABC):
 16 |     """
 17 |     This test ensures that Trino table, view, and incremental materializations
 18 |     work as expected for custom schemas.
 19 |     """
 20 | 
 21 |     # set custom schema name
 22 |     custom_schema_name = "very_custom_schema_name"
 23 | 
 24 |     # Note: subclasses implement these as plain methods (and they are
 25 |     #  called as methods below), so declare them as abstract methods only.
 26 |     @abstractmethod
 27 |     def table_type(self):
 28 |         pass
 29 | 
 30 |     @abstractmethod
 31 |     def materialization(self):
 32 |         pass
 33 | 
 34 |     # define model
 35 |     def custom_schema_model(self, materialization):
 36 |         return f"""
 37 |                     {{{{
 38 |                         config(
 39 |                         materialized="{materialization}",
 40 |                         schema="{self.custom_schema_name}"
 41 |                         )
 42 |                     }}}}
 43 |                     select * from {{{{ ref('seed') }}}}
 44 |                 """
 45 | 
 46 |     @pytest.fixture(scope="class")
 47 |     def project_config_update(self):
 48 |         return {
 49 |             "seeds": {
 50 |                 "+column_types": {"date": "timestamp(6)"},
 51 |             },
 52 |         }
 53 | 
 54 |     # everything that goes in the "seeds" directory
 55 |     @pytest.fixture(scope="class")
 56 |     def seeds(self):
 57 |         return {
 58 |             "seed.csv": seed_csv,
 59 |         }
 60 | 
 61 |     # everything that goes in the "models" directory
 62 |     @pytest.fixture(scope="class")
 63 |     def models(self):
 64 |         return {
 65 |             f"custom_schema_{self.materialization()}_model.sql": self.custom_schema_model(
 66 |                 self.materialization()
 67 |             )
 68 |         }
 69 | 
 70 |     @pytest.fixture(scope="function", autouse=True)
 71 |     def teardown_method(self, project):
 72 |         yield
 73 |         relation = project.adapter.Relation.create(
 74 |             database=project.database, schema=f"{project.test_schema}_{self.custom_schema_name}"
 75 |         )
 76 |         project.adapter.drop_schema(relation)
 77 | 
 78 |     def test_custom_schema_trino(self, project):
 79 |         # Load the seed, then run the models.
 80 |         results = run_dbt(["seed"], expect_pass=True)
 81 |         assert len(results) == 1
 82 |         results = run_dbt(["run"], expect_pass=True)
 83 |         assert len(results) == 1
 84 | 
 85 |         # Fetch info about the just-created table/view from information_schema.
 86 |         sql = f"""
 87 |             select * from {project.adapter.config.credentials.database}.information_schema.tables
 88 |             where table_catalog = '{project.adapter.config.credentials.database}'
 89 |             and table_schema = '{project.adapter.config.credentials.schema}_{self.custom_schema_name}'
 90 |         """.strip()
 91 |         results = run_sql_with_adapter(project.adapter, sql, fetch="all")
 92 | 
 93 |         # Check that the fetched info matches expectations.
 94 |         assert len(results) == 1
 95 |         assert results[0][0] == project.adapter.config.credentials.database
 96 |         assert (
 97 |             results[0][1]
 98 |             == f"{project.adapter.config.credentials.schema}_{self.custom_schema_name}"
 99 |         )
100 |         assert results[0][2] == f"custom_schema_{self.materialization()}_model"
101 |         assert results[0][3] == self.table_type()
102 | 
103 | 
104 | class TestCustomSchemaTable(CustomSchemaBase):
105 |     def materialization(self):
106 |         return "table"
107 | 
108 |     def table_type(self):
109 |         return "BASE TABLE"
110 | 
111 | 
112 | class TestCustomSchemaView(CustomSchemaBase):
113 |     def materialization(self):
114 |         return "view"
115 | 
116 |     def table_type(self):
117 |         return "VIEW"
118 | 
119 | 
120 | class TestCustomSchemaIncremental(CustomSchemaBase):
121 |     def materialization(self):
122 |         return "incremental"
123 | 
124 |     def table_type(self):
125 |         return "BASE TABLE"
126 | 
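A note on the quadruple braces in `custom_schema_model`: inside a Python f-string, `{{{{` and `}}}}` each render to a literal `{{` / `}}`, producing the Jinja delimiters the model file needs. A minimal sketch:

    materialization = "table"
    template = f"""{{{{ config(materialized="{materialization}") }}}}"""
    print(template)  # prints: {{ config(materialized="table") }}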


--------------------------------------------------------------------------------
/tests/functional/adapter/test_ephemeral.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.ephemeral.test_ephemeral import (
 2 |     BaseEphemeralErrorHandling,
 3 |     BaseEphemeralMulti,
 4 |     BaseEphemeralNested,
 5 | )
 6 | from dbt.tests.util import check_relations_equal, run_dbt
 7 | 
 8 | 
 9 | class TestEphemeralMultiTrino(BaseEphemeralMulti):
10 |     def test_ephemeral_multi(self, project):
11 |         run_dbt(["seed"])
12 |         results = run_dbt(["run"])
13 |         assert len(results) == 3
14 |         check_relations_equal(
15 |             project.adapter, ["SEED", "DEPENDENT", "DOUBLE_DEPENDENT", "SUPER_DEPENDENT"]
16 |         )
17 | 
18 | 
19 | class TestEphemeralNestedTrino(BaseEphemeralNested):
20 |     def test_ephemeral_nested(self, project):
21 |         results = run_dbt(["run"])
22 |         assert len(results) == 2
23 | 
24 | 
25 | class TestEphemeralErrorHandlingTrino(BaseEphemeralErrorHandling):
26 |     pass
27 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/test_get_incremental_tmp_relation_type_macro.py:
--------------------------------------------------------------------------------
 1 | from abc import ABC, abstractmethod
 2 | 
 3 | import pytest
 4 | from dbt.tests.util import run_dbt, run_sql_with_adapter
 5 | 
 6 | 
 7 | class CustomSchemaBase(ABC):
 8 |     """
  9 |     This test ensures that the get_incremental_tmp_relation_type macro
 10 |     returns the expected values for certain inputs.
11 |     """
12 | 
13 |     @property
14 |     @abstractmethod
15 |     def expected_types(self):
 16 |         # Expected relation types returned for the created model,
 17 |         # ordered to match the columns in the model definition.
18 |         return ["table", "view", "view", "view", "table", "view", "table"]
19 | 
20 |     # define model
21 |     def incremental_model(self):
22 |         return """
23 |                     select
24 |                     '{{ get_incremental_tmp_relation_type('delete+insert', 'foo', 'sql') }}' AS delete_plus_insert_strategy,
25 |                     '{{ get_incremental_tmp_relation_type('append', 'foo', 'sql') }}' AS append_strategy,
26 |                     '{{ get_incremental_tmp_relation_type('default', 'foo', 'sql') }}' AS default_strategy,
27 |                     '{{ get_incremental_tmp_relation_type('merge', 'foo', 'sql') }}' AS merge_strategy,
28 |                     '{{ get_incremental_tmp_relation_type('foo', 'some_unique_key', 'sql') }}' AS unique_key,
29 |                     '{{ get_incremental_tmp_relation_type('foo', None, 'sql') }}' AS no_unique_key,
30 |                     '{{ get_incremental_tmp_relation_type('default', 'foo', 'python') }}' AS python_model
31 |                 """
32 | 
33 |     @pytest.fixture(scope="class")
34 |     @abstractmethod
35 |     def project_config_update(self):
36 |         pass
37 | 
38 |     # everything that goes in the "models" directory
39 |     @pytest.fixture(scope="class")
40 |     def models(self):
41 |         return {"test_get_incremental_tmp_relation_type.sql": self.incremental_model()}
42 | 
43 |     def test_get_incremental_tmp_relation_type(self, project):
44 |         # Run models.
45 |         results = run_dbt(["run"], expect_pass=True)
46 |         assert len(results) == 1
47 | 
48 |         # Fetch info from get_incremental_tmp_relation_type macro output.
49 |         sql = f"""
50 |             select * from {project.adapter.config.credentials.database}.{project.adapter.config.credentials.schema}.test_get_incremental_tmp_relation_type
51 |         """.strip()
52 |         results = run_sql_with_adapter(project.adapter, sql, fetch="all")
53 | 
 54 |         # Check that the fetched info matches expectations.
55 |         assert len(results) == 1
56 |         assert results[0] == self.expected_types
57 | 
58 | 
59 | class TestViewsEnabled(CustomSchemaBase):
60 |     @property
61 |     def expected_types(self):
62 |         return super().expected_types
63 | 
64 |     @pytest.fixture(scope="class")
65 |     def project_config_update(self):
66 |         # Not specifying views_enabled config,
67 |         # as it is 'True' by default
68 |         pass
69 | 
70 | 
71 | class TestViewsNotEnabled(CustomSchemaBase):
72 |     @property
73 |     def expected_types(self):
74 |         # Expected type is 'table' for every config,
75 |         # as views_enabled is set to 'False'.
76 |         return ["table" for _ in super().expected_types]
77 | 
78 |     @pytest.fixture(scope="class")
79 |     def project_config_update(self):
80 |         return {
81 |             "models": {"+views_enabled": False},
82 |         }
83 | 
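For readers tracing `expected_types` in the file above, here is a minimal Python sketch of the decision table these classes encode. It is inferred purely from the expected values (the authoritative logic lives in the `get_incremental_tmp_relation_type` Jinja macro), and the unknown-strategy fallback is an assumption drawn from the `unique_key` / `no_unique_key` cases:

    def tmp_relation_type(strategy, unique_key, language, views_enabled=True):
        # Python models, and projects with views disabled, always get a table.
        if not views_enabled or language != "sql":
            return "table"
        if strategy == "delete+insert":
            return "table"
        if strategy in ("append", "default", "merge"):
            return "view"
        # Unrecognized strategy: fall back on whether a unique_key is set.
        return "table" if unique_key else "view"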


--------------------------------------------------------------------------------
/tests/functional/adapter/test_grants.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.context.base import BaseContext  # diff_of_two_dicts only
 3 | from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants
 4 | from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants
 5 | 
 6 | 
 7 | @pytest.mark.hive
 8 | # TODO: set up Galaxy and Starburst tests
 9 | #   See https://github.com/starburstdata/dbt-trino/issues/147
10 | #   and also https://github.com/starburstdata/dbt-trino/issues/146
11 | @pytest.mark.skip_profile("starburst_galaxy")
12 | # To run this test locally, add the following env vars:
13 | # DBT_TEST_USER_1=user1
14 | # DBT_TEST_USER_2=user2
15 | # DBT_TEST_USER_3=user3
16 | class TestModelGrantsTrino(BaseModelGrants):
17 |     def assert_expected_grants_match_actual(self, project, relation_name, expected_grants):
18 |         actual_grants = self.get_grants_on_relation(project, relation_name)
19 |         # Remove the creation user
20 |         try:
21 |             for privilege in ["delete", "update", "insert", "select"]:
22 |                 if privilege in actual_grants:
23 |                     actual_grants[privilege].remove("admin")
24 |                     if len(actual_grants[privilege]) == 0:
25 |                         del actual_grants[privilege]
26 |         except ValueError:
27 |             pass
28 | 
29 |         # need a case-insensitive comparison
30 |         # so just a simple "assert expected == actual_grants" won't work
31 |         diff_a = BaseContext.diff_of_two_dicts(actual_grants, expected_grants)
32 |         diff_b = BaseContext.diff_of_two_dicts(expected_grants, actual_grants)
33 |         assert diff_a == diff_b == {}
34 | 
35 | 
36 | @pytest.mark.hive
37 | # TODO: set up Galaxy and Starburst tests; might need separate tests
38 | #   See https://github.com/starburstdata/dbt-trino/issues/147
39 | #   and also https://github.com/starburstdata/dbt-trino/issues/146
40 | @pytest.mark.skip(reason="Hive doesn't raise errors on invalid roles")
41 | class TestInvalidGrantsTrino(BaseInvalidGrants):
42 |     pass
43 | 
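Note: `diff_of_two_dicts(a, b)` returns the grant entries present in `a` but missing from `b`, comparing the grantee lists case-insensitively; asserting that both directional diffs are empty is therefore equivalent to a case-insensitive equality check on the two grant dicts.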


--------------------------------------------------------------------------------
/tests/functional/adapter/test_query_comments.py:
--------------------------------------------------------------------------------
 1 | from dbt.tests.adapter.query_comment.test_query_comment import (
 2 |     BaseEmptyQueryComments,
 3 |     BaseMacroArgsQueryComments,
 4 |     BaseMacroInvalidQueryComments,
 5 |     BaseMacroQueryComments,
 6 |     BaseNullQueryComments,
 7 |     BaseQueryComments,
 8 | )
 9 | 
10 | 
11 | class TestQueryCommentsTrino(BaseQueryComments):
12 |     pass
13 | 
14 | 
15 | class TestMacroQueryCommentsTrino(BaseMacroQueryComments):
16 |     pass
17 | 
18 | 
19 | class TestMacroArgsQueryCommentsTrino(BaseMacroArgsQueryComments):
20 |     pass
21 | 
22 | 
23 | class TestMacroInvalidQueryCommentsTrino(BaseMacroInvalidQueryComments):
24 |     pass
25 | 
26 | 
27 | class TestNullQueryCommentsTrino(BaseNullQueryComments):
28 |     pass
29 | 
30 | 
31 | class TestEmptyQueryCommentsTrino(BaseEmptyQueryComments):
32 |     pass
33 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/test_quote_policy.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | 
 3 | from tests.functional.adapter.test_basic import TestIncrementalTrino
 4 | 
 5 | 
 6 | @pytest.fixture(scope="class")
 7 | def unique_schema(request, prefix) -> str:
 8 |     return "sChEmAWiThMiXeDCaSe"
 9 | 
10 | 
11 | class TestTrinoQuotePolicy(TestIncrementalTrino):
12 |     pass
13 | 
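Note: overriding the module-level `unique_schema` fixture with a mixed-case name makes the inherited incremental test run against a schema whose name requires quoting, which is what exercises the adapter's quote policy.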


--------------------------------------------------------------------------------
/tests/functional/adapter/test_session_property.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.util import run_dbt
 3 | 
 4 | set_session_property = "set session query_max_run_time='20s'"
 5 | 
 6 | 
 7 | class TestSessionProperty:
 8 |     """
 9 |     This test ensures that setting session properties through a pre_hook works as expected.
10 |     It asserts that the session property passed in the 'pre_hook' config of the model
11 |     definition matches the pre_hook value extracted from the RunExecutionResult object.
12 |     """
13 | 
14 |     @property
15 |     def schema(self):
16 |         return "default"
17 | 
18 |     def session_property_model(self, prehook):
19 |         return f"""
20 |                     {{{{
21 |                         config(
22 |                             pre_hook="{prehook}"
23 |                         )
24 |                     }}}}
25 |                     select 'OK' as status
26 |                 """
27 | 
28 |     # everything that goes in the "models" directory
29 |     @pytest.fixture(scope="class")
30 |     def models(self):
31 |         return {"session_property_model.sql": self.session_property_model(set_session_property)}
32 | 
33 |     def test_session_property(self, project):
34 |         # Run models.
35 |         results = run_dbt(["run"], expect_pass=True)
36 |         assert len(results) == 1
37 |         assert set_session_property == results.results[0].node.config.pre_hook[0].sql
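
Note: in Trino, `SET SESSION query_max_run_time='20s'` sets a session property on the current connection; issued as a dbt pre-hook, it runs on the same connection as the model, so the limit applies to the model query that follows.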
38 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/test_simple_copy.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.simple_copy.test_simple_copy import (
 3 |     EmptyModelsArentRunBase,
 4 |     SimpleCopyBase,
 5 | )
 6 | from dbt.tests.util import run_dbt
 7 | 
 8 | 
 9 | @pytest.mark.iceberg
10 | class TestSimpleCopyBase(SimpleCopyBase):
11 |     def test_simple_copy_with_materialized_views(self, project):
12 |         project.run_sql(f"create table {project.test_schema}.unrelated_table (id int)")
13 |         sql = f"""
14 |             create materialized view {project.test_schema}.unrelated_materialized_view as (
15 |                 select * from {project.test_schema}.unrelated_table
16 |             )
17 |         """
18 |         project.run_sql(sql)
19 |         sql = f"""
20 |             create view {project.test_schema}.unrelated_view as (
21 |                 select * from {project.test_schema}.unrelated_materialized_view
22 |             )
23 |         """
24 |         project.run_sql(sql)
25 |         results = run_dbt(["seed"])
26 |         assert len(results) == 1
27 |         results = run_dbt()
28 |         assert len(results) == 7
29 | 
30 |         # clean up
31 |         # TODO: check if this clean-up is still needed
32 |         #  after implementing CASCADE in iceberg, delta, hive connectors
33 |         #  if not, the entire method could be deleted
34 |         project.run_sql("drop view unrelated_view")
35 |         project.run_sql("drop materialized view unrelated_materialized_view")
36 |         project.run_sql("drop table unrelated_table")
37 | 
38 | 
39 | # Trino implementation of dbt.tests.fixtures.project.TestProjInfo.get_tables_in_schema,
40 | # which uses `like` instead of `ilike` (Trino does not support `ilike`)
41 | def trino_get_tables_in_schema(prj):
42 |     sql = """
43 |             select table_name,
44 |                     case when table_type = 'BASE TABLE' then 'table'
45 |                          when table_type = 'VIEW' then 'view'
46 |                          else table_type
47 |                     end as materialization
48 |             from information_schema.tables
49 |             where {}
50 |             order by table_name
51 |             """
52 |     sql = sql.format("lower({}) like lower('{}')".format("table_schema", prj.test_schema))
53 |     result = prj.run_sql(sql, fetch="all")
54 |     return {model_name: materialization for (model_name, materialization) in result}
55 | 
56 | 
57 | class TestEmptyModelsArentRun(EmptyModelsArentRunBase):
58 |     def test_dbt_doesnt_run_empty_models(self, project):
59 |         results = run_dbt(["seed"])
60 |         assert len(results) == 1
61 |         results = run_dbt()
62 |         assert len(results) == 7
63 | 
64 |         tables = trino_get_tables_in_schema(project)
65 | 
66 |         assert "empty" not in tables.keys()
67 |         assert "disabled" not in tables.keys()
68 | 
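Note: for `test_schema = 'my_schema'` the override's WHERE clause renders as `lower(table_schema) like lower('my_schema')`, giving the case-insensitive match that `ilike` would normally provide.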


--------------------------------------------------------------------------------
/tests/functional/adapter/test_simple_snapshot.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.simple_snapshot.test_snapshot import (
 3 |     BaseSimpleSnapshot,
 4 |     BaseSnapshotCheck,
 5 | )
 6 | from dbt.tests.util import run_dbt
 7 | 
 8 | iceberg_macro_override_sql = """
 9 | {% macro trino__current_timestamp() -%}
10 |     current_timestamp(6)
11 | {%- endmacro %}
12 | """
13 | 
14 | 
15 | class TrinoSimpleSnapshot(BaseSimpleSnapshot):
16 |     def test_updates_are_captured_by_snapshot(self, project):
17 |         """
18 |         Update the last 5 records. Show that all ids are current, but the last 5 reflect updates.
19 |         """
20 |         self.update_fact_records(
21 |             {"updated_at": "updated_at + interval '1' day"}, "id between 16 and 20"
22 |         )
23 |         run_dbt(["snapshot"])
24 |         self._assert_results(
25 |             ids_with_current_snapshot_records=range(1, 21),
26 |             ids_with_closed_out_snapshot_records=range(16, 21),
27 |         )
28 | 
29 |     def test_new_column_captured_by_snapshot(self, project):
30 |         """
31 |         Add a column to `fact` and populate the last 10 records with a non-null value.
32 |         Show that all ids are current, but the last 10 reflect updates and the first 10 don't,
33 |         i.e. if the column is added but not updated, the record doesn't reflect an update.
34 |         """
35 |         self.add_fact_column("full_name", "varchar(200)")
36 |         self.update_fact_records(
37 |             {
38 |                 "full_name": "first_name || ' ' || last_name",
39 |                 "updated_at": "updated_at + interval '1' day",
40 |             },
41 |             "id between 11 and 20",
42 |         )
43 |         run_dbt(["snapshot"])
44 |         self._assert_results(
45 |             ids_with_current_snapshot_records=range(1, 21),
46 |             ids_with_closed_out_snapshot_records=range(11, 21),
47 |         )
48 | 
49 | 
50 | class TrinoSnapshotCheck(BaseSnapshotCheck):
51 |     def test_column_selection_is_reflected_in_snapshot(self, project):
52 |         """
53 |         Update the first 10 records on a non-tracked column.
54 |         Update the middle 10 records on a tracked column. (hence records 6-10 are updated on both)
55 |         Show that all ids are current, and only the tracked column updates are reflected in `snapshot`.
56 |         """
57 |         self.update_fact_records(
58 |             {"last_name": "substring(last_name, 1, 3)"}, "id between 1 and 10"
59 |         )  # not tracked
60 |         self.update_fact_records(
61 |             {"email": "substring(email, 1, 3)"}, "id between 6 and 15"
62 |         )  # tracked
63 |         run_dbt(["snapshot"])
64 |         self._assert_results(
65 |             ids_with_current_snapshot_records=range(1, 21),
66 |             ids_with_closed_out_snapshot_records=range(6, 16),
67 |         )
68 | 
69 | 
70 | @pytest.mark.iceberg
71 | class TestIcebergSimpleSnapshot(TrinoSimpleSnapshot):
72 |     @pytest.fixture(scope="class")
73 |     def project_config_update(self):
74 |         return {
75 |             "seeds": {
76 |                 "+column_types": {"updated_at": "timestamp(6)"},
77 |             },
78 |         }
79 | 
80 | 
81 | @pytest.mark.delta
82 | class TestDeltaSimpleSnapshot(TrinoSimpleSnapshot):
83 |     pass
84 | 
85 | 
86 | @pytest.mark.iceberg
87 | class TestIcebergSnapshotCheck(TrinoSnapshotCheck):
88 |     @pytest.fixture(scope="class")
89 |     def macros(self):
90 |         return {"iceberg.sql": iceberg_macro_override_sql}
91 | 
92 | 
93 | @pytest.mark.delta
94 | class TestDeltaSnapshotCheck(TrinoSnapshotCheck):
95 |     pass
96 | 
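Note: `iceberg_macro_override_sql` pins `current_timestamp` to precision 6 because Trino's Iceberg connector stores timestamps as `timestamp(6)`, while the bare `current_timestamp` defaults to precision 3; without the override, snapshot timestamp comparisons on Iceberg could lose precision.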


--------------------------------------------------------------------------------
/tests/functional/adapter/test_sql_status_output.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.util import run_dbt, run_dbt_and_capture
 3 | 
 4 | seed_csv = """
 5 | id,name,some_date
 6 | 1,Easton,1981-05-20 06:46:51
 7 | 2,Lillian,1978-09-03 18:10:33
 8 | 3,Jeremiah,1982-03-11 03:59:51
 9 | 4,Nolan,1976-05-06 20:21:35
10 | """.lstrip()
11 | 
12 | model_sql = """
13 | select * from {{ ref('seed') }}
14 | """
15 | 
16 | 
17 | class TestSqlStatusOutput:
18 |     """
19 |     Test that the SQL status output contains update_type and rowcount.
20 |     """
21 | 
22 |     @pytest.fixture(scope="class")
23 |     def seeds(self):
24 |         return {
25 |             "seed.csv": seed_csv,
26 |         }
27 | 
28 |     @pytest.fixture(scope="class")
29 |     def models(self):
30 |         return {
31 |             "materialization_table.sql": model_sql,
32 |             "materialization_view.sql": model_sql,
33 |         }
34 | 
35 |     @pytest.fixture(scope="class")
36 |     def project_config_update(self):
37 |         return {
38 |             "name": "sql_status_output",
39 |             "models": {
40 |                 "sql_status_output": {
41 |                     "materialization_table": {"+materialized": "table"},
42 |                     "materialization_view": {"+materialized": "view"},
43 |                 }
44 |             },
45 |         }
46 | 
47 |     def test_run_seed_test(self, project):
48 |         results = run_dbt(["seed"], expect_pass=True)
49 |         assert len(results) == 1
50 | 
51 |         results, logs = run_dbt_and_capture(["--no-use-colors", "run"], expect_pass=True)
52 |         assert len(results) == 2
53 |         assert (
54 |             f" of 2 OK created sql table model {project.test_schema}.materialization_table  [CREATE TABLE (4 rows) in "
55 |             in logs
56 |         )
57 |         assert (
58 |             f" of 2 OK created sql view model {project.test_schema}.materialization_view  [CREATE VIEW in "
59 |             in logs
60 |         )
61 | 
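Note: the asserted fragments come from dbt's one-line status output, into which the adapter embeds the update type and row count reported by Trino; `CREATE TABLE (4 rows)` corresponds to the 4-row seed selected by the table model.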


--------------------------------------------------------------------------------
/tests/functional/adapter/test_table_properties.py:
--------------------------------------------------------------------------------
  1 | import pytest
  2 | from dbt.tests.util import run_dbt, run_dbt_and_capture
  3 | 
  4 | from tests.functional.adapter.materialization.fixtures import model_sql, seed_csv
  5 | 
  6 | 
  7 | class BaseTableProperties:
  8 |     # Everything that goes in the "seeds" directory
  9 |     @pytest.fixture(scope="class")
 10 |     def seeds(self):
 11 |         return {
 12 |             "seed.csv": seed_csv,
 13 |         }
 14 | 
 15 |     # Everything that goes in the "models" directory
 16 |     @pytest.fixture(scope="class")
 17 |     def models(self):
 18 |         return {
 19 |             "model.sql": model_sql,
 20 |         }
 21 | 
 22 | 
 23 | @pytest.mark.iceberg
 24 | class TestTableProperties(BaseTableProperties):
 25 |     # Configuration in dbt_project.yml
 26 |     @pytest.fixture(scope="class")
 27 |     def project_config_update(self):
 28 |         return {
 29 |             "name": "properties_test",
 30 |             "models": {
 31 |                 "+materialized": "table",
 32 |                 "+properties": {
 33 |                     "format": "'PARQUET'",
 34 |                     "format_version": "2",
 35 |                 },
 36 |             },
 37 |         }
 38 | 
 39 |     def test_table_properties(self, project):
 40 |         # Load the seed
 41 |         results = run_dbt(["seed"], expect_pass=True)
 42 |         assert len(results) == 1
 43 | 
 44 |         # Create model with properties
 45 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
 46 |         assert len(results) == 1
 47 |         assert "WITH (" in logs
 48 |         assert "format = 'PARQUET'" in logs
 49 |         assert "format_version = 2" in logs
 50 | 
 51 | 
 52 | @pytest.mark.iceberg
 53 | class TestFileFormatConfig(BaseTableProperties):
 54 |     # Configuration in dbt_project.yml
 55 |     @pytest.fixture(scope="class")
 56 |     def project_config_update(self):
 57 |         return {
 58 |             "name": "properties_test",
 59 |             "models": {
 60 |                 "+materialized": "table",
 61 |                 "file_format": "parquet",
 62 |             },
 63 |         }
 64 | 
 65 |     def test_table_properties(self, project):
 66 |         # Load the seed
 67 |         results = run_dbt(["seed"], expect_pass=True)
 68 |         assert len(results) == 1
 69 | 
 70 |         # Create model with properties
 71 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
 72 |         assert len(results) == 1
 73 |         assert "WITH (" in logs
 74 |         assert "format = 'parquet'" in logs
 75 | 
 76 | 
 77 | @pytest.mark.iceberg
 78 | class TestFileFormatConfigAndFormatTablePropertyFail(BaseTableProperties):
 79 |     # Configuration in dbt_project.yml
 80 |     @pytest.fixture(scope="class")
 81 |     def project_config_update(self):
 82 |         return {
 83 |             "name": "properties_test",
 84 |             "models": {
 85 |                 "+materialized": "table",
 86 |                 "+properties": {
 87 |                     "format": "'PARQUET'",
 88 |                 },
 89 |                 "file_format": "orc",
 90 |             },
 91 |         }
 92 | 
 93 |     def test_table_properties(self, project):
 94 |         # Load the seed
 95 |         results = run_dbt(["seed"], expect_pass=True)
 96 |         assert len(results) == 1
 97 | 
 98 |         # Create model with properties
 99 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False)
100 |         assert len(results) == 1
101 |         assert (
102 |             "You can specify either 'file_format' or 'properties.format' configurations, but not both."
103 |             in logs
104 |         )
105 | 
106 | 
107 | @pytest.mark.hive
108 | # Setting the `type` property is available only in Starburst Galaxy
109 | # https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html
110 | @pytest.mark.skip_profile("trino_starburst")
111 | class TestTableFormatConfig(BaseTableProperties):
112 |     # Configuration in dbt_project.yml
113 |     @pytest.fixture(scope="class")
114 |     def project_config_update(self):
115 |         return {
116 |             "name": "properties_test",
117 |             "models": {
118 |                 "+materialized": "table",
119 |                 "table_format": "iceberg",
120 |             },
121 |         }
122 | 
123 |     def test_table_properties(self, project):
124 |         # Load the seed
125 |         results = run_dbt(["seed"], expect_pass=True)
126 |         assert len(results) == 1
127 | 
128 |         # Create model with properties
129 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True)
130 |         assert len(results) == 1
131 |         assert "WITH (" in logs
132 |         assert "type = 'iceberg'" in logs
133 | 
134 | 
135 | @pytest.mark.hive
136 | # Setting the `type` property is available only in Starburst Galaxy
137 | # https://docs.starburst.io/starburst-galaxy/data-engineering/working-with-data-lakes/table-formats/gl-iceberg.html
138 | @pytest.mark.skip_profile("trino_starburst")
139 | class TestTableFormatConfigAndTypeTablePropertyFail(BaseTableProperties):
140 |     # Configuration in dbt_project.yml
141 |     @pytest.fixture(scope="class")
142 |     def project_config_update(self):
143 |         return {
144 |             "name": "properties_test",
145 |             "models": {
146 |                 "+materialized": "table",
147 |                 "+properties": {
148 |                     "type": "'iceberg'",
149 |                 },
150 |                 "table_format": "iceberg",
151 |             },
152 |         }
153 | 
154 |     def test_table_properties(self, project):
155 |         # Load the seed
156 |         results = run_dbt(["seed"], expect_pass=True)
157 |         assert len(results) == 1
158 | 
159 |         # Create model with properties
160 |         results, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False)
161 |         assert len(results) == 1
162 |         assert (
163 |             "You can specify either 'table_format' or 'properties.type' configurations, but not both."
164 |             in logs
165 |         )
166 | 
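As a rough illustration of how a `+properties` mapping ends up in the generated DDL, here is a minimal Python sketch inferred from the log assertions above (the real rendering happens in the adapter's Jinja macros; `render_with_clause` is a hypothetical name):

    def render_with_clause(properties: dict) -> str:
        # {"format": "'PARQUET'", "format_version": "2"} renders to
        # "WITH (format = 'PARQUET', format_version = 2)"
        pairs = ", ".join(f"{key} = {value}" for key, value in properties.items())
        return f"WITH ({pairs})"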


--------------------------------------------------------------------------------
/tests/functional/adapter/unit_testing/test_unit_testing.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.unit_testing.test_case_insensitivity import (
 3 |     BaseUnitTestCaseInsensivity,
 4 | )
 5 | from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput
 6 | from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes
 7 | 
 8 | 
 9 | @pytest.mark.skip_profile("starburst_galaxy")
10 | class TestTrinoUnitTestingTypesTrinoStarburst(BaseUnitTestingTypes):
11 |     @pytest.fixture
12 |     def data_types(self):
13 |         # sql_value, yaml_value
14 |         return [
15 |             ["1", "1"],
16 |             ["'1'", "1"],
17 |             ["true", "true"],
18 |             ["DATE '2020-01-02'", "2020-01-02"],
19 |             ["TIMESTAMP '2013-11-03 00:00:00'", "2013-11-03 00:00:00"],
20 |             ["TIMESTAMP '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"],
21 |             ["DECIMAL '1'", "1"],
22 |             [
23 |                 """JSON '{"bar": "baz", "balance": 7.77, "active": false}'""",
24 |                 """'{"bar": "baz", "balance": 7.77, "active": false}'""",
25 |             ],
26 |         ]
27 | 
28 | 
29 | # JSON type is not supported on object storage connectors
30 | @pytest.mark.skip_profile("trino_starburst")
31 | class TestTrinoUnitTestingTypesGalaxy(BaseUnitTestingTypes):
32 |     @pytest.fixture
33 |     def data_types(self):
34 |         # sql_value, yaml_value
35 |         return [
36 |             ["1", "1"],
37 |             ["'1'", "1"],
38 |             ["true", "true"],
39 |             ["DATE '2020-01-02'", "2020-01-02"],
40 |             ["TIMESTAMP '2013-11-03 00:00:00'", "2013-11-03 00:00:00"],
41 |             ["TIMESTAMP '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"],
42 |             ["DECIMAL '1'", "1"],
43 |         ]
44 | 
45 | 
46 | class TestTrinoUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity):
47 |     pass
48 | 
49 | 
50 | class TestTrinoUnitTestInvalidInput(BaseUnitTestInvalidInput):
51 |     pass
52 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/fixture_date_spine.py:
--------------------------------------------------------------------------------
 1 | # If date_spine works properly, there should be no `null` values in the resulting model (the spine is end-exclusive, so 2023-09-01..2023-09-10 yields nine dates)
 2 | 
 3 | models__trino_test_date_spine_sql = """
 4 | with generated_dates as (
 5 |     {{ date_spine("day", "'2023-09-01'", "'2023-09-10'") }}
 6 | ), expected_dates as (
 7 |     select cast('2023-09-01' as date) as expected
 8 |     union all
 9 |     select cast('2023-09-02' as date) as expected
10 |     union all
11 |     select cast('2023-09-03' as date) as expected
12 |     union all
13 |     select cast('2023-09-04' as date) as expected
14 |     union all
15 |     select cast('2023-09-05' as date) as expected
16 |     union all
17 |     select cast('2023-09-06' as date) as expected
18 |     union all
19 |     select cast('2023-09-07' as date) as expected
20 |     union all
21 |     select cast('2023-09-08' as date) as expected
22 |     union all
23 |     select cast('2023-09-09' as date) as expected
24 | ), joined as (
25 |     select
26 |         generated_dates.date_day,
27 |         expected_dates.expected
28 |     from generated_dates
29 |     left join expected_dates on generated_dates.date_day = expected_dates.expected
30 | )
31 | 
32 | SELECT * from joined
33 | """
34 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/fixture_get_intervals_between.py:
--------------------------------------------------------------------------------
1 | models__trino_test_get_intervals_between_sql = """
2 | SELECT
3 |   {{ get_intervals_between("'2023-09-01'", "'2023-09-12'", "day") }} as intervals,
4 |   11 as expected
5 | 
6 | """
7 | 
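Note: the expected value is 11 because there are eleven whole days between 2023-09-01 and 2023-09-12.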


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/test_data_types.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt
 3 | from dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean
 4 | from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat
 5 | from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt
 6 | from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric
 7 | from dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString
 8 | from dbt.tests.adapter.utils.data_types.test_type_timestamp import BaseTypeTimestamp
 9 | 
10 | 
11 | class TestTypeBigInt(BaseTypeBigInt):
12 |     pass
13 | 
14 | 
15 | class TestTypeFloat(BaseTypeFloat):
16 |     pass
17 | 
18 | 
19 | class TestTypeInt(BaseTypeInt):
20 |     pass
21 | 
22 | 
23 | class TestTypeNumeric(BaseTypeNumeric):
24 |     def numeric_fixture_type(self):
25 |         return "decimal(28,6)"
26 | 
27 | 
28 | class TestTypeString(BaseTypeString):
29 |     pass
30 | 
31 | 
32 | # TODO: Re-enable when https://github.com/trinodb/trino/pull/13981 is merged
33 | @pytest.mark.skip_profile("starburst_galaxy")
34 | class TestTypeTimestamp(BaseTypeTimestamp):
35 |     pass
36 | 
37 | 
38 | class TestTypeBoolean(BaseTypeBoolean):
39 |     pass
40 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/test_date_spine.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.utils.base_utils import BaseUtils
 3 | from dbt.tests.adapter.utils.fixture_date_spine import models__test_date_spine_yml
 4 | 
 5 | from tests.functional.adapter.utils.fixture_date_spine import (
 6 |     models__trino_test_date_spine_sql,
 7 | )
 8 | 
 9 | 
10 | class BaseDateSpine(BaseUtils):
11 |     @pytest.fixture(scope="class")
12 |     def models(self):
13 |         return {
14 |             "test_date_spine.yml": models__test_date_spine_yml,
15 |             "test_date_spine.sql": self.interpolate_macro_namespace(
16 |                 models__trino_test_date_spine_sql, "date_spine"
17 |             ),
18 |         }
19 | 
20 | 
21 | class TestDateSpine(BaseDateSpine):
22 |     pass
23 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/test_get_intervals_between.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.utils.base_utils import BaseUtils
 3 | from dbt.tests.adapter.utils.fixture_get_intervals_between import (
 4 |     models__test_get_intervals_between_yml,
 5 | )
 6 | 
 7 | from tests.functional.adapter.utils.fixture_get_intervals_between import (
 8 |     models__trino_test_get_intervals_between_sql,
 9 | )
10 | 
11 | 
12 | class BaseGetIntervalsBetween(BaseUtils):
13 |     @pytest.fixture(scope="class")
14 |     def models(self):
15 |         return {
16 |             "test_get_intervals_between.yml": models__test_get_intervals_between_yml,
17 |             "test_get_intervals_between.sql": self.interpolate_macro_namespace(
18 |                 models__trino_test_get_intervals_between_sql, "get_intervals_between"
19 |             ),
20 |         }
21 | 
22 | 
23 | class TestGetIntervalsBetween(BaseGetIntervalsBetween):
24 |     pass
25 | 


--------------------------------------------------------------------------------
/tests/functional/adapter/utils/test_timestamps.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps
 3 | 
 4 | 
 5 | class TestCurrentTimestampTrino(BaseCurrentTimestamps):
 6 |     @pytest.fixture(scope="class")
 7 |     def models(self):
 8 |         return {
 9 |             "get_current_timestamp.sql": 'select {{ current_timestamp() }} as "current_timestamp"'
10 |         }
11 | 
12 |     @pytest.fixture(scope="class")
13 |     def expected_schema(self):
14 |         return {"current_timestamp": "timestamp(3) with time zone"}
15 | 
16 |     @pytest.fixture(scope="class")
17 |     def expected_sql(self):
18 |         return 'select current_timestamp as "current_timestamp"'
19 | 


--------------------------------------------------------------------------------
/tests/unit/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/starburstdata/dbt-trino/8463e99cfbb4fea4b9dbac9f071ed81e42d0120e/tests/unit/__init__.py


--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
 1 | [tox]
 2 | skipsdist = True
 3 | envlist = unit, integration
 4 | 
 5 | [testenv:unit]
 6 | description = unit testing
 7 | basepython = python3
 8 | commands = {envpython} -m pytest -v {posargs} tests/unit
 9 | passenv = DBT_INVOCATION_ENV
10 | deps =
11 |     -r{toxinidir}/dev_requirements.txt
12 |     -e.
13 | 
14 | [testenv:integration]
15 | description = adapter plugin integration testing
16 | basepython = python3
17 | commands = {envpython} -m pytest {posargs} tests/functional
18 | passenv = DBT_INVOCATION_ENV, DBT_TEST_TRINO_HOST, DBT_TEST_USER_1, DBT_TEST_USER_2, DBT_TEST_USER_3
19 | deps =
20 |     -r{toxinidir}/dev_requirements.txt
21 |     -e.
22 | 
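Usage note: `tox -e unit` runs the unit suite and `tox -e integration` the functional suite; arguments after `--` are forwarded to pytest via `{posargs}`, e.g. `tox -e integration -- -k store_failures`.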


--------------------------------------------------------------------------------