├── .changes ├── 0.0.0.md ├── README.md ├── header.tpl.md └── unreleased │ ├── .gitkeep │ ├── Features-20241202-095136.yaml │ ├── Features-20250113-133414.yaml │ ├── Fixes-20241018-173123.yaml │ ├── Fixes-20241127-162204.yaml │ ├── Fixes-20241209-131530.yaml │ ├── Under the Hood-20241117-184430.yaml │ ├── Under the Hood-20241205-144036.yaml │ └── Under the Hood-20241211-170831.yaml ├── .changie.yaml ├── .dockerignore ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ └── config.yml ├── pull_request_template.md ├── scripts │ ├── update_dev_dependency_branches.sh │ └── update_release_branch.sh └── workflows │ ├── backport.yml │ ├── bot-changelog.yml │ ├── changelog-existence.yml │ ├── cut-release-branch.yml │ ├── integration.yml │ ├── main.yml │ ├── nightly-release.yml │ ├── release-branch-tests.yml │ ├── release-internal.yml │ ├── release.yml │ ├── release_prep_hatch.yml │ ├── repository-cleanup.yml │ ├── stale.yml │ └── version-bump.yml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE.md ├── README.md ├── dbt ├── __init__.py ├── adapters │ └── snowflake │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── auth.py │ │ ├── column.py │ │ ├── connections.py │ │ ├── constants.py │ │ ├── impl.py │ │ ├── record │ │ ├── __init__.py │ │ ├── cursor │ │ │ ├── cursor.py │ │ │ ├── sfqid.py │ │ │ └── sqlstate.py │ │ └── handle.py │ │ ├── relation.py │ │ └── relation_configs │ │ ├── __init__.py │ │ ├── base.py │ │ ├── catalog.py │ │ ├── dynamic_table.py │ │ ├── formats.py │ │ └── policies.py └── include │ └── snowflake │ ├── __init__.py │ ├── dbt_project.yml │ ├── macros │ ├── adapters.sql │ ├── apply_grants.sql │ ├── catalog.sql │ ├── materializations │ │ ├── clone.sql │ │ ├── dynamic_table.sql │ │ ├── incremental.sql │ │ ├── merge.sql │ │ ├── seed.sql │ │ ├── snapshot.sql │ │ ├── table.sql │ │ ├── test.sql │ │ └── view.sql │ ├── metadata.sql │ ├── relations │ │ ├── create.sql │ │ ├── create_backup.sql │ │ ├── drop.sql │ │ ├── dynamic_table │ │ │ ├── alter.sql │ │ │ ├── create.sql │ │ │ ├── describe.sql │ │ │ ├── drop.sql │ │ │ ├── refresh.sql │ │ │ └── replace.sql │ │ ├── rename.sql │ │ ├── rename_intermediate.sql │ │ ├── replace.sql │ │ ├── table │ │ │ ├── create.sql │ │ │ ├── drop.sql │ │ │ ├── rename.sql │ │ │ └── replace.sql │ │ └── view │ │ │ ├── create.sql │ │ │ ├── drop.sql │ │ │ ├── rename.sql │ │ │ └── replace.sql │ └── utils │ │ ├── array_construct.sql │ │ ├── bool_or.sql │ │ ├── cast.sql │ │ ├── escape_single_quotes.sql │ │ ├── optional.sql │ │ ├── right.sql │ │ ├── safe_cast.sql │ │ └── timestamps.sql │ └── profile_template.yml ├── docker ├── Dockerfile ├── README.md ├── dev.Dockerfile └── test.sh ├── hatch.toml ├── pyproject.toml ├── scripts ├── build-dist.sh ├── env-setup.sh └── werkzeug-refresh-token.py ├── test.env.example └── tests ├── __init__.py ├── conftest.py ├── functional ├── adapter │ ├── catalog_tests │ │ ├── files.py │ │ └── test_relation_types.py │ ├── column_types │ │ ├── fixtures.py │ │ └── test_column_types.py │ ├── custom_schema_tests │ │ ├── seeds.py │ │ ├── test_custom_database.py │ │ └── test_custom_schema.py │ ├── dbt_clone │ │ └── test_dbt_clone.py │ ├── dbt_show │ │ └── test_dbt_show.py │ ├── empty │ │ └── test_empty.py │ ├── expected_stats.py │ ├── incremental │ │ ├── test_incremental_merge_exclude_columns.py │ │ ├── test_incremental_on_schema_change.py │ │ ├── test_incremental_predicates.py │ │ ├── test_incremental_run_result.py │ │ └── test_incremental_unique_id.py │ ├── list_relations_tests │ │ ├── test_pagination.py │ │ ├── 
test_show_objects.py │ │ └── test_special_characters.py ├── python_model_tests │ │ ├── _files.py │ │ └── test_table_type.py ├── query_comment_tests │ │ └── test_query_comments.py ├── simple_copy │ │ ├── fixtures.py │ │ └── test_simple_copy.py ├── simple_seed │ │ ├── test_simple_seed.py │ │ └── test_simple_seed_override.py ├── statement_test │ │ ├── seeds.py │ │ └── test_statements.py ├── store_test_failures_tests │ │ └── test_store_test_failures.py ├── test_aliases.py │ ├── test_anonymous_usage_stats.py │ ├── test_basic.py │ ├── test_caching.py │ ├── test_changing_relation_type.py │ ├── test_concurrency.py │ ├── test_constraints.py │ ├── test_ephemeral.py │ ├── test_get_last_relation_modified.py │ ├── test_grants.py │ ├── test_incremental_microbatch.py │ ├── test_persist_docs.py │ ├── test_python_model.py │ ├── test_simple_snapshot.py │ ├── test_timestamps.py │ ├── unit_testing │ │ └── test_unit_testing.py │ └── utils │ │ ├── test_data_types.py │ │ └── test_utils.py ├── auth_tests │ ├── test_database_role.py │ ├── test_jwt.py │ ├── test_key_pair.py │ └── test_oauth.py ├── generic_test_tests │ ├── __init__.py │ ├── _files.py │ ├── _models.py │ ├── _schemas.py │ └── test_generic_tests.py ├── iceberg │ ├── models.py │ ├── test_incremental_models.py │ └── test_table_basic.py ├── override_database │ └── test_override_database.py ├── query_tag │ └── test_query_tags.py ├── redact_log_values │ ├── test_duplicate_key_not_in_exceptions.py │ └── test_row_values_not_in_exceptions.py ├── relation_tests │ ├── __init__.py │ ├── base.py │ ├── dynamic_table_tests │ │ ├── __init__.py │ │ ├── models.py │ │ ├── test_basic.py │ │ └── test_configuration_changes.py │ ├── models.py │ ├── test_relation_type_change.py │ ├── test_table.py │ └── test_view.py ├── snowflake_view_dependency │ └── test_snowflake_view_dependency.py ├── test_isolated_begin_commit.py ├── utils.py └── warehouse_test │ └── test_warehouses.py ├── performance ├── README.md └── test_auth_methods.py └── unit ├── __init__.py ├── mock_adapter.py ├── test_adapter_telemetry.py ├── test_connections.py ├── test_iceberg_location.py ├── test_private_keys.py ├── test_relation_as_case_sensitive.py ├── test_renamed_relations.py ├── test_snowflake_adapter.py └── utils.py /.changes/0.0.0.md: -------------------------------------------------------------------------------- 1 | ## Previous Releases 2 | For information on prior major and minor releases, see their changelogs: 3 | - [1.6](https://github.com/dbt-labs/dbt-snowflake/blob/1.6.latest/CHANGELOG.md) 4 | - [1.5](https://github.com/dbt-labs/dbt-snowflake/blob/1.5.latest/CHANGELOG.md) 5 | - [1.4](https://github.com/dbt-labs/dbt-snowflake/blob/1.4.latest/CHANGELOG.md) 6 | - [1.3](https://github.com/dbt-labs/dbt-snowflake/blob/1.3.latest/CHANGELOG.md) 7 | - [1.2](https://github.com/dbt-labs/dbt-snowflake/blob/1.2.latest/CHANGELOG.md) 8 | - [1.1](https://github.com/dbt-labs/dbt-snowflake/blob/1.1.latest/CHANGELOG.md) 9 | - [1.0](https://github.com/dbt-labs/dbt-snowflake/blob/1.0.latest/CHANGELOG.md) 10 | -------------------------------------------------------------------------------- /.changes/README.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | 4 | To view information about how the changelog operates, we suggest reading the [README](https://github.com/dbt-labs/dbt-core/blob/main/.changes/README.md) found in `dbt-core`.
5 | -------------------------------------------------------------------------------- /.changes/header.tpl.md: -------------------------------------------------------------------------------- 1 | # dbt-snowflake Changelog 2 | 3 | - This file provides a full account of all changes to `dbt-snowflake`. 4 | - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. 5 | - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. 6 | - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-snowflake/blob/main/CONTRIBUTING.md#adding-changelog-entry) 7 | -------------------------------------------------------------------------------- /.changes/unreleased/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/.changes/unreleased/.gitkeep -------------------------------------------------------------------------------- /.changes/unreleased/Features-20241202-095136.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: Support MicrobatchConcurrency 3 | time: 2024-12-02T09:51:36.606097-05:00 4 | custom: 5 | Author: michelleark 6 | Issue: "1260" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Features-20250113-133414.yaml: -------------------------------------------------------------------------------- 1 | kind: Features 2 | body: Added support for custom iceberg base_location_root 3 | time: 2025-01-13T13:34:14.326047-08:00 4 | custom: 5 | Author: LProcopi15 6 | Issue: "1284" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Fixes-20241018-173123.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: Fix parsing of the VECTOR type 3 | time: 2024-10-18T17:31:23.931299-04:00 4 | custom: 5 | Author: achawkins 6 | Issue: "1098" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Fixes-20241127-162204.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: Use timestamp_tz type in microbatch `delete` DDL 3 | time: 2024-11-27T16:22:04.103212-05:00 4 | custom: 5 | Author: michelleark 6 | Issue: "1256" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Fixes-20241209-131530.yaml: -------------------------------------------------------------------------------- 1 | kind: Fixes 2 | body: AUTO should no longer lead to rebuilds of dynamic tables. 
3 | time: 2024-12-09T13:15:30.554566-08:00 4 | custom: 5 | Author: versusfacit 6 | Issue: "1267" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Under the Hood-20241117-184430.yaml: -------------------------------------------------------------------------------- 1 | kind: Under the Hood 2 | body: Move from setup.py to pyproject.toml and to hatch as a dev tool 3 | time: 2024-11-17T18:44:30.85288-05:00 4 | custom: 5 | Author: mikealfare 6 | Issue: "1250" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Under the Hood-20241205-144036.yaml: -------------------------------------------------------------------------------- 1 | kind: Under the Hood 2 | body: Use new `batch` context variables over `node.config.__dbt_internal` ones 3 | time: 2024-12-05T14:40:36.373637-05:00 4 | custom: 5 | Author: michelleark 6 | Issue: "1263" 7 | -------------------------------------------------------------------------------- /.changes/unreleased/Under the Hood-20241211-170831.yaml: -------------------------------------------------------------------------------- 1 | kind: Under the Hood 2 | body: Update default Python version for python models 3 | time: 2024-12-11T17:08:31.842063-05:00 4 | custom: 5 | Author: mikealfare 6 | Issue: "1203" 7 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !docker_dev 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This codeowners file is used to ensure all PRs require reviews from the adapters team 2 | 3 | * @dbt-labs/adapters 4 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Ask the community for help 4 | url: https://github.com/dbt-labs/docs.getdbt.com/discussions 5 | about: Need help troubleshooting? Check out our guide on how to ask 6 | - name: Contact dbt Cloud support 7 | url: mailto:support@getdbt.com 8 | about: Are you using dbt Cloud? Contact our support team for help! 9 | - name: Participate in Discussions 10 | url: https://github.com/dbt-labs/dbt-adapters/discussions 11 | about: Do you have a Big Idea for dbt-snowflake? 
Read open discussions, or start a new one 12 | - name: Create an issue for dbt-snowflake 13 | url: https://github.com/dbt-labs/dbt-adapters/issues/new/choose 14 | about: Report a bug or request a feature for dbt-snowflake 15 | - name: Create an issue for dbt-core 16 | url: https://github.com/dbt-labs/dbt-core/issues/new/choose 17 | about: Report a bug or request a feature for dbt-core 18 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 4 | -------------------------------------------------------------------------------- /.github/scripts/update_dev_dependency_branches.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | set -e 3 | 4 | 5 | dbt_adapters_branch=$1 6 | dbt_core_branch=$2 7 | dbt_common_branch=$3 8 | target_req_file="hatch.toml" 9 | core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${dbt_core_branch}#subdirectory=core|g" 10 | adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${dbt_adapters_branch}|g" 11 | tests_req_sed_pattern="s|dbt-adapters.git.*#subdirectory=dbt-tests-adapter|dbt-adapters.git@${dbt_adapters_branch}#subdirectory=dbt-tests-adapter|g" 12 | common_req_sed_pattern="s|dbt-common.git|dbt-common.git@${dbt_common_branch}|g" 13 | if [[ "$OSTYPE" == darwin* ]]; then 14 | # mac ships with a different version of sed that requires a delimiter arg 15 | sed -i "" "$adapters_req_sed_pattern" $target_req_file 16 | sed -i "" "$tests_req_sed_pattern" $target_req_file 17 | sed -i "" "$core_req_sed_pattern" $target_req_file 18 | sed -i "" "$common_req_sed_pattern" $target_req_file 19 | else 20 | sed -i "$adapters_req_sed_pattern" $target_req_file 21 | sed -i "$tests_req_sed_pattern" $target_req_file 22 | sed -i "$core_req_sed_pattern" $target_req_file 23 | sed -i "$common_req_sed_pattern" $target_req_file 24 | fi 25 | -------------------------------------------------------------------------------- /.github/scripts/update_release_branch.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash -e 2 | set -e 3 | 4 | release_branch=$1 5 | target_req_file=".github/workflows/nightly-release.yml" 6 | if [[ "$OSTYPE" == darwin* ]]; then 7 | # mac ships with a different version of sed that requires a delimiter arg 8 | sed -i "" "s|[0-9].[0-9].latest|$release_branch|" $target_req_file 9 | else 10 | sed -i "s|[0-9].[0-9].latest|$release_branch|" $target_req_file 11 | fi 12 | -------------------------------------------------------------------------------- /.github/workflows/backport.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # When a PR is merged, if it has the backport label, it will create 3 | # a new PR to backport those changes to the given branch. If it can't 4 | # cleanly do a backport, it will comment on the merged PR of the failure. 5 | # 6 | # Label naming convention: "backport " 7 | # Example: backport 1.0.latest 8 | # 9 | # You MUST "Squash and merge" the original PR or this won't work. 10 | 11 | # **why?** 12 | # Changes sometimes need to be backported to release branches. 
13 | # This automates the backporting process 14 | 15 | # **when?** 16 | # Once a PR is "Squash and merge"'d, by adding a backport label, this is triggered 17 | 18 | name: Backport 19 | on: 20 | pull_request: 21 | types: 22 | - labeled 23 | 24 | permissions: 25 | contents: write 26 | pull-requests: write 27 | 28 | jobs: 29 | backport: 30 | name: Backport 31 | runs-on: ubuntu-latest 32 | # Only react to merged PRs for security reasons. 33 | # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target. 34 | if: > 35 | github.event.pull_request.merged 36 | && contains(github.event.label.name, 'backport') 37 | steps: 38 | - uses: tibdex/backport@v2 39 | with: 40 | github_token: ${{ secrets.GITHUB_TOKEN }} 41 | -------------------------------------------------------------------------------- /.github/workflows/bot-changelog.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # When bots create a PR, this action will add a corresponding changie yaml file to that 3 | # PR when a specific label is added. 4 | # 5 | # The file is created off a template: 6 | # 7 | # kind: 8 | # body: 9 | # time: 10 | # custom: 11 | # Author: 12 | # PR: 13 | # 14 | # **why?** 15 | # Automate changelog generation for more visibility with automated bot PRs. 16 | # 17 | # **when?** 18 | # Once a PR is created, a label should be added to the PR before or after creation. You can also 19 | # manually trigger this by adding the appropriate label at any time. 20 | # 21 | # **how to add another bot?** 22 | # Add the label and changie kind to the include matrix. That's it! 23 | # 24 | 25 | name: Bot Changelog 26 | 27 | on: 28 | pull_request: 29 | # catch when the PR is opened with the label or when the label is added 30 | types: [opened, labeled] 31 | 32 | permissions: 33 | contents: write 34 | pull-requests: read 35 | 36 | jobs: 37 | generate_changelog: 38 | strategy: 39 | matrix: 40 | include: 41 | - label: "dependencies" 42 | changie_kind: "Dependencies" 43 | - label: "snyk" 44 | changie_kind: "Security" 45 | runs-on: ubuntu-latest 46 | 47 | steps: 48 | - name: Create and commit changelog on bot PR 49 | if: "contains(github.event.pull_request.labels.*.name, ${{ matrix.label }})" 50 | id: bot_changelog 51 | uses: emmyoop/changie_bot@v1 52 | with: 53 | GITHUB_TOKEN: ${{ secrets.FISHTOWN_BOT_PAT }} 54 | commit_author_name: "Github Build Bot" 55 | commit_author_email: "" 56 | commit_message: "Add automated changelog yaml from template for bot PR" 57 | changie_kind: ${{ matrix.changie_kind }} 58 | label: ${{ matrix.label }} 59 | custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}" 60 | --------------------------------------------------------------------------------
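For illustration of the template in the workflow above: the bot writes a changie entry into `.changes/unreleased/`, shaped like the hand-written entries earlier in this listing. A hypothetical example (all values invented here) for a `dependencies`-labeled PR might look like:

```yaml
kind: Dependencies
body: Bump some-dependency from 1.0.0 to 1.1.0
time: 2024-12-01T00:00:00.000000-05:00
custom:
  Author: dependabot[bot]
  PR: "1234"
```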
/.github/workflows/changelog-existence.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # Checks that a file has been committed under the /.changes directory 3 | # as a new CHANGELOG entry. Cannot check for a specific filename as 4 | # it is dynamically generated by change type and timestamp. 5 | # This workflow should not require any secrets since it runs for PRs 6 | # from forked repos. 7 | # By default, secrets are not passed to workflows running from 8 | # a forked repo. 9 | 10 | # **why?** 11 | # Ensure code changes get reflected in the CHANGELOG. 12 | 13 | # **when?** 14 | # This will run for all PRs going into main and *.latest. It will 15 | # run when they are opened, reopened, when any label is added or removed 16 | # and when new code is pushed to the branch. The action will then get 17 | # skipped if the 'Skip Changelog' label is present in any of the labels. 18 | 19 | name: Check Changelog Entry 20 | 21 | on: 22 | pull_request: 23 | types: [opened, reopened, labeled, unlabeled, synchronize] 24 | workflow_dispatch: 25 | 26 | defaults: 27 | run: 28 | shell: bash 29 | 30 | permissions: 31 | contents: read 32 | pull-requests: write 33 | 34 | jobs: 35 | changelog: 36 | uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main 37 | with: 38 | changelog_comment: "Thank you for your pull request! We could not find a changelog entry for this change. For details on how to document a change, see the [dbt-snowflake contributing guide](https://github.com/dbt-labs/dbt-snowflake/blob/main/CONTRIBUTING.md)." 39 | skip_label: "Skip Changelog" 40 | secrets: inherit # this is only acceptable because we own the action we're calling 41 | -------------------------------------------------------------------------------- /.github/workflows/cut-release-branch.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # Calls a centralized workflow that will: 3 | # 1. Cut a new branch (generally `*.latest`) 4 | # 2. Also cleans up all files in `.changes/unreleased` and `.changes/previous version` on 5 | # `main` and bumps `main` to the input version. 6 | 7 | # **why?** 8 | # Generally reduces the workload of engineers and reduces error. Allow automation. 9 | 10 | # **when?** 11 | # This will run when called manually. 12 | 13 | name: Cut new release branch 14 | 15 | on: 16 | workflow_dispatch: 17 | inputs: 18 | version_to_bump_main: 19 | description: 'The alpha version main should bump to (ex. 1.6.0a1)' 20 | required: true 21 | new_branch_name: 22 | description: 'The full name of the new branch (ex. 1.5.latest)' 23 | required: true 24 | 25 | defaults: 26 | run: 27 | shell: bash 28 | 29 | permissions: 30 | contents: write 31 | 32 | jobs: 33 | cut_branch: 34 | name: "Cut branch and clean up main for dbt-snowflake" 35 | uses: dbt-labs/actions/.github/workflows/cut-release-branch.yml@main 36 | with: 37 | version_to_bump_main: ${{ inputs.version_to_bump_main }} 38 | new_branch_name: ${{ inputs.new_branch_name }} 39 | PR_title: "Cleanup main after cutting new ${{ inputs.new_branch_name }} branch" 40 | PR_body: "This PR will fail CI until the dbt-core PR has been merged due to release version conflicts." 41 | secrets: 42 | FISHTOWN_BOT_PAT: ${{ secrets.FISHTOWN_BOT_PAT }} 43 | -------------------------------------------------------------------------------- /.github/workflows/release-branch-tests.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # The purpose of this workflow is to trigger CI to run for each release 3 | # branch on a regular cadence. If the CI workflow fails for a branch, it 4 | # will post to dev-core-alerts to raise awareness. 5 | 6 | # **why?** 7 | # Ensures release branches are always shippable and not broken. 8 | # Also, can catch any dependencies shifting beneath us that might 9 | # introduce breaking changes (could also impact Cloud). 10 | 11 | # **when?** 12 | # Once each morning.
Manual trigger can also test on demand 13 | 14 | name: Release branch scheduled testing 15 | 16 | on: 17 | schedule: 18 | - cron: '0 12 * * *' # 12:00 UTC 19 | 20 | workflow_dispatch: # for manual triggering 21 | 22 | # no special access is needed 23 | permissions: read-all 24 | 25 | jobs: 26 | run_tests: 27 | uses: dbt-labs/actions/.github/workflows/release-branch-tests.yml@main 28 | with: 29 | workflows_to_run: '["main.yml", "integration.yml"]' 30 | include_main: false 31 | secrets: inherit 32 | -------------------------------------------------------------------------------- /.github/workflows/release-internal.yml: -------------------------------------------------------------------------------- 1 | # What? 2 | # 3 | # Tag and release an arbitrary ref. Uploads to an internal archive for further processing. 4 | # 5 | # How? 6 | # 7 | # After checking out and testing the provided ref, the image is built and uploaded. 8 | # 9 | # When? 10 | # 11 | # Manual trigger. 12 | 13 | name: "Release to Cloud" 14 | run-name: "Release to Cloud off of ${{ inputs.ref }}" 15 | 16 | on: 17 | workflow_dispatch: 18 | inputs: 19 | ref: 20 | description: "The ref (sha or branch name) to use" 21 | type: string 22 | default: "main" 23 | required: true 24 | package_test_command: 25 | description: "Package test command" 26 | type: string 27 | default: "python -c \"import dbt.adapters.snowflake\"" 28 | required: true 29 | skip_tests: 30 | description: "Should the tests be skipped? (default to false)" 31 | type: boolean 32 | required: true 33 | default: false 34 | 35 | defaults: 36 | run: 37 | shell: bash 38 | 39 | jobs: 40 | invoke-reusable-workflow: 41 | name: "Build and Release Internally" 42 | 43 | uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@main" 44 | 45 | with: 46 | package_test_command: "${{ inputs.package_test_command }}" 47 | dbms_name: "snowflake" 48 | ref: "${{ inputs.ref }}" 49 | skip_tests: "${{ inputs.skip_tests }}" 50 | 51 | secrets: "inherit" 52 | -------------------------------------------------------------------------------- /.github/workflows/repository-cleanup.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # Cleanup branches left over from automation and testing. Also cleanup 3 | # draft releases from release testing. 4 | 5 | # **why?** 6 | # The automations are leaving behind branches and releases that clutter 7 | # the repository. Sometimes we need them to debug processes so we don't 8 | # want them immediately deleted. Running on Saturday to avoid running 9 | # at the same time as an actual release to prevent breaking a release 10 | # mid-release. 11 | 12 | # **when?** 13 | # Mainly on a schedule of 12:00 Saturday. 
14 | # Manual trigger can also run on demand 15 | 16 | name: Repository Cleanup 17 | 18 | on: 19 | schedule: 20 | - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above 21 | 22 | workflow_dispatch: # for manual triggering 23 | 24 | permissions: 25 | contents: write 26 | 27 | jobs: 28 | cleanup-repo: 29 | uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main 30 | secrets: inherit 31 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: "Close stale issues and PRs" 2 | on: 3 | schedule: 4 | - cron: "30 1 * * *" 5 | 6 | permissions: 7 | issues: write 8 | pull-requests: write 9 | 10 | jobs: 11 | stale: 12 | uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main 13 | -------------------------------------------------------------------------------- /.github/workflows/version-bump.yml: -------------------------------------------------------------------------------- 1 | # **what?** 2 | # This workflow will take the new version number to bump to. With that 3 | # it will run versionbump to update the version number everywhere in the 4 | # code base and then run changie to create the corresponding changelog. 5 | # A PR will be created with the changes that can be reviewed before committing. 6 | 7 | # **why?** 8 | # This is to aid in releasing dbt and making sure we have updated 9 | # the version in all places and generated the changelog. 10 | 11 | # **when?** 12 | # This is triggered manually 13 | 14 | name: Version Bump 15 | 16 | on: 17 | workflow_dispatch: 18 | inputs: 19 | version_number: 20 | description: "The version number to bump to (ex. 1.2.0, 1.3.0b1)" 21 | required: true 22 | 23 | jobs: 24 | version_bump_and_changie: 25 | uses: dbt-labs/actions/.github/workflows/version-bump.yml@main 26 | with: 27 | version_number: ${{ inputs.version_number }} 28 | secrets: inherit # ok since what we are calling is internally maintained 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env*/ 12 | dbt_env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | *.mypy_cache/ 28 | logs/ 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | .env 47 | nosetests.xml 48 | coverage.xml 49 | *,cover 50 | .hypothesis/ 51 | test.env 52 | *.pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | 61 | # Sphinx documentation 62 | docs/_build/ 63 | 64 | # PyBuilder 65 | target/ 66 | 67 | # Ipython Notebook 68 | .ipynb_checkpoints 69 | 70 | # Emacs 71 | *~ 72 | 73 | # Sublime Text 74 | *.sublime-* 75 | 76 | # Vim 77 | *.sw* 78 | 79 | # Pyenv 80 | .python-version 81 | 82 | # Vim 83 | *.sw* 84 | 85 | # pycharm 86 | .idea/ 87 | venv/ 88 | 89 | # AWS credentials 90 | .aws/ 91 | 92 | # MacOS 93 | .DS_Store 94 | 95 | # vscode 96 | .vscode/ 97 | .venv/ 98 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # For more on configuring pre-commit hooks (see https://pre-commit.com/) 2 | default_language_version: 3 | python: python3 4 | 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v4.6.0 8 | hooks: 9 | - id: check-yaml 10 | args: [--unsafe] 11 | - id: check-json 12 | - id: end-of-file-fixer 13 | - id: trailing-whitespace 14 | - id: check-case-conflict 15 | 16 | - repo: https://github.com/dbt-labs/pre-commit-hooks 17 | rev: v0.1.0a1 18 | hooks: 19 | - id: dbt-core-in-adapters-check 20 | 21 | - repo: https://github.com/psf/black 22 | rev: 24.4.2 23 | hooks: 24 | - id: black 25 | args: 26 | - --line-length=99 27 | - --target-version=py39 28 | - --target-version=py310 29 | - --target-version=py311 30 | - --target-version=py312 31 | additional_dependencies: [flaky] 32 | 33 | - repo: https://github.com/pycqa/flake8 34 | rev: 7.0.0 35 | hooks: 36 | - id: flake8 37 | exclude: tests/ 38 | args: 39 | - --max-line-length=99 40 | - --select=E,F,W 41 | - --ignore=E203,E501,E741,W503,W504 42 | - --per-file-ignores=*/__init__.py:F401 43 | 44 | - repo: https://github.com/pre-commit/mirrors-mypy 45 | rev: v1.10.0 46 | hooks: 47 | - id: mypy 48 | args: 49 | - --show-error-codes 50 | - --ignore-missing-imports 51 | - --explicit-package-bases 52 | - --warn-unused-ignores 53 | - --pretty 54 | files: ^dbt/adapters 55 | additional_dependencies: 56 | - types-pytz 57 | - types-requests 58 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # dbt-snowflake Changelog 2 | 3 | - This file provides a full account of all changes to `dbt-snowflake`. 4 | - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. 5 | - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. 6 | - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-snowflake/blob/main/CONTRIBUTING.md#adding-changelog-entry) 7 | 8 | ## Previous Releases 9 | For information on prior major and minor releases, see their changelogs: 10 | - [1.6](https://github.com/dbt-labs/dbt-snowflake/blob/1.6.latest/CHANGELOG.md) 11 | - [1.5](https://github.com/dbt-labs/dbt-snowflake/blob/1.5.latest/CHANGELOG.md) 12 | - [1.4](https://github.com/dbt-labs/dbt-snowflake/blob/1.4.latest/CHANGELOG.md) 13 | - [1.3](https://github.com/dbt-labs/dbt-snowflake/blob/1.3.latest/CHANGELOG.md) 14 | - [1.2](https://github.com/dbt-labs/dbt-snowflake/blob/1.2.latest/CHANGELOG.md) 15 | - [1.1](https://github.com/dbt-labs/dbt-snowflake/blob/1.1.latest/CHANGELOG.md) 16 | - [1.0](https://github.com/dbt-labs/dbt-snowflake/blob/1.0.latest/CHANGELOG.md) 17 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to `dbt-snowflake` 2 | 3 | This repository has moved into [dbt-labs/dbt-adapters](https://www.github.com/dbt-labs/dbt-adapters). 4 | Please refer to that repo for a guide on how to contribute to `dbt-snowflake`. 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | [dbt logo image] 3 |

4 | 5 | This repository has moved into [dbt-labs/dbt-adapters](https://www.github.com/dbt-labs/dbt-adapters). 6 | Please refer to that repo for information about `dbt-snowflake`. 7 | -------------------------------------------------------------------------------- /dbt/__init__.py: -------------------------------------------------------------------------------- 1 | from pkgutil import extend_path 2 | 3 | __path__ = extend_path(__path__, __name__) 4 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.snowflake.column import SnowflakeColumn 2 | from dbt.adapters.snowflake.connections import SnowflakeConnectionManager 3 | from dbt.adapters.snowflake.connections import SnowflakeCredentials 4 | from dbt.adapters.snowflake.relation import SnowflakeRelation 5 | from dbt.adapters.snowflake.impl import SnowflakeAdapter 6 | 7 | from dbt.adapters.base import AdapterPlugin 8 | from dbt.include import snowflake 9 | 10 | Plugin = AdapterPlugin( 11 | adapter=SnowflakeAdapter, credentials=SnowflakeCredentials, include_path=snowflake.PACKAGE_PATH 12 | ) 13 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/__version__.py: -------------------------------------------------------------------------------- 1 | version = "1.10.0a1" 2 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/auth.py: -------------------------------------------------------------------------------- 1 | import base64 2 | import sys 3 | from typing import Optional 4 | 5 | if sys.version_info < (3, 9): 6 | from functools import lru_cache 7 | 8 | cache = lru_cache(maxsize=None) 9 | else: 10 | from functools import cache 11 | 12 | from cryptography.hazmat.backends import default_backend 13 | from cryptography.hazmat.primitives import serialization 14 | from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey 15 | 16 | 17 | @cache 18 | def private_key_from_string( 19 | private_key_string: str, passphrase: Optional[str] = None 20 | ) -> RSAPrivateKey: 21 | 22 | if passphrase: 23 | encoded_passphrase = passphrase.encode() 24 | else: 25 | encoded_passphrase = None 26 | 27 | if private_key_string.startswith("-"): 28 | return serialization.load_pem_private_key( 29 | data=bytes(private_key_string, "utf-8"), 30 | password=encoded_passphrase, 31 | backend=default_backend(), 32 | ) 33 | return serialization.load_der_private_key( 34 | data=base64.b64decode(private_key_string), 35 | password=encoded_passphrase, 36 | backend=default_backend(), 37 | ) 38 | 39 | 40 | @cache 41 | def private_key_from_file( 42 | private_key_path: str, passphrase: Optional[str] = None 43 | ) -> RSAPrivateKey: 44 | 45 | if passphrase: 46 | encoded_passphrase = passphrase.encode() 47 | else: 48 | encoded_passphrase = None 49 | 50 | with open(private_key_path, "rb") as file: 51 | private_key_bytes = file.read() 52 | 53 | return serialization.load_pem_private_key( 54 | data=private_key_bytes, 55 | password=encoded_passphrase, 56 | backend=default_backend(), 57 | ) 58 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/column.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from dbt.adapters.base.column import Column 4 | from dbt_common.exceptions import 
DbtRuntimeError 5 | 6 | 7 | @dataclass 8 | class SnowflakeColumn(Column): 9 | def is_integer(self) -> bool: 10 | # everything that smells like an int is actually a NUMBER(38, 0) 11 | return False 12 | 13 | def is_numeric(self) -> bool: 14 | return self.dtype.lower() in [ 15 | "int", 16 | "integer", 17 | "bigint", 18 | "smallint", 19 | "tinyint", 20 | "byteint", 21 | "numeric", 22 | "decimal", 23 | "number", 24 | ] 25 | 26 | def is_float(self): 27 | return self.dtype.lower() in [ 28 | "float", 29 | "float4", 30 | "float8", 31 | "double", 32 | "double precision", 33 | "real", 34 | ] 35 | 36 | def string_size(self) -> int: 37 | if not self.is_string(): 38 | raise DbtRuntimeError("Called string_size() on non-string field!") 39 | 40 | if self.dtype == "text" or self.char_size is None: 41 | return 16777216 42 | else: 43 | return int(self.char_size) 44 | 45 | @classmethod 46 | def from_description(cls, name: str, raw_data_type: str) -> "SnowflakeColumn": 47 | if "vector" in raw_data_type.lower(): 48 | column = cls(name, raw_data_type, None, None, None) 49 | else: 50 | column = super().from_description(name, raw_data_type) 51 | return column 52 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/constants.py: -------------------------------------------------------------------------------- 1 | DEFAULT_PYTHON_VERSION_FOR_PYTHON_MODELS = "3.9" 2 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/record/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.snowflake.record.cursor.cursor import SnowflakeRecordReplayCursor 2 | from dbt.adapters.snowflake.record.handle import SnowflakeRecordReplayHandle 3 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/record/cursor/cursor.py: -------------------------------------------------------------------------------- 1 | from dbt_common.record import record_function 2 | 3 | from dbt.adapters.record import RecordReplayCursor 4 | from dbt.adapters.snowflake.record.cursor.sfqid import CursorGetSfqidRecord 5 | from dbt.adapters.snowflake.record.cursor.sqlstate import CursorGetSqlStateRecord 6 | 7 | 8 | class SnowflakeRecordReplayCursor(RecordReplayCursor): 9 | """A custom extension of RecordReplayCursor that adds the sqlstate 10 | and sfqid properties which are specific to snowflake-connector.""" 11 | 12 | @property 13 | @record_function(CursorGetSqlStateRecord, method=True, id_field_name="connection_name") 14 | def sqlstate(self): 15 | return self.native_cursor.sqlstate 16 | 17 | @property 18 | @record_function(CursorGetSfqidRecord, method=True, id_field_name="connection_name") 19 | def sfqid(self): 20 | return self.native_cursor.sfqid 21 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/record/cursor/sfqid.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from typing import Optional 3 | 4 | from dbt_common.record import Record, Recorder 5 | 6 | 7 | @dataclasses.dataclass 8 | class CursorGetSfqidParams: 9 | connection_name: str 10 | 11 | 12 | @dataclasses.dataclass 13 | class CursorGetSfqidResult: 14 | msg: Optional[str] 15 | 16 | 17 | @Recorder.register_record_type 18 | class CursorGetSfqidRecord(Record): 19 | params_cls = CursorGetSfqidParams 20 | result_cls = CursorGetSfqidResult 21 | group = "Database" 22 |
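A note on the record/replay pattern used in `record/`: each recorded cursor attribute pairs a params dataclass (keyed by `connection_name`) with a result dataclass, registers both on a `Record` subclass via `@Recorder.register_record_type`, and then exposes the native cursor attribute through `@record_function` on the cursor class. A minimal sketch of adding one more recorded attribute follows; the `CursorGetRowcount*` names are hypothetical and not part of this package:

```python
import dataclasses
from typing import Optional

from dbt_common.record import Record, Recorder, record_function


@dataclasses.dataclass
class CursorGetRowcountParams:
    connection_name: str  # identifies the connection whose cursor call is recorded


@dataclasses.dataclass
class CursorGetRowcountResult:
    msg: Optional[int]  # the value returned by the native cursor


@Recorder.register_record_type
class CursorGetRowcountRecord(Record):
    params_cls = CursorGetRowcountParams
    result_cls = CursorGetRowcountResult
    group = "Database"


# On the cursor subclass, the attribute would then be wrapped the same way
# sqlstate and sfqid are above:
#
#     @property
#     @record_function(CursorGetRowcountRecord, method=True, id_field_name="connection_name")
#     def rowcount(self):
#         return self.native_cursor.rowcount
```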
-------------------------------------------------------------------------------- /dbt/adapters/snowflake/record/cursor/sqlstate.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | from typing import Optional 3 | 4 | from dbt_common.record import Record, Recorder 5 | 6 | 7 | @dataclasses.dataclass 8 | class CursorGetSqlStateParams: 9 | connection_name: str 10 | 11 | 12 | @dataclasses.dataclass 13 | class CursorGetSqlStateResult: 14 | msg: Optional[str] 15 | 16 | 17 | @Recorder.register_record_type 18 | class CursorGetSqlStateRecord(Record): 19 | params_cls = CursorGetSqlStateParams 20 | result_cls = CursorGetSqlStateResult 21 | group = "Database" 22 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/record/handle.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.record import RecordReplayHandle 2 | 3 | from dbt.adapters.snowflake.record.cursor.cursor import SnowflakeRecordReplayCursor 4 | 5 | 6 | class SnowflakeRecordReplayHandle(RecordReplayHandle): 7 | """A custom extension of RecordReplayHandle that returns a 8 | snowflake-connector-specific SnowflakeRecordReplayCursor object.""" 9 | 10 | def cursor(self): 11 | cursor = None if self.native_handle is None else self.native_handle.cursor() 12 | return SnowflakeRecordReplayCursor(cursor, self.connection) 13 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/relation_configs/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.snowflake.relation_configs.catalog import ( 2 | SnowflakeCatalogConfig, 3 | SnowflakeCatalogConfigChange, 4 | ) 5 | from dbt.adapters.snowflake.relation_configs.dynamic_table import ( 6 | RefreshMode, 7 | SnowflakeDynamicTableConfig, 8 | SnowflakeDynamicTableConfigChangeset, 9 | SnowflakeDynamicTableRefreshModeConfigChange, 10 | SnowflakeDynamicTableWarehouseConfigChange, 11 | SnowflakeDynamicTableTargetLagConfigChange, 12 | ) 13 | from dbt.adapters.snowflake.relation_configs.formats import TableFormat 14 | from dbt.adapters.snowflake.relation_configs.policies import ( 15 | SnowflakeIncludePolicy, 16 | SnowflakeQuotePolicy, 17 | SnowflakeRelationType, 18 | ) 19 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/relation_configs/base.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Dict, Optional, TYPE_CHECKING 3 | from dbt.adapters.base.relation import Policy 4 | from dbt.adapters.relation_configs import ( 5 | RelationConfigBase, 6 | RelationResults, 7 | ) 8 | 9 | from dbt.adapters.contracts.relation import ComponentName, RelationConfig 10 | 11 | from dbt.adapters.snowflake.relation_configs.policies import ( 12 | SnowflakeIncludePolicy, 13 | SnowflakeQuotePolicy, 14 | ) 15 | 16 | if TYPE_CHECKING: 17 | # Imported further down in the file for the specific row-gathering function. 18 | import agate 19 | 20 | 21 | @dataclass(frozen=True, eq=True, unsafe_hash=True) 22 | class SnowflakeRelationConfigBase(RelationConfigBase): 23 | """ 24 | This base class implements a few boilerplate methods and provides some light structure for Snowflake relations.
25 | """ 26 | 27 | @classmethod 28 | def include_policy(cls) -> Policy: 29 | return SnowflakeIncludePolicy() 30 | 31 | @classmethod 32 | def quote_policy(cls) -> Policy: 33 | return SnowflakeQuotePolicy() 34 | 35 | @classmethod 36 | def from_relation_config(cls, relation_config: RelationConfig): 37 | relation_config_dict = cls.parse_relation_config(relation_config) 38 | relation = cls.from_dict(relation_config_dict) 39 | return relation 40 | 41 | @classmethod 42 | def parse_relation_config(cls, relation_config: RelationConfig) -> Dict: 43 | raise NotImplementedError( 44 | "`parse_relation_config()` needs to be implemented on this RelationConfigBase instance" 45 | ) 46 | 47 | @classmethod 48 | def from_relation_results(cls, relation_results: RelationResults): 49 | relation_config = cls.parse_relation_results(relation_results) 50 | relation = cls.from_dict(relation_config) 51 | return relation 52 | 53 | @classmethod 54 | def parse_relation_results(cls, relation_results: RelationResults) -> Dict[str, Any]: 55 | raise NotImplementedError( 56 | "`parse_relation_results()` needs to be implemented on this RelationConfigBase instance" 57 | ) 58 | 59 | @classmethod 60 | def _render_part(cls, component: ComponentName, value: Optional[str]) -> Optional[str]: 61 | if cls.include_policy().get_part(component) and value: 62 | if cls.quote_policy().get_part(component): 63 | return f'"{value}"' 64 | return value.lower() 65 | return None 66 | 67 | @classmethod 68 | def _get_first_row(cls, results: "agate.Table") -> "agate.Row": 69 | try: 70 | return results.rows[0] 71 | except IndexError: 72 | import agate 73 | 74 | return agate.Row(values=set()) 75 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/relation_configs/formats.py: -------------------------------------------------------------------------------- 1 | from dbt_common.dataclass_schema import StrEnum # doesn't exist in standard library until py3.11 2 | from typing_extensions import Self 3 | 4 | 5 | class TableFormat(StrEnum): 6 | """ 7 | Snowflake docs refer to this as an 'Object Format.' 8 | Data practitioners and interfaces refer to this as a 'Table Format', hence the term's use here.
9 | """ 10 | 11 | DEFAULT = "default" 12 | ICEBERG = "iceberg" 13 | 14 | @classmethod 15 | def default(cls) -> Self: 16 | return cls("default") 17 | 18 | def __str__(self): 19 | return self.value 20 | -------------------------------------------------------------------------------- /dbt/adapters/snowflake/relation_configs/policies.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from dbt.adapters.base.relation import Policy 4 | from dbt_common.dataclass_schema import StrEnum 5 | 6 | 7 | class SnowflakeRelationType(StrEnum): 8 | Table = "table" 9 | View = "view" 10 | CTE = "cte" 11 | External = "external" 12 | DynamicTable = "dynamic_table" 13 | 14 | 15 | class SnowflakeIncludePolicy(Policy): 16 | database: bool = True 17 | schema: bool = True 18 | identifier: bool = True 19 | 20 | 21 | @dataclass 22 | class SnowflakeQuotePolicy(Policy): 23 | database: bool = False 24 | schema: bool = False 25 | identifier: bool = False 26 | -------------------------------------------------------------------------------- /dbt/include/snowflake/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | PACKAGE_PATH = os.path.dirname(__file__) 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/dbt_project.yml: -------------------------------------------------------------------------------- 1 | config-version: 2 2 | name: dbt_snowflake 3 | version: 1.0 4 | 5 | macro-paths: ["macros"] 6 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/apply_grants.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__copy_grants() %} 2 | {% set copy_grants = config.get('copy_grants', False) %} 3 | {{ return(copy_grants) }} 4 | {% endmacro %} 5 | 6 | {%- macro snowflake__support_multiple_grantees_per_dcl_statement() -%} 7 | {{ return(False) }} 8 | {%- endmacro -%} 9 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/clone.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__can_clone_table() %} 2 | {{ return(True) }} 3 | {% endmacro %} 4 | 5 | {% macro snowflake__create_or_replace_clone(this_relation, defer_relation) %} 6 | create or replace 7 | {{ "transient" if config.get("transient", true) }} 8 | table {{ this_relation }} 9 | clone {{ defer_relation }} 10 | {{ "copy grants" if config.get("copy_grants", false) }} 11 | {% endmacro %} 12 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/dynamic_table.sql: -------------------------------------------------------------------------------- 1 | {% materialization dynamic_table, adapter='snowflake' %} 2 | 3 | {% set query_tag = set_query_tag() %} 4 | 5 | {% set existing_relation = load_cached_relation(this) %} 6 | {% set target_relation = this.incorporate(type=this.DynamicTable) %} 7 | 8 | {{ run_hooks(pre_hooks) }} 9 | 10 | {% set build_sql = dynamic_table_get_build_sql(existing_relation, target_relation) %} 11 | 12 | {% if build_sql == '' %} 13 | {{ dynamic_table_execute_no_op(target_relation) }} 14 | {% else %} 15 | {{ dynamic_table_execute_build_sql(build_sql, existing_relation, target_relation) }} 16 | {% endif %} 17 | 18 | {{ run_hooks(post_hooks) }} 19 | 20 
| {% do unset_query_tag(query_tag) %} 21 | 22 | {{ return({'relations': [target_relation]}) }} 23 | 24 | {% endmaterialization %} 25 | 26 | 27 | {% macro dynamic_table_get_build_sql(existing_relation, target_relation) %} 28 | 29 | {% set full_refresh_mode = should_full_refresh() %} 30 | 31 | -- determine the scenario we're in: create, full_refresh, alter, refresh data 32 | {% if existing_relation is none %} 33 | {% set build_sql = get_create_sql(target_relation, sql) %} 34 | {% elif full_refresh_mode or not existing_relation.is_dynamic_table %} 35 | {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %} 36 | {% else %} 37 | 38 | -- get config options 39 | {% set on_configuration_change = config.get('on_configuration_change') %} 40 | {% set configuration_changes = snowflake__get_dynamic_table_configuration_changes(existing_relation, config) %} 41 | 42 | {% if configuration_changes is none %} 43 | {% set build_sql = '' %} 44 | {{ exceptions.warn("No configuration changes were identified on: `" ~ target_relation ~ "`. Continuing.") }} 45 | 46 | {% elif on_configuration_change == 'apply' %} 47 | {% set build_sql = snowflake__get_alter_dynamic_table_as_sql(existing_relation, configuration_changes, target_relation, sql) %} 48 | {% elif on_configuration_change == 'continue' %} 49 | {% set build_sql = '' %} 50 | {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation ~ "`") }} 51 | {% elif on_configuration_change == 'fail' %} 52 | {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation ~ "`") }} 53 | 54 | {% else %} 55 | -- this only happens if the user provides a value other than `apply`, 'continue', 'fail' 56 | {{ exceptions.raise_compiler_error("Unexpected configuration scenario: `" ~ on_configuration_change ~ "`") }} 57 | 58 | {% endif %} 59 | 60 | {% endif %} 61 | 62 | {% do return(build_sql) %} 63 | 64 | {% endmacro %} 65 | 66 | 67 | {% macro dynamic_table_execute_no_op(relation) %} 68 | {% do store_raw_result( 69 | name="main", 70 | message="skip " ~ relation, 71 | code="skip", 72 | rows_affected="-1" 73 | ) %} 74 | {% endmacro %} 75 | 76 | 77 | {% macro dynamic_table_execute_build_sql(build_sql, existing_relation, target_relation) %} 78 | 79 | {% set grant_config = config.get('grants') %} 80 | 81 | {% call statement(name="main") %} 82 | {{ build_sql }} 83 | {% endcall %} 84 | 85 | {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} 86 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 87 | 88 | {% do persist_docs(target_relation, model) %} 89 | 90 | {% endmacro %} 91 | 92 | 93 | {% macro snowflake__get_dynamic_table_configuration_changes(existing_relation, new_config) -%} 94 | {% set _existing_dynamic_table = snowflake__describe_dynamic_table(existing_relation) %} 95 | {% set _configuration_changes = existing_relation.dynamic_table_config_changeset(_existing_dynamic_table, new_config.model) %} 96 | {% do return(_configuration_changes) %} 97 | {%- endmacro %} 98 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/merge.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_merge_sql(target, source_sql, unique_key, dest_columns, incremental_predicates) -%} 2 | 3 | {# 4 | Workaround for Snowflake not 
being happy with a merge on a constant-false predicate. 5 | When no unique_key is provided, this macro will do a regular insert. If a unique_key 6 | is provided, then this macro will do a proper merge instead. 7 | #} 8 | 9 | {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute='name')) -%} 10 | {%- set sql_header = config.get('sql_header', none) -%} 11 | 12 | {%- set dml -%} 13 | {%- if unique_key is none -%} 14 | 15 | {{ sql_header if sql_header is not none }} 16 | 17 | insert into {{ target }} ({{ dest_cols_csv }}) 18 | ( 19 | select {{ dest_cols_csv }} 20 | from {{ source_sql }} 21 | ) 22 | 23 | {%- else -%} 24 | 25 | {{ default__get_merge_sql(target, source_sql, unique_key, dest_columns, incremental_predicates) }} 26 | 27 | {%- endif -%} 28 | {%- endset -%} 29 | 30 | {% do return(snowflake_dml_explicit_transaction(dml)) %} 31 | 32 | {% endmacro %} 33 | 34 | 35 | {% macro snowflake__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %} 36 | {% set dml = default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) %} 37 | {% do return(snowflake_dml_explicit_transaction(dml)) %} 38 | {% endmacro %} 39 | 40 | 41 | {% macro snowflake__snapshot_merge_sql(target, source, insert_cols) %} 42 | {% set dml = default__snapshot_merge_sql(target, source, insert_cols) %} 43 | {% do return(snowflake_dml_explicit_transaction(dml)) %} 44 | {% endmacro %} 45 | 46 | 47 | {% macro snowflake__get_incremental_append_sql(get_incremental_append_sql) %} 48 | {% set dml = default__get_incremental_append_sql(get_incremental_append_sql) %} 49 | {% do return(snowflake_dml_explicit_transaction(dml)) %} 50 | {% endmacro %} 51 | 52 | 53 | {% macro snowflake__get_incremental_microbatch_sql(arg_dict) %} 54 | {%- set target = arg_dict["target_relation"] -%} 55 | {%- set source = arg_dict["temp_relation"] -%} 56 | {%- set dest_columns = arg_dict["dest_columns"] -%} 57 | {%- set incremental_predicates = [] if arg_dict.get('incremental_predicates') is none else arg_dict.get('incremental_predicates') -%} 58 | 59 | {#-- Add additional incremental_predicates to filter for batch --#} 60 | {% if model.batch and model.batch.event_time_start -%} 61 | {% do incremental_predicates.append("DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " >= to_timestamp_tz('" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "')") %} 62 | {% endif %} 63 | {% if model.batch and model.batch.event_time_end -%} 64 | {% do incremental_predicates.append("DBT_INTERNAL_TARGET." 
~ model.config.event_time ~ " < to_timestamp_tz('" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "')") %} 65 | {% endif %} 66 | {% do arg_dict.update({'incremental_predicates': incremental_predicates}) %} 67 | 68 | delete from {{ target }} DBT_INTERNAL_TARGET 69 | where ( 70 | {% for predicate in incremental_predicates %} 71 | {%- if not loop.first %}and {% endif -%} {{ predicate }} 72 | {% endfor %} 73 | ); 74 | 75 | {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} 76 | insert into {{ target }} ({{ dest_cols_csv }}) 77 | ( 78 | select {{ dest_cols_csv }} 79 | from {{ source }} 80 | ) 81 | {% endmacro %} 82 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/seed.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__load_csv_rows(model, agate_table) %} 2 | {% set batch_size = get_batch_size() %} 3 | {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} 4 | {% set bindings = [] %} 5 | 6 | {% set statements = [] %} 7 | 8 | {% for chunk in agate_table.rows | batch(batch_size) %} 9 | {% set bindings = [] %} 10 | 11 | {% for row in chunk %} 12 | {% do bindings.extend(row) %} 13 | {% endfor %} 14 | 15 | {% set sql %} 16 | insert into {{ this.render() }} ({{ cols_sql }}) values 17 | {% for row in chunk -%} 18 | ({%- for column in agate_table.column_names -%} 19 | %s 20 | {%- if not loop.last%},{%- endif %} 21 | {%- endfor -%}) 22 | {%- if not loop.last%},{%- endif %} 23 | {%- endfor %} 24 | {% endset %} 25 | 26 | {% do adapter.add_query('BEGIN', auto_begin=False) %} 27 | {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} 28 | {% do adapter.add_query('COMMIT', auto_begin=False) %} 29 | 30 | {% if loop.index0 == 0 %} 31 | {% do statements.append(sql) %} 32 | {% endif %} 33 | {% endfor %} 34 | 35 | {# Return SQL so we can render it out into the compiled files #} 36 | {{ return(statements[0]) }} 37 | {% endmacro %} 38 | 39 | {% materialization seed, adapter='snowflake' %} 40 | {% set original_query_tag = set_query_tag() %} 41 | 42 | {% set relations = materialization_seed_default() %} 43 | 44 | {% do unset_query_tag(original_query_tag) %} 45 | 46 | {{ return(relations) }} 47 | {% endmaterialization %} 48 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/snapshot.sql: -------------------------------------------------------------------------------- 1 | {% materialization snapshot, adapter='snowflake' %} 2 | {% set original_query_tag = set_query_tag() %} 3 | {% set relations = materialization_snapshot_default() %} 4 | 5 | {% do unset_query_tag(original_query_tag) %} 6 | 7 | {{ return(relations) }} 8 | {% endmaterialization %} 9 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/table.sql: -------------------------------------------------------------------------------- 1 | {% materialization table, adapter='snowflake', supported_languages=['sql', 'python']%} 2 | 3 | {% set original_query_tag = set_query_tag() %} 4 | 5 | {%- set identifier = model['alias'] -%} 6 | {%- set language = model['language'] -%} 7 | 8 | {% set grant_config = config.get('grants') %} 9 | 10 | {%- set existing_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} 11 | {%- set target_relation = api.Relation.create( 
12 | identifier=identifier, 13 | schema=schema, 14 | database=database, 15 | type='table', 16 | table_format=config.get('table_format', 'default') 17 | ) -%} 18 | 19 | {{ run_hooks(pre_hooks) }} 20 | 21 | {% if target_relation.needs_to_drop(existing_relation) %} 22 | {{ drop_relation_if_exists(existing_relation) }} 23 | {% endif %} 24 | 25 | {% call statement('main', language=language) -%} 26 | {{ create_table_as(False, target_relation, compiled_code, language) }} 27 | {%- endcall %} 28 | 29 | {{ run_hooks(post_hooks) }} 30 | 31 | {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} 32 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 33 | 34 | {% do persist_docs(target_relation, model) %} 35 | 36 | {% do unset_query_tag(original_query_tag) %} 37 | 38 | {{ return({'relations': [target_relation]}) }} 39 | 40 | {% endmaterialization %} 41 | 42 | {% macro py_write_table(compiled_code, target_relation, temporary=False, table_type=none) %} 43 | {#- The following logic is only for backwards-compatibility with the deprecated `temporary` parameter -#} 44 | {% if table_type is not none %} 45 | {#- Just use the table_type as-is -#} 46 | {% elif temporary -%} 47 | {#- Case 1: the deprecated `temporary` parameter is used without the replacement `table_type` parameter -#} 48 | {%- set table_type = "temporary" -%} 49 | {% else %} 50 | {#- Case 2: neither the deprecated `temporary` parameter nor the replacement `table_type` parameter is used -#} 51 | {#- Snowflake treats "" as meaning "permanent" -#} 52 | {%- set table_type = "" -%} 53 | {%- endif %} 54 | {{ compiled_code }} 55 | def materialize(session, df, target_relation): 56 | # make sure pandas exists 57 | import importlib.util 58 | package_name = 'pandas' 59 | if importlib.util.find_spec(package_name): 60 | import pandas 61 | if isinstance(df, pandas.core.frame.DataFrame): 62 | session.use_database(target_relation.database) 63 | session.use_schema(target_relation.schema) 64 | # session.write_pandas does not have an overwrite option 65 | df = session.createDataFrame(df) 66 | {% set target_relation_name = resolve_model_name(target_relation) %} 67 | df.write.mode("overwrite").save_as_table('{{ target_relation_name }}', table_type='{{table_type}}') 68 | 69 | def main(session): 70 | dbt = dbtObj(session.table) 71 | df = model(dbt, session) 72 | materialize(session, df, dbt.this) 73 | return "OK" 74 | {% endmacro %} 75 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/test.sql: -------------------------------------------------------------------------------- 1 | {%- materialization test, adapter='snowflake' -%} 2 | 3 | {% set original_query_tag = set_query_tag() %} 4 | {% set relations = materialization_test_default() %} 5 | {% do unset_query_tag(original_query_tag) %} 6 | {{ return(relations) }} 7 | 8 | {%- endmaterialization -%} 9 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/materializations/view.sql: -------------------------------------------------------------------------------- 1 | {% materialization view, adapter='snowflake' -%} 2 | 3 | {% set original_query_tag = set_query_tag() %} 4 | {% set to_return = snowflake__create_or_replace_view() %} 5 | 6 | {% set target_relation = this.incorporate(type='view') %} 7 | 8 | {% do persist_docs(target_relation, model, for_columns=false) %} 9 | 10 | {% do unset_query_tag(original_query_tag) %} 11 | 12 |
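{# snowflake__create_or_replace_view() has already run the hooks, built the view, and applied grants; hand its {'relations': [...]} payload back unchanged #}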
{% do return(to_return) %} 13 | 14 | {%- endmaterialization %} 15 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/metadata.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_relation_last_modified(information_schema, relations) -%} 2 | 3 | {%- call statement('last_modified', fetch_result=True) -%} 4 | select table_schema as schema, 5 | table_name as identifier, 6 | last_altered as last_modified, 7 | {{ current_timestamp() }} as snapshotted_at 8 | from {{ information_schema }}.tables 9 | where ( 10 | {%- for relation in relations -%} 11 | (upper(table_schema) = upper('{{ relation.schema }}') and 12 | upper(table_name) = upper('{{ relation.identifier }}')){%- if not loop.last %} or {% endif -%} 13 | {%- endfor -%} 14 | ) 15 | {%- endcall -%} 16 | 17 | {{ return(load_result('last_modified')) }} 18 | 19 | {% endmacro %} 20 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/create.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_create_sql(relation, sql) %} 2 | 3 | {% if relation.is_dynamic_table %} 4 | {{ snowflake__get_create_dynamic_table_as_sql(relation, sql) }} 5 | 6 | {% else %} 7 | {{ default__get_create_sql(relation, sql) }} 8 | 9 | {% endif %} 10 | 11 | {% endmacro %} 12 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/create_backup.sql: -------------------------------------------------------------------------------- 1 | {%- macro snowflake__get_create_backup_sql(relation) -%} 2 | 3 | -- get the standard backup name 4 | {% set backup_relation = make_backup_relation(relation, relation.type) %} 5 | 6 | -- drop any pre-existing backup 7 | {{ get_drop_sql(backup_relation) }}; 8 | 9 | -- use `render` to ensure that the fully qualified name is used 10 | {{ get_rename_sql(relation, backup_relation.render()) }} 11 | 12 | {%- endmacro -%} 13 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/drop.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_drop_sql(relation) %} 2 | 3 | {% if relation.is_dynamic_table %} 4 | {{ snowflake__get_drop_dynamic_table_sql(relation) }} 5 | 6 | {% else %} 7 | {{ default__get_drop_sql(relation) }} 8 | 9 | {% endif %} 10 | 11 | {% endmacro %} 12 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/alter.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_alter_dynamic_table_as_sql( 2 | existing_relation, 3 | configuration_changes, 4 | target_relation, 5 | sql 6 | ) -%} 7 | {{- log('Applying ALTER to: ' ~ existing_relation) -}} 8 | 9 | {% if configuration_changes.requires_full_refresh %} 10 | {{- get_replace_sql(existing_relation, target_relation, sql) -}} 11 | 12 | {% else %} 13 | 14 | {%- set target_lag = configuration_changes.target_lag -%} 15 | {%- if target_lag -%}{{- log('Applying UPDATE TARGET_LAG to: ' ~ existing_relation) -}}{%- endif -%} 16 | {%- set snowflake_warehouse = configuration_changes.snowflake_warehouse -%} 17 | {%- if snowflake_warehouse -%}{{- log('Applying UPDATE WAREHOUSE to: ' ~ existing_relation) -}}{%- endif -%} 18 | 19 | alter 
dynamic table {{ existing_relation }} set 20 | {% if target_lag %}target_lag = '{{ target_lag.context }}'{% endif %} 21 | {% if snowflake_warehouse %}warehouse = {{ snowflake_warehouse.context }}{% endif %} 22 | 23 | {%- endif -%} 24 | 25 | {%- endmacro %} 26 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/create.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_create_dynamic_table_as_sql(relation, sql) -%} 2 | {#- 3 | -- Produce DDL that creates a dynamic table 4 | -- 5 | -- Args: 6 | -- - relation: Union[SnowflakeRelation, str] 7 | -- - SnowflakeRelation - required for relation.render() 8 | -- - str - is already the rendered relation name 9 | -- - sql: str - the code defining the model 10 | -- Globals: 11 | -- - config: NodeConfig - contains the attribution required to produce a SnowflakeDynamicTableConfig 12 | -- Returns: 13 | -- A valid DDL statement which will result in a new dynamic table. 14 | -#} 15 | 16 | {%- set dynamic_table = relation.from_config(config.model) -%} 17 | 18 | {%- if dynamic_table.catalog.table_format == 'iceberg' -%} 19 | {{ _get_create_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) }} 20 | {%- else -%} 21 | {{ _get_create_dynamic_standard_table_as_sql(dynamic_table, relation, sql) }} 22 | {%- endif -%} 23 | 24 | {%- endmacro %} 25 | 26 | 27 | {% macro _get_create_dynamic_standard_table_as_sql(dynamic_table, relation, sql) -%} 28 | {#- 29 | -- Produce DDL that creates a standard dynamic table 30 | -- 31 | -- This follows the syntax outlined here: 32 | -- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#syntax 33 | -- 34 | -- Args: 35 | -- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table 36 | -- - relation: Union[SnowflakeRelation, str] 37 | -- - SnowflakeRelation - required for relation.render() 38 | -- - str - is already the rendered relation name 39 | -- - sql: str - the code defining the model 40 | -- Returns: 41 | -- A valid DDL statement which will result in a new dynamic standard table. 42 | -#} 43 | 44 | create dynamic table {{ relation }} 45 | target_lag = '{{ dynamic_table.target_lag }}' 46 | warehouse = {{ dynamic_table.snowflake_warehouse }} 47 | {{ optional('refresh_mode', dynamic_table.refresh_mode) }} 48 | {{ optional('initialize', dynamic_table.initialize) }} 49 | as ( 50 | {{ sql }} 51 | ) 52 | 53 | {%- endmacro %} 54 | 55 | 56 | {% macro _get_create_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) -%} 57 | {#- 58 | -- Produce DDL that creates a dynamic iceberg table 59 | -- 60 | -- This follows the syntax outlined here: 61 | -- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#create-dynamic-iceberg-table 62 | -- 63 | -- Args: 64 | -- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table 65 | -- - relation: Union[SnowflakeRelation, str] 66 | -- - SnowflakeRelation - required for relation.render() 67 | -- - str - is already the rendered relation name 68 | -- - sql: str - the code defining the model 69 | -- Returns: 70 | -- A valid DDL statement which will result in a new dynamic iceberg table. 
71 | -#} 72 | 73 | create dynamic iceberg table {{ relation }} 74 | target_lag = '{{ dynamic_table.target_lag }}' 75 | warehouse = {{ dynamic_table.snowflake_warehouse }} 76 | {{ optional('external_volume', dynamic_table.catalog.external_volume) }} 77 | {{ optional('catalog', dynamic_table.catalog.name) }} 78 | base_location = '{{ dynamic_table.catalog.base_location }}' 79 | {{ optional('refresh_mode', dynamic_table.refresh_mode) }} 80 | {{ optional('initialize', dynamic_table.initialize) }} 81 | as ( 82 | {{ sql }} 83 | ) 84 | 85 | {%- endmacro %} 86 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/describe.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__describe_dynamic_table(relation) %} 2 | {#- 3 | -- Get all relevant metadata about a dynamic table 4 | -- 5 | -- Args: 6 | -- - relation: SnowflakeRelation - the relation to describe 7 | -- Returns: 8 | -- A dictionary with one or two entries depending on whether iceberg is enabled: 9 | -- - dynamic_table: the metadata associated with a standard dynamic table 10 | -- - catalog: the metadata associated with the iceberg catalog 11 | -#} 12 | {%- set _dynamic_table_sql -%} 13 | show dynamic tables 14 | like '{{ relation.identifier }}' 15 | in schema {{ relation.database }}.{{ relation.schema }} 16 | ; 17 | select 18 | "name", 19 | "schema_name", 20 | "database_name", 21 | "text", 22 | "target_lag", 23 | "warehouse", 24 | "refresh_mode" 25 | from table(result_scan(last_query_id())) 26 | {%- endset %} 27 | {% set results = {'dynamic_table': run_query(_dynamic_table_sql)} %} 28 | 29 | {% if adapter.behavior.enable_iceberg_materializations.no_warn %} 30 | {% set _ = results.update({'catalog': run_query(_get_describe_iceberg_catalog_sql(relation))}) %} 31 | {% endif %} 32 | 33 | {% do return(results) %} 34 | {% endmacro %} 35 | 36 | 37 | {% macro _get_describe_iceberg_catalog_sql(relation) %} 38 | {#- 39 | -- Produce DQL that returns all relevant metadata about an iceberg catalog 40 | -- 41 | -- Args: 42 | -- - relation: SnowflakeRelation - the relation to describe 43 | -- Returns: 44 | -- A valid DQL statement that will return metadata associated with an iceberg catalog 45 | -#} 46 | show iceberg tables 47 | like '{{ relation.identifier }}' 48 | in schema {{ relation.database }}.{{ relation.schema }} 49 | ; 50 | select 51 | "catalog_name", 52 | "external_volume_name", 53 | "base_location" 54 | from table(result_scan(last_query_id())) 55 | {% endmacro %} 56 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/drop.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_drop_dynamic_table_sql(relation) %} 2 | drop dynamic table if exists {{ relation }} 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/refresh.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__refresh_dynamic_table(relation) -%} 2 | {{- log('Applying REFRESH to: ' ~ relation) -}} 3 | 4 | alter dynamic table {{ relation }} refresh 5 | {%- endmacro %} 6 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/dynamic_table/replace.sql: 
-------------------------------------------------------------------------------- 1 | {% macro snowflake__get_replace_dynamic_table_sql(relation, sql) -%} 2 | {#- 3 | -- Produce DDL that replaces a dynamic table with a new dynamic table 4 | -- 5 | -- Args: 6 | -- - relation: Union[SnowflakeRelation, str] 7 | -- - SnowflakeRelation - required for relation.render() 8 | -- - str - is already the rendered relation name 9 | -- - sql: str - the code defining the model 10 | -- Globals: 11 | -- - config: NodeConfig - contains the attribution required to produce a SnowflakeDynamicTableConfig 12 | -- Returns: 13 | -- A valid DDL statement which will result in a new dynamic table. 14 | -#} 15 | 16 | {%- set dynamic_table = relation.from_config(config.model) -%} 17 | 18 | {%- if dynamic_table.catalog.table_format == 'iceberg' -%} 19 | {{ _get_replace_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) }} 20 | {%- else -%} 21 | {{ _get_replace_dynamic_standard_table_as_sql(dynamic_table, relation, sql) }} 22 | {%- endif -%} 23 | 24 | {%- endmacro %} 25 | 26 | {% macro _get_replace_dynamic_standard_table_as_sql(dynamic_table, relation, sql) -%} 27 | {#- 28 | -- Produce DDL that replaces a standard dynamic table with a new standard dynamic table 29 | -- 30 | -- This follows the syntax outlined here: 31 | -- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#syntax 32 | -- 33 | -- Args: 34 | -- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table 35 | -- - relation: Union[SnowflakeRelation, str] 36 | -- - SnowflakeRelation - required for relation.render() 37 | -- - str - is already the rendered relation name 38 | -- - sql: str - the code defining the model 39 | -- Returns: 40 | -- A valid DDL statement which will result in a new dynamic standard table. 41 | -#} 42 | 43 | create or replace dynamic table {{ relation }} 44 | target_lag = '{{ dynamic_table.target_lag }}' 45 | warehouse = {{ dynamic_table.snowflake_warehouse }} 46 | {{ optional('refresh_mode', dynamic_table.refresh_mode) }} 47 | {{ optional('initialize', dynamic_table.initialize) }} 48 | as ( 49 | {{ sql }} 50 | ) 51 | 52 | {%- endmacro %} 53 | 54 | 55 | {% macro _get_replace_dynamic_iceberg_table_as_sql(dynamic_table, relation, sql) -%} 56 | {#- 57 | -- Produce DDL that replaces a dynamic iceberg table with a new dynamic iceberg table 58 | -- 59 | -- This follows the syntax outlined here: 60 | -- https://docs.snowflake.com/en/sql-reference/sql/create-dynamic-table#create-dynamic-iceberg-table 61 | -- 62 | -- Args: 63 | -- - dynamic_table: SnowflakeDynamicTableConfig - contains all of the configuration for the dynamic table 64 | -- - relation: Union[SnowflakeRelation, str] 65 | -- - SnowflakeRelation - required for relation.render() 66 | -- - str - is already the rendered relation name 67 | -- - sql: str - the code defining the model 68 | -- Returns: 69 | -- A valid DDL statement which will result in a new dynamic iceberg table. 
70 | -#} 71 | 72 | create or replace dynamic iceberg table {{ relation }} 73 | target_lag = '{{ dynamic_table.target_lag }}' 74 | warehouse = {{ dynamic_table.snowflake_warehouse }} 75 | {{ optional('external_volume', dynamic_table.catalog.external_volume) }} 76 | {{ optional('catalog', dynamic_table.catalog.name) }} 77 | base_location = '{{ dynamic_table.catalog.base_location }}' 78 | {{ optional('refresh_mode', dynamic_table.refresh_mode) }} 79 | {{ optional('initialize', dynamic_table.initialize) }} 80 | as ( 81 | {{ sql }} 82 | ) 83 | 84 | {%- endmacro %} 85 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/rename.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__rename_relation(from_relation, to_relation) -%} 2 | {% call statement('rename_relation') -%} 3 | alter table {{ from_relation }} rename to {{ to_relation }} 4 | {%- endcall %} 5 | {% endmacro %} 6 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/rename_intermediate.sql: -------------------------------------------------------------------------------- 1 | {%- macro snowflake__get_rename_intermediate_sql(relation) -%} 2 | 3 | -- get the standard intermediate name 4 | {% set intermediate_relation = make_intermediate_relation(relation) %} 5 | 6 | -- use `render` to ensure that the fully qualified name is used 7 | {{ get_rename_sql(intermediate_relation, relation.render()) }} 8 | 9 | {%- endmacro -%} 10 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/replace.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_replace_sql(existing_relation, target_relation, sql) %} 2 | 3 | {% if existing_relation.is_dynamic_table and target_relation.is_dynamic_table %} 4 | {{ snowflake__get_replace_dynamic_table_sql(target_relation, sql) }} 5 | 6 | {% else %} 7 | {{ default__get_replace_sql(existing_relation, target_relation, sql) }} 8 | 9 | {% endif %} 10 | 11 | {% endmacro %} 12 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/table/create.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__create_table_as(temporary, relation, compiled_code, language='sql') -%} 2 | 3 | {%- if relation.is_iceberg_format and not adapter.behavior.enable_iceberg_materializations.no_warn %} 4 | {% do exceptions.raise_compiler_error('Was unable to create model as Iceberg Table Format. Please set the `enable_iceberg_materializations` behavior flag to True in your dbt_project.yml. 
For more information, go to https://docs.getdbt.com/reference/resource-configs/snowflake-configs#iceberg-table-format') %} 5 | {%- endif %} 6 | 7 | {%- set materialization_prefix = relation.get_ddl_prefix_for_create(config.model.config, temporary) -%} 8 | {%- set alter_prefix = relation.get_ddl_prefix_for_alter() -%} 9 | 10 | {# Generate DDL/DML #} 11 | {%- if language == 'sql' -%} 12 | {%- set cluster_by_keys = config.get('cluster_by', default=none) -%} 13 | {%- set enable_automatic_clustering = config.get('automatic_clustering', default=false) -%} 14 | {%- set copy_grants = config.get('copy_grants', default=false) -%} 15 | 16 | {%- if cluster_by_keys is not none and cluster_by_keys is string -%} 17 | {%- set cluster_by_keys = [cluster_by_keys] -%} 18 | {%- endif -%} 19 | {%- if cluster_by_keys is not none -%} 20 | {%- set cluster_by_string = cluster_by_keys|join(", ")-%} 21 | {% else %} 22 | {%- set cluster_by_string = none -%} 23 | {%- endif -%} 24 | {%- set sql_header = config.get('sql_header', none) -%} 25 | 26 | {{ sql_header if sql_header is not none }} 27 | 28 | create or replace {{ materialization_prefix }} table {{ relation }} 29 | {%- if relation.is_iceberg_format %} 30 | {# 31 | Valid DDL in CTAS statements. Plain create statements have a different order. 32 | https://docs.snowflake.com/en/sql-reference/sql/create-iceberg-table 33 | #} 34 | {{ relation.get_iceberg_ddl_options(config.model.config) }} 35 | {%- endif -%} 36 | 37 | {%- set contract_config = config.get('contract') -%} 38 | {%- if contract_config.enforced -%} 39 | {{ get_assert_columns_equivalent(sql) }} 40 | {{ get_table_columns_and_constraints() }} 41 | {% set compiled_code = get_select_subquery(compiled_code) %} 42 | {% endif %} 43 | {% if copy_grants and not temporary -%} copy grants {%- endif %} as 44 | ( 45 | {%- if cluster_by_string is not none -%} 46 | select * from ( 47 | {{ compiled_code }} 48 | ) order by ({{ cluster_by_string }}) 49 | {%- else -%} 50 | {{ compiled_code }} 51 | {%- endif %} 52 | ); 53 | {% if cluster_by_string is not none and not temporary -%} 54 | alter {{ alter_prefix }} table {{relation}} cluster by ({{cluster_by_string}}); 55 | {%- endif -%} 56 | {% if enable_automatic_clustering and cluster_by_string is not none and not temporary %} 57 | alter {{ alter_prefix }} table {{relation}} resume recluster; 58 | {%- endif -%} 59 | 60 | {%- elif language == 'python' -%} 61 | {%- if relation.is_iceberg_format %} 62 | {% do exceptions.raise_compiler_error('Iceberg is incompatible with Python models. 
Please use a SQL model for the iceberg format.') %} 63 | {%- endif %} 64 | {{ py_write_table(compiled_code=compiled_code, target_relation=relation, table_type=relation.get_ddl_prefix_for_create(config.model.config, temporary)) }} 65 | {%- else -%} 66 | {% do exceptions.raise_compiler_error("snowflake__create_table_as macro didn't get supported language, it got %s" % language) %} 67 | {%- endif -%} 68 | 69 | {% endmacro %} 70 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/table/drop.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_drop_table_sql(relation) %} 2 | drop table if exists {{ relation }} cascade 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/table/rename.sql: -------------------------------------------------------------------------------- 1 | {%- macro snowflake__get_rename_table_sql(relation, new_name) -%} 2 | /* 3 | Rename or move a table to the new name. 4 | 5 | Args: 6 | relation: SnowflakeRelation - relation to be renamed 7 | new_name: Union[str, SnowflakeRelation] - new name for `relation` 8 | if providing a string, the default database/schema will be used if that string is just an identifier 9 | if providing a SnowflakeRelation, `render` will be used to produce a fully qualified name 10 | Returns: templated string 11 | */ 12 | alter table {{ relation }} rename to {{ new_name }} 13 | {%- endmacro -%} 14 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/table/replace.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_replace_table_sql(relation, sql) %} 2 | {{ snowflake__create_table_as(False, relation, sql) }} 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/view/create.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__create_view_as_with_temp_flag(relation, sql, is_temporary=False) -%} 2 | {%- set secure = config.get('secure', default=false) -%} 3 | {%- set copy_grants = config.get('copy_grants', default=false) -%} 4 | {%- set sql_header = config.get('sql_header', none) -%} 5 | 6 | {{ sql_header if sql_header is not none }} 7 | create or replace {% if secure -%} 8 | secure 9 | {%- endif %} {% if is_temporary -%} 10 | temporary 11 | {%- endif %} view {{ relation }} 12 | {% if config.persist_column_docs() -%} 13 | {% set model_columns = model.columns %} 14 | {% set query_columns = get_columns_in_query(sql) %} 15 | {{ get_persist_docs_column_list(model_columns, query_columns) }} 16 | 17 | {%- endif %} 18 | {%- set contract_config = config.get('contract') -%} 19 | {%- if contract_config.enforced -%} 20 | {{ get_assert_columns_equivalent(sql) }} 21 | {%- endif %} 22 | {% if copy_grants -%} copy grants {%- endif %} as ( 23 | {{ sql }} 24 | ); 25 | {% endmacro %} 26 | 27 | 28 | {% macro snowflake__create_view_as(relation, sql) -%} 29 | {{ snowflake__create_view_as_with_temp_flag(relation, sql) }} 30 | {% endmacro %} 31 | 32 | 33 | /* {# 34 | Vendored from dbt-core for the purpose of overriding small pieces to support dynamic tables. This should 35 | eventually be retired in favor of a standardized approach.
Changed line: 36 | 37 | {%- if old_relation is not none and old_relation.is_table -%} -> 38 | {%- if old_relation is not none and not old_relation.is_view -%} 39 | #} */ 40 | 41 | {% macro snowflake__create_or_replace_view() %} 42 | {%- set identifier = model['alias'] -%} 43 | 44 | {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} 45 | {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} 46 | 47 | {%- set target_relation = api.Relation.create( 48 | identifier=identifier, schema=schema, database=database, 49 | type='view') -%} 50 | {% set grant_config = config.get('grants') %} 51 | 52 | {{ run_hooks(pre_hooks) }} 53 | 54 | -- If there's a table with the same name and we weren't told to full refresh, 55 | -- that's an error. If we were told to full refresh, drop it. This behavior differs 56 | -- for Snowflake and BigQuery, so multiple dispatch is used. 57 | {%- if old_relation is not none and not old_relation.is_view -%} 58 | {{ handle_existing_table(should_full_refresh(), old_relation) }} 59 | {%- endif -%} 60 | 61 | -- build model 62 | {% call statement('main') -%} 63 | {{ get_create_view_as_sql(target_relation, sql) }} 64 | {%- endcall %} 65 | 66 | {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %} 67 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 68 | 69 | {{ run_hooks(post_hooks) }} 70 | 71 | {{ return({'relations': [target_relation]}) }} 72 | 73 | {% endmacro %} 74 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/view/drop.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_drop_view_sql(relation) %} 2 | drop view if exists {{ relation }} cascade 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/view/rename.sql: -------------------------------------------------------------------------------- 1 | {%- macro snowflake__get_rename_view_sql(relation, new_name) -%} 2 | /* 3 | Rename or move a view to the new name. 
4 | 5 | Args: 6 | relation: SnowflakeRelation - relation to be renamed 7 | new_name: Union[str, SnowflakeRelation] - new name for `relation` 8 | if providing a string, the default database/schema will be used if that string is just an identifier 9 | if providing a SnowflakeRelation, `render` will be used to produce a fully qualified name 10 | Returns: templated string 11 | */ 12 | alter view {{ relation }} rename to {{ new_name }} 13 | {%- endmacro -%} 14 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/relations/view/replace.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__get_replace_view_sql(relation, sql) %} 2 | {{ snowflake__create_view_as(relation, sql) }} 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/array_construct.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__array_construct(inputs, data_type) -%} 2 | array_construct( {{ inputs|join(' , ') }} ) 3 | {%- endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/bool_or.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__bool_or(expression) -%} 2 | 3 | boolor_agg({{ expression }}) 4 | 5 | {%- endmacro %} 6 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/cast.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__cast(field, type) %} 2 | {% if (type|upper == "GEOGRAPHY") -%} 3 | to_geography({{field}}) 4 | {% elif (type|upper == "GEOMETRY") -%} 5 | to_geometry({{field}}) 6 | {% else -%} 7 | cast({{field}} as {{type}}) 8 | {% endif -%} 9 | {% endmacro %} 10 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/escape_single_quotes.sql: -------------------------------------------------------------------------------- 1 | {# /*Snowflake uses a single backslash: they're -> they\'re. The second backslash is to escape it from Jinja */ #} 2 | {% macro snowflake__escape_single_quotes(expression) -%} 3 | {{ expression | replace("'", "\\'") }} 4 | {%- endmacro %} 5 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/optional.sql: -------------------------------------------------------------------------------- 1 | {% macro optional(name, value, quote_char = '') %} 2 | {#- 3 | -- Insert optional DDL parameters only when their value is provided; makes DDL statements more readable 4 | -- 5 | -- Args: 6 | -- - name: the name of the DDL option 7 | -- - value: the value of the DDL option, may be None 8 | -- - quote_char: the quote character to use (e.g. string), leave blank if unnecessary (e.g. integer or bool) 9 | -- Returns: 10 | -- If the value is not None (e.g. 
provided by the user), return the option setting DDL 11 | -- If the value is None, return an empty string 12 | -#} 13 | {% if value is not none %}{{ name }} = {{ quote_char }}{{ value }}{{ quote_char }}{% endif %} 14 | {% endmacro %} 15 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/right.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__right(string_text, length_expression) %} 2 | 3 | case when {{ length_expression }} = 0 4 | then '' 5 | else 6 | right( 7 | {{ string_text }}, 8 | {{ length_expression }} 9 | ) 10 | end 11 | 12 | {%- endmacro -%} 13 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/safe_cast.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__safe_cast(field, type) %} 2 | {% if type|upper == "GEOMETRY" -%} 3 | try_to_geometry({{field}}) 4 | {% elif type|upper == "GEOGRAPHY" -%} 5 | try_to_geography({{field}}) 6 | {% elif type|upper != "VARIANT" -%} 7 | {#-- Snowflake try_cast does not support casting to variant, and expects the field as a string --#} 8 | {% set field_as_string = dbt.string_literal(field) if field is number else field %} 9 | try_cast({{field_as_string}} as {{type}}) 10 | {% else -%} 11 | {{ adapter.dispatch('cast', 'dbt')(field, type) }} 12 | {% endif -%} 13 | {% endmacro %} 14 | -------------------------------------------------------------------------------- /dbt/include/snowflake/macros/utils/timestamps.sql: -------------------------------------------------------------------------------- 1 | {% macro snowflake__current_timestamp() -%} 2 | convert_timezone('UTC', current_timestamp()) 3 | {%- endmacro %} 4 | 5 | {% macro snowflake__snapshot_string_as_time(timestamp) -%} 6 | {%- set result = "to_timestamp_ntz('" ~ timestamp ~ "')" -%} 7 | {{ return(result) }} 8 | {%- endmacro %} 9 | 10 | {% macro snowflake__snapshot_get_time() -%} 11 | to_timestamp_ntz({{ current_timestamp() }}) 12 | {%- endmacro %} 13 | 14 | {% macro snowflake__current_timestamp_backcompat() %} 15 | current_timestamp::{{ type_timestamp() }} 16 | {% endmacro %} 17 | 18 | {% macro snowflake__current_timestamp_in_utc_backcompat() %} 19 | convert_timezone('UTC', {{ snowflake__current_timestamp_backcompat() }})::{{ type_timestamp() }} 20 | {% endmacro %} 21 | -------------------------------------------------------------------------------- /dbt/include/snowflake/profile_template.yml: -------------------------------------------------------------------------------- 1 | fixed: 2 | type: snowflake 3 | prompts: 4 | account: 5 | hint: 'https://<account_id>.snowflakecomputing.com' 6 | user: 7 | hint: 'dev username' 8 | _choose_authentication_type: 9 | password: 10 | password: 11 | hint: 'dev password' 12 | hide_input: true 13 | keypair: 14 | private_key_path: 15 | hint: 'path/to/private.key' 16 | private_key_passphrase: 17 | hint: 'passphrase for the private key, if key is encrypted' 18 | hide_input: true 19 | sso: 20 | authenticator: 21 | hint: "'externalbrowser' or a valid Okta URL" 22 | default: 'externalbrowser' 23 | role: 24 | hint: 'dev role' 25 | warehouse: 26 | hint: 'warehouse name' 27 | database: 28 | hint: 'default database that dbt will build objects in' 29 | schema: 30 | hint: 'default schema that dbt will build objects in' 31 | threads: 32 | hint: '1 or more' 33 | type: 'int' 34 | default: 1 35 |
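For illustration, answering these prompts during `dbt init` with password authentication would produce a `profiles.yml` entry shaped roughly like the following sketch (the profile name, account, and credential values here are hypothetical placeholders, not defaults shipped with the adapter):

```yaml
my_snowflake_project:
  target: dev
  outputs:
    dev:
      type: snowflake              # fixed by the template
      account: abc12345.us-east-1  # from the 'account' prompt
      user: dev_user
      password: dev_password       # 'password' branch of _choose_authentication_type
      role: dev_role
      warehouse: dev_wh
      database: analytics
      schema: dbt_dev
      threads: 1                   # int, template default
```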
-------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | # this image gets published to GHCR for production use 2 | ARG py_version=3.11.2 3 | 4 | FROM python:$py_version-slim-bullseye AS base 5 | 6 | RUN apt-get update \ 7 | && apt-get dist-upgrade -y \ 8 | && apt-get install -y --no-install-recommends \ 9 | build-essential=12.9 \ 10 | ca-certificates=20210119 \ 11 | git=1:2.30.2-1+deb11u2 \ 12 | libpq-dev=13.18-0+deb11u1 \ 13 | make=4.3-4.1 \ 14 | openssh-client=1:8.4p1-5+deb11u3 \ 15 | software-properties-common=0.96.20.2-2.1 \ 16 | && apt-get clean \ 17 | && rm -rf \ 18 | /var/lib/apt/lists/* \ 19 | /tmp/* \ 20 | /var/tmp/* 21 | 22 | ENV PYTHONIOENCODING=utf-8 23 | ENV LANG=C.UTF-8 24 | 25 | RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir 26 | 27 | 28 | FROM base AS dbt-snowflake 29 | 30 | ARG commit_ref=main 31 | 32 | HEALTHCHECK CMD dbt --version || exit 1 33 | 34 | WORKDIR /usr/app/dbt/ 35 | ENTRYPOINT ["dbt"] 36 | 37 | RUN python -m pip install --no-cache-dir "dbt-snowflake @ git+https://github.com/dbt-labs/dbt-snowflake@${commit_ref}" 38 | -------------------------------------------------------------------------------- /docker/README.md: -------------------------------------------------------------------------------- 1 | # Docker for dbt 2 | This Dockerfile is suitable for building dbt Docker images locally or for use with CI/CD to automate populating a container registry. 3 | 4 | 5 | ## Building an image: 6 | This Dockerfile can create images for the following target: `dbt-snowflake` 7 | 8 | In order to build a new image, run the following docker command. 9 | ```shell 10 | docker build --tag <image_name> --target dbt-snowflake <path/to/dockerfile> 11 | ``` 12 | --- 13 | > **Note:** Docker must be configured to use [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) in order for images to build properly! 14 | 15 | --- 16 | 17 | By default the image will be populated with the latest version of `dbt-snowflake` on `main`. 18 | If you need to use a different version, you can specify it by git ref using the `--build-arg` flag: 19 | ```shell 20 | docker build --tag <image_name> \ 21 | --target dbt-snowflake \ 22 | --build-arg commit_ref=<commit_ref> \ 23 | <path/to/dockerfile> 24 | ``` 25 | 26 | ### Examples: 27 | To build an image named "my-dbt" that supports Snowflake using the latest releases: 28 | ```shell 29 | cd dbt-snowflake/docker 30 | docker build --tag my-dbt --target dbt-snowflake . 31 | ``` 32 | 33 | To build an image named "my-other-dbt" that supports Snowflake using the adapter version 1.0.0b1: 34 | ```shell 35 | cd dbt-snowflake/docker 36 | docker build \ 37 | --tag my-other-dbt \ 38 | --target dbt-snowflake \ 39 | --build-arg commit_ref=v1.0.0b1 \ 40 | . 41 | ``` 42 | 43 | ## Running an image in a container: 44 | The `ENTRYPOINT` for this Dockerfile is the command `dbt`, so you can bind-mount your project to `/usr/app` and use dbt as normal: 45 | ```shell 46 | docker run \ 47 | --network=host \ 48 | --mount type=bind,source=path/to/project,target=/usr/app \ 49 | --mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/profiles.yml \ 50 | my-dbt \ 51 | ls 52 | ``` 53 | --- 54 | **Notes:** 55 | * Bind-mount sources _must_ be an absolute path 56 | * You may need to make adjustments to the docker networking setting depending on the specifics of your data warehouse/database host.
57 | 58 | --- 59 | -------------------------------------------------------------------------------- /docker/dev.Dockerfile: -------------------------------------------------------------------------------- 1 | # this image does not get published, it is intended for local development only, see `Makefile` for usage 2 | FROM ubuntu:24.04 AS base 3 | 4 | # prevent python installation from asking for time zone region 5 | ARG DEBIAN_FRONTEND=noninteractive 6 | 7 | # add python repository 8 | RUN apt-get update \ 9 | && apt-get install -y software-properties-common=0.99.48 \ 10 | && add-apt-repository -y ppa:deadsnakes/ppa \ 11 | && apt-get clean \ 12 | && rm -rf \ 13 | /var/lib/apt/lists/* \ 14 | /tmp/* \ 15 | /var/tmp/* 16 | 17 | # install python 18 | RUN apt-get update \ 19 | && apt-get install -y --no-install-recommends \ 20 | build-essential=12.10ubuntu1 \ 21 | git-all=1:2.43.0-1ubuntu7.1 \ 22 | python3.9=3.9.20-1+noble1 \ 23 | python3.9-dev=3.9.20-1+noble1 \ 24 | python3.9-distutils=3.9.20-1+noble1 \ 25 | python3.9-venv=3.9.20-1+noble1 \ 26 | python3-pip=24.0+dfsg-1ubuntu1 \ 27 | python3-wheel=0.42.0-2 \ 28 | && apt-get clean \ 29 | && rm -rf \ 30 | /var/lib/apt/lists/* \ 31 | /tmp/* \ 32 | /var/tmp/* 33 | 34 | # update the default system interpreter to the newly installed version 35 | RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.9 1 36 | 37 | 38 | FROM base AS dbt-snowflake-dev 39 | 40 | HEALTHCHECK CMD python --version || exit 1 41 | 42 | # send stdout/stderr to terminal 43 | ENV PYTHONUNBUFFERED=1 44 | 45 | # setup mount for local code 46 | WORKDIR /opt/code 47 | VOLUME /opt/code 48 | 49 | # create a virtual environment 50 | RUN python -m venv /opt/venv 51 | -------------------------------------------------------------------------------- /docker/test.sh: -------------------------------------------------------------------------------- 1 | # - VERY rudimentary test script to run latest + specific branch image builds and test them all by running `--version` 2 | # TODO: create a real test suite 3 | 4 | clear \ 5 | && echo "\n\n"\ 6 | "########################################\n"\ 7 | "##### Testing dbt-snowflake latest #####\n"\ 8 | "########################################\n"\ 9 | && docker build --tag dbt-snowflake \ 10 | --target dbt-snowflake \ 11 | docker \ 12 | && docker run dbt-snowflake --version \ 13 | \ 14 | && echo "\n\n"\ 15 | "#########################################\n"\ 16 | "##### Testing dbt-snowflake-1.0.0b1 #####\n"\ 17 | "#########################################\n"\ 18 | && docker build --tag dbt-snowflake-1.0.0b1 \ 19 | --target dbt-snowflake \ 20 | --build-arg commit_ref=v1.0.0b1 \ 21 | docker \ 22 | && docker run dbt-snowflake-1.0.0b1 --version 23 | -------------------------------------------------------------------------------- /hatch.toml: -------------------------------------------------------------------------------- 1 | [version] 2 | path = "dbt/adapters/snowflake/__version__.py" 3 | 4 | [build.targets.sdist] 5 | packages = ["src/dbt"] 6 | sources = ["src"] 7 | 8 | [build.targets.wheel] 9 | packages = ["src/dbt"] 10 | sources = ["src"] 11 | 12 | [envs.default] 13 | dependencies = [ 14 | "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", 15 | "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", 16 | "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", 17 | "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", 18 | "ddtrace==2.3.0", 19 |
"ipdb~=0.13.13", 20 | "pre-commit~=3.7.0", 21 | "pytest~=7.4", 22 | "pytest-csv~=3.0", 23 | "pytest-dotenv~=0.5.2", 24 | "pytest-logbook~=1.2", 25 | "pytest-xdist~=3.6", 26 | "tox~=4.16", # does this pin deps transitively? 27 | ] 28 | 29 | [envs.default.scripts] 30 | setup = "pre-commit install" 31 | code-quality = "pre-commit run --all-files" 32 | unit-tests = "python -m pytest {args:tests/unit}" 33 | integration-tests = "- python -m pytest {args:tests/functional}" 34 | docker-dev = [ 35 | "docker build -f docker/dev.Dockerfile -t dbt-snowflake-dev .", 36 | "docker run --rm -it --name dbt-snowflake-dev -v $(pwd):/opt/code dbt-snowflake-dev", 37 | ] 38 | 39 | [envs.build] 40 | detached = true 41 | dependencies = [ 42 | "wheel", 43 | "twine", 44 | "check-wheel-contents", 45 | ] 46 | 47 | [envs.build.scripts] 48 | check-all = [ 49 | "- check-wheel", 50 | "- check-sdist", 51 | ] 52 | check-wheel = [ 53 | "twine check dist/*", 54 | "find ./dist/dbt_snowflake-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", 55 | "pip freeze | grep dbt-snowflake", 56 | ] 57 | check-sdist = [ 58 | "check-wheel-contents dist/*.whl --ignore W007,W008", 59 | "find ./dist/dbt_snowflake-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", 60 | "pip freeze | grep dbt-snowflake", 61 | ] 62 | docker-prod = "docker build -f docker/Dockerfile -t dbt-snowflake ." 63 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | dynamic = ["version"] 7 | name = "dbt-snowflake" 8 | description = "The Snowflake adapter plugin for dbt" 9 | readme = "README.md" 10 | keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "snowflake"] 11 | requires-python = ">=3.9.0" 12 | authors = [{ name = "dbt Labs", email = "info@dbtlabs.com" }] 13 | maintainers = [{ name = "dbt Labs", email = "info@dbtlabs.com" }] 14 | classifiers = [ 15 | "Development Status :: 5 - Production/Stable", 16 | "License :: OSI Approved :: Apache Software License", 17 | "Operating System :: MacOS :: MacOS X", 18 | "Operating System :: Microsoft :: Windows", 19 | "Operating System :: POSIX :: Linux", 20 | "Programming Language :: Python :: 3.9", 21 | "Programming Language :: Python :: 3.10", 22 | "Programming Language :: Python :: 3.11", 23 | "Programming Language :: Python :: 3.12", 24 | ] 25 | dependencies = [ 26 | "dbt-common>=1.10,<2.0", 27 | "dbt-adapters>=1.10.4,<2.0", 28 | "snowflake-connector-python[secure-local-storage]>=3.0.0,<3.12.4", 29 | # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency 30 | "dbt-core>=1.8.0", 31 | # installed via dbt-core but referenced directly; don't pin to avoid version conflicts with dbt-core 32 | "agate", 33 | ] 34 | 35 | [project.urls] 36 | Homepage = "https://github.com/dbt-labs/dbt-snowflake" 37 | Documentation = "https://docs.getdbt.com" 38 | Repository = "https://github.com/dbt-labs/dbt-snowflake.git" 39 | Issues = "https://github.com/dbt-labs/dbt-snowflake/issues" 40 | Changelog = "https://github.com/dbt-labs/dbt-snowflake/blob/main/CHANGELOG.md" 41 | 42 | [tool.pytest.ini_options] 43 | testpaths = ["tests/functional", "tests/unit"] 44 | env_files = ["test.env"] 45 | addopts = "-v --color=yes -n auto" 46 | 
filterwarnings = [ 47 | "ignore:datetime.datetime.utcnow:DeprecationWarning", 48 | ] 49 | -------------------------------------------------------------------------------- /scripts/build-dist.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )" 6 | 7 | PYTHON_BIN=${PYTHON_BIN:-python} 8 | 9 | echo "$PYTHON_BIN" 10 | 11 | set -x 12 | 13 | rm -rf "$DBT_PATH"/dist 14 | rm -rf "$DBT_PATH"/build 15 | mkdir -p "$DBT_PATH"/dist 16 | 17 | cd "$DBT_PATH" 18 | $PYTHON_BIN setup.py sdist bdist_wheel 19 | 20 | set +x 21 | -------------------------------------------------------------------------------- /scripts/env-setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Set TOXENV environment variable for subsequent steps 3 | echo "TOXENV=integration-snowflake" >> $GITHUB_ENV 4 | # Set INTEGRATION_TESTS_SECRETS_PREFIX environment variable for subsequent steps 5 | # All GH secrets that have this prefix will be set as environment variables 6 | echo "INTEGRATION_TESTS_SECRETS_PREFIX=SNOWFLAKE_TEST" >> $GITHUB_ENV 7 | # Set environment variables required for integration tests 8 | echo "DBT_TEST_USER_1=dbt_test_role_1" >> $GITHUB_ENV 9 | echo "DBT_TEST_USER_2=dbt_test_role_2" >> $GITHUB_ENV 10 | echo "DBT_TEST_USER_3=dbt_test_role_3" >> $GITHUB_ENV 11 | -------------------------------------------------------------------------------- /test.env.example: -------------------------------------------------------------------------------- 1 | # Note: Make sure you have a Snowflake account that is set up so these fields are easy to complete. 2 | # If you don't have an account set up yet, take note of these required fields now; when you do get set up, 3 | # you can use them to build your Snowflake project. 4 | 5 | ### Test Environment field definitions 6 | # These will all be gathered from account information or created by you. 7 | 8 | # SNOWFLAKE_TEST_ACCOUNT: The name that uniquely identifies your Snowflake account. 9 | # SNOWFLAKE_TEST_ALT_DATABASE: Name of a secondary or alternate database to use for testing. You will need to create this database. 10 | # SNOWFLAKE_TEST_ALT_WAREHOUSE: Name of the secondary warehouse to use for testing. 11 | # SNOWFLAKE_TEST_DATABASE: Name of the primary database to use for testing. 12 | # SNOWFLAKE_TEST_OAUTH_CLIENT_ID: Client ID of the OAuth client integration. (only for oauth authentication) 13 | # SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET: Client secret of your OAuth client id. (only for oauth authentication) 14 | # SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN: Boolean value, defaulted to TRUE, to keep the connection alive. (only for oauth authentication) 15 | # SNOWFLAKE_TEST_PASSWORD: Password used for your database user. 16 | # SNOWFLAKE_TEST_QUOTED_DATABASE: Name of a database whose identifier requires quoting, used to test quoted identifiers. 17 | # SNOWFLAKE_TEST_USER: Username of the database user. 18 | # SNOWFLAKE_TEST_WAREHOUSE: Warehouse name to be used as primary. 19 | 20 | # Copy the following to a test.env, and replace example values with your information.
21 | SNOWFLAKE_TEST_ACCOUNT=my_account_id 22 | SNOWFLAKE_TEST_ALT_DATABASE=my_alt_database_name 23 | SNOWFLAKE_TEST_ALT_WAREHOUSE=my_alt_warehouse_name 24 | SNOWFLAKE_TEST_DATABASE=my_database_name 25 | SNOWFLAKE_TEST_OAUTH_CLIENT_ID=my_oauth_id 26 | SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET=my_oauth_secret 27 | SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN=TRUE 28 | SNOWFLAKE_TEST_PASSWORD=my_password 29 | SNOWFLAKE_TEST_QUOTED_DATABASE=my_quoted_database_name 30 | SNOWFLAKE_TEST_USER=my_username 31 | SNOWFLAKE_TEST_WAREHOUSE=my_warehouse_name 32 | 33 | DBT_TEST_USER_1=dbt_test_role_1 34 | DBT_TEST_USER_2=dbt_test_role_2 35 | DBT_TEST_USER_3=dbt_test_role_3 36 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | 4 | # Import the functional fixtures as a plugin 5 | # Note: fixtures with session scope need to be local 6 | 7 | pytest_plugins = ["dbt.tests.fixtures.project"] 8 | 9 | 10 | # The profile dictionary, used to write out profiles.yml 11 | @pytest.fixture(scope="class") 12 | def dbt_profile_target(): 13 | return { 14 | "type": "snowflake", 15 | "threads": 4, 16 | "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"), 17 | "user": os.getenv("SNOWFLAKE_TEST_USER"), 18 | "password": os.getenv("SNOWFLAKE_TEST_PASSWORD"), 19 | "database": os.getenv("SNOWFLAKE_TEST_DATABASE"), 20 | "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"), 21 | } 22 | -------------------------------------------------------------------------------- /tests/functional/adapter/catalog_tests/files.py: -------------------------------------------------------------------------------- 1 | MY_SEED = """ 2 | id,value 3 | 1,100 4 | 2,200 5 | 3,300 6 | """.strip() 7 | 8 | 9 | MY_TABLE = """ 10 | {{ config( 11 | materialized='table', 12 | ) }} 13 | select * from {{ ref('my_seed') }} 14 | """ 15 | 16 | 17 | MY_VIEW = """ 18 | {{ config( 19 | materialized='view', 20 | ) }} 21 | select * from {{ ref('my_seed') }} 22 | """ 23 | 24 | 25 | MY_DYNAMIC_TABLE = """ 26 | {{ config( 27 | materialized='dynamic_table', 28 | snowflake_warehouse='DBT_TESTING', 29 | target_lag='30 minutes', 30 | ) }} 31 | select * from {{ ref('my_seed') }} 32 | """ 33 | -------------------------------------------------------------------------------- /tests/functional/adapter/catalog_tests/test_relation_types.py: -------------------------------------------------------------------------------- 1 | from dbt.contracts.results import CatalogArtifact 2 | from dbt.tests.util import run_dbt 3 | import pytest 4 | 5 | from tests.functional.adapter.catalog_tests import files 6 | 7 | 8 | class TestCatalogRelationTypes: 9 | @pytest.fixture(scope="class", autouse=True) 10 | def seeds(self): 11 | return {"my_seed.csv": files.MY_SEED} 12 | 13 | @pytest.fixture(scope="class", autouse=True) 14 | def models(self): 15 | yield { 16 | "my_table.sql": files.MY_TABLE, 17 | "my_view.sql": files.MY_VIEW, 18 | "my_dynamic_table.sql": files.MY_DYNAMIC_TABLE, 19 | } 20 | 21 | @pytest.fixture(scope="class", autouse=True) 22 | def docs(self, project): 23 | run_dbt(["seed"]) 24 | run_dbt(["run"]) 25 | yield run_dbt(["docs", "generate"]) 26 | 27 |
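# each (node_name, relation_type) pair below is checked against the catalog produced by the `docs` fixture above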
@pytest.mark.parametrize( 28 | "node_name,relation_type", 29 | [ 30 | ("seed.test.my_seed", "BASE TABLE"), 31 | ("model.test.my_table", "BASE TABLE"), 32 | ("model.test.my_view", "VIEW"), 33 | ("model.test.my_dynamic_table", "DYNAMIC TABLE"), 34 | ], 35 | ) 36 | def test_relation_types_populate_correctly( 37 | self, docs: CatalogArtifact, node_name: str, relation_type: str 38 | ): 39 | """ 40 | This test addresses: https://github.com/dbt-labs/dbt-snowflake/issues/817 41 | """ 42 | assert node_name in docs.nodes 43 | node = docs.nodes[node_name] 44 | assert node.metadata.type == relation_type 45 | -------------------------------------------------------------------------------- /tests/functional/adapter/column_types/fixtures.py: -------------------------------------------------------------------------------- 1 | _MODEL_SQL = """ 2 | select 3 | 1::smallint as smallint_col, 4 | 2::int as int_col, 5 | 3::bigint as bigint_col, 6 | 4::integer as integer_col, 7 | 5::tinyint as tinyint_col, 8 | 6::byteint as byteint_col, 9 | 7.0::float as float_col, 10 | 8.0::float4 as float4_col, 11 | 9.0::float8 as float8_col, 12 | 10.0::double as double_col, 13 | 11.0::double precision as double_p_col, 14 | 12.0::real as real_col, 15 | 13.0::numeric as numeric_col, 16 | 14.0::decimal as decimal_col, 17 | 15.0::number as number_col, 18 | '16'::text as text_col, 19 | '17'::varchar(20) as varchar_col 20 | """ 21 | 22 | _SCHEMA_YML = """ 23 | version: 2 24 | models: 25 | - name: model 26 | data_tests: 27 | - is_type: 28 | column_map: 29 | smallint_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 30 | int_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 31 | bigint_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 32 | integer_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 33 | tinyint_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 34 | byteint_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 35 | float_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 36 | float4_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 37 | float8_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 38 | double_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 39 | double_p_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 40 | real_col: ['float', 'number', 'not string', 'not integer', 'not numeric'] 41 | numeric_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 42 | decimal_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 43 | number_col: ['numeric', 'number', 'not string', 'not float', 'not integer'] 44 | text_col: ['string', 'not number'] 45 | varchar_col: ['string', 'not number'] 46 | """ 47 | -------------------------------------------------------------------------------- /tests/functional/adapter/column_types/test_column_types.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes 3 | from tests.functional.adapter.column_types.fixtures import _MODEL_SQL, _SCHEMA_YML 4 | 5 | 6 | class TestSnowflakeColumnTypes(BaseColumnTypes): 7 | @pytest.fixture(scope="class") 8 | def models(self): 9 | return {"model.sql": _MODEL_SQL, "schema.yml": _SCHEMA_YML} 10 | 11 | def test_run_and_test(self, project): 12 | self.run_and_test() 13 | 
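The adapter tests above pull their Snowflake credentials from the environment via the `dbt_profile_target` fixture in `tests/conftest.py`. A hypothetical local invocation, assuming `test.env` has been populated from `test.env.example`, might look like:

```shell
# run only the column-type tests through the hatch environment defined in hatch.toml
hatch run integration-tests tests/functional/adapter/column_types
# or call pytest directly; pytest-dotenv loads test.env as configured in pyproject.toml
python -m pytest tests/functional/adapter/column_types
```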
-------------------------------------------------------------------------------- /tests/functional/adapter/custom_schema_tests/seeds.py: -------------------------------------------------------------------------------- 1 | seed_csv = """ 2 | id,first_name,last_name,email,gender,ip_address 3 | 1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 4 | 2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 5 | 3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 6 | """.lstrip() 7 | 8 | seed_agg_csv = """ 9 | last_name,count 10 | Hunter,2 11 | Walker,2 12 | Ryan,2 13 | """.lstrip() 14 | -------------------------------------------------------------------------------- /tests/functional/adapter/custom_schema_tests/test_custom_database.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import os 3 | from dbt.tests.util import check_relations_equal, check_table_does_exist, run_dbt 4 | from tests.functional.adapter.custom_schema_tests.seeds import seed_agg_csv, seed_csv 5 | 6 | _VIEW_1_SQL = """ 7 | select * from {{ ref('seed') }} 8 | """.lstrip() 9 | 10 | _VIEW_2_SQL = """ 11 | {{ config(database='alt') }} 12 | select * from {{ ref('view_1') }} 13 | """.lstrip() 14 | 15 | _VIEW_3_SQL = """ 16 | {{ config(database='alt', materialized='table') }} 17 | 18 | 19 | with v1 as ( 20 | 21 | select * from {{ ref('view_1') }} 22 | 23 | ), 24 | 25 | v2 as ( 26 | 27 | select * from {{ ref('view_2') }} 28 | 29 | ), 30 | 31 | combined as ( 32 | 33 | select last_name from v1 34 | union all 35 | select last_name from v2 36 | 37 | ) 38 | 39 | select 40 | last_name, 41 | count(*) as count 42 | 43 | from combined 44 | group by 1 45 | """.lstrip() 46 | 47 | _CUSTOM_DB_SQL = """ 48 | {% macro generate_database_name(database_name, node) %} 49 | {% if database_name == 'alt' %} 50 | {{ env_var('SNOWFLAKE_TEST_ALT_DATABASE') }} 51 | {% elif database_name %} 52 | {{ database_name }} 53 | {% else %} 54 | {{ target.database }} 55 | {% endif %} 56 | {% endmacro %} 57 | """.lstrip() 58 | 59 | ALT_DATABASE = os.getenv("SNOWFLAKE_TEST_ALT_DATABASE") 60 | 61 | 62 | class TestOverrideDatabase: 63 | @pytest.fixture(scope="class") 64 | def macros(self): 65 | return { 66 | "custom_db.sql": _CUSTOM_DB_SQL, 67 | } 68 | 69 | @pytest.fixture(scope="class") 70 | def seeds(self): 71 | return {"seed.csv": seed_csv, "agg.csv": seed_agg_csv} 72 | 73 | @pytest.fixture(scope="class") 74 | def models(self): 75 | return { 76 | "view_1.sql": _VIEW_1_SQL, 77 | "view_2.sql": _VIEW_2_SQL, 78 | "view_3.sql": _VIEW_3_SQL, 79 | } 80 | 81 | @pytest.fixture(scope="function") 82 | def clean_up(self, project): 83 | yield 84 | with project.adapter.connection_named("__test"): 85 | relation = project.adapter.Relation.create( 86 | database=ALT_DATABASE, schema=project.test_schema 87 | ) 88 | project.adapter.drop_schema(relation) 89 | 90 | def test_snowflake_override_generate_db_name(self, project, clean_up): 91 | seed_results = run_dbt(["seed", "--full-refresh"]) 92 | assert len(seed_results) == 2 93 | 94 | db_with_schema = f"{project.database}.{project.test_schema}" 95 | alt_db_with_schema = f"{ALT_DATABASE}.{project.test_schema}" 96 | seed_table = "SEED" 97 | agg_table = "AGG" 98 | view_1 = "VIEW_1" 99 | view_2 = "VIEW_2" 100 | view_3 = "VIEW_3" 101 | 102 | check_table_does_exist(project.adapter, f"{db_with_schema}.{seed_table}") 103 | check_table_does_exist(project.adapter, f"{db_with_schema}.{agg_table}") 104 | 105 | results = run_dbt() 106 | assert len(results) == 3 107 | 108 | 
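# view_1 stays in the default database; the generate_database_name override routes view_2 and view_3 to SNOWFLAKE_TEST_ALT_DATABASE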
check_table_does_exist(project.adapter, f"{db_with_schema}.{view_1}") 109 | check_table_does_exist(project.adapter, f"{alt_db_with_schema}.{view_2}") 110 | check_table_does_exist(project.adapter, f"{alt_db_with_schema}.{view_3}") 111 | 112 | # not overridden 113 | check_relations_equal( 114 | project.adapter, [f"{db_with_schema}.{seed_table}", f"{db_with_schema}.{view_1}"] 115 | ) 116 | 117 | # overridden 118 | check_relations_equal( 119 | project.adapter, [f"{db_with_schema}.{seed_table}", f"{alt_db_with_schema}.{view_2}"] 120 | ) 121 | check_relations_equal( 122 | project.adapter, [f"{db_with_schema}.{agg_table}", f"{alt_db_with_schema}.{view_3}"] 123 | ) 124 | -------------------------------------------------------------------------------- /tests/functional/adapter/custom_schema_tests/test_custom_schema.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import check_relations_equal, run_dbt 3 | from tests.functional.adapter.custom_schema_tests.seeds import seed_agg_csv, seed_csv 4 | 5 | _VIEW_1_SQL = """ 6 | select * from {{ ref('seed') }} 7 | """.lstrip() 8 | 9 | _VIEW_2_SQL = """ 10 | {{ config(schema='custom') }} 11 | 12 | select * from {{ ref('view_1') }} 13 | """.lstrip() 14 | 15 | _VIEW_3_SQL = """ 16 | {{ config(schema='test', materialized='table') }} 17 | 18 | 19 | with v1 as ( 20 | 21 | select * from {{ ref('view_1') }} 22 | 23 | ), 24 | 25 | v2 as ( 26 | 27 | select * from {{ ref('view_2') }} 28 | 29 | ), 30 | 31 | combined as ( 32 | 33 | select last_name from v1 34 | union all 35 | select last_name from v2 36 | 37 | ) 38 | 39 | select 40 | last_name, 41 | count(*) as count 42 | 43 | from combined 44 | group by 1 45 | """.lstrip() 46 | 47 | 48 | class TestCustomProjectSchemaWithPrefix: 49 | @pytest.fixture(scope="class") 50 | def seeds(self): 51 | return {"seed.csv": seed_csv, "agg.csv": seed_agg_csv} 52 | 53 | @pytest.fixture(scope="class") 54 | def models(self): 55 | return {"view_1.sql": _VIEW_1_SQL, "view_2.sql": _VIEW_2_SQL, "view_3.sql": _VIEW_3_SQL} 56 | 57 | @pytest.fixture(scope="class") 58 | def project_config_update(self): 59 | return {"models": {"schema": "dbt_test"}} 60 | 61 | @pytest.fixture(scope="function") 62 | def clean_up(self, project): 63 | yield 64 | with project.adapter.connection_named("__test"): 65 | alt_schema_list = ["DBT_TEST", "CUSTOM", "TEST"] 66 | for alt_schema in alt_schema_list: 67 | alt_test_schema = f"{project.test_schema}_{alt_schema}" 68 | relation = project.adapter.Relation.create( 69 | database=project.database, schema=alt_test_schema 70 | ) 71 | project.adapter.drop_schema(relation) 72 | 73 | def test__snowflake__custom_schema_with_prefix(self, project, clean_up): 74 | seed_results = run_dbt(["seed"]) 75 | assert len(seed_results) == 2 76 | results = run_dbt() 77 | assert len(results) == 3 78 | 79 | db_with_schema = f"{project.database}.{project.test_schema}" 80 | check_relations_equal( 81 | project.adapter, [f"{db_with_schema}.SEED", f"{db_with_schema}_DBT_TEST.VIEW_1"] 82 | ) 83 | check_relations_equal( 84 | project.adapter, [f"{db_with_schema}.SEED", f"{db_with_schema}_CUSTOM.VIEW_2"] 85 | ) 86 | check_relations_equal( 87 | project.adapter, [f"{db_with_schema}.AGG", f"{db_with_schema}_TEST.VIEW_3"] 88 | ) 89 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt_clone/test_dbt_clone.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import shutil 
3 | import os 4 | from copy import deepcopy 5 | from dbt.tests.util import run_dbt 6 | from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseClonePossible 7 | 8 | 9 | class TestSnowflakeClonePossible(BaseClonePossible): 10 | @pytest.fixture(autouse=True) 11 | def clean_up(self, project): 12 | yield 13 | with project.adapter.connection_named("__test"): 14 | relation = project.adapter.Relation.create( 15 | database=project.database, schema=f"{project.test_schema}_SEEDS" 16 | ) 17 | project.adapter.drop_schema(relation) 18 | 19 | relation = project.adapter.Relation.create( 20 | database=project.database, schema=project.test_schema 21 | ) 22 | project.adapter.drop_schema(relation) 23 | 24 | 25 | 26 | 27 | table_model_1_sql = """ 28 | {{ config( 29 | materialized='table', 30 | transient=true, 31 | ) }} 32 | 33 | select 1 as fun 34 | """ 35 | 36 | 37 | class TestSnowflakeCloneTransientTable: 38 | @pytest.fixture(scope="class") 39 | def models(self): 40 | return { 41 | "table_model.sql": table_model_1_sql, 42 | } 43 | 44 | @pytest.fixture(scope="class") 45 | def other_schema(self, unique_schema): 46 | return unique_schema + "_other" 47 | 48 | @pytest.fixture(scope="class") 49 | def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): 50 | outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} 51 | outputs["default"]["schema"] = unique_schema 52 | outputs["otherschema"]["schema"] = other_schema 53 | return {"test": {"outputs": outputs, "target": "default"}} 54 | 55 | def copy_state(self, project_root): 56 | state_path = os.path.join(project_root, "state") 57 | if not os.path.exists(state_path): 58 | os.makedirs(state_path) 59 | shutil.copyfile( 60 | f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" 61 | ) 62 | 63 | def run_and_save_state(self, project_root, with_snapshot=False): 64 | results = run_dbt(["run"]) 65 | assert len(results) == 1 66 | 67 | self.copy_state(project_root) 68 | 69 | def test_can_clone_transient_table(self, project, other_schema): 70 | project.create_test_schema(other_schema) 71 | self.run_and_save_state(project.project_root) 72 | 73 | clone_args = [ 74 | "clone", 75 | "--state", 76 | "state", 77 | "--target", 78 | "otherschema", 79 | ] 80 | 81 | results = run_dbt(clone_args) 82 | assert len(results) == 1 83 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt_show/test_dbt_show.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.dbt_show.test_dbt_show import ( 2 | BaseShowSqlHeader, 3 | BaseShowLimit, 4 | BaseShowDoesNotHandleDoubleLimit, 5 | ) 6 | 7 | 8 | class TestSnowflakeShowLimit(BaseShowLimit): 9 | pass 10 | 11 | 12 | class TestSnowflakeShowSqlHeader(BaseShowSqlHeader): 13 | pass 14 | 15 | 16 | class TestSnowflakeShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit): 17 | DATABASE_ERROR_MESSAGE = "unexpected 'limit'" 18 | -------------------------------------------------------------------------------- /tests/functional/adapter/empty/test_empty.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.empty.test_empty import ( 2 | BaseTestEmpty, 3 | BaseTestEmptyInlineSourceRef, 4 | MetadataWithEmptyFlag, 5 | ) 6 | 7 | 8 | class TestSnowflakeEmpty(BaseTestEmpty): 9 | pass 10 | 11 | 12 | class TestSnowflakeEmptyInlineSourceRef(BaseTestEmptyInlineSourceRef): 13 | pass 14 | 15 | 16 | class 
TestMetadataWithEmptyFlag(MetadataWithEmptyFlag): 17 | pass 18 | -------------------------------------------------------------------------------- /tests/functional/adapter/expected_stats.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.util import AnyString, AnyInteger 2 | 3 | 4 | def snowflake_stats(): 5 | return { 6 | "has_stats": { 7 | "id": "has_stats", 8 | "label": "Has Stats?", 9 | "value": True, 10 | "description": "Indicates whether there are statistics for this table", 11 | "include": False, 12 | }, 13 | "bytes": { 14 | "id": "bytes", 15 | "label": "Approximate Size", 16 | "value": AnyInteger(), 17 | "description": "Approximate size of the table as reported by Snowflake", 18 | "include": True, 19 | }, 20 | "last_modified": { 21 | "id": "last_modified", 22 | "label": "Last Modified", 23 | "value": AnyString(), 24 | "description": "The timestamp for last update/change", 25 | "include": True, 26 | }, 27 | "row_count": { 28 | "id": "row_count", 29 | "label": "Row Count", 30 | "value": 1.0, 31 | "description": "An approximate count of rows in this table", 32 | "include": True, 33 | }, 34 | } 35 | -------------------------------------------------------------------------------- /tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.incremental.test_incremental_merge_exclude_columns import ( 2 | BaseMergeExcludeColumns, 3 | ) 4 | 5 | 6 | class TestMergeExcludeColumns(BaseMergeExcludeColumns): 7 | pass 8 | -------------------------------------------------------------------------------- /tests/functional/adapter/incremental/test_incremental_on_schema_change.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( 2 | BaseIncrementalOnSchemaChange, 3 | ) 4 | 5 | 6 | class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): 7 | pass 8 | -------------------------------------------------------------------------------- /tests/functional/adapter/incremental/test_incremental_predicates.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.incremental.test_incremental_predicates import BaseIncrementalPredicates 3 | 4 | 5 | class TestIncrementalPredicatesDeleteInsertSnowflake(BaseIncrementalPredicates): 6 | pass 7 | 8 | 9 | class TestPredicatesDeleteInsertSnowflake(BaseIncrementalPredicates): 10 | @pytest.fixture(scope="class") 11 | def project_config_update(self): 12 | return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}} 13 | 14 | 15 | class TestIncrementalPredicatesMergeSnowflake(BaseIncrementalPredicates): 16 | @pytest.fixture(scope="class") 17 | def project_config_update(self): 18 | return { 19 | "models": { 20 | "+incremental_predicates": ["dbt_internal_dest.id != 2"], 21 | "+incremental_strategy": "merge", 22 | } 23 | } 24 | 25 | 26 | class TestPredicatesMergeSnowflake(BaseIncrementalPredicates): 27 | @pytest.fixture(scope="class") 28 | def project_config_update(self): 29 | return { 30 | "models": { 31 | "+predicates": ["dbt_internal_dest.id != 2"], 32 | "+incremental_strategy": "merge", 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /tests/functional/adapter/incremental/test_incremental_run_result.py: 
-------------------------------------------------------------------------------- 1 | from dbt.tests.util import run_dbt 2 | from dbt.tests.adapter.basic.test_incremental import ( 3 | BaseIncremental, 4 | BaseIncrementalNotSchemaChange, 5 | ) 6 | 7 | 8 | class TestBaseIncrementalNotSchemaChange(BaseIncrementalNotSchemaChange): 9 | pass 10 | 11 | 12 | class TestIncrementalRunResultSnowflake(BaseIncremental): 13 | """Bonus test to verify that incremental models return the number of rows affected""" 14 | 15 | def test_incremental(self, project): 16 | # seed command 17 | results = run_dbt(["seed"]) 18 | assert len(results) == 2 19 | 20 | # run with initial seed 21 | results = run_dbt(["run", "--vars", "seed_name: base"]) 22 | assert len(results) == 1 23 | 24 | # run with additions 25 | results = run_dbt(["run", "--vars", "seed_name: added"]) 26 | assert len(results) == 1 27 | # verify that run_result is correct 28 | rows_affected = results[0].adapter_response["rows_affected"] 29 | assert rows_affected == 10, f"Expected 10 rows changed, found {rows_affected}" 30 | -------------------------------------------------------------------------------- /tests/functional/adapter/incremental/test_incremental_unique_id.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.incremental.test_incremental_unique_id import BaseIncrementalUniqueKey 3 | 4 | 5 | class TestUniqueKeySnowflake(BaseIncrementalUniqueKey): 6 | pass 7 | 8 | 9 | class TestUniqueKeyDeleteInsertSnowflake(BaseIncrementalUniqueKey): 10 | @pytest.fixture(scope="class") 11 | def project_config_update(self): 12 | return {"models": {"+incremental_strategy": "delete+insert"}} 13 | -------------------------------------------------------------------------------- /tests/functional/adapter/list_relations_tests/test_pagination.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from dbt_common.exceptions import CompilationError 6 | from dbt.tests.util import run_dbt 7 | 8 | """ 9 | Testing rationale: 10 | - snowflake SHOW TERSE OBJECTS command returns at max 10K objects in a single call 11 | - when dbt attempts to write into a schema with more than 10K objects, compilation will fail 12 | unless we paginate the result 13 | - we default pagination to 10 pages, but users want to configure this 14 | - we instead use that here to force failures by making it smaller 15 | """ 16 | 17 | 18 | TABLE = """ 19 | {{ config(materialized='table') }} 20 | select 1 as id 21 | """ 22 | 23 | 24 | VIEW = """ 25 | {{ config(materialized='view') }} 26 | select id from {{ ref('my_model_base') }} 27 | """ 28 | 29 | 30 | DYNAMIC_TABLE = ( 31 | """ 32 | {{ config( 33 | materialized='dynamic_table', 34 | target_lag='1 hour', 35 | snowflake_warehouse='""" 36 | + os.getenv("SNOWFLAKE_TEST_WAREHOUSE") 37 | + """', 38 | ) }} 39 | 40 | select id from {{ ref('my_model_base') }} 41 | """ 42 | ) 43 | 44 | 45 | class BaseConfig: 46 | VIEWS = 90 47 | DYNAMIC_TABLES = 10 48 | 49 | @pytest.fixture(scope="class") 50 | def models(self): 51 | my_models = {"my_model_base.sql": TABLE} 52 | for view in range(0, self.VIEWS): 53 | my_models[f"my_model_{view}.sql"] = VIEW 54 | for dynamic_table in range(0, self.DYNAMIC_TABLES): 55 | my_models[f"my_dynamic_table_{dynamic_table}.sql"] = DYNAMIC_TABLE 56 | return my_models 57 | 58 | @pytest.fixture(scope="class", autouse=True) 59 | def setup(self, project): 60 | run_dbt(["run"]) 61 | 62 | def 
test_list_relations(self, project): 63 | kwargs = {"schema_relation": project.test_schema} 64 | with project.adapter.connection_named("__test"): 65 | relations = project.adapter.execute_macro( 66 | "snowflake__list_relations_without_caching", kwargs=kwargs 67 | ) 68 | assert len(relations) == self.VIEWS + self.DYNAMIC_TABLES + 1 69 | 70 | 71 | class TestListRelationsWithoutCachingSmall(BaseConfig): 72 | pass 73 | 74 | 75 | class TestListRelationsWithoutCachingLarge(BaseConfig): 76 | @pytest.fixture(scope="class") 77 | def profiles_config_update(self): 78 | return { 79 | "flags": { 80 | "list_relations_per_page": 10, 81 | "list_relations_page_limit": 20, 82 | } 83 | } 84 | 85 | 86 | class TestListRelationsWithoutCachingTooLarge(BaseConfig): 87 | 88 | @pytest.fixture(scope="class") 89 | def project_config_update(self): 90 | return { 91 | "flags": { 92 | "list_relations_per_page": 10, 93 | "list_relations_page_limit": 5, 94 | } 95 | } 96 | 97 | def test_list_relations(self, project): 98 | kwargs = {"schema_relation": project.test_schema} 99 | with project.adapter.connection_named("__test"): 100 | with pytest.raises(CompilationError) as error: 101 | project.adapter.execute_macro( 102 | "snowflake__list_relations_without_caching", kwargs=kwargs 103 | ) 104 | assert "list_relations_per_page" in error.value.msg 105 | assert "list_relations_page_limit" in error.value.msg 106 | 107 | def test_on_run(self, project): 108 | with pytest.raises(CompilationError) as error: 109 | run_dbt(["run"]) 110 | assert "list_relations_per_page" in error.value.msg 111 | assert "list_relations_page_limit" in error.value.msg 112 | -------------------------------------------------------------------------------- /tests/functional/adapter/list_relations_tests/test_show_objects.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import List 3 | 4 | import pytest 5 | 6 | from pathlib import Path 7 | 8 | from dbt.adapters.factory import get_adapter_by_type 9 | from dbt.adapters.snowflake import SnowflakeRelation 10 | 11 | from dbt.tests.util import run_dbt, get_connection 12 | 13 | 14 | SEED = """ 15 | id,value 16 | 0,red 17 | 1,yellow 18 | 2,blue 19 | """.strip() 20 | 21 | 22 | VIEW = """ 23 | select * from {{ ref('my_seed') }} 24 | """ 25 | 26 | 27 | TABLE = """ 28 | {{ config(materialized='table') }} 29 | select * from {{ ref('my_seed') }} 30 | """ 31 | 32 | 33 | DYNAMIC_TABLE = ( 34 | """ 35 | {{ config( 36 | materialized='dynamic_table', 37 | target_lag='1 day', 38 | snowflake_warehouse='""" 39 | + os.getenv("SNOWFLAKE_TEST_WAREHOUSE") 40 | + """', 41 | ) }} 42 | select * from {{ ref('my_seed') }} 43 | """ 44 | ) 45 | 46 | _MODEL_ICEBERG = """ 47 | {{ 48 | config( 49 | materialized = "table", 50 | table_format="iceberg", 51 | external_volume="s3_iceberg_snow", 52 | ) 53 | }} 54 | 55 | select 1 56 | """ 57 | 58 | 59 | class ShowObjectsBase: 60 | @staticmethod 61 | def list_relations_without_caching(project) -> List[SnowflakeRelation]: 62 | my_adapter = get_adapter_by_type("snowflake") 63 | schema = my_adapter.Relation.create( 64 | database=project.database, schema=project.test_schema, identifier="" 65 | ) 66 | with get_connection(my_adapter): 67 | relations = my_adapter.list_relations_without_caching(schema) 68 | return relations 69 | 70 | 71 | class TestShowObjects(ShowObjectsBase): 72 | views: int = 10 73 | tables: int = 10 74 | dynamic_tables: int = 10 75 | 76 | @pytest.fixture(scope="class") 77 | def seeds(self): 78 | yield {"my_seed.csv": SEED} 79 | 80 
| @pytest.fixture(scope="class") 81 | def models(self): 82 | models = {} 83 | models.update({f"my_view_{i}.sql": VIEW for i in range(self.views)}) 84 | models.update({f"my_table_{i}.sql": TABLE for i in range(self.tables)}) 85 | models.update( 86 | {f"my_dynamic_table_{i}.sql": DYNAMIC_TABLE for i in range(self.dynamic_tables)} 87 | ) 88 | yield models 89 | 90 | @pytest.fixture(scope="class", autouse=True) 91 | def setup(self, project): 92 | run_dbt(["seed"]) 93 | run_dbt(["run"]) 94 | 95 | def test_list_relations_without_caching(self, project): 96 | relations = self.list_relations_without_caching(project) 97 | assert len([relation for relation in relations if relation.is_view]) == self.views 98 | assert ( 99 | len([relation for relation in relations if relation.is_table]) 100 | == self.tables + 1 # add the seed 101 | ) 102 | assert ( 103 | len([relation for relation in relations if relation.is_dynamic_table]) 104 | == self.dynamic_tables 105 | ) 106 | 107 | 108 | class TestShowIcebergObjects(ShowObjectsBase): 109 | @pytest.fixture(scope="class") 110 | def project_config_update(self): 111 | return {"flags": {"enable_iceberg_materializations": True}} 112 | 113 | @pytest.fixture(scope="class") 114 | def models(self): 115 | return {"my_model.sql": _MODEL_ICEBERG} 116 | 117 | def test_quoting_ignore_flag_doesnt_break_iceberg_metadata(self, project): 118 | """https://github.com/dbt-labs/dbt-snowflake/issues/1227 119 | 120 | The list relations function involves a metadata sub-query. Regardless of 121 | QUOTED_IDENTIFIERS_IGNORE_CASE, this function will fail without proper 122 | normalization within the encapsulating python function after the macro invocation 123 | returns. This test verifies that normalization is working. 124 | """ 125 | run_dbt(["run"]) 126 | 127 | self.list_relations_without_caching(project) 128 | -------------------------------------------------------------------------------- /tests/functional/adapter/list_relations_tests/test_special_characters.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt 3 | 4 | 5 | TABLE_BASE_SQL = """ 6 | -- models/my_model.sql 7 | {{ config(schema = '1_contains_special*character$') }} 8 | select 1 as id 9 | """ 10 | 11 | 12 | class TestSpecialCharactersInSchema: 13 | 14 | @pytest.fixture(scope="class") 15 | def project_config_update(self): 16 | return {"quoting": {"schema": True}} 17 | 18 | @pytest.fixture(scope="class") 19 | def models(self): 20 | return { 21 | "my_model.sql": TABLE_BASE_SQL, 22 | } 23 | 24 | def test_schema_with_special_chars(self, project): 25 | run_dbt(["run", "-s", "my_model"]) 26 | -------------------------------------------------------------------------------- /tests/functional/adapter/python_model_tests/_files.py: -------------------------------------------------------------------------------- 1 | # __table 2 | TRANSIENT_TRUE_TABLE = """ 3 | import pandas 4 | 5 | def model(dbt, session): 6 | dbt.config(transient=True) 7 | return pandas.DataFrame([[1,2]] * 10, columns=['test', 'test2']) 8 | """ 9 | 10 | 11 | TRANSIENT_FALSE_TABLE = """ 12 | import pandas 13 | 14 | def model(dbt, session): 15 | dbt.config(transient=False) 16 | return pandas.DataFrame([[1,2]] * 10, columns=['test', 'test2']) 17 | """ 18 | 19 | 20 | TRANSIENT_NONE_TABLE = """ 21 | import pandas 22 | 23 | def model(dbt, session): 24 | dbt.config(transient=None) 25 | return pandas.DataFrame([[1,2]] * 10, columns=['test', 'test2']) 26 | """ 27 | 28 | 29 | 
TRANSIENT_UNSET_TABLE = """ 30 | import pandas 31 | 32 | def model(dbt, session): 33 | return pandas.DataFrame([[1,2]] * 10, columns=['test', 'test2']) 34 | """ 35 | 36 | 37 | MACRO__DESCRIBE_TABLES = """ 38 | {% macro snowflake__test__describe_tables() %} 39 | {%- set _sql -%} 40 | show tables; 41 | select "name", "kind" 42 | from table(result_scan(last_query_id())) 43 | {%- endset %} 44 | {% set _table = run_query(_sql) %} 45 | 46 | {% do return(_table) %} 47 | {% endmacro %} 48 | """ 49 | -------------------------------------------------------------------------------- /tests/functional/adapter/python_model_tests/test_table_type.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import run_dbt 4 | 5 | from tests.functional.adapter.python_model_tests import _files 6 | 7 | 8 | class TestTableType: 9 | @pytest.fixture(scope="class") 10 | def macros(self): 11 | return {"snowflake__test__describe_tables.sql": _files.MACRO__DESCRIBE_TABLES} 12 | 13 | @pytest.fixture(scope="class") 14 | def models(self): 15 | return { 16 | # __table 17 | "TRANSIENT_TRUE_TABLE.py": _files.TRANSIENT_TRUE_TABLE, 18 | "TRANSIENT_FALSE_TABLE.py": _files.TRANSIENT_FALSE_TABLE, 19 | "TRANSIENT_NONE_TABLE.py": _files.TRANSIENT_NONE_TABLE, 20 | "TRANSIENT_UNSET_TABLE.py": _files.TRANSIENT_UNSET_TABLE, 21 | } 22 | 23 | def test_expected_table_types_are_created(self, project): 24 | run_dbt(["run"]) 25 | expected_table_types = { 26 | # (name, kind) - TABLE == permanent 27 | ("TRANSIENT_TRUE_TABLE", "TRANSIENT"), 28 | ("TRANSIENT_FALSE_TABLE", "TABLE"), 29 | ("TRANSIENT_NONE_TABLE", "TABLE"), 30 | ("TRANSIENT_UNSET_TABLE", "TRANSIENT"), 31 | } 32 | with project.adapter.connection_named("__test"): 33 | agate_table = project.adapter.execute_macro("snowflake__test__describe_tables") 34 | actual_table_types = {(row.get("name"), row.get("kind")) for row in agate_table.rows} 35 | assert actual_table_types == expected_table_types 36 | -------------------------------------------------------------------------------- /tests/functional/adapter/query_comment_tests/test_query_comments.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.query_comment.test_query_comment import ( 3 | BaseQueryComments, 4 | BaseMacroQueryComments, 5 | BaseMacroArgsQueryComments, 6 | BaseMacroInvalidQueryComments, 7 | BaseNullQueryComments, 8 | BaseEmptyQueryComments, 9 | ) 10 | 11 | 12 | class TestQueryCommentsSnowflake(BaseQueryComments): 13 | pass 14 | 15 | 16 | class TestMacroQueryCommentsSnowflake(BaseMacroQueryComments): 17 | pass 18 | 19 | 20 | class TestMacroArgsQueryCommentsSnowflake(BaseMacroArgsQueryComments): 21 | @pytest.mark.skip( 22 | "This test is incorrectly comparing the version of `dbt-core` " 23 | "to the version of `dbt-snowflake`, which is not always the same."
24 | ) 25 | def test_matches_comment(self, project, get_package_version): 26 | pass 27 | 28 | 29 | class TestMacroInvalidQueryCommentsSnowflake(BaseMacroInvalidQueryComments): 30 | pass 31 | 32 | 33 | class TestNullQueryCommentsSnowflake(BaseNullQueryComments): 34 | pass 35 | 36 | 37 | class TestEmptyQueryCommentsSnowflake(BaseEmptyQueryComments): 38 | pass 39 | -------------------------------------------------------------------------------- /tests/functional/adapter/simple_seed/test_simple_seed.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.adapter.simple_seed.test_seed import SeedConfigBase 4 | from dbt.tests.util import run_dbt 5 | 6 | 7 | class TestSimpleBigSeedBatched(SeedConfigBase): 8 | @pytest.fixture(scope="class") 9 | def seeds(self): 10 | seed_data = ["seed_id"] 11 | seed_data.extend([str(i) for i in range(20_000)]) 12 | return {"big_batched_seed.csv": "\n".join(seed_data)} 13 | 14 | def test_big_batched_seed(self, project): 15 | seed_results = run_dbt(["seed"]) 16 | assert len(seed_results) == 1 17 | -------------------------------------------------------------------------------- /tests/functional/adapter/simple_seed/test_simple_seed_override.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.simple_seed.test_seed_type_override import BaseSimpleSeedColumnOverride 3 | from dbt.tests.adapter.utils.base_utils import run_dbt 4 | 5 | _SCHEMA_YML = """ 6 | version: 2 7 | seeds: 8 | - name: seed_enabled 9 | columns: 10 | - name: birthday 11 | data_tests: 12 | - column_type: 13 | type: character varying(16777216) 14 | - name: seed_id 15 | data_tests: 16 | - column_type: 17 | type: FLOAT 18 | 19 | - name: seed_tricky 20 | columns: 21 | - name: seed_id 22 | data_tests: 23 | - column_type: 24 | type: NUMBER(38,0) 25 | - name: seed_id_str 26 | data_tests: 27 | - column_type: 28 | type: character varying(16777216) 29 | - name: a_bool 30 | data_tests: 31 | - column_type: 32 | type: BOOLEAN 33 | - name: looks_like_a_bool 34 | data_tests: 35 | - column_type: 36 | type: character varying(16777216) 37 | - name: a_date 38 | data_tests: 39 | - column_type: 40 | type: TIMESTAMP_NTZ 41 | - name: looks_like_a_date 42 | data_tests: 43 | - column_type: 44 | type: character varying(16777216) 45 | - name: relative 46 | data_tests: 47 | - column_type: 48 | type: character varying(16777216) 49 | - name: weekday 50 | data_tests: 51 | - column_type: 52 | type: character varying(16777216) 53 | """.lstrip() 54 | 55 | 56 | class TestSimpleSeedColumnOverride(BaseSimpleSeedColumnOverride): 57 | @pytest.fixture(scope="class") 58 | def schema(self): 59 | return "simple_seed" 60 | 61 | @pytest.fixture(scope="class") 62 | def models(self): 63 | return {"models-snowflake.yml": _SCHEMA_YML} 64 | 65 | @staticmethod 66 | def seed_enabled_types(): 67 | return { 68 | "seed_id": "FLOAT", 69 | "birthday": "TEXT", 70 | } 71 | 72 | @staticmethod 73 | def seed_tricky_types(): 74 | return { 75 | "seed_id_str": "TEXT", 76 | "looks_like_a_bool": "TEXT", 77 | "looks_like_a_date": "TEXT", 78 | } 79 | 80 | def test_snowflake_simple_seed_with_column_override_snowflake(self, project): 81 | seed_results = run_dbt(["seed"]) 82 | assert len(seed_results) == 2 83 | test_results = run_dbt(["test"]) 84 | assert len(test_results) == 10 85 | -------------------------------------------------------------------------------- /tests/functional/adapter/statement_test/test_statements.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import check_relations_equal, run_dbt 3 | from tests.functional.adapter.statement_test.seeds import seeds_csv, statement_expected_csv 4 | 5 | _STATEMENT_ACTUAL_SQL = """ 6 | -- {{ ref('seed') }} 7 | 8 | {%- call statement('test_statement', fetch_result=True) -%} 9 | 10 | select 11 | count(*) as "num_records" 12 | 13 | from {{ ref('seed') }} 14 | 15 | {%- endcall -%} 16 | 17 | {% set result = load_result('test_statement') %} 18 | 19 | {% set res_table = result['table'] %} 20 | {% set res_matrix = result['data'] %} 21 | 22 | {% set matrix_value = res_matrix[0][0] %} 23 | {% set table_value = res_table[0]['num_records'] %} 24 | 25 | select 'matrix' as source, {{ matrix_value }} as value 26 | union all 27 | select 'table' as source, {{ table_value }} as value 28 | """.lstrip() 29 | 30 | 31 | class TestStatements: 32 | @pytest.fixture(scope="class") 33 | def models(self): 34 | return {"statement_actual.sql": _STATEMENT_ACTUAL_SQL} 35 | 36 | @pytest.fixture(scope="class") 37 | def seeds(self): 38 | return { 39 | "seed.csv": seeds_csv, 40 | "statement_expected.csv": statement_expected_csv, 41 | } 42 | 43 | def test_snowflake_statements(self, project): 44 | seed_results = run_dbt(["seed"]) 45 | assert len(seed_results) == 2 46 | results = run_dbt() 47 | assert len(results) == 1 48 | 49 | db_with_schema = f"{project.database}.{project.test_schema}" 50 | check_relations_equal( 51 | project.adapter, 52 | [f"{db_with_schema}.STATEMENT_ACTUAL", f"{db_with_schema}.STATEMENT_EXPECTED"], 53 | ) 54 | -------------------------------------------------------------------------------- /tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.store_test_failures_tests import basic 2 | from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( 3 | TestStoreTestFailures, 4 | ) 5 | 6 | 7 | class TestSnowflakeStoreTestFailures(TestStoreTestFailures): 8 | pass 9 | 10 | 11 | class TestStoreTestFailuresAsInteractions(basic.StoreTestFailuresAsInteractions): 12 | pass 13 | 14 | 15 | class TestStoreTestFailuresAsProjectLevelOff(basic.StoreTestFailuresAsProjectLevelOff): 16 | pass 17 | 18 | 19 | class TestStoreTestFailuresAsProjectLevelView(basic.StoreTestFailuresAsProjectLevelView): 20 | pass 21 | 22 | 23 | class TestStoreTestFailuresAsGeneric(basic.StoreTestFailuresAsGeneric): 24 | pass 25 | 26 | 27 | class TestStoreTestFailuresAsProjectLevelEphemeral(basic.StoreTestFailuresAsProjectLevelEphemeral): 28 | pass 29 | 30 | 31 | class TestStoreTestFailuresAsExceptions(basic.StoreTestFailuresAsExceptions): 32 | pass 33 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_aliases.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.aliases.test_aliases import BaseAliases 3 | 4 | MACROS__SNOWFLAKE_CAST_SQL = """ 5 | {% macro snowflake__string_literal(s) %} 6 | cast('{{ s }}' as string) 7 | {% endmacro %} 8 | """ 9 | 10 | MACROS__EXPECT_VALUE_SQL = """ 11 | -- cross-db compatible test, similar to accepted_values 12 | 13 | {% test expect_value(model, field, value) %} 14 | 15 | select * 16 | from {{ model }} 17 | where {{ field }} != '{{ value }}' 18 | 19 | {% endtest %} 20 | """ 21 | 22 | 23 | class 
TestAliasesSnowflake(BaseAliases): 24 | @pytest.fixture(scope="class") 25 | def macros(self): 26 | return { 27 | "snowflake_cast.sql": MACROS__SNOWFLAKE_CAST_SQL, 28 | "expect_value.sql": MACROS__EXPECT_VALUE_SQL, 29 | } 30 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_anonymous_usage_stats.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.util import run_dbt_and_capture 2 | import pytest 3 | 4 | 5 | ANONYMOUS_USAGE_MESSAGE = """ 6 | sys._xoptions['snowflake_partner_attribution'].append("dbtLabs_dbtPython") 7 | """.strip() 8 | 9 | 10 | MY_PYTHON_MODEL = """ 11 | import pandas 12 | 13 | def model(dbt, session): 14 | dbt.config(materialized='table') 15 | data = [[1,2]] * 10 16 | return pandas.DataFrame(data, columns=['test', 'test2']) 17 | """ 18 | 19 | 20 | class AnonymousUsageStatsBase: 21 | @pytest.fixture(scope="class") 22 | def models(self): 23 | return {"my_python_model.py": MY_PYTHON_MODEL} 24 | 25 | 26 | class TestAnonymousUsageStatsOn(AnonymousUsageStatsBase): 27 | @pytest.fixture(scope="class") 28 | def project_config_update(self): 29 | return {"flags": {"send_anonymous_usage_stats": True}} 30 | 31 | def test_stats_get_sent(self, project): 32 | _, logs = run_dbt_and_capture(["--debug", "run"]) 33 | assert ANONYMOUS_USAGE_MESSAGE in logs 34 | 35 | 36 | class TestAnonymousUsageStatsOff(AnonymousUsageStatsBase): 37 | @pytest.fixture(scope="class") 38 | def project_config_update(self, dbt_profile_target): 39 | return {"flags": {"send_anonymous_usage_stats": False}} 40 | 41 | def test_stats_do_not_get_sent(self, project): 42 | _, logs = run_dbt_and_capture(["--debug", "run"]) 43 | assert ANONYMOUS_USAGE_MESSAGE not in logs 44 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_caching.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.caching.test_caching import ( 2 | BaseCachingLowercaseModel, 3 | BaseCachingUppercaseModel, 4 | BaseCachingSelectedSchemaOnly, 5 | ) 6 | 7 | 8 | class TestCachingLowerCaseModel(BaseCachingLowercaseModel): 9 | pass 10 | 11 | 12 | class TestCachingUppercaseModel(BaseCachingUppercaseModel): 13 | pass 14 | 15 | 16 | class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly): 17 | pass 18 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_changing_relation_type.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator 2 | 3 | 4 | class TestSnowflakeChangeRelationTypes(BaseChangeRelationTypeValidator): 5 | pass 6 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_concurrency.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.util import run_dbt, check_relations_equal, rm_file, write_file 2 | from dbt.tests.adapter.concurrency.test_concurrency import BaseConcurrency, seeds__update_csv 3 | 4 | 5 | class TestConcurrencySnowflake(BaseConcurrency): 6 | def test_concurrency_snowflake(self, project): 7 | run_dbt(["seed", "--select", "seed"]) 8 | results = run_dbt(["run"], expect_pass=False) 9 | assert len(results) == 7 10 | check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"]) 11 | 
check_relations_equal(project.adapter, ["SEED", "DEP"]) 12 | check_relations_equal(project.adapter, ["SEED", "TABLE_A"]) 13 | check_relations_equal(project.adapter, ["SEED", "TABLE_B"]) 14 | 15 | rm_file(project.project_root, "seeds", "seed.csv") 16 | write_file(seeds__update_csv, project.project_root + "/seeds", "seed.csv") 17 | results = run_dbt(["run"], expect_pass=False) 18 | assert len(results) == 7 19 | check_relations_equal(project.adapter, ["SEED", "VIEW_MODEL"]) 20 | check_relations_equal(project.adapter, ["SEED", "DEP"]) 21 | check_relations_equal(project.adapter, ["SEED", "TABLE_A"]) 22 | check_relations_equal(project.adapter, ["SEED", "TABLE_B"]) 23 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_ephemeral.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.ephemeral.test_ephemeral import BaseEphemeralMulti 2 | from dbt.tests.util import run_dbt, check_relations_equal 3 | 4 | 5 | class TestEphemeralMultiSnowflake(BaseEphemeralMulti): 6 | def test_ephemeral_multi(self, project): 7 | run_dbt(["seed"]) 8 | results = run_dbt(["run"]) 9 | assert len(results) == 3 10 | check_relations_equal( 11 | project.adapter, ["SEED", "DEPENDENT", "DOUBLE_DEPENDENT", "SUPER_DEPENDENT"] 12 | ) 13 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_grants.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants 3 | from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants 4 | from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants 5 | from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants 6 | from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants 7 | 8 | 9 | class BaseCopyGrantsSnowflake: 10 | # Try every test case without copy_grants enabled (default), 11 | # and with copy_grants enabled (this base class) 12 | @pytest.fixture(scope="class") 13 | def project_config_update(self): 14 | return { 15 | "models": { 16 | "+copy_grants": True, 17 | }, 18 | "seeds": { 19 | "+copy_grants": True, 20 | }, 21 | "snapshots": { 22 | "+copy_grants": True, 23 | }, 24 | } 25 | 26 | 27 | class TestInvalidGrantsSnowflake(BaseInvalidGrants): 28 | def grantee_does_not_exist_error(self): 29 | return "does not exist or not authorized" 30 | 31 | def privilege_does_not_exist_error(self): 32 | return "unexpected" 33 | 34 | 35 | class TestModelGrantsSnowflake(BaseModelGrants): 36 | pass 37 | 38 | 39 | class TestModelGrantsCopyGrantsSnowflake(BaseCopyGrantsSnowflake, BaseModelGrants): 40 | pass 41 | 42 | 43 | class TestIncrementalGrantsSnowflake(BaseIncrementalGrants): 44 | pass 45 | 46 | 47 | class TestIncrementalCopyGrantsSnowflake(BaseCopyGrantsSnowflake, BaseIncrementalGrants): 48 | pass 49 | 50 | 51 | class TestSeedGrantsSnowflake(BaseSeedGrants): 52 | pass 53 | 54 | 55 | class TestSeedCopyGrantsSnowflake(BaseCopyGrantsSnowflake, BaseSeedGrants): 56 | pass 57 | 58 | 59 | class TestSnapshotGrants(BaseSnapshotGrants): 60 | pass 61 | 62 | 63 | class TestSnapshotCopyGrantsSnowflake(BaseCopyGrantsSnowflake, BaseSnapshotGrants): 64 | pass 65 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_incremental_microbatch.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.incremental.test_incremental_microbatch import ( 3 | BaseMicrobatch, 4 | ) 5 | 6 | # Create input with UTC timestamps 7 | _input_model_sql = """ 8 | {{ config(materialized='table', event_time='event_time') }} 9 | select 1 as id, to_timestamp_tz('2020-01-01 00:00:00-0') as event_time 10 | union all 11 | select 2 as id, to_timestamp_tz('2020-01-02 00:00:00-0') as event_time 12 | union all 13 | select 3 as id, to_timestamp_tz('2020-01-03 00:00:00-0') as event_time 14 | """ 15 | 16 | 17 | # No requirement for a unique_id for snowflake microbatch! 18 | _microbatch_model_no_unique_id_sql = """ 19 | {{ config(materialized='incremental', incremental_strategy='microbatch', event_time='event_time', batch_size='day', begin=modules.datetime.datetime(2020, 1, 1, 0, 0, 0)) }} 20 | select * from {{ ref('input_model') }} 21 | """ 22 | 23 | 24 | class TestSnowflakeMicrobatch(BaseMicrobatch): 25 | @pytest.fixture(scope="class") 26 | def microbatch_model_sql(self) -> str: 27 | return _microbatch_model_no_unique_id_sql 28 | 29 | @pytest.fixture(scope="class") 30 | def input_model_sql(self) -> str: 31 | return _input_model_sql 32 | 33 | @pytest.fixture(scope="class") 34 | def insert_two_rows_sql(self, project) -> str: 35 | test_schema_relation = project.adapter.Relation.create( 36 | database=project.database, schema=project.test_schema 37 | ) 38 | return f"insert into {test_schema_relation}.input_model (id, event_time) values (4, '2020-01-04 00:00:00-0'), (5, '2020-01-05 00:00:00-0')" 39 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_persist_docs.py: -------------------------------------------------------------------------------- 1 | import json 2 | import os 3 | 4 | from dbt.tests.util import run_dbt 5 | 6 | from dbt.tests.adapter.persist_docs.test_persist_docs import ( 7 | BasePersistDocs, 8 | BasePersistDocsColumnMissing, 9 | BasePersistDocsCommentOnQuotedColumn, 10 | ) 11 | 12 | 13 | class TestPersistDocs(BasePersistDocs): 14 | def _assert_common_comments(self, *comments): 15 | for comment in comments: 16 | assert '"with double quotes"' in comment 17 | assert """'''abc123'''""" in comment 18 | assert "\n" in comment 19 | assert ( 20 | "Some [$]lbl[$] labeled [$]lbl[$] and [$][$] unlabeled [$][$] dollar-quoting" 21 | in comment 22 | ) 23 | assert "/* comment */" in comment 24 | if os.name == "nt": 25 | assert "--\r\n" in comment or "--\n" in comment 26 | else: 27 | assert "--\n" in comment 28 | 29 | def _assert_has_table_comments(self, table_node): 30 | table_comment = table_node["metadata"]["comment"] 31 | assert table_comment.startswith("Table model description") 32 | 33 | table_id_comment = table_node["columns"]["ID"]["comment"] 34 | assert table_id_comment.startswith("id Column description") 35 | 36 | table_name_comment = table_node["columns"]["NAME"]["comment"] 37 | assert table_name_comment.startswith("Some stuff here and then a call to") 38 | 39 | self._assert_common_comments(table_comment, table_id_comment, table_name_comment) 40 | 41 | def _assert_has_view_comments( 42 | self, view_node, has_node_comments=True, has_column_comments=True 43 | ): 44 | view_comment = view_node["metadata"]["comment"] 45 | if has_node_comments: 46 | assert view_comment.startswith("View model description") 47 | self._assert_common_comments(view_comment) 48 | else: 49 | assert not view_comment 50 | 51 | view_id_comment = 
view_node["columns"]["ID"]["comment"] 52 | if has_column_comments: 53 | assert view_id_comment.startswith("id Column description") 54 | self._assert_common_comments(view_id_comment) 55 | else: 56 | assert not view_id_comment 57 | 58 | view_name_comment = view_node["columns"]["NAME"]["comment"] 59 | assert not view_name_comment 60 | 61 | 62 | class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing): 63 | def test_missing_column(self, project): 64 | run_dbt(["docs", "generate"]) 65 | with open("target/catalog.json") as fp: 66 | catalog_data = json.load(fp) 67 | assert "nodes" in catalog_data 68 | 69 | table_node = catalog_data["nodes"]["model.test.missing_column"] 70 | table_id_comment = table_node["columns"]["ID"]["comment"] 71 | assert table_id_comment.startswith("test id column description") 72 | 73 | 74 | class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn): 75 | pass 76 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_simple_snapshot.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.simple_snapshot.test_snapshot import BaseSnapshotCheck, BaseSimpleSnapshot 2 | 3 | 4 | class TestSnapshot(BaseSimpleSnapshot): 5 | pass 6 | 7 | 8 | class TestSnapshotCheck(BaseSnapshotCheck): 9 | pass 10 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_timestamps.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps 3 | 4 | _MODEL_CURRENT_TIMESTAMP = """ 5 | SELECT {{current_timestamp()}} as current_timestamp, 6 | {{current_timestamp_in_utc_backcompat()}} as current_timestamp_in_utc_backcompat, 7 | {{current_timestamp_backcompat()}} as current_timestamp_backcompat 8 | """ 9 | 10 | 11 | class TestCurrentTimestampSnowflake(BaseCurrentTimestamps): 12 | @pytest.fixture(scope="class") 13 | def models(self): 14 | return {"get_current_timestamp.sql": _MODEL_CURRENT_TIMESTAMP} 15 | 16 | @pytest.fixture(scope="class") 17 | def expected_schema(self): 18 | return { 19 | "CURRENT_TIMESTAMP": "TIMESTAMP_TZ", 20 | "CURRENT_TIMESTAMP_IN_UTC_BACKCOMPAT": "TIMESTAMP_NTZ", 21 | "CURRENT_TIMESTAMP_BACKCOMPAT": "TIMESTAMP_NTZ", 22 | } 23 | 24 | @pytest.fixture(scope="class") 25 | def expected_sql(self): 26 | return """ 27 | select convert_timezone('UTC', current_timestamp()) as current_timestamp, 28 | convert_timezone('UTC', current_timestamp::TIMESTAMP)::TIMESTAMP as current_timestamp_in_utc_backcompat, 29 | current_timestamp::TIMESTAMP as current_timestamp_backcompat 30 | """ 31 | -------------------------------------------------------------------------------- /tests/functional/adapter/unit_testing/test_unit_testing.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes 4 | from dbt.tests.adapter.unit_testing.test_case_insensitivity import BaseUnitTestCaseInsensivity 5 | from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput 6 | 7 | 8 | class TestSnowflakeUnitTestingTypes(BaseUnitTestingTypes): 9 | @pytest.fixture 10 | def data_types(self): 11 | # sql_value, yaml_value 12 | return [ 13 | ["1", "1"], 14 | ["2.0", "2.0"], 15 | ["'12345'", "12345"], 16 | ["'string'", "string"], 17 | ["true", "true"], 18 | ["DATE 
'2020-01-02'", "2020-01-02"], 19 | ["TIMESTAMP '2013-11-03 00:00:00-0'", "2013-11-03 00:00:00-0"], 20 | ["'2013-11-03 00:00:00-0'::TIMESTAMPTZ", "2013-11-03 00:00:00-0"], 21 | ["TO_NUMBER('3', 10, 9)", "3"], 22 | ["3::VARIANT", "3"], 23 | ["TO_GEOMETRY('POINT(1820.12 890.56)')", "POINT(1820.12 890.56)"], 24 | ["TO_GEOGRAPHY('POINT(-122.35 37.55)')", "POINT(-122.35 37.55)"], 25 | [ 26 | "{'Alberta':'Edmonton','Manitoba':'Winnipeg'}", 27 | "{'Alberta':'Edmonton','Manitoba':'Winnipeg'}", 28 | ], 29 | ["['a','b','c']", "['a','b','c']"], 30 | ["[1,2,3]", "[1, 2, 3]"], 31 | ] 32 | 33 | 34 | class TestSnowflakeUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity): 35 | pass 36 | 37 | 38 | class TestSnowflakeUnitTestInvalidInput(BaseUnitTestInvalidInput): 39 | pass 40 | -------------------------------------------------------------------------------- /tests/functional/adapter/utils/test_data_types.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt 2 | from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat 3 | from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt 4 | from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric 5 | from dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString 6 | from dbt.tests.adapter.utils.data_types.test_type_timestamp import BaseTypeTimestamp 7 | from dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean 8 | 9 | 10 | class TestTypeBigInt(BaseTypeBigInt): 11 | pass 12 | 13 | 14 | class TestTypeFloat(BaseTypeFloat): 15 | pass 16 | 17 | 18 | class TestTypeInt(BaseTypeInt): 19 | pass 20 | 21 | 22 | class TestTypeNumeric(BaseTypeNumeric): 23 | pass 24 | 25 | 26 | class TestTypeString(BaseTypeString): 27 | pass 28 | 29 | 30 | class TestTypeTimestamp(BaseTypeTimestamp): 31 | pass 32 | 33 | 34 | class TestTypeBoolean(BaseTypeBoolean): 35 | pass 36 | -------------------------------------------------------------------------------- /tests/functional/auth_tests/test_database_role.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | 5 | from dbt.tests.util import run_dbt 6 | 7 | 8 | SEED = """ 9 | id 10 | 1 11 | """.strip() 12 | 13 | 14 | MODEL = """ 15 | {{ config( 16 | materialized='incremental', 17 | ) }} 18 | select * from {{ ref('my_seed') }} 19 | """ 20 | 21 | 22 | class TestDatabaseRole: 23 | """ 24 | This test addresses https://github.com/dbt-labs/dbt-snowflake/issues/1151 25 | 26 | While dbt-snowflake does not manage database roles (it only manages account roles, 27 | it still needs to account for them so that it doesn't try to revoke them. 28 | """ 29 | 30 | @pytest.fixture(scope="class") 31 | def seeds(self): 32 | return {"my_seed.csv": SEED} 33 | 34 | @pytest.fixture(scope="class") 35 | def models(self): 36 | return {"my_model.sql": MODEL} 37 | 38 | @pytest.fixture(scope="class") 39 | def project_config_update(self): 40 | # grant to the test role even though this role already has these permissions 41 | # this triggers syncing grants since `apply_grants` first looks for a grants config 42 | return {"models": {"+grants": {"select": [os.getenv("SNOWFLAKE_TEST_ROLE")]}}} 43 | 44 | @pytest.fixture(scope="class", autouse=True) 45 | def setup(self, project, prefix): 46 | """ 47 | Create a database role with access to the model we're about to create. 
48 | The existence of this database role triggered the bug as dbt-snowflake attempts 49 | to revoke it if the user also provides a grants config. 50 | """ 51 | role = f"BLOCKING_DB_ROLE_{prefix}" 52 | project.run_sql(f"CREATE DATABASE ROLE {role}") 53 | sql = f""" 54 | GRANT 55 | ALL PRIVILEGES ON FUTURE TABLES 56 | IN SCHEMA {project.test_schema} 57 | TO DATABASE ROLE {role} 58 | """ 59 | project.run_sql(sql) 60 | yield 61 | project.run_sql(f"DROP DATABASE ROLE {role}") 62 | 63 | def test_database_role(self, project): 64 | run_dbt(["seed"]) 65 | run_dbt(["run"]) 66 | # run a second time to trigger revoke on an incremental update 67 | # this originally failed, demonstrating the bug 68 | run_dbt(["run"]) 69 | -------------------------------------------------------------------------------- /tests/functional/auth_tests/test_jwt.py: -------------------------------------------------------------------------------- 1 | """ 2 | Please follow the instructions in test_oauth.py to set up 3 | the security integration required to retrieve a JWT from Snowflake. 4 | """ 5 | 6 | import pytest 7 | import os 8 | from dbt.tests.util import run_dbt, check_relations_equal 9 | 10 | from dbt.adapters.snowflake import SnowflakeCredentials 11 | 12 | _MODELS__MODEL_1_SQL = """ 13 | select 1 as id 14 | """ 15 | 16 | 17 | _MODELS__MODEL_2_SQL = """ 18 | select 2 as id 19 | """ 20 | 21 | 22 | _MODELS__MODEL_3_SQL = """ 23 | select * from {{ ref('model_1') }} 24 | union all 25 | select * from {{ ref('model_2') }} 26 | """ 27 | 28 | 29 | _MODELS__MODEL_4_SQL = """ 30 | select 1 as id 31 | union all 32 | select 2 as id 33 | """ 34 | 35 | 36 | class TestSnowflakeJWT: 37 | """Tests that setting authenticator: jwt allows setting token to a plain JWT 38 | that will be passed into the Snowflake connection without modification.""" 39 | 40 | @pytest.fixture(scope="class", autouse=True) 41 | def access_token(self): 42 | """Because JWTs are short-lived, we need to get a fresh JWT via the refresh 43 | token flow before running the test. 44 | 45 | This fixture leverages the existing SnowflakeCredentials._get_access_token 46 | method to retrieve a valid JWT from Snowflake. 47 | """ 48 | client_id = os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_ID") 49 | client_secret = os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET") 50 | refresh_token = os.getenv("SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN") 51 | 52 | credentials = SnowflakeCredentials( 53 | account=os.getenv("SNOWFLAKE_TEST_ACCOUNT"), 54 | database="", 55 | schema="", 56 | authenticator="oauth", 57 | oauth_client_id=client_id, 58 | oauth_client_secret=client_secret, 59 | token=refresh_token, 60 | ) 61 | 62 | yield credentials._get_access_token() 63 | 64 | @pytest.fixture(scope="class", autouse=True) 65 | def dbt_profile_target(self, access_token): 66 | """A dbt_profile that has authenticator set to JWT, and token set to 67 | a JWT accepted by Snowflake. Also omits the user, as the user attribute 68 | is optional when the authenticator is set to JWT. 
69 | """ 70 | return { 71 | "type": "snowflake", 72 | "threads": 4, 73 | "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"), 74 | "database": os.getenv("SNOWFLAKE_TEST_DATABASE"), 75 | "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"), 76 | "authenticator": "jwt", 77 | "token": access_token, 78 | } 79 | 80 | @pytest.fixture(scope="class") 81 | def models(self): 82 | return { 83 | "model_1.sql": _MODELS__MODEL_1_SQL, 84 | "model_2.sql": _MODELS__MODEL_2_SQL, 85 | "model_3.sql": _MODELS__MODEL_3_SQL, 86 | "model_4.sql": _MODELS__MODEL_4_SQL, 87 | } 88 | 89 | def test_snowflake_basic(self, project): 90 | run_dbt() 91 | check_relations_equal(project.adapter, ["MODEL_3", "MODEL_4"]) 92 | -------------------------------------------------------------------------------- /tests/functional/auth_tests/test_key_pair.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from dbt.tests.util import run_dbt 4 | import pytest 5 | 6 | 7 | class TestKeyPairAuth: 8 | @pytest.fixture(scope="class", autouse=True) 9 | def dbt_profile_target(self): 10 | return { 11 | "type": "snowflake", 12 | "threads": 4, 13 | "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"), 14 | "user": os.getenv("SNOWFLAKE_TEST_USER"), 15 | "private_key": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY"), 16 | "private_key_passphrase": os.getenv("SNOWFLAKE_TEST_PRIVATE_KEY_PASSPHRASE"), 17 | "database": os.getenv("SNOWFLAKE_TEST_DATABASE"), 18 | "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"), 19 | } 20 | 21 | @pytest.fixture(scope="class") 22 | def models(self): 23 | return {"my_model.sql": "select 1 as id"} 24 | 25 | def test_connection(self, project): 26 | run_dbt() 27 | -------------------------------------------------------------------------------- /tests/functional/auth_tests/test_oauth.py: -------------------------------------------------------------------------------- 1 | """ 2 | The first time using an account for testing, you should run this: 3 | 4 | ``` 5 | CREATE OR REPLACE SECURITY INTEGRATION DBT_INTEGRATION_TEST_OAUTH 6 | TYPE = OAUTH 7 | ENABLED = TRUE 8 | OAUTH_CLIENT = CUSTOM 9 | OAUTH_CLIENT_TYPE = 'CONFIDENTIAL' 10 | OAUTH_REDIRECT_URI = 'http://localhost:8080' 11 | oauth_issue_refresh_tokens = true 12 | OAUTH_ALLOW_NON_TLS_REDIRECT_URI = true 13 | BLOCKED_ROLES_LIST = 14 | oauth_refresh_token_validity = 7776000; 15 | ``` 16 | 17 | 18 | Every month (or any amount <90 days): 19 | 20 | Run `select SYSTEM$SHOW_OAUTH_CLIENT_SECRETS('DBT_INTEGRATION_TEST_OAUTH');` 21 | 22 | The only row/column of output should be a json blob, it goes (within single 23 | quotes!) as the second argument to the server script: 24 | 25 | python scripts/werkzeug-refresh-token.py ${acount_name} '${json_blob}' 26 | 27 | Open http://localhost:8080 28 | 29 | Log in as the test user, get a response page with some environment variables. 
30 | Update CI providers and test.env with the new values (If you kept the security 31 | integration the same, just the refresh token changed) 32 | """ 33 | 34 | import os 35 | from dbt.tests.util import check_relations_equal, run_dbt 36 | import pytest 37 | 38 | 39 | _MODELS__MODEL_1_SQL = """ 40 | select 1 as id 41 | """ 42 | 43 | 44 | _MODELS__MODEL_2_SQL = """ 45 | select 2 as id 46 | """ 47 | 48 | 49 | _MODELS__MODEL_3_SQL = """ 50 | select * from {{ ref('model_1') }} 51 | union all 52 | select * from {{ ref('model_2') }} 53 | """ 54 | 55 | 56 | _MODELS__MODEL_4_SQL = """ 57 | select 1 as id 58 | union all 59 | select 2 as id 60 | """ 61 | 62 | 63 | class TestSnowflakeOauth: 64 | @pytest.fixture(scope="class", autouse=True) 65 | def dbt_profile_target(self): 66 | return { 67 | "type": "snowflake", 68 | "threads": 4, 69 | "account": os.getenv("SNOWFLAKE_TEST_ACCOUNT"), 70 | "user": os.getenv("SNOWFLAKE_TEST_USER"), 71 | "oauth_client_id": os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_ID"), 72 | "oauth_client_secret": os.getenv("SNOWFLAKE_TEST_OAUTH_CLIENT_SECRET"), 73 | "token": os.getenv("SNOWFLAKE_TEST_OAUTH_REFRESH_TOKEN"), 74 | "database": os.getenv("SNOWFLAKE_TEST_DATABASE"), 75 | "warehouse": os.getenv("SNOWFLAKE_TEST_WAREHOUSE"), 76 | "authenticator": "oauth", 77 | } 78 | 79 | @pytest.fixture(scope="class") 80 | def models(self): 81 | return { 82 | "model_1.sql": _MODELS__MODEL_1_SQL, 83 | "model_2.sql": _MODELS__MODEL_2_SQL, 84 | "model_3.sql": _MODELS__MODEL_3_SQL, 85 | "model_4.sql": _MODELS__MODEL_4_SQL, 86 | } 87 | 88 | def test_snowflake_basic(self, project): 89 | run_dbt() 90 | check_relations_equal(project.adapter, ["MODEL_3", "MODEL_4"]) 91 | -------------------------------------------------------------------------------- /tests/functional/generic_test_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/functional/generic_test_tests/__init__.py -------------------------------------------------------------------------------- /tests/functional/generic_test_tests/_files.py: -------------------------------------------------------------------------------- 1 | SCHEMA__CONTROL = """ 2 | version: 2 3 | models: 4 | - name: colors 5 | columns: 6 | - name: color 7 | data_tests: 8 | - not_null 9 | """ 10 | 11 | 12 | SCHEMA__EXPLICIT_WAREHOUSE = """ 13 | version: 2 14 | models: 15 | - name: colors 16 | columns: 17 | - name: color 18 | data_tests: 19 | - not_null: 20 | config: 21 | snowflake_warehouse: DBT_TESTING_ALT 22 | """ 23 | 24 | 25 | SCHEMA__NOT_NULL = """ 26 | version: 2 27 | models: 28 | - name: facts 29 | columns: 30 | - name: value 31 | data_tests: 32 | - not_null: 33 | config: 34 | snowflake_warehouse: DBT_TESTING_ALT 35 | """ 36 | 37 | 38 | SCHEMA__RELATIONSHIPS = """ 39 | version: 2 40 | models: 41 | - name: facts 42 | columns: 43 | - name: color 44 | data_tests: 45 | - relationships: 46 | to: ref('my_colors') 47 | field: color 48 | config: 49 | snowflake_warehouse: DBT_TESTING_ALT 50 | """ 51 | 52 | 53 | SCHEMA__ACCEPTED_VALUES = """ 54 | version: 2 55 | models: 56 | - name: colors 57 | columns: 58 | - name: color 59 | data_tests: 60 | - accepted_values: 61 | values: ['blue', 'red', 'green'] 62 | config: 63 | snowflake_warehouse: DBT_TESTING_ALT 64 | """ 65 | 66 | 67 | SEED__COLORS = """ 68 | color 69 | blue 70 | green 71 | red 72 | yellow 73 | """.strip() 74 | 75 | 76 | # record 10 is missing a value 77 | # record 7 has a 
color that's not in COLORS 78 | SEED__FACTS = """ 79 | id,color,value 80 | 1,blue,10 81 | 2,red,20 82 | 3,green,30 83 | 4,yellow,40 84 | 5,blue,50 85 | 6,red,60 86 | 7,orange,70 87 | 8,green,80 88 | 9,yellow,90 89 | 10,blue, 90 | """.strip() 91 | -------------------------------------------------------------------------------- /tests/functional/generic_test_tests/_models.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/functional/generic_test_tests/_models.py -------------------------------------------------------------------------------- /tests/functional/generic_test_tests/_schemas.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/functional/generic_test_tests/_schemas.py -------------------------------------------------------------------------------- /tests/functional/generic_test_tests/test_generic_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import run_dbt, run_dbt_and_capture 4 | 5 | from tests.functional.generic_test_tests import _files 6 | 7 | 8 | class TestWarehouseConfig: 9 | 10 | @pytest.fixture(scope="class") 11 | def seeds(self): 12 | return { 13 | "colors.csv": _files.SEED__COLORS, 14 | "facts.csv": _files.SEED__FACTS, 15 | } 16 | 17 | @pytest.fixture(scope="class", autouse=True) 18 | def setup(self, project): 19 | run_dbt(["seed"]) 20 | run_dbt(["run"]) 21 | yield 22 | 23 | 24 | class TestWarehouseConfigControl(TestWarehouseConfig): 25 | 26 | @pytest.fixture(scope="class") 27 | def models(self): 28 | return {"schema.yml": _files.SCHEMA__CONTROL} 29 | 30 | def test_expected_warehouse(self, project): 31 | results, logs = run_dbt_and_capture(["test"]) 32 | assert len(results) == 1 33 | 34 | 35 | class TestWarehouseConfigExplicitWarehouse(TestWarehouseConfig): 36 | 37 | @pytest.fixture(scope="class") 38 | def models(self): 39 | return {"schema.yml": _files.SCHEMA__EXPLICIT_WAREHOUSE} 40 | 41 | def test_expected_warehouse(self, project): 42 | _, logs = run_dbt_and_capture(["test", "--log-level", "debug"]) 43 | assert "use warehouse " in logs 44 | 45 | 46 | class TestWarehouseConfigNotNull(TestWarehouseConfig): 47 | 48 | @pytest.fixture(scope="class") 49 | def models(self): 50 | return {"schema.yml": _files.SCHEMA__NOT_NULL} 51 | 52 | def test_expected_warehouse(self, project): 53 | _, logs = run_dbt_and_capture(["test", "--log-level", "debug"], expect_pass=False) 54 | assert "use warehouse " in logs 55 | -------------------------------------------------------------------------------- /tests/functional/iceberg/models.py: -------------------------------------------------------------------------------- 1 | _MODEL_BASIC_TABLE_MODEL = """ 2 | {{ 3 | config( 4 | materialized = "table", 5 | cluster_by=['id'], 6 | ) 7 | }} 8 | select 1 as id 9 | """ 10 | 11 | _MODEL_BASIC_ICEBERG_MODEL = """ 12 | {{ 13 | config( 14 | transient = "true", 15 | materialized = "table", 16 | cluster_by=['id'], 17 | table_format="iceberg", 18 | external_volume="s3_iceberg_snow", 19 | base_location_subpath="subpath", 20 | ) 21 | }} 22 | 23 | select * from {{ ref('first_table') }} 24 | """ 25 | 26 | _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH = """ 27 | {{ 28 | config( 29 | transient = "true", 30 | materialized = "table", 31 | cluster_by=['id'], 32 |
table_format="iceberg", 33 | external_volume="s3_iceberg_snow", 34 | base_location_root="root_path", 35 | ) 36 | }} 37 | 38 | select * from {{ ref('first_table') }} 39 | """ 40 | 41 | _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH_SUBPATH = """ 42 | {{ 43 | config( 44 | transient = "true", 45 | materialized = "table", 46 | cluster_by=['id'], 47 | table_format="iceberg", 48 | external_volume="s3_iceberg_snow", 49 | base_location_root="root_path", 50 | base_location_subpath="subpath", 51 | ) 52 | }} 53 | 54 | select * from {{ ref('first_table') }} 55 | """ 56 | 57 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL = """ 58 | {{ config( 59 | materialized='dynamic_table', 60 | snowflake_warehouse='DBT_TESTING', 61 | target_lag='1 minute', 62 | refresh_mode='INCREMENTAL', 63 | table_format='iceberg', 64 | external_volume='s3_iceberg_snow', 65 | ) }} 66 | 67 | select * from {{ ref('first_table') }} 68 | """ 69 | 70 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH = """ 71 | {{ 72 | config( 73 | transient = "transient", 74 | materialized = "dynamic_table", 75 | cluster_by=['id'], 76 | table_format="iceberg", 77 | external_volume="s3_iceberg_snow", 78 | base_location_root="root_path", 79 | ) 80 | }} 81 | 82 | select * from {{ ref('first_table') }} 83 | """ 84 | 85 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH_SUBPATH = """ 86 | {{ 87 | config( 88 | transient = "true", 89 | materialized = "dynamic_table", 90 | cluster_by=['id'], 91 | table_format="iceberg", 92 | external_volume="s3_iceberg_snow", 93 | base_location_root="root_path", 94 | base_location_subpath='subpath', 95 | ) 96 | }} 97 | 98 | select * from {{ ref('first_table') }} 99 | """ 100 | 101 | 102 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH = """ 103 | {{ config( 104 | materialized='dynamic_table', 105 | snowflake_warehouse='DBT_TESTING', 106 | target_lag='1 minute', 107 | refresh_mode='INCREMENTAL', 108 | table_format='iceberg', 109 | external_volume='s3_iceberg_snow', 110 | base_location_subpath='subpath', 111 | ) }} 112 | 113 | select * from {{ ref('first_table') }} 114 | """ 115 | 116 | _MODEL_BUILT_ON_ICEBERG_TABLE = """ 117 | {{ 118 | config( 119 | materialized = "table", 120 | ) 121 | }} 122 | select * from {{ ref('iceberg_table') }} 123 | """ 124 | 125 | _MODEL_TABLE_BEFORE_SWAP = """ 126 | {{ 127 | config( 128 | materialized = "table", 129 | ) 130 | }} 131 | select 1 as id 132 | """ 133 | 134 | _MODEL_VIEW_BEFORE_SWAP = """ 135 | select 1 as id 136 | """ 137 | 138 | _MODEL_TABLE_FOR_SWAP_ICEBERG = """ 139 | {{ 140 | config( 141 | materialized = "table", 142 | table_format="iceberg", 143 | external_volume="s3_iceberg_snow", 144 | base_location_subpath="subpath", 145 | ) 146 | }} 147 | select 1 as id 148 | """ 149 | -------------------------------------------------------------------------------- /tests/functional/iceberg/test_table_basic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from pathlib import Path 4 | 5 | from dbt.tests.util import run_dbt, rm_file, write_file 6 | 7 | from tests.functional.iceberg.models import ( 8 | _MODEL_BASIC_TABLE_MODEL, 9 | _MODEL_BASIC_ICEBERG_MODEL, 10 | _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH, 11 | _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH_SUBPATH, 12 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL, 13 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH, 14 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH_SUBPATH, 15 | _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH, 16 | _MODEL_BUILT_ON_ICEBERG_TABLE, 17 | _MODEL_TABLE_BEFORE_SWAP, 18 | _MODEL_VIEW_BEFORE_SWAP, 19 | 
_MODEL_TABLE_FOR_SWAP_ICEBERG, 20 | ) 21 | 22 | 23 | class TestIcebergTableBuilds: 24 | @pytest.fixture(scope="class") 25 | def project_config_update(self): 26 | return {"flags": {"enable_iceberg_materializations": True}} 27 | 28 | @pytest.fixture(scope="class") 29 | def models(self): 30 | return { 31 | "first_table.sql": _MODEL_BASIC_TABLE_MODEL, 32 | "iceberg_table.sql": _MODEL_BASIC_ICEBERG_MODEL, 33 | "iceberg_tableb.sql": _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH, 34 | "iceberg_tablec.sql": _MODEL_BASIC_ICEBERG_MODEL_WITH_PATH_SUBPATH, 35 | "table_built_on_iceberg_table.sql": _MODEL_BUILT_ON_ICEBERG_TABLE, 36 | "dynamic_table.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL, 37 | "dynamic_tableb.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH, 38 | "dynamic_tablec.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_PATH_SUBPATH, 39 | "dynamic_tabled.sql": _MODEL_BASIC_DYNAMIC_TABLE_MODEL_WITH_SUBPATH, 40 | } 41 | 42 | def test_iceberg_tables_build_and_can_be_referred(self, project): 43 | run_results = run_dbt() 44 | assert len(run_results) == 9 45 | 46 | 47 | class TestIcebergTableTypeBuildsOnExistingTable: 48 | @pytest.fixture(scope="class") 49 | def project_config_update(self): 50 | return {"flags": {"enable_iceberg_materializations": True}} 51 | 52 | @pytest.mark.parametrize("start_model", [_MODEL_TABLE_BEFORE_SWAP, _MODEL_VIEW_BEFORE_SWAP]) 53 | def test_changing_model_types(self, project, start_model): 54 | model_file = project.project_root / Path("models") / Path("my_model.sql") 55 | 56 | write_file(start_model, model_file) 57 | run_results = run_dbt() 58 | assert len(run_results) == 1 59 | 60 | rm_file(model_file) 61 | write_file(_MODEL_TABLE_FOR_SWAP_ICEBERG, model_file) 62 | run_results = run_dbt() 63 | assert len(run_results) == 1 64 | 65 | rm_file(model_file) 66 | write_file(start_model, model_file) 67 | run_results = run_dbt() 68 | assert len(run_results) == 1 69 | -------------------------------------------------------------------------------- /tests/functional/query_tag/test_query_tags.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt 3 | 4 | 5 | snapshots__snapshot_query_tag_sql = """ 6 | {% snapshot snapshot_query_tag %} 7 | {{ 8 | config( 9 | target_database=database, 10 | target_schema=schema, 11 | unique_key='id', 12 | strategy='check', 13 | check_cols=['color'], 14 | ) 15 | }} 16 | select 1 as id, 'blue' as color 17 | {% endsnapshot %} 18 | """ 19 | 20 | 21 | models__table_model_query_tag_sql = """ 22 | {{ config(materialized = 'table') }} 23 | select 1 as id 24 | """ 25 | 26 | 27 | models__models_config_yml = """ 28 | version: 2 29 | 30 | models: 31 | - name: view_model_query_tag 32 | columns: 33 | - name: id 34 | data_tests: 35 | - unique 36 | """ 37 | 38 | 39 | models__view_model_query_tag_sql = """ 40 | {{ config(materialized = 'view') }} 41 | select 1 as id 42 | """ 43 | 44 | 45 | models__incremental_model_query_tag_sql = """ 46 | {{ config(materialized = 'incremental', unique_key = 'id') }} 47 | select 1 as id 48 | """ 49 | 50 | 51 | macros__check_tag_sql = """ 52 | {% macro check_query_tag() %} 53 | 54 | {% if execute %} 55 | {% set query_tag = get_current_query_tag() %} 56 | {% if query_tag != var("query_tag") %} 57 | {{ exceptions.raise_compiler_error("Query tag not used!") }} 58 | {% endif %} 59 | {% endif %} 60 | 61 | {% endmacro %} 62 | """ 63 | 64 | 65 | seeds__seed_query_tag_csv = """id 66 | 1 67 | """.strip() 68 | 69 | 70 | class TestQueryTag: 71 | @pytest.fixture(scope="class") 72 
| def models(self): 73 | return { 74 | "table_model_query_tag.sql": models__table_model_query_tag_sql, 75 | "view_model_query_tag.sql": models__view_model_query_tag_sql, 76 | "incremental_model_query_tag.sql": models__incremental_model_query_tag_sql, 77 | "models_config.yml": models__models_config_yml, 78 | } 79 | 80 | @pytest.fixture(scope="class") 81 | def snapshots(self): 82 | return {"snapshot_query_tag.sql": snapshots__snapshot_query_tag_sql} 83 | 84 | @pytest.fixture(scope="class") 85 | def macros(self): 86 | return {"check_tag.sql": macros__check_tag_sql} 87 | 88 | @pytest.fixture(scope="class") 89 | def seeds(self): 90 | return {"seed_query_tag.csv": seeds__seed_query_tag_csv} 91 | 92 | @pytest.fixture(scope="class") 93 | def project_config_update(self, prefix): 94 | return { 95 | "config-version": 2, 96 | "models": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, 97 | "seeds": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, 98 | "snapshots": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}, 99 | "tests": {"test": {"query_tag": prefix, "post-hook": "{{ check_query_tag() }}"}}, 100 | } 101 | 102 | def build_all_with_query_tags(self, project, prefix): 103 | run_dbt(["build", "--vars", '{{"query_tag": "{}"}}'.format(prefix)]) 104 | 105 | def test_snowflake_query_tag(self, project, prefix): 106 | self.build_all_with_query_tags(project, prefix) 107 | self.build_all_with_query_tags(project, prefix) 108 | 109 | 110 | class TestSnowflakeProfileQueryTag: 111 | @pytest.fixture(scope="class") 112 | def models(self): 113 | return { 114 | "table_model_query_tag.sql": models__table_model_query_tag_sql, 115 | "view_model_query_tag.sql": models__view_model_query_tag_sql, 116 | "incremental_model_query_tag.sql": models__incremental_model_query_tag_sql, 117 | "models_config.yml": models__models_config_yml, 118 | } 119 | 120 | @pytest.fixture(scope="class") 121 | def profiles_config_update(self, prefix): 122 | return {"query_tag": prefix} 123 | 124 | def build_all_with_query_tags(self, project, prefix): 125 | run_dbt(["build", "--vars", '{{"query_tag": "{}"}}'.format(prefix)]) 126 | 127 | def test_snowflake_query_tag(self, project, prefix): 128 | self.build_all_with_query_tags(project, prefix) 129 | self.build_all_with_query_tags(project, prefix) 130 | -------------------------------------------------------------------------------- /tests/functional/redact_log_values/test_duplicate_key_not_in_exceptions.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import ( 4 | run_dbt, 5 | ) 6 | 7 | _MODELS__view = """ 8 | {{ config( 9 | materialized='table', 10 | ) }} 11 | 12 | with dupes as ( 13 | select 'foo' as key, 1 as value 14 | union all 15 | select 'foo' as key, 2 as value 16 | ) 17 | 18 | select 19 | object_agg(key, value) as agg 20 | from dupes 21 | """ 22 | 23 | 24 | class TestDuplicateKeyNotInExceptions: 25 | @pytest.fixture(scope="class") 26 | def models(self): 27 | return {"model.sql": _MODELS__view} 28 | 29 | def test_row_values_were_scrubbed_from_duplicate_merge_exception(self, project): 30 | result = run_dbt(["run", "-s", "model"], expect_pass=False) 31 | assert len(result) == 1 32 | assert "Duplicate field key '[redacted]'" in result[0].message 33 | assert "'foo'" not in result[0].message 34 | -------------------------------------------------------------------------------- /tests/functional/redact_log_values/test_row_values_not_in_exceptions.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import ( 4 | run_dbt, 5 | ) 6 | 7 | _MODELS__incremental_model = """ 8 | {{ config( 9 | materialized='incremental', 10 | unique_key='id' 11 | ) }} 12 | 13 | with data as ( 14 | SELECT $1 id, $2 name FROM ( 15 | VALUES (1, 'one'), (2, 'two'), (3, 'three'), (1, 'one') 16 | ) 17 | ) 18 | select * from data 19 | """ 20 | 21 | 22 | class TestRowValuesNotInExceptions: 23 | @pytest.fixture(scope="class") 24 | def models(self): 25 | return {"model.sql": _MODELS__incremental_model} 26 | 27 | def test_row_values_were_scrubbed_from_duplicate_merge_exception(self, project): 28 | result = run_dbt(["run", "-s", "model"]) 29 | assert len(result) == 1 30 | 31 | result = run_dbt(["run", "-s", "model"], expect_pass=False) 32 | assert len(result) == 1 33 | assert "Row Values: [redacted]" in result[0].message 34 | assert "'one'" not in result[0].message 35 | -------------------------------------------------------------------------------- /tests/functional/relation_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/functional/relation_tests/__init__.py -------------------------------------------------------------------------------- /tests/functional/relation_tests/base.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import run_dbt, run_dbt_and_capture 4 | 5 | 6 | SEED = """ 7 | id 8 | 0 9 | 1 10 | 2 11 | """.strip() 12 | 13 | 14 | TABLE = """ 15 | {{ config(materialized="table") }} 16 | select * from {{ ref('my_seed') }} 17 | """ 18 | 19 | 20 | VIEW = """ 21 | {{ config(materialized="view") }} 22 | select * from {{ ref('my_seed') }} 23 | """ 24 | 25 | 26 | MACRO__GET_CREATE_BACKUP_SQL = """ 27 | {% macro test__get_create_backup_sql(database, schema, identifier, relation_type) -%} 28 | {%- set relation = adapter.Relation.create(database=database, schema=schema, identifier=identifier, type=relation_type) -%} 29 | {% call statement('test__get_create_backup_sql') -%} 30 | {{ get_create_backup_sql(relation) }} 31 | {%- endcall %} 32 | {% endmacro %}""" 33 | 34 | 35 | MACRO__GET_RENAME_INTERMEDIATE_SQL = """ 36 | {% macro test__get_rename_intermediate_sql(database, schema, identifier, relation_type) -%} 37 | {%- set relation = adapter.Relation.create(database=database, schema=schema, identifier=identifier, type=relation_type) -%} 38 | {% call statement('test__get_rename_intermediate_sql') -%} 39 | {{ get_rename_intermediate_sql(relation) }} 40 | {%- endcall %} 41 | {% endmacro %}""" 42 | 43 | 44 | class RelationOperation: 45 | @pytest.fixture(scope="class") 46 | def seeds(self): 47 | yield {"my_seed.csv": SEED} 48 | 49 | @pytest.fixture(scope="class") 50 | def models(self): 51 | yield { 52 | "my_table.sql": TABLE, 53 | "my_table__dbt_tmp.sql": TABLE, 54 | "my_view.sql": VIEW, 55 | "my_view__dbt_tmp.sql": VIEW, 56 | } 57 | 58 | @pytest.fixture(scope="class") 59 | def macros(self): 60 | yield { 61 | "test__get_create_backup_sql.sql": MACRO__GET_CREATE_BACKUP_SQL, 62 | "test__get_rename_intermediate_sql.sql": MACRO__GET_RENAME_INTERMEDIATE_SQL, 63 | } 64 | 65 | @pytest.fixture(scope="class", autouse=True) 66 | def setup(self, project): 67 | run_dbt(["seed"]) 68 | run_dbt(["run"]) 69 | 70 | def assert_operation(self, project, operation, args, expected_statement): 
71 | results, logs = run_dbt_and_capture( 72 | ["--debug", "run-operation", operation, "--args", str(args)] 73 | ) 74 | assert len(results) == 1 75 | assert expected_statement in logs 76 | -------------------------------------------------------------------------------- /tests/functional/relation_tests/dynamic_table_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/functional/relation_tests/dynamic_table_tests/__init__.py -------------------------------------------------------------------------------- /tests/functional/relation_tests/dynamic_table_tests/models.py: -------------------------------------------------------------------------------- 1 | SEED = """ 2 | id,value 3 | 1,100 4 | 2,200 5 | 3,300 6 | """.strip() 7 | 8 | 9 | DYNAMIC_TABLE = """ 10 | {{ config( 11 | materialized='dynamic_table', 12 | snowflake_warehouse='DBT_TESTING', 13 | target_lag='2 minutes', 14 | refresh_mode='INCREMENTAL', 15 | ) }} 16 | select * from {{ ref('my_seed') }} 17 | """ 18 | 19 | 20 | EXPLICIT_AUTO_DYNAMIC_TABLE = """ 21 | {{ config( 22 | materialized='dynamic_table', 23 | snowflake_warehouse='DBT_TESTING', 24 | target_lag='2 minutes', 25 | refresh_mode='AUTO', 26 | ) }} 27 | select * from {{ ref('my_seed') }} 28 | """ 29 | 30 | IMPLICIT_AUTO_DYNAMIC_TABLE = """ 31 | {{ config( 32 | materialized='dynamic_table', 33 | snowflake_warehouse='DBT_TESTING', 34 | target_lag='2 minutes', 35 | ) }} 36 | select * from {{ ref('my_seed') }} 37 | """ 38 | 39 | 40 | DYNAMIC_TABLE_DOWNSTREAM = """ 41 | {{ config( 42 | materialized='dynamic_table', 43 | snowflake_warehouse='DBT_TESTING', 44 | target_lag='DOWNSTREAM', 45 | refresh_mode='INCREMENTAL', 46 | ) }} 47 | select * from {{ ref('my_seed') }} 48 | """ 49 | 50 | 51 | DYNAMIC_ICEBERG_TABLE = """ 52 | {{ config( 53 | materialized='dynamic_table', 54 | snowflake_warehouse='DBT_TESTING', 55 | target_lag='2 minutes', 56 | refresh_mode='INCREMENTAL', 57 | table_format="iceberg", 58 | external_volume="s3_iceberg_snow", 59 | base_location_subpath="subpath", 60 | ) }} 61 | select * from {{ ref('my_seed') }} 62 | """ 63 | 64 | 65 | DYNAMIC_TABLE_ALTER = """ 66 | {{ config( 67 | materialized='dynamic_table', 68 | snowflake_warehouse='DBT_TESTING', 69 | target_lag='5 minutes', 70 | refresh_mode='INCREMENTAL', 71 | ) }} 72 | select * from {{ ref('my_seed') }} 73 | """ 74 | 75 | 76 | DYNAMIC_TABLE_REPLACE = """ 77 | {{ config( 78 | materialized='dynamic_table', 79 | snowflake_warehouse='DBT_TESTING', 80 | target_lag='2 minutes', 81 | refresh_mode='FULL', 82 | ) }} 83 | select * from {{ ref('my_seed') }} 84 | """ 85 | 86 | 87 | DYNAMIC_ICEBERG_TABLE_ALTER = """ 88 | {{ config( 89 | materialized='dynamic_table', 90 | snowflake_warehouse='DBT_TESTING', 91 | target_lag='5 minutes', 92 | refresh_mode='INCREMENTAL', 93 | table_format="iceberg", 94 | external_volume="s3_iceberg_snow", 95 | base_location_subpath="subpath", 96 | ) }} 97 | select * from {{ ref('my_seed') }} 98 | """ 99 | 100 | 101 | DYNAMIC_ICEBERG_TABLE_REPLACE = """ 102 | {{ config( 103 | materialized='dynamic_table', 104 | snowflake_warehouse='DBT_TESTING', 105 | target_lag='2 minutes', 106 | refresh_mode='FULL', 107 | table_format="iceberg", 108 | external_volume="s3_iceberg_snow", 109 | base_location_subpath="subpath", 110 | ) }} 111 | select * from {{ ref('my_seed') }} 112 | """ 113 | -------------------------------------------------------------------------------- 
/tests/functional/relation_tests/dynamic_table_tests/test_basic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from dbt.tests.util import assert_message_in_logs, run_dbt, run_dbt_and_capture 4 | 5 | from tests.functional.relation_tests.dynamic_table_tests import models 6 | from tests.functional.utils import query_relation_type 7 | 8 | 9 | class TestBasic: 10 | iceberg: bool = False 11 | 12 | @pytest.fixture(scope="class", autouse=True) 13 | def seeds(self): 14 | return {"my_seed.csv": models.SEED} 15 | 16 | @pytest.fixture(scope="class", autouse=True) 17 | def models(self): 18 | my_models = { 19 | "my_dynamic_table.sql": models.DYNAMIC_TABLE, 20 | "my_dynamic_table_downstream.sql": models.DYNAMIC_TABLE_DOWNSTREAM, 21 | } 22 | if self.iceberg: 23 | my_models.update( 24 | { 25 | "my_dynamic_iceberg_table.sql": models.DYNAMIC_ICEBERG_TABLE, 26 | } 27 | ) 28 | yield my_models 29 | 30 | @pytest.fixture(scope="class", autouse=True) 31 | def setup(self, project): 32 | run_dbt(["seed"]) 33 | run_dbt(["run"]) 34 | 35 | def test_dynamic_table_full_refresh(self, project): 36 | run_dbt(["run", "--full-refresh"]) 37 | assert query_relation_type(project, "my_dynamic_table") == "dynamic_table" 38 | assert query_relation_type(project, "my_dynamic_table_downstream") == "dynamic_table" 39 | if self.iceberg: 40 | assert query_relation_type(project, "my_dynamic_iceberg_table") == "dynamic_table" 41 | 42 | 43 | class TestBasicIcebergOn(TestBasic): 44 | iceberg = True 45 | 46 | @pytest.fixture(scope="class") 47 | def project_config_update(self): 48 | return {"flags": {"enable_iceberg_materializations": True}} 49 | 50 | 51 | class TestAutoConfigDoesntFullRefresh: 52 | """ 53 | An AUTO refresh_mode must compare accurately against both INCREMENTAL and FULL so that an unchanged model does not trigger a full refresh. 54 | https://github.com/dbt-labs/dbt-snowflake/issues/1267 55 | """ 56 | 57 | DT_NAME = "my_dynamic_table" 58 | 59 | @pytest.fixture(scope="class", autouse=True) 60 | def seeds(self): 61 | return {"my_seed.csv": models.SEED} 62 | 63 | @pytest.fixture(scope="class", autouse=True) 64 | def models(self): 65 | yield { 66 | f"explicit_{self.DT_NAME}.sql": models.EXPLICIT_AUTO_DYNAMIC_TABLE, 67 | f"implicit_{self.DT_NAME}.sql": models.IMPLICIT_AUTO_DYNAMIC_TABLE, 68 | } 69 | 70 | @pytest.mark.parametrize("test_dt", [f"explicit_{DT_NAME}", f"implicit_{DT_NAME}"]) 71 | def test_auto_config_doesnt_full_refresh(self, project, test_dt): 72 | model_qualified_name = f"{project.database}.{project.test_schema}.{test_dt}" 73 | 74 | run_dbt(["seed"]) 75 | _, logs = run_dbt_and_capture(["--debug", "run", "--select", f"{test_dt}.sql"]) 76 | assert_message_in_logs(f"create dynamic table {model_qualified_name}", logs) 77 | assert_message_in_logs("refresh_mode = AUTO", logs) 78 | 79 | _, logs = run_dbt_and_capture(["--debug", "run", "--select", f"{test_dt}.sql"]) 80 | 81 | assert_message_in_logs(f"create dynamic table {model_qualified_name}", logs, False) 82 | assert_message_in_logs( 83 | f"create or replace dynamic table {model_qualified_name}", logs, False 84 | ) 85 | assert_message_in_logs("refresh_mode = AUTO", logs, False) 86 | assert_message_in_logs( 87 | f"No configuration changes were identified on: `{model_qualified_name}`.
Continuing.", 88 | logs, 89 | ) 90 | -------------------------------------------------------------------------------- /tests/functional/relation_tests/models.py: -------------------------------------------------------------------------------- 1 | SEED = """ 2 | id,value 3 | 1,100 4 | 2,200 5 | 3,300 6 | """.strip() 7 | 8 | 9 | TABLE = """ 10 | {{ config( 11 | materialized='table', 12 | ) }} 13 | select * from {{ ref('my_seed') }} 14 | """ 15 | 16 | 17 | VIEW = """ 18 | {{ config( 19 | materialized='view', 20 | ) }} 21 | select * from {{ ref('my_seed') }} 22 | """ 23 | 24 | 25 | DYNAMIC_TABLE = """ 26 | {{ config( 27 | materialized='dynamic_table', 28 | snowflake_warehouse='DBT_TESTING', 29 | target_lag='1 minute', 30 | refresh_mode='INCREMENTAL', 31 | ) }} 32 | select * from {{ ref('my_seed') }} 33 | """ 34 | 35 | 36 | DYNAMIC_ICEBERG_TABLE = """ 37 | {{ config( 38 | materialized='dynamic_table', 39 | snowflake_warehouse='DBT_TESTING', 40 | target_lag='1 minute', 41 | refresh_mode='INCREMENTAL', 42 | table_format="iceberg", 43 | external_volume="s3_iceberg_snow", 44 | base_location_subpath="subpath", 45 | ) }} 46 | select * from {{ ref('my_seed') }} 47 | """ 48 | 49 | ICEBERG_TABLE = """ 50 | {{ config( 51 | materialized='table', 52 | table_format="iceberg", 53 | external_volume="s3_iceberg_snow", 54 | ) }} 55 | select * from {{ ref('my_seed') }} 56 | """ 57 | 58 | INCREMENTAL_ICEBERG_TABLE = """ 59 | {{ config( 60 | materialized='incremental', 61 | table_format='iceberg', 62 | incremental_strategy='append', 63 | unique_key="id", 64 | external_volume = "s3_iceberg_snow", 65 | ) }} 66 | select * from {{ ref('my_seed') }} 67 | """ 68 | 69 | 70 | INCREMENTAL_TABLE = """ 71 | {{ config( 72 | materialized='incremental', 73 | incremental_strategy='append', 74 | unique_key="id", 75 | ) }} 76 | select * from {{ ref('my_seed') }} 77 | """ 78 | -------------------------------------------------------------------------------- /tests/functional/relation_tests/test_table.py: -------------------------------------------------------------------------------- 1 | from tests.functional.relation_tests.base import RelationOperation 2 | 3 | 4 | class TestTable(RelationOperation): 5 | 6 | def test_get_create_backup_and_rename_intermediate_sql(self, project): 7 | args = { 8 | "database": project.database, 9 | "schema": project.test_schema, 10 | "identifier": "my_table", 11 | "relation_type": "table", 12 | } 13 | expected_statement = ( 14 | f"alter table {project.database}.{project.test_schema}.my_table " 15 | f"rename to {project.database}.{project.test_schema}.my_table__dbt_backup" 16 | ) 17 | self.assert_operation(project, "test__get_create_backup_sql", args, expected_statement) 18 | 19 | expected_statement = ( 20 | f"alter table {project.database}.{project.test_schema}.my_table__dbt_tmp " 21 | f"rename to {project.database}.{project.test_schema}.my_table" 22 | ) 23 | self.assert_operation( 24 | project, "test__get_rename_intermediate_sql", args, expected_statement 25 | ) 26 | -------------------------------------------------------------------------------- /tests/functional/relation_tests/test_view.py: -------------------------------------------------------------------------------- 1 | from tests.functional.relation_tests.base import RelationOperation 2 | 3 | 4 | class TestView(RelationOperation): 5 | 6 | def test_get_create_backup_and_rename_intermediate_sql(self, project): 7 | args = { 8 | "database": project.database, 9 | "schema": project.test_schema, 10 | "identifier": "my_view", 11 | "relation_type": 
"view", 12 | } 13 | expected_statement = ( 14 | f"alter view {project.database}.{project.test_schema}.my_view " 15 | f"rename to {project.database}.{project.test_schema}.my_view__dbt_backup" 16 | ) 17 | self.assert_operation(project, "test__get_create_backup_sql", args, expected_statement) 18 | 19 | expected_statement = ( 20 | f"alter view {project.database}.{project.test_schema}.my_view__dbt_tmp " 21 | f"rename to {project.database}.{project.test_schema}.my_view" 22 | ) 23 | self.assert_operation( 24 | project, "test__get_rename_intermediate_sql", args, expected_statement 25 | ) 26 | -------------------------------------------------------------------------------- /tests/functional/snowflake_view_dependency/test_snowflake_view_dependency.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt, check_relations_equal 3 | 4 | _MODELS__DEPENDENT_MODEL_SQL = """ 5 | {% if var('dependent_type', 'view') == 'view' %} 6 | {{ config(materialized='view') }} 7 | {% else %} 8 | {{ config(materialized='table') }} 9 | {% endif %} 10 | 11 | select * from {{ ref('base_table') }} 12 | """ 13 | 14 | 15 | _MODELS__BASE_TABLE_SQL = """ 16 | {{ config(materialized='table') }} 17 | select * 18 | {% if var('add_table_field', False) %} 19 | , 1 as new_field 20 | {% endif %} 21 | 22 | from {{ ref('people') }} 23 | """ 24 | 25 | _SEEDS__PEOPLE_CSV = """id,name 26 | 1,Drew 27 | 2,Jake 28 | 3,Connor 29 | """ 30 | 31 | 32 | class TestSnowflakeLateBindingViewDependency: 33 | @pytest.fixture(scope="class") 34 | def models(self): 35 | return { 36 | "dependent_model.sql": _MODELS__DEPENDENT_MODEL_SQL, 37 | "base_table.sql": _MODELS__BASE_TABLE_SQL, 38 | } 39 | 40 | @pytest.fixture(scope="class") 41 | def seeds(self): 42 | return {"people.csv": _SEEDS__PEOPLE_CSV} 43 | 44 | @pytest.fixture(scope="class") 45 | def project_config_update(self): 46 | return { 47 | "seeds": { 48 | "quote_columns": False, 49 | }, 50 | "quoting": {"schema": False, "identifier": False}, 51 | } 52 | 53 | @pytest.fixture(scope="class", autouse=True) 54 | def setup_method(self, project): 55 | results = run_dbt(["seed"]) 56 | assert len(results) == 1 57 | results = run_dbt(["run"]) 58 | assert len(results) == 2 59 | check_relations_equal(project.adapter, ["PEOPLE", "BASE_TABLE"]) 60 | check_relations_equal(project.adapter, ["PEOPLE", "DEPENDENT_MODEL"]) 61 | 62 | def check_result(self, project, results, expected_types): 63 | for result in results: 64 | node_name = result.node.name 65 | node_type = result.node.config.materialized 66 | assert node_type == expected_types[node_name] 67 | 68 | """ 69 | Snowflake views are not bound to the relations they select from. A Snowflake view 70 | can have entirely invalid SQL if, for example, the table it selects from is dropped 71 | and recreated with a different schema. In these scenarios, Snowflake will raise an error if: 72 | 1) The view is queried 73 | 2) The view is altered 74 | 75 | dbt's logic should avoid running these types of queries against views in situations 76 | where they _may_ have invalid definitions. 
These tests assert that views are handled 77 | correctly in a variety of scenarios. 78 | """ 79 | 80 | def test__snowflake__changed_table_schema_for_downstream_view(self, project): 81 | run_dbt(["seed"]) 82 | # Change the schema of base_table, assert that dependent_model doesn't fail 83 | results = run_dbt(["run", "--vars", "{add_table_field: true, dependent_type: view}"]) 84 | assert len(results) == 2 85 | check_relations_equal(project.adapter, ["BASE_TABLE", "DEPENDENT_MODEL"]) 86 | 87 | """ 88 | This test is similar to the one above, except the downstream model starts as a view, and 89 | then is changed to a table. This checks that the table materialization does not 90 | errantly rename a view that might have an invalid definition, which would cause an error. 91 | """ 92 | 93 | def test__snowflake__changed_table_schema_for_downstream_view_changed_to_table(self, project): 94 | run_dbt(["seed"]) 95 | results = run_dbt(["run"]) 96 | expected_types = {"base_table": "table", "dependent_model": "view"} 97 | # ensure that the model actually was materialized as a view 98 | self.check_result(project, results, expected_types) 99 | results = run_dbt(["run", "--vars", "{add_table_field: true, dependent_type: table}"]) 100 | assert len(results) == 2 101 | check_relations_equal(project.adapter, ["BASE_TABLE", "DEPENDENT_MODEL"]) 102 | expected_types = {"base_table": "table", "dependent_model": "table"} 103 | # ensure that the model actually was materialized as a table 104 | self.check_result(project, results, expected_types) 105 | -------------------------------------------------------------------------------- /tests/functional/test_isolated_begin_commit.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt_and_capture 3 | 4 | my_model_sql = """ 5 | {{ 6 | config( 7 | materialized = 'table', 8 | post_hook = '{{ my_silly_insert_macro() }}' 9 | ) 10 | }} 11 | select 1 as id, 'blue' as color, current_timestamp as updated_at 12 | """ 13 | 14 | my_macro_sql = """ 15 | {% macro my_silly_insert_macro() %} 16 | {#-- This is a bad pattern!
Made obsolete by changes in v0.21 + v1.2 --#} 17 | {% do run_query('begin;') %} 18 | {% set query %} 19 | insert into {{ this }} values (2, 'red', current_timestamp); 20 | {% endset %} 21 | {% do run_query(query) %} 22 | {% do run_query('commit;') %} 23 | {% endmacro %} 24 | """ 25 | 26 | 27 | class TestIsolatedBeginCommit: 28 | @pytest.fixture(scope="class") 29 | def models(self): 30 | return { 31 | "my_model.sql": my_model_sql, 32 | } 33 | 34 | @pytest.fixture(scope="class") 35 | def macros(self): 36 | return { 37 | "my_macro.sql": my_macro_sql, 38 | } 39 | 40 | def test_isolated_begin_commit(self, project): 41 | # this should succeed / avoid raising an error 42 | results, log_output = run_dbt_and_capture(["run"]) 43 | # but we should see a warning in the logs 44 | assert "WARNING" in log_output and "Explicit transactional logic" in log_output 45 | -------------------------------------------------------------------------------- /tests/functional/utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, Optional 2 | 3 | from dbt.tests.util import ( 4 | get_connection, 5 | get_model_file, 6 | relation_from_name, 7 | set_model_file, 8 | ) 9 | 10 | from dbt.adapters.snowflake.relation_configs import SnowflakeDynamicTableConfig 11 | 12 | 13 | def query_relation_type(project, name: str) -> Optional[str]: 14 | relation = relation_from_name(project.adapter, name) 15 | sql = f""" 16 | select 17 | case table_type 18 | when 'BASE TABLE' then iff(is_dynamic = 'YES', 'dynamic_table', 'table') 19 | when 'VIEW' then 'view' 20 | when 'EXTERNAL TABLE' then 'external_table' 21 | end as relation_type 22 | from information_schema.tables 23 | where table_name like '{relation.identifier.upper()}' 24 | and table_schema like '{relation.schema.upper()}' 25 | and table_catalog like '{relation.database.upper()}' 26 | """ 27 | results = project.run_sql(sql, fetch="all") 28 | 29 | assert len(results) > 0, f"Relation {relation} not found" 30 | assert len(results) == 1, f"Multiple relations found for {relation}" 31 | 32 | return results[0][0].lower() 33 | 34 | 35 | def query_row_count(project, name: str) -> int: 36 | relation = relation_from_name(project.adapter, name) 37 | sql = f"select count(*) from {relation}" 38 | return project.run_sql(sql, fetch="one")[0] 39 | 40 | 41 | def insert_record(project, name: str, record: Dict[str, Any]): 42 | relation = relation_from_name(project.adapter, name) 43 | column_names = ", ".join(record.keys()) 44 | values = ", ".join( 45 | [f"'{value}'" if isinstance(value, str) else f"{value}" for value in record.values()] 46 | ) 47 | sql = f"insert into {relation} ({column_names}) values ({values})" 48 | project.run_sql(sql) 49 | 50 | 51 | def update_model(project, name: str, model: str) -> str: 52 | relation = relation_from_name(project.adapter, name) 53 | original_model = get_model_file(project, relation) 54 | set_model_file(project, relation, model) 55 | return original_model 56 | 57 | 58 | def describe_dynamic_table(project, name: str) -> Optional[SnowflakeDynamicTableConfig]: 59 | macro = "snowflake__describe_dynamic_table" 60 | dynamic_table = relation_from_name(project.adapter, name) 61 | kwargs = {"relation": dynamic_table} 62 | with get_connection(project.adapter): 63 | results = project.adapter.execute_macro(macro, kwargs=kwargs) 64 | 65 | assert len(results["dynamic_table"].rows) > 0, f"Dynamic table {dynamic_table} not found" 66 | found = len(results["dynamic_table"].rows) 67 | names = ", ".join([table.get("name") for table in
results["dynamic_table"].rows]) 68 | assert found == 1, f"Multiple dynamic tables found: {names}" 69 | 70 | return SnowflakeDynamicTableConfig.from_relation_results(results) 71 | 72 | 73 | def refresh_dynamic_table(project, name: str) -> None: 74 | macro = "snowflake__refresh_dynamic_table" 75 | dynamic_table = relation_from_name(project.adapter, name) 76 | kwargs = {"relation": dynamic_table} 77 | with get_connection(project.adapter): 78 | project.adapter.execute_macro(macro, kwargs=kwargs) 79 | -------------------------------------------------------------------------------- /tests/performance/README.md: -------------------------------------------------------------------------------- 1 | # Performance testing 2 | 3 | These tests are not meant to run on a regular basis; instead, they are tools for measuring performance impacts of changes as needed. 4 | We often get requests for reducing processing times, researching why a particular component is taking longer to run than expected, etc. 5 | In the past we have performed one-off analyses to address these requests and documented the results in the relevant PR (when a change is made). 6 | It is more useful to document those analyses in the form of performance tests so that we can easily rerun the analysis at a later date. 7 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-labs/dbt-snowflake/986d31db890580f04d92a17feca6291c410b9629/tests/unit/__init__.py -------------------------------------------------------------------------------- /tests/unit/mock_adapter.py: -------------------------------------------------------------------------------- 1 | from contextlib import contextmanager 2 | from unittest import mock 3 | 4 | from dbt.adapters.base import BaseAdapter 5 | 6 | 7 | def adapter_factory(): 8 | class MockAdapter(BaseAdapter): 9 | ConnectionManager = mock.MagicMock(TYPE="mock") 10 | responder = mock.MagicMock() 11 | # some convenient defaults 12 | responder.quote.side_effect = lambda identifier: '"{}"'.format(identifier) 13 | responder.date_function.side_effect = lambda: "unitdate()" 14 | responder.is_cancelable.side_effect = lambda: False 15 | 16 | @contextmanager 17 | def exception_handler(self, *args, **kwargs): 18 | self.responder.exception_handler(*args, **kwargs) 19 | yield 20 | 21 | def execute(self, *args, **kwargs): 22 | return self.responder.execute(*args, **kwargs) 23 | 24 | def drop_relation(self, *args, **kwargs): 25 | return self.responder.drop_relation(*args, **kwargs) 26 | 27 | def truncate_relation(self, *args, **kwargs): 28 | return self.responder.truncate_relation(*args, **kwargs) 29 | 30 | def rename_relation(self, *args, **kwargs): 31 | return self.responder.rename_relation(*args, **kwargs) 32 | 33 | def get_columns_in_relation(self, *args, **kwargs): 34 | return self.responder.get_columns_in_relation(*args, **kwargs) 35 | 36 | def get_catalog_for_single_relation(self, *args, **kwargs): 37 | return self.responder.get_catalog_for_single_relation(*args, **kwargs) 38 | 39 | def expand_column_types(self, *args, **kwargs): 40 | return self.responder.expand_column_types(*args, **kwargs) 41 | 42 | def list_relations_without_caching(self, *args, **kwargs): 43 | return self.responder.list_relations_without_caching(*args, **kwargs) 44 | 45 | def create_schema(self, *args, **kwargs): 46 | return self.responder.create_schema(*args, **kwargs) 47 | 48 | def 
drop_schema(self, *args, **kwargs): 49 | return self.responder.drop_schema(*args, **kwargs) 50 | 51 | @classmethod 52 | def quote(cls, identifier): 53 | return cls.responder.quote(identifier) 54 | 55 | def convert_text_type(self, *args, **kwargs): 56 | return self.responder.convert_text_type(*args, **kwargs) 57 | 58 | def convert_number_type(self, *args, **kwargs): 59 | return self.responder.convert_number_type(*args, **kwargs) 60 | 61 | def convert_boolean_type(self, *args, **kwargs): 62 | return self.responder.convert_boolean_type(*args, **kwargs) 63 | 64 | def convert_datetime_type(self, *args, **kwargs): 65 | return self.responder.convert_datetime_type(*args, **kwargs) 66 | 67 | def convert_date_type(self, *args, **kwargs): 68 | return self.responder.convert_date_type(*args, **kwargs) 69 | 70 | def convert_time_type(self, *args, **kwargs): 71 | return self.responder.convert_time_type(*args, **kwargs) 72 | 73 | def list_schemas(self, *args, **kwargs): 74 | return self.responder.list_schemas(*args, **kwargs) 75 | 76 | @classmethod 77 | def date_function(cls): 78 | return cls.responder.date_function() 79 | 80 | @classmethod 81 | def is_cancelable(cls): 82 | return cls.responder.is_cancelable() 83 | 84 | return MockAdapter 85 | -------------------------------------------------------------------------------- /tests/unit/test_adapter_telemetry.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import dbt.adapters.snowflake.__version__ 4 | 5 | from dbt.adapters.snowflake.impl import SnowflakeAdapter 6 | from dbt.adapters.base.relation import AdapterTrackingRelationInfo 7 | 8 | 9 | def test_telemetry_with_snowflake_details(): 10 | mock_model_config = mock.MagicMock() 11 | mock_model_config._extra = mock.MagicMock() 12 | mock_model_config._extra = { 13 | "adapter_type": "snowflake", 14 | "table_format": "iceberg", 15 | } 16 | 17 | res = SnowflakeAdapter.get_adapter_run_info(mock_model_config) 18 | 19 | assert res.adapter_name == "snowflake" 20 | assert res.base_adapter_version == dbt.adapters.__about__.version 21 | assert res.adapter_version == dbt.adapters.snowflake.__version__.version 22 | assert res.model_adapter_details == { 23 | "adapter_type": "snowflake", 24 | "table_format": "iceberg", 25 | } 26 | 27 | assert type(res) is AdapterTrackingRelationInfo 28 | -------------------------------------------------------------------------------- /tests/unit/test_connections.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pytest 3 | from importlib import reload 4 | from unittest.mock import Mock, patch 5 | import multiprocessing 6 | from dbt.adapters.exceptions.connection import FailedToConnectError 7 | import dbt.adapters.snowflake.connections as connections 8 | import dbt.adapters.events.logging 9 | 10 | 11 | def test_connections_sets_logs_in_response_to_env_var(monkeypatch): 12 | """Test that setting the DBT_SNOWFLAKE_CONNECTOR_DEBUG_LOGGING environment variable enables connector debug logging at import time""" 13 | log_mock = Mock() 14 | monkeypatch.setattr(dbt.adapters.events.logging, "AdapterLogger", Mock(return_value=log_mock)) 15 | monkeypatch.setattr(os, "environ", {"DBT_SNOWFLAKE_CONNECTOR_DEBUG_LOGGING": "true"}) 16 | reload(connections) 17 | 18 | assert log_mock.debug.call_count == 3 19 | assert log_mock.set_adapter_dependency_log_level.call_count == 3 20 | 21 | 22 | def test_connections_does_not_set_logs_in_response_to_env_var(monkeypatch): 23 | log_mock = Mock() 24 |
monkeypatch.setattr(dbt.adapters.events.logging, "AdapterLogger", Mock(return_value=log_mock)) 25 | reload(connections) 26 | 27 | assert log_mock.debug.call_count == 0 28 | assert log_mock.set_adapter_dependency_log_level.call_count == 0 29 | 30 | 31 | def test_connections_credentials_replaces_underscores_with_hyphens(): 32 | credentials = { 33 | "account": "account_id_with_underscores", 34 | "user": "user", 35 | "password": "password", 36 | "database": "database", 37 | "warehouse": "warehouse", 38 | "schema": "schema", 39 | } 40 | creds = connections.SnowflakeCredentials(**credentials) 41 | assert creds.account == "account-id-with-underscores" 42 | 43 | 44 | def test_snowflake_oauth_expired_token_raises_error(): 45 | credentials = { 46 | "account": "test_account", 47 | "user": "test_user", 48 | "authenticator": "oauth", 49 | "token": "expired_or_incorrect_token", 50 | "database": "database", 51 | "schema": "schema", 52 | } 53 | 54 | mp_context = multiprocessing.get_context("spawn") 55 | mock_credentials = connections.SnowflakeCredentials(**credentials) 56 | 57 | with patch.object( 58 | connections.SnowflakeConnectionManager, 59 | "open", 60 | side_effect=FailedToConnectError( 61 | "This error occurs when authentication has expired. " 62 | "Please reauth with your auth provider." 63 | ), 64 | ): 65 | 66 | adapter = connections.SnowflakeConnectionManager(mock_credentials, mp_context) 67 | 68 | with pytest.raises(FailedToConnectError): 69 | adapter.open() 70 | -------------------------------------------------------------------------------- /tests/unit/test_iceberg_location.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.adapters.snowflake.relation import SnowflakeRelation 3 | 4 | 5 | @pytest.fixture 6 | def iceberg_config() -> dict: 7 | """Fixture providing standard Iceberg configuration.""" 8 | return { 9 | "schema": "my_schema", 10 | "identifier": "my_table", 11 | "external_volume": "s3_iceberg_snow", 12 | "base_location_root": "root_path", 13 | "base_location_subpath": "subpath", 14 | } 15 | 16 | 17 | def get_actual_base_location(config: dict[str, str]) -> str: 18 | """Get the actual base location from the configuration by parsing the DDL predicates.""" 19 | 20 | relation = SnowflakeRelation.create( 21 | schema=config["schema"], 22 | identifier=config["identifier"], 23 | ) 24 | 25 | actual_ddl_predicates = relation.get_iceberg_ddl_options(config).strip() 26 | actual_base_location = actual_ddl_predicates.split("base_location = ")[1] 27 | 28 | return actual_base_location 29 | 30 | 31 | def test_iceberg_path_and_subpath(iceberg_config: dict[str, str]): 32 | """Test when base_location_root and base_location_subpath are provided""" 33 | expected_base_location = ( 34 | f"'{iceberg_config['base_location_root']}/" 35 | f"{iceberg_config['schema']}/" 36 | f"{iceberg_config['identifier']}/" 37 | f"{iceberg_config['base_location_subpath']}'" 38 | ).strip() 39 | 40 | assert get_actual_base_location(iceberg_config) == expected_base_location 41 | 42 | 43 | def test_iceberg_only_subpath(iceberg_config: dict[str, str]): 44 | """Test when only base_location_subpath is provided""" 45 | del iceberg_config["base_location_root"] 46 | 47 | expected_base_location = ( 48 | f"'_dbt/" 49 | f"{iceberg_config['schema']}/" 50 | f"{iceberg_config['identifier']}/" 51 | f"{iceberg_config['base_location_subpath']}'" 52 | ).strip() 53 | 54 | assert get_actual_base_location(iceberg_config) == expected_base_location 55 | 56 | 57 | def
test_iceberg_only_path(iceberg_config: dict[str, str]): 58 | """Test when only base_location_root is provided""" 59 | del iceberg_config["base_location_subpath"] 60 | 61 | expected_base_location = ( 62 | f"'{iceberg_config['base_location_root']}/" 63 | f"{iceberg_config['schema']}/" 64 | f"{iceberg_config['identifier']}'" 65 | ).strip() 66 | 67 | assert get_actual_base_location(iceberg_config) == expected_base_location 68 | 69 | 70 | def test_iceberg_no_path(iceberg_config: dict[str, str]): 71 | """Test when neither base_location_root nor base_location_subpath is provided""" 72 | del iceberg_config["base_location_root"] 73 | del iceberg_config["base_location_subpath"] 74 | 75 | expected_base_location = ( 76 | f"'_dbt/" f"{iceberg_config['schema']}/" f"{iceberg_config['identifier']}'" 77 | ).strip() 78 | 79 | assert get_actual_base_location(iceberg_config) == expected_base_location 80 | -------------------------------------------------------------------------------- /tests/unit/test_private_keys.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tempfile 3 | from typing import Generator 4 | 5 | from cryptography.hazmat.primitives import serialization 6 | from cryptography.hazmat.primitives.asymmetric import rsa 7 | import pytest 8 | 9 | from dbt.adapters.snowflake.auth import private_key_from_file, private_key_from_string 10 | 11 | 12 | PASSPHRASE = "password1234" 13 | 14 | 15 | def serialize(private_key: rsa.RSAPrivateKey) -> bytes: 16 | return private_key.private_bytes( 17 | serialization.Encoding.DER, 18 | serialization.PrivateFormat.PKCS8, 19 | serialization.NoEncryption(), 20 | ) 21 | 22 | 23 | @pytest.fixture(scope="session") 24 | def private_key() -> rsa.RSAPrivateKey: 25 | return rsa.generate_private_key(public_exponent=65537, key_size=2048) 26 | 27 | 28 | @pytest.fixture(scope="session") 29 | def private_key_string(private_key) -> str: 30 | private_key_bytes = private_key.private_bytes( 31 | encoding=serialization.Encoding.PEM, 32 | format=serialization.PrivateFormat.PKCS8, 33 | encryption_algorithm=serialization.BestAvailableEncryption(PASSPHRASE.encode()), 34 | ) 35 | return private_key_bytes.decode("utf-8") 36 | 37 | 38 | @pytest.fixture(scope="session") 39 | def private_key_file(private_key) -> Generator[str, None, None]: 40 | private_key_bytes = private_key.private_bytes( 41 | encoding=serialization.Encoding.PEM, 42 | format=serialization.PrivateFormat.PKCS8, 43 | encryption_algorithm=serialization.BestAvailableEncryption(PASSPHRASE.encode()), 44 | ) 45 | file = tempfile.NamedTemporaryFile() 46 | file.write(private_key_bytes) 47 | file.seek(0) 48 | yield file.name 49 | file.close() 50 | 51 | 52 | def test_private_key_from_string_pem(private_key_string, private_key): 53 | assert isinstance(private_key_string, str) 54 | calculated_private_key = private_key_from_string(private_key_string, PASSPHRASE) 55 | assert serialize(calculated_private_key) == serialize(private_key) 56 | 57 | 58 | def test_private_key_from_file(private_key_file, private_key): 59 | assert os.path.exists(private_key_file) 60 | calculated_private_key = private_key_from_file(private_key_file, PASSPHRASE) 61 | assert serialize(calculated_private_key) == serialize(private_key) 62 | -------------------------------------------------------------------------------- /tests/unit/test_relation_as_case_sensitive.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.snowflake.relation import SnowflakeRelation 2 | from
dbt.adapters.snowflake.relation_configs import SnowflakeQuotePolicy 3 | 4 | 5 | def test_relation_as_case_sensitive_quoting_true(): 6 | relation = SnowflakeRelation.create( 7 | database="My_Db", 8 | schema="My_ScHeMa", 9 | identifier="My_TaBlE", 10 | quote_policy=SnowflakeQuotePolicy(database=False, schema=True, identifier=False), 11 | ) 12 | 13 | case_sensitive_relation = relation.as_case_sensitive() 14 | case_sensitive_relation.render_limited() 15 | 16 | assert case_sensitive_relation.database == "MY_DB" 17 | assert case_sensitive_relation.schema == "My_ScHeMa" 18 | assert case_sensitive_relation.identifier == "MY_TABLE" 19 | assert case_sensitive_relation.render() == 'MY_DB."My_ScHeMa".MY_TABLE' 20 | -------------------------------------------------------------------------------- /tests/unit/test_renamed_relations.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.snowflake.relation import SnowflakeRelation 2 | from dbt.adapters.snowflake.relation_configs import SnowflakeRelationType 3 | 4 | 5 | def test_renameable_relation(): 6 | relation = SnowflakeRelation.create( 7 | database="my_db", 8 | schema="my_schema", 9 | identifier="my_table", 10 | type=SnowflakeRelationType.Table, 11 | ) 12 | assert relation.renameable_relations == frozenset( 13 | { 14 | SnowflakeRelationType.Table, 15 | SnowflakeRelationType.View, 16 | } 17 | ) 18 | --------------------------------------------------------------------------------