├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── dbt ├── __init__.py ├── adapters │ └── fabric │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── fabric_adapter.py │ │ ├── fabric_column.py │ │ ├── fabric_connection_manager.py │ │ └── fabric_credentials.py └── include │ └── fabric │ ├── __init__.py │ ├── dbt_project.yml │ └── macros │ ├── adapters │ ├── metadata.sql │ └── relation.sql │ └── materializations │ └── models │ └── table │ └── create_table_as.sql ├── dev_requirements.txt ├── pytest.ini ├── setup.py ├── test.env.sample └── tests ├── conftest.py └── functional └── adapter ├── test_aliases.py ├── test_basic.py ├── test_changing_relation_type.py ├── test_concurrency.py ├── test_data_types.py ├── test_debug.py ├── test_docs.py ├── test_ephemeral.py ├── test_grants.py ├── test_incremental.py ├── test_new_project.py ├── test_provision_users.py ├── test_query_comment.py ├── test_schema.py ├── test_seed.py ├── test_sources.py ├── test_timestamps.py └── test_utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | /*.egg-info 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | *.log.legacy 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | # DotEnv configuration 62 | .env 63 | 64 | # Database 65 | *.db 66 | *.rdb 67 | 68 | # Pycharm 69 | .idea 70 | 71 | # Spyder 72 | .spyproject/ 73 | 74 | # Jupyter NB Checkpoints 75 | .ipynb_checkpoints/ 76 | 77 | # exclude data from source control by default 78 | /data/ 79 | 80 | # Mac OS-specific storage files 81 | .DS_Store 82 | 83 | # vim 84 | *.swp 85 | *.swo 86 | 87 | # Mypy cache 88 | .mypy_cache/ 89 | 90 | # Environments 91 | *.env 92 | .venv 93 | env/ 94 | venv/ 95 | ENV/ 96 | env.bak/ 97 | venv.bak/ 98 | logs/ 99 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.8 3 | repos: 4 | - repo: 'https://github.com/pre-commit/pre-commit-hooks' 5 | rev: v4.4.0 6 | hooks: 7 | - id: check-yaml 8 | args: 9 | - '--unsafe' 10 | - id: check-json 11 | - id: end-of-file-fixer 12 | - id: trailing-whitespace 13 | exclude_types: 14 | - markdown 15 | - id: check-case-conflict 16 | - id: check-ast 17 | - id: check-builtin-literals 18 | - id: check-merge-conflict 19 | - id: no-commit-to-branch 20 | - id: fix-byte-order-marker 21 | - id: mixed-line-ending 22 | - id: check-docstring-first 23 | - repo: 'https://github.com/adrienverge/yamllint' 24 | rev: v1.31.0 25 | hooks: 26 | - id: yamllint 27 | args: 28 | - '-d {extends: default, rules: {line-length: disable, document-start: disable}}' 29 | - '-s' 30 | - repo: 
'https://github.com/MarcoGorelli/absolufy-imports' 31 | rev: v0.3.1 32 | hooks: 33 | - id: absolufy-imports 34 | - repo: 'https://github.com/hadialqattan/pycln' 35 | rev: v2.1.3 36 | hooks: 37 | - id: pycln 38 | args: 39 | - '--all' 40 | - repo: 'https://github.com/pycqa/isort' 41 | rev: 5.12.0 42 | hooks: 43 | - id: isort 44 | args: 45 | - '--profile' 46 | - black 47 | - '--atomic' 48 | - '--line-length' 49 | - '99' 50 | - '--python-version' 51 | - '39' 52 | - repo: 'https://github.com/psf/black' 53 | rev: 23.3.0 54 | hooks: 55 | - id: black 56 | args: 57 | - '--line-length=99' 58 | - '--target-version=py39' 59 | - id: black 60 | alias: black-check 61 | stages: 62 | - manual 63 | args: 64 | - '--line-length=99' 65 | - '--target-version=py39' 66 | - '--check' 67 | - '--diff' 68 | - repo: 'https://github.com/pycqa/flake8' 69 | rev: 6.0.0 70 | hooks: 71 | - id: flake8 72 | args: 73 | - '--max-line-length=99' 74 | - id: flake8 75 | args: 76 | - '--max-line-length=99' 77 | alias: flake8-check 78 | stages: 79 | - manual 80 | - repo: 'https://github.com/pre-commit/mirrors-mypy' 81 | rev: v1.3.0 82 | hooks: 83 | - id: mypy 84 | args: 85 | - '--show-error-codes' 86 | - '--ignore-missing-imports' 87 | - '--explicit-package-bases' 88 | files: '^dbt/adapters' 89 | - id: mypy 90 | alias: mypy-check 91 | stages: 92 | - manual 93 | args: 94 | - '--show-error-codes' 95 | - '--pretty' 96 | - '--ignore-missing-imports' 97 | files: '^dbt/adapters' 98 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Sam Debruyn 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include dbt/include *.sql *.yml *.md 2 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .DEFAULT_GOAL:=help 2 | 3 | .PHONY: dev 4 | dev: ## Installs adapter in develop mode along with development dependencies 5 | @\ 6 | pip install -r dev_requirements.txt && pre-commit install 7 | 8 | .PHONY: mypy 9 | mypy: ## Runs mypy against staged changes for static type checking. 10 | @\ 11 | pre-commit run --hook-stage manual mypy-check | grep -v "INFO" 12 | 13 | .PHONY: flake8 14 | flake8: ## Runs flake8 against staged changes to enforce style guide. 15 | @\ 16 | pre-commit run --hook-stage manual flake8-check | grep -v "INFO" 17 | 18 | .PHONY: black 19 | black: ## Runs black against staged changes to enforce style guide. 
20 | @\ 21 | pre-commit run --hook-stage manual black-check -v | grep -v "INFO" 22 | 23 | .PHONY: lint 24 | lint: ## Runs flake8 and mypy code checks against staged changes. 25 | @\ 26 | pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \ 27 | pre-commit run mypy-check --hook-stage manual | grep -v "INFO" 28 | 29 | .PHONY: all 30 | all: ## Runs all checks against staged changes. 31 | @\ 32 | pre-commit run -a 33 | 34 | .PHONY: linecheck 35 | linecheck: ## Checks for all Python lines 100 characters or more 36 | @\ 37 | find dbt -type f -name "*.py" -exec grep -I -r -n '.\{100\}' {} \; 38 | 39 | .PHONY: functional 40 | functional: ## Runs functional tests. 41 | @\ 42 | pytest -n auto -ra -v tests/functional 43 | 44 | .PHONY: clean 45 | clean: ## Removes untracked and ignored files from the repo. 46 | @echo "cleaning repo" 47 | @git clean -f -X 48 | 49 | .PHONY: help 50 | help: ## Show this help message. 51 | @echo 'usage: make [target]' 52 | @echo 53 | @echo 'targets:' 54 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 55 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dbt-fabric 2 | 3 | The code from this adapter has been integrated into [Microsoft's official dbt adapter for Fabric](https://github.com/microsoft/dbt-fabric). 
4 | -------------------------------------------------------------------------------- /dbt/__init__.py: -------------------------------------------------------------------------------- 1 | __path__ = __import__("pkgutil").extend_path(__path__, __name__) 2 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.base import AdapterPlugin 2 | 3 | from dbt.adapters.fabric.fabric_adapter import FabricAdapter 4 | from dbt.adapters.fabric.fabric_connection_manager import FabricConnectionManager 5 | from dbt.adapters.fabric.fabric_credentials import FabricCredentials 6 | from dbt.include import fabric 7 | 8 | Plugin = AdapterPlugin( 9 | adapter=FabricAdapter, 10 | credentials=FabricCredentials, 11 | include_path=fabric.PACKAGE_PATH, 12 | dependencies=["sqlserver"], 13 | ) 14 | 15 | __all__ = [ 16 | "Plugin", 17 | "FabricConnectionManager", 18 | "FabricAdapter", 19 | "FabricCredentials", 20 | ] 21 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/__version__.py: -------------------------------------------------------------------------------- 1 | version = "1.4.0" 2 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/fabric_adapter.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.sqlserver import SQLServerAdapter 2 | 3 | from dbt.adapters.fabric.fabric_column import FabricColumn 4 | from dbt.adapters.fabric.fabric_connection_manager import FabricConnectionManager 5 | 6 | 7 | class FabricAdapter(SQLServerAdapter): 8 | ConnectionManager = FabricConnectionManager 9 | Column = FabricColumn 10 | 11 | @classmethod 12 | def convert_datetime_type(cls, agate_table, col_idx): 13 | return "datetime2(6)" 14 | 15 | @classmethod 16 | def convert_time_type(cls, 
agate_table, col_idx): 17 | return "time(6)" 18 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/fabric_column.py: -------------------------------------------------------------------------------- 1 | from typing import ClassVar, Dict 2 | 3 | from dbt.adapters.sqlserver import SQLServerColumn 4 | 5 | 6 | class FabricColumn(SQLServerColumn): 7 | TYPE_LABELS: ClassVar[Dict[str, str]] = { 8 | "STRING": "VARCHAR(MAX)", 9 | "TIMESTAMP": "DATETIME2(6)", 10 | "FLOAT": "FLOAT", 11 | "INTEGER": "INT", 12 | "BOOLEAN": "BIT", 13 | } 14 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/fabric_connection_manager.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from typing import Optional, Any, Tuple 3 | 4 | from dbt.adapters.sqlserver import SQLServerConnectionManager 5 | from pyodbc import Connection 6 | 7 | 8 | class FabricConnectionManager(SQLServerConnectionManager): 9 | TYPE = "fabric" 10 | 11 | def add_query(self, sql: str, auto_begin: bool = True, bindings: Optional[Any] = None, 12 | abridge_sql_log: bool = False) -> Tuple[Connection, Any]: 13 | if bindings: 14 | bindings = [binding if not isinstance(binding, datetime.datetime) else binding.isoformat() for binding in bindings] 15 | return super().add_query(sql, auto_begin, bindings, abridge_sql_log) 16 | -------------------------------------------------------------------------------- /dbt/adapters/fabric/fabric_credentials.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from dbt.adapters.sqlserver import SQLServerCredentials 4 | 5 | 6 | @dataclass 7 | class FabricCredentials(SQLServerCredentials): 8 | @property 9 | def type(self): 10 | return "fabric" 11 | -------------------------------------------------------------------------------- /dbt/include/fabric/__init__.py: 
-------------------------------------------------------------------------------- 1 | import os 2 | 3 | PACKAGE_PATH = os.path.dirname(__file__) 4 | -------------------------------------------------------------------------------- /dbt/include/fabric/dbt_project.yml: -------------------------------------------------------------------------------- 1 | name: dbt_fabric 2 | version: 1.0 3 | 4 | config-version: 2 5 | 6 | macro-paths: ["macros"] 7 | -------------------------------------------------------------------------------- /dbt/include/fabric/macros/adapters/metadata.sql: -------------------------------------------------------------------------------- 1 | {% macro fabric__information_schema_hints() %}{% endmacro %} 2 | -------------------------------------------------------------------------------- /dbt/include/fabric/macros/adapters/relation.sql: -------------------------------------------------------------------------------- 1 | {% macro fabric__rename_relation(from_relation, to_relation) -%} 2 | {% if from_relation.type == 'view' %} 3 | {% call statement('get_view_definition', fetch_result=True) %} 4 | select VIEW_DEFINITION 5 | from INFORMATION_SCHEMA.VIEWS 6 | where TABLE_CATALOG = '{{ from_relation.database }}' 7 | and TABLE_SCHEMA = '{{ from_relation.schema }}' 8 | and TABLE_NAME = '{{ from_relation.identifier }}' 9 | {% endcall %} 10 | {% set view_def_full = load_result('get_view_definition')['data'][0][0] %} 11 | {{ log("Found view definition " ~ view_def_full) }} 12 | {% set view_def_sql_matches = modules.re.match('^create\\s+view\\s+[0-9a-z.\\"\\[\\]_]+\\s+as\\s+\\(?(.*)\\)?\\s+;?\\s+$', view_def_full, modules.re.I) %} 13 | {% if not view_def_sql_matches %} 14 | {{ exceptions.raise_compiler_error("Could not extract view definition to rename") }} 15 | {% endif %} 16 | {% set view_def_sql = view_def_sql_matches.group(1) %} 17 | {{ log("Found view SQL " ~ view_def_sql) }} 18 | {% call statement('create_new_view') %} 19 | {{ create_view_as(to_relation, 
view_def_sql) }} 20 | {% endcall %} 21 | {% call statement('drop_old_view') %} 22 | drop view {{ from_relation.include(database=False) }}; 23 | {% endcall %} 24 | {% endif %} 25 | {% if from_relation.type == 'table' %} 26 | {% call statement('rename_relation') %} 27 | create table {{ to_relation.include(database=False) }} as select * from {{ from_relation.include(database=False) }} 28 | {%- endcall %} 29 | {{ sqlserver__drop_relation(from_relation) }} 30 | {% endif %} 31 | {% endmacro %} -------------------------------------------------------------------------------- /dbt/include/fabric/macros/materializations/models/table/create_table_as.sql: -------------------------------------------------------------------------------- 1 | {% macro fabric__create_table_as(temporary, relation, sql) -%} 2 | {%- set sql_header = config.get('sql_header', none) -%} 3 | {%- set temp_view_sql = sql.replace("'", "''") -%} 4 | {%- set tmp_relation = relation.incorporate( 5 | path={"identifier": relation.identifier.replace("#", "") ~ '_temp_view'}, 6 | type='view') -%} 7 | 8 | {{ sql_header if sql_header is not none }} 9 | 10 | -- drop previous temp view 11 | {{- sqlserver__drop_relation_script(tmp_relation) }} 12 | 13 | -- create temp view 14 | USE [{{ relation.database }}]; 15 | EXEC('create view {{ tmp_relation.include(database=False) }} as 16 | {{ temp_view_sql }} 17 | '); 18 | 19 | -- now create the actual table 20 | create table 21 | {{ relation.include(database=(not temporary), schema=(not temporary)) }} 22 | as ( select * from {{ tmp_relation }} ); 23 | 24 | -- drop temp view 25 | {{ sqlserver__drop_relation_script(tmp_relation) }} 26 | {% endmacro %} 27 | -------------------------------------------------------------------------------- /dev_requirements.txt: -------------------------------------------------------------------------------- 1 | pytest==7.1.3 2 | twine==4.0.2 3 | wheel==0.40.0 4 | pre-commit==2.21.0;python_version<"3.8" 5 | pre-commit==3.3.2;python_version>="3.8" 6 | 
pytest-dotenv==0.5.2 7 | dbt-tests-adapter~=1.4.6 8 | flaky==3.7.0 9 | pytest-xdist==3.3.1 10 | -e . 11 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning 4 | ignore:unclosed file .*:ResourceWarning 5 | env_files = 6 | test.env 7 | testpaths = 8 | tests/functional 9 | markers = 10 | skip_profile 11 | only_with_profile 12 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import re 4 | import sys 5 | 6 | from setuptools import find_namespace_packages, setup 7 | from setuptools.command.install import install 8 | 9 | package_name = "dbt-fabric" 10 | authors_list = ["Sam Debruyn"] 11 | dbt_version = "1.4" 12 | dbt_sqlserver_requirement = "dbt-sqlserver>=1.4.3" 13 | description = "A dbt adapter for Microsoft Fabric DWH" 14 | 15 | this_directory = os.path.abspath(os.path.dirname(__file__)) 16 | with open(os.path.join(this_directory, "README.md")) as f: 17 | long_description = f.read() 18 | 19 | 20 | # get this from a separate file 21 | def _dbt_fabric_version(): 22 | _version_path = os.path.join(this_directory, "dbt", "adapters", "fabric", "__version__.py") 23 | _version_pattern = r"""version\s*=\s*["'](.+)["']""" 24 | with open(_version_path) as f: 25 | match = re.search(_version_pattern, f.read().strip()) 26 | if match is None: 27 | raise ValueError(f"invalid version at {_version_path}") 28 | return match.group(1) 29 | 30 | 31 | package_version = _dbt_fabric_version() 32 | 33 | # the package version should be the dbt version, with maybe some things on the 34 | # ends of it. (0.18.1 vs 0.18.1a1, 0.18.1.1, ...) 
35 | if not package_version.startswith(dbt_version): 36 | raise ValueError( 37 | f"Invalid setup.py: package_version={package_version} must start with " 38 | f"dbt_version={dbt_version}" 39 | ) 40 | 41 | 42 | class VerifyVersionCommand(install): 43 | """Custom command to verify that the git tag matches our version""" 44 | 45 | description = "Verify that the git tag matches our version" 46 | 47 | def run(self): 48 | tag = os.getenv("GITHUB_REF_NAME") 49 | tag_without_prefix = tag[1:] 50 | 51 | if tag_without_prefix != package_version: 52 | info = "Git tag: {0} does not match the version of this app: {1}".format( 53 | tag_without_prefix, package_version 54 | ) 55 | sys.exit(info) 56 | 57 | 58 | setup( 59 | name=package_name, 60 | version=package_version, 61 | description=description, 62 | long_description=long_description, 63 | long_description_content_type="text/markdown", 64 | license="MIT", 65 | author=", ".join(authors_list), 66 | packages=find_namespace_packages(include=["dbt", "dbt.*"]), 67 | include_package_data=True, 68 | install_requires=[dbt_sqlserver_requirement], 69 | cmdclass={ 70 | "verify": VerifyVersionCommand, 71 | }, 72 | classifiers=[ 73 | "Development Status :: 5 - Production/Stable", 74 | "License :: OSI Approved :: MIT License", 75 | "Operating System :: Microsoft :: Windows", 76 | "Operating System :: MacOS :: MacOS X", 77 | "Operating System :: POSIX :: Linux", 78 | "Programming Language :: Python :: 3.8", 79 | "Programming Language :: Python :: 3.9", 80 | "Programming Language :: Python :: 3.10", 81 | "Programming Language :: Python :: 3.11", 82 | ], 83 | ) 84 | -------------------------------------------------------------------------------- /test.env.sample: -------------------------------------------------------------------------------- 1 | FABRIC_TEST_HOST=example.datawarehouse.pbidedicated.windows.net 2 | FABRIC_TEST_DB=TestDWH 3 | FABRIC_TEST_AUTH=cli 4 | DBT_TEST_USER_1=DBT_TEST_USER_1 5 | DBT_TEST_USER_2=DBT_TEST_USER_2 6 | 
DBT_TEST_USER_3=DBT_TEST_USER_3 7 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from _pytest.fixtures import FixtureRequest 5 | 6 | pytest_plugins = ["dbt.tests.fixtures.project"] 7 | 8 | 9 | def pytest_addoption(parser): 10 | parser.addoption( 11 | "--profile", action="store", default=os.getenv("PROFILE_NAME", "user"), type=str 12 | ) 13 | 14 | 15 | @pytest.fixture(scope="class") 16 | def dbt_profile_target(request: FixtureRequest, dbt_profile_target_update): 17 | profile = request.config.getoption("--profile") 18 | 19 | if profile == "ci": 20 | target = _profile_ci() 21 | elif profile == "user": 22 | target = _profile_user() 23 | else: 24 | raise ValueError(f"Unknown profile: {profile}") 25 | 26 | target.update(dbt_profile_target_update) 27 | return target 28 | 29 | 30 | @pytest.fixture(scope="class") 31 | def dbt_profile_target_update(): 32 | return {} 33 | 34 | 35 | def _all_profiles_base(): 36 | return { 37 | "type": "fabric", 38 | "driver": os.getenv("FABRIC_TEST_DRIVER", "ODBC Driver 18 for SQL Server"), 39 | "port": int(os.getenv("FABRIC_TEST_PORT", "1433")), 40 | "host": os.getenv("FABRIC_TEST_HOST"), 41 | "database": os.getenv("FABRIC_TEST_DB"), 42 | "retries": 0, 43 | "threads": 1, 44 | "encrypt": True, 45 | "trust_cert": True, 46 | "authentication": os.getenv("FABRIC_TEST_AUTH", "auto"), 47 | "client_id": os.getenv("FABRIC_TEST_CLIENT_ID"), 48 | "client_secret": os.getenv("FABRIC_TEST_CLIENT_SECRET"), 49 | "tenant_id": os.getenv("FABRIC_TEST_TENANT_ID"), 50 | } 51 | 52 | 53 | def _profile_ci(): 54 | return { 55 | **_all_profiles_base(), 56 | **{}, 57 | } 58 | 59 | 60 | def _profile_user(): 61 | profile = { 62 | **_all_profiles_base(), 63 | **{}, 64 | } 65 | return profile 66 | 67 | 68 | @pytest.fixture(autouse=True) 69 | def skip_by_profile_type(request: FixtureRequest): 70 | 
profile_type = request.config.getoption("--profile") 71 | 72 | if request.node.get_closest_marker("skip_profile"): 73 | if profile_type in request.node.get_closest_marker("skip_profile").args: 74 | pytest.skip(f"Skipped on '{profile_type}' profile") 75 | 76 | if request.node.get_closest_marker("only_with_profile"): 77 | if profile_type not in request.node.get_closest_marker("only_with_profile").args: 78 | pytest.skip(f"Skipped on '{profile_type}' profile") 79 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_aliases.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.aliases.fixtures import MACROS__EXPECT_VALUE_SQL 3 | from dbt.tests.adapter.aliases.test_aliases import ( 4 | BaseAliasErrors, 5 | BaseAliases, 6 | BaseSameAliasDifferentDatabases, 7 | BaseSameAliasDifferentSchemas, 8 | ) 9 | 10 | 11 | class TestAliasesFabric(BaseAliases): 12 | @pytest.fixture(scope="class") 13 | def macros(self): 14 | return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} 15 | 16 | 17 | class TestAliasErrorsFabric(BaseAliasErrors): 18 | @pytest.fixture(scope="class") 19 | def macros(self): 20 | return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} 21 | 22 | 23 | class TestSameAliasDifferentSchemasFabric(BaseSameAliasDifferentSchemas): 24 | @pytest.fixture(scope="class") 25 | def macros(self): 26 | return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} 27 | 28 | 29 | class TestSameAliasDifferentDatabasesFabric(BaseSameAliasDifferentDatabases): 30 | @pytest.fixture(scope="class") 31 | def macros(self): 32 | return {"expect_value.sql": MACROS__EXPECT_VALUE_SQL} 33 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_basic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.basic.files import 
incremental_not_schema_change_sql 3 | from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod 4 | from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations 5 | from dbt.tests.adapter.basic.test_empty import BaseEmpty 6 | from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral 7 | from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests 8 | from dbt.tests.adapter.basic.test_incremental import ( 9 | BaseIncremental, 10 | BaseIncrementalNotSchemaChange, 11 | ) 12 | from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests 13 | from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral 14 | from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols 15 | from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp 16 | from dbt.tests.adapter.basic.test_validate_connection import BaseValidateConnection 17 | 18 | 19 | class TestSimpleMaterializationsFabric(BaseSimpleMaterializations): 20 | pass 21 | 22 | 23 | class TestSingularTestsFabric(BaseSingularTests): 24 | pass 25 | 26 | 27 | @pytest.mark.skip(reason="ephemeral not supported") 28 | class TestSingularTestsEphemeralFabric(BaseSingularTestsEphemeral): 29 | pass 30 | 31 | 32 | class TestEmptyFabric(BaseEmpty): 33 | pass 34 | 35 | 36 | class TestEphemeralFabric(BaseEphemeral): 37 | pass 38 | 39 | 40 | class TestIncrementalFabric(BaseIncremental): 41 | pass 42 | 43 | 44 | class TestIncrementalNotSchemaChangeFabric(BaseIncrementalNotSchemaChange): 45 | @pytest.fixture(scope="class") 46 | def models(self): 47 | return { 48 | "incremental_not_schema_change.sql": incremental_not_schema_change_sql.replace( 49 | "||", "+" 50 | ) 51 | } 52 | 53 | 54 | class TestGenericTestsFabric(BaseGenericTests): 55 | pass 56 | 57 | 58 | class TestSnapshotCheckColsFabric(BaseSnapshotCheckCols): 59 | pass 60 | 61 | 62 | class TestSnapshotTimestampFabric(BaseSnapshotTimestamp): 63 | pass 64 | 
65 | 66 | class TestBaseCachingFabric(BaseAdapterMethod): 67 | pass 68 | 69 | 70 | class TestValidateConnectionFabric(BaseValidateConnection): 71 | pass 72 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_changing_relation_type.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator 2 | 3 | 4 | class TestChangeRelationTypesFabric(BaseChangeRelationTypeValidator): 5 | pass 6 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_concurrency.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.concurrency.test_concurrency import BaseConcurrency, seeds__update_csv 2 | from dbt.tests.util import ( 3 | check_relations_equal, 4 | check_table_does_not_exist, 5 | rm_file, 6 | run_dbt, 7 | run_dbt_and_capture, 8 | write_file, 9 | ) 10 | 11 | 12 | class TestConcurenncyFabric(BaseConcurrency): 13 | def test_concurrency(self, project): 14 | run_dbt(["seed", "--select", "seed"]) 15 | results = run_dbt(["run"], expect_pass=False) 16 | assert len(results) == 7 17 | check_relations_equal(project.adapter, ["seed", "view_model"]) 18 | check_relations_equal(project.adapter, ["seed", "dep"]) 19 | check_relations_equal(project.adapter, ["seed", "table_a"]) 20 | check_relations_equal(project.adapter, ["seed", "table_b"]) 21 | check_table_does_not_exist(project.adapter, "invalid") 22 | check_table_does_not_exist(project.adapter, "skip") 23 | 24 | rm_file(project.project_root, "seeds", "seed.csv") 25 | write_file(seeds__update_csv, project.project_root, "seeds", "seed.csv") 26 | 27 | results, output = run_dbt_and_capture(["run"], expect_pass=False) 28 | assert len(results) == 7 29 | check_relations_equal(project.adapter, ["seed", "view_model"]) 30 | 
check_relations_equal(project.adapter, ["seed", "dep"]) 31 | check_relations_equal(project.adapter, ["seed", "table_a"]) 32 | check_relations_equal(project.adapter, ["seed", "table_b"]) 33 | check_table_does_not_exist(project.adapter, "invalid") 34 | check_table_does_not_exist(project.adapter, "skip") 35 | 36 | assert "PASS=5 WARN=0 ERROR=1 SKIP=1 TOTAL=7" in output 37 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_data_types.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt 3 | from dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean 4 | from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat 5 | from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt 6 | from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric 7 | from dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString 8 | from dbt.tests.adapter.utils.data_types.test_type_timestamp import ( 9 | BaseTypeTimestamp, 10 | seeds__expected_csv, 11 | ) 12 | 13 | 14 | @pytest.mark.skip(reason="SQL Server shows 'numeric' if you don't explicitly cast it to bigint") 15 | class TestTypeBigIntFabric(BaseTypeBigInt): 16 | pass 17 | 18 | 19 | class TestTypeFloatFabric(BaseTypeFloat): 20 | pass 21 | 22 | 23 | class TestTypeIntFabric(BaseTypeInt): 24 | pass 25 | 26 | 27 | class TestTypeNumericFabric(BaseTypeNumeric): 28 | pass 29 | 30 | 31 | class TestTypeStringFabric(BaseTypeString): 32 | def assert_columns_equal(self, project, expected_cols, actual_cols): 33 | # ignore the size of the varchar since we do 34 | # an optimization to not use varchar(max) all the time 35 | assert ( 36 | expected_cols[:-1] == actual_cols[:-1] 37 | ), f"Type difference detected: {expected_cols} vs. 
{actual_cols}" 38 | 39 | 40 | class TestTypeTimestampFabric(BaseTypeTimestamp): 41 | @pytest.fixture(scope="class") 42 | def seeds(self): 43 | seeds__expected_yml = """ 44 | version: 2 45 | seeds: 46 | - name: expected 47 | config: 48 | column_types: 49 | timestamp_col: "datetimeoffset" 50 | """ 51 | 52 | return { 53 | "expected.csv": seeds__expected_csv, 54 | "expected.yml": seeds__expected_yml, 55 | } 56 | 57 | 58 | class TestTypeBooleanFabric(BaseTypeBoolean): 59 | pass 60 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_debug.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | import yaml 5 | from dbt.tests.adapter.dbt_debug.test_dbt_debug import BaseDebug, BaseDebugProfileVariable 6 | from dbt.tests.util import run_dbt 7 | 8 | 9 | class TestDebugFabric(BaseDebug): 10 | def test_ok(self, project): 11 | run_dbt(["debug"]) 12 | assert "ERROR" not in self.capsys.readouterr().out 13 | 14 | def test_nopass(self, project): 15 | run_dbt(["debug", "--target", "nopass"], expect_pass=False) 16 | self.assertGotValue(re.compile(r"\s+profiles\.yml file"), "ERROR invalid") 17 | 18 | def test_wronguser(self, project): 19 | run_dbt(["debug", "--target", "wronguser"], expect_pass=False) 20 | self.assertGotValue(re.compile(r"\s+Connection test"), "ERROR") 21 | 22 | def test_empty_target(self, project): 23 | run_dbt(["debug", "--target", "none_target"], expect_pass=False) 24 | self.assertGotValue(re.compile(r"\s+output 'none_target'"), "misconfigured") 25 | 26 | 27 | class TestDebugProfileVariableFabric(BaseDebugProfileVariable): 28 | pass 29 | 30 | 31 | class TestDebugInvalidProjectFabric(BaseDebug): 32 | def test_empty_project(self, project): 33 | with open("dbt_project.yml", "w") as f: # noqa: F841 34 | pass 35 | 36 | run_dbt(["debug", "--profile", "test"], expect_pass=False) 37 | splitout = self.capsys.readouterr().out.split("\n") 38 | 
self.check_project(splitout) 39 | 40 | def test_badproject(self, project): 41 | update_project = {"invalid-key": "not a valid key so this is bad project"} 42 | 43 | with open("dbt_project.yml", "w") as f: 44 | yaml.safe_dump(update_project, f) 45 | 46 | run_dbt(["debug", "--profile", "test"], expect_pass=False) 47 | splitout = self.capsys.readouterr().out.split("\n") 48 | self.check_project(splitout) 49 | 50 | def test_not_found_project(self, project): 51 | run_dbt(["debug", "--project-dir", "nopass"], expect_pass=False) 52 | splitout = self.capsys.readouterr().out.split("\n") 53 | self.check_project(splitout, msg="ERROR not found") 54 | 55 | def test_invalid_project_outside_current_dir(self, project): 56 | # create a dbt_project.yml 57 | project_config = {"invalid-key": "not a valid key in this project"} 58 | os.makedirs("custom", exist_ok=True) 59 | with open("custom/dbt_project.yml", "w") as f: 60 | yaml.safe_dump(project_config, f, default_flow_style=True) 61 | run_dbt(["debug", "--project-dir", "custom"], expect_pass=False) 62 | splitout = self.capsys.readouterr().out.split("\n") 63 | self.check_project(splitout) 64 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_docs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from dbt.tests.adapter.basic.expected_catalog import ( 5 | base_expected_catalog, 6 | expected_references_catalog, 7 | no_stats, 8 | ) 9 | from dbt.tests.adapter.basic.test_docs_generate import ( 10 | BaseDocsGenerate, 11 | BaseDocsGenReferences, 12 | ref_models__docs_md, 13 | ref_models__ephemeral_copy_sql, 14 | ref_models__schema_yml, 15 | ref_sources__schema_yml, 16 | ) 17 | 18 | 19 | class TestDocsGenerateFabric(BaseDocsGenerate): 20 | @staticmethod 21 | @pytest.fixture(scope="class") 22 | def dbt_profile_target_update(): 23 | return {"schema_authorization": "{{ env_var('DBT_TEST_USER_1') }}"} 24 | 25 | 
@pytest.fixture(scope="class") 26 | def expected_catalog(self, project): 27 | return base_expected_catalog( 28 | project, 29 | role=os.getenv("DBT_TEST_USER_1"), 30 | id_type="int", 31 | text_type="varchar", 32 | time_type="datetime", 33 | view_type="VIEW", 34 | table_type="BASE TABLE", 35 | model_stats=no_stats(), 36 | ) 37 | 38 | 39 | class TestDocsGenReferencesFabric(BaseDocsGenReferences): 40 | @staticmethod 41 | @pytest.fixture(scope="class") 42 | def dbt_profile_target_update(): 43 | return {"schema_authorization": "{{ env_var('DBT_TEST_USER_1') }}"} 44 | 45 | @pytest.fixture(scope="class") 46 | def expected_catalog(self, project): 47 | return expected_references_catalog( 48 | project, 49 | role=os.getenv("DBT_TEST_USER_1"), 50 | id_type="int", 51 | text_type="varchar", 52 | time_type="datetime", 53 | bigint_type="int", 54 | view_type="VIEW", 55 | table_type="BASE TABLE", 56 | model_stats=no_stats(), 57 | ) 58 | 59 | @pytest.fixture(scope="class") 60 | def models(self): 61 | ref_models__ephemeral_summary_sql_no_order_by = """ 62 | {{ 63 | config( 64 | materialized = "table" 65 | ) 66 | }} 67 | 68 | select first_name, count(*) as ct from {{ref('ephemeral_copy')}} 69 | group by first_name 70 | """ 71 | 72 | ref_models__view_summary_sql_no_order_by = """ 73 | {{ 74 | config( 75 | materialized = "view" 76 | ) 77 | }} 78 | 79 | select first_name, ct from {{ref('ephemeral_summary')}} 80 | """ 81 | 82 | return { 83 | "schema.yml": ref_models__schema_yml, 84 | "sources.yml": ref_sources__schema_yml, 85 | # order by not allowed in VIEWS 86 | "view_summary.sql": ref_models__view_summary_sql_no_order_by, 87 | # order by not allowed in CTEs 88 | "ephemeral_summary.sql": ref_models__ephemeral_summary_sql_no_order_by, 89 | "ephemeral_copy.sql": ref_models__ephemeral_copy_sql, 90 | "docs.md": ref_models__docs_md, 91 | } 92 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_ephemeral.py: 
# ---------------------------------------------------------------------------
# tests/functional/adapter/test_ephemeral.py
# ---------------------------------------------------------------------------
import pytest
from dbt.tests.adapter.ephemeral.test_ephemeral import (
    BaseEphemeral,
    ephemeral_errors__base__base_copy_sql,
    ephemeral_errors__base__base_sql,
    ephemeral_errors__dependent_sql,
)
from dbt.tests.util import run_dbt


class TestEphemeralErrorHandling(BaseEphemeral):
    """Check that a compilation error in an ephemeral model chain surfaces as
    a skipped downstream node instead of crashing the run."""

    @pytest.fixture(scope="class")
    def models(self):
        # A dependent model plus a nested "base" package holding the erroring
        # ephemeral model and its copy.
        nested_base = {
            "base.sql": ephemeral_errors__base__base_sql,
            "base_copy.sql": ephemeral_errors__base__base_copy_sql,
        }
        return {
            "dependent.sql": ephemeral_errors__dependent_sql,
            "base": nested_base,
        }

    def test_ephemeral_error_handling(self, project):
        # The run is expected to fail; only the dependent node is reported.
        results = run_dbt(["run"], expect_pass=False)
        assert len(results) == 1
        first = results[0]
        assert first.status == "skipped"
        assert "Compilation Error" in first.message


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_grants.py
# ---------------------------------------------------------------------------
from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants
from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants
from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants
from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants
from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants


class TestIncrementalGrantsFabric(BaseIncrementalGrants):
    pass


class TestInvalidGrantsFabric(BaseInvalidGrants):
    """Adapt the expected invalid-grant error fragments to T-SQL wording."""

    def grantee_does_not_exist_error(self):
        return "Cannot find the user"

    def privilege_does_not_exist_error(self):
        return "Incorrect syntax near"


class TestModelGrantsFabric(BaseModelGrants):
    pass


class TestSeedGrantsFabric(BaseSeedGrants):
    pass


class TestSnapshotGrantsFabric(BaseSnapshotGrants):
    pass


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_incremental.py  (fixture overrides, part 1)
# ---------------------------------------------------------------------------
import pytest
from dbt.tests.adapter.incremental.fixtures import (
    _MODELS__A,
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS,
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE,
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,
    _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,
    _MODELS__INCREMENTAL_FAIL,
    _MODELS__INCREMENTAL_IGNORE_TARGET,
    _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS,
    _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY,
)
from dbt.tests.adapter.incremental.test_incremental_on_schema_change import (
    BaseIncrementalOnSchemaChange,
)
from dbt.tests.adapter.incremental.test_incremental_predicates import BaseIncrementalPredicates
from dbt.tests.adapter.incremental.test_incremental_unique_id import BaseIncrementalUniqueKey

# T-SQL flavoured override of the upstream fixture: uses SELECT TOP, which
# SQL Server supports, instead of LIMIT.
_MODELS__INCREMENTAL_IGNORE = """
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='ignore'
    )
}}

WITH source_data AS (SELECT * FROM {{ ref('model_a') }} )

{% if is_incremental() %}

SELECT
    id,
    field1,
    field2,
    field3,
    field4
FROM source_data
WHERE id NOT IN (SELECT id from {{ this }} )

{% else %}

SELECT TOP 3 id, field1, field2 FROM source_data

{% endif %}
"""

_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET = """
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)

{% set string_type = dbt.type_string() %}

select id
    ,cast(field1 as {{string_type}}) as field1

from source_data
"""

_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET = """
{{
    config(materialized='table')
}}

with source_data as (

    select * from {{ ref('model_a') }}

)

{% set string_type = dbt.type_string() %}

select id
    ,cast(field1 as {{string_type}}) as field1
    --,field2
    ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3
    ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4

from source_data
"""


class TestBaseIncrementalUniqueKeyFabric(BaseIncrementalUniqueKey):
    pass


class TestIncrementalOnSchemaChangeFabric(BaseIncrementalOnSchemaChange):
    """Run the upstream on_schema_change suite with T-SQL-compatible models."""

    @pytest.fixture(scope="class")
    def models(self):
        # Swap the upstream fixtures that use LIMIT for the SELECT TOP
        # overrides defined in this module; everything else is unchanged.
        return {
            "incremental_sync_remove_only.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY,
            "incremental_ignore.sql": _MODELS__INCREMENTAL_IGNORE,
            "incremental_sync_remove_only_target.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET,  # noqa: E501
            "incremental_ignore_target.sql": _MODELS__INCREMENTAL_IGNORE_TARGET,
            "incremental_fail.sql": _MODELS__INCREMENTAL_FAIL,
            "incremental_sync_all_columns.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS,
            "incremental_append_new_columns_remove_one.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE,  # noqa: E501
            "model_a.sql": _MODELS__A,
            "incremental_append_new_columns_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET,  # noqa: E501
            "incremental_append_new_columns.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS,
            "incremental_sync_all_columns_target.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET,  # noqa: E501
            "incremental_append_new_columns_remove_one_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET,  # noqa: E501
        }


class TestIncrementalPredicatesDeleteInsertFabric(BaseIncrementalPredicates):
    pass


class TestPredicatesDeleteInsertFabric(BaseIncrementalPredicates):
    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"models": {"+predicates": ["id != 2"], "+incremental_strategy": "delete+insert"}}


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_new_project.py
# ---------------------------------------------------------------------------
import pytest
from dbt.tests.util import run_dbt

schema_yml = """

version: 2

models:
  - name: my_first_dbt_model
    description: "A starter dbt model"
    columns:
      - name: id
        description: "The primary key for this table"
        tests:
          - unique

  - name: my_second_dbt_model
    description: "A starter dbt model"
    columns:
      - name: id
        description: "The primary key for this table"
        tests:
          - unique
          - not_null
"""

my_first_dbt_model_sql = """
/*
    Welcome to your first dbt model!
    Did you know that you can also configure models directly within SQL files?
    This will override configurations stated in dbt_project.yml

    Try changing "table" to "view" below
*/

{{ config(materialized='table') }}

with source_data as (

    select 1 as id
    union all
    select null as id

)

select *
from source_data

/*
    Uncomment the line below to remove records with null `id` values
*/

-- where id is not null
"""

my_second_dbt_model_sql = """
-- Use the `ref` function to select from other models

select *
from {{ ref('my_first_dbt_model') }}
where id = 1
"""


class TestNewProjectFabric:
    """Smoke-test the dbt starter project against the Fabric adapter."""

    @pytest.fixture(scope="class")
    def project_config_update(self):
        return {"name": "my_new_project"}

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_first_dbt_model.sql": my_first_dbt_model_sql,
            "my_second_dbt_model.sql": my_second_dbt_model_sql,
            "schema.yml": schema_yml,
        }

    def test_new_project(self, project):
        # 2 models + 3 schema tests = 5 nodes built.
        results = run_dbt(["build"])
        assert len(results) == 5

    def test_run_same_model_multiple_times(self, project):
        # A full run materializes both models.
        results = run_dbt(["run"])
        assert len(results) == 2

        # BUG FIX: the loop previously re-asserted the stale `results` from
        # the full run above (always 2), so the ten selected re-runs were
        # never actually verified. Capture each selected run's results and
        # assert that exactly the one selected model ran.
        for _ in range(10):
            results = run_dbt(["run", "-s", "my_second_dbt_model"])
            assert len(results) == 1


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_provision_users.py
# ---------------------------------------------------------------------------
import pytest
from dbt.tests.util import run_dbt

my_model_sql = """
select 1 as fun
"""

cleanup_existing_sql = """
{% macro cleanup_existing() %}
    {%- call statement('drop_existing', fetch_result=False) -%}

        if exists(
            select *
            from sys.database_principals
            where name = '{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}')
        drop user [{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}]

        if exists(
            select *
            from sys.database_principals
            where name = '{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}')
        drop user [{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}]

    {%- endcall -%}
{% endmacro %}
"""

model_schema_single_user_yml = """
version: 2
models:
  - name: my_model
    config:
      auto_provision_aad_principals: true
      grants:
        select: ["{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}"]
"""

model_schema_multiple_users_yml = """
version: 2
models:
  - name: my_model
    config:
      auto_provision_aad_principals: true
      grants:
        select:
          - "{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}"
          - "{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}"
"""


class BaseTestProvisionAzureSQL:
    """Shared machinery: drop any pre-existing AAD users, then run the model
    so auto-provisioning of the grantee principals is exercised."""

    @pytest.fixture(scope="class")
    def macros(self):
        return {
            "cleanup_existing.sql": cleanup_existing_sql,
        }

    def test_auto_provision(self, project):
        # Clean up first so provisioning is actually triggered by the run.
        run_dbt(["run-operation", "cleanup_existing"])
        run_dbt(["run"])


@pytest.mark.flaky(max_runs=5, min_passes=1)
class TestProvisionSingleUserAzureSQL(BaseTestProvisionAzureSQL):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_sql,
            "schema.yml": model_schema_single_user_yml,
        }


@pytest.mark.flaky(max_runs=5, min_passes=1)
class TestProvisionMultipleUsersAzureSQL(BaseTestProvisionAzureSQL):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "my_model.sql": my_model_sql,
            "schema.yml": model_schema_multiple_users_yml,
        }


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_query_comment.py
# ---------------------------------------------------------------------------
from dbt.tests.adapter.query_comment.test_query_comment import (
    BaseEmptyQueryComments,
    BaseMacroArgsQueryComments,
    BaseMacroInvalidQueryComments,
    BaseMacroQueryComments,
    BaseNullQueryComments,
    BaseQueryComments,
)


class TestQueryCommentsFabric(BaseQueryComments):
    pass


class TestMacroQueryCommentsFabric(BaseMacroQueryComments):
    pass


class TestMacroArgsQueryCommentsFabric(BaseMacroArgsQueryComments):
    pass


class TestMacroInvalidQueryCommentsFabric(BaseMacroInvalidQueryComments):
    pass


class TestNullQueryCommentsFabric(BaseNullQueryComments):
    pass


class TestEmptyQueryCommentsFabric(BaseEmptyQueryComments):
    pass


# ---------------------------------------------------------------------------
# tests/functional/adapter/test_schema.py
# ---------------------------------------------------------------------------
import os

import pytest
from dbt.tests.util import run_dbt


class TestSchemaCreation:
    """Verify that schemas (default and custom) are created with the
    configured authorization owner."""

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "dummy.sql": """
{{ config(schema='with_custom_auth') }}
select 1 as id
""",
        }

    @staticmethod
    @pytest.fixture(scope="class")
    def dbt_profile_target_update():
        return {"schema_authorization": "{{ env_var('DBT_TEST_USER_1') }}"}

    @staticmethod
    def _verify_schema_owner(schema_name, owner, project):
        # Query INFORMATION_SCHEMA to check who owns the schema.
        get_schema_owner = f"""
select SCHEMA_OWNER from INFORMATION_SCHEMA.SCHEMATA where SCHEMA_NAME = '{schema_name}'
"""
        result = project.run_sql(get_schema_owner, fetch="one")[0]
        assert result == owner

    def test_schema_creation(self, project, unique_schema):
        res = run_dbt(["run"])
        assert len(res) == 1

        self._verify_schema_owner(unique_schema, os.getenv("DBT_TEST_USER_1"), project)
        self._verify_schema_owner(
            unique_schema + "_with_custom_auth", os.getenv("DBT_TEST_USER_1"), project
        )
38 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_seed.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from dbt.tests.adapter.simple_seed.seeds import seeds__expected_sql 5 | from dbt.tests.adapter.simple_seed.test_seed import SeedConfigBase 6 | from dbt.tests.adapter.simple_seed.test_seed import TestBasicSeedTests as BaseBasicSeedTests 7 | from dbt.tests.adapter.simple_seed.test_seed import ( 8 | TestSeedConfigFullRefreshOff as BaseSeedConfigFullRefreshOff, 9 | ) 10 | from dbt.tests.adapter.simple_seed.test_seed import ( 11 | TestSeedConfigFullRefreshOn as BaseSeedConfigFullRefreshOn, 12 | ) 13 | from dbt.tests.adapter.simple_seed.test_seed import TestSeedCustomSchema as BaseSeedCustomSchema 14 | from dbt.tests.adapter.simple_seed.test_seed import TestSeedParsing as BaseSeedParsing 15 | from dbt.tests.adapter.simple_seed.test_seed import ( 16 | TestSeedSpecificFormats as BaseSeedSpecificFormats, 17 | ) 18 | from dbt.tests.adapter.simple_seed.test_seed import ( 19 | TestSimpleSeedEnabledViaConfig as BaseSimpleSeedEnabledViaConfig, 20 | ) 21 | from dbt.tests.adapter.simple_seed.test_seed_type_override import ( 22 | BaseSimpleSeedColumnOverride, 23 | seeds__disabled_in_config_csv, 24 | seeds__enabled_in_config_csv, 25 | ) 26 | from dbt.tests.util import get_connection, run_dbt 27 | 28 | from dbt.adapters.fabric import FabricAdapter 29 | 30 | fixed_setup_sql = seeds__expected_sql.replace("TIMESTAMP WITHOUT TIME ZONE", "DATETIME").replace( 31 | "TEXT", "VARCHAR(255)" 32 | ) 33 | 34 | seeds__tricky_csv = """ 35 | seed_id,seed_id_str,a_bool,looks_like_a_bool,a_date,looks_like_a_date,relative,weekday 36 | 1,1,1,1,2019-01-01 12:32:30,2019-01-01 12:32:30,tomorrow,Saturday 37 | 2,2,1,1,2019-01-01 12:32:31,2019-01-01 12:32:31,today,Sunday 38 | 3,3,1,1,2019-01-01 12:32:32,2019-01-01 12:32:32,yesterday,Monday 39 | 4,4,0,0,2019-01-01 
01:32:32,2019-01-01 01:32:32,tomorrow,Saturday 40 | 5,5,0,0,2019-01-01 01:32:32,2019-01-01 01:32:32,today,Sunday 41 | 6,6,0,0,2019-01-01 01:32:32,2019-01-01 01:32:32,yesterday,Monday 42 | """.lstrip() 43 | 44 | macros__schema_test = """ 45 | {% test column_type(model, column_name, type) %} 46 | 47 | {% set cols = adapter.get_columns_in_relation(model) %} 48 | 49 | {% set col_types = {} %} 50 | {% for col in cols %} 51 | {% do col_types.update({col.name: col.data_type}) %} 52 | {% endfor %} 53 | 54 | {% set col_type = col_types.get(column_name) %} 55 | {% set col_type = 'text' if col_type and 'varchar' in col_type else col_type %} 56 | 57 | {% set validation_message = 'Got a column type of ' ~ col_type ~ ', expected ' ~ type %} 58 | 59 | {% set val = 0 if col_type == type else 1 %} 60 | {% if val == 1 and execute %} 61 | {{ log(validation_message, info=True) }} 62 | {% endif %} 63 | 64 | select '{{ validation_message }}' as validation_error 65 | from (select 1 as empty) as nothing 66 | where {{ val }} = 1 67 | 68 | {% endtest %} 69 | 70 | """ 71 | 72 | properties__schema_yml = """ 73 | version: 2 74 | seeds: 75 | - name: seed_enabled 76 | columns: 77 | - name: birthday 78 | tests: 79 | - column_type: 80 | type: date 81 | - name: seed_id 82 | tests: 83 | - column_type: 84 | type: text 85 | 86 | - name: seed_tricky 87 | columns: 88 | - name: seed_id 89 | tests: 90 | - column_type: 91 | type: int 92 | - name: seed_id_str 93 | tests: 94 | - column_type: 95 | type: text 96 | - name: a_bool 97 | tests: 98 | - column_type: 99 | type: int 100 | - name: looks_like_a_bool 101 | tests: 102 | - column_type: 103 | type: text 104 | - name: a_date 105 | tests: 106 | - column_type: 107 | type: datetime 108 | - name: looks_like_a_date 109 | tests: 110 | - column_type: 111 | type: text 112 | - name: relative 113 | tests: 114 | - column_type: 115 | type: text 116 | - name: weekday 117 | tests: 118 | - column_type: 119 | type: text 120 | """ 121 | 122 | 123 | class 
TestSimpleSeedColumnOverrideFabric(BaseSimpleSeedColumnOverride): 124 | @pytest.fixture(scope="class") 125 | def seeds(self): 126 | return { 127 | "seed_enabled.csv": seeds__enabled_in_config_csv, 128 | "seed_disabled.csv": seeds__disabled_in_config_csv, 129 | "seed_tricky.csv": seeds__tricky_csv, 130 | } 131 | 132 | @pytest.fixture(scope="class") 133 | def macros(self): 134 | return {"schema_test.sql": macros__schema_test} 135 | 136 | @pytest.fixture(scope="class") 137 | def models(self): 138 | return { 139 | "schema.yml": properties__schema_yml, 140 | } 141 | 142 | 143 | class TestBasicSeedTestsFabric(BaseBasicSeedTests): 144 | @pytest.fixture(scope="class", autouse=True) 145 | def setUp(self, project): 146 | project.run_sql(fixed_setup_sql) 147 | 148 | 149 | class TestSeedConfigFullRefreshOnFabric(BaseSeedConfigFullRefreshOn): 150 | @pytest.fixture(scope="class", autouse=True) 151 | def setUp(self, project): 152 | project.run_sql(fixed_setup_sql) 153 | 154 | 155 | class TestSeedConfigFullRefreshOffFabric(BaseSeedConfigFullRefreshOff): 156 | @pytest.fixture(scope="class", autouse=True) 157 | def setUp(self, project): 158 | project.run_sql(fixed_setup_sql) 159 | 160 | 161 | class TestSeedCustomSchemaFabric(BaseSeedCustomSchema): 162 | @pytest.fixture(scope="class", autouse=True) 163 | def setUp(self, project): 164 | project.run_sql(fixed_setup_sql) 165 | 166 | 167 | class TestSimpleSeedEnabledViaConfigFabric(BaseSimpleSeedEnabledViaConfig): 168 | @pytest.fixture(scope="function") 169 | def clear_test_schema(self, project): 170 | yield 171 | adapter = project.adapter 172 | assert isinstance(project.adapter, FabricAdapter) 173 | with get_connection(project.adapter): 174 | rel = adapter.Relation.create(database=project.database, schema=project.test_schema) 175 | adapter.drop_schema(rel) 176 | 177 | 178 | class TestSeedParsingFabric(BaseSeedParsing): 179 | @pytest.fixture(scope="class", autouse=True) 180 | def setUp(self, project): 181 | 
project.run_sql(fixed_setup_sql) 182 | 183 | 184 | class TestSeedSpecificFormatsFabric(BaseSeedSpecificFormats): 185 | pass 186 | 187 | 188 | class TestSeedBatchSizeMaxFabric(SeedConfigBase): 189 | @pytest.fixture(scope="class") 190 | def seeds(self, test_data_dir): 191 | return { 192 | "five_columns.csv": """seed_id,first_name,email,ip_address,birthday 193 | 1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 194 | 2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 195 | 3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57""" 196 | } 197 | 198 | def test_max_batch_size(self, project, logs_dir): 199 | run_dbt(["seed"]) 200 | with open(os.path.join(logs_dir, "dbt.log"), "r") as fp: 201 | logs = "".join(fp.readlines()) 202 | 203 | assert "Inserting batches of 400 records" in logs 204 | 205 | 206 | class TestSeedBatchSizeCustomFabric(SeedConfigBase): 207 | @pytest.fixture(scope="class") 208 | def seeds(self, test_data_dir): 209 | return { 210 | "six_columns.csv": """seed_id,first_name,last_name,email,ip_address,birthday 211 | 1,Larry,King,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 212 | 2,Larry,Perkins,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 213 | 3,Anna,Montgomery,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57""" 214 | } 215 | 216 | def test_custom_batch_size(self, project, logs_dir): 217 | run_dbt(["seed"]) 218 | with open(os.path.join(logs_dir, "dbt.log"), "r") as fp: 219 | logs = "".join(fp.readlines()) 220 | 221 | assert "Inserting batches of 350 records" in logs 222 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_sources.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.basic.files import config_materialized_table, config_materialized_view 3 | from dbt.tests.util import run_dbt 4 | 5 | source_regular = """ 6 | version: 2 7 
| sources: 8 | - name: regular 9 | schema: INFORMATION_SCHEMA 10 | tables: 11 | - name: VIEWS 12 | columns: 13 | - name: TABLE_NAME 14 | tests: 15 | - not_null 16 | """ 17 | 18 | source_space_in_name = """ 19 | version: 2 20 | sources: 21 | - name: 'space in name' 22 | schema: INFORMATION_SCHEMA 23 | tables: 24 | - name: VIEWS 25 | columns: 26 | - name: TABLE_NAME 27 | tests: 28 | - not_null 29 | """ 30 | 31 | select_from_source_regular = """ 32 | select * from {{ source("regular", "VIEWS") }} with (nolock) 33 | """ 34 | 35 | select_from_source_space_in_name = """ 36 | select * from {{ source("space in name", "VIEWS") }} with (nolock) 37 | """ 38 | 39 | 40 | class TestSourcesFabric: 41 | @pytest.fixture(scope="class") 42 | def models(self): 43 | return { 44 | "source_regular.yml": source_regular, 45 | "source_space_in_name.yml": source_space_in_name, 46 | "v_select_from_source_regular.sql": config_materialized_view 47 | + select_from_source_regular, 48 | "v_select_from_source_space_in_name.sql": config_materialized_view 49 | + select_from_source_space_in_name, 50 | "t_select_from_source_regular.sql": config_materialized_table 51 | + select_from_source_regular, 52 | "t_select_from_source_space_in_name.sql": config_materialized_table 53 | + select_from_source_space_in_name, 54 | } 55 | 56 | def test_dbt_run(self, project): 57 | run_dbt(["compile"]) 58 | 59 | ls = run_dbt(["list"]) 60 | assert len(ls) == 8 61 | ls_sources = [src for src in ls if src.startswith("source:")] 62 | assert len(ls_sources) == 2 63 | 64 | run_dbt(["run"]) 65 | run_dbt(["test"]) 66 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_timestamps.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps 3 | 4 | 5 | class TestCurrentTimestampFabric(BaseCurrentTimestamps): 6 | @pytest.fixture(scope="class") 7 | def 
models(self): 8 | return { 9 | "get_current_timestamp.sql": 'select {{ current_timestamp() }} as "current_timestamp"' 10 | } 11 | 12 | @pytest.fixture(scope="class") 13 | def expected_schema(self): 14 | return {"current_timestamp": "datetime2"} 15 | 16 | @pytest.fixture(scope="class") 17 | def expected_sql(self): 18 | return '''select SYSDATETIME() as "current_timestamp"''' 19 | -------------------------------------------------------------------------------- /tests/functional/adapter/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.utils.fixture_cast_bool_to_text import models__test_cast_bool_to_text_yml 3 | from dbt.tests.adapter.utils.fixture_listagg import ( 4 | models__test_listagg_yml, 5 | seeds__data_listagg_csv, 6 | ) 7 | from dbt.tests.adapter.utils.test_any_value import BaseAnyValue 8 | from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend 9 | from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat 10 | from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct 11 | from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr 12 | from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText 13 | from dbt.tests.adapter.utils.test_concat import BaseConcat 14 | from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampNaive 15 | from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc 16 | from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd 17 | from dbt.tests.adapter.utils.test_datediff import BaseDateDiff 18 | from dbt.tests.adapter.utils.test_escape_single_quotes import BaseEscapeSingleQuotesQuote 19 | from dbt.tests.adapter.utils.test_except import BaseExcept 20 | from dbt.tests.adapter.utils.test_hash import BaseHash 21 | from dbt.tests.adapter.utils.test_intersect import BaseIntersect 22 | from dbt.tests.adapter.utils.test_last_day import BaseLastDay 23 | from 
dbt.tests.adapter.utils.test_length import BaseLength 24 | from dbt.tests.adapter.utils.test_listagg import BaseListagg 25 | from dbt.tests.adapter.utils.test_position import BasePosition 26 | from dbt.tests.adapter.utils.test_replace import BaseReplace 27 | from dbt.tests.adapter.utils.test_right import BaseRight 28 | from dbt.tests.adapter.utils.test_safe_cast import BaseSafeCast 29 | from dbt.tests.adapter.utils.test_split_part import BaseSplitPart 30 | from dbt.tests.adapter.utils.test_string_literal import BaseStringLiteral 31 | 32 | 33 | class BaseFixedMacro: 34 | @pytest.fixture(scope="class") 35 | def macros(self): 36 | return { 37 | "test_assert_equal.sql": """ 38 | {% test assert_equal(model, actual, expected) %} 39 | select * from {{ model }} 40 | where {{ actual }} != {{ expected }} 41 | or ({{ actual }} is null and {{ expected }} is not null) 42 | or ({{ expected }} is null and {{ actual }} is not null) 43 | {% endtest %} 44 | """ 45 | } 46 | 47 | 48 | class TestAnyValueFabric(BaseFixedMacro, BaseAnyValue): 49 | pass 50 | 51 | 52 | @pytest.mark.skip("bool_or not supported in this adapter") 53 | class TestBoolOrFabric(BaseFixedMacro, BaseBoolOr): 54 | pass 55 | 56 | 57 | class TestCastBoolToTextFabric(BaseFixedMacro, BaseCastBoolToText): 58 | @pytest.fixture(scope="class") 59 | def models(self): 60 | models__test_cast_bool_to_text_sql = """ 61 | with data as ( 62 | 63 | select 0 as input, 'false' as expected union all 64 | select 1 as input, 'true' as expected union all 65 | select null as input, null as expected 66 | 67 | ) 68 | 69 | select 70 | 71 | {{ cast_bool_to_text("input") }} as actual, 72 | expected 73 | 74 | from data 75 | """ 76 | 77 | return { 78 | "test_cast_bool_to_text.yml": models__test_cast_bool_to_text_yml, 79 | "test_cast_bool_to_text.sql": self.interpolate_macro_namespace( 80 | models__test_cast_bool_to_text_sql, "cast_bool_to_text" 81 | ), 82 | } 83 | 84 | 85 | class TestConcatFabric(BaseFixedMacro, BaseConcat): 86 | 
@pytest.fixture(scope="class") 87 | def seeds(self): 88 | return { 89 | "data_concat.csv": """input_1,input_2,output 90 | a,b,ab 91 | a,,a 92 | ,b,b 93 | """ 94 | } 95 | 96 | 97 | class TestDateTruncFabric(BaseFixedMacro, BaseDateTrunc): 98 | pass 99 | 100 | 101 | seeds__data_hash_csv = """input_1,output 102 | ab,187ef4436122d1cc2f40dc2b92f0eba0 103 | a,0cc175b9c0f1b6a831c399e269772661 104 | 1,c4ca4238a0b923820dcc509a6f75849b 105 | ,d41d8cd98f00b204e9800998ecf8427e""" 106 | 107 | 108 | class TestHashFabric(BaseFixedMacro, BaseHash): 109 | @pytest.fixture(scope="class") 110 | def seeds(self): 111 | return {"data_hash.csv": seeds__data_hash_csv} 112 | 113 | 114 | class TestStringLiteralFabric(BaseFixedMacro, BaseStringLiteral): 115 | pass 116 | 117 | 118 | class TestSplitPartFabric(BaseFixedMacro, BaseSplitPart): 119 | pass 120 | 121 | 122 | class TestDateDiffFabric(BaseFixedMacro, BaseDateDiff): 123 | pass 124 | 125 | 126 | class TestEscapeSingleQuotesFabric(BaseFixedMacro, BaseEscapeSingleQuotesQuote): 127 | pass 128 | 129 | 130 | class TestIntersectFabric(BaseFixedMacro, BaseIntersect): 131 | pass 132 | 133 | 134 | class TestLastDayFabric(BaseFixedMacro, BaseLastDay): 135 | pass 136 | 137 | 138 | class TestLengthFabric(BaseFixedMacro, BaseLength): 139 | pass 140 | 141 | 142 | class TestListaggFabric(BaseFixedMacro, BaseListagg): 143 | # Only supported in SQL Server 2017 and later or cloud versions 144 | # DISTINCT not supported 145 | # limit not supported 146 | @pytest.fixture(scope="class") 147 | def seeds(self): 148 | seeds__data_listagg_output_csv = """group_col,expected,version 149 | 1,"a_|_b_|_c",bottom_ordered 150 | 2,"1_|_a_|_p",bottom_ordered 151 | 3,"g_|_g_|_g",bottom_ordered 152 | 3,"g, g, g",comma_whitespace_unordered 153 | 3,"g,g,g",no_params 154 | """ 155 | 156 | return { 157 | "data_listagg.csv": seeds__data_listagg_csv, 158 | "data_listagg_output.csv": seeds__data_listagg_output_csv, 159 | } 160 | 161 | @pytest.fixture(scope="class") 162 | def 
models(self): 163 | models__test_listagg_sql = """ 164 | with data as ( 165 | 166 | select * from {{ ref('data_listagg') }} 167 | 168 | ), 169 | 170 | data_output as ( 171 | 172 | select * from {{ ref('data_listagg_output') }} 173 | 174 | ), 175 | 176 | calculate as ( 177 | 178 | select 179 | group_col, 180 | {{ listagg('string_text', "'_|_'", "order by order_col") }} as actual, 181 | 'bottom_ordered' as version 182 | from data 183 | group by group_col 184 | 185 | union all 186 | 187 | select 188 | group_col, 189 | {{ listagg('string_text', "', '") }} as actual, 190 | 'comma_whitespace_unordered' as version 191 | from data 192 | where group_col = 3 193 | group by group_col 194 | 195 | union all 196 | 197 | select 198 | group_col, 199 | {{ listagg('string_text') }} as actual, 200 | 'no_params' as version 201 | from data 202 | where group_col = 3 203 | group by group_col 204 | 205 | ) 206 | 207 | select 208 | calculate.actual, 209 | data_output.expected 210 | from calculate 211 | left join data_output 212 | on calculate.group_col = data_output.group_col 213 | and calculate.version = data_output.version 214 | """ 215 | 216 | return { 217 | "test_listagg.yml": models__test_listagg_yml, 218 | "test_listagg.sql": self.interpolate_macro_namespace( 219 | models__test_listagg_sql, "listagg" 220 | ), 221 | } 222 | 223 | 224 | class TestRightFabric(BaseFixedMacro, BaseRight): 225 | pass 226 | 227 | 228 | class TestSafeCastFabric(BaseFixedMacro, BaseSafeCast): 229 | pass 230 | 231 | 232 | class TestDateAddFabric(BaseFixedMacro, BaseDateAdd): 233 | @pytest.fixture(scope="class") 234 | def project_config_update(self): 235 | return { 236 | "name": "test", 237 | "seeds": { 238 | "test": { 239 | "data_dateadd": { 240 | "+column_types": { 241 | "from_time": "datetimeoffset", 242 | "result": "datetimeoffset", 243 | }, 244 | }, 245 | }, 246 | }, 247 | } 248 | 249 | 250 | class TestExceptFabric(BaseFixedMacro, BaseExcept): 251 | pass 252 | 253 | 254 | class 
TestPositionFabric(BaseFixedMacro, BasePosition): 255 | pass 256 | 257 | 258 | class TestReplaceFabric(BaseFixedMacro, BaseReplace): 259 | pass 260 | 261 | 262 | class TestCurrentTimestampFabric(BaseCurrentTimestampNaive): 263 | pass 264 | 265 | 266 | @pytest.mark.skip(reason="arrays not supported") 267 | class TestArrayAppendFabric(BaseFixedMacro, BaseArrayAppend): 268 | pass 269 | 270 | 271 | @pytest.mark.skip(reason="arrays not supporteTd") 272 | class TestArrayConcatFabric(BaseFixedMacro, BaseArrayConcat): 273 | pass 274 | 275 | 276 | @pytest.mark.skip(reason="arrays not supported") 277 | class TestArrayConstructFabric(BaseFixedMacro, BaseArrayConstruct): 278 | pass 279 | --------------------------------------------------------------------------------