├── .devcontainer ├── devcontainer.json ├── install_pyenv.sh ├── setup_env.sh └── setup_odbc.sh ├── .git-blame-ignore-revs ├── .gitattributes ├── .github ├── dependabot.yml └── workflows │ ├── integration-tests-sqlserver.yml │ ├── publish-docker.yml │ ├── release-version.yml │ └── unit-tests.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .vscode └── settings.json ├── CHANGELOG.md ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── dbt ├── adapters │ └── sqlserver │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── relation_configs │ │ ├── __init__.py │ │ └── policies.py │ │ ├── sqlserver_adapter.py │ │ ├── sqlserver_column.py │ │ ├── sqlserver_configs.py │ │ ├── sqlserver_connections.py │ │ ├── sqlserver_credentials.py │ │ └── sqlserver_relation.py └── include │ └── sqlserver │ ├── __init__.py │ ├── dbt_project.yml │ ├── macros │ ├── .gitkeep │ ├── adapter │ │ ├── catalog.sql │ │ ├── columns.sql │ │ ├── indexes.sql │ │ ├── metadata.sql │ │ ├── relation.sql │ │ ├── schemas.sql │ │ └── validate_sql.sql │ ├── materializations │ │ ├── models │ │ │ ├── incremental │ │ │ │ ├── incremental.sql │ │ │ │ └── merge.sql │ │ │ ├── table │ │ │ │ └── table.sql │ │ │ └── view │ │ │ │ └── view.sql │ │ ├── snapshot │ │ │ ├── helpers.sql │ │ │ ├── snapshot.sql │ │ │ └── snapshot_merge.sql │ │ ├── tests.sql │ │ └── unit_tests.sql │ ├── readme.md │ ├── relations │ │ ├── seeds │ │ │ └── helpers.sql │ │ ├── table │ │ │ ├── clone.sql │ │ │ └── create.sql │ │ └── views │ │ │ ├── .gitkeep │ │ │ └── create.sql │ └── utils │ │ ├── .gitkeep │ │ └── split_part.sql │ └── profile_template.yml ├── dev_requirements.txt ├── devops ├── CI.Dockerfile ├── scripts │ ├── entrypoint.sh │ ├── init.sql │ ├── init_db.sh │ └── wakeup_azure.py └── server.Dockerfile ├── docker-compose.yml ├── pytest.ini ├── setup.py ├── test.env.sample └── tests ├── __init__.py ├── conftest.py ├── functional └── adapter │ ├── dbt │ ├── test_aliases.py │ ├── test_basic.py │ ├── test_caching.py │ ├── test_catalog.py │ ├── test_column_types.py │ ├── test_concurrency.py │ ├── test_constraints.py │ ├── test_dbt_clone.py │ ├── test_dbt_debug.py │ ├── test_empty.py │ ├── test_ephemeral.py │ ├── test_grants.py │ ├── test_hooks.py │ ├── test_incremental.py │ ├── test_incremental_microbatch_datetime.py │ ├── test_materialized_views.py │ ├── test_persist_docs.py │ ├── test_python_model.py │ ├── test_query_comment.py │ ├── test_relations.py │ ├── test_simple_seed.py │ ├── test_simple_snapshot.py │ ├── test_unit_tests.py │ └── test_utils.py │ └── mssql │ ├── test_cross_db.py │ ├── test_db_non_standard.py │ ├── test_index.py │ ├── test_materialize_change.py │ ├── test_mssql_seed.py │ ├── test_provision_users.py │ ├── test_temp_relation_cleanup.py │ ├── test_test_with.py │ └── test_xml_index.py └── unit └── adapters └── mssql └── test_sqlserver_connection_manager.py /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Python 3", 3 | "image": "mcr.microsoft.com/devcontainers/python:1-3.10-bookworm", 4 | "features": { 5 | "ghcr.io/devcontainers/features/docker-in-docker:2.12.0": {} 6 | }, 7 | "forwardPorts": [1433], 8 | "postStartCommand": "/bin/bash ./.devcontainer/setup_odbc.sh & /bin/bash ./.devcontainer/setup_env.sh", 9 | "containerEnv": { 10 | "SQLSERVER_TEST_DRIVER": "ODBC Driver 18 for SQL Server", 11 | "SQLSERVER_TEST_HOST": "127.0.0.1", 12 | "SQLSERVER_TEST_USER": "SA", 13 | "SQLSERVER_TEST_PASS": "L0calTesting!", 14 | "SQLSERVER_TEST_PORT": "1433", 
15 | "SQLSERVER_TEST_DBNAME": "TestDB", 16 | "SQLSERVER_TEST_ENCRYPT": "true", 17 | "SQLSERVER_TEST_TRUST_CERT": "true", 18 | "DBT_TEST_USER_1": "DBT_TEST_USER_1", 19 | "DBT_TEST_USER_2": "DBT_TEST_USER_2", 20 | "DBT_TEST_USER_3": "DBT_TEST_USER_3" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /.devcontainer/install_pyenv.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | curl https://pyenv.run | bash 3 | 4 | echo 'export PYENV_ROOT="$HOME/.pyenv" 5 | [[ -d $PYENV_ROOT/bin ]] && export PATH="$PYENV_ROOT/bin:$PATH" 6 | eval "$(pyenv init -)"' >> ~/.bashrc 7 | -------------------------------------------------------------------------------- /.devcontainer/setup_env.sh: -------------------------------------------------------------------------------- 1 | cp test.env.sample test.env 2 | 3 | docker compose build 4 | docker compose up -d 5 | 6 | pip install -r dev_requirements.txt 7 | -------------------------------------------------------------------------------- /.devcontainer/setup_odbc.sh: -------------------------------------------------------------------------------- 1 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo tee /etc/apt/trusted.gpg.d/microsoft.asc 2 | 3 | #Download appropriate package for the OS version 4 | #Choose only ONE of the following, corresponding to your OS version 5 | 6 | #Debian 12 7 | curl https://packages.microsoft.com/config/debian/12/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list 8 | 9 | sudo apt-get update 10 | sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 11 | # optional: for bcp and sqlcmd 12 | sudo ACCEPT_EULA=Y apt-get install -y mssql-tools18 13 | echo 'export PATH="$PATH:/opt/mssql-tools18/bin"' >> ~/.bashrc 14 | source ~/.bashrc 15 | # optional: for unixODBC development headers 16 | sudo apt-get install -y unixodbc-dev 17 | # optional: kerberos library for debian-slim distributions 18 | sudo apt-get install -y libgssapi-krb5-2 19 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # .git-blame-ignore-revs 2 | # pre-commit-setup 3 | 4eff3237f2bf9d3d206c9234353fb07e70ac6013 4 | # Updated README.md and blackified the code. 
5 | 29bab50f0e7d9e09b9110500f6e41e1255d3f3f6 6 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.txt text eol=lf 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2 3 | updates: 4 | - package-ecosystem: pip 5 | directory: "/" 6 | schedule: 7 | interval: daily 8 | - package-ecosystem: github-actions 9 | directory: "/" 10 | schedule: 11 | interval: daily 12 | - package-ecosystem: docker 13 | directory: "/" 14 | schedule: 15 | interval: daily 16 | -------------------------------------------------------------------------------- /.github/workflows/integration-tests-sqlserver.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Integration tests on SQL Server 3 | on: # yamllint disable-line rule:truthy 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - master 8 | - v* 9 | pull_request: 10 | branches: 11 | - master 12 | - v* 13 | schedule: 14 | - cron: '0 22 * * 0' 15 | 16 | jobs: 17 | integration-tests-sql-server: 18 | name: Regular 19 | strategy: 20 | matrix: 21 | python_version: ["3.9", "3.10", "3.11", "3.12"] 22 | msodbc_version: ["17", "18"] 23 | sqlserver_version: ["2017", "2019", "2022"] 24 | collation: ["SQL_Latin1_General_CP1_CS_AS", "SQL_Latin1_General_CP1_CI_AS"] 25 | runs-on: ubuntu-latest 26 | container: 27 | image: ghcr.io/${{ github.repository }}:CI-${{ matrix.python_version }}-msodbc${{ matrix.msodbc_version }} 28 | services: 29 | sqlserver: 30 | image: ghcr.io/${{ github.repository }}:server-${{ matrix.sqlserver_version }} 31 | env: 32 | ACCEPT_EULA: 'Y' 33 | SA_PASSWORD: 5atyaNadella 34 | DBT_TEST_USER_1: DBT_TEST_USER_1 35 | DBT_TEST_USER_2: DBT_TEST_USER_2 36 | DBT_TEST_USER_3: DBT_TEST_USER_3 37 | COLLATION: ${{ matrix.collation }} 38 | steps: 39 | - uses: actions/checkout@v4 40 | 41 | - name: Install dependencies 42 | run: pip install -r dev_requirements.txt 43 | 44 | - name: Run functional tests 45 | run: pytest -ra -v tests/functional --profile "ci_sql_server" 46 | env: 47 | DBT_TEST_USER_1: DBT_TEST_USER_1 48 | DBT_TEST_USER_2: DBT_TEST_USER_2 49 | DBT_TEST_USER_3: DBT_TEST_USER_3 50 | SQLSERVER_TEST_DRIVER: 'ODBC Driver ${{ matrix.msodbc_version }} for SQL Server' 51 | -------------------------------------------------------------------------------- /.github/workflows/publish-docker.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Publish Docker images for CI/CD 3 | on: # yamllint disable-line rule:truthy 4 | push: 5 | paths: 6 | - 'devops/**' 7 | - '.github/workflows/publish-docker.yml' 8 | branches: 9 | - 'master' 10 | 11 | jobs: 12 | publish-docker-client: 13 | strategy: 14 | matrix: 15 | python_version: ["3.9", "3.10", "3.11", "3.12"] 16 | docker_target: ["msodbc17", "msodbc18"] 17 | runs-on: ubuntu-latest 18 | permissions: 19 | contents: read 20 | packages: write 21 | steps: 22 | - name: Checkout 23 | uses: actions/checkout@v4 24 | 25 | - name: Log in to the Container registry 26 | uses: docker/login-action@v3.3.0 27 | with: 28 | registry: ghcr.io 29 | username: ${{ github.actor }} 30 | password: ${{ secrets.GITHUB_TOKEN }} 31 | 32 | - name: Build and push Docker image 33 | uses: docker/build-push-action@v4.0.0 34 | with: 35 | context: devops 36 | build-args: 
PYTHON_VERSION=${{ matrix.python_version }} 37 | file: devops/CI.Dockerfile 38 | push: true 39 | platforms: linux/amd64 40 | target: ${{ matrix.docker_target }} 41 | tags: ghcr.io/${{ github.repository }}:CI-${{ matrix.python_version }}-${{ matrix.docker_target }} 42 | 43 | publish-docker-server: 44 | strategy: 45 | matrix: 46 | mssql_version: ["2017", "2019", "2022"] 47 | runs-on: ubuntu-latest 48 | permissions: 49 | contents: read 50 | packages: write 51 | steps: 52 | - name: Checkout 53 | uses: actions/checkout@v4 54 | 55 | - name: Log in to the Container registry 56 | uses: docker/login-action@v3.3.0 57 | with: 58 | registry: ghcr.io 59 | username: ${{ github.actor }} 60 | password: ${{ secrets.GITHUB_TOKEN }} 61 | 62 | - name: Build and push Docker image 63 | uses: docker/build-push-action@v4.0.0 64 | with: 65 | context: devops 66 | build-args: MSSQL_VERSION=${{ matrix.mssql_version }} 67 | file: devops/server.Dockerfile 68 | push: true 69 | platforms: linux/amd64 70 | tags: ghcr.io/${{ github.repository }}:server-${{ matrix.mssql_version }} 71 | -------------------------------------------------------------------------------- /.github/workflows/release-version.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Release new version 3 | 4 | on: # yamllint disable-line rule:truthy 5 | release: 6 | types: 7 | - published 8 | 9 | jobs: 10 | release-version: 11 | name: Release new version 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | 16 | - uses: actions/setup-python@v5 17 | with: 18 | python-version: '3.9' 19 | 20 | - name: Install dependencies 21 | run: pip install -r dev_requirements.txt 22 | 23 | - name: Verify version match 24 | run: python setup.py verify 25 | 26 | - name: Initialize .pypirc 27 | run: | 28 | echo -e "[pypi]" >> ~/.pypirc 29 | echo -e "username = __token__" >> ~/.pypirc 30 | echo -e "password = ${{ secrets.PYPI_DBT_SQLSERVER }}" >> ~/.pypirc 31 | 32 | - name: Build and publish package 33 | run: | 34 | python setup.py sdist bdist_wheel 35 | twine upload dist/* 36 | -------------------------------------------------------------------------------- /.github/workflows/unit-tests.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Unit tests 3 | on: # yamllint disable-line rule:truthy 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - master 8 | - v* 9 | pull_request: 10 | branches: 11 | - master 12 | - v* 13 | schedule: 14 | - cron: '0 22 * * 0' 15 | 16 | jobs: 17 | unit-tests: 18 | name: Unit tests 19 | strategy: 20 | matrix: 21 | python_version: ["3.9", "3.10", "3.11", "3.12"] 22 | runs-on: ubuntu-latest 23 | permissions: 24 | contents: read 25 | packages: read 26 | container: 27 | image: ghcr.io/${{ github.repository }}:CI-${{ matrix.python_version }}-msodbc18 28 | credentials: 29 | username: ${{ github.actor }} 30 | password: ${{ secrets.github_token }} 31 | steps: 32 | 33 | - uses: actions/checkout@v4 34 | 35 | - name: Install dependencies 36 | run: pip install -r dev_requirements.txt 37 | 38 | - name: Run unit tests 39 | run: pytest -n auto -ra -v tests/unit 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | 
dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | /*.egg-info 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | *.log.legacy 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | 61 | # DotEnv configuration 62 | .env 63 | 64 | # Database 65 | *.db 66 | *.rdb 67 | 68 | # Pycharm 69 | .idea 70 | 71 | # Spyder 72 | .spyproject/ 73 | 74 | # Jupyter NB Checkpoints 75 | .ipynb_checkpoints/ 76 | 77 | # exclude data from source control by default 78 | /data/ 79 | 80 | # Mac OS-specific storage files 81 | .DS_Store 82 | 83 | # vim 84 | *.swp 85 | *.swo 86 | 87 | # Mypy cache 88 | .mypy_cache/ 89 | 90 | # Environments 91 | *.env 92 | .venv 93 | env/ 94 | venv/ 95 | ENV/ 96 | env.bak/ 97 | venv.bak/ 98 | .mise.toml 99 | 100 | devcontainer-lock.json 101 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | default_language_version: 2 | python: python3.10 3 | repos: 4 | - repo: 'https://github.com/pre-commit/pre-commit-hooks' 5 | rev: v4.6.0 6 | hooks: 7 | - id: check-yaml 8 | args: 9 | - '--unsafe' 10 | - id: check-json 11 | - id: end-of-file-fixer 12 | - id: trailing-whitespace 13 | exclude_types: 14 | - markdown 15 | - id: check-case-conflict 16 | - id: check-ast 17 | - id: check-builtin-literals 18 | - id: check-merge-conflict 19 | - id: no-commit-to-branch 20 | - id: fix-byte-order-marker 21 | - id: mixed-line-ending 22 | - id: check-docstring-first 23 | - repo: 'https://github.com/adrienverge/yamllint' 24 | rev: v1.35.1 25 | hooks: 26 | - id: yamllint 27 | args: 28 | - '-d {extends: default, rules: {line-length: disable, document-start: disable}}' 29 | - '-s' 30 | - repo: 'https://github.com/MarcoGorelli/absolufy-imports' 31 | rev: v0.3.1 32 | hooks: 33 | - id: absolufy-imports 34 | - repo: 'https://github.com/hadialqattan/pycln' 35 | rev: v2.5.0 36 | hooks: 37 | - id: pycln 38 | args: 39 | - '--all' 40 | - repo: 'https://github.com/pycqa/isort' 41 | rev: 5.13.2 42 | hooks: 43 | - id: isort 44 | args: 45 | - '--profile' 46 | - black 47 | - '--atomic' 48 | - '--line-length' 49 | - '99' 50 | - '--python-version' 51 | - '39' 52 | - repo: 'https://github.com/psf/black' 53 | rev: 24.8.0 54 | hooks: 55 | - id: black 56 | args: 57 | - '--line-length=99' 58 | - '--target-version=py39' 59 | - id: black 60 | alias: black-check 61 | stages: 62 | - manual 63 | args: 64 | - '--line-length=99' 65 | - '--target-version=py310' 66 | - '--check' 67 | - '--diff' 68 | - repo: 'https://github.com/pycqa/flake8' 69 | rev: 7.1.1 70 | hooks: 71 | - id: flake8 72 | args: 73 | - '--max-line-length=99' 74 | - id: flake8 75 | args: 76 | - '--max-line-length=99' 77 | alias: flake8-check 78 | stages: 79 | - manual 80 | - repo: 'https://github.com/pre-commit/mirrors-mypy' 81 | rev: v1.11.1 82 | hooks: 83 | - id: mypy 84 | args: 85 | - '--show-error-codes' 86 | - 
'--ignore-missing-imports' 87 | - '--explicit-package-bases' 88 | files: '^dbt/adapters' 89 | - id: mypy 90 | alias: mypy-check 91 | stages: 92 | - manual 93 | args: 94 | - '--show-error-codes' 95 | - '--pretty' 96 | - '--ignore-missing-imports' 97 | - '--explicit-package-bases' 98 | files: '^dbt/adapters' 99 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true 7 | } 8 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Development of the adapter 2 | 3 | Python 3.10 is used for developing the adapter. To get started, bootstrap your environment as follows: 4 | 5 | Create a virtual environment; [pyenv](https://github.com/pyenv/pyenv) is used in the example: 6 | 7 | ```shell 8 | pyenv install 3.10.7 9 | pyenv virtualenv 3.10.7 dbt-sqlserver 10 | pyenv activate dbt-sqlserver 11 | ``` 12 | 13 | Install the development dependencies and pre-commit, and get an overview of the available make commands: 14 | 15 | ```shell 16 | make dev 17 | make help 18 | ``` 19 | 20 | [Pre-commit](https://pre-commit.com/) helps us to maintain a consistent style and code quality across the entire project. 21 | After running `make dev`, pre-commit will automatically validate your commits and fix any formatting issues whenever possible. 22 | 23 | ## Devcontainer 24 | 25 | A devcontainer file has been included since 1.7.2 to simplify creating the development environment. 26 | 27 | ## Testing 28 | 29 | The functional tests require a running SQL Server instance. You can easily spin up a local instance with the following command: 30 | 31 | ```shell 32 | make server 33 | ``` 34 | 35 | This will use Docker Compose to spin up a local instance of SQL Server. Docker Compose is now bundled with Docker, so make sure to [install the latest version of Docker](https://docs.docker.com/get-docker/). 36 | 37 | Next, tell our tests how they should connect to the local instance by creating a file called `test.env` in the root of the project. 38 | You can use the provided `test.env.sample` as a base; if you started the server with `make server`, it matches the instance running on your local machine. 39 | 40 | ```shell 41 | cp test.env.sample test.env 42 | ``` 43 | 44 | You can tweak the contents of this file to test against a different database. 45 | 46 | Note that we need 3 users to be able to run tests related to the grants. 47 | The 3 users are defined by the following environment variables containing their usernames. 48 | 49 | * `DBT_TEST_USER_1` 50 | * `DBT_TEST_USER_2` 51 | * `DBT_TEST_USER_3` 52 | 53 | You can use the following commands to run the unit and the functional tests respectively: 54 | 55 | ```shell 56 | make unit 57 | make functional 58 | ``` 59 | 60 | ## CI/CD 61 | 62 | We use Docker images that contain everything we need to test the adapter in the CI/CD workflows. 63 | The Dockerfile is located in the *devops* directory and pushed to GitHub Packages for this repo. 64 | There is one tag per supported Python version. 65 | 66 | All CI/CD pipelines use GitHub Actions. The following pipelines are available: 67 | 68 | * `publish-docker`: publishes the image we use in all other pipelines.
69 | * `unit-tests`: runs the unit tests for each supported Python version. 70 | * `integration-tests-azure`: runs the integration tests for Azure SQL Server. 71 | * `integration-tests-sqlserver`: runs the integration tests for SQL Server. 72 | * `release-version`: publishes the adapter to PyPI. 73 | 74 | There is an additional [Pre-commit](https://pre-commit.ci/) pipeline that validates the code style. 75 | 76 | ### Azure integration tests 77 | 78 | The following environment variables are available: 79 | 80 | * `DBT_AZURESQL_SERVER`: full hostname of the server hosting the Azure SQL database 81 | * `DBT_AZURESQL_DB`: name of the Azure SQL database 82 | * `DBT_AZURESQL_UID`: username of the SQL admin on the server hosting the Azure SQL database 83 | * `DBT_AZURESQL_PWD`: password of the SQL admin on the server hosting the Azure SQL database 84 | * `DBT_AZURE_TENANT`: Azure tenant ID 85 | * `DBT_AZURE_SUBSCRIPTION_ID`: Azure subscription ID 86 | * `DBT_AZURE_RESOURCE_GROUP_NAME`: Azure resource group name 87 | * `DBT_AZURE_SP_NAME`: Client/application ID of the service principal used to connect to Azure AD 88 | * `DBT_AZURE_SP_SECRET`: Password of the service principal used to connect to Azure AD 89 | 90 | ## Releasing a new version 91 | 92 | Make sure the version number is bumped in `__version__.py`. Then, create a git tag named `v` and push it to GitHub. 93 | A GitHub Actions workflow will be triggered to build the package and push it to PyPI. 94 | 95 | If you're releasing support for a new version of `dbt-core`, also bump the `dbt_version` in `setup.py`. 96 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 mikaelene 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include dbt/include *.sql *.yml *.md 2 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .DEFAULT_GOAL:=help 2 | 3 | .PHONY: dev 4 | dev: ## Installs adapter in develop mode along with development dependencies 5 | @\ 6 | pip install -r dev_requirements.txt && pre-commit install 7 | 8 | .PHONY: mypy 9 | mypy: ## Runs mypy against staged changes for static type checking. 10 | @\ 11 | pre-commit run --hook-stage manual mypy-check | grep -v "INFO" 12 | 13 | .PHONY: flake8 14 | flake8: ## Runs flake8 against staged changes to enforce style guide. 15 | @\ 16 | pre-commit run --hook-stage manual flake8-check | grep -v "INFO" 17 | 18 | .PHONY: black 19 | black: ## Runs black against staged changes to enforce style guide. 20 | @\ 21 | pre-commit run --hook-stage manual black-check -v | grep -v "INFO" 22 | 23 | .PHONY: lint 24 | lint: ## Runs flake8 and mypy code checks against staged changes. 25 | @\ 26 | pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \ 27 | pre-commit run mypy-check --hook-stage manual | grep -v "INFO" 28 | 29 | .PHONY: all 30 | all: ## Runs all checks against staged changes. 31 | @\ 32 | pre-commit run -a 33 | 34 | .PHONY: linecheck 35 | linecheck: ## Checks for all Python lines 100 characters or more 36 | @\ 37 | find dbt -type f -name "*.py" -exec grep -I -r -n '.\{100\}' {} \; 38 | 39 | .PHONY: unit 40 | unit: ## Runs unit tests. 41 | @\ 42 | pytest -n auto -ra -v tests/unit 43 | 44 | .PHONY: functional 45 | functional: ## Runs functional tests. 46 | @\ 47 | pytest -n auto -ra -v tests/functional 48 | 49 | .PHONY: test 50 | test: ## Runs unit tests and code checks against staged changes. 51 | @\ 52 | pytest -n auto -ra -v tests/unit; \ 53 | pre-commit run black-check --hook-stage manual | grep -v "INFO"; \ 54 | pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \ 55 | pre-commit run mypy-check --hook-stage manual | grep -v "INFO" 56 | 57 | .PHONY: server 58 | server: ## Spins up a local MS SQL Server instance for development. Docker Compose is required. 59 | @\ 60 | docker compose up -d 61 | 62 | .PHONY: clean 63 | clean: ## Removes files that are ignored by git. 64 | @echo "cleaning repo" 65 | @git clean -f -X 66 | 67 | .PHONY: help 68 | help: ## Show this help message. 69 | @echo 'usage: make [target]' 70 | @echo 71 | @echo 'targets:' 72 | @grep -E '^[7+a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 73 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dbt-sqlserver 2 | 3 | [dbt](https://www.getdbt.com) adapter for Microsoft SQL Server and Azure SQL services. 4 | 5 | The adapter supports dbt-core 0.14 or newer and follows the same versioning scheme. 6 | For example, version 1.1.x of the adapter is compatible with dbt-core 1.1.x.
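In practice this means you can pin both packages to the same minor release series. The version numbers below are only illustrative (1.9 matches the adapter version in this repository); adjust them to the release you need:

```shell
pip install "dbt-core~=1.9.0" "dbt-sqlserver~=1.9.0"
```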
7 | 8 | ## Documentation 9 | 10 | We've bundled all documentation on the dbt docs site: 11 | 12 | * [Profile setup & authentication](https://docs.getdbt.com/reference/warehouse-profiles/mssql-profile) 13 | * [Adapter documentation, usage and important notes](https://docs.getdbt.com/reference/resource-configs/mssql-configs) 14 | 15 | Join us on the [dbt Slack](https://getdbt.slack.com/archives/CMRMDDQ9W) to ask questions, get help, or to discuss the project. 16 | 17 | ## Installation 18 | 19 | This adapter requires the Microsoft ODBC driver to be installed: 20 | [Windows](https://docs.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server?view=sql-server-ver16#download-for-windows) | 21 | [macOS](https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/install-microsoft-odbc-driver-sql-server-macos?view=sql-server-ver16) | 22 | [Linux](https://docs.microsoft.com/en-us/sql/connect/odbc/linux-mac/installing-the-microsoft-odbc-driver-for-sql-server?view=sql-server-ver16) 23 | 24 |
<details><summary>Debian/Ubuntu</summary> 25 | <p> 26 | 27 | Make sure to install the ODBC headers as well as the driver linked above: 28 | 29 | ```shell 30 | sudo apt-get install -y unixodbc-dev 31 | ``` 32 | 33 | </p> 34 | </details>
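Once the driver and the package (below) are installed, dbt reads its connection details from `profiles.yml`. The snippet below is only a minimal sketch of a SQL-authentication target: the profile name and all field values are placeholders mirroring the defaults used by this repository's dev container, and the profile setup documentation linked above is the authoritative reference for the available fields.

```yaml
dbt_sqlserver_project:  # placeholder profile name
  target: dev
  outputs:
    dev:
      type: sqlserver
      driver: 'ODBC Driver 18 for SQL Server'
      server: 127.0.0.1
      port: 1433
      database: TestDB
      schema: dbo
      user: SA
      password: "L0calTesting!"
      encrypt: true
      trust_cert: true
```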
 35 | 36 | Latest version: ![PyPI](https://img.shields.io/pypi/v/dbt-sqlserver?label=latest%20stable&logo=pypi) 37 | 38 | ```shell 39 | pip install -U dbt-sqlserver 40 | ``` 41 | 42 | Latest pre-release: ![GitHub tag (latest SemVer pre-release)](https://img.shields.io/github/v/tag/dbt-msft/dbt-sqlserver?include_prereleases&label=latest%20pre-release&logo=pypi) 43 | 44 | ```shell 45 | pip install -U --pre dbt-sqlserver 46 | ``` 47 | 48 | ## Changelog 49 | 50 | See [the changelog](CHANGELOG.md). 51 | 52 | ## Contributing 53 | 54 | [![Unit tests](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/unit-tests.yml) 55 | [![Integration tests on SQL Server](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/integration-tests-sqlserver.yml/badge.svg)](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/integration-tests-sqlserver.yml) 56 | [![Integration tests on Azure](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/integration-tests-azure.yml/badge.svg)](https://github.com/dbt-msft/dbt-sqlserver/actions/workflows/integration-tests-azure.yml) 57 | 58 | This adapter is community-maintained. 59 | You are welcome to contribute by creating issues, opening or reviewing pull requests, or helping other users in the Slack channel. 60 | If you're unsure how to get started, check out our [contributing guide](CONTRIBUTING.md). 61 | 62 | ## License 63 | 64 | [![PyPI - License](https://img.shields.io/pypi/l/dbt-sqlserver)](https://github.com/dbt-msft/dbt-sqlserver/blob/master/LICENSE) 65 | 66 | ## Code of Conduct 67 | 68 | Everyone involved in this project is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct). 69 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.base import AdapterPlugin 2 | 3 | from dbt.adapters.sqlserver.sqlserver_adapter import SQLServerAdapter 4 | from dbt.adapters.sqlserver.sqlserver_column import SQLServerColumn 5 | from dbt.adapters.sqlserver.sqlserver_configs import SQLServerConfigs 6 | from dbt.adapters.sqlserver.sqlserver_connections import SQLServerConnectionManager # noqa 7 | from dbt.adapters.sqlserver.sqlserver_credentials import SQLServerCredentials 8 | from dbt.include import sqlserver 9 | 10 | Plugin = AdapterPlugin( 11 | adapter=SQLServerAdapter, 12 | credentials=SQLServerCredentials, 13 | include_path=sqlserver.PACKAGE_PATH, 14 | dependencies=["fabric"], 15 | ) 16 | 17 | __all__ = [ 18 | "Plugin", 19 | "SQLServerConnectionManager", 20 | "SQLServerColumn", 21 | "SQLServerAdapter", 22 | "SQLServerCredentials", 23 | "SQLServerConfigs", 24 | ] 25 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/__version__.py: -------------------------------------------------------------------------------- 1 | version = "1.9.0" 2 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/relation_configs/__init__.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.sqlserver.relation_configs.policies import ( 2 | MAX_CHARACTERS_IN_IDENTIFIER, 3 | SQLServerIncludePolicy, 4 | SQLServerQuotePolicy, 5 | SQLServerRelationType, 6 | ) 7 | 8 | __all__ = [ 9 | "MAX_CHARACTERS_IN_IDENTIFIER", 10 | "SQLServerIncludePolicy",
11 | "SQLServerQuotePolicy", 12 | "SQLServerRelationType", 13 | ] 14 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/relation_configs/policies.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from dbt.adapters.contracts.relation import Policy 4 | from dbt_common.dataclass_schema import StrEnum 5 | 6 | MAX_CHARACTERS_IN_IDENTIFIER = 127 7 | 8 | 9 | class SQLServerRelationType(StrEnum): 10 | Table = "table" 11 | View = "view" 12 | CTE = "cte" 13 | 14 | 15 | class SQLServerIncludePolicy(Policy): 16 | database: bool = True 17 | schema: bool = True 18 | identifier: bool = True 19 | 20 | 21 | @dataclass 22 | class SQLServerQuotePolicy(Policy): 23 | database: bool = True 24 | schema: bool = True 25 | identifier: bool = True 26 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_adapter.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | import dbt.exceptions 4 | from dbt.adapters.base.impl import ConstraintSupport 5 | from dbt.adapters.fabric import FabricAdapter 6 | from dbt.contracts.graph.nodes import ConstraintType 7 | 8 | from dbt.adapters.sqlserver.sqlserver_column import SQLServerColumn 9 | from dbt.adapters.sqlserver.sqlserver_connections import SQLServerConnectionManager 10 | from dbt.adapters.sqlserver.sqlserver_relation import SQLServerRelation 11 | 12 | 13 | class SQLServerAdapter(FabricAdapter): 14 | """ 15 | Controls actual implmentation of adapter, and ability to override certain methods. 16 | """ 17 | 18 | ConnectionManager = SQLServerConnectionManager 19 | Column = SQLServerColumn 20 | Relation = SQLServerRelation 21 | 22 | CONSTRAINT_SUPPORT = { 23 | ConstraintType.check: ConstraintSupport.ENFORCED, 24 | ConstraintType.not_null: ConstraintSupport.ENFORCED, 25 | ConstraintType.unique: ConstraintSupport.ENFORCED, 26 | ConstraintType.primary_key: ConstraintSupport.ENFORCED, 27 | ConstraintType.foreign_key: ConstraintSupport.ENFORCED, 28 | } 29 | 30 | @classmethod 31 | def render_model_constraint(cls, constraint) -> Optional[str]: 32 | constraint_prefix = "add constraint " 33 | column_list = ", ".join(constraint.columns) 34 | 35 | if constraint.name is None: 36 | raise dbt.exceptions.DbtDatabaseError( 37 | "Constraint name cannot be empty. Provide constraint name - column " 38 | + column_list 39 | + " and run the project again." 
40 | ) 41 | 42 | if constraint.type == ConstraintType.unique: 43 | return constraint_prefix + f"{constraint.name} unique nonclustered({column_list})" 44 | elif constraint.type == ConstraintType.primary_key: 45 | return constraint_prefix + f"{constraint.name} primary key nonclustered({column_list})" 46 | elif constraint.type == ConstraintType.foreign_key and constraint.expression: 47 | return ( 48 | constraint_prefix 49 | + f"{constraint.name} foreign key({column_list}) references " 50 | + constraint.expression 51 | ) 52 | elif constraint.type == ConstraintType.check and constraint.expression: 53 | return f"{constraint_prefix} {constraint.name} check ({constraint.expression})" 54 | elif constraint.type == ConstraintType.custom and constraint.expression: 55 | return f"{constraint_prefix} {constraint.name} {constraint.expression}" 56 | else: 57 | return None 58 | 59 | @classmethod 60 | def date_function(cls): 61 | return "getdate()" 62 | 63 | def valid_incremental_strategies(self): 64 | """The set of standard builtin strategies which this adapter supports out-of-the-box. 65 | Not used to validate custom strategies defined by end users. 66 | """ 67 | return ["append", "delete+insert", "merge", "microbatch"] 68 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_column.py: -------------------------------------------------------------------------------- 1 | from dbt.adapters.fabric import FabricColumn 2 | 3 | 4 | class SQLServerColumn(FabricColumn): 5 | def is_integer(self) -> bool: 6 | return self.dtype.lower() in [ 7 | # real types 8 | "smallint", 9 | "integer", 10 | "bigint", 11 | "smallserial", 12 | "serial", 13 | "bigserial", 14 | # aliases 15 | "int2", 16 | "int4", 17 | "int8", 18 | "serial2", 19 | "serial4", 20 | "serial8", 21 | "int", 22 | ] 23 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_configs.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from dbt.adapters.fabric import FabricConfigs 4 | 5 | 6 | @dataclass 7 | class SQLServerConfigs(FabricConfigs): 8 | pass 9 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_connections.py: -------------------------------------------------------------------------------- 1 | import dbt_common.exceptions # noqa 2 | import pyodbc 3 | from azure.core.credentials import AccessToken 4 | from azure.identity import ClientSecretCredential, ManagedIdentityCredential 5 | from dbt.adapters.contracts.connection import Connection, ConnectionState 6 | from dbt.adapters.events.logging import AdapterLogger 7 | from dbt.adapters.fabric import FabricConnectionManager 8 | from dbt.adapters.fabric.fabric_connection_manager import ( 9 | AZURE_AUTH_FUNCTIONS as AZURE_AUTH_FUNCTIONS_FABRIC, 10 | ) 11 | from dbt.adapters.fabric.fabric_connection_manager import ( 12 | AZURE_CREDENTIAL_SCOPE, 13 | bool_to_connection_string_arg, 14 | get_pyodbc_attrs_before_accesstoken, 15 | get_pyodbc_attrs_before_credentials, 16 | ) 17 | 18 | from dbt.adapters.sqlserver import __version__ 19 | from dbt.adapters.sqlserver.sqlserver_credentials import SQLServerCredentials 20 | 21 | logger = AdapterLogger("sqlserver") 22 | 23 | 24 | def get_msi_access_token(credentials: SQLServerCredentials) -> AccessToken: 25 | """ 26 | Get an Azure access token from the system's managed identity 27 | 28 | Parameters 29 | 
----------- 30 | credentials: SQLServerCredentials 31 | Credentials. 32 | 33 | Returns 34 | ------- 35 | out : AccessToken 36 | The access token. 37 | """ 38 | token = ManagedIdentityCredential().get_token(AZURE_CREDENTIAL_SCOPE) 39 | return token 40 | 41 | 42 | def get_sp_access_token(credentials: SQLServerCredentials) -> AccessToken: 43 | """ 44 | Get an Azure access token using the SP credentials. 45 | 46 | Parameters 47 | ---------- 48 | credentials : SQLServerCredentials 49 | Credentials. 50 | 51 | Returns 52 | ------- 53 | out : AccessToken 54 | The access token. 55 | """ 56 | token = ClientSecretCredential( 57 | str(credentials.tenant_id), 58 | str(credentials.client_id), 59 | str(credentials.client_secret), 60 | ).get_token(AZURE_CREDENTIAL_SCOPE) 61 | return token 62 | 63 | 64 | AZURE_AUTH_FUNCTIONS = { 65 | **AZURE_AUTH_FUNCTIONS_FABRIC, 66 | "serviceprincipal": get_sp_access_token, 67 | "msi": get_msi_access_token, 68 | } 69 | 70 | 71 | class SQLServerConnectionManager(FabricConnectionManager): 72 | TYPE = "sqlserver" 73 | 74 | @classmethod 75 | def open(cls, connection: Connection) -> Connection: 76 | if connection.state == ConnectionState.OPEN: 77 | logger.debug("Connection is already open, skipping open.") 78 | return connection 79 | 80 | credentials = cls.get_credentials(connection.credentials) 81 | if credentials.authentication != "sql": 82 | return super().open(connection) 83 | 84 | # sql login authentication 85 | 86 | con_str = [f"DRIVER={{{credentials.driver}}}"] 87 | 88 | if "\\" in credentials.host: 89 | # If there is a backslash \ in the host name, the host is a 90 | # SQL Server named instance. In this case then port number has to be omitted. 91 | con_str.append(f"SERVER={credentials.host}") 92 | else: 93 | con_str.append(f"SERVER={credentials.host},{credentials.port}") 94 | 95 | con_str.append(f"Database={credentials.database}") 96 | 97 | assert credentials.authentication is not None 98 | 99 | con_str.append(f"UID={{{credentials.UID}}}") 100 | con_str.append(f"PWD={{{credentials.PWD}}}") 101 | 102 | # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/features/using-encryption-without-validation?view=sql-server-ver15 103 | assert credentials.encrypt is not None 104 | assert credentials.trust_cert is not None 105 | 106 | con_str.append(bool_to_connection_string_arg("encrypt", credentials.encrypt)) 107 | con_str.append( 108 | bool_to_connection_string_arg("TrustServerCertificate", credentials.trust_cert) 109 | ) 110 | 111 | plugin_version = __version__.version 112 | application_name = f"dbt-{credentials.type}/{plugin_version}" 113 | con_str.append(f"APP={application_name}") 114 | 115 | con_str_concat = ";".join(con_str) 116 | 117 | index = [] 118 | for i, elem in enumerate(con_str): 119 | if "pwd=" in elem.lower(): 120 | index.append(i) 121 | 122 | if len(index) != 0: 123 | con_str[index[0]] = "PWD=***" 124 | 125 | con_str_display = ";".join(con_str) 126 | 127 | retryable_exceptions = [ # https://github.com/mkleehammer/pyodbc/wiki/Exceptions 128 | pyodbc.InternalError, # not used according to docs, but defined in PEP-249 129 | pyodbc.OperationalError, 130 | ] 131 | 132 | if credentials.authentication.lower() in AZURE_AUTH_FUNCTIONS: 133 | # Temporary login/token errors fall into this category when using AAD 134 | retryable_exceptions.append(pyodbc.InterfaceError) 135 | 136 | def connect(): 137 | logger.debug(f"Using connection string: {con_str_display}") 138 | 139 | if credentials.authentication == "ActiveDirectoryAccessToken": 140 | attrs_before 
= get_pyodbc_attrs_before_accesstoken(credentials.access_token) 141 | else: 142 | attrs_before = get_pyodbc_attrs_before_credentials(credentials) 143 | 144 | handle = pyodbc.connect( 145 | con_str_concat, 146 | attrs_before=attrs_before, 147 | autocommit=True, 148 | timeout=credentials.login_timeout, 149 | ) 150 | handle.timeout = credentials.query_timeout 151 | logger.debug(f"Connected to db: {credentials.database}") 152 | return handle 153 | 154 | return cls.retry_connection( 155 | connection, 156 | connect=connect, 157 | logger=logger, 158 | retry_limit=credentials.retries, 159 | retryable_exceptions=retryable_exceptions, 160 | ) 161 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_credentials.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional 3 | 4 | from dbt.adapters.fabric import FabricCredentials 5 | 6 | 7 | @dataclass 8 | class SQLServerCredentials(FabricCredentials): 9 | """ 10 | Defines database specific credentials that get added to 11 | profiles.yml to connect to new adapter 12 | """ 13 | 14 | port: Optional[int] = 1433 15 | authentication: Optional[str] = "sql" 16 | 17 | @property 18 | def type(self): 19 | return "sqlserver" 20 | 21 | def _connection_keys(self): 22 | return super()._connection_keys() + ("port",) 23 | -------------------------------------------------------------------------------- /dbt/adapters/sqlserver/sqlserver_relation.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass, field 2 | from typing import Optional, Type 3 | 4 | from dbt.adapters.base.relation import BaseRelation, EventTimeFilter 5 | from dbt.adapters.utils import classproperty 6 | from dbt_common.exceptions import DbtRuntimeError 7 | 8 | from dbt.adapters.sqlserver.relation_configs import ( 9 | MAX_CHARACTERS_IN_IDENTIFIER, 10 | SQLServerIncludePolicy, 11 | SQLServerQuotePolicy, 12 | SQLServerRelationType, 13 | ) 14 | 15 | 16 | @dataclass(frozen=True, eq=False, repr=False) 17 | class SQLServerRelation(BaseRelation): 18 | type: Optional[SQLServerRelationType] = None # type: ignore 19 | include_policy: SQLServerIncludePolicy = field( 20 | default_factory=lambda: SQLServerIncludePolicy() 21 | ) 22 | quote_policy: SQLServerQuotePolicy = field(default_factory=lambda: SQLServerQuotePolicy()) 23 | 24 | @classproperty 25 | def get_relation_type(cls) -> Type[SQLServerRelationType]: 26 | return SQLServerRelationType 27 | 28 | def render_limited(self) -> str: 29 | rendered = self.render() 30 | if self.limit is None: 31 | return rendered 32 | elif self.limit == 0: 33 | return f"(select * from {rendered} where 1=0) {self._render_limited_alias()}" 34 | else: 35 | return f"(select TOP {self.limit} * from {rendered}) {self._render_limited_alias()}" 36 | 37 | def __post_init__(self): 38 | # Check for length of Redshift table/view names. 
39 | # Check self.type to exclude test relation identifiers 40 | if ( 41 | self.identifier is not None 42 | and self.type is not None 43 | and len(self.identifier) > MAX_CHARACTERS_IN_IDENTIFIER 44 | ): 45 | raise DbtRuntimeError( 46 | f"Relation name '{self.identifier}' " 47 | f"is longer than {MAX_CHARACTERS_IN_IDENTIFIER} characters" 48 | ) 49 | 50 | def relation_max_name_length(self): 51 | return MAX_CHARACTERS_IN_IDENTIFIER 52 | 53 | def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str: 54 | """ 55 | Returns "" if start and end are both None 56 | """ 57 | filter = "" 58 | if event_time_filter.start and event_time_filter.end: 59 | filter = ( 60 | f"{event_time_filter.field_name} >=" 61 | f" cast('{event_time_filter.start}' as datetimeoffset)" 62 | f" and {event_time_filter.field_name} <" 63 | f" cast('{event_time_filter.end}' as datetimeoffset)" 64 | ) 65 | elif event_time_filter.start: 66 | filter = ( 67 | f"{event_time_filter.field_name} >=" 68 | f" cast('{event_time_filter.start}' as datetimeoffset)" 69 | ) 70 | elif event_time_filter.end: 71 | filter = ( 72 | f"{event_time_filter.field_name} <" 73 | f" cast('{event_time_filter.end}' as datetimeoffset)" 74 | ) 75 | 76 | return filter 77 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | PACKAGE_PATH = os.path.dirname(__file__) 4 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/dbt_project.yml: -------------------------------------------------------------------------------- 1 | name: dbt_sqlserver 2 | version: 1.9.0 3 | config-version: 2 4 | 5 | macro-paths: ["macros"] 6 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-msft/dbt-sqlserver/bcf4ac910528c9bc681df5fd487818a19094838a/dbt/include/sqlserver/macros/.gitkeep -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/catalog.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_catalog(information_schemas, schemas) -%} 2 | {% set query_label = apply_label() %} 3 | {%- call statement('catalog', fetch_result=True) -%} 4 | 5 | with 6 | principals as ( 7 | select 8 | name as principal_name, 9 | principal_id as principal_id 10 | from 11 | sys.database_principals {{ information_schema_hints() }} 12 | ), 13 | 14 | schemas as ( 15 | select 16 | name as schema_name, 17 | schema_id as schema_id, 18 | principal_id as principal_id 19 | from 20 | sys.schemas {{ information_schema_hints() }} 21 | ), 22 | 23 | tables as ( 24 | select 25 | object_id, 26 | name as table_name, 27 | schema_id as schema_id, 28 | principal_id as principal_id, 29 | 'BASE TABLE' as table_type 30 | from 31 | sys.tables {{ information_schema_hints() }} 32 | ), 33 | 34 | tables_with_metadata as ( 35 | select 36 | object_id, 37 | table_name, 38 | schema_name, 39 | coalesce(tables.principal_id, schemas.principal_id) as owner_principal_id, 40 | table_type 41 | from 42 | tables 43 | join schemas on tables.schema_id = schemas.schema_id 44 | ), 45 | 46 | views as ( 47 | select 48 | object_id, 49 | name as table_name, 50 | schema_id as schema_id, 51 | principal_id as 
principal_id, 52 | 'VIEW' as table_type 53 | from 54 | sys.views {{ information_schema_hints() }} 55 | ), 56 | 57 | views_with_metadata as ( 58 | select 59 | object_id, 60 | table_name, 61 | schema_name, 62 | coalesce(views.principal_id, schemas.principal_id) as owner_principal_id, 63 | table_type 64 | from 65 | views 66 | join schemas on views.schema_id = schemas.schema_id 67 | ), 68 | 69 | tables_and_views as ( 70 | select 71 | object_id, 72 | table_name, 73 | schema_name, 74 | principal_name, 75 | table_type 76 | from 77 | tables_with_metadata 78 | join principals on tables_with_metadata.owner_principal_id = principals.principal_id 79 | union all 80 | select 81 | object_id, 82 | table_name, 83 | schema_name, 84 | principal_name, 85 | table_type 86 | from 87 | views_with_metadata 88 | join principals on views_with_metadata.owner_principal_id = principals.principal_id 89 | ), 90 | 91 | cols as ( 92 | 93 | select 94 | c.object_id, 95 | c.name as column_name, 96 | c.column_id as column_index, 97 | t.name as column_type 98 | from sys.columns as c {{ information_schema_hints() }} 99 | left join sys.types as t {{ information_schema_hints() }} on c.system_type_id = t.system_type_id 100 | ) 101 | 102 | select 103 | DB_NAME() as table_database, 104 | tv.schema_name as table_schema, 105 | tv.table_name, 106 | tv.table_type, 107 | null as table_comment, 108 | tv.principal_name as table_owner, 109 | cols.column_name, 110 | cols.column_index, 111 | cols.column_type, 112 | null as column_comment 113 | from tables_and_views tv 114 | join cols on tv.object_id = cols.object_id 115 | where ({%- for schema in schemas -%} 116 | upper(tv.schema_name) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%} 117 | {%- endfor -%}) 118 | 119 | order by column_index 120 | {{ query_label }} 121 | 122 | {%- endcall -%} 123 | 124 | {{ return(load_result('catalog').table) }} 125 | 126 | {%- endmacro %} 127 | 128 | {% macro sqlserver__get_catalog_relations(information_schema, relations) -%} 129 | {% set query_label = apply_label() %} 130 | {%- call statement('catalog', fetch_result=True) -%} 131 | 132 | with 133 | principals as ( 134 | select 135 | name as principal_name, 136 | principal_id as principal_id 137 | from 138 | sys.database_principals {{ information_schema_hints() }} 139 | ), 140 | 141 | schemas as ( 142 | select 143 | name as schema_name, 144 | schema_id as schema_id, 145 | principal_id as principal_id 146 | from 147 | sys.schemas {{ information_schema_hints() }} 148 | ), 149 | 150 | tables as ( 151 | select 152 | object_id, 153 | name as table_name, 154 | schema_id as schema_id, 155 | principal_id as principal_id, 156 | 'BASE TABLE' as table_type 157 | from 158 | sys.tables {{ information_schema_hints() }} 159 | ), 160 | 161 | tables_with_metadata as ( 162 | select 163 | object_id, 164 | table_name, 165 | schema_name, 166 | coalesce(tables.principal_id, schemas.principal_id) as owner_principal_id, 167 | table_type 168 | from 169 | tables 170 | join schemas on tables.schema_id = schemas.schema_id 171 | ), 172 | 173 | views as ( 174 | select 175 | object_id, 176 | name as table_name, 177 | schema_id as schema_id, 178 | principal_id as principal_id, 179 | 'VIEW' as table_type 180 | from 181 | sys.views {{ information_schema_hints() }} 182 | ), 183 | 184 | views_with_metadata as ( 185 | select 186 | object_id, 187 | table_name, 188 | schema_name, 189 | coalesce(views.principal_id, schemas.principal_id) as owner_principal_id, 190 | table_type 191 | from 192 | views 193 | join schemas on views.schema_id 
= schemas.schema_id 194 | ), 195 | 196 | tables_and_views as ( 197 | select 198 | object_id, 199 | table_name, 200 | schema_name, 201 | principal_name, 202 | table_type 203 | from 204 | tables_with_metadata 205 | join principals on tables_with_metadata.owner_principal_id = principals.principal_id 206 | union all 207 | select 208 | object_id, 209 | table_name, 210 | schema_name, 211 | principal_name, 212 | table_type 213 | from 214 | views_with_metadata 215 | join principals on views_with_metadata.owner_principal_id = principals.principal_id 216 | ), 217 | 218 | cols as ( 219 | 220 | select 221 | c.object_id, 222 | c.name as column_name, 223 | c.column_id as column_index, 224 | t.name as column_type 225 | from sys.columns as c {{ information_schema_hints() }} 226 | left join sys.types as t on c.system_type_id = t.system_type_id 227 | ) 228 | 229 | select 230 | DB_NAME() as table_database, 231 | tv.schema_name as table_schema, 232 | tv.table_name, 233 | tv.table_type, 234 | null as table_comment, 235 | tv.principal_name as table_owner, 236 | cols.column_name, 237 | cols.column_index, 238 | cols.column_type, 239 | null as column_comment 240 | from tables_and_views tv 241 | join cols on tv.object_id = cols.object_id 242 | where ( 243 | {%- for relation in relations -%} 244 | {% if relation.schema and relation.identifier %} 245 | ( 246 | upper(tv.schema_name) = upper('{{ relation.schema }}') 247 | and upper(tv.table_name) = upper('{{ relation.identifier }}') 248 | ) 249 | {% elif relation.schema %} 250 | ( 251 | upper(tv.schema_name) = upper('{{ relation.schema }}') 252 | ) 253 | {% else %} 254 | {% do exceptions.raise_compiler_error( 255 | '`get_catalog_relations` requires a list of relations, each with a schema' 256 | ) %} 257 | {% endif %} 258 | 259 | {%- if not loop.last %} or {% endif -%} 260 | {%- endfor -%} 261 | ) 262 | 263 | order by column_index 264 | {{ query_label }} 265 | {%- endcall -%} 266 | 267 | {{ return(load_result('catalog').table) }} 268 | 269 | {%- endmacro %} 270 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/columns.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_empty_subquery_sql(select_sql, select_sql_header=none) %} 2 | {% if select_sql.strip().lower().startswith('with') %} 3 | {{ select_sql }} 4 | {% else -%} 5 | select * from ( 6 | {{ select_sql }} 7 | ) dbt_sbq_tmp 8 | where 1 = 0 9 | {%- endif -%} 10 | 11 | {% endmacro %} 12 | 13 | {% macro sqlserver__get_columns_in_query(select_sql) %} 14 | {% set query_label = apply_label() %} 15 | {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%} 16 | select TOP 0 * from ( 17 | {{ select_sql }} 18 | ) as __dbt_sbq 19 | where 0 = 1 20 | {{ query_label }} 21 | {% endcall %} 22 | 23 | {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }} 24 | {% endmacro %} 25 | 26 | {% macro sqlserver__alter_column_type(relation, column_name, new_column_type) %} 27 | 28 | {%- set tmp_column = column_name + "__dbt_alter" -%} 29 | {% set alter_column_type %} 30 | alter {{ relation.type }} {{ relation }} add "{{ tmp_column }}" {{ new_column_type }}; 31 | {%- endset %} 32 | 33 | {% set update_column %} 34 | update {{ relation }} set "{{ tmp_column }}" = "{{ column_name }}"; 35 | {%- endset %} 36 | 37 | {% set drop_column %} 38 | alter {{ relation.type }} {{ relation }} drop column "{{ column_name }}"; 39 | {%- endset %} 40 | 41 | {% set 
rename_column %} 42 | exec sp_rename '{{ relation | replace('"', '') }}.{{ tmp_column }}', '{{ column_name }}', 'column' 43 | {%- endset %} 44 | 45 | {% do run_query(alter_column_type) %} 46 | {% do run_query(update_column) %} 47 | {% do run_query(drop_column) %} 48 | {% do run_query(rename_column) %} 49 | 50 | {% endmacro %} 51 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/indexes.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__create_clustered_columnstore_index(relation) -%} 2 | {%- set cci_name = (relation.schema ~ '_' ~ relation.identifier ~ '_cci') | replace(".", "") | replace(" ", "") -%} 3 | {%- set relation_name = relation.schema ~ '_' ~ relation.identifier -%} 4 | {%- set full_relation = '"' ~ relation.schema ~ '"."' ~ relation.identifier ~ '"' -%} 5 | use [{{ relation.database }}]; 6 | if EXISTS ( 7 | SELECT * 8 | FROM sys.indexes {{ information_schema_hints() }} 9 | WHERE name = '{{cci_name}}' 10 | AND object_id=object_id('{{relation_name}}') 11 | ) 12 | DROP index {{full_relation}}.{{cci_name}} 13 | CREATE CLUSTERED COLUMNSTORE INDEX {{cci_name}} 14 | ON {{full_relation}} 15 | {% endmacro %} 16 | 17 | {% macro drop_xml_indexes() -%} 18 | {{ log("Running drop_xml_indexes() macro...") }} 19 | 20 | declare @drop_xml_indexes nvarchar(max); 21 | select @drop_xml_indexes = ( 22 | select 'IF INDEXPROPERTY(' + CONVERT(VARCHAR(MAX), sys.tables.[object_id]) + ', ''' + sys.indexes.[name] + ''', ''IndexId'') IS NOT NULL DROP INDEX [' + sys.indexes.[name] + '] ON ' + '[' + SCHEMA_NAME(sys.tables.[schema_id]) + '].[' + OBJECT_NAME(sys.tables.[object_id]) + ']; ' 23 | from sys.indexes {{ information_schema_hints() }} 24 | inner join sys.tables {{ information_schema_hints() }} 25 | on sys.indexes.object_id = sys.tables.object_id 26 | where sys.indexes.[name] is not null 27 | and sys.indexes.type_desc = 'XML' 28 | and sys.tables.[name] = '{{ this.table }}' 29 | for xml path('') 30 | ); exec sp_executesql @drop_xml_indexes; 31 | {%- endmacro %} 32 | 33 | 34 | {% macro drop_spatial_indexes() -%} 35 | {# Altered from https://stackoverflow.com/q/1344401/10415173 #} 36 | {# and https://stackoverflow.com/a/33785833/10415173 #} 37 | 38 | {{ log("Running drop_spatial_indexes() macro...") }} 39 | 40 | declare @drop_spatial_indexes nvarchar(max); 41 | select @drop_spatial_indexes = ( 42 | select 'IF INDEXPROPERTY(' + CONVERT(VARCHAR(MAX), sys.tables.[object_id]) + ', ''' + sys.indexes.[name] + ''', ''IndexId'') IS NOT NULL DROP INDEX [' + sys.indexes.[name] + '] ON ' + '[' + SCHEMA_NAME(sys.tables.[schema_id]) + '].[' + OBJECT_NAME(sys.tables.[object_id]) + ']; ' 43 | from sys.indexes {{ information_schema_hints() }} 44 | inner join sys.tables {{ information_schema_hints() }} 45 | on sys.indexes.object_id = sys.tables.object_id 46 | where sys.indexes.[name] is not null 47 | and sys.indexes.type_desc = 'Spatial' 48 | and sys.tables.[name] = '{{ this.table }}' 49 | for xml path('') 50 | ); exec sp_executesql @drop_spatial_indexes; 51 | {%- endmacro %} 52 | 53 | 54 | {% macro drop_fk_constraints() -%} 55 | {# Altered from https://stackoverflow.com/q/1344401/10415173 #} 56 | 57 | {{ log("Running drop_fk_constraints() macro...") }} 58 | 59 | declare @drop_fk_constraints nvarchar(max); 60 | select @drop_fk_constraints = ( 61 | select 'IF OBJECT_ID(''' + SCHEMA_NAME(CONVERT(VARCHAR(MAX), sys.foreign_keys.[schema_id])) + '.' 
+ sys.foreign_keys.[name] + ''', ''F'') IS NOT NULL ALTER TABLE [' + SCHEMA_NAME(sys.foreign_keys.[schema_id]) + '].[' + OBJECT_NAME(sys.foreign_keys.[parent_object_id]) + '] DROP CONSTRAINT [' + sys.foreign_keys.[name]+ '];' 62 | from sys.foreign_keys 63 | inner join sys.tables on sys.foreign_keys.[referenced_object_id] = sys.tables.[object_id] 64 | where sys.tables.[name] = '{{ this.table }}' 65 | for xml path('') 66 | ); exec sp_executesql @drop_fk_constraints; 67 | 68 | {%- endmacro %} 69 | 70 | 71 | {% macro drop_pk_constraints() -%} 72 | {# Altered from https://stackoverflow.com/q/1344401/10415173 #} 73 | {# and https://stackoverflow.com/a/33785833/10415173 #} 74 | 75 | {{ drop_xml_indexes() }} 76 | 77 | {{ drop_spatial_indexes() }} 78 | 79 | {{ drop_fk_constraints() }} 80 | 81 | {{ log("Running drop_pk_constraints() macro...") }} 82 | 83 | declare @drop_pk_constraints nvarchar(max); 84 | select @drop_pk_constraints = ( 85 | select 'IF INDEXPROPERTY(' + CONVERT(VARCHAR(MAX), sys.tables.[object_id]) + ', ''' + sys.indexes.[name] + ''', ''IndexId'') IS NOT NULL ALTER TABLE [' + SCHEMA_NAME(sys.tables.[schema_id]) + '].[' + sys.tables.[name] + '] DROP CONSTRAINT [' + sys.indexes.[name]+ '];' 86 | from sys.indexes 87 | inner join sys.tables on sys.indexes.[object_id] = sys.tables.[object_id] 88 | where sys.indexes.is_primary_key = 1 89 | and sys.tables.[name] = '{{ this.table }}' 90 | for xml path('') 91 | ); exec sp_executesql @drop_pk_constraints; 92 | 93 | {%- endmacro %} 94 | 95 | 96 | {% macro drop_all_indexes_on_table() -%} 97 | {# Altered from https://stackoverflow.com/q/1344401/10415173 #} 98 | {# and https://stackoverflow.com/a/33785833/10415173 #} 99 | 100 | {{ drop_pk_constraints() }} 101 | 102 | {{ log("Dropping remaining indexes...") }} 103 | 104 | declare @drop_remaining_indexes_last nvarchar(max); 105 | select @drop_remaining_indexes_last = ( 106 | select 'IF INDEXPROPERTY(' + CONVERT(VARCHAR(MAX), sys.tables.[object_id]) + ', ''' + sys.indexes.[name] + ''', ''IndexId'') IS NOT NULL DROP INDEX [' + sys.indexes.[name] + '] ON ' + '[' + SCHEMA_NAME(sys.tables.[schema_id]) + '].[' + OBJECT_NAME(sys.tables.[object_id]) + ']; ' 107 | from sys.indexes {{ information_schema_hints() }} 108 | inner join sys.tables {{ information_schema_hints() }} 109 | on sys.indexes.object_id = sys.tables.object_id 110 | where sys.indexes.[name] is not null 111 | and SCHEMA_NAME(sys.tables.schema_id) = '{{ this.schema }}' 112 | and sys.tables.[name] = '{{ this.table }}' 113 | for xml path('') 114 | ); exec sp_executesql @drop_remaining_indexes_last; 115 | 116 | {%- endmacro %} 117 | 118 | 119 | {% macro create_clustered_index(columns, unique=False) -%} 120 | {{ log("Creating clustered index...") }} 121 | 122 | {% set idx_name = "clustered_" + local_md5(columns | join("_")) %} 123 | 124 | if not exists(select * 125 | from sys.indexes {{ information_schema_hints() }} 126 | where name = '{{ idx_name }}' 127 | and object_id = OBJECT_ID('{{ this }}') 128 | ) 129 | begin 130 | 131 | create 132 | {% if unique -%} 133 | unique 134 | {% endif %} 135 | clustered index 136 | {{ idx_name }} 137 | on {{ this }} ({{ '[' + columns|join("], [") + ']' }}) 138 | end 139 | {%- endmacro %} 140 | 141 | 142 | {% macro create_nonclustered_index(columns, includes=False) %} 143 | 144 | {{ log("Creating nonclustered index...") }} 145 | 146 | {% if includes -%} 147 | {% set idx_name = ( 148 | "nonclustered_" 149 | + local_md5(columns | join("_")) 150 | + "_incl_" 151 | + local_md5(includes | join("_")) 152 | ) %} 153 | {% 
else -%} 154 | {% set idx_name = "nonclustered_" + local_md5(columns | join("_")) %} 155 | {% endif %} 156 | 157 | if not exists(select * 158 | from sys.indexes {{ information_schema_hints() }} 159 | where name = '{{ idx_name }}' 160 | and object_id = OBJECT_ID('{{ this }}') 161 | ) 162 | begin 163 | create nonclustered index 164 | {{ idx_name }} 165 | on {{ this }} ({{ '[' + columns|join("], [") + ']' }}) 166 | {% if includes -%} 167 | include ({{ '[' + includes|join("], [") + ']' }}) 168 | {% endif %} 169 | end 170 | {% endmacro %} 171 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/metadata.sql: -------------------------------------------------------------------------------- 1 | {% macro apply_label() %} 2 | {{ log (config.get('query_tag','dbt-sqlserver'))}} 3 | {%- set query_label = config.get('query_tag','dbt-sqlserver') -%} 4 | OPTION (LABEL = '{{query_label}}'); 5 | {% endmacro %} 6 | 7 | {% macro default__information_schema_hints() %}{% endmacro %} 8 | {% macro sqlserver__information_schema_hints() %}with (nolock){% endmacro %} 9 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/relation.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__truncate_relation(relation) %} 2 | {% call statement('truncate_relation') -%} 3 | truncate table {{ relation }} 4 | {%- endcall %} 5 | {% endmacro %} 6 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/schemas.sql: -------------------------------------------------------------------------------- 1 | 2 | {% macro sqlserver__drop_schema_named(schema_name) %} 3 | {% set schema_relation = api.Relation.create(schema=schema_name) %} 4 | {{ adapter.drop_schema(schema_relation) }} 5 | {% endmacro %} 6 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/adapter/validate_sql.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__validate_sql(sql) -%} 2 | {% call statement('validate_sql') -%} 3 | {{ sql }} 4 | {% endcall %} 5 | {{ return(load_result('validate_sql')) }} 6 | {% endmacro %} 7 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/models/incremental/incremental.sql: -------------------------------------------------------------------------------- 1 | {% materialization incremental, adapter='sqlserver' -%} 2 | 3 | -- relations 4 | {%- set existing_relation = load_cached_relation(this) -%} 5 | {%- set target_relation = this.incorporate(type='table') -%} 6 | {%- set temp_relation = make_temp_relation(target_relation)-%} 7 | {%- set intermediate_relation = make_intermediate_relation(target_relation)-%} 8 | {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} 9 | {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} 10 | 11 | -- configs 12 | {%- set unique_key = config.get('unique_key') -%} 13 | {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%} 14 | {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%} 15 | 16 | -- the temp_ and backup_ relations should not already exist in the database; get_relation 17 | -- will 
return None in that case. Otherwise, we get a relation that we can drop 18 | -- later, before we try to use this name for the current operation. This has to happen before 19 | -- BEGIN, in a separate transaction 20 | {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%} 21 | {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} 22 | -- grab current tables grants config for comparision later on 23 | {% set grant_config = config.get('grants') %} 24 | {{ drop_relation_if_exists(preexisting_intermediate_relation) }} 25 | {{ drop_relation_if_exists(preexisting_backup_relation) }} 26 | 27 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 28 | 29 | -- `BEGIN` happens here: 30 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 31 | 32 | {% set to_drop = [] %} 33 | 34 | {% if existing_relation is none %} 35 | {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %} 36 | {% elif full_refresh_mode %} 37 | {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %} 38 | {% set need_swap = true %} 39 | {% else %} 40 | 41 | {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %} 42 | 43 | {% set contract_config = config.get('contract') %} 44 | {% if not contract_config or not contract_config.enforced %} 45 | {% do adapter.expand_target_column_types( 46 | from_relation=temp_relation, 47 | to_relation=target_relation) %} 48 | {% endif %} 49 | {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#} 50 | {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %} 51 | {% if not dest_columns %} 52 | {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %} 53 | {% endif %} 54 | 55 | {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#} 56 | {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} 57 | {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} 58 | {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} 59 | {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} 60 | {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %} 61 | 62 | {% do to_drop.append(temp_relation) %} 63 | {% endif %} 64 | 65 | {% call statement("main") %} 66 | {{ build_sql }} 67 | {% endcall %} 68 | 69 | {% if need_swap %} 70 | {% do adapter.rename_relation(target_relation, backup_relation) %} 71 | {% do adapter.rename_relation(intermediate_relation, target_relation) %} 72 | {% do to_drop.append(backup_relation) %} 73 | {% endif %} 74 | 75 | {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %} 76 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 77 | 78 | {% do persist_docs(target_relation, model) %} 79 | 80 | {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %} 81 | {% do create_indexes(target_relation) %} 82 | {% endif %} 83 | 84 | {{ run_hooks(post_hooks, inside_transaction=True) }} 85 | 86 | -- `COMMIT` happens here 87 | {% do adapter.commit() %} 88 | 89 | {% for rel in to_drop %} 90 | {% do adapter.drop_relation(rel) %} 91 | {% endfor %} 92 | 93 | {{ 
run_hooks(post_hooks, inside_transaction=False) }} 94 | 95 | {{ return({'relations': [target_relation]}) }} 96 | 97 | {%- endmaterialization %} 98 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/models/incremental/merge.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_incremental_microbatch_sql(arg_dict) %} 2 | {%- set target = arg_dict["target_relation"] -%} 3 | {%- set source = arg_dict["temp_relation"] -%} 4 | {%- set dest_columns = arg_dict["dest_columns"] -%} 5 | {%- set incremental_predicates = [] if arg_dict.get('incremental_predicates') is none else arg_dict.get('incremental_predicates') -%} 6 | 7 | {#-- Add additional incremental_predicates to filter for batch --#} 8 | {% if model.config.get("__dbt_internal_microbatch_event_time_start") -%} 9 | {{ log("incremental append event start time > DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " >= cast('" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "' as datetimeoffset)") }} 10 | {% do incremental_predicates.append("DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " >= cast('" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "' as datetimeoffset)") %} 11 | {% endif %} 12 | {% if model.config.__dbt_internal_microbatch_event_time_end -%} 13 | {{ log("incremental append event end time < DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " < cast('" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "' as datetimeoffset)") }} 14 | {% do incremental_predicates.append("DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " < cast('" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "' as datetimeoffset)") %} 15 | {% endif %} 16 | {% do arg_dict.update({'incremental_predicates': incremental_predicates}) %} 17 | 18 | delete DBT_INTERNAL_TARGET from {{ target }} AS DBT_INTERNAL_TARGET 19 | where ( 20 | {% for predicate in incremental_predicates %} 21 | {%- if not loop.first %}and {% endif -%} {{ predicate }} 22 | {% endfor %} 23 | ); 24 | 25 | {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} 26 | insert into {{ target }} ({{ dest_cols_csv }}) 27 | ( 28 | select {{ dest_cols_csv }} 29 | from {{ source }} 30 | ) 31 | {% endmacro %} 32 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/models/table/table.sql: -------------------------------------------------------------------------------- 1 | {% materialization table, adapter='sqlserver' %} 2 | 3 | {%- set existing_relation = load_cached_relation(this) -%} 4 | {%- set target_relation = this.incorporate(type='table') %} 5 | {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} 6 | -- the intermediate_relation should not already exist in the database; get_relation 7 | -- will return None in that case. Otherwise, we get a relation that we can drop 8 | -- later, before we try to use this name for the current operation 9 | {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%} 10 | /* 11 | See ../view/view.sql for more information about this relation. 
12 | */ 13 | {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} 14 | {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} 15 | -- as above, the backup_relation should not already exist 16 | {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} 17 | -- grab current tables grants config for comparision later on 18 | {% set grant_config = config.get('grants') %} 19 | 20 | -- drop the temp relations if they exist already in the database 21 | {{ drop_relation_if_exists(preexisting_intermediate_relation) }} 22 | {{ drop_relation_if_exists(preexisting_backup_relation) }} 23 | 24 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 25 | 26 | -- `BEGIN` happens here: 27 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 28 | 29 | -- build model 30 | {% call statement('main') -%} 31 | {{ get_create_table_as_sql(False, intermediate_relation, sql) }} 32 | {%- endcall %} 33 | 34 | -- cleanup 35 | {% if existing_relation is not none %} 36 | /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped 37 | since the variable was first set. */ 38 | {% set existing_relation = load_cached_relation(existing_relation) %} 39 | {% if existing_relation is not none %} 40 | {{ adapter.rename_relation(existing_relation, backup_relation) }} 41 | {% endif %} 42 | {% endif %} 43 | 44 | {{ adapter.rename_relation(intermediate_relation, target_relation) }} 45 | 46 | {% do create_indexes(target_relation) %} 47 | 48 | {{ run_hooks(post_hooks, inside_transaction=True) }} 49 | 50 | {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} 51 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 52 | 53 | {% do persist_docs(target_relation, model) %} 54 | 55 | -- `COMMIT` happens here 56 | {{ adapter.commit() }} 57 | 58 | -- finally, drop the existing/backup relation after the commit 59 | {{ drop_relation_if_exists(backup_relation) }} 60 | 61 | {{ run_hooks(post_hooks, inside_transaction=False) }} 62 | 63 | {{ return({'relations': [target_relation]}) }} 64 | {% endmaterialization %} 65 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/models/view/view.sql: -------------------------------------------------------------------------------- 1 | {%- materialization view, adapter='sqlserver' -%} 2 | {%- set existing_relation = load_cached_relation(this) -%} 3 | {%- set target_relation = this.incorporate(type='view') -%} 4 | {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} 5 | 6 | -- the intermediate_relation should not already exist in the database; get_relation 7 | -- will return None in that case. Otherwise, we get a relation that we can drop 8 | -- later, before we try to use this name for the current operation 9 | {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%} 10 | /* 11 | This relation (probably) doesn't exist yet. If it does exist, it's a leftover from 12 | a previous run, and we're going to try to drop it immediately. At the end of this 13 | materialization, we're going to rename the "existing_relation" to this identifier, 14 | and then we're going to drop it. In order to make sure we run the correct one of: 15 | - drop view ... 16 | - drop table ... 
17 | 18 | We need to set the type of this relation to be the type of the existing_relation, if it exists, 19 | or else "view" as a sane default if it does not. Note that if the existing_relation does not 20 | exist, then there is nothing to move out of the way and subsequentally drop. In that case, 21 | this relation will be effectively unused. 22 | */ 23 | {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%} 24 | {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} 25 | -- as above, the backup_relation should not already exist 26 | {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} 27 | -- grab current tables grants config for comparision later on 28 | {% set grant_config = config.get('grants') %} 29 | 30 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 31 | 32 | -- drop the temp relations if they exist already in the database 33 | {{ drop_relation_if_exists(preexisting_intermediate_relation) }} 34 | {{ drop_relation_if_exists(preexisting_backup_relation) }} 35 | 36 | -- `BEGIN` happens here: 37 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 38 | 39 | -- build model 40 | {% call statement('main') -%} 41 | {{ get_create_view_as_sql(intermediate_relation, sql) }} 42 | {%- endcall %} 43 | 44 | -- cleanup 45 | -- move the existing view out of the way 46 | {% if existing_relation is not none %} 47 | /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped 48 | since the variable was first set. */ 49 | {% set existing_relation = load_cached_relation(existing_relation) %} 50 | {% if existing_relation is not none %} 51 | {{ adapter.rename_relation(existing_relation, backup_relation) }} 52 | {% endif %} 53 | {% endif %} 54 | {{ adapter.rename_relation(intermediate_relation, target_relation) }} 55 | 56 | {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} 57 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 58 | 59 | {% do persist_docs(target_relation, model) %} 60 | 61 | {{ run_hooks(post_hooks, inside_transaction=True) }} 62 | 63 | {{ adapter.commit() }} 64 | 65 | {{ drop_relation_if_exists(backup_relation) }} 66 | 67 | {{ run_hooks(post_hooks, inside_transaction=False) }} 68 | 69 | {{ return({'relations': [target_relation]}) }} 70 | 71 | {%- endmaterialization -%} 72 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/snapshot/helpers.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__create_columns(relation, columns) %} 2 | {% set column_list %} 3 | {% for column_entry in columns %} 4 | {{column_entry.name}} {{column_entry.data_type}}{{ ", " if not loop.last }} 5 | {% endfor %} 6 | {% endset %} 7 | 8 | {% set alter_sql %} 9 | ALTER TABLE {{ relation }} 10 | ADD {{ column_list }} 11 | {% endset %} 12 | 13 | {% set results = run_query(alter_sql) %} 14 | 15 | {% endmacro %} 16 | 17 | {% macro build_snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} 18 | {% set temp_relation = make_temp_relation(target_relation) %} 19 | {{ adapter.drop_relation(temp_relation) }} 20 | 21 | {% set select = snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} 22 | 23 | {% set tmp_tble_vw_relation = temp_relation.incorporate(path={"identifier": temp_relation.identifier ~ '__dbt_tmp_vw'}, type='view')-%} 24 | -- Dropping 
temp view relation if it exists 25 | {{ adapter.drop_relation(tmp_tble_vw_relation) }} 26 | 27 | {% call statement('build_snapshot_staging_relation') %} 28 | {{ get_create_table_as_sql(True, temp_relation, select) }} 29 | {% endcall %} 30 | 31 | -- Dropping temp view relation if it exists 32 | {{ adapter.drop_relation(tmp_tble_vw_relation) }} 33 | 34 | {% do return(temp_relation) %} 35 | {% endmacro %} 36 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/snapshot/snapshot.sql: -------------------------------------------------------------------------------- 1 | {% materialization snapshot, adapter='sqlserver' %} 2 | {%- set config = model['config'] -%} 3 | 4 | {%- set target_table = model.get('alias', model.get('name')) -%} 5 | 6 | {%- set strategy_name = config.get('strategy') -%} 7 | {%- set unique_key = config.get('unique_key') %} 8 | -- grab current tables grants config for comparision later on 9 | {%- set grant_config = config.get('grants') -%} 10 | 11 | {% set target_relation_exists, target_relation = get_or_create_relation( 12 | database=model.database, 13 | schema=model.schema, 14 | identifier=target_table, 15 | type='table') -%} 16 | 17 | {%- if not target_relation.is_table -%} 18 | {% do exceptions.relation_wrong_type(target_relation, 'table') %} 19 | {%- endif -%} 20 | 21 | 22 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 23 | 24 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 25 | 26 | {% set strategy_macro = strategy_dispatch(strategy_name) %} 27 | {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", config, target_relation_exists) %} 28 | 29 | {% set temp_snapshot_relation_exists, temp_snapshot_relation = get_or_create_relation( 30 | database=model.database, 31 | schema=model.schema, 32 | identifier=target_table+"_snapshot_staging_temp_view", 33 | type='view') 34 | -%} 35 | 36 | {% set temp_snapshot_relation_sql = model['compiled_code'].replace("'", "''") %} 37 | {% call statement('create temp_snapshot_relation') %} 38 | USE [{{ model.database}}]; 39 | EXEC('DROP VIEW IF EXISTS {{ temp_snapshot_relation.include(database=False) }};'); 40 | EXEC('create view {{ temp_snapshot_relation.include(database=False) }} as {{ temp_snapshot_relation_sql }};'); 41 | {% endcall %} 42 | 43 | {% if not target_relation_exists %} 44 | 45 | {% set build_sql = build_snapshot_table(strategy, temp_snapshot_relation) %} 46 | {% set final_sql = create_table_as(False, target_relation, build_sql) %} 47 | 48 | {% else %} 49 | 50 | {{ adapter.valid_snapshot_target(target_relation) }} 51 | 52 | {% set staging_table = build_snapshot_staging_table(strategy, temp_snapshot_relation, target_relation) %} 53 | 54 | -- this may no-op if the database does not require column expansion 55 | {% do adapter.expand_target_column_types(from_relation=staging_table, 56 | to_relation=target_relation) %} 57 | 58 | {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation) 59 | | rejectattr('name', 'equalto', 'dbt_change_type') 60 | | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') 61 | | rejectattr('name', 'equalto', 'dbt_unique_key') 62 | | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') 63 | | list %} 64 | {% if missing_columns|length > 0 %} 65 | {{log("Missing columns length is: "~ missing_columns|length)}} 66 | {% do create_columns(target_relation, missing_columns) %} 67 | {% endif %} 68 | 69 | {% set source_columns = adapter.get_columns_in_relation(staging_table) 70 | | 
rejectattr('name', 'equalto', 'dbt_change_type') 71 | | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') 72 | | rejectattr('name', 'equalto', 'dbt_unique_key') 73 | | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') 74 | | list %} 75 | 76 | {% set quoted_source_columns = [] %} 77 | {% for column in source_columns %} 78 | {% do quoted_source_columns.append(adapter.quote(column.name)) %} 79 | {% endfor %} 80 | 81 | {% set final_sql = snapshot_merge_sql( 82 | target = target_relation, 83 | source = staging_table, 84 | insert_cols = quoted_source_columns 85 | ) 86 | %} 87 | 88 | {% endif %} 89 | 90 | {% call statement('main') %} 91 | {{ final_sql }} 92 | {% endcall %} 93 | 94 | {{ adapter.drop_relation(temp_snapshot_relation) }} 95 | 96 | {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %} 97 | {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} 98 | 99 | {% do persist_docs(target_relation, model) %} 100 | 101 | {% if not target_relation_exists %} 102 | {% do create_indexes(target_relation) %} 103 | {% endif %} 104 | 105 | {{ run_hooks(post_hooks, inside_transaction=True) }} 106 | 107 | {{ adapter.commit() }} 108 | 109 | {% if staging_table is defined %} 110 | {% do post_snapshot(staging_table) %} 111 | {% endif %} 112 | 113 | {{ run_hooks(post_hooks, inside_transaction=False) }} 114 | 115 | {{ return({'relations': [target_relation]}) }} 116 | 117 | {% endmaterialization %} 118 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/snapshot/snapshot_merge.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__snapshot_merge_sql(target, source, insert_cols) -%} 2 | {%- set insert_cols_csv = insert_cols | join(', ') -%} 3 | 4 | merge into {{ target.render() }} as DBT_INTERNAL_DEST 5 | using {{ source }} as DBT_INTERNAL_SOURCE 6 | on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id 7 | 8 | when matched 9 | and DBT_INTERNAL_DEST.dbt_valid_to is null 10 | and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') 11 | then update 12 | set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to 13 | 14 | when not matched 15 | and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert' 16 | then insert ({{ insert_cols_csv }}) 17 | values ({{ insert_cols_csv }}) 18 | ; 19 | {% endmacro %} 20 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/tests.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} 2 | 3 | -- Create target schema if it does not 4 | USE [{{ target.database }}]; 5 | IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ target.schema }}') 6 | BEGIN 7 | EXEC('CREATE SCHEMA [{{ target.schema }}]') 8 | END 9 | 10 | {% set testview %} 11 | [{{ target.schema }}].[testview_{{ local_md5(main_sql) }}_{{ range(1300, 19000) | random }}] 12 | {% endset %} 13 | 14 | {% set sql = main_sql.replace("'", "''")%} 15 | EXEC('create view {{testview}} as {{ sql }};') 16 | select 17 | {{ "top (" ~ limit ~ ')' if limit != none }} 18 | {{ fail_calc }} as failures, 19 | case when {{ fail_calc }} {{ warn_if }} 20 | then 'true' else 'false' end as should_warn, 21 | case when {{ fail_calc }} {{ error_if }} 22 | then 'true' else 'false' end as should_error 23 | from ( 24 | select * from {{testview}} 25 | ) 
dbt_internal_test; 26 | 27 | EXEC('drop view {{testview}};') 28 | 29 | {%- endmacro %} 30 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/materializations/unit_tests.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%} 2 | 3 | USE [{{ target.database }}]; 4 | IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{{ target.schema }}') 5 | BEGIN 6 | EXEC('CREATE SCHEMA [{{ target.schema }}]') 7 | END 8 | 9 | {% set test_view %} 10 | [{{ target.schema }}].[testview_{{ local_md5(main_sql) }}_{{ range(1300, 19000) | random }}] 11 | {% endset %} 12 | {% set test_sql = main_sql.replace("'", "''")%} 13 | EXEC('create view {{test_view}} as {{ test_sql }};') 14 | 15 | {% set expected_view %} 16 | [{{ target.schema }}].[expectedview_{{ local_md5(expected_fixture_sql) }}_{{ range(1300, 19000) | random }}] 17 | {% endset %} 18 | {% set expected_sql = expected_fixture_sql.replace("'", "''")%} 19 | EXEC('create view {{expected_view}} as {{ expected_sql }};') 20 | 21 | -- Build actual result given inputs 22 | {% set unittest_sql %} 23 | with dbt_internal_unit_test_actual as ( 24 | select 25 | {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal("actual") }} as {{ adapter.quote("actual_or_expected") }} 26 | from 27 | {{ test_view }} 28 | ), 29 | -- Build expected result 30 | dbt_internal_unit_test_expected as ( 31 | select 32 | {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal("expected") }} as {{ adapter.quote("actual_or_expected") }} 33 | from 34 | {{ expected_view }} 35 | ) 36 | -- Union actual and expected results 37 | select * from dbt_internal_unit_test_actual 38 | union all 39 | select * from dbt_internal_unit_test_expected 40 | {% endset %} 41 | 42 | EXEC('{{- escape_single_quotes(unittest_sql) -}}') 43 | 44 | EXEC('drop view {{test_view}};') 45 | EXEC('drop view {{expected_view}};') 46 | 47 | {%- endmacro %} 48 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/readme.md: -------------------------------------------------------------------------------- 1 | # Alterations from Fabric 2 | 3 | ## `materialization incremental` 4 | 5 | This is reset to the original logic from the global project. 6 | 7 | ## `materialization view` 8 | 9 | This is reset to the original logic from the global project. 10 | 11 | ## `materialization table` 12 | 13 | This resets to the original logic from the global project. 14 | 15 | ## `sqlserver__create_columns` 16 | 17 | SQLServer supports ALTER; this updates the logic to apply alter instead of the drop/recreate 18 | 19 | ## `sqlserver__alter_column_type` 20 | 21 | SQLServer supports ALTER; this updates the logic to apply alter instead of the drop/recreate 22 | 23 | 24 | ## `sqlserver__can_clone_table` 25 | 26 | SQLServer cannot clone, so this just returns False 27 | 28 | ## `sqlserver__create_table_as` 29 | 30 | Logic is slightly re-written from the original. 31 | There is an underlying issue with the structure in that it is embedded in EXEC calls. 32 | 33 | This creates an issue where temporary tables cannot be used, as they don't exist within the context of the EXEC call.
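For illustration, a minimal T-SQL sketch of that limitation (the object names are made up for the example, not what dbt generates):

```sql
-- A local temp table created inside EXEC lives only for that inner batch...
EXEC('select 1 as col into #dbt_tmp;');
select * from #dbt_tmp;  -- ...so this fails with "Invalid object name '#dbt_tmp'"

-- whereas the intermediate __dbt_tmp_vw view created by sqlserver__create_table_as
-- survives the EXEC boundary and can feed the final SELECT ... INTO:
EXEC('create view dbo.my_model__dbt_tmp_vw as select 1 as col;');
EXEC('select * into dbo.my_model from dbo.my_model__dbt_tmp_vw;');
```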
34 | 35 | One workaround might be to issue the create table from a `{{ run_query }}` statement in order to have it accessible outside the EXEC context. 36 | 37 | Additionally, the expected `{% do adapter.drop_relation(tmp_relation) %}` does not fire (possibly a cache issue). 38 | Resolved by calling `DROP VIEW IF EXISTS` on the relation. 39 | 40 | ## `sqlserver__create_view_as` 41 | 42 | Updated to remove the `create_view_as_exec` call. 43 | 44 | ## `listagg` 45 | 46 | dbt expects a limit argument, but the SQL Server syntax does not support it. Fabric also does not implement this properly. 47 | 48 | ## `sqlserver__snapshot_merge_sql` 49 | 50 | Restores the merge statement logic to match dbt core. Merge will probably be slower than the existing logic. 51 | 52 | ## unit tests 53 | 54 | To accommodate the nested CTE situation, we create a temp view each for the actual and expected results and use both in the test. 55 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/relations/seeds/helpers.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__get_binding_char() %} 2 | {{ return('?') }} 3 | {% endmacro %} 4 | 5 | {% macro sqlserver__get_batch_size() %} 6 | {{ return(400) }} 7 | {% endmacro %} 8 | 9 | {% macro calc_batch_size(num_columns) %} 10 | {# 11 | SQL Server allows for a max of 2098 parameters in a single statement. 12 | Check if the max_batch_size fits with the number of columns, otherwise 13 | reduce the batch size so it fits. 14 | #} 15 | {% set max_batch_size = get_batch_size() %} 16 | {% set calculated_batch = (2098 / num_columns)|int %} 17 | {% set batch_size = [max_batch_size, calculated_batch] | min %} 18 | 19 | {{ return(batch_size) }} 20 | {% endmacro %} 21 | 22 | {% macro sqlserver__load_csv_rows(model, agate_table) %} 23 | {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} 24 | {% set batch_size = calc_batch_size(agate_table.column_names|length) %} 25 | {% set bindings = [] %} 26 | {% set statements = [] %} 27 | 28 | {{ log("Inserting batches of " ~ batch_size ~ " records") }} 29 | 30 | {% for chunk in agate_table.rows | batch(batch_size) %} 31 | {% set bindings = [] %} 32 | 33 | {% for row in chunk %} 34 | {% do bindings.extend(row) %} 35 | {% endfor %} 36 | 37 | {% set sql %} 38 | insert into {{ this.render() }} ({{ cols_sql }}) values 39 | {% for row in chunk -%} 40 | ({%- for column in agate_table.column_names -%} 41 | {{ get_binding_char() }} 42 | {%- if not loop.last%},{%- endif %} 43 | {%- endfor -%}) 44 | {%- if not loop.last%},{%- endif %} 45 | {%- endfor %} 46 | {% endset %} 47 | 48 | {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} 49 | 50 | {% if loop.index0 == 0 %} 51 | {% do statements.append(sql) %} 52 | {% endif %} 53 | {% endfor %} 54 | 55 | {# Return SQL so we can render it out into the compiled files #} 56 | {{ return(statements[0]) }} 57 | {% endmacro %} 58 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/relations/table/clone.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__can_clone_table() %} 2 | {{ return(False) }} 3 | {% endmacro %} 4 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/relations/table/create.sql: -------------------------------------------------------------------------------- 1 | {% macro
sqlserver__create_table_as(temporary, relation, sql) -%} 2 | {%- set query_label = apply_label() -%} 3 | {%- set tmp_relation = relation.incorporate(path={"identifier": relation.identifier ~ '__dbt_tmp_vw'}, type='view') -%} 4 | 5 | {%- do adapter.drop_relation(tmp_relation) -%} 6 | USE [{{ relation.database }}]; 7 | {{ get_create_view_as_sql(tmp_relation, sql) }} 8 | 9 | {%- set table_name -%} 10 | {{ relation }} 11 | {%- endset -%} 12 | 13 | 14 | {%- set contract_config = config.get('contract') -%} 15 | {%- set query -%} 16 | {% if contract_config.enforced and (not temporary) %} 17 | CREATE TABLE {{table_name}} 18 | {{ get_assert_columns_equivalent(sql) }} 19 | {{ build_columns_constraints(relation) }} 20 | {% set listColumns %} 21 | {% for column in model['columns'] %} 22 | {{ "["~column~"]" }}{{ ", " if not loop.last }} 23 | {% endfor %} 24 | {%endset%} 25 | INSERT INTO {{relation}} ({{listColumns}}) 26 | SELECT {{listColumns}} FROM {{tmp_relation}} {{ query_label }} 27 | 28 | {% else %} 29 | SELECT * INTO {{ table_name }} FROM {{ tmp_relation }} {{ query_label }} 30 | {% endif %} 31 | {%- endset -%} 32 | 33 | EXEC('{{- escape_single_quotes(query) -}}') 34 | 35 | {# For some reason drop_relation is not firing. This solves the issue for now. #} 36 | EXEC('DROP VIEW IF EXISTS {{tmp_relation.schema}}.{{tmp_relation.identifier}}') 37 | 38 | 39 | 40 | {% set as_columnstore = config.get('as_columnstore', default=true) %} 41 | {% if not temporary and as_columnstore -%} 42 | {#- 43 | add columnstore index 44 | this creates with dbt_temp as its coming from a temporary relation before renaming 45 | could alter relation to drop the dbt_temp portion if needed 46 | -#} 47 | {{ sqlserver__create_clustered_columnstore_index(relation) }} 48 | {% endif %} 49 | 50 | {% endmacro %} 51 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/relations/views/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-msft/dbt-sqlserver/bcf4ac910528c9bc681df5fd487818a19094838a/dbt/include/sqlserver/macros/relations/views/.gitkeep -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/relations/views/create.sql: -------------------------------------------------------------------------------- 1 | {% macro sqlserver__create_view_as(relation, sql) -%} 2 | 3 | {{ get_use_database_sql(relation.database) }} 4 | {% set contract_config = config.get('contract') %} 5 | {% if contract_config.enforced %} 6 | {{ get_assert_columns_equivalent(sql) }} 7 | {%- endif %} 8 | 9 | {% set query %} 10 | create view {{ relation.include(database=False) }} as {{ sql }}; 11 | {% endset %} 12 | 13 | {% set tst %} 14 | SELECT '1' as col 15 | {% endset %} 16 | USE [{{ relation.database }}]; 17 | EXEC('{{- escape_single_quotes(query) -}}') 18 | 19 | {% endmacro %} 20 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/utils/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbt-msft/dbt-sqlserver/bcf4ac910528c9bc681df5fd487818a19094838a/dbt/include/sqlserver/macros/utils/.gitkeep -------------------------------------------------------------------------------- /dbt/include/sqlserver/macros/utils/split_part.sql: -------------------------------------------------------------------------------- 1 | {# 2 | For more information on how 
this XML trick works with splitting strings, see https://www.sqlservertips.com/sqlservertip/1771/splitting-delimited-strings-using-xml-in-sql-server/ 3 | On Azure SQL and SQL Server 2019, we can use the string_split function instead of the XML trick. 4 | But since we don't know which version of SQL Server the user is using, we'll stick with the XML trick in this adapter. 5 | However, since the XML data type is not supported in Synapse, it has to be overridden in that adapter. 6 | 7 | To adjust for negative part numbers, aka 'from the end of the split', we take the position and subtract from last to get the specific part. 8 | Since the input is '-1' for the last, '-2' for second last, we add 1 to the part number to get the correct position. 9 | #} 10 | 11 | {% macro sqlserver__split_part(string_text, delimiter_text, part_number) %} 12 | {% if part_number >= 0 %} 13 | LTRIM(CAST(('<X>'+REPLACE({{ string_text }},{{ delimiter_text }} ,'</X><X>')+'</X>') AS XML).value('(/X)[{{ part_number }}]', 'VARCHAR(128)')) 14 | {% else %} 15 | LTRIM(CAST(('<X>'+REPLACE({{ string_text }},{{ delimiter_text }} ,'</X><X>')+'</X>') AS XML).value('(/X)[position() = last(){{ part_number }}+1][1]', 'VARCHAR(128)')) 16 | {% endif %} 17 | {% endmacro %} 18 | -------------------------------------------------------------------------------- /dbt/include/sqlserver/profile_template.yml: -------------------------------------------------------------------------------- 1 | fixed: 2 | type: sqlserver 3 | prompts: 4 | host: 5 | hint: "your host name" 6 | port: 7 | default: 1433 8 | type: "int" 9 | user: 10 | hint: "dev username" 11 | password: 12 | hint: "dev password" 13 | hide_input: true 14 | database: 15 | hint: "default database" 16 | threads: 17 | hint: "1 or more" 18 | type: "int" 19 | default: 1 20 | -------------------------------------------------------------------------------- /dev_requirements.txt: -------------------------------------------------------------------------------- 1 | 2 | dbt-tests-adapter>=1.9.0,<2.0 3 | 4 | ruff 5 | black==24.8.0 6 | bumpversion 7 | flake8 8 | flaky 9 | freezegun==1.4.0 10 | ipdb 11 | mypy==1.11.2 12 | pip-tools 13 | pre-commit 14 | pytest 15 | pytest-dotenv 16 | pytest-logbook 17 | pytest-csv 18 | pytest-xdist 19 | pytz 20 | tox>=3.13 21 | twine 22 | wheel 23 | -e .
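Before moving on to the CI images, a quick worked example of the `sqlserver__split_part` XML trick described above; the literal inputs are only illustrative, since the macro is normally rendered against column expressions:

```sql
-- part_number = 2: 'a,b,c' becomes '<X>a</X><X>b</X><X>c</X>' and the second X element is 'b'
select LTRIM(CAST(('<X>' + REPLACE('a,b,c', ',', '</X><X>') + '</X>') AS XML).value('(/X)[2]', 'VARCHAR(128)')) as part_2;

-- part_number = -1: the predicate renders as position() = last()-1+1, i.e. the final element, 'c'
select LTRIM(CAST(('<X>' + REPLACE('a,b,c', ',', '</X><X>') + '</X>') AS XML).value('(/X)[position() = last()-1+1][1]', 'VARCHAR(128)')) as last_part;
```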
24 | -------------------------------------------------------------------------------- /devops/CI.Dockerfile: -------------------------------------------------------------------------------- 1 | ARG PYTHON_VERSION="3.10" 2 | FROM python:${PYTHON_VERSION}-bullseye as base 3 | 4 | # Setup dependencies for pyodbc 5 | RUN apt-get update && \ 6 | apt-get install -y --no-install-recommends \ 7 | apt-transport-https \ 8 | curl \ 9 | gnupg2 \ 10 | unixodbc-dev \ 11 | lsb-release && \ 12 | apt-get autoremove -yqq --purge && \ 13 | apt-get clean && \ 14 | rm -rf /var/lib/apt/lists/* 15 | 16 | # enable Microsoft package repo 17 | RUN curl -sL https://packages.microsoft.com/keys/microsoft.asc | apt-key add - 18 | RUN curl -sL https://packages.microsoft.com/config/debian/$(lsb_release -sr)/prod.list | tee /etc/apt/sources.list.d/msprod.list 19 | # enable Azure CLI package repo 20 | RUN echo "deb [arch=amd64] https://packages.microsoft.com/repos/azure-cli/ $(lsb_release -cs) main" | tee /etc/apt/sources.list.d/azure-cli.list 21 | 22 | # install Azure CLI 23 | ENV ACCEPT_EULA=Y 24 | RUN apt-get update && \ 25 | apt-get install -y --no-install-recommends \ 26 | azure-cli && \ 27 | apt-get autoremove -yqq --purge && \ 28 | apt-get clean && \ 29 | rm -rf /var/lib/apt/lists/* 30 | 31 | FROM base as msodbc17 32 | 33 | # install ODBC driver 17 34 | ENV ACCEPT_EULA=Y 35 | RUN apt-get update && \ 36 | apt-get install -y --no-install-recommends \ 37 | msodbcsql17 \ 38 | mssql-tools && \ 39 | apt-get autoremove -yqq --purge && \ 40 | apt-get clean && \ 41 | rm -rf /var/lib/apt/lists/* 42 | 43 | # add sqlcmd to the path 44 | ENV PATH="$PATH:/opt/mssql-tools/bin" 45 | 46 | FROM base as msodbc18 47 | 48 | # install ODBC driver 18 49 | ENV ACCEPT_EULA=Y 50 | RUN apt-get update && \ 51 | apt-get install -y --no-install-recommends \ 52 | msodbcsql18 \ 53 | mssql-tools18 && \ 54 | apt-get autoremove -yqq --purge && \ 55 | apt-get clean && \ 56 | rm -rf /var/lib/apt/lists/* 57 | 58 | # add sqlcmd to the path 59 | ENV PATH="$PATH:/opt/mssql-tools18/bin" 60 | -------------------------------------------------------------------------------- /devops/scripts/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | /opt/init_scripts/init_db.sh & /opt/mssql/bin/sqlservr 4 | -------------------------------------------------------------------------------- /devops/scripts/init.sql: -------------------------------------------------------------------------------- 1 | IF NOT EXISTS(SELECT * FROM sys.database_principals WHERE name = '$(DBT_TEST_USER_1)') 2 | CREATE USER [$(DBT_TEST_USER_1)] WITHOUT LOGIN; 3 | 4 | IF NOT EXISTS(SELECT * FROM sys.database_principals WHERE name = '$(DBT_TEST_USER_2)') 5 | CREATE USER [$(DBT_TEST_USER_2)] WITHOUT LOGIN; 6 | 7 | IF NOT EXISTS(SELECT * FROM sys.database_principals WHERE name = '$(DBT_TEST_USER_3)') 8 | CREATE USER [$(DBT_TEST_USER_3)] WITHOUT LOGIN; 9 | -------------------------------------------------------------------------------- /devops/scripts/init_db.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if [ -d "/opt/mssql-tools18" ]; then 4 | cp -r /opt/mssql-tools18 /opt/mssql-tools 5 | fi 6 | 7 | for i in {1..50}; 8 | do 9 | /opt/mssql-tools/bin/sqlcmd -C -S localhost -U sa -P "${SA_PASSWORD}" -d master -I -Q "CREATE DATABASE TestDB COLLATE ${COLLATION}" 10 | if [ $? 
-eq 0 ] 11 | then 12 | echo "database creation completed" 13 | break 14 | else 15 | echo "creating database..." 16 | sleep 1 17 | fi 18 | done 19 | 20 | for i in {1..50}; 21 | do 22 | /opt/mssql-tools/bin/sqlcmd -C -S localhost -U sa -P "${SA_PASSWORD}" -d TestDB -I -i init.sql 23 | if [ $? -eq 0 ] 24 | then 25 | echo "user creation completed" 26 | break 27 | else 28 | echo "configuring users..." 29 | sleep 1 30 | fi 31 | done 32 | -------------------------------------------------------------------------------- /devops/scripts/wakeup_azure.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import time 5 | 6 | import pyodbc 7 | 8 | 9 | def resume_azsql(): 10 | sql_server_name = os.getenv("DBT_AZURESQL_SERVER") 11 | sql_server_port = 1433 12 | database_name = os.getenv("DBT_AZURESQL_DB") 13 | username = os.getenv("DBT_AZURESQL_UID") 14 | password = os.getenv("DBT_AZURESQL_PWD") 15 | driver = f"ODBC Driver {os.getenv('MSODBC_VERSION')} for SQL Server" 16 | 17 | con_str = [ 18 | f"DRIVER={{{driver}}}", 19 | f"SERVER={sql_server_name},{sql_server_port}", 20 | f"Database={database_name}", 21 | "Encrypt=Yes", 22 | f"UID={{{username}}}", 23 | f"PWD={{{password}}}", 24 | ] 25 | 26 | con_str_concat = ";".join(con_str) 27 | print("Connecting with the following connection string:") 28 | print(con_str_concat.replace(password, "***")) 29 | 30 | connected = False 31 | attempts = 0 32 | while not connected and attempts < 20: 33 | try: 34 | attempts += 1 35 | handle = pyodbc.connect(con_str_concat, autocommit=True) 36 | cursor = handle.cursor() 37 | cursor.execute("SELECT 1") 38 | connected = True 39 | except pyodbc.Error as e: 40 | print("Failed to connect to SQL Server. Retrying...") 41 | print(e) 42 | time.sleep(10) 43 | 44 | 45 | def main(): 46 | resume_azsql() 47 | 48 | 49 | if __name__ == "__main__": 50 | main() 51 | -------------------------------------------------------------------------------- /devops/server.Dockerfile: -------------------------------------------------------------------------------- 1 | ARG SQLServer_VERSION="2022" 2 | FROM mcr.microsoft.com/mssql/server:${SQLServer_VERSION}-latest 3 | 4 | ENV COLLATION="SQL_Latin1_General_CP1_CI_AS" 5 | 6 | USER root 7 | 8 | RUN mkdir -p /opt/init_scripts 9 | WORKDIR /opt/init_scripts 10 | COPY scripts/* /opt/init_scripts/ 11 | 12 | RUN chmod +x /opt/init_scripts/*.sh 13 | 14 | ENTRYPOINT /bin/bash ./entrypoint.sh 15 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | sqlserver: 3 | build: 4 | context: devops 5 | dockerfile: server.Dockerfile 6 | args: 7 | MSSQL_VERSION: "2022" 8 | environment: 9 | SA_PASSWORD: "L0calTesting!" 
10 | ACCEPT_EULA: "Y" 11 | COLLATION: "SQL_Latin1_General_CP1_CS_AS" 12 | env_file: 13 | - test.env 14 | ports: 15 | - "1433:1433" 16 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | filterwarnings = 3 | ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning 4 | ignore:unclosed file .*:ResourceWarning 5 | env_files = 6 | test.env 7 | testpaths = 8 | tests/unit 9 | tests/functional 10 | markers = 11 | skip_profile 12 | only_with_profile 13 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import re 4 | import sys 5 | 6 | from setuptools import find_namespace_packages, setup 7 | from setuptools.command.install import install 8 | 9 | package_name = "dbt-sqlserver" 10 | authors_list = ["Mikael Ene", "Anders Swanson", "Sam Debruyn", "Cor Zuurmond", "Cody Scott"] 11 | dbt_version = "1.9" 12 | description = """A Microsoft SQL Server adapter plugin for dbt""" 13 | 14 | this_directory = os.path.abspath(os.path.dirname(__file__)) 15 | with open(os.path.join(this_directory, "README.md")) as f: 16 | long_description = f.read() 17 | 18 | 19 | # get this from a separate file 20 | def _dbt_sqlserver_version(): 21 | _version_path = os.path.join(this_directory, "dbt", "adapters", "sqlserver", "__version__.py") 22 | _version_pattern = r"""version\s*=\s*["'](.+)["']""" 23 | with open(_version_path) as f: 24 | match = re.search(_version_pattern, f.read().strip()) 25 | if match is None: 26 | raise ValueError(f"invalid version at {_version_path}") 27 | return match.group(1) 28 | 29 | 30 | package_version = _dbt_sqlserver_version() 31 | 32 | # the package version should be the dbt version, with maybe some things on the 33 | # ends of it. (0.18.1 vs 0.18.1a1, 0.18.1.1, ...) 
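# Note (illustrative): _dbt_sqlserver_version() above expects __version__.py to contain a
# line such as `version = "1.9.0"`. The guard below then requires that value to start with
# dbt_version ("1.9"), so versions like "1.9.0" or "1.9.1rc1" pass while "2.0.0" would
# raise the ValueError.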
34 | if not package_version.startswith(dbt_version): 35 | raise ValueError( 36 | f"Invalid setup.py: package_version={package_version} must start with " 37 | f"dbt_version={dbt_version}" 38 | ) 39 | 40 | 41 | class VerifyVersionCommand(install): 42 | """Custom command to verify that the git tag matches our version""" 43 | 44 | description = "Verify that the git tag matches our version" 45 | 46 | def run(self): 47 | tag = os.getenv("GITHUB_REF_NAME") 48 | tag_without_prefix = tag[1:] 49 | 50 | if tag_without_prefix != package_version: 51 | info = "Git tag: {0} does not match the version of this app: {1}".format( 52 | tag_without_prefix, package_version 53 | ) 54 | sys.exit(info) 55 | 56 | 57 | setup( 58 | name=package_name, 59 | version=package_version, 60 | description=description, 61 | long_description=long_description, 62 | long_description_content_type="text/markdown", 63 | license="MIT", 64 | author=", ".join(authors_list), 65 | url="https://github.com/dbt-msft/dbt-sqlserver", 66 | packages=find_namespace_packages(include=["dbt", "dbt.*"]), 67 | include_package_data=True, 68 | install_requires=[ 69 | "dbt-fabric==1.9.3", 70 | "dbt-core>=1.9.0,<2.0", 71 | "dbt-common>=1.0,<2.0", 72 | "dbt-adapters>=1.11.0,<2.0", 73 | ], 74 | cmdclass={ 75 | "verify": VerifyVersionCommand, 76 | }, 77 | classifiers=[ 78 | "Development Status :: 5 - Production/Stable", 79 | "License :: OSI Approved :: MIT License", 80 | "Operating System :: Microsoft :: Windows", 81 | "Operating System :: MacOS :: MacOS X", 82 | "Operating System :: POSIX :: Linux", 83 | "Programming Language :: Python :: 3.7", 84 | "Programming Language :: Python :: 3.8", 85 | "Programming Language :: Python :: 3.9", 86 | "Programming Language :: Python :: 3.10", 87 | "Programming Language :: Python :: 3.11", 88 | "Programming Language :: Python :: 3.12", 89 | ], 90 | project_urls={ 91 | "Setup & configuration": "https://docs.getdbt.com/reference/warehouse-profiles/mssql-profile", # noqa: E501 92 | "Documentation & usage": "https://docs.getdbt.com/reference/resource-configs/mssql-configs", # noqa: E501 93 | "Changelog": "https://github.com/dbt-msft/dbt-sqlserver/blob/master/CHANGELOG.md", # noqa: E501 94 | "Issue Tracker": "https://github.com/dbt-msft/dbt-sqlserver/issues", # noqa: E501 95 | }, 96 | ) 97 | -------------------------------------------------------------------------------- /test.env.sample: -------------------------------------------------------------------------------- 1 | SQLSERVER_TEST_DRIVER=ODBC Driver 18 for SQL Server 2 | SQLSERVER_TEST_HOST=127.0.0.1 3 | SQLSERVER_TEST_USER=SA 4 | SQLSERVER_TEST_PASS=L0calTesting! 
5 | SQLSERVER_TEST_PORT=1433 6 | SQLSERVER_TEST_DBNAME=TestDB 7 | SQLSERVER_TEST_ENCRYPT=True 8 | SQLSERVER_TEST_TRUST_CERT=True 9 | DBT_TEST_USER_1=DBT_TEST_USER_1 10 | DBT_TEST_USER_2=DBT_TEST_USER_2 11 | DBT_TEST_USER_3=DBT_TEST_USER_3 12 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from azure.identity import AzureCliCredential 3 | 4 | from dbt.adapters.sqlserver.sqlserver_connections import ( # byte_array_to_datetime, 5 | bool_to_connection_string_arg, 6 | get_pyodbc_attrs_before_credentials, 7 | ) 8 | from dbt.adapters.sqlserver.sqlserver_credentials import SQLServerCredentials 9 | 10 | # See 11 | # https://github.com/Azure/azure-sdk-for-python/blob/azure-identity_1.5.0/sdk/identity/azure-identity/tests/test_cli_credential.py 12 | CHECK_OUTPUT = AzureCliCredential.__module__ + ".subprocess.check_output" 13 | 14 | 15 | @pytest.fixture 16 | def credentials() -> SQLServerCredentials: 17 | credentials = SQLServerCredentials( 18 | driver="ODBC Driver 18 for SQL Server", 19 | host="fake.sql.sqlserver.net", 20 | database="dbt", 21 | schema="sqlserver", 22 | ) 23 | return credentials 24 | 25 | 26 | def test_get_pyodbc_attrs_before_empty_dict_when_service_principal( 27 | credentials: SQLServerCredentials, 28 | ) -> None: 29 | """ 30 | When the authentication is set to sql we expect an empty attrs before. 31 | """ 32 | attrs_before = get_pyodbc_attrs_before_credentials(credentials) 33 | assert attrs_before == {} 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "key, value, expected", 38 | [("somekey", False, "somekey=No"), ("somekey", True, "somekey=Yes")], 39 | ) 40 | def test_bool_to_connection_string_arg(key: str, value: bool, expected: str) -> None: 41 | assert bool_to_connection_string_arg(key, value) == expected 42 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import pytest 4 | from _pytest.fixtures import FixtureRequest 5 | 6 | pytest_plugins = ["dbt.tests.fixtures.project"] 7 | 8 | 9 | def pytest_addoption(parser): 10 | parser.addoption( 11 | "--profile", action="store", default=os.getenv("PROFILE_NAME", "user"), type=str 12 | ) 13 | 14 | 15 | @pytest.fixture(scope="class") 16 | def dbt_profile_target(request: FixtureRequest, dbt_profile_target_update): 17 | profile = request.config.getoption("--profile") 18 | 19 | if profile == "ci_sql_server": 20 | target = _profile_ci_sql_server() 21 | elif profile == "ci_azure_cli": 22 | target = _profile_ci_azure_cli() 23 | elif profile == "ci_azure_auto": 24 | target = _profile_ci_azure_auto() 25 | elif profile == "ci_azure_environment": 26 | target = _profile_ci_azure_environment() 27 | elif profile == "ci_azure_basic": 28 | target = _profile_ci_azure_basic() 29 | elif profile == "user": 30 | target = _profile_user() 31 | elif profile == "user_azure": 32 | target = _profile_user_azure() 33 | else: 34 | raise ValueError(f"Unknown profile: {profile}") 35 | 36 | target.update(dbt_profile_target_update) 37 | return target 38 | 39 | 40 | @pytest.fixture(scope="class") 41 | def dbt_profile_target_update(): 42 | return {} 43 | 44 | 45 | @pytest.fixture(scope="class") 46 | def is_azure(request: FixtureRequest) -> bool: 47 | profile = request.config.getoption("--profile") 48 | return "azure" in profile 49 | 50 | 51 | def _all_profiles_base(): 
52 | return { 53 | "type": "sqlserver", 54 | "driver": os.getenv("SQLSERVER_TEST_DRIVER", "ODBC Driver 18 for SQL Server"), 55 | "port": int(os.getenv("SQLSERVER_TEST_PORT", "1433")), 56 | "retries": 2, 57 | } 58 | 59 | 60 | def _profile_ci_azure_base(): 61 | return { 62 | **_all_profiles_base(), 63 | **{ 64 | "host": os.getenv("DBT_AZURESQL_SERVER"), 65 | "database": os.getenv("DBT_AZURESQL_DB"), 66 | "encrypt": True, 67 | "trust_cert": True, 68 | }, 69 | } 70 | 71 | 72 | def _profile_ci_azure_basic(): 73 | return { 74 | **_profile_ci_azure_base(), 75 | **{ 76 | "user": os.getenv("DBT_AZURESQL_UID"), 77 | "pass": os.getenv("DBT_AZURESQL_PWD"), 78 | }, 79 | } 80 | 81 | 82 | def _profile_ci_azure_cli(): 83 | return { 84 | **_profile_ci_azure_base(), 85 | **{ 86 | "authentication": "CLI", 87 | }, 88 | } 89 | 90 | 91 | def _profile_ci_azure_auto(): 92 | return { 93 | **_profile_ci_azure_base(), 94 | **{ 95 | "authentication": "auto", 96 | }, 97 | } 98 | 99 | 100 | def _profile_ci_azure_environment(): 101 | return { 102 | **_profile_ci_azure_base(), 103 | **{ 104 | "authentication": "environment", 105 | }, 106 | } 107 | 108 | 109 | def _profile_ci_sql_server(): 110 | return { 111 | **_all_profiles_base(), 112 | **{ 113 | "host": "sqlserver", 114 | "user": "SA", 115 | "pass": "5atyaNadella", 116 | "database": "TestDB", 117 | "encrypt": True, 118 | "trust_cert": True, 119 | }, 120 | } 121 | 122 | 123 | def _profile_user(): 124 | profile = { 125 | **_all_profiles_base(), 126 | **{ 127 | "host": os.getenv("SQLSERVER_TEST_HOST"), 128 | "user": os.getenv("SQLSERVER_TEST_USER"), 129 | "pass": os.getenv("SQLSERVER_TEST_PASS"), 130 | "database": os.getenv("SQLSERVER_TEST_DBNAME"), 131 | "encrypt": bool(os.getenv("SQLSERVER_TEST_ENCRYPT", "False")), 132 | "trust_cert": bool(os.getenv("SQLSERVER_TEST_TRUST_CERT", "False")), 133 | }, 134 | } 135 | return profile 136 | 137 | 138 | def _profile_user_azure(): 139 | profile = { 140 | **_all_profiles_base(), 141 | **{ 142 | "host": os.getenv("SQLSERVER_TEST_HOST"), 143 | "authentication": os.getenv("SQLSERVER_TEST_AUTH", "auto"), 144 | "encrypt": True, 145 | "trust_cert": True, 146 | "database": os.getenv("SQLSERVER_TEST_DBNAME"), 147 | "client_id": os.getenv("SQLSERVER_TEST_CLIENT_ID"), 148 | "client_secret": os.getenv("SQLSERVER_TEST_CLIENT_SECRET"), 149 | "tenant_id": os.getenv("SQLSERVER_TEST_TENANT_ID"), 150 | }, 151 | } 152 | return profile 153 | 154 | 155 | @pytest.fixture(autouse=True) 156 | def skip_by_profile_type(request: FixtureRequest): 157 | profile_type = request.config.getoption("--profile") 158 | 159 | if request.node.get_closest_marker("skip_profile"): 160 | if profile_type in request.node.get_closest_marker("skip_profile").args: 161 | pytest.skip(f"Skipped on '{profile_type}' profile") 162 | 163 | if request.node.get_closest_marker("only_with_profile"): 164 | if profile_type not in request.node.get_closest_marker("only_with_profile").args: 165 | pytest.skip(f"Skipped on '{profile_type}' profile") 166 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_aliases.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.aliases import fixtures 3 | from dbt.tests.adapter.aliases.test_aliases import ( 4 | BaseAliasErrors, 5 | BaseAliases, 6 | BaseSameAliasDifferentDatabases, 7 | BaseSameAliasDifferentSchemas, 8 | ) 9 | 10 | # we override the default as the SQLServer adapter uses CAST instead of :: for type 
casting 11 | MACROS__CAST_SQL_SQLServer = """ 12 | 13 | 14 | {% macro string_literal(s) -%} 15 | {{ adapter.dispatch('string_literal', macro_namespace='test')(s) }} 16 | {%- endmacro %} 17 | 18 | {% macro default__string_literal(s) %} 19 | CAST('{{ s }}' AS VARCHAR(8000)) 20 | {% endmacro %} 21 | 22 | """ 23 | 24 | 25 | class TestAliases(BaseAliases): 26 | @pytest.fixture(scope="class") 27 | def macros(self): 28 | return { 29 | "cast.sql": MACROS__CAST_SQL_SQLServer, 30 | "expect_value.sql": fixtures.MACROS__EXPECT_VALUE_SQL, 31 | } 32 | 33 | 34 | class TestAliasesError(BaseAliasErrors): 35 | @pytest.fixture(scope="class") 36 | def macros(self): 37 | return { 38 | "cast.sql": MACROS__CAST_SQL_SQLServer, 39 | "expect_value.sql": fixtures.MACROS__EXPECT_VALUE_SQL, 40 | } 41 | 42 | 43 | class TestSameAliasDifferentSchemas(BaseSameAliasDifferentSchemas): 44 | @pytest.fixture(scope="class") 45 | def macros(self): 46 | return { 47 | "cast.sql": MACROS__CAST_SQL_SQLServer, 48 | "expect_value.sql": fixtures.MACROS__EXPECT_VALUE_SQL, 49 | } 50 | 51 | 52 | class TestSameAliasDifferentDatabases(BaseSameAliasDifferentDatabases): 53 | @pytest.fixture(scope="class") 54 | def macros(self): 55 | return { 56 | "cast.sql": MACROS__CAST_SQL_SQLServer, 57 | "expect_value.sql": fixtures.MACROS__EXPECT_VALUE_SQL, 58 | } 59 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_basic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod 3 | from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations 4 | from dbt.tests.adapter.basic.test_empty import BaseEmpty 5 | from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral 6 | from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests 7 | from dbt.tests.adapter.basic.test_incremental import BaseIncremental 8 | from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests 9 | from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral 10 | from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols 11 | from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp 12 | 13 | 14 | class TestSimpleMaterializations(BaseSimpleMaterializations): 15 | pass 16 | 17 | 18 | class TestSingularTests(BaseSingularTests): 19 | pass 20 | 21 | 22 | @pytest.mark.skip(reason="SQLServer doesn't support nested CTE") 23 | class TestSingularTestsEphemeral(BaseSingularTestsEphemeral): 24 | pass 25 | 26 | 27 | class TestEmpty(BaseEmpty): 28 | pass 29 | 30 | 31 | @pytest.mark.skip(reason="SQLServer doesn't support nested CTE") 32 | class TestEphemeral(BaseEphemeral): 33 | pass 34 | 35 | 36 | class TestIncremental(BaseIncremental): 37 | pass 38 | 39 | 40 | class TestGenericTests(BaseGenericTests): 41 | pass 42 | 43 | 44 | class TestSnapshotCheckCols(BaseSnapshotCheckCols): 45 | pass 46 | 47 | 48 | class TestSnapshotTimestamp(BaseSnapshotTimestamp): 49 | pass 50 | 51 | 52 | class TestBaseAdapterMethod(BaseAdapterMethod): 53 | pass 54 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_caching.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.caching.test_caching import ( 3 | BaseCachingLowercaseModel, 4 | BaseCachingSelectedSchemaOnly, 5 | 
BaseCachingUppercaseModel, 6 | BaseNoPopulateCache, 7 | ) 8 | 9 | 10 | class TestCachingLowercaseModel(BaseCachingLowercaseModel): 11 | pass 12 | 13 | 14 | @pytest.mark.skip( 15 | reason=""" 16 | Fails because of case sensitivity. 17 | MODEL is coerced to model, which fails the test because it sees conflicting naming 18 | """ 19 | ) 20 | class TestCachingUppercaseModel(BaseCachingUppercaseModel): 21 | pass 22 | 23 | 24 | class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly): 25 | pass 26 | 27 | 28 | class TestNoPopulateCache(BaseNoPopulateCache): 29 | pass 30 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_catalog.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.artifacts.schemas.catalog import CatalogArtifact 3 | from dbt.tests.adapter.catalog import files 4 | from dbt.tests.adapter.catalog.relation_types import CatalogRelationTypes 5 | from dbt.tests.util import run_dbt 6 | 7 | 8 | class TestRelationTypes(CatalogRelationTypes): 9 | """ 10 | This is subclassed to remove the references to the materialized views, 11 | as SQLServer does not support them. 12 | 13 | Likely does not need to be subclassed since we implement everything, 14 | but prefer keeping it here for clarity. 15 | """ 16 | 17 | @pytest.fixture(scope="class", autouse=True) 18 | def seeds(self): 19 | return {"my_seed.csv": files.MY_SEED} 20 | 21 | @pytest.fixture(scope="class", autouse=True) 22 | def models(self): 23 | yield { 24 | "my_table.sql": files.MY_TABLE, 25 | "my_view.sql": files.MY_VIEW, 26 | # "my_materialized_view.sql": files.MY_MATERIALIZED_VIEW, 27 | } 28 | 29 | @pytest.fixture(scope="class", autouse=True) 30 | def docs(self, project): 31 | run_dbt(["seed"]) 32 | run_dbt(["run"]) 33 | yield run_dbt(["docs", "generate"]) 34 | 35 | @pytest.mark.parametrize( 36 | "node_name,relation_type", 37 | [ 38 | ("seed.test.my_seed", "BASE TABLE"), 39 | ("model.test.my_table", "BASE TABLE"), 40 | ("model.test.my_view", "VIEW"), 41 | # ("model.test.my_materialized_view", "MATERIALIZED VIEW"), 42 | ], 43 | ) 44 | def test_relation_types_populate_correctly( 45 | self, docs: CatalogArtifact, node_name: str, relation_type: str 46 | ): 47 | """ 48 | This test addresses: https://github.com/dbt-labs/dbt-core/issues/8864 49 | """ 50 | assert node_name in docs.nodes 51 | node = docs.nodes[node_name] 52 | assert node.metadata.type == relation_type 53 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_column_types.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes 3 | 4 | # flake8: noqa: E501 5 | 6 | macro_test_is_type_sql_2 = """ 7 | {% macro simple_type_check_column(column, check) %} 8 | {% if check == 'string' %} 9 | {{ return(column.is_string()) }} 10 | {% elif check == 'float' %} 11 | {{ return(column.is_float()) }} 12 | {% elif check == 'number' %} 13 | {{ return(column.is_number()) }} 14 | {% elif check == 'numeric' %} 15 | {{ return(column.is_numeric()) }} 16 | {% elif check == 'integer' %} 17 | {{ return(column.is_integer()) }} 18 | {% else %} 19 | {% do exceptions.raise_compiler_error('invalid type check value: ' ~ check) %} 20 | {% endif %} 21 | {% endmacro %} 22 | 23 | {% macro type_check_column(column, type_checks) %} 24 | {% set failures = [] %} 25 | {% for type_check in type_checks %}
26 | {% if type_check.startswith('not ') %} 27 | {% if simple_type_check_column(column, type_check[4:]) %} 28 | {% do log('simple_type_check_column got ', True) %} 29 | {% do failures.append(type_check) %} 30 | {% endif %} 31 | {% else %} 32 | {% if not simple_type_check_column(column, type_check) %} 33 | {% do failures.append(type_check) %} 34 | {% endif %} 35 | {% endif %} 36 | {% endfor %} 37 | {% if (failures | length) > 0 %} 38 | {% do log('column ' ~ column.name ~ ' had failures: ' ~ failures, info=True) %} 39 | {% endif %} 40 | {% do return((failures | length) == 0) %} 41 | {% endmacro %} 42 | 43 | {% test is_type(model, column_map) %} 44 | {% if not execute %} 45 | {{ return(None) }} 46 | {% endif %} 47 | {% if not column_map %} 48 | {% do exceptions.raise_compiler_error('test_is_type must have a column name') %} 49 | {% endif %} 50 | {% set columns = adapter.get_columns_in_relation(model) %} 51 | {% if (column_map | length) != (columns | length) %} 52 | {% set column_map_keys = (column_map | list | string) %} 53 | {% set column_names = (columns | map(attribute='name') | list | string) %} 54 | {% do exceptions.raise_compiler_error('did not get all the columns/all columns not specified:\n' ~ column_map_keys ~ '\nvs\n' ~ column_names) %} 55 | {% endif %} 56 | {% set bad_columns = [] %} 57 | {% for column in columns %} 58 | {% set column_key = (column.name | lower) %} 59 | {% if column_key in column_map %} 60 | {% set type_checks = column_map[column_key] %} 61 | {% if not type_checks %} 62 | {% do exceptions.raise_compiler_error('no type checks?') %} 63 | {% endif %} 64 | {% if not type_check_column(column, type_checks) %} 65 | {% do bad_columns.append(column.name) %} 66 | {% endif %} 67 | {% else %} 68 | {% do exceptions.raise_compiler_error( 69 | 'column key ' ~ column_key ~ ' not found in ' ~ (column_map | list | string)) %} 70 | {% endif %} 71 | {% endfor %} 72 | {% do log('bad columns: ' ~ bad_columns, info=True) %} 73 | {% for bad_column in bad_columns %} 74 | select '{{ bad_column }}' as bad_column 75 | {{ 'union all'}} 76 | --{{ 'union all' if not loop.last }} 77 | {% endfor %} 78 | select * from (select 1 as c where 1 = 0) as nothing 79 | {% endtest %} 80 | """ 81 | 82 | model_sql = """ 83 | select 84 | CAST(1 AS smallint) as smallint_col, 85 | CAST(2 AS integer) as int_col, 86 | CAST(3 AS bigint) as bigint_col, 87 | CAST(4.0 AS real) as real_col, 88 | CAST(5.0 AS float) as double_col, 89 | CAST(6.0 AS numeric) as numeric_col, 90 | CAST(7 AS varchar(20)) as text_col, 91 | CAST(8 AS varchar(20)) as varchar_col 92 | """ 93 | 94 | schema_yml = """ 95 | version: 2 96 | models: 97 | - name: model 98 | data_tests: 99 | - is_type: 100 | column_map: 101 | smallint_col: ['integer', 'number'] 102 | int_col: ['integer', 'number'] 103 | bigint_col: ['integer', 'number'] 104 | real_col: ['float', 'number'] 105 | double_col: ['float', 'number'] 106 | numeric_col: ['numeric', 'number'] 107 | text_col: ['string', 'not number'] 108 | varchar_col: ['string', 'not number'] 109 | """ # noqa 110 | 111 | 112 | class TestColumnTypes(BaseColumnTypes): 113 | @pytest.fixture(scope="class") 114 | def macros(self): 115 | return {"test_is_type.sql": macro_test_is_type_sql_2} 116 | 117 | @pytest.fixture(scope="class") 118 | def models(self): 119 | return {"model.sql": model_sql, "schema.yml": schema_yml} 120 | 121 | def test_run_and_test(self, project): 122 | self.run_and_test() 123 | -------------------------------------------------------------------------------- 
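The is_type checks above ultimately depend on the raw type names SQL Server reports back for each CAST in model_sql. A quick way to see those names outside of dbt is a small standalone probe; this is only an illustrative sketch (not part of the test suite), it assumes the same SQLSERVER_TEST_* variables used by the dev container and test.env, and the dbo.column_type_probe table name is made up for the example:

import os

import pyodbc  # the same ODBC layer the adapter itself connects through

conn_str = (
    "DRIVER={{{driver}}};SERVER={host},{port};DATABASE={db};"
    "UID={user};PWD={pwd};Encrypt=yes;TrustServerCertificate=yes"
).format(
    driver=os.getenv("SQLSERVER_TEST_DRIVER", "ODBC Driver 18 for SQL Server"),
    host=os.getenv("SQLSERVER_TEST_HOST", "127.0.0.1"),
    port=os.getenv("SQLSERVER_TEST_PORT", "1433"),
    db=os.getenv("SQLSERVER_TEST_DBNAME", "TestDB"),
    user=os.getenv("SQLSERVER_TEST_USER", "SA"),
    pwd=os.getenv("SQLSERVER_TEST_PASS", ""),
)

with pyodbc.connect(conn_str, autocommit=True) as conn:
    cursor = conn.cursor()
    # throwaway table mirroring a few of the casts from model_sql above
    # (DROP TABLE IF EXISTS needs SQL Server 2016+, which the docker-compose server satisfies)
    cursor.execute("DROP TABLE IF EXISTS dbo.column_type_probe;")
    cursor.execute(
        "SELECT CAST(1 AS smallint) AS smallint_col,"
        " CAST(4.0 AS real) AS real_col,"
        " CAST(7 AS varchar(20)) AS text_col"
        " INTO dbo.column_type_probe;"
    )
    # ask SQL Server what it actually recorded for each column
    cursor.execute(
        "SELECT COLUMN_NAME, DATA_TYPE"
        " FROM INFORMATION_SCHEMA.COLUMNS"
        " WHERE TABLE_SCHEMA = 'dbo' AND TABLE_NAME = 'column_type_probe'"
        " ORDER BY ORDINAL_POSITION;"
    )
    for column_name, data_type in cursor.fetchall():
        print(f"{column_name}: {data_type}")  # e.g. smallint_col: smallint
    cursor.execute("DROP TABLE IF EXISTS dbo.column_type_probe;")

The reported names (smallint, real, varchar, ...) are what the adapter's column classification presumably sees when deciding is_integer/is_float/is_string for the column_map in schema_yml above.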
/tests/functional/adapter/dbt/test_concurrency.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.concurrency.test_concurrency import BaseConcurrency 2 | 3 | 4 | class TestConcurrency(BaseConcurrency): 5 | pass 6 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_dbt_clone.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.dbt_clone.test_dbt_clone import BaseCloneNotPossible, BaseClonePossible 3 | 4 | 5 | @pytest.mark.skip(reason="SQLServer does not support cloning") 6 | class TestCloneNotPossible(BaseCloneNotPossible): 7 | pass 8 | 9 | 10 | @pytest.mark.skip(reason="SQLServer does not support cloning") 11 | class TestClonePossible(BaseClonePossible): 12 | pass 13 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_dbt_debug.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.dbt_debug.test_dbt_debug import ( 2 | BaseDebugInvalidProjectPostgres, 3 | BaseDebugPostgres, 4 | ) 5 | 6 | 7 | class TestDebugProfileVariable(BaseDebugPostgres): 8 | pass 9 | 10 | 11 | class TestDebugInvalidProject(BaseDebugInvalidProjectPostgres): 12 | pass 13 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_empty.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.empty._models import model_input_sql, schema_sources_yml 3 | 4 | # switch for 1.9 5 | # from dbt.tests.adapter.empty import _models 6 | from dbt.tests.adapter.empty.test_empty import ( # MetadataWithEmptyFlag 7 | BaseTestEmpty, 8 | BaseTestEmptyInlineSourceRef, 9 | ) 10 | from dbt.tests.util import run_dbt 11 | 12 | model_sql_sqlserver = """ 13 | select * 14 | from {{ ref('model_input') }} 15 | union all 16 | select * 17 | from {{ source('seed_sources', 'raw_source') }} 18 | """ 19 | 20 | model_inline_sql_sqlserver = """ 21 | select * from {{ source('seed_sources', 'raw_source') }} 22 | """ 23 | 24 | 25 | class TestEmpty(BaseTestEmpty): 26 | @pytest.fixture(scope="class") 27 | def models(self): 28 | return { 29 | "model_input.sql": model_input_sql, 30 | # # no support for ephemeral models in SQLServer 31 | # "ephemeral_model_input.sql": _models.ephemeral_model_input_sql, 32 | "model.sql": model_sql_sqlserver, 33 | "sources.yml": schema_sources_yml, 34 | } 35 | 36 | def test_run_with_empty(self, project): 37 | # create source from seed 38 | run_dbt(["seed"]) 39 | 40 | # run without empty - 3 expected rows in output - 1 from each input 41 | run_dbt(["run"]) 42 | self.assert_row_count(project, "model", 2) 43 | 44 | # run with empty - 0 expected rows in output 45 | run_dbt(["run", "--empty"]) 46 | self.assert_row_count(project, "model", 0) 47 | 48 | 49 | class TestemptyInlineSourceRef(BaseTestEmptyInlineSourceRef): 50 | @pytest.fixture(scope="class") 51 | def models(self): 52 | return { 53 | "model.sql": model_inline_sql_sqlserver, 54 | "sources.yml": schema_sources_yml, 55 | } 56 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_ephemeral.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.ephemeral.test_ephemeral import ( 3 | 
BaseEphemeralErrorHandling, 4 | BaseEphemeralMulti, 5 | BaseEphemeralNested, 6 | ) 7 | 8 | 9 | @pytest.mark.skip(reason="Ephemeral models are not supported in SQLServer") 10 | class TestEphemeral(BaseEphemeralMulti): 11 | pass 12 | 13 | 14 | @pytest.mark.skip(reason="Ephemeral models are not supported in SQLServer") 15 | class TestEphemeralNested(BaseEphemeralNested): 16 | pass 17 | 18 | 19 | @pytest.mark.skip(reason="Ephemeral models are not supported in SQLServer") 20 | class TestEphemeralErrorHandling(BaseEphemeralErrorHandling): 21 | pass 22 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_grants.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants 2 | from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants 3 | from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants 4 | from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants 5 | from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants 6 | 7 | 8 | class TestIncrementalGrants(BaseIncrementalGrants): 9 | pass 10 | 11 | 12 | class TestInvalidGrants(BaseInvalidGrants): 13 | def privilege_does_not_exist_error(self): 14 | return "Incorrect syntax near" 15 | 16 | 17 | class TestModelGrants(BaseModelGrants): 18 | pass 19 | 20 | 21 | class TestSeedGrants(BaseSeedGrants): 22 | pass 23 | 24 | 25 | class TestSnapshotGrants(BaseSnapshotGrants): 26 | pass 27 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_hooks.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.hooks import fixtures 3 | from dbt.tests.util import run_dbt 4 | 5 | seed_model_sql = """ 6 | drop table if exists {schema}.on_model_hook; 7 | 8 | create table {schema}.on_model_hook ( 9 | test_state VARCHAR(8000), -- start|end 10 | target_dbname VARCHAR(8000), 11 | target_host VARCHAR(8000), 12 | target_name VARCHAR(8000), 13 | target_schema VARCHAR(8000), 14 | target_type VARCHAR(8000), 15 | target_user VARCHAR(8000), 16 | target_pass VARCHAR(8000), 17 | target_threads INTEGER, 18 | run_started_at VARCHAR(8000), 19 | invocation_id VARCHAR(8000), 20 | thread_id VARCHAR(8000) 21 | ); 22 | """.strip() 23 | 24 | MODEL_PRE_HOOK = """ 25 | insert into {{this.schema}}.on_model_hook ( 26 | test_state, 27 | target_dbname, 28 | target_host, 29 | target_name, 30 | target_schema, 31 | target_type, 32 | target_user, 33 | target_pass, 34 | target_threads, 35 | run_started_at, 36 | invocation_id, 37 | thread_id 38 | ) VALUES ( 39 | 'start', 40 | '{{ target.database }}', 41 | '{{ target.server }}', 42 | '{{ target.name }}', 43 | '{{ target.schema }}', 44 | '{{ target.type }}', 45 | '{{ target.user }}', 46 | '{{ target.get("pass", "") }}', 47 | {{ target.threads }}, 48 | '{{ run_started_at }}', 49 | '{{ invocation_id }}', 50 | '{{ thread_id }}' 51 | ) 52 | """ 53 | 54 | MODEL_POST_HOOK = """ 55 | insert into {{this.schema}}.on_model_hook ( 56 | test_state, 57 | target_dbname, 58 | target_host, 59 | target_name, 60 | target_schema, 61 | target_type, 62 | target_user, 63 | target_pass, 64 | target_threads, 65 | run_started_at, 66 | invocation_id, 67 | thread_id 68 | ) VALUES ( 69 | 'end', 70 | '{{ target.database }}', 71 | '{{ target.server }}', 72 | '{{ target.name }}', 73 | '{{ target.schema }}', 74 | '{{
target.type }}', 75 | '{{ target.user }}', 76 | '{{ target.get("pass", "") }}', 77 | {{ target.threads }}, 78 | '{{ run_started_at }}', 79 | '{{ invocation_id }}', 80 | '{{ thread_id }}' 81 | ) 82 | """ 83 | 84 | 85 | class BaseTestPrePost: 86 | @pytest.fixture(scope="class", autouse=True) 87 | def setUp(self, project): 88 | project.run_sql(seed_model_sql) 89 | 90 | def get_ctx_vars(self, state, count, project): 91 | fields = [ 92 | "test_state", 93 | "target_dbname", 94 | "target_host", 95 | "target_name", 96 | "target_schema", 97 | "target_threads", 98 | "target_type", 99 | "target_user", 100 | "target_pass", 101 | "run_started_at", 102 | "invocation_id", 103 | "thread_id", 104 | ] 105 | field_list = ", ".join(['"{}"'.format(f) for f in fields]) 106 | query = f""" 107 | select 108 | {field_list} 109 | from 110 | {project.test_schema}.on_model_hook where test_state = '{state}'""" 111 | 112 | vals = project.run_sql(query, fetch="all") 113 | assert len(vals) != 0, "nothing inserted into hooks table" 114 | assert len(vals) >= count, "too few rows in hooks table" 115 | assert len(vals) <= count, "too many rows in hooks table" 116 | return [{k: v for k, v in zip(fields, val)} for val in vals] 117 | 118 | def check_hooks(self, state, project, host, count=1): 119 | ctxs = self.get_ctx_vars(state, count=count, project=project) 120 | for ctx in ctxs: 121 | assert ctx["test_state"] == state 122 | # assert ctx["target_dbname"] == "TestDB" 123 | # assert ctx["target_host"] == host 124 | assert ctx["target_name"] == "default" 125 | assert ctx["target_schema"] == project.test_schema 126 | assert ctx["target_threads"] == 1 127 | assert ctx["target_type"] == project.adapter_type 128 | # assert ctx["target_user"] == "root" 129 | # assert ctx["target_pass"] == "" 130 | 131 | assert ( 132 | ctx["run_started_at"] is not None and len(ctx["run_started_at"]) > 0 133 | ), "run_started_at was not set" 134 | assert ( 135 | ctx["invocation_id"] is not None and len(ctx["invocation_id"]) > 0 136 | ), "invocation_id was not set" 137 | assert ctx["thread_id"].startswith("Thread-") 138 | 139 | 140 | class BasePrePostModelHooks(BaseTestPrePost): 141 | @pytest.fixture(scope="class") 142 | def project_config_update(self): 143 | return { 144 | "models": { 145 | "test": { 146 | "pre-hook": [ 147 | # inside transaction (runs second) 148 | MODEL_PRE_HOOK, 149 | ], 150 | "post-hook": [ 151 | # inside transaction (runs first) 152 | MODEL_POST_HOOK, 153 | ], 154 | } 155 | } 156 | } 157 | 158 | @pytest.fixture(scope="class") 159 | def models(self): 160 | return {"hooks.sql": fixtures.models__hooks} 161 | 162 | def test_pre_and_post_run_hooks(self, project, dbt_profile_target): 163 | run_dbt() 164 | self.check_hooks("start", project, dbt_profile_target.get("host", None)) 165 | self.check_hooks("end", project, dbt_profile_target.get("host", None)) 166 | 167 | 168 | class TestPrePostModelHooks(BasePrePostModelHooks): 169 | pass 170 | 171 | 172 | class TestPrePostModelHooksUnderscores(BasePrePostModelHooks): 173 | @pytest.fixture(scope="class") 174 | def project_config_update(self): 175 | return { 176 | "models": { 177 | "test": { 178 | "pre_hook": [ 179 | # inside transaction (runs second) 180 | MODEL_PRE_HOOK, 181 | ], 182 | "post_hook": [ 183 | # inside transaction (runs first) 184 | MODEL_POST_HOOK, 185 | ], 186 | } 187 | } 188 | } 189 | 190 | 191 | class BaseHookRefs(BaseTestPrePost): 192 | @pytest.fixture(scope="class") 193 | def project_config_update(self): 194 | return { 195 | "models": { 196 | "test": { 197 | "hooked": { 198 
| "post-hook": [ 199 | """ 200 | insert into {{this.schema}}.on_model_hook select 201 | test_state, 202 | '{{ target.dbname }}' as target_dbname, 203 | '{{ target.host }}' as target_host, 204 | '{{ target.name }}' as target_name, 205 | '{{ target.schema }}' as target_schema, 206 | '{{ target.type }}' as target_type, 207 | '{{ target.user }}' as target_user, 208 | '{{ target.get(pass, "") }}' as target_pass, 209 | {{ target.threads }} as target_threads, 210 | '{{ run_started_at }}' as run_started_at, 211 | '{{ invocation_id }}' as invocation_id, 212 | '{{ thread_id }}' as thread_id 213 | from {{ ref('post') }}""".strip() 214 | ], 215 | } 216 | }, 217 | } 218 | } 219 | 220 | @pytest.fixture(scope="class") 221 | def models(self): 222 | return { 223 | "hooked.sql": fixtures.models__hooked, 224 | "post.sql": fixtures.models__post, 225 | "pre.sql": fixtures.models__pre, 226 | } 227 | 228 | def test_pre_post_model_hooks_refed(self, project, dbt_profile_target): 229 | run_dbt() 230 | self.check_hooks("start", project, dbt_profile_target.get("host", None)) 231 | self.check_hooks("end", project, dbt_profile_target.get("host", None)) 232 | 233 | 234 | class TestHookRefs(BaseHookRefs): 235 | pass 236 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_incremental.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.incremental import fixtures 3 | from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( 4 | BaseIncrementalOnSchemaChange, 5 | ) 6 | from dbt.tests.adapter.incremental.test_incremental_predicates import ( 7 | TestIncrementalPredicatesDeleteInsert, 8 | TestPredicatesDeleteInsert, 9 | ) 10 | 11 | _MODELS__INCREMENTAL_IGNORE_SQLServer = """ 12 | {{ 13 | config( 14 | materialized='incremental', 15 | unique_key='id', 16 | on_schema_change='ignore' 17 | ) 18 | }} 19 | 20 | WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) 21 | 22 | {% if is_incremental() %} 23 | 24 | SELECT id, field1, field2, field3, field4 25 | FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) 26 | 27 | {% else %} 28 | 29 | SELECT TOP 3 id, field1, field2 FROM source_data 30 | 31 | {% endif %} 32 | """ 33 | 34 | _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET_SQLServer = """ 35 | {{ 36 | config(materialized='table') 37 | }} 38 | 39 | with source_data as ( 40 | 41 | select * from {{ ref('model_a') }} 42 | 43 | ) 44 | 45 | {% set string_type = dbt.type_string() %} 46 | 47 | select id 48 | ,cast(field1 as {{string_type}}) as field1 49 | 50 | from source_data 51 | """ 52 | 53 | _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET_SQLServer = """ 54 | {{ 55 | config(materialized='table') 56 | }} 57 | 58 | with source_data as ( 59 | 60 | select * from {{ ref('model_a') }} 61 | 62 | ) 63 | 64 | {% set string_type = dbt.type_string() %} 65 | 66 | select id 67 | ,cast(field1 as {{string_type}}) as field1 68 | --,field2 69 | ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3 70 | ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4 71 | 72 | from source_data 73 | """ 74 | 75 | 76 | class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): 77 | @pytest.fixture(scope="class") 78 | def models(self): 79 | return { 80 | "incremental_sync_remove_only.sql": fixtures._MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, 81 | "incremental_ignore.sql": _MODELS__INCREMENTAL_IGNORE_SQLServer, 82 | 
"incremental_sync_remove_only_target.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET_SQLServer, # noqa: E501 83 | "incremental_ignore_target.sql": fixtures._MODELS__INCREMENTAL_IGNORE_TARGET, 84 | "incremental_fail.sql": fixtures._MODELS__INCREMENTAL_FAIL, 85 | "incremental_sync_all_columns.sql": fixtures._MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, 86 | "incremental_append_new_columns_remove_one.sql": fixtures._MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, # noqa: E501 87 | "model_a.sql": fixtures._MODELS__A, 88 | "incremental_append_new_columns_target.sql": fixtures._MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, # noqa: E501 89 | "incremental_append_new_columns.sql": fixtures._MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, # noqa: E501 90 | "incremental_sync_all_columns_target.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET_SQLServer, # noqa: E501 91 | "incremental_append_new_columns_remove_one_target.sql": fixtures._MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, # noqa: E501 92 | } 93 | 94 | 95 | class TestIncrementalPredicatesDeleteInsert(TestIncrementalPredicatesDeleteInsert): 96 | pass 97 | 98 | 99 | class TestPredicatesDeleteInsert(TestPredicatesDeleteInsert): 100 | pass 101 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_incremental_microbatch_datetime.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.incremental.test_incremental_microbatch import BaseMicrobatch 3 | 4 | _microbatch_model_no_unique_id_sql_datetime = """ 5 | {{ config(materialized='incremental', incremental_strategy='microbatch', 6 | event_time='event_time', batch_size='day', begin='2020-01-01 00:00:00') }} 7 | select * from {{ ref('input_model') }} 8 | """ 9 | 10 | _input_model_sql_datetime = """ 11 | {{ config(materialized='table', event_time='event_time') }} 12 | select 1 as id, '2020-01-01 00:00:00' as event_time 13 | union all 14 | select 2 as id, '2020-01-02 00:00:00' as event_time 15 | union all 16 | select 3 as id, '2020-01-03 00:00:00' as event_time 17 | """ 18 | 19 | 20 | class TestSQLServerMicrobatchDateTime(BaseMicrobatch): 21 | """ 22 | Setup a version of the microbatch testing that uses a datetime column as the event_time 23 | This is to test that the microbatch strategy can handle datetime columns when passing in 24 | event times as UTC strings 25 | """ 26 | 27 | @pytest.fixture(scope="class") 28 | def microbatch_model_sql(self) -> str: 29 | return _microbatch_model_no_unique_id_sql_datetime 30 | 31 | @pytest.fixture(scope="class") 32 | def input_model_sql(self) -> str: 33 | """ 34 | This is the SQL that defines the input model to the microbatch model, 35 | including any {{ config(..) }}. 
event_time is a required configuration of this input 36 | """ 37 | return _input_model_sql_datetime 38 | 39 | @pytest.fixture(scope="class") 40 | def insert_two_rows_sql(self, project) -> str: 41 | test_schema_relation = project.adapter.Relation.create( 42 | database=project.database, schema=project.test_schema 43 | ) 44 | return ( 45 | f"insert into {test_schema_relation}.input_model (id, event_time) " 46 | f"values (4, '2020-01-04 00:00:00'), (5, '2020-01-05 00:00:00')" 47 | ) 48 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_materialized_views.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic 3 | 4 | 5 | @pytest.mark.skip(reason="Materialized views are not supported in SQLServer") 6 | class TestMaterializedViews(MaterializedViewBasic): 7 | pass 8 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_persist_docs.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.persist_docs.test_persist_docs import BasePersistDocs 3 | 4 | 5 | @pytest.mark.skip( 6 | reason=""" 7 | Persisted docs are not implemented in SQLServer. 8 | Could be implemented with sp_addextendedproperty 9 | """ 10 | ) 11 | class TestPersistDocs(BasePersistDocs): 12 | pass 13 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_python_model.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.python_model.test_python_model import ( 3 | BasePythonIncrementalTests, 4 | BasePythonModelTests, 5 | ) 6 | from dbt.tests.adapter.python_model.test_spark import BasePySparkTests 7 | 8 | 9 | @pytest.mark.skip(reason="Python models are not supported in SQLServer") 10 | class TestPythonModel(BasePythonModelTests): 11 | pass 12 | 13 | 14 | @pytest.mark.skip(reason="Python models are not supported in SQLServer") 15 | class TestPythonIncremental(BasePythonIncrementalTests): 16 | pass 17 | 18 | 19 | @pytest.mark.skip(reason="Python models are not supported in SQLServer") 20 | class TestPySpark(BasePySparkTests): 21 | pass 22 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_query_comment.py: -------------------------------------------------------------------------------- 1 | from dbt.tests.adapter.query_comment.test_query_comment import ( 2 | BaseEmptyQueryComments, 3 | BaseMacroArgsQueryComments, 4 | BaseMacroInvalidQueryComments, 5 | BaseMacroQueryComments, 6 | BaseNullQueryComments, 7 | BaseQueryComments, 8 | ) 9 | 10 | 11 | class TestQueryComments(BaseQueryComments): 12 | pass 13 | 14 | 15 | class TestMacroQueryComments(BaseMacroQueryComments): 16 | pass 17 | 18 | 19 | class TestMacroArgsQueryComments(BaseMacroArgsQueryComments): 20 | pass 21 | 22 | 23 | class TestMacroInvalidQueryComments(BaseMacroInvalidQueryComments): 24 | pass 25 | 26 | 27 | class TestNullQueryComments(BaseNullQueryComments): 28 | pass 29 | 30 | 31 | class TestEmptyQueryComments(BaseEmptyQueryComments): 32 | pass 33 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_relations.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator 3 | from dbt.tests.adapter.relations.test_dropping_schema_named import BaseDropSchemaNamed 4 | 5 | 6 | class TestChangeRelationTypeValidator(BaseChangeRelationTypeValidator): 7 | pass 8 | 9 | 10 | @pytest.mark.xfail( 11 | reason=""" 12 | Test fails as it's not passing Use[] properly. 13 | `Use[None]` is called; it should be `Use[TestDB]`. 14 | Unclear why the macro doesn't pass it properly. 15 | """ 16 | ) 17 | class TestDropSchemaNamed(BaseDropSchemaNamed): 18 | pass 19 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_simple_seed.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.simple_seed.seeds import seeds__expected_sql 3 | from dbt.tests.adapter.simple_seed.test_seed import ( 4 | BaseBasicSeedTests, 5 | BaseSeedConfigFullRefreshOff, 6 | BaseSeedConfigFullRefreshOn, 7 | BaseSeedCustomSchema, 8 | BaseSeedWithEmptyDelimiter, 9 | BaseSeedWithUniqueDelimiter, 10 | BaseSeedWithWrongDelimiter, 11 | BaseSimpleSeedEnabledViaConfig, 12 | ) 13 | from dbt.tests.util import check_table_does_exist, check_table_does_not_exist, run_dbt 14 | 15 | seeds__expected_sql = seeds__expected_sql.replace( 16 | "TIMESTAMP WITHOUT TIME ZONE", "DATETIME2(6)" 17 | ).replace("TEXT", "VARCHAR(8000)") 18 | 19 | properties__schema_yml = """ 20 | version: 2 21 | seeds: 22 | - name: seed_enabled 23 | columns: 24 | - name: birthday 25 | data_tests: 26 | - column_type: 27 | type: date 28 | - name: seed_id 29 | data_tests: 30 | - column_type: 31 | type: varchar(8000) 32 | 33 | - name: seed_tricky 34 | columns: 35 | - name: seed_id 36 | data_tests: 37 | - column_type: 38 | type: integer 39 | - name: seed_id_str 40 | data_tests: 41 | - column_type: 42 | type: varchar(8000) 43 | - name: a_bool 44 | data_tests: 45 | - column_type: 46 | type: boolean 47 | - name: looks_like_a_bool 48 | data_tests: 49 | - column_type: 50 | type: varchar(8000) 51 | - name: a_date 52 | data_tests: 53 | - column_type: 54 | type: datetime2(6) 55 | - name: looks_like_a_date 56 | data_tests: 57 | - column_type: 58 | type: varchar(8000) 59 | - name: relative 60 | data_tests: 61 | - column_type: 62 | type: varchar(8000) 63 | - name: weekday 64 | data_tests: 65 | - column_type: 66 | type: varchar(8000) 67 | """ 68 | 69 | 70 | class TestBasicSeedTests(BaseBasicSeedTests): 71 | @pytest.fixture(scope="class", autouse=True) 72 | def setUp(self, project): 73 | """Create table for ensuring seeds and models used in tests build correctly""" 74 | project.run_sql(seeds__expected_sql) 75 | 76 | def test_simple_seed_full_refresh_flag(self, project): 77 | """ 78 | Drop the seed_actual table and re-create.
79 | Verifies correct behavior by the absence of the 80 | model which depends on seed_actual.""" 81 | self._build_relations_for_test(project) 82 | self._check_relation_end_state( 83 | run_result=run_dbt(["seed", "--full-refresh"]), project=project, exists=True 84 | ) 85 | 86 | 87 | class TestSeedConfigFullRefreshOn(BaseSeedConfigFullRefreshOn): 88 | @pytest.fixture(scope="class", autouse=True) 89 | def setUp(self, project): 90 | """Create table for ensuring seeds and models used in tests build correctly""" 91 | project.run_sql(seeds__expected_sql) 92 | 93 | def test_simple_seed_full_refresh_config(self, project): 94 | """config option should drop current model and cascade drop to downstream models""" 95 | self._build_relations_for_test(project) 96 | self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True) 97 | 98 | 99 | class TestSeedConfigFullRefreshOff(BaseSeedConfigFullRefreshOff): 100 | @pytest.fixture(scope="class", autouse=True) 101 | def setUp(self, project): 102 | """Create table for ensuring seeds and models used in tests build correctly""" 103 | project.run_sql(seeds__expected_sql) 104 | 105 | 106 | @pytest.mark.skip("Unable to inject custom schema to project_config_update") 107 | class TestSeedCustomSchema(BaseSeedCustomSchema): 108 | @pytest.fixture(scope="class", autouse=True) 109 | def setUp(self, project): 110 | """Create table for ensuring seeds and models used in tests build correctly""" 111 | project.run_sql(seeds__expected_sql) 112 | 113 | 114 | class TestSeedWithUniqueDelimiter(BaseSeedWithUniqueDelimiter): 115 | @pytest.fixture(scope="class", autouse=True) 116 | def setUp(self, project): 117 | """Create table for ensuring seeds and models used in tests build correctly""" 118 | project.run_sql(seeds__expected_sql) 119 | 120 | 121 | class TestSeedWithWrongDelimiter(BaseSeedWithWrongDelimiter): 122 | @pytest.fixture(scope="class", autouse=True) 123 | def setUp(self, project): 124 | """Create table for ensuring seeds and models used in tests build correctly""" 125 | project.run_sql(seeds__expected_sql) 126 | 127 | def test_seed_with_wrong_delimiter(self, project): 128 | """Testing failure of running dbt seed with a wrongly configured delimiter""" 129 | seed_result = run_dbt(["seed"], expect_pass=False) 130 | assert "incorrect syntax" in seed_result.results[0].message.lower() 131 | 132 | 133 | class TestSeedWithEmptyDelimiter(BaseSeedWithEmptyDelimiter): 134 | @pytest.fixture(scope="class", autouse=True) 135 | def setUp(self, project): 136 | """Create table for ensuring seeds and models used in tests build correctly""" 137 | project.run_sql(seeds__expected_sql) 138 | 139 | 140 | class TestSimpleSeedEnabledViaConfig__seed_with_disabled(BaseSimpleSeedEnabledViaConfig): 141 | @pytest.fixture(scope="function") 142 | def clear_test_schema(self, project): 143 | yield 144 | project.run_sql( 145 | f"drop table if exists {project.database}.{project.test_schema}.seed_enabled" 146 | ) 147 | project.run_sql( 148 | f"drop table if exists {project.database}.{project.test_schema}.seed_disabled" 149 | ) 150 | project.run_sql( 151 | f"drop table if exists {project.database}.{project.test_schema}.seed_tricky" 152 | ) 153 | project.run_sql(f"drop view if exists {project.test_schema}.seed_enabled") 154 | project.run_sql(f"drop view if exists {project.test_schema}.seed_disabled") 155 | project.run_sql(f"drop view if exists {project.test_schema}.seed_tricky") 156 | project.run_sql(f"drop schema if exists {project.test_schema}") 157 | 158 | def 
test_simple_seed_with_disabled(self, clear_test_schema, project): 159 | results = run_dbt(["seed"]) 160 | assert len(results) == 2 161 | check_table_does_exist(project.adapter, "seed_enabled") 162 | check_table_does_not_exist(project.adapter, "seed_disabled") 163 | check_table_does_exist(project.adapter, "seed_tricky") 164 | 165 | @pytest.mark.skip( 166 | reason=""" 167 | Running all the tests in the same schema causes the tests to fail 168 | as they all share the same schema across the tests 169 | """ 170 | ) 171 | def test_simple_seed_selection(self, clear_test_schema, project): 172 | results = run_dbt(["seed", "--select", "seed_enabled"]) 173 | assert len(results) == 1 174 | check_table_does_exist(project.adapter, "seed_enabled") 175 | check_table_does_not_exist(project.adapter, "seed_disabled") 176 | check_table_does_not_exist(project.adapter, "seed_tricky") 177 | 178 | @pytest.mark.skip( 179 | reason=""" 180 | Running all the tests in the same schema causes the tests to fail 181 | as they all share the same schema across the tests 182 | """ 183 | ) 184 | def test_simple_seed_exclude(self, clear_test_schema, project): 185 | results = run_dbt(["seed", "--exclude", "seed_enabled"]) 186 | assert len(results) == 1 187 | check_table_does_not_exist(project.adapter, "seed_enabled") 188 | check_table_does_not_exist(project.adapter, "seed_disabled") 189 | check_table_does_exist(project.adapter, "seed_tricky") 190 | 191 | 192 | class TestSimpleSeedEnabledViaConfig__seed_selection(BaseSimpleSeedEnabledViaConfig): 193 | @pytest.fixture(scope="function") 194 | def clear_test_schema(self, project): 195 | yield 196 | project.run_sql( 197 | f"drop table if exists {project.database}.{project.test_schema}.seed_enabled" 198 | ) 199 | project.run_sql( 200 | f"drop table if exists {project.database}.{project.test_schema}.seed_disabled" 201 | ) 202 | project.run_sql( 203 | f"drop table if exists {project.database}.{project.test_schema}.seed_tricky" 204 | ) 205 | project.run_sql(f"drop view if exists {project.test_schema}.seed_enabled") 206 | project.run_sql(f"drop view if exists {project.test_schema}.seed_disabled") 207 | project.run_sql(f"drop view if exists {project.test_schema}.seed_tricky") 208 | project.run_sql(f"drop schema if exists {project.test_schema}") 209 | 210 | @pytest.mark.skip( 211 | reason=""" 212 | Running all the tests in the same schema causes the tests to fail 213 | as they all share the same schema across the tests 214 | """ 215 | ) 216 | def test_simple_seed_with_disabled(self, clear_test_schema, project): 217 | results = run_dbt(["seed"]) 218 | assert len(results) == 2 219 | check_table_does_exist(project.adapter, "seed_enabled") 220 | check_table_does_not_exist(project.adapter, "seed_disabled") 221 | check_table_does_exist(project.adapter, "seed_tricky") 222 | 223 | def test_simple_seed_selection(self, clear_test_schema, project): 224 | results = run_dbt(["seed", "--select", "seed_enabled"]) 225 | assert len(results) == 1 226 | check_table_does_exist(project.adapter, "seed_enabled") 227 | check_table_does_not_exist(project.adapter, "seed_disabled") 228 | check_table_does_not_exist(project.adapter, "seed_tricky") 229 | 230 | @pytest.mark.skip( 231 | reason=""" 232 | Running all the tests in the same schema causes the tests to fail 233 | as they all share the same schema across the tests 234 | """ 235 | ) 236 | def test_simple_seed_exclude(self, clear_test_schema, project): 237 | results = run_dbt(["seed", "--exclude", "seed_enabled"]) 238 | assert len(results) == 1 239 | 
check_table_does_not_exist(project.adapter, "seed_enabled") 240 | check_table_does_not_exist(project.adapter, "seed_disabled") 241 | check_table_does_exist(project.adapter, "seed_tricky") 242 | 243 | 244 | class TestSimpleSeedEnabledViaConfig__seed_exclude(BaseSimpleSeedEnabledViaConfig): 245 | @pytest.fixture(scope="function") 246 | def clear_test_schema(self, project): 247 | yield 248 | project.run_sql( 249 | f"drop table if exists {project.database}.{project.test_schema}.seed_enabled" 250 | ) 251 | project.run_sql( 252 | f"drop table if exists {project.database}.{project.test_schema}.seed_disabled" 253 | ) 254 | project.run_sql( 255 | f"drop table if exists {project.database}.{project.test_schema}.seed_tricky" 256 | ) 257 | project.run_sql(f"drop view if exists {project.test_schema}.seed_enabled") 258 | project.run_sql(f"drop view if exists {project.test_schema}.seed_disabled") 259 | project.run_sql(f"drop view if exists {project.test_schema}.seed_tricky") 260 | project.run_sql(f"drop schema if exists {project.test_schema}") 261 | 262 | @pytest.mark.skip( 263 | reason=""" 264 | Running all the tests in the same schema causes the tests to fail 265 | as they all share the same schema across the tests 266 | """ 267 | ) 268 | def test_simple_seed_with_disabled(self, clear_test_schema, project): 269 | results = run_dbt(["seed"]) 270 | assert len(results) == 2 271 | check_table_does_exist(project.adapter, "seed_enabled") 272 | check_table_does_not_exist(project.adapter, "seed_disabled") 273 | check_table_does_exist(project.adapter, "seed_tricky") 274 | 275 | @pytest.mark.skip( 276 | reason=""" 277 | Running all the tests in the same schema causes the tests to fail 278 | as they all share the same schema across the tests 279 | """ 280 | ) 281 | def test_simple_seed_selection(self, clear_test_schema, project): 282 | results = run_dbt(["seed", "--select", "seed_enabled"]) 283 | assert len(results) == 1 284 | check_table_does_exist(project.adapter, "seed_enabled") 285 | check_table_does_not_exist(project.adapter, "seed_disabled") 286 | check_table_does_not_exist(project.adapter, "seed_tricky") 287 | 288 | def test_simple_seed_exclude(self, clear_test_schema, project): 289 | results = run_dbt(["seed", "--exclude", "seed_enabled"]) 290 | assert len(results) == 1 291 | check_table_does_not_exist(project.adapter, "seed_enabled") 292 | check_table_does_not_exist(project.adapter, "seed_disabled") 293 | check_table_does_exist(project.adapter, "seed_tricky") 294 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_simple_snapshot.py: -------------------------------------------------------------------------------- 1 | from typing import Iterable 2 | 3 | from dbt.tests.adapter.simple_snapshot.test_snapshot import BaseSimpleSnapshot 4 | from dbt.tests.fixtures.project import TestProjInfo 5 | from dbt.tests.util import relation_from_name, run_dbt 6 | 7 | 8 | def clone_table_sqlserver( 9 | project: TestProjInfo, to_table: str, from_table: str, select: str, where: str = None 10 | ): 11 | """ 12 | Creates a new table based on another table in a dbt project 13 | 14 | Args: 15 | project: the dbt project that contains the table 16 | to_table: the name of the table, without a schema, to be created 17 | from_table: the name of the table, without a schema, to be cloned 18 | select: the selection clause to apply on `from_table`; defaults to all columns (*) 19 | where: the where clause to apply on `from_table`, if any; defaults to all records 20 | 21 | We 
override this for sqlserver as its using `select into` instead of `create table as select` 22 | """ 23 | to_table_name = relation_from_name(project.adapter, to_table) 24 | from_table_name = relation_from_name(project.adapter, from_table) 25 | select_clause = select or "*" 26 | where_clause = where or "1 = 1" 27 | sql = f"drop table if exists {to_table_name}" 28 | project.run_sql(sql) 29 | sql = f""" 30 | select {select_clause} 31 | into {to_table_name} 32 | from {from_table_name} 33 | where {where_clause} 34 | """ 35 | project.run_sql(sql) 36 | 37 | 38 | def add_column_sqlserver(project: TestProjInfo, table: str, column: str, definition: str): 39 | """ 40 | Applies updates to a table in a dbt project 41 | 42 | Args: 43 | project: the dbt project that contains the table 44 | table: the name of the table without a schema 45 | column: the name of the new column 46 | definition: the definition of the new column, e.g. 'varchar(20) default null' 47 | """ 48 | # BigQuery doesn't like 'varchar' in the definition 49 | if project.adapter.type() == "bigquery" and "varchar" in definition.lower(): 50 | definition = "string" 51 | table_name = relation_from_name(project.adapter, table) 52 | sql = f""" 53 | alter table {table_name} 54 | add {column} {definition} 55 | """ 56 | project.run_sql(sql) 57 | 58 | 59 | class TestSimpleSnapshot(BaseSimpleSnapshot): 60 | def create_fact_from_seed(self, where: str = None): # type: ignore 61 | clone_table_sqlserver(self.project, "fact", "seed", "*", where) 62 | 63 | def add_fact_column(self, column: str = None, definition: str = None): # type: ignore 64 | add_column_sqlserver(self.project, "fact", column, definition) 65 | 66 | def test_updates_are_captured_by_snapshot(self, project): 67 | """ 68 | Update the last 5 records. Show that all ids are current, but the last 5 reflect updates. 69 | """ 70 | self.update_fact_records( 71 | {"updated_at": "DATEADD(DAY, 1, updated_at)"}, "id between 16 and 20" 72 | ) 73 | run_dbt(["snapshot"]) 74 | self._assert_results( 75 | ids_with_current_snapshot_records=range(1, 21), 76 | ids_with_closed_out_snapshot_records=range(16, 21), 77 | ) 78 | 79 | def test_new_column_captured_by_snapshot(self, project): 80 | """ 81 | Add a column to `fact` and populate the last 10 records with a non-null value. 82 | Show that all ids are current, but the last 10 reflect updates and the first 10 don't 83 | i.e. if the column is added, but not updated, the record doesn't reflect that it's updated 84 | """ 85 | self.add_fact_column("full_name", "varchar(200) default null") 86 | self.update_fact_records( 87 | { 88 | "full_name": "first_name + ' ' + last_name", 89 | "updated_at": "DATEADD(DAY, 1, updated_at)", 90 | }, 91 | "id between 11 and 20", 92 | ) 93 | run_dbt(["snapshot"]) 94 | self._assert_results( 95 | ids_with_current_snapshot_records=range(1, 21), 96 | ids_with_closed_out_snapshot_records=range(11, 21), 97 | ) 98 | 99 | def _assert_results( 100 | self, 101 | ids_with_current_snapshot_records: Iterable, 102 | ids_with_closed_out_snapshot_records: Iterable, 103 | ): 104 | """ 105 | All test cases are checked by considering whether a 106 | source record's id has a value in `dbt_valid_to` 107 | in `snapshot`. 
Each id can fall into one of the following cases: 108 | 109 | - The id has only one record in `snapshot`; it has a value in `dbt_valid_to` 110 | - the record was hard deleted in the source 111 | - The id has only one record in `snapshot`; 112 | it does not have a value in `dbt_valid_to` 113 | - the record was not updated in the source 114 | - the record was updated in the source, 115 | but not in a way that is tracked (e.g. via `strategy='check'`) 116 | - The id has two records in `snapshot`; 117 | one has a value in `dbt_valid_to`, the other does not 118 | - the record was altered in the source in a way that is tracked 119 | - the record was hard deleted and revived 120 | 121 | Note: Because of the third scenario, ids may show up in both arguments of this method. 122 | 123 | Args: 124 | ids_with_current_snapshot_records: a list/set/etc. of ids which aren't end-dated 125 | ids_with_closed_out_snapshot_records: a list/set/etc. of ids which are end-dated 126 | """ 127 | records = set( 128 | self.get_snapshot_records( 129 | "id, CASE WHEN dbt_valid_to is null then 1 else 0 END as is_current" 130 | ) 131 | ) 132 | expected_records = set().union( 133 | {(i, 1) for i in ids_with_current_snapshot_records}, 134 | {(i, 0) for i in ids_with_closed_out_snapshot_records}, 135 | ) 136 | for record in records: 137 | assert record in expected_records 138 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_unit_tests.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.unit_testing.test_case_insensitivity import BaseUnitTestCaseInsensivity 3 | from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput 4 | from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes 5 | from dbt.tests.util import run_dbt, write_file 6 | 7 | my_model_sql = """ 8 | select 9 | tested_column from {{ ref('my_upstream_model')}} 10 | """ 11 | 12 | my_upstream_model_sql = """ 13 | select 14 | {sql_value} as tested_column 15 | """ 16 | 17 | test_my_model_yml = """ 18 | unit_tests: 19 | - name: test_my_model 20 | model: my_model 21 | given: 22 | - input: ref('my_upstream_model') 23 | rows: 24 | - {{ tested_column: {yaml_value} }} 25 | expect: 26 | rows: 27 | - {{ tested_column: {yaml_value} }} 28 | """ 29 | 30 | 31 | class TestUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity): 32 | pass 33 | 34 | 35 | class TestUnitTestInvalidInput(BaseUnitTestInvalidInput): 36 | pass 37 | 38 | 39 | class TestUnitTestingTypes(BaseUnitTestingTypes): 40 | @pytest.fixture 41 | def data_types(self): 42 | # sql_value, yaml_value 43 | return [ 44 | ["1", "1"], 45 | ["'1'", "1"], 46 | ["1", "true"], 47 | ["CAST('2020-01-02' AS DATE)", "2020-01-02"], 48 | ["CAST('2013-11-03 00:00:00-0' AS DATETIME2(6))", "2013-11-03 00:00:00-0"], 49 | ["CAST('2013-11-03 00:00:00-0' AS DATETIME2(6))", "2013-11-03 00:00:00-0"], 50 | ["CAST('1' AS numeric)", "1"], 51 | ] 52 | 53 | def test_unit_test_data_type(self, project, data_types): 54 | for sql_value, yaml_value in data_types: 55 | # Write parametrized type value to sql files 56 | write_file( 57 | my_upstream_model_sql.format(sql_value=sql_value), 58 | "models", 59 | "my_upstream_model.sql", 60 | ) 61 | 62 | # Write parametrized type value to unit test yaml definition 63 | write_file( 64 | test_my_model_yml.format(yaml_value=yaml_value), 65 | "models", 66 | "schema.yml", 67 | ) 68 | 69 | results = run_dbt(["run", "--select", 
"my_upstream_model"]) 70 | assert len(results) == 1 71 | 72 | try: 73 | run_dbt(["test", "--select", "my_model"]) 74 | except Exception: 75 | raise AssertionError(f"unit test failed when testing model with {sql_value}") 76 | -------------------------------------------------------------------------------- /tests/functional/adapter/dbt/test_utils.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.adapter.utils import fixture_cast_bool_to_text, fixture_dateadd, fixture_listagg 3 | from dbt.tests.adapter.utils.test_any_value import BaseAnyValue 4 | from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend 5 | from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat 6 | from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct 7 | from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr 8 | from dbt.tests.adapter.utils.test_cast import BaseCast 9 | from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText 10 | from dbt.tests.adapter.utils.test_concat import BaseConcat 11 | from dbt.tests.adapter.utils.test_current_timestamp import ( 12 | BaseCurrentTimestampAware, 13 | BaseCurrentTimestampNaive, 14 | ) 15 | from dbt.tests.adapter.utils.test_date import BaseDate 16 | from dbt.tests.adapter.utils.test_date_spine import BaseDateSpine 17 | from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc 18 | from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd 19 | from dbt.tests.adapter.utils.test_datediff import BaseDateDiff 20 | from dbt.tests.adapter.utils.test_equals import BaseEquals 21 | from dbt.tests.adapter.utils.test_escape_single_quotes import ( 22 | BaseEscapeSingleQuotesBackslash, 23 | BaseEscapeSingleQuotesQuote, 24 | ) 25 | from dbt.tests.adapter.utils.test_except import BaseExcept 26 | from dbt.tests.adapter.utils.test_generate_series import BaseGenerateSeries 27 | from dbt.tests.adapter.utils.test_get_intervals_between import BaseGetIntervalsBetween 28 | from dbt.tests.adapter.utils.test_get_powers_of_two import BaseGetPowersOfTwo 29 | from dbt.tests.adapter.utils.test_hash import BaseHash 30 | from dbt.tests.adapter.utils.test_intersect import BaseIntersect 31 | from dbt.tests.adapter.utils.test_last_day import BaseLastDay 32 | from dbt.tests.adapter.utils.test_length import BaseLength 33 | from dbt.tests.adapter.utils.test_listagg import BaseListagg 34 | from dbt.tests.adapter.utils.test_null_compare import BaseMixedNullCompare, BaseNullCompare 35 | from dbt.tests.adapter.utils.test_position import BasePosition 36 | from dbt.tests.adapter.utils.test_replace import BaseReplace 37 | from dbt.tests.adapter.utils.test_right import BaseRight 38 | from dbt.tests.adapter.utils.test_safe_cast import BaseSafeCast 39 | from dbt.tests.adapter.utils.test_split_part import BaseSplitPart 40 | from dbt.tests.adapter.utils.test_string_literal import BaseStringLiteral 41 | from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps 42 | from dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod 43 | 44 | # flake8: noqa: E501 45 | 46 | 47 | class TestAnyValue(BaseAnyValue): 48 | pass 49 | 50 | 51 | @pytest.mark.skip(reason="Not supported/Not implemented") 52 | class TestArrayAppend(BaseArrayAppend): 53 | pass 54 | 55 | 56 | @pytest.mark.skip(reason="Not supported/Not implemented") 57 | class TestArrayConcat(BaseArrayConcat): 58 | pass 59 | 60 | 61 | @pytest.mark.skip(reason="Not supported/Not implemented") 62 | class 
TestArrayConstruct(BaseArrayConstruct): 63 | pass 64 | 65 | 66 | @pytest.mark.skip(reason="Not supported/Not implemented") 67 | class TestBoolOr(BaseBoolOr): 68 | pass 69 | 70 | 71 | class TestCast(BaseCast): 72 | pass 73 | 74 | 75 | models__test_cast_bool_to_text_sql = """ 76 | with data as ( 77 | 78 | select 0 as input, 'false' as expected union all 79 | select 1 as input, 'true' as expected union all 80 | select null as input, null as expected 81 | 82 | ) 83 | 84 | select 85 | 86 | {{ cast_bool_to_text("input") }} as actual, 87 | expected 88 | 89 | from data 90 | """ 91 | 92 | 93 | class TestCastBoolToText(BaseCastBoolToText): 94 | @pytest.fixture(scope="class") 95 | def models(self): 96 | return { 97 | "test_cast_bool_to_text.yml": fixture_cast_bool_to_text.models__test_cast_bool_to_text_yml, # noqa: E501 98 | "test_cast_bool_to_text.sql": self.interpolate_macro_namespace( 99 | models__test_cast_bool_to_text_sql, "cast_bool_to_text" 100 | ), 101 | } 102 | 103 | 104 | class TestConcat(BaseConcat): 105 | pass 106 | 107 | 108 | @pytest.mark.skip( 109 | reason="Only should implement Aware or Naive. Opted for Naive to align with fabric." 110 | ) 111 | class TestCurrentTimestampAware(BaseCurrentTimestampAware): 112 | pass 113 | 114 | 115 | class TestCurrentTimestampNaive(BaseCurrentTimestampNaive): 116 | pass 117 | 118 | 119 | @pytest.mark.skip(reason="Date spine relies on recursive CTES which are not supported.") 120 | class TestDate(BaseDate): 121 | pass 122 | 123 | 124 | @pytest.mark.skip(reason="Date spine relies on recursive CTES which are not supported.") 125 | class TestDateSpine(BaseDateSpine): 126 | pass 127 | 128 | 129 | class TestDateTrunc(BaseDateTrunc): 130 | pass 131 | 132 | 133 | class TestDateAdd(BaseDateAdd): 134 | models__test_dateadd_sql = """ 135 | with data as ( 136 | 137 | select * from {{ ref('data_dateadd') }} 138 | 139 | ) 140 | 141 | select 142 | case 143 | when datepart = 'hour' then cast({{ dateadd('hour', 'interval_length', 'from_time') }} as {{ api.Column.translate_type('timestamp') }}) 144 | when datepart = 'day' then cast({{ dateadd('day', 'interval_length', 'from_time') }} as {{ api.Column.translate_type('timestamp') }}) 145 | when datepart = 'month' then cast({{ dateadd('month', 'interval_length', 'from_time') }} as {{ api.Column.translate_type('timestamp') }}) 146 | when datepart = 'year' then cast({{ dateadd('year', 'interval_length', 'from_time') }} as {{ api.Column.translate_type('timestamp') }}) 147 | else null 148 | end as actual, 149 | result as expected 150 | 151 | from data 152 | """ 153 | 154 | @pytest.fixture(scope="class") 155 | def project_config_update(self): 156 | return { 157 | "name": "test", 158 | # this is only needed for BigQuery, right? 
159 | # no harm having it here until/unless there's an adapter that doesn't support the 'timestamp' type 160 | "seeds": { 161 | "test": { 162 | "data_dateadd": { 163 | "+column_types": { 164 | "from_time": "datetime2(6)", 165 | "result": "datetime2(6)", 166 | }, 167 | }, 168 | }, 169 | }, 170 | } 171 | 172 | @pytest.fixture(scope="class") 173 | def seeds(self): 174 | return {"data_dateadd.csv": fixture_dateadd.seeds__data_dateadd_csv} 175 | 176 | @pytest.fixture(scope="class") 177 | def models(self): 178 | return { 179 | "test_dateadd.yml": fixture_dateadd.models__test_dateadd_yml, 180 | "test_dateadd.sql": self.interpolate_macro_namespace( 181 | self.models__test_dateadd_sql, "dateadd" 182 | ), 183 | } 184 | 185 | 186 | class TestDateDiff(BaseDateDiff): 187 | pass 188 | 189 | 190 | class TestEquals(BaseEquals): 191 | pass 192 | 193 | 194 | class TestEscapeSingleQuotesQuote(BaseEscapeSingleQuotesQuote): 195 | pass 196 | 197 | 198 | @pytest.mark.skip(reason="SQLServer applies escaping with double of values") 199 | class TestEscapeSingleQuotesBackslash(BaseEscapeSingleQuotesBackslash): 200 | pass 201 | 202 | 203 | class TestExcept(BaseExcept): 204 | pass 205 | 206 | 207 | @pytest.mark.skip( 208 | reason="Only newer versions of SQLServer support Generate Series. Skipping for back compat" 209 | ) 210 | class TestGenerateSeries(BaseGenerateSeries): 211 | pass 212 | 213 | 214 | class TestGetIntervalsBetween(BaseGetIntervalsBetween): 215 | pass 216 | 217 | 218 | class TestGetPowersOfTwo(BaseGetPowersOfTwo): 219 | pass 220 | 221 | 222 | class TestHash(BaseHash): 223 | pass 224 | 225 | 226 | class TestIntersect(BaseIntersect): 227 | pass 228 | 229 | 230 | class TestLastDay(BaseLastDay): 231 | pass 232 | 233 | 234 | class TestLength(BaseLength): 235 | pass 236 | 237 | 238 | seeds__data_listagg_output_csv = """group_col,expected,version 239 | 1,"a_|_b_|_c",bottom_ordered 240 | 2,"1_|_a_|_p",bottom_ordered 241 | 3,"g_|_g_|_g",bottom_ordered 242 | 1,"c_|_b_|_a",reverse_order 243 | 2,"p_|_a_|_1",reverse_order 244 | 3,"g_|_g_|_g",reverse_order 245 | 3,"g, g, g",comma_whitespace_unordered 246 | """ 247 | 248 | 249 | models__test_listagg_sql = """ 250 | with data as ( 251 | 252 | select * from {{ ref('data_listagg') }} 253 | 254 | ), 255 | 256 | data_output as ( 257 | 258 | select * from {{ ref('data_listagg_output') }} 259 | 260 | ), 261 | 262 | calculate as ( 263 | 264 | select 265 | group_col, 266 | {{ listagg('string_text', "'_|_'", "order by order_col") }} as actual, 267 | 'bottom_ordered' as version 268 | from data 269 | group by group_col 270 | 271 | union all 272 | 273 | select 274 | group_col, 275 | {{ listagg('string_text', "'_|_'", "order by order_col desc", 2) }} as actual, 276 | 'reverse_order' as version 277 | from data 278 | group by group_col 279 | 280 | union all 281 | 282 | select 283 | group_col, 284 | {{ listagg('string_text', "', '") }} as actual, 285 | 'comma_whitespace_unordered' as version 286 | from data 287 | where group_col = 3 288 | group by group_col 289 | 290 | ) 291 | 292 | select 293 | calculate.actual, 294 | data_output.expected 295 | from calculate 296 | left join data_output 297 | on calculate.group_col = data_output.group_col 298 | and calculate.version = data_output.version 299 | """ 300 | 301 | 302 | class TestListagg(BaseListagg): 303 | @pytest.fixture(scope="class") 304 | def seeds(self): 305 | return { 306 | "data_listagg.csv": fixture_listagg.seeds__data_listagg_csv, 307 | "data_listagg_output.csv": seeds__data_listagg_output_csv, 308 | } 309 | 310 | 
@pytest.fixture(scope="class") 311 | def models(self): 312 | return { 313 | "test_listagg.yml": fixture_listagg.models__test_listagg_yml, 314 | "test_listagg.sql": self.interpolate_macro_namespace( 315 | models__test_listagg_sql, "listagg" 316 | ), 317 | } 318 | 319 | 320 | class TestMixedNullCompare(BaseMixedNullCompare): 321 | pass 322 | 323 | 324 | class TestNullCompare(BaseNullCompare): 325 | pass 326 | 327 | 328 | class TestPosition(BasePosition): 329 | pass 330 | 331 | 332 | class TestReplace(BaseReplace): 333 | pass 334 | 335 | 336 | class TestRight(BaseRight): 337 | pass 338 | 339 | 340 | class TestSafeCast(BaseSafeCast): 341 | pass 342 | 343 | 344 | class TestSplitPart(BaseSplitPart): 345 | pass 346 | 347 | 348 | class TestStringLiteral(BaseStringLiteral): 349 | pass 350 | 351 | 352 | @pytest.mark.skip( 353 | reason=""" 354 | comment here about why this is skipped. 355 | https://github.com/dbt-labs/dbt-adapters/blob/f1987d4313cc94bac9906963dff1337ee0bffbc6/dbt/include/global_project/macros/adapters/timestamps.sql#L39 356 | """ 357 | ) 358 | class TestCurrentTimestamps(BaseCurrentTimestamps): 359 | pass 360 | 361 | 362 | class TestValidateSqlMethod(BaseValidateSqlMethod): 363 | pass 364 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_cross_db.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import get_connection, run_dbt 3 | 4 | snapshot_sql = """ 5 | {% snapshot claims_snapshot %} 6 | 7 | {{ 8 | config( 9 | target_database='secondary_db', 10 | target_schema='dbo', 11 | unique_key='id', 12 | 13 | strategy='timestamp', 14 | updated_at='updated_at', 15 | ) 16 | }} 17 | 18 | select * from {{source('mysource', 'claims')}} 19 | 20 | {% endsnapshot %} 21 | """ 22 | 23 | source_csv = """id,updated_date 24 | 1,2024-01-01 25 | 2,2024-01-01 26 | 3,2024-01-01 27 | """ 28 | 29 | sources_yml = """ 30 | version: 2 31 | sources: 32 | - name: mysource 33 | database: TestDB 34 | tables: 35 | - name: claims 36 | """ 37 | 38 | 39 | class TestCrossDB: 40 | def create_secondary_db(self, project): 41 | create_sql = """ 42 | DECLARE @col NVARCHAR(256) 43 | SET @col = (SELECT CONVERT (varchar(256), SERVERPROPERTY('collation'))); 44 | 45 | IF NOT EXISTS (SELECT * FROM sys.databases WHERE name='secondary_db') 46 | BEGIN 47 | EXEC ('CREATE DATABASE secondary_db COLLATE ' + @col) 48 | END 49 | """ 50 | 51 | with get_connection(project.adapter): 52 | project.adapter.execute( 53 | create_sql.format(database=project.database), 54 | fetch=True, 55 | ) 56 | 57 | def cleanup_secondary_database(self, project): 58 | drop_sql = "DROP DATABASE IF EXISTS secondary_db" 59 | with get_connection(project.adapter): 60 | project.adapter.execute( 61 | drop_sql.format(database=project.database), 62 | fetch=True, 63 | ) 64 | 65 | def cleanup_primary_table(self, project): 66 | drop_sql = "DROP TABLE IF EXISTS {database}.mysource.claims" 67 | with get_connection(project.adapter): 68 | project.adapter.execute( 69 | drop_sql.format(database=project.database), 70 | fetch=True, 71 | ) 72 | 73 | def cleanup_snapshot_table(self, project): 74 | drop_sql = "DROP TABLE IF EXISTS TestDB_Secondary.dbo.claims_snapshot" 75 | with get_connection(project.adapter): 76 | project.adapter.execute( 77 | drop_sql, 78 | fetch=True, 79 | ) 80 | 81 | def create_source_schema(self, project): 82 | create_sql = """ 83 | IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = 'mysource') 84 | BEGIN 85 | 
EXEC('CREATE SCHEMA mysource') 86 | END 87 | """ 88 | with get_connection(project.adapter): 89 | project.adapter.execute( 90 | create_sql, 91 | fetch=True, 92 | ) 93 | 94 | def create_primary_table(self, project): 95 | src_query = """ 96 | SELECT * 97 | INTO 98 | {database}.mysource.claims 99 | FROM 100 | ( 101 | SELECT 102 | 1 as id, 103 | CAST('2024-01-01' as DATETIME2(6)) updated_at 104 | 105 | UNION ALL 106 | 107 | SELECT 108 | 2 as id, 109 | CAST('2024-01-01' as DATETIME2(6)) updated_at 110 | 111 | UNION ALL 112 | 113 | SELECT 114 | 3 as id, 115 | CAST('2024-01-01' as DATETIME2(6)) updated_at 116 | ) as src_data 117 | """ 118 | with get_connection(project.adapter): 119 | project.adapter.execute( 120 | src_query.format(database=project.database, schema=project.test_schema), 121 | fetch=True, 122 | ) 123 | 124 | def create_secondary_schema(self, project): 125 | src_query = """ 126 | USE [secondary_db] 127 | EXEC ('CREATE SCHEMA {schema}') 128 | """ 129 | with get_connection(project.adapter): 130 | project.adapter.execute( 131 | src_query.format(database=project.database, schema=project.test_schema), 132 | fetch=True, 133 | ) 134 | 135 | def update_primary_table(self, project): 136 | sql = """ 137 | UPDATE [{database}].[mysource].[claims] 138 | SET 139 | updated_at = CAST('2024-02-01' as datetime2(6)) 140 | WHERE 141 | id = 3 142 | """ 143 | with get_connection(project.adapter): 144 | project.adapter.execute( 145 | sql.format(database=project.database), 146 | fetch=True, 147 | ) 148 | 149 | @pytest.fixture(scope="class") 150 | def models(self): 151 | return {"sources.yml": sources_yml} 152 | 153 | @pytest.fixture(scope="class") 154 | def snapshots(self): 155 | return {"claims_snapshot.sql": snapshot_sql} 156 | 157 | def test_cross_db_snapshot(self, project): 158 | self.create_secondary_db(project) 159 | 160 | self.cleanup_primary_table(project) 161 | self.cleanup_snapshot_table(project) 162 | 163 | self.create_source_schema(project) 164 | self.create_primary_table(project) 165 | run_dbt(["snapshot"]) 166 | self.update_primary_table(project) 167 | run_dbt(["snapshot"]) 168 | 169 | self.cleanup_snapshot_table(project) 170 | self.cleanup_secondary_database(project) 171 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_db_non_standard.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import get_connection, run_dbt 3 | 4 | database_name = "my-data-base" 5 | schema_name = "mysource" 6 | source_table_name = "my_table" 7 | 8 | sources_yml = f""" 9 | version: 2 10 | 11 | sources: 12 | - name: mysource 13 | database: {database_name} 14 | tables: 15 | - name: my_table 16 | 17 | """ 18 | 19 | model_sql = """ 20 | {{ config(database="my-data-base", schema="mysource", materialized="table") }} 21 | SELECT 22 | * 23 | FROM 24 | {{ source('mysource', 'my_table') }} 25 | """ 26 | 27 | 28 | class TestNonStandardDB: 29 | def create_db(self, project): 30 | create_sql = """ 31 | DECLARE @col NVARCHAR(256) 32 | SET @col = (SELECT CONVERT (varchar(256), SERVERPROPERTY('collation'))); 33 | 34 | IF NOT EXISTS (SELECT * FROM sys.databases WHERE name='{database}') 35 | BEGIN 36 | EXEC ('CREATE DATABASE [{database}] COLLATE ' + @col) 37 | END 38 | """ 39 | 40 | with get_connection(project.adapter): 41 | project.adapter.execute( 42 | create_sql.format(database=database_name), 43 | fetch=True, 44 | ) 45 | 46 | def create_source_schema(self, project): 47 | create_sql = """ 
48 | USE [{database}]; 49 | 50 | IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{schema}') 51 | BEGIN 52 | EXEC('CREATE SCHEMA {schema}') 53 | END 54 | """ 55 | with get_connection(project.adapter): 56 | project.adapter.execute( 57 | create_sql.format(database=database_name, schema=schema_name), 58 | fetch=True, 59 | ) 60 | 61 | def create_primary_table(self, project): 62 | src_query = """ 63 | SELECT * 64 | INTO 65 | [{database}].{schema}.{table} 66 | FROM 67 | ( 68 | SELECT 69 | 1 as id, 70 | CAST('2024-01-01' as DATETIME2(6)) updated_at 71 | 72 | UNION ALL 73 | 74 | SELECT 75 | 2 as id, 76 | CAST('2024-01-01' as DATETIME2(6)) updated_at 77 | 78 | UNION ALL 79 | 80 | SELECT 81 | 3 as id, 82 | CAST('2024-01-01' as DATETIME2(6)) updated_at 83 | ) as src_data 84 | """ 85 | with get_connection(project.adapter): 86 | project.adapter.execute( 87 | src_query.format( 88 | database=database_name, schema=schema_name, table=source_table_name 89 | ), 90 | fetch=True, 91 | ) 92 | 93 | def cleanup_primary_table(self, project): 94 | drop_sql = "DROP TABLE IF EXISTS [{database}].{schema}.{table}" 95 | with get_connection(project.adapter): 96 | project.adapter.execute( 97 | drop_sql.format( 98 | database=database_name, schema=schema_name, table=source_table_name 99 | ), 100 | fetch=True, 101 | ) 102 | 103 | @pytest.fixture(scope="class") 104 | def models(self): 105 | return {"model.sql": model_sql, "sources.yml": sources_yml} 106 | 107 | def test_non_standard_database(self, project): 108 | self.create_db(project) 109 | 110 | self.cleanup_primary_table(project) 111 | self.create_source_schema(project) 112 | self.create_primary_table(project) 113 | 114 | run_dbt() 115 | 116 | self.cleanup_primary_table(project) 117 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_index.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import get_connection, run_dbt 3 | 4 | # flake8: noqa: E501 5 | 6 | index_seed_csv = """id_col,data,secondary_data,tertiary_data 7 | 1,'a'",122,20 8 | """ 9 | 10 | index_schema_base_yml = """ 11 | version: 2 12 | seeds: 13 | - name: raw_data 14 | config: 15 | column_types: 16 | id_col: integer 17 | data: nvarchar(20) 18 | secondary_data: integer 19 | tertiary_data: bigint 20 | """ 21 | 22 | model_yml = """ 23 | version: 2 24 | models: 25 | - name: index_model 26 | - name: index_ccs_model 27 | """ 28 | 29 | model_sql = """ 30 | {{ 31 | config({ 32 | "materialized": 'table', 33 | "as_columnstore": False, 34 | "post-hook": [ 35 | "{{ create_clustered_index(columns = ['id_col'], unique=True) }}", 36 | "{{ create_nonclustered_index(columns = ['data']) }}", 37 | "{{ create_nonclustered_index(columns = ['secondary_data'], includes = ['tertiary_data']) }}", 38 | ] 39 | }) 40 | }} 41 | select * from {{ ref('raw_data') }} 42 | """ 43 | 44 | model_sql_ccs = """ 45 | {{ 46 | config({ 47 | "materialized": 'table', 48 | "post-hook": [ 49 | "{{ create_nonclustered_index(columns = ['data']) }}", 50 | "{{ create_nonclustered_index(columns = ['secondary_data'], includes = ['tertiary_data']) }}", 51 | ] 52 | }) 53 | }} 54 | select * from {{ ref('raw_data') }} 55 | """ 56 | 57 | drop_schema_model = """ 58 | {{ 59 | config({ 60 | "materialized": 'table', 61 | "post-hook": [ 62 | "{{ drop_all_indexes_on_table() }}", 63 | ] 64 | }) 65 | }} 66 | select * from {{ ref('raw_data') }} 67 | """ 68 | 69 | base_validation = """ 70 | with base_query AS ( 71 | select 
i.[name] as index_name, 72 | substring(column_names, 1, len(column_names)-1) as [columns], 73 | case when i.[type] = 1 then 'Clustered index' 74 | when i.[type] = 2 then 'Nonclustered unique index' 75 | when i.[type] = 3 then 'XML index' 76 | when i.[type] = 4 then 'Spatial index' 77 | when i.[type] = 5 then 'Clustered columnstore index' 78 | when i.[type] = 6 then 'Nonclustered columnstore index' 79 | when i.[type] = 7 then 'Nonclustered hash index' 80 | end as index_type, 81 | case when i.is_unique = 1 then 'Unique' 82 | else 'Not unique' end as [unique], 83 | schema_name(t.schema_id) + '.' + t.[name] as table_view, 84 | case when t.[type] = 'U' then 'Table' 85 | when t.[type] = 'V' then 'View' 86 | end as [object_type], 87 | s.name as schema_name 88 | from sys.objects t 89 | inner join sys.schemas s 90 | on 91 | t.schema_id = s.schema_id 92 | inner join sys.indexes i 93 | on t.object_id = i.object_id 94 | cross apply (select col.[name] + ', ' 95 | from sys.index_columns ic 96 | inner join sys.columns col 97 | on ic.object_id = col.object_id 98 | and ic.column_id = col.column_id 99 | where ic.object_id = t.object_id 100 | and ic.index_id = i.index_id 101 | order by key_ordinal 102 | for xml path ('') ) D (column_names) 103 | where t.is_ms_shipped <> 1 104 | and index_id > 0 105 | ) 106 | """ 107 | 108 | index_count = ( 109 | base_validation 110 | + """ 111 | select 112 | index_type, 113 | count(*) index_count 114 | from 115 | base_query 116 | WHERE 117 | schema_name='{schema_name}' 118 | group by index_type 119 | """ 120 | ) 121 | 122 | other_index_count = ( 123 | base_validation 124 | + """ 125 | SELECT 126 | * 127 | FROM 128 | base_query 129 | WHERE 130 | schema_name='{schema_name}' 131 | AND 132 | table_view='{schema_name}.{table_name}' 133 | 134 | """ 135 | ) 136 | 137 | 138 | class TestIndex: 139 | @pytest.fixture(scope="class") 140 | def project_config_update(self): 141 | return {"name": "generic_tests"} 142 | 143 | @pytest.fixture(scope="class") 144 | def seeds(self): 145 | return { 146 | "raw_data.csv": index_seed_csv, 147 | "schema.yml": index_schema_base_yml, 148 | } 149 | 150 | @pytest.fixture(scope="class") 151 | def models(self): 152 | return { 153 | "index_model.sql": model_sql, 154 | "index_ccs_model.sql": model_sql_ccs, 155 | "schema.yml": model_yml, 156 | } 157 | 158 | def test_create_index(self, project): 159 | run_dbt(["seed"]) 160 | run_dbt(["run"]) 161 | 162 | with get_connection(project.adapter): 163 | result, table = project.adapter.execute( 164 | index_count.format(schema_name=project.created_schemas[0]), fetch=True 165 | ) 166 | schema_dict = {_[0]: _[1] for _ in table.rows} 167 | expected = { 168 | "Clustered columnstore index": 1, 169 | "Clustered index": 1, 170 | "Nonclustered unique index": 4, 171 | } 172 | assert schema_dict == expected 173 | 174 | 175 | class TestIndexDropsOnlySchema: 176 | @pytest.fixture(scope="class") 177 | def project_config_update(self): 178 | return {"name": "generic_tests"} 179 | 180 | @pytest.fixture(scope="class") 181 | def seeds(self): 182 | return { 183 | "raw_data.csv": index_seed_csv, 184 | "schema.yml": index_schema_base_yml, 185 | } 186 | 187 | @pytest.fixture(scope="class") 188 | def models(self): 189 | return { 190 | "index_model.sql": drop_schema_model, 191 | "index_ccs_model.sql": model_sql_ccs, 192 | "schema.yml": model_yml, 193 | } 194 | 195 | def create_table_and_index_other_schema(self, project): 196 | _schema = project.test_schema + "other" 197 | create_sql = f""" 198 | USE [{project.database}]; 199 | IF NOT EXISTS 
(SELECT * FROM sys.schemas WHERE name = '{_schema}') 200 | BEGIN 201 | EXEC('CREATE SCHEMA [{_schema}]') 202 | END 203 | """ 204 | 205 | create_table = f""" 206 | CREATE TABLE {_schema}.index_model ( 207 | IDCOL BIGINT 208 | ) 209 | """ 210 | 211 | create_index = f""" 212 | CREATE INDEX sample_schema ON {_schema}.index_model (IDCOL) 213 | """ 214 | with get_connection(project.adapter): 215 | project.adapter.execute(create_sql, fetch=True) 216 | project.adapter.execute(create_table) 217 | project.adapter.execute(create_index) 218 | 219 | def drop_schema_artifacts(self, project): 220 | _schema = project.test_schema + "other" 221 | drop_index = f"DROP INDEX IF EXISTS sample_schema ON {_schema}.index_model" 222 | drop_table = f"DROP TABLE IF EXISTS {_schema}.index_model" 223 | drop_schema = f"DROP SCHEMA IF EXISTS {_schema}" 224 | 225 | with get_connection(project.adapter): 226 | project.adapter.execute(drop_index, fetch=True) 227 | project.adapter.execute(drop_table) 228 | project.adapter.execute(drop_schema) 229 | 230 | def validate_other_schema(self, project): 231 | with get_connection(project.adapter): 232 | result, table = project.adapter.execute( 233 | other_index_count.format( 234 | schema_name=project.test_schema + "other", table_name="index_model" 235 | ), 236 | fetch=True, 237 | ) 238 | 239 | assert len(table.rows) == 1 240 | 241 | def test_create_index(self, project): 242 | self.create_table_and_index_other_schema(project) 243 | run_dbt(["seed"]) 244 | run_dbt(["run"]) 245 | self.validate_other_schema(project) 246 | self.drop_schema_artifacts(project) 247 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_materialize_change.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import get_connection, run_dbt 3 | 4 | model_sql = """ 5 | SELECT 1 AS data 6 | """ 7 | 8 | table_mat = """ 9 | {{ 10 | config({ 11 | "materialized": 'table' 12 | }) 13 | }} 14 | SELECT 1 AS data 15 | """ 16 | 17 | view_mat = """ 18 | {{ 19 | config({ 20 | "materialized": 'view' 21 | }) 22 | }} 23 | SELECT 1 AS data 24 | """ 25 | 26 | schema = """ 27 | version: 2 28 | models: 29 | - name: mat_object 30 | """ 31 | 32 | 33 | class BaseTableView: 34 | def create_object(self, project, sql): 35 | with get_connection(project.adapter): 36 | project.adapter.execute(sql, fetch=True) 37 | 38 | 39 | class TestTabletoView(BaseTableView): 40 | """Test if changing from a table object to a view object correctly replaces""" 41 | 42 | @pytest.fixture(scope="class") 43 | def models(self): 44 | return {"mat_object.sql": view_mat, "schema.yml": schema} 45 | 46 | def test_passes(self, project): 47 | self.create_object( 48 | project, f"SELECT * INTO {project.test_schema}.mat_object FROM ({model_sql}) t" 49 | ) 50 | run_dbt(["run"]) 51 | 52 | 53 | class TestViewtoTable(BaseTableView): 54 | """Test if changing from a view object to a table object correctly replaces""" 55 | 56 | @pytest.fixture(scope="class") 57 | def models(self): 58 | return {"mat_object.sql": table_mat, "schema.yml": schema} 59 | 60 | def test_passes(self, project): 61 | self.create_object(project, f"CREATE VIEW {project.test_schema}.mat_object AS {model_sql}") 62 | run_dbt(["run"]) 63 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_mssql_seed.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from 
dbt.tests.util import run_dbt 3 | 4 | seed_schema_yml = """ 5 | version: 2 6 | seeds: 7 | - name: raw_data 8 | """ 9 | 10 | 11 | class TestLargeSeed: 12 | def build_large_seed_file(self): 13 | row_count = 3000 14 | column_count = 10 15 | 16 | headers = ",".join(["id"] + [f"column_{_}" for _ in range(1, column_count)]) 17 | seed_data = [headers] 18 | for row in range(1, row_count): 19 | row_data = [str(row)] 20 | for column in range(1, column_count): 21 | row_data += [str(column)] 22 | 23 | row_data = ",".join(row_data) 24 | seed_data += [row_data] 25 | 26 | large_seed_file = "\n".join(seed_data) 27 | return large_seed_file 28 | 29 | @pytest.fixture(scope="class") 30 | def project_config_update(self): 31 | return {"name": "generic_tests"} 32 | 33 | @pytest.fixture(scope="class") 34 | def seeds(self): 35 | return { 36 | "raw_data.csv": self.build_large_seed_file(), 37 | "schema.yml": seed_schema_yml, 38 | } 39 | 40 | def test_large_seed(self, project): 41 | run_dbt(["seed"]) 42 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_provision_users.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt 3 | 4 | my_model_sql = """ 5 | select 1 as fun 6 | """ 7 | 8 | cleanup_existing_sql = """ 9 | {% macro cleanup_existing() %} 10 | {%- call statement('drop_existing', fetch_result=False) -%} 11 | 12 | if exists( 13 | select * 14 | from sys.database_principals 15 | where name = '{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}') 16 | drop user [{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}] 17 | 18 | if exists( 19 | select * 20 | from sys.database_principals 21 | where name = '{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}') 22 | drop user [{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}] 23 | 24 | {%- endcall -%} 25 | {% endmacro %} 26 | """ 27 | 28 | model_schema_single_user_yml = """ 29 | version: 2 30 | models: 31 | - name: my_model 32 | config: 33 | auto_provision_aad_principals: true 34 | grants: 35 | select: ["{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}"] 36 | """ 37 | 38 | model_schema_multiple_users_yml = """ 39 | version: 2 40 | models: 41 | - name: my_model 42 | config: 43 | auto_provision_aad_principals: true 44 | grants: 45 | select: 46 | - "{{ env_var('DBT_TEST_AAD_PRINCIPAL_1') }}" 47 | - "{{ env_var('DBT_TEST_AAD_PRINCIPAL_2') }}" 48 | """ 49 | 50 | 51 | class BaseTestProvisionAzureSQL: 52 | @pytest.fixture(scope="class") 53 | def macros(self): 54 | return { 55 | "cleanup_existing.sql": cleanup_existing_sql, 56 | } 57 | 58 | def test_auto_provision(self, project): 59 | run_dbt(["run-operation", "cleanup_existing"]) 60 | run_dbt(["run"]) 61 | 62 | 63 | @pytest.mark.only_with_profile("ci_azure_environment", "user_azure") 64 | @pytest.mark.flaky(max_runs=5, min_passes=1) 65 | class TestProvisionSingleUserAzureSQL(BaseTestProvisionAzureSQL): 66 | @pytest.fixture(scope="class") 67 | def models(self): 68 | return { 69 | "my_model.sql": my_model_sql, 70 | "schema.yml": model_schema_single_user_yml, 71 | } 72 | 73 | 74 | @pytest.mark.only_with_profile("ci_azure_environment", "user_azure") 75 | @pytest.mark.flaky(max_runs=5, min_passes=1) 76 | class TestProvisionMultipleUsersAzureSQL(BaseTestProvisionAzureSQL): 77 | @pytest.fixture(scope="class") 78 | def models(self): 79 | return { 80 | "my_model.sql": my_model_sql, 81 | "schema.yml": model_schema_multiple_users_yml, 82 | } 83 | -------------------------------------------------------------------------------- 
/tests/functional/adapter/mssql/test_temp_relation_cleanup.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import get_connection, run_dbt 3 | 4 | table_model = """ 5 | {{ 6 | config({ 7 | "materialized": 'table' 8 | }) 9 | }} 10 | 11 | SELECT 1 as data 12 | """ 13 | 14 | model_yml = """ 15 | version: 2 16 | models: 17 | - name: table_model 18 | """ 19 | 20 | validation_sql = """ 21 | SELECT 22 | * 23 | FROM 24 | {database}.INFORMATION_SCHEMA.TABLES 25 | WHERE 26 | TABLE_SCHEMA = '{schema}' 27 | AND 28 | TABLE_NAME LIKE '%__dbt_tmp%' 29 | """ 30 | 31 | seed_schema_yml = """ 32 | version: 2 33 | seeds: 34 | - name: raw_data 35 | config: 36 | column_types: 37 | id: integer 38 | value_col: nvarchar(20) 39 | date_col: datetime2(6) 40 | """ 41 | 42 | seed_csv = """id,data,date_col 43 | 1,1,2024-01-01 44 | 2,1,2024-01-01 45 | 3,1,2024-01-01""" 46 | 47 | incremental_sql = """ 48 | {{ 49 | config( 50 | materialized='incremental', 51 | unique_key='id', 52 | on_schema_change='sync_all_columns' 53 | ) 54 | }} 55 | 56 | WITH source_data AS (SELECT * FROM {{ ref('raw_data') }} ) 57 | 58 | {% if is_incremental() %} 59 | 60 | SELECT id, 61 | data, 62 | date_col 63 | FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) 64 | 65 | {% else %} 66 | 67 | SELECT id, 68 | data, 69 | date_col 70 | FROM source_data where id <= 1 71 | 72 | {% endif %} 73 | """ 74 | 75 | 76 | class BaseTempRelationCleanup: 77 | view_name = "__dbt_tmp_vw" 78 | 79 | def validate_temp_objects(self, project): 80 | with get_connection(project.adapter): 81 | result, table = project.adapter.execute( 82 | validation_sql.format( 83 | database=project.database, schema=project.created_schemas[0] 84 | ), 85 | fetch=True, 86 | ) 87 | assert len(table.rows) == 0 88 | 89 | 90 | class TestTempRelationCleanup(BaseTempRelationCleanup): 91 | """ 92 | This test validates that the temporary relations 93 | created by the `create_table` statement are cleaned up after a set of runs.
94 | """ 95 | 96 | @pytest.fixture(scope="class") 97 | def models(self): 98 | return { 99 | "table_model.sql": table_model, 100 | "schema.yml": model_yml, 101 | } 102 | 103 | def test_drops_temp_view_object(self, project): 104 | run_dbt(["run"]) 105 | 106 | self.validate_temp_objects(project) 107 | 108 | 109 | class TestIncrementalTempCleanup(BaseTempRelationCleanup): 110 | """Tests if the `dbt_tmp` views are properly cleaned up in an incremental model""" 111 | 112 | @pytest.fixture(scope="class") 113 | def seeds(self): 114 | return { 115 | "raw_data.csv": seed_csv, 116 | "schema.yml": seed_schema_yml, 117 | } 118 | 119 | @pytest.fixture(scope="class") 120 | def models(self): 121 | return { 122 | "table_model.sql": incremental_sql, 123 | "schema.yml": model_yml, 124 | } 125 | 126 | def test_drops_temp_view_object(self, project): 127 | run_dbt(["seed"]) 128 | run_dbt(["run"]) 129 | run_dbt(["run"]) 130 | 131 | self.validate_temp_objects(project) 132 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_test_with.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from dbt.tests.util import run_dbt 3 | 4 | sample_model = """ 5 | SELECT 6 | 1 as ID, 7 | 'a' as data 8 | 9 | UNION ALL 10 | 11 | SELECT 12 | 2 as ID, 13 | 'b' as data 14 | 15 | UNION ALL 16 | 17 | SELECT 18 | 2 as ID, 19 | 'c' as data 20 | """ 21 | 22 | pass_model_yml = """ 23 | version: 2 24 | models: 25 | - name: sample_model 26 | data_tests: 27 | - with_statement_pass: 28 | field: ID 29 | """ 30 | 31 | fail_model_yml = """ 32 | version: 2 33 | models: 34 | - name: sample_model 35 | data_tests: 36 | - with_statement_fail: 37 | field: ID 38 | """ 39 | 40 | comments_model_yml = """ 41 | version: 2 42 | models: 43 | - name: sample_model 44 | data_tests: 45 | - with_statement_comments: 46 | field: ID 47 | """ 48 | 49 | with_test_fail_sql = """ 50 | {% test with_statement_fail(model, field) %} 51 | 52 | with test_sample AS ( 53 | SELECT {{ field }} FROM {{ model }} 54 | GROUP BY {{ field }} 55 | HAVING COUNT(*) > 1 56 | ) 57 | SELECT * FROM test_sample 58 | 59 | {% endtest %} 60 | """ 61 | 62 | with_test_pass_sql = """ 63 | {% test with_statement_pass(model, field) %} 64 | 65 | with test_sample AS ( 66 | SELECT {{ field }} FROM {{ model }} 67 | GROUP BY {{ field }} 68 | HAVING COUNT(*) > 2 69 | ) 70 | SELECT * FROM test_sample 71 | 72 | {% endtest %} 73 | """ 74 | 75 | with_test_with_comments_sql = """ 76 | {% test with_statement_comments(model, field) %} 77 | -- comments 78 | with test_sample AS ( 79 | SELECT {{ field }} FROM {{ model }} 80 | GROUP BY {{ field }} 81 | HAVING COUNT(*) > 2 82 | ) 83 | SELECT * FROM test_sample 84 | {% endtest %} 85 | """ 86 | 87 | with_test_multiline = """ 88 | {% test with_test_multiline(model, field) %} 89 | -- comments 90 | with 91 | 92 | test_sample AS ( 93 | SELECT {{ field }} FROM {{ model }} 94 | GROUP BY {{ field }} 95 | HAVING COUNT(*) > 2 96 | ) 97 | SELECT * FROM test_sample 98 | {% endtest %} 99 | """ 100 | 101 | 102 | class BaseSQLTestWith: 103 | @pytest.fixture(scope="class") 104 | def project_config_update(self): 105 | return { 106 | "config-version": 2, 107 | "macro-paths": ["macros"], 108 | } 109 | 110 | @pytest.fixture(scope="class") 111 | def macros(self): 112 | return { 113 | "with_statement_pass.sql": with_test_pass_sql, 114 | "with_statement_fail.sql": with_test_fail_sql, 115 | "with_statement_comments.sql": with_test_with_comments_sql, 116 | 
"with_test_multiline.sql": with_test_multiline, 117 | } 118 | 119 | @pytest.fixture(scope="class") 120 | def models(self): 121 | return { 122 | "sample_model.sql": sample_model, 123 | "schema.yml": pass_model_yml, 124 | } 125 | 126 | 127 | class TestSQLTestWithPass(BaseSQLTestWith): 128 | @pytest.fixture(scope="class") 129 | def models(self): 130 | return { 131 | "sample_model.sql": sample_model, 132 | "schema.yml": pass_model_yml, 133 | } 134 | 135 | def test_sql_test_contains_with(self, project): 136 | run_dbt(["run"]) 137 | run_dbt(["test"]) 138 | 139 | 140 | class TestSQLTestWithFail(BaseSQLTestWith): 141 | @pytest.fixture(scope="class") 142 | def models(self): 143 | return { 144 | "sample_model.sql": sample_model, 145 | "schema.yml": fail_model_yml, 146 | } 147 | 148 | def test_sql_test_contains_with(self, project): 149 | run_dbt(["run"]) 150 | run_dbt(["test"], expect_pass=False) 151 | 152 | 153 | class TestSQLTestWithComment(BaseSQLTestWith): 154 | @pytest.fixture(scope="class") 155 | def models(self): 156 | return { 157 | "sample_model.sql": sample_model, 158 | "schema.yml": comments_model_yml, 159 | } 160 | 161 | def test_sql_test_contains_with(self, project): 162 | run_dbt(["run"]) 163 | run_dbt(["test"]) 164 | -------------------------------------------------------------------------------- /tests/functional/adapter/mssql/test_xml_index.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | xml_seed = """id_col,xml_data 4 | 1,1" 5 | """ 6 | 7 | xml_schema_base_yml = """ 8 | version: 2 9 | seeds: 10 | - name: xml_data 11 | config: 12 | column_types: 13 | id_col: integer 14 | xml_data: xml 15 | """ 16 | 17 | xml_model_yml = """ 18 | version: 2 19 | models: 20 | - name: xml_model 21 | columns: 22 | - name: id 23 | - name: xml_data 24 | """ 25 | 26 | xml_sql = """ 27 | {{ config(materialized="table") }} 28 | select * from {{ ref('xml_data') }} 29 | """ 30 | 31 | 32 | class TestIndex: 33 | @pytest.fixture(scope="class") 34 | def project_config_update(self): 35 | return {"name": "generic_tests"} 36 | 37 | @pytest.fixture(scope="class") 38 | def seeds(self): 39 | return { 40 | "xml_data.csv": xml_seed, 41 | "schema.yml": xml_schema_base_yml, 42 | } 43 | 44 | @pytest.fixture(scope="class") 45 | def models(self): 46 | return { 47 | "xml_model.sql": xml_sql, 48 | "schema.yml": xml_model_yml, 49 | } 50 | -------------------------------------------------------------------------------- /tests/unit/adapters/mssql/test_sqlserver_connection_manager.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from azure.identity import AzureCliCredential 3 | 4 | from dbt.adapters.sqlserver.sqlserver_connections import ( # byte_array_to_datetime, 5 | bool_to_connection_string_arg, 6 | get_pyodbc_attrs_before_credentials, 7 | ) 8 | from dbt.adapters.sqlserver.sqlserver_credentials import SQLServerCredentials 9 | 10 | # See 11 | # https://github.com/Azure/azure-sdk-for-python/blob/azure-identity_1.5.0/sdk/identity/azure-identity/tests/test_cli_credential.py 12 | CHECK_OUTPUT = AzureCliCredential.__module__ + ".subprocess.check_output" 13 | 14 | 15 | @pytest.fixture 16 | def credentials() -> SQLServerCredentials: 17 | credentials = SQLServerCredentials( 18 | driver="ODBC Driver 17 for SQL Server", 19 | host="fake.sql.sqlserver.net", 20 | database="dbt", 21 | schema="sqlserver", 22 | ) 23 | return credentials 24 | 25 | 26 | def test_get_pyodbc_attrs_before_empty_dict_when_service_principal( 27 | 
credentials: SQLServerCredentials, 28 | ) -> None: 29 | """ 30 | When the authentication is set to `sql`, we expect `attrs_before` to be empty. 31 | """ 32 | attrs_before = get_pyodbc_attrs_before_credentials(credentials) 33 | assert attrs_before == {} 34 | 35 | 36 | @pytest.mark.parametrize( 37 | "key, value, expected", 38 | [("somekey", False, "somekey=No"), ("somekey", True, "somekey=Yes")], 39 | ) 40 | def test_bool_to_connection_string_arg(key: str, value: bool, expected: str) -> None: 41 | assert bool_to_connection_string_arg(key, value) == expected 42 | --------------------------------------------------------------------------------