├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug-report.yml
│   │   └── feature-request.yml
│   ├── dependabot.yml
│   ├── pull_request_template.md
│   └── workflows
│       └── workflow.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── HOWTORELEASE.md
├── LICENSE
├── README.md
├── docs
│   ├── Makefile
│   ├── make.bat
│   └── source
│       ├── _static
│       │   └── dbt_project
│       │       ├── dbt_project.yml
│       │       ├── macros
│       │       │   ├── normalize_column_names.sql
│       │       │   └── spark_adapter.sql
│       │       ├── packages.yml
│       │       ├── profiles.yml
│       │       └── tests
│       │           ├── test_normalize_column_names.py
│       │           └── test_spark_get_tables.py
│       ├── conf.py
│       ├── configuration.rst
│       ├── dbt_spark.rst
│       ├── index.rst
│       └── projects.rst
├── pyproject.toml
├── scripts
│   └── release.py
├── setup.cfg
├── setup.py
├── src
│   └── pytest_dbt_core
│       ├── __init__.py
│       ├── fixtures.py
│       └── plugin.py
└── tests
    └── dbt_project
        ├── dbt_project.yml
        ├── macros
        │   ├── fetch_single_statement.sql
        │   └── prices.sql
        ├── profiles.yml
        └── tests
            ├── test_fetch_single_statement.py
            └── test_prices.py
/.github/ISSUE_TEMPLATE/bug-report.yml:
--------------------------------------------------------------------------------
1 | name: 🐞 Bug
2 | description: Report a bug or an issue you've found with `pytest-dbt-core`
3 | title: "[Bug] "
4 | labels: ["bug", "triage"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for taking the time to fill out this bug report!
10 | - type: checkboxes
11 | attributes:
12 | label: Is there an existing issue for this?
13 | description: Please search to see if an issue already exists for the bug you encountered.
14 | options:
15 | - label: I have searched the existing issues
16 | required: true
17 | - type: textarea
18 | attributes:
19 | label: Current Behavior
20 | description: A concise description of what you're experiencing.
21 | validations:
22 | required: false
23 | - type: textarea
24 | attributes:
25 | label: Expected Behavior
26 | description: A concise description of what you expected to happen.
27 | validations:
28 | required: false
29 | - type: textarea
30 | attributes:
31 | label: Steps To Reproduce
32 | description: Steps to reproduce the behavior.
33 | placeholder: |
34 | 1. In this environment...
35 | 2. With this config...
36 | 3. Run '...'
37 | 4. See error...
38 | validations:
39 | required: false
40 | - type: textarea
41 | id: logs
42 | attributes:
43 | label: Relevant traceback output
44 | description: |
45 | If applicable, traceback output to help explain your problem.
46 | render: shell
47 | validations:
48 | required: false
49 | - type: textarea
50 | attributes:
51 | label: Environment
52 | description: |
53 | examples:
54 | - **OS**: Ubuntu 20.04
55 | - **Python**: 3.10.13 (`python --version`)
56 | - **dbt**: 0.21.0 (`dbt --version`)
57 | - **pytest-dbt-core**: 0.1.0 (`pip freeze | grep pytest-dbt-core`)
58 | value: |
59 | - OS:
60 | - Python:
61 | - dbt:
62 | - pytest-dbt-core:
63 | render: markdown
64 | validations:
65 | required: false
66 | - type: dropdown
67 | id: database
68 | attributes:
69 | label: What database are you using dbt with?
70 | multiple: true
71 | options:
72 | - postgres
73 | - redshift
74 | - snowflake
75 | - bigquery
76 | - spark
77 | - other (mention it in "Additional Context")
78 | validations:
79 | required: false
80 | - type: textarea
81 | attributes:
82 | label: Additional Context
83 | description: |
84 | Links? References? Anything that will give us more context about the issue you are encountering!
85 |
86 | Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in.
87 | validations:
88 | required: false
89 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature-request.yml:
--------------------------------------------------------------------------------
1 | name: ✨ Feature
2 | description: Suggest an idea for `pytest-dbt-core`
3 | title: "[Feature] "
4 | labels: ["enhancement", "triage"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for taking the time to fill out this feature request!
10 | - type: checkboxes
11 | attributes:
12 | label: Is there an existing feature request for this?
13 | description: Please search to see if an issue already exists for the feature you would like.
14 | options:
15 | - label: I have searched the existing issues
16 | required: true
17 | - type: checkboxes
18 | attributes:
19 | label: Is this your first time opening an issue?
20 | options:
21 | - label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations)
22 | required: true
21 | - type: textarea
22 | attributes:
23 | label: Describe the Feature
24 | description: A clear and concise description of what you want to happen.
25 | validations:
26 | required: true
27 | - type: textarea
28 | attributes:
29 | label: Describe alternatives you've considered
30 | description: |
31 | A clear and concise description of any alternative solutions or features you've considered.
32 | validations:
33 | required: false
34 | - type: textarea
35 | attributes:
36 | label: Who will this benefit?
37 | description: |
38 | What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly.
39 | validations:
40 | required: false
41 | - type: input
42 | attributes:
43 | label: Are you interested in contributing this feature?
44 | description: Let us know if you want to write some code, and how we can help.
45 | validations:
46 | required: false
47 | - type: textarea
48 | attributes:
49 | label: Anything else?
50 | description: |
51 | Links? References? Anything that will give us more context about the feature you are suggesting!
52 | validations:
53 | required: false
54 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "pip"
9 | directory: "/" # Location of package manifests
10 | schedule:
11 | interval: "weekly"
12 | reviewers:
13 | - JCZuurmond
14 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | resolves #
2 |
3 |
8 |
9 | ### Description
10 |
11 |
15 |
16 | ### Checklist
17 |
18 | - [ ] I read the [CONTRIBUTING](https://github.com/godatadriven/pytest-dbt-core/blob/main/CONTRIBUTING.md) guide and understand what's expected of me
19 | - [ ] I ran this code in development and it appears to resolve the stated issue
20 | - [ ] I added tests, or tests are not required/relevant for this PR
21 | - [ ] I added an entry to the [CHANGELOG](https://github.com/godatadriven/pytest-dbt-core/blob/main/CHANGELOG.md) detailing the change of this PR
22 |
--------------------------------------------------------------------------------
/.github/workflows/workflow.yml:
--------------------------------------------------------------------------------
1 | name: CI pipeline
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches:
7 | - main
8 | tags:
9 | - '*'
10 | workflow_dispatch:
11 |
12 | jobs:
13 | pre-commit:
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@v2
17 |
18 | - name: Set up Python 3.9
19 | uses: actions/setup-python@v2
20 | with:
21 | python-version: 3.9
22 |
23 | - name: Install pre-commit
24 | shell: bash
25 | run: |
26 | python -m pip install --upgrade pip
27 | python -m pip install pre-commit
28 |
29 | - name: Run pre-commit hooks
30 | shell: bash
31 | run: pre-commit run --all-files
32 |
33 | tests:
34 | runs-on: ubuntu-latest
35 | strategy:
36 | fail-fast: false
37 | matrix:
38 | python-version:
39 | - "3.8"
40 | - "3.9"
41 | - "3.10"
42 | - "3.11"
43 |
44 | steps:
45 | - uses: actions/checkout@v2
46 |
47 | - name: Set up Python ${{ matrix.python-version }}
48 | uses: actions/setup-python@v2
49 | with:
50 | python-version: ${{ matrix.python-version }}
51 |
52 | - name: Install dependencies
53 | run: |
54 | sudo apt-get install libsasl2-dev
55 | python -m pip install --upgrade pip
56 | python -m pip install tox
57 |
58 | - name: Test with tox
59 | run: tox --skip-env "^py(?!${{ matrix.python-version }}).*"
60 |
61 | - name: Test docs with tox
62 | run: tox -e docs
63 |
64 | test-release:
65 | if: contains(github.ref, 'refs/tags/')
66 | runs-on: ubuntu-latest
67 | needs:
68 | - tests
69 | - "pre-commit"
70 | steps:
71 | - uses: actions/checkout@v2
72 |
73 | - name: Set up Python 3.9
74 | uses: actions/setup-python@v2
75 | with:
76 | python-version: 3.9
77 |
78 | - name: Install dependencies
79 | run: |
80 | sudo apt-get install libsasl2-dev
81 | python -m pip install --upgrade pip
82 | python -m pip install twine wheel setuptools setuptools-scm
83 |
84 | - name: Test release
85 | run: |
86 | python setup.py sdist bdist_wheel
87 | pip install dist/pytest-dbt-core-*.tar.gz
88 | pip install dist/pytest_dbt_core-*-py3-none-any.whl
89 | twine check dist/pytest_dbt_core-*-py3-none-any.whl dist/pytest-dbt-core-*.tar.gz
90 |
91 | github-release:
92 | runs-on: ubuntu-latest
93 | needs: test-release
94 | steps:
95 | - uses: actions/checkout@v2
96 |
97 | - name: Set up Python 3.9
98 | uses: actions/setup-python@v2
99 | with:
100 | python-version: 3.9
101 |
102 | - name: Install dependencies
103 | run: |
104 | python -m pip install --upgrade pip
105 | python -m pip install setuptools-scm
106 |
107 | - name: Get version
108 | id: get_version
109 | run: echo ::set-output name=VERSION::$(python setup.py --version)
110 |
111 | - name: Find release type
112 | id: get_release_type
113 | env:
114 | IS_PRERELEASE: ${{ contains(steps.get_version.outputs.VERSION, 'rc') || contains(steps.get_version.outputs.VERSION, 'b') }}
115 | run: echo ::set-output name=IS_PRERELEASE::$IS_PRERELEASE
116 |
117 | - name: Create GitHub release
118 | uses: actions/create-release@v1
119 | env:
120 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
121 | with:
122 | tag_name: ${{ steps.get_version.outputs.VERSION }}
123 | release_name: pytest-dbt-core ${{ steps.get_version.outputs.VERSION }}
124 | prerelease: ${{ steps.get_release_type.outputs.IS_PRERELEASE }}
125 | body: ${{ github.event.head_commit.message }}
126 |
127 | pypi-release:
128 | runs-on: ubuntu-latest
129 | needs: test-release
130 | steps:
131 | - uses: actions/checkout@v2
132 |
133 | - name: Set up Python 3.9
134 | uses: actions/setup-python@v2
135 | with:
136 | python-version: 3.9
137 |
138 | - name: Install dependencies
139 | run: |
140 | sudo apt-get install libsasl2-dev
141 | python -m pip install --upgrade pip
142 | python -m pip install twine wheel setuptools setuptools-scm
143 |
144 | - name: Get version
145 | id: get_version
146 | run: echo ::set-output name=VERSION::$(python setup.py --version)
147 |
148 | - name: Release to pypi
149 | env:
150 | TWINE_USERNAME: __token__
151 | TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
152 | run: |
153 | python setup.py sdist bdist_wheel
154 | twine upload --skip-existing --non-interactive dist/pytest_dbt_core-${{ steps.get_version.outputs.VERSION }}-py3-none-any.whl dist/pytest-dbt-core-${{ steps.get_version.outputs.VERSION }}.tar.gz
155 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | *-coverage.xml
54 | *-output.xml
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | target/
78 |
79 | # Jupyter Notebook
80 | .ipynb_checkpoints
81 |
82 | # IPython
83 | profile_default/
84 | ipython_config.py
85 |
86 | # pyenv
87 | .python-version
88 |
89 | # pipenv
90 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
91 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
92 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
93 | # install all needed dependencies.
94 | #Pipfile.lock
95 |
96 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
97 | __pypackages__/
98 |
99 | # Celery stuff
100 | celerybeat-schedule
101 | celerybeat.pid
102 |
103 | # SageMath parsed files
104 | *.sage.py
105 |
106 | # Environments
107 | .env
108 | .venv
109 | env/
110 | venv/
111 | ENV/
112 | env.bak/
113 | venv.bak/
114 |
115 | # Spyder project settings
116 | .spyderproject
117 | .spyproject
118 |
119 | # Rope project settings
120 | .ropeproject
121 |
122 | # mkdocs documentation
123 | /site
124 |
125 | # mypy
126 | .mypy_cache/
127 | .dmypy.json
128 | dmypy.json
129 |
130 | # Pyre type checker
131 | .pyre/
132 |
133 | # Version
134 | src/pytest_dbt_core/_version.py
135 |
136 | # dbt
137 | .user.yml
138 | dbt_packages/
139 |
140 | # Editor
141 | .idea/
142 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v4.0.1
6 | hooks:
7 | - id: trailing-whitespace
8 | - id: check-added-large-files
9 | - id: check-json
10 | - id: check-ast
11 | - id: check-merge-conflict
12 | - id: check-toml
13 | - id: check-yaml
14 | args: [--unsafe]
15 | - id: debug-statements
16 | - id: detect-private-key
17 | - id: end-of-file-fixer
18 | - repo: https://github.com/pre-commit/mirrors-mypy
19 | rev: v0.910
20 | hooks:
21 | - id: mypy
22 | - repo: https://github.com/ambv/black
23 | rev: 22.3.0
24 | hooks:
25 | - id: black
26 | name: Run black formatter
27 | language_version: python3.9
28 | - repo: https://github.com/humitos/mirrors-autoflake.git
29 | rev: v1.3
30 | hooks:
31 | - id: autoflake
32 | args: ['--in-place', '--remove-all-unused-imports']
33 | - repo: https://github.com/pycqa/flake8
34 | rev: 5.0.4
35 | hooks:
36 | - id: flake8
37 | name: Run flake8 linter
38 | additional_dependencies: ["flake8-bugbear==20.11.1", "pep8-naming==0.11.1"]
39 | - repo: https://github.com/timothycrosley/isort
40 | rev: 5.11.5
41 | hooks:
42 | - id: isort
43 | additional_dependencies: [toml]
44 | name: Sort imports using isort
45 | - repo: https://github.com/asottile/pyupgrade
46 | rev: v2.25.0
47 | hooks:
48 | - id: pyupgrade
49 | name: Check for code that can use new Python features
50 | args: [--py39-plus]
51 | - repo: https://github.com/Lucas-C/pre-commit-hooks-bandit
52 | rev: v1.0.4
53 | hooks:
54 | - id: python-bandit-vulnerability-check
55 | args: [-l, --recursive, -x, "tests,docs/source/_static/dbt_project/tests"]
56 | name: Check for vulnerabilities in code with bandit
57 | files: .py$
58 | - repo: https://github.com/Lucas-C/pre-commit-hooks-safety
59 | rev: v1.1.3
60 | hooks:
61 | - id: python-safety-dependencies-check
62 | name: Check for vulnerable dependencies
63 | - repo: https://github.com/detailyang/pre-commit-shell
64 | rev: 1.0.5
65 | hooks:
66 | - id: shell-lint
67 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | - Drop support for Python 3.7 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/57))
2 |
3 | ## [0.2.4] - 2024-06-04
4 |
5 | - Support dbt v1.8 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/58))
6 | - Handle dbt flags for dbt version 1.6 and higher ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/58))
7 | - Test dbt v1.7 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/48))
8 | - Make pytest `parser.addoption` type a function ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/51))
9 |
10 | ## [0.2.3] - 2023-08-25
11 |
12 | - Remove upper version constraint for dbt core ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/43))
13 | - CI speed up: ignore redundant tox environments ([issue](https://github.com/godatadriven/pytest-dbt-core/issues/44), [PR](https://github.com/godatadriven/pytest-dbt-core/pull/45))
14 | - Test for Python 3.11 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/41))
15 |
16 | ## [0.2.2] - 2023-08-09
17 |
18 | - Support dbt v1.6 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/37), [PR](https://github.com/godatadriven/pytest-dbt-core/pull/38))
19 |
20 | ## [0.2.1] - 2023-05-03
21 |
22 | - Support dbt v1.5 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/34))
23 |
24 | ## [0.2.0] - 2023-03-01
25 |
26 | - Add example with column rename macro ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/19))
27 | - Add `--profiles-dir` as an option for setting the profiles directory ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/25))
28 | - Extend tox matrix to test for dbt-spark minor versions ([issue](https://github.com/godatadriven/pytest-dbt-core/issues/28), [PR](https://github.com/godatadriven/pytest-dbt-core/pull/29))
29 | - Extend testing matrix to test for Python minor version 3.10 ([PR](https://github.com/godatadriven/pytest-dbt-core/pull/30))
30 |
31 | ## [0.1.0] - 2022-07-22
32 |
33 | - Run test examples from docs ([issue](https://github.com/godatadriven/pytest-dbt-core/issues/14), [PR](https://github.com/godatadriven/pytest-dbt-core/pull/17))
34 | - Add target flag ([issue](https://github.com/godatadriven/pytest-dbt-core/issues/11), [PR](https://github.com/godatadriven/pytest-dbt-core/pull/13))
35 | - Delete session module [is included in dbt-spark](https://github.com/dbt-labs/dbt-spark/issues/272)
36 | - Add Github templates
37 |
38 | ## [0.1.0rc1] - 2022-05-27
39 |
40 | - Add dbt project for testing
41 | - Add `--dbt-project-dir` pytest flag that points to the dbt project directory
42 |
43 | ## [0.1.0.dev2] - 2022-01-28
44 |
45 | - Implement use as pytest plugin
46 |
47 | ## [0.1.0.dev1] - 2022-01-28
48 |
49 | - Set-up repo
50 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Awesome that you are considering to contribute! This guide explains how the
4 | development process works, and how you can easily contribute to the package.
5 |
6 | ## Install
7 |
8 | Install the package in development mode in your environment using:
9 |
10 | ### Conda
11 |
12 | For a conda environment do:
13 |
14 | ``` bash
15 | conda activate
16 | pip install -e .[test]
17 | ```
18 |
19 | ### Venv
20 |
21 | For a pip environment do:
22 |
23 | ``` bash
24 | python -m venv venv/
25 | source ./venv/bin/activate
26 | pip install -e .[test]
27 | ```
28 |
29 | # Testing
30 |
31 | To verify that the package works as expected, we apply two types of
32 | testing: static analysis and unit tests.
33 |
34 | ## Static analysis
35 |
36 | For performing static analysis we use
37 | [Pre-Commit](https://calmcode.io/pre-commit/the-problem.html). Pre-commit allows
38 | us to easily run a suite of tests, as defined in the
39 | [.pre-commit-config.yaml](.pre-commit-config.yaml) in the project. The pre-commit checks
40 | include, but are not limited to:
41 |
42 | * Trivial checks:
43 | * The encoding of the files
44 | * Whether the files contain valid Python
45 | * Whether the YAML files are well formatted
46 | * Whether the code is free of debug statements
47 | * [**Flake8**](https://pypi.org/project/flake8/) is a Python library that wraps
48 | PyFlakes, pycodestyle and Ned Batchelder's McCabe script. It is a great
49 | toolkit for checking your code base against coding style
50 | ([PEP 8](https://www.python.org/dev/peps/pep-0008/)), programming errors (like
51 | “library imported but unused” and “Undefined name”), and code complexity.
52 | * [**Black**](https://github.com/psf/black) is the Python code formatter and
53 | makes sure that we format our Python code in the same way. By using it, you
54 | agree to cede control over minutiae of hand-formatting. In return, Black gives
55 | you speed, determinism, and freedom from pycodestyle nagging about formatting.
56 | You will save time and mental energy for more important matters.
57 | * [**MyPy**](https://github.com/python/mypy) is an optional static type checker
58 | for Python. You can add type hints
59 | ([PEP 484](https://www.python.org/dev/peps/pep-0484/)) to your Python
60 | programs, and use mypy to type check them statically. Find bugs in your
61 | programs without even running them!
62 |
63 | If you want to run this automatically before each commit, you can install it as
64 | a pre-commit hook:
65 |
66 | ``` bash
67 | pip install pre-commit # install the package in your environment
68 | pre-commit install # add the git hook
69 | ```
70 |
71 | You can run `pre-commit` manually using:
72 |
73 | ```bash
74 | pre-commit run --all-files
75 | ```
76 |
77 | ## Unit testing
78 |
79 | For running unit and integration tests we use `pytest`. You can run the tests
80 | using:
81 |
82 | ```bash
83 | pytest tests/
84 | ```
85 |
86 | Unit tests are the lowest level of tests, and should test isolated pieces of
87 | code. Testing an Apache Spark UDF is a perfect example:
88 |
89 | ```python
90 | from pyspark.sql.functions import udf
91 |
92 | @udf("int")
93 | def add(a: int, b: int) -> int:
94 | return a + b
95 | ```
96 |
97 | And in the test file:
98 | ```python
99 | from add import add  # assuming the UDF above is defined in add.py
100 |
101 | def test_add_udf(spark_session):
102 |     assert spark_session.createDataFrame([(19, 25)], ["a", "b"]).select(add("a", "b")).first()[0] == 44
103 | ```
104 |
105 | Similarly, if someone writes a complex regular expression, then you
106 | want to make sure that it is properly covered by unit tests. The `spark_session`
107 | fixture is provided automatically in the background by the
108 | [`pytest-spark`](https://pypi.org/project/pytest-spark/) package.
109 |
--------------------------------------------------------------------------------
/HOWTORELEASE.md:
--------------------------------------------------------------------------------
1 | # Prerequisites
2 |
3 | * Push and merge rights for this repo.
4 | [pytest-dbt-core repo](https://github.com/godatadriven/pytest-dbt-core),
5 | also referred to as the *upstream*.
6 | * A UNIX system that has:
7 | - `git` able to push to upstream
8 |
9 | # Release
10 |
11 | Run the release command and make sure you pass in the desired release number:
12 |
13 | ``` bash
14 | $ python -m pip install gitpython
15 | $ python scripts/release.py --version <version>
16 | ```
17 |
18 | Create a pull request and wait until the CI passes. The CI will automatically
19 | pick up the tag and release it; wait for the release to appear on PyPI. Only
20 | merge the PR and delete the release branch once that has happened.
21 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 | Pytest dbt core is a [pytest](https://docs.pytest.org) plugin for testing your
11 | [dbt](https://www.getdbt.com/) projects. Write unit tests for your dbt logic with
12 | pytest!
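
For example, a minimal macro test looks like the following sketch, which is taken
from the example in the user documentation and assumes a dbt project that depends
on `spark_utils` with `pytest-spark` configured:

```python
import pytest
from dbt.clients.jinja import MacroGenerator
from pyspark.sql import SparkSession


@pytest.mark.parametrize(
    "macro_generator", ["macro.spark_utils.get_tables"], indirect=True
)
def test_get_tables(
    spark_session: SparkSession, macro_generator: MacroGenerator
) -> None:
    """The get tables macro should return the created table."""
    expected_table = "default.example"
    spark_session.sql(f"CREATE TABLE {expected_table} (id int) USING parquet")
    tables = macro_generator()
    assert tables == [expected_table]
```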
13 |
14 | # :information_desk_person: User documentation
15 |
16 | The user documentation is hosted on
17 | [read the docs](https://pytest-dbt-core.readthedocs.io/en/latest/).
18 |
19 | # :raising_hand: Contributing guide
20 |
21 | The contributing guide is [here](./CONTRIBUTING.md).
22 |
23 | # :gift: How to release
24 |
25 | The "how to release" guide is [here](./HOWTORELEASE.md).
26 |
27 | # :cop: License
28 |
29 | The license is given [here](./LICENSE).
30 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.https://www.sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/dbt_project.yml:
--------------------------------------------------------------------------------
1 | name: dbt_project
2 | profile: dbt_project
3 | version: '0.3.0'
4 | config-version: 2
5 | require-dbt-version: [">=1.0.0", "<2.0.0"]
6 | macro-paths: ["macros"]
7 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/macros/normalize_column_names.sql:
--------------------------------------------------------------------------------
1 | {% macro normalize_column_names(column_names) %}
2 | {%- set re = modules.re -%}
3 |
4 | {%- for column_name in column_names -%}
5 |
6 | {%- set normalized_column_name = re.sub('[!?@#$()%&]', '', column_name).strip().replace(' ', '_').replace('.', '_').rstrip('_').lower() -%}
7 |
8 | {# Columns should not start with digits #}
9 | {%- if normalized_column_name[0].isdigit() -%}
10 | {% set normalized_column_name = '_' + normalized_column_name-%}
11 | {% endif -%}
12 |
13 | `{{ column_name }}` as {{ normalized_column_name }}
14 | {%- if not loop.last -%}, {% endif -%}
15 |
16 | {%- endfor -%}
17 |
18 | {% endmacro %}
19 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/macros/spark_adapter.sql:
--------------------------------------------------------------------------------
1 | {% macro spark__list_relations_without_caching(relation) %}
2 | {% call statement('list_relations_without_caching', fetch_result=True) -%}
3 | show table extended in {{ relation }} like '*'
4 | {% endcall %}
5 |
6 | {% do return(load_result('list_relations_without_caching').table) %}
7 | {% endmacro %}
8 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/packages.yml:
--------------------------------------------------------------------------------
1 | packages:
2 | - package: dbt-labs/spark_utils
3 | version: 0.3.0
4 | - package: dbt-labs/dbt_utils
5 | version: 0.8.6
6 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/profiles.yml:
--------------------------------------------------------------------------------
1 | dbt_project:
2 | target: test
3 | outputs:
4 | test:
5 | type: spark
6 | method: session
7 | schema: test
8 | host: NA # not used, but required by `dbt-core`
9 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/tests/test_normalize_column_names.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from dbt.clients.jinja import MacroGenerator
3 | from pyspark.sql import SparkSession
4 |
5 |
6 | @pytest.mark.parametrize(
7 | "macro_generator",
8 | ["macro.dbt_project.normalize_column_names"],
9 | indirect=True,
10 | )
11 | @pytest.mark.parametrize(
12 | "column_name,expected_column_name",
13 | [
14 | ("unit", "unit"),
15 | ("column with spaces", "column_with_spaces"),
16 | ("c!h?a#r$a(c)t%e&rs", "characters"),
17 | ("trailing white spaces ", "trailing_white_spaces"),
18 | ("column.with.periods", "column_with_periods"),
19 | ("9leading number", "_9leading_number"),
20 | ("UPPERCASE", "uppercase"),
21 | ],
22 | )
23 | def test_normalize_column_names(
24 | spark_session: SparkSession,
25 | macro_generator: MacroGenerator,
26 | column_name: str,
27 | expected_column_name: str,
28 | ) -> None:
29 | """Test normalize column names with different scenarios."""
30 | normalized_column_names = macro_generator([column_name])
31 | out = spark_session.sql(
32 | f"SELECT {normalized_column_names} FROM (SELECT True AS `{column_name}`)"
33 | )
34 | assert out.columns[0] == expected_column_name, normalized_column_names
35 |
--------------------------------------------------------------------------------
/docs/source/_static/dbt_project/tests/test_spark_get_tables.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from dbt.clients.jinja import MacroGenerator
3 | from pyspark.sql import SparkSession
4 |
5 |
6 | @pytest.mark.parametrize(
7 | "macro_generator", ["macro.spark_utils.get_tables"], indirect=True
8 | )
9 | def test_get_tables(
10 | spark_session: SparkSession, macro_generator: MacroGenerator
11 | ) -> None:
12 | """The get tables macro should return the created table."""
13 | expected_table = "default.example"
14 | spark_session.sql(f"CREATE TABLE {expected_table} (id int) USING parquet")
15 | tables = macro_generator()
16 | assert tables == [expected_table]
17 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | # import os
14 | # import sys
15 | # sys.path.insert(0, os.path.abspath('.'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = "pytest-dbt-core"
21 | copyright = "2022, Cor Zuurmond"
22 | author = "Cor Zuurmond"
23 |
24 |
25 | # -- General configuration ---------------------------------------------------
26 |
27 | # Add any Sphinx extension module names here, as strings. They can be
28 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
29 | # ones.
30 | extensions = [
31 | "sphinx.ext.duration",
32 | "sphinx.ext.doctest",
33 | ]
34 |
35 | # Add any paths that contain templates here, relative to this directory.
36 | templates_path = ["_templates"]
37 |
38 | # List of patterns, relative to source directory, that match files and
39 | # directories to ignore when looking for source files.
40 | # This pattern also affects html_static_path and html_extra_path.
41 | exclude_patterns = [".DS_Store"]
42 |
43 |
44 | # https://www.sphinx-doc.org/en/master/usage/markdown.html
45 | source_suffix = {
46 | ".rst": "restructuredtext",
47 | ".txt": "markdown",
48 | ".md": "markdown",
49 | }
50 |
51 | # -- Options for HTML output -------------------------------------------------
52 |
53 | # The theme to use for HTML and HTML Help pages. See the documentation for
54 | # a list of builtin themes.
55 | #
56 | html_theme = "sphinx_rtd_theme"
57 |
58 | # Add any paths that contain custom static files (such as style sheets) here,
59 | # relative to this directory. They are copied after the builtin static files,
60 | # so a file named "default.css" will overwrite the builtin "default.css".
61 | html_static_path = ["_static"]
62 |
--------------------------------------------------------------------------------
/docs/source/configuration.rst:
--------------------------------------------------------------------------------
1 | Configuration
2 | #############
3 |
4 | The plugin runs in the context of a dbt project.
5 |
6 | Project directory
7 | ************************
8 | When you run `pytest` from the root of your project, you do **not** need to set
9 | the project directory. If you want to run `pytest` from another location, you
10 | point the `--dbt-project-dir` option to the root of your project.
13 |
14 | Target
15 | ************************
16 | If you want to use a target other than the default,
17 | set the `--dbt-target` option.
20 |
21 | Profiles directory
22 | **********************
23 | If you want to change dbt's profiles directory, use the `--profiles-dir` option.
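
For example, a minimal sketch of passing all three options programmatically via
`pytest.main` (the paths below are placeholders for your own project layout):

.. code-block:: python

    import pytest

    # Equivalent to:
    # pytest --dbt-project-dir=./my_dbt_project --dbt-target=test --profiles-dir=./my_dbt_project
    pytest.main(
        [
            "--dbt-project-dir=./my_dbt_project",  # root of your dbt project
            "--dbt-target=test",  # a target defined in your profiles.yml
            "--profiles-dir=./my_dbt_project",  # directory that contains profiles.yml
        ]
    )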
24 |
--------------------------------------------------------------------------------
/docs/source/dbt_spark.rst:
--------------------------------------------------------------------------------
1 | dbt-spark
2 | #########
3 |
4 | `dbt-spark` users are recommended to use the Spark session connection method when testing.
5 | Together with the `pytest-spark` plugin,
6 | an on-the-fly Spark session removes the need for hosting Spark.
7 |
8 | Installation
9 | ************
10 |
11 | Install `dbt-spark`, `pytest-dbt-core` and `pytest-spark` via pip with
12 |
13 | .. code-block:: bash
14 |
15 | python -m pip install dbt-spark pytest-dbt-core pytest-spark
16 |
17 |
18 | Configuration
19 | *************
20 |
21 | Configure `pytest-spark` via the pytest configuration.
22 |
23 | .. code-block:: cfg
24 |
25 | # setup.cfg
26 | [tool:pytest]
27 | spark_options =
28 | spark.executor.instances: 1
29 | spark.sql.catalogImplementation: in-memory
30 |
31 | Usage
32 | *****
33 |
34 | Use the `spark_session` fixture to set up the unit test for your macro:
35 |
36 | .. literalinclude :: _static/dbt_project/tests/test_spark_get_tables.py
37 | :language: python
38 |
39 | Test
40 | ****
41 |
42 | Run pytest via your preferred interface.
43 |
44 | .. code-block:: bash
45 |
46 | pytest
47 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. pytest-dbt-core documentation master file, created by
2 | sphinx-quickstart on Fri Jan 28 08:41:31 2022.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | pytest-dbt-core
7 | ###############
8 |
9 | Write unit tests for your dbt logic with `pytest-dbt-core`!
10 | `pytest-dbt-core` is a `pytest `_ plugin for testing your
11 | `dbt `_ projects.
12 |
13 | Installation
14 | ************
15 |
16 | Install `pytest-dbt-core` via pip with
17 |
18 | .. code-block:: bash
19 |
20 | python -m pip install pytest-dbt-core
21 |
22 | Usage
23 | ******
24 |
25 | Create a macro:
26 |
27 | .. literalinclude :: _static/dbt_project/macros/normalize_column_names.sql
28 | :language: jinja
29 |
30 | Unit test a macro:
31 |
32 | .. literalinclude :: _static/dbt_project/tests/test_normalize_column_names.py
33 | :language: python
34 |
35 | .. toctree::
36 | :maxdepth: 2
37 | :caption: Contents:
38 |
39 | configuration.rst
40 | dbt_spark.rst
41 | projects.rst
42 |
43 |
44 | Indices and tables
45 | ==================
46 |
47 | * :ref:`genindex`
48 | * :ref:`modindex`
49 | * :ref:`search`
50 |
--------------------------------------------------------------------------------
/docs/source/projects.rst:
--------------------------------------------------------------------------------
1 | Projects
2 | #############
3 |
4 | The following projects use the `pytest-dbt-core` plugin:
5 |
6 | * `spark-utils`
7 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools >= 40.0.4",
4 | "setuptools_scm >= 2.0.0",
5 | "wheel >= 0.29.0",
6 | ]
7 | build-backend = 'setuptools.build_meta'
8 |
9 | [tool.black]
10 | line-length = 79
11 | target-version = ['py39']
12 |
13 | [tool.setuptools_scm]
14 | write_to = "src/pytest_dbt_core/_version.py"
15 | write_to_template = """
16 | from __future__ import unicode_literals
17 |
18 | __version__ = {version!r}
19 | """
20 |
21 | [tool.isort]
22 | profile = "black"
23 | src_paths = ["src", "tests"]
24 |
--------------------------------------------------------------------------------
/scripts/release.py:
--------------------------------------------------------------------------------
1 | """
2 | Create a release.
3 |
4 | Source: https://github.com/tox-dev/tox/blob/master/tasks/release.py
5 | """
6 |
7 | from __future__ import annotations
8 |
9 | import datetime as dt
10 | from pathlib import Path
11 |
12 | from git import Commit, Head, Remote, Repo, TagReference
13 | from packaging.version import Version
14 |
15 | ROOT_SRC_DIR = Path(__file__).parents[1]
16 | CHANGE_LOG = ROOT_SRC_DIR / "CHANGELOG.md"
17 |
18 |
19 | def main(version_str: str) -> None:
20 | version = Version(version_str)
21 | repo = Repo(str(ROOT_SRC_DIR))
22 |
23 | if repo.is_dirty():
24 | raise RuntimeError(
25 | "Current repository is dirty. Please commit any changes and try again."
26 | )
27 |
28 | upstream, release_branch = create_release_branch(repo, version)
29 | release_commit = create_release_commit(repo, version)
30 | tag = tag_release_commit(release_commit, repo, version)
31 | print("push release commit")
32 | repo.git.push(upstream.name, release_branch)
33 | print("push release tag")
34 | repo.git.push(upstream.name, tag)
35 | print("All done! ✨ 🍰 ✨")
36 | print("WARNING: you have switched to the release branch")
37 |
38 |
39 | def create_release_commit(repo: Repo, version: Version) -> Commit:
40 | with CHANGE_LOG.open("r") as change_log:
41 | change_log_content = change_log.read()
42 |
43 | new_change_log_content = (
44 | f"## [{version}] - {dt.date.today()}\n\n" + change_log_content
45 | )
46 |
47 | with CHANGE_LOG.open("w") as change_log:
48 | change_log.write(new_change_log_content)
49 |
50 | changes = change_log_content.split("##", 1)[0]
51 | repo.index.add((str(CHANGE_LOG),))
52 | release_commit = repo.index.commit(f"Release {version}\n\n{changes}")
53 | return release_commit
54 |
55 |
56 | def create_release_branch(repo: Repo, version: Version) -> tuple[Remote, Head]:
57 | print("create release branch from upstream main")
58 | upstream = get_upstream(repo)
59 | upstream.fetch()
60 | branch_name = f"release-{version}"
61 | release_branch = repo.create_head(
62 | branch_name, upstream.refs.main, force=True
63 | )
64 | upstream.push(refspec=f"{branch_name}:{branch_name}", force=True)
65 | release_branch.set_tracking_branch(
66 | repo.refs[f"{upstream.name}/{branch_name}"]
67 | )
68 | release_branch.checkout()
69 | return upstream, release_branch
70 |
71 |
72 | def get_upstream(repo: Repo) -> Remote:
73 | for remote in repo.remotes:
74 | for url in remote.urls:
75 | if url.endswith("godatadriven/pytest-dbt-core.git"):
76 | return remote
77 | raise RuntimeError(
78 | "could not find godatadriven/pytest-dbt-core.git remote"
79 | )
80 |
81 |
82 | def tag_release_commit(
83 | release_commit: Commit, repo: Repo, version: Version
84 | ) -> TagReference:
85 | print("tag release commit")
86 | existing_tags = [x.name for x in repo.tags]
87 | if str(version) in existing_tags:
88 | print(f"delete existing tag {version}")
89 | repo.delete_tag(version)
90 | print(f"create tag {version}")
91 | tag = repo.create_tag(version, ref=release_commit, force=True)
92 | return tag
93 |
94 |
95 | if __name__ == "__main__":
96 | import argparse
97 |
98 | parser = argparse.ArgumentParser(prog="release")
99 | parser.add_argument("--version", required=True)
100 | options = parser.parse_args()
101 | main(options.version)
102 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = pytest-dbt-core
3 | description = Pytest extension for dbt.
4 | long_description = file: README.md
5 | long_description_content_type = text/markdown
6 | url = https://github.com/godatadriven/pytest-dbt-core
7 | author = Cor Zuurmond
8 | maintainer = Cor Zuurmond
9 | maintainer_email = corzuurmond@godatadriven.com
10 | license = Apache 2.0
11 | license_file = LICENSE
12 | platforms = any
13 | keywords = dbt, SQL, data, data transformation, testing, pytest
14 | project_urls =
15 | Source=https://github.com/godatadriven/pytest-dbt-core
16 | Tracker=https://github.com/godatadriven/pytest-dbt-core/issues
17 | classifiers =
18 | Framework :: Pytest
19 |
20 | [options]
21 | packages = find:
22 | package_dir = =src
23 | install_requires =
24 | dbt-core>=1.0.0
25 | python_requires = >=3.8
26 |
27 | [options.packages.find]
28 | where = src
29 |
30 | [options.extras_require]
31 | test =
32 | dbt-spark[ODBC]>=1.1.0,<1.9.0
33 | pyspark>=3.0.0,<4.0.0
34 | pre-commit>=2.14.1
35 | pytest>=6.2.5
36 | pytest-spark>=0.6.0
37 | pytest-cov>=2.12.1
38 |
39 | [options.entry_points]
40 | pytest11 =
41 | dbt=pytest_dbt_core.plugin
42 |
43 | [flake8]
44 | ignore = E226,E302,E41,W504,W503
45 | max-line-length = 120
46 | exclude = venv\,direnv\
47 |
48 | [mypy]
49 | python_version = 3.9
50 | disallow_untyped_calls = True
51 | disallow_untyped_defs = True
52 | disallow_incomplete_defs = True
53 | check_untyped_defs = True
54 | no_implicit_optional = True
55 | warn_redundant_casts = True
56 |
57 | [tool:pytest]
58 | addopts = --cov=src
59 | --cov-report=xml:pytest-coverage.xml
60 | --junitxml=pytest-output.xml
61 | --doctest-glob=README.md
62 | --doctest-modules
63 | --ignore=scripts/
64 | --dbt-project-dir=./tests/dbt_project
65 | --dbt-target=test
66 | --profiles-dir=./tests/dbt_project
67 | spark_options =
68 | spark.app.name: dbt-core
69 | spark.executor.instances: 1
70 | spark.sql.catalogImplementation: in-memory
71 |
72 | [tox:tox]
73 | envlist =
74 | py{3.8,3.9,3.10}-dbt-spark{11,12,13,14,15,16,17,18}
75 | py3.11-dbt-spark{14,15,16,17,18} # Previous dbt-spark versions fail when using Python 3.11
76 | isolated_build = true
77 | skip_missing_interpreters = true
78 |
79 | [testenv]
80 | description = run the tests with pytest under {basepython}
81 | setenv =
82 | PIP_DISABLE_PIP_VERSION_CHECK = 1
83 | COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}}
84 | {py27,pypy}: PYTHONWARNINGS=ignore:DEPRECATION::pip._internal.cli.base_command
85 | DBT_PROFILES_DIR = {env:DBT_PROFILES_DIR:{toxinidir}/tests/dbt_project}
86 | passenv =
87 | PYTEST_*
88 | PIP_CACHE_DIR
89 | deps =
90 | dbt-spark11: dbt-spark[ODBC]~=1.1.0
91 | dbt-spark12: dbt-spark[ODBC]~=1.2.0
92 | dbt-spark13: dbt-spark[ODBC]~=1.3.0
93 | dbt-spark14: dbt-spark[ODBC]~=1.4.0
94 | dbt-spark15: dbt-spark[ODBC]~=1.5.0
95 | dbt-spark16: dbt-spark[ODBC]~=1.6.0
96 | dbt-spark17: dbt-spark[ODBC]~=1.7.0
97 | dbt-spark18: dbt-spark[ODBC]~=1.8.0
98 | pip >= 19.3.1
99 | extras = test
100 | commands = pytest {posargs:tests}
101 |
102 | [testenv:docs]
103 | description = run the tests in docs
104 | setenv =
105 | PIP_DISABLE_PIP_VERSION_CHECK = 1
106 | {py27,pypy}: PYTHONWARNINGS=ignore:DEPRECATION::pip._internal.cli.base_command
107 | passenv =
108 | PYTEST_*
109 | PIP_CACHE_DIR
110 | deps =
111 | dbt-spark11: dbt-spark[ODBC]~=1.1.0
112 | dbt-spark12: dbt-spark[ODBC]~=1.2.0
113 | dbt-spark13: dbt-spark[ODBC]~=1.3.0
114 | dbt-spark14: dbt-spark[ODBC]~=1.4.0
115 | dbt-spark15: dbt-spark[ODBC]~=1.5.0
116 | dbt-spark16: dbt-spark[ODBC]~=1.6.0
117 | dbt-spark17: dbt-spark[ODBC]~=1.7.0
118 | dbt-spark18: dbt-spark[ODBC]~=1.8.0
119 | pip >= 19.3.1
120 | extras = test
121 | commands_pre = dbt deps --project-dir {toxinidir}/docs/source/_static/dbt_project --profiles-dir {toxinidir}/docs/source/_static/dbt_project
122 | commands = pytest {posargs:docs/source/_static/dbt_project/tests} --dbt-project-dir={toxinidir}/docs/source/_static/dbt_project --profiles-dir={toxinidir}/docs/source/_static/dbt_project
123 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup()
4 |
--------------------------------------------------------------------------------
/src/pytest_dbt_core/__init__.py:
--------------------------------------------------------------------------------
1 | """Top level attributes."""
2 |
3 | from ._version import __version__
4 |
5 | __all__ = ("__version__",)
6 |
--------------------------------------------------------------------------------
/src/pytest_dbt_core/fixtures.py:
--------------------------------------------------------------------------------
1 | """The pytest fixtures."""
2 |
3 | from __future__ import annotations
4 |
5 | import dataclasses
6 | import os
7 |
8 | import dbt.tracking
9 | import pytest
10 | from _pytest.fixtures import SubRequest
11 | from dbt import flags, version
12 | from dbt.clients.jinja import MacroGenerator
13 | from dbt.config import project
14 | from dbt.config.runtime import RuntimeConfig
15 | from dbt.context import providers
16 | from dbt.contracts.graph.manifest import Manifest
17 | from dbt.parser.manifest import ManifestLoader
18 | from dbt.tracking import User
19 |
20 | from dbt.adapters.factory import ( # isort:skip
21 | AdapterContainer,
22 | get_adapter,
23 | register_adapter,
24 | )
25 |
26 |
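# Some dbt code paths expect an active tracking user; create one up front so
# that loading the manifest and running macros does not trip over it.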
27 | dbt.tracking.active_user = User(os.getcwd())
28 |
29 |
30 | def _get_installed_dbt_version() -> tuple[int, int]:
31 | """Cast a dbt version to a tuple with major and minor version."""
32 | installed_dbt_version = version.get_installed_version()
33 | return int(installed_dbt_version.major), int(installed_dbt_version.minor)
34 |
35 |
36 | DBT_INSTALLED_VERSION = _get_installed_dbt_version()
37 |
38 |
39 | @dataclasses.dataclass(frozen=True)
40 | class Args:
41 | """
42 | The arguments.
43 |
44 |     dbt is written as a command line tool, therefore its entrypoints expect
45 |     (parsed) arguments. To reuse those entrypoints, we mock the minimally
46 |     required arguments here.
47 |
48 | Source
49 | ------
50 | See argparse `add_argument` statements in `dbt.main`.
51 | """
52 |
53 | project_dir: str
54 | profiles_dir: str
55 | target: str | None
56 | profile: str | None
57 | threads: int | None
58 | # Required from dbt version 1.8 onwards
59 | REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES = False
60 |
61 |
62 | @pytest.fixture
63 | def config(request: SubRequest) -> RuntimeConfig:
64 | """
65 | Get the (runtime) config.
66 |
67 | Parameters
68 | ----------
69 | request : SubRequest
70 | The pytest request.
71 |
72 | Returns
73 | -------
74 | RuntimeConfig
75 | The runtime config.
76 | """
77 | # For the arguments that are hardcoded to `None`, dbt internals set the
78 |     # appropriate values
79 | args = Args(
80 | project_dir=request.config.getoption("--dbt-project-dir"),
81 | profiles_dir=request.config.getoption("--profiles-dir"),
82 | target=request.config.getoption("--dbt-target"),
83 | profile=None,
84 | threads=None,
85 | )
86 |
87 | if DBT_INSTALLED_VERSION > (1, 5):
88 | # See https://github.com/dbt-labs/dbt-core/issues/9183
89 | project_flags = project.read_project_flags(
90 | args.project_dir, args.profiles_dir
91 | )
92 | flags.set_from_args(args, project_flags)
93 | else:
94 | flags.set_from_args(args, user_config=None)
95 |
96 | config = RuntimeConfig.from_args(args)
97 | return config
98 |
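# A minimal usage sketch (hypothetical test): the fixture builds the runtime
# config from the plugin's command-line options, so a test only needs to
# request it by name, e.g.
#
#     def test_project_name(config: RuntimeConfig) -> None:
#         assert config.project_name == "dbt_project"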
99 |
100 | @pytest.fixture
101 | def adapter(config: RuntimeConfig) -> AdapterContainer:
102 | """
103 | Get the adapter.
104 |
105 | Parameters
106 | ----------
107 | config : RuntimeConfig
108 | The runtime config.
109 |
110 | Returns
111 | -------
112 | AdapterContainer
113 | The adapter.
114 | """
115 | if DBT_INSTALLED_VERSION > (1, 7):
116 | from dbt.mp_context import get_mp_context
117 |
118 | register_adapter(config, get_mp_context())
119 | else:
120 | register_adapter(config)
121 | adapter = get_adapter(config)
122 | adapter.acquire_connection()
123 | return adapter
124 |
125 |
126 | @pytest.fixture
127 | def manifest(
128 | adapter: AdapterContainer,
129 | ) -> Manifest:
130 | """
131 | Get the dbt manifest.
132 |
133 | Parameters
134 | ----------
135 | adapter : AdapterContainer
136 | The adapter.
137 |
138 | Returns
139 | -------
140 | Manifest
141 | The manifest.
142 | """
143 | if DBT_INSTALLED_VERSION > (1, 7):
144 | from dbt_common.clients.system import get_env
145 | from dbt_common.context import set_invocation_context
146 |
147 | set_invocation_context(get_env())
148 |
149 | manifest = ManifestLoader.get_full_manifest(adapter.config)
150 | return manifest
151 |
152 |
153 | @pytest.fixture
154 | def macro_generator(
155 | request: SubRequest, config: RuntimeConfig, manifest: Manifest
156 | ) -> MacroGenerator:
157 | """
158 | Get a macro generator.
159 |
160 | Parameters
161 | ----------
162 | request : SubRequest
163 | The pytest request containing the macro name.
164 | config : RuntimeConfig
165 | The runtime config.
166 | manifest : Manifest
167 | The manifest.
168 |
169 | Returns
170 | -------
171 | MacroGenerator
172 | The macro generator.
173 | """
174 | macro = manifest.macros[request.param]
175 | context = providers.generate_runtime_macro_context(
176 | macro, config, manifest, macro.package_name
177 | )
178 | macro_generator = MacroGenerator(macro, context)
179 | return macro_generator
180 |
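# A minimal usage sketch (hypothetical test): select a macro through indirect
# parametrization using its fully qualified name, "macro.<project>.<macro>",
# then call the generator to render it, e.g.
#
#     @pytest.mark.parametrize(
#         "macro_generator", ["macro.dbt_project.to_cents"], indirect=True
#     )
#     def test_to_cents(macro_generator: MacroGenerator) -> None:
#         assert "price * 100" in macro_generator("price")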
--------------------------------------------------------------------------------
/src/pytest_dbt_core/plugin.py:
--------------------------------------------------------------------------------
1 | """The entrypoint for the plugin."""
2 |
3 | import os
4 |
5 | from _pytest.config.argparsing import Parser
6 |
7 | from .fixtures import adapter, config, macro_generator, manifest
8 |
9 | __all__ = (
10 | "adapter",
11 | "config",
12 | "macro_generator",
13 | "manifest",
14 | )
15 |
16 |
17 | def pytest_addoption(parser: Parser) -> None:
18 | """
19 | Add pytest options.
20 |
21 | Parameters
22 | ----------
23 | parser : Parser
24 | The parser.
25 | """
26 | parser.addoption(
27 | "--dbt-project-dir",
28 | help="The dbt project directory.",
29 | type=str,
30 | default=os.getcwd(),
31 | )
32 | parser.addoption(
33 | "--dbt-target",
34 | help="Which target to load for the given profile",
35 | type=str,
36 | )
37 | parser.addoption(
38 | "--profiles-dir",
39 | help="Which directory to look in for the profiles.yml file",
40 | type=str,
41 | )
42 |
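# Example invocation: the options registered above can be passed on the command
# line or via `addopts` in the pytest configuration, e.g. for this repository's
# test project:
#
#     pytest --dbt-project-dir ./tests/dbt_project \
#            --dbt-target test \
#            --profiles-dir ./tests/dbt_project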
--------------------------------------------------------------------------------
/tests/dbt_project/dbt_project.yml:
--------------------------------------------------------------------------------
1 | name: dbt_project
2 | profile: dbt_project
3 | version: '0.3.0'
4 | config-version: 2
5 | require-dbt-version: [">=1.0.0", "<2.0.0"]
6 | macro-paths: ["macros"]
7 |
--------------------------------------------------------------------------------
/tests/dbt_project/macros/fetch_single_statement.sql:
--------------------------------------------------------------------------------
1 | {% macro fetch_single_statement(statement, default_value="") %}
2 |
3 | {% set results = run_query(statement) %}
4 |
5 | {% if execute %}
6 | {% set value = results.columns[0].values()[0] %}
7 | {% else %}
8 | {% set value = default_value %}
9 | {% endif %}
10 |
11 | {{ return( value ) }}
12 |
13 | {% endmacro %}
14 |
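{#
  Usage sketch (the query and table name are illustrative): the macro runs the
  statement and returns the single value it selects, e.g.

  {% set row_count = fetch_single_statement("SELECT count(*) FROM prices") %}
#}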
--------------------------------------------------------------------------------
/tests/dbt_project/macros/prices.sql:
--------------------------------------------------------------------------------
1 | {% macro to_cents(column_name) %}
2 | {{ column_name }} * 100
3 | {% endmacro %}
4 |
--------------------------------------------------------------------------------
/tests/dbt_project/profiles.yml:
--------------------------------------------------------------------------------
1 | dbt_project:
2 | target: dev
3 | outputs:
4 | dev:
5 | type: spark
6 | method: odbc
7 | schema: cor
8 | host: https://adb-123456789.00.azuredatabricks.net
9 | port: 443
10 | organization: "123456789"
11 | cluster: 1234-567890-12ab3c4d
12 | token: dapi1abc2345de6f78g901h234ij5klm6789-1
13 | driver: /Library/simba/spark/lib/libsparkodbc_sbu.dylib
14 | test:
15 | type: spark
16 | method: session
17 | schema: test
18 | host: NA # not used, but required by `dbt-core`
19 |
--------------------------------------------------------------------------------
/tests/dbt_project/tests/test_fetch_single_statement.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from dbt.clients.jinja import MacroGenerator
3 |
4 |
5 | @pytest.mark.parametrize(
6 | "macro_generator",
7 | ["macro.dbt_project.fetch_single_statement"],
8 | indirect=True,
9 | )
10 | def test_fetch_single_statement(macro_generator: MacroGenerator) -> None:
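    # The macro runs the statement through the adapter and returns the single
    # value it selects.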
11 | out = macro_generator("SELECT 1")
12 | assert out == 1
13 |
--------------------------------------------------------------------------------
/tests/dbt_project/tests/test_prices.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from dbt.clients.jinja import MacroGenerator
3 | from pyspark.sql import SparkSession
4 |
5 |
6 | @pytest.mark.parametrize(
7 | "macro_generator",
8 | ["macro.dbt_project.to_cents"],
9 | indirect=True,
10 | )
11 | def test_to_cents(
12 | spark_session: SparkSession, macro_generator: MacroGenerator
13 | ) -> None:
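    # Rendering the macro for the "price" column yields the expression
    # "price * 100"; applying it in a query should turn 10 into 1000 cents.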
14 | expected = spark_session.createDataFrame([{"cents": 1000}])
15 | to_cents = macro_generator("price")
16 | out = spark_session.sql(
17 | "with data AS (SELECT 10 AS price) "
18 | f"SELECT cast({to_cents} AS bigint) AS cents FROM data"
19 | )
20 | assert out.collect() == expected.collect()
21 |
--------------------------------------------------------------------------------