├── .github
├── CODEOWNERS
├── dependabot.yml
└── workflows
│ ├── tests.yml
│ └── version.yml
├── .gitignore
├── CONTRIBUTING.md
├── License.md
├── README.md
├── cookiecutter.json
├── docs
└── arch
│ ├── README.md
│ └── adr-001-new-adapter-scaffolding.md
├── hooks
└── pre_gen_project.py
├── pyproject.toml
├── requirements.txt
├── tests
└── template_test.py
└── {{cookiecutter.project_name}}
├── .flake8
├── .github
└── workflows
│ ├── .gitkeep
│ ├── integration.yml
│ └── main.yml
├── .gitignore
├── .pre-commit-config.yaml
├── MANIFEST.in
├── README.md
├── dbt
├── adapters
│ └── {{cookiecutter.directory_name}}
│ │ ├── __init__.py
│ │ ├── __version__.py
│ │ ├── connections.py
│ │ └── impl.py
└── include
│ └── {{cookiecutter.directory_name}}
│ ├── __init__.py
│ ├── dbt_project.yml
│ ├── macros
│ ├── adapters.sql
│ └── catalog.sql
│ └── profile_template.yml
├── dev-requirements.txt
├── mypy.ini
├── pytest.ini
├── setup.py
├── test.env
├── tests
├── __init__.py
├── conftest.py
└── functional
│ └── adapter
│ └── test_basic.py
└── tox.ini
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Owning team for this repo
2 | * @dbt-labs/adapters
3 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | # python dependencies
4 | - package-ecosystem: "pip"
5 | directory: "/"
6 | schedule:
7 | interval: "weekly"
8 | rebase-strategy: "disabled"
9 |
10 | # github dependencies
11 | - package-ecosystem: "github-actions"
12 | directory: "/.github"
13 | schedule:
14 | interval: "weekly"
15 | rebase-strategy: "disabled"
16 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | # **what?**
2 | # Run tests
3 | #
4 | # **why?**
 5 | # Ensure we don't introduce breaking changes
6 | #
7 | # **when?**
8 | # On every push to main, pull request, scheduled and manually
9 | #
10 |
11 | name: Tests and Cookiecutter Checks
12 |
13 | on:
14 | push:
15 | branches:
16 | - main
17 | pull_request:
18 | workflow_dispatch:
19 | schedule:
20 | - cron: "0 5 * * *" # 5 UTC
21 |
22 | jobs:
23 |
24 | tests:
25 |
26 | name: tests / python ${{ matrix.python-version }}
27 |
28 | runs-on: ubuntu-latest
29 |
30 | strategy:
31 | fail-fast: false
32 | matrix:
33 | python-version: ['3.8', '3.9', '3.10', '3.11']
34 |
35 | env:
36 | PYTEST_ADDOPTS: "-v --color=yes"
37 |
38 | steps:
39 | - name: check out the repository
40 | uses: actions/checkout@v3
41 | with:
42 | persist-credentials: false
43 |
44 | - name: Set up Python ${{ matrix.python-version }}
45 | uses: actions/setup-python@v4
46 | with:
47 | python-version: ${{ matrix.python-version }}
48 |
49 | - name: Install python dependencies
50 | run: |
51 | pip install --user --upgrade pip
52 | pip --version
53 | pip install -r requirements.txt
54 |
55 | - name: Run tests
56 | run:
57 | python -m pytest tests/
58 |
--------------------------------------------------------------------------------
/.github/workflows/version.yml:
--------------------------------------------------------------------------------
1 | # **what?**
2 | # Creates a pull request to update the version to match dbt-core
3 | #
4 | # **why?**
 5 | # Keep the versions in sync
6 | #
7 | # **when?**
8 | # Scheduled for 0500UTC
9 | #
10 | # To test locally with `act` (https://github.com/nektos/act), run `act -W .github/workflows/version.yml --detect-event`
11 |
12 | name: Check and update dbt version
13 | on:
14 | workflow_dispatch:
15 | schedule:
16 | - cron: "0 5 * * *" # 5 UTC
17 |
18 | jobs:
19 | update-version:
20 | runs-on: ubuntu-latest
21 |
22 | steps:
23 | - name: Check out Repo
24 | uses: actions/checkout@v3
25 |
26 | - name: Get Latest Package Info for dbt-core
27 | id: package-info
28 | uses: dbt-labs/actions/py-package-info@main
29 | with:
30 | package: "dbt-core"
31 |
32 | - name: dbt-core version
33 | run: |
34 | echo version: ${{ steps.package-info.outputs.version }}
35 |
36 | - name: Parse Semver
37 | id: parse-valid
38 | uses: dbt-labs/actions/parse-semver@v1
39 | with:
40 | version: ${{ steps.package-info.outputs.version }}
41 |
42 | - name: Parsed dbt-core Version
43 | run: |
44 | echo base-version: ${{ steps.parse-valid.outputs.base-version }}
45 | echo major: ${{ steps.parse-valid.outputs.major }}
46 | echo minor: ${{ steps.parse-valid.outputs.minor }}
47 | echo patch: ${{ steps.parse-valid.outputs.patch }}
48 |
49 | - name: Update project version
50 | uses: jossef/action-set-json-field@v2
51 | with:
52 | file: cookiecutter.json
53 | field: project_version
54 | value: ${{ format(
55 | '{0}.{1}.0',
56 | steps.parse-valid.outputs.major,
57 | steps.parse-valid.outputs.minor
58 | ) }}
59 |
60 | - name: Display cookiecutter.json
61 | run: cat cookiecutter.json
62 |
63 | - name: Verify Changed files
64 | if: ${{ !env.ACT }}
65 | uses: tj-actions/verify-changed-files@v14
66 | id: verify-changed-files
67 | with:
68 | files: |
69 | cookiecutter.json
70 |
71 | - name: Create pull request
72 | if: ${{ !env.ACT && steps.verify-changed-files.outputs.files_changed == 'true' }}
73 | uses: peter-evans/create-pull-request@v5
74 | with:
75 | title: "Bump dbt to ${{ steps.parse-valid.outputs.base-version }}"
76 | branch: "bump-dbt/${{ steps.parse-valid.outputs.base-version }}"
77 | body: |
 78 | Updates cookiecutter ref of dbt-core to newest available version.
79 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # PyCharm
132 | .idea
133 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to `dbt-database-adapter-scaffold`
2 |
3 | `dbt-database-adapter-scaffold` is open source software. It is what it is today because community members have opened issues, provided feedback, and [contributed to the knowledge loop](https://www.getdbt.com/dbt-labs/values/). Whether you are a seasoned open source contributor or a first-time committer, we welcome and encourage you to contribute code, documentation, ideas, or problem statements to this project.
4 |
5 | 1. [About this document](#about-this-document)
6 | 2. [Getting the code](#getting-the-code)
7 | 3. [Setting up an environment](#setting-up-an-environment)
8 | 4. [Testing](#testing)
9 | 5. [Submitting a Pull Request](#submitting-a-pull-request)
10 |
11 | ## About this document
12 |
13 | There are many ways to contribute to the ongoing development of `dbt-database-adapter-scaffold`, such as by participating in discussions and issues. We encourage you to first read our higher-level document: ["Expectations for Open Source Contributors"](https://docs.getdbt.com/docs/contributing/oss-expectations).
14 |
15 | The rest of this document serves as a more granular guide for contributing code changes to `dbt-database-adapter-scaffold` (this repository). It is not intended as a guide for using `dbt-database-adapter-scaffold`, and some pieces assume a level of familiarity with Python development (virtualenvs, `pip`, etc). Specific code snippets in this guide assume you are using macOS or Linux and are comfortable with the command line.
16 |
17 | If you get stuck, we're happy to help! Drop us a line in the `#adapter-ecosystem` channel in the [dbt Community Slack](https://community.getdbt.com).
18 |
19 | ### Notes
20 |
21 | - **CLA:** Please note anyone contributing code to `dbt-database-adapter-scaffold` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-database-adapter-scaffold` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones.
22 |
23 | ## Getting the code
24 |
25 | ### Installing git
26 |
27 | You will need `git` in order to download and modify the `dbt-core` source code. On macOS, the best way to download git is to just install [Xcode](https://developer.apple.com/support/xcode/).
28 |
29 | ### External contributors
30 |
31 | If you are not a member of the `dbt-labs` GitHub organization, you can contribute to `dbt-database-adapter-scaffold` by forking the `dbt-database-adapter-scaffold` repository. For a detailed overview on forking, check out the [GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). In short, you will need to:
32 |
33 | 1. Fork the `dbt-database-adapter-scaffold` repository
34 | 2. Clone your fork locally
35 | 3. Check out a new branch for your proposed changes
36 | 4. Push changes to your fork
37 | 5. Open a pull request against `dbt-labs/dbt-database-adapter-scaffold` from your forked repository
38 |
39 | ### dbt Labs contributors
40 |
41 | If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-database-adapter-scaffold` repo. Rather than forking `dbt-database-adapter-scaffold` to make your changes, just clone the repository, check out a new branch, and push directly to that branch.
42 |
43 | ## Setting up an environment
44 |
45 | There are some tools that will be helpful to you in developing locally. While this is the list relevant for `dbt-database-adapter-scaffold` development, many of these tools are used commonly across open-source python projects.
46 |
47 | ### Tools
48 |
49 | These are the tools used in `dbt-database-adapter-scaffold` development and testing:
50 |
51 | - [`pytest`](https://docs.pytest.org/en/latest/) to define, discover, and run tests
52 | - [GitHub Actions](https://github.com/features/actions) for automating tests and checks, once a PR is pushed to the `dbt-database-adapter-scaffold` repository
53 |
 54 | A deep understanding of these tools is not required to effectively contribute to `dbt-database-adapter-scaffold`, but we recommend checking out the attached documentation if you're interested in learning more about each one.
55 |
56 | #### Virtual environments
57 |
58 | We strongly recommend using virtual environments when developing code in `dbt-database-adapter-scaffold`. We recommend creating this virtualenv
59 | in the root of the `dbt-database-adapter-scaffold` repository. To create a new virtualenv, run:
60 | ```sh
61 | python3 -m venv env
62 | source env/bin/activate
63 | ```
64 |
65 | This will create and activate a new Python virtual environment.
66 |
67 | ## Testing
68 |
69 | Once you're able to manually test that your code change is working as expected, it's important to run existing automated tests, as well as adding some new ones. These tests will ensure that:
70 | - Your code changes do not unexpectedly break other established functionality
71 | - Your code changes can handle all known edge cases
72 | - The functionality you're adding will _keep_ working in the future
73 |
74 | ### Test commands
75 |
76 | With a virtualenv active and dev dependencies installed you can do things like:
77 |
78 | ```sh
79 | # run all unit tests in a file
80 | python3 -m pytest tests/template_test.py
81 | # run a specific unit test
82 | python3 -m pytest tests/template_test.py::bake_in_temp_dir
83 | ```
84 |
85 | > See [pytest usage docs](https://docs.pytest.org/en/6.2.x/usage.html) for an overview of useful command-line options.
86 |
87 | ## Submitting a Pull Request
88 |
89 | Code can be merged into the current development branch `main` by opening a pull request. A `dbt-database-adapter-scaffold` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code.
90 |
91 | Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve.
92 |
93 | Once all tests are passing and your PR has been approved, a `dbt-database-adapter-scaffold` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada:
94 |
 95 | Sometimes, the contributor license agreement auto-check bot doesn't find a user's entry in its roster. If you need to force a rerun, add `@cla-bot check` in a comment on the pull request.
96 |
--------------------------------------------------------------------------------
/License.md:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2021 dbt Labs, Inc.
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | **[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
6 |
7 | dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.
8 |
9 | # dbt-database-adapter-scaffold
10 | The `dbt-database-adapter-scaffold` package is a user-friendly interactive way to build out a new adapter.
11 |
12 | > ### :warning: Versioning
13 | > `dbt-core` use semver to denote compatibility and intends to maintain compatibility for database adapters of the same major and minor version. (i.e. any patch version for a given major and minor version) This is a convention used by existing plugins that database adapter plugin developers can opt into during project setup. When generating a scaffold, the project version will include the same major and minor version of the latest stable version of `dbt-core`.
14 |
15 | ## Getting started
16 |
17 | ### Local environment setup
18 | - `cd` to where you'd like your adapter to live. the scaffold will create a new folder for your adapter, e.g. `dbt-{myadapter}`. This will be the folder to make into a Git repository
19 | - setup a virtual env
20 | ```
21 | python3 -m venv env
22 | ```
23 | - choose how you would like to run the `dbt-database-adapter-scaffold`
24 | - [Running from Github](#running-from-github) (recommended)
25 | - [Running Locally](#running-locally)
26 | - Once you have generated your adapter please continue by using the [`README`]({{cookiecutter.project_name}}/README.md) located within the repo.
27 |
28 | #### Running from Github
29 | - Install [cookiecutter](https://cookiecutter.readthedocs.io/) with `pip install cookiecutter` ([guide for alt install methods](https://cookiecutter.readthedocs.io/en/1.7.2/installation.html))
30 | - run `cookiecutter gh:dbt-labs/dbt-database-adapter-scaffold` in console
31 |
32 | #### Running Locally
33 | - [Install cookiecutter](https://cookiecutter.readthedocs.io/en/1.7.2/installation.html)
34 | - Fork and clone this repo
35 | - Once cloned run cookiecutter `path_to_this_repo/.`
36 |
37 | ## Join the dbt Community
38 |
39 | - Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
40 | - Read more on the [dbt Community Discourse](https://discourse.getdbt.com)
41 |
42 | ## Reporting bugs and contributing code
43 |
44 | - Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-redshift/issues/new)
45 | - Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt/blob/HEAD/CONTRIBUTING.md)
46 |
47 | ## Code of Conduct
48 |
49 | Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct).
50 |
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 | "author": "",
3 | "author_email": "",
4 | "github_url": "If you have already made a github repo to tie the project to place it here, otherwise update in setup.py later.",
 5 |   "adapter_name": "What is the name of your database in CamelCase? ex. MyAdapter",
6 | "project_name": "dbt-{{ cookiecutter.adapter_name.lower() }}",
7 | "is_sql_adapter": [
8 | "true",
9 | "false"
10 | ],
11 | "project_version": "1.7.0",
12 | "_copy_without_render": [
13 | "**/main.yml"
14 | ]
15 | }
--------------------------------------------------------------------------------
/docs/arch/README.md:
--------------------------------------------------------------------------------
1 | ## ADRs
2 |
3 | For any architectural/engineering decisions we make, we will create an ADR (Architectural Design Record) to keep track of what decision we made and why. This allows us to refer back to decisions in the future and see if the reasons we made a choice still holds true. This also allows for others to more easily understand the code. ADRs will follow this process:
4 |
5 | - They will live in the repo, under a directory `docs/arch`
6 | - They will be written in markdown
 7 | - They will follow the naming convention `adr-NNN-<title>.md`
 8 |     - `NNN` will just be a counter starting at `001` and will allow us to easily keep the records in chronological order.
9 | - The common sections that each ADR should have are:
10 | - Title, Context, Decision, Status, Consequences
11 | - Use this article as a reference: [https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions](https://cognitect.com/blog/2011/11/15/documenting-architecture-decisions)
12 |
--------------------------------------------------------------------------------
/docs/arch/adr-001-new-adapter-scaffolding.md:
--------------------------------------------------------------------------------
1 | # New Adapter Scaffold
2 |
3 | ## Context
4 |
5 | Currently we offer the ability for users and datawarehouses to build vendor/community supported adapter plugins via the [`create_adapter_plugins.py`](https://github.com/dbt-labs/dbt/blob/47033c459f2c835d81cc845d49ef23e7d19736b6/core/scripts/create_adapter_plugins.py) file. While this file is able to produce a usable adapter plugin, it is not the most user friendly experience as we have noted over time and needed to be updated.
6 |
7 | ### known issues or possible improvements to be made
8 | - Updating the script to generate a new adapter plugin is tricky. Numerous template strings must be edited in the python code, which makes it difficult to test and search for all areas that need to be changed. ex. `$ python create_adapter_plugins.py --sql --title-case=MyAdapter ./ myadapter`
9 | - Not an interactive experience, must know and pass all required arguments to the .py file
10 | - stuck with defaults, or missing dbt suggested default dependencies
11 | - Options to set up Github Actions
12 | - flake8 by default for linting to match other adapters
13 | - mypy by default or optional for static type checking
14 | - tox by default
15 | - Ability to make a docker image by default
- Lack of docstrings, examples, comments to help users understand what needs to be built out.
- Lack of pointers to what official documentation we do have.
18 |
19 | ## Options
20 | - Pull out current script, and move it to a new repo by itself
21 | - lots of template strings difficult to parse
22 | - use Scaffolding tool
23 | - easier to maintain
24 | - interactive
25 | - possibly more easily testable
26 | - potential scaffolding tools
27 | - [`cookiecutter`](https://cookiecutter.readthedocs.io/en/1.7.2/overview.html)
28 | - Uses a json file to trigger commandline interactive session and passes variables user defines or chooses between potential defaults to fill out jinja templated variables and produce a new repo
- allows for manipulation of json behind the scenes to keep the interactive session from being too long
30 | - provides various ways to run program, either from fork/cloning, from a recognized cookiecutter command without cloning, or from within `dbt-core` able to take advantage of bumpversioning.
- [`pyscaffold`](https://pyscaffold.org/)
32 | - creates a simple scaffold of project not as flexible as cookiecutter
33 | - [`yehua`](https://yehua.readthedocs.io/en/latest/)
- Seems to have had similar functionality to cookiecutter, but it hasn't been supported in quite some time and therefore no longer works with pip install (unusable)
35 |
36 | ## Decision
37 |
Firstly, after looking at available tooling, we landed on using `cookiecutter` to aid in creating an interactive scaffolding session for users to quickly generate a starting point to build out their adapter plugin by quickly building out variable names, giving choice selections for things like `is_sql_adapter`, which is how we let the program know which connection methods to pull in from `dbt-core`. This also meant we could keep much of the structure for the files the same as the previous adapter generator by having the files exist in the starting state and just passing the user-passed parameters on generation.
39 |
40 | Secondly we looked at our current `dbt-docs` to see what things we specifically require users to build out for adapters to work ex. (class methods for connection, macros.) and added starting stubs to the files along with docstrings declaring their purpose, examples from other adapters, pointers in comments to documentation.
41 |
42 | ## Remaining tasks
43 | Project things left to do
44 | - Demo and feedback, cleanup.
45 | - Fully test versioning and merger into core repo.
- Make sure naming conventions for variables are what we want. (As easy for new users to understand meaning as possible)
47 | - Github Action tests? (Flake8).
48 | - Making sure we have all packages we need, and finalizing if we want any other optionals added ex. Mypy.
49 | - Any other testing ideas we might have.
50 |
51 |
52 | ## Status
53 |
54 | proposed
55 |
56 | ## Consequences
57 |
We now have an adapter plugin that takes what our original process did and improves upon it by trying to be more explicit in what needs to be done, and offering helpful tips or references. We have given users the suggested tools we at `dbt` use ourselves, and can refocus towards other more deep-dive areas around testing these adapters and making that a clearer process.
59 |
--------------------------------------------------------------------------------
/hooks/pre_gen_project.py:
--------------------------------------------------------------------------------
1 | {{ cookiecutter.update({ "adapter_upper":cookiecutter.adapter_name.upper(),
2 | "directory_name":cookiecutter.adapter_name.lower() }) }}
3 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "dbt_database_adapter_scaffold"
7 | version = "1.7.0"
8 | readme = "README.md"
9 | requires-python = ">=3.8"
10 | authors = [{name = "dbt Labs", email = "info@dbtlabs.com"}]
11 | classifiers = [
12 | "Development Status :: 5 - Production/Stable",
13 | "Operating System :: Microsoft :: Windows",
14 | "Operating System :: MacOS :: MacOS X",
15 | "Operating System :: POSIX :: Linux",
16 | "Programming Language :: Python :: 3.8",
17 | "Programming Language :: Python :: 3.9",
18 | "Programming Language :: Python :: 3.10",
19 | "Programming Language :: Python :: 3.11",
20 | ]
21 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 |
2 | cookiecutter~=2.6.0
3 | pytest_cookies~=0.7.0
4 |
5 |
--------------------------------------------------------------------------------
/tests/template_test.py:
--------------------------------------------------------------------------------
1 | import re
2 | from contextlib import contextmanager
3 | from cookiecutter.utils import rmtree
4 |
5 | @contextmanager
6 | def bake_in_temp_dir(cookies, *args, **kwargs):
7 | result = cookies.bake(*args, **kwargs)
8 | try:
9 | yield result
10 | finally:
11 | rmtree(str(result.project_path))
12 |
13 | def test_bake_project_adapter_name(cookies):
14 | """bake and test against default values of cookiecutter.json, adapter_name, and top level files created"""
15 | with bake_in_temp_dir(cookies, extra_context={ "adapter_name": "MyAdapter" }) as result:
16 | assert result.exit_code == 0
17 | assert result.exception is None
18 | assert result.context["adapter_name"] == "MyAdapter"
19 | assert result.project_path.is_dir()
20 | # looks for files in generated project
21 | found_toplevel_files = [f.name for f in result.project_path.glob('*')]
22 | assert ".github" in found_toplevel_files
23 | assert "dbt" in found_toplevel_files
24 | assert "setup.py" in found_toplevel_files
25 | assert "tox.ini" in found_toplevel_files
26 | assert "tests" in found_toplevel_files
27 |
28 | def test_bake_project_name(cookies):
29 | """bake and test against new words, if passes means its changing values in template"""
30 | with bake_in_temp_dir(cookies, extra_context={ "adapter_name":"MyAdapter" }) as result:
31 | assert result.exit_code == 0
32 | assert result.exception is None
33 | assert result.context["project_name"] == "dbt-myadapter"
34 |
35 | def test_bake_deafult_is_sql_adapter(cookies):
36 | """bake and test default version of is_sql_adapter is true"""
37 | with bake_in_temp_dir(cookies) as result:
38 | assert result.exit_code == 0
39 | assert result.exception is None
40 | assert result.context["is_sql_adapter"] =="true"
41 |
42 | def test_bake_is_sql_adapter_base(cookies):
43 | """bake and test is_sql_adapter can register change from true to false"""
44 | with bake_in_temp_dir(cookies, extra_context={ "is_sql_adapter":"false" }) as result:
45 | assert result.exit_code == 0
46 | assert result.exception is None
47 | assert result.context["is_sql_adapter"] == "false"
48 |
49 | def test_bake_author_default(cookies):
50 | with bake_in_temp_dir(cookies) as result:
51 | assert result.context["author"] == ""
52 |
53 | def test_bake_author_change(cookies):
54 | with bake_in_temp_dir(cookies, extra_context={ "author":"John Doe" }) as result:
55 | assert result.context["author"] == "John Doe"
56 |
57 |
58 | def test_bake_email_default(cookies):
59 | with bake_in_temp_dir(cookies) as result:
60 | assert result.context["author_email"] == ""
61 |
62 | def test_bake_email_change(cookies):
63 | with bake_in_temp_dir(cookies, extra_context={ "author_email":"John.D@test.com" }) as result:
64 | assert result.context["author_email"] == "John.D@test.com"
65 | assert result.context["author_email"] != "J@hn.d@test.com"
66 | regex = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b"
67 | assert re.fullmatch(regex,result.context["author_email"])
68 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | select =
3 | E
4 | W
5 | F
6 | ignore =
7 | W503 # makes Flake8 work like black
8 | W504
9 | E203 # makes Flake8 work like black
10 | E741
11 | E501
12 | exclude = tests
13 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.github/workflows/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbt-labs/dbt-database-adapter-scaffold/50cbc5ff220ce57f606b416324a3afcb15277224/{{cookiecutter.project_name}}/.github/workflows/.gitkeep
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.github/workflows/integration.yml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbt-labs/dbt-database-adapter-scaffold/50cbc5ff220ce57f606b416324a3afcb15277224/{{cookiecutter.project_name}}/.github/workflows/integration.yml
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | # **what?**
2 | # Runs code quality checks, unit tests, and verifies python build on
3 | # all code commited to the repository. This workflow should not
4 | # require any secrets since it runs for PRs from forked repos.
5 | # By default, secrets are not passed to workflows running from
6 | # a forked repo.
7 |
8 | # **why?**
9 | # Ensure code for dbt meets a certain quality standard.
10 |
11 | # **when?**
12 | # This will run for all PRs, when code is pushed to a release
13 | # branch, and when manually triggered.
14 |
15 | name: Tests and Code Checks
16 |
17 | on:
18 | push:
19 | branches:
20 | - "main"
21 | - "develop"
22 | - "*.latest"
23 | - "releases/*"
24 | pull_request:
25 | workflow_dispatch:
26 |
27 | permissions: read-all
28 |
29 | # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise
30 | concurrency:
31 | group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}
32 | cancel-in-progress: true
33 |
34 | defaults:
35 | run:
36 | shell: bash
37 |
38 | jobs:
39 | code-quality:
40 | name: code-quality
41 |
42 | runs-on: ubuntu-latest
43 |
44 | steps:
45 | - name: Check out the repository
46 | uses: actions/checkout@v3
47 | with:
48 | persist-credentials: false
49 |
50 | - name: Set up Python
51 | uses: actions/setup-python@v4
52 |
53 | - name: Install python dependencies
54 | run: |
55 | pip install --user --upgrade pip
56 | pip install pre-commit
57 | pip install mypy==0.782
58 | pip install -r dev-requirements.txt
59 | pip --version
60 | pre-commit --version
61 | mypy --version
62 | dbt --version
63 |
64 | - name: Run pre-commit hooks
65 | run: pre-commit run --all-files --show-diff-on-failure
66 |
67 | build:
68 | name: build packages
69 |
70 | runs-on: ubuntu-latest
71 |
72 | steps:
73 | - name: Check out the repository
74 | uses: actions/checkout@v3
75 | with:
76 | persist-credentials: false
77 |
78 | - name: Set up Python
79 | uses: actions/setup-python@v4
80 | with:
81 | python-version: 3.8
82 |
83 | - name: Install python dependencies
84 | run: |
85 | pip install --user --upgrade pip
86 | pip install --upgrade setuptools wheel twine check-wheel-contents
87 | pip --version
88 | - name: Build distributions
89 | run: ./scripts/build-dist.sh
90 |
91 | - name: Show distributions
92 | run: ls -lh dist/
93 |
94 | - name: Check distribution descriptions
95 | run: |
96 | twine check dist/*
97 | - name: Check wheel contents
98 | run: |
99 | check-wheel-contents dist/*.whl --ignore W007,W008
100 | - uses: actions/upload-artifact@v3
101 | with:
102 | name: dist
103 | path: dist/
104 |
105 | test-build:
106 | name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}
107 |
108 | needs: build
109 |
110 | runs-on: ${{ matrix.os }}
111 |
112 | strategy:
113 | fail-fast: false
114 | matrix:
115 | os: [ubuntu-latest, macos-latest, windows-latest]
116 | python-version: ['3.8', '3.9', '3.10', '3.11']
117 |
118 | steps:
119 | - name: Set up Python ${{ matrix.python-version }}
120 | uses: actions/setup-python@v4
121 | with:
122 | python-version: ${{ matrix.python-version }}
123 |
124 | - name: Install python dependencies
125 | run: |
126 | pip install --user --upgrade pip
127 | pip install --upgrade wheel
128 | pip --version
129 | - uses: actions/download-artifact@v3
130 | with:
131 | name: dist
132 | path: dist/
133 |
134 | - name: Show distributions
135 | run: ls -lh dist/
136 |
137 | - name: Install wheel distributions
138 | run: |
139 | find ./dist/*.whl -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
140 | - name: Check wheel distributions
141 | run: |
142 | dbt --version
143 | - name: Install source distributions
144 | run: |
145 | find ./dist/*.gz -maxdepth 1 -type f | xargs pip install --force-reinstall --find-links=dist/
146 | - name: Check source distributions
147 | run: |
148 | dbt --version
149 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 | test.env
54 |
55 | # Translations
56 | *.mo
57 | *.pot
58 |
59 | # Django stuff:
60 | *.log
61 | local_settings.py
62 | db.sqlite3
63 | db.sqlite3-journal
64 |
65 | # Flask stuff:
66 | instance/
67 | .webassets-cache
68 |
69 | # Scrapy stuff:
70 | .scrapy
71 |
72 | # Sphinx documentation
73 | docs/_build/
74 |
75 | # PyBuilder
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | .python-version
87 |
88 | # pipenv
89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
92 | # install all needed dependencies.
93 | #Pipfile.lock
94 |
95 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
96 | __pypackages__/
97 |
98 | # Celery stuff
99 | celerybeat-schedule
100 | celerybeat.pid
101 |
102 | # SageMath parsed files
103 | *.sage.py
104 |
105 | # Environments
106 | .env
107 | .venv
108 | env/
109 | venv/
110 | ENV/
111 | env.bak/
112 | venv.bak/
113 |
114 | # Spyder project settings
115 | .spyderproject
116 | .spyproject
117 |
118 | # Rope project settings
119 | .ropeproject
120 |
121 | # mkdocs documentation
122 | /site
123 |
124 | # mypy
125 | .mypy_cache/
126 | .dmypy.json
127 | dmypy.json
128 |
129 | # Pyre type checker
130 | .pyre/
131 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # For more on configuring pre-commit hooks (see https://pre-commit.com/)
2 |
3 | # TODO: remove global exclusion of tests when testing overhaul is complete
4 | exclude: "^tests/.*"
5 |
6 |
7 | default_language_version:
8 | python: python3.8
9 |
10 | repos:
11 | - repo: https://github.com/pre-commit/pre-commit-hooks
12 | rev: v3.2.0
13 | hooks:
14 | - id: check-yaml
15 | args: [--unsafe]
16 | - id: check-json
17 | - id: end-of-file-fixer
18 | - id: trailing-whitespace
19 | - id: check-case-conflict
20 | - repo: https://github.com/psf/black
21 | rev: 22.3.0
22 | hooks:
23 | - id: black
24 | args:
25 | - "--line-length=99"
26 | - "--target-version=py38"
27 | - id: black
28 | alias: black-check
29 | stages: [manual]
30 | args:
31 | - "--line-length=99"
32 | - "--target-version=py38"
33 | - "--check"
34 | - "--diff"
35 | - repo: https://github.com/pycqa/flake8
36 | rev: 4.0.1
37 | hooks:
38 | - id: flake8
39 | - id: flake8
40 | alias: flake8-check
41 | stages: [manual]
42 | - repo: https://github.com/pre-commit/mirrors-mypy
43 | rev: v0.782
44 | hooks:
45 | - id: mypy
46 | args: [--show-error-codes, --ignore-missing-imports]
47 | files: ^dbt/adapters/.*
48 | language: system
49 | - id: mypy
50 | alias: mypy-check
51 | stages: [manual]
52 | args: [--show-error-codes, --pretty, --ignore-missing-imports]
53 | files: ^dbt/adapters
54 | language: system
55 |
56 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include dbt/include *.sql *.yml *.md
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/README.md:
--------------------------------------------------------------------------------
1 | v
2 |
3 |
4 |
5 | **[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications.
6 |
7 | dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis.
8 |
9 | ## {{ cookiecutter.adapter_name }}
10 | This repo contains the base code to help you start to build out your {{ cookiecutter.project_name }} adapter plugin, for more information on how to build out the adapter please follow the [docs](https://docs.getdbt.com/docs/contributing/building-a-new-adapter)
11 |
**Note:** this `README` is meant to be replaced with the information that would be required to use your adapter once you're at a point to do so.
13 |
14 | ** Note **
15 | ### Adapter Scaffold default Versioning
16 | This adapter plugin follows [semantic versioning](https://semver.org/). The first version of this plugin is v{{ cookiecutter.project_version }}, in order to be compatible with dbt Core v{{ cookiecutter.project_version }}.
17 |
18 | It's also brand new! For {{ cookiecutter.adapter_name }}-specific functionality, we will aim for backwards-compatibility wherever possible. We are likely to be iterating more quickly than most major-version-1 software projects. To that end, backwards-incompatible changes will be clearly communicated and limited to minor versions (once every three months).
19 |
20 | ## Getting Started
21 |
22 | #### Setting up Locally
23 | - run `pip install -r dev-requirements.txt`.
24 | - cd directory into the `dbt-core` you'd like to be testing against and run `make dev`.
25 |
26 | #### Connect to Github
27 | - run `git init`.
- Connect your local code to a GitHub repo.
29 |
30 | ## Join the dbt Community
31 |
32 | - Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/)
33 | - If one doesn't exist feel free to request a #db-{{ cookiecutter.adapter_name }} channel be made in the [#channel-requests](https://getdbt.slack.com/archives/C01D8J8AJDA) on dbt community slack channel.
34 | - Read more on the [dbt Community Discourse](https://discourse.getdbt.com)
35 |
36 | ## Reporting bugs and contributing code
37 |
38 | - Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-redshift/issues/new)
39 | - Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt/blob/HEAD/CONTRIBUTING.md)
40 |
41 | ## Code of Conduct
42 |
43 | Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct).
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/adapters/{{cookiecutter.directory_name}}/__init__.py:
--------------------------------------------------------------------------------
1 | from dbt.adapters.{{ cookiecutter.directory_name }}.connections import {{ cookiecutter.adapter_name }}ConnectionManager # noqa
2 | from dbt.adapters.{{ cookiecutter.directory_name }}.connections import {{ cookiecutter.adapter_name }}Credentials
3 | from dbt.adapters.{{ cookiecutter.directory_name }}.impl import {{ cookiecutter.adapter_name }}Adapter
4 |
5 | from dbt.adapters.base import AdapterPlugin
6 | from dbt.include import {{ cookiecutter.directory_name }}
7 |
8 |
9 | Plugin = AdapterPlugin(
10 | adapter={{ cookiecutter.adapter_name }}Adapter,
11 | credentials={{ cookiecutter.adapter_name }}Credentials,
12 | include_path={{ cookiecutter.directory_name }}.PACKAGE_PATH
13 | )
14 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/adapters/{{cookiecutter.directory_name}}/__version__.py:
--------------------------------------------------------------------------------
1 | version = "{{ cookiecutter.project_version }}"
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/adapters/{{cookiecutter.directory_name}}/connections.py:
--------------------------------------------------------------------------------
1 | from contextlib import contextmanager
2 | from dataclasses import dataclass
3 | import dbt.common.exceptions # noqa
4 | from dbt.adapters.base import Credentials
5 | {% if cookiecutter.is_sql_adapter == "true" %}
6 | from dbt.adapters.sql import SQLConnectionManager as connection_cls
7 | {% else %}
8 | from dbt.adapters.base import BaseConnectionManager as connection_cls
9 | {% endif %}
10 | from dbt.logger import GLOBAL_LOGGER as logger
11 |
12 | @dataclass
13 | class {{cookiecutter.adapter_name}}Credentials(Credentials):
14 | """
15 | Defines database specific credentials that get added to
16 | profiles.yml to connect to new adapter
17 | """
18 |
19 | # Add credentials members here, like:
20 | # host: str
21 | # port: int
22 | # username: str
23 | # password: str
24 |
25 | _ALIASES = {
26 | "dbname":"database",
27 | "pass":"password",
28 | "user":"username"
29 | }
30 |
31 | @property
32 | def type(self):
33 | """Return name of adapter."""
34 | return "{{ cookiecutter.directory_name }}"
35 |
36 | @property
37 | def unique_field(self):
38 | """
39 | Hashed and included in anonymous telemetry to track adapter adoption.
40 | Pick a field that can uniquely identify one team/organization building with this adapter
41 | """
42 | return self.host
43 |
44 | def _connection_keys(self):
45 | """
46 | List of keys to display in the `dbt debug` output.
47 | """
48 | return ("host","port","username","user")
49 |
50 | class {{ cookiecutter.adapter_name }}ConnectionManager(connection_cls):
51 | TYPE = "{{ cookiecutter.directory_name }}"
52 |
53 |
54 | @contextmanager
55 | def exception_handler(self, sql: str):
56 | """
57 | Returns a context manager, that will handle exceptions raised
58 | from queries, catch, log, and raise dbt exceptions it knows how to handle.
59 | """
60 | # ## Example ##
61 | # try:
62 | # yield
63 | # except myadapter_library.DatabaseError as exc:
64 | # self.release(connection_name)
65 |
66 | # logger.debug("myadapter error: {}".format(str(e)))
67 | # raise dbt.exceptions.DatabaseException(str(exc))
68 | # except Exception as exc:
69 | # logger.debug("Error running SQL: {}".format(sql))
70 | # logger.debug("Rolling back transaction.")
71 | # self.release(connection_name)
72 | # raise dbt.exceptions.RuntimeException(str(exc))
73 | pass
74 |
75 | @classmethod
76 | def open(cls, connection):
77 | """
78 | Receives a connection object and a Credentials object
79 | and moves it to the "open" state.
80 | """
81 | # ## Example ##
82 | # if connection.state == "open":
83 | # logger.debug("Connection is already open, skipping open.")
84 | # return connection
85 |
86 | # credentials = connection.credentials
87 |
88 | # try:
89 | # handle = myadapter_library.connect(
90 | # host=credentials.host,
91 | # port=credentials.port,
92 | # username=credentials.username,
93 | # password=credentials.password,
94 | # catalog=credentials.database
95 | # )
96 | # connection.state = "open"
97 | # connection.handle = handle
98 | # return connection
99 | pass
100 |
101 | @classmethod
102 | def get_response(cls,cursor):
103 | """
104 | Gets a cursor object and returns adapter-specific information
105 | about the last executed command generally a AdapterResponse ojbect
106 | that has items such as code, rows_affected,etc. can also just be a string ex. "OK"
107 | if your cursor does not offer rich metadata.
108 | """
109 | # ## Example ##
110 | # return cursor.status_message
111 | pass
112 |
113 | def cancel(self, connection):
114 | """
115 | Gets a connection object and attempts to cancel any ongoing queries.
116 | """
117 | # ## Example ##
118 | # tid = connection.handle.transaction_id()
119 | # sql = "select cancel_transaction({})".format(tid)
120 | # logger.debug("Cancelling query "{}" ({})".format(connection_name, pid))
121 | # _, cursor = self.add_query(sql, "master")
122 | # res = cursor.fetchone()
123 | # logger.debug("Canceled query "{}": {}".format(connection_name, res))
124 | pass
125 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/adapters/{{cookiecutter.directory_name}}/impl.py:
--------------------------------------------------------------------------------
1 | {% if cookiecutter.is_sql_adapter == "true" %}
2 | from dbt.adapters.sql import SQLAdapter as adapter_cls
3 | {% else %}
4 | from dbt.adapters.base import BaseAdapter as adapter_cls
5 | {% endif %}
6 | from dbt.adapters.{{ cookiecutter.directory_name }} import {{ cookiecutter.adapter_name }}ConnectionManager
7 |
8 |
9 |
10 | class {{ cookiecutter.adapter_name }}Adapter(adapter_cls):
11 | """
12 | Controls actual implmentation of adapter, and ability to override certain methods.
13 | """
14 |
15 | ConnectionManager = {{ cookiecutter.adapter_name }}ConnectionManager
16 |
17 | @classmethod
18 | def date_function(cls):
19 | """
20 | Returns canonical date func
21 | """
22 | return "datenow()"
23 |
24 | # may require more build out to make more user friendly to confer with team and community.
25 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/include/{{cookiecutter.directory_name}}/__init__.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | PACKAGE_PATH = os.path.dirname(__file__)
4 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/include/{{cookiecutter.directory_name}}/dbt_project.yml:
--------------------------------------------------------------------------------
1 | name: dbt_{{ cookiecutter.directory_name }}
2 | version: {{ cookiecutter.project_version }}
3 | config-version: 2
4 |
5 | macro-paths: ["macros"]
6 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/include/{{cookiecutter.directory_name}}/macros/adapters.sql:
--------------------------------------------------------------------------------
1 | /* For examples of how to fill out the macros please refer to the postgres adapter and docs
2 | postgres adapter macros: https://github.com/dbt-labs/dbt-core/blob/main/plugins/postgres/dbt/include/postgres/macros/adapters.sql
3 | dbt docs: https://docs.getdbt.com/docs/contributing/building-a-new-adapter
4 | */
5 |
6 | {{'{%'}} macro {{cookiecutter.directory_name}}__alter_column_type(relation,column_name,new_column_type) {{ '-%}' }}
7 | '''Changes column name or data type'''
8 | /*
9 | 1. Create a new column (w/ temp name and correct type)
10 | 2. Copy data over to it
11 | 3. Drop the existing column (cascade!)
12 | 4. Rename the new column to existing column
13 | */
14 | {{'{%'}} endmacro {{ '%}' }}
15 |
16 | {{'{%'}} macro {{cookiecutter.directory_name}}__check_schema_exists(information_schema,schema) {{ '-%}' }}
17 | '''Checks if schema name exists and returns number or times it shows up.'''
18 | /*
19 | 1. Check if schemas exist
20 | 2. return number of rows or columns that match searched parameter
21 | */
22 | {{'{%'}} endmacro {{ '%}' }}
23 |
24 | -- Example from postgres adapter in dbt-core
25 | -- Notice how you can build out other methods than the designated ones for the impl.py file,
26 | -- to make a more robust adapter. ex. (verify_database)
27 |
28 | /*
29 | {% raw %}
30 | {% macro postgres__create_schema(relation) -%}
31 | {% if relation.database -%}
32 | {{ adapter.verify_database(relation.database) }}
33 | {%- endif -%} {%- call statement('create_schema') -%}
34 | create schema if not exists {{ relation.without_identifier().include(database=False) }}
35 | {%- endcall -%}
36 | {% endmacro %}
37 | {% endraw %}
38 | */
39 |
40 | {{'{%'}} macro {{cookiecutter.directory_name}}__create_schema(relation) {{ '-%}' }}
41 | '''Creates a new schema in the target database, if schema already exists, method is a no-op. '''
42 | {{'{%'}} endmacro {{ '%}' }}
43 |
44 | /*
45 | {% raw %}
46 | {% macro postgres__drop_schema(relation) -%}
47 | {% if relation.database -%}
48 | {{ adapter.verify_database(relation.database) }}
49 | {%- endif -%}
50 | {%- call statement('drop_schema') -%}
51 | drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade
52 | {%- endcall -%}
53 | {% endmacro %}
54 | {% endraw %}
55 | */
56 |
57 | {{'{%'}} macro {{cookiecutter.directory_name}}__drop_relation(relation) {{ '-%}' }}
58 | '''Deletes relatonship identifer between tables.'''
59 | /*
60 | 1. If database exists
61 | 2. Create a new schema if passed schema does not exist already
62 | */
63 | {{'{%'}} endmacro {{ '%}' }}
64 |
65 | {{'{%'}} macro {{cookiecutter.directory_name}}__drop_schema(relation) {{ '-%}' }}
66 | '''drops a schema in a target database.'''
67 | /*
68 | 1. If database exists
69 | 2. search all calls of schema, and change include value to False, cascade it to backtrack
70 | */
71 | {{'{%'}} endmacro {{ '%}' }}
72 |
73 | /*
74 | {% raw %}
75 | Example of 1 of 3 required macros that does not have a default implementation
76 | {% macro postgres__get_columns_in_relation(relation) -%}
77 | {% call statement('get_columns_in_relation', fetch_result=True) %}
78 | select
79 | column_name,
80 | data_type,
81 | character_maximum_length,
82 | numeric_precision,
83 | numeric_scale
84 | from {{ relation.information_schema('columns') }}
85 | where table_name = '{{ relation.identifier }}'
86 | {% if relation.schema %}
87 | and table_schema = '{{ relation.schema }}'
88 | {% endif %}
89 | order by ordinal_position
90 | {% endcall %}
91 | {% set table = load_result('get_columns_in_relation').table %}
92 | {{ return(sql_convert_columns_in_relation(table)) }}
93 | {% endmacro %}
94 | {% endraw %}*/
95 |
96 |
97 | {{'{%'}} macro {{cookiecutter.directory_name}}__get_columns_in_relation(relation) {{ '-%}' }}
98 | '''Returns a list of Columns in a table.'''
99 | /*
100 | 1. select as many values from column as needed
101 | 2. search relations to columns
102 | 3. where table name is equal to the relation identifier
103 | 4. if a relation schema exists and table schema is equal to the relation schema
104 | 5. order in whatever way you want to call.
105 | 6. create a table by loading result from call
106 | 7. return new table
107 | */
108 | {{'{%'}} endmacro {{ '%}' }}
109 |
110 | -- Example of 2 of 3 required macros that do not come with a default implementation
111 |
112 | /*
113 | {% raw %}
114 | {% macro postgres__list_relations_without_caching(schema_relation) %}
115 | {% call statement('list_relations_without_caching', fetch_result=True) -%}
116 | select
117 | '{{ schema_relation.database }}' as database,
118 | tablename as name,
119 | schemaname as schema,
120 | 'table' as type
121 | from pg_tables
122 | where schemaname ilike '{{ schema_relation.schema }}'
123 | union all
124 | select
125 | '{{ schema_relation.database }}' as database,
126 | viewname as name,
127 | schemaname as schema,
128 | 'view' as type
129 | from pg_views
130 | where schemaname ilike '{{ schema_relation.schema }}'
131 | {% endcall %}
132 | {{ return(load_result('list_relations_without_caching').table) }}
133 | {% endmacro %}
134 | {% endraw %}
135 | */
136 |
137 | {{'{%'}} macro {{cookiecutter.directory_name}}__list_relations_without_caching(schema_relation) {{ '-%}' }}
138 | '''Creates a table of relations without using local caching.'''
139 | {{'{%'}} endmacro {{ '%}' }}
140 |
141 | {{'{%'}} macro {{cookiecutter.directory_name}}__list_schemas(database) {{ '-%}' }}
142 | '''Returns a table of unique schemas.'''
143 | /*
144 | 1. search schema by specific name
145 | 2. create a table with names
146 | */
147 | {{'{%'}} endmacro {{ '%}' }}
148 |
149 | {{'{%'}} macro {{cookiecutter.directory_name}}__rename_relation(from_relation, to_relation) {{ '-%}' }}
150 | '''Renames a relation in the database.'''
151 | /*
152 | 1. Search for a specific relation name
153 | 2. alter table by targeting specific name and passing in new name
154 | */
155 | {{'{%'}} endmacro {{ '%}' }}
156 |
157 | {{'{%'}} macro {{cookiecutter.directory_name}}__truncate_relation(relation) {{ '-%}' }}
158 | '''Removes all rows from a targeted set of tables.'''
159 | /*
160 | 1. grab all tables tied to the relation
161 | 2. remove rows from relations
162 | */
163 | {{'{%'}} endmacro {{ '%}' }}
164 |
165 | /*
166 | {% raw %}
167 | Example 3 of 3 of required macros that does not have a default implementation.
168 | ** Good example of building out small methods ** please refer to impl.py for implementation of now() in postgres plugin
169 | {% macro postgres__current_timestamp() -%}
170 | now()
171 | {%- endmacro %}
172 | {% endraw %}
173 | */
174 |
175 | {{'{%'}} macro {{cookiecutter.directory_name}}__current_timestamp() {{ '-%}' }}
176 | '''Returns current UTC time'''
177 | {{'{#'}} docs show not to be implemented currently. {{'#}'}}
178 | {{'{%'}} endmacro {{ '%}' }}
179 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/include/{{cookiecutter.directory_name}}/macros/catalog.sql:
--------------------------------------------------------------------------------
1 | {{'{%'}} macro {{cookiecutter.directory_name}}__get_catalog(information_schema, schemas) {{ '-%}' }}
2 | 
3 | {{'{%'}} set msg {{ '-%}' }}
4 | get_catalog not implemented for {{cookiecutter.directory_name}}
5 | {{'{%'}}- endset {{ '%}' }}
6 | /*
7 | Your database likely has a way of accessing metadata about its objects,
8 | whether by querying an information schema or by running `show` and `describe` commands.
9 | dbt will use this macro to generate its catalog of objects it knows about. The catalog is one of
10 | the artifacts powering the documentation site.
11 | As an example, below is a simplified version of postgres__get_catalog
12 | */
13 | 
14 | /*
15 | {% raw %}
16 | select {{database}} as TABLE,
17 | "- set table type -"
18 | when 'v' then 'VIEW'
19 | else 'BASE TABLE'
20 | "- set table/view names and descriptions -"
21 | use several joins and search types for pulling info together, sorting etc..
22 | where (
23 | search if schema exists, else build
24 | {%- for schema in schemas -%}
25 | upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}
26 | {%- endfor -%}
27 | )
28 | define any shortcut keys
29 | {% endraw %}
30 | */
31 | {{ '{{ exceptions.raise_compiler_error(msg) }}' }}
32 | {{'{%'}} endmacro {{ '%}' }}
33 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dbt/include/{{cookiecutter.directory_name}}/profile_template.yml:
--------------------------------------------------------------------------------
1 | fixed:
2 | type: {{ cookiecutter.directory_name }}
3 | prompts:
4 | host:
5 | hint: "your host name"
6 | port:
7 | default: 5432
8 | type: "int"
9 | user:
10 | hint: "dev username"
11 | password:
12 | hint: "dev password"
13 | hide_input: true
14 | dbname:
15 | hint: "default database"
16 | threads:
17 | hint: "1 or more"
18 | type: "int"
19 | default: 1
20 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | # install latest changes in dbt-core
2 | git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core
3 | git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-tests-adapter&subdirectory=tests/adapter
4 |
5 | black==24.2.0
6 | bumpversion
7 | flake8
8 | flaky
9 | freezegun==1.4.0
10 | ipdb
11 | mypy==1.8.0
12 | pip-tools
13 | pre-commit
14 | pytest
15 | pytest-dotenv
16 | pytest-logbook
17 | pytest-csv
18 | pytest-xdist
19 | pytz
20 | tox>=3.13
21 | twine
22 | wheel
23 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | mypy_path = ./third-party-stubs
3 | namespace_packages = True
4 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | filterwarnings =
3 | ignore:.*'soft_unicode' has been renamed to 'soft_str'*:DeprecationWarning
4 | ignore:unclosed file .*:ResourceWarning
5 | env_files =
6 | test.env
7 | testpaths =
8 | tests/unit
9 | tests/integration
10 | tests/functional
11 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from setuptools import find_namespace_packages, setup
3 |
4 | package_name = "{{ cookiecutter.project_name}}"
5 | # make sure this always matches dbt/adapters/{adapter}/__version__.py
6 | package_version = "{{cookiecutter.project_version}}"
7 | description = """The {{ cookiecutter.adapter_name }} adapter plugin for dbt"""
8 |
9 | setup(
10 | name=package_name,
11 | version=package_version,
12 | description=description,
13 | long_description=description,
14 | author="{{ cookiecutter.author }}",
15 | author_email="{{ cookiecutter.author_email }}",
16 | url="{{ cookiecutter.github_url }}",
17 | packages=find_namespace_packages(include=["dbt", "dbt.*"]),
18 | include_package_data=True,
19 | install_requires=[
20 | "dbt-core~={{ cookiecutter.project_version}}.",
21 | "dbt-common<1.0"
22 | "dbt-adapter~=0.1.0a2"
23 | ],
24 | )
25 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/test.env:
--------------------------------------------------------------------------------
1 | # Use this file to give a breakdown of what users would need in a .env file for connection purposes to test against.
2 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dbt-labs/dbt-database-adapter-scaffold/50cbc5ff220ce57f606b416324a3afcb15277224/{{cookiecutter.project_name}}/tests/__init__.py
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | # import os
4 | # import json
5 |
6 | # Import the functional fixtures as a plugin
7 | # Note: fixtures with session scope need to be local
8 |
9 | pytest_plugins = ["dbt.tests.fixtures.project"]
10 |
11 |
12 | # The profile dictionary, used to write out profiles.yml
13 | @pytest.fixture(scope="class")
14 | def dbt_profile_target():
15 |     pass  # stub: fill in with a dict of profiles.yml connection values for your adapter's test target
16 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/tests/functional/adapter/test_basic.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations
4 | from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests
5 | from dbt.tests.adapter.basic.test_singular_tests_ephemeral import (
6 | BaseSingularTestsEphemeral
7 | )
8 | from dbt.tests.adapter.basic.test_empty import BaseEmpty
9 | from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral
10 | from dbt.tests.adapter.basic.test_incremental import BaseIncremental
11 | from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests
12 | from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols
13 | from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp
14 | from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod
15 |
16 |
17 | class TestSimpleMaterializations{{ cookiecutter.adapter_name }}(BaseSimpleMaterializations):
18 | pass
19 |
20 |
21 | class TestSingularTests{{ cookiecutter.adapter_name }}(BaseSingularTests):
22 | pass
23 |
24 |
25 | class TestSingularTestsEphemeral{{ cookiecutter.adapter_name }}(BaseSingularTestsEphemeral):
26 | pass
27 |
28 |
29 | class TestEmpty{{ cookiecutter.adapter_name }}(BaseEmpty):
30 | pass
31 |
32 |
33 | class TestEphemeral{{ cookiecutter.adapter_name }}(BaseEphemeral):
34 | pass
35 |
36 |
37 | class TestIncremental{{ cookiecutter.adapter_name }}(BaseIncremental):
38 | pass
39 |
40 |
41 | class TestGenericTests{{ cookiecutter.adapter_name }}(BaseGenericTests):
42 | pass
43 |
44 |
45 | class TestSnapshotCheckCols{{ cookiecutter.adapter_name }}(BaseSnapshotCheckCols):
46 | pass
47 |
48 |
49 | class TestSnapshotTimestamp{{ cookiecutter.adapter_name }}(BaseSnapshotTimestamp):
50 | pass
51 |
52 |
53 | class TestBaseAdapterMethod{{ cookiecutter.adapter_name }}(BaseAdapterMethod):
54 | pass
55 |
--------------------------------------------------------------------------------
/{{cookiecutter.project_name}}/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | skipsdist = True
3 | envlist = py38,py39,py310,py311
4 |
5 | [testenv:{unit,py38,py39,py310,py311,py}]
6 | description = unit testing
7 | skip_install = true
8 | passenv =
9 | DBT_*
10 | PYTEST_ADDOPTS
11 | commands = {envpython} -m pytest {posargs} tests/unit
12 | deps =
13 | -rdev-requirements.txt
14 | -e.
15 |
16 | [testenv:{integration,py38,py39,py310,py311,py}-{ {{ cookiecutter.directory_name }} }]
17 | description = adapter plugin integration testing
18 | skip_install = true
19 | passenv =
20 | DBT_*
21 | {{ cookiecutter.adapter_upper }}_TEST_*
22 | PYTEST_ADDOPTS
23 | commands =
24 | {{ cookiecutter.directory_name }}: {envpython} -m pytest {posargs} -m profile_{{ cookiecutter.directory_name }} tests/integration
25 | {{ cookiecutter.directory_name }}: {envpython} -m pytest {posargs} tests/functional
26 | deps =
27 | -rdev-requirements.txt
28 | -e.
29 |
--------------------------------------------------------------------------------