├── .github ├── CODEOWNERS ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── project_add.yml │ └── release.yml ├── .gitignore ├── .output └── .gitignore ├── .pre-commit-config.yaml ├── .secrets └── .gitignore ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── meltano.yml ├── pyproject.toml ├── tap_dbt ├── __init__.py ├── client.py ├── schemas │ ├── __init__.py │ └── openapi_v2.yaml ├── streams.py └── tap.py ├── tests ├── __init__.py ├── conftest.py ├── resources │ └── responses │ │ ├── account.json │ │ ├── jobs.json │ │ ├── projects.json │ │ └── runs.json └── test_core.py ├── tox.ini └── uv.lock /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Global owner 2 | * @edgarrmondragon 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: uv 9 | directory: / 10 | schedule: 11 | interval: weekly 12 | timezone: "America/Mexico_City" 13 | commit-message: 14 | prefix: "feat(deps): " 15 | prefix-development: "chore(deps-dev): " 16 | versioning-strategy: increase-if-necessary 17 | groups: 18 | development-dependencies: 19 | dependency-type: development 20 | runtime-dependencies: 21 | dependency-type: production 22 | update-types: 23 | - "patch" 24 | - package-ecosystem: github-actions 25 | directory: "/" 26 | schedule: 27 | interval: monthly 28 | timezone: "America/Mexico_City" 29 | commit-message: 30 | prefix: "ci: " 31 | groups: 32 | actions: 33 | patterns: 34 | - "*" 35 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Test Tap 2 | 3 | on: 4 | pull_request: 5 | paths-ignore: 6 | - '**/README.md' 7 | push: 8 | branches: [main] 9 | paths-ignore: 10 | - '**/README.md' 11 | 12 | env: 13 | FORCE_COLOR: 1 14 | 15 | jobs: 16 | test_tap: 17 | name: Test Tap connectivity and Configuration 18 | runs-on: ubuntu-latest 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | include: 23 | - python-version: "3.9" 24 | toxenv: py39 25 | - python-version: "3.10" 26 | toxenv: py310 27 | - python-version: "3.11" 28 | toxenv: py311 29 | - python-version: "3.12" 30 | toxenv: py312 31 | - python-version: "3.13" 32 | toxenv: py313 33 | - python-version: "3.x" 34 | toxenv: deps 35 | 36 | steps: 37 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 38 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 39 | with: 40 | python-version: ${{ matrix.python-version }} 41 | allow-prereleases: true 42 | - 
uses: astral-sh/setup-uv@6b9c6063abd6010835644d4c2e1bef4cf5cd0fca # v6.0.1 43 | with: 44 | version: ">=0.5.19" 45 | - run: > 46 | uvx 47 | --with tox-uv 48 | tox -e ${{ matrix.toxenv }} 49 | -------------------------------------------------------------------------------- /.github/workflows/project_add.yml: -------------------------------------------------------------------------------- 1 | # Managed by Pulumi. Any edits to this file will be overwritten. 2 | 3 | name: Add issues and PRs to MeltanoLabs Overview Project 4 | 5 | on: 6 | issues: 7 | types: 8 | - opened 9 | - reopened 10 | - transferred 11 | pull_request: 12 | types: 13 | - opened 14 | - reopened 15 | 16 | jobs: 17 | add-to-project: 18 | name: Add issue to project 19 | runs-on: ubuntu-latest 20 | if: ${{ github.actor != 'dependabot[bot]' }} 21 | steps: 22 | - uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2 23 | with: 24 | project-url: https://github.com/orgs/MeltanoLabs/projects/3 25 | github-token: ${{ secrets.MELTYBOT_PROJECT_ADD_PAT }} 26 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | 6 | permissions: 7 | contents: write 8 | id-token: write 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 15 | with: 16 | fetch-depth: 0 17 | - uses: hynek/build-and-inspect-python-package@b5076c307dc91924a82ad150cdd1533b444d3310 # v2.12.0 18 | 19 | publish: 20 | runs-on: ubuntu-latest 21 | environment: 22 | name: publishing 23 | url: https://pypi.org/p/tap-dbt 24 | needs: build 25 | if: startsWith(github.ref, 'refs/tags/') 26 | steps: 27 | - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 28 | with: 29 | name: Packages 30 | path: dist 31 | - name: Upload wheel to release 
32 | uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # 2.9.0 33 | with: 34 | file: dist/*.whl 35 | tag: ${{ github.ref }} 36 | overwrite: true 37 | file_glob: true 38 | 39 | - name: Publish 40 | uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 41 | # with: 42 | # attestations: true 43 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Meltano 2 | .meltano/ 3 | 4 | # IDE 5 | .vscode/ 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 104 | __pypackages__/ 105 | 106 | # Celery stuff 107 | celerybeat-schedule 108 | celerybeat.pid 109 | 110 | # SageMath parsed files 111 | *.sage.py 112 | 113 | # Environments 114 | .env 115 | .venv 116 | env/ 117 | venv/ 118 | ENV/ 119 | env.bak/ 120 | venv.bak/ 121 | 122 | # Spyder project settings 123 | .spyderproject 124 | .spyproject 125 | 126 | # Rope project settings 127 | .ropeproject 128 | 129 | # mkdocs documentation 130 | /site 131 | 132 | # mypy 133 | .mypy_cache/ 134 | .dmypy.json 135 | dmypy.json 136 | 137 | # Pyre type checker 138 | .pyre/ 139 | 140 | # pytype static type analyzer 141 | .pytype/ 142 | 143 | # Cython debug symbols 144 | cython_debug/ 145 | 146 | # General 147 | .DS_Store 148 | .AppleDouble 149 | .LSOverride 150 | 151 | # Icon must end with two \r 152 | Icon 153 | 154 | # Thumbnails 155 | ._* 156 | 157 | # Files that might appear in the root of a volume 158 | .DocumentRevisions-V100 159 | .fseventsd 160 | .Spotlight-V100 161 | .TemporaryItems 162 | .Trashes 163 | .VolumeIcon.icns 164 | .com.apple.timemachine.donotpresent 165 | 166 | # Directories potentially created on remote AFP share 167 | .AppleDB 168 | .AppleDesktop 169 | Network Trash Folder 170 | Temporary Items 171 | .apdisk 172 | -------------------------------------------------------------------------------- /.output/.gitignore: -------------------------------------------------------------------------------- 1 | # IMPORTANT! This folder is hidden from git - if you need to store config files or other secrets, 2 | # make sure those are never staged for commit into your git repo. You can store them here or another 3 | # secure location. 
4 | 5 | * 6 | !.gitignore 7 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autofix_prs: true 3 | autofix_commit_msg: '[pre-commit.ci] auto fixes' 4 | autoupdate_schedule: monthly 5 | autoupdate_commit_msg: 'chore(deps): pre-commit autoupdate' 6 | skip: 7 | - check-dependabot 8 | - uv-lock 9 | 10 | repos: 11 | - repo: https://github.com/pre-commit/pre-commit 12 | rev: v4.2.0 13 | hooks: 14 | - id: validate_manifest 15 | 16 | - repo: https://github.com/pre-commit/pre-commit-hooks 17 | rev: v5.0.0 18 | hooks: 19 | - id: check-json 20 | exclude: "\\.vscode/.*.json" 21 | - id: check-toml 22 | - id: check-yaml 23 | - id: end-of-file-fixer 24 | - id: trailing-whitespace 25 | 26 | - repo: https://github.com/tox-dev/pyproject-fmt 27 | rev: "v2.6.0" 28 | hooks: 29 | - id: pyproject-fmt 30 | 31 | - repo: https://github.com/python-jsonschema/check-jsonschema 32 | rev: 0.33.0 33 | hooks: 34 | - id: check-dependabot 35 | - id: check-meltano 36 | - id: check-github-workflows 37 | 38 | - repo: https://github.com/astral-sh/ruff-pre-commit 39 | rev: v0.11.12 40 | hooks: 41 | - id: ruff 42 | - id: ruff-format 43 | 44 | - repo: https://github.com/astral-sh/uv-pre-commit 45 | rev: 0.7.9 46 | hooks: 47 | - id: uv-lock 48 | 49 | - repo: https://github.com/pre-commit/mirrors-mypy 50 | rev: v1.16.0 51 | hooks: 52 | - id: mypy 53 | pass_filenames: true 54 | additional_dependencies: 55 | - types-requests 56 | - types-PyYAML 57 | -------------------------------------------------------------------------------- /.secrets/.gitignore: -------------------------------------------------------------------------------- 1 | # IMPORTANT! This folder is hidden from git - if you need to store config files or other secrets, 2 | # make sure those are never staged for commit into your git repo. You can store them here or another 3 | # secure location. 
4 | 5 | * 6 | !.gitignore 7 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for 
clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | edgarrm358@gmail.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. 
Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 
123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tap-dbt 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/tap-dbt.svg?color=blue)](https://pypi.org/project/tap-dbt/) 4 | [![Python versions](https://img.shields.io/pypi/pyversions/tap-dbt.svg)](https://pypi.org/project/tap-dbt/) 5 | [![Singer](https://img.shields.io/badge/Singer-Tap-purple.svg)](https://hub.meltano.com/taps/dbt) 6 | [![Test Tap](https://github.com/edgarrmondragon/tap-dbt/actions/workflows/ci.yml/badge.svg)](https://github.com/MeltanoLabs/tap-dbt/actions/workflows/ci.yml) 7 | 8 | `tap-dbt` is a Singer tap for the [dbt Cloud API v2][dbtcloud]. 9 | 10 | Built with the [Singer SDK][sdk]. 
11 | 12 | - [Installation](#installation) 13 | - [Features](#features) 14 | - [Configuration](#configuration) 15 | - [Inputs](#inputs) 16 | - [JSON example](#json-example) 17 | - [Environment variables example](#environment-variables-example) 18 | - [Meltano Example](#meltano-example) 19 | - [Usage](#usage) 20 | - [Executing the Tap Directly](#executing-the-tap-directly) 21 | - [With Meltano](#with-meltano) 22 | 23 | ## Installation 24 | 25 | ```shell 26 | pip install tap-dbt 27 | ``` 28 | 29 | ## Features 30 | 31 | ### Streams 32 | 33 | Full stream metadata is available in the dbt Labs repository: [openapi_schema] 34 | 35 | #### Selected by default 36 | The following will be extracted by default if no catalog is used: 37 | 38 | - [x] Stream: accounts 39 | - [x] Stream: jobs 40 | - [x] Stream: projects 41 | - [x] Stream: runs 42 | 43 | 44 | #### Configurable 45 | Can be enabled by setting `selected` in the catalog: 46 | 47 | - [x] Stream: connections 48 | - [x] Stream: environments 49 | - [x] Stream: repositories 50 | - [x] Stream: users 51 | 52 | 53 | ### Incremental Run Stream 54 | 55 | Ordering the query from the Runs endpoint by `-finished_at`, i.e. 
descending Run Finished Datetime, yields: 56 | 57 | |id|finished_at|updated_at|created_at| 58 | |---|---|---|---| 59 | |314516|None|2023-05-27 21:05:16.109548+00:00|2023-05-27 21:05:05.664170+00:00| 60 | |314514|None|2023-05-27 21:00:16.847296+00:00|2023-05-27 21:00:05.458908+00:00| 61 | |314513|None|2023-05-27 21:00:16.355680+00:00|2023-05-27 21:00:05.427258+00:00| 62 | |314517|None|2023-05-27 21:05:17.094309+00:00|2023-05-27 21:05:05.696222+00:00| 63 | |314515|2023-05-27 21:01:28.568431+00:00|2023-05-27 21:01:29.269048+00:00|2023-05-27 21:00:05.488543+00:00| 64 | |314512|2023-05-27 20:48:59.342035+00:00|2023-05-27 20:48:59.844412+00:00|2023-05-27 20:45:04.509746+00:00| 65 | |314511|2023-05-27 20:48:46.571106+00:00|2023-05-27 20:48:47.079130+00:00|2023-05-27 20:40:04.257950+00:00| 66 | |314505|2023-05-27 20:41:35.591976+00:00|2023-05-27 20:41:36.305364+00:00|2023-05-27 20:15:02.808079+00:00| 67 | |314510|2023-05-27 20:39:27.162437+00:00|2023-05-27 20:39:28.628257+00:00|2023-05-27 20:35:03.939439+00:00| 68 | |314509|2023-05-27 20:37:39.965974+00:00|2023-05-27 20:37:40.496212+00:00|2023-05-27 20:30:03.802620+00:00| 69 | 70 | The incremental sync has been set up so that it works on `replication_key = "finished_at"`, when an INCREMENTAL sync is run: 71 | 72 | - If the bookmark is set, the stream is queried in reverse `finished_at` order. 
73 | - If the `finished_at` value is not set, the run is assumed to still be running so the record is included, plus the sort order implies that there should be records with populated `finished_at` appearing later in the stream - *Repeated sync operation will yield the same records if the dbt Job Run is still underway, however this adheres to the 'at least once' delivery promise - https://sdk.meltano.com/en/latest/implementation/at_least_once.html* 74 | - Once the sync operation reaches records with populated `finished_at`, the values are compared with the bookmark and once the `finished_at` value becomes less than the bookmark the stream finishes syncing. 75 | 76 | 77 | ## Configuration 78 | 79 | Visit the [API docs][apidocs] for instructions on how to get your API key. 80 | 81 | You can pass configuration using environment variables with the `TAP_DBT_` prefix followed by the uppercased field name 82 | 83 | ```shell 84 | tap-dbt --config=ENV 85 | ``` 86 | 87 | or a JSON file 88 | 89 | ```shell 90 | tap-dbt --config=config.json 91 | ``` 92 | 93 | ### Inputs 94 | 95 | | Field | Description | Type | Required | Default | 96 | |---------------|-----------------------------------------------------------------|----------------|----------|--------------------------------------------------| 97 | | `api_key` | API key for the dbt Cloud API | `string` | yes | | 98 | | `account_ids` | dbt Cloud account IDs | `list(string)` | yes | | 99 | | `user_agent` | User-Agent to make requests with | `string` | no | `tap-dbt/0.1.0 Singer Tap for the dbt Cloud API` | 100 | | `base_url` | Base URL for the dbt Cloud API | `string` | no | `https://cloud.getdbt.com/api/v2` | 101 | | `page_size` | Number of records per API call, sets the `limit=` url parameter | `integer` | no | 5000 | 102 | 103 | A full list of supported settings and capabilities for this tap is available by running: 104 | 105 | ```shell 106 | tap-dbt --about --format json 107 | ``` 108 | 109 | ### JSON example 110 | 111 | 
```json 112 | { 113 | "api_key": "da39a3ee5e6b4b0d3255bfef95601890afd80709", 114 | "account_ids": ["51341"], 115 | "user_agent": "tap-dbt/0.1.0 Singer Tap for the dbt Cloud API", 116 | "base_url": "https://my-dbt-cloud-api.com", 117 | "page_size": 5000 118 | } 119 | ``` 120 | 121 | ### Environment variables example 122 | 123 | ```dotenv 124 | TAP_DBT_API_KEY=da39a3ee5e6b4b0d3255bfef95601890afd80709 125 | TAP_DBT_ACCOUNT_IDS=51341 126 | TAP_DBT_USER_AGENT='tap-dbt/0.1.0 Singer Tap for the dbt Cloud API' 127 | TAP_DBT_BASE_URL=https://my-dbt-cloud-api.com 128 | TAP_DBT_PAGE_SIZE=5000 129 | ``` 130 | 131 | ### Meltano Example 132 | 133 | ```yaml 134 | plugins: 135 | extractors: 136 | - name: tap-dbt 137 | logo_url: https://hub.meltano.com/assets/logos/taps/dbt.png 138 | label: dbt Cloud 139 | docs: https://hub.meltano.com/taps/dbt 140 | repo: https://github.com/edgarrmondragon/tap-dbt 141 | namespace: dbt 142 | pip_url: tap-dbt 143 | executable: tap-dbt 144 | capabilities: 145 | - catalog 146 | - discover 147 | settings: 148 | - name: base_url 149 | label: dbt Cloud URL 150 | placeholder: "https://cloud.getdbt.com/api/v2" 151 | - name: api_key 152 | kind: password 153 | label: API Key 154 | docs: "https://docs.getdbt.com/dbt-cloud/api#section/Authentication" 155 | - name: account_ids 156 | kind: array 157 | label: Account IDs 158 | - name: user_agent 159 | label: User-Agent 160 | placeholder: "tap-dbt/0.1.0 Singer Tap for the dbt Cloud API" 161 | - name: page_size 162 | kind: integer 163 | label: Page Size 164 | 165 | ``` 166 | 167 | ## Usage 168 | 169 | You can easily run `tap-dbt` with the CLI or using [Meltano][meltano]. 
170 | 171 | ### Executing the Tap Directly 172 | 173 | ```shell 174 | tap-dbt --version 175 | tap-dbt --help 176 | tap-dbt --config .secrets/example.json --discover > ./catalog.json 177 | ``` 178 | 179 | ### With Meltano 180 | 181 | ```shell 182 | meltano elt tap-dbt target-snowflake --job_id dbt_snowflake 183 | ``` 184 | 185 | [dbtcloud]: https://cloud.getdbt.com 186 | [sdk]: https://github.com/meltano/sdk 187 | [apidocs]: https://docs.getdbt.com/dbt-cloud/api#section/Authentication 188 | [meltano]: https://www.meltano.com 189 | [openapi_schema]: https://github.com/dbt-labs/dbt-cloud-openapi-spec/blob/master/openapi-v3.yaml 190 | -------------------------------------------------------------------------------- /meltano.yml: -------------------------------------------------------------------------------- 1 | version: 1 2 | project_id: 5300fed0-e42d-4bfd-85c1-ae89f0686fd3 3 | default_environment: dev 4 | plugins: 5 | extractors: 6 | - name: tap-dbt 7 | namespace: tap_dbt 8 | pip_url: -e . 9 | executable: tap-dbt 10 | capabilities: 11 | - catalog 12 | - discover 13 | - state 14 | settings: 15 | - name: api_key 16 | kind: password 17 | - name: account_ids 18 | kind: array 19 | - name: user_agent 20 | - name: base_url 21 | environments: 22 | - name: dev 23 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "hatchling.build" 3 | requires = [ 4 | "hatch-vcs", 5 | "hatchling", 6 | ] 7 | 8 | [project] 9 | name = "tap-dbt" 10 | description = "Singer tap for dbt, built with the Singer SDK." 
11 | readme = "README.md" 12 | keywords = [ 13 | "dbt", 14 | "elt", 15 | "singer-sdk", 16 | "singer.io", 17 | ] 18 | license = "Apache-2.0" 19 | maintainers = [ 20 | { name = "Edgar Ramírez Mondragón", email = "edgarrm358@sample.com" }, 21 | ] 22 | authors = [ 23 | { name = "Edgar Ramírez Mondragón", email = "edgarrm358@sample.com" }, 24 | ] 25 | requires-python = ">=3.9" 26 | classifiers = [ 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Programming Language :: Python :: 3.9", 29 | "Programming Language :: Python :: 3.10", 30 | "Programming Language :: Python :: 3.11", 31 | "Programming Language :: Python :: 3.12", 32 | "Programming Language :: Python :: 3.13", 33 | ] 34 | dynamic = [ "version" ] 35 | dependencies = [ 36 | "backports-datetime-fromisoformat~=2.0.1; python_version<'3.11'", 37 | "pyyaml~=6.0", 38 | "requests~=2.32.0", 39 | "singer-sdk~=0.46.0", 40 | ] 41 | 42 | urls.Documentation = "https://github.com/MeltanoLabs/tap-dbt/blob/main/README.md" 43 | urls.Homepage = "https://github.com/MeltanoLabs/tap-dbt" 44 | urls.Repository = "https://github.com/MeltanoLabs/tap-dbt" 45 | scripts.tap-dbt = "tap_dbt.tap:cli" 46 | 47 | [dependency-groups] 48 | dev = [ 49 | "deptry>=0.14.2", 50 | "faker>=17.6", 51 | "pytest~=8.0", 52 | "responses~=0.25.0", 53 | ] 54 | 55 | [tool.hatch.version] 56 | source = "vcs" 57 | 58 | [tool.ruff] 59 | line-length = 88 60 | src = [ 61 | "tap_dbt", 62 | "tests", 63 | ] 64 | lint.select = [ 65 | "ALL", 66 | ] 67 | lint.ignore = [ 68 | "COM812", # missing-trailing-comma 69 | "FIX002", # line-contains-todo 70 | "ISC001", # single-line-implicit-string-concatenation 71 | ] 72 | 73 | lint.per-file-ignores."tests/*" = [ 74 | "ANN201", 75 | "S101", 76 | ] 77 | lint.unfixable = [ 78 | "ERA001", # commented-out-code 79 | ] 80 | lint.isort.known-first-party = [ 81 | "tap_dbt", 82 | ] 83 | lint.isort.required-imports = [ 84 | "from __future__ import annotations", 85 | ] 86 | lint.pydocstyle.convention = "google" 87 | 88 | [tool.deptry] 
89 | known_first_party = [ 90 | "tap_dbt", 91 | ] 92 | 93 | [tool.deptry.package_module_name_map] 94 | "backports-datetime-fromisoformat" = "backports.datetime_fromisoformat" 95 | faker = "faker" 96 | pytest = "pytest" 97 | responses = "responses" 98 | 99 | [tool.deptry.per_rule_ignores] 100 | DEP001 = [ "backports" ] 101 | DEP002 = [ "backports-datetime-fromisoformat", "requests" ] 102 | 103 | [tool.uv] 104 | required-version = ">=0.5.19" 105 | -------------------------------------------------------------------------------- /tap_dbt/__init__.py: -------------------------------------------------------------------------------- 1 | """Singer tap for the dbt Cloud API.""" 2 | -------------------------------------------------------------------------------- /tap_dbt/client.py: -------------------------------------------------------------------------------- 1 | """Base class for connecting to the dbt Cloud API.""" 2 | 3 | from __future__ import annotations 4 | 5 | import importlib.resources 6 | import typing as t 7 | from abc import abstractmethod 8 | from functools import cache, cached_property 9 | 10 | import yaml 11 | from singer_sdk import RESTStream 12 | from singer_sdk.authenticators import APIAuthenticatorBase, SimpleAuthenticator 13 | from singer_sdk.singerlib import resolve_schema_references 14 | 15 | from tap_dbt import schemas 16 | 17 | 18 | @cache 19 | def load_openapi() -> dict[str, t.Any]: 20 | """Load the OpenAPI specification from the package. 21 | 22 | Returns: 23 | The OpenAPI specification as a dict. 
24 | """ 25 | schema_path = importlib.resources.files(schemas) / "openapi_v2.yaml" 26 | with schema_path.open() as schema: 27 | return yaml.safe_load(schema) 28 | 29 | 30 | class DBTStream(RESTStream): 31 | """dbt stream class.""" 32 | 33 | primary_keys: t.ClassVar[list[str]] = ["id"] 34 | records_jsonpath = "$.data[*]" 35 | 36 | @property 37 | def url_base(self) -> str: 38 | """Base URL for this stream.""" 39 | return self.config.get("base_url", "https://cloud.getdbt.com/api/v2") 40 | 41 | @property 42 | def http_headers(self) -> dict: 43 | """HTTP headers for this stream.""" 44 | headers = super().http_headers 45 | headers["Accept"] = "application/json" 46 | return headers 47 | 48 | @property 49 | def authenticator(self) -> APIAuthenticatorBase: 50 | """Return the authenticator for this stream.""" 51 | return SimpleAuthenticator( 52 | stream=self, 53 | auth_headers={ 54 | "Authorization": f"Token {self.config.get('api_key')}", 55 | }, 56 | ) 57 | 58 | def _resolve_openapi_ref(self) -> dict[str, t.Any]: 59 | schema = {"$ref": f"#/components/schemas/{self.openapi_ref}"} 60 | openapi = load_openapi() 61 | schema["components"] = openapi["components"] 62 | return resolve_schema_references(schema) 63 | 64 | @cached_property 65 | def schema(self) -> dict[str, t.Any]: 66 | """Return the schema for this stream. 67 | 68 | Returns: 69 | The schema for this stream. 70 | """ 71 | openapi_response = self._resolve_openapi_ref() 72 | 73 | for property_schema in openapi_response["properties"].values(): 74 | if property_schema.get("nullable"): 75 | if isinstance(property_schema["type"], list): 76 | property_schema["type"].append("null") 77 | else: 78 | property_schema["type"] = [property_schema["type"], "null"] 79 | 80 | return openapi_response 81 | 82 | @property 83 | @abstractmethod 84 | def openapi_ref(self) -> str: 85 | """Return the OpenAPI component name for this stream. 86 | 87 | Returns: 88 | The OpenAPI reference for this stream. 89 | """ 90 | ... 
91 | -------------------------------------------------------------------------------- /tap_dbt/schemas/__init__.py: -------------------------------------------------------------------------------- 1 | """OpenAPI schema for dbt Cloud.""" 2 | -------------------------------------------------------------------------------- /tap_dbt/schemas/openapi_v2.yaml: -------------------------------------------------------------------------------- 1 | # taken from https://raw.githubusercontent.com/fishtown-analytics/dbt-cloud-openapi-spec/ee64f573d79585f12d30eaafc223dc8a84052c9a/openapi-v2-old.yaml 2 | 3 | openapi: 3.0.0 4 | servers: 5 | - description: Production 6 | url: https://cloud.getdbt.com/api/v2 7 | info: 8 | version: "2.0.0" 9 | title: dbt Cloud API v2 10 | termsOfService: 'https://www.getdbt.com/cloud/terms' 11 | description: | 12 | The dbt Cloud API makes it possible to fetch data from your 13 | dbt Cloud account and programmatically run and monitor dbt jobs. 14 | 15 | # How to use this API 16 | 17 | The dbt Cloud API is intended for enqueuing runs from a job, polling for run progress, 18 | and downloading artifacts after jobs have completed running. Operational endpoints around 19 | creating, modifying, and deleting _objects_ in dbt Cloud are still in flux. These endpoints 20 | are largely undocumented in API v2. 21 | 22 | The API docs are generated from an openapi spec defined in the 23 | [dbt-cloud-openapi-spec](https://github.com/fishtown-analytics/dbt-cloud-openapi-spec) 24 | repository. If you find issues in these docs or have questions about using the dbt Cloud 25 | API, please open an issue in the dbt-cloud-openapi-spec repo or contact support@getdbt.com. 26 | 27 | # Authentication 28 | 29 | To authenticate an application with the dbt Cloud API, navigate to the 30 | API Settings page in your [dbt Cloud profile](https://cloud.getdbt.com/#/profile/api/). 
31 | If you cannot access this page, confirm that your dbt Cloud account has access to the API, 32 | and that you are using the hosted version of dbt Cloud. If dbt Cloud is running inside of a VPC 33 | in an Enterprise account, contact your account manager for help finding your API key. 34 | 35 | ## TokenAuth 36 | 37 | Once you've found your API key, use it in the Authorization header of requests to the dbt Cloud API. 38 | Be sure to include the `Token` prefix in the Authorization header, or the request will fail with a 39 | "401 Unauthorized" error. Note: `Bearer` can be used in place of `Token` in the Authorization header. 40 | Both syntaxes are equivalent. 41 | 42 | **Headers** 43 | ``` 44 | Accept: application/json 45 | Authorization: Token 46 | ``` 47 | 48 | ## Example request 49 | 50 | The following example will list the Accounts that your token is authorized to access. 51 | Be sure to replace `` in the Authorization header with your actual API token. 52 | 53 | ``` 54 | curl --request GET \ 55 | --url https://cloud.getdbt.com/api/v2/accounts/ \ 56 | --header 'Content-Type: application/json' \ 57 | --header 'Authorization: Token ' 58 | ``` 59 | 60 | # Pagination 61 | 62 | All top-level API resources have support for bulk fetches via "list" API methods. These list 63 | API methods accept `limit` and `offset` query parameters which can be used together to paginate results. 64 | 65 | Offsets begin at 0. 66 | 67 | The maximum limit for a single request is 100. 
68 | 69 | contact: 70 | email: support@getdbt.com 71 | 72 | tags: 73 | - name: Accounts 74 | description: List and view Accounts 75 | - name: Projects 76 | description: List, view, and modify Projects 77 | - name: Jobs 78 | description: List, view, and modify, and trigger Jobs 79 | - name: Runs 80 | description: List and view Runs 81 | 82 | paths: 83 | # Accounts 84 | /accounts/: 85 | get: 86 | tags: 87 | - Accounts 88 | summary: List Accounts 89 | description: Use the List Accounts endpoint to list the Accounts that your API Token is authorized to access. 90 | operationId: listAccounts 91 | responses: 92 | '200': 93 | description: Success. 94 | content: 95 | application/json: 96 | schema: 97 | $ref: '#/components/schemas/AccountsResponse' 98 | links: 99 | GetAccountById: 100 | operationId: getAccountById 101 | parameters: 102 | accountId: $response.body#/data/0/id 103 | ListProjects: 104 | operationId: listProjects 105 | parameters: 106 | accountId: $response.body#/data/0/id 107 | ListJobsForAccount: 108 | operationId: listJobsForAccount 109 | parameters: 110 | accountId: $response.body#/data/0/id 111 | '400': 112 | description: Bad Request. 113 | content: 114 | application/json: 115 | schema: 116 | $ref: '#/components/schemas/ErrorResponse' 117 | 118 | '404': 119 | description: Unauthorized or Not Found. 120 | content: 121 | application/json: 122 | schema: 123 | $ref: '#/components/schemas/ErrorResponse' 124 | /accounts/{accountId}/: 125 | get: 126 | tags: 127 | - Accounts 128 | summary: Get Account 129 | description: Get an Account by its ID 130 | operationId: getAccountById 131 | parameters: 132 | - $ref: '#/components/parameters/accountId' 133 | responses: 134 | '200': 135 | description: Success. 136 | content: 137 | application/json: 138 | schema: 139 | $ref: '#/components/schemas/AccountResponse' 140 | 141 | '400': 142 | description: Bad Request. 
143 | content: 144 | application/json: 145 | schema: 146 | $ref: '#/components/schemas/ErrorResponse' 147 | 148 | '404': 149 | description: Unauthorized or Not Found. 150 | content: 151 | application/json: 152 | schema: 153 | $ref: '#/components/schemas/ErrorResponse' 154 | /accounts/{accountId}/users: 155 | get: 156 | tags: 157 | - Accounts 158 | summary: List Users 159 | description: Use the List Users endpoint to list the Users in the specified Account 160 | operationId: listUsers 161 | parameters: 162 | - $ref: '#/components/parameters/accountId' 163 | responses: 164 | '200': 165 | description: Success. 166 | content: 167 | application/json: 168 | schema: 169 | $ref: '#/components/schemas/UsersResponse' 170 | /accounts/{accountId}/permissions/{licenseId}: 171 | post: 172 | tags: 173 | - Accounts 174 | summary: Update License 175 | description: Update (or deactivate) permissions for a given license 176 | operationId: updateLicense 177 | requestBody: 178 | content: 179 | application/json: 180 | schema: 181 | $ref: '#/components/schemas/UserLicense' 182 | parameters: 183 | - $ref: '#/components/parameters/accountId' 184 | - in: path 185 | name: licenseId 186 | schema: 187 | type: integer 188 | required: true 189 | description: Numeric ID of the License to update 190 | responses: 191 | '200': 192 | description: Success. 193 | content: 194 | application/json: 195 | schema: 196 | $ref: '#/components/schemas/UpdateLicenseResponse' 197 | '404': 198 | description: Unauthorized or Not Found. 199 | content: 200 | application/json: 201 | schema: 202 | $ref: '#/components/schemas/ErrorResponse' 203 | 204 | /accounts/{accountId}/projects/: 205 | get: 206 | tags: 207 | - Projects 208 | summary: List Projects 209 | description: Use the List Projects endpoint to list the Projects in the specified Account 210 | operationId: listProjects 211 | parameters: 212 | - $ref: '#/components/parameters/accountId' 213 | responses: 214 | '200': 215 | description: Success. 
216 | content: 217 | application/json: 218 | schema: 219 | $ref: '#/components/schemas/ProjectsResponse' 220 | links: 221 | GetProjectById: 222 | operationId: getProjectById 223 | parameters: 224 | accoutnId: $request.path.accountId 225 | projectId: $response.body#/id 226 | '400': 227 | description: Bad Request. 228 | content: 229 | application/json: 230 | schema: 231 | $ref: '#/components/schemas/ErrorResponse' 232 | 233 | '404': 234 | description: Unauthorized or Not Found. 235 | content: 236 | application/json: 237 | schema: 238 | $ref: '#/components/schemas/ErrorResponse' 239 | /accounts/{accountId}/projects/{projectId}: 240 | get: 241 | tags: 242 | - Projects 243 | summary: Get Project 244 | description: Get a Project by its ID 245 | operationId: getProjectById 246 | parameters: 247 | - $ref: '#/components/parameters/accountId' 248 | - in: path 249 | name: projectId 250 | schema: 251 | type: integer 252 | required: true 253 | description: Numeric ID of the Project to retrieve 254 | responses: 255 | '200': 256 | description: Success. 257 | content: 258 | application/json: 259 | schema: 260 | $ref: '#/components/schemas/ProjectResponse' 261 | 262 | '400': 263 | description: Bad Request. 264 | content: 265 | application/json: 266 | schema: 267 | $ref: '#/components/schemas/ErrorResponse' 268 | 269 | '404': 270 | description: Unauthorized or Not Found. 271 | content: 272 | application/json: 273 | schema: 274 | $ref: '#/components/schemas/ErrorResponse' 275 | 276 | /accounts/{accountId}/jobs/: 277 | get: 278 | tags: 279 | - Jobs 280 | summary: List jobs 281 | description: List jobs in a project 282 | operationId: listJobsForAccount 283 | parameters: 284 | - $ref: '#/components/parameters/accountId' 285 | - in: query 286 | name: order_by 287 | example: "-id" 288 | schema: 289 | type: string 290 | description: | 291 | Field to order the result by. Use `-` to indicate reverse order. 
292 | - in: query 293 | name: project_id 294 | schema: 295 | type: integer 296 | description: | 297 | Numeric ID of the project containing jobs 298 | responses: 299 | '200': 300 | description: Success. 301 | content: 302 | application/json: 303 | schema: 304 | $ref: '#/components/schemas/JobsResponse' 305 | 306 | '400': 307 | description: Bad Request. 308 | content: 309 | application/json: 310 | schema: 311 | $ref: '#/components/schemas/ErrorResponse' 312 | 313 | '404': 314 | description: Unauthorized or Not Found. 315 | content: 316 | application/json: 317 | schema: 318 | $ref: '#/components/schemas/ErrorResponse' 319 | 320 | post: 321 | tags: 322 | - Jobs 323 | summary: Create job 324 | description: Create a job in a project. 325 | operationId: createJob 326 | requestBody: 327 | content: 328 | application/json: 329 | schema: 330 | $ref: '#/components/schemas/Job' 331 | parameters: 332 | - $ref: '#/components/parameters/accountId' 333 | responses: 334 | '201': 335 | description: Success. 336 | content: 337 | application/json: 338 | schema: 339 | $ref: '#/components/schemas/JobResponse' 340 | 341 | '400': 342 | description: Bad Request. 343 | content: 344 | application/json: 345 | schema: 346 | $ref: '#/components/schemas/ErrorResponse' 347 | 348 | '404': 349 | description: Unauthorized or Not Found. 350 | content: 351 | application/json: 352 | schema: 353 | $ref: '#/components/schemas/ErrorResponse' 354 | 355 | /accounts/{accountId}/jobs/{jobId}/: 356 | get: 357 | tags: 358 | - Jobs 359 | summary: Get job 360 | description: Return job details for a job on an account 361 | operationId: getJobById 362 | parameters: 363 | - $ref: '#/components/parameters/accountId' 364 | - $ref: '#/components/parameters/jobId' 365 | - in: query 366 | name: order_by 367 | example: "-id" 368 | schema: 369 | type: string 370 | description: | 371 | Field to order the result by. Use `-` to indicate reverse order. 372 | responses: 373 | '200': 374 | description: Success. 
375 | content: 376 | application/json: 377 | schema: 378 | $ref: '#/components/schemas/JobResponse' 379 | 380 | '400': 381 | description: Bad Request. 382 | content: 383 | application/json: 384 | schema: 385 | $ref: '#/components/schemas/ErrorResponse' 386 | 387 | '404': 388 | description: Unauthorized or Not Found. 389 | content: 390 | application/json: 391 | schema: 392 | $ref: '#/components/schemas/ErrorResponse' 393 | 394 | post: 395 | tags: 396 | - Jobs 397 | summary: Update job 398 | description: Update the definition of an existing job 399 | operationId: updateJobById 400 | requestBody: 401 | content: 402 | application/json: 403 | schema: 404 | $ref: '#/components/schemas/Job' 405 | parameters: 406 | - $ref: '#/components/parameters/accountId' 407 | - $ref: '#/components/parameters/jobId' 408 | responses: 409 | '200': 410 | description: Success. 411 | content: 412 | application/json: 413 | schema: 414 | $ref: '#/components/schemas/JobResponse' 415 | 416 | '400': 417 | description: Bad Request. 418 | content: 419 | application/json: 420 | schema: 421 | $ref: '#/components/schemas/ErrorResponse' 422 | 423 | '404': 424 | description: Unauthorized or Not Found. 425 | content: 426 | application/json: 427 | schema: 428 | $ref: '#/components/schemas/ErrorResponse' 429 | delete: 430 | tags: 431 | - Jobs 432 | operationId: deleteJobById 433 | summary: Delete job 434 | description: Delete the given job by its id 435 | parameters: 436 | - $ref: '#/components/parameters/accountId' 437 | - $ref: '#/components/parameters/jobId' 438 | responses: 439 | '200': 440 | description: Job deleted successfully 441 | content: 442 | application/json: 443 | schema: 444 | $ref: '#/components/schemas/JobResponse' 445 | 446 | /accounts/{accountId}/jobs/{jobId}/run/: 447 | post: 448 | tags: 449 | - Jobs 450 | summary: Trigger job to run 451 | description: | 452 | Use this endpoint to kick off a run for a job. 
When this endpoint returns a successful 453 | response, a new run will be enqueued for the account. Users can poll the [Get run](#operation/getRunById) 454 | endpoint to poll the run until it completes. After the run has completed, users can use the [Get run artifact](#operation/getArtifactsByRunId) 455 | endpoint to download artifacts generated by the run. 456 | operationId: triggerRun 457 | requestBody: 458 | content: 459 | application/json: 460 | example: 461 | cause: Kicked off from Airflow 462 | git_branch: staging 463 | schema_override: dbt_cloud_pr_123 464 | dbt_version_override: 0.18.0 465 | target_name_override: staging 466 | timeout_seconds_override: 3000 467 | generate_docs_override: true 468 | threads_override: 8 469 | steps_override: 470 | - dbt seed 471 | - dbt run --fail-fast 472 | - dbt test --fail-fast 473 | 474 | schema: 475 | type: object 476 | required: 477 | - cause 478 | properties: 479 | cause: 480 | description: A text description of the reason for running this job 481 | example: "Kicked off from Airflow" 482 | type: string 483 | git_sha: 484 | description: Optional. The git sha to check out before running this job 485 | type: string 486 | git_branch: 487 | description: Optional. The git branch to check out before running this job 488 | type: string 489 | schema_override: 490 | description: Optional. Override the destination schema in the configured target for this job. 491 | example: "dbt_cloud_pr_123_456" 492 | type: string 493 | dbt_version_override: 494 | description: Optional. Override the version of dbt used to run this job 495 | example: 0.18.0 496 | type: string 497 | threads_override: 498 | description: Optional. Override the number of threads used to run this job 499 | example: 8 500 | type: integer 501 | target_name_override: 502 | description: Optional. Override the `target.name` context variable used when running this job 503 | example: CI 504 | type: string 505 | generate_docs_override: 506 | description: Optional. 
Override whether or not this job generates docs (true=yes, false=no) 507 | example: true 508 | type: boolean 509 | timeout_seconds_override: 510 | description: Optional. Override the timeout in seconds for this job 511 | example: 60 512 | type: integer 513 | steps_override: 514 | type: array 515 | description: Optional. Override the list of steps for this job 516 | example: ['dbt run', 'dbt test', 'dbt source snapshot-freshness'] 517 | items: 518 | type: "string" 519 | parameters: 520 | - $ref: '#/components/parameters/accountId' 521 | - $ref: '#/components/parameters/jobId' 522 | responses: 523 | '200': 524 | description: Success. 525 | content: 526 | application/json: 527 | schema: 528 | $ref: '#/components/schemas/RunResponse' 529 | 530 | '400': 531 | description: Bad Request. 532 | content: 533 | application/json: 534 | schema: 535 | $ref: '#/components/schemas/ErrorResponse' 536 | 537 | '404': 538 | description: Unauthorized or Not Found. 539 | content: 540 | application/json: 541 | schema: 542 | $ref: '#/components/schemas/ErrorResponse' 543 | /accounts/{accountId}/runs/: 544 | get: 545 | tags: 546 | - Runs 547 | summary: List runs 548 | description: List the runs for a given account 549 | operationId: listRunsForAccount 550 | parameters: 551 | - $ref: '#/components/parameters/accountId' 552 | - $ref: '#/components/parameters/includeRelated' 553 | - in: query 554 | name: job_definition_id 555 | example: 1234 556 | schema: 557 | type: integer 558 | description: Optional. Applies a filter to only return runs from the specified Job. 559 | - in: query 560 | name: project_id 561 | example: 1234 562 | schema: 563 | type: integer 564 | description: Optional. Applies a filter to only return runs from the specified Project. 565 | - in: query 566 | name: status 567 | schema: 568 | type: integer 569 | enum: [1, 2, 3, 10, 20, 30] 570 | example: 10 571 | description: > 572 | Optional. 
Applies a filter to return only runs with the specified Status: 573 | 574 | * `1` - The run is queued but hasn't begun being scheduled 575 | * `2` - The run is being removed from the queue and is actively being scheduled 576 | * `3` - The run is currently executing 577 | * `10` - The run completed successfully 578 | * `20` - The run failed to complete 579 | * `30` - The run was cancelled by a user or via the API 580 | - in: query 581 | name: order_by 582 | example: "-id" 583 | schema: 584 | type: string 585 | default: id 586 | enum: 587 | - id 588 | - -id 589 | - created_at 590 | - -created_at 591 | - finished_at 592 | - -finished_at 593 | description: | 594 | Field to order the result by. Use `-` to indicate reverse order. 595 | 596 | All other order_by values will be deprecated on May 15, 2023. 597 | - in: query 598 | name: offset 599 | example: 100 600 | schema: 601 | type: integer 602 | description: The offset to apply when listing runs. Use with `limit` to paginate results. 603 | - in: query 604 | name: limit 605 | example: 100 606 | schema: 607 | type: integer 608 | description: The limit to apply when listing runs. Use with `offset` to paginate results. 609 | responses: 610 | '200': 611 | description: Success. 612 | content: 613 | application/json: 614 | schema: 615 | $ref: '#/components/schemas/RunsResponse' 616 | 617 | '400': 618 | description: Bad Request. 619 | content: 620 | application/json: 621 | schema: 622 | $ref: '#/components/schemas/ErrorResponse' 623 | 624 | '404': 625 | description: Unauthorized or Not Found. 
626 | content: 627 | application/json: 628 | schema: 629 | $ref: '#/components/schemas/ErrorResponse' 630 | /accounts/{accountId}/runs/{runId}/: 631 | get: 632 | tags: 633 | - Runs 634 | summary: Get run 635 | description: Fetch information about a specific run 636 | operationId: getRunById 637 | parameters: 638 | - $ref: '#/components/parameters/accountId' 639 | - in: path 640 | name: runId 641 | schema: 642 | type: integer 643 | required: true 644 | description: Numeric ID of the run to retrieve 645 | - $ref: '#/components/parameters/includeRelatedRun' 646 | responses: 647 | '200': 648 | description: Success. 649 | content: 650 | application/json: 651 | schema: 652 | $ref: '#/components/schemas/RunResponse' 653 | 654 | '400': 655 | description: Bad Request. 656 | content: 657 | application/json: 658 | schema: 659 | $ref: '#/components/schemas/ErrorResponse' 660 | 661 | '404': 662 | description: Unauthorized or Not Found. 663 | content: 664 | application/json: 665 | schema: 666 | $ref: '#/components/schemas/ErrorResponse' 667 | 668 | /accounts/{accountId}/runs/{runId}/artifacts/: 669 | get: 670 | tags: 671 | - Runs 672 | summary: List run artifacts 673 | description: | 674 | Use this endpoint to fetch a list of artifact files generated for a completed run. 675 | 676 | operationId: listArtifactsByRunId 677 | parameters: 678 | - $ref: '#/components/parameters/accountId' 679 | - in: path 680 | name: runId 681 | schema: 682 | type: integer 683 | required: true 684 | description: Numeric ID of the run to retrieve 685 | responses: 686 | '200': 687 | description: Success. 688 | content: 689 | application/json: 690 | schema: 691 | type: "array" 692 | example: ['manifest.json', 'catalog.json', 'run_results.json', 'compiled/my_project/my_model.sql'] 693 | items: 694 | type: "string" 695 | description: A list of artifact file paths that can be used with the [getArtifactsByRunId](#operation/getArtifactsByRunId) endpoint 696 | 697 | '400': 698 | description: Bad Request. 
699 | content: 700 | application/json: 701 | schema: 702 | $ref: '#/components/schemas/ErrorResponse' 703 | 704 | '404': 705 | description: Unauthorized or Not Found. 706 | content: 707 | application/json: 708 | schema: 709 | $ref: '#/components/schemas/ErrorResponse' 710 | 711 | /accounts/{accountId}/runs/{runId}/artifacts/{path}: 712 | get: 713 | tags: 714 | - Runs 715 | summary: Get a run artifact 716 | description: | 717 | Use this endpoint to fetch artifacts from a completed run. Once a run has completed, 718 | you can use this endpoint to download the `manifest.json`, `run_results.json` or 719 | `catalog.json` files from dbt Cloud. These _artifacts_ contain information about 720 | the models in your dbt project, timing information around their execution, and 721 | a status message indicating the result of the model build. 722 | 723 | **Note:** By default, this endpoint returns artifacts from the _last step_ in the run. 724 | To list artifacts from other steps in the run, use the `step` query parameter described below. 725 | operationId: getArtifactsByRunId 726 | parameters: 727 | - $ref: '#/components/parameters/accountId' 728 | - in: path 729 | name: runId 730 | schema: 731 | type: integer 732 | required: true 733 | description: Numeric ID of the run to retrieve 734 | - in: path 735 | name: path 736 | schema: 737 | type: string 738 | required: true 739 | example: 'manifest.json' 740 | description: | 741 | Paths are rooted at the `target/` directory. Use `manifest.json`, `catalog.json`, 742 | or `run_results.json` to download dbt-generated artifacts for the run. 743 | - in: query 744 | name: step 745 | schema: 746 | type: integer 747 | required: false 748 | description: | 749 | The index of the Step in the Run to query for artifacts. The first step in the run 750 | has the index `1`. If the `step` parameter is omitted, then this endpoint 751 | will return the artifacts compiled for the last step in the run. 
752 |       responses: 753 |         '200': 754 |           description: Success. 755 |           content: 756 |             application/json: 757 |               schema: 758 |                 type: string 759 |                 description: Raw file contents produced by dbt 760 |                 example: "# package.yaml 761 |                   packages: 762 |                     - package: dbt-labs/audit_helper 763 |                       version: 0.5.0 764 |                     - package: dbt-labs/codegen 765 |                       version: 0.5.0 766 |                     - package: dbt-labs/dbt_utils 767 |                       version: 0.8.2" 768 |         '400': 769 |           description: Bad Request. 770 |           content: 771 |             application/json: 772 |               schema: 773 |                 $ref: '#/components/schemas/ErrorResponse' 774 | 775 |         '404': 776 |           description: Unauthorized or Not Found. 777 |           content: 778 |             application/json: 779 |               schema: 780 |                 $ref: '#/components/schemas/ErrorResponse' 781 |   /accounts/{accountId}/steps/{stepId}/: 782 |     get: 783 |       tags: 784 |         - Runs 785 |       summary: Get step 786 |       description: Fetch information about a given step 787 |       operationId: getStep 788 |       parameters: 789 |         - $ref: '#/components/parameters/accountId' 790 |         - in: path 791 |           name: stepId 792 |           schema: 793 |             type: integer 794 |           required: true 795 |           description: Numeric ID of the step 796 |         - $ref: '#/components/parameters/includeRelated' 797 |       responses: 798 |         '200': 799 |           description: Success. 800 |           content: 801 |             application/json: 802 |               schema: 803 |                 $ref: '#/components/schemas/StepResponse' 804 |         '404': 805 |           description: Unauthorized or Not Found. 806 |           content: 807 |             application/json: 808 |               schema: 809 |                 $ref: '#/components/schemas/ErrorResponse' 810 | 811 |   /accounts/{accountId}/runs/{runId}/cancel/: 812 |     post: 813 |       tags: 814 |         - Runs 815 |       summary: Cancel a run 816 |       description: Cancel a run in progress 817 |       operationId: cancelRunById 818 |       parameters: 819 |         - $ref: '#/components/parameters/accountId' 820 |         - in: path 821 |           name: runId 822 |           schema: 823 |             type: integer 824 |           required: true 825 |           description: Numeric ID of the run to cancel 826 |       responses: 827 |         '200': 828 |           description: Success.
829 | content: 830 | application/json: 831 | schema: 832 | $ref: '#/components/schemas/RunResponse' 833 | 834 | '404': 835 | description: Unauthorized or Not Found. 836 | content: 837 | application/json: 838 | schema: 839 | $ref: '#/components/schemas/ErrorResponse' 840 | 841 | components: 842 | parameters: 843 | accountId: 844 | in: path 845 | name: accountId 846 | schema: 847 | type: integer 848 | required: true 849 | description: Numeric ID of the account 850 | jobId: 851 | in: path 852 | name: jobId 853 | schema: 854 | type: integer 855 | required: true 856 | description: Numeric ID of the job 857 | includeRelatedRun: 858 | in: query 859 | name: include_related 860 | schema: 861 | type: string 862 | example: '["run_steps", "job"]' 863 | description: | 864 | List of related fields to pull with the run. Valid values are 865 | "trigger", "job", "debug_logs", and "run_steps". If "debug_logs" 866 | is not provided in a request, then the included debug logs will 867 | be truncated to the last 1,000 lines of the debug log output file. 868 | includeRelated: 869 | in: query 870 | name: include_related 871 | schema: 872 | type: string 873 | example: '["trigger", "job"]' 874 | description: | 875 | List of related fields to pull with the run. Valid values are 876 | "trigger", "job", and "debug_logs". If "debug_logs" is not provided 877 | in a request, then the included debug logs will be truncated to the last 878 | 1,000 lines of the debug log output file. 
879 | schemas: 880 | Account: 881 | type: object 882 | properties: 883 | id: 884 | type: "integer" 885 | description: A unique identifier for dbt Cloud accounts 886 | example: 1 887 | name: 888 | type: "string" 889 | description: The name of the dbt Cloud account 890 | plan: 891 | anyOf: 892 | - enum: ["developer", "team", "enterprise"] 893 | - type: "string" 894 | example: team 895 | description: The billing tier for the account 896 | pending_cancel: 897 | type: "boolean" 898 | example: False 899 | description: True if the account is pending cancellation 900 | state: 901 | $ref: '#/components/schemas/State' 902 | developer_seats: 903 | type: "integer" 904 | example: 5 905 | description: The number of Developer Licenses assigned to the account 906 | read_only_seats: 907 | type: "integer" 908 | example: 50 909 | description: The number of Read Only Licenses assigned to the account 910 | run_slots: 911 | type: "integer" 912 | example: 5 913 | description: The number of Run Slots assigned to the account 914 | created_at: 915 | type: "string" 916 | format: "date-time" 917 | updated_at: 918 | type: "string" 919 | format: "date-time" 920 | 921 | User: 922 | type: object 923 | properties: 924 | id: 925 | type: integer 926 | description: A unique identifier for a user 927 | example: 100 928 | state: 929 | $ref: '#/components/schemas/State' 930 | name: 931 | type: string 932 | description: The user's name 933 | example: John Doe 934 | lock_reason: 935 | type: string 936 | nullable: true 937 | description: The reason an account was locked 938 | unlock_if_subscription_renewed: 939 | type: boolean 940 | description: If set, an admin will be able to unlock it by renewing its subscription 941 | plan: 942 | type: string 943 | enum: [free, trial, enterprise, developer, team, cancelled] 944 | description: The user's plan type 945 | pending_cancel: 946 | type: boolean 947 | run_slots: 948 | type: integer 949 | developer_seats: 950 | type: integer 951 | read_only_seats: 952 | type: 
integer 953 | queue_limit: 954 | type: integer 955 | pod_memory_request_mebibytes: 956 | type: integer 957 | description: | 958 | The amount of memory (in MiB) to request for scheduled runs and 959 | develop pods on this account. 960 | run_duration_limit_seconds: 961 | type: integer 962 | description: | 963 | The maximum duration a run for this account is permitted to execute 964 | before it is terminated 965 | enterprise_authentication_method: 966 | nullable: true 967 | type: string 968 | enum: [none, okta, azure_ad, gsuite] 969 | enterprise_login_slug: 970 | type: string 971 | nullable: true 972 | enterprise_unique_identifier: 973 | type: string 974 | nullable: true 975 | billing_email_address: 976 | type: string 977 | nullable: true 978 | locked: 979 | type: boolean 980 | unlocked_at: 981 | type: string 982 | format: date-time 983 | created_at: 984 | type: string 985 | format: date-time 986 | updated_at: 987 | type: string 988 | format: date-time 989 | starter_repo_url: 990 | type: string 991 | nullable: true 992 | description: The account will use this to initialize projects if defined 993 | sso_reauth: 994 | type: boolean 995 | description: | 996 | If set and the account has configured SSO, users will be forced to 997 | re-authenticate with their identity provider periodically 998 | git_auth_level: 999 | type: string 1000 | enum: [personal, team] 1001 | nullable: true 1002 | description: Indicates the git provider authentication level for this user 1003 | identifier: 1004 | type: string 1005 | description: A globally unique identifier 1006 | example: act_0ujtsYcgvSTl8PAuAdqWYSMnLOv 1007 | docs_job_id: 1008 | deprecated: true 1009 | freshness_job_id: 1010 | deprecated: true 1011 | docs_job: 1012 | deprecated: true 1013 | freshness_job: 1014 | deprecated: true 1015 | enterprise_login_url: 1016 | type: string 1017 | nullable: true 1018 | description: The enterprise login URL, if available 1019 | permissions: 1020 | $ref: '#/components/schemas/UserLicense' 1021 | 
State: 1022 | type: integer 1023 | description: 1 = Active, 2 = Deleted 1024 | Project: 1025 | type: object 1026 | properties: 1027 | id: 1028 | type: integer 1029 | description: A unique identifier for a project 1030 | example: 100 1031 | account_id: 1032 | type: integer 1033 | example: 1 1034 | connection: 1035 | $ref: '#/components/schemas/Connection' 1036 | connection_id: 1037 | type: integer 1038 | example: 5000 1039 | dbt_project_subdirectory: 1040 | type: string 1041 | nullable: true 1042 | description: Optional. The path in the attached repository where a dbt project can be found 1043 | example: analytics/dbt-models 1044 | name: 1045 | type: string 1046 | description: A name for the project 1047 | example: Analytics 1048 | repository: 1049 | $ref: '#/components/schemas/Repository' 1050 | repository_id: 1051 | type: integer 1052 | example: 6000 1053 | state: 1054 | $ref: '#/components/schemas/State' 1055 | created_at: 1056 | type: "string" 1057 | format: "date-time" 1058 | updated_at: 1059 | type: "string" 1060 | format: "date-time" 1061 | 1062 | BigqueryCredential: 1063 | type: "object" 1064 | properties: 1065 | id: 1066 | type: integer 1067 | description: "Leave blank when creating a BigqueryCredential object" 1068 | account_id: 1069 | type: integer 1070 | description: "The account id to create the BigqueryCredential in" 1071 | type: 1072 | type: string 1073 | description: "The database type (for BigqueryCredentials, use \"bigquery\")" 1074 | state: 1075 | type: integer 1076 | description: "The state of the BigqueryCredential (1 = present, 2 = deleted)" 1077 | schema: 1078 | type: string 1079 | description: "The schema (dataset) for this BigqueryCredential object" 1080 | Connection: 1081 | type: object 1082 | properties: 1083 | id: 1084 | type: integer 1085 | description: The numeric ID for the connection 1086 | account_id: 1087 | type: integer 1088 | description: The numeric ID for the associated account 1089 | project_id: 1090 | type: integer 1091 | 
description: The numeric ID for the associated project 1092 | name: 1093 | type: string 1094 | type: 1095 | type: string 1096 | enum: 1097 | - postgres 1098 | - redshift 1099 | - snowflake 1100 | - bigquery 1101 | - adapter 1102 | state: 1103 | $ref: '#/components/schemas/State' 1104 | created_by_id: 1105 | nullable: true 1106 | type: integer 1107 | description: The ID of the user who created the connection 1108 | created_by_service_token_id: 1109 | nullable: true 1110 | type: integer 1111 | description: The ID of the service token used to create the connection 1112 | created_at: 1113 | type: string 1114 | format: date-time 1115 | updated_at: 1116 | type: string 1117 | format: date-time 1118 | details: 1119 | oneOf: 1120 | - $ref: '#/components/schemas/BigQueryConnectionDetails' 1121 | - $ref: '#/components/schemas/RedshiftConnectionDetails' 1122 | - $ref: '#/components/schemas/PostgresConnectionDetails' 1123 | - $ref: '#/components/schemas/SnowflakeConnectionDetails' 1124 | 1125 | SnowflakeConnectionDetails: 1126 | type: object 1127 | properties: 1128 | account: 1129 | type: string 1130 | description: The Snowflake account id 1131 | database: 1132 | type: string 1133 | warehouse: 1134 | type: string 1135 | allow_sso: 1136 | type: boolean 1137 | client_session_keep_alive: 1138 | type: boolean 1139 | 1140 | BigQueryConnectionDetails: 1141 | type: object 1142 | properties: 1143 | project_id: 1144 | type: string 1145 | description: Google BigQuery project ID 1146 | timeout_seconds: 1147 | type: integer 1148 | private_key_id: 1149 | type: string 1150 | private_key: 1151 | type: string 1152 | client_email: 1153 | type: string 1154 | client_id: 1155 | type: string 1156 | auth_uri: 1157 | type: string 1158 | token_uri: 1159 | type: string 1160 | auth_provider_x509_cert_url: 1161 | type: string 1162 | client_x509_cert_url: 1163 | type: string 1164 | 1165 | RedshiftConnectionDetails: 1166 | type: object 1167 | properties: 1168 | hostname: 1169 | type: string 1170 | 
description: The hostname of the Redshift instance 1171 | example: "my-redshift.us-east-2.redshift.amazonaws.com" 1172 | dbname: 1173 | type: string 1174 | description: The database name within Redshift 1175 | port: 1176 | type: integer 1177 | description: The port to connect to the Redshift database 1178 | tunnel_enabled: 1179 | type: boolean 1180 | 1181 | PostgresConnectionDetails: 1182 | type: object 1183 | properties: 1184 | hostname: 1185 | type: string 1186 | description: The hostname of the Postgres instance 1187 | dbname: 1188 | type: string 1189 | description: The database name within Postgres 1190 | port: 1191 | type: integer 1192 | description: The port to connect to the Postgres database 1193 | tunnel_enabled: 1194 | type: boolean 1195 | 1196 | Environment: 1197 | type: object 1198 | properties: 1199 | id: 1200 | type: "integer" 1201 | description: A unique identifier for an environment 1202 | example: 10 1203 | account_id: 1204 | type: "integer" 1205 | example: 1 1206 | deploy_key_id: 1207 | type: "integer" 1208 | created_by_id: 1209 | type: "integer" 1210 | repository_id: 1211 | type: "integer" 1212 | name: 1213 | type: "string" 1214 | description: A name for the environment 1215 | dbt_version: 1216 | type: "string" 1217 | description: The default dbt version for jobs in this environment 1218 | example: 0.17.0 1219 | use_custom_branch: 1220 | type: "boolean" 1221 | example: True 1222 | description: If set, use the custom_branch field when cloning and running jobs in this environment 1223 | custom_branch: 1224 | type: "string" 1225 | example: develop 1226 | supports_docs: 1227 | type: "boolean" 1228 | description: dbt Cloud-generated / read only field 1229 | state: 1230 | $ref: '#/components/schemas/State' 1231 | Job: 1232 | type: "object" 1233 | required: 1234 | - id 1235 | - account_id 1236 | - project_id 1237 | - environment_id 1238 | - dbt_version 1239 | - name 1240 | - execute_steps 1241 | - state 1242 | - triggers 1243 | - settings 1244 | - 
schedule 1245 | properties: 1246 | id: 1247 | type: integer 1248 | nullable: true 1249 | description: Must be `null` when creating a new Job 1250 | account_id: 1251 | type: integer 1252 | example: 1 1253 | project_id: 1254 | type: integer 1255 | example: 100 1256 | environment_id: 1257 | type: integer 1258 | example: 10 1259 | name: 1260 | type: string 1261 | description: A name for the job 1262 | example: Nightly run 1263 | dbt_version: 1264 | type: string 1265 | nullable: true 1266 | description: Overrides the dbt_version specified on the attached Environment if provided 1267 | example: 0.17.1 1268 | triggers: 1269 | type: object 1270 | required: 1271 | - github_webhook 1272 | - schedule 1273 | properties: 1274 | github_webhook: 1275 | type: boolean 1276 | git_provider_webhook: 1277 | type: boolean 1278 | schedule: 1279 | type: boolean 1280 | custom_branch_only: 1281 | type: boolean 1282 | execute_steps: 1283 | type: array 1284 | description: "A list of commands that the job will run" 1285 | example: ['dbt run', 'dbt test', 'dbt source snapshot-freshness'] 1286 | items: 1287 | type: string 1288 | settings: 1289 | type: object 1290 | required: 1291 | - threads 1292 | - target_name 1293 | properties: 1294 | threads: 1295 | type: integer 1296 | example: 4 1297 | description: The maximum number of models to run in parallel in a single dbt run 1298 | target_name: 1299 | example: prod 1300 | description: Informational field that can be consumed in dbt project code with `{{ target.name }}` 1301 | type: string 1302 | state: 1303 | $ref: '#/components/schemas/State' 1304 | generate_docs: 1305 | type: "boolean" 1306 | example: True 1307 | description: When true, run a `dbt docs generate` step at the end of runs triggered from this job 1308 | schedule: 1309 | type: object 1310 | required: 1311 | - date 1312 | - time 1313 | properties: 1314 | cron: 1315 | type: string 1316 | description: Cron-syntax schedule for the job 1317 | example: "0 7 * * 1" 1318 | date: 1319 | type: 
object 1320 | required: 1321 | - type 1322 | properties: 1323 | type: 1324 | type: string 1325 | enum: ["every_day", "days_of_week", "custom_cron"] 1326 | days: 1327 | type: array 1328 | nullable: true 1329 | items: 1330 | type: integer 1331 | description: The numeric days of the week, required with days_of_week 1332 | cron: 1333 | type: string 1334 | nullable: true 1335 | description: The custom schedule in crontab format, required with custom_cron 1336 | time: 1337 | type: object 1338 | required: 1339 | - type 1340 | properties: 1341 | type: 1342 | type: string 1343 | enum: ["every_hour", "at_exact_hours"] 1344 | interval: 1345 | type: integer 1346 | nullable: true 1347 | description: The number of hours between runs, required with every_hour 1348 | hours: 1349 | type: array 1350 | nullable: true 1351 | items: 1352 | type: integer 1353 | description: The specific hours to run in UTC, required with at_exact_hours 1354 | 1355 | 1356 | Repository: 1357 | type: object 1358 | properties: 1359 | id: 1360 | type: "integer" 1361 | description: A unique identifier for the Repository 1362 | example: 200 1363 | account_id: 1364 | type: "integer" 1365 | example: 1 1366 | remote_url: 1367 | type: "string" 1368 | description: The git clone URL for the repository 1369 | example: "git@github.com:fishtown-analytics/jaffle_shop.git" 1370 | remote_backend: 1371 | type: "string" 1372 | git_clone_strategy: 1373 | type: "string" 1374 | enum: 1375 | - azure_active_directory_app 1376 | - deploy_key 1377 | - deploy_token 1378 | - github_app 1379 | - git_token 1380 | deploy_key_id: 1381 | type: "integer" 1382 | github_installation_id: 1383 | type: "integer" 1384 | state: 1385 | $ref: '#/components/schemas/State' 1386 | created_at: 1387 | type: "string" 1388 | format: "date-time" 1389 | updated_at: 1390 | type: "string" 1391 | format: "date-time" 1392 | Step: 1393 | type: object 1394 | properties: 1395 | id: 1396 | type: "integer" 1397 | description: Unique identifier for a step 1398 | 
run_id: 1399 | type: "integer" 1400 | description: Unique identifier for a run 1401 | account_id: 1402 | type: "integer" 1403 | description: Unique identifier for an account 1404 | logs: 1405 | type: "string" 1406 | nullable: true 1407 | description: High level logs for the given run step 1408 | debug_logs: 1409 | type: "string" 1410 | nullable: true 1411 | description: The full debug logs, if requested 1412 | log_location: 1413 | type: "string" 1414 | enum: ["legacy", "db", "s3", "empty"] 1415 | log_path: 1416 | type: "string" 1417 | nullable: true 1418 | description: The path to the logs, if available 1419 | debug_log_path: 1420 | type: "string" 1421 | nullable: true 1422 | description: The path to the debug logs, if available 1423 | log_archive_type: 1424 | type: "string" 1425 | enum: ["db_flushed", "scribe"] 1426 | truncated_debug_logs: 1427 | type: "string" 1428 | nullable: true 1429 | description: A subset of the debug logs 1430 | created_at: 1431 | type: "string" 1432 | format: "date-time" 1433 | description: When the step was originally created 1434 | updated_at: 1435 | type: "string" 1436 | format: "date-time" 1437 | started_at: 1438 | type: "string" 1439 | format: "date-time" 1440 | description: When processing of the step began 1441 | finished_at: 1442 | type: "string" 1443 | format: "date-time" 1444 | description: When the step completed 1445 | status_color: 1446 | type: "string" 1447 | description: A color code, in hex format to display status 1448 | example: "#55973a" 1449 | status_humanized: 1450 | type: "string" 1451 | enum: 1452 | - "Queued" 1453 | - "Starting" 1454 | - "Running" 1455 | - "Success" 1456 | - "Error" 1457 | - "Cancelled" 1458 | duration: 1459 | type: "string" 1460 | nullable: true 1461 | description: The time it took to run the given step 1462 | example: "00:00:23" 1463 | duration_humanized: 1464 | type: "string" 1465 | nullable: true 1466 | description: A human-readable version of the step duration 1467 | example: "23 seconds" 1468 
| Run: 1469 | type: object 1470 | properties: 1471 | id: 1472 | type: "integer" 1473 | description: Unique identifier for a run 1474 | example: 10000 1475 | trigger_id: 1476 | type: "integer" 1477 | account_id: 1478 | type: "integer" 1479 | example: 1 1480 | project_id: 1481 | type: "integer" 1482 | example: 100 1483 | job_definition_id: 1484 | type: "integer" 1485 | status: 1486 | type: "integer" 1487 | enum: [1,2,3,10,20,30] 1488 | description: | 1489 | A numeric representation of the job status 1490 | 1: Queued 1491 | 2: Starting 1492 | 3: Running 1493 | 10: Success 1494 | 20: Error 1495 | 30: Cancelled 1496 | git_branch: 1497 | type: "string" 1498 | example: develop 1499 | description: Optional. If provided, check out this branch or tag before running the job. 1500 | git_sha: 1501 | type: "string" 1502 | example: "#abcd123" 1503 | description: Optional. If provided, check out this sha before running the job. 1504 | status_message: 1505 | type: "string" 1506 | nullable: true 1507 | example: Success 1508 | dbt_version: 1509 | type: "string" 1510 | example: 0.17.0 1511 | created_at: 1512 | type: "string" 1513 | format: "date-time" 1514 | description: When the run was initially created (either via the scheduler or via an API request / webhook) 1515 | updated_at: 1516 | type: "string" 1517 | format: "date-time" 1518 | dequeued_at: 1519 | type: "string" 1520 | format: "date-time" 1521 | description: When the run was picked up by a processor node 1522 | started_at: 1523 | type: "string" 1524 | format: "date-time" 1525 | description: When processing of the node actually began 1526 | finished_at: 1527 | type: "string" 1528 | format: "date-time" 1529 | description: When the run completed execution 1530 | last_checked_at: 1531 | type: "string" 1532 | format: "date-time" 1533 | last_heartbeat_at: 1534 | type: "string" 1535 | format: "date-time" 1536 | owner_thread_id: 1537 | type: "string" 1538 | nullable: true 1539 | executed_by_thread_id: 1540 | type: "string" 1541 | 
artifacts_saved: 1542 | type: "boolean" 1543 | artifact_s3_path: 1544 | type: "string" 1545 | has_docs_generated: 1546 | type: "boolean" 1547 | trigger: 1548 | $ref: '#/components/schemas/Trigger' 1549 | job: 1550 | $ref: '#/components/schemas/Job' 1551 | duration: 1552 | type: "string" 1553 | example: "00:00:12" 1554 | queued_duration: 1555 | type: "string" 1556 | example: "00:00:12" 1557 | run_duration: 1558 | type: "string" 1559 | example: "00:00:12" 1560 | duration_humanized: 1561 | type: "string" 1562 | queued_duration_humanized: 1563 | type: "string" 1564 | run_duration_humanized: 1565 | type: "string" 1566 | finished_at_humanized: 1567 | type: "string" 1568 | status_humanized: 1569 | type: "string" 1570 | enum: ["Queued", "Starting", "Running", "Success", "Error", "Cancelled"] 1571 | created_at_humanized: 1572 | type: "string" 1573 | Trigger: 1574 | type: "object" 1575 | properties: 1576 | id: 1577 | type: "integer" 1578 | cause: 1579 | type: "string" 1580 | job_definition_id: 1581 | type: "integer" 1582 | git_branch: 1583 | type: "string" 1584 | git_sha: 1585 | type: "string" 1586 | github_pull_request_id: 1587 | type: "integer" 1588 | schema_override: 1589 | type: "string" 1590 | dbt_version_override: 1591 | description: Optional. Override the version of dbt used to run this job 1592 | example: 0.18.0 1593 | type: string 1594 | threads_override: 1595 | description: Optional. Override the number of threads used to run this job 1596 | example: 8 1597 | type: integer 1598 | target_name_override: 1599 | description: Optional. Override the `target.name` context variable used when running this job 1600 | example: CI 1601 | type: string 1602 | generate_docs_override: 1603 | description: Optional. Override whether or not this job generates docs (true=yes, false=no) 1604 | example: true 1605 | type: boolean 1606 | timeout_seconds_override: 1607 | description: Optional. 
Override the timeout in seconds for this job 1608 | example: 60 1609 | type: integer 1610 | steps_override: 1611 | type: array 1612 | description: Optional. Override the list of steps for this job 1613 | example: ['dbt run', 'dbt test', 'dbt source snapshot-freshness'] 1614 | items: 1615 | type: "string" 1616 | created_at: 1617 | type: "string" 1618 | format: "date-time" 1619 | 1620 | UserLicense: 1621 | type: object 1622 | properties: 1623 | id: 1624 | type: integer 1625 | description: The numeric ID for the License 1626 | license_type: 1627 | type: string 1628 | enum: [developer, read_only] 1629 | user_id: 1630 | type: integer 1631 | description: The associated User ID 1632 | account_id: 1633 | type: integer 1634 | description: The associated Account ID 1635 | state: 1636 | $ref: '#/components/schemas/State' 1637 | groups: 1638 | type: array 1639 | items: 1640 | $ref: '#/components/schemas/Group' 1641 | permission_statements: 1642 | type: array 1643 | items: 1644 | $ref: '#/components/schemas/PermissionStatement' 1645 | 1646 | Group: 1647 | type: object 1648 | properties: 1649 | id: 1650 | type: integer 1651 | description: The numeric ID for the Group 1652 | account_id: 1653 | type: integer 1654 | description: The associated Account ID 1655 | name: 1656 | type: string 1657 | description: The name of the group 1658 | example: "Owner" 1659 | state: 1660 | $ref: '#/components/schemas/State' 1661 | assign_by_default: 1662 | type: boolean 1663 | description: Should the group be assigned by default? 
1664 | group_permissions: 1665 | type: array 1666 | items: 1667 | $ref: '#/components/schemas/GroupPermission' 1668 | 1669 | PermissionStatement: 1670 | type: object 1671 | properties: 1672 | permission: 1673 | type: string 1674 | enum: 1675 | - billing_read 1676 | - billing_write 1677 | - invitations_send 1678 | - invitations_modify 1679 | - invitations_read 1680 | - members_read 1681 | - members_write 1682 | - groups_read 1683 | - groups_write 1684 | - license_read 1685 | - license_allocate 1686 | - projects_read 1687 | - projects_develop 1688 | - projects_write 1689 | - projects_create 1690 | - projects_delete 1691 | - environments_read 1692 | - environments_write 1693 | - develop_access 1694 | - dbt_adapters_read 1695 | - dbt_adapters_write 1696 | - credentials_read 1697 | - credentials_write 1698 | - connections_read 1699 | - connections_write 1700 | - jobs_read 1701 | - jobs_write 1702 | - repositories_read 1703 | - repositories_write 1704 | - runs_trigger 1705 | - runs_write 1706 | - runs_read 1707 | - permissions_write 1708 | - permissions_read 1709 | - account_settings_write 1710 | - account_settings_read 1711 | - auth_provider_write 1712 | - auth_provider_read 1713 | - service_tokens_write 1714 | - service_tokens_read 1715 | - metadata_read 1716 | - webhooks_write 1717 | - custom_environment_variables_read 1718 | - custom_environment_variables_write 1719 | - audit_log_read 1720 | target_resource: 1721 | type: integer 1722 | all_resources: 1723 | type: boolean 1724 | description: Does this apply to all resources? 1725 | 1726 | GroupPermission: 1727 | type: object 1728 | properties: 1729 | id: 1730 | type: integer 1731 | description: The numeric ID for the Group Permission 1732 | account_id: 1733 | type: integer 1734 | description: The associated Account ID 1735 | project_id: 1736 | type: integer 1737 | nullable: true 1738 | description: The associated Project ID 1739 | all_projects: 1740 | type: boolean 1741 | description: Does this apply to all projects? 
1742 | permission_set: 1743 | type: string 1744 | enum: 1745 | - owner 1746 | - member 1747 | - account_admin 1748 | - admin 1749 | - database_admin 1750 | - git_admin 1751 | - team_admin 1752 | - job_admin 1753 | - job_viewer 1754 | - analyst 1755 | - developer 1756 | - stakeholder 1757 | - readonly 1758 | - project_creator 1759 | - account_viewer 1760 | - metadata_only 1761 | - webhooks_only 1762 | permission_level: 1763 | type: integer 1764 | nullable: true 1765 | state: 1766 | $ref: '#/components/schemas/State' 1767 | 1768 | # responses 1769 | AccountsResponse: 1770 | type: "object" 1771 | properties: 1772 | data: 1773 | type: "array" 1774 | items: 1775 | $ref: "#/components/schemas/Account" 1776 | status: 1777 | $ref: "#/components/schemas/Status" 1778 | AccountResponse: 1779 | type: "object" 1780 | properties: 1781 | data: 1782 | $ref: "#/components/schemas/Account" 1783 | status: 1784 | $ref: "#/components/schemas/Status" 1785 | UsersResponse: 1786 | type: "object" 1787 | properties: 1788 | data: 1789 | type: "array" 1790 | items: 1791 | $ref: "#/components/schemas/User" 1792 | status: 1793 | $ref: "#/components/schemas/Status" 1794 | UpdateLicenseResponse: 1795 | type: "object" 1796 | properties: 1797 | data: 1798 | $ref: "#/components/schemas/UserLicense" 1799 | status: 1800 | $ref: "#/components/schemas/Status" 1801 | ProjectsResponse: 1802 | type: "object" 1803 | properties: 1804 | data: 1805 | type: "array" 1806 | items: 1807 | $ref: "#/components/schemas/Project" 1808 | status: 1809 | $ref: "#/components/schemas/Status" 1810 | ProjectResponse: 1811 | type: "object" 1812 | properties: 1813 | data: 1814 | $ref: "#/components/schemas/Project" 1815 | status: 1816 | $ref: "#/components/schemas/Status" 1817 | ConnectionsResponse: 1818 | type: "object" 1819 | properties: 1820 | data: 1821 | type: "array" 1822 | items: 1823 | $ref: "#/components/schemas/Connection" 1824 | status: 1825 | $ref: "#/components/schemas/Status" 1826 | CredentialsResponse: 1827 | 
type: "object" 1828 | properties: 1829 | data: 1830 | type: "array" 1831 | items: 1832 | $ref: "#/components/schemas/BigqueryCredential" 1833 | status: 1834 | $ref: "#/components/schemas/Status" 1835 | EnvironmentsResponse: 1836 | type: "object" 1837 | properties: 1838 | data: 1839 | type: "array" 1840 | items: 1841 | $ref: "#/components/schemas/Environment" 1842 | status: 1843 | $ref: "#/components/schemas/Status" 1844 | EnvironmentResponse: 1845 | type: "object" 1846 | properties: 1847 | data: 1848 | $ref: "#/components/schemas/Environment" 1849 | status: 1850 | $ref: "#/components/schemas/Status" 1851 | JobsResponse: 1852 | type: "object" 1853 | properties: 1854 | data: 1855 | type: "array" 1856 | items: 1857 | $ref: "#/components/schemas/Job" 1858 | status: 1859 | $ref: "#/components/schemas/Status" 1860 | JobResponse: 1861 | type: "object" 1862 | properties: 1863 | data: 1864 | $ref: "#/components/schemas/Job" 1865 | status: 1866 | $ref: "#/components/schemas/Status" 1867 | RepositoriesResponse: 1868 | type: "object" 1869 | properties: 1870 | data: 1871 | type: "array" 1872 | items: 1873 | $ref: "#/components/schemas/Repository" 1874 | status: 1875 | $ref: "#/components/schemas/Status" 1876 | RepositoryResponse: 1877 | type: "object" 1878 | properties: 1879 | data: 1880 | $ref: "#/components/schemas/Repository" 1881 | status: 1882 | $ref: "#/components/schemas/Status" 1883 | RunsResponse: 1884 | type: "object" 1885 | properties: 1886 | data: 1887 | type: "array" 1888 | items: 1889 | $ref: "#/components/schemas/Run" 1890 | status: 1891 | $ref: "#/components/schemas/Status" 1892 | RunResponse: 1893 | type: "object" 1894 | properties: 1895 | data: 1896 | $ref: "#/components/schemas/Run" 1897 | status: 1898 | $ref: "#/components/schemas/Status" 1899 | StepResponse: 1900 | type: "object" 1901 | properties: 1902 | data: 1903 | $ref: "#/components/schemas/Step" 1904 | status: 1905 | $ref: "#/components/schemas/Status" 1906 | ErrorResponse: 1907 | type: "object" 1908 
| properties: 1909 | status: 1910 | $ref: "#/components/schemas/Status" 1911 | Status: 1912 | type: "object" 1913 | properties: 1914 | code: 1915 | type: "integer" 1916 | description: "Same as the HTTP status code returned." 1917 | example: 200 1918 | is_success: 1919 | type: "boolean" 1920 | description: "Whether or not the request succeeded." 1921 | user_message: 1922 | type: "string" 1923 | description: "End-user-friendly description of the response." 1924 | developer_message: 1925 | type: "string" 1926 | description: "Technical description of the response." 1927 | 1928 | securitySchemes: 1929 | TokenAuth: 1930 | type: http 1931 | scheme: bearer 1932 | bearerFormat: "Bearer " 1933 | 1934 | security: 1935 | - TokenAuth: [] 1936 | -------------------------------------------------------------------------------- /tap_dbt/streams.py: -------------------------------------------------------------------------------- 1 | """Stream class for tap-dbt.""" 2 | 3 | from __future__ import annotations 4 | 5 | import datetime 6 | import sys 7 | import typing as t 8 | 9 | from singer_sdk.pagination import BaseOffsetPaginator 10 | 11 | from tap_dbt.client import DBTStream 12 | 13 | if sys.version_info < (3, 11): 14 | from backports.datetime_fromisoformat import MonkeyPatch 15 | 16 | MonkeyPatch.patch_fromisoformat() 17 | 18 | 19 | class AccountBasedStream(DBTStream): 20 | """A stream that requires an account ID.""" 21 | 22 | @property 23 | def partitions(self) -> list[dict]: 24 | """Return a list of partition key dicts (if applicable), otherwise None.""" 25 | if "{account_id}" in self.path: 26 | return [ 27 | {"account_id": account_id} 28 | for account_id in t.cast("list", self.config["account_ids"]) 29 | ] 30 | 31 | errmsg = ( 32 | f"Could not detect partition type for dbt stream " 33 | f"'{self.name}' ({self.path}). " 34 | "Expected a URL path containing '{account_id}'. 
" 35 | ) 36 | raise ValueError(errmsg) 37 | 38 | def get_new_paginator(self) -> BaseOffsetPaginator: 39 | """Return a new paginator instance for this stream.""" 40 | page_size = self.config["page_size"] 41 | 42 | self.logger.debug( 43 | "Using page size of %s for the limit URL parameter", 44 | page_size, 45 | ) 46 | 47 | return BaseOffsetPaginator(start_value=0, page_size=page_size) 48 | 49 | def get_url_params( 50 | self, 51 | context: dict, 52 | next_page_token: int, 53 | ) -> dict: 54 | """Return offset as the next page token.""" 55 | params = {} 56 | _ = context 57 | # TODO(edgarrmondragon): Get page size from the pagination object when 58 | # it's available in this scope 59 | # https://github.com/meltano/sdk/issues/1606) 60 | params["limit"] = self.config["page_size"] 61 | 62 | # Next page token is an offset 63 | if next_page_token: 64 | params["offset"] = next_page_token 65 | 66 | self.logger.debug("context=%s", context) 67 | self.logger.debug("params=%s", params) 68 | 69 | return params 70 | 71 | 72 | class AccountBasedIncrementalStream(AccountBasedStream): 73 | """Account stream that can be synced incrementally by a datetime field. 74 | 75 | Requires a reverse sorted response such that syncing stops once the 76 | replication_key value is less than the bookmark 77 | 78 | """ 79 | 80 | def get_url_params( 81 | self, 82 | context: dict, 83 | next_page_token: int, 84 | ) -> dict: 85 | """Reverse-sort the list by id if performing INCREMENTAL sync.""" 86 | params = super().get_url_params(context, next_page_token) 87 | 88 | if self.get_starting_timestamp(context): 89 | # Precede replication key with minus to reverse sort 90 | params["order_by"] = f"-{self.replication_key}" 91 | 92 | return params 93 | 94 | def get_records(self, context: dict | None) -> t.Iterable[dict[str, t.Any]]: 95 | """Return a generator of record-type dictionary objects. 96 | 97 | Each record emitted should be a dictionary of property names to their values. 
98 | 99 | Args: 100 | context: Stream partition or context dictionary. 101 | 102 | Yields: 103 | One item per (possibly processed) record in the API. 104 | """ 105 | starting_replication_key_value = self.get_starting_timestamp(context) 106 | 107 | for record in self.request_records(context): 108 | transformed_record = self.post_process(record, context) 109 | if transformed_record is None: 110 | # Record filtered out during post_process() 111 | continue 112 | 113 | if ( 114 | starting_replication_key_value is not None 115 | and record[self.replication_key] is not None 116 | ): 117 | record_last_received_datetime = datetime.datetime.fromisoformat( 118 | record[self.replication_key], 119 | ) 120 | 121 | if record_last_received_datetime < starting_replication_key_value: 122 | self.logger.info( 123 | "Breaking after hitting a record with replication key %s < %s", 124 | record_last_received_datetime, 125 | starting_replication_key_value, 126 | ) 127 | break 128 | 129 | yield transformed_record 130 | 131 | 132 | class AccountsStream(DBTStream): 133 | """A stream for the accounts endpoint.""" 134 | 135 | name = "accounts" 136 | path = "/accounts" 137 | openapi_ref = "Account" 138 | 139 | 140 | class ConnectionsStream(AccountBasedStream): 141 | """A stream for the projects endpoint.""" 142 | 143 | name = "connections" 144 | path = "/accounts/{account_id}/connections" 145 | openapi_ref = "Connection" 146 | selected_by_default = False 147 | 148 | 149 | class EnvironmentsStream(AccountBasedStream): 150 | """A stream for the projects endpoint.""" 151 | 152 | name = "environments" 153 | path = "/accounts/{account_id}/environments" 154 | openapi_ref = "Environment" 155 | selected_by_default = False 156 | 157 | 158 | class JobsStream(AccountBasedStream): 159 | """A stream for the jobs endpoint.""" 160 | 161 | name = "jobs" 162 | path = "/accounts/{account_id}/jobs" 163 | openapi_ref = "Job" 164 | 165 | 166 | class ProjectsStream(AccountBasedStream): 167 | """A stream for the 
projects endpoint.""" 168 | 169 | name = "projects" 170 | path = "/accounts/{account_id}/projects" 171 | openapi_ref = "Project" 172 | 173 | 174 | class RepositoriesStream(AccountBasedStream): 175 | """A stream for the repositories endpoint.""" 176 | 177 | name = "repositories" 178 | path = "/accounts/{account_id}/repositories" 179 | openapi_ref = "Repository" 180 | selected_by_default = False 181 | 182 | 183 | class RunsStream(AccountBasedIncrementalStream): 184 | """A stream for the runs endpoint.""" 185 | 186 | name = "runs" 187 | path = "/accounts/{account_id}/runs" 188 | openapi_ref = "Run" 189 | replication_key = "finished_at" 190 | 191 | 192 | class UsersStream(AccountBasedStream): 193 | """A stream for the users endpoint.""" 194 | 195 | name = "users" 196 | path = "/accounts/{account_id}/users" 197 | openapi_ref = "User" 198 | selected_by_default = False 199 | -------------------------------------------------------------------------------- /tap_dbt/tap.py: -------------------------------------------------------------------------------- 1 | """dbt tap class.""" 2 | 3 | from __future__ import annotations 4 | 5 | from singer_sdk import Stream, Tap 6 | from singer_sdk.helpers._classproperty import classproperty 7 | from singer_sdk.typing import ( 8 | ArrayType, 9 | IntegerType, 10 | PropertiesList, 11 | Property, 12 | StringType, 13 | ) 14 | 15 | from tap_dbt.streams import ( 16 | AccountsStream, 17 | ConnectionsStream, 18 | EnvironmentsStream, 19 | JobsStream, 20 | ProjectsStream, 21 | RepositoriesStream, 22 | RunsStream, 23 | UsersStream, 24 | ) 25 | 26 | TAP_NAME = "tap-dbt" 27 | STREAM_TYPES = [ 28 | AccountsStream, 29 | ConnectionsStream, 30 | EnvironmentsStream, 31 | JobsStream, 32 | ProjectsStream, 33 | RepositoriesStream, 34 | RunsStream, 35 | UsersStream, 36 | ] 37 | 38 | 39 | class TapDBT(Tap): 40 | """Singer tap for the dbt Cloud API.""" 41 | 42 | name = TAP_NAME 43 | 44 | @classproperty 45 | def config_jsonschema(cls) -> dict: # noqa: N805 46 | 
"""Return JSON schema definition for the config. 47 | 48 | Returns: 49 | A JSON schema dictionary. 50 | """ 51 | return PropertiesList( 52 | Property( 53 | "api_key", 54 | StringType, 55 | description="API key for the dbt Cloud API", 56 | required=True, 57 | ), 58 | Property( 59 | "account_ids", 60 | ArrayType(StringType), 61 | description="dbt Cloud account IDs", 62 | required=True, 63 | ), 64 | Property( 65 | "base_url", 66 | StringType, 67 | description="Base URL for the dbt Cloud API", 68 | default="https://cloud.getdbt.com/api/v2", 69 | ), 70 | Property( 71 | "user_agent", 72 | StringType, 73 | default=f"{cls.name}/{cls.plugin_version} {cls.__doc__}", 74 | description="User-Agent to make requests with", 75 | ), 76 | Property( 77 | "page_size", 78 | IntegerType, 79 | default=5000, 80 | description="Page size to use in limit= url parameter", 81 | required=True, 82 | ), 83 | ).to_dict() 84 | 85 | def discover_streams(self) -> list[Stream]: 86 | """Return a list of discovered streams.""" 87 | return [stream_class(tap=self) for stream_class in STREAM_TYPES] 88 | 89 | 90 | cli = TapDBT.cli 91 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for tap-dbt.""" 2 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Fixtures and plugins.""" 2 | 3 | from __future__ import annotations 4 | -------------------------------------------------------------------------------- /tests/resources/responses/account.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": { 3 | "code": 200, 4 | "is_success": true, 5 | "user_message": "Success!", 6 | "developer_message": "" 7 | }, 8 | "data": { 9 | "docs_job_id": null, 10 | "freshness_job_id": null, 11 | 
"lock_reason": null, 12 | "unlock_if_subscription_renewed": false, 13 | "read_only_seats": 3, 14 | "id": 1001, 15 | "name": "Corp", 16 | "state": 42, 17 | "plan": "enterprise", 18 | "pending_cancel": false, 19 | "run_slots": 3, 20 | "developer_seats": 3, 21 | "queue_limit": 50, 22 | "pod_memory_request_mebibytes": 600, 23 | "run_duration_limit_seconds": 900, 24 | "enterprise_authentication_method": null, 25 | "enterprise_login_slug": null, 26 | "enterprise_unique_identifier": null, 27 | "billing_email_address": null, 28 | "locked": false, 29 | "develop_file_system": true, 30 | "unlocked_at": null, 31 | "created_at": "2022-01-01T00:42:00.000000+00:00", 32 | "updated_at": "2022-01-31T23:59:42.000000+00:00", 33 | "starter_repo_url": null, 34 | "sso_reauth": false, 35 | "force_sso": true, 36 | "git_auth_level": "personal", 37 | "identifier": "act_xyzwq", 38 | "docs_job": null, 39 | "freshness_job": null, 40 | "enterprise_login_url": "https://cloud.corp.getdbt.com/enterprise-login/None/" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /tests/resources/responses/jobs.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": { 3 | "code": 200, 4 | "is_success": true, 5 | "user_message": "Success!", 6 | "developer_message": "" 7 | }, 8 | "data": [ 9 | { 10 | "execution": { 11 | "timeout_seconds": 0 12 | }, 13 | "generate_docs": true, 14 | "run_generate_sources": false, 15 | "id": 1, 16 | "account_id": 1001, 17 | "project_id": 1, 18 | "environment_id": 1, 19 | "name": "Yay dbt Job!", 20 | "dbt_version": null, 21 | "created_at": "2022-08-15T15:15:15.000000+00:00", 22 | "updated_at": "2022-08-15T15:15:16.000001+00:00", 23 | "execute_steps": [ 24 | "dbt build" 25 | ], 26 | "state": 1, 27 | "deactivated": false, 28 | "run_failure_count": 0, 29 | "deferring_job_definition_id": null, 30 | "lifecycle_webhooks": false, 31 | "lifecycle_webhooks_url": null, 32 | "triggers": { 33 | 
"github_webhook": false, 34 | "git_provider_webhook": false, 35 | "custom_branch_only": false, 36 | "schedule": false 37 | }, 38 | "settings": { 39 | "threads": 4, 40 | "target_name": "default" 41 | }, 42 | "schedule": { 43 | "cron": "0 * * * *", 44 | "date": { 45 | "type": "every_day" 46 | }, 47 | "time": { 48 | "type": "every_hour", 49 | "interval": 1 50 | } 51 | }, 52 | "is_deferrable": false, 53 | "generate_sources": false, 54 | "cron_humanized": "Every hour", 55 | "next_run": null, 56 | "next_run_humanized": null 57 | }, 58 | { 59 | "execution": { 60 | "timeout_seconds": 0 61 | }, 62 | "generate_docs": false, 63 | "run_generate_sources": false, 64 | "id": 2, 65 | "account_id": 1001, 66 | "project_id": 1, 67 | "environment_id": 1, 68 | "name": "My second dbt Job", 69 | "dbt_version": null, 70 | "created_at": "2022-07-31T11:01:30.000000+00:00", 71 | "updated_at": "2022-08-31T12:01:30.000000+00:00", 72 | "execute_steps": [ 73 | "dbt run" 74 | ], 75 | "state": 1, 76 | "deactivated": false, 77 | "run_failure_count": 0, 78 | "deferring_job_definition_id": null, 79 | "lifecycle_webhooks": false, 80 | "lifecycle_webhooks_url": null, 81 | "triggers": { 82 | "github_webhook": true, 83 | "git_provider_webhook": false, 84 | "custom_branch_only": false, 85 | "schedule": false 86 | }, 87 | "settings": { 88 | "threads": 4, 89 | "target_name": "default" 90 | }, 91 | "schedule": { 92 | "cron": "0 * * * *", 93 | "date": { 94 | "type": "every_day" 95 | }, 96 | "time": { 97 | "type": "every_hour", 98 | "interval": 1 99 | } 100 | }, 101 | "is_deferrable": false, 102 | "generate_sources": false, 103 | "cron_humanized": "Every hour", 104 | "next_run": null, 105 | "next_run_humanized": null 106 | } 107 | ], 108 | "extra": { 109 | "filters": { 110 | "limit": 100, 111 | "offset": 0, 112 | "account_id": 1001 113 | }, 114 | "order_by": "id", 115 | "pagination": { 116 | "count": 1, 117 | "total_count": 2 118 | } 119 | } 120 | } 121 | 
-------------------------------------------------------------------------------- /tests/resources/responses/projects.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": { 3 | "code": 200, 4 | "is_success": true, 5 | "user_message": "Success!", 6 | "developer_message": "" 7 | }, 8 | "data": [ 9 | { 10 | "name": "Sales dbt Project", 11 | "account_id": 1001, 12 | "repository_id": 29, 13 | "connection_id": 1, 14 | "id": 1, 15 | "created_at": "2021-11-02 08:33:57.867705+00:00", 16 | "updated_at": "2021-12-28 11:26:30.042923+00:00", 17 | "skipped_setup": false, 18 | "state": 1, 19 | "dbt_project_subdirectory": null, 20 | "connection": { 21 | "id": 1, 22 | "account_id": 1001, 23 | "project_id": 1, 24 | "name": "DEV Snowflake", 25 | "type": "snowflake", 26 | "created_by_id": 3, 27 | "created_by_service_token_id": null, 28 | "details": { 29 | "account": "xy12345.us-north.azure", 30 | "database": "dwh", 31 | "warehouse": "compute_wh", 32 | "allow_sso": true, 33 | "client_session_keep_alive": false, 34 | "role": "dwh_system" 35 | }, 36 | "state": 1, 37 | "created_at": "2021-11-10 09:09:00.000005+00:00", 38 | "updated_at": "2021-11-10 09:09:00.000005+00:00" 39 | }, 40 | "repository": { 41 | "id": 29, 42 | "account_id": 1001, 43 | "project_id": 1, 44 | "full_name": "corp/sales-dbt-project", 45 | "remote_url": "git://github.com/corp/sales-dbt-project.git", 46 | "remote_backend": "github", 47 | "git_clone_strategy": "github_app", 48 | "deploy_key_id": 29, 49 | "repository_credentials_id": null, 50 | "github_installation_id": 21729016, 51 | "pull_request_url_template": "https://github.com/corp/sales-dbt-project/compare/{{destination}}...{{source}}", 52 | "state": 1, 53 | "created_at": "2021-11-10 09:09:00.000005+00:00", 54 | "updated_at": "2021-11-10 09:09:59.000006+00:00", 55 | "deploy_key": { 56 | "id": 3, 57 | "account_id": 1001, 58 | "state": 1, 59 | "public_key": "ssh-rsa abc...xyz" 60 | }, 61 | "github_repo": 
"corp/sales-dbt-project", 62 | "name": "sales-dbt-project", 63 | "git_provider_id": 2, 64 | "gitlab": null, 65 | "git_provider": null 66 | }, 67 | "group_permissions": [ 68 | { 69 | "account_id": 1001, 70 | "group_id": 3, 71 | "project_id": 2, 72 | "all_projects": false, 73 | "permission_set": "readonly", 74 | "permission_level": null, 75 | "id": 3, 76 | "state": 1, 77 | "created_at": "2021-11-12 09:19:01.000006+00:00", 78 | "updated_at": "2021-11-13 09:19:59.000006+00:00" 79 | } 80 | ], 81 | "docs_job_id": null, 82 | "freshness_job_id": null, 83 | "docs_job": null, 84 | "freshness_job": null 85 | }, 86 | { 87 | "name": "Sales Project", 88 | "account_id": 1001, 89 | "repository_id": 3, 90 | "connection_id": 1, 91 | "id": 2, 92 | "created_at": "2021-11-15 10:09:59.000006+00:00", 93 | "updated_at": "2021-11-16 11:59:59.000006+00:00", 94 | "skipped_setup": false, 95 | "state": 1, 96 | "dbt_project_subdirectory": null, 97 | "connection": { 98 | "id": 2, 99 | "account_id": 1001, 100 | "project_id": 2, 101 | "name": "Cloud DWH", 102 | "type": "snowflake", 103 | "created_by_id": 2, 104 | "created_by_service_token_id": null, 105 | "details": { 106 | "account": "xy12345.snowflakecomputing.com", 107 | "database": "dwh", 108 | "warehouse": "compute_wh", 109 | "allow_sso": true, 110 | "client_session_keep_alive": true, 111 | "role": "dwh_system" 112 | }, 113 | "state": 1, 114 | "created_at": "2021-10-12 13:12:12.000020+00:00", 115 | "updated_at": "2021-10-12 13:12:13.000030+00:00" 116 | }, 117 | "repository": { 118 | "id": 2, 119 | "account_id": 1001, 120 | "project_id": 1, 121 | "full_name": null, 122 | "remote_url": "gh repo clone my-corp/dbt-data-project", 123 | "remote_backend": null, 124 | "git_clone_strategy": "deploy_key", 125 | "deploy_key_id": 3, 126 | "repository_credentials_id": null, 127 | "github_installation_id": null, 128 | "pull_request_url_template": null, 129 | "state": 1, 130 | "created_at": "2021-10-12 12:12:12.000020+00:00", 131 | "updated_at": "2021-10-12 
12:12:12.000021+00:00", 132 | "deploy_key": { 133 | "id": 1, 134 | "account_id": 1001, 135 | "state": 1, 136 | "public_key": "ssh-rsa abc...xyz" 137 | }, 138 | "github_repo": null, 139 | "name": null, 140 | "git_provider_id": 1, 141 | "gitlab": null, 142 | "git_provider": null 143 | }, 144 | "group_permissions": [], 145 | "docs_job_id": null, 146 | "freshness_job_id": null, 147 | "docs_job": null, 148 | "freshness_job": null 149 | } 150 | ], 151 | "extra": { 152 | "filters": { 153 | "account_id": 1001, 154 | "limit": 2, 155 | "offset": 0 156 | }, 157 | "order_by": "id", 158 | "pagination": { 159 | "count": 2, 160 | "total_count": 2 161 | } 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /tests/resources/responses/runs.json: -------------------------------------------------------------------------------- 1 | { 2 | "status": { 3 | "code": 200, 4 | "is_success": true, 5 | "user_message": "Success!", 6 | "developer_message": "" 7 | }, 8 | "data": [ 9 | { 10 | "name": "Sales dbt Project", 11 | "account_id": 1001, 12 | "repository_id": 29, 13 | "connection_id": 1, 14 | "id": 1, 15 | "created_at": "2021-11-02 08:33:57.867705+00:00", 16 | "updated_at": "2021-12-28 11:26:30.042923+00:00", 17 | "skipped_setup": false, 18 | "state": 1, 19 | "dbt_project_subdirectory": null, 20 | "connection": { 21 | "id": 1, 22 | "account_id": 1001, 23 | "project_id": 1, 24 | "name": "DEV Snowflake", 25 | "type": "snowflake", 26 | "created_by_id": 3, 27 | "created_by_service_token_id": null, 28 | "details": { 29 | "account": "xy12345.us-north.azure", 30 | "database": "dwh", 31 | "warehouse": "compute_wh", 32 | "allow_sso": true, 33 | "client_session_keep_alive": false, 34 | "role": "dwh_system" 35 | }, 36 | "state": 1, 37 | "created_at": "2021-11-10 09:09:00.000005+00:00", 38 | "updated_at": "2021-11-10 09:09:00.000005+00:00" 39 | }, 40 | "repository": { 41 | "id": 29, 42 | "account_id": 1001, 43 | "project_id": 1, 44 | "full_name": 
"corp/sales-dbt-project", 45 | "remote_url": "git://github.com/corp/sales-dbt-project.git", 46 | "remote_backend": "github", 47 | "git_clone_strategy": "github_app", 48 | "deploy_key_id": 29, 49 | "repository_credentials_id": null, 50 | "github_installation_id": 21729016, 51 | "pull_request_url_template": "https://github.com/corp/sales-dbt-project/compare/{{destination}}...{{source}}", 52 | "state": 1, 53 | "created_at": "2021-11-10 09:09:00.000005+00:00", 54 | "updated_at": "2021-11-10 09:09:59.000006+00:00", 55 | "deploy_key": { 56 | "id": 3, 57 | "account_id": 1001, 58 | "state": 1, 59 | "public_key": "ssh-rsa abc...xyz" 60 | }, 61 | "github_repo": "corp/sales-dbt-project", 62 | "name": "sales-dbt-project", 63 | "git_provider_id": 2, 64 | "gitlab": null, 65 | "git_provider": null 66 | }, 67 | "group_permissions": [ 68 | { 69 | "account_id": 1001, 70 | "group_id": 3, 71 | "project_id": 2, 72 | "all_projects": false, 73 | "permission_set": "readonly", 74 | "permission_level": null, 75 | "id": 3, 76 | "state": 1, 77 | "created_at": "2021-11-12 09:19:01.000006+00:00", 78 | "updated_at": "2021-11-13 09:19:59.000006+00:00" 79 | } 80 | ], 81 | "docs_job_id": null, 82 | "freshness_job_id": null, 83 | "docs_job": null, 84 | "freshness_job": null 85 | }, 86 | { 87 | "name": "Sales Project", 88 | "account_id": 1001, 89 | "repository_id": 3, 90 | "connection_id": 1, 91 | "id": 2, 92 | "created_at": "2021-11-15 10:09:59.000006+00:00", 93 | "updated_at": "2021-11-16 11:59:59.000006+00:00", 94 | "skipped_setup": false, 95 | "state": 1, 96 | "dbt_project_subdirectory": null, 97 | "connection": { 98 | "id": 2, 99 | "account_id": 1001, 100 | "project_id": 2, 101 | "name": "Cloud DWH", 102 | "type": "snowflake", 103 | "created_by_id": 2, 104 | "created_by_service_token_id": null, 105 | "details": { 106 | "account": "xy12345.snowflakecomputing.com", 107 | "database": "dwh", 108 | "warehouse": "compute_wh", 109 | "allow_sso": true, 110 | "client_session_keep_alive": true, 111 | 
"role": "dwh_system" 112 | }, 113 | "state": 1, 114 | "created_at": "2021-10-12 13:12:12.000020+00:00", 115 | "updated_at": "2021-10-12 13:12:13.000030+00:00" 116 | }, 117 | "repository": { 118 | "id": 2, 119 | "account_id": 1001, 120 | "project_id": 1, 121 | "full_name": null, 122 | "remote_url": "gh repo clone my-corp/dbt-data-project", 123 | "remote_backend": null, 124 | "git_clone_strategy": "deploy_key", 125 | "deploy_key_id": 3, 126 | "repository_credentials_id": null, 127 | "github_installation_id": null, 128 | "pull_request_url_template": null, 129 | "state": 1, 130 | "created_at": "2021-10-12 12:12:12.000020+00:00", 131 | "updated_at": "2021-10-12 12:12:12.000021+00:00", 132 | "deploy_key": { 133 | "id": 1, 134 | "account_id": 1001, 135 | "state": 1, 136 | "public_key": "ssh-rsa abc...xyz" 137 | }, 138 | "github_repo": null, 139 | "name": null, 140 | "git_provider_id": 1, 141 | "gitlab": null, 142 | "git_provider": null 143 | }, 144 | "group_permissions": [], 145 | "docs_job_id": null, 146 | "freshness_job_id": null, 147 | "docs_job": null, 148 | "freshness_job": null 149 | } 150 | ], 151 | "extra": { 152 | "filters": { 153 | "account_id": 1001, 154 | "limit": 2, 155 | "offset": 0 156 | }, 157 | "order_by": "id", 158 | "pagination": { 159 | "count": 2, 160 | "total_count": 2 161 | } 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | """Integration tests.""" 2 | 3 | from __future__ import annotations 4 | 5 | import re 6 | from typing import TYPE_CHECKING, Any 7 | 8 | import pytest 9 | import responses 10 | from singer_sdk.testing import get_standard_tap_tests 11 | 12 | from tap_dbt.tap import TapDBT 13 | 14 | if TYPE_CHECKING: 15 | from faker import Faker 16 | 17 | SAMPLE_CONFIG: dict[str, Any] = { 18 | "api_key": "abc123", 19 | "account_ids": ["1000"], 20 | } 21 | 22 | 23 | def fake_date(faker: 
Faker): 24 | """Generate a fake date for datetime stream values.""" 25 | return faker.date_time().strftime("%Y-%m-%d %H:%M:%S") 26 | 27 | 28 | @pytest.fixture 29 | def accounts_response(faker: Faker): 30 | """Return a sample response for the accounts stream.""" 31 | return { 32 | "status": { 33 | "code": 200, 34 | "is_success": True, 35 | }, 36 | "data": [ 37 | { 38 | "id": 1000, 39 | "name": faker.company(), 40 | }, 41 | ], 42 | "extra": { 43 | "filters": { 44 | "pk__in": [ 45 | 1, 46 | ], 47 | }, 48 | "order_by": None, 49 | "pagination": { 50 | "count": 1, 51 | "total_count": 1, 52 | }, 53 | }, 54 | } 55 | 56 | 57 | @pytest.fixture 58 | def connections_response(faker: Faker): 59 | """Return a sample response for the connections stream.""" 60 | return { 61 | "status": { 62 | "code": 200, 63 | "is_success": True, 64 | }, 65 | "extra": { 66 | "filters": { 67 | "limit": 1, 68 | "offset": 0, 69 | "account_id": 1, 70 | }, 71 | "order_by": "id", 72 | "pagination": { 73 | "count": 1, 74 | "total_count": 300, 75 | }, 76 | }, 77 | "data": [ 78 | { 79 | "created_by_id": 12, 80 | "created_by_service_token_id": None, 81 | "id": 1, 82 | "state": faker.random_element([1, 2]), 83 | "account_id": 1000, 84 | "dbt_project_id": 1, 85 | "name": faker.company(), 86 | "type": faker.bs(), 87 | "account": faker.bs(), 88 | "database": faker.bs(), 89 | "warehouse": faker.bs(), 90 | "role": faker.bs(), 91 | "allow_sso": True, 92 | }, 93 | ], 94 | } 95 | 96 | 97 | @pytest.fixture 98 | def environments_response(faker: Faker): 99 | """Return a sample response for the environments stream.""" 100 | return { 101 | "status": { 102 | "code": 200, 103 | "is_success": True, 104 | }, 105 | "extra": { 106 | "filters": { 107 | "limit": 1, 108 | "offset": 0, 109 | "account_id": 1, 110 | }, 111 | "order_by": "id", 112 | "pagination": { 113 | "count": 1, 114 | "total_count": 300, 115 | }, 116 | }, 117 | "data": [ 118 | { 119 | "id": 1, 120 | "account_id": 1, 121 | "connection_id": 1, 122 | "repository_id": 
8, 123 | "credentials_id": None, 124 | "created_by_id": None, 125 | "name": "dev", 126 | "use_custom_branch": False, 127 | "custom_branch": None, 128 | "dbt_version": "1.3.0-latest", 129 | "raw_dbt_version": "1.3.0-latest", 130 | "supports_docs": False, 131 | "state": faker.random_element([1, 2]), 132 | "updated_at": fake_date(faker), 133 | }, 134 | ], 135 | } 136 | 137 | 138 | @pytest.fixture 139 | def jobs_response(faker: Faker): 140 | """Return a sample response for the jobs stream.""" 141 | return { 142 | "status": { 143 | "code": 200, 144 | "is_success": True, 145 | }, 146 | "extra": { 147 | "filters": { 148 | "limit": 1, 149 | "offset": 0, 150 | "account_id": 1, 151 | }, 152 | "order_by": "id", 153 | "pagination": { 154 | "count": 1, 155 | "total_count": 300, 156 | }, 157 | }, 158 | "data": [ 159 | { 160 | "id": 1000 + i, 161 | "account_id": 1000, 162 | "project_id": 1000 + i % 3, 163 | "environment_id": 1000, 164 | "dbt_version": "1.4.0", 165 | "name": faker.bs(), 166 | "execute_steps": [ 167 | "dbt deps", 168 | "dbt seed", 169 | "dbt run", 170 | ], 171 | "state": faker.random_element([1, 2]), 172 | "triggers": { 173 | "github_webhook": True, 174 | "schedule": False, 175 | }, 176 | "settings": { 177 | "threads": 5, 178 | "target_name": "prod", 179 | }, 180 | "schedule": { 181 | "date": { 182 | "type": faker.random_element( 183 | [ 184 | "every_day", 185 | "days_of_week", 186 | "custom_cron", 187 | ], 188 | ), 189 | }, 190 | "time": { 191 | "type": faker.random_element( 192 | [ 193 | "every_hour", 194 | "at_exact_hours", 195 | ], 196 | ), 197 | }, 198 | }, 199 | } 200 | for i in range(10) 201 | ], 202 | } 203 | 204 | 205 | @pytest.fixture 206 | def projects_response(): 207 | """Return a sample response for the projects stream.""" 208 | return { 209 | "status": { 210 | "code": 200, 211 | "is_success": True, 212 | }, 213 | "data": [ 214 | { 215 | "id": 1000 + i, 216 | "account_id": 1000, 217 | } 218 | for i in range(10) 219 | ], 220 | "extra": { 221 | 
"filters": { 222 | "account_id": 1, 223 | "limit": 1, 224 | "offset": 0, 225 | }, 226 | "order_by": "id", 227 | "pagination": { 228 | "count": 1, 229 | "total_count": 2, 230 | }, 231 | }, 232 | } 233 | 234 | 235 | @pytest.fixture 236 | def repositories_response(faker: Faker): 237 | """Return a sample response for the repositories stream.""" 238 | return { 239 | "status": { 240 | "code": 200, 241 | "is_success": True, 242 | }, 243 | "extra": { 244 | "filters": { 245 | "limit": 1, 246 | "offset": 0, 247 | "account_id": 1, 248 | }, 249 | "order_by": "id", 250 | "pagination": { 251 | "count": 1, 252 | "total_count": 300, 253 | }, 254 | }, 255 | "data": [ 256 | { 257 | "id": 3, 258 | "account_id": 1, 259 | "remote_url": faker.file_path(depth=4), 260 | "remote_backend": "gitlab", 261 | "git_clone_strategy": "deploy_token", 262 | "deploy_key_id": 1, 263 | "github_installation_id": 1, 264 | "pull_request_url_template": faker.url(), 265 | "created_at": fake_date(faker), 266 | "updated_at": fake_date(faker), 267 | "state": faker.random_element([1, 2]), 268 | }, 269 | ], 270 | } 271 | 272 | 273 | @pytest.fixture 274 | def runs_response(faker: Faker): 275 | """Return a sample response for the runs stream.""" 276 | return { 277 | "status": { 278 | "code": 200, 279 | "is_success": True, 280 | }, 281 | "extra": { 282 | "filters": { 283 | "account_id": 1, 284 | "limit": 1, 285 | "offset": 0, 286 | }, 287 | "order_by": "id", 288 | "pagination": { 289 | "count": 1, 290 | "total_count": 500000, 291 | }, 292 | }, 293 | "data": [ 294 | { 295 | "id": 1000 + i, 296 | "trigger_id": 1000 + i, 297 | "account_id": 1000, 298 | "project_id": 1000 + i % 3, 299 | "finished_at": fake_date(faker), 300 | } 301 | for i in range(10) 302 | ], 303 | } 304 | 305 | 306 | @pytest.fixture 307 | def users_response(faker: Faker): 308 | """Return a sample response for the users stream.""" 309 | return { 310 | "status": { 311 | "code": 200, 312 | "is_success": True, 313 | }, 314 | "extra": { 315 | "filters": { 
316 | "limit": 1, 317 | "offset": 0, 318 | "account_id": 1, 319 | }, 320 | "order_by": "id", 321 | "pagination": { 322 | "count": 1, 323 | "total_count": 300, 324 | }, 325 | }, 326 | "data": [ 327 | { 328 | "id": 3, 329 | "first_name": faker.first_name(), 330 | "last_name": faker.last_name(), 331 | "created_at": fake_date(faker), 332 | "last_login": fake_date(faker), 333 | "is_staff": False, 334 | "is_active": True, 335 | "email": faker.email(), 336 | "email_connected": False, 337 | "email_verified": True, 338 | "github_connected": False, 339 | "github_username": None, 340 | "gitlab_connected": True, 341 | "gitlab_username": f"{faker.first_name()}.{faker.last_name()}", 342 | "azure_active_directory_connected": False, 343 | "azure_active_directory_username": None, 344 | "slack_connected": False, 345 | "enterprise_connected": False, 346 | "enterprise_authentication_method": None, 347 | "auth_provider_infos": { 348 | "sso-azure": { 349 | "domain": faker.domain_name(), 350 | "groups": [], 351 | "auth_provider_type": "azure_single_tenant", 352 | }, 353 | }, 354 | "permissions": [ 355 | { 356 | "license_type": "developer", 357 | "id": 3, 358 | "user_id": 3, 359 | "account_id": 1000, 360 | "state": faker.random_element([1, 2]), 361 | "created_at": fake_date(faker), 362 | "updated_at": fake_date(faker), 363 | "groups": [ 364 | { 365 | "account_id": 1, 366 | "name": "Everyone", 367 | "id": 3, 368 | "state": 1, 369 | "assign_by_default": True, 370 | "sso_mapping_groups": [], 371 | "created_at": fake_date(faker), 372 | "updated_at": fake_date(faker), 373 | "group_permissions": [], 374 | }, 375 | { 376 | "account_id": 1000, 377 | "name": faker.bs(), 378 | "id": 13, 379 | "state": 1, 380 | "assign_by_default": False, 381 | "sso_mapping_groups": [ 382 | faker.bs(), 383 | ], 384 | "created_at": fake_date(faker), 385 | "updated_at": fake_date(faker), 386 | "group_permissions": [ 387 | { 388 | "account_id": 1, 389 | "group_id": 13, 390 | "project_id": None, 391 | "all_projects": 
True, 392 | "permission_set": faker.bs(), 393 | "permission_level": None, 394 | "id": 13, 395 | "state": 1, 396 | "created_at": fake_date(faker), 397 | "updated_at": fake_date(faker), 398 | }, 399 | ], 400 | }, 401 | ], 402 | "permission_statements": [ 403 | { 404 | "permission": "custom", 405 | "target_resource": None, 406 | "all_resources": True, 407 | }, 408 | ], 409 | }, 410 | ], 411 | "licenses": { 412 | "1": { 413 | "license_type": faker.bs(), 414 | "id": 3, 415 | "user_id": 3, 416 | "account_id": 1000, 417 | "state": faker.random_element([1, 2]), 418 | "created_at": fake_date(faker), 419 | "updated_at": fake_date(faker), 420 | }, 421 | }, 422 | "gitlab_token_retrieval_failure": False, 423 | "avatar_url": None, 424 | "fullname": faker.name(), 425 | "show_existing_user_email_verification": False, 426 | }, 427 | ], 428 | } 429 | 430 | 431 | @responses.activate 432 | def test_standard_tap_tests( # noqa: PLR0913 433 | accounts_response: dict, 434 | connections_response: dict, 435 | environments_response: dict, 436 | jobs_response: dict, 437 | projects_response: dict, 438 | repositories_response: dict, 439 | runs_response: dict, 440 | users_response: dict, 441 | ): 442 | """Run standard tap tests from the SDK.""" 443 | responses.add_passthru(re.compile("https://raw.githubusercontent.com/\\w+")) 444 | 445 | responses.add( 446 | responses.GET, 447 | "https://cloud.getdbt.com/api/v2/accounts", 448 | json=accounts_response, 449 | status=200, 450 | ) 451 | 452 | responses.add( 453 | responses.GET, 454 | "https://cloud.getdbt.com/api/v2/accounts/1000/connections", 455 | json=connections_response, 456 | status=200, 457 | ) 458 | responses.add( 459 | responses.GET, 460 | "https://cloud.getdbt.com/api/v2/accounts/1000/environments", 461 | json=environments_response, 462 | status=200, 463 | ) 464 | 465 | responses.add( 466 | responses.GET, 467 | "https://cloud.getdbt.com/api/v2/accounts/1000/jobs", 468 | json=jobs_response, 469 | status=200, 470 | ) 471 | 472 | 
responses.add( 473 | responses.GET, 474 | "https://cloud.getdbt.com/api/v2/accounts/1000/projects", 475 | json=projects_response, 476 | status=200, 477 | ) 478 | 479 | responses.add( 480 | responses.GET, 481 | "https://cloud.getdbt.com/api/v2/accounts/1000/repositories", 482 | json=repositories_response, 483 | status=200, 484 | ) 485 | 486 | responses.add( 487 | responses.GET, 488 | "https://cloud.getdbt.com/api/v2/accounts/1000/runs", 489 | json=runs_response, 490 | status=200, 491 | ) 492 | 493 | responses.add( 494 | responses.GET, 495 | "https://cloud.getdbt.com/api/v2/accounts/1000/users", 496 | json=users_response, 497 | status=200, 498 | ) 499 | 500 | tests = get_standard_tap_tests(TapDBT, config=SAMPLE_CONFIG) 501 | for test in tests: 502 | test() 503 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | requires = 3 | tox>=4 4 | tox-uv 5 | env_list = py3{9,10,11,12,13}, deps 6 | 7 | [testenv] 8 | runner = uv-venv-lock-runner 9 | dependency_groups = 10 | dev 11 | commands = 12 | pytest {posargs:tests} 13 | 14 | [testenv:deps] 15 | dependency_groups = 16 | dev 17 | commands = 18 | deptry {posargs:tap_dbt} 19 | --------------------------------------------------------------------------------