├── .editorconfig ├── .github └── ISSUE_TEMPLATE.md ├── .gitignore ├── .travis.yml ├── CONTRIBUTING.rst ├── HISTORY.rst ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── dbt ├── __init__.py ├── adapters │ ├── __init__.py │ └── oracle │ │ ├── __init__.py │ │ ├── __version__.py │ │ ├── connections.py │ │ ├── impl.py │ │ ├── relation.py │ │ └── sample_profiles.yml └── include │ ├── __init__.py │ └── oracle │ ├── __init__.py │ ├── dbt_project.yml │ └── macros │ ├── adapters.sql │ ├── catalog.sql │ ├── materializations │ ├── incremental │ │ ├── helpers.sql │ │ └── incremental.sql │ ├── seed │ │ └── seed.sql │ └── snapshot │ │ ├── snapshot.sql │ │ ├── snapshot_merge.sql │ │ └── strategies.sql │ └── schema_tests.sql ├── dbt_test_project ├── data │ └── seed.csv ├── dbt_project.yml ├── models │ ├── case_test │ │ ├── lower.sql │ │ └── select_from_lower.sql │ ├── jobs.sql │ ├── jobs_per_person.sql │ ├── p_table_with_cte.sql │ ├── p_view_with_cte.sql │ ├── person.sql │ ├── person_inc.sql │ ├── schema.yml │ └── table_relation.sql ├── oracle_tns │ └── network │ │ └── admin │ │ └── tnsnames.ora ├── packages.yml ├── profiles.yml └── test │ └── test_count_employees.sql ├── docs ├── Makefile ├── conf.py ├── contributing.rst ├── history.rst ├── index.rst ├── installation.rst ├── make.bat ├── readme.rst └── usage.rst ├── requirements_dev.txt ├── setup.cfg ├── setup.py ├── tests ├── __init__.py ├── oracle.dbtspec └── test_config.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | # http://editorconfig.org 2 | 3 | root = true 4 | 5 | [*] 6 | indent_style = space 7 | indent_size = 4 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | charset = utf-8 11 | end_of_line = lf 12 | 13 | [*.bat] 14 | indent_style = tab 15 | end_of_line = crlf 16 | 17 | [LICENSE] 18 | insert_final_newline = false 19 | 20 | [Makefile] 21 | indent_style = tab 22 | 
-------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | * Oracle DBT version: 2 | * Python version: 3 | * Operating System: 4 | 5 | ### Description 6 | 7 | Describe what you were trying to get done. 8 | Tell us what happened, what went wrong, and what you expected to happen. 9 | 10 | ### What I Did 11 | 12 | ``` 13 | Paste the command(s) you ran and the output. 14 | If there was a crash, please include the traceback here. 15 | ``` 16 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | dbt_modules 3 | logs 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | 61 | # Flask stuff: 62 | instance/ 63 | .webassets-cache 64 | 65 | # Scrapy stuff: 66 | .scrapy 67 | 68 | # Sphinx documentation 69 | docs/_build/ 70 | 71 | # PyBuilder 72 | target/ 73 | 74 | # Jupyter Notebook 75 | .ipynb_checkpoints 76 | 77 | # pyenv 78 | .python-version 79 | 80 | # celery beat schedule file 81 | celerybeat-schedule 82 | 83 | # SageMath parsed files 84 | *.sage.py 85 | 86 | # dotenv 87 | .env 88 | 89 | # virtualenv 90 | .venv 91 | venv/ 92 | ENV/ 93 | 94 | # Spyder project settings 95 | .spyderproject 96 | .spyproject 97 | 98 | # Rope project settings 99 | .ropeproject 100 | 101 | # mkdocs documentation 102 | /site 103 | 104 | # mypy 105 | .mypy_cache/ 106 | 107 | dataml_engine/.vscode/settings.json 108 | 109 | ./profile.yaml 110 | *./profile.yaml 111 | /test_code 112 | 113 | .vscode/ 114 | build/ 115 | .egg-info/ 116 | dist 117 | 118 | .user.yml 119 | 120 | 121 | dbt_integration_test -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Config file for automatic testing at travis-ci.com 2 | 3 | language: python 4 | python: 5 | - 3.8 6 | - 3.7 7 | - 3.6 8 | - 3.5 9 | 10 | # Command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors 11 | install: pip install -U tox-travis 12 | 13 | # Command to run tests, e.g. 
python setup.py test 14 | script: tox 15 | 16 | # Assuming you have installed the travis-ci CLI tool, after you 17 | # create the Github repo and add it to Travis, run the 18 | # following command to finish PyPI deployment setup: 19 | # $ travis encrypt --add deploy.password 20 | deploy: 21 | provider: pypi 22 | distributions: sdist bdist_wheel 23 | user: vitoravancini 24 | password: 25 | secure: PLEASE_REPLACE_ME 26 | on: 27 | tags: true 28 | repo: vitoravancini/dbt-oracle 29 | python: 3.8 30 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Contributing 5 | ============ 6 | 7 | Contributions are welcome, and they are greatly appreciated! Every little bit 8 | helps, and credit will always be given. 9 | 10 | You can contribute in many ways: 11 | 12 | Types of Contributions 13 | ---------------------- 14 | 15 | Report Bugs 16 | ~~~~~~~~~~~ 17 | 18 | Report bugs at https://github.com/vitoravancini/oracle_dbt/issues. 19 | 20 | If you are reporting a bug, please include: 21 | 22 | * Your operating system name and version. 23 | * Any details about your local setup that might be helpful in troubleshooting. 24 | * Detailed steps to reproduce the bug. 25 | 26 | Fix Bugs 27 | ~~~~~~~~ 28 | 29 | Look through the GitHub issues for bugs. Anything tagged with "bug" and "help 30 | wanted" is open to whoever wants to implement it. 31 | 32 | Implement Features 33 | ~~~~~~~~~~~~~~~~~~ 34 | 35 | Look through the GitHub issues for features. Anything tagged with "enhancement" 36 | and "help wanted" is open to whoever wants to implement it. 37 | 38 | Write Documentation 39 | ~~~~~~~~~~~~~~~~~~~ 40 | 41 | Oracle DBT could always use more documentation, whether as part of the 42 | official Oracle DBT docs, in docstrings, or even on the web in blog posts, 43 | articles, and such. 
44 | 45 | Submit Feedback 46 | ~~~~~~~~~~~~~~~ 47 | 48 | The best way to send feedback is to file an issue at https://github.com/vitoravancini/oracle_dbt/issues. 49 | 50 | If you are proposing a feature: 51 | 52 | * Explain in detail how it would work. 53 | * Keep the scope as narrow as possible, to make it easier to implement. 54 | * Remember that this is a volunteer-driven project, and that contributions 55 | are welcome :) 56 | 57 | Get Started! 58 | ------------ 59 | 60 | Ready to contribute? Here's how to set up `oracle_dbt` for local development. 61 | 62 | 1. Fork the `oracle_dbt` repo on GitHub. 63 | 2. Clone your fork locally:: 64 | 65 | $ git clone git@github.com:your_name_here/oracle_dbt.git 66 | 67 | 3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: 68 | 69 | $ mkvirtualenv oracle_dbt 70 | $ cd oracle_dbt/ 71 | $ python setup.py develop 72 | 73 | 4. Create a branch for local development:: 74 | 75 | $ git checkout -b name-of-your-bugfix-or-feature 76 | 77 | Now you can make your changes locally. 78 | 79 | 5. When you're done making changes, check that your changes pass flake8 and the 80 | tests, including testing other Python versions with tox:: 81 | 82 | $ flake8 oracle_dbt tests 83 | $ python setup.py test or pytest 84 | $ tox 85 | 86 | To get flake8 and tox, just pip install them into your virtualenv. 87 | 88 | 6. Commit your changes and push your branch to GitHub:: 89 | 90 | $ git add . 91 | $ git commit -m "Your detailed description of your changes." 92 | $ git push origin name-of-your-bugfix-or-feature 93 | 94 | 7. Submit a pull request through the GitHub website. 95 | 96 | Pull Request Guidelines 97 | ----------------------- 98 | 99 | Before you submit a pull request, check that it meets these guidelines: 100 | 101 | 1. The pull request should include tests. 102 | 2. If the pull request adds functionality, the docs should be updated. 
Put 103 | your new functionality into a function with a docstring, and add the 104 | feature to the list in README.rst. 105 | 3. The pull request should work for Python 3.5, 3.6, 3.7 and 3.8, and for PyPy. Check 106 | https://travis-ci.com/vitoravancini/oracle_dbt/pull_requests 107 | and make sure that the tests pass for all supported Python versions. 108 | 109 | Tips 110 | ---- 111 | 112 | To run a subset of tests:: 113 | 114 | 115 | $ python -m unittest tests.test_oracle_dbt 116 | 117 | Deploying 118 | --------- 119 | 120 | A reminder for the maintainers on how to deploy. 121 | Make sure all your changes are committed (including an entry in HISTORY.rst). 122 | Then run:: 123 | 124 | $ bump2version patch # possible: major / minor / patch 125 | $ git push 126 | $ git push --tags 127 | 128 | Travis will then deploy to PyPI if tests pass. 129 | -------------------------------------------------------------------------------- /HISTORY.rst: -------------------------------------------------------------------------------- 1 | ======= 2 | History 3 | ======= 4 | 5 | 0.1.0 (2020-06-02) 6 | ------------------ 7 | 8 | * First release on PyPI. 9 | 10 | 0.2.0 (2020-06-02) 11 | ------------------ 12 | 13 | * Added full macro implementation. Thanks Fabrice Etanchaud 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache Software License 2.0 2 | 3 | Copyright (c) 2020, Vitor Avancini 4 | 5 | Licensed under the Apache License, Version 2.0 (the "License"); 6 | you may not use this file except in compliance with the License. 7 | You may obtain a copy of the License at 8 | 9 | http://www.apache.org/licenses/LICENSE-2.0 10 | 11 | Unless required by applicable law or agreed to in writing, software 12 | distributed under the License is distributed on an "AS IS" BASIS, 13 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | See the License for the specific language governing permissions and 15 | limitations under the License. 16 | 17 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include CONTRIBUTING.rst 2 | include HISTORY.rst 3 | include LICENSE 4 | include README.rst 5 | 6 | recursive-include tests * 7 | recursive-exclude * __pycache__ 8 | recursive-exclude * *.py[co] 9 | 10 | recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif 11 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-test clean-pyc clean-build docs help 2 | .DEFAULT_GOAL := help 3 | 4 | define BROWSER_PYSCRIPT 5 | import os, webbrowser, sys 6 | 7 | from urllib.request import pathname2url 8 | 9 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) 10 | endef 11 | export BROWSER_PYSCRIPT 12 | 13 | define PRINT_HELP_PYSCRIPT 14 | import re, sys 15 | 16 | for line in sys.stdin: 17 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) 18 | if match: 19 | target, help = match.groups() 20 | print("%-20s %s" % (target, help)) 21 | endef 22 | export PRINT_HELP_PYSCRIPT 23 | 24 | BROWSER := python -c "$$BROWSER_PYSCRIPT" 25 | 26 | help: 27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) 28 | 29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts 30 | 31 | clean-build: ## remove build artifacts 32 | rm -fr build/ 33 | rm -fr dist/ 34 | rm -fr .eggs/ 35 | find . -name '*.egg-info' -exec rm -fr {} + 36 | find . -name '*.egg' -exec rm -fr {} + 37 | 38 | clean-pyc: ## remove Python file artifacts 39 | find . -name '*.pyc' -exec rm -f {} + 40 | find . -name '*.pyo' -exec rm -f {} + 41 | find . -name '*~' -exec rm -f {} + 42 | find . 
-name '__pycache__' -exec rm -fr {} + 43 | 44 | clean-test: ## remove test and coverage artifacts 45 | rm -fr .tox/ 46 | rm -f .coverage 47 | rm -fr htmlcov/ 48 | rm -fr .pytest_cache 49 | 50 | lint: ## check style with flake8 51 | flake8 oracle_dbt tests 52 | 53 | test-dbt-project: ## run tests quickly with the default Python 54 | cd dbt_test_project && ORACLE_HOME=./oracle_tns/ dbt seed --profiles-dir ./ 55 | cd dbt_test_project && ORACLE_HOME=./oracle_tns/ dbt run --profiles-dir ./ 56 | cd dbt_test_project && ORACLE_HOME=./oracle_tns/ dbt test --profiles-dir ./ 57 | 58 | test: test-dbt-integration test-dbt-project ## run tests on every Python version with tox 59 | 60 | test-dbt-integration: ## run dbt team integration tests 61 | pytest 62 | 63 | coverage: ## check code coverage quickly with the default Python 64 | coverage run --source oracle_dbt setup.py test 65 | coverage report -m 66 | coverage html 67 | $(BROWSER) htmlcov/index.html 68 | 69 | docs: ## generate Sphinx HTML documentation, including API docs 70 | rm -f docs/oracle_dbt.rst 71 | rm -f docs/modules.rst 72 | sphinx-apidoc -o docs/ oracle_dbt 73 | $(MAKE) -C docs clean 74 | $(MAKE) -C docs html 75 | $(BROWSER) docs/_build/html/index.html 76 | 77 | servedocs: docs ## compile the docs watching for changes 78 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . 79 | 80 | release: dist ## package and upload a release 81 | twine upload dist/* 82 | 83 | dist: clean ## builds source and wheel package 84 | python setup.py sdist 85 | python setup.py bdist_wheel 86 | ls -l dist 87 | 88 | install: clean ## install the package to the active Python's site-packages 89 | python setup.py install 90 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ========== 2 | ⚠️ This repo is archived! 
3 | ========== 4 | 5 | Oracle are now the maintainers of the adapter for Oracle db. The repo can be found here: https://github.com/oracle/dbt-oracle 6 | 7 | For further questions please ask in the [#db-oracle channel](https://getdbt.slack.com/archives/C01PWH4TXLY) on the [dbt community slack](https://www.getdbt.com/community/join-the-community/) 8 | 9 | ========== 10 | Oracle DBT 11 | ========== 12 | 13 | Tested with dbt==0.19.1 14 | 15 | for dbt 0.18.0 and below, use dbt-oracle version 0.3.2 16 | 17 | .. image:: https://img.shields.io/pypi/v/dbt-oracle.svg 18 | :target: https://pypi.python.org/pypi/dbt-oracle 19 | 20 | Installation 21 | ------------ 22 | 23 | You need the oracle database client installed in your system in order for this to work, 24 | here (https://cx-oracle.readthedocs.io/en/latest/user_guide/installation.html) you can find the cx_oracle python driver installation instructions. 25 | 26 | This (https://gist.github.com/tcnksm/7316877) gist is also a useful resource for installing the client in Ubuntu. It's an old link for Ubuntu 12 but it still works at least for ubuntu 18. 27 | 28 | Installing: 29 | 30 | :: 31 | 32 | pip install dbt-oracle==0.4.3 33 | 34 | Configure your profile 35 | ---------------------- 36 | 37 | dbt-oracle supports three authentication methods. In any case, one must specifiy ``user``, ``pass``, ``dbname``, and ``schema`` 38 | 39 | * host: Setting ``host`` (and possibly ``port`` and ``service``, if that differs from ``dbname``) 40 | * connection string: Setting ``connection_string`` 41 | * TNS: Setting dbname only 42 | 43 | 44 | Host example 45 | ------------ 46 | 47 | .. 
code-block:: yaml 48 | 49 | dbt_oracle_test: 50 | target: dev 51 | outputs: 52 | dev: 53 | type: oracle 54 | host: localhost 55 | user: system 56 | pass: oracle 57 | port: 1522 58 | dbname: xe 59 | schema: system 60 | threads: 4 61 | 62 | 63 | If you need to connect via a service name that doesn't match the database name, then you may 64 | optionally specify ``service`` with the above, e.g.: 65 | 66 | .. code-block:: yaml 67 | 68 | service: xe_ha.domain.tld 69 | 70 | TNS example 71 | ----------- 72 | 73 | Configuring your ORACLE_HOME environment variable so dbt-oracle can find the tnsnames.ora file. 74 | Let's assume your tnsnames.ora file is placed at 75 | 76 | :: 77 | 78 | /home/user/oracle/network/admin/tnsnames.ora 79 | 80 | 81 | you could set 82 | 83 | :: 84 | 85 | ORACLE_HOME=/home/user/oracle/ 86 | 87 | and your profile: 88 | 89 | .. code-block:: yaml 90 | 91 | dbt_oracle_test: 92 | target: dev 93 | outputs: 94 | dev: 95 | type: oracle 96 | user: system 97 | pass: oracle 98 | dbname: xe 99 | schema: system 100 | threads: 4 101 | 102 | 103 | Connection string example 104 | ------------------------- 105 | 106 | .. 
code-block:: yaml 107 | 108 | dbt_oracle_test: 109 | target: dev 110 | outputs: 111 | dev: 112 | type: oracle 113 | user: system 114 | pass: oracle 115 | dbname: xe 116 | schema: system 117 | threads: 4 118 | connection_string: "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=localhost)(PORT=1521))(CONNECT_DATA=(SERVICE_NAME=xe_ha.domain.tld)))" 119 | 120 | 121 | Supported Features 122 | ------------------ 123 | 124 | Materilizations 125 | ############### 126 | 127 | * table: OK 128 | * view: OK 129 | * incremental: OK 130 | * ephemeral: not OK 131 | 132 | Seeds 133 | ##### 134 | OK 135 | 136 | Hooks 137 | ##### 138 | OK 139 | 140 | Custom schemas 141 | ############### 142 | Not tested 143 | 144 | Sources 145 | ################### 146 | 147 | Not tested 148 | 149 | Testing & documentation 150 | ####################### 151 | 152 | - Schema tests OK 153 | - Relationship tests Not OK 154 | - Data tests Not OK in general -- These work as long as you do not use CTEs in your data tests. One strategy to manage this is to persist a useful CTE as an analysis. 155 | - SQL Tests OK 156 | - Docs generate Not OK 157 | 158 | Snapshots 159 | ######### 160 | 161 | OK 162 | 163 | Testing 164 | ------- 165 | 166 | There is a dummy dbt project called dbt_test_project for testing some things that the official dbt integration tests do not cover. 
167 | For both dbt_test_project and dbt oficial adpter tests we are using a database user 'dbt_test' with password 'dbt_test' 168 | You have to either create this user os change the credentias at tests/oracle.dbtspec and dbt_test_project/profiles.yml 169 | 170 | For running it first start an oracle database instance: 171 | :: 172 | 173 | docker run \ 174 | --name dbt-oracle-db \ 175 | -d \ 176 | -p 1522:1522 \ 177 | epiclabs/docker-oracle-xe-11g 178 | 179 | 180 | Install the project locally 181 | 182 | :: 183 | 184 | python setup.py install 185 | 186 | 187 | then run dbt seed and run (theres is a profile file compatible with oracle 11g docker defaults at the test dir) 188 | 189 | :: 190 | 191 | cd dbt_test_project 192 | dbt seed --profiles-dir ./ 193 | dbt run --profiles-dir ./ 194 | dbt test --profiles-dir ./ 195 | 196 | you can also run 197 | 198 | :: 199 | 200 | make test 201 | 202 | for running both dbt adapter tests and the dbt_test_project included in this repo 203 | 204 | The following dbt adapter tests are passing: 205 | 206 | :: 207 | 208 | tests/oracle.dbtspec::test_dbt_empty 209 | tests/oracle.dbtspec::test_dbt_base 210 | tests/oracle.dbtspec::test_dbt_ephemeral 211 | tests/oracle.dbtspec::test_dbt_incremental 212 | tests/oracle.dbtspec::test_dbt_snapshot_strategy_timestamp 213 | tests/oracle.dbtspec::test_dbt_snapshot_strategy_check_cols 214 | tests/oracle.dbtspec::test_dbt_schema_test 215 | 216 | Known failing tests: 217 | 218 | :: 219 | 220 | FAILED tests/oracle.dbtspec::test_dbt_data_test 221 | FAILED tests/oracle.dbtspec::test_dbt_ephemeral_data_tests 222 | 223 | The dbt adapter tests for data tests fails due to how the test_dbt_data_test sequence is implemented. 224 | You can use data tests with this adapter, there is an example at the test project in this repo. 225 | 226 | 227 | Final Notes 228 | ----------- 229 | 230 | This is a new project and any contribuitions are welcome. 
# This span of the dump holds the tail of README.rst, four one-line
# packaging files (preserved as comments below), all of
# dbt/adapters/oracle/connections.py, and the start of impl.py.
#
#   dbt/__init__.py and dbt/adapters/__init__.py each contain:
#       __path__ = __import__('pkgutil').extend_path(__path__, __name__)
#   dbt/adapters/oracle/__init__.py registers the adapter plugin:
#       Plugin = AdapterPlugin(adapter=OracleAdapter,
#                              credentials=OracleAdapterCredentials,
#                              include_path=oracle.PACKAGE_PATH)
#   dbt/adapters/oracle/__version__.py:
#       version = "0.19.1"

# ---- dbt/adapters/oracle/connections.py --------------------------------
"""Connection handling for the dbt Oracle adapter, backed by cx_Oracle."""
import agate
from typing import List, Optional, Tuple, Any, Iterable, Dict
from contextlib import contextmanager
import enum
import time

import dbt.exceptions
import cx_Oracle
from cx_Oracle import Connection

from dbt.logger import GLOBAL_LOGGER as logger

from dataclasses import dataclass
from dbt.helper_types import Port

from dbt.adapters.base import Credentials
from dbt.adapters.sql import SQLConnectionManager


class OracleConnectionMethod(enum.Enum):
    """The three supported ways of locating an Oracle database."""
    HOST = 1
    TNS = 2
    CONNECTION_STRING = 3


@dataclass
class OracleAdapterCredentials(Credentials):
    """Collect Oracle credentials.

    An OracleConnectionMethod is inferred from the combination
    of parameters provided in the profile.
    """
    user: str
    password: str
    # The port is only used for OracleConnectionMethod.HOST;
    # 1521 is the default Oracle listener port.
    port: Port = 1521
    host: Optional[str] = None
    service: Optional[str] = None
    connection_string: Optional[str] = None

    # Profile keys accepted as aliases for canonical credential fields.
    _ALIASES = {
        'dbname': 'database',
        'pass': 'password'
    }

    @property
    def type(self):
        return 'oracle'

    def _connection_keys(self) -> Tuple[str, ...]:
        """
        List of keys to display in the `dbt debug` output. Omit password.
        """
        return (
            'user', 'database', 'schema',
            'host', 'port', 'service',
            'connection_string'
        )

    def connection_method(self) -> OracleConnectionMethod:
        """Return the OracleConnectionMethod inferred from the configuration."""
        if self.connection_string:
            return OracleConnectionMethod.CONNECTION_STRING
        elif self.host:
            return OracleConnectionMethod.HOST
        else:
            return OracleConnectionMethod.TNS

    def get_dsn(self) -> str:
        """Create the cx_Oracle dsn for any connection method.

        See https://cx-oracle.readthedocs.io/en/latest/user_guide/connection_handling.html
        """
        method = self.connection_method()
        if method == OracleConnectionMethod.TNS:
            # 'database' is resolved as a TNS alias via tnsnames.ora.
            return self.database
        if method == OracleConnectionMethod.CONNECTION_STRING:
            return self.connection_string

        # OracleConnectionMethod.HOST: if the 'service' property is not
        # provided, use the 'database' property for purposes of connecting.
        if self.service:
            service = self.service
        else:
            service = self.database

        return f'{self.host}:{self.port}/{service}'


class OracleAdapterConnectionManager(SQLConnectionManager):
    TYPE = 'oracle'

    @classmethod
    def open(cls, connection):
        """Open a cx_Oracle connection for `connection`, or reuse an
        already-open handle. Marks the connection 'fail' and raises
        FailedToConnectException on driver errors."""
        if connection.state == 'open':
            logger.debug('Connection is already open, skipping open.')
            return connection
        credentials = cls.get_credentials(connection.credentials)
        method = credentials.connection_method()
        dsn = credentials.get_dsn()

        logger.debug(f"Attempting to connect using Oracle method: '{method}' "
                     f"and dsn: '{dsn}'")
        try:
            handle = cx_Oracle.connect(
                credentials.user,
                credentials.password,
                dsn,
                encoding="UTF-8"
            )
            connection.handle = handle
            connection.state = 'open'
        except cx_Oracle.DatabaseError as e:
            logger.info(f"Got an error when attempting to open an Oracle "
                        f"connection: '{e}'")
            connection.handle = None
            connection.state = 'fail'

            raise dbt.exceptions.FailedToConnectException(str(e))

        return connection

    @classmethod
    def cancel(cls, connection):
        """Cancel a running query by closing its underlying connection."""
        connection_name = connection.name
        oracle_connection = connection.handle

        logger.info("Cancelling query '{}' ".format(connection_name))

        try:
            Connection.close(oracle_connection)
        except Exception as e:
            logger.error('Error closing connection for cancel request')
            raise Exception(str(e))

        logger.info("Canceled query '{}'".format(connection_name))

    @classmethod
    def get_status(cls, cursor):
        # cx_Oracle cursors do not appear to expose a status message,
        # so always report 'OK'.
        return 'OK'

    @classmethod
    def get_response(cls, cursor):
        return 'OK'

    @contextmanager
    def exception_handler(self, sql):
        """Translate driver errors into dbt exceptions, releasing the
        connection on failure."""
        try:
            yield

        except cx_Oracle.DatabaseError as e:
            logger.info('Oracle error: {}'.format(str(e)))

            try:
                # attempt to release the connection
                self.release()
            except cx_Oracle.Error:
                logger.info("Failed to release connection!")

            raise dbt.exceptions.DatabaseException(str(e).strip()) from e

        except Exception as e:
            logger.info("Rolling back transaction.")
            self.release()
            if isinstance(e, dbt.exceptions.RuntimeException):
                # during a sql query, an internal to dbt exception was raised.
                # this sounds a lot like a signal handler and probably has
                # useful information, so raise it without modification.
                raise e

            raise dbt.exceptions.RuntimeException(e) from e

    @classmethod
    def get_credentials(cls, credentials):
        return credentials

    def add_query(
        self,
        sql: str,
        auto_begin: bool = True,
        bindings: Optional[Any] = None,
        abridge_sql_log: bool = False
    ) -> Tuple[Connection, Any]:
        """Execute `sql` on this thread's connection and return
        (connection, cursor).

        `bindings` now defaults to None instead of a mutable `{}` (a
        shared-state hazard); an empty dict is substituted at execution
        time so cx_Oracle sees the same value as before.
        """
        connection = self.get_thread_connection()
        if auto_begin and connection.transaction_open is False:
            self.begin()

        logger.debug('Using {} connection "{}".'
                     .format(self.TYPE, connection.name))

        with self.exception_handler(sql):
            if abridge_sql_log:
                log_sql = '{}...'.format(sql[:512])
            else:
                log_sql = sql

            logger.debug(
                'On {connection_name}: {sql}',
                connection_name=connection.name,
                sql=log_sql,
            )
            pre = time.time()

            cursor = connection.handle.cursor()
            cursor.execute(sql, bindings if bindings is not None else {})
            # Commit after each statement so DDL and DML issued by dbt are
            # visible to subsequent queries on other connections.
            connection.handle.commit()
            logger.debug(
                "SQL status: {status} in {elapsed:0.2f} seconds",
                status=self.get_status(cursor),
                elapsed=(time.time() - pre)
            )

            return connection, cursor

    def add_begin_query(self):
        connection = self.get_thread_connection()
        # BUG FIX: the original assigned the bound `cursor` method itself
        # (missing call parentheses), returning a method rather than a
        # cursor object.
        cursor = connection.handle.cursor()
        return connection, cursor

# (dbt/adapters/oracle/impl.py begins at this point in the original dump)
# ---- dbt/adapters/oracle/impl.py ---------------------------------------
"""Adapter implementation for Oracle: agate type mapping, relation
listing, and test-support SQL generation."""
from typing import (
    Optional, List
)

import dbt.exceptions
from dbt.adapters.sql import SQLAdapter
from dbt.adapters.base.meta import available
from dbt.adapters.oracle import OracleAdapterConnectionManager
from dbt.adapters.oracle.relation import OracleRelation

import agate

# Row-comparison query used by dbt's equality checks; Oracle spells the
# ANSI EXCEPT set operator as MINUS.
COLUMNS_EQUAL_SQL = '''
with diff_count as (
    SELECT
        1 as id,
        COUNT(*) as num_missing FROM (
            (SELECT {columns} FROM {relation_a} {except_op}
             SELECT {columns} FROM {relation_b})
             MINUS
            (SELECT {columns} FROM {relation_b} {except_op}
             SELECT {columns} FROM {relation_a})
        ) a
), table_a as (
    SELECT COUNT(*) as num_rows FROM {relation_a}
), table_b as (
    SELECT COUNT(*) as num_rows FROM {relation_b}
), row_count_diff as (
    select
        1 as id,
        table_a.num_rows - table_b.num_rows as difference
    from table_a, table_b
)
select
    row_count_diff.difference as row_count_difference,
    diff_count.num_missing as num_mismatched
from row_count_diff
join diff_count using (id)
'''.strip()

LIST_RELATIONS_MACRO_NAME = 'list_relations_without_caching'


class OracleAdapter(SQLAdapter):
    ConnectionManager = OracleAdapterConnectionManager
    Relation = OracleRelation

    def debug_query(self) -> None:
        # Oracle has no bare SELECT; probe connectivity via DUAL.
        self.execute("select 1 as id from dual")

    @classmethod
    def date_function(cls):
        return 'CURRENT_DATE'

    @classmethod
    def convert_text_type(cls, agate_table, col_idx):
        """Pick a varchar2 width wide enough for the column's UTF-8 data
        (minimum 16, fallback 64 for an all-null column)."""
        column = agate_table.columns[col_idx]
        # BUG FIX: the original bound `lens` to a generator; a generator is
        # always truthy, so `max(lens) if lens else 64` raised ValueError
        # when the column contained only nulls. Materialize it instead.
        lens = [len(d.encode("utf-8")) for d in column.values_without_nulls()]
        max_len = max(lens) if lens else 64
        length = max_len if max_len > 16 else 16
        return "varchar2({})".format(length)

    @classmethod
    def convert_date_type(cls, agate_table, col_idx):
        return "timestamp"

    @classmethod
    def convert_datetime_type(cls, agate_table, col_idx):
        return "timestamp"

    @classmethod
    def convert_boolean_type(cls, agate_table, col_idx):
        # Oracle (pre-23c) has no boolean column type; store 0/1.
        return "number(1)"

    @classmethod
    def convert_number_type(cls, agate_table, col_idx):
        # Oracle's NUMBER holds both integers and decimals, so no
        # precision inspection is needed (the original computed an unused
        # agate.MaxPrecision aggregate here).
        return "number"

    @classmethod
    def convert_time_type(cls, agate_table, col_idx):
        return "timestamp"

    @available
    def verify_database(self, database):
        """Raise unless `database` matches the configured database;
        cross-database references are not supported."""
        if database.startswith('"'):
            database = database.strip('"')
        expected = self.config.credentials.database
        if database.lower() != expected.lower():
            raise dbt.exceptions.NotImplementedException(
                'Cross-db references not allowed in {} ({} vs {})'
                .format(self.type(), database, expected)
            )
        # return an empty string on success so macros can call this
        return ''

    def get_rows_different_sql(
        self,
        relation_a: OracleRelation,
        relation_b: OracleRelation,
        column_names: Optional[List[str]] = None,
        except_operator: str = 'MINUS',
    ) -> str:
        """Generate SQL for a query that returns a single row with two
        columns: the number of rows that are different between the two
        relations and the number of mismatched rows.
        """
        # This method only really exists for test reasons.
        names: List[str]
        if column_names is None:
            columns = self.get_columns_in_relation(relation_a)
            # Unquoted on purpose: Oracle folds unquoted names, and the
            # adapter's quote policy disables quoting everywhere.
            names = sorted((c.name for c in columns))
        else:
            names = sorted((n for n in column_names))
        columns_csv = ', '.join(names)

        sql = COLUMNS_EQUAL_SQL.format(
            columns=columns_csv,
            relation_a=str(relation_a),
            relation_b=str(relation_b),
            except_op=except_operator,
        )

        return sql

    def list_relations_without_caching(
        self, schema_relation: OracleRelation,
    ) -> List[OracleRelation]:
        """List relations in `schema_relation`'s schema via the adapter
        macro, building OracleRelations with quoting disabled.

        Implemented here because dbt core's default hardcodes a
        quote_policy with everything set to true.
        """
        kwargs = {'schema_relation': schema_relation}
        results = self.execute_macro(
            LIST_RELATIONS_MACRO_NAME,
            kwargs=kwargs
        )

        quote_policy = {
            'database': False,
            'schema': False,
            'identifier': False
        }
        relations = []

        for _database, name, _schema, _type in results:
            try:
                _type = self.Relation.get_relation_type(_type)
            except ValueError:
                # Unrecognized relation kinds are surfaced as external.
                _type = self.Relation.External

            relations.append(self.Relation.create(
                database=_database,
                schema=_schema,
                identifier=name,
                quote_policy=quote_policy,
                type=_type
            ))
        return relations

    def timestamp_add_sql(
        self, add_to: str, number: int = 1, interval: str = 'hour'
    ) -> str:
        # for backwards compatibility, we're compelled to set some sort of
        # default. A lot of searching has lead me to believe that the
        # '+ interval' syntax used in postgres/redshift is relatively common
        # and might even be the SQL standard's intention.
        return f"{add_to} + interval '{number}' {interval}"
backwards compatibility, we're compelled to set some sort of 166 | # default. A lot of searching has lead me to believe that the 167 | # '+ interval' syntax used in postgres/redshift is relatively common 168 | # and might even be the SQL standard's intention. 169 | return f"{add_to} + interval '{number}' {interval}" 170 | -------------------------------------------------------------------------------- /dbt/adapters/oracle/relation.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from dbt.adapters.base.relation import BaseRelation, Policy 3 | 4 | 5 | @dataclass 6 | class OracleQuotePolicy(Policy): 7 | database: bool = False 8 | schema: bool = False 9 | identifier: bool = False 10 | 11 | 12 | @dataclass 13 | class OracleIncludePolicy(Policy): 14 | database: bool = False 15 | schema: bool = True 16 | identifier: bool = True 17 | 18 | 19 | @dataclass(frozen=True, eq=False, repr=False) 20 | class OracleRelation(BaseRelation): 21 | quote_policy: OracleQuotePolicy = OracleQuotePolicy() 22 | include_policy: OracleIncludePolicy = OracleIncludePolicy() 23 | 24 | @staticmethod 25 | def add_ephemeral_prefix(name): 26 | return f'dbt__cte__{name}__' 27 | -------------------------------------------------------------------------------- /dbt/adapters/oracle/sample_profiles.yml: -------------------------------------------------------------------------------- 1 | default: 2 | target: dev 3 | outputs: 4 | dev: 5 | type: oracle 6 | host: [host - default is localhost] 7 | user: system 8 | password: oracle 9 | port: 1521 10 | dbname: [dbname] 11 | schema: [schema] 12 | threads: [1 or more] 13 | -------------------------------------------------------------------------------- /dbt/include/__init__.py: -------------------------------------------------------------------------------- 1 | __path__ = __import__('pkgutil').extend_path(__path__, __name__) 2 | 
-------------------------------------------------------------------------------- /dbt/include/oracle/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | PACKAGE_PATH = os.path.dirname(__file__) 3 | -------------------------------------------------------------------------------- /dbt/include/oracle/dbt_project.yml: -------------------------------------------------------------------------------- 1 | 2 | name: dbt_oracle 3 | config-version: 2 4 | version: 1.0 5 | 6 | quoting: 7 | database: false 8 | schema: false 9 | identifier: false 10 | 11 | macro-paths: ["macros"] 12 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/adapters.sql: -------------------------------------------------------------------------------- 1 | {% macro oracle__get_columns_in_query(select_sql) %} 2 | {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%} 3 | select * from ( 4 | {{ select_sql }} 5 | ) dbt_sbq_tmp 6 | where 1 = 0 and rownum < 1 7 | {% endcall %} 8 | 9 | {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }} 10 | {% endmacro %} 11 | 12 | {% macro oracle__create_schema(database_name, schema_name) -%} 13 | {% if relation.database -%} 14 | {{ adapter.verify_database(relation.database) }} 15 | {%- endif -%} 16 | {%- call statement('drop_schema') -%} 17 | -- Noop for not breaking tests, oracle 18 | -- schemas are actualy users, we can't 19 | -- create it here 20 | select 'a' from dual 21 | {%- endcall -%} 22 | {% endmacro %} 23 | 24 | {% macro oracle__drop_schema(schema) -%} 25 | {% if schema.database -%} 26 | {{ adapter.verify_database(schema.database) }} 27 | {%- endif -%} 28 | {%- call statement('drop_schema') -%} 29 | -- from https://gist.github.com/rafaeleyng/33eaef673fc4ee98a6de4f70c8ce3657 30 | BEGIN 31 | FOR cur_rec IN (SELECT object_name, object_type 32 | FROM ALL_objects 33 | WHERE 
object_type IN 34 | ('TABLE', 35 | 'VIEW', 36 | 'PACKAGE', 37 | 'PROCEDURE', 38 | 'FUNCTION', 39 | 'SEQUENCE', 40 | 'TYPE', 41 | 'SYNONYM', 42 | 'MATERIALIZED VIEW' 43 | ) 44 | AND owner = '{{ schema | upper }}') 45 | LOOP 46 | BEGIN 47 | IF cur_rec.object_type = 'TABLE' 48 | THEN 49 | EXECUTE IMMEDIATE 'DROP ' 50 | || cur_rec.object_type 51 | || ' "' 52 | || cur_rec.object_name 53 | || '" CASCADE CONSTRAINTS'; 54 | ELSE 55 | EXECUTE IMMEDIATE 'DROP ' 56 | || cur_rec.object_type 57 | || ' "' 58 | || cur_rec.object_name 59 | || '"'; 60 | END IF; 61 | EXCEPTION 62 | WHEN OTHERS 63 | THEN 64 | DBMS_OUTPUT.put_line ( 'FAILED: DROP ' 65 | || cur_rec.object_type 66 | || ' "' 67 | || cur_rec.object_name 68 | || '"' 69 | ); 70 | END; 71 | END LOOP; 72 | END; 73 | {%- endcall -%} 74 | {% endmacro %} 75 | 76 | {% macro oracle__create_table_as_backup(temporary, relation, sql) -%} 77 | {%- set sql_header = config.get('sql_header', none) -%} 78 | 79 | {{ sql_header if sql_header is not none }} 80 | 81 | create {% if temporary -%} 82 | global temporary 83 | {%- endif %} table {{ relation.include(schema=(not temporary)).quote(schema=False, identifier=False) }} 84 | {% if temporary -%} on commit preserve rows {%- endif %} 85 | as 86 | {{ sql }} 87 | 88 | {%- endmacro %} 89 | 90 | {% macro oracle__create_table_as(temporary, relation, sql) -%} 91 | {%- set sql_header = config.get('sql_header', none) -%} 92 | 93 | {{ sql_header if sql_header is not none }} 94 | 95 | create {% if temporary -%} 96 | global temporary 97 | {%- endif %} table {{ relation.include(schema=(not temporary)).quote(schema=False, identifier=False) }} 98 | {% if temporary -%} on commit preserve rows {%- endif %} 99 | as 100 | {{ sql }} 101 | 102 | {%- endmacro %} 103 | {% macro oracle__create_view_as(relation, sql) -%} 104 | {%- set sql_header = config.get('sql_header', none) -%} 105 | 106 | {{ sql_header if sql_header is not none }} 107 | create view {{ relation.quote(schema=False, identifier=False) }} as 108 | 
{{ sql }} 109 | 110 | {% endmacro %} 111 | 112 | {% macro oracle__get_columns_in_relation(relation) -%} 113 | {% call statement('get_columns_in_relation', fetch_result=True) %} 114 | with columns as ( 115 | select 116 | UPPER(SYS_CONTEXT('userenv', 'DB_NAME')) table_catalog, 117 | UPPER(owner) table_schema, 118 | table_name, 119 | column_name, 120 | data_type, 121 | data_type_mod, 122 | decode(data_type_owner, null, TO_CHAR(null), SYS_CONTEXT('userenv', 'DB_NAME')) domain_catalog, 123 | data_type_owner domain_schema, 124 | data_length character_maximum_length, 125 | data_length character_octet_length, 126 | data_length, 127 | data_precision numeric_precision, 128 | data_scale numeric_scale, 129 | nullable is_nullable, 130 | column_id ordinal_position, 131 | default_length, 132 | data_default column_default, 133 | num_distinct, 134 | low_value, 135 | high_value, 136 | density, 137 | num_nulls, 138 | num_buckets, 139 | last_analyzed, 140 | sample_size, 141 | SYS_CONTEXT('userenv', 'DB_NAME') character_set_catalog, 142 | 'SYS' character_set_schema, 143 | SYS_CONTEXT('userenv', 'DB_NAME') collation_catalog, 144 | 'SYS' collation_schema, 145 | character_set_name, 146 | char_col_decl_length, 147 | global_stats, 148 | user_stats, 149 | avg_col_len, 150 | char_length, 151 | char_used, 152 | v80_fmt_image, 153 | data_upgraded, 154 | histogram 155 | from sys.all_tab_columns 156 | ) 157 | select 158 | lower(column_name) as "name", 159 | lower(data_type) as "type", 160 | char_length as "character_maximum_length", 161 | numeric_precision as "numeric_precision", 162 | numeric_scale as "numeric_scale" 163 | from columns 164 | where table_name = upper('{{ relation.identifier }}') 165 | {% if relation.schema %} 166 | and table_schema = upper('{{ relation.schema }}') 167 | {% endif %} 168 | {% if relation.database %} 169 | and table_catalog = upper('{{ relation.database }}') 170 | {% endif %} 171 | order by ordinal_position 172 | 173 | {% endcall %} 174 | {% set table = 
load_result('get_columns_in_relation').table %} 175 | {{ return(sql_convert_columns_in_relation(table)) }} 176 | {% endmacro %} 177 | 178 | {% macro oracle_escape_comment(comment) -%} 179 | {% if comment is not string %} 180 | {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} 181 | {% endif %} 182 | {%- set start_quote = "q'<" -%} 183 | {%- set end_quote = ">'" -%} 184 | {%- if end_quote in comment -%} 185 | {%- do exceptions.raise_compiler_error('The string ' ~ end_quote ~ ' is not allowed in comments.') -%} 186 | {%- endif -%} 187 | {{ start_quote }}{{ comment }}{{ end_quote }} 188 | {%- endmacro %} 189 | 190 | {% macro oracle__alter_relation_comment(relation, comment) %} 191 | {% set escaped_comment = oracle_escape_comment(comment) %} 192 | {# "comment on table" even for views #} 193 | comment on table {{ relation.quote(schema=False, identifier=False) }} is {{ escaped_comment }} 194 | {% endmacro %} 195 | 196 | {% macro oracle__persist_docs(relation, model, for_relation, for_columns) -%} 197 | {% if for_relation and config.persist_relation_docs() and model.description %} 198 | {% do run_query(alter_relation_comment(relation, model.description)) %} 199 | {% endif %} 200 | {% if for_columns and config.persist_column_docs() and model.columns %} 201 | {% set column_dict = model.columns %} 202 | {% for column_name in column_dict %} 203 | {% set comment = column_dict[column_name]['description'] %} 204 | {% set escaped_comment = oracle_escape_comment(comment) %} 205 | {% call statement('alter _column comment', fetch_result=False) -%} 206 | comment on column {{ relation.quote(schema=False, identifier=False) }}.{{ column_name }} is {{ escaped_comment }} 207 | {%- endcall %} 208 | {% endfor %} 209 | {% endif %} 210 | {% endmacro %} 211 | 212 | {% macro oracle__alter_column_type(relation, column_name, new_column_type) -%} 213 | {# 214 | 1. Create a new column (w/ temp name and correct type) 215 | 2. Copy data over to it 216 | 3. 
Drop the existing column (cascade!) 217 | 4. Rename the new column to existing column 218 | #} 219 | {%- set tmp_column = column_name + "__dbt_alter" -%} 220 | 221 | {% call statement('alter_column_type 1', fetch_result=False) %} 222 | alter table {{ relation.quote(schema=False, identifier=False) }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }} 223 | {% endcall %} 224 | {% call statement('alter_column_type 2', fetch_result=False) %} 225 | update {{ relation.quote(schema=False, identifier=False) }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }} 226 | {% endcall %} 227 | {% call statement('alter_column_type 3', fetch_result=False) %} 228 | alter table {{ relation.quote(schema=False, identifier=False) }} drop column {{ adapter.quote(column_name) }} cascade 229 | {% endcall %} 230 | {% call statement('alter_column_type 4', fetch_result=False) %} 231 | rename column {{ relation.quote(schema=False, identifier=False) }}.{{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} 232 | {% endcall %} 233 | 234 | {% endmacro %} 235 | 236 | {% macro oracle__drop_relation(relation) -%} 237 | {% call statement('drop_relation', auto_begin=False) -%} 238 | DECLARE 239 | dne_942 EXCEPTION; 240 | PRAGMA EXCEPTION_INIT(dne_942, -942); 241 | attempted_ddl_on_in_use_GTT EXCEPTION; 242 | pragma EXCEPTION_INIT(attempted_ddl_on_in_use_GTT, -14452); 243 | BEGIN 244 | EXECUTE IMMEDIATE 'DROP {{ relation.type }} {{ relation.quote(schema=False, identifier=False) }} cascade constraint'; 245 | EXCEPTION 246 | WHEN attempted_ddl_on_in_use_GTT THEN 247 | NULL; -- if it its a global temporary table, leave it alone. 248 | WHEN dne_942 THEN 249 | NULL; -- if it doesn't exist, do nothing .. no error, nothing .. ignore. 
250 | END; 251 | {%- endcall %} 252 | {% endmacro %} 253 | 254 | {% macro oracle__truncate_relation(relation) -%} 255 | {% call statement('truncate_relation') -%} 256 | truncate table {{ relation.quote(schema=False, identifier=False) }} 257 | {%- endcall %} 258 | {% endmacro %} 259 | 260 | {% macro oracle__rename_relation(from_relation, to_relation) -%} 261 | {% call statement('rename_relation') -%} 262 | rename {{ from_relation.include(False, False, True).quote(schema=False, identifier=False) }} to {{ to_relation.include(False, False, True).quote(schema=False, identifier=False) }} 263 | {%- endcall %} 264 | {% endmacro %} 265 | 266 | {% macro oracle__information_schema_name(database) -%} 267 | {% if database -%} 268 | {{ adapter.verify_database(database) }} 269 | {%- endif -%} 270 | sys 271 | {%- endmacro %} 272 | 273 | {% macro oracle__list_schemas(database) %} 274 | {% if database -%} 275 | {{ adapter.verify_database(database) }} 276 | {%- endif -%} 277 | {% call statement('list_schemas', fetch_result=True, auto_begin=False) -%} 278 | select lower(username) as "name" 279 | from sys.all_users 280 | order by username 281 | {% endcall %} 282 | {{ return(load_result('list_schemas').table) }} 283 | {% endmacro %} 284 | 285 | {% macro oracle__check_schema_exists(information_schema, schema) -%} 286 | {% if information_schema.database -%} 287 | {{ adapter.verify_database(information_schema.database) }} 288 | {%- endif -%} 289 | {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %} 290 | select count(*) from sys.all_users where username = upper('{{ schema }}') 291 | {% endcall %} 292 | {{ return(load_result('check_schema_exists').table) }} 293 | {% endmacro %} 294 | 295 | {% macro oracle__list_relations_without_caching(schema_relation) %} 296 | {% call statement('list_relations_without_caching', fetch_result=True) -%} 297 | with tables as 298 | (select UPPER(SYS_CONTEXT('userenv', 'DB_NAME')) table_catalog, 299 | UPPER(owner) table_schema, 300 
| table_name, 301 | case 302 | when iot_type = 'Y' 303 | then 'IOT' 304 | when temporary = 'Y' 305 | then 'TEMP' 306 | else 'BASE TABLE' 307 | end table_type 308 | from sys.all_tables 309 | union all 310 | select UPPER(SYS_CONTEXT('userenv', 'DB_NAME')), 311 | UPPER(owner), 312 | view_name, 313 | 'VIEW' 314 | from sys.all_views 315 | ) 316 | select lower(table_catalog) as "database_name" 317 | ,lower(table_name) as "name" 318 | ,lower(table_schema) as "schema_name" 319 | ,case table_type 320 | when 'BASE TABLE' then 'table' 321 | when 'VIEW' then 'view' 322 | end as "kind" 323 | from tables 324 | where table_type in ('BASE TABLE', 'VIEW') 325 | and table_catalog = upper('{{ schema_relation.database }}') 326 | and table_schema = upper('{{ schema_relation.schema }}') 327 | {% endcall %} 328 | {{ return(load_result('list_relations_without_caching').table) }} 329 | {% endmacro %} 330 | 331 | {% macro oracle__current_timestamp() -%} 332 | CURRENT_TIMESTAMP 333 | {%- endmacro %} 334 | 335 | {% macro oracle__make_temp_relation(base_relation, suffix) %} 336 | {% set dt = modules.datetime.datetime.now() %} 337 | {% set dtstring = dt.strftime("%H%M%S") %} 338 | {% set tmp_identifier = 'o$pt_' ~ base_relation.identifier ~ dtstring %} 339 | {% set tmp_relation = base_relation.incorporate( 340 | path={"identifier": tmp_identifier}) -%} 341 | 342 | {% do return(tmp_relation) %} 343 | {% endmacro %} 344 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/catalog.sql: -------------------------------------------------------------------------------- 1 | {% macro oracle__get_catalog(information_schema, schemas) -%} 2 | 3 | {%- call statement('catalog', fetch_result=True) -%} 4 | {# 5 | If the user has multiple databases set and the first one is wrong, this will fail. 6 | But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better. 
7 | #} 8 | {% set database = information_schema.database %} 9 | {{ adapter.verify_database(database) }} 10 | 11 | with columns as ( 12 | select 13 | SYS_CONTEXT('userenv', 'DB_NAME') table_catalog, 14 | owner table_schema, 15 | table_name, 16 | column_name, 17 | data_type, 18 | data_type_mod, 19 | decode(data_type_owner, null, TO_CHAR(null), SYS_CONTEXT('userenv', 'DB_NAME')) domain_catalog, 20 | data_type_owner domain_schema, 21 | data_length character_maximum_length, 22 | data_length character_octet_length, 23 | data_length, 24 | data_precision numeric_precision, 25 | data_scale numeric_scale, 26 | nullable is_nullable, 27 | column_id ordinal_position, 28 | default_length, 29 | data_default column_default, 30 | num_distinct, 31 | low_value, 32 | high_value, 33 | density, 34 | num_nulls, 35 | num_buckets, 36 | last_analyzed, 37 | sample_size, 38 | SYS_CONTEXT('userenv', 'DB_NAME') character_set_catalog, 39 | 'SYS' character_set_schema, 40 | SYS_CONTEXT('userenv', 'DB_NAME') collation_catalog, 41 | 'SYS' collation_schema, 42 | character_set_name, 43 | char_col_decl_length, 44 | global_stats, 45 | user_stats, 46 | avg_col_len, 47 | char_length, 48 | char_used, 49 | v80_fmt_image, 50 | data_upgraded, 51 | histogram 52 | from sys.all_tab_columns 53 | ), 54 | tables as 55 | (select UPPER(SYS_CONTEXT('userenv', 'DB_NAME')) table_catalog, 56 | UPPER(owner) table_schema, 57 | table_name, 58 | case 59 | when iot_type = 'Y' 60 | then 'IOT' 61 | when temporary = 'Y' 62 | then 'TEMP' 63 | else 'BASE TABLE' 64 | end table_type 65 | from sys.all_tables 66 | union all 67 | select SYS_CONTEXT('userenv', 'DB_NAME'), 68 | owner, 69 | view_name, 70 | 'VIEW' 71 | from sys.all_views 72 | ) 73 | select 74 | lower(tables.table_catalog) as "table_database", 75 | lower(tables.table_schema) as "table_schema", 76 | lower(tables.table_name) as "table_name", 77 | lower(tables.table_type) as "table_type", 78 | all_tab_comments.comments as "table_comment", 79 | lower(columns.column_name) as 
"column_name", 80 | ordinal_position as "column_index", 81 | lower(case 82 | when data_type like '%CHAR%' 83 | then data_type || '(' || cast(char_length as varchar(10)) || ')' 84 | else data_type 85 | end) as "column_type", 86 | all_col_comments.comments as "column_comment", 87 | tables.table_schema as "table_owner" 88 | from tables 89 | inner join columns on columns.table_catalog = tables.table_catalog 90 | and columns.table_schema = tables.table_schema 91 | and columns.table_name = tables.table_name 92 | left join all_tab_comments 93 | on all_tab_comments.owner = tables.table_schema 94 | and all_tab_comments.table_name = tables.table_name 95 | left join all_col_comments 96 | on all_col_comments.owner = columns.table_schema 97 | and all_col_comments.table_name = columns.table_name 98 | and all_col_comments.column_name = columns.column_name 99 | where ( 100 | {%- for schema in schemas -%} 101 | tables.table_schema = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%} 102 | {%- endfor -%} 103 | ) 104 | order by 105 | tables.table_schema, 106 | tables.table_name, 107 | ordinal_position 108 | {%- endcall -%} 109 | 110 | {{ return(load_result('catalog').table) }} 111 | 112 | {%- endmacro %} 113 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/incremental/helpers.sql: -------------------------------------------------------------------------------- 1 | 2 | {% macro oracle_incremental_upsert_backup(tmp_relation, target_relation, unique_key=none, statement_name="main") %} 3 | {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%} 4 | {%- set dest_cols_csv = dest_columns | map(attribute='name') | join(', ') -%} 5 | 6 | {%- if unique_key is not none -%} 7 | delete 8 | from {{ target_relation }} 9 | where ({{ unique_key }}) in ( 10 | select ({{ unique_key }}) 11 | from {{ tmp_relation }} 12 | ); 13 | {%- endif %} 14 | 15 | insert into {{ target_relation }} ({{ 
dest_cols_csv }}) 16 | ( 17 | select {{ dest_cols_csv }} 18 | from {{ tmp_relation }} 19 | ) 20 | {%- endmacro %} 21 | 22 | {% macro oracle_incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name="main") %} 23 | {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%} 24 | {%- set dest_cols_csv = dest_columns | map(attribute='name') | join(', ') -%} 25 | 26 | {%- if unique_key is not none -%} 27 | merge into {{ target_relation }} target 28 | using {{ tmp_relation }} temp 29 | on (temp.{{ unique_key }} = target.{{ unique_key }}) 30 | when matched then 31 | update set 32 | {% for col in dest_columns if col.name != unique_key %} 33 | target.{{ col.name }} = temp.{{ col.name }} 34 | {% if not loop.last %}, {% endif %} 35 | {% endfor %} 36 | when not matched then 37 | insert( {{ dest_cols_csv }} ) 38 | values( 39 | {% for col in dest_columns %} 40 | temp.{{ col.name }} 41 | {% if not loop.last %}, {% endif %} 42 | {% endfor %} 43 | ) 44 | {%- else %} 45 | insert into {{ target_relation }} ({{ dest_cols_csv }}) 46 | ( 47 | select {{ dest_cols_csv }} 48 | from {{ tmp_relation }} 49 | ) 50 | {% endif %} 51 | {%- endmacro %} 52 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/incremental/incremental.sql: -------------------------------------------------------------------------------- 1 | 2 | {% materialization incremental, adapter='oracle' -%} 3 | 4 | {% set unique_key = config.get('unique_key') %} 5 | {% set full_refresh_mode = flags.FULL_REFRESH %} 6 | 7 | {% set target_relation = this.incorporate(type='table') %} 8 | {% set existing_relation = load_relation(this) %} 9 | {% set tmp_relation = make_temp_relation(this) %} 10 | 11 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 12 | 13 | -- `BEGIN` happens here: 14 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 15 | 16 | {% set to_drop = [] %} 17 | {% if existing_relation is none %} 18 | {% 
set build_sql = create_table_as(False, target_relation, sql) %} 19 | {% elif existing_relation.is_view or full_refresh_mode %} 20 | {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #} 21 | {% set backup_identifier = existing_relation.identifier ~ "__dbt_backup" %} 22 | {% set backup_relation = existing_relation.incorporate(path={"identifier": backup_identifier}) %} 23 | {% do adapter.drop_relation(backup_relation) %} 24 | 25 | {% do adapter.rename_relation(target_relation, backup_relation) %} 26 | {% set build_sql = create_table_as(False, target_relation, sql) %} 27 | {% do to_drop.append(backup_relation) %} 28 | {% else %} 29 | {% set tmp_relation = make_temp_relation(target_relation) %} 30 | {% do to_drop.append(tmp_relation) %} 31 | {% do run_query(create_table_as(True, tmp_relation, sql)) %} 32 | {% do adapter.expand_target_column_types( 33 | from_relation=tmp_relation, 34 | to_relation=target_relation) %} 35 | {% set build_sql = oracle_incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %} 36 | {% endif %} 37 | 38 | {% call statement("main") %} 39 | {{ build_sql }} 40 | {% endcall %} 41 | 42 | {% do persist_docs(target_relation, model) %} 43 | 44 | {{ run_hooks(post_hooks, inside_transaction=True) }} 45 | 46 | -- `COMMIT` happens here 47 | {% do adapter.commit() %} 48 | 49 | {% for rel in to_drop %} 50 | {% do adapter.drop_relation(rel) %} 51 | {% endfor %} 52 | 53 | {{ run_hooks(post_hooks, inside_transaction=False) }} 54 | 55 | {{ return({'relations': [target_relation]}) }} 56 | 57 | {%- endmaterialization %} 58 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/seed/seed.sql: -------------------------------------------------------------------------------- 1 | {% macro oracle_basic_load_csv_rows(model, batch_size, agate_table) %} 2 | 3 | {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} 4 | 
{% set bindings = [] %} 5 | 6 | {% set statements = [] %} 7 | 8 | {% for chunk in agate_table.rows | batch(batch_size) %} 9 | {% set bindings = [] %} 10 | 11 | {% for row in chunk %} 12 | {% do bindings.extend(row) %} 13 | {% endfor %} 14 | 15 | {% set sql %} 16 | insert all 17 | {% for row in chunk -%} 18 | into {{ this.render() }} ({{ cols_sql }}) values( 19 | {%- for column in agate_table.column_names -%} 20 | :p{{ loop.index }} 21 | {%- if not loop.last%},{%- endif %} 22 | {%- endfor %}) 23 | {% endfor %} 24 | select * from dual 25 | {% endset %} 26 | 27 | {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} 28 | 29 | {% if loop.index0 == 0 %} 30 | {% do statements.append(sql) %} 31 | {% endif %} 32 | {% endfor %} 33 | 34 | {# Return SQL so we can render it out into the compiled files #} 35 | {{ return(statements[0]) }} 36 | {% endmacro %} 37 | 38 | {% macro oracle__load_csv_rows(model, agate_table) %} 39 | {{ return(oracle_basic_load_csv_rows(model, 100, agate_table) )}} 40 | {% endmacro %} 41 | 42 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/snapshot/snapshot.sql: -------------------------------------------------------------------------------- 1 | {# 2 | Add new columns to the table if applicable 3 | #} 4 | {% macro create_columns(relation, columns) %} 5 | {{ adapter.dispatch('create_columns')(relation, columns) }} 6 | {% endmacro %} 7 | 8 | {% macro default__create_columns(relation, columns) %} 9 | {% for column in columns %} 10 | {% call statement() %} 11 | alter table {{ relation }} add column "{{ column.name }}" {{ column.data_type }}; 12 | {% endcall %} 13 | {% endfor %} 14 | {% endmacro %} 15 | 16 | 17 | {% macro post_snapshot(staging_relation) %} 18 | {{ adapter.dispatch('post_snapshot')(staging_relation) }} 19 | {% endmacro %} 20 | 21 | {% macro default__post_snapshot(staging_relation) %} 22 | {# no-op #} 23 | {% endmacro %} 24 | 25 | 26 | {% macro 
snapshot_staging_table(strategy, source_sql, target_relation) -%} 27 | 28 | with snapshot_query as ( 29 | 30 | {{ source_sql }} 31 | 32 | ), 33 | 34 | snapshotted_data as ( 35 | 36 | select {{ target_relation }}.*, 37 | {{ strategy.unique_key }} as dbt_unique_key 38 | 39 | from {{ target_relation }} 40 | where dbt_valid_to is null 41 | 42 | ), 43 | 44 | insertions_source_data as ( 45 | 46 | select 47 | snapshot_query.*, 48 | {{ strategy.unique_key }} as dbt_unique_key, 49 | {{ strategy.updated_at }} as dbt_updated_at, 50 | {{ strategy.updated_at }} as dbt_valid_from, 51 | nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to, 52 | {{ strategy.scd_id }} as dbt_scd_id 53 | 54 | from snapshot_query 55 | ), 56 | 57 | updates_source_data as ( 58 | 59 | select 60 | snapshot_query.*, 61 | {{ strategy.unique_key }} as dbt_unique_key, 62 | {{ strategy.updated_at }} as dbt_updated_at, 63 | {{ strategy.updated_at }} as dbt_valid_from, 64 | {{ strategy.updated_at }} as dbt_valid_to 65 | 66 | from snapshot_query 67 | ), 68 | 69 | {%- if strategy.invalidate_hard_deletes %} 70 | 71 | deletes_source_data as ( 72 | 73 | select 74 | snapshot_query.*, 75 | {{ strategy.unique_key }} as dbt_unique_key 76 | from snapshot_query 77 | ), 78 | {% endif %} 79 | 80 | insertions as ( 81 | 82 | select 83 | 'insert' as dbt_change_type, 84 | source_data.* 85 | 86 | from insertions_source_data source_data 87 | left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key 88 | where snapshotted_data.dbt_unique_key is null 89 | or ( 90 | snapshotted_data.dbt_unique_key is not null 91 | and ( 92 | {{ strategy.row_changed }} 93 | ) 94 | ) 95 | 96 | ), 97 | 98 | updates as ( 99 | 100 | select 101 | 'update' as dbt_change_type, 102 | source_data.*, 103 | snapshotted_data.dbt_scd_id 104 | 105 | from updates_source_data source_data 106 | join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key 107 | where ( 108 | {{ 
strategy.row_changed }} 109 | ) 110 | ) 111 | 112 | {%- if strategy.invalidate_hard_deletes -%} 113 | , 114 | 115 | deletes as ( 116 | 117 | select 118 | 'delete' as dbt_change_type, 119 | source_data.*, 120 | {{ snapshot_get_time() }} as dbt_valid_from, 121 | {{ snapshot_get_time() }} as dbt_updated_at, 122 | {{ snapshot_get_time() }} as dbt_valid_to, 123 | snapshotted_data.dbt_scd_id 124 | 125 | from snapshotted_data 126 | left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key 127 | where source_data.dbt_unique_key is null 128 | ) 129 | {%- endif %} 130 | 131 | select * from insertions 132 | union all 133 | select * from updates 134 | {%- if strategy.invalidate_hard_deletes %} 135 | union all 136 | select * from deletes 137 | {%- endif %} 138 | 139 | {%- endmacro %} 140 | 141 | 142 | 143 | {% macro build_snapshot_table(strategy, sql) %} 144 | 145 | select sbq.*, 146 | {{ strategy.scd_id }} as dbt_scd_id, 147 | {{ strategy.updated_at }} as dbt_updated_at, 148 | {{ strategy.updated_at }} as dbt_valid_from, 149 | cast(nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as DATE) as dbt_valid_to 150 | from ( 151 | {{ sql }} 152 | ) sbq 153 | 154 | {% endmacro %} 155 | 156 | 157 | {% macro get_or_create_relation(database, schema, identifier, type) %} 158 | {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %} 159 | 160 | {% if target_relation %} 161 | {% do return([true, target_relation]) %} 162 | {% endif %} 163 | 164 | {%- set new_relation = api.Relation.create( 165 | database=database, 166 | schema=schema, 167 | identifier=identifier, 168 | type=type 169 | ) -%} 170 | {% do return([false, new_relation]) %} 171 | {% endmacro %} 172 | 173 | {% macro build_snapshot_staging_table(strategy, sql, target_relation) %} 174 | {% set tmp_relation = make_temp_relation(target_relation) %} 175 | 176 | {% set select = snapshot_staging_table(strategy, sql, 
target_relation) %} 177 | 178 | {% call statement('build_snapshot_staging_relation') %} 179 | {{ create_table_as(True, tmp_relation, select) }} 180 | {% endcall %} 181 | 182 | {% do return(tmp_relation) %} 183 | {% endmacro %} 184 | 185 | 186 | {% materialization snapshot, adapter='oracle' %} 187 | {%- set config = model['config'] -%} 188 | 189 | {%- set target_table = model.get('alias', model.get('name')) -%} 190 | 191 | {%- set strategy_name = config.get('strategy') -%} 192 | {%- set unique_key = config.get('unique_key') %} 193 | 194 | {% if not adapter.check_schema_exists(model.database, model.schema) %} 195 | {% do create_schema(model.database, model.schema) %} 196 | {% endif %} 197 | 198 | {% set target_relation_exists, target_relation = get_or_create_relation( 199 | database=model.database, 200 | schema=model.schema, 201 | identifier=target_table, 202 | type='table') -%} 203 | 204 | {%- if not target_relation.is_table -%} 205 | {% do exceptions.relation_wrong_type(target_relation, 'table') %} 206 | {%- endif -%} 207 | 208 | 209 | {{ run_hooks(pre_hooks, inside_transaction=False) }} 210 | 211 | {{ run_hooks(pre_hooks, inside_transaction=True) }} 212 | 213 | {% set strategy_macro = strategy_dispatch(strategy_name) %} 214 | {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", config, target_relation_exists) %} 215 | 216 | {% if not target_relation_exists %} 217 | 218 | {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %} 219 | {% set final_sql = create_table_as(False, target_relation, build_sql) %} 220 | 221 | {% else %} 222 | 223 | {{ adapter.valid_snapshot_target(target_relation) }} 224 | 225 | {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %} 226 | 227 | -- this may no-op if the database does not require column expansion 228 | {% do adapter.expand_target_column_types(from_relation=staging_table, 229 | to_relation=target_relation) %} 230 | 231 | {% set missing_columns = 
adapter.get_missing_columns(staging_table, target_relation) 232 | | rejectattr('name', 'equalto', 'dbt_change_type') 233 | | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') 234 | | rejectattr('name', 'equalto', 'dbt_unique_key') 235 | | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') 236 | | list %} 237 | 238 | {% do create_columns(target_relation, missing_columns) %} 239 | 240 | {% set source_columns = adapter.get_columns_in_relation(staging_table) 241 | | rejectattr('name', 'equalto', 'dbt_change_type') 242 | | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') 243 | | rejectattr('name', 'equalto', 'dbt_unique_key') 244 | | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') 245 | | list %} 246 | 247 | {% set quoted_source_columns = [] %} 248 | {% for column in source_columns %} 249 | {% do quoted_source_columns.append(column.name) %} 250 | {% endfor %} 251 | 252 | {% set final_sql = snapshot_merge_sql( 253 | target = target_relation, 254 | source = staging_table, 255 | insert_cols = quoted_source_columns 256 | ) 257 | %} 258 | 259 | {% endif %} 260 | 261 | {% call statement('main') %} 262 | {{ final_sql }} 263 | {% endcall %} 264 | 265 | {% do persist_docs(target_relation, model) %} 266 | 267 | {{ run_hooks(post_hooks, inside_transaction=True) }} 268 | 269 | {{ adapter.commit() }} 270 | 271 | {% if staging_table is defined %} 272 | {% do post_snapshot(staging_table) %} 273 | {% endif %} 274 | 275 | {{ run_hooks(post_hooks, inside_transaction=False) }} 276 | 277 | {{ return({'relations': [target_relation]}) }} 278 | 279 | {% endmaterialization %} 280 | 281 | {% macro oracle__snapshot_hash_arguments(args) -%} 282 | ORA_HASH({%- for arg in args -%} 283 | coalesce(cast({{ arg }} as varchar(50) ), '') 284 | {% if not loop.last %} || '|' || {% endif %} 285 | {%- endfor -%}) 286 | {%- endmacro %} 287 | 288 | {% macro oracle__snapshot_string_as_time(timestamp) -%} 289 | {%- set result = "TO_TIMESTAMP('"~ timestamp ~ "','yyyy/mm/dd hh24:mi:ss.FF')" -%} 290 | {{ return(result) 
}} 291 | {%- endmacro %} 292 | -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/snapshot/snapshot_merge.sql: -------------------------------------------------------------------------------- 1 | {% macro oracle__snapshot_merge_sql(target, source, insert_cols) -%} 2 | {%- set insert_cols_csv = [] -%} 3 | 4 | {% for column in insert_cols %} 5 | {% do insert_cols_csv.append("s." + column) %} 6 | {% endfor %} 7 | 8 | {%- set dest_cols_csv = [] -%} 9 | 10 | {% for column in insert_cols %} 11 | {% do dest_cols_csv.append("d." + column) %} 12 | {% endfor %} 13 | 14 | merge into {{ target }} d 15 | using {{ source }} s 16 | on (s.dbt_scd_id = d.dbt_scd_id) 17 | 18 | when matched 19 | then update 20 | set dbt_valid_to = s.dbt_valid_to 21 | where d.dbt_valid_to is null 22 | and s.dbt_change_type in ('update', 'delete') 23 | when not matched 24 | then insert ({{ dest_cols_csv | join(', ') }}) 25 | values ({{ insert_cols_csv | join(', ') }}) 26 | where s.dbt_change_type = 'insert' 27 | {% endmacro %} -------------------------------------------------------------------------------- /dbt/include/oracle/macros/materializations/snapshot/strategies.sql: -------------------------------------------------------------------------------- 1 | {% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} 2 | {% set check_cols_config = config['check_cols'] %} 3 | {% set primary_key = config['unique_key'] %} 4 | {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} 5 | 6 | {% set select_current_time -%} 7 | select {{ snapshot_get_time() }} FROM dual 8 | {%- endset %} 9 | 10 | {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #} 11 | {%- set now = run_query(select_current_time)[0][0] -%} 12 | {% if now is none or now is undefined -%} 13 | {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the 
database') -%} 14 | {%- endif %} 15 | {% set updated_at = snapshot_string_as_time(now) %} 16 | 17 | {% set column_added = false %} 18 | 19 | {% if check_cols_config == 'all' %} 20 | {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %} 21 | {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %} 22 | {% set check_cols = check_cols_config %} 23 | {% else %} 24 | {% do exceptions.raise_compiler_error("Invalid value for 'check_cols': " ~ check_cols_config) %} 25 | {% endif %} 26 | 27 | {%- set row_changed_expr -%} 28 | ( 29 | {%- if column_added -%} 30 | TRUE 31 | {%- else -%} 32 | {%- for col in check_cols -%} 33 | {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }} 34 | or 35 | ( 36 | (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null)) 37 | or 38 | ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null)) 39 | ) 40 | {%- if not loop.last %} or {% endif -%} 41 | {%- endfor -%} 42 | {%- endif -%} 43 | ) 44 | {%- endset %} 45 | 46 | {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} 47 | 48 | {% do return({ 49 | "unique_key": primary_key, 50 | "updated_at": updated_at, 51 | "row_changed": row_changed_expr, 52 | "scd_id": scd_id_expr, 53 | "invalidate_hard_deletes": invalidate_hard_deletes 54 | }) %} 55 | {% endmacro %} -------------------------------------------------------------------------------- /dbt/include/oracle/macros/schema_tests.sql: -------------------------------------------------------------------------------- 1 | 2 | {% macro oracle__test_accepted_values(model, values) %} 3 | 4 | {% set column_name = kwargs.get('column_name', kwargs.get('field')) %} 5 | {% set quote_values = kwargs.get('quote', True) %} 6 | 7 | with all_values as ( 8 | 9 | select distinct 10 | {{ column_name }} as value_field 11 | 12 | from {{ model.include(False, True, True) }} 13 | 14 | ), 15 | 16 | validation_errors as 
( 17 | 18 | select 19 | value_field 20 | 21 | from all_values 22 | where value_field not in ( 23 | {% for value in values -%} 24 | {% if quote_values -%} 25 | '{{ value }}' 26 | {%- else -%} 27 | {{ value }} 28 | {%- endif -%} 29 | {%- if not loop.last -%},{%- endif %} 30 | {%- endfor %} 31 | ) 32 | ) 33 | 34 | select count(*) 35 | from validation_errors 36 | 37 | {% endmacro %} 38 | 39 | {% macro oracle__test_not_null(model) %} 40 | 41 | {% set column_name = kwargs.get('column_name', kwargs.get('arg')) %} 42 | 43 | select count(*) 44 | from {{ model.include(False, True, True) }} 45 | where {{ column_name }} is null 46 | 47 | {% endmacro %} 48 | 49 | {% macro oracle__test_relationships(model, to, field) %} 50 | 51 | {% set column_name = kwargs.get('column_name', kwargs.get('from')) %} 52 | 53 | 54 | select count(*) as validation_errors 55 | from ( 56 | select {{ column_name }} as id from {{ model }} 57 | ) child 58 | left join ( 59 | select {{ field }} as id from {{ to }} 60 | ) parent on parent.id = child.id 61 | where child.id is not null 62 | and parent.id is null 63 | 64 | {% endmacro %} 65 | 66 | -------------------------------------------------------------------------------- /dbt_test_project/data/seed.csv: -------------------------------------------------------------------------------- 1 | id,first_name,last_name,email,gender,ip_address 2 | 1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 3 | 2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 4 | 3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 5 | 4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 6 | 5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 -------------------------------------------------------------------------------- /dbt_test_project/dbt_project.yml: -------------------------------------------------------------------------------- 1 | name: dbt_project 2 | config-version: 2 3 | version: 1.0 4 | profile: dbt_oracle_test 5 | 6 | quoting: 7 | database: false 8 | 
identifier: false 9 | schema: false 10 | 11 | on-run-start: 12 | - "select 'hook start' from dual" 13 | 14 | on-run-end: 15 | - "select 'hook ended' from dual" -------------------------------------------------------------------------------- /dbt_test_project/models/case_test/lower.sql: -------------------------------------------------------------------------------- 1 | select 2 | first_name as "name" 3 | from {{ref("person")}} -------------------------------------------------------------------------------- /dbt_test_project/models/case_test/select_from_lower.sql: -------------------------------------------------------------------------------- 1 | select 2 | "name" 3 | from {{ ref("lower")}} -------------------------------------------------------------------------------- /dbt_test_project/models/jobs.sql: -------------------------------------------------------------------------------- 1 | select * from {{ source('hr_database', 'employees') }} -------------------------------------------------------------------------------- /dbt_test_project/models/jobs_per_person.sql: -------------------------------------------------------------------------------- 1 | select p.employee_id, j.job_id, j.salary from {{ ref('jobs') }} j 2 | join {{ref('person')}} p 3 | on p.employee_id = j.employee_id -------------------------------------------------------------------------------- /dbt_test_project/models/p_table_with_cte.sql: -------------------------------------------------------------------------------- 1 | {{ config(materialized='table')}} 2 | with persons_filtered as( 3 | select * from {{ source('hr_database', 'employees') }} 4 | where employee_Id = 100 5 | ) 6 | select * from persons_filtered -------------------------------------------------------------------------------- /dbt_test_project/models/p_view_with_cte.sql: -------------------------------------------------------------------------------- 1 | {{config(materialized='view')}} 2 | with persons_filtered as( 3 | select * from {{ 
source('hr_database', 'employees') }} 4 | where employee_Id = 100 5 | ) 6 | select * from persons_filtered -------------------------------------------------------------------------------- /dbt_test_project/models/person.sql: -------------------------------------------------------------------------------- 1 | select * from {{ source('hr_database', 'employees') }} -------------------------------------------------------------------------------- /dbt_test_project/models/person_inc.sql: -------------------------------------------------------------------------------- 1 | {{ 2 | config( 3 | materialized='incremental' 4 | ) 5 | }} 6 | 7 | 8 | select * from {{ source('hr_database', 'employees') }} 9 | 10 | {% if is_incremental() %} 11 | 12 | where employee_Id > (select max(employee_Id) from {{ this }}) 13 | 14 | {% endif %} -------------------------------------------------------------------------------- /dbt_test_project/models/schema.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | sources: 4 | - name: hr_database 5 | schema: hr 6 | tables: 7 | - name: employees 8 | - name: jobs 9 | 10 | models: 11 | - name: table_relation 12 | columns: 13 | - name: id 14 | tests: 15 | - not_null 16 | - unique -------------------------------------------------------------------------------- /dbt_test_project/models/table_relation.sql: -------------------------------------------------------------------------------- 1 | {{config(materialized='view')}} 2 | select * from {{ ref('seed') }} -------------------------------------------------------------------------------- /dbt_test_project/oracle_tns/network/admin/tnsnames.ora: -------------------------------------------------------------------------------- 1 | xe = 2 | (DESCRIPTION = 3 | (ADDRESS = (PROTOCOL = TCP)(HOST = localhost)(PORT = 1522)) 4 | (CONNECT_DATA = 5 | (SERVER = DEDICATED) 6 | (SERVICE_NAME = xe) 7 | ) 8 | ) 
-------------------------------------------------------------------------------- /dbt_test_project/packages.yml: -------------------------------------------------------------------------------- 1 | packages: 2 | - git: https://github.com/fishtown-analytics/dbt-utils.git -------------------------------------------------------------------------------- /dbt_test_project/profiles.yml: -------------------------------------------------------------------------------- 1 | dbt_oracle_test: 2 | target: dev 3 | outputs: 4 | dev: 5 | type: oracle 6 | user: dbt_test 7 | pass: dbt_test 8 | dbname: xe 9 | schema: dbt_test 10 | threads: 4 -------------------------------------------------------------------------------- /dbt_test_project/test/test_count_employees.sql: -------------------------------------------------------------------------------- 1 | SELECT * FROM ( 2 | select count(*) as count from {{ref('table_relation')}} 3 | ) c WHERE c.count != 5 -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = python -msphinx 7 | SPHINXPROJ = oracle_dbt 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # 3 | # oracle_dbt documentation build configuration file, created by 4 | # sphinx-quickstart on Fri Jun 9 13:47:02 2017. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another 16 | # directory, add these directories to sys.path here. If the directory is 17 | # relative to the documentation root, use os.path.abspath to make it 18 | # absolute, like shown here. 19 | # 20 | import os 21 | import sys 22 | sys.path.insert(0, os.path.abspath('..')) 23 | 24 | import oracle_dbt 25 | 26 | # -- General configuration --------------------------------------------- 27 | 28 | # If your documentation needs a minimal Sphinx version, state it here. 29 | # 30 | # needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 34 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] 35 | 36 | # Add any paths that contain templates here, relative to this directory. 37 | templates_path = ['_templates'] 38 | 39 | # The suffix(es) of source filenames. 40 | # You can specify multiple suffix as a list of string: 41 | # 42 | # source_suffix = ['.rst', '.md'] 43 | source_suffix = '.rst' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # General information about the project. 
49 | project = 'Oracle DBT' 50 | copyright = "2020, Vitor Avancini" 51 | author = "Vitor Avancini" 52 | 53 | # The version info for the project you're documenting, acts as replacement 54 | # for |version| and |release|, also used in various other places throughout 55 | # the built documents. 56 | # 57 | # The short X.Y version. 58 | version = oracle_dbt.__version__ 59 | # The full version, including alpha/beta/rc tags. 60 | release = oracle_dbt.__version__ 61 | 62 | # The language for content autogenerated by Sphinx. Refer to documentation 63 | # for a list of supported languages. 64 | # 65 | # This is also used if you do content translation via gettext catalogs. 66 | # Usually you set "language" from the command line for these cases. 67 | language = None 68 | 69 | # List of patterns, relative to source directory, that match files and 70 | # directories to ignore when looking for source files. 71 | # This patterns also effect to html_static_path and html_extra_path 72 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 73 | 74 | # The name of the Pygments (syntax highlighting) style to use. 75 | pygments_style = 'sphinx' 76 | 77 | # If true, `todo` and `todoList` produce output, else they produce nothing. 78 | todo_include_todos = False 79 | 80 | 81 | # -- Options for HTML output ------------------------------------------- 82 | 83 | # The theme to use for HTML and HTML Help pages. See the documentation for 84 | # a list of builtin themes. 85 | # 86 | html_theme = 'alabaster' 87 | 88 | # Theme options are theme-specific and customize the look and feel of a 89 | # theme further. For a list of options available for each theme, see the 90 | # documentation. 91 | # 92 | # html_theme_options = {} 93 | 94 | # Add any paths that contain custom static files (such as style sheets) here, 95 | # relative to this directory. They are copied after the builtin static files, 96 | # so a file named "default.css" will overwrite the builtin "default.css". 
97 | html_static_path = ['_static'] 98 | 99 | 100 | # -- Options for HTMLHelp output --------------------------------------- 101 | 102 | # Output file base name for HTML help builder. 103 | htmlhelp_basename = 'oracle_dbtdoc' 104 | 105 | 106 | # -- Options for LaTeX output ------------------------------------------ 107 | 108 | latex_elements = { 109 | # The paper size ('letterpaper' or 'a4paper'). 110 | # 111 | # 'papersize': 'letterpaper', 112 | 113 | # The font size ('10pt', '11pt' or '12pt'). 114 | # 115 | # 'pointsize': '10pt', 116 | 117 | # Additional stuff for the LaTeX preamble. 118 | # 119 | # 'preamble': '', 120 | 121 | # Latex figure (float) alignment 122 | # 123 | # 'figure_align': 'htbp', 124 | } 125 | 126 | # Grouping the document tree into LaTeX files. List of tuples 127 | # (source start file, target name, title, author, documentclass 128 | # [howto, manual, or own class]). 129 | latex_documents = [ 130 | (master_doc, 'oracle_dbt.tex', 131 | 'Oracle DBT Documentation', 132 | 'Vitor Avancini', 'manual'), 133 | ] 134 | 135 | 136 | # -- Options for manual page output ------------------------------------ 137 | 138 | # One entry per manual page. List of tuples 139 | # (source start file, name, description, authors, manual section). 140 | man_pages = [ 141 | (master_doc, 'oracle_dbt', 142 | 'Oracle DBT Documentation', 143 | [author], 1) 144 | ] 145 | 146 | 147 | # -- Options for Texinfo output ---------------------------------------- 148 | 149 | # Grouping the document tree into Texinfo files. 
List of tuples 150 | # (source start file, target name, title, author, 151 | # dir menu entry, description, category) 152 | texinfo_documents = [ 153 | (master_doc, 'oracle_dbt', 154 | 'Oracle DBT Documentation', 155 | author, 156 | 'oracle_dbt', 157 | 'One line description of project.', 158 | 'Miscellaneous'), 159 | ] 160 | 161 | 162 | 163 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../CONTRIBUTING.rst 2 | -------------------------------------------------------------------------------- /docs/history.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../HISTORY.rst 2 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Oracle DBT's documentation! 2 | ====================================== 3 | 4 | .. toctree:: 5 | :maxdepth: 2 6 | :caption: Contents: 7 | 8 | readme 9 | installation 10 | usage 11 | modules 12 | contributing 13 | history 14 | 15 | Indices and tables 16 | ================== 17 | * :ref:`genindex` 18 | * :ref:`modindex` 19 | * :ref:`search` 20 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. highlight:: shell 2 | 3 | ============ 4 | Installation 5 | ============ 6 | 7 | 8 | Stable release 9 | -------------- 10 | 11 | To install Oracle DBT, run this command in your terminal: 12 | 13 | .. code-block:: console 14 | 15 | $ pip install oracle_dbt 16 | 17 | This is the preferred method to install Oracle DBT, as it will always install the most recent stable release. 
18 | 19 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide 20 | you through the process. 21 | 22 | .. _pip: https://pip.pypa.io 23 | .. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ 24 | 25 | 26 | From sources 27 | ------------ 28 | 29 | The sources for Oracle DBT can be downloaded from the `Github repo`_. 30 | 31 | You can either clone the public repository: 32 | 33 | .. code-block:: console 34 | 35 | $ git clone git://github.com/vitoravancini/oracle_dbt 36 | 37 | Or download the `tarball`_: 38 | 39 | .. code-block:: console 40 | 41 | $ curl -OJL https://github.com/vitoravancini/oracle_dbt/tarball/master 42 | 43 | Once you have a copy of the source, you can install it with: 44 | 45 | .. code-block:: console 46 | 47 | $ python setup.py install 48 | 49 | 50 | .. _Github repo: https://github.com/vitoravancini/oracle_dbt 51 | .. _tarball: https://github.com/vitoravancini/oracle_dbt/tarball/master 52 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=python -msphinx 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | set SPHINXPROJ=oracle_dbt 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The Sphinx module was not found. Make sure you have Sphinx installed, 20 | echo.then set the SPHINXBUILD environment variable to point to the full 21 | echo.path of the 'sphinx-build' executable. Alternatively you may add the 22 | echo.Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/readme.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | -------------------------------------------------------------------------------- /docs/usage.rst: -------------------------------------------------------------------------------- 1 | ===== 2 | Usage 3 | ===== 4 | 5 | To use Oracle DBT in a project:: 6 | 7 | import oracle_dbt 8 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | pip==19.2.3 2 | bump2version==0.5.11 3 | wheel==0.33.6 4 | watchdog==0.9.0 5 | flake8==3.7.8 6 | tox==3.14.0 7 | coverage==4.5.4 8 | Sphinx==1.8.5 9 | twine==1.14.0 10 | rst 11 | rst.linker 12 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.1.0 3 | commit = True 4 | tag = True 5 | 6 | [bumpversion:file:setup.py] 7 | search = version='{current_version}' 8 | replace = version='{new_version}' 9 | 10 | [bumpversion:file:oracle_dbt/__init__.py] 11 | search = __version__ = '{current_version}' 12 | replace = __version__ = '{new_version}' 13 | 14 | [bdist_wheel] 15 | universal = 1 16 | 17 | [flake8] 18 | exclude = docs 19 | 20 | [aliases] 21 | # Define setup.py command aliases here 22 | 23 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | """The setup script.""" 4 | 5 | from setuptools import setup, find_packages 6 | 7 | with open('README.rst') as readme_file: 8 | readme = readme_file.read() 9 | 10 | with open('HISTORY.rst') as history_file: 11 | history = history_file.read() 12 | 13 | requirements = [ 14 | 'dbt-core~=0.19.0', 15 | 'cx_Oracle==7.3.0' 16 | ] 17 | 18 | setup_requirements = [] 19 | 20 | test_requirements = [ 21 | 'pytest-dbt-adapter==0.4.0' 22 | ] 23 | 24 | setup( 25 | author="Indicium Tech", 26 | author_email='vitor.avancini@indicium.tech', 27 | python_requires='>=3.5', 28 | classifiers=[ 29 | 'Development Status :: 2 - Pre-Alpha', 30 | 'Intended Audience :: Developers', 31 | 'License :: OSI Approved :: Apache Software License', 32 | 'Natural Language :: English', 33 | 'Programming Language :: Python :: 3', 34 | 'Programming Language :: Python :: 3.5', 35 | 'Programming Language :: Python :: 3.6', 36 | 'Programming Language :: Python :: 3.7', 37 | 'Programming Language :: Python :: 3.8', 38 | ], 39 | description="An Oracle DBT Adapater", 40 | install_requires=requirements, 41 | license="Apache Software License 2.0", 42 | long_description=readme + '\n\n' + history, 43 | include_package_data=True, 44 | keywords='dbt-oracle', 45 | name='dbt-oracle', 46 | packages=find_packages(), 47 | setup_requires=setup_requirements, 48 | test_suite='tests', 49 | tests_require=test_requirements, 50 | url='https://github.com/techindicium/dbt-oracle', 51 | version='0.4.3', 52 | zip_safe=False, 53 | package_data={ 54 | 'dbt': [ 55 | 'include/oracle/dbt_project.yml', 56 | 'include/oracle/macros/*.sql', 57 | 'include/oracle/macros/**/**/*.sql' 58 | ] 59 | } 60 | ) 61 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Unit test package for oracle_dbt.""" 2 | 
-------------------------------------------------------------------------------- /tests/oracle.dbtspec: -------------------------------------------------------------------------------- 1 | target: 2 | type: oracle 3 | host: localhost 4 | user: dbt_test 5 | pass: dbt_test 6 | database: xe 7 | schema: dbt_test 8 | port: 1522 9 | threads: 1 10 | sequences: 11 | test_dbt_empty: empty 12 | test_dbt_base: base 13 | test_dbt_ephemeral: ephemeral 14 | test_dbt_incremental: incremental 15 | test_dbt_snapshot_strategy_timestamp: snapshot_strategy_timestamp 16 | test_dbt_snapshot_strategy_check_cols: snapshot_strategy_check_cols 17 | test_dbt_schema_test: schema_test -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | # dbt imports 4 | from dbt.clients.yaml_helper import load_yaml_text 5 | import dbt.config 6 | from dbt.context.base import generate_base_context 7 | 8 | # dbt-oracle imports 9 | from dbt.adapters.oracle import OracleAdapterCredentials 10 | from dbt.adapters.oracle.connections import OracleConnectionMethod 11 | 12 | def get_credentials(profile_yml): 13 | "Render a YAML string profiles.yml into credentials" 14 | dicty_thing = load_yaml_text(profile_yml) 15 | renderer = dbt.config.renderer.ProfileRenderer(generate_base_context({})) 16 | profile = dbt.config.Profile.from_raw_profiles( 17 | dicty_thing, 'default', renderer 18 | ) 19 | return profile.credentials 20 | 21 | # Define data 22 | SCENARIOS = { 23 | "host": { 24 | "method": OracleConnectionMethod.HOST, 25 | "profile": """ 26 | default: 27 | target: target 28 | outputs: 29 | target: 30 | type: oracle 31 | host: localhost 32 | user: dbt_test 33 | pass: dbt_test 34 | database: xe 35 | schema: dbt_test 36 | port: 1522 37 | threads: 1 38 | """, 39 | "dsn": "localhost:1522/xe", 40 | }, 41 | "host_service": { 42 | "method": OracleConnectionMethod.HOST, 43 | 
"profile": """ 44 | default: 45 | target: target 46 | outputs: 47 | target: 48 | type: oracle 49 | host: localhost 50 | user: dbt_test 51 | pass: dbt_test 52 | database: xe 53 | service: xe_ha.host.tld 54 | schema: dbt_test 55 | port: 1522 56 | threads: 1 57 | """, 58 | "dsn": "localhost:1522/xe_ha.host.tld", 59 | }, 60 | "tns": { 61 | "method": OracleConnectionMethod.TNS, 62 | "profile": """ 63 | default: 64 | target: target 65 | outputs: 66 | target: 67 | type: oracle 68 | user: dbt_test 69 | pass: dbt_test 70 | database: xe 71 | schema: dbt_test 72 | port: 1522 73 | threads: 1 74 | """, 75 | "dsn": "xe", 76 | }, 77 | "connection_string": { 78 | "method": OracleConnectionMethod.CONNECTION_STRING, 79 | "profile": """ 80 | default: 81 | target: target 82 | outputs: 83 | target: 84 | type: oracle 85 | host: localhost 86 | user: dbt_test 87 | pass: dbt_test 88 | database: xe 89 | connection_string: "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=localhost)(PORT=1522))(CONNECT_DATA=(SERVICE_NAME=xe)))" 90 | schema: dbt_test 91 | port: 1522 92 | threads: 1 93 | """, 94 | "dsn": "(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=localhost)(PORT=1522))(CONNECT_DATA=(SERVICE_NAME=xe)))", 95 | }, 96 | } 97 | 98 | @pytest.fixture(scope="module", params=SCENARIOS.keys()) 99 | def scenario(request): 100 | return SCENARIOS[request.param] 101 | 102 | def test_oracle_credentials(scenario): 103 | for method, parameters in SCENARIOS.items(): 104 | credentials = get_credentials(scenario["profile"]) 105 | assert credentials.connection_method() == scenario["method"] 106 | assert credentials.get_dsn() == scenario["dsn"] 107 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py35, py36, py37, py38, flake8 3 | 4 | [travis] 5 | python = 6 | 3.8: py38 7 | 3.7: py37 8 | 3.6: py36 9 | 3.5: py35 10 | 11 | [testenv:flake8] 12 | basepython = python 13 | deps = 
flake8 14 | commands = flake8 oracle_dbt tests 15 | 16 | [testenv] 17 | setenv = 18 | PYTHONPATH = {toxinidir} 19 | 20 | commands = python setup.py test 21 | --------------------------------------------------------------------------------