├── .bumpversion.cfg ├── .github ├── dco.yml ├── scripts │ └── runtests.py └── workflows │ ├── lint.yml │ ├── pythonpackage.yml │ └── release.yml ├── .gitignore ├── .readthedocs.yml ├── CHANGES ├── LICENSE ├── README.md ├── contributing └── CONTRIBUTING.md ├── docs ├── Makefile ├── conf.py ├── index.rst ├── make.bat └── requirements.txt ├── pyproject.toml ├── samples ├── Core.py └── ORM.py ├── setup.cfg ├── sqlalchemy_ibmi ├── __init__.py ├── base.py ├── constants.py └── requirements.py └── test ├── __init__.py ├── conftest.py ├── test_cache.py ├── test_suite.py └── util.py /.bumpversion.cfg: -------------------------------------------------------------------------------- 1 | [bumpversion] 2 | current_version = 0.9.4-dev 3 | commit = True 4 | tag = True 5 | tag_name = {new_version} 6 | parse = (?P\d+)\.(?P\d+)\.(?P\d+)(\-(?P[a-z]+))? 7 | serialize = 8 | {major}.{minor}.{patch}-{release} 9 | {major}.{minor}.{patch} 10 | 11 | [bumpversion:file:sqlalchemy_ibmi/__init__.py] 12 | 13 | [bumpversion:file:pyproject.toml] 14 | 15 | [bumpversion:part:release] 16 | optional_value = release 17 | values = 18 | dev 19 | rc 20 | release 21 | 22 | -------------------------------------------------------------------------------- /.github/dco.yml: -------------------------------------------------------------------------------- 1 | require: 2 | members: false 3 | -------------------------------------------------------------------------------- /.github/scripts/runtests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import pyodbc 5 | import pytest 6 | 7 | host = os.environ['IBMI_HOSTNAME'] 8 | uid = os.environ['IBMI_USERNAME'] 9 | pwd = os.environ['IBMI_PASSWORD'] 10 | 11 | URI = "ibmi://{uid}:{pwd}@{host}/".format( 12 | host=host, 13 | uid=uid, 14 | pwd=pwd, 15 | ) 16 | 17 | # Maybe can use a different schema in the future? 
18 | schema = uid 19 | 20 | # allocate an object to serialize access to the test schemas 21 | # without this, the tests will step on each other 22 | # we can't use different schemas because some of the tests use the 23 | # hard-coded test_schema schema 24 | print("Connecting to the remote system", flush=True) 25 | conn = pyodbc.connect( 26 | "Driver=IBM i Access ODBC Driver", 27 | system=host, 28 | user=uid, 29 | password=pwd, 30 | ) 31 | cur = conn.cursor() 32 | 33 | print("Attempting to lock the mutex", flush=True) 34 | lock = "ALCOBJ OBJ(({schema}/CI_MUTEX *DTAARA *EXCL)) WAIT(30)".format(schema=schema) 35 | while True: 36 | try: 37 | cur.execute("CALL QSYS2.QCMDEXC(?)", [lock]) 38 | break 39 | except pyodbc.Error as e: 40 | if 'CPF1002' not in e.args[1]: 41 | raise e 42 | 43 | print("Mutex locked, running tests", flush=True) 44 | rc = pytest.main(["--dburi", URI]) 45 | 46 | print("Unlocking mutex", flush=True) 47 | unlock = "DLCOBJ OBJ(({schema}/CI_MUTEX *DTAARA *EXCL))".format(schema=schema) 48 | cur.execute("CALL QSYS2.QCMDEXC(?)", [unlock]) 49 | 50 | exit(rc) 51 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint PR 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | push: 8 | branches: 9 | - main 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v1 16 | - name: Set up Python 17 | uses: actions/setup-python@v1 18 | with: 19 | python-version: '3.9' 20 | - name: Install dependencies 21 | run: | 22 | pip install poetry 23 | poetry config virtualenvs.create false 24 | poetry install 25 | - name: Lint with black 26 | run: | 27 | # Stop the build if there are formatting issues and print out a diff 28 | black --check --diff sqlalchemy_ibmi test 29 | - name: Lint with flake8 30 | run: | 31 | # stop the build if there are Python syntax errors or undefined names 
32 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 33 | # exit-zero treats all errors as warnings. Use line length of 88 for consistency with black 34 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=88 --statistics 35 | -------------------------------------------------------------------------------- /.github/workflows/pythonpackage.yml: -------------------------------------------------------------------------------- 1 | name: Build PR 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | build: 10 | 11 | runs-on: ubuntu-latest 12 | strategy: 13 | max-parallel: 4 14 | fail-fast: false 15 | matrix: 16 | python-version: 17 | - '3.11' 18 | - '3.10' 19 | - '3.9' 20 | - '3.8' 21 | - '3.7' 22 | 23 | steps: 24 | - uses: actions/checkout@v1 25 | - name: Set up Python ${{ matrix.python-version }} 26 | uses: actions/setup-python@v1 27 | with: 28 | python-version: ${{ matrix.python-version }} 29 | - name: Add ODBC repo 30 | run: | 31 | wget https://public.dhe.ibm.com/software/ibmi/products/odbc/debs/dists/1.1.0/ibmi-acs-1.1.0.list 32 | sudo mv ibmi-acs-1.1.0.list /etc/apt/sources.list.d 33 | sudo apt update 34 | - name: Install unixodbc and driver on Linux 35 | run: sudo apt install unixodbc-dev ibm-iaccess 36 | - name: Install dependencies 37 | run: | 38 | pip install poetry 39 | poetry config virtualenvs.create false 40 | poetry install 41 | # Our test system is currently unavailable, so disable this for now 42 | #- name: Test with pytest 43 | # run: | 44 | # .github/scripts/runtests.py 45 | # env: 46 | # IBMI_HOSTNAME: ${{ secrets.IBMI_HOSTNAME }} 47 | # IBMI_USERNAME: ${{ secrets.IBMI_USERNAME }} 48 | # IBMI_PASSWORD: ${{ secrets.IBMI_PASSWORD }} 49 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*.*.*' 7 | 8 | 
jobs: 9 | Release: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout code 14 | uses: actions/checkout@v2 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v2 18 | with: 19 | python-version: "3.9" 20 | 21 | - name: Install Poetry 22 | run: | 23 | curl -sSL https://install.python-poetry.org | python - -y --version 1.6.1 24 | 25 | - name: Update PATH 26 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 27 | 28 | - name: Build project for distribution 29 | run: poetry build 30 | 31 | - name: Create Release 32 | uses: softprops/action-gh-release@v1 33 | with: 34 | files: "dist/*" 35 | generate_release_notes: true 36 | 37 | - name: Publish to PyPI 38 | env: 39 | POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} 40 | run: poetry publish 41 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | *.orig 4 | build 5 | tmp 6 | dist 7 | .venv 8 | *.egg-info/ 9 | .coverage 10 | .idea/ 11 | docs/_* 12 | poetry.lock 13 | cwbdetail*.csv 14 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | build: 9 | os: "ubuntu-22.04" 10 | tools: 11 | python: "3.9" 12 | 13 | # Build documentation in the docs/ directory with Sphinx 14 | sphinx: 15 | configuration: docs/conf.py 16 | 17 | # Optionally set the version of Python and requirements required to build your docs 18 | python: 19 | install: 20 | - requirements: docs/requirements.txt 21 | -------------------------------------------------------------------------------- /CHANGES: -------------------------------------------------------------------------------- 1 | 
CHANGES 2 | ======= 3 | 2019/05/30 4 | - Added fix for missing "CURRENT ISOLATION" register 5 | - Fixed Autocommit not working for pyodbc 6 | - Fixed NameError: name 'asbool' is not defined python 7 | 8 | 2016/08/29 9 | - Fixed multiple defects mentioned below 10 | - Add documentation on alchemy url for connection over ssl 11 | - DB2 on AS400: An unexpected token "ISOLATION" was found on ibm_db_sa/ibm_db.py 12 | - Getting AttributeError for AS400 13 | - name 'unicode' is not defined 14 | - AttributeError when using pyodbc 15 | - add capability to the driver to generate query with literals, compile_kwargs={"literal_binds": True} 16 | 17 | 2016/08/30 18 | - Added Support for Python 3.x 19 | 20 | 2014/10/20 (IBM_DB_SA adapter 0.3.2) 21 | - Added SSL support 22 | - Added get_incoming_foreign_keys functionality with reflector 23 | - Added get_unique_constraints reflection feature 24 | - Added exist() unary operator support within select clause 25 | - Fixed incompatible issue of sql.true() for SQLAlchemy v0.7.x & 0.8.x 26 | - Fixed add_constraint incompatible issue with SQLAlchemy-0.9.x 27 | - Fixed reflection function get_indexes to not return the unique constraint participating index 28 | 29 | 2014/03/26 (IBM_DB_SA adapter 0.3.1) 30 | - Handle Double Type in DDL Generator 31 | - Translating 'update' and 'read' lock-mode with DB2 compatible SQL 32 | - Added Stored procedure with outparam support in ibm_db_sa dialect 33 | - Convert nullable unique constraint to unique index exclude nulls for DB2 10.5 34 | - Fix to detect invalid connection 35 | - Added support for CHAR_LENGTH function 36 | - Fix drop index implementation incompatibility with SQLAlchemy-0.8.x onwards 37 | - Add/Fix support for zxjdbc for both IBM DB LUW and AS/400 38 | - Add/Fix support for PyODBC for both IBM DB LUW and AS/400 39 | - Fix reflection for get_lastrowid 40 | 41 | 2013/03/01 (IBM_DB_SA adapter 0.3.0) 42 | - Add support for LIMIT/OFFSET 43 | - Add support for savepoints 44 | - Add 
support for double-precision floating-point number 45 | - Fixed reflection for get_view_names and get_view_definition 46 | 47 | 2013/02/06 48 | - Add support for SQLAlchemy 0.7/0.8 49 | - Refactor code layout 50 | - Now supporting "db2://" scheme as well as 51 | "ibm_db://" for backwards compatibility 52 | - Add/fix support for explicit sequences 53 | 54 | 2011/09/27 (IBM_DB_SA adapter 0.2.1): 55 | - fix reflection problem 56 | - support alternate DB2 LUW connection via PyODBC 57 | - support alternate DB2 i5/OS (iSeries) via PyODBC 58 | - support alternate DB2 i5/OS (iSeries) via ZxJDBC (Jython) 59 | 60 | 2011/08/28 (IBM_DB_SA adapter 0.2.0): 61 | - Support of SQLAlchemy 0.6/0.7 62 | - Add Jython support 63 | 64 | 2008/11/06 (IBM_DB_SA adapter 0.1.6): 65 | - fixed Metadata not loading any table info (defect #158705) 66 | - fixed problems while using different schema names (defect #163785) 67 | - fixed keyerror in length in visit_function (defect #166292) 68 | 69 | 2008/03/28 (IBM_DB_SA adapter 0.1.5): 70 | - fixed BIGINT driver return issue #5 (defect #150638) 71 | - fixed autocommit default issue #6 (defect #156919) 72 | - fixed _get_exception() tuple issue #8 (defect #156925) 73 | - fixed create_engine DSN support issue (defect #156930) 74 | 75 | 2008/02/15 (IBM_DB_SA adapter 0.1.1): 76 | - fixed .egg setup loading issue #1 (defect #154259) 77 | 78 | 2008/02/08 (IBM_DB_SA adapter 0.1.0): 79 | - initial alpha release 80 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. 
10 | 11 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 12 | 13 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 14 | 15 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 16 | 17 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 18 | 19 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 20 | 21 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 22 | 23 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
24 | 25 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 26 | 27 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 28 | 29 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 30 | 31 | 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 32 | 33 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: 34 | 35 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and 36 | 37 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and 38 | 39 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and 40 | 41 | 4. 
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. 42 | 43 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 44 | 45 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 46 | 47 | 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 48 | 49 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 50 | 51 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 52 | 53 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. 
However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 54 | 55 | END OF TERMS AND CONDITIONS 56 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SQLAlchemy adapter for IBM i 2 | 3 | [![Latest version released on PyPi](https://img.shields.io/pypi/v/sqlalchemy-ibmi.svg)](https://pypi.org/project/sqlalchemy-ibmi) 4 | ![Supported Python Version Badge](https://img.shields.io/pypi/pyversions/sqlalchemy-ibmi.svg) 5 | [![Documentation Status](https://readthedocs.org/projects/sqlalchemy-ibmi/badge/?version=latest)](https://sqlalchemy-ibmi.readthedocs.io/en/latest/?badge=latest) 6 | 7 | The IBM i SQLAlchemy adapter provides an [SQLAlchemy](https://www.sqlalchemy.org/) 8 | interface to Db2 for [IBM i](https://en.wikipedia.org/wiki/IBM_i). 9 | 10 | **Please note that this project is still under active development. Please 11 | report any bugs in the issue tracker** :rotating_light: 12 | 13 | ## Requirements 14 | 15 | ### SQLAlchemy 16 | 17 | | SQLAlchemy Version | Supported | Notes | 18 | |--------------------|-----------|-------------------------------------------------| 19 | | SQLAlchemy 1.3 | ✅ | Most tested. | 20 | | SQLAlchemy 1.4 | ✅ | Preliminary support added in 0.9.3. | 21 | | SQLAlchemy 2.0+ | ❌ | Currently not supported, but planned for 0.9.4. | 22 | 23 | ### Python 24 | 25 | Python 3.7 - 3.11 are supported. Support for Python 3.12 and 26 | up is [currently broken](https://github.com/IBM/sqlalchemy-ibmi/issues/149). 
27 | 28 | ### IBM i Access ODBC Driver 29 | 30 | It is best to use the latest version of the driver, which is currently available in the 31 | IBM i Access Client Solutions Application Package 1.1.0.27. 32 | 33 | Some options may require certain minimum driver versions to be enabled. Because the 34 | driver ignores any unknown options, using them on older driver versions will not cause 35 | an error but instead be silently ignored. 36 | 37 | | Connection Option | Required Version | 38 | |--------------------|-------------------| 39 | | `trim_char_fields` | 1.1.0.25 | 40 | 41 | ### IBM i 42 | 43 | This adapter is only tested against IBM i 7.3 and up. It may support older IBM i 44 | releases, but no support is guaranteed. 45 | 46 | ## Installation 47 | 48 | ```sh 49 | pip install sqlalchemy-ibmi 50 | ``` 51 | 52 | ## Getting Started 53 | 54 | You will need to have the [IBM i Access ODBC Driver](https://www.ibm.com/support/pages/ibm-i-access-client-solutions) 55 | installed in order to use this adapter. Please read 56 | [these docs](https://ibmi-oss-docs.readthedocs.io/en/latest/odbc/installation.html) 57 | for the simplest way to install for your platform. 58 | 59 | ```python 60 | import sqlalchemy as sa 61 | engine = sa.create_engine("ibmi://user:password@host.example.com") 62 | 63 | cnxn = engine.connect() 64 | metadata = sa.MetaData() 65 | table = sa.Table('table_name', metadata, autoload=True, autoload_with=engine) 66 | 67 | query = sa.select([table]) 68 | 69 | result = cnxn.execute(query) 70 | result = result.fetchall() 71 | 72 | # print first entry 73 | print(result[0]) 74 | ``` 75 | 76 | For more details on connection options, check 77 | [our docs](https://sqlalchemy-ibmi.readthedocs.io/en/latest#connection-arguments) 78 | 79 | If you're new to SQLAlchemy, please refer to the 80 | [SQLAlchemy Unified Tutorial](https://docs.sqlalchemy.org/en/14/tutorial/index.html). 
81 | 82 | ## Documentation 83 | 84 | The documentation for the SQLAlchemy adapter for IBM i can be found at: <https://sqlalchemy-ibmi.readthedocs.io/> 85 | 86 | 87 | ## Known Limitations 88 | 89 | 1) Non-standard SQL queries are not supported. e.g. "SELECT ? FROM TAB1" 90 | 2) For updates involving primary/foreign key references, the entries should be made in the correct order. The integrity check is always on, and thus the primary keys referenced by the foreign keys in the referencing tables should always exist in the parent table. 91 | 3) Unique keys which contain a nullable column are not supported 92 | 4) UPDATE CASCADE for foreign keys is not supported 93 | 5) DEFERRABLE INITIALLY DEFERRED is not supported 94 | 6) Subqueries in the ON clause of a LEFT OUTER JOIN are not supported 95 | 96 | ## Contributing to the IBM i SQLAlchemy adapter 97 | 98 | Please read the [contribution guidelines](contributing/CONTRIBUTING.md). 99 | 100 | ```text 101 | The developer sign-off should include the reference to the DCO in remarks (example below): 102 | DCO 1.1 Signed-off-by: Random J Developer 103 | ``` 104 | 105 | ## Releasing a New Version 106 | 107 | ```sh 108 | # checkout and pull the latest code from main 109 | git checkout main 110 | git pull 111 | 112 | # bump to a release version (a tag and commit are made) 113 | bumpversion release 114 | 115 | # To skip a release candidate 116 | bumpversion --no-tag --no-commit release 117 | bumpversion --allow-dirty release 118 | 119 | # bump to the new dev version (a commit is made) 120 | bumpversion --no-tag patch 121 | 122 | # push the new tag and commits 123 | git push origin main --tags 124 | ``` 125 | 126 | ## License 127 | 128 | [Apache 2.0](LICENSE) 129 | 130 | ## Credits 131 | 132 | - ibm_db_sa for SQLAlchemy was first produced by IBM Inc., targeting version 0.4. 133 | - The library was ported for version 0.6 and 0.7 by Jaimy Azle. 134 | - Port for version 0.8 and modernization of test suite by Mike Bayer. 135 | - Port for sqlalchemy-ibmi by Naveen Ram/Kevin Adler. 
136 | -------------------------------------------------------------------------------- /contributing/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Contributing in General 4 | 5 | Our project welcomes external contributions. If you see something that you want to change, please feel free to. 6 | 7 | To contribute code or documentation, please submit a [pull request](https://github.com/IBM/sqlalchemy-ibmi/pulls). 8 | 9 | A good way to familiarize yourself with the codebase and contribution process is 10 | to look for and tackle low-hanging fruit in the [issue tracker](https://github.com/IBM/sqlalchemy-ibmi/issues). 11 | These will be marked with the [good first issue](https://github.com/IBM/sqlalchemy-ibmi/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) label. 12 | You may also want to look at those marked with [help wanted](https://github.com/IBM/sqlalchemy-ibmi/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22). 13 | 14 | **Note: We appreciate your effort, and want to avoid a situation where a contribution 15 | requires extensive rework (by you or by us), sits in backlog for a long time, or 16 | cannot be accepted at all!** 17 | 18 | ### Proposing a new feature 19 | 20 | When you would like to propose a new feature, please create an [issue](https://github.com/IBM/sqlalchemy-ibmi/issues), 21 | so that the feature may be discussed before creating a pull request. This allows us to decide whether the feature will be 22 | accepted into the code base before you put in valuable and precious time coding that feature. 23 | 24 | ### Fixing bugs 25 | 26 | If you are looking to fix a bug, again, please create an [issue](https://github.com/IBM/sqlalchemy-ibmi/issues) prior to opening a pull request so it can be tracked. 27 | 28 | 29 | ## Legal 30 | 31 | We have tried to make it as easy as possible to make contributions. 
This 32 | applies to how we handle the legal aspects of contribution. We use the 33 | same approach - the [Developer's Certificate of Origin 1.1 (DCO)](https://github.com/hyperledger/fabric/blob/master/docs/source/DCO1.1.txt) - that the Linux® Kernel [community](https://elinux.org/Developer_Certificate_Of_Origin) 34 | uses to manage code contributions. 35 | 36 | We simply ask that when submitting a patch for review, the developer 37 | must include a sign-off statement in the commit message. 38 | 39 | Here is an example Signed-off-by line, which indicates that the 40 | submitter accepts the DCO: 41 | 42 | ```text 43 | Signed-off-by: John Doe 44 | ``` 45 | 46 | You can include this automatically when you commit a change to your 47 | local git repository using the following command: 48 | 49 | ```bash 50 | git commit -s 51 | ``` 52 | 53 | # Install 54 | ``` 55 | pip install . 56 | ``` 57 | 58 | ## Testing 59 | 60 | To run tests, clone the repository and run poetry install to ensure you are 61 | using the correct version of pytest and then run the command: 62 | ``` 63 | pytest --dburi="ibmi://:@/?" 64 | ``` 65 | Run sub-tests or specific tests with: 66 | ``` 67 | pytest --dburi="ibmi://:@/?" test/example_test.py 68 | ``` 69 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | import sys 15 | sys.path.insert(0, os.path.abspath('..')) 16 | autodoc_mock_imports = ["sqlalchemy"] 17 | 18 | master_doc = 'index' 19 | 20 | # -- Project information ----------------------------------------------------- 21 | 22 | project = 'sqlalchemy-ibmi' 23 | copyright = '2020-2023, the sqlalchemy-ibmi authors and contributors' 24 | 25 | # The full version, including alpha/beta/rc tags 26 | try: 27 | import sqlalchemy_ibmi 28 | release = sqlalchemy_ibmi.__version__ 29 | except ImportError: 30 | release = '' 31 | 32 | 33 | # -- General configuration --------------------------------------------------- 34 | 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 38 | extensions = [ 39 | "sphinx.ext.autodoc" 40 | ] 41 | 42 | # Add any paths that contain templates here, relative to this directory. 
43 | templates_path = ['_templates'] 44 | 45 | # List of patterns, relative to source directory, that match files and 46 | # directories to ignore when looking for source files. 47 | # This pattern also affects html_static_path and html_extra_path. 48 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 49 | 50 | 51 | # -- Options for HTML output ------------------------------------------------- 52 | 53 | # The theme to use for HTML and HTML Help pages. See the documentation for 54 | # a list of builtin themes. 55 | # 56 | html_theme = 'sphinx_rtd_theme' 57 | 58 | # Add any paths that contain custom static files (such as style sheets) here, 59 | # relative to this directory. They are copied after the builtin static files, 60 | # so a file named "default.css" will overwrite the builtin "default.css". 61 | html_static_path = ['_static'] 62 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. sqlalchemy-ibmi documentation master file, created by 2 | sphinx-quickstart on Thu Jan 30 10:34:14 2020. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | SQLAlchemy Adapter for IBM i 7 | ============================ 8 | 9 | .. automodule:: sqlalchemy_ibmi.base 10 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | Sphinx==7.2.6 2 | sphinx-rtd-theme==1.3.0 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "sqlalchemy-ibmi" 3 | version = "0.9.4-dev" 4 | description = "SQLAlchemy support for Db2 on IBM i" 5 | readme = "README.md" 6 | authors = [ 7 | "Naveen Ram ", 8 | "Kevin Adler " 9 | ] 10 | license = "Apache-2.0" 11 | keywords = ["sqlalchemy", "database", "ibm", "ibmi", "db2"] 12 | classifiers =[ 13 | "Development Status :: 4 - Beta", 14 | "Intended Audience :: Developers", 15 | "Operating System :: OS Independent", 16 | "Topic :: Database :: Front-Ends", 17 | ] 18 | packages = [ 19 | {include = "sqlalchemy_ibmi"} 20 | ] 21 | include = [ 22 | { path = "test/*", format = "sdist" }, 23 | { path = "contributing/*", format = "sdist" }, 24 | { path = "docs/*", format = "sdist" }, 25 | { path = "setup.cfg", format = "sdist" }, 26 | { path = "CHANGES", format = "sdist" }, 27 | { path = "LICENSE", format = "sdist" }, 28 | ] 29 | 30 | [tool.black] 31 | line-length = 88 32 | required-version = 22 33 | target-version = [ 34 | 'py37', 35 | 'py38', 36 | 'py39', 37 | 'py310', 38 | 'py311', 39 | ] 40 | 
41 | [tool.poetry.plugins."sqlalchemy.dialects"] 42 | ibmi = "sqlalchemy_ibmi.base:IBMiDb2Dialect" 43 | "ibmi.pyodbc" = "sqlalchemy_ibmi.base:IBMiDb2Dialect" 44 | 45 | [tool.poetry.dependencies] 46 | python = ">=3.7" 47 | sqlalchemy = ">=1.3.0, <2.0" 48 | pyodbc = ">=4.0" 49 | 50 | [tool.poetry.dev-dependencies] 51 | pytest = [ 52 | { version = ">=7.2.1", python = ">=3.11" }, 53 | { version = ">=6.2.5", python = ">=3.10, <3.11" }, 54 | { version = "<5.4", python = "<3.10" }, 55 | ] 56 | flake8 = "^3.7.9" 57 | bumpversion = "^0.5.0" 58 | sphinx = ">1.0.0" 59 | sphinx-rtd-theme = ">0.4.0" 60 | black = "^22.8.0" 61 | 62 | [build-system] 63 | requires = ["poetry-core"] 64 | build-backend = "poetry.core.masonry.api" 65 | 66 | -------------------------------------------------------------------------------- /samples/Core.py: -------------------------------------------------------------------------------- 1 | """ 2 | This tutorial will show you how to connect to the IBM i system and complete 3 | basic functions using the Expression Language sqlalchmy method. For additional 4 | functions see the tutorial in the docs here [1]. 
5 | [1]: https://docs.sqlalchemy.org/en/13/core/tutorial.html 6 | """ 7 | 8 | from sqlalchemy import create_engine 9 | from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey 10 | from sqlalchemy.sql import select 11 | from sqlalchemy.sql import and_, or_ 12 | 13 | system = input("System: ") 14 | UID = input("UID: ") 15 | PWD = input("PWD: ") 16 | extra = input("Add opts: ") 17 | 18 | conn_string = "ibmi://{}:{}@{}/?{}".format(UID, PWD, system, extra) 19 | 20 | engine = create_engine(conn_string, echo=True) 21 | 22 | # Creating Tables 23 | print("Creating Tables") 24 | metadata = MetaData() 25 | users = Table( 26 | "users", 27 | metadata, 28 | Column("id", Integer, primary_key=True), 29 | Column("name", String(50)), 30 | Column("fullname", String(50)), 31 | ) 32 | 33 | addresses = Table( 34 | "addresses", 35 | metadata, 36 | Column("id", Integer, primary_key=True), 37 | Column("user_id", None, ForeignKey("users.id")), 38 | Column("email_address", String(50), nullable=False), 39 | ) 40 | 41 | metadata.create_all(engine) 42 | 43 | # Insertions 44 | print("Insertions") 45 | with engine.connect() as conn: 46 | # single 47 | ins = users.insert().values(name="jack", fullname="Jack Jones") 48 | conn.execute(ins) 49 | 50 | # multiple 51 | conn.execute( 52 | addresses.insert(), 53 | [ 54 | {"user_id": 1, "email_address": "jack@yahoo.com"}, 55 | {"user_id": 1, "email_address": "jack@msn.com"}, 56 | ], 57 | ) 58 | 59 | # Select statements 60 | print("Select statements") 61 | result = conn.execute(select([users])) 62 | 63 | for row in result: 64 | print(row) 65 | 66 | # Select specific columns 67 | result = conn.execute(select([users.c.name, users.c.fullname])) 68 | 69 | for row in result: 70 | print(row) 71 | 72 | # Conjunctions 73 | print("Conjunctions") 74 | s = select( 75 | [(users.c.fullname + ", " + addresses.c.email_address).label("title")] 76 | ).where( 77 | and_( 78 | users.c.id == addresses.c.user_id, 79 | users.c.name.between("a", "z"), 80 | or_( 
81 | addresses.c.email_address.like("%@aol.com"), 82 | addresses.c.email_address.like("%@msn.com"), 83 | ), 84 | ) 85 | ) 86 | 87 | print(conn.execute(s).fetchall()) 88 | 89 | # Textual SQL 90 | print("Textual SQL") 91 | s = ( 92 | "SELECT users.fullname || ', ' || addresses.email_address AS" 93 | " title FROM users, addresses WHERE users.id = " 94 | "addresses.user_id AND users.name BETWEEN ? AND ? " 95 | "AND (addresses.email_address LIKE ? " 96 | "OR addresses.email_address LIKE ?)" 97 | ) 98 | print(conn.execute(s, "m", "z", "%@aol.com", "%@msn.com").fetchall()) 99 | 100 | # Updates 101 | print("Updates") 102 | stmt = users.update().where(users.c.name == "jack").values(name="ed") 103 | conn.execute(stmt) 104 | 105 | result = conn.execute(select([users])) 106 | 107 | for row in result: 108 | print(row) 109 | 110 | # Deletion 111 | print("Deletions") 112 | conn.execute(users.delete().where(users.c.name > "a")) 113 | 114 | result = conn.execute(select([users])) 115 | 116 | for row in result: 117 | print(row) 118 | -------------------------------------------------------------------------------- /samples/ORM.py: -------------------------------------------------------------------------------- 1 | """ 2 | This tutorial will show you how to connect to the IBM i system and complete 3 | basic functions using the ORM sqlalchmy method. For additional functions see 4 | the sqlalchemy ORM tutorial in the docs here [1]. 
5 | [1]: https://docs.sqlalchemy.org/en/13/orm/tutorial.html 6 | """ 7 | 8 | import sqlalchemy as sa 9 | from sqlalchemy.ext.declarative import declarative_base 10 | from sqlalchemy import Column, Integer, String 11 | from sqlalchemy.orm import sessionmaker 12 | from sqlalchemy import ForeignKey 13 | from sqlalchemy.orm import relationship 14 | 15 | system = input("System: ") 16 | UID = input("UID: ") 17 | PWD = input("PWD: ") 18 | extra = input("Add opts: ") 19 | 20 | conn_string = "ibmi://{}:{}@{}/?{}".format(UID, PWD, system, extra) 21 | 22 | engine = sa.create_engine(conn_string, echo=True) 23 | 24 | Base = declarative_base() 25 | 26 | 27 | # defining the User object mapping 28 | class User(Base): 29 | __tablename__ = "users" 30 | id = Column(Integer, primary_key=True) 31 | name = Column(String(50)) 32 | fullname = Column(String(50)) 33 | nickname = Column(String(50)) 34 | 35 | def __repr__(self): 36 | return "" % ( 37 | self.name, 38 | self.fullname, 39 | self.nickname, 40 | ) 41 | 42 | 43 | # create the mapping 44 | Base.metadata.create_all(engine) 45 | 46 | 47 | # set up session to create Users 48 | Session = sessionmaker(bind=engine) 49 | 50 | session = Session() 51 | 52 | # define and add user 53 | ed_user = User(name="ed", fullname="Ed Jones", nickname="edsnickname") 54 | session.add(ed_user) 55 | 56 | # search for added user 57 | our_user = session.query(User).filter_by(name="ed").first() 58 | 59 | print(our_user) 60 | 61 | # Adding and Updating Objects 62 | 63 | session.add_all( 64 | [ 65 | User(name="wendy", fullname="Wendy Williams", nickname="windy"), 66 | User(name="mary", fullname="Mary Contrary", nickname="mary"), 67 | User(name="fred", fullname="Fred Flintstone", nickname="freddy"), 68 | ] 69 | ) 70 | 71 | ed_user.nickname = "eddie" 72 | 73 | print("Changed data: " + str(session.dirty)) 74 | 75 | print("New data: " + str(session.new)) 76 | 77 | session.commit() 78 | 79 | # Rolling Back 80 | 81 | fake_user = User(name="fakeuser", 
fullname="Invalid", nickname="12345") 82 | session.add(fake_user) 83 | print("Data: ", session.query(User).all()) 84 | 85 | # rollback new user added 86 | 87 | session.rollback() 88 | 89 | print("Data: ", session.query(User).all()) 90 | 91 | # Querying 92 | print("Querying") 93 | for instance in session.query(User).order_by(User.name): 94 | print(instance.name, instance.fullname) 95 | 96 | for name, fullname in session.query(User.name, User.fullname): 97 | print(name, fullname) 98 | 99 | 100 | # Relationships 101 | class Address(Base): 102 | __tablename__ = "addresses" 103 | id = Column(Integer, primary_key=True) 104 | email_address = Column(String(50), nullable=False) 105 | user_id = Column(Integer, ForeignKey("users.id")) 106 | 107 | # create relationship with user 108 | user = relationship("User", back_populates="addresses") 109 | 110 | def __repr__(self): 111 | return "" % self.email_address 112 | 113 | 114 | # create User relationship with Address 115 | User.addresses = relationship("Address", order_by=Address.id, back_populates="user") 116 | # add cascade="all, delete, delete-orphan" to relationship to cascade delete 117 | 118 | Base.metadata.create_all(engine) 119 | 120 | jack = User(name="jack", fullname="Jack Bean", nickname="gjffdd") 121 | 122 | jack.addresses = [ 123 | Address(email_address="jack@google.com"), 124 | Address(email_address="j25@yahoo.com"), 125 | ] 126 | 127 | session.add(jack) 128 | session.commit() 129 | 130 | print("Data with addresses: ", session.query(User).filter_by(name="jack").one()) 131 | 132 | # Joins 133 | 134 | print( 135 | "Data using join: ", 136 | session.query(User).join(Address, User.id == Address.user_id).all(), 137 | ) 138 | 139 | # Deleting 140 | 141 | session.delete(jack) 142 | print("Count after deletion: ", session.query(User).filter_by(name="jack").count()) 143 | -------------------------------------------------------------------------------- /setup.cfg: 
-------------------------------------------------------------------------------- 1 | [egg_info] 2 | tag_build = dev 3 | 4 | [tool:pytest] 5 | addopts= --tb native -v -r fxX --maxfail=25 -p no:warnings 6 | python_files=test/*test_*.py 7 | 8 | [sqla_testing] 9 | requirement_cls=sqlalchemy_ibmi.requirements:Requirements 10 | profile_file=test/profiles.txt 11 | 12 | [flake8] 13 | max-line-length=88 14 | -------------------------------------------------------------------------------- /sqlalchemy_ibmi/__init__.py: -------------------------------------------------------------------------------- 1 | """ init file """ 2 | 3 | # +--------------------------------------------------------------------------+ 4 | # | Licensed Materials - Property of IBM | 5 | # | | 6 | # | (C) Copyright IBM Corporation 2008, 2016. | 7 | # +--------------------------------------------------------------------------+ 8 | # | This module complies with SQLAlchemy 0.8 and is | 9 | # | Licensed under the Apache License, Version 2.0 (the "License"); | 10 | # | you may not use this file except in compliance with the License. | 11 | # | You may obtain a copy of the License at | 12 | # | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable | 13 | # | law or agreed to in writing, software distributed under the License is | 14 | # | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | 15 | # | KIND, either express or implied. See the License for the specific | 16 | # | language governing permissions and limitations under the License. | 17 | # +--------------------------------------------------------------------------+ 18 | # | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi | 19 | # | Contributors: Jaimy Azle, Mike Bayer | 20 | # +--------------------------------------------------------------------------+ 21 | 22 | __version__ = "0.9.4-dev" 23 | 24 | from . 
import base # noqa: F401 25 | -------------------------------------------------------------------------------- /sqlalchemy_ibmi/base.py: -------------------------------------------------------------------------------- 1 | # +--------------------------------------------------------------------------+ 2 | # | Licensed Materials - Property of IBM | 3 | # | | 4 | # | (C) Copyright IBM Corporation 2008, 2016. | 5 | # +--------------------------------------------------------------------------+ 6 | # | This module complies with SQLAlchemy 0.8 and is | 7 | # | Licensed under the Apache License, Version 2.0 (the "License"); | 8 | # | you may not use this file except in compliance with the License. | 9 | # | You may obtain a copy of the License at | 10 | # | http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable | 11 | # | law or agreed to in writing, software distributed under the License is | 12 | # | distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | 13 | # | KIND, either express or implied. See the License for the specific | 14 | # | language governing permissions and limitations under the License. | 15 | # +--------------------------------------------------------------------------+ 16 | # | Authors: Alex Pitigoi, Abhigyan Agrawal, Rahul Priyadarshi | 17 | # | Contributors: Jaimy Azle, Mike Bayer | 18 | # +--------------------------------------------------------------------------+ 19 | """ 20 | DBAPI Connection 21 | ---------------- 22 | This dialect uses the `pyodbc `_ DBAPI 23 | and the `IBM i Access ODBC Driver 24 | `_. 25 | 26 | Connection string:: 27 | 28 | engine = create_engine("ibmi://user:password@host/rdbname[?key=value&key=value...]") 29 | 30 | Connection Arguments 31 | -------------------- 32 | 33 | The sqlalchemy-ibmi dialect supports multiple connection arguments that are 34 | passed in the URL to the `create_engine 35 | `_ function. 
36 | 37 | Connection string keywords: 38 | 39 | * ``current_schema`` - Define the default schema to use for unqualified names. 40 | * ``library_list`` - Specify which IBM i libraries to add to the server job's 41 | library list. Can be specified in the URL as a comma separated list, or as a 42 | keyword argument to the create_engine function as a list of strings 43 | * ``autocommit`` - If ``False``, Connection.commit must be called; 44 | otherwise each statement is automatically committed. 45 | Defaults to ``False``. 46 | * ``readonly`` - If ``True``, the connection is set to read-only. Defaults to ``False``. 47 | * ``timeout`` - The login timeout for the connection, in seconds. 48 | * ``use_system_naming`` - If ``True``, the connection is set to use the System 49 | naming convention, otherwise it will use the SQL naming convention. 50 | Defaults to ``False``. 51 | * ``trim_char_fields`` - If ``True``, all character fields will be returned 52 | with trailing spaces truncated. Defaults to ``False``. 53 | 54 | create-engine arguments: 55 | 56 | * ``fast_executemany`` - Enables PyODBC's `fast_executemany 57 | `_ 58 | option. Conversion between input and target types is mostly unsupported when this 59 | feature is enabled. eg. Inserting a Decimal object into a Float column will 60 | produce the error "Converting decimal loses precision". Defaults to ``False``. 61 | 62 | Transaction Isolation Level / Autocommit 63 | ---------------------------------------- 64 | Db2 for i supports 5 isolation levels: 65 | 66 | * ``SERIALIZABLE``: ``*RR`` 67 | * ``READ COMMITTED``: ``*CS`` 68 | * ``READ UNCOMMITTED``: ``*CHG`` 69 | * ``REPEATABLE READ``: ``*ALL`` 70 | * ``NO COMMIT``: ``*NC`` 71 | 72 | **At this time, sqlalchemy-ibmi supports all of these isolation levels 73 | except NO COMMIT.** 74 | 75 | Autocommit is supported on all available isolation levels. 
76 | 77 | To set isolation level globally:: 78 | 79 | engine = create_engine("ibmi://user:pass@host/", isolation_level='REPEATABLE_READ') 80 | 81 | To set using per-connection execution options:: 82 | 83 | connection = engine.connect() 84 | connection = connection.execution_options( 85 | isolation_level="SERIALIZABLE" 86 | ) 87 | 88 | Table Creation String Size 89 | -------------------------- 90 | When creating a table with SQLAlchemy, Db2 for i requires that the size of 91 | a String column be provided. 92 | 93 | Provide the length for a String column as follows: 94 | 95 | .. code-block:: python 96 | :emphasize-lines: 4, 8 97 | 98 | class User(Base): 99 | __tablename__ = 'users' 100 | id = Column(Integer, Sequence('user_id_seq'), primary_key=True) 101 | name = Column(String(50)) 102 | 103 | users = Table('users', metadata, 104 | Column('id', Integer, Sequence('user_id_seq'), primary_key=True), 105 | Column('name', String(50)), 106 | ) 107 | 108 | 109 | Literal Values and Untyped Parameters 110 | ------------------------------------- 111 | SQLAlchemy will try to use parameter markers as much as possible, even for values 112 | specified with the `literal 113 | `_, 114 | `null `_, 115 | `func `_ 116 | sql expression functions. Because Db2 for i doesn't support untyped parameter markers, 117 | in places where the type is unknown, a CAST expression must be placed around it to 118 | give it a type. sqlalchemy-ibmi will automatically do this based on the type object 119 | provided to SQLAlchemy. 120 | 121 | In some cases, SQLAlchemy allows specifying a Python object directly without a type 122 | object. 
In this case, SQLAlchemy will deduce the type object based on the Python type: 123 | 124 | +-------------------+-----------------+ 125 | | Python type | SQLAlchemy type | 126 | +===================+=================+ 127 | | bool | Boolean | 128 | +-------------------+-----------------+ 129 | | int | Integer | 130 | +-------------------+-----------------+ 131 | | float | Float | 132 | +-------------------+-----------------+ 133 | | str | Unicode | 134 | +-------------------+-----------------+ 135 | | bytes | LargeBinary | 136 | +-------------------+-----------------+ 137 | | decimal.Decimal | Numeric | 138 | +-------------------+-----------------+ 139 | | datetime.datetime | DateTime | 140 | +-------------------+-----------------+ 141 | | datetime.date | DateTime | 142 | +-------------------+-----------------+ 143 | | datetime.time | DateTime | 144 | +-------------------+-----------------+ 145 | 146 | The deduced SQLAlchemy type will be generic however, having no length, precision, or 147 | scale defined. This causes problems when generating these CAST expressions. To support 148 | handling the majority of cases, some types will be adjusted: 149 | 150 | +-------------------+-----------------+ 151 | | Python type | SQLAlchemy type | 152 | +===================+=================+ 153 | | int | BigInteger | 154 | +-------------------+-----------------+ 155 | | str | Unicode(32739) | 156 | +-------------------+-----------------+ 157 | 158 | In addition, Numeric types will be rendered as inline literals. On SQLAlchemy 1.4 and 159 | up, this will be done using `render_literal_execute 160 | `_ 161 | to support statement caching. 162 | 163 | If the type used is not appropriate (eg. 
when specifying a >32k string), you must 164 | specify the type (or use a `cast 165 | `_):: 166 | 167 | too_big_for_varchar = 'a' * 32768 168 | connection.execute( 169 | select(literal(too_big_for_varchar, UnicodeText())) 170 | ).scalar() 171 | 172 | 173 | Text search support 174 | ------------------- 175 | The ColumnOperators.match function is implemented using a basic LIKE operation by 176 | default. However, when `OmniFind Text Search Server for Db2 for i 177 | `_ is 178 | installed, match will take advantage of the CONTAINS function that it provides. 179 | 180 | """ # noqa E501 181 | import datetime 182 | import re 183 | 184 | from collections import defaultdict 185 | from distutils.util import strtobool 186 | 187 | from sqlalchemy import ( 188 | select, 189 | schema as sa_schema, 190 | exc, 191 | util, 192 | Table, 193 | MetaData, 194 | Column, 195 | __version__ as _SA_Version, 196 | ) 197 | from sqlalchemy.sql import compiler, operators 198 | from sqlalchemy.sql.expression import and_, cast 199 | from sqlalchemy.engine import default, reflection 200 | from sqlalchemy.types import ( 201 | BLOB, 202 | CHAR, 203 | CLOB, 204 | DATE, 205 | DATETIME, 206 | INTEGER, 207 | SMALLINT, 208 | BIGINT, 209 | DECIMAL, 210 | NUMERIC, 211 | REAL, 212 | TIME, 213 | TIMESTAMP, 214 | VARCHAR, 215 | FLOAT, 216 | ) 217 | from sqlalchemy import types as sa_types 218 | 219 | from .constants import RESERVED_WORDS 220 | 221 | 222 | def get_sa_version(): 223 | """Returns the SQLAlchemy version as a list of integers.""" 224 | version = [int(ver_token) for ver_token in _SA_Version.split(".")[0:2]] 225 | return version 226 | 227 | 228 | SA_Version = get_sa_version() 229 | 230 | 231 | class IBMBoolean(sa_types.Boolean): 232 | """Represents a Db2 Boolean Column""" 233 | 234 | def result_processor(self, _, coltype): 235 | def process(value): 236 | if value is None: 237 | return None 238 | return bool(value) 239 | 240 | return process 241 | 242 | def bind_processor(self, _): 243 | def 
process(value): 244 | if value is None: 245 | return None 246 | return "1" if value else "0" 247 | 248 | return process 249 | 250 | 251 | class IBMDate(sa_types.Date): 252 | """Represents a Db2 Date Column""" 253 | 254 | def result_processor(self, _, coltype): 255 | def process(value): 256 | if value is None: 257 | return None 258 | if isinstance(value, datetime.datetime): 259 | value = datetime.date(value.year, value.month, value.day) 260 | return value 261 | 262 | return process 263 | 264 | def bind_processor(self, _): 265 | def process(value): 266 | if value is None: 267 | return None 268 | if isinstance(value, datetime.datetime): 269 | value = datetime.date(value.year, value.month, value.day) 270 | return str(value) 271 | 272 | return process 273 | 274 | 275 | class DOUBLE(sa_types.Numeric): 276 | """Represents a Db2 Double Column""" 277 | 278 | __visit_name__ = "DOUBLE" 279 | 280 | 281 | class GRAPHIC(sa_types.CHAR): 282 | """Represents a Db2 Graphic Column""" 283 | 284 | __visit_name__ = "GRAPHIC" 285 | 286 | 287 | class VARGRAPHIC(sa_types.Unicode): 288 | """Represents a Db2 Vargraphic Column""" 289 | 290 | __visit_name__ = "VARGRAPHIC" 291 | 292 | 293 | class DBCLOB(sa_types.CLOB): 294 | """Represents a Db2 Dbclob Column""" 295 | 296 | __visit_name__ = "DBCLOB" 297 | 298 | 299 | class XML(sa_types.Text): 300 | """Represents a Db2 XML Column""" 301 | 302 | __visit_name__ = "XML" 303 | 304 | 305 | COLSPECS = { 306 | sa_types.Boolean: IBMBoolean, 307 | sa_types.Date: IBMDate, 308 | } 309 | 310 | ISCHEMA_NAMES = { 311 | "BLOB": BLOB, 312 | "CHAR": CHAR, 313 | "CHARACTER": CHAR, 314 | "CLOB": CLOB, 315 | "DATE": DATE, 316 | "DATETIME": DATETIME, 317 | "INTEGER": INTEGER, 318 | "SMALLINT": SMALLINT, 319 | "BIGINT": BIGINT, 320 | "DECIMAL": DECIMAL, 321 | "NUMERIC": NUMERIC, 322 | "REAL": REAL, 323 | "DOUBLE": DOUBLE, 324 | "FLOAT": FLOAT, 325 | "TIME": TIME, 326 | "TIMESTAMP": TIMESTAMP, 327 | "VARCHAR": VARCHAR, 328 | "XML": XML, 329 | "GRAPHIC": GRAPHIC, 330 | 
"VARGRAPHIC": VARGRAPHIC, 331 | "DBCLOB": DBCLOB, 332 | } 333 | 334 | 335 | class DB2TypeCompiler(compiler.GenericTypeCompiler): 336 | """IBM i Db2 Type Compiler""" 337 | 338 | def visit_TIMESTAMP(self, type_, **kw): 339 | precision = getattr(type_, "precision", None) 340 | if precision is not None: 341 | return f"TIMESTAMP({precision})" 342 | else: 343 | return "TIMESTAMP" 344 | 345 | def visit_DATETIME(self, type_, **kw): 346 | return self.visit_TIMESTAMP(type_, **kw) 347 | 348 | def visit_FLOAT(self, type_, **kw): 349 | return ( 350 | "FLOAT" 351 | if type_.precision is None 352 | else "FLOAT(%(precision)s)" % {"precision": type_.precision} 353 | ) 354 | 355 | def visit_BOOLEAN(self, type_, **kw): 356 | return self.visit_SMALLINT(type_, **kw) 357 | 358 | def _extend(self, type_, name, ccsid=None, length=None): 359 | text = name 360 | 361 | if not length: 362 | length = type_.length 363 | 364 | if length: 365 | text += f"({length})" 366 | 367 | if ccsid: 368 | text += f" CCSID {ccsid}" 369 | 370 | # TODO: Handle collation instead of CCSID 371 | # if type_.collation: 372 | # text += ' COLLATE "%s"' % type_.collation 373 | return text 374 | 375 | def visit_CHAR(self, type_, **kw): 376 | return self._extend(type_, "CHAR", 1208) 377 | 378 | def visit_VARCHAR(self, type_, **kw): 379 | return self._extend(type_, "VARCHAR", 1208) 380 | 381 | def visit_CLOB(self, type_, **kw): 382 | return self._extend(type_, "CLOB", ccsid=1208, length=type_.length or "2G") 383 | 384 | def visit_NCHAR(self, type_, **kw): 385 | return self._extend(type_, "NCHAR") 386 | 387 | def visit_NVARCHAR(self, type_, **kw): 388 | return self._extend(type_, "NVARCHAR") 389 | 390 | def visit_NCLOB(self, type_, **kw): 391 | return self._extend(type_, "NCLOB", length=type_.length or "1G") 392 | 393 | def visit_TEXT(self, type_, **kw): 394 | return self.visit_CLOB(type_, **kw) 395 | 396 | def visit_BLOB(self, type_, **kw): 397 | length = type_.length or "2G" 398 | return f"BLOB({length})" 399 | 400 | def 
visit_numeric(self, type_, **kw): 401 | # For many databases, NUMERIC and DECIMAL are equivalent aliases, but for Db2 402 | # NUMERIC is zoned while DECIMAL is packed. Packed format gives better space 403 | # usage and performance, so we prefer that by default. If a user really wants 404 | # zoned, they can use types.NUMERIC class instead. 405 | return self.visit_DECIMAL(type_, **kw) 406 | 407 | # dialect-specific types 408 | 409 | # This is now part of SQLAlchemy as of 2.0. We can drop this function once 410 | # we drop support for earlier versions. 411 | def visit_DOUBLE(self, type_): 412 | return "DOUBLE" 413 | 414 | def visit_GRAPHIC(self, type_): 415 | return self._extend(type_, "GRAPHIC") 416 | 417 | def visit_VARGRAPHIC(self, type_): 418 | return self._extend(type_, "VARGRAPHIC") 419 | 420 | def visit_DBCLOB(self, type_, **kw): 421 | return self._extend(type_, "DBCLOB", length=type_.length or "1G") 422 | 423 | def visit_XML(self, type_): 424 | return "XML" 425 | 426 | 427 | class DB2Compiler(compiler.SQLCompiler): 428 | """IBM i Db2 compiler class""" 429 | 430 | def get_cte_preamble(self, recursive): 431 | return "WITH" 432 | 433 | def visit_now_func(self, fn, **kw): 434 | return "CURRENT_TIMESTAMP" 435 | 436 | def for_update_clause(self, select, **kw): 437 | if select.for_update == "read": 438 | return " WITH RS USE AND KEEP SHARE LOCKS" 439 | if select.for_update: 440 | return " WITH RS USE AND KEEP UPDATE LOCKS" 441 | return "" 442 | 443 | def visit_mod_binary(self, binary, operator, **kw): 444 | return "mod(%s, %s)" % (self.process(binary.left), self.process(binary.right)) 445 | 446 | def visit_match_op_binary(self, binary, operator, **kw): 447 | if self.dialect.text_server_available: 448 | return "CONTAINS (%s, %s) > 0" % ( 449 | self.process(binary.left), 450 | self.process(binary.right), 451 | ) 452 | binary.right.value = "%" + binary.right.value + "%" 453 | return "%s LIKE %s" % (self.process(binary.left), self.process(binary.right)) 454 | 455 | def 
    def limit_clause(self, select, **kw):
        """Render LIMIT/OFFSET using the OFFSET ... ROWS / FETCH FIRST form.

        On Db2 for i, there is a separate OFFSET clause, but there is no
        separate LIMIT clause. Instead, LIMIT is treated as an alternate or
        "shortcut" syntax of a FETCH clause.
        Because of this, these work: "LIMIT x", "LIMIT x OFFSET y"
        but these do not: "OFFSET y", "LIMIT x OFFSET y ROWS"

        Because of this, if we wanted to use the LIMIT alternate form, we'd
        have to special case both LIMIT with OFFSET and OFFSET without LIMIT.
        However, if we use the traditional FETCH form we need no special
        cases.

        OFFSET is supported since IBM i 7.1 TR11 / IBM i 7.2 TR3.
        """
        text = ""
        if select._offset_clause is not None:
            text += " OFFSET " + self.process(select._offset_clause, **kw) + " ROWS "
        if select._limit_clause is not None:
            text += (
                " FETCH FIRST "
                + self.process(select._limit_clause, **kw)
                + " ROWS ONLY "
            )
        return text

    def visit_sequence(self, sequence, **kw):
        """Render a sequence next-value expression."""
        return "NEXT VALUE FOR %s" % sequence.name

    def default_from(self):
        """FROM clause used when a SELECT has no table.

        Db2 uses the SYSIBM.SYSDUMMY1 table for row count.
        """
        return " FROM SYSIBM.SYSDUMMY1"

    def visit_function(self, func, result_map=None, **kwargs):
        """Special-case AVG and CHAR_LENGTH; defer everything else.

        AVG is wrapped in DOUBLE() and CHAR_LENGTH is given the OCTETS
        length-unit argument; all other functions use the default rendering.
        """
        if func.name.upper() == "AVG":
            return "AVG(DOUBLE(%s))" % (self.function_argspec(func, **kwargs))

        if func.name.upper() == "CHAR_LENGTH":
            return "CHAR_LENGTH(%s, %s)" % (
                self.function_argspec(func, **kwargs),
                "OCTETS",
            )
        return compiler.SQLCompiler.visit_function(self, func, **kwargs)

        # TODO: this is wrong but need to know what Db2 is expecting here
        # if func.name.upper() == "LENGTH":
        #     return "LENGTH('%s')" % func.compile().params[func.name + '_1']
        # else:
        #     return compiler.SQLCompiler.visit_function(self, func, **kwargs)

    def visit_cast(self, cast, **kw):
        """Render a CAST, flagging nested visits via ``_cast_applied``.

        The flag lets visit_null / visit_bindparam skip adding a redundant
        inner CAST when one is already being applied.
        """
        kw["_cast_applied"] = True
        return super().visit_cast(cast, **kw)

    def visit_savepoint(self, savepoint_stmt):
        """Render SAVEPOINT; RETAIN CURSORS keeps cursors open on rollback."""
        return "SAVEPOINT %(sid)s ON ROLLBACK RETAIN CURSORS" % {
            "sid": self.preparer.format_savepoint(savepoint_stmt)
        }

    def visit_rollback_to_savepoint(self, savepoint_stmt):
        return "ROLLBACK TO SAVEPOINT %(sid)s" % {
            "sid": self.preparer.format_savepoint(savepoint_stmt)
        }

    def visit_release_savepoint(self, savepoint_stmt):
        return "RELEASE TO SAVEPOINT %(sid)s" % {
            "sid": self.preparer.format_savepoint(savepoint_stmt)
        }

    def visit_unary(self, unary, **kw):
        """Render unary operators.

        EXISTS used inside a SELECT column list is wrapped in a CASE
        expression so it yields 1/0 as a scalar value.
        """
        usql = super().visit_unary(unary, **kw)

        if unary.operator == operators.exists and kw.get(
            "within_columns_clause", False
        ):
            usql = f"CASE WHEN {usql} THEN 1 ELSE 0 END"
        return usql

    def visit_empty_set_op_expr(self, type_, expand_op):
        """Render an IN/NOT IN comparison against an empty set.

        Emits typed NULL placeholders plus an always-true (NOT IN) or
        always-false (IN) predicate spliced into the enclosing parens;
        other operators fall back to the empty-set SELECT.
        """
        if expand_op is operators.not_in_op:
            return "(%s)) OR (1 = 1" % (
                ", ".join(
                    "CAST(NULL AS %s)"
                    % self.dialect.type_compiler.process(
                        INTEGER() if element._isnull else element
                    )
                    for element in type_
                )
            )
        elif expand_op is operators.in_op:
            return "(%s)) OR (1 != 1" % (
                ", ".join(
                    "CAST(NULL AS %s)"
                    % self.dialect.type_compiler.process(
                        INTEGER() if element._isnull else element
                    )
                    for element in type_
                )
            )
        else:
            return self.visit_empty_set_expr(type_)

    def visit_empty_set_expr(self, element_types):
        """A SELECT that is guaranteed to return no rows."""
        return "SELECT 1 FROM SYSIBM.SYSDUMMY1 WHERE 1!=1"

    def visit_null(self, expr, **kw):
        """Render NULL, casting it when it appears in a column list."""
        if not kw.get("within_columns_clause", False):
            return "NULL"

        # We can't use a NULL constant/literal in a parameter list without a
        # type or we'll get SQL0206 - Column or global variable NULL not found.
        # We can work around this by casting to a type, but at this point we
        # don't know what the type was, and when using the null() function,
        # there will not be a type anyway, so we pick an arbitrary type of
        # INTEGER which is most compatible with other types other than BLOB,
        # XML, and some others.
        #
        # As an optimization, if we detect we're already in a CAST expression,
        # then we don't need to add another.
        if kw.get("_cast_applied", False):
            # We're in a cast expression, so no need to cast
            return "NULL"

        return "CAST(NULL AS INTEGER)"

    def visit_bindparam(
        self,
        bindparam,
        within_columns_clause=False,
        literal_binds=False,
        skip_bind_expression=False,
        literal_execute=False,
        render_postcompile=False,
        **kwargs,
    ):
        """Render bind parameters, adding CASTs for ones in a column list.

        Db2 doesn't support untyped parameter markers, so markers appearing
        in the columns clause are wrapped in a CAST to an appropriate type.
        """
        if within_columns_clause and not literal_binds:
            # Default Python type to SQLAlchemy type mapping:
            # | Python type       | SQLAlchemy type |
            # |-------------------|-----------------|
            # | bool              | Boolean         |
            # | int               | Integer         |
            # | float             | Float           |
            # | str               | Unicode         |
            # | bytes             | LargeBinary     |
            # | decimal.Decimal   | Numeric         |
            # | datetime.datetime | DateTime        |
            # | datetime.date     | DateTime        |
            # | datetime.time     | DateTime        |
            #
            # Most types just need a cast, but some types we handle specially
            # since we don't know how big the value will be, and literals will
            # have their attributes set to defaults, eg. length, precision,
            # and scale all set to None. Since we can't base anything on the
            # bindparam value, as all literal values will end up caching to
            # the same statement, we must assume the worst case scenario and
            # try to handle any possible value. We could render everything as
            # literals using bindparam.render_literal_execute(), but that will
            # impact statement caching on the server as well as cause problems
            # with bytes and str literals over 32k.
            #
            # - Integer: Cast to BigInteger
            # - Unicode: If no length was specified, set length to VARCHAR max
            #            length. This will cause issues if users specify a
            #            >32k literal, but this seems unlikely and using
            #            UnicodeText by default would cause extra network
            #            flows for each literal. If a user needs to query a
            #            >32k literal, they can specify the type for the
            #            literal themselves.
            # - Decimal: Render as a literal if no precision was specified.
            #            There's no precision and scale values we can use
            #            which could cover all Decimal literals.
            type_ = bindparam.type
            use_cast = True

            if isinstance(type_, sa_types.Numeric) and not isinstance(
                type_, sa_types.Float
            ):
                if not type_.precision:
                    # Render this value as a literal in post-process
                    use_cast = False
                    try:
                        bindparam = bindparam.render_literal_execute()
                    except AttributeError:
                        # SQLAlchemy 1.3 doesn't have render_literal_execute
                        literal_binds = True
            elif isinstance(type_, sa_types.Unicode):
                if not type_.length:
                    type_ = type_.copy()
                    type_.length = 32739
            elif isinstance(type_, sa_types.Integer):
                type_ = sa_types.BigInteger()
            elif isinstance(type_, sa_types.NullType):
                # Can't cast to a NULL, just leave it as-is
                use_cast = False

            if use_cast:
                return self.process(cast(bindparam, type_))

        return super().visit_bindparam(
            bindparam,
            within_columns_clause,
            literal_binds,
            skip_bind_expression,
            literal_execute=literal_execute,
            render_postcompile=render_postcompile,
            **kwargs,
        )


class DB2DDLCompiler(compiler.DDLCompiler):
    """DDL Compiler for IBM i Db2"""

    def get_column_specification(self, column, **kw):
        """Build the DDL fragment for one column: name, type, and options.

        Emits NOT NULL for non-nullable or primary-key columns, a
        WITH DEFAULT clause when a default is present, and an identity
        clause for the table's autoincrement column.
        """
        col_spec = [self.preparer.format_column(column)]

        col_spec.append(
            self.dialect.type_compiler.process(column.type, type_expression=column)
        )

        # column-options: "NOT NULL"
        if not column.nullable or column.primary_key:
            col_spec.append("NOT NULL")

        # default-clause:
        default = self.get_column_default_string(column)
        if default is not None:
            col_spec.append("WITH DEFAULT")
            col_spec.append(default)

        if column is column.table._autoincrement_column:
            col_spec.append("GENERATED BY DEFAULT")
            col_spec.append("AS IDENTITY")
            col_spec.append("(START WITH 1)")

        column_spec = " ".join(col_spec)
        return column_spec

    def define_constraint_cascades(self, constraint):
        """Render ON DELETE for a foreign key; warn on unsupported ON UPDATE."""
        text = ""
        if constraint.ondelete is not None:
            text += " ON DELETE %s" % constraint.ondelete

        if constraint.onupdate is not None:
            util.warn("Db2 does not support UPDATE CASCADE for foreign keys.")

        return text

    def visit_drop_constraint(self, drop, **kw):
        """Render DROP for FK / PK / UNIQUE / other constraints.

        A unique constraint flagged with ``uConstraint_as_index`` was created
        as an index, so it is dropped with a bare DROP instead of
        ALTER TABLE ... DROP.
        """
        constraint = drop.element
        if isinstance(constraint, sa_schema.ForeignKeyConstraint):
            qual = "FOREIGN KEY "
            const = self.preparer.format_constraint(constraint)
        elif isinstance(constraint, sa_schema.PrimaryKeyConstraint):
            qual = "PRIMARY KEY "
            const = ""
        elif isinstance(constraint, sa_schema.UniqueConstraint):
            qual = "UNIQUE "
            const = self.preparer.format_constraint(constraint)
        else:
            qual = ""
            const = self.preparer.format_constraint(constraint)

        if (
            hasattr(constraint, "uConstraint_as_index")
            and constraint.uConstraint_as_index
        ):
            return "DROP %s%s" % (qual, const)
        return "ALTER TABLE %s DROP %s%s" % (
            self.preparer.format_table(constraint.table),
            qual,
            const,
        )

    def visit_create_index(
        self, create, include_schema=True, include_table_schema=True
    ):
        """Render CREATE INDEX, excluding NULL keys for unique-as-index."""
        sql = super().visit_create_index(create, include_schema, include_table_schema)
        if getattr(create.element, "uConstraint_as_index", None):
            sql += " EXCLUDE NULL KEYS"
        return sql


class DB2IdentifierPreparer(compiler.IdentifierPreparer):
    """IBM i Db2 specific identifier preparer"""

    reserved_words = RESERVED_WORDS
    # Identifiers may not begin with a digit, "_", or "$".
    illegal_initial_characters = {str(x) for x in range(0, 10)}.union(["_", "$"])


class DB2ExecutionContext(default.DefaultExecutionContext):
    """IBM i Db2 Execution Context class"""

    # Whether post_exec() should fetch IDENTITY_VAL_LOCAL() after an INSERT.
    _select_lastrowid = False
    # Last identity value fetched, if any.
    _lastrowid = None

    def get_lastrowid(self):
        return self._lastrowid

    def pre_exec(self):
        """Decide whether the last identity value must be fetched.

        Needed only for an INSERT into a table with an autoincrement column
        when the statement neither uses RETURNING nor is inline.
        """
        if self.isinsert:
            tbl = self.compiled.statement.table
            seq_column = tbl._autoincrement_column
            insert_has_sequence = seq_column is not None

            self._select_lastrowid = (
                insert_has_sequence
                and not self.compiled.returning
                and not self.compiled.inline
            )

    def post_exec(self):
        """Fetch IDENTITY_VAL_LOCAL() on the same cursor, if required."""
        conn = self.root_connection
        if self._select_lastrowid:
            conn._cursor_execute(self.cursor, "VALUES IDENTITY_VAL_LOCAL()", (), self)
            row = self.cursor.fetchall()[0]
            if row[0] is not None:
                self._lastrowid = int(row[0])

    def fire_sequence(self, seq, type_):
        """Execute NEXTVAL for *seq* and return the value coerced to *type_*."""
        return self._execute_scalar(
            "VALUES NEXTVAL FOR "
            + self.connection.dialect.identifier_preparer.format_sequence(seq),
            type_,
        )


def to_bool(obj):
    """Return *obj* unchanged if already a bool, else parse it via strtobool.

    NOTE(review): strtobool is imported elsewhere in this file (presumably
    distutils.util.strtobool, which returns 0/1 rather than a bool) — confirm.
    """
    if isinstance(obj, bool):
        return obj
    return strtobool(obj)


class IBMiDb2Dialect(default.DefaultDialect):
    # DBAPI driver and dialect identification
    driver = "pyodbc"
    name = "ibmi"
    max_identifier_length = 128
    encoding = "utf-8"
    default_paramstyle = "qmark"
    colspecs = COLSPECS
COLSPECS 791 | ischema_names = ISCHEMA_NAMES 792 | postfetch_lastrowid = True 793 | supports_native_boolean = False 794 | supports_alter = True 795 | supports_sequences = True 796 | sequences_optional = True 797 | supports_sane_multi_rowcount = False 798 | supports_sane_rowcount_returning = True 799 | supports_native_decimal = True 800 | requires_name_normalize = True 801 | supports_default_values = False 802 | supports_empty_insert = False 803 | supports_statement_cache = True 804 | 805 | statement_compiler = DB2Compiler 806 | ddl_compiler = DB2DDLCompiler 807 | type_compiler = DB2TypeCompiler 808 | preparer = DB2IdentifierPreparer 809 | execution_ctx_cls = DB2ExecutionContext 810 | 811 | def __init__(self, isolation_level=None, fast_executemany=False, **kw): 812 | super().__init__(**kw) 813 | self.isolation_level = isolation_level 814 | self.fast_executemany = fast_executemany 815 | 816 | def on_connect(self): 817 | if self.isolation_level is not None: 818 | 819 | def connect(conn): 820 | self.set_isolation_level(conn, self.isolation_level) 821 | 822 | return connect 823 | else: 824 | return None 825 | 826 | def initialize(self, connection): 827 | super().initialize(connection) 828 | self.driver_version = self._get_driver_version(connection.connection) 829 | self.text_server_available = self._check_text_server(connection) 830 | 831 | def get_check_constraints(self, connection, table_name, schema=None, **kw): 832 | current_schema = self.denormalize_name(schema or self.default_schema_name) 833 | table_name = self.denormalize_name(table_name) 834 | sysconst = self.sys_table_constraints 835 | syschkconst = self.sys_check_constraints 836 | 837 | query = select( 838 | [syschkconst.c.conname, syschkconst.c.chkclause], 839 | and_( 840 | syschkconst.c.conschema == sysconst.c.conschema, 841 | syschkconst.c.conname == sysconst.c.conname, 842 | sysconst.c.tabschema == current_schema, 843 | sysconst.c.tabname == table_name, 844 | ), 845 | ) 846 | 847 | check_consts = [] 848 | 
print(query) 849 | for res in connection.execute(query): 850 | check_consts.append( 851 | {"name": self.normalize_name(res[0]), "sqltext": res[1]} 852 | ) 853 | 854 | return check_consts 855 | 856 | def get_table_comment(self, connection, table_name, schema=None, **kw): 857 | current_schema = self.denormalize_name(schema or self.default_schema_name) 858 | table_name = self.denormalize_name(table_name) 859 | if current_schema: 860 | whereclause = and_( 861 | self.sys_tables.c.tabschema == current_schema, 862 | self.sys_tables.c.tabname == table_name, 863 | ) 864 | else: 865 | whereclause = self.sys_tables.c.tabname == table_name 866 | select_statement = select([self.sys_tables.c.tabcomment], whereclause) 867 | results = connection.execute(select_statement) 868 | return {"text": results.scalar()} 869 | 870 | @property 871 | def _isolation_lookup(self): 872 | return { 873 | # IBM i terminology 874 | "*CHG": self.dbapi.SQL_TXN_READ_UNCOMMITTED, 875 | "*CS": self.dbapi.SQL_TXN_READ_COMMITTED, 876 | "*ALL": self.dbapi.SQL_TXN_REPEATABLE_READ, 877 | "*RR": self.dbapi.SQL_TXN_SERIALIZABLE, 878 | # ODBC terminology 879 | "SERIALIZABLE": self.dbapi.SQL_TXN_SERIALIZABLE, 880 | "READ UNCOMMITTED": self.dbapi.SQL_TXN_READ_UNCOMMITTED, 881 | "READ COMMITTED": self.dbapi.SQL_TXN_READ_COMMITTED, 882 | "REPEATABLE READ": self.dbapi.SQL_TXN_REPEATABLE_READ, 883 | } 884 | 885 | # Methods merged from PyODBCConnector 886 | 887 | def get_isolation_level(self, dbapi_conn): 888 | return self.isolation_level 889 | 890 | def set_isolation_level(self, connection, level): 891 | self.isolation_level = level 892 | level = level.replace("_", " ") 893 | if level in self._isolation_lookup: 894 | connection.set_attr( 895 | self.dbapi.SQL_ATTR_TXN_ISOLATION, self._isolation_lookup[level] 896 | ) 897 | else: 898 | raise exc.ArgumentError( 899 | "Invalid value '%s' for isolation_level. 
" 900 | "Valid isolation levels for %s are %s" 901 | % (level, self.name, ", ".join(self._isolation_lookup.keys())) 902 | ) 903 | 904 | @classmethod 905 | def dbapi(cls): 906 | return __import__("pyodbc") 907 | 908 | DRIVER_KEYWORD_MAP = { 909 | # SQLAlchemy kwd: (ODBC keyword, type, default) 910 | # 911 | # NOTE: We use the upper-case driver connection string value to work 912 | # around a bug in the the 07.01.025 driver which causes it to do 913 | # case-sensitive lookups. This should be fixed in the 07.01.026 driver 914 | # and older versions are not affected, but we don't have to check 915 | # anything since they are case-insensitive and allow the all-uppercase 916 | # values just fine. 917 | "system": ("SYSTEM", str, None), 918 | "user": ("UID", str, None), 919 | "password": ("PWD", str, None), 920 | "database": ("DATABASE", str, None), 921 | "use_system_naming": ("NAM", to_bool, False), 922 | "trim_char_fields": ("TRIMCHAR", to_bool, None), 923 | "lob_threshold_kb": ("MAXFIELDLEN", int, None), 924 | } 925 | 926 | DRIVER_KEYWORDS_SPECIAL = { 927 | "current_schema", 928 | "library_list", 929 | } 930 | 931 | @classmethod 932 | def map_connect_opts(cls, opts): 933 | # Map our keywords to what ODBC expects 934 | for keyword, keyword_info in cls.DRIVER_KEYWORD_MAP.items(): 935 | odbc_keyword, to_keyword, default_value = keyword_info 936 | 937 | value = opts.pop(keyword, default_value) 938 | if value is None: 939 | continue 940 | 941 | try: 942 | value = to_keyword(value) 943 | 944 | # pyodbc will stringify the bool to "True" or "False" instead 945 | # of "1" and "0" as the driver wants, so manually convert to 946 | # integer first. 
947 | if isinstance(value, bool): 948 | value = int(value) 949 | 950 | opts[odbc_keyword] = value 951 | except ValueError: 952 | raise ValueError("Invalid value specified for {}".format(keyword)) 953 | 954 | # For current_schema and library_list we can't use the above loop, since these 955 | # must be combined in to one ODBC keyword 956 | if "current_schema" in opts or "library_list" in opts: 957 | current_schema = opts.pop("current_schema", "") 958 | library_list = opts.pop("library_list", "") 959 | 960 | if not isinstance(library_list, str): 961 | library_list = ",".join(library_list) 962 | 963 | opts["DefaultLibraries"] = f"{current_schema},{library_list}" 964 | 965 | def create_connect_args(self, url): 966 | opts = url.translate_connect_args(username="user", host="system") 967 | opts.update(url.query) 968 | 969 | # Allow both our specific keywords and the SQLAlchemy base keywords 970 | allowed_opts = ( 971 | set(self.DRIVER_KEYWORD_MAP.keys()) 972 | | self.DRIVER_KEYWORDS_SPECIAL 973 | | {"autocommit", "readonly", "timeout"} 974 | ) 975 | 976 | if not allowed_opts.issuperset(opts.keys()): 977 | raise ValueError("Option entered not valid for IBM i Access ODBC Driver") 978 | 979 | self.map_connect_opts(opts) 980 | return [ 981 | [ 982 | "Driver=IBM i Access ODBC Driver" 983 | ";UNICODESQL=1" 984 | ";TRUEAUTOCOMMIT=1" 985 | ";XDYNAMIC=0" 986 | ], 987 | opts, 988 | ] 989 | 990 | def is_disconnect(self, e, connection, cursor): 991 | if isinstance(e, self.dbapi.ProgrammingError): 992 | return "The cursor's connection has been closed." in str( 993 | e 994 | ) or "Attempt to use a closed connection." 
    def _dbapi_version(self):
        """Return the parsed pyodbc version tuple, or () if no DBAPI."""
        if not self.dbapi:
            return ()
        return self._parse_dbapi_version(self.dbapi.version)

    def _parse_dbapi_version(self, vers):
        """Parse a version string like "1.2.3" or "py3-1.2.3-beta" into a
        tuple of ints, with any trailing tag appended as a string."""
        m = re.match(r"(?:py.*-)?([\d\.]+)(?:-(\w+))?", vers)
        if not m:
            return ()
        vers = tuple([int(x) for x in m.group(1).split(".")])
        if m.group(2):
            vers += (m.group(2),)
        return vers

    def _get_server_version_info(self, connection, allow_chars=True):
        """Return the (major, minor) DBMS version reported by ODBC.

        NOTE(review): allow_chars is accepted but unused here.
        """
        dbapi_con = connection.connection
        version = [
            int(_) for _ in dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER).split(".")
        ]
        return tuple(version[0:2])

    def _get_default_schema_name(self, connection):
        """Return the normalized CURRENT_SCHEMA of the connection."""
        return self.normalize_name(connection.execute("VALUES CURRENT_SCHEMA").scalar())

    # Driver version for IBM i Access ODBC Driver is given as
    # VV.RR.SSSF where VV (major), RR (release), and SSS (service pack)
    # will be returned and F (test fix version) will be ignored
    def _get_driver_version(self, db_conn):
        version = db_conn.getinfo(self.dbapi.SQL_DRIVER_VER).split(".")
        sssf = version.pop(2)
        sss = sssf[:3]
        version.append(sss)
        return [int(_) for _ in version]

    # Lightweight Table definitions over the QSYS2/SYSIBM catalog views,
    # used by the reflection queries below. The short "key" names are how
    # the columns are referenced in code.
    ischema = MetaData()

    sys_schemas = Table(
        "SYSSCHEMAS",
        ischema,
        Column("SCHEMA_NAME", sa_types.Unicode, key="schemaname"),
        schema="QSYS2",
    )

    sys_tables = Table(
        "SYSTABLES",
        ischema,
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("TABLE_TYPE", sa_types.Unicode, key="tabtype"),
        Column("SYSTEM_TABLE", sa_types.Unicode, key="tabsys"),
        Column("LONG_COMMENT", sa_types.Unicode, key="tabcomment"),
        schema="QSYS2",
    )

    sys_table_constraints = Table(
        "SYSCST",
        ischema,
        Column("CONSTRAINT_SCHEMA", sa_types.Unicode, key="conschema"),
        Column("CONSTRAINT_NAME", sa_types.Unicode, key="conname"),
        Column("CONSTRAINT_TYPE", sa_types.Unicode, key="contype"),
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("TABLE_TYPE", sa_types.Unicode, key="tabtype"),
        schema="QSYS2",
    )

    sys_constraints_columns = Table(
        "SYSCSTCOL",
        ischema,
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("COLUMN_NAME", sa_types.Unicode, key="colname"),
        Column("CONSTRAINT_SCHEMA", sa_types.Unicode, key="conschema"),
        Column("CONSTRAINT_NAME", sa_types.Unicode, key="conname"),
        schema="QSYS2",
    )

    sys_key_constraints = Table(
        "SYSKEYCST",
        ischema,
        Column("CONSTRAINT_SCHEMA", sa_types.Unicode, key="conschema"),
        Column("CONSTRAINT_NAME", sa_types.Unicode, key="conname"),
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("COLUMN_NAME", sa_types.Unicode, key="colname"),
        Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
        schema="QSYS2",
    )

    sys_check_constraints = Table(
        "SYSCHKCST",
        ischema,
        Column("CONSTRAINT_SCHEMA", sa_types.Unicode, key="conschema"),
        Column("CONSTRAINT_NAME", sa_types.Unicode, key="conname"),
        Column("CHECK_CLAUSE", sa_types.Unicode, key="chkclause"),
        Column("ROUNDING_MODE", sa_types.Unicode, key="rndmode"),
        Column("SYSTEM_CONSTRAINT_SCHEMA", sa_types.Unicode, key="syscstchema"),
        Column("INSERT_ACTION", sa_types.Unicode, key="insact"),
        Column("UPDATE_ACTION", sa_types.Unicode, key="updact"),
        schema="QSYS2",
    )

    sys_columns = Table(
        "SYSCOLUMNS",
        ischema,
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("COLUMN_NAME", sa_types.Unicode, key="colname"),
        Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
        Column("DATA_TYPE", sa_types.Unicode, key="typename"),
        Column("LENGTH", sa_types.Integer, key="length"),
        Column("NUMERIC_SCALE", sa_types.Integer, key="scale"),
        Column("IS_NULLABLE", sa_types.Unicode, key="nullable"),
        Column("COLUMN_DEFAULT", sa_types.Unicode, key="defaultval"),
        Column("HAS_DEFAULT", sa_types.Unicode, key="hasdef"),
        Column("IS_IDENTITY", sa_types.Unicode, key="isid"),
        Column("IDENTITY_GENERATION", sa_types.Unicode, key="idgenerate"),
        schema="QSYS2",
    )

    sys_indexes = Table(
        "SYSINDEXES",
        ischema,
        Column("TABLE_SCHEMA", sa_types.Unicode, key="tabschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="tabname"),
        Column("INDEX_SCHEMA", sa_types.Unicode, key="indschema"),
        Column("INDEX_NAME", sa_types.Unicode, key="indname"),
        Column("IS_UNIQUE", sa_types.Unicode, key="uniquerule"),
        schema="QSYS2",
    )

    sys_keys = Table(
        "SYSKEYS",
        ischema,
        Column("INDEX_SCHEMA", sa_types.Unicode, key="indschema"),
        Column("INDEX_NAME", sa_types.Unicode, key="indname"),
        Column("COLUMN_NAME", sa_types.Unicode, key="colname"),
        Column("ORDINAL_POSITION", sa_types.Integer, key="colno"),
        Column("ORDERING", sa_types.Unicode, key="ordering"),
        schema="QSYS2",
    )

    sys_foreignkeys = Table(
        "SQLFOREIGNKEYS",
        ischema,
        Column("FK_NAME", sa_types.Unicode, key="fkname"),
        Column("FKTABLE_SCHEM", sa_types.Unicode, key="fktabschema"),
        Column("FKTABLE_NAME", sa_types.Unicode, key="fktabname"),
        Column("FKCOLUMN_NAME", sa_types.Unicode, key="fkcolname"),
        Column("PK_NAME", sa_types.Unicode, key="pkname"),
        Column("PKTABLE_SCHEM", sa_types.Unicode, key="pktabschema"),
        Column("PKTABLE_NAME", sa_types.Unicode, key="pktabname"),
        Column("PKCOLUMN_NAME", sa_types.Unicode, key="pkcolname"),
        Column("KEY_SEQ", sa_types.Integer, key="colno"),
        schema="SYSIBM",
    )

    sys_views = Table(
        "SYSVIEWS",
        ischema,
        Column("TABLE_SCHEMA", sa_types.Unicode, key="viewschema"),
        Column("TABLE_NAME", sa_types.Unicode, key="viewname"),
        Column("VIEW_DEFINITION", sa_types.Unicode, key="text"),
        schema="QSYS2",
    )

    sys_sequences = Table(
        "SYSSEQUENCES",
        ischema,
        Column("SEQUENCE_SCHEMA", sa_types.Unicode, key="seqschema"),
        Column("SEQUENCE_NAME", sa_types.Unicode, key="seqname"),
        schema="QSYS2",
    )

    def has_table(self, connection, table_name, schema=None):
        """Return True if *table_name* exists in *schema* (or the default)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        if current_schema:
            whereclause = and_(
                self.sys_tables.c.tabschema == current_schema,
                self.sys_tables.c.tabname == table_name,
            )
        else:
            whereclause = self.sys_tables.c.tabname == table_name
        select_statement = select([self.sys_tables], whereclause)
        results = connection.execute(select_statement)
        return results.first() is not None

    def has_sequence(self, connection, sequence_name, schema=None):
        """Return True if *sequence_name* exists in *schema* (or the default)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        sequence_name = self.denormalize_name(sequence_name)
        if current_schema:
            whereclause = and_(
                self.sys_sequences.c.seqschema == current_schema,
                self.sys_sequences.c.seqname == sequence_name,
            )
        else:
            whereclause = self.sys_sequences.c.seqname == sequence_name
        select_statement = select([self.sys_sequences.c.seqname], whereclause)
        results = connection.execute(select_statement)
        return results.first() is not None
    @reflection.cache
    def get_schema_names(self, connection, **kw):
        """Return normalized user schema names (SYS% and Q% are filtered out)."""
        sysschema = self.sys_schemas
        query = (
            select([sysschema.c.schemaname])
            .where(sysschema.c.schemaname.notlike("SYS%"))
            .where(sysschema.c.schemaname.notlike("Q%"))
            .order_by(sysschema.c.schemaname)
        )
        return [self.normalize_name(r[0]) for r in connection.execute(query)]

    # Retrieves a list of table names for a given schema
    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        """Return non-system table ("T") and partitioned table ("P") names."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)

        query = (
            select([self.sys_tables.c.tabname])
            .where(self.sys_tables.c.tabschema == current_schema)
            .where(self.sys_tables.c.tabtype.in_(["T", "P"]))
            .where(self.sys_tables.c.tabsys == "N")
            .order_by(self.sys_tables.c.tabname)
        )

        return [self.normalize_name(r[0]) for r in connection.execute(query)]

    # NOTE(review): unlike the other reflection methods here, this one is not
    # decorated with @reflection.cache — confirm whether that is intentional.
    def get_view_names(self, connection, schema=None, **kw):
        """Return non-system view names for *schema* (or the default)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)

        query = (
            select([self.sys_tables.c.tabname])
            .where(self.sys_tables.c.tabschema == current_schema)
            .where(self.sys_tables.c.tabtype.in_(["V"]))
            .where(self.sys_tables.c.tabsys == "N")
            .order_by(self.sys_tables.c.tabname)
        )

        return [self.normalize_name(r[0]) for r in connection.execute(query)]

    @reflection.cache
    def get_view_definition(self, connection, viewname, schema=None, **kw):
        """Return the SQL text that defines *viewname*."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        viewname = self.denormalize_name(viewname)

        query = select([self.sys_views.c.text]).where(
            and_(
                self.sys_views.c.viewschema == current_schema,
                self.sys_views.c.viewname == viewname,
            )
        )

        return connection.execute(query).scalar()

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        """Return column reflection dicts for *table_name*.

        Each dict has name/type/nullable/default/autoincrement keys; types
        are resolved through ischema_names, with length/precision/scale
        applied for the character and numeric families.
        """
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        syscols = self.sys_columns

        query = select(
            [
                syscols.c.colname,
                syscols.c.typename,
                syscols.c.defaultval,
                syscols.c.nullable,
                syscols.c.length,
                syscols.c.scale,
                syscols.c.isid,
                syscols.c.idgenerate,
            ],
            and_(
                syscols.c.tabschema == current_schema, syscols.c.tabname == table_name
            ),
            order_by=[syscols.c.colno],
        )
        sa_columns = []
        for row in connection.execute(query):
            coltype = row[1].upper()
            if coltype in ["DECIMAL", "NUMERIC"]:
                coltype = self.ischema_names.get(coltype)(
                    precision=int(row[4]), scale=int(row[5])
                )
            elif coltype in ["CHARACTER", "CHAR", "VARCHAR", "GRAPHIC", "VARGRAPHIC"]:
                coltype = self.ischema_names.get(coltype)(length=int(row[4]))
            else:
                try:
                    coltype = self.ischema_names[coltype]
                except KeyError:
                    util.warn(
                        "Did not recognize type '%s' of column '%s'" % (coltype, row[0])
                    )
                    coltype = sa_types.NULLTYPE

            sa_columns.append(
                {
                    "name": self.normalize_name(row[0]),
                    "type": coltype,
                    "nullable": row[3] == "Y",
                    "default": row[2],
                    # Identity columns with a generation clause are treated
                    # as autoincrement.
                    "autoincrement": (row[6] == "YES") and (row[7] is not None),
                }
            )
        return sa_columns

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        """Return {"constrained_columns": [...], "name": ...} for the PK."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        sysconst = self.sys_table_constraints
        syskeyconst = self.sys_key_constraints

        query = (
            select([syskeyconst.c.colname, sysconst.c.conname])
            .where(
                and_(
                    syskeyconst.c.conschema == sysconst.c.conschema,
                    syskeyconst.c.conname == sysconst.c.conname,
                    sysconst.c.tabschema == current_schema,
                    sysconst.c.tabname == table_name,
                    sysconst.c.contype == "PRIMARY KEY",
                )
            )
            .order_by(syskeyconst.c.colno)
        )

        pk_columns = []
        pk_name = None
        for key in connection.execute(query):
            pk_columns.append(self.normalize_name(key[0]))
            if not pk_name:
                pk_name = self.normalize_name(key[1])
        return {"constrained_columns": pk_columns, "name": pk_name}

    @reflection.cache
    def get_primary_keys(self, connection, table_name, schema=None, **kw):
        """Return just the primary-key column names (legacy reflection API)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        sysconst = self.sys_table_constraints
        syskeyconst = self.sys_key_constraints

        query = (
            select([syskeyconst.c.colname, sysconst.c.tabname])
            .where(
                and_(
                    syskeyconst.c.conschema == sysconst.c.conschema,
                    syskeyconst.c.conname == sysconst.c.conname,
                    sysconst.c.tabschema == current_schema,
                    sysconst.c.tabname == table_name,
                    sysconst.c.contype == "PRIMARY KEY",
                )
            )
            .order_by(syskeyconst.c.colno)
        )

        return [self.normalize_name(key[0]) for key in connection.execute(query)]

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        """Return foreign-key reflection dicts, grouped by constraint name."""
        default_schema = self.default_schema_name
        current_schema = self.denormalize_name(schema or default_schema)
        default_schema = self.normalize_name(default_schema)
        table_name = self.denormalize_name(table_name)
        sysfkeys = self.sys_foreignkeys
        query = select(
            [
                sysfkeys.c.fkname,
                sysfkeys.c.fktabschema,
                sysfkeys.c.fktabname,
                sysfkeys.c.fkcolname,
                sysfkeys.c.pkname,
                sysfkeys.c.pktabschema,
                sysfkeys.c.pktabname,
                sysfkeys.c.pkcolname,
            ],
            and_(
                sysfkeys.c.fktabschema == current_schema,
                sysfkeys.c.fktabname == table_name,
            ),
            order_by=[sysfkeys.c.colno],
        )
        fschema = {}
        for row in connection.execute(query):
            if row[0] not in fschema:
                referred_schema = self.normalize_name(row[5])

                # if no schema specified and referred schema here is the
                # default, then set to None
                if schema is None and referred_schema == default_schema:
                    referred_schema = None

                fschema[row[0]] = {
                    "name": self.normalize_name(row[0]),
                    "constrained_columns": [self.normalize_name(row[3])],
                    "referred_schema": referred_schema,
                    "referred_table": self.normalize_name(row[6]),
                    "referred_columns": [self.normalize_name(row[7])],
                }
            else:
                # Additional column of an already-seen multi-column FK.
                fschema[row[0]]["constrained_columns"].append(
                    self.normalize_name(row[3])
                )
                fschema[row[0]]["referred_columns"].append(self.normalize_name(row[7]))
        return [value for key, value in fschema.items()]

    # Retrieves a list of index names for a given schema
    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        """Return index reflection dicts (name, column_names, unique)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        sysidx = self.sys_indexes
        syskey = self.sys_keys

        query = select(
            [sysidx.c.indname, sysidx.c.uniquerule, syskey.c.colname],
            and_(
                syskey.c.indschema == sysidx.c.indschema,
                syskey.c.indname == sysidx.c.indname,
                sysidx.c.tabschema == current_schema,
                sysidx.c.tabname == table_name,
            ),
            order_by=[syskey.c.indname, syskey.c.colno],
        )
        indexes = {}
        for row in connection.execute(query):
            key = row[0].upper()
            if key in indexes:
                indexes[key]["column_names"].append(self.normalize_name(row[2]))
            else:
                indexes[key] = {
                    "name": self.normalize_name(row[0]),
                    "column_names": [self.normalize_name(row[2])],
                    "unique": row[1] == "Y",
                }
        return [value for key, value in indexes.items()]

    @reflection.cache
    def get_unique_constraints(self, connection, table_name, schema=None, **kw):
        """Return unique-constraint dicts (name, column_names) for a table."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        table_name = self.denormalize_name(table_name)
        sysconst = self.sys_table_constraints
        sysconstcol = self.sys_constraints_columns

        query = (
            select([sysconst.c.conname, sysconstcol.c.colname])
            .where(
                and_(
                    sysconstcol.c.conschema == sysconst.c.conschema,
                    sysconstcol.c.conname == sysconst.c.conname,
                    sysconst.c.tabschema == current_schema,
                    sysconst.c.tabname == table_name,
                    sysconst.c.contype == "UNIQUE",
                )
            )
            .order_by(
                sysconst.c.conname,
                sysconstcol.c.colname,
            )
        )

        constraints = defaultdict(list)
        for name, column_name in connection.execute(query):
            constraints[name].append(self.normalize_name(column_name))

        return [
            {
                "name": self.normalize_name(name),
                "column_names": value,
            }
            for name, value in constraints.items()
        ]

    @reflection.cache
    def get_sequence_names(self, connection, schema, **kw):
        """Return normalized sequence names in *schema* (or the default)."""
        current_schema = self.denormalize_name(schema or self.default_schema_name)
        query = select([self.sys_sequences.c.seqname]).where(
            self.sys_sequences.c.seqschema == current_schema,
        )
        return [self.normalize_name(r[0]) for r in connection.execute(query)]
1475 | self.sys_sequences.c.seqschema == current_schema, 1476 | ) 1477 | return [self.normalize_name(r[0]) for r in connection.execute(query)] 1478 | 1479 | def _check_text_server(self, connection): 1480 | stmt = "SELECT COUNT(*) FROM QSYS2.SYSTEXTSERVERS" 1481 | return connection.execute(stmt).scalar() 1482 | 1483 | def do_executemany(self, cursor, statement, parameters, context=None): 1484 | cursor.fast_executemany = self.fast_executemany 1485 | cursor.executemany(statement, parameters) 1486 | -------------------------------------------------------------------------------- /sqlalchemy_ibmi/constants.py: -------------------------------------------------------------------------------- 1 | RESERVED_WORDS = { 2 | "activate", 3 | "disallow", 4 | "locale", 5 | "result", 6 | "add", 7 | "disconnect", 8 | "localtime", 9 | "result_set_locator", 10 | "after", 11 | "distinct", 12 | "localtimestamp", 13 | "return", 14 | "alias", 15 | "do", 16 | "locator", 17 | "returns", 18 | "all", 19 | "double", 20 | "locators", 21 | "revoke", 22 | "allocate", 23 | "drop", 24 | "lock", 25 | "right", 26 | "allow", 27 | "dssize", 28 | "lockmax", 29 | "rollback", 30 | "alter", 31 | "dynamic", 32 | "locksize", 33 | "routine", 34 | "and", 35 | "each", 36 | "long", 37 | "row", 38 | "any", 39 | "editproc", 40 | "loop", 41 | "row_number", 42 | "as", 43 | "else", 44 | "maintained", 45 | "rownumber", 46 | "asensitive", 47 | "elseif", 48 | "materialized", 49 | "rows", 50 | "associate", 51 | "enable", 52 | "maxvalue", 53 | "rowset", 54 | "asutime", 55 | "encoding", 56 | "microsecond", 57 | "rrn", 58 | "at", 59 | "encryption", 60 | "microseconds", 61 | "run", 62 | "attributes", 63 | "end", 64 | "minute", 65 | "savepoint", 66 | "audit", 67 | "end-exec", 68 | "minutes", 69 | "schema", 70 | "authorization", 71 | "ending", 72 | "minvalue", 73 | "scratchpad", 74 | "aux", 75 | "erase", 76 | "mode", 77 | "scroll", 78 | "auxiliary", 79 | "escape", 80 | "modifies", 81 | "search", 82 | "before", 83 | "every", 84 
| "month", 85 | "second", 86 | "begin", 87 | "except", 88 | "months", 89 | "seconds", 90 | "between", 91 | "exception", 92 | "new", 93 | "secqty", 94 | "binary", 95 | "excluding", 96 | "new_table", 97 | "security", 98 | "bufferpool", 99 | "exclusive", 100 | "nextval", 101 | "select", 102 | "by", 103 | "execute", 104 | "no", 105 | "sensitive", 106 | "cache", 107 | "exists", 108 | "nocache", 109 | "sequence", 110 | "call", 111 | "exit", 112 | "nocycle", 113 | "session", 114 | "called", 115 | "explain", 116 | "nodename", 117 | "session_user", 118 | "capture", 119 | "external", 120 | "nodenumber", 121 | "set", 122 | "cardinality", 123 | "extract", 124 | "nomaxvalue", 125 | "signal", 126 | "cascaded", 127 | "fenced", 128 | "nominvalue", 129 | "simple", 130 | "case", 131 | "fetch", 132 | "none", 133 | "some", 134 | "cast", 135 | "fieldproc", 136 | "noorder", 137 | "source", 138 | "ccsid", 139 | "file", 140 | "normalized", 141 | "specific", 142 | "char", 143 | "final", 144 | "not", 145 | "sql", 146 | "character", 147 | "for", 148 | "null", 149 | "sqlid", 150 | "check", 151 | "foreign", 152 | "nulls", 153 | "stacked", 154 | "close", 155 | "free", 156 | "numparts", 157 | "standard", 158 | "cluster", 159 | "from", 160 | "obid", 161 | "start", 162 | "collection", 163 | "full", 164 | "of", 165 | "starting", 166 | "collid", 167 | "function", 168 | "old", 169 | "statement", 170 | "column", 171 | "general", 172 | "old_table", 173 | "static", 174 | "comment", 175 | "generated", 176 | "on", 177 | "stay", 178 | "commit", 179 | "get", 180 | "open", 181 | "stogroup", 182 | "concat", 183 | "global", 184 | "optimization", 185 | "stores", 186 | "condition", 187 | "go", 188 | "optimize", 189 | "style", 190 | "connect", 191 | "goto", 192 | "option", 193 | "substring", 194 | "connection", 195 | "grant", 196 | "or", 197 | "summary", 198 | "constraint", 199 | "graphic", 200 | "order", 201 | "synonym", 202 | "contains", 203 | "group", 204 | "out", 205 | "sysfun", 206 | "continue", 207 | 
"handler", 208 | "outer", 209 | "sysibm", 210 | "count", 211 | "hash", 212 | "over", 213 | "sysproc", 214 | "count_big", 215 | "hashed_value", 216 | "overriding", 217 | "system", 218 | "create", 219 | "having", 220 | "package", 221 | "system_user", 222 | "cross", 223 | "hint", 224 | "padded", 225 | "table", 226 | "current", 227 | "hold", 228 | "pagesize", 229 | "tablespace", 230 | "current_date", 231 | "hour", 232 | "parameter", 233 | "then", 234 | "current_lc_ctype", 235 | "hours", 236 | "part", 237 | "time", 238 | "current_path", 239 | "identity", 240 | "partition", 241 | "timestamp", 242 | "current_schema", 243 | "if", 244 | "partitioned", 245 | "to", 246 | "current_server", 247 | "immediate", 248 | "partitioning", 249 | "transaction", 250 | "current_time", 251 | "in", 252 | "partitions", 253 | "trigger", 254 | "current_timestamp", 255 | "including", 256 | "password", 257 | "trim", 258 | "current_timezone", 259 | "inclusive", 260 | "path", 261 | "type", 262 | "current_user", 263 | "increment", 264 | "piecesize", 265 | "undo", 266 | "cursor", 267 | "index", 268 | "plan", 269 | "union", 270 | "cycle", 271 | "indicator", 272 | "position", 273 | "unique", 274 | # Even though data is listed as reserved, it doesn't seem to actully need 275 | # quoting and quoting it causes problems with HasTableTest 276 | # "data", 277 | "inherit", 278 | "precision", 279 | "until", 280 | "database", 281 | "inner", 282 | "prepare", 283 | "update", 284 | "datapartitionname", 285 | "inout", 286 | "prevval", 287 | "usage", 288 | "datapartitionnum", 289 | "insensitive", 290 | "primary", 291 | "user", 292 | "date", 293 | "insert", 294 | "priqty", 295 | "using", 296 | "day", 297 | "integrity", 298 | "privileges", 299 | "validproc", 300 | "days", 301 | "intersect", 302 | "procedure", 303 | "value", 304 | "db2general", 305 | "into", 306 | "program", 307 | "values", 308 | "db2genrl", 309 | "is", 310 | "psid", 311 | "variable", 312 | "db2sql", 313 | "isobid", 314 | "query", 315 | "variant", 316 
| "dbinfo", 317 | "isolation", 318 | "queryno", 319 | "vcat", 320 | "dbpartitionname", 321 | "iterate", 322 | "range", 323 | "version", 324 | "dbpartitionnum", 325 | "jar", 326 | "rank", 327 | "view", 328 | "deallocate", 329 | "java", 330 | "read", 331 | "volatile", 332 | "declare", 333 | "join", 334 | "reads", 335 | "volumes", 336 | "default", 337 | "key", 338 | "recovery", 339 | "when", 340 | "defaults", 341 | "label", 342 | "references", 343 | "whenever", 344 | "definition", 345 | "language", 346 | "referencing", 347 | "where", 348 | "delete", 349 | "lateral", 350 | "refresh", 351 | "while", 352 | "dense_rank", 353 | "lc_ctype", 354 | "release", 355 | "with", 356 | "denserank", 357 | "leave", 358 | "rename", 359 | "without", 360 | "describe", 361 | "left", 362 | "repeat", 363 | "wlm", 364 | "descriptor", 365 | "like", 366 | "reset", 367 | "write", 368 | "deterministic", 369 | "linktype", 370 | "resignal", 371 | "xmlelement", 372 | "diagnostics", 373 | "local", 374 | "restart", 375 | "year", 376 | "disable", 377 | "localdate", 378 | "restrict", 379 | "years", 380 | "", 381 | "abs", 382 | "grouping", 383 | "regr_intercept", 384 | "are", 385 | "int", 386 | "regr_r2", 387 | "array", 388 | "integer", 389 | "regr_slope", 390 | "asymmetric", 391 | "intersection", 392 | "regr_sxx", 393 | "atomic", 394 | "interval", 395 | "regr_sxy", 396 | "avg", 397 | "large", 398 | "regr_syy", 399 | "bigint", 400 | "leading", 401 | "rollup", 402 | "blob", 403 | "ln", 404 | "scope", 405 | "boolean", 406 | "lower", 407 | "similar", 408 | "both", 409 | "match", 410 | "smallint", 411 | "ceil", 412 | "max", 413 | "specifictype", 414 | "ceiling", 415 | "member", 416 | "sqlexception", 417 | "char_length", 418 | "merge", 419 | "sqlstate", 420 | "character_length", 421 | "method", 422 | "sqlwarning", 423 | "clob", 424 | "min", 425 | "sqrt", 426 | "coalesce", 427 | "mod", 428 | "stddev_pop", 429 | "collate", 430 | "module", 431 | "stddev_samp", 432 | "collect", 433 | "multiset", 434 | 
"submultiset", 435 | "convert", 436 | "national", 437 | "sum", 438 | "corr", 439 | "natural", 440 | "symmetric", 441 | "corresponding", 442 | "nchar", 443 | "tablesample", 444 | "covar_pop", 445 | "nclob", 446 | "timezone_hour", 447 | "covar_samp", 448 | "normalize", 449 | "timezone_minute", 450 | "cube", 451 | "nullif", 452 | "trailing", 453 | "cume_dist", 454 | "numeric", 455 | "translate", 456 | "current_default_transform_group", 457 | "octet_length", 458 | "translation", 459 | "current_role", 460 | "only", 461 | "treat", 462 | "current_transform_group_for_type", 463 | "overlaps", 464 | "true", 465 | "dec", 466 | "overlay", 467 | "uescape", 468 | "decimal", 469 | "percent_rank", 470 | "unknown", 471 | "deref", 472 | "percentile_cont", 473 | "unnest", 474 | "element", 475 | "percentile_disc", 476 | "upper", 477 | "exec", 478 | "power", 479 | "var_pop", 480 | "exp", 481 | "real", 482 | "var_samp", 483 | "false", 484 | "recursive", 485 | "varchar", 486 | "filter", 487 | "ref", 488 | "varying", 489 | "float", 490 | "regr_avgx", 491 | "width_bucket", 492 | "floor", 493 | "regr_avgy", 494 | "window", 495 | "fusion", 496 | "regr_count", 497 | "within", 498 | "asc", 499 | } 500 | -------------------------------------------------------------------------------- /sqlalchemy_ibmi/requirements.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.testing.requirements import SuiteRequirements 2 | from sqlalchemy.testing import exclusions 3 | 4 | 5 | class Requirements(SuiteRequirements): 6 | 7 | """sqlalchemy requirements for tests. 
    This class provides the mechanism to
    set available functionality in the dialect"""

    # TODO These methods are overridden from the default dialect and should be
    # implemented

    # NOTE: exclusions.closed() marks a feature as unsupported (tests skipped)
    # and exclusions.open() marks it as supported.

    @property
    def on_update_cascade(self):
        """target database must support ON UPDATE..CASCADE behavior in
        foreign keys."""

        return exclusions.closed()

    @property
    def time_microseconds(self):
        """target dialect supports representation of Python
        datetime.time() with microsecond objects."""

        return exclusions.closed()

    @property
    def unbounded_varchar(self):
        """Target database must support VARCHAR with no length"""

        return exclusions.closed()

    @property
    def window_functions(self):
        """Target database must support window functions."""
        return exclusions.open()

    @property
    def precision_numerics_enotation_small(self):
        """target backend supports Decimal() objects using E notation
        to represent very small values."""
        return exclusions.open()

    @property
    def precision_numerics_enotation_large(self):
        """target backend supports Decimal() objects using E notation
        to represent very large values."""
        return exclusions.closed()

    @property
    def precision_numerics_many_significant_digits(self):
        """target backend supports values with many digits on both sides,
        such as 319438950232418390.273596, 87673.594069654243
        """
        return exclusions.open()

    @property
    def precision_numerics_retains_significant_digits(self):
        """A precision numeric type will return empty significant digits,
        i.e. a value such as 10.000 will come back in Decimal form with
        the .000 maintained."""

        return exclusions.open()

    @property
    def check_constraint_reflection(self):
        """target dialect supports reflection of check constraints"""
        return exclusions.open()

    # DB2 for i does not support temporary tables
    @property
    def temp_table_names(self):
        """target dialect supports listing of temporary table names"""
        return exclusions.closed()

    @property
    def temporary_tables(self):
        """target database supports temporary tables"""
        return exclusions.closed()

    @property
    def temporary_views(self):
        """target database supports temporary views"""
        return exclusions.closed()

    @property
    def temp_table_reflection(self):
        # Reflection of temporary tables; closed for the same reason as above.
        return exclusions.closed()

    # adding implicitly_named_constraints which is not included in the
    # requirements.py in testing suite
    @property
    def implicitly_named_constraints(self):
        """target database must apply names to unnamed constraints."""
        return exclusions.open()

    @property
    def fetch_first(self):
        # Supports FETCH FIRST n ROWS in queries.
        return exclusions.open()

    @property
    def fetch_expression(self):
        # Supports expressions in FETCH/OFFSET clauses.
        return exclusions.open()

    @property
    def fetch_no_order_by(self):
        # FETCH is allowed without an accompanying ORDER BY.
        return exclusions.open()

    @property
    def floats_to_four_decimals(self):
        return exclusions.closed()

    @property
    def non_updating_cascade(self):
        """target database must *not* support ON UPDATE..CASCADE behavior in
        foreign keys."""
        return exclusions.open()

    @property
    def reflects_pk_names(self):
        # Primary key constraint names are available via reflection.
        return exclusions.open()

    @property
    def schemas(self):
        """Target database must support external schemas, and have one
        named 'test_schema'."""

        # TODO: Errors due to "Qualifier SQLALCHEMY not same as name TEST_SCHEMA"
        # If we're going to support schemas properly, we need to ensure we qualify
        # constraint names in a CREATE TABLE statement and likely elsewhere.
        #
        # In addition, we don't seem to handle schema_translate_map properly, so
        # test_nextval_direct_schema_translate fails.
        return exclusions.closed()

    @property
    def views(self):
        """Target database must support VIEWs."""
        return exclusions.open()

    @property
    def savepoints(self):
        """Target database must support savepoints."""

        return exclusions.open()
--------------------------------------------------------------------------------
/test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/IBM/sqlalchemy-ibmi/bf939974b3350bf50bcb6ad1a46523659fd77acf/test/__init__.py
--------------------------------------------------------------------------------
/test/conftest.py:
--------------------------------------------------------------------------------
# Follow standard SQLAlchemy testing setup, see
# https://github.com/sqlalchemy/sqlalchemy/blob/b5927dd9229d9ce85fc2ba25dad10ecbb749195c/README.dialects.rst

from sqlalchemy.dialects import registry
import pytest

# Make the "ibmi" (and explicit "ibmi.pyodbc") URL schemes resolve to this
# package's dialect class for the test runs.
registry.register("ibmi", "sqlalchemy_ibmi.base", "IBMiDb2Dialect")
registry.register("ibmi.pyodbc", "sqlalchemy_ibmi.base", "IBMiDb2Dialect")

pytest.register_assert_rewrite("sqlalchemy.testing.assertions")

# NOTE(review): this import is deliberately placed after the registry and
# assert-rewrite calls above — presumably so rewriting applies to the testing
# plugin; do not move it to the top of the file.
from sqlalchemy.testing.plugin.pytestplugin import *  # noqa F401,F403
--------------------------------------------------------------------------------
/test/test_cache.py:
--------------------------------------------------------------------------------
from decimal import Decimal
from datetime import date, time, datetime

from sqlalchemy import literal
from sqlalchemy import testing
from sqlalchemy.testing import fixtures

from
.util import SA_Version

# SQLAlchemy 1.4 changed select() to take columns positionally; on older
# versions wrap the legacy list-based form behind the same call shape.
if SA_Version >= [1, 4]:
    from sqlalchemy import select
else:
    from sqlalchemy import select as _select

    def select(*args):
        # Adapt 1.4-style select(col, ...) to the 1.3 select([col, ...]) form.
        return _select(args)


class CachingTest(fixtures.TestBase):
    # Each combination supplies two distinct literal values of the same type;
    # if a literal were baked into the cached statement text, the second
    # execution would wrongly return the first value.
    @testing.combinations(
        (True, False),
        (1891723, 21971283),
        (1.0, 2.0),
        (Decimal("1345.0"), Decimal("2987.1")),
        (datetime(2000, 1, 1), datetime(2023, 11, 28)),
        (date(2000, 1, 1), date(2023, 11, 28)),
        (time(1), time(12)),
        ("foo", "bar"),
        (b"foo", b"bar"),
        argnames="first,second",
    )
    def test_cache_literal(self, connection, first, second):
        """Test that we don't embed literals in cached statements"""
        exp = (first, second)
        results = tuple([connection.scalar(select(literal(_))) for _ in exp])
        assert exp == results
--------------------------------------------------------------------------------
/test/test_suite.py:
--------------------------------------------------------------------------------
from .util import SA_Version
from sqlalchemy.testing.suite import *  # noqa - need * to import test suite
from sqlalchemy.testing.suite import testing

from sqlalchemy.testing.suite import ComponentReflectionTest as _ComponentReflectionTest
from sqlalchemy.testing.suite import ExpandingBoundInTest as _ExpandingBoundInTest
from sqlalchemy.testing.suite import InsertBehaviorTest as _InsertBehaviorTest
from sqlalchemy.testing.suite import StringTest as _StringTest
from sqlalchemy.testing.suite import TextTest as _TextTest
from sqlalchemy.testing.suite import UnicodeTextTest as _UnicodeTextTest
from sqlalchemy.testing.suite import UnicodeVarcharTest as _UnicodeVarcharTest

# The unique-constraint reflection tests changed shape in SQLAlchemy 1.4
# (combinations-based), so the skips are declared per version branch.
if SA_Version < [1, 4]:

    class ComponentReflectionTest(_ComponentReflectionTest):
        @testing.requires.unique_constraint_reflection
        @testing.requires.schemas
        @testing.skip("ibmi", "Db2 doesn't support duplicate constraints")
        def test_get_unique_constraints_with_schema(self):
            pass

        @testing.requires.unique_constraint_reflection
        @testing.skip("ibmi", "Db2 doesn't support duplicate constraints")
        def test_get_unique_constraints(self):
            pass

else:

    class ComponentReflectionTest(_ComponentReflectionTest):
        @testing.combinations(
            (True, testing.requires.schemas), (False,), argnames="use_schema"
        )
        @testing.requires.unique_constraint_reflection
        @testing.skip("ibmi", "Db2 doesn't support duplicate constraints")
        def test_get_unique_constraints(self, metadata, connection, use_schema):
            pass


# empty set tests not possible on DB2 for i
class ExpandingBoundInTest(_ExpandingBoundInTest):
    @testing.skip("ibmi")
    def test_multiple_empty_sets(self):
        pass

    @testing.skip("ibmi")
    def test_empty_set_against_integer(self):
        pass

    @testing.skip("ibmi")
    def test_empty_set_against_integer_negation(self):
        pass

    @testing.skip("ibmi")
    def test_empty_set_against_string(self):
        pass

    @testing.skip("ibmi")
    def test_empty_set_against_string_negation(self):
        pass

    @testing.skip("ibmi")
    def test_null_in_empty_set_is_false(self):
        pass


class InsertBehaviorTest(_InsertBehaviorTest):
    # Skipping test due to incompatible sql query with Db2. Using parameter
    # markers in a arithmetic expression is not supported. To force this to
    # work, one can cast the parameter marker to int or float before
    # performing the operation. However, this will not work here due to
    # SQLAlchemy code
    @testing.skip("ibmi")
    def test_insert_from_select_with_defaults(self):
        pass


# An assertion error is caused in certain tests by an issue with the IBM i
# Access ODBC Driver for Linux. Until that issue is fixed, the following tests
# will be skipped in the StringTest. TextTest, UnicodeTextTest,
# and UnicodeVarcharTest classes.


class StringTest(_StringTest):
    @testing.skip("ibmi")
    def test_literal_non_ascii(self):
        pass


class TextTest(_TextTest):
    @testing.skip("ibmi")
    def test_literal_non_ascii(self):
        pass


class UnicodeTextTest(_UnicodeTextTest):
    @testing.skip("ibmi")
    def test_literal_non_ascii(self):
        pass

    @testing.skip("ibmi")
    def test_literal(self):
        pass


class UnicodeVarcharTest(_UnicodeVarcharTest):
    @testing.skip("ibmi")
    def test_literal(self):
        pass

    @testing.skip("ibmi")
    def test_literal_non_ascii(self):
        pass
--------------------------------------------------------------------------------
/test/util.py:
--------------------------------------------------------------------------------
from sqlalchemy import __version__ as _SA_Version

# [major, minor] of the installed SQLAlchemy as ints, e.g. "1.4.39" -> [1, 4]
SA_Version = [int(ver_token) for ver_token in _SA_Version.split(".")[0:2]]
--------------------------------------------------------------------------------