├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .gitmodules ├── CHANGES.md ├── LICENSE ├── Makefile ├── README.asyncpg.md ├── README.md ├── README.psycopg.md ├── README.testing.md ├── cockroachdb └── sqlalchemy │ └── __init__.py ├── dev-requirements.in ├── dev-requirements.txt ├── pyproject.toml ├── setup.cfg ├── setup.py ├── sqlalchemy_cockroachdb ├── __init__.py ├── _psycopg_common.py ├── asyncpg.py ├── base.py ├── ddl_compiler.py ├── provision.py ├── psycopg.py ├── psycopg2.py ├── requirements.py ├── stmt_compiler.py └── transaction.py ├── test-requirements.in ├── test-requirements.txt ├── test ├── __init__.py ├── conftest.py ├── test_across_schema.py ├── test_column_reflect.py ├── test_introspection.py ├── test_json.py ├── test_run_transaction_core.py ├── test_run_transaction_session.py ├── test_suite_alembic.py ├── test_suite_sqlalchemy.py └── test_with_hint.py └── tox.ini /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Test and Lint 2 | 3 | on: 4 | # Triggers the workflow on push or pull request events. 5 | push: 6 | # This should disable running the workflow on tags, according to the 7 | # on.<push|pull_request>.<branches|tags> GitHub Actions docs. 8 | branches: 9 | - "*" 10 | pull_request: 11 | types: [opened, reopened, synchronize] 12 | 13 | # Allows you to run this workflow manually from the Actions tab 14 | workflow_dispatch: 15 | 16 | # This allows a subsequently queued workflow run to interrupt previous runs. 17 | concurrency: 18 | group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' 19 | cancel-in-progress: true 20 | 21 | jobs: 22 | # This job aggregates all matrix results and is used for a GitHub required status check. 23 | test_results: 24 | if: ${{ always() }} 25 | runs-on: ubuntu-latest 26 | name: Test Results 27 | needs: [test-py39] 28 | steps: 29 | - run: | 30 | result="${{ needs.test-py39.result }}" 31 | if [[ $result == "success" || $result == "skipped" ]]; then 32 | exit 0 33 | else 34 | exit 1 35 | fi 36 | 37 | test-py39: 38 | runs-on: ubuntu-latest 39 | strategy: 40 | fail-fast: false 41 | matrix: 42 | crdb-version: [ 43 | "cockroach:latest-v24.1", 44 | "cockroach:latest-v24.2", 45 | "cockroach:latest-v24.3", 46 | "cockroach:latest-v25.1" 47 | ] 48 | db-alias: [ 49 | "psycopg2", 50 | "asyncpg", 51 | "psycopg" 52 | ] 53 | env: 54 | TOXENV: py39 55 | TOX_VERSION: 3.23.1 56 | steps: 57 | - uses: actions/checkout@v4 58 | - uses: actions/setup-python@v5 59 | with: 60 | python-version: '3.9' 61 | - name: Start CockroachDB 62 | run: | 63 | docker pull cockroachdb/${{ matrix.crdb-version }} 64 | docker run --rm -d --name crdb -p 26257:26257 \ 65 | cockroachdb/${{ matrix.crdb-version }} start-single-node --insecure 66 | sleep 10 67 | docker exec crdb cockroach sql --insecure --host=localhost:26257 \ 68 | -e 'CREATE SCHEMA test_schema; CREATE SCHEMA test_schema_2;' 69 | - name: Install testrunner 70 | run: pip install --user tox==${TOX_VERSION} 71 | - name: Test 72 | run: ${HOME}/.local/bin/tox -- --db=${{ matrix.db-alias }} 73 | 74 | lint: 75 | runs-on: ubuntu-latest 76 | env: 77 | TOXENV: py39 78 | TOX_VERSION: 3.23.1 79 | steps: 80 | - uses: actions/checkout@v4 81 | - uses: actions/setup-python@v5 82 | with: 83 | python-version: '3.9' 84 | - name: Install testrunner 85 | run: pip install --user tox==${TOX_VERSION} 86 | - name: Lint 87 | run: ${HOME}/.local/bin/tox -e lint 88 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | *.pyc 2 | __pycache__ 3 | .tox 4 | MANIFEST 5 | *.egg-info 6 | 7 | # pytest cache 8 | .cache 9 | .pytest_cache 10 | 11 | # created by release process 12 | dist/ 13 | build/ 14 | 15 | # created by running tests 16 | env/ 17 | cockroach-data/ 18 | cockroach.pid 19 | 20 | # local test configuration 21 | test.cfg 22 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "cockroach-proto"] 2 | path = cockroach-proto 3 | url = https://github.com/cockroachdb/cockroach-proto.git 4 | -------------------------------------------------------------------------------- /CHANGES.md: -------------------------------------------------------------------------------- 1 | # Version 2.0.3 2 | Unreleased 3 | 4 | - Add support for READ COMMITTED transaction isolation 5 | (see [README.read_committed.md](README.read_committed.md)) 6 | - Add column comment to get_columns method (#253), thanks to @dotan-mor 7 | - Fix autogenerate with ON UPDATE / ON DELETE (#258, #262), thanks to @idumitrescu-dn 8 | - Improve support for table/column comments (via SQLA 2.0.36) 9 | 10 | 11 | # Version 2.0.2 12 | Released January 10, 2024 13 | 14 | - Implement reflection for array types (#213) 15 | - Fix get_multi_columns() to support multiple table names in filter_array (#220) 16 | - Enhance foreign key reflection to accommodate quoting differences with PostgreSQL 17 | - Add psycopg (v3) support (#185) 18 | - Remove unconditional import of psycopg2 (#176) 19 | 20 | # Version 2.0.1 21 | Released April 14, 2023 22 | 23 | - Enable AUTOCOMMIT isolation_level for SQLA 2.0 (#205) 24 | 25 | # Version 2.0.0 26 | Released February 21, 2023 27 | 28 | - Applied dialect code and test changes for compatibility with SQLAlchemy 2.0. This 29 | version of the dialect requires SQLAlchemy 2.0, so to work with earlier versions of 30 | SQLAlchemy use `pip install 'sqlalchemy-cockroachdb<2.0.0'` 31 | - Stopped sending telemetry data during startup. 32 | 33 | # Version 1.4.4 34 | Released September 9, 2022 35 | 36 | - Added `include_hidden` option to `get_columns()` to enable reflection of columns like "rowid". (#173) 37 | - Added support for `.with_hint()` (patch courtesy of Jonathan Dieter) 38 | - Updated column introspection query to work with CockroachDB v22.2. 39 | 40 | # Version 1.4.3 41 | Released December 10, 2021 42 | 43 | - Added preliminary support for asyncpg. See instructions in the README. 44 | 45 | # Version 1.4.2 46 | Released October 21, 2021 47 | 48 | - Updated version telemetry to only report major/minor version of SQLAlchemy. 49 | 50 | # Version 1.4.1 51 | Released October 12, 2021 52 | 53 | - Updated test suite to work with Alembic 1.7. 54 | 55 | # Version 1.4.0 56 | Released July 29, 2021 57 | 58 | - Add telemetry to SQLAlchemy CockroachDB 59 | - Telemetry is enabled by default; set `disable_cockroachdb_telemetry` in `create_engine`'s `connect_args` field to disable it. 60 | - Example: `engine = create_engine('cockroachdb://...', connect_args={"disable_cockroachdb_telemetry": True})` 61 | - Initial compatibility with SQLAlchemy 1.4. 62 | 63 | 64 | # Version 1.3.3 65 | Released April 26, 2021 66 | 67 | - Remove `duplicates_constraint` property for unique indexes 68 | 69 | # Version 1.3.2 70 | Released September 29, 2020 71 | 72 | - Stopped returning primary keys in get_indexes. 
(#42) 72 | - Enabled tests for enums and user-defined schemas for CockroachDB v20.2. 73 | 74 | # Version 1.3.1 75 | Released July 13, 2020 76 | 77 | - Added more support for computed columns. (#119) 78 | - Enabled more tests from SQLAlchemy test suite in CI. 79 | 80 | # Version 1.3.0 81 | 82 | Released June 10, 2020 83 | 84 | - Removed python2 support. 85 | - Version number increased to 1.3.0 to indicate compatibility with SQLAlchemy 1.3.x. 86 | - Column type changes via Alembic are now allowed. (#96) 87 | - Added exponential backoff to run_transaction(). (#115) 88 | 89 | # Version 0.4.0 90 | 91 | Released April 10, 2020 92 | 93 | - Renamed package to sqlalchemy-cockroachdb. 94 | 95 | # Version 0.3.3 96 | 97 | Released October 28, 2019 98 | 99 | - Fixed error when the use_native_hstore or server_side_cursors keyword 100 | arguments were specified. 101 | - Stopped using the deprecated sql.text.typemap parameter. 102 | 103 | # Version 0.3.2 104 | 105 | Released July 1, 2019 106 | 107 | - Removed requirement for psycopg2 so psycopg2-binary can be used as well. 108 | - Updated urllib3 to remove security vulnerability. 109 | 110 | # Version 0.3.1 111 | 112 | Released Feb 25, 2019 113 | 114 | - Support CockroachDB version numbers greater than 2. 115 | 116 | # Version 0.3.0 117 | 118 | Released Jan 23, 2019 119 | 120 | - Added support for more data types. 121 | - Improved introspection of types with modifiers (decimal, varchar). 122 | - Improved introspection of unique constraints. 123 | 124 | # Version 0.2.1 125 | 126 | Released Aug 16, 2018 127 | 128 | - Alembic migrations no longer attempt to run DDL statements in transactions. 129 | - Comments are now dropped from table definitions as CockroachDB does not support them. 130 | 131 | # Version 0.2.0 132 | 133 | Released July 16, 2018 134 | 135 | - Adapter again simultaneously compatible with CockroachDB 1.1, 2.0 136 | and 2.1. 137 | 138 | # Version 0.1.5 139 | 140 | Released July 10, 2018 141 | 142 | - More compatibility improvements for JSON/JSONB support. 143 | 144 | # Version 0.1.4 145 | 146 | Released May 9, 2018 147 | 148 | - Improved compatibility of JSON/JSONB support. 149 | 150 | # Version 0.1.3 151 | 152 | Released Mar 27, 2018 153 | 154 | - Support for JSONB columns is now reported in accordance with CockroachDB 2.0. 155 | 156 | # Version 0.1.2 157 | 158 | Released Feb 7, 2018 159 | 160 | - If Alembic or `sqlalchemy-migrate` is installed, an experimental 161 | `cockroachdb` dialect will be registered with those packages too. 162 | - The `get_foreign_keys()` introspection interface is now supported. 163 | - Fixed introspection of boolean columns. 164 | 165 | # Version 0.1.1 166 | 167 | Released Sep 28, 2017 168 | 169 | - Works with CockroachDB 1.0 and 1.1. 170 | - `get_foreign_keys()` reflection is stubbed out and always returns an empty list. 171 | - Reflection interfaces and the `RETURNING` clause support references to tables outside the current schema. 172 | - Foreign key constraints are no longer stripped out of table creation. 173 | 174 | # Version 0.1.0 175 | 176 | Released May 27, 2016 177 | 178 | - Initial release 179 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | 203 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | ENV_BASE=~/envs 2 | ENV=${ENV_BASE}/sqlalchemy-cockroachdb 3 | TOX=${ENV}/bin/tox 4 | 5 | .PHONY: all 6 | all: test lint 7 | 8 | .PHONY: bootstrap 9 | bootstrap: 10 | @mkdir -p ${ENV} 11 | virtualenv ${ENV} 12 | ${ENV}/bin/pip install -r dev-requirements.txt 13 | 14 | .PHONY: clean-bootstrap-env 15 | clean-bootstrap-env: 16 | rm -rf ${ENV} 17 | 18 | .PHONY: test 19 | test: 20 | ${TOX} -e py39 21 | 22 | .PHONY: lint 23 | lint: 24 | ${TOX} -e lint 25 | 26 | .PHONY: update-requirements 27 | update-requirements: 28 | ${TOX} -e pip-compile 29 | 30 | .PHONY: build 31 | build: clean 32 | ${ENV}/bin/python setup.py sdist 33 | 34 | .PHONY: clean 35 | clean: 36 | rm -rf dist build 37 | 38 | .PHONY: detox 39 | detox: clean 40 | rm -rf .tox 41 | -------------------------------------------------------------------------------- /README.asyncpg.md: -------------------------------------------------------------------------------- 1 | ## asyncpg support 2 | 3 | The connection URL is of the form: 4 | 5 | cockroachdb+asyncpg://root@localhost:26257/defaultdb 6 | 7 | There is a customized version of the FastAPI SQL database tutorial for 8 | `cockroachdb+asyncpg` available at 9 | 10 | https://github.com/gordthompson/fastapi-tutorial-cockroachdb-async 11 | 12 | ### Testing 13 | 14 | Assuming that you have an entry in test.cfg that looks something like 15 | 16 | [db] 17 | asyncpg=cockroachdb+asyncpg://root@localhost:26257/defaultdb 18 | 19 | you can run the tests with asyncpg using a command like 20 | 21 | pytest --db=asyncpg 22 | 23 | If you want to run all the tests *except* the Alembic tests then invoke pytest 24 | using a command like 25 | 26 | pytest --db=asyncpg --ignore-glob='*test_suite_alembic.py' 27 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CockroachDB dialect for SQLAlchemy 2 | 3 | ## Prerequisites 4 | 5 | A database driver (DBAPI layer) is required to work with this dialect. 
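All of the drivers described below are published on PyPI, so each can be installed with `pip`. As a quick reference (pick whichever driver fits your use case, and see the notes below before settling on one for production):

```
pip install psycopg2-binary     # psycopg2 (binary build)
pip install asyncpg             # asyncpg
pip install "psycopg[binary]"   # psycopg version 3
```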
6 | 7 | ### psycopg2 8 | 9 | For psycopg2 support you must install either: 10 | 11 | * [psycopg2](https://pypi.org/project/psycopg2/), which has some 12 | [prerequisites](https://www.psycopg.org/docs/install.html#prerequisites) of 13 | its own, or 14 | 15 | * [psycopg2-binary](https://pypi.org/project/psycopg2-binary/) 16 | 17 | (The binary package is a practical choice for development and testing, but in 18 | production it is advised to use the package built from sources.) 19 | 20 | ### asyncpg 21 | 22 | For asyncpg support you must install 23 | 24 | * [asyncpg](https://pypi.org/project/asyncpg/) 25 | 26 | For more details on working with asyncpg, see 27 | [README.asyncpg.md](README.asyncpg.md) 28 | 29 | ### psycopg 30 | 31 | For psycopg version 3 support (⚠️ experimental 🏗), you'll need to install 32 | 33 | * [psycopg](https://pypi.org/project/psycopg/) 34 | 35 | As with psycopg2, psycopg can be installed as a binary package for development and testing purposes. 36 | (Installing the binary package avoids having to install libpq-dev first.) 37 | 38 | `pip install "psycopg[binary]"` 39 | 40 | For more details on working with psycopg, see 41 | [README.psycopg.md](README.psycopg.md) 42 | 43 | ## Install and usage 44 | 45 | Use `pip` to install the latest release of this dialect. 46 | 47 | ``` 48 | pip install sqlalchemy-cockroachdb 49 | ``` 50 | 51 | NOTE: This version of the dialect requires SQLAlchemy 2.0 or later. To work with 52 | earlier versions of SQLAlchemy you'll need to install an earlier version of this 53 | dialect. (The version specifier is quoted so that the shell does not treat `<` as a redirect.) 54 | 55 | ``` 56 | pip install 'sqlalchemy-cockroachdb<2.0.0' 57 | ``` 58 | 59 | Use a `cockroachdb` connection string when creating the `Engine`. For example, 60 | to connect to an insecure, local CockroachDB cluster using psycopg2: 61 | 62 | ``` 63 | from sqlalchemy import create_engine 64 | engine = create_engine('cockroachdb://root@localhost:26257/defaultdb?sslmode=disable') 65 | ``` 66 | 67 | or 68 | 69 | ``` 70 | from sqlalchemy import create_engine 71 | engine = create_engine('cockroachdb+psycopg2://root@localhost:26257/defaultdb?sslmode=disable') 72 | ``` 73 | 74 | To connect using asyncpg: 75 | 76 | ``` 77 | from sqlalchemy.ext.asyncio import create_async_engine 78 | engine = create_async_engine('cockroachdb+asyncpg://root@localhost:26257/defaultdb') 79 | ``` 80 | 81 | To connect using psycopg for sync operation: 82 | 83 | ``` 84 | from sqlalchemy import create_engine 85 | engine = create_engine('cockroachdb+psycopg://root@localhost:26257/defaultdb') 86 | ``` 87 | 88 | To connect using psycopg for async operation (⚠️ experimental 🏗), see 89 | [README.psycopg.md](README.psycopg.md) 
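Once created, the `Engine` is used like any other SQLAlchemy engine. As a quick sanity check (a minimal sketch, assuming the insecure local cluster from the examples above):

```
from sqlalchemy import create_engine, text

engine = create_engine('cockroachdb://root@localhost:26257/defaultdb?sslmode=disable')
with engine.connect() as conn:
    # Round-trip a trivial query to confirm the dialect and driver are wired up.
    print(conn.scalar(text('SELECT version()')))
```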
90 | 91 | 92 | ## Changelog 93 | 94 | See [CHANGES.md](CHANGES.md) 95 | -------------------------------------------------------------------------------- /README.psycopg.md: -------------------------------------------------------------------------------- 1 | ## psycopg support 2 | 3 | Support for psycopg version 3 (sometimes referred to as "psycopg3") requires the 4 | following *minimum* CockroachDB versions: 22.2.6 or 23.1.0 5 | 6 | ### sync operation 7 | 8 | The connection URL is of the form: 9 | ``` 10 | cockroachdb+psycopg://root@localhost:26257/defaultdb 11 | ``` 12 | 13 | To create the engine 14 | 15 | ``` 16 | from sqlalchemy import create_engine 17 | engine = create_engine('cockroachdb+psycopg://root@localhost:26257/defaultdb') 18 | ``` 19 | 20 | ### async operation (⚠️ experimental 🏗) 21 | 22 | The "classic" approach 23 | 24 | ``` 25 | from sqlalchemy.ext.asyncio import create_async_engine 26 | engine = create_async_engine('cockroachdb+psycopg://root@localhost:26257/defaultdb') 27 | ``` 28 | 29 | does work, but it does not take advantage of the CockroachDB-specific connection code 30 | in psycopg; we just get a plain `psycopg.AsyncConnection`. After 31 | `cnxn = await engine.raw_connection()` we get 32 | 33 | ``` 34 | (Pdb) cnxn.driver_connection 35 | <psycopg.AsyncConnection [IDLE] ... at 0x...> 36 | ``` 37 | 38 | The alternative approach is to use the following 39 | 40 | ```python 41 | import psycopg.crdb 42 | from sqlalchemy.ext.asyncio import create_async_engine 43 | 44 | async def get_async_crdb_connection(): 45 | return await psycopg.crdb.AsyncCrdbConnection.connect( 46 | "host=localhost port=26257 user=root dbname=defaultdb" 47 | ) 48 | 49 | async def async_main(): 50 | engine = create_async_engine( 51 | "cockroachdb+psycopg://", 52 | async_creator=get_async_crdb_connection, 53 | ) 54 | 55 | ``` 56 | 57 | which gives us an `AsyncCrdbConnection` 58 | 59 | ``` 60 | (Pdb) cnxn.driver_connection 61 | <psycopg.crdb.AsyncCrdbConnection [IDLE] ... at 0x...> 62 | ``` 63 | -------------------------------------------------------------------------------- /README.testing.md: -------------------------------------------------------------------------------- 1 | ## Testing this dialect with SQLAlchemy and Alembic 2 | 3 | "setup.cfg" contains a default SQLAlchemy connection URL that should 4 | work if you have a local instance of CockroachDB installed: 5 | 6 | [db] 7 | default=cockroachdb://root@localhost:26257/defaultdb 8 | 9 | If you want to test against a remote server (or otherwise need to tweak 10 | the connection URL), create a file named "test.cfg" in the same 11 | folder as "setup.cfg", copy the ``[db]`` section into it, and adjust the 12 | ``default=`` URL accordingly. 13 | 14 | The minimum requirements for testing are: 15 | 16 | - SQLAlchemy, 17 | - Alembic, 18 | - pytest, and 19 | - the psycopg2 DBAPI module. 20 | 21 | Install them with 22 | 23 | pip install sqlalchemy alembic pytest psycopg2-binary 24 | 25 | Then, to run the complete test suite, invoke 26 | 27 | make test 28 | 29 | at a command prompt after you have bootstrapped your environment with 30 | 31 | make bootstrap 32 | 33 | To run just the SQLAlchemy test suite, use 34 | 35 | pytest test/test_suite_sqlalchemy.py 36 | 37 | and to run just the Alembic test suite, use 38 | 39 | pytest test/test_suite_alembic.py 40 | 41 | For more detailed information see the corresponding SQLAlchemy document 42 | 43 | https://github.com/sqlalchemy/sqlalchemy/blob/main/README.unittests.rst 44 | -------------------------------------------------------------------------------- /cockroachdb/sqlalchemy/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import util 2 | from sqlalchemy_cockroachdb import run_transaction # noqa 3 | 4 | util.warn_limited( 5 | "Importing ``run_transaction`` from ``cockroachdb.sqlalchemy`` is deprecated since " 6 | "version %s of this dialect. " 7 | "Please import it from ``sqlalchemy_cockroachdb`` instead.", 8 | "1.4", 9 | ) 10 | -------------------------------------------------------------------------------- /dev-requirements.in: -------------------------------------------------------------------------------- 1 | # Packages used by the Makefile 2 | # 3 | # To add/update dependencies, update dev-requirements.in (not the 4 | # generated dev-requirements.txt), run make update-requirements, 5 | # then make bootstrap. 6 | 7 | tox==3.23.1 8 | 9 | # Twine is used in the release process to upload the package. 
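# (It is left unpinned here; `make update-requirements` resolves a pinned version into dev-requirements.txt.)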
10 | twine 11 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | backports-tarfile==1.2.0 2 | # via jaraco-context 3 | certifi==2025.1.31 4 | # via requests 5 | cffi==1.17.1 6 | # via cryptography 7 | charset-normalizer==3.4.1 8 | # via requests 9 | cryptography==44.0.2 10 | # via secretstorage 11 | distlib==0.3.9 12 | # via virtualenv 13 | docutils==0.21.2 14 | # via readme-renderer 15 | filelock==3.18.0 16 | # via 17 | # tox 18 | # virtualenv 19 | id==1.5.0 20 | # via twine 21 | idna==3.10 22 | # via requests 23 | importlib-metadata==8.6.1 24 | # via 25 | # keyring 26 | # twine 27 | jaraco-classes==3.4.0 28 | # via keyring 29 | jaraco-context==6.0.1 30 | # via keyring 31 | jaraco-functools==4.1.0 32 | # via keyring 33 | jeepney==0.9.0 34 | # via 35 | # keyring 36 | # secretstorage 37 | keyring==25.6.0 38 | # via twine 39 | markdown-it-py==3.0.0 40 | # via rich 41 | mdurl==0.1.2 42 | # via markdown-it-py 43 | more-itertools==10.6.0 44 | # via 45 | # jaraco-classes 46 | # jaraco-functools 47 | nh3==0.2.21 48 | # via readme-renderer 49 | packaging==24.2 50 | # via 51 | # tox 52 | # twine 53 | platformdirs==4.3.7 54 | # via virtualenv 55 | pluggy==1.5.0 56 | # via tox 57 | py==1.11.0 58 | # via tox 59 | pycparser==2.22 60 | # via cffi 61 | pygments==2.19.1 62 | # via 63 | # readme-renderer 64 | # rich 65 | readme-renderer==44.0 66 | # via twine 67 | requests==2.32.3 68 | # via 69 | # id 70 | # requests-toolbelt 71 | # twine 72 | requests-toolbelt==1.0.0 73 | # via twine 74 | rfc3986==2.0.0 75 | # via twine 76 | rich==14.0.0 77 | # via twine 78 | secretstorage==3.3.3 79 | # via keyring 80 | six==1.17.0 81 | # via tox 82 | toml==0.10.2 83 | # via tox 84 | tox==3.23.1 85 | # via -r dev-requirements.in 86 | twine==6.1.0 87 | # via -r dev-requirements.in 88 | typing-extensions==4.13.1 89 | # via rich 90 | urllib3==2.3.0 91 | # via 92 | # requests 93 | # twine 94 | virtualenv==20.30.0 95 | # via tox 96 | zipp==3.21.0 97 | # via importlib-metadata 98 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.pytest.ini_options] 2 | addopts = "--tb native -v -r sfxX --maxfail=250 -p warnings -p logging --strict-markers" 3 | markers = [ 4 | "backend: tests that should run on all backends; typically dialect-sensitive", 5 | "mypy: mypy integration / plugin tests", 6 | ] 7 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | addopts= --tb native -v -r fxX --maxfail=25 -p no:warnings 3 | python_files=test/*test_*.py 4 | 5 | [sqla_testing] 6 | requirement_cls = sqlalchemy_cockroachdb.requirements:Requirements 7 | profile_file=test/profiles.txt 8 | 9 | [db] 10 | default=cockroachdb://root@localhost:26257/defaultdb 11 | asyncpg=cockroachdb+asyncpg://root@localhost:26257/defaultdb 12 | psycopg=cockroachdb+psycopg://root@localhost:26257/defaultdb 13 | psycopg2=cockroachdb+psycopg2://root@localhost:26257/defaultdb 14 | 15 | [flake8] 16 | max-line-length = 100 17 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | 4 | from setuptools import setup, 
find_packages 5 | 6 | with open(os.path.join(os.path.dirname(__file__), "sqlalchemy_cockroachdb", "__init__.py"), encoding='UTF-8') as v: 7 | VERSION = re.compile(r'.*__version__ = "(.*?)"', re.S).match(v.read()).group(1) 8 | 9 | with open(os.path.join(os.path.dirname(__file__), "README.md"), encoding='UTF-8') as f: 10 | README = f.read() 11 | 12 | setup( 13 | name="sqlalchemy-cockroachdb", 14 | version=VERSION, 15 | author="Cockroach Labs", 16 | author_email="cockroach-db@googlegroups.com", 17 | url="https://github.com/cockroachdb/sqlalchemy-cockroachdb", 18 | description="CockroachDB dialect for SQLAlchemy", 19 | long_description=README, 20 | long_description_content_type="text/markdown", 21 | license="http://www.apache.org/licenses/LICENSE-2.0", 22 | classifiers=[ 23 | "License :: OSI Approved :: Apache Software License", 24 | "Programming Language :: Python :: 3", 25 | "Programming Language :: Python :: 3 :: Only", 26 | "Programming Language :: Python :: 3.8", 27 | "Programming Language :: Python :: 3.9", 28 | "Programming Language :: Python :: 3.10", 29 | "Programming Language :: Python :: 3.11", 30 | ], 31 | keywords="SQLAlchemy CockroachDB", 32 | project_urls={ 33 | "Documentation": "https://github.com/cockroachdb/sqlalchemy-cockroachdb/wiki", 34 | "Source": "https://github.com/cockroachdb/sqlalchemy-cockroachdb", 35 | "Tracker": "https://github.com/cockroachdb/sqlalchemy-cockroachdb/issues", 36 | }, 37 | packages=find_packages(include=["sqlalchemy_cockroachdb"]), 38 | include_package_data=True, 39 | install_requires=["SQLAlchemy"], 40 | zip_safe=False, 41 | entry_points={ 42 | "sqlalchemy.dialects": [ 43 | "cockroachdb = sqlalchemy_cockroachdb.psycopg2:CockroachDBDialect_psycopg2", 44 | "cockroachdb.psycopg2 = sqlalchemy_cockroachdb.psycopg2:CockroachDBDialect_psycopg2", 45 | "cockroachdb.asyncpg = sqlalchemy_cockroachdb.asyncpg:CockroachDBDialect_asyncpg", 46 | "cockroachdb.psycopg = sqlalchemy_cockroachdb.psycopg:CockroachDBDialect_psycopg", 47 | ], 48 | }, 49 | ) 50 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/__init__.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects import registry as _registry 2 | from .transaction import run_transaction # noqa 3 | 4 | __version__ = "2.0.3.dev0" 5 | 6 | _registry.register( 7 | "cockroachdb.psycopg2", 8 | "sqlalchemy_cockroachdb.psycopg2", 9 | "CockroachDBDialect_psycopg2", 10 | ) 11 | _registry.register( 12 | "cockroachdb.asyncpg", 13 | "sqlalchemy_cockroachdb.asyncpg", 14 | "CockroachDBDialect_asyncpg", 15 | ) 16 | _registry.register( 17 | "cockroachdb.psycopg", 18 | "sqlalchemy_cockroachdb.psycopg", 19 | "CockroachDBDialect_psycopg", 20 | ) 21 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/_psycopg_common.py: -------------------------------------------------------------------------------- 1 | from .base import CockroachDBDialect 2 | 3 | 4 | class _CockroachDBDialect_common_psycopg(CockroachDBDialect): 5 | supports_sane_rowcount = False # for psycopg2, at least 6 | 7 | def get_isolation_level_values(self, dbapi_conn): 8 | return ("SERIALIZABLE", "AUTOCOMMIT", "READ COMMITTED") 9 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/asyncpg.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects.postgresql.asyncpg import PGDialect_asyncpg 2 | from 
.base import CockroachDBDialect 3 | from .ddl_compiler import CockroachDDLCompiler 4 | from .stmt_compiler import CockroachCompiler 5 | from .stmt_compiler import CockroachIdentifierPreparer 6 | 7 | 8 | class CockroachDBDialect_asyncpg(PGDialect_asyncpg, CockroachDBDialect): 9 | driver = "asyncpg" # driver name 10 | preparer = CockroachIdentifierPreparer 11 | ddl_compiler = CockroachDDLCompiler 12 | statement_compiler = CockroachCompiler 13 | 14 | supports_statement_cache = True 15 | 16 | async def setup_asyncpg_json_codec(self, conn): 17 | # https://github.com/cockroachdb/cockroach/issues/9990#issuecomment-579202144 18 | pass 19 | 20 | def get_isolation_level_values(self, dbapi_conn): 21 | return ("SERIALIZABLE", "AUTOCOMMIT", "READ COMMITTED") 22 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/base.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import re 3 | import threading 4 | from sqlalchemy import text 5 | from sqlalchemy import util 6 | from sqlalchemy.dialects.postgresql.base import PGDialect 7 | from sqlalchemy.dialects.postgresql import ARRAY 8 | from sqlalchemy.dialects.postgresql import INET 9 | from sqlalchemy.dialects.postgresql import UUID 10 | from sqlalchemy.ext.compiler import compiles 11 | from sqlalchemy.util import warn 12 | import sqlalchemy.sql as sql 13 | 14 | import sqlalchemy.types as sqltypes 15 | 16 | from .stmt_compiler import CockroachCompiler, CockroachIdentifierPreparer 17 | from .ddl_compiler import CockroachDDLCompiler 18 | 19 | 20 | # Map type names (as returned by information_schema) to sqlalchemy type 21 | # objects. 22 | # 23 | # TODO(bdarnell): test more of these. The stock test suite only covers 24 | # a few basic ones. 25 | _type_map = { 26 | "bool": sqltypes.BOOLEAN, # introspection returns "BOOL" not boolean 27 | "boolean": sqltypes.BOOLEAN, 28 | "bigint": sqltypes.INT, 29 | "int": sqltypes.INT, 30 | "int2": sqltypes.INT, 31 | "int4": sqltypes.INT, 32 | "int64": sqltypes.INT, 33 | "int8": sqltypes.INT, 34 | "integer": sqltypes.INT, 35 | "smallint": sqltypes.INT, 36 | "double precision": sqltypes.FLOAT, 37 | "float": sqltypes.FLOAT, 38 | "float4": sqltypes.FLOAT, 39 | "float8": sqltypes.FLOAT, 40 | "real": sqltypes.FLOAT, 41 | "dec": sqltypes.DECIMAL, 42 | "decimal": sqltypes.DECIMAL, 43 | "numeric": sqltypes.DECIMAL, 44 | "date": sqltypes.DATE, 45 | "time": sqltypes.Time, 46 | "time without time zone": sqltypes.Time, 47 | "timestamp": sqltypes.TIMESTAMP, 48 | "timestamptz": sqltypes.TIMESTAMP, 49 | "timestamp with time zone": sqltypes.TIMESTAMP, 50 | "timestamp without time zone": sqltypes.TIMESTAMP, 51 | "interval": sqltypes.Interval, 52 | "char": sqltypes.VARCHAR, 53 | "char varying": sqltypes.VARCHAR, 54 | "character": sqltypes.VARCHAR, 55 | "character varying": sqltypes.VARCHAR, 56 | "string": sqltypes.VARCHAR, 57 | "text": sqltypes.VARCHAR, 58 | "varchar": sqltypes.VARCHAR, 59 | "blob": sqltypes.BLOB, 60 | "bytea": sqltypes.BLOB, 61 | "bytes": sqltypes.BLOB, 62 | "json": sqltypes.JSON, 63 | "jsonb": sqltypes.JSON, 64 | "uuid": UUID, 65 | "inet": INET, 66 | } 67 | 68 | 69 | class _SavepointState(threading.local): 70 | """Hack to override names used in savepoint statements. 71 | 72 | To get the Session to do the right thing with transaction retries, 73 | we use the begin_nested() method, which executes a savepoint. 
We 74 | need to transform the savepoint statements that are a part of this 75 | retry loop, while leaving other savepoints alone. Unfortunately 76 | the interface leaves us with no way to pass this information along 77 | except via a thread-local variable. 78 | """ 79 | 80 | def __init__(self): 81 | self.cockroach_restart = False 82 | 83 | 84 | savepoint_state = _SavepointState() 85 | 86 | 87 | class CockroachDBDialect(PGDialect): 88 | name = "cockroachdb" 89 | supports_empty_insert = True 90 | supports_multivalues_insert = True 91 | supports_sequences = False 92 | statement_compiler = CockroachCompiler 93 | preparer = CockroachIdentifierPreparer 94 | ddl_compiler = CockroachDDLCompiler 95 | 96 | # Override connect so we can take disable_cockroachdb_telemetry as a connect_arg to sqlalchemy. 97 | # The option is not used any more, but removing it is a backwards-incompatible change. 98 | def connect( 99 | self, 100 | disable_cockroachdb_telemetry=False, 101 | **kwargs, 102 | ): 103 | return super().connect(**kwargs) 104 | 105 | def __init__(self, *args, **kwargs): 106 | if kwargs.get("use_native_hstore", False): 107 | raise NotImplementedError("use_native_hstore is not supported") 108 | if kwargs.get("server_side_cursors", False): 109 | raise NotImplementedError("server_side_cursors is not supported") 110 | kwargs["use_native_hstore"] = False 111 | kwargs["server_side_cursors"] = False 112 | super().__init__(*args, **kwargs) 113 | 114 | def initialize(self, connection): 115 | # Bypass PGDialect's initialize implementation, which looks at 116 | # server_version_info and performs postgres-specific queries 117 | # to detect certain features on the server. Set the attributes 118 | # by hand and hope things don't change out from under us too 119 | # often. 120 | super().initialize(connection) 121 | self.implicit_returning = True 122 | self.supports_smallserial = False 123 | self._backslash_escapes = False 124 | sversion = connection.scalar(text("select version()")) 125 | self._is_v2plus = " v1." not in sversion 126 | self._is_v21plus = self._is_v2plus and (" v2.0." not in sversion) 127 | self._is_v191plus = self._is_v21plus and (" v2.1." not in sversion) 128 | self._is_v192plus = self._is_v191plus and (" v19.1." not in sversion) 129 | self._is_v201plus = self._is_v192plus and (" v19.2." not in sversion) 130 | self._is_v202plus = self._is_v201plus and (" v20.1." not in sversion) 131 | self._is_v211plus = self._is_v202plus and (" v20.2." not in sversion) 132 | self._is_v212plus = self._is_v211plus and (" v21.1." not in sversion) 133 | self._is_v221plus = self._is_v212plus and (" v21.2." not in sversion) 134 | self._is_v222plus = self._is_v221plus and (" v22.1." not in sversion) 135 | self._is_v231plus = self._is_v222plus and (" v22.2." not in sversion) 136 | self._is_v232plus = self._is_v231plus and (" v23.1." not in sversion) 137 | self._is_v241plus = self._is_v232plus and (" v23.2." not in sversion) 138 | self._is_v242plus = self._is_v241plus and (" v24.1." not in sversion) 139 | self._is_v243plus = self._is_v242plus and (" v24.2." not in sversion) 140 | self._is_v251plus = self._is_v243plus and (" v24.3." 
not in sversion) 141 | self._has_native_json = self._is_v2plus 142 | self._has_native_jsonb = self._is_v2plus 143 | self._supports_savepoints = self._is_v201plus 144 | self.supports_native_enum = self._is_v202plus 145 | self.supports_identity_columns = True 146 | 147 | def _get_server_version_info(self, conn): 148 | # PGDialect expects a postgres server version number here, 149 | # although we've overridden most of the places where it's 150 | # used. 151 | return (9, 5, 0) 152 | 153 | def get_table_names(self, conn, schema=None, **kw): 154 | # Upstream implementation needs correlated subqueries. 155 | 156 | if not self._is_v2plus: 157 | # v1.1 or earlier. 158 | return [row.Table for row in conn.execute(text("SHOW TABLES"))] 159 | 160 | # v2.0+ have a good information schema. Use it. 161 | return [ 162 | row.table_name 163 | for row in conn.execute( 164 | text("SELECT table_name FROM information_schema.tables WHERE table_schema=:schema"), 165 | {"schema": schema or self.default_schema_name}, 166 | ) 167 | ] 168 | 169 | def has_table(self, conn, table, schema=None, info_cache=None): 170 | # Upstream implementation needs pg_table_is_visible(). 171 | return any(t == table for t in self.get_table_names(conn, schema=schema)) 172 | 173 | def get_multi_columns(self, connection, schema, filter_names, scope, kind, **kw): 174 | if not filter_names: 175 | filter_names = self.get_table_names(connection, schema) 176 | return { 177 | (schema, table_name): self.get_columns(connection, table_name, schema, **kw) 178 | for table_name in filter_names 179 | } 180 | 181 | # The upstream implementations of the reflection functions below depend on 182 | # correlated subqueries which are not yet supported. 183 | def get_columns(self, conn, table_name, schema=None, **kw): 184 | _include_hidden = kw.get("include_hidden", False) 185 | if not self._is_v191plus: 186 | # v2.x does not have is_generated or generation_expression 187 | sql = ( 188 | "SELECT column_name, data_type, is_nullable::bool, column_default," 189 | "numeric_precision, numeric_scale, character_maximum_length, " 190 | "NULL AS is_generated, NULL AS generation_expression, is_hidden::bool," 191 | "column_comment AS comment " 192 | "FROM information_schema.columns " 193 | "WHERE table_schema = :table_schema AND table_name = :table_name " 194 | ) 195 | sql += "" if _include_hidden else "AND NOT is_hidden::bool" 196 | rows = conn.execute( 197 | text(sql), 198 | {"table_schema": schema or self.default_schema_name, "table_name": table_name}, 199 | ) 200 | else: 201 | # v19.1 or later. Information schema columns are all usable. 
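# The query below also selects crdb_sql_type, which is used further down to recover the element type of ARRAY columns.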
202 | sql = ( 203 | "SELECT column_name, data_type, is_nullable::bool, column_default, " 204 | "numeric_precision, numeric_scale, character_maximum_length, " 205 | "CASE is_generated WHEN 'ALWAYS' THEN true WHEN 'NEVER' THEN false " 206 | "ELSE is_generated::bool END AS is_generated, " 207 | "generation_expression, is_hidden::bool, crdb_sql_type, column_comment AS comment " 208 | "FROM information_schema.columns " 209 | "WHERE table_schema = :table_schema AND table_name = :table_name " 210 | ) 211 | sql += "" if _include_hidden else "AND NOT is_hidden::bool" 212 | rows = conn.execute( 213 | text(sql), 214 | {"table_schema": schema or self.default_schema_name, "table_name": table_name}, 215 | ) 216 | 217 | res = [] 218 | for row in rows: 219 | name, type_str, nullable, default = row[:4] 220 | if type_str == "ARRAY": 221 | is_array = True 222 | type_str, _ = row.crdb_sql_type.split("[", maxsplit=1) 223 | else: 224 | is_array = False 225 | # When there are type parameters, attach them to the 226 | # returned type object. 227 | m = re.match(r"^(\w+(?: \w+)*)(?:\(([0-9, ]*)\))?$", type_str) 228 | if m is None: 229 | warn("Could not parse type name '%s'" % type_str) 230 | typ = sqltypes.NULLTYPE 231 | else: 232 | type_name, type_args = m.groups() 233 | try: 234 | type_class = _type_map[type_name.lower()] 235 | except KeyError: 236 | warn(f"Did not recognize type '{type_name}' of column '{name}'") 237 | type_class = sqltypes.NULLTYPE 238 | if type_args: 239 | typ = type_class(*[int(s.strip()) for s in type_args.split(",")]) 240 | elif type_class is sqltypes.DECIMAL: 241 | typ = type_class( 242 | precision=row.numeric_precision, 243 | scale=row.numeric_scale, 244 | ) 245 | elif type_class is sqltypes.VARCHAR: 246 | typ = type_class(length=row.character_maximum_length) 247 | else: 248 | typ = type_class 249 | if row.is_generated: 250 | # Currently, all computed columns are persisted. 251 | computed = dict(sqltext=row.generation_expression, persisted=True) 252 | default = None 253 | else: 254 | computed = None 255 | # Check if a sequence is being used and adjust the default value. 256 | autoincrement = False 257 | if default is not None: 258 | nextval_match = re.search(r"""(nextval\(')([^']+)('.*$)""", default) 259 | unique_rowid_match = re.search(r"""unique_rowid\(""", default) 260 | if nextval_match is not None or unique_rowid_match is not None: 261 | if issubclass(type_class, sqltypes.Integer): 262 | autoincrement = True 263 | # the default is related to a Sequence 264 | sch = schema 265 | if ( 266 | nextval_match is not None 267 | and "." not in nextval_match.group(2) 268 | and sch is not None 269 | ): 270 | # unconditionally quote the schema name. this could 271 | # later be enhanced to obey quoting rules / 272 | # "quote schema" 273 | default = ( 274 | nextval_match.group(1) 275 | + ('"%s"' % sch) 276 | + "." 
277 | + nextval_match.group(2) 278 | + nextval_match.group(3) 279 | ) 280 | 281 | column_info = dict( 282 | name=name, 283 | type=ARRAY(typ) if is_array else typ, 284 | nullable=nullable, 285 | default=default, 286 | autoincrement=autoincrement, 287 | is_hidden=row.is_hidden, 288 | comment=row.comment, 289 | ) 290 | if computed is not None: 291 | column_info["computed"] = computed 292 | res.append(column_info) 293 | return res 294 | 295 | def get_indexes(self, conn, table_name, schema=None, **kw): 296 | if self._is_v192plus: 297 | indexes = super().get_indexes(conn, table_name, schema, **kw) 298 | # CockroachDB creates a UNIQUE INDEX automatically for each UNIQUE CONSTRAINT, and 299 | # there is no difference between unique indexes and unique constraints. We need 300 | # to remove the `duplicates_constraint` value from unique indexes, otherwise 301 | # alembic tries to delete and recreate unique indexes. This is consistent with 302 | # postgresql, which doesn't set the duplicates_constraint flag on unique indexes. 303 | for index in indexes: 304 | if index["unique"] and "duplicates_constraint" in index: 305 | del index["duplicates_constraint"] 306 | return indexes 307 | 308 | # The Cockroach database creates a UNIQUE INDEX implicitly whenever the 309 | # UNIQUE CONSTRAINT construct is used. Currently we are just ignoring all unique indexes, 310 | # but we might need to return them and add an additional key `duplicates_constraint` if 311 | # it is detected as mirroring a constraint. 312 | # https://www.cockroachlabs.com/docs/stable/unique.html 313 | # https://github.com/sqlalchemy/sqlalchemy/blob/55f930ef3d4e60bed02a2dad16e331fe42cfd12b/lib/sqlalchemy/dialects/postgresql/base.py#L723 314 | q = """ 315 | SELECT 316 | index_name, 317 | column_name, 318 | (not non_unique::bool) as unique, 319 | implicit::bool as implicit 320 | FROM 321 | information_schema.statistics 322 | WHERE 323 | table_schema = :table_schema 324 | AND table_name = :table_name 325 | """ 326 | rows = conn.execute( 327 | text(q), 328 | {"table_schema": (schema or self.default_schema_name), "table_name": table_name}, 329 | ) 330 | indexes = collections.defaultdict(list) 331 | for row in rows: 332 | if row.implicit or row.unique: 333 | continue 334 | indexes[row.index_name].append(row) 335 | 336 | result = [] 337 | for name, rows in indexes.items(): 338 | result.append( 339 | { 340 | "name": name, 341 | "column_names": [r.column_name for r in rows], 342 | "unique": False, 343 | } 344 | ) 345 | return result 346 | 347 | def get_multi_indexes( 348 | self, connection, schema, filter_names, scope, kind, **kw 349 | ): 350 | result = super().get_multi_indexes( 351 | connection, schema, filter_names, scope, kind, **kw 352 | ) 353 | if schema is None: 354 | result = dict(result) 355 | for k in [ 356 | (None, "spatial_ref_sys"), 357 | (None, "geometry_columns"), 358 | (None, "geography_columns"), 359 | ]: 360 | result.pop(k, None) 361 | return result 362 | 363 | def get_foreign_keys_v1(self, conn, table_name, schema=None, **kw): 364 | fkeys = [] 365 | FK_REGEX = re.compile(r"(?P<referred_table>.+)?\.\[(?P<referred_columns>.+)?]") 366 | 367 | for row in conn.execute( 368 | text(f'SHOW CONSTRAINTS FROM "{schema or self.default_schema_name}"."{table_name}"') 369 | ): 370 | if row.Type.startswith("FOREIGN KEY"): 371 | m = re.search(FK_REGEX, row.Details) 372 | 373 | name = row.Name 374 | constrained_columns = row["Column(s)"].split(", ") 375 | referred_table = m.group("referred_table") 376 | referred_columns = m.group("referred_columns").split() 377 | referred_schema = 
schema 378 | fkey_d = { 379 | "name": name, 380 | "constrained_columns": constrained_columns, 381 | "referred_table": referred_table, 382 | "referred_columns": referred_columns, 383 | "referred_schema": referred_schema, 384 | } 385 | fkeys.append(fkey_d) 386 | return fkeys 387 | 388 | @util.memoized_property 389 | def _fk_regex_pattern(self): 390 | # optionally quoted token 391 | qtoken = r'(?:"[^"]+"|[\w]+?)' 392 | 393 | # https://www.postgresql.org/docs/current/static/sql-createtable.html 394 | return re.compile( 395 | r"FOREIGN KEY \((.*?)\) " 396 | rf"REFERENCES (?:({qtoken})\.)?({qtoken})\(((?:{qtoken}(?: *, *)?)+)\)" # noqa: E501 397 | r"[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?" 398 | r"[\s]?(ON DELETE " 399 | r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" 400 | r"[\s]?(ON UPDATE " 401 | r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?" 402 | r"[\s]?(DEFERRABLE|NOT DEFERRABLE)?" 403 | r"[\s]?(INITIALLY (DEFERRED|IMMEDIATE)+)?" 404 | ) 405 | 406 | def get_foreign_keys( 407 | self, connection, table_name, schema=None, postgresql_ignore_search_path=False, **kw 408 | ): 409 | if not self._is_v2plus: 410 | # v1.1 or earlier. 411 | return self.get_foreign_keys_v1(connection, table_name, schema, **kw) 412 | 413 | # v2.0 or later. 414 | # This method is the same as the one in SQLAlchemy's pg dialect, with 415 | # a tweak to the FK regular expressions to tolerate whitespace between 416 | # the table name and the column list. 417 | # See also: https://github.com/cockroachdb/cockroach/issues/27123 418 | 419 | preparer = self.identifier_preparer 420 | table_oid = self.get_table_oid( 421 | connection, table_name, schema, info_cache=kw.get("info_cache") 422 | ) 423 | 424 | FK_SQL = """ 425 | SELECT r.conname, 426 | pg_catalog.pg_get_constraintdef(r.oid, true) as condef, 427 | n.nspname as conschema 428 | FROM pg_catalog.pg_constraint r, 429 | pg_namespace n, 430 | pg_class c 431 | 432 | WHERE r.conrelid = :table AND 433 | r.contype = 'f' AND 434 | c.oid = confrelid AND 435 | n.oid = c.relnamespace 436 | ORDER BY 1 437 | """ 438 | # http://www.postgresql.org/docs/9.0/static/sql-createtable.html 439 | FK_REGEX = self._fk_regex_pattern 440 | 441 | t = sql.text(FK_SQL).columns(conname=sqltypes.Unicode, condef=sqltypes.Unicode) 442 | c = connection.execute(t, {"table": table_oid}) 443 | fkeys = [] 444 | for conname, condef, conschema in c.fetchall(): 445 | m = re.search(FK_REGEX, condef).groups() 446 | 447 | ( 448 | constrained_columns, 449 | referred_schema, 450 | referred_table, 451 | referred_columns, 452 | _, 453 | match, 454 | _, 455 | ondelete, 456 | _, 457 | onupdate, 458 | deferrable, 459 | _, 460 | initially, 461 | ) = m 462 | 463 | if deferrable is not None: 464 | deferrable = True if deferrable == "DEFERRABLE" else False 465 | constrained_columns = [ 466 | preparer._unquote_identifier(x) for x in re.split(r"\s*,\s*", constrained_columns) 467 | ] 468 | 469 | if postgresql_ignore_search_path: 470 | # when ignoring search path, we use the actual schema 471 | # provided it isn't the "default" schema 472 | if conschema != self.default_schema_name: 473 | referred_schema = conschema 474 | else: 475 | referred_schema = schema 476 | elif referred_schema: 477 | # referred_schema is the schema that we regexp'ed from 478 | # pg_get_constraintdef(). If the schema is in the search 479 | # path, pg_get_constraintdef() will give us None. 
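# Strip any identifier quoting so the schema name matches the unquoted names used elsewhere in reflection.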
480 | referred_schema = preparer._unquote_identifier(referred_schema) 481 | elif schema is not None and schema == conschema: 482 | # If the actual schema matches the schema of the table 483 | # we're reflecting, then we will use that. 484 | referred_schema = schema 485 | 486 | referred_table = preparer._unquote_identifier(referred_table) 487 | referred_columns = [ 488 | preparer._unquote_identifier(x) for x in re.split(r"\s*,\s", referred_columns) 489 | ] 490 | fkey_d = { 491 | "name": conname, 492 | "constrained_columns": constrained_columns, 493 | "referred_schema": referred_schema, 494 | "referred_table": referred_table, 495 | "referred_columns": referred_columns, 496 | "options": { 497 | "onupdate": onupdate, 498 | "ondelete": ondelete, 499 | "deferrable": deferrable, 500 | "initially": initially, 501 | "match": match, 502 | }, 503 | } 504 | fkeys.append(fkey_d) 505 | return fkeys 506 | 507 | def get_pk_constraint(self, conn, table_name, schema=None, **kw): 508 | if self._is_v21plus: 509 | return super().get_pk_constraint(conn, table_name, schema, **kw) 510 | 511 | # v2.0 does not know about enough SQL to understand the query done by 512 | # the upstream dialect. So run a dumbed down version instead. 513 | idxs = self.get_indexes(conn, table_name, schema=schema, **kw) 514 | if len(idxs) == 0: 515 | # virtual table. No constraints. 516 | return {} 517 | # The PK is always first in the index list; it may not always 518 | # be named "primary". 519 | pk = idxs[0] 520 | res = dict(constrained_columns=pk["column_names"]) 521 | # The SQLAlchemy tests expect that the name field is only 522 | # present if the PK was explicitly renamed by the user. 523 | # Checking for a name of "primary" is an imperfect proxy for 524 | # this but is good enough to pass the tests. 525 | if pk["name"] != "primary": 526 | res["name"] = pk["name"] 527 | return res 528 | 529 | def get_multi_pk_constraint(self, connection, schema, filter_names, scope, kind, **kw): 530 | result = super().get_multi_pk_constraint( 531 | connection, schema, filter_names, scope, kind, **kw 532 | ) 533 | if schema is None: 534 | result = dict(result) 535 | for k in [ 536 | (None, "spatial_ref_sys"), 537 | (None, "geometry_columns"), 538 | (None, "geography_columns"), 539 | ]: 540 | result.pop(k, None) 541 | return result 542 | 543 | def get_unique_constraints(self, conn, table_name, schema=None, **kw): 544 | if self._is_v21plus: 545 | return super().get_unique_constraints(conn, table_name, schema, **kw) 546 | 547 | # v2.0 does not know about enough SQL to understand the query done by 548 | # the upstream dialect. So run a dumbed down version instead. 549 | res = [] 550 | # Skip the primary key which is always first in the list. 551 | idxs = self.get_indexes(conn, table_name, schema=schema, **kw) 552 | if len(idxs) == 0: 553 | # virtual table. No constraints. 
554 | return res 555 | for index in idxs[1:]: 556 | if index["unique"]: 557 | del index["unique"] 558 | res.append(index) 559 | return res 560 | 561 | def get_multi_check_constraints( 562 | self, connection, schema, filter_names, scope, kind, **kw 563 | ): 564 | result = super().get_multi_check_constraints( 565 | connection, schema, filter_names, scope, kind, **kw 566 | ) 567 | if schema is None: 568 | result = dict(result) 569 | for k in [ 570 | (None, "spatial_ref_sys"), 571 | (None, "geometry_columns"), 572 | (None, "geography_columns"), 573 | ]: 574 | result.pop(k, None) 575 | return result 576 | 577 | def do_savepoint(self, connection, name): 578 | # Savepoint logic customized to work with run_transaction(). 579 | if savepoint_state.cockroach_restart: 580 | connection.execute(text("SAVEPOINT cockroach_restart")) 581 | else: 582 | super().do_savepoint(connection, name) 583 | 584 | def do_rollback_to_savepoint(self, connection, name): 585 | # Savepoint logic customized to work with run_transaction(). 586 | if savepoint_state.cockroach_restart: 587 | connection.execute(text("ROLLBACK TO SAVEPOINT cockroach_restart")) 588 | else: 589 | super().do_rollback_to_savepoint(connection, name) 590 | 591 | def do_release_savepoint(self, connection, name): 592 | # Savepoint logic customized to work with run_transaction(). 593 | if savepoint_state.cockroach_restart: 594 | connection.execute(text("RELEASE SAVEPOINT cockroach_restart")) 595 | else: 596 | super().do_release_savepoint(connection, name) 597 | 598 | 599 | # If alembic is installed, register an alias in its dialect mapping. 600 | try: 601 | import alembic.ddl.postgresql 602 | except ImportError: 603 | pass 604 | else: 605 | 606 | class CockroachDBImpl(alembic.ddl.postgresql.PostgresqlImpl): 607 | __dialect__ = "cockroachdb" 608 | transactional_ddl = False 609 | 610 | @compiles(alembic.ddl.postgresql.PostgresqlColumnType, "cockroachdb") 611 | def visit_column_type(*args, **kwargs): 612 | return alembic.ddl.postgresql.visit_column_type(*args, **kwargs) 613 | 614 | @compiles(alembic.ddl.postgresql.ColumnComment, "cockroachdb") 615 | def visit_column_comment(*args, **kwargs): 616 | return alembic.ddl.postgresql.visit_column_comment(*args, **kwargs) 617 | 618 | 619 | # If sqlalchemy-migrate is installed, register there too. 620 | try: 621 | from migrate.changeset.databases.visitor import DIALECTS as migrate_dialects 622 | except ImportError: 623 | pass 624 | else: 625 | migrate_dialects["cockroachdb"] = migrate_dialects["postgresql"] 626 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/ddl_compiler.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import exc 2 | from sqlalchemy.dialects.postgresql.base import PGDDLCompiler 3 | 4 | 5 | class CockroachDDLCompiler(PGDDLCompiler): 6 | def visit_computed_column(self, generated, **kw): 7 | if generated.persisted is False: 8 | raise exc.CompileError( 9 | "CockroachDB computed columns do not support 'virtual' " 10 | "persistence; set the 'persisted' flag to None or True for " 11 | "CockroachDB support." 
12 | ) 13 | 14 | return "AS (%s) STORED" % self.sql_compiler.process( 15 | generated.sqltext, include_table=False, literal_binds=True 16 | ) 17 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/provision.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.testing.provision import temp_table_keyword_args 2 | 3 | 4 | @temp_table_keyword_args.for_db("cockroachdb") 5 | def _cockroachdb_temp_table_keyword_args(cfg, eng): 6 | return {"prefixes": ["TEMPORARY"]} 7 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/psycopg.py: -------------------------------------------------------------------------------- 1 | from psycopg.crdb import connect as crdb_connect 2 | from sqlalchemy import util 3 | from sqlalchemy.dialects.postgresql.psycopg import PGDialect_psycopg, PGDialectAsync_psycopg 4 | from ._psycopg_common import _CockroachDBDialect_common_psycopg 5 | from .ddl_compiler import CockroachDDLCompiler 6 | from .stmt_compiler import CockroachCompiler 7 | from .stmt_compiler import CockroachIdentifierPreparer 8 | 9 | 10 | class CockroachDBDialect_psycopg(_CockroachDBDialect_common_psycopg, PGDialect_psycopg): 11 | driver = "psycopg" # driver name 12 | preparer = CockroachIdentifierPreparer 13 | ddl_compiler = CockroachDDLCompiler 14 | statement_compiler = CockroachCompiler 15 | 16 | supports_statement_cache = True 17 | 18 | @util.memoized_property 19 | def _psycopg_json(self): 20 | from psycopg.types import json 21 | 22 | new_json = type("foo", (), {"Json": json.Jsonb, "Jsonb": json.Jsonb}) 23 | return new_json 24 | 25 | def connect( 26 | self, 27 | disable_cockroachdb_telemetry=False, 28 | **kwargs, 29 | ): 30 | self.disable_cockroachdb_telemetry = util.asbool(disable_cockroachdb_telemetry) 31 | return crdb_connect(**kwargs) 32 | 33 | @classmethod 34 | def get_async_dialect_cls(cls, url): 35 | return CockroachDBDialectAsync_psycopg 36 | 37 | 38 | class CockroachDBDialectAsync_psycopg(_CockroachDBDialect_common_psycopg, PGDialectAsync_psycopg): 39 | is_async = True 40 | supports_statement_cache = True 41 | 42 | 43 | dialect = CockroachDBDialect_psycopg 44 | dialect_async = CockroachDBDialectAsync_psycopg 45 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/psycopg2.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 2 | from ._psycopg_common import _CockroachDBDialect_common_psycopg 3 | from .ddl_compiler import CockroachDDLCompiler 4 | from .stmt_compiler import CockroachCompiler 5 | from .stmt_compiler import CockroachIdentifierPreparer 6 | 7 | 8 | class CockroachDBDialect_psycopg2(_CockroachDBDialect_common_psycopg, PGDialect_psycopg2): 9 | driver = "psycopg2" # driver name 10 | preparer = CockroachIdentifierPreparer 11 | ddl_compiler = CockroachDDLCompiler 12 | statement_compiler = CockroachCompiler 13 | 14 | supports_statement_cache = True 15 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/requirements.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.testing.requirements import SuiteRequirements as SuiteRequirementsSQLA 2 | from alembic.testing.requirements import SuiteRequirements as SuiteRequirementsAlembic 3 | 4 | from sqlalchemy.testing import exclusions 
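As an aside on how the dialect classes above are reached in practice: they are selected through the SQLAlchemy URL scheme rather than instantiated directly. A minimal sketch (host, port, and database name are assumptions matching a local insecure single node):

```python
from sqlalchemy import create_engine, text

# "cockroachdb+psycopg2" resolves to CockroachDBDialect_psycopg2;
# "cockroachdb+psycopg" would resolve to CockroachDBDialect_psycopg.
engine = create_engine(
    "cockroachdb+psycopg2://root@localhost:26257/defaultdb?sslmode=disable"
)
with engine.connect() as conn:
    print(conn.execute(text("SELECT version()")).scalar())
```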
5 | 6 | 7 | class Requirements(SuiteRequirementsSQLA, SuiteRequirementsAlembic): 8 | # This class configures the sqlalchemy test suite. Oddly, it must 9 | # be importable in the main codebase and not alongside the tests. 10 | # 11 | # The full list of supported settings is at 12 | # https://github.com/sqlalchemy/sqlalchemy/blob/master/lib/sqlalchemy/testing/requirements.py 13 | 14 | # This one's undocumented but appears to control connection reuse 15 | # in the tests. 16 | independent_connections = exclusions.open() 17 | 18 | # We don't support these features yet, but the tests have them on 19 | # by default. 20 | temporary_tables = exclusions.closed() 21 | temp_table_reflection = exclusions.closed() 22 | time = exclusions.skip_if( 23 | lambda config: not config.db.dialect._is_v2plus, "v1.x does not support TIME." 24 | ) 25 | time_microseconds = exclusions.skip_if( 26 | lambda config: not config.db.dialect._is_v2plus, "v1.x does not support TIME." 27 | ) 28 | timestamp_microseconds = exclusions.open() 29 | server_side_cursors = exclusions.closed() 30 | 31 | # We don't do implicit casts. 32 | date_coerces_from_datetime = exclusions.closed() 33 | 34 | # We do not support creation of views with `SELECT *` expressions, 35 | # which these tests use. 36 | view_reflection = exclusions.closed() 37 | view_column_reflection = exclusions.closed() 38 | 39 | # The autoincrement tests assume a predictable 1-based sequence. 40 | autoincrement_insert = exclusions.closed() 41 | 42 | """ special test for explicit .Identity() on Column without sequence, e.g., 43 | CREATE TABLE tbl ( 44 | id INTEGER GENERATED BY DEFAULT AS IDENTITY, 45 | "desc" VARCHAR(100), 46 | PRIMARY KEY (id) 47 | ) 48 | """ 49 | autoincrement_without_sequence = exclusions.closed() 50 | 51 | # The following features are off by default. We turn on as many as 52 | # we can without causing test failures. 
53 | table_reflection = exclusions.skip_if(
54 | lambda config: not config.db.dialect._is_v202plus,
55 | "older versions don't support this correctly.",
56 | )
57 | primary_key_constraint_reflection = exclusions.skip_if(
58 | lambda config: not config.db.dialect._is_v202plus,
59 | "older versions don't support this correctly.",
60 | )
61 | foreign_key_constraint_reflection = exclusions.skip_if(
62 | lambda config: not config.db.dialect._is_v202plus,
63 | "older versions don't support this correctly.",
64 | )
65 | index_reflection = exclusions.skip_if(
66 | lambda config: not config.db.dialect._is_v202plus,
67 | "older versions don't support this correctly.",
68 | )
69 | unique_constraint_reflection = exclusions.skip_if(
70 | lambda config: not config.db.dialect._is_v202plus,
71 | "older versions don't support this correctly.",
72 | )
73 | check_constraint_reflection = exclusions.skip_if(
74 | lambda config: not config.db.dialect._is_v202plus,
75 | "older versions don't support this correctly.",
76 | )
77 | cross_schema_fk_reflection = exclusions.closed()
78 | non_updating_cascade = exclusions.open()
79 | deferrable_fks = exclusions.closed()
80 | boolean_col_expressions = exclusions.open()
81 | nullsordering = exclusions.open()
82 | standalone_binds = exclusions.open()
83 | intersect = exclusions.open()
84 | except_ = exclusions.open()
85 | window_functions = exclusions.open()
86 | returning = exclusions.open()
87 | emulated_lastrowid = exclusions.open()
88 | dbapi_lastrowid = exclusions.open()
89 | views = exclusions.open()
90 | schemas = exclusions.skip_if(
91 | lambda config: not config.db.dialect._is_v202plus,
92 | "versions before 20.2 do not support schemas",
93 | )
94 | implicit_default_schema = exclusions.skip_if(
95 | lambda config: not config.db.dialect._is_v202plus,
96 | "versions before 20.2 do not support schemas",
97 | )
98 | temporary_views = exclusions.closed()
99 | reflects_pk_names = exclusions.open()
100 | unicode_ddl = exclusions.open()
101 | datetime_literals = exclusions.closed()
102 | datetime_historic = exclusions.open()
103 | date_historic = exclusions.open()
104 | precision_numerics_enotation_small = exclusions.open()
105 | precision_numerics_enotation_large = exclusions.open()
106 | precision_numerics_many_significant_digits = exclusions.open()
107 | precision_numerics_retains_significant_digits = exclusions.closed()
108 | savepoints = exclusions.skip_if(
109 | lambda config: not config.db.dialect._supports_savepoints,
110 | "versions before 20.x do not support savepoints.",
111 | )
112 | two_phase_transactions = exclusions.closed()
113 | update_from = exclusions.open()
114 | mod_operator_as_percent_sign = exclusions.open()
115 | foreign_key_constraint_reflection = exclusions.open()
116 | computed_columns = exclusions.skip_if(
117 | lambda config: not config.db.dialect._is_v191plus,
118 | "versions before 19.1 do not support reflection on computed columns",
119 | )
120 | computed_columns_stored = exclusions.skip_if(
121 | lambda config: not config.db.dialect._is_v191plus,
122 | "versions before 19.1 do not support reflection on computed columns",
123 | )
124 | computed_columns_default_persisted = exclusions.skip_if(
125 | lambda config: not config.db.dialect._is_v191plus,
126 | "versions before 19.1 do not support reflection on computed columns",
127 | )
128 | computed_columns_reflect_persisted = exclusions.skip_if(
129 | lambda config: not config.db.dialect._is_v191plus,
130 | "versions before 19.1 do not support reflection on computed columns",
131 | )
132 |
computed_columns_virtual = exclusions.closed() 133 | ctes = exclusions.skip_if( 134 | lambda config: not config.db.dialect._is_v201plus, 135 | "versions before 20.x do not fully support CTEs.", 136 | ) 137 | ctes_with_update_delete = exclusions.skip_if( 138 | lambda config: not config.db.dialect._is_v201plus, 139 | "versions before 20.x do not fully support CTEs.", 140 | ) 141 | ctes_on_dml = exclusions.skip_if( 142 | lambda config: not config.db.dialect._is_v201plus, 143 | "versions before 20.x do not fully support CTEs.", 144 | ) 145 | isolation_level = exclusions.open() 146 | json_type = exclusions.skip_if( 147 | lambda config: not config.db.dialect._is_v192plus, 148 | "versions before 19.2.x do not pass the JSON tests.", 149 | ) 150 | tuple_in = exclusions.open() 151 | # The psycopg driver doesn't support these. 152 | percent_schema_names = exclusions.closed() 153 | order_by_label_with_expression = exclusions.open() 154 | order_by_col_from_union = exclusions.open() 155 | implicitly_named_constraints = exclusions.open() 156 | supports_distinct_on = exclusions.open() 157 | 158 | fk_ondelete_noaction = exclusions.closed() 159 | fk_ondelete_restrict = exclusions.closed() 160 | fk_onupdate = exclusions.closed() 161 | fk_onupdate_restrict = exclusions.closed() 162 | 163 | @property 164 | def sync_driver(self): 165 | return exclusions.only_if( 166 | lambda config: config.db.dialect.driver in ["psycopg2", "psycopg"] 167 | ) 168 | 169 | @property 170 | def array_type(self): 171 | # DDL like 172 | # 173 | # CREATE TABLE foo (thing INTEGER[][]) 174 | # 175 | # throws 'invalid syntax: statement ignored: at or near "]": syntax error: unimplemented' 176 | return exclusions.closed() 177 | 178 | @property 179 | def uuid_data_type(self): 180 | return exclusions.open() 181 | 182 | @property 183 | def json_deserializer_binary(self): 184 | return exclusions.only_if(lambda config: config.db.dialect.driver in ["psycopg"]) 185 | 186 | def get_isolation_levels(self, config): 187 | info = { 188 | "default": "SERIALIZABLE", 189 | "supported": ["SERIALIZABLE", "AUTOCOMMIT"], 190 | } 191 | if config.db.dialect._is_v232plus: 192 | info["supported"].append("READ COMMITTED") 193 | return info 194 | 195 | @property 196 | def autocommit(self): 197 | return exclusions.open() 198 | 199 | # ----------------------------------------------- 200 | # non-default requirements for Alembic test suite 201 | # ----------------------------------------------- 202 | 203 | @property 204 | def autoincrement_on_composite_pk(self): 205 | return exclusions.open() 206 | -------------------------------------------------------------------------------- /sqlalchemy_cockroachdb/stmt_compiler.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects.postgresql.base import PGCompiler 2 | from sqlalchemy.dialects.postgresql.base import PGIdentifierPreparer 3 | 4 | # This is extracted from CockroachDB's `sql.y`. Add keywords here if *NEW* reserved keywords 5 | # are added to sql.y. DO NOT DELETE keywords here, even if they are deleted from sql.y: 6 | # once a keyword in CockroachDB, forever a keyword in clients (because of cross-version compat). 
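A quick illustration of what this list changes (a sketch, assuming the package is installed so the module below imports as sqlalchemy_cockroachdb.stmt_compiler): words like index and view are reserved by CockroachDB but not by stock PostgreSQL, so the identifier preparer defined below quotes them when rendering SQL.

```python
from sqlalchemy_cockroachdb.stmt_compiler import CRDB_RESERVED_WORDS

# Reserved in CockroachDB's sql.y but not in PostgreSQL's reserved list,
# so the CockroachIdentifierPreparer below will quote these identifiers.
assert "index" in CRDB_RESERVED_WORDS
assert "view" in CRDB_RESERVED_WORDS
```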
7 | crdb_grammar_reserved = """
8 | ALL
9 | | ANALYSE
10 | | ANALYZE
11 | | AND
12 | | ANY
13 | | ARRAY
14 | | AS
15 | | ASC
16 | | ASYMMETRIC
17 | | BOTH
18 | | CASE
19 | | CAST
20 | | CHECK
21 | | COLLATE
22 | | COLUMN
23 | | CONSTRAINT
24 | | CREATE
25 | | CURRENT_CATALOG
26 | | CURRENT_DATE
27 | | CURRENT_ROLE
28 | | CURRENT_SCHEMA
29 | | CURRENT_TIME
30 | | CURRENT_TIMESTAMP
31 | | CURRENT_USER
32 | | DEFAULT
33 | | DEFERRABLE
34 | | DESC
35 | | DISTINCT
36 | | DO
37 | | ELSE
38 | | END
39 | | EXCEPT
40 | | FALSE
41 | | FETCH
42 | | FOR
43 | | FOREIGN
44 | | FROM
45 | | GRANT
46 | | GROUP
47 | | HAVING
48 | | IN
49 | | INDEX
50 | | INITIALLY
51 | | INTERSECT
52 | | INTO
53 | | LATERAL
54 | | LEADING
55 | | LIMIT
56 | | LOCALTIME
57 | | LOCALTIMESTAMP
58 | | NOT
59 | | NOTHING
60 | | NULL
61 | | OFFSET
62 | | ON
63 | | ONLY
64 | | OR
65 | | ORDER
66 | | PLACING
67 | | PRIMARY
68 | | REFERENCES
69 | | RETURNING
70 | | ROLE
71 | | SELECT
72 | | SESSION_USER
73 | | SOME
74 | | SYMMETRIC
75 | | TABLE
76 | | THEN
77 | | TO
78 | | TRAILING
79 | | TRUE
80 | | UNION
81 | | UNIQUE
82 | | USER
83 | | USING
84 | | VARIADIC
85 | | VIEW
86 | | VIRTUAL
87 | | WHEN
88 | | WHERE
89 | | WINDOW
90 | | WITH
91 | | WORK
92 | """
93 | CRDB_RESERVED_WORDS = {x.strip().lower() for x in crdb_grammar_reserved.split("|")}
94 |
95 |
96 | class CockroachIdentifierPreparer(PGIdentifierPreparer):
97 | reserved_words = CRDB_RESERVED_WORDS
98 |
99 |
100 | class CockroachCompiler(PGCompiler):
101 | def format_from_hint_text(self, sqltext, table, hint, iscrud):
102 | return f"{sqltext}@{hint}"
103 |
--------------------------------------------------------------------------------
/sqlalchemy_cockroachdb/transaction.py:
--------------------------------------------------------------------------------
1 | from random import uniform
2 | from time import sleep
3 |
4 | import sqlalchemy.engine
5 | import sqlalchemy.exc
6 | import sqlalchemy.orm
7 |
8 | from .base import savepoint_state
9 |
10 |
11 | def run_transaction(transactor, callback, max_retries=None, max_backoff=0):
12 | """Run a transaction with retries.
13 |
14 | ``callback()`` will be called with one argument to execute the
15 | transaction. ``callback`` may be called more than once; it should have
16 | no side effects other than writes to the database on the given
17 | connection. ``callback`` should not call ``commit()`` or ``rollback()``;
18 | these will be called automatically.
19 |
20 | The ``transactor`` argument may be one of the following types:
21 | * `sqlalchemy.engine.Connection`: the same connection is passed to the callback.
22 | * `sqlalchemy.engine.Engine`: a connection is created and passed to the callback.
23 | * `sqlalchemy.orm.sessionmaker`: a session is created and passed to the callback.
24 |
25 | ``max_retries`` is an optional integer that specifies how many times the
26 | transaction should be retried before giving up.
27 | ``max_backoff`` is an optional integer that specifies the capped number of seconds
28 | for the exponential back-off.
29 | """
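With the dispatch below in mind, a minimal usage sketch (illustrative; the connection URL and the account table are assumptions mirroring this repo's tests):

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker
from sqlalchemy_cockroachdb import run_transaction

engine = create_engine("cockroachdb://root@localhost:26257/defaultdb?sslmode=disable")
Session = sessionmaker(engine)

def transfer(session):
    # May run more than once if the transaction is retried, so keep it
    # side-effect free apart from the database writes themselves.
    session.execute(text("UPDATE account SET balance = balance - 100 WHERE acct = 1"))
    session.execute(text("UPDATE account SET balance = balance + 100 WHERE acct = 2"))

run_transaction(Session, transfer, max_retries=3, max_backoff=10)
```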
30 | if isinstance(transactor, (sqlalchemy.engine.Connection, sqlalchemy.orm.Session)):
31 | return _txn_retry_loop(transactor, callback, max_retries, max_backoff)
32 | elif isinstance(transactor, sqlalchemy.engine.Engine):
33 | with transactor.connect() as connection:
34 | return _txn_retry_loop(connection, callback, max_retries, max_backoff)
35 | elif isinstance(transactor, sqlalchemy.orm.sessionmaker):
36 | session = transactor()
37 | return _txn_retry_loop(session, callback, max_retries, max_backoff)
38 | else:
39 | raise TypeError("don't know how to run a transaction on %s" % type(transactor))
40 |
41 |
42 | class _NestedTransaction:
43 | """Wraps begin_nested() to set the savepoint_state thread-local.
44 |
45 | This causes the savepoint statements that are a part of this retry
46 | loop to be rewritten by the dialect.
47 | """
48 |
49 | def __init__(self, conn):
50 | self.conn = conn
51 |
52 | def __enter__(self):
53 | try:
54 | savepoint_state.cockroach_restart = True
55 | self.txn = self.conn.begin_nested()
56 | if isinstance(self.conn, sqlalchemy.orm.Session):
57 | # Sessions are lazy and don't execute the savepoint
58 | # query until you ask for the connection.
59 | self.conn.connection()
60 | finally:
61 | savepoint_state.cockroach_restart = False
62 | return self
63 |
64 | def __exit__(self, typ, value, tb):
65 | try:
66 | savepoint_state.cockroach_restart = True
67 | self.txn.__exit__(typ, value, tb)
68 | finally:
69 | savepoint_state.cockroach_restart = False
70 |
71 |
72 | def retry_exponential_backoff(retry_count: int, max_backoff: int = 0) -> None:
73 | """
74 | Sleep for a random interval before retrying a transaction that hit a retry error.
75 | The sleep time is drawn uniformly from [0, min(max_backoff, 0.1 * 2**retry_count)],
76 | so it grows exponentially with the number of failed attempts,
77 | capped by the max_backoff parameter.
78 |
79 | :param retry_count: The number of the current retry attempt
80 | :param max_backoff: The cap, in seconds, for the exponential back-off
81 | :return: None
82 | """
83 | # e.g. retry 1 -> sleep up to 0.2s, retry 2 -> up to 0.4s, retry 3 -> up to 0.8s
84 | sleep_secs = uniform(0, min(max_backoff, 0.1 * (2 ** retry_count)))
85 | sleep(sleep_secs)
86 |
87 |
88 | def _txn_retry_loop(conn, callback, max_retries, max_backoff):
89 | """Inner transaction retry loop.
90 |
91 | ``conn`` may be either a Connection or a Session, but they both
92 | have compatible ``begin()`` and ``begin_nested()`` methods.
93 | """ 94 | if isinstance(conn, sqlalchemy.orm.Session): 95 | dbapi_name = conn.bind.driver 96 | else: 97 | dbapi_name = conn.engine.driver 98 | 99 | retry_count = 0 100 | with conn.begin(): 101 | while True: 102 | try: 103 | with _NestedTransaction(conn): 104 | ret = callback(conn) 105 | return ret 106 | except sqlalchemy.exc.DatabaseError as e: 107 | if max_retries is not None and retry_count >= max_retries: 108 | raise 109 | do_retry = False 110 | if dbapi_name == "psycopg2": 111 | import psycopg2 112 | import psycopg2.errorcodes 113 | if isinstance(e.orig, psycopg2.OperationalError): 114 | if e.orig.pgcode == psycopg2.errorcodes.SERIALIZATION_FAILURE: 115 | do_retry = True 116 | else: 117 | import psycopg 118 | if isinstance(e.orig, psycopg.errors.SerializationFailure): 119 | do_retry = True 120 | if do_retry: 121 | retry_count += 1 122 | if max_backoff > 0: 123 | retry_exponential_backoff(retry_count, max_backoff) 124 | continue 125 | raise 126 | -------------------------------------------------------------------------------- /test-requirements.in: -------------------------------------------------------------------------------- 1 | # Packages used when running tests. Installed by tox into multiple 2 | # virtualenvs. 3 | # 4 | # To add/update dependencies, update test-requirements.in (not the 5 | # generated test-requirements.txt) and run make update-requirements 6 | 7 | alembic 8 | asyncpg 9 | futures 10 | mock 11 | more-itertools 12 | psycopg 13 | psycopg2 14 | pytest==7.1.3 15 | sqlalchemy>=2.0.0 16 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | alembic==1.15.2 2 | # via -r test-requirements.in 3 | async-timeout==5.0.1 4 | # via asyncpg 5 | asyncpg==0.30.0 6 | # via -r test-requirements.in 7 | attrs==25.3.0 8 | # via pytest 9 | futures==3.0.5 10 | # via -r test-requirements.in 11 | greenlet==3.1.1 12 | # via sqlalchemy 13 | iniconfig==2.1.0 14 | # via pytest 15 | mako==1.3.9 16 | # via alembic 17 | markupsafe==3.0.2 18 | # via mako 19 | mock==5.2.0 20 | # via -r test-requirements.in 21 | more-itertools==10.6.0 22 | # via -r test-requirements.in 23 | packaging==24.2 24 | # via pytest 25 | pluggy==1.5.0 26 | # via pytest 27 | psycopg==3.2.6 28 | # via -r test-requirements.in 29 | psycopg2==2.9.10 30 | # via -r test-requirements.in 31 | py==1.11.0 32 | # via pytest 33 | pytest==7.1.3 34 | # via -r test-requirements.in 35 | sqlalchemy==2.0.40 36 | # via 37 | # -r test-requirements.in 38 | # alembic 39 | tomli==2.2.1 40 | # via pytest 41 | typing-extensions==4.13.1 42 | # via 43 | # alembic 44 | # psycopg 45 | # sqlalchemy 46 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cockroachdb/sqlalchemy-cockroachdb/5206ae652d801f835b9d94226cb60b2d5347bd8c/test/__init__.py -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects import registry 2 | import pytest 3 | 4 | registry.register( 5 | "cockroachdb", 6 | "sqlalchemy_cockroachdb.psycopg2", 7 | "CockroachDBDialect_psycopg2", 8 | ) 9 | registry.register( 10 | "cockroachdb.psycopg2", 11 | "sqlalchemy_cockroachdb.psycopg2", 12 | "CockroachDBDialect_psycopg2", 13 | ) 14 | 
registry.register( 15 | "cockroachdb.asyncpg", 16 | "sqlalchemy_cockroachdb.asyncpg", 17 | "CockroachDBDialect_asyncpg", 18 | ) 19 | registry.register( 20 | "cockroachdb.psycopg", 21 | "sqlalchemy_cockroachdb.psycopg", 22 | "CockroachDBDialect_psycopg", 23 | ) 24 | 25 | pytest.register_assert_rewrite("sqlalchemy.testing.assertions") 26 | 27 | from sqlalchemy.testing.plugin.pytestplugin import * # noqa 28 | -------------------------------------------------------------------------------- /test/test_across_schema.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import distinct, func, MetaData, Table, testing, text 2 | from sqlalchemy.orm import sessionmaker 3 | from sqlalchemy.testing import fixtures 4 | 5 | 6 | class AcrossSchemaTest(fixtures.TestBase): 7 | __requires__ = ("sync_driver",) 8 | 9 | def teardown_method(self, method): 10 | if not testing.db.dialect._is_v2plus: 11 | return 12 | with testing.db.begin() as conn: 13 | conn.execute(text("DROP TABLE IF EXISTS users")) 14 | 15 | def setup_method(self): 16 | if not testing.db.dialect._is_v2plus: 17 | return 18 | 19 | with testing.db.begin() as conn: 20 | conn.execute( 21 | text( 22 | """ 23 | CREATE TABLE IF NOT EXISTS users ( 24 | name STRING PRIMARY KEY 25 | ) 26 | """ 27 | ) 28 | ) 29 | self.meta = MetaData(schema="public") 30 | 31 | def test_get_columns_indexes_across_schema(self): 32 | if not testing.db.dialect._is_v2plus: 33 | return 34 | 35 | # get_columns and get_indexes use default db uri schema. 36 | # across schema table must use schema.table 37 | Table("users", self.meta, autoload_with=testing.db, schema="public") 38 | Table("columns", self.meta, autoload_with=testing.db, schema="information_schema") 39 | 40 | def test_using_info_schema(self): 41 | if not testing.db.dialect._is_v2plus: 42 | return 43 | 44 | table = Table("columns", self.meta, autoload_with=testing.db, schema="information_schema") 45 | sm = sessionmaker(testing.db) 46 | session = sm() 47 | assert session.query(func.count(distinct(table.columns["table_name"]))).scalar() > 1 48 | -------------------------------------------------------------------------------- /test/test_column_reflect.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import MetaData, Table, Column, Integer, String, testing, inspect 2 | from sqlalchemy.testing import fixtures, eq_ 3 | 4 | meta = MetaData() 5 | 6 | with_pk = Table( 7 | "with_pk", 8 | meta, 9 | Column("id", Integer, primary_key=True), 10 | Column("txt", String), 11 | ) 12 | 13 | without_pk = Table( 14 | "without_pk", 15 | meta, 16 | Column("txt", String), 17 | ) 18 | 19 | 20 | class ReflectHiddenColumnsTest(fixtures.TestBase): 21 | __requires__ = ("sync_driver",) 22 | 23 | def teardown_method(self, method): 24 | meta.drop_all(testing.db) 25 | 26 | def setup_method(self): 27 | meta.create_all(testing.db) 28 | 29 | def _get_col_info(self, table_name, include_hidden=False): 30 | insp = inspect(testing.db) 31 | col_info = insp.get_columns(table_name, include_hidden=include_hidden) 32 | for row in col_info: 33 | row["type"] = str(row["type"]) 34 | return col_info 35 | 36 | def test_reflect_hidden_columns(self): 37 | eq_( 38 | self._get_col_info("with_pk"), 39 | [ 40 | { 41 | "name": "id", 42 | "type": "INTEGER", 43 | "nullable": False, 44 | "default": "unique_rowid()", 45 | "autoincrement": True, 46 | "is_hidden": False, 47 | "comment": None, 48 | }, 49 | { 50 | "name": "txt", 51 | "type": "VARCHAR", 52 | "nullable": True, 53 
| "default": None, 54 | "autoincrement": False, 55 | "is_hidden": False, 56 | "comment": None, 57 | }, 58 | ], 59 | ) 60 | 61 | eq_( 62 | self._get_col_info("without_pk"), # include_hidden=False 63 | [ 64 | { 65 | "name": "txt", 66 | "type": "VARCHAR", 67 | "nullable": True, 68 | "default": None, 69 | "autoincrement": False, 70 | "is_hidden": False, 71 | "comment": None, 72 | }, 73 | ], 74 | ) 75 | 76 | eq_( 77 | self._get_col_info("without_pk", include_hidden=True), 78 | [ 79 | { 80 | "name": "txt", 81 | "type": "VARCHAR", 82 | "nullable": True, 83 | "default": None, 84 | "autoincrement": False, 85 | "is_hidden": False, 86 | "comment": None, 87 | }, 88 | { 89 | "name": "rowid", 90 | "type": "INTEGER", 91 | "nullable": False, 92 | "default": "unique_rowid()", 93 | "autoincrement": True, 94 | "is_hidden": True, 95 | "comment": None, 96 | }, 97 | ], 98 | ) 99 | -------------------------------------------------------------------------------- /test/test_introspection.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | 3 | from sqlalchemy import ( 4 | Table, 5 | Column, 6 | MetaData, 7 | testing, 8 | ForeignKey, 9 | UniqueConstraint, 10 | CheckConstraint, 11 | text, 12 | ) 13 | from sqlalchemy.types import Integer, String, Boolean 14 | import sqlalchemy.types as sqltypes 15 | from sqlalchemy.testing import fixtures 16 | from sqlalchemy.dialects.postgresql import INET 17 | from sqlalchemy.dialects.postgresql import UUID 18 | 19 | meta = MetaData() 20 | 21 | customer_table = Table( 22 | "customer", 23 | meta, 24 | Column("id", Integer, primary_key=True), 25 | Column("name", String), 26 | Column("email", String), 27 | Column("verified", Boolean), 28 | UniqueConstraint("email"), 29 | ) 30 | 31 | order_table = Table( 32 | "order", 33 | meta, 34 | Column("id", Integer, primary_key=True), 35 | Column("customer_id", Integer, ForeignKey("customer.id")), 36 | Column("info", String), 37 | Column("status", String, CheckConstraint("status in ('open', 'closed')")), 38 | ) 39 | 40 | # Regression test for https://github.com/cockroachdb/cockroach/issues/26993 41 | index_table = Table("index", meta, Column("index", Integer, primary_key=True)) 42 | view_table = Table("view", meta, Column("view", Integer, primary_key=True)) 43 | 44 | 45 | class IntrospectionTest(fixtures.TestBase): 46 | __requires__ = ("sync_driver",) 47 | 48 | def teardown_method(self, method): 49 | meta.drop_all(testing.db) 50 | 51 | def setup_method(self): 52 | meta.create_all(testing.db) 53 | 54 | @testing.provide_metadata 55 | def test_create_metadata(self): 56 | # Create a metadata via introspection on the live DB. 57 | meta2 = self.metadata 58 | 59 | # TODO(bdarnell): Do more testing. 60 | # For now just make sure it doesn't raise exceptions. 61 | # This covers get_foreign_keys(), which is apparently untested 62 | # in SQLAlchemy's dialect test suite. 
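To make the comment above concrete, the reflected foreign key can also be examined directly through the inspector (an illustrative sketch; the constraint name is generated by CockroachDB and shown here only as a guess):

```python
from sqlalchemy import inspect

# e.g., inside a test method:
fks = inspect(testing.db).get_foreign_keys("order")
# Roughly:
# [{'name': 'order_customer_id_fkey',
#   'constrained_columns': ['customer_id'],
#   'referred_schema': None,
#   'referred_table': 'customer',
#   'referred_columns': ['id'],
#   'options': {...}}]
```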
63 | Table("customer", meta2, autoload_with=testing.db) 64 | Table("order", meta2, autoload_with=testing.db) 65 | Table("index", meta2, autoload_with=testing.db) 66 | Table("view", meta2, autoload_with=testing.db) 67 | 68 | 69 | class TestTypeReflection(fixtures.TestBase): 70 | __requires__ = ("sync_driver",) 71 | 72 | TABLE_NAME = "t" 73 | COLUMN_NAME = "c" 74 | 75 | @testing.provide_metadata 76 | def _test(self, typ, expected, array_item_type=None): 77 | with testing.db.begin() as conn: 78 | conn.execute( 79 | text( 80 | "CREATE TABLE {} ({} {})".format( 81 | self.TABLE_NAME, 82 | self.COLUMN_NAME, 83 | typ, 84 | ) 85 | ) 86 | ) 87 | 88 | t = Table(self.TABLE_NAME, self.metadata, autoload_with=testing.db) 89 | c = t.c[self.COLUMN_NAME] 90 | assert isinstance(c.type, expected) 91 | if array_item_type: 92 | assert isinstance(c.type.item_type, array_item_type) 93 | 94 | def test_array(self): 95 | self._test("boolean[]", sqltypes.ARRAY, sqltypes.BOOLEAN) 96 | self._test("date[]", sqltypes.ARRAY, sqltypes.DATE) 97 | self._test("decimal[]", sqltypes.ARRAY, sqltypes.DECIMAL) 98 | self._test("float[]", sqltypes.ARRAY, sqltypes.FLOAT) 99 | self._test("int[]", sqltypes.ARRAY, sqltypes.INTEGER) 100 | self._test("timestamp[]", sqltypes.ARRAY, sqltypes.TIMESTAMP) 101 | self._test("varchar(10)[]", sqltypes.ARRAY, sqltypes.VARCHAR) 102 | 103 | def test_boolean(self): 104 | for t in ["bool", "boolean"]: 105 | self._test(t, sqltypes.BOOLEAN) 106 | 107 | def test_int(self): 108 | for t in ["bigint", "int", "int2", "int4", "int64", "int8", "integer", "smallint"]: 109 | self._test(t, sqltypes.INT) 110 | 111 | def test_float(self): 112 | for t in ["double precision", "float", "float4", "float8", "real"]: 113 | self._test(t, sqltypes.FLOAT) 114 | 115 | def test_decimal(self): 116 | for t in ["dec", "decimal", "numeric"]: 117 | self._test(t, sqltypes.DECIMAL) 118 | 119 | def test_date(self): 120 | self._test("date", sqltypes.DATE) 121 | 122 | def test_time(self): 123 | for t in ["time", "time without time zone"]: 124 | self._test(t, sqltypes.Time) 125 | 126 | def test_timestamp(self): 127 | types = [ 128 | "timestamp", 129 | "timestamptz", 130 | "timestamp with time zone", 131 | "timestamp without time zone", 132 | ] 133 | for t in types: 134 | self._test(t, sqltypes.TIMESTAMP) 135 | 136 | def test_interval(self): 137 | self._test("interval", sqltypes.Interval) 138 | 139 | def test_varchar(self): 140 | types = [ 141 | "char", 142 | "char varying", 143 | "character", 144 | "character varying", 145 | "string", 146 | "text", 147 | "varchar", 148 | ] 149 | for t in types: 150 | self._test(t, sqltypes.VARCHAR) 151 | 152 | def test_blob(self): 153 | for t in ["blob", "bytea", "bytes"]: 154 | self._test(t, sqltypes.BLOB) 155 | 156 | def test_json(self): 157 | for t in ["json", "jsonb"]: 158 | self._test(t, sqltypes.JSON) 159 | 160 | def test_uuid(self): 161 | self._test("uuid", UUID) 162 | 163 | def test_inet(self): 164 | self._test("inet", INET) 165 | 166 | 167 | class UnknownTypeTest(fixtures.TestBase): 168 | __requires__ = ("sync_driver",) 169 | 170 | def setup_method(self): 171 | with testing.db.begin() as conn: 172 | conn.execute(text("CREATE TABLE t2 (c bool)")) 173 | 174 | def teardown_method(self): 175 | with testing.db.begin() as conn: 176 | conn.execute(text("DROP TABLE t2")) 177 | 178 | @testing.expect_warnings("Did not recognize type 'boolean'") 179 | def test_unknown_type(self): 180 | @contextlib.contextmanager 181 | def make_bool_unknown(): 182 | import sqlalchemy_cockroachdb 183 | 184 | t = 
sqlalchemy_cockroachdb.base._type_map.pop("bool") 185 | sqlalchemy_cockroachdb.base._type_map.pop("boolean") 186 | yield 187 | sqlalchemy_cockroachdb.base._type_map["bool"] = t 188 | sqlalchemy_cockroachdb.base._type_map["boolean"] = t 189 | 190 | with make_bool_unknown(): 191 | meta2 = MetaData() 192 | t = Table("t2", meta2, autoload_with=testing.db) 193 | assert t.c["c"].type == sqltypes.NULLTYPE 194 | -------------------------------------------------------------------------------- /test/test_json.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Table, Column, select, testing 2 | from sqlalchemy.dialects.postgresql import JSONB, JSON 3 | from sqlalchemy.orm import declarative_base 4 | from sqlalchemy.orm import sessionmaker 5 | from sqlalchemy.testing import fixtures, eq_ 6 | from sqlalchemy.types import Integer 7 | 8 | from sqlalchemy import JSON as BaseJSON 9 | 10 | 11 | class JSONTest(fixtures.TablesTest): 12 | __backend__ = True 13 | 14 | @classmethod 15 | def define_tables(cls, metadata): 16 | Table( 17 | "json_model", 18 | metadata, 19 | Column("id", Integer, primary_key=True, autoincrement=False), 20 | Column("jsonb_data", JSONB), 21 | Column("json_data", JSON), 22 | Column("base_json_data", BaseJSON), 23 | ) 24 | 25 | @classmethod 26 | def insert_data(cls, connection): 27 | connection.execute( 28 | cls.tables.json_model.insert(), 29 | [ 30 | dict(id=1, jsonb_data={"a": 1}, json_data={"b": 2}, base_json_data={"c": 3}), 31 | dict(id=2, jsonb_data={"d": 4}, json_data={"e": 5}, base_json_data={"f": 6}), 32 | ], 33 | ) 34 | 35 | def test_json(self, connection): 36 | if not testing.db.dialect._has_native_json: 37 | return 38 | json_table = self.tables.json_model 39 | result = [] 40 | query = select(json_table.c.jsonb_data, json_table.c.json_data, json_table.c.base_json_data) 41 | for row in connection.execute(query): 42 | result.append((row.jsonb_data, row.json_data, row.base_json_data)) 43 | eq_(result, [({"a": 1}, {"b": 2}, {"c": 3}), ({"d": 4}, {"e": 5}, {"f": 6})]) 44 | 45 | 46 | class JSONSessionTest(fixtures.TestBase): 47 | __backend__ = True 48 | 49 | def _fixture(self): 50 | Base = declarative_base() 51 | 52 | class JSONModel(Base): 53 | __tablename__ = "json_model" 54 | id = Column(Integer, primary_key=True, autoincrement=False) 55 | jsonb_data = Column(JSONB) 56 | json_data = Column(JSON) 57 | base_json_data = Column(BaseJSON) 58 | 59 | return JSONModel 60 | 61 | def test_json(self, connection): 62 | if not testing.db.dialect._has_native_json: 63 | return 64 | 65 | JSONModel = self._fixture() 66 | meta = JSONModel.metadata 67 | meta.create_all(connection) 68 | 69 | Session = sessionmaker(connection) 70 | session = Session() 71 | try: 72 | session.add_all( 73 | [ 74 | JSONModel( 75 | id=1, jsonb_data={"a": 1}, json_data={"b": 2}, base_json_data={"c": 3} 76 | ), 77 | JSONModel( 78 | id=2, jsonb_data={"d": 4}, json_data={"e": 5}, base_json_data={"f": 6} 79 | ), 80 | ] 81 | ) 82 | session.commit() 83 | result = [] 84 | for row in session.query(JSONModel).all(): 85 | result.append((row.jsonb_data, row.json_data, row.base_json_data)) 86 | eq_(result, [({"a": 1}, {"b": 2}, {"c": 3}), ({"d": 4}, {"e": 5}, {"f": 6})]) 87 | finally: 88 | meta.drop_all(connection) 89 | -------------------------------------------------------------------------------- /test/test_run_transaction_core.py: -------------------------------------------------------------------------------- 1 | from concurrent.futures import ThreadPoolExecutor 2 | from 
sqlalchemy import Table, Column, MetaData, select, testing, text
3 | from sqlalchemy.testing import fixtures
4 | from sqlalchemy.types import Integer
5 | import threading
6 |
7 | from sqlalchemy_cockroachdb import run_transaction
8 |
9 | meta = MetaData()
10 |
11 | account_table = Table(
12 | "account",
13 | meta,
14 | Column("acct", Integer, primary_key=True, autoincrement=False),
15 | Column("balance", Integer),
16 | )
17 |
18 |
19 | class BaseRunTransactionTest(fixtures.TestBase):
20 | def setup_method(self, method):
21 | meta.create_all(testing.db)
22 | with testing.db.begin() as conn:
23 | conn.execute(
24 | account_table.insert(), [dict(acct=1, balance=100), dict(acct=2, balance=100)]
25 | )
26 |
27 | def teardown_method(self, method):
28 | meta.drop_all(testing.db)
29 |
30 | def get_balances(self, conn):
31 | """Returns the balances of the two accounts as a list."""
32 | result = []
33 | query = (
34 | select(account_table.c.balance)
35 | .where(account_table.c.acct.in_((1, 2)))
36 | .order_by(account_table.c.acct)
37 | )
38 | for row in conn.execute(query):
39 | result.append(row.balance)
40 | if len(result) != 2:
41 | raise Exception("Expected two balances; got %d" % len(result))
42 | return result
43 |
44 | def run_parallel_transactions(self, callback):
45 | """Runs the callback in two parallel transactions.
46 |
47 | A barrier function is passed to the callback and should be run
48 | after the transaction has performed its first read. This
49 | synchronizes the two transactions to ensure that at least one
50 | of them must restart.
51 | """
52 | cv = threading.Condition()
53 | wait_count = [2]
54 |
55 | def worker():
56 | iters = [0]
57 |
58 | def barrier():
59 | iters[0] += 1
60 | if iters[0] == 1:
61 | # If this is the first iteration, wait for the other txn to also read.
62 | with cv: 63 | wait_count[0] -= 1 64 | cv.notifyAll() 65 | while wait_count[0] > 0: 66 | cv.wait() 67 | 68 | callback(barrier) 69 | return iters[0] 70 | 71 | with ThreadPoolExecutor(2) as executor: 72 | future1 = executor.submit(worker) 73 | future2 = executor.submit(worker) 74 | iters1 = future1.result() 75 | iters2 = future2.result() 76 | 77 | assert ( 78 | iters1 + iters2 > 2 79 | ), "expected at least one retry between the competing " "txns, got txn1=%d, txn2=%d" % ( 80 | iters1, 81 | iters2, 82 | ) 83 | balances = self.get_balances(testing.db.connect()) 84 | assert balances == [100, 100], ( 85 | "expected balances to be restored without error; " "got %s" % balances 86 | ) 87 | 88 | 89 | class RunTransactionCoreTest(BaseRunTransactionTest): 90 | __requires__ = ("sync_driver",) 91 | 92 | def perform_transfer(self, conn, balances): 93 | if balances[0] > balances[1]: 94 | conn.execute( 95 | account_table.update() 96 | .where(account_table.c.acct == 1) 97 | .values(balance=account_table.c.balance - 100) 98 | ) 99 | conn.execute( 100 | account_table.update() 101 | .where(account_table.c.acct == 2) 102 | .values(balance=account_table.c.balance + 100) 103 | ) 104 | else: 105 | conn.execute( 106 | account_table.update() 107 | .where(account_table.c.acct == 1) 108 | .values(balance=account_table.c.balance + 100) 109 | ) 110 | conn.execute( 111 | account_table.update() 112 | .where(account_table.c.acct == 2) 113 | .values(balance=account_table.c.balance - 100) 114 | ) 115 | 116 | def test_run_transaction(self): 117 | def callback(barrier): 118 | def txn_body(conn): 119 | balances = self.get_balances(conn) 120 | barrier() 121 | self.perform_transfer(conn, balances) 122 | 123 | with testing.db.connect() as conn: 124 | run_transaction(conn, txn_body) 125 | 126 | self.run_parallel_transactions(callback) 127 | 128 | def test_run_transaction_retry(self): 129 | def txn_body(conn): 130 | rs = conn.execute(text("select acct, balance from account where acct = 1")) 131 | conn.execute(text("select crdb_internal.force_retry('1s')")) 132 | return [r for r in rs] 133 | 134 | with testing.db.connect() as conn: 135 | rs = run_transaction(conn, txn_body) 136 | assert rs[0] == (1, 100) 137 | -------------------------------------------------------------------------------- /test/test_run_transaction_session.py: -------------------------------------------------------------------------------- 1 | from concurrent.futures import ThreadPoolExecutor 2 | from sqlalchemy import Column, DateTime, func, Integer, select, testing, text 3 | from sqlalchemy.orm import Session, sessionmaker 4 | from sqlalchemy.testing import fixtures 5 | import threading 6 | 7 | from sqlalchemy_cockroachdb import run_transaction 8 | 9 | 10 | class BaseRunTransactionTest(fixtures.DeclarativeMappedTest): 11 | @classmethod 12 | def setup_classes(cls): 13 | Base = cls.DeclarativeBasic 14 | 15 | class Account(Base): 16 | __tablename__ = "account" 17 | 18 | acct = Column(Integer, primary_key=True, autoincrement=False) 19 | balance = Column(Integer) 20 | 21 | @classmethod 22 | def insert_data(cls, connection): 23 | Account = cls.classes.Account 24 | 25 | session = Session(connection) 26 | session.add_all([Account(acct=1, balance=100), Account(acct=2, balance=100)]) 27 | session.commit() 28 | 29 | def get_balances(self, conn): 30 | Account = self.classes.Account 31 | 32 | """Returns the balances of the two accounts as a list.""" 33 | result = [] 34 | query = select(Account.balance).where(Account.acct.in_((1, 2))).order_by(Account.acct) 35 | for row in 
conn.execute(query):
36 | result.append(row.balance)
37 | if len(result) != 2:
38 | raise Exception("Expected two balances; got %d" % len(result))
39 | return result
40 |
41 | def run_parallel_transactions(self, callback, conn):
42 | """Runs the callback in two parallel transactions.
43 |
44 | A barrier function is passed to the callback and should be run
45 | after the transaction has performed its first read. This
46 | synchronizes the two transactions to ensure that at least one
47 | of them must restart.
48 | """
49 | cv = threading.Condition()
50 | wait_count = [2]
51 |
52 | def worker():
53 | iters = [0]
54 |
55 | def barrier():
56 | iters[0] += 1
57 | if iters[0] == 1:
58 | # If this is the first iteration, wait for the other txn to also read.
59 | with cv:
60 | wait_count[0] -= 1
61 | cv.notifyAll()
62 | while wait_count[0] > 0:
63 | cv.wait()
64 |
65 | callback(barrier)
66 | return iters[0]
67 |
68 | with ThreadPoolExecutor(2) as executor:
69 | future1 = executor.submit(worker)
70 | future2 = executor.submit(worker)
71 | iters1 = future1.result()
72 | iters2 = future2.result()
73 |
74 | assert (
75 | iters1 + iters2 > 2
76 | ), "expected at least one retry between the competing " "txns, got txn1=%d, txn2=%d" % (
77 | iters1,
78 | iters2,
79 | )
80 | balances = self.get_balances(conn)
81 | assert balances == [100, 100], (
82 | "expected balances to be restored without error; " "got %s" % balances
83 | )
84 |
85 |
86 | class RunTransactionSessionTest(BaseRunTransactionTest):
87 | __requires__ = ("sync_driver",)
88 |
89 | def test_run_transaction(self, connection):
90 | Account = self.classes.Account
91 |
92 | def callback(barrier):
93 | Session = sessionmaker(testing.db)
94 |
95 | def txn_body(session):
96 | accounts = list(
97 | session.query(Account).filter(Account.acct.in_((1, 2))).order_by(Account.acct)
98 | )
99 | barrier()
100 | if accounts[0].balance > accounts[1].balance:
101 | accounts[0].balance -= 100
102 | accounts[1].balance += 100
103 | else:
104 | accounts[0].balance += 100
105 | accounts[1].balance -= 100
106 |
107 | run_transaction(Session, txn_body)
108 |
109 | self.run_parallel_transactions(callback, connection)
110 |
111 | def test_run_transaction_retry(self):
112 | def txn_body(sess):
113 | rs = sess.execute(text("select acct, balance from account where acct = 1"))
114 | sess.execute(text("select crdb_internal.force_retry('1s')"))
115 | return [r for r in rs]
116 |
117 | Session = sessionmaker(testing.db)
118 | rs = run_transaction(Session, txn_body)
119 | assert rs[0] == (1, 100)
120 |
121 |
122 | class InsertReturningTest(fixtures.DeclarativeMappedTest):
123 | @classmethod
124 | def setup_classes(cls):
125 | Base = cls.DeclarativeBasic
126 |
127 | class Item(Base):
128 | __tablename__ = "item"
129 | __mapper_args__ = {"eager_defaults": True}
130 |
131 | id = Column(Integer, primary_key=True, autoincrement=True)
132 | created = Column(DateTime, server_default=func.now())
133 |
134 | def test_insert_returning(self):
135 | # This test demonstrates the use of the INSERT RETURNING
136 | # clause with the ORM to return server-generated values from a
137 | # transaction. The expire_on_commit=False option is necessary
138 | # to make the objects valid after the transaction has
139 | # completed.
The eager_defaults option (set above) is 140 | # necessary to handle fields other than the primary key (which 141 | # is always loaded eagerly) 142 | 143 | Item = self.classes.Item 144 | 145 | def txn_body(session): 146 | item = Item() 147 | session.add(item) 148 | return item 149 | 150 | Session = sessionmaker(testing.db, expire_on_commit=False) 151 | item = run_transaction(Session, txn_body) 152 | assert item.id is not None 153 | assert item.created is not None 154 | -------------------------------------------------------------------------------- /test/test_suite_alembic.py: -------------------------------------------------------------------------------- 1 | from alembic.testing.suite import * # noqa 2 | from sqlalchemy.testing import skip 3 | from alembic.testing.suite import AutogenerateFKOptionsTest as _AutogenerateFKOptionsTest 4 | from alembic.testing.suite import BackendAlterColumnTest as _BackendAlterColumnTest 5 | 6 | 7 | class AutogenerateFKOptionsTest(_AutogenerateFKOptionsTest): 8 | @skip("cockroachdb") 9 | def test_nochange_ondelete(self): 10 | pass 11 | 12 | 13 | class BackendAlterColumnTest(_BackendAlterColumnTest): 14 | @skip("cockroachdb") 15 | def test_modify_nullable_to_non(self): 16 | # previously needed "with self.op.get_context().autocommit_block():" 17 | # which is no longer valid in SQLA 2.0 18 | pass 19 | 20 | @skip("cockroachdb") 21 | def test_modify_type_int_str(self): 22 | # TODO: enable this test when warning removed for ALTER COLUMN int → string 23 | pass 24 | -------------------------------------------------------------------------------- /test/test_suite_sqlalchemy.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import FLOAT, INTEGER, VARCHAR 2 | from sqlalchemy.testing import skip 3 | from sqlalchemy.testing.suite import * # noqa 4 | from sqlalchemy.testing.suite import ( 5 | ComponentReflectionTest as _ComponentReflectionTest, 6 | ) 7 | from sqlalchemy.testing.suite import HasIndexTest as _HasIndexTest 8 | from sqlalchemy.testing.suite import HasTableTest as _HasTableTest 9 | from sqlalchemy.testing.suite import IntegerTest as _IntegerTest 10 | from sqlalchemy.testing.suite import InsertBehaviorTest as _InsertBehaviorTest 11 | from sqlalchemy.testing.suite import IsolationLevelTest as _IsolationLevelTest 12 | from sqlalchemy.testing.suite import ( 13 | LongNameBlowoutTest as _LongNameBlowoutTest, 14 | ) 15 | from sqlalchemy.testing.suite import NumericTest as _NumericTest 16 | from sqlalchemy.testing.suite import ( 17 | QuotedNameArgumentTest as _QuotedNameArgumentTest, 18 | ) 19 | from sqlalchemy.testing.suite import TrueDivTest as _TrueDivTest 20 | from sqlalchemy.testing.suite import UnicodeSchemaTest as _UnicodeSchemaTest 21 | 22 | 23 | class ComponentReflectionTest(_ComponentReflectionTest): 24 | def test_get_indexes(self, connection): 25 | if not (config.db.dialect.driver == "asyncpg" and not config.db.dialect._is_v231plus): 26 | super().test_get_indexes(connection, None, None) 27 | 28 | @skip("cockroachdb") 29 | def test_get_noncol_index(self): 30 | # test not designed to handle ('desc', 'nulls_last') 31 | pass 32 | 33 | def test_get_multi_columns(self): 34 | insp = inspect(config.db) 35 | actual = insp.get_multi_columns() 36 | expected = { 37 | (None, "users"): [ 38 | { 39 | "name": "user_id", 40 | "type": INTEGER(), 41 | "nullable": False, 42 | "default": "unique_rowid()", 43 | "autoincrement": True, 44 | "is_hidden": False, 45 | "comment": None, 46 | }, 47 | { 48 | "name": "test1", 49 | 
"type": VARCHAR(length=5), 50 | "nullable": False, 51 | "default": None, 52 | "autoincrement": False, 53 | "is_hidden": False, 54 | "comment": None, 55 | }, 56 | { 57 | "name": "test2", 58 | "type": FLOAT(), 59 | "nullable": False, 60 | "default": None, 61 | "autoincrement": False, 62 | "is_hidden": False, 63 | "comment": None, 64 | }, 65 | { 66 | "name": "parent_user_id", 67 | "type": INTEGER(), 68 | "nullable": True, 69 | "default": None, 70 | "autoincrement": False, 71 | "is_hidden": False, 72 | "comment": None, 73 | }, 74 | ], 75 | (None, "comment_test"): [ 76 | { 77 | "name": "id", 78 | "type": INTEGER(), 79 | "nullable": False, 80 | "default": "unique_rowid()", 81 | "autoincrement": True, 82 | "is_hidden": False, 83 | "comment": "id comment", 84 | }, 85 | { 86 | "name": "data", 87 | "type": VARCHAR(length=20), 88 | "nullable": True, 89 | "default": None, 90 | "autoincrement": False, 91 | "is_hidden": False, 92 | "comment": "data % comment", 93 | }, 94 | { 95 | "name": "d2", 96 | "type": VARCHAR(length=20), 97 | "nullable": True, 98 | "default": None, 99 | "autoincrement": False, 100 | "is_hidden": False, 101 | "comment": "Comment types type speedily ' \" \\ '' Fun!", 102 | }, 103 | { 104 | "name": "d3", 105 | "type": VARCHAR(length=42), 106 | "nullable": True, 107 | "default": None, 108 | "autoincrement": False, 109 | "is_hidden": False, 110 | "comment": "Comment\nwith\rescapes", 111 | }, 112 | ], 113 | (None, "no_constraints"): [ 114 | { 115 | "name": "data", 116 | "type": VARCHAR(length=20), 117 | "nullable": True, 118 | "default": None, 119 | "autoincrement": False, 120 | "is_hidden": False, 121 | "comment": None, 122 | } 123 | ], 124 | (None, "noncol_idx_test_nopk"): [ 125 | { 126 | "name": "q", 127 | "type": VARCHAR(length=5), 128 | "nullable": True, 129 | "default": None, 130 | "autoincrement": False, 131 | "is_hidden": False, 132 | "comment": None, 133 | } 134 | ], 135 | (None, "noncol_idx_test_pk"): [ 136 | { 137 | "name": "id", 138 | "type": INTEGER(), 139 | "nullable": False, 140 | "default": "unique_rowid()", 141 | "autoincrement": True, 142 | "is_hidden": False, 143 | "comment": None, 144 | }, 145 | { 146 | "name": "q", 147 | "type": VARCHAR(length=5), 148 | "nullable": True, 149 | "default": None, 150 | "autoincrement": False, 151 | "is_hidden": False, 152 | "comment": None, 153 | }, 154 | ], 155 | (None, "email_addresses"): [ 156 | { 157 | "name": "address_id", 158 | "type": INTEGER(), 159 | "nullable": False, 160 | "default": "unique_rowid()", 161 | "autoincrement": True, 162 | "is_hidden": False, 163 | "comment": None, 164 | }, 165 | { 166 | "name": "remote_user_id", 167 | "type": INTEGER(), 168 | "nullable": True, 169 | "default": None, 170 | "autoincrement": False, 171 | "is_hidden": False, 172 | "comment": None, 173 | }, 174 | { 175 | "name": "email_address", 176 | "type": VARCHAR(length=20), 177 | "nullable": True, 178 | "default": None, 179 | "autoincrement": False, 180 | "is_hidden": False, 181 | "comment": None, 182 | }, 183 | ], 184 | (None, "dingalings"): [ 185 | { 186 | "name": "dingaling_id", 187 | "type": INTEGER(), 188 | "nullable": False, 189 | "default": "unique_rowid()", 190 | "autoincrement": True, 191 | "is_hidden": False, 192 | "comment": None, 193 | }, 194 | { 195 | "name": "address_id", 196 | "type": INTEGER(), 197 | "nullable": True, 198 | "default": None, 199 | "autoincrement": False, 200 | "is_hidden": False, 201 | "comment": None, 202 | }, 203 | { 204 | "name": "id_user", 205 | "type": INTEGER(), 206 | "nullable": True, 207 | "default": None, 
208 | "autoincrement": False, 209 | "is_hidden": False, 210 | "comment": None, 211 | }, 212 | { 213 | "name": "data", 214 | "type": VARCHAR(length=30), 215 | "nullable": True, 216 | "default": None, 217 | "autoincrement": False, 218 | "is_hidden": False, 219 | "comment": None, 220 | }, 221 | ], 222 | } 223 | eq_(len(actual), len(expected)) 224 | eq_(actual.keys(), expected.keys()) 225 | eq_( 226 | len(actual[(None, "comment_test")]), 227 | len(expected[(None, "comment_test")]), 228 | ) 229 | if config.db.dialect.supports_comments: 230 | act = [x for x in actual[(None, "comment_test")] if x["name"] == "data"][0] 231 | exp = [x for x in expected[(None, "comment_test")] if x["name"] == "data"][0] 232 | eq_(act["comment"], exp["comment"]) 233 | 234 | def test_get_multi_indexes(self): 235 | insp = inspect(config.db) 236 | result = insp.get_multi_indexes() 237 | eq_( 238 | result, 239 | { 240 | (None, "comment_test"): [], 241 | (None, "dingalings"): [ 242 | { 243 | "column_names": ["data"], 244 | "column_sorting": {"data": ("nulls_first",)}, 245 | "dialect_options": {"postgresql_using": "prefix"}, 246 | "duplicates_constraint": "dingalings_data_key", 247 | "name": "dingalings_data_key", 248 | "unique": True, 249 | }, 250 | { 251 | "column_names": ["address_id", "dingaling_id"], 252 | "column_sorting": { 253 | "address_id": ("nulls_first",), 254 | "dingaling_id": ("nulls_first",), 255 | }, 256 | "dialect_options": {"postgresql_using": "prefix"}, 257 | "duplicates_constraint": "zz_dingalings_multiple", 258 | "name": "zz_dingalings_multiple", 259 | "unique": True, 260 | }, 261 | ], 262 | (None, "email_addresses"): [ 263 | { 264 | "column_names": ["email_address"], 265 | "column_sorting": {"email_address": ("nulls_first",)}, 266 | "dialect_options": {"postgresql_using": "prefix"}, 267 | "name": "ix_email_addresses_email_address", 268 | "unique": False, 269 | } 270 | ], 271 | (None, "no_constraints"): [], 272 | (None, "noncol_idx_test_nopk"): [ 273 | { 274 | "column_names": ["q"], 275 | "column_sorting": {"q": ("desc", "nulls_last")}, 276 | "dialect_options": {"postgresql_using": "prefix"}, 277 | "name": "noncol_idx_nopk", 278 | "unique": False, 279 | } 280 | ], 281 | (None, "noncol_idx_test_pk"): [ 282 | { 283 | "column_names": ["q"], 284 | "column_sorting": {"q": ("desc", "nulls_last")}, 285 | "dialect_options": {"postgresql_using": "prefix"}, 286 | "name": "noncol_idx_pk", 287 | "unique": False, 288 | } 289 | ], 290 | (None, "users"): [ 291 | { 292 | "column_names": ["user_id", "test2", "test1"], 293 | "column_sorting": { 294 | "test1": ("nulls_first",), 295 | "test2": ("nulls_first",), 296 | "user_id": ("nulls_first",), 297 | }, 298 | "dialect_options": {"postgresql_using": "prefix"}, 299 | "name": "users_all_idx", 300 | "unique": False, 301 | }, 302 | { 303 | "column_names": ["test1", "test2"], 304 | "column_sorting": {"test1": ("nulls_first",), "test2": ("nulls_first",)}, 305 | "dialect_options": {"postgresql_using": "prefix"}, 306 | "duplicates_constraint": "users_t_idx", 307 | "name": "users_t_idx", 308 | "unique": True, 309 | }, 310 | ], 311 | }, 312 | ) 313 | 314 | def test_get_multi_pk_constraint(self): 315 | insp = inspect(config.db) 316 | result = insp.get_multi_pk_constraint() 317 | eq_( 318 | result, 319 | { 320 | (None, "comment_test"): { 321 | "comment": None, 322 | "constrained_columns": ["id"], 323 | "name": "comment_test_pkey", 324 | }, 325 | (None, "dingalings"): { 326 | "comment": None, 327 | "constrained_columns": ["dingaling_id"], 328 | "name": "dingalings_pkey", 329 | }, 330 
| (None, "email_addresses"): { 331 | "comment": "ea pk comment", 332 | "constrained_columns": ["address_id"], 333 | "name": "email_ad_pk", 334 | }, 335 | (None, "no_constraints"): { 336 | "comment": None, 337 | "constrained_columns": ["rowid"], 338 | "name": "no_constraints_pkey", 339 | }, 340 | (None, "noncol_idx_test_nopk"): { 341 | "comment": None, 342 | "constrained_columns": ["rowid"], 343 | "name": "noncol_idx_test_nopk_pkey", 344 | }, 345 | (None, "noncol_idx_test_pk"): { 346 | "comment": None, 347 | "constrained_columns": ["id"], 348 | "name": "noncol_idx_test_pk_pkey", 349 | }, 350 | (None, "users"): { 351 | "comment": None, 352 | "constrained_columns": ["user_id"], 353 | "name": "users_pkey", 354 | }, 355 | }, 356 | ) 357 | 358 | @skip("cockroachdb") 359 | def test_get_pk_constraint(self): 360 | # we still have a "rowid" constraint when no explicit PK declared 361 | pass 362 | 363 | @skip("cockroachdb") 364 | def test_get_view_names(self): 365 | # TODO: What has changed in the SQLA 2.0 tests that causes this to return an empty list? 366 | # FWIW, insp.get_view_names() does still work IRL 367 | pass 368 | 369 | @testing.combinations(True, False, argnames="use_schema") 370 | @testing.combinations((True, testing.requires.views), False, argnames="views") 371 | def test_metadata(self, connection, use_schema, views): 372 | if not (config.db.dialect.driver == "asyncpg" and not config.db.dialect._is_v231plus): 373 | super().test_metadata(connection, use_schema, views, []) 374 | 375 | @skip("cockroachdb") 376 | def test_not_existing_table(self): 377 | # TODO: Why "AssertionError: Callable did not raise an exception"? 378 | pass 379 | 380 | 381 | class HasIndexTest(_HasIndexTest): 382 | @skip("cockroachdb") 383 | def test_has_index(self): 384 | """ 385 | ObjectNotInPrerequisiteState: index "my_idx_2" in the middle of being added, try again later 386 | """ 387 | pass 388 | 389 | 390 | class HasTableTest(_HasTableTest): 391 | @skip("cockroachdb") 392 | def test_has_table_cache(self): 393 | pass 394 | 395 | 396 | class InsertBehaviorTest(_InsertBehaviorTest): 397 | @skip("cockroachdb") 398 | def test_no_results_for_non_returning_insert(self): 399 | # we support RETURNING, so this should not be necessary 400 | pass 401 | 402 | 403 | class IntegerTest(_IntegerTest): 404 | @_IntegerTest._huge_ints() 405 | def test_huge_int(self, integer_round_trip, intvalue): 406 | if config.db.dialect.driver != "asyncpg": 407 | super().test_huge_int(integer_round_trip, intvalue) 408 | 409 | 410 | class IsolationLevelTest(_IsolationLevelTest): 411 | @skip("cockroachdb") 412 | def test_dialect_user_setting_is_restored(self): 413 | # IndexError: list index out of range 414 | pass 415 | 416 | 417 | class LongNameBlowoutTest(_LongNameBlowoutTest): 418 | @testing.combinations( 419 | ("fk",), 420 | ("pk",), 421 | ("ix",), 422 | ("ck"), # (exclusion(s) omitted) 423 | ("uq"), # (exclusion(s) omitted) 424 | argnames="type_", 425 | ) 426 | def test_long_convention_name(self, type_, metadata, connection): 427 | if not (config.db.dialect.driver == "asyncpg" and not config.db.dialect._is_v231plus): 428 | super().test_long_convention_name(type_, metadata, connection, None) 429 | 430 | 431 | class NumericTest(_NumericTest): 432 | def test_numeric_as_float(self, do_numeric_test): 433 | # psycopg.errors.InvalidParameterValue: unsupported binary operator: + 434 | if config.db.dialect.driver != "psycopg": 435 | super().test_numeric_as_float(do_numeric_test) 436 | 437 | def test_numeric_null_as_float(self, do_numeric_test): 438 | # 
439 |         if config.db.dialect.driver != "psycopg":
440 |             super().test_numeric_null_as_float(do_numeric_test)
441 | 
442 | 
443 | class QuotedNameArgumentTest(_QuotedNameArgumentTest):
444 |     def quote_fixtures(fn):
445 |         return testing.combinations(
446 |             ("quote ' one",),
447 |             ('quote " two', testing.requires.symbol_names_w_double_quote),
448 |         )(fn)
449 | 
450 |     @quote_fixtures
451 |     def test_get_indexes(self, name):
452 |         # could not decorrelate subquery
453 |         if not (config.db.dialect.driver == "asyncpg" and not config.db.dialect._is_v231plus):
454 |             super().test_get_indexes(name, None)
455 | 
456 | 
457 | class TrueDivTest(_TrueDivTest):
458 |     @skip("cockroachdb")
459 |     def test_floordiv_integer(self):
460 |         # we return SELECT 15 / 10 as Decimal('1.5'), not an Integer
461 |         pass
462 | 
463 |     @skip("cockroachdb")
464 |     def test_floordiv_integer_bound(self):
465 |         # we return SELECT 15 / 10 as Decimal('1.5'), not an Integer
466 |         pass
467 | 
468 | 
469 | class UnicodeSchemaTest(_UnicodeSchemaTest):
470 |     def test_reflect(self, connection):
471 |         if not (config.db.dialect.driver == "asyncpg" and not config.db.dialect._is_v231plus):
472 |             super().test_reflect(connection)
473 | 
--------------------------------------------------------------------------------
/test/test_with_hint.py:
--------------------------------------------------------------------------------
1 | from sqlalchemy import Column
2 | from sqlalchemy import Index
3 | from sqlalchemy import Integer
4 | from sqlalchemy import select
5 | from sqlalchemy import String
6 | from sqlalchemy import Table
7 | from sqlalchemy.testing import AssertsCompiledSQL
8 | from sqlalchemy.testing import config
9 | from sqlalchemy.testing import fixtures
10 | from sqlalchemy.testing import provide_metadata
11 | 
12 | 
13 | class WithHintTest(fixtures.TestBase, AssertsCompiledSQL):
14 |     @provide_metadata
15 |     def test_with_hint(self):
16 |         meta = self.metadata
17 |         t = Table(
18 |             "t",
19 |             meta,
20 |             Column("id", Integer),
21 |             Column("txt", String(50)),
22 |             Index("ix_t_txt", "txt"),
23 |         )
24 |         self.assert_compile(
25 |             select(t).with_hint(t, "ix_t_txt"),
26 |             "SELECT t.id, t.txt FROM t@ix_t_txt",
27 |         )
28 |         if config.db.dialect.driver == "psycopg2":
29 |             param_placeholder = "%(id_1)s"
30 |             cast_str = ""
31 |         elif config.db.dialect.driver == "asyncpg":
32 |             param_placeholder = "$1"
33 |             cast_str = "::INTEGER"
34 |         elif config.db.dialect.driver == "psycopg":
35 |             param_placeholder = "%(id_1)s"
36 |             cast_str = "::INTEGER"
37 |         else:
38 |             # fail with a clear message instead of a NameError below if the
39 |             # suite is ever run under a driver this test does not know about
40 |             raise NotImplementedError(f"unexpected driver: {config.db.dialect.driver}")
41 |         self.assert_compile(
42 |             select(t).with_hint(t, "ix_t_txt").where(t.c.id < 3),
43 |             f"SELECT t.id, t.txt FROM t@ix_t_txt WHERE t.id < {param_placeholder}{cast_str}",
44 |         )
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | # We do not currently test with pypy because psycopg2 does not work there.
3 | envlist =
4 |     py39
5 |     lint
6 | 
7 | [testenv]
8 | commands =
9 |     python -m pytest {posargs} test
10 | # For some reason pip fails to load the requirements file without the explicit -r in install_command below.
11 | setenv =
12 |     LANG = en_US.utf-8
13 | install_command=python -m pip install {env:TOX_PIP_OPTS:} {opts} {packages} -r test-requirements.txt
14 | 
15 | [testenv:lint]
16 | skip_install = True
17 | deps =
18 |     flake8==3.9.2
19 | commands =
20 |     flake8 --extend-ignore F405 sqlalchemy_cockroachdb test
21 | 
22 | [testenv:pip-compile]
23 | skip_install = True
24 | deps =
25 |     pip-tools==6.13.0
26 | commands =
27 |     pip-compile --upgrade --no-emit-index-url --no-header --resolver=backtracking dev-requirements.in
28 |     pip-compile --upgrade --no-emit-index-url --no-header --resolver=backtracking test-requirements.in
29 | 
--------------------------------------------------------------------------------
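WithHintTest above only checks SQL compilation, so for orientation here is a minimal sketch of the same index-hint behavior used end to end. It is an illustration, not a file from this repo: the connection URL assumes a local insecure single-node cluster like the one the CI workflow starts, and the table and index names are hypothetical.

from sqlalchemy import Column, Index, Integer, MetaData, String, Table, create_engine, select

# Assumes sqlalchemy-cockroachdb plus a psycopg2 driver are installed;
# "cockroachdb://" uses psycopg2 by default (use cockroachdb+psycopg:// or
# cockroachdb+asyncpg:// for the other drivers exercised in CI).
engine = create_engine("cockroachdb://root@localhost:26257/defaultdb?sslmode=disable")

metadata = MetaData()
t = Table(
    "t",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("txt", String(50)),
    Index("ix_t_txt", "txt"),
)
metadata.create_all(engine)

# with_hint() renders CockroachDB's "table@index" syntax, forcing the optimizer
# to read from ix_t_txt: SELECT t.id, t.txt FROM t@ix_t_txt WHERE t.id < ...
stmt = select(t).with_hint(t, "ix_t_txt").where(t.c.id < 3)
with engine.connect() as conn:
    print(conn.execute(stmt).fetchall())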