├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ └── all-issues.md └── workflows │ └── ci.yml ├── .gitignore ├── Changes.rst ├── LICENSE ├── Makefile ├── README.rst ├── common.mk ├── docs ├── conf.py └── index.rst ├── pyproject.toml ├── setup.py ├── sqlalchemy_aurora_data_api └── __init__.py └── test └── test.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [kislyuk] 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/all-issues.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: All issues 3 | about: All sqlalchemy-aurora-data-api issues 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | ruff: 7 | runs-on: ubuntu-22.04 8 | steps: 9 | - uses: actions/checkout@v4 10 | - name: ruff check 11 | uses: chartboost/ruff-action@v1 12 | with: 13 | version: 0.1.9 14 | - name: ruff format 15 | uses: chartboost/ruff-action@v1 16 | with: 17 | version: 0.1.9 18 | args: format --check 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Reminder: 2 | # - A leading slash means the pattern is anchored at the root. 3 | # - No leading slash means the pattern matches at any depth. 
4 | 5 | # Python files 6 | *.pyc 7 | __pycache__/ 8 | .tox/ 9 | *.egg-info/ 10 | /build/ 11 | /dist/ 12 | /.eggs/ 13 | 14 | # Sphinx documentation 15 | /docs/_build/ 16 | 17 | # IDE project files 18 | /.pydevproject 19 | 20 | # vim python-mode plugin 21 | /.ropeproject 22 | 23 | # IntelliJ IDEA / PyCharm project files 24 | /.idea 25 | /*.iml 26 | 27 | # JS/node/npm/web dev files 28 | node_modules 29 | npm-debug.log 30 | 31 | # OS X metadata files 32 | .DS_Store 33 | -------------------------------------------------------------------------------- /Changes.rst: -------------------------------------------------------------------------------- 1 | Changes for v0.5.0 (2023-12-29) 2 | =============================== 3 | 4 | - Update dependencies 5 | 6 | - Test and release infrastructure improvements 7 | 8 | Changes for v0.4.1 (2022-05-18) 9 | =============================== 10 | 11 | - Support for fractional seconds when binding time and datetime data 12 | types (#40) 13 | 14 | Changes for v0.4.0 (2022-02-27) 15 | =============================== 16 | 17 | - Bump dependency version 18 | 19 | Changes for v0.3.4 (2022-01-23) 20 | =============================== 21 | 22 | Bump dependency version 23 | 24 | Changes for v0.3.3 (2022-01-23) 25 | =============================== 26 | 27 | - Update build tooling 28 | 29 | Changes for v0.3.2 (2022-01-23) 30 | =============================== 31 | 32 | - Bump dependency to incorporate fix; add regression test 33 | 34 | Changes for v0.3.1 (2021-12-25) 35 | =============================== 36 | 37 | - Update release script 38 | 39 | Changes for v0.3.0 (2021-12-25) 40 | =============================== 41 | 42 | - Add supports_statement_cache flag. 
Fixes #29 43 | 44 | - Add error code extraction (#31) 45 | 46 | - Format with DataAPI supported format for dates (#24) 47 | 48 | - Set supports_sane_multi_rowcount = False on 49 | AuroraPostgresDataAPIDialect (#28) 50 | 51 | - Tell SQLAlchemy: mysql supports native decimal (fixes #25) (#26) 52 | 53 | Changes for v0.2.7 (2020-12-12) 54 | =============================== 55 | 56 | Bump dependency version 57 | 58 | Changes for v0.2.6 (2020-11-13) 59 | =============================== 60 | 61 | Bump dependency version 62 | 63 | Changes for v0.2.5 (2020-10-03) 64 | =============================== 65 | 66 | Bump dependency version 67 | 68 | Changes for v0.2.4 (2020-10-02) 69 | =============================== 70 | 71 | Bump dependency version 72 | 73 | Changes for v0.2.3 (2020-10-02) 74 | =============================== 75 | 76 | - Bump dependency version 77 | 78 | - Fix strptime handling in Python 3.6 and earlier 79 | 80 | Changes for v0.2.2 (2020-10-02) 81 | =============================== 82 | 83 | - Bump aurora-data-api dependency 84 | 85 | Changes for v0.2.1 (2020-10-01) 86 | =============================== 87 | 88 | - Fall back to strptime if fromisoformat is not available 89 | 90 | - Merge pull request #8 from olinger/master (Add colspecs for mySQL 91 | date, time and datetime types) 92 | 93 | - Merge pull request #11 from romibuzi/master (Return error code 94 | instead of enum entry) 95 | 96 | Changes for v0.2.0 (2020-01-02) 97 | =============================== 98 | 99 | - Bump aurora-data-api dependency 100 | 101 | Changes for v0.1.6 (2020-01-02) 102 | =============================== 103 | 104 | - Add enum support 105 | 106 | Changes for v0.1.5 (2020-01-01) 107 | =============================== 108 | 109 | Fix handling of non-dialect-specific datetime types 110 | 111 | Changes for v0.1.4 (2019-11-18) 112 | =============================== 113 | 114 | - Conform to dialect interface definition 115 | 116 | - MySQL: Return actual client charset 117 | 118 | Changes for 
v0.1.3 (2019-11-10) 119 | =============================== 120 | 121 | - Begin MySQL support 122 | 123 | Changes for v0.1.2 (2019-10-31) 124 | =============================== 125 | 126 | - Fix timestamp microsecond handling 127 | 128 | Changes for v0.1.1 (2019-10-31) 129 | =============================== 130 | 131 | - Begin array support 132 | 133 | - Improve datetime support 134 | 135 | Changes for v0.1.0 (2019-10-29) 136 | =============================== 137 | 138 | - Fix postgresql type compatibility issues 139 | 140 | Changes for v0.0.2 (2019-10-24) 141 | =============================== 142 | 143 | Add MySQL dialect 144 | 145 | Changes for v0.0.1 (2019-10-10) 146 | =============================== 147 | 148 | - Begin sqlalchemy-aurora-data-api 149 | 150 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL=/bin/bash 2 | 3 | lint: 4 | ruff . 
5 | 6 | test: lint 7 | python ./test/test.py -v 8 | 9 | init_docs: 10 | cd docs; sphinx-quickstart 11 | 12 | docs: 13 | sphinx-build docs docs/html 14 | 15 | install: 16 | -rm -rf dist 17 | python -m build 18 | pip install --upgrade dist/*.whl 19 | 20 | .PHONY: test release docs 21 | 22 | include common.mk 23 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | sqlalchemy-aurora-data-api - An AWS Aurora Serverless Data API dialect for SQLAlchemy 2 | ===================================================================================== 3 | 4 | This package provides a `SQLAlchemy `_ 5 | `dialect `_ for accessing PostgreSQL and MySQL databases via the 6 | `AWS Aurora Data API `_. 7 | 8 | Installation 9 | ------------ 10 | :: 11 | 12 | pip install sqlalchemy-aurora-data-api 13 | 14 | Prerequisites 15 | ------------- 16 | * Set up an 17 | `AWS Aurora Serverless cluster `_ 18 | and enable Data API access for it. If you have previously set up an Aurora Serverless cluster, you can enable Data API 19 | with the following `AWS CLI `_ command:: 20 | 21 | aws rds modify-db-cluster --db-cluster-identifier DB_CLUSTER_NAME --enable-http-endpoint --apply-immediately 22 | 23 | * Save the database credentials in 24 | `AWS Secrets Manager `_ using a format 25 | expected by the Data API (a JSON object with the keys ``username`` and ``password``):: 26 | 27 | aws secretsmanager create-secret --name rds-db-credentials/MY_DB 28 | aws secretsmanager put-secret-value --secret-id rds-db-credentials/MY_DB --secret-string "$(jq -n '.username=env.PGUSER | .password=env.PGPASSWORD')" 29 | 30 | * Configure your AWS command line credentials using 31 | `standard AWS conventions `_. 
32 | You can verify that everything works correctly by running a test query via the AWS CLI:: 33 | 34 | aws rds-data execute-statement --resource-arn RESOURCE_ARN --secret-arn SECRET_ARN --sql "select * from pg_catalog.pg_tables" 35 | 36 | * Here, RESOURCE_ARN refers to the Aurora RDS database ARN, which can be found in the 37 | `AWS RDS Console `_ (click on your database, then "Configuration") 38 | or in the CLI by running ``aws rds describe-db-clusters``. SECRET_ARN refers to the AWS Secrets Manager secret 39 | created above. 40 | 41 | * When running deployed code (on an EC2 instance, ECS/EKS container, or Lambda), you can use the managed IAM policy 42 | **AmazonRDSDataFullAccess** to grant your IAM role permissions to access the RDS Data API (while this policy is 43 | convenient for testing, we recommend that you create your own scoped down least-privilege policy for production 44 | applications). 45 | 46 | Usage 47 | ----- 48 | 49 | The package registers two SQLAlchemy dialects, ``mysql+auroradataapi://`` and ``postgresql+auroradataapi://``. Two 50 | ``sqlalchemy.create_engine()`` `connect_args `_ 51 | keyword arguments are required to connect to the database: 52 | 53 | * ``aurora_cluster_arn`` (also referred to as ``resourceArn`` in the 54 | `Data API documentation `_) 55 | 56 | * If not given as a keyword argument, this can also be specified using the ``AURORA_CLUSTER_ARN`` environment variable 57 | 58 | * ``secret_arn`` (the database credentials secret) 59 | 60 | * If not given as a keyword argument, this can also be specified using the ``AURORA_SECRET_ARN`` environment variable 61 | 62 | All connection string contents other than the protocol (dialect) and the database name (path component, ``my_db_name`` 63 | in the example below) are ignored. 64 | 65 | .. 
code-block:: python 66 | 67 | from sqlalchemy import create_engine 68 | 69 | cluster_arn = "arn:aws:rds:us-east-1:123456789012:cluster:my-aurora-serverless-cluster" 70 | secret_arn = "arn:aws:secretsmanager:us-east-1:123456789012:secret:rds-db-credentials/MY_DB" 71 | 72 | engine = create_engine('postgresql+auroradataapi://:@/my_db_name', 73 | echo=True, 74 | connect_args=dict(aurora_cluster_arn=cluster_arn, secret_arn=secret_arn)) 75 | 76 | with engine.connect() as conn: 77 | for result in conn.execute("select * from pg_catalog.pg_tables"): 78 | print(result) 79 | 80 | Motivation 81 | ---------- 82 | The `RDS Data API `_ is the link between the 83 | AWS Lambda serverless environment and the sophisticated features provided by PostgreSQL and MySQL. The Data API tunnels 84 | SQL over HTTP, which has advantages in the context of AWS Lambda: 85 | 86 | * It eliminates the need to open database ports to the AWS Lambda public IP address pool 87 | * It uses stateless HTTP connections instead of stateful internal TCP connection pools used by most database drivers 88 | (the stateful pools become invalid after going through 89 | `AWS Lambda freeze-thaw cycles `_, causing 90 | connection errors and burdening the database server with abandoned invalid connections) 91 | * It uses AWS role-based authentication, eliminating the need for the Lambda to handle database credentials directly 92 | 93 | Debugging 94 | --------- 95 | 96 | This package uses standard Python logging conventions. 
To enable debug output, set the package log level to DEBUG:: 97 | 98 | logging.basicConfig() 99 | 100 | logging.getLogger("aurora_data_api").setLevel(logging.DEBUG) 101 | 102 | Links 103 | ----- 104 | * `Project home page (GitHub) `_ 105 | * `Documentation (Read the Docs) `_ 106 | * `Package distribution (PyPI) `_ 107 | * `Change log `_ 108 | * `aurora-data-api `_, the Python DB-API 2.0 client that 109 | sqlalchemy-aurora-data-api depends on 110 | 111 | Bugs 112 | ~~~~ 113 | Please report bugs, issues, feature requests, etc. on 114 | `GitHub `_. 115 | 116 | License 117 | ------- 118 | Licensed under the terms of the `Apache License, Version 2.0 `_. 119 | 120 | .. image:: https://travis-ci.org/chanzuckerberg/sqlalchemy-aurora-data-api.png 121 | :target: https://travis-ci.org/chanzuckerberg/sqlalchemy-aurora-data-api 122 | .. image:: https://codecov.io/github/chanzuckerberg/sqlalchemy-aurora-data-api/coverage.svg?branch=master 123 | :target: https://codecov.io/github/chanzuckerberg/sqlalchemy-aurora-data-api?branch=master 124 | .. image:: https://img.shields.io/pypi/v/sqlalchemy-aurora-data-api.svg 125 | :target: https://pypi.python.org/pypi/sqlalchemy-aurora-data-api 126 | .. image:: https://img.shields.io/pypi/l/sqlalchemy-aurora-data-api.svg 127 | :target: https://pypi.python.org/pypi/sqlalchemy-aurora-data-api 128 | .. 
image:: https://readthedocs.org/projects/sqlalchemy-aurora-data-api/badge/?version=latest 129 | :target: https://sqlalchemy-aurora-data-api.readthedocs.org/ 130 | -------------------------------------------------------------------------------- /common.mk: -------------------------------------------------------------------------------- 1 | SHELL=/bin/bash -eo pipefail 2 | 3 | release-major: 4 | $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v@{[$$1+1]}.0.0"')) 5 | $(MAKE) release 6 | 7 | release-minor: 8 | $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v$$1.@{[$$2+1]}.0"')) 9 | $(MAKE) release 10 | 11 | release-patch: 12 | $(eval export TAG=$(shell git describe --tags --match 'v*.*.*' | perl -ne '/^v(\d+)\.(\d+)\.(\d+)/; print "v$$1.$$2.@{[$$3+1]}"')) 13 | $(MAKE) release 14 | 15 | release: 16 | @if ! git diff --cached --exit-code; then echo "Commit staged files before proceeding"; exit 1; fi 17 | @if [[ -z $$TAG ]]; then echo "Use release-{major,minor,patch}"; exit 1; fi 18 | @if ! type -P pandoc; then echo "Please install pandoc"; exit 1; fi 19 | @if ! type -P sponge; then echo "Please install moreutils"; exit 1; fi 20 | @if ! type -P gh; then echo "Please install gh"; exit 1; fi 21 | @if ! 
type -P twine; then echo "Please install twine"; exit 1; fi 22 | git pull 23 | git clean -x --force $$(python setup.py --name) 24 | sed -i -e "s/version=\([\'\"]\)[0-9]*\.[0-9]*\.[0-9]*/version=\1$${TAG:1}/" setup.py 25 | git add setup.py 26 | TAG_MSG=$$(mktemp); \ 27 | echo "# Changes for ${TAG} ($$(date +%Y-%m-%d))" > $$TAG_MSG; \ 28 | git log --pretty=format:%s $$(git describe --abbrev=0)..HEAD >> $$TAG_MSG; \ 29 | $${EDITOR:-emacs} $$TAG_MSG; \ 30 | if [[ -f Changes.md ]]; then cat $$TAG_MSG <(echo) Changes.md | sponge Changes.md; git add Changes.md; fi; \ 31 | if [[ -f Changes.rst ]]; then cat <(pandoc --from markdown --to rst $$TAG_MSG) <(echo) Changes.rst | sponge Changes.rst; git add Changes.rst; fi; \ 32 | git commit -m ${TAG}; \ 33 | git tag --annotate --file $$TAG_MSG ${TAG} 34 | git push --follow-tags 35 | $(MAKE) install 36 | gh release create ${TAG} dist/*.whl --notes="$$(git tag --list ${TAG} -n99 | perl -pe 's/^\S+\s*// if $$. == 1' | sed 's/^\s\s\s\s//')" 37 | $(MAKE) release-pypi 38 | $(MAKE) release-docs 39 | 40 | release-pypi: 41 | python -m build 42 | twine upload dist/*.tar.gz dist/*.whl --verbose 43 | 44 | release-docs: 45 | $(MAKE) docs 46 | -git branch -D gh-pages 47 | git checkout -B gh-pages-stage 48 | touch docs/html/.nojekyll 49 | git add --force docs/html 50 | git commit -m "Docs for ${TAG}" 51 | git push --force origin $$(git subtree split --prefix docs/html --branch gh-pages):refs/heads/gh-pages 52 | git checkout - 53 | 54 | .PHONY: release 55 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | import guzzle_sphinx_theme 2 | 3 | project = "sqlalchemy-aurora-data-api" 4 | copyright = "CZI" 5 | author = "Andrey Kislyuk" 6 | version = "" 7 | release = "" 8 | language = None 9 | master_doc = "index" 10 | extensions = ["sphinx.ext.autodoc", "sphinx.ext.viewcode"] 11 | source_suffix = [".rst", ".md"] 12 | 
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 13 | pygments_style = "sphinx" 14 | html_theme_path = guzzle_sphinx_theme.html_theme_path() 15 | html_theme = "guzzle_sphinx_theme" 16 | html_theme_options = { 17 | "project_nav_name": project, 18 | "projectlink": "https://github.com/chanzuckerberg/" + project, 19 | } 20 | html_sidebars = { 21 | "**": [ 22 | "logo-text.html", 23 | # "globaltoc.html", 24 | "localtoc.html", 25 | "searchbox.html", 26 | ] 27 | } 28 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. include:: ../README.rst 2 | 3 | API documentation 4 | ================= 5 | 6 | .. automodule:: sqlalchemy_aurora_data_api 7 | :members: 8 | :special-members: 9 | :exclude-members: __weakref__ 10 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.ruff] 2 | line-length=120 3 | [tool.ruff.per-file-ignores] 4 | "sqlalchemy_aurora_data_api/__init__.py" = ["E401", "F401"] 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from setuptools import setup, find_packages 4 | 5 | setup( 6 | name="sqlalchemy-aurora-data-api", 7 | version="0.5.0", 8 | url="https://github.com/chanzuckerberg/sqlalchemy-aurora-data-api", 9 | license="Apache Software License", 10 | author="Andrey Kislyuk", 11 | author_email="kislyuk@gmail.com", 12 | description="An AWS Aurora Serverless Data API dialect for SQLAlchemy", 13 | long_description=open("README.rst").read(), 14 | install_requires=["sqlalchemy", "aurora-data-api >= 0.5.0"], 15 | extras_require={}, 16 | packages=find_packages(exclude=["test"]), 17 | entry_points={ 18 | "sqlalchemy.dialects": [
"mysql.auroradataapi = sqlalchemy_aurora_data_api:AuroraMySQLDataAPIDialect", 20 | "postgresql.auroradataapi = sqlalchemy_aurora_data_api:AuroraPostgresDataAPIDialect", 21 | ] 22 | }, 23 | platforms=["MacOS X", "Posix"], 24 | test_suite="test", 25 | classifiers=[ 26 | "Intended Audience :: Developers", 27 | "License :: OSI Approved :: Apache Software License", 28 | "Operating System :: MacOS :: MacOS X", 29 | "Operating System :: POSIX", 30 | "Programming Language :: Python", 31 | "Programming Language :: Python :: 3.8", 32 | "Programming Language :: Python :: 3.9", 33 | "Programming Language :: Python :: 3.10", 34 | "Programming Language :: Python :: 3.11", 35 | "Programming Language :: Python :: 3.12", 36 | "Topic :: Software Development :: Libraries :: Python Modules", 37 | ], 38 | ) 39 | -------------------------------------------------------------------------------- /sqlalchemy_aurora_data_api/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | sqlalchemy-aurora-data-api 3 | """ 4 | 5 | import json, datetime, re 6 | 7 | from sqlalchemy import cast, func, util 8 | import sqlalchemy.sql.sqltypes as sqltypes 9 | from sqlalchemy.dialects.postgresql.base import PGDialect 10 | from sqlalchemy.dialects.postgresql import JSON, JSONB, UUID, DATE, TIME, TIMESTAMP, ARRAY, ENUM 11 | from sqlalchemy.dialects.mysql.base import MySQLDialect 12 | 13 | import aurora_data_api 14 | 15 | 16 | class _ADA_SA_JSON(sqltypes.JSON): 17 | def bind_expression(self, value): 18 | return cast(value, sqltypes.JSON) 19 | 20 | 21 | class _ADA_JSON(JSON): 22 | def bind_expression(self, value): 23 | return cast(value, JSON) 24 | 25 | 26 | class _ADA_JSONB(JSONB): 27 | def bind_expression(self, value): 28 | return cast(value, JSONB) 29 | 30 | 31 | class _ADA_UUID(UUID): 32 | def bind_expression(self, value): 33 | return cast(value, UUID) 34 | 35 | 36 | class _ADA_ENUM(ENUM): 37 | def bind_expression(self, value): 38 | return cast(value, self) 
39 | 
40 | 
41 | # TODO: is TZ awareness needed here?
  | # Shared machinery for the date/time/datetime types below. Concrete
  | # subclasses must define two class attributes:
  | #   py_type -- the Python type produced/consumed (datetime.date/time/datetime)
  | #   sa_type -- the generic SQLAlchemy type used in the bind-side CAST
42 | class _ADA_DATETIME_MIXIN:
  |     # Matches an ISO timestamp that has a fractional-seconds part; used by
  |     # result_processor() to find values whose fraction needs zero-padding.
43 |     iso_ts_re = re.compile(r"\d{4}-\d\d-\d\d \d\d:\d\d:\d\d\.\d+")
44 | 
45 |     @staticmethod
46 |     def ms(value):
47 |         # Three digit fractional second component, truncated and zero padded. This is what the data api requires.
  |         # zfill(6) restores leading zeros for microsecond values < 100000,
  |         # then [:-3] drops the last three digits (microseconds -> milliseconds).
48 |         return str(value.microsecond).zfill(6)[:-3]
49 | 
50 |     def bind_processor(self, dialect):
  |         # Serialize the Python value to an ISO string for the wire; values
  |         # that are not already py_type instances pass through unchanged.
51 |         def process(value):
52 |             return value.isoformat() if isinstance(value, self.py_type) else value
53 | 
54 |         return process
55 | 
56 |     def bind_expression(self, value):
57 |         return cast(value, self.sa_type)
58 | 
59 |     def result_processor(self, dialect, coltype):
60 |         def process(value):
61 |             # When the microsecond component ends in zeros, they are omitted from the return value,
62 |             # and datetime.datetime.fromisoformat can't parse the result (example: '2019-10-31 09:37:17.31869
63 |             # '). Pad it.
  |             # NOTE(review): ljust(26, "0") assumes the matched prefix should
  |             # be exactly 'YYYY-MM-DD HH:MM:SS.ffffff' (26 chars, 6 fractional
  |             # digits); any timezone suffix after the match is left untouched.
64 |             if isinstance(value, str) and self.iso_ts_re.match(value):
65 |                 value = self.iso_ts_re.sub(lambda match: match.group(0).ljust(26, "0"), value)
66 |             if isinstance(value, str):
67 |                 try:
68 |                     return self.py_type.fromisoformat(value)
69 |                 except AttributeError:  # fromisoformat not supported on Python < 3.7
  |                     # Fallback parsers for the pre-3.7 case, keyed on py_type;
  |                     # the final two branches handle datetimes with and
  |                     # without a fractional-seconds component.
70 |                     if self.py_type == datetime.date:
71 |                         return datetime.datetime.strptime(value, "%Y-%m-%d").date()
72 |                     if self.py_type == datetime.time:
73 |                         return datetime.datetime.strptime(value, "%H:%M:%S").time()
74 |                     if "." in value:
75 |                         return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S.%f")
76 |                     return datetime.datetime.strptime(value, "%Y-%m-%d %H:%M:%S")
77 |             return value
78 | 
79 |         return process
80 | 
81 | 
82 | class _ADA_DATE(_ADA_DATETIME_MIXIN, DATE):
83 |     py_type = datetime.date
84 |     sa_type = sqltypes.Date
85 | 
86 |     def bind_processor(self, dialect):
87 |         def process(value):
88 |             return value.strftime("%Y-%m-%d") if isinstance(value, self.py_type) else value
89 | 
90 |         return process
91 | 
92 | 
93 | class _ADA_TIME(_ADA_DATETIME_MIXIN, TIME):
94 |     py_type = datetime.time
95 |     sa_type = sqltypes.Time
96 | 
97 |     def bind_processor(self, dialect):
  |         # Binds as HH:MM:SS.mmm -- see ms() for the millisecond truncation.
98 |         def process(value):
99 |             return value.strftime("%H:%M:%S.") + self.ms(value) if isinstance(value, self.py_type) else value
100 | 
101 |         return process
102 | 
103 | 
104 | class _ADA_TIMESTAMP(_ADA_DATETIME_MIXIN, TIMESTAMP):
105 |     py_type = datetime.datetime
106 |     sa_type = sqltypes.DateTime
107 | 
108 |     def bind_processor(self, dialect):
  |         # Binds as 'YYYY-MM-DD HH:MM:SS.mmm' (millisecond precision via ms()).
109 |         def process(value):
110 |             return value.strftime("%Y-%m-%d %H:%M:%S.") + self.ms(value) if isinstance(value, self.py_type) else value
111 | 
112 |         return process
113 | 
114 | 
  | # Arrays are sent as a single string joined on "\v" (vertical tab, chosen as
  | # an unlikely-to-occur delimiter) and rebuilt server-side via
  | # string_to_array(); list elements must therefore be strings.
115 | class _ADA_ARRAY(ARRAY):
116 |     def bind_processor(self, dialect):
117 |         def process(value):
118 |             # FIXME: escape strings properly here
119 |             return "\v".join(value) if isinstance(value, list) else value
120 | 
121 |         return process
122 | 
123 |     def bind_expression(self, value):
124 |         return func.string_to_array(value, "\v")
125 | 
126 | 
127 | class AuroraMySQLDataAPIDialect(MySQLDialect):
128 |     # See https://docs.sqlalchemy.org/en/13/core/internals.html#sqlalchemy.engine.interfaces.Dialect
129 |     driver = "aurora_data_api"
130 |     default_schema_name = None
131 |     supports_native_decimal = True
  |     # Route the generic date/time types through the Data-API-aware variants
  |     # defined above.
132 |     colspecs = util.update_copy(
133 |         MySQLDialect.colspecs,
134 |         {
135 |             sqltypes.Date: _ADA_DATE,
136 |             sqltypes.Time: _ADA_TIME,
137 |             sqltypes.DateTime: _ADA_TIMESTAMP,
138 |         },
139 |     )
140 |     supports_statement_cache = True
141 | 
142 |     @classmethod
143 |     def import_dbapi(cls):
  |         # The underlying DB-API 2.0 module for this dialect.
144 |         return aurora_data_api
145 | 
146 |     def _detect_charset(self, connection):
  |         # NOTE(review): passes a raw SQL string to connection.execute();
  |         # SQLAlchemy 2.x requires text()/exec_driver_sql() for this -- verify
  |         # which SQLAlchemy versions this package targets.
147 |         return connection.execute("SHOW VARIABLES LIKE 'character_set_client'").fetchone()[1]
148 | 
149 |     def _extract_error_code(self, exception):
  |         # The driver stores the server error code on args[0].value.
150 |         return exception.args[0].value
151 | 
152 | 
153 | class AuroraPostgresDataAPIDialect(PGDialect):
154 |     # See https://docs.sqlalchemy.org/en/13/core/internals.html#sqlalchemy.engine.interfaces.Dialect
155 |     driver = "aurora_data_api"
156 |     default_schema_name = None
  |     # Route JSON/UUID/date-time/enum/array types through the Data-API-aware
  |     # variants defined above.
157 |     colspecs = util.update_copy(
158 |         PGDialect.colspecs,
159 |         {
160 |             sqltypes.JSON: _ADA_SA_JSON,
161 |             JSON: _ADA_JSON,
162 |             JSONB: _ADA_JSONB,
163 |             UUID: _ADA_UUID,
164 |             sqltypes.Date: _ADA_DATE,
165 |             sqltypes.Time: _ADA_TIME,
166 |             sqltypes.DateTime: _ADA_TIMESTAMP,
167 |             sqltypes.Enum: _ADA_ENUM,
168 |             ARRAY: _ADA_ARRAY,
169 |         },
170 |     )
171 |     supports_sane_multi_rowcount = False
172 |     supports_statement_cache = True
173 | 
174 |     @classmethod
175 |     def import_dbapi(cls):
  |         # The underlying DB-API 2.0 module for this dialect.
176 |         return aurora_data_api
177 | 
178 |     def _extract_error_code(self, exception):
  |         # The driver stores the server error code on args[0].value.
179 |         return exception.args[0].value
180 | 
181 | 
182 | def register_dialects():
  |     """Register both dialects with SQLAlchemy's registry under the same
  |     names as the setup.py entry points, for use without installation."""
183 |     from sqlalchemy.dialects import registry
184 | 
185 |     registry.register("mysql.auroradataapi", __name__, AuroraMySQLDataAPIDialect.__name__)
186 |     registry.register("postgresql.auroradataapi", __name__, AuroraPostgresDataAPIDialect.__name__)
187 | 
--------------------------------------------------------------------------------
/test/test.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import unittest
4 | import logging
5 | import datetime
6 | import enum
7 | from uuid import UUID as uuid_type, uuid4
8 | 
9 | from sqlalchemy import (
10 |     create_engine,
11 |     Column,
12 |     Integer,
13 |     String,
14 |     Boolean,
15 |     Float,
16 |     LargeBinary,
17 |     Numeric,
18 |     Date,
19 |     Time,
20 |     DateTime,
21 |     Text,
22 |     # Enum,
23 | )
24 | from sqlalchemy.dialects.postgresql import UUID, JSONB, JSON, DATE, TIME, TIMESTAMP, ARRAY
25 | from sqlalchemy.orm import sessionmaker, declarative_base
26 | from sqlalchemy.sql import text
27 | 
  | # Make the in-tree package importable when the tests run from a checkout.
28 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
29 | 
30 | from sqlalchemy_aurora_data_api import register_dialects, _ADA_TIMESTAMP  # noqa
31 | 
32 | logging.basicConfig(level=logging.INFO)
33 | logging.getLogger("aurora_data_api").setLevel(logging.DEBUG)
34 | logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG)
35 | 
  | # Attributes every SQLAlchemy Dialect is expected to expose; checked by
  | # test_interface_conformance below. Commented-out entries were present in
  | # some SQLAlchemy version but not asserted here -- presumably removed in a
  | # later SQLAlchemy release (verify against the targeted version).
36 | dialect_interface_attributes = {
37 |     "name",
38 |     "driver",
39 |     "positional",
40 |     "paramstyle",
41 |     # "convert_unicode",
42 |     # "encoding",
43 |     "statement_compiler",
44 |     "ddl_compiler",
45 |     "server_version_info",
46 |     "default_schema_name",
47 |     "execution_ctx_cls",
48 |     "execute_sequence_format",
49 |     "preparer",
50 |     "supports_alter",
51 |     "max_identifier_length",
52 |     # "supports_unicode_statements",
53 |     # "supports_unicode_binds",
54 |     "supports_sane_rowcount",
55 |     "supports_sane_multi_rowcount",
56 |     "preexecute_autoincrement_sequences",
57 |     # "implicit_returning",
58 |     "colspecs",
59 |     "supports_default_values",
60 |     "supports_sequences",
61 |     "sequences_optional",
62 |     "supports_native_enum",
63 |     "supports_native_boolean",
64 |     "dbapi_exception_translation_map",
65 | }
66 | 
  | # Callables every SQLAlchemy Dialect is expected to expose; checked by
  | # test_interface_conformance below.
67 | dialect_interface_methods = {
68 |     "connect",
69 |     "create_connect_args",
70 |     "create_xid",
71 |     "denormalize_name",
72 |     "do_begin",
73 |     "do_begin_twophase",
74 |     "do_close",
75 |     "do_commit",
76 |     "do_commit_twophase",
77 |     "do_execute",
78 |     "do_execute_no_params",
79 |     "do_executemany",
80 |     "do_prepare_twophase",
81 |     "do_recover_twophase",
82 |     "do_release_savepoint",
83 |     "do_rollback",
84 |     "do_rollback_to_savepoint",
85 |     "do_rollback_twophase",
86 |     "do_savepoint",
87 |     "engine_created",
88 |     "get_check_constraints",
89 |     "get_columns",
90 |     "get_dialect_cls",
91 |     "get_foreign_keys",
92 |     "get_indexes",
93 | "get_isolation_level", 94 | "get_pk_constraint", 95 | "get_table_comment", 96 | "get_table_names", 97 | "get_temp_table_names", 98 | "get_temp_view_names", 99 | "get_unique_constraints", 100 | "get_view_definition", 101 | "get_view_names", 102 | "has_sequence", 103 | "has_table", 104 | "initialize", 105 | "is_disconnect", 106 | "normalize_name", 107 | # "reflect_table", 108 | "reset_isolation_level", 109 | "set_isolation_level", 110 | "type_descriptor", 111 | } 112 | 113 | BasicBase = declarative_base() 114 | Base = declarative_base() 115 | 116 | 117 | class Socks(enum.Enum): 118 | red = 1 119 | green = 2 120 | black = 3 121 | 122 | 123 | class BasicUser(BasicBase): 124 | __tablename__ = "sqlalchemy_aurora_data_api_testI" 125 | 126 | id = Column(Integer, primary_key=True) 127 | name = Column(String(64)) 128 | fullname = Column(String(64)) 129 | nickname = Column(String(64)) 130 | birthday = Column(Date) 131 | eats_breakfast_at = Column(Time) 132 | married_at = Column(DateTime) 133 | 134 | 135 | class User(Base): 136 | __tablename__ = "sqlalchemy_aurora_data_api_testJ" 137 | id = Column(Integer, primary_key=True) 138 | name = Column(String) 139 | fullname = Column(String) 140 | nickname = Column(String) 141 | doc = Column(JSONB) 142 | doc2 = Column(JSON) 143 | uuid = Column(UUID) 144 | uuid2 = Column(UUID(as_uuid=True), default=uuid4) 145 | flag = Column(Boolean, nullable=True) 146 | nonesuch = Column(Boolean, nullable=True) 147 | birthday = Column(DATE) 148 | wakes_up_at = Column(TIME) 149 | added = Column(TIMESTAMP) 150 | floated = Column(Float) 151 | nybbled = Column(LargeBinary) 152 | friends = Column(ARRAY(String)) 153 | num_friends = Numeric(asdecimal=True) 154 | num_laptops = Numeric(asdecimal=False) 155 | first_date = Column(Date) 156 | note = Column(Text) 157 | # socks = Column(Enum(Socks)) 158 | 159 | 160 | class TestAuroraDataAPI(unittest.TestCase): 161 | @classmethod 162 | def tearDownClass(cls): 163 | pass 164 | 165 | def 
test_interface_conformance(self): 166 | for attr in dialect_interface_attributes: 167 | self.assertIn(attr, dir(self.engine.dialect)) 168 | 169 | for attr in dialect_interface_methods: 170 | self.assertIn(attr, dir(self.engine.dialect)) 171 | assert callable(getattr(self.engine.dialect, attr)) 172 | 173 | 174 | class TestAuroraDataAPIPostgresDialect(TestAuroraDataAPI): 175 | dialect = "postgresql+auroradataapi://" 176 | # dialect = "postgresql+psycopg2://" + getpass.getuser() 177 | 178 | @classmethod 179 | def setUpClass(cls): 180 | register_dialects() 181 | cls.db_name = os.environ.get("AURORA_DB_NAME", __name__) 182 | cls.engine = create_engine(cls.dialect + ":@/" + cls.db_name) 183 | 184 | def test_execute(self): 185 | with self.engine.connect() as conn: 186 | for result in conn.execute(text("select * from pg_catalog.pg_tables")): 187 | print(result) 188 | 189 | def test_orm(self): 190 | uuid = uuid4() 191 | doc = {"foo": [1, 2, 3]} 192 | blob = b"0123456789ABCDEF" * 1024 193 | friends = ["Scarlett O'Hara", 'Ada "Hacker" Lovelace'] 194 | Base.metadata.create_all(self.engine) 195 | added = datetime.datetime.now().replace(microsecond=123456) 196 | ed_user = User( 197 | name="ed", 198 | fullname="Ed Jones", 199 | nickname="edsnickname", 200 | doc=doc, 201 | doc2=doc, 202 | uuid=str(uuid), 203 | flag=True, 204 | birthday=datetime.datetime.fromtimestamp(0), 205 | added=added, 206 | floated=1.2, 207 | nybbled=blob, 208 | friends=friends, 209 | num_friends=500, 210 | num_laptops=9000, 211 | first_date=added, 212 | note="note", 213 | # socks=Socks.red, 214 | ) 215 | Session = sessionmaker(bind=self.engine) 216 | session = Session() 217 | 218 | session.query(User).delete() 219 | session.commit() 220 | 221 | session.add(ed_user) 222 | self.assertEqual(session.query(User).filter_by(name="ed").first().name, "ed") 223 | session.commit() 224 | self.assertGreater(session.query(User).filter(User.name.like("%ed")).count(), 0) 225 | u = 
session.query(User).filter(User.name.like("%ed")).first() 226 | self.assertEqual(u.doc, doc) 227 | self.assertEqual(u.doc2, doc) 228 | self.assertEqual(u.flag, True) 229 | self.assertEqual(u.nonesuch, None) 230 | self.assertEqual(u.birthday, datetime.date.fromtimestamp(0)) 231 | self.assertEqual(u.added, added.replace(microsecond=123000)) 232 | self.assertEqual(u.floated, 1.2) 233 | self.assertEqual(u.nybbled, blob) 234 | self.assertEqual(u.friends, friends) 235 | self.assertEqual(u.num_friends, 500) 236 | self.assertEqual(u.num_laptops, 9000) 237 | self.assertEqual(u.first_date, added.date()) 238 | self.assertEqual(u.note, "note") 239 | print("FIXME: re-enable test for enums support", self.assertEqual, "u.socks", Socks.red) 240 | self.assertEqual(u.uuid, str(uuid)) 241 | print("FIXME: re-enable test for uuid support", self.assertIsInstance, u.uuid2, uuid_type) 242 | 243 | # u.socks = Socks.green 244 | session.commit() 245 | 246 | session2 = Session() 247 | # u2 = 248 | session2.query(User).filter(User.name.like("%ed")).first() 249 | print("FIXME: re-enable test for enums support", self.assertEqual, "u2.socks", Socks.green) 250 | 251 | @unittest.skipIf(sys.version_info < (3, 7), "Skipping test that requires Python 3.7+") 252 | def test_timestamp_microsecond_padding(self): 253 | ts = "2019-10-31 09:37:17.3186" 254 | processor = _ADA_TIMESTAMP.result_processor(_ADA_TIMESTAMP, None, None) 255 | self.assertEqual(processor(ts), datetime.datetime.fromisoformat(ts.ljust(26, "0"))) 256 | 257 | 258 | class TestAuroraDataAPIMySQLDialect(TestAuroraDataAPI): 259 | dialect = "mysql+auroradataapi://" 260 | 261 | @classmethod 262 | def setUpClass(cls): 263 | register_dialects() 264 | cls.db_name = os.environ.get("AURORA_DB_NAME", __name__) 265 | cls.engine = create_engine(cls.dialect + ":@/" + cls.db_name + "?charset=utf8mb4") 266 | 267 | def test_execute(self): 268 | with self.engine.connect() as conn: 269 | for result in conn.execute("select * from information_schema.tables"): 
270 |                 print(result)
271 | 
272 |     def test_orm(self):
  |         # Round-trips one BasicUser row through the MySQL dialect.
273 |         BasicBase.metadata.create_all(self.engine)
274 |         birthday = datetime.datetime.fromtimestamp(0).date()
275 |         eats_breakfast_at = datetime.time(9, 0, 0, 123)
276 |         married_at = datetime.datetime(2020, 2, 20, 2, 20, 2, 200200)
277 |         ed_user = BasicUser(
278 |             name="ed",
279 |             fullname="Ed Jones",
280 |             nickname="edsnickname",
281 |             birthday=birthday,
282 |             eats_breakfast_at=eats_breakfast_at,
283 |             married_at=married_at,
284 |         )
285 |         Session = sessionmaker(bind=self.engine)
286 |         session = Session()
287 | 
288 |         session.query(BasicUser).delete()
289 |         session.commit()
290 | 
291 |         session.add(ed_user)
292 |         self.assertEqual(session.query(BasicUser).filter_by(name="ed").first().name, "ed")
293 |         session.commit()
294 |         self.assertGreater(session.query(BasicUser).filter(BasicUser.name.like("%ed")).count(), 0)
295 |         u = session.query(BasicUser).filter(BasicUser.name.like("%ed")).first()
296 |         self.assertEqual(u.nickname, "edsnickname")
297 |         self.assertEqual(u.birthday, birthday)
  |         # The fractional-second components are expected to be dropped on the
  |         # round trip (compared with microsecond=0). Presumably the MySQL
  |         # columns carry no fractional-seconds precision -- confirm.
298 |         self.assertEqual(u.eats_breakfast_at, eats_breakfast_at.replace(microsecond=0))
299 |         self.assertEqual(u.married_at, married_at.replace(microsecond=0))
300 | 
301 | 
302 | if __name__ == "__main__":
303 |     unittest.main()
304 | 
--------------------------------------------------------------------------------