├── .build.cmd ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ └── sqlalchemy-firebird.yml ├── .gitignore ├── .pypi_test_upload.cmd ├── .pypi_upload.cmd ├── .vscode ├── launch.json └── settings.json ├── AUTHORS ├── LICENSE ├── README.development.md ├── README.rst ├── pyproject.toml ├── run-all-tests.ps1 ├── run-tests.ps1 ├── setup.cfg ├── sqlalchemy_firebird ├── __init__.py ├── base.py ├── fb_info25.py ├── fb_info30.py ├── fb_info40.py ├── fdb.py ├── firebird.py ├── infrastructure.py ├── provision.py ├── requirements.py └── types.py ├── test ├── __init__.py ├── conftest.py ├── test_compiler.py ├── test_dialect.py ├── test_query.py ├── test_reflection.py ├── test_suite.py └── test_types.py └── tox.ini /.build.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | pushd "%~dp0" 3 | 4 | if exist dist\ (del /q dist\*.*) 5 | 6 | python -m build 7 | 8 | if errorlevel 1 ( 9 | echo. 10 | pause 11 | ) 12 | 13 | popd 14 | exit -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /.github/workflows/sqlalchemy-firebird.yml: -------------------------------------------------------------------------------- 1 | name: sqlalchemy-firebird 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | strategy: 10 | max-parallel: 4 11 | matrix: 12 | python-version: [3.7, 3.8, 3.9, '3.10', '3.11'] 13 | 14 | steps: 15 | - uses: actions/checkout@v3 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: ${{ matrix.python-version }} 20 | - name: Install dependencies 21 | run: | 22 | python -m pip install --upgrade pip 23 | pip install . 24 | - name: Lint with flake8 25 | run: | 26 | pip install flake8 27 | # stop the build if there are Python syntax errors or undefined names 28 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics --ignore E203,E266,E501,W503,F403,F401 29 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 30 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 31 | # - name: Test with pytest 32 | # run: | 33 | # pip install pytest 34 | # pytest 35 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | .venv/ 3 | _venv/ 4 | /venv/ 5 | build/ 6 | dist/ 7 | *.egg-info 8 | /test.cfg 9 | /test/test.cfg 10 | *.html 11 | sqlalchemy_firebird/__pycache__/ 12 | test/__pycache__/ 13 | *.log 14 | readme.md 15 | /_venv1-3/ 16 | /_venv1-4/ 17 | 18 | .vscode/ 19 | -------------------------------------------------------------------------------- /.pypi_test_upload.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | pushd "%~dp0" 3 | 4 | twine upload --repository testpypi dist/* 5 | 6 | if errorlevel 1 ( 7 | echo. 8 | pause 9 | ) 10 | 11 | popd 12 | exit -------------------------------------------------------------------------------- /.pypi_upload.cmd: -------------------------------------------------------------------------------- 1 | @echo off 2 | pushd "%~dp0" 3 | 4 | twine upload dist/* 5 | 6 | if errorlevel 1 ( 7 | echo. 8 | pause 9 | ) 10 | 11 | popd 12 | exit 13 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Module", 9 | "type": "python", 10 | "request": "launch", 11 | "module": "pytest", 12 | "env": {"CI": "True"}, 13 | "args": [ 14 | "./test/test_suite.py::NormalizedNameTest::test_get_table_names", 15 | "--db", 16 | "firebird_fb50" 17 | ], 18 | "justMyCode": false 19 | }, 20 | { 21 | // Disable JustMyCode on Pytest -- https://stackoverflow.com/a/57831657 22 | "name": "Debug Unit Test", 23 | "type": "python", 24 | "request": "launch", 25 | "console": "integratedTerminal", 26 | "purpose": ["debug-test"], 27 | "justMyCode": false 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[python]": { 3 | "editor.defaultFormatter": "ms-python.black-formatter", 4 | "editor.formatOnSave": true 5 | }, 6 | "python.testing.pytestArgs": [ 7 | "test", 8 | "--tb=no", 9 | ], 10 | "python.testing.unittestEnabled": false, 11 | "python.testing.pytestEnabled": true, 12 | } 13 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | SQLAlchemy was created by Michael Bayer. 2 | 3 | Major contributing authors include: 4 | 5 | - Michael Bayer 6 | - Jason Kirtland 7 | - Gaetan de Menten 8 | - Diana Clarke 9 | - Michael Trier 10 | - Philip Jenvey 11 | - Ants Aasma 12 | - Paul Johnston 13 | - Jonathan Ellis 14 | 15 | 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2005-2023 SQLAlchemy authors and contributors . 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of 4 | this software and associated documentation files (the "Software"), to deal in 5 | the Software without restriction, including without limitation the rights to 6 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies 7 | of the Software, and to permit persons to whom the Software is furnished to do 8 | so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. -------------------------------------------------------------------------------- /README.development.md: -------------------------------------------------------------------------------- 1 | # Development notes 2 | 3 | Support for 32/64 bit Python 2.7, 3.6+ on Windows/Linux/Mac. 
4 | 5 | * Use `firebird-driver` and/or `fdb` 6 | * Python >= 3.8 7 | * SQLAlchemy 1.4 or 2.0 8 | 9 | * Use `fdb` 10 | * Python == 3.7 and SQLAlchemy 2.0 11 | * Python >= 3.6 and SQLAlchemy 1.4 12 | 13 | 14 | # Windows environment 15 | 16 | ## Install Python 17 | 18 | You may install Python with [Chocolatey](https://chocolatey.org/install): 19 | 20 | ```powershell 21 | choco install python -y 22 | ``` 23 | 24 | 25 | ## Install Visual Studio Code 26 | 27 | We strongly recommend Visual Studio Code for development. You may install it with: 28 | 29 | ```powershell 30 | choco install vscode -y 31 | ``` 32 | 33 | 34 | ## Initial checkout 35 | 36 | Clone this repository into a local folder on your computer and, from the root folder, run 37 | 38 | ```powershell 39 | python -m venv .venv 40 | .venv/Scripts/activate 41 | pip install .[dev] 42 | pip install fdb 43 | ``` 44 | 45 | This will create a Python virtual environment in the `.venv` subfolder and install all required components. 46 | 47 | Open the project folder with VSCode. It should detect the virtual environment automatically and activate it. Please refer to [Visual Studio Code documentation on Python](https://code.visualstudio.com/docs/languages/python) for more information. 48 | 49 | To activate the virtual environment on a command prompt instance (cmd or powershell) use: 50 | 51 | ```powershell 52 | .venv/Scripts/activate 53 | ``` 54 | 55 | 56 | # Linux environment 57 | 58 | ## Initial checkout 59 | 60 | Clone this repository into a local folder on your computer and, from the root folder, run 61 | 62 | ```bash 63 | python3 -m venv .venv 64 | . .venv/bin/activate 65 | pip install .[dev] 66 | pip install fdb 67 | ``` 68 | 69 | This will create a Python virtual environment in the `.venv` subfolder and install all required components. 70 | 71 | To activate the virtual environment use: 72 | 73 | ```bash 74 | . .venv/bin/activate 75 | ``` 76 | 77 | 78 | # Tests 79 | 80 | ## Preparing the test infrastructure 81 | 82 | With the virtual environment activated, run the following script 83 | 84 | ``` 85 | rebuild-test-databases 86 | ``` 87 | 88 | This script will 89 | 90 | - Create a `sqlalchemy-firebird-tests` folder in your temp folder containing the binaries for each supported Firebird version; 91 | - Create databases for each Firebird version; and 92 | - Add a `[db]` section into your `setup.cfg` containing one entry for each of the databases created. 93 | 94 | You may run this script whenever you need a clean database for your tests. It won't download the files again if they already exist. 95 | 96 | 97 | ## Running the tests 98 | 99 | Run the following PowerShell script 100 | 101 | ```powershell 102 | .\run-all-tests.ps1 103 | ``` 104 | 105 | This will start 5 different processes, each one running a different supported driver/Firebird version combination. 106 | 107 | To run only the tests for a specific database, use 108 | 109 | ```powershell 110 | .\run-tests.ps1 -Database 'firebird_fb50' 111 | ``` 112 | 113 | 114 | ## Debugging the tests 115 | 116 | SQLAlchemy has a complex test infrastructure which unfortunately is not completely functional from the VSCode test runner. 117 | 118 | To run a specific test under the VSCode debugger, this repository already provides a `.vscode/launch.json` file preconfigured as a sample. 119 | 120 | E.g.
to run the test `test_get_table_names` with `firebird-driver` and Firebird 5.0, you must set the `pytest` arguments as: 121 | 122 | ```json 123 | "args": ["./test/test_suite.py::NormalizedNameTest::test_get_table_names", "--db", "firebird_fb50"], 124 | ``` 125 | 126 | Now run the code (with `F5`) and the debugger should work as expected (e.g. set a breakpoint and it should stop). 127 | 128 | 129 | ## Debugging SQLAlchemy code 130 | 131 | Sooner or later you probably will need to debug SQLAlchemy code. Fortunately, this is as easy as 132 | 133 | ```bash 134 | # [From your 'sqlalchemy-firebird' root folder, inside virtual environment] 135 | pip install -e $path_to_your_sqlalchemy_local_folder 136 | ``` 137 | 138 | The `launch.json` file already has the required `"justMyCode": false` configuration which allows you to step into SQLAlchemy source files during debugging. 139 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | sqlalchemy-firebird 2 | ################### 3 | 4 | An external SQLAlchemy dialect for Firebird 5 | =========================================== 6 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg 7 | :target: https://github.com/psf/black 8 | .. image:: https://github.com/pauldex/sqlalchemy-firebird/workflows/sqlalchemy-firebird/badge.svg 9 | :target: https://github.com/pauldex/sqlalchemy-firebird 10 | 11 | ---- 12 | 13 | Those who want to use the open source `Firebird <https://www.firebirdsql.org>`_ database server with `Python <https://www.python.org>`_ using `SQLAlchemy <https://www.sqlalchemy.org>`_ need to provide a dialect that SQLAlchemy can use to communicate with the database, because Firebird is not among the included dialects. 14 | 15 | This package provides a Firebird dialect for SQLAlchemy using the Python Database API 2.0 compliant support provided by either `firebird-driver <https://pypi.org/project/firebird-driver/>`_ or `fdb <https://pypi.org/project/fdb/>`_. 16 | 17 | ---- 18 | 19 | **Installation** 20 | 21 | The pip command to install the sqlalchemy-firebird package is:: 22 | 23 | pip install sqlalchemy-firebird 24 | 25 | If you are using Python 3.8+, installing sqlalchemy-firebird will automatically install SQLAlchemy 2.0+ and firebird-driver. This configuration can be used to access Firebird server versions 3 and up. If you need to access a Firebird version 2.5 server, just install fdb using pip:: 26 | 27 | pip install fdb 28 | 29 | If you are using a version of Python less than 3.8, SQLAlchemy 1.4+ and fdb are automatically installed, which can only be used for Firebird server versions 2.5.9 and 3.0+. 30 | 31 | ---- 32 | 33 | **Getting Started** 34 | 35 | The first thing you need when connecting your application to the database using SQLAlchemy is an engine object, obtained by calling *create_engine* with the appropriate parameters. This can be a connection string (also known as a database uniform resource identifier/locator, *dburi* or *dburl* for short), or the URL object returned by calling *create* from sqlalchemy.engine.URL. 36 | 37 | The following information is needed to make the connection string: 38 | 39 | - <driver> - which driver to use; 'firebird' to use firebird-driver, or 'fdb' to use the fdb driver 40 | - <username> - Firebird default is 'sysdba' 41 | - <password> - Firebird default is 'masterkey' 42 | - <host> - the location of the database server 43 | - <port> - Firebird default is '3050' 44 | - <database> - location of the database file 45 | - <charset> - character set used by the database file, Firebird default is UTF8 46 | - <fb_client_library> - path to the firebird client library file.
Linux needs 'libfbclient.so', Windows uses fbclient.dll. This is only needed when using the embedded server or a remotely installed server. 47 | 48 | Connection Strings 49 | 50 | A typical connection string for SQLAlchemy is *dialect+driver://username:password@host:port/database*. 51 | 52 | The template for a Firebird connection string looks like this (using the information listed above): 53 | :: 54 | 55 | firebird+<driver>://<username>:<password>@<host>:<port>/<database>[?charset=UTF8&key=value&key=value...] 56 | 57 | Note that the only difference between the Linux and Windows versions of the following example configuration strings is that the Linux paths begin with '//home/testuser' while the Windows paths begin with 'c:/': 58 | 59 | 60 | - The simplest configuration string is for the Firebird server installed locally using the default port. 61 | 62 | :: 63 | 64 | [Linux] 65 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 66 | firebird+fdb://sysdba:masterkey@localhost///home/testuser/projects/databases/my_project.fdb 67 | # Use the firebird-driver driver (Python 3.8+, Firebird server 3.0 or greater) 68 | firebird+firebird://sysdba:masterkey@localhost///home/testuser/projects/databases/my_project.fdb 69 | 70 | [Windows] 71 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 72 | firebird+fdb://sysdba:masterkey@localhost/c:/projects/databases/my_project.fdb 73 | # Use the firebird-driver driver (Python 3.8+, Firebird server 3.0 or greater) 74 | firebird+firebird://sysdba:masterkey@localhost/c:/projects/databases/my_project.fdb 75 | 76 | - Firebird server installed remotely using port 3040 and specifying the character set to use 77 | 78 | :: 79 | 80 | [Linux] 81 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 82 | firebird+fdb://sysdba:masterkey@localhost:3040///home/testuser/databases/my_project.fdb?charset=UTF8&fb_library_name=//home/testuser/dbclient/lib/libfbclient.so 83 | # Use the firebird-driver driver (Python 3.8+) 84 | firebird+firebird://sysdba:masterkey@localhost:3040///home/testuser/databases/my_project.fdb?charset=UTF8&fb_client_library=//home/testuser/dbclient/lib/libfbclient.so 85 | 86 | [Windows] 87 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 88 | firebird+fdb://sysdba:masterkey@localhost:3040/c:/projects/databases/my_project.fdb?charset=UTF8&fb_library_name=c:/projects/dbclient/fbclient.dll 89 | # Use the firebird-driver driver (Python 3.8+) 90 | firebird+firebird://sysdba:masterkey@localhost:3040/c:/projects/databases/my_project.fdb?charset=UTF8&fb_client_library=c:/projects/dbclient/fbclient.dll 91 | 92 | - Firebird embedded server specifying the character set to use 93 | 94 | :: 95 | 96 | [Linux] 97 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 98 | firebird+fdb://sysdba@///home/testuser/databases/my_project.fdb?charset=UTF8&fb_library_name=//home/testuser/dbserver/lib/libfbclient.so 99 | # Use the firebird-driver driver (Python 3.8+) 100 | firebird+firebird://sysdba@///home/testuser/databases/my_project.fdb?charset=UTF8&fb_client_library=//home/testuser/dbserver/lib/libfbclient.so 101 | 102 | [Windows] 103 | # Use the fdb driver (Python 3.6/3.7, or Firebird server 2.5.9) 104 | firebird+fdb://sysdba@/c:/projects/databases/my_project.fdb?charset=UTF8&fb_library_name=c:/projects/dbserver/fbclient.dll 105 | # Use the firebird-driver driver (Python 3.8+) 106 | firebird+firebird://sysdba@/c:/projects/databases/my_project.fdb?charset=UTF8&fb_client_library=c:/projects/dbserver/fbclient.dll 107 | 108 | 109 | ---- 110 | 111 | **How to use**
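Once you have a connection string, usage follows the standard SQLAlchemy workflow: create an engine, then open connections from it and execute statements. The sketch below is illustrative only; it reuses the local Linux example string from the previous section (adjust the path to your own database file) and queries the single-row system table RDB$DATABASE as a quick connectivity check.

::

    from sqlalchemy import create_engine, text

    # Example connection string taken from the section above; adjust to your database file
    db_uri = "firebird+firebird://sysdba:masterkey@localhost///home/testuser/projects/databases/my_project.fdb"
    engine = create_engine(db_uri)

    with engine.connect() as connection:
        # RDB$DATABASE always contains exactly one row, so it is handy for smoke tests
        result = connection.execute(text("SELECT CURRENT_TIMESTAMP FROM RDB$DATABASE"))
        print(result.scalar_one())
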
112 | 113 | For example, to connect to an embedded Firebird server using firebird-driver on Windows: 114 | 115 | :: 116 | 117 | db_uri = "firebird+firebird://sysdba@/c:/projects/databases/my_project.fdb?charset=UTF8&fb_client_library=c:/projects/databases/fb40_svr/fbclient.dll" 118 | from sqlalchemy import create_engine 119 | engine = create_engine(db_uri, echo=True) 120 | 121 | # force the engine to connect, revealing any problems with the connection string 122 | with engine.begin(): 123 | pass 124 | 125 | Connecting to different types of Firebird servers, databases, or drivers is done simply by changing the db_uri string 126 | used in the call to create_engine. 127 | 128 | ---- 129 | 130 | **Code of Conduct** 131 | 132 | As with SQLAlchemy, sqlalchemy-firebird places great emphasis on polite, thoughtful, and 133 | constructive communication between users and developers. 134 | We use the SQLAlchemy `Code of Conduct `_. 135 | 136 | ---- 137 | 138 | **License** 139 | 140 | sqlalchemy-firebird is distributed under the `MIT license 141 | `_. 142 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.pytest.ini_options] 2 | addopts = "--tb native -v -r fxX --maxfail=100 -p no:warnings --log-info=sqlalchemy.engine " 3 | 4 | markers = [ 5 | "backend: mark test as for the backend", 6 | ] 7 | 8 | python_files = "test/*test_*.py" 9 | 10 | # SQLAlchemy-Firebird configuration for Black. 11 | [tool.black] 12 | line-length = 79 13 | target-version = ['py37'] 14 | include = '\.pyi?$' 15 | exclude = ''' 16 | /( 17 | \.github 18 | | \.pytest_cache 19 | | _venv 20 | )/ 21 | ''' 22 | 23 | [build-system] 24 | requires = ["setuptools>=61.0"] 25 | build-backend = "setuptools.build_meta" 26 | 27 | [project] 28 | name = "sqlalchemy-firebird" 29 | description = "Firebird for SQLAlchemy" 30 | requires-python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" 31 | keywords = ["SQLAlchemy", "Firebird", "fdb", "firebird-driver"] 32 | license = {file = "LICENSE"} 33 | authors = [ 34 | {name = "Paul Graves-DesLauriers", email="paul@dexmicro.com"}, 35 | {name = "F.D.Castel"}, 36 | ] 37 | maintainers = [ 38 | {name = "Paul Graves-DesLauriers", email="paul@dexmicro.com"}, 39 | ] 40 | classifiers = [ 41 | "Development Status :: 4 - Beta", 42 | "Intended Audience :: Developers", 43 | "License :: OSI Approved :: MIT License", 44 | "Programming Language :: Python", 45 | "Programming Language :: Python :: 3", 46 | "Programming Language :: Python :: Implementation :: CPython", 47 | "Topic :: Database :: Front-Ends", 48 | "Operating System :: OS Independent", 49 | ] 50 | dependencies = [ 51 | "SQLAlchemy >= 1.4, < 2.0; python_version < '3.8'", 52 | "fdb; python_version < '3.8'", 53 | "SQLAlchemy >= 2.0; python_version >= '3.8'", 54 | "firebird-driver; python_version >= '3.8'", 55 | "packaging", 56 | ] 57 | dynamic = ["readme", "version"] 58 | 59 | [project.optional-dependencies] 60 | dev = ["pytest", "black", "flake8", "build", "twine"] 61 | test = ["pytest"] 62 | 63 | [project.urls] 64 | Documentation = "https://github.com/pauldex/sqlalchemy-firebird/wiki" 65 | Source = "https://github.com/pauldex/sqlalchemy-firebird" 66 | Tracker = "https://github.com/pauldex/sqlalchemy-firebird/issues" 67 | 68 | [project.entry-points."sqlalchemy.dialects"] 69 | "firebird" = "sqlalchemy_firebird.firebird:FBDialect_firebird" 70 | "firebird.fdb" = "sqlalchemy_firebird.fdb:FBDialect_fdb" 71 | 
"firebird.firebird" = "sqlalchemy_firebird.firebird:FBDialect_firebird" 72 | 73 | [project.scripts] 74 | prepare-test-environment = "sqlalchemy_firebird:infrastructure.prepare_test_environment" 75 | rebuild-test-databases = "sqlalchemy_firebird:infrastructure.rebuild_test_databases" 76 | 77 | [tool.setuptools.dynamic] 78 | readme = {file = ["README.rst"], content-type = "text/x-rst"} 79 | version = {attr = "sqlalchemy_firebird.__version__"} 80 | -------------------------------------------------------------------------------- /run-all-tests.ps1: -------------------------------------------------------------------------------- 1 | # 2 | # Run all tests in parallel (one process per driver/engine combination) 3 | # 4 | 5 | 'fdb_fb25','fdb_fb30','firebird_fb30','firebird_fb40','firebird_fb50' | ForEach-Object { 6 | Start-Process 'powershell' ".\.venv\Scripts\activate ; while (`$true) { .\run-tests.ps1 -Db $_ ; pause }" 7 | } 8 | -------------------------------------------------------------------------------- /run-tests.ps1: -------------------------------------------------------------------------------- 1 | # 2 | # Run all tests for a given driver/engine. 3 | # 4 | 5 | [CmdletBinding()] 6 | param( 7 | [Parameter(Mandatory=$true)] 8 | [ValidateScript({ 9 | if (Get-Content 'setup.cfg' | Select-String -Pattern "^$_\s*=") { return $true } 10 | throw [System.Management.Automation.ValidationMetadataException] "The database '$_' was not found in 'setup.cfg'." 11 | })] 12 | [string]$Database 13 | ) 14 | 15 | if (-not $env:VIRTUAL_ENV) { 16 | throw "Virtual environment not detected. Please run '.venv/scripts/activate' first." 17 | } 18 | 19 | # Set console width 20 | [console]::WindowWidth=260 21 | 22 | Clear-Host 23 | Write-Warning "Using connection '$Database'..." 24 | 25 | # Recreate test database 26 | rebuild-test-databases $Database 27 | 28 | # pytest: do not truncate error messages -- https://github.com/pytest-dev/pytest/issues/9920 29 | $env:CI = 'True' 30 | 31 | # pytest additional parameters 32 | $extraParams = @( 33 | '--tb=no', # Disable tracebacks 34 | '--color=yes' # Force color in output. Pytest disables it because Tee-Object redirection. 
35 | ) 36 | 37 | # Run pytest 38 | $host.ui.RawUI.WindowTitle = "[$Database]: (Running...)" 39 | & pytest --db $Database $extraParams 2>$null | Tee-Object -Variable testOutput 40 | $pytestExitCode = $LASTEXITCODE 41 | 42 | # Update window title with test results 43 | $summary1st = $testOutput[-1] -replace '\x1b\[\d+(;\d+)?m' -replace '=' # strip colors and '=' 44 | $host.ui.RawUI.WindowTitle = "[$Database]: $summary1st (exit code = $pytestExitCode)" 45 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [sqla_testing] 2 | requirement_cls=sqlalchemy_firebird.requirements:Requirements 3 | profile_file=test/profiles.txt 4 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/__init__.py: -------------------------------------------------------------------------------- 1 | # firebird/__init__.py 2 | # Copyright (C) 2005-2023 the SQLAlchemy authors and contributors 3 | # 4 | # 5 | # This module is released under the MIT License: http://www.opensource.org/licenses/mit-license.php 6 | __version__ = "2.1" 7 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/fb_info25.py: -------------------------------------------------------------------------------- 1 | """Provide Firebird 2.5 specific information. 2 | 3 | Variables: 4 | MAX_IDENTIFIER_LENGTH -> int 5 | RESERVED_WORDS -> set 6 | 7 | .._Firebird 2.5: 8 | https://www.firebirdsql.org/file/documentation/html/en/refdocs/fblangref25/firebird-25-language-reference.html#fblangref25-intro 9 | 10 | """ 11 | 12 | import sqlalchemy.types as sa_types 13 | 14 | # https://www.firebirdsql.org/file/documentation/html/en/refdocs/fblangref25/firebird-25-language-reference.html#fblangref25-datatypes 15 | # "Length cannot exceed 31 characters." 
16 | MAX_IDENTIFIER_LENGTH = 31 17 | 18 | # https://www.firebirdsql.org/file/documentation/html/en/refdocs/fblangref25/firebird-25-language-reference.html#fblangref25-appx03-reskeywords 19 | # This set is for Firebird versions >= 2.5.1 20 | RESERVED_WORDS = { 21 | "add", 22 | "admin", 23 | "all", 24 | "alter", 25 | "and", 26 | "any", 27 | "as", 28 | "at", 29 | "avg", 30 | "begin", 31 | "between", 32 | "bigint", 33 | "bit_length", 34 | "blob", 35 | "both", 36 | "by", 37 | "case", 38 | "cast", 39 | "char", 40 | "char_length", 41 | "character", 42 | "character_length", 43 | "check", 44 | "close", 45 | "collate", 46 | "column", 47 | "commit", 48 | "connect", 49 | "constraint", 50 | "count", 51 | "create", 52 | "cross", 53 | "current", 54 | "current_connection", 55 | "current_date", 56 | "current_role", 57 | "current_time", 58 | "current_timestamp", 59 | "current_transaction", 60 | "current_user", 61 | "cursor", 62 | "date", 63 | "day", 64 | "dec", 65 | "decimal", 66 | "declare", 67 | "default", 68 | "delete", 69 | "deleting", 70 | "disconnect", 71 | "distinct", 72 | "double", 73 | "drop", 74 | "else", 75 | "end", 76 | "escape", 77 | "execute", 78 | "exists", 79 | "external", 80 | "extract", 81 | "fetch", 82 | "filter", 83 | "float", 84 | "for", 85 | "foreign", 86 | "from", 87 | "full", 88 | "function", 89 | "gdscode", 90 | "global", 91 | "grant", 92 | "group", 93 | "having", 94 | "hour", 95 | "in", 96 | "index", 97 | "inner", 98 | "insensitive", 99 | "insert", 100 | "inserting", 101 | "int", 102 | "integer", 103 | "into", 104 | "is", 105 | "join", 106 | "leading", 107 | "left", 108 | "like", 109 | "long", 110 | "lower", 111 | "max", 112 | "maximum_segment", 113 | "merge", 114 | "min", 115 | "minute", 116 | "month", 117 | "national", 118 | "natural", 119 | "nchar", 120 | "no", 121 | "not", 122 | "null", 123 | "numeric", 124 | "octet_length", 125 | "of", 126 | "on", 127 | "only", 128 | "open", 129 | "or", 130 | "order", 131 | "outer", 132 | "parameter", 133 | "plan", 134 | "position", 135 | "post_event", 136 | "precision", 137 | "primary", 138 | "procedure", 139 | "rdb$db_key", 140 | "real", 141 | "record_version", 142 | "recreate", 143 | "recursive", 144 | "references", 145 | "release", 146 | "returning_values", 147 | "returns", 148 | "revoke", 149 | "right", 150 | "rollback", 151 | "row_count", 152 | "rows", 153 | "savepoint", 154 | "second", 155 | "select", 156 | "sensitive", 157 | "set", 158 | "similar", 159 | "smallint", 160 | "some", 161 | "sqlcode", 162 | "sqlstate", 163 | "start", 164 | "sum", 165 | "table", 166 | "then", 167 | "time", 168 | "timestamp", 169 | "to", 170 | "trailing", 171 | "trigger", 172 | "trim", 173 | "union", 174 | "unique", 175 | "update", 176 | "updating", 177 | "upper", 178 | "user", 179 | "using", 180 | "value", 181 | "values", 182 | "varchar", 183 | "variable", 184 | "varying", 185 | "view", 186 | "when", 187 | "where", 188 | "while", 189 | "with", 190 | "year", 191 | } 192 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/fb_info30.py: -------------------------------------------------------------------------------- 1 | """Provide Firebird 3.0 specific information. 
2 | 3 | Variables: 4 | MAX_IDENTIFIER_LENGTH -> int 5 | RESERVED_WORDS -> set 6 | 7 | .._Firebird 3.0: 8 | https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref30/firebird-30-language-reference.html 9 | 10 | """ 11 | 12 | import sqlalchemy.types as sa_types 13 | 14 | # https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref30/firebird-30-language-reference.html 15 | # "Length cannot exceed 31 bytes. Identifiers are stored in character set UNICODE_FSS, which means 16 | # characters outside the ASCII range are stored using 2 or 3 bytes." 17 | MAX_IDENTIFIER_LENGTH = 31 18 | 19 | # https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref30/firebird-30-language-reference.html#fblangref30-appx03-reskeywords 20 | RESERVED_WORDS = { 21 | "add", 22 | "admin", 23 | "all", 24 | "alter", 25 | "and", 26 | "any", 27 | "as", 28 | "at", 29 | "avg", 30 | "begin", 31 | "between", 32 | "bigint", 33 | "bit_length", 34 | "blob", 35 | "boolean", 36 | "both", 37 | "by", 38 | "case", 39 | "cast", 40 | "char", 41 | "character", 42 | "character_length", 43 | "char_length", 44 | "check", 45 | "close", 46 | "collate", 47 | "column", 48 | "commit", 49 | "connect", 50 | "constraint", 51 | "corr", 52 | "count", 53 | "covar_pop", 54 | "covar_samp", 55 | "create", 56 | "cross", 57 | "current", 58 | "current_connection", 59 | "current_date", 60 | "current_role", 61 | "current_time", 62 | "current_timestamp", 63 | "current_transaction", 64 | "current_user", 65 | "cursor", 66 | "date", 67 | "day", 68 | "dec", 69 | "decimal", 70 | "declare", 71 | "default", 72 | "delete", 73 | "deleting", 74 | "deterministic", 75 | "disconnect", 76 | "distinct", 77 | "double", 78 | "drop", 79 | "else", 80 | "end", 81 | "escape", 82 | "execute", 83 | "exists", 84 | "external", 85 | "extract", 86 | "false", 87 | "fetch", 88 | "filter", 89 | "float", 90 | "for", 91 | "foreign", 92 | "from", 93 | "full", 94 | "function", 95 | "gdscode", 96 | "global", 97 | "grant", 98 | "group", 99 | "having", 100 | "hour", 101 | "in", 102 | "index", 103 | "inner", 104 | "insensitive", 105 | "insert", 106 | "inserting", 107 | "int", 108 | "integer", 109 | "into", 110 | "is", 111 | "join", 112 | "leading", 113 | "left", 114 | "like", 115 | "long", 116 | "lower", 117 | "max", 118 | "merge", 119 | "min", 120 | "minute", 121 | "month", 122 | "national", 123 | "natural", 124 | "nchar", 125 | "no", 126 | "not", 127 | "null", 128 | "numeric", 129 | "octet_length", 130 | "of", 131 | "offset", 132 | "on", 133 | "only", 134 | "open", 135 | "or", 136 | "order", 137 | "outer", 138 | "over", 139 | "parameter", 140 | "plan", 141 | "position", 142 | "post_event", 143 | "precision", 144 | "primary", 145 | "procedure", 146 | "rdb$db_key", 147 | "rdb$record_version", 148 | "real", 149 | "record_version", 150 | "recreate", 151 | "recursive", 152 | "references", 153 | "regr_avgx", 154 | "regr_avgy", 155 | "regr_count", 156 | "regr_intercept", 157 | "regr_r2", 158 | "regr_slope", 159 | "regr_sxx", 160 | "regr_sxy", 161 | "regr_syy", 162 | "release", 163 | "return", 164 | "returning_values", 165 | "returns", 166 | "revoke", 167 | "right", 168 | "rollback", 169 | "row", 170 | "rows", 171 | "row_count", 172 | "savepoint", 173 | "scroll", 174 | "second", 175 | "select", 176 | "sensitive", 177 | "set", 178 | "similar", 179 | "smallint", 180 | "some", 181 | "sqlcode", 182 | "sqlstate", 183 | "start", 184 | "stddev_pop", 185 | "stddev_samp", 186 | "sum", 187 | "table", 188 | "then", 189 | "time", 190 | "timestamp", 191 | "to", 192 | "trailing", 193 | 
"trigger", 194 | "trim", 195 | "true", 196 | "union", 197 | "unique", 198 | "unknown", 199 | "update", 200 | "updating", 201 | "upper", 202 | "user", 203 | "using", 204 | "value", 205 | "values", 206 | "varchar", 207 | "variable", 208 | "varying", 209 | "var_pop", 210 | "var_samp", 211 | "view", 212 | "when", 213 | "where", 214 | "while", 215 | "with", 216 | "year", 217 | } 218 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/fb_info40.py: -------------------------------------------------------------------------------- 1 | """Provide Firebird 4.0+ specific information. 2 | 3 | Variables: 4 | MAX_IDENTIFIER_LENGTH -> int 5 | RESERVED_WORDS -> set 6 | 7 | .._Firebird 4.0: 8 | https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref40/firebird-40-language-reference.html 9 | 10 | .._Firebird 5.0: 11 | https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref50/firebird-50-language-reference.html 12 | 13 | """ 14 | 15 | import sqlalchemy.types as sa_types 16 | 17 | # https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref40/firebird-40-language-reference.html 18 | # For Firebird version 4.0 and greater, the "...maximum identifier length is 63 characters 19 | # character set UTF8 (252 bytes)". 20 | MAX_IDENTIFIER_LENGTH = 63 21 | 22 | # https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref40/firebird-40-language-reference.html#fblangref40-reskeywords-reswords 23 | # This set is also good for Firebird version 5.0 Beta 1 24 | # Note that reserved words in Firebird 5 are the same as those in Firebird 4 25 | RESERVED_WORDS = { 26 | "add", 27 | "admin", 28 | "all", 29 | "alter", 30 | "and", 31 | "any", 32 | "as", 33 | "at", 34 | "avg", 35 | "begin", 36 | "between", 37 | "bigint", 38 | "binary", 39 | "bit_length", 40 | "blob", 41 | "boolean", 42 | "both", 43 | "by", 44 | "case", 45 | "cast", 46 | "char", 47 | "character", 48 | "character_length", 49 | "char_length", 50 | "check", 51 | "close", 52 | "collate", 53 | "column", 54 | "comment", 55 | "commit", 56 | "connect", 57 | "constraint", 58 | "corr", 59 | "count", 60 | "covar_pop", 61 | "covar_samp", 62 | "create", 63 | "cross", 64 | "current", 65 | "current_connection", 66 | "current_date", 67 | "current_role", 68 | "current_time", 69 | "current_timestamp", 70 | "current_transaction", 71 | "current_user", 72 | "cursor", 73 | "date", 74 | "day", 75 | "dec", 76 | "decfloat", 77 | "decimal", 78 | "declare", 79 | "default", 80 | "delete", 81 | "deleting", 82 | "deterministic", 83 | "disconnect", 84 | "distinct", 85 | "double", 86 | "drop", 87 | "else", 88 | "end", 89 | "escape", 90 | "execute", 91 | "exists", 92 | "external", 93 | "extract", 94 | "false", 95 | "fetch", 96 | "filter", 97 | "float", 98 | "for", 99 | "foreign", 100 | "from", 101 | "full", 102 | "function", 103 | "gdscode", 104 | "global", 105 | "grant", 106 | "group", 107 | "having", 108 | "hour", 109 | "in", 110 | "index", 111 | "inner", 112 | "insensitive", 113 | "insert", 114 | "inserting", 115 | "int", 116 | "int128", 117 | "integer", 118 | "into", 119 | "is", 120 | "join", 121 | "lateral", 122 | "leading", 123 | "left", 124 | "like", 125 | "local", 126 | "localtime", 127 | "localtimestamp", 128 | "long", 129 | "lower", 130 | "max", 131 | "merge", 132 | "min", 133 | "minute", 134 | "month", 135 | "national", 136 | "natural", 137 | "nchar", 138 | "no", 139 | "not", 140 | "null", 141 | "numeric", 142 | "octet_length", 143 | "of", 144 | "offset", 145 | "on", 146 | "only", 147 | 
"open", 148 | "or", 149 | "order", 150 | "outer", 151 | "over", 152 | "parameter", 153 | "plan", 154 | "position", 155 | "post_event", 156 | "precision", 157 | "primary", 158 | "procedure", 159 | "publication", 160 | "rdb$db_key", 161 | "rdb$error", 162 | "rdb$get_context", 163 | "rdb$get_transaction_cn", 164 | "rdb$record_version", 165 | "rdb$role_in_use", 166 | "rdb$set_context", 167 | "rdb$system_privilege", 168 | "real", 169 | "record_version", 170 | "recreate", 171 | "recursive", 172 | "references", 173 | "regr_avgx", 174 | "regr_avgy", 175 | "regr_count", 176 | "regr_intercept", 177 | "regr_r2", 178 | "regr_slope", 179 | "regr_sxx", 180 | "regr_sxy", 181 | "regr_syy", 182 | "release", 183 | "resetting", 184 | "return", 185 | "returning_values", 186 | "returns", 187 | "revoke", 188 | "right", 189 | "rollback", 190 | "row", 191 | "rows", 192 | "row_count", 193 | "savepoint", 194 | "scroll", 195 | "second", 196 | "select", 197 | "sensitive", 198 | "set", 199 | "similar", 200 | "smallint", 201 | "some", 202 | "sqlcode", 203 | "sqlstate", 204 | "start", 205 | "stddev_pop", 206 | "stddev_samp", 207 | "sum", 208 | "table", 209 | "then", 210 | "time", 211 | "timestamp", 212 | "timezone_hour", 213 | "timezone_minute", 214 | "to", 215 | "trailing", 216 | "trigger", 217 | "trim", 218 | "true", 219 | "unbounded", 220 | "union", 221 | "unique", 222 | "unknown", 223 | "update", 224 | "updating", 225 | "upper", 226 | "user", 227 | "using", 228 | "value", 229 | "values", 230 | "varbinary", 231 | "varchar", 232 | "variable", 233 | "varying", 234 | "var_pop", 235 | "var_samp", 236 | "view", 237 | "when", 238 | "where", 239 | "while", 240 | "window", 241 | "with", 242 | "without", 243 | "year", 244 | } 245 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/fdb.py: -------------------------------------------------------------------------------- 1 | """ 2 | .. dialect:: firebird+fdb 3 | :name: fdb 4 | :dbapi: fdb 5 | :connectstring: firebird+fdb://user:password@host:port/path/to/db[?key=value&key=value...] 6 | :url: http://pypi.python.org/pypi/fdb/ 7 | :documentation: https://fdb.readthedocs.io/en/latest/ 8 | 9 | The FDB package provides legacy driver for Python 2 and 3, and Firebird 2.x and 3. 10 | This driver uses classic Firebird API provided by fbclient library. 11 | """ # noqa 12 | 13 | from math import modf 14 | 15 | from sqlalchemy import util 16 | from .base import FBDialect 17 | 18 | 19 | class FBDialect_fdb(FBDialect): 20 | name = "firebird.fdb" 21 | driver = "fdb" 22 | supports_statement_cache = True 23 | 24 | @classmethod 25 | def dbapi(cls): 26 | # For SQLAlchemy 1.4 compatibility only. Deprecated in 2.0. 
27 | return __import__("fdb") 28 | 29 | @classmethod 30 | def import_dbapi(cls): 31 | return __import__("fdb") 32 | 33 | def create_connect_args(self, url): 34 | opts = url.translate_connect_args(username="user") 35 | if opts.get("port"): 36 | opts["host"] = "%s/%s" % (opts["host"], opts["port"]) 37 | del opts["port"] 38 | opts.update(url.query) 39 | 40 | util.coerce_kw_type(opts, "type_conv", int) 41 | 42 | return ([], opts) 43 | 44 | def _get_server_version_info(self, connection): 45 | dbapi_connection = ( 46 | connection.connection.dbapi_connection 47 | if self.using_sqlalchemy2 48 | else connection.connection 49 | ) 50 | minor, major = modf(dbapi_connection.engine_version) 51 | return (int(major), int(minor * 10)) 52 | 53 | 54 | dialect = FBDialect_fdb 55 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/firebird.py: -------------------------------------------------------------------------------- 1 | """ 2 | .. dialect:: firebird+firebird 3 | :name: firebird 4 | :dbapi: firebird-driver 5 | :connectstring: firebird+firebird://user:password@host:port/path/to/db[?key=value&key=value...] 6 | :url: https://pypi.org/project/firebird-driver/ 7 | :documentation: https://firebird-driver.readthedocs.io/en/latest/ 8 | 9 | The firebird-driver package provides driver for Python 3.8+ and Firebird 3+. 10 | This driver uses new Firebird OO API provided by fbclient library. 11 | """ # noqa 12 | 13 | from datetime import datetime 14 | from datetime import time 15 | from math import modf 16 | from sqlalchemy import util 17 | from .base import FBDialect 18 | 19 | import firebird.driver 20 | from firebird.driver import driver_config 21 | from firebird.driver import get_timezone 22 | 23 | 24 | class FBDialect_firebird(FBDialect): 25 | name = "firebird.firebird" 26 | driver = "firebird-driver" 27 | supports_statement_cache = True 28 | 29 | @classmethod 30 | def dbapi(cls): 31 | # For SQLAlchemy 1.4 compatibility only. Deprecated in 2.0. 
32 | return firebird.driver 33 | 34 | @classmethod 35 | def import_dbapi(cls): 36 | return firebird.driver 37 | 38 | @util.memoized_property 39 | def _isolation_lookup(self): 40 | return { 41 | "AUTOCOMMIT": "autocommit", 42 | "READ COMMITTED": "read_committed", 43 | "REPEATABLE READ": "repeatable_read", 44 | "SERIALIZABLE": "serializable", 45 | } 46 | 47 | def get_isolation_level_values(self, dbapi_connection): 48 | return list(self._isolation_lookup) 49 | 50 | def set_isolation_level(self, dbapi_connection, level): 51 | dbapi_connection.set_isolation_level(self._isolation_lookup[level]) 52 | 53 | def set_readonly(self, connection, value): 54 | connection.readonly = value 55 | 56 | def get_readonly(self, connection): 57 | return connection.readonly 58 | 59 | def set_deferrable(self, connection, value): 60 | connection.deferrable = value 61 | 62 | def get_deferrable(self, connection): 63 | return connection.deferrable 64 | 65 | def do_terminate(self, dbapi_connection) -> None: 66 | dbapi_connection.terminate() 67 | 68 | def create_connect_args(self, url): 69 | opts = url.translate_connect_args(username="user") 70 | 71 | qry = url.query 72 | if qry.get("fb_client_library"): 73 | # Set driver_config.fb_client_library and remove it from remaining keys passed to .connect() 74 | driver_config.fb_client_library.value = qry["fb_client_library"] 75 | qry = remove_keys(qry, {"fb_client_library"}) 76 | 77 | if opts.get("host"): 78 | host_name = opts["host"] 79 | database_name = opts["database"] 80 | 81 | port_number = "3050" 82 | if opts.get("port") is not None: 83 | port_number = str(opts["port"]) 84 | del opts["port"] 85 | 86 | cfg_driver_server = driver_config.get_server(host_name) 87 | if cfg_driver_server is None: 88 | cfg_driver_server = driver_config.register_server(host_name) 89 | cfg_driver_server.host.value = host_name 90 | cfg_driver_server.port.value = port_number 91 | 92 | cfg_driver_database = driver_config.get_database(database_name) 93 | if cfg_driver_database is None: 94 | cfg_driver_database = driver_config.register_database( 95 | database_name 96 | ) 97 | cfg_driver_database.server.value = host_name 98 | cfg_driver_database.database.value = opts["database"] 99 | 100 | del opts["host"] 101 | 102 | opts.update(qry) 103 | return ([], opts) 104 | 105 | def do_rollback(self, dbapi_connection): 106 | if dbapi_connection.is_active(): 107 | dbapi_connection.rollback() 108 | 109 | def do_commit(self, dbapi_connection): 110 | if dbapi_connection.is_active(): 111 | dbapi_connection.commit() 112 | 113 | def _get_server_version_info(self, connection): 114 | dbapi_connection = ( 115 | connection.connection.dbapi_connection 116 | if self.using_sqlalchemy2 117 | else connection.connection 118 | ) 119 | minor, major = modf(dbapi_connection.info.engine_version) 120 | return (int(major), int(minor * 10)) 121 | 122 | def adapt_timezone(self, param): 123 | # Convert tzinfo for firebird-driver. Requires tzinfo.tzname() method implemented. 124 | if isinstance(param, datetime) and param.tzinfo: 125 | return param.replace(tzinfo=get_timezone(param.tzname())) 126 | elif isinstance(param, time) and param.tzinfo: 127 | return param.replace(tzinfo=get_timezone(param.tzname())) 128 | return param 129 | 130 | def do_execute(self, cursor, statement, parameters, context=None): 131 | # Firebird-driver needs special time zone handling. 
132 | # https://github.com/FirebirdSQL/python3-driver/issues/19#issuecomment-1523045743 133 | adapted_parameters = [self.adapt_timezone(p) for p in parameters] 134 | super().do_execute(cursor, statement, adapted_parameters, context) 135 | 136 | 137 | def remove_keys(d, keys): 138 | return {x: d[x] for x in d if x not in keys} 139 | 140 | 141 | dialect = FBDialect_firebird 142 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/infrastructure.py: -------------------------------------------------------------------------------- 1 | from configparser import ConfigParser 2 | from glob import glob 3 | from os import environ 4 | from os import listdir 5 | from os import makedirs 6 | from os import remove 7 | from os import rename 8 | from os import name as os_name 9 | from os.path import basename 10 | from os.path import isdir 11 | from os.path import isfile 12 | from os.path import join 13 | from shutil import copy, rmtree 14 | from subprocess import run 15 | from sys import argv 16 | from tempfile import gettempdir 17 | from urllib.request import urlretrieve 18 | 19 | # 20 | # Globals 21 | # 22 | 23 | if os_name == "nt": 24 | FB50_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v5.0.0-RC2/Firebird-5.0.0.1304-RC2-windows-x64.zip" 25 | FB40_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v4.0.4/Firebird-4.0.4.3010-0-x64.zip" 26 | FB30_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v3.0.11/Firebird-3.0.11.33703-0_x64.zip" 27 | FB25_URL = "https://github.com/FirebirdSQL/firebird/releases/download/R2_5_9/Firebird-2.5.9.27139-0_x64_embed.zip" 28 | FB25_EXTRA_URL = "https://github.com/FirebirdSQL/firebird/releases/download/R2_5_9/Firebird-2.5.9.27139-0_x64.zip" 29 | else: 30 | FB50_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v5.0.0-RC2/Firebird-5.0.0.1304-RC2-linux-x64.tar.gz" 31 | FB40_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v4.0.4/Firebird-4.0.4.3010-0.amd64.tar.gz" 32 | FB30_URL = "https://github.com/FirebirdSQL/firebird/releases/download/v3.0.11/Firebird-3.0.11.33703-0.amd64.tar.gz" 33 | FB25_URL = "https://github.com/FirebirdSQL/firebird/releases/download/R2_5_9/FirebirdCS-2.5.9.27139-0.amd64.tar.gz" 34 | 35 | TEMP_PATH = gettempdir() 36 | 37 | # Disable black formatter 38 | # fmt: off 39 | 40 | # 41 | # Functions 42 | # 43 | 44 | 45 | def log(message): 46 | print(message.replace(TEMP_PATH, "$(tmp)")) 47 | 48 | 49 | def download_firebird(url, root_folder, ipc_name=None): 50 | source_package = join(root_folder, basename(url)) 51 | 52 | extension_length = 3 if os_name == "nt" else 7 53 | base_name = source_package[:-extension_length] 54 | 55 | target_folder = join(root_folder, base_name) 56 | 57 | log(f"Downloading '{source_package}'...") 58 | urlretrieve(url, source_package) 59 | 60 | log(f" Extracting to '{target_folder}'...") 61 | if os_name == "nt": 62 | import zipfile 63 | 64 | with zipfile.ZipFile(source_package, "r") as f: 65 | f.extractall(target_folder) 66 | else: 67 | import tarfile 68 | 69 | with tarfile.open(source_package, "r:gz") as f: 70 | f.extractall(path=root_folder) 71 | 72 | buildroot_package = join(target_folder, "buildroot.tar.gz") 73 | with tarfile.open(buildroot_package, "r:gz") as f: 74 | f.extractall(path=target_folder) 75 | 76 | log(f" Deleting '{source_package}'...") 77 | remove(source_package) 78 | 79 | # Windows-only: Set unique "IpcName" for each instance 80 | if os_name == "nt" and ipc_name is not None: 81 | conf_file = 
join(target_folder, "firebird.conf") 82 | log(f" Patching {conf_file}....") 83 | 84 | with open(conf_file, "r") as f: 85 | lines = f.read() 86 | lines = lines.replace("#IpcName = FIREBIRD", f"IpcName = {ipc_name}") 87 | with open(conf_file, "w") as f: 88 | f.write(lines) 89 | 90 | return base_name 91 | 92 | 93 | # 94 | # Main scripts entrypoints 95 | # 96 | 97 | 98 | def prepare_test_environment(force=True): 99 | root_folder = join(gettempdir(), "sqlalchemy-firebird-tests") 100 | 101 | if isdir(root_folder) and listdir(root_folder) and not force: 102 | # Folder already exists and is not empty. Nothing to do. 103 | log(f"Folder '{root_folder}' already exists.") 104 | return root_folder 105 | 106 | log(f"Creating {root_folder}...") 107 | rmtree(root_folder, ignore_errors=True) 108 | makedirs(root_folder) 109 | 110 | fb50_basename = download_firebird(FB50_URL, root_folder, "FIREBIRD50") 111 | fb40_basename = download_firebird(FB40_URL, root_folder, "FIREBIRD40") 112 | fb30_basename = download_firebird(FB30_URL, root_folder, "FIREBIRD30") 113 | fb25_basename = download_firebird(FB25_URL, root_folder, "FIREBIRD25") 114 | 115 | fb25_root_path = join(root_folder, fb25_basename) 116 | 117 | # Extra steps for Firebird 2.5 118 | if os_name == "nt": 119 | # Download non-embedded version to copy isql.exe (which does not exists in the embedded version) 120 | fb25_extra_basename = download_firebird(FB25_EXTRA_URL, root_folder) 121 | fb25_extra_path = join(root_folder, fb25_extra_basename) 122 | fb25_extra_bin_path = join(fb25_extra_path, "bin") 123 | log(f" Copy {fb25_extra_basename}/bin/isql.exe...") 124 | copy(f"{fb25_extra_bin_path}/isql.exe", fb25_root_path) 125 | 126 | # Rename fbembed.dll to fbclient.dll 127 | log(f" Renaming '{fb25_extra_basename}/fbembed.dll' to 'fbclient.dll'...") 128 | rename(f"{fb25_root_path}/fbembed.dll", f"{fb25_root_path}/fbclient.dll") 129 | 130 | log(f" Deleting {fb25_extra_basename}...") 131 | rmtree(fb25_extra_path) 132 | else: 133 | # On Linux, rename "FirebirdCS" to "Firebird" to keep the same pattern 134 | rename(fb25_root_path, join(root_folder, "Firebird-2.5.9.27139-0.amd64")) 135 | 136 | log("Test environment ready.") 137 | return root_folder 138 | 139 | 140 | def rebuild_test_databases(): 141 | root_folder = prepare_test_environment(force=False) 142 | 143 | log("Rebuilding databases...") 144 | 145 | root_path_for = { 146 | "fb50": glob(f"{root_folder}/Firebird-5*")[0], 147 | "fb40": glob(f"{root_folder}/Firebird-4*")[0], 148 | "fb30": glob(f"{root_folder}/Firebird-3*")[0], 149 | "fb25": glob(f"{root_folder}/Firebird-2*")[0], 150 | } 151 | 152 | if os_name != "nt": 153 | root_path_for["fb50"] = join(root_path_for["fb50"], "opt", "firebird") 154 | root_path_for["fb40"] = join(root_path_for["fb40"], "opt", "firebird") 155 | root_path_for["fb30"] = join(root_path_for["fb30"], "opt", "firebird") 156 | root_path_for["fb25"] = join(root_path_for["fb25"], "opt", "firebird") 157 | 158 | # If an argument is passed, use it to filter only that database 159 | filter = argv[1] if len(argv) > 1 else None 160 | if filter: 161 | log(f" Only for '{filter}'") 162 | 163 | config = ConfigParser() 164 | config.read("setup.cfg") 165 | 166 | if not config.has_section("db"): 167 | config.add_section("db") 168 | 169 | for driver in ["firebird", "fdb"]: 170 | for engine in ["fb50", "fb40", "fb30", "fb25"]: 171 | db_key = f"{driver}_{engine}" 172 | 173 | if (filter is not None) and (filter != db_key): 174 | continue 175 | 176 | # Create database with isql 177 | if os_name == "nt": 178 | 
isql = join(root_path_for[engine], "isql") 179 | else: 180 | isql = join(root_path_for[engine], "bin", "isql") 181 | 182 | database = join(root_folder, f"{driver}.{engine}.fdb") 183 | log(f" Creating '{database}'...") 184 | 185 | if isfile(database): 186 | remove(database) 187 | 188 | # Sets FIREBIRD env var to avoid problems with 'firebird.msg' not found. 189 | fb_env = environ.copy() 190 | fb_env["FIREBIRD"] = root_path_for[engine] 191 | if engine == "fb25" and os_name != "nt": 192 | # Firebird 2.5 on Linux needs LD_LIBRARY_PATH set to './lib' 193 | # https://groups.google.com/g/firebird-support/c/T6Nu6snaBWM/m/yLYqcJj0BAAJ 194 | fb_env["LD_LIBRARY_PATH"] = join(root_path_for[engine], "lib") 195 | 196 | create_sql = f"CREATE DATABASE '{database}' USER 'SYSDBA' PASSWORD 'masterkey' PAGE_SIZE 8192 DEFAULT CHARACTER SET UTF8;" 197 | cp = run( 198 | [isql, "-quiet"], 199 | capture_output=True, 200 | input=create_sql, 201 | text=True, 202 | env=fb_env 203 | ) 204 | if cp.returncode != 0: 205 | raise Exception(cp.stderr) 206 | 207 | # Add [db] section to setup.cfg 208 | lib_key = "fb_library_name" if driver == "fdb" else "fb_client_library" 209 | 210 | if os_name == "nt": 211 | lib_value = join(root_path_for[engine], "fbclient.dll") 212 | else: 213 | lib_value = join(root_path_for[engine], "lib", "libfbclient.so") 214 | 215 | db_uri = f"firebird+{driver}://SYSDBA@/{database}?charset=UTF8&{lib_key}={lib_value}" 216 | 217 | if driver == "firebird" and engine == "fb50": 218 | # Set firebird_fb50 also as default 219 | config.set("db", "default", db_uri) 220 | 221 | config.set("db", db_key, db_uri) 222 | 223 | log(f" Updating 'setup.cfg'...") 224 | with open("setup.cfg", "w") as f: 225 | config.write(f) 226 | 227 | log("Databases created.") 228 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/provision.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import event 2 | from sqlalchemy.engine import Engine 3 | from sqlalchemy.schema import CreateTable, DropTable, CreateIndex, DropIndex 4 | from sqlalchemy.testing.provision import temp_table_keyword_args 5 | 6 | 7 | @temp_table_keyword_args.for_db("firebird") 8 | def _firebird_temp_table_keyword_args(cfg, eng): 9 | return { 10 | "prefixes": ["GLOBAL TEMPORARY"], 11 | "firebird.fdb_on_commit": "PRESERVE ROWS", 12 | "firebird.firebird_on_commit": "PRESERVE ROWS", 13 | } 14 | 15 | 16 | @event.listens_for(Engine, "after_execute") 17 | def receive_after_execute(connection, statement, *arg): 18 | # 19 | # Important: Statements executed with connection.exec_driver_sql() don't pass through here. 20 | # Use connection.execute(text()) instead. 
21 | # 22 | if isinstance(statement, (CreateTable, DropTable, CreateIndex, DropIndex)): 23 | # Using Connection protected methods here because the public ones cause errors with TransactionManager 24 | connection._commit_impl() 25 | connection._begin_impl(connection._transaction) 26 | -------------------------------------------------------------------------------- /sqlalchemy_firebird/types.py: -------------------------------------------------------------------------------- 1 | import datetime as dt 2 | 3 | from typing import Any 4 | from typing import Optional 5 | from sqlalchemy import Dialect, types as sqltypes 6 | 7 | 8 | # Character set of BINARY/VARBINARY 9 | BINARY_CHARSET = "OCTETS" 10 | 11 | # Character set of NCHAR/NVARCHAR 12 | NATIONAL_CHARSET = "ISO8859_1" 13 | 14 | 15 | class _FBString(sqltypes.String): 16 | render_bind_cast = True 17 | 18 | def __init__(self, length=None, charset=None, collation=None): 19 | super().__init__(length, collation) 20 | self.charset = charset 21 | 22 | 23 | class FBCHAR(_FBString): 24 | __visit_name__ = "CHAR" 25 | 26 | def __init__(self, length=None, charset=None, collation=None): 27 | super().__init__(length, charset, collation) 28 | 29 | 30 | class FBBINARY(FBCHAR): 31 | __visit_name__ = "BINARY" 32 | 33 | # Synonym for CHAR(n) CHARACTER SET OCTETS 34 | def __init__(self, length=None, charset=None, collation=None): 35 | super().__init__(length, BINARY_CHARSET) 36 | 37 | 38 | class FBNCHAR(FBCHAR): 39 | __visit_name__ = "NCHAR" 40 | 41 | # Synonym for CHAR(n) CHARACTER SET ISO8859_1 42 | def __init__(self, length=None, charset=None, collation=None): 43 | super().__init__(length, NATIONAL_CHARSET) 44 | 45 | 46 | class FBVARCHAR(_FBString): 47 | __visit_name__ = "VARCHAR" 48 | 49 | def __init__(self, length=None, charset=None, collation=None): 50 | super().__init__(length, charset, collation) 51 | 52 | 53 | class FBVARBINARY(FBVARCHAR): 54 | __visit_name__ = "VARBINARY" 55 | 56 | # Synonym for VARCHAR(n) CHARACTER SET OCTETS 57 | def __init__(self, length=None, charset=None, collation=None): 58 | super().__init__(length, BINARY_CHARSET) 59 | 60 | 61 | class FBNVARCHAR(FBVARCHAR): 62 | __visit_name__ = "NVARCHAR" 63 | 64 | # Synonym for VARCHAR(n) CHARACTER SET ISO8859_1 65 | def __init__(self, length=None, charset=None, collation=None): 66 | super().__init__(length, NATIONAL_CHARSET) 67 | 68 | 69 | class _FBNumeric(sqltypes.Numeric): 70 | render_bind_cast = True 71 | 72 | def bind_processor(self, dialect): 73 | return None # Dialect supports_native_decimal = True (no processor needed) 74 | 75 | 76 | class FBFLOAT(_FBNumeric, sqltypes.FLOAT): 77 | __visit_name__ = "FLOAT" 78 | 79 | 80 | class FBDOUBLE_PRECISION(_FBNumeric, sqltypes.DOUBLE_PRECISION): 81 | __visit_name__ = "DOUBLE_PRECISION" 82 | 83 | 84 | class FBDECFLOAT(_FBNumeric): 85 | __visit_name__ = "DECFLOAT" 86 | 87 | 88 | class FBREAL(FBFLOAT): 89 | __visit_name__ = "REAL" 90 | 91 | # Synonym for FLOAT 92 | def __init__(self, precision=None, scale=None): 93 | super().__init__(None, None) 94 | 95 | 96 | class _FBFixedPoint(_FBNumeric): 97 | def __init__( 98 | self, 99 | precision=None, 100 | scale=None, 101 | decimal_return_scale=None, 102 | asdecimal=None, 103 | ): 104 | super().__init__( 105 | precision, scale, decimal_return_scale, asdecimal=True 106 | ) 107 | 108 | 109 | class FBDECIMAL(_FBFixedPoint): 110 | __visit_name__ = "DECIMAL" 111 | 112 | 113 | class FBNUMERIC(_FBFixedPoint): 114 | __visit_name__ = "NUMERIC" 115 | 116 | 117 | class FBDATE(sqltypes.DATE): 118 | render_bind_cast = 
True 119 | 120 | 121 | class FBTIME(sqltypes.TIME): 122 | render_bind_cast = True 123 | 124 | 125 | class FBTIMESTAMP(sqltypes.TIMESTAMP): 126 | render_bind_cast = True 127 | 128 | 129 | class _FBInteger(sqltypes.Integer): 130 | render_bind_cast = True 131 | 132 | 133 | class FBSMALLINT(_FBInteger): 134 | __visit_name__ = "SMALLINT" 135 | 136 | 137 | class FBINTEGER(_FBInteger): 138 | __visit_name__ = "INTEGER" 139 | 140 | 141 | class FBBIGINT(_FBInteger): 142 | __visit_name__ = "BIGINT" 143 | 144 | 145 | class FBINT128(_FBInteger): 146 | __visit_name__ = "INT128" 147 | 148 | 149 | class FBBOOLEAN(sqltypes.BOOLEAN): 150 | render_bind_cast = True 151 | 152 | 153 | class _FBLargeBinary(sqltypes.LargeBinary): 154 | render_bind_cast = True 155 | 156 | def __init__( 157 | self, subtype=None, segment_size=None, charset=None, collation=None 158 | ): 159 | super().__init__() 160 | self.subtype = subtype 161 | self.segment_size = segment_size 162 | self.charset = charset 163 | self.collation = collation 164 | 165 | def bind_processor(self, dialect): 166 | def process(value): 167 | return None if value is None else bytes(value) 168 | 169 | return process 170 | 171 | 172 | class FBBLOB(_FBLargeBinary, sqltypes.BLOB): 173 | __visit_name__ = "BLOB" 174 | 175 | def __init__( 176 | self, 177 | segment_size=None, 178 | ): 179 | super().__init__(0, segment_size) 180 | 181 | 182 | class FBTEXT(_FBLargeBinary, sqltypes.TEXT): 183 | __visit_name__ = "BLOB" 184 | 185 | def __init__( 186 | self, 187 | segment_size=None, 188 | charset=None, 189 | collation=None, 190 | ): 191 | super().__init__(1, segment_size, charset, collation) 192 | 193 | 194 | class _FBNumericInterval(_FBNumeric): 195 | # NUMERIC(18,9) -- Used for _FBInterval storage 196 | def __init__(self): 197 | super().__init__(precision=18, scale=9) 198 | 199 | 200 | class _FBInterval(sqltypes.Interval): 201 | """A type for ``datetime.timedelta()`` objects. 202 | 203 | Value is stored as number of days. 
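    For example, ``datetime.timedelta(hours=12)`` is bound as ``0.5`` and a
    stored value of ``2.25`` round-trips as ``datetime.timedelta(days=2, hours=6)``,
    because the backing column is a NUMERIC(18,9) holding a number of days
    (``total_seconds() / 86400`` on the way in, ``timedelta(days=value)`` on the
    way out).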
204 | """ 205 | 206 | # ToDo: Fix operations with TIME datatype (operand must be in seconds, not in days) 207 | # https://firebirdsql.org/file/documentation/html/en/refdocs/fblangref50/firebird-50-language-reference.html#fblangref50-datatypes-datetimeops 208 | 209 | impl = _FBNumericInterval 210 | cache_ok = True 211 | 212 | def __init__(self): 213 | super().__init__(native=False) 214 | 215 | def bind_processor(self, dialect: Dialect): 216 | impl_processor = self.impl_instance.bind_processor(dialect) 217 | if impl_processor: 218 | fixed_impl_processor = impl_processor 219 | 220 | def process(value: Optional[dt.timedelta]): 221 | dt_value = ( 222 | value.total_seconds() / 86400 223 | if value is not None 224 | else None 225 | ) 226 | return fixed_impl_processor(dt_value) 227 | 228 | else: 229 | 230 | def process(value: Optional[dt.timedelta]): 231 | return ( 232 | value.total_seconds() / 86400 233 | if value is not None 234 | else None 235 | ) 236 | 237 | return process 238 | 239 | def result_processor(self, dialect: Dialect, coltype: Any): 240 | impl_processor = self.impl_instance.result_processor(dialect, coltype) 241 | if impl_processor: 242 | fixed_impl_processor = impl_processor 243 | 244 | def process(value: Any) -> Optional[dt.timedelta]: 245 | dt_value = fixed_impl_processor(value) 246 | if dt_value is None: 247 | return None 248 | return dt.timedelta(days=dt_value) 249 | 250 | else: 251 | 252 | def process(value: Any) -> Optional[dt.timedelta]: 253 | return dt.timedelta(days=value) if value is not None else None 254 | 255 | return process 256 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pauldex/sqlalchemy-firebird/56a2b9fa4bc12b67a2e3939b675247838ac04ba2/test/__init__.py -------------------------------------------------------------------------------- /test/conftest.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.dialects import registry 2 | import pytest 3 | 4 | # setup default dialect for sqlalchemy 5 | try: 6 | import firebird.driver # is firebird-driver available? 
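    # With the registrations below, a plain "firebird://user:pass@host/path"
    # URL should resolve to firebird-driver when it is importable and fall back
    # to fdb otherwise, while "firebird+firebird://..." and "firebird+fdb://..."
    # select a driver explicitly (the URLs shown here are illustrative, not taken
    # from this repository's test configuration).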
7 | 8 | registry.register( 9 | "firebird", "sqlalchemy_firebird.firebird", "FBDialect_firebird" 10 | ) 11 | except ImportError: 12 | registry.register("firebird", "sqlalchemy_firebird.fdb", "FBDialect_fdb") 13 | 14 | registry.register("firebird.fdb", "sqlalchemy_firebird.fdb", "FBDialect_fdb") 15 | registry.register( 16 | "firebird.firebird", "sqlalchemy_firebird.firebird", "FBDialect_firebird" 17 | ) 18 | 19 | pytest.register_assert_rewrite("sqlalchemy.testing.assertions") 20 | 21 | # this happens after pytest.register_assert_rewrite to avoid pytest warning 22 | from sqlalchemy.testing.plugin.pytestplugin import * # noqa: F401, E402, F403 23 | -------------------------------------------------------------------------------- /test/test_compiler.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Date, Identity, and_ 2 | from sqlalchemy import cast 3 | from sqlalchemy import column 4 | from sqlalchemy import Column 5 | from sqlalchemy import Computed 6 | from sqlalchemy import exc 7 | from sqlalchemy import func 8 | from sqlalchemy import insert 9 | from sqlalchemy import Index 10 | from sqlalchemy import Integer 11 | from sqlalchemy import MetaData 12 | from sqlalchemy import schema 13 | from sqlalchemy import select 14 | from sqlalchemy import String 15 | from sqlalchemy import table 16 | from sqlalchemy import Table 17 | from sqlalchemy import testing 18 | from sqlalchemy import text 19 | from sqlalchemy import update 20 | from sqlalchemy.sql import sqltypes 21 | from sqlalchemy.testing import assert_raises_message 22 | from sqlalchemy.testing import AssertsCompiledSQL 23 | from sqlalchemy.testing import fixtures 24 | from sqlalchemy.testing.assertions import eq_ignore_whitespace 25 | from sqlalchemy.types import TypeEngine 26 | 27 | import sqlalchemy_firebird.types as FbTypes 28 | 29 | from sqlalchemy_firebird.firebird import FBDialect_firebird 30 | 31 | 32 | class CompileTest(fixtures.TablesTest, AssertsCompiledSQL): 33 | __dialect__ = FBDialect_firebird() 34 | 35 | def test_alias(self): 36 | t = table("sometable", column("col1"), column("col2")) 37 | s = select(t.alias()) 38 | self.assert_compile( 39 | s, 40 | "SELECT sometable_1.col1, sometable_1.col2 " 41 | "FROM sometable AS sometable_1", 42 | ) 43 | 44 | @testing.provide_metadata 45 | def test_function(self): 46 | self.assert_compile( 47 | func.foo(1, 2), 48 | "foo(CAST(:foo_1 AS INTEGER), CAST(:foo_2 AS INTEGER))", 49 | ) 50 | self.assert_compile(func.current_time(), "CURRENT_TIME") 51 | self.assert_compile(func.foo(), "foo") 52 | t = Table( 53 | "sometable", 54 | self.metadata, 55 | Column("col1", Integer), 56 | Column("col2", Integer), 57 | ) 58 | self.assert_compile( 59 | select(func.max(t.c.col1)), 60 | "SELECT max(sometable.col1) AS max_1 FROM sometable", 61 | ) 62 | 63 | def test_charset(self): 64 | """Exercise CHARACTER SET options on string types.""" 65 | columns = [ 66 | (FbTypes.FBCHAR, [1], {}, "CHAR(1)"), 67 | ( 68 | FbTypes.FBCHAR, 69 | [1], 70 | {"charset": "OCTETS"}, 71 | "CHAR(1) CHARACTER SET OCTETS", 72 | ), 73 | (FbTypes.FBVARCHAR, [1], {}, "VARCHAR(1)"), 74 | ( 75 | FbTypes.FBVARCHAR, 76 | [1], 77 | {"charset": "OCTETS"}, 78 | "VARCHAR(1) CHARACTER SET OCTETS", 79 | ), 80 | ] 81 | for type_, args, kw, res in columns: 82 | self.assert_compile(type_(*args, **kw), res) 83 | 84 | def test_quoting_initial_chars(self): 85 | self.assert_compile(column("_somecol"), '"_somecol"') 86 | self.assert_compile(column("$somecol"), '"$somecol"') 87 | 88 | # 89 | # 
Tests from postgresql/test_compiler.py 90 | # 91 | 92 | def test_plain_stringify_returning(self): 93 | t = Table( 94 | "t", 95 | MetaData(), 96 | Column("myid", Integer, primary_key=True), 97 | Column("name", String, server_default="some str"), 98 | Column("description", String, default=func.lower("hi")), 99 | ) 100 | stmt = t.insert().values().return_defaults() 101 | eq_ignore_whitespace( 102 | str(stmt.compile()), 103 | "INSERT INTO t (description) VALUES (lower(:lower_1)) " 104 | "RETURNING t.myid, t.name, t.description", 105 | ) 106 | 107 | def test_update_returning(self): 108 | table1 = table( 109 | "mytable", 110 | column("myid", Integer), 111 | column("name", String(128)), 112 | column("description", String(128)), 113 | ) 114 | u = ( 115 | update(table1) 116 | .values(dict(name="foo")) 117 | .returning(table1.c.myid, table1.c.name) 118 | ) 119 | self.assert_compile( 120 | u, 121 | "UPDATE mytable SET name=CAST(:name AS VARCHAR(128)) " 122 | "RETURNING mytable.myid, mytable.name", 123 | ) 124 | u = update(table1).values(dict(name="foo")).returning(table1) 125 | self.assert_compile( 126 | u, 127 | "UPDATE mytable SET name=CAST(:name AS VARCHAR(128)) " 128 | "RETURNING mytable.myid, mytable.name, " 129 | "mytable.description", 130 | ) 131 | u = ( 132 | update(table1) 133 | .values(dict(name="foo")) 134 | .returning(func.length(table1.c.name)) 135 | ) 136 | self.assert_compile( 137 | u, 138 | "UPDATE mytable SET name=CAST(:name AS VARCHAR(128)) " 139 | "RETURNING CHAR_LENGTH(mytable.name) AS length_1", 140 | ) 141 | 142 | def test_insert_returning(self): 143 | table1 = table( 144 | "mytable", 145 | column("myid", Integer), 146 | column("name", String(128)), 147 | column("description", String(128)), 148 | ) 149 | i = ( 150 | insert(table1) 151 | .values(dict(name="foo")) 152 | .returning(table1.c.myid, table1.c.name) 153 | ) 154 | self.assert_compile( 155 | i, 156 | "INSERT INTO mytable (name) VALUES " 157 | "(CAST(:name AS VARCHAR(128))) RETURNING mytable.myid, " 158 | "mytable.name", 159 | ) 160 | i = insert(table1).values(dict(name="foo")).returning(table1) 161 | self.assert_compile( 162 | i, 163 | "INSERT INTO mytable (name) VALUES " 164 | "(CAST(:name AS VARCHAR(128))) RETURNING mytable.myid, " 165 | "mytable.name, mytable.description", 166 | ) 167 | i = ( 168 | insert(table1) 169 | .values(dict(name="foo")) 170 | .returning(func.length(table1.c.name)) 171 | ) 172 | self.assert_compile( 173 | i, 174 | "INSERT INTO mytable (name) VALUES " 175 | "(CAST(:name AS VARCHAR(128))) RETURNING CHAR_LENGTH(mytable.name) " 176 | "AS length_1", 177 | ) 178 | 179 | @testing.fixture 180 | def column_expression_fixture(self): 181 | class MyString(TypeEngine): 182 | def column_expression(self, column): 183 | return func.lower(column) 184 | 185 | return table( 186 | "some_table", column("name", String), column("value", MyString) 187 | ) 188 | 189 | @testing.combinations("columns", "table", argnames="use_columns") 190 | def test_plain_returning_column_expression( 191 | self, column_expression_fixture, use_columns 192 | ): 193 | """test #8770""" 194 | table1 = column_expression_fixture 195 | 196 | if use_columns == "columns": 197 | stmt = insert(table1).returning(table1) 198 | else: 199 | stmt = insert(table1).returning(table1.c.name, table1.c.value) 200 | 201 | # Type MyString have render_bind_cast = False 202 | self.assert_compile( 203 | stmt, 204 | 'INSERT INTO some_table (name, "value") ' 205 | "VALUES (CAST(:name AS BLOB SUB_TYPE TEXT), :value) RETURNING some_table.name, " 206 | 
'lower(some_table."value") AS "value"', 207 | ) 208 | 209 | def test_cast_double_pg_double(self): 210 | """test #5465: 211 | 212 | test sqlalchemy Double/DOUBLE to Firebird DOUBLE 213 | """ 214 | d1 = sqltypes.Double 215 | 216 | stmt = select(cast(column("foo"), d1)) 217 | self.assert_compile( 218 | stmt, "SELECT CAST(foo AS DOUBLE) AS foo FROM rdb$database" 219 | ) 220 | 221 | def test_create_table_with_multiple_options(self): 222 | m = MetaData() 223 | tbl = Table( 224 | "atable", 225 | m, 226 | Column("id", Integer), 227 | prefixes=["GLOBAL TEMPORARY"], 228 | firebird_on_commit="PRESERVE ROWS", 229 | ) 230 | self.assert_compile( 231 | schema.CreateTable(tbl), 232 | "CREATE GLOBAL TEMPORARY TABLE atable (id INTEGER) " 233 | "ON COMMIT PRESERVE ROWS", 234 | ) 235 | 236 | def test_create_index_descending(self): 237 | m = MetaData() 238 | tbl = Table("testtbl", m, Column("data", Integer)) 239 | 240 | idx1 = Index("test_idx1", tbl.c.data, firebird_descending=True) 241 | self.assert_compile( 242 | schema.CreateIndex(idx1), 243 | "CREATE DESCENDING INDEX test_idx1 ON testtbl (data)", 244 | ) 245 | 246 | def test_create_partial_index(self): 247 | m = MetaData() 248 | tbl = Table("testtbl", m, Column("data", Integer)) 249 | idx = Index( 250 | "test_idx1", 251 | tbl.c.data, 252 | firebird_where=and_(tbl.c.data > 5, tbl.c.data < 10), 253 | ) 254 | idx = Index( 255 | "test_idx1", 256 | tbl.c.data, 257 | firebird_where=and_(tbl.c.data > 5, tbl.c.data < 10), 258 | ) 259 | 260 | # test quoting and all that 261 | 262 | idx2 = Index( 263 | "test_idx2", 264 | tbl.c.data, 265 | firebird_where=and_(tbl.c.data > "a", tbl.c.data < "b's"), 266 | ) 267 | self.assert_compile( 268 | schema.CreateIndex(idx), 269 | "CREATE INDEX test_idx1 ON testtbl (data) " 270 | "WHERE data > 5 AND data < 10", 271 | ) 272 | self.assert_compile( 273 | schema.CreateIndex(idx2), 274 | "CREATE INDEX test_idx2 ON testtbl (data) " 275 | "WHERE data > 'a' AND data < 'b''s'", 276 | ) 277 | 278 | idx3 = Index( 279 | "test_idx2", 280 | tbl.c.data, 281 | firebird_where=text("data > 'a' AND data < 'b''s'"), 282 | ) 283 | self.assert_compile( 284 | schema.CreateIndex(idx3), 285 | "CREATE INDEX test_idx2 ON testtbl (data) " 286 | "WHERE data > 'a' AND data < 'b''s'", 287 | ) 288 | 289 | def test_create_index_with_text_or_composite(self): 290 | m = MetaData() 291 | tbl = Table("testtbl", m, Column("d1", String), Column("d2", Integer)) 292 | 293 | idx = Index("test_idx1", text("x")) 294 | tbl.append_constraint(idx) 295 | 296 | idx2 = Index("test_idx2", text("y"), tbl.c.d2) 297 | 298 | self.assert_compile( 299 | schema.CreateIndex(idx), 300 | "CREATE INDEX test_idx1 ON testtbl COMPUTED BY (x)", 301 | ) 302 | self.assert_compile( 303 | schema.CreateIndex(idx2), 304 | "CREATE INDEX test_idx2 ON testtbl COMPUTED BY (y||d2)", 305 | ) 306 | 307 | def test_create_index_with_multiple_options(self): 308 | m = MetaData() 309 | tbl = Table("testtbl", m, Column("data", String)) 310 | 311 | idx1 = Index( 312 | "test_idx1", 313 | tbl.c.data, 314 | firebird_descending=True, 315 | firebird_where=and_(tbl.c.data > 5, tbl.c.data < 10), 316 | ) 317 | 318 | self.assert_compile( 319 | schema.CreateIndex(idx1), 320 | "CREATE DESCENDING INDEX test_idx1 ON testtbl " 321 | "(data) " 322 | "WHERE data > 5 AND data < 10", 323 | ) 324 | 325 | def test_create_index_expr_gets_parens(self): 326 | m = MetaData() 327 | tbl = Table("testtbl", m, Column("x", Integer), Column("y", Integer)) 328 | 329 | idx1 = Index("test_idx1", 5 // (tbl.c.x + tbl.c.y)) 330 | self.assert_compile( 
331 | schema.CreateIndex(idx1), 332 | "CREATE INDEX test_idx1 ON testtbl COMPUTED BY (5 / (x + y))", 333 | ) 334 | 335 | def test_create_index_literals(self): 336 | m = MetaData() 337 | tbl = Table("testtbl", m, Column("data", Integer)) 338 | 339 | idx1 = Index("test_idx1", tbl.c.data + 5) 340 | self.assert_compile( 341 | schema.CreateIndex(idx1), 342 | "CREATE INDEX test_idx1 ON testtbl COMPUTED BY (data + 5)", 343 | ) 344 | 345 | def test_substring(self): 346 | self.assert_compile( 347 | func.substring("abc", 1, 2), 348 | "SUBSTRING(CAST(:substring_1 AS BLOB SUB_TYPE TEXT) FROM CAST(:substring_2 AS INTEGER) FOR CAST(:substring_3 AS INTEGER))", 349 | ) 350 | self.assert_compile( 351 | func.substring("abc", 1), 352 | "SUBSTRING(CAST(:substring_1 AS BLOB SUB_TYPE TEXT) FROM CAST(:substring_2 AS INTEGER))", 353 | ) 354 | 355 | def test_for_update(self): 356 | table1 = table( 357 | "mytable", column("myid"), column("name"), column("description") 358 | ) 359 | 360 | self.assert_compile( 361 | table1.select().where(table1.c.myid == 7).with_for_update(), 362 | "SELECT mytable.myid, mytable.name, mytable.description " 363 | "FROM mytable WHERE mytable.myid = CAST(:myid_1 AS INTEGER) FOR UPDATE", 364 | ) 365 | 366 | self.assert_compile( 367 | table1.select() 368 | .where(table1.c.myid == 7) 369 | .with_for_update(nowait=True), 370 | "SELECT mytable.myid, mytable.name, mytable.description " 371 | "FROM mytable WHERE mytable.myid = CAST(:myid_1 AS INTEGER) FOR UPDATE WITH LOCK", 372 | ) 373 | 374 | self.assert_compile( 375 | table1.select() 376 | .where(table1.c.myid == 7) 377 | .with_for_update(skip_locked=True), 378 | "SELECT mytable.myid, mytable.name, mytable.description " 379 | "FROM mytable WHERE mytable.myid = CAST(:myid_1 AS INTEGER) " 380 | "FOR UPDATE WITH LOCK SKIP LOCKED", 381 | ) 382 | 383 | def test_reserved_words(self): 384 | table = Table( 385 | "pg_table", 386 | MetaData(), 387 | Column("col1", Integer), 388 | Column("character_length", Integer), 389 | ) 390 | x = select(table.c.col1, table.c.character_length) 391 | 392 | self.assert_compile( 393 | x, 394 | """SELECT pg_table.col1, pg_table."character_length" FROM pg_table""", 395 | ) 396 | 397 | @testing.provide_metadata 398 | @testing.combinations( 399 | ("no_persisted", "ignore"), ("persisted", True), id_="ia" 400 | ) 401 | def test_column_computed(self, persisted): 402 | kwargs = {"persisted": persisted} if persisted != "ignore" else {} 403 | 404 | t = Table( 405 | "t", 406 | self.metadata, 407 | Column("x", Integer), 408 | Column("y", Integer, Computed("x + 2", **kwargs)), 409 | ) 410 | if persisted == "ignore": 411 | self.assert_compile( 412 | schema.CreateTable(t), 413 | "CREATE TABLE t (x INTEGER, y INTEGER GENERATED " 414 | "ALWAYS AS (x + 2))", 415 | ) 416 | else: 417 | assert_raises_message( 418 | exc.CompileError, 419 | "Firebird computed columns do not support a persistence method", 420 | schema.CreateTable(t).compile, 421 | dialect=self.__dialect__, 422 | ) 423 | 424 | @testing.combinations(True, False) 425 | def test_column_identity(self, pk): 426 | # all other tests are in test_identity_column.py 427 | m = MetaData() 428 | t = Table( 429 | "t", 430 | m, 431 | Column( 432 | "y", 433 | Integer, 434 | Identity(always=True, start=4, increment=7), 435 | primary_key=pk, 436 | ), 437 | ) 438 | self.assert_compile( 439 | schema.CreateTable(t), 440 | "CREATE TABLE t (y INTEGER GENERATED ALWAYS AS IDENTITY " 441 | "(START WITH 4 INCREMENT BY 7)%s)" 442 | % (", PRIMARY KEY (y)" if pk else ""), 443 | ) 444 | 445 | def 
test_column_identity_null(self): 446 | # all other tests are in test_identity_column.py 447 | m = MetaData() 448 | t = Table( 449 | "t", 450 | m, 451 | Column( 452 | "y", 453 | Integer, 454 | Identity(always=True, start=4, increment=7), 455 | nullable=True, 456 | ), 457 | ) 458 | self.assert_compile( 459 | schema.CreateTable(t), 460 | "CREATE TABLE t (y INTEGER GENERATED ALWAYS AS IDENTITY " 461 | "(START WITH 4 INCREMENT BY 7) NULL)", 462 | ) 463 | 464 | @testing.fixture 465 | def update_tables(self): 466 | self.weather = table( 467 | "weather", 468 | column("temp_lo", Integer), 469 | column("temp_hi", Integer), 470 | column("prcp", Integer), 471 | column("city", String), 472 | column("date", Date), 473 | ) 474 | self.accounts = table( 475 | "accounts", 476 | column("sales_id", Integer), 477 | column("sales_person", Integer), 478 | column("contact_first_name", String), 479 | column("contact_last_name", String), 480 | column("name", String), 481 | ) 482 | self.salesmen = table( 483 | "salesmen", 484 | column("id", Integer), 485 | column("first_name", String), 486 | column("last_name", String), 487 | ) 488 | self.employees = table( 489 | "employees", 490 | column("id", Integer), 491 | column("sales_count", String), 492 | ) 493 | 494 | def test_bitwise_xor(self): 495 | c1 = column("c1", Integer) 496 | c2 = column("c2", Integer) 497 | self.assert_compile( 498 | select(c1.bitwise_xor(c2)), 499 | "SELECT BIN_XOR(c1, c2) AS anon_1 FROM rdb$database", 500 | ) 501 | -------------------------------------------------------------------------------- /test/test_dialect.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | from sqlalchemy import bindparam 4 | from sqlalchemy import cast 5 | from sqlalchemy import Column 6 | from sqlalchemy import DateTime 7 | from sqlalchemy import extract 8 | from sqlalchemy import func 9 | from sqlalchemy import Integer 10 | from sqlalchemy import literal 11 | from sqlalchemy import MetaData 12 | from sqlalchemy import select 13 | from sqlalchemy import Sequence 14 | from sqlalchemy import String 15 | from sqlalchemy import Table 16 | from sqlalchemy import testing 17 | from sqlalchemy import text 18 | from sqlalchemy.testing import config 19 | from sqlalchemy.testing import engines 20 | from sqlalchemy.testing import fixtures 21 | from sqlalchemy.testing.assertions import AssertsCompiledSQL 22 | from sqlalchemy.testing.assertions import AssertsExecutionResults 23 | from sqlalchemy.testing.assertions import eq_ 24 | 25 | 26 | class ConnectionTest(fixtures.TablesTest): 27 | def test_is_disconnect(self): 28 | try: 29 | with testing.db.begin() as first_conn: 30 | con1_id = first_conn.exec_driver_sql( 31 | "SELECT CURRENT_CONNECTION FROM rdb$database" 32 | ).scalar() 33 | 34 | with testing.db.begin() as second_conn: 35 | # Kills first_conn 36 | second_conn.exec_driver_sql( 37 | "DELETE FROM mon$attachments WHERE mon$attachment_id = ?", 38 | (con1_id,), 39 | ) 40 | 41 | # Attemps to read from first_conn 42 | first_conn.exec_driver_sql( 43 | "SELECT CURRENT_CONNECTION FROM rdb$database" 44 | ) 45 | 46 | assert False 47 | except Exception as err: 48 | eq_(testing.db.dialect.is_disconnect(err.orig, None, None), True) 49 | 50 | 51 | # 52 | # Tests from postgresql/test_dialect.py 53 | # 54 | 55 | 56 | class ExecuteManyTest(fixtures.TablesTest): 57 | __backend__ = True 58 | 59 | run_create_tables = "each" 60 | run_deletes = None 61 | 62 | @config.fixture() 63 | def connection(self): 64 | eng = 
engines.testing_engine(options={"use_reaper": False}) 65 | 66 | conn = eng.connect() 67 | trans = conn.begin() 68 | yield conn 69 | if trans.is_active: 70 | trans.rollback() 71 | conn.close() 72 | eng.dispose() 73 | 74 | @classmethod 75 | def define_tables(cls, metadata): 76 | Table( 77 | "data", 78 | metadata, 79 | Column("id", Integer, primary_key=True), 80 | Column("x", String), 81 | Column("y", String), 82 | Column("z", Integer, server_default="5"), 83 | ) 84 | 85 | Table( 86 | "Unitéble2", 87 | metadata, 88 | Column("méil", Integer, primary_key=True), 89 | Column("\u6e2c\u8a66", Integer), 90 | ) 91 | 92 | def test_insert_unicode_keys(self, connection): 93 | table = self.tables["Unitéble2"] 94 | 95 | stmt = table.insert() 96 | 97 | connection.execute( 98 | stmt, 99 | [ 100 | {"méil": 1, "\u6e2c\u8a66": 1}, 101 | {"méil": 2, "\u6e2c\u8a66": 2}, 102 | {"méil": 3, "\u6e2c\u8a66": 3}, 103 | ], 104 | ) 105 | 106 | eq_(connection.execute(table.select()).all(), [(1, 1), (2, 2), (3, 3)]) 107 | 108 | @testing.requires.identity_columns 109 | def test_update(self, connection): 110 | connection.execute( 111 | self.tables.data.insert(), 112 | [ 113 | {"x": "x1", "y": "y1"}, 114 | {"x": "x2", "y": "y2"}, 115 | {"x": "x3", "y": "y3"}, 116 | ], 117 | ) 118 | 119 | connection.execute( 120 | self.tables.data.update() 121 | .where(self.tables.data.c.x == bindparam("xval")) 122 | .values(y=bindparam("yval")), 123 | [{"xval": "x1", "yval": "y5"}, {"xval": "x3", "yval": "y6"}], 124 | ) 125 | eq_( 126 | connection.execute( 127 | select(self.tables.data).order_by(self.tables.data.c.id) 128 | ).fetchall(), 129 | [(1, "x1", "y5", 5), (2, "x2", "y2", 5), (3, "x3", "y6", 5)], 130 | ) 131 | 132 | 133 | class MiscBackendTest( 134 | fixtures.TestBase, AssertsExecutionResults, AssertsCompiledSQL 135 | ): 136 | __backend__ = True 137 | 138 | @testing.provide_metadata 139 | def test_date_reflection(self): 140 | has_timezones = testing.requires.datetime_timezone.enabled 141 | 142 | metadata = self.metadata 143 | Table( 144 | "fbdate", 145 | metadata, 146 | Column("date1", DateTime(timezone=has_timezones)), 147 | Column("date2", DateTime(timezone=False)), 148 | ) 149 | metadata.create_all(testing.db) 150 | m2 = MetaData() 151 | t2 = Table("fbdate", m2, autoload_with=testing.db) 152 | assert t2.c.date1.type.timezone is has_timezones 153 | assert t2.c.date2.type.timezone is False 154 | 155 | @testing.requires.datetime_timezone 156 | def test_extract(self, connection): 157 | fivedaysago = connection.execute( 158 | select(func.now().op("AT TIME ZONE")("UTC")) 159 | ).scalar() - datetime.timedelta(days=5) 160 | 161 | for field, exp in ( 162 | ("year", fivedaysago.year), 163 | ("month", fivedaysago.month), 164 | ("day", fivedaysago.day), 165 | ): 166 | r = connection.execute( 167 | select( 168 | extract( 169 | field, 170 | func.now().op("AT TIME ZONE")("UTC") 171 | + datetime.timedelta(days=-5), 172 | ) 173 | ) 174 | ).scalar() 175 | eq_(r, exp) 176 | 177 | @testing.provide_metadata 178 | def test_checksfor_sequence(self, connection): 179 | meta1 = self.metadata 180 | seq = Sequence("fooseq") 181 | t = Table("mytable", meta1, Column("col1", Integer, seq)) 182 | seq.drop(connection) 183 | connection.execute(text("CREATE SEQUENCE fooseq")) 184 | t.create(connection, checkfirst=True) 185 | 186 | @testing.requires.identity_columns 187 | def test_sequence_detection_tricky_names(self, metadata, connection): 188 | for tname, cname in [ 189 | ("tb1" * 30, "abc"), 190 | ("tb2", "abc" * 30), 191 | ("tb3" * 30, "abc" * 30), 192 | ("tb4", 
"abc"), 193 | ]: 194 | t = Table( 195 | tname[: connection.dialect.max_identifier_length], 196 | metadata, 197 | Column( 198 | cname[: connection.dialect.max_identifier_length], 199 | Integer, 200 | primary_key=True, 201 | ), 202 | ) 203 | t.create(connection) 204 | r = connection.execute(t.insert()) 205 | eq_(r.inserted_primary_key, (1,)) 206 | 207 | def test_quoted_name_bindparam_ok(self): 208 | from sqlalchemy.sql.elements import quoted_name 209 | 210 | with testing.db.connect() as conn: 211 | eq_( 212 | conn.scalar( 213 | select( 214 | cast( 215 | literal(quoted_name("some_name", False)), 216 | String, 217 | ) 218 | ) 219 | ), 220 | "some_name", 221 | ) 222 | 223 | @testing.provide_metadata 224 | @testing.requires.identity_columns 225 | def test_preexecute_passivedefault(self, connection): 226 | """test that when we get a primary key column back from 227 | reflecting a table which has a default value on it, we pre- 228 | execute that DefaultClause upon insert.""" 229 | 230 | meta = self.metadata 231 | connection.execute( 232 | text( 233 | """ 234 | CREATE TABLE speedy_users 235 | ( 236 | speedy_user_id INTEGER GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, 237 | user_name VARCHAR(30) NOT NULL, 238 | user_password VARCHAR(30) NOT NULL 239 | ); 240 | """ 241 | ) 242 | ) 243 | connection.commit() 244 | 245 | t = Table("speedy_users", meta, autoload_with=connection) 246 | r = connection.execute( 247 | t.insert(), dict(user_name="user", user_password="lala") 248 | ) 249 | eq_(r.inserted_primary_key, (1,)) 250 | result = connection.execute(t.select()).fetchall() 251 | assert result == [(1, "user", "lala")] 252 | connection.execute(text("DROP TABLE speedy_users")) 253 | 254 | def test_select_rowcount(self): 255 | # https://firebird-driver.readthedocs.io/en/latest/python-db-api-compliance.html#caveats 256 | 257 | # Determining rowcount for SELECT statements is problematic: the 258 | # rowcount is reported as zero until at least one row has been fetched 259 | # from the result set, and the rowcount is misreported if the result 260 | # set is larger than 1302 rows. 
261 | 262 | conn = testing.db.connect() 263 | cursor = conn.exec_driver_sql( 264 | "SELECT 1 FROM rdb$database UNION ALL SELECT 2 FROM rdb$database" 265 | ) 266 | eq_(cursor.rowcount, 0) 267 | -------------------------------------------------------------------------------- /test/test_query.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import pytest 3 | 4 | from sqlalchemy import Column 5 | from sqlalchemy import Date 6 | from sqlalchemy import DateTime 7 | from sqlalchemy import exc 8 | from sqlalchemy import extract 9 | from sqlalchemy import ForeignKey 10 | from sqlalchemy import func 11 | from sqlalchemy import Integer 12 | from sqlalchemy import literal 13 | from sqlalchemy import MetaData 14 | from sqlalchemy import select 15 | from sqlalchemy import Sequence 16 | from sqlalchemy import String 17 | from sqlalchemy import Table 18 | from sqlalchemy import testing 19 | from sqlalchemy import text 20 | from sqlalchemy import Time 21 | from sqlalchemy.testing import assert_raises 22 | from sqlalchemy.testing import AssertsExecutionResults 23 | from sqlalchemy.testing import engines 24 | from sqlalchemy.testing import eq_ 25 | from sqlalchemy.testing import expect_warnings 26 | from sqlalchemy.testing import fixtures 27 | from sqlalchemy.testing import requires 28 | from sqlalchemy.testing import skip_if 29 | 30 | from sqlalchemy_firebird.types import _FBInterval 31 | 32 | 33 | class QueryTest(fixtures.TestBase): 34 | @testing.provide_metadata 35 | def test_strlen(self, connection): 36 | metadata = self.metadata 37 | 38 | t = Table( 39 | "t1", 40 | metadata, 41 | Column("id", Integer, Sequence("t1idseq"), primary_key=True), 42 | Column("name", String(10)), 43 | ) 44 | metadata.create_all(testing.db) 45 | connection.execute(t.insert().values(dict(name="dante"))) 46 | connection.execute(t.insert().values(dict(name="alighieri"))) 47 | eq_( 48 | connection.execute( 49 | select(func.count(t.c.id)).where(func.length(t.c.name) == 5) 50 | ).scalar(), 51 | 1, 52 | ) 53 | 54 | def test_percents_in_text(self, connection): 55 | for expr, result in ( 56 | (text("select '%' from rdb$database"), "%"), 57 | (text("select '%%' from rdb$database"), "%%"), 58 | (text("select '%%%' from rdb$database"), "%%%"), 59 | ( 60 | text("select 'hello % world' from rdb$database"), 61 | "hello % world", 62 | ), 63 | ): 64 | eq_(connection.scalar(expr), result) 65 | 66 | 67 | # 68 | # Tests from postgresql/test_query.py 69 | # 70 | 71 | 72 | class FunctionTypingTest(fixtures.TestBase, AssertsExecutionResults): 73 | __backend__ = True 74 | 75 | def test_count_star(self, connection): 76 | eq_(connection.scalar(func.count("*")), 1) 77 | 78 | def test_count_int(self, connection): 79 | eq_(connection.scalar(func.count(1)), 1) 80 | 81 | 82 | class InsertTest(fixtures.TestBase, AssertsExecutionResults): 83 | __backend__ = True 84 | 85 | @skip_if( 86 | lambda config: config.db.dialect.driver == "fdb", 87 | "Driver fdb hangs in this test.", 88 | ) 89 | def test_foreignkey_missing_insert(self, metadata, connection): 90 | Table( 91 | "t1", 92 | metadata, 93 | Column("id", Integer, primary_key=True), 94 | ) 95 | t2 = Table( 96 | "t2", 97 | metadata, 98 | Column("id", Integer, ForeignKey("t1.id"), primary_key=True), 99 | ) 100 | 101 | metadata.create_all(connection) 102 | 103 | # want to ensure that "null value in column "id" violates not- 104 | # null constraint" is raised (IntegrityError on psycoopg2, but 105 | # ProgrammingError on pg8000), and not "ProgrammingError: 
106 | # (ProgrammingError) relationship "t2_id_seq" does not exist". 107 | # the latter corresponds to autoincrement behavior, which is not 108 | # the case here due to the foreign key. 109 | 110 | with expect_warnings(".*has no Python-side or server-side default.*"): 111 | assert_raises( 112 | (exc.DatabaseError), 113 | connection.execute, 114 | t2.insert(), 115 | ) 116 | 117 | def test_sequence_insert(self, metadata, connection): 118 | table = Table( 119 | "testtable", 120 | metadata, 121 | Column("id", Integer, Sequence("my_seq"), primary_key=True), 122 | Column("data", String(30)), 123 | ) 124 | metadata.create_all(connection) 125 | self._assert_data_with_sequence_returning(connection, table, "my_seq") 126 | 127 | # This test fails on Firebird 2.5/fdb due to the wrong collation being reflected. 128 | @testing.requires.firebird_3_or_higher 129 | def test_opt_sequence_insert(self, metadata, connection): 130 | table = Table( 131 | "testtable", 132 | metadata, 133 | Column( 134 | "id", 135 | Integer, 136 | Sequence("my_seq", optional=True), 137 | primary_key=True, 138 | ), 139 | Column("data", String(30)), 140 | ) 141 | metadata.create_all(connection) 142 | self._assert_data_autoincrement_returning( 143 | connection, table, pk_sequence="my_seq" 144 | ) 145 | 146 | @skip_if( 147 | lambda config: config.db.dialect.driver == "fdb", 148 | "Driver fdb hangs in this test.", 149 | ) 150 | def test_autoincrement_insert(self, metadata, connection): 151 | table = Table( 152 | "testtable", 153 | metadata, 154 | Column("id", Integer, primary_key=True), 155 | Column("data", String(30)), 156 | ) 157 | metadata.create_all(connection) 158 | self._assert_data_autoincrement_returning(connection, table) 159 | 160 | @skip_if( 161 | lambda config: config.db.dialect.driver == "fdb", 162 | "Driver fdb hangs in this test.", 163 | ) 164 | def test_noautoincrement_insert(self, metadata, connection): 165 | table = Table( 166 | "testtable", 167 | metadata, 168 | Column("id", Integer, primary_key=True, autoincrement=False), 169 | Column("data", String(30)), 170 | ) 171 | metadata.create_all(connection) 172 | self._assert_data_noautoincrement(connection, table) 173 | 174 | def _assert_data_autoincrement(self, connection, table): 175 | """ 176 | invoked by: 177 | * test_opt_sequence_insert 178 | * test_autoincrement_insert 179 | """ 180 | 181 | with self.sql_execution_asserter(connection) as asserter: 182 | conn = connection 183 | 184 | # execute with explicit id 185 | r = conn.execute(table.insert(), {"id": 30, "data": "d1"}) 186 | eq_(r.inserted_primary_key, (30,)) 187 | 188 | # execute with prefetch id 189 | s = table.insert() 190 | r = conn.execute(s, {"data": "d2"}) 191 | eq_(r.inserted_primary_key, (1,)) 192 | 193 | # executemany with explicit ids 194 | conn.execute( 195 | table.insert(), 196 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 197 | ) 198 | 199 | # executemany, uses SERIAL 200 | conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 201 | 202 | # single execute, explicit id, inline 203 | conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 204 | 205 | # single execute, inline, uses SERIAL 206 | conn.execute(table.insert().inline(), {"data": "d8"}) 207 | 208 | eq_( 209 | conn.execute(table.select()).fetchall(), 210 | [ 211 | (30, "d1"), 212 | (1, "d2"), 213 | (31, "d3"), 214 | (32, "d4"), 215 | (2, "d5"), 216 | (3, "d6"), 217 | (33, "d7"), 218 | (4, "d8"), 219 | ], 220 | ) 221 | 222 | conn.execute(table.delete()) 223 | 224 | # test the same series of events using a reflected 
version of the table 225 | 226 | m2 = MetaData() 227 | table = Table(table.name, m2, autoload_with=connection) 228 | 229 | with self.sql_execution_asserter(connection) as asserter: 230 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 231 | r = conn.execute(table.insert(), {"data": "d2"}) 232 | eq_(r.inserted_primary_key, (5,)) 233 | conn.execute( 234 | table.insert(), 235 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 236 | ) 237 | conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 238 | conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 239 | conn.execute(table.insert().inline(), {"data": "d8"}) 240 | 241 | eq_( 242 | conn.execute(table.select()).fetchall(), 243 | [ 244 | (30, "d1"), 245 | (5, "d2"), 246 | (31, "d3"), 247 | (32, "d4"), 248 | (6, "d5"), 249 | (7, "d6"), 250 | (33, "d7"), 251 | (8, "d8"), 252 | ], 253 | ) 254 | 255 | def _assert_data_autoincrement_returning( 256 | self, connection, table, pk_sequence=None 257 | ): 258 | """ 259 | invoked by: 260 | * test_opt_sequence_returning_insert 261 | * test_autoincrement_returning_insert 262 | """ 263 | with self.sql_execution_asserter(connection) as asserter: 264 | conn = connection 265 | 266 | # execute with explicit id 267 | r = conn.execute(table.insert(), {"id": 30, "data": "d1"}) 268 | eq_(r.inserted_primary_key, (30,)) 269 | 270 | # execute with prefetch id 271 | r = conn.execute(table.insert(), {"data": "d2"}) 272 | eq_(r.inserted_primary_key, (1,)) 273 | 274 | # executemany with explicit ids 275 | conn.execute( 276 | table.insert(), 277 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 278 | ) 279 | 280 | # executemany, uses SERIAL 281 | r = conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 282 | 283 | # single execute, explicit id, inline 284 | r = conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 285 | 286 | # single execute, inline, uses SERIAL 287 | r = conn.execute(table.insert().inline(), {"data": "d8"}) 288 | 289 | eq_( 290 | conn.execute(table.select()).fetchall(), 291 | [ 292 | (30, "d1"), 293 | (1, "d2"), 294 | (31, "d3"), 295 | (32, "d4"), 296 | (2, "d5"), 297 | (3, "d6"), 298 | (33, "d7"), 299 | (4, "d8"), 300 | ], 301 | ) 302 | conn.execute(table.delete()) 303 | 304 | # test the same series of events using a reflected version of the table 305 | 306 | m2 = MetaData() 307 | old_table = table 308 | table = Table(table.name, m2, autoload_with=connection) 309 | 310 | # Firebird has no metadata to know that we are using this sequence as the primary key generator. 311 | # Override the reflected information to add this information. 
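        # With the default restored below, rows inserted without an explicit "id"
        # draw their primary key from the named sequence again (presumably via
        # NEXT VALUE FOR the sequence, or GEN_ID on older servers), which is what
        # the reflected-table assertions that follow rely on.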
312 | if pk_sequence: 313 | table.columns[0].default = Sequence(pk_sequence) 314 | 315 | with self.sql_execution_asserter(connection) as asserter: 316 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 317 | r = conn.execute(table.insert(), {"data": "d2"}) 318 | eq_(r.inserted_primary_key, (5,)) 319 | conn.execute( 320 | table.insert(), 321 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 322 | ) 323 | conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 324 | conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 325 | conn.execute(table.insert().inline(), {"data": "d8"}) 326 | 327 | eq_( 328 | conn.execute(table.select()).fetchall(), 329 | [ 330 | (30, "d1"), 331 | (5, "d2"), 332 | (31, "d3"), 333 | (32, "d4"), 334 | (6, "d5"), 335 | (7, "d6"), 336 | (33, "d7"), 337 | (8, "d8"), 338 | ], 339 | ) 340 | 341 | def _assert_data_with_sequence(self, connection, table, seqname): 342 | """ 343 | invoked by: 344 | * test_sequence_insert 345 | """ 346 | 347 | with self.sql_execution_asserter(connection) as asserter: 348 | conn = connection 349 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 350 | conn.execute(table.insert(), {"data": "d2"}) 351 | conn.execute( 352 | table.insert(), 353 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 354 | ) 355 | conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 356 | conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 357 | conn.execute(table.insert().inline(), {"data": "d8"}) 358 | 359 | eq_( 360 | conn.execute(table.select()).fetchall(), 361 | [ 362 | (30, "d1"), 363 | (1, "d2"), 364 | (31, "d3"), 365 | (32, "d4"), 366 | (2, "d5"), 367 | (3, "d6"), 368 | (33, "d7"), 369 | (4, "d8"), 370 | ], 371 | ) 372 | 373 | def _assert_data_with_sequence_returning(self, connection, table, seqname): 374 | """ 375 | invoked by: 376 | * test_sequence_returning_insert 377 | """ 378 | 379 | with self.sql_execution_asserter(connection) as asserter: 380 | conn = connection 381 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 382 | conn.execute(table.insert(), {"data": "d2"}) 383 | conn.execute( 384 | table.insert(), 385 | [{"id": 31, "data": "d3"}, {"id": 32, "data": "d4"}], 386 | ) 387 | conn.execute(table.insert(), [{"data": "d5"}, {"data": "d6"}]) 388 | conn.execute(table.insert().inline(), {"id": 33, "data": "d7"}) 389 | conn.execute(table.insert().inline(), {"data": "d8"}) 390 | 391 | eq_( 392 | connection.execute(table.select()).fetchall(), 393 | [ 394 | (30, "d1"), 395 | (1, "d2"), 396 | (31, "d3"), 397 | (32, "d4"), 398 | (2, "d5"), 399 | (3, "d6"), 400 | (33, "d7"), 401 | (4, "d8"), 402 | ], 403 | ) 404 | 405 | def _assert_data_noautoincrement(self, connection, table): 406 | """ 407 | invoked by: 408 | * test_noautoincrement_insert 409 | """ 410 | 411 | # turning off the cache because we are checking for compile-time warnings 412 | connection.execution_options(compiled_cache=None) 413 | 414 | conn = connection 415 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 416 | 417 | with conn.begin_nested() as nested: 418 | with expect_warnings( 419 | ".*has no Python-side or server-side default.*" 420 | ): 421 | assert_raises( 422 | (exc.DatabaseError), 423 | conn.execute, 424 | table.insert(), 425 | {"data": "d2"}, 426 | ) 427 | nested.rollback() 428 | 429 | with conn.begin_nested() as nested: 430 | with expect_warnings( 431 | ".*has no Python-side or server-side default.*" 432 | ): 433 | assert_raises( 434 | (exc.DatabaseError), 435 | conn.execute, 436 | table.insert(), 437 | [{"data": 
"d2"}, {"data": "d3"}], 438 | ) 439 | nested.rollback() 440 | 441 | with conn.begin_nested() as nested: 442 | with expect_warnings( 443 | ".*has no Python-side or server-side default.*" 444 | ): 445 | assert_raises( 446 | (exc.DatabaseError), 447 | conn.execute, 448 | table.insert(), 449 | {"data": "d2"}, 450 | ) 451 | nested.rollback() 452 | 453 | with conn.begin_nested() as nested: 454 | with expect_warnings( 455 | ".*has no Python-side or server-side default.*" 456 | ): 457 | assert_raises( 458 | (exc.DatabaseError), 459 | conn.execute, 460 | table.insert(), 461 | [{"data": "d2"}, {"data": "d3"}], 462 | ) 463 | nested.rollback() 464 | 465 | conn.execute( 466 | table.insert(), 467 | [{"id": 31, "data": "d2"}, {"id": 32, "data": "d3"}], 468 | ) 469 | conn.execute(table.insert().inline(), {"id": 33, "data": "d4"}) 470 | eq_( 471 | conn.execute(table.select()).fetchall(), 472 | [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")], 473 | ) 474 | conn.execute(table.delete()) 475 | 476 | # test the same series of events using a reflected version of the table 477 | 478 | m2 = MetaData() 479 | table = Table(table.name, m2, autoload_with=connection) 480 | conn = connection 481 | 482 | conn.execute(table.insert(), {"id": 30, "data": "d1"}) 483 | 484 | with conn.begin_nested() as nested: 485 | with expect_warnings( 486 | ".*has no Python-side or server-side default.*" 487 | ): 488 | assert_raises( 489 | (exc.DatabaseError), 490 | conn.execute, 491 | table.insert(), 492 | {"data": "d2"}, 493 | ) 494 | nested.rollback() 495 | 496 | with conn.begin_nested() as nested: 497 | with expect_warnings( 498 | ".*has no Python-side or server-side default.*" 499 | ): 500 | assert_raises( 501 | (exc.DatabaseError), 502 | conn.execute, 503 | table.insert(), 504 | [{"data": "d2"}, {"data": "d3"}], 505 | ) 506 | nested.rollback() 507 | 508 | conn.execute( 509 | table.insert(), 510 | [{"id": 31, "data": "d2"}, {"id": 32, "data": "d3"}], 511 | ) 512 | conn.execute(table.insert().inline(), {"id": 33, "data": "d4"}) 513 | eq_( 514 | conn.execute(table.select()).fetchall(), 515 | [(30, "d1"), (31, "d2"), (32, "d3"), (33, "d4")], 516 | ) 517 | 518 | 519 | class ExtractTest(fixtures.TablesTest): 520 | __backend__ = True 521 | 522 | run_inserts = "once" 523 | run_deletes = None 524 | 525 | class TZ(datetime.tzinfo): 526 | def tzname(self, dt): 527 | return "UTC+04:00" 528 | 529 | def utcoffset(self, dt): 530 | return datetime.timedelta(hours=4) 531 | 532 | @classmethod 533 | def setup_bind(cls): 534 | from sqlalchemy import event 535 | 536 | eng = engines.testing_engine(options={"scope": "class"}) 537 | 538 | @event.listens_for(eng, "connect") 539 | def connect(dbapi_conn, rec): 540 | if requires.datetime_timezone.enabled: 541 | cursor = dbapi_conn.cursor() 542 | cursor.execute("SET TIME ZONE 'UTC'") 543 | cursor.close() 544 | 545 | return eng 546 | 547 | @classmethod 548 | def define_tables(cls, metadata): 549 | Table( 550 | "t", 551 | metadata, 552 | Column("dtme", DateTime), 553 | Column("dt", Date), 554 | Column("tm", Time), 555 | Column("intv", _FBInterval), 556 | Column("dttz", DateTime(timezone=True)), 557 | ) 558 | 559 | @classmethod 560 | def insert_data(cls, connection): 561 | connection.execute( 562 | cls.tables.t.insert(), 563 | { 564 | "dtme": datetime.datetime(2012, 5, 10, 12, 15, 25), 565 | "dt": datetime.date(2012, 5, 10), 566 | "tm": datetime.time(12, 15, 25), 567 | "intv": datetime.timedelta(seconds=570), 568 | "dttz": datetime.datetime( 569 | 2012, 5, 10, 12, 15, 25, tzinfo=cls.TZ() 570 | ), 571 | }, 572 | ) 
573 | 574 | def _test(self, connection, expr, field="all", overrides=None): 575 | t = self.tables.t 576 | 577 | if field == "all": 578 | fields = { 579 | "year": 2012, 580 | "month": 5, 581 | "day": 10, 582 | "hour": 12, 583 | "minute": 15, 584 | } 585 | elif field == "time": 586 | fields = {"hour": 12, "minute": 15, "second": 25} 587 | elif field == "date": 588 | fields = {"year": 2012, "month": 5, "day": 10} 589 | elif field == "all+tz": 590 | fields = { 591 | "year": 2012, 592 | "month": 5, 593 | "day": 10, 594 | "hour": 12, 595 | "timezone_hour": 4, 596 | } 597 | else: 598 | fields = field 599 | 600 | if overrides: 601 | fields.update(overrides) 602 | 603 | for field in fields: 604 | try: 605 | result = connection.execute( 606 | select(extract(field, expr)).select_from(t) 607 | ).scalar() 608 | eq_(result, fields[field]) 609 | except exc.DatabaseError as e: 610 | # Ignores "Specified EXTRACT part does not exist in input datatype" error. 611 | if "EXTRACT part does not exist" not in str(e): 612 | raise 613 | 614 | def test_one(self, connection): 615 | t = self.tables.t 616 | self._test(connection, t.c.dtme, "all") 617 | 618 | def test_two(self, connection): 619 | t = self.tables.t 620 | self._test( 621 | connection, 622 | t.c.dtme + t.c.intv, 623 | overrides={"minute": 24}, 624 | ) 625 | 626 | def test_three(self, connection): 627 | self.tables.t 628 | 629 | actual_ts = self.bind.connect().execute( 630 | func.current_timestamp() 631 | ).scalar() - datetime.timedelta(days=5) 632 | self._test( 633 | connection, 634 | func.current_timestamp() - datetime.timedelta(days=5), 635 | { 636 | "hour": actual_ts.hour, 637 | "year": actual_ts.year, 638 | "month": actual_ts.month, 639 | }, 640 | ) 641 | 642 | def test_four(self, connection): 643 | t = self.tables.t 644 | self._test( 645 | connection, 646 | datetime.timedelta(days=5) + t.c.dt, 647 | overrides={ 648 | "day": 15, 649 | "hour": 0, 650 | "minute": 0, 651 | }, 652 | ) 653 | 654 | def test_five(self, connection): 655 | t = self.tables.t 656 | self._test( 657 | connection, 658 | func.coalesce(t.c.dtme, func.current_timestamp()), 659 | ) 660 | 661 | @pytest.mark.skip( 662 | reason="Fix operations with TIME datatype (operand must be in seconds, not in days)" 663 | ) 664 | def test_six(self, connection): 665 | t = self.tables.t 666 | self._test( 667 | connection, 668 | t.c.tm + datetime.timedelta(seconds=30), 669 | "time", 670 | overrides={"second": 55}, 671 | ) 672 | 673 | def test_seven(self, connection): 674 | self._test( 675 | connection, 676 | literal(datetime.timedelta(seconds=10)) 677 | - literal(datetime.timedelta(seconds=10)), 678 | "all", 679 | overrides={ 680 | "hour": 0, 681 | "minute": 0, 682 | "month": 0, 683 | "year": 0, 684 | "day": 0, 685 | }, 686 | ) 687 | 688 | @pytest.mark.skip( 689 | reason="Fix operations with TIME datatype (operand must be in seconds, not in days)" 690 | ) 691 | def test_eight(self, connection): 692 | t = self.tables.t 693 | self._test( 694 | connection, 695 | t.c.tm + datetime.timedelta(seconds=30), 696 | {"hour": 12, "minute": 15, "second": 55}, 697 | ) 698 | 699 | def test_nine(self, connection): 700 | self._test(connection, text("t.dt + t.tm")) 701 | 702 | def test_ten(self, connection): 703 | t = self.tables.t 704 | self._test(connection, t.c.dt + t.c.tm) 705 | 706 | def test_eleven(self, connection): 707 | self._test( 708 | connection, 709 | func.current_timestamp() - func.current_timestamp(), 710 | {"year": 0, "month": 0, "day": 0, "hour": 0}, 711 | ) 712 | 713 | @requires.datetime_timezone 714 | 
def test_twelve(self, connection): 715 | t = self.tables.t 716 | 717 | actual_ts = connection.scalar( 718 | func.current_timestamp() 719 | ) - datetime.datetime(2012, 5, 10, 12, 15, 25, tzinfo=self.TZ()) 720 | 721 | self._test( 722 | connection, 723 | func.current_timestamp() - t.c.dttz, 724 | {"day": actual_ts.days}, 725 | ) 726 | 727 | @requires.datetime_timezone 728 | def test_thirteen(self, connection): 729 | t = self.tables.t 730 | self._test(connection, t.c.dttz, "all+tz") 731 | 732 | def test_fourteen(self, connection): 733 | t = self.tables.t 734 | self._test(connection, t.c.tm, "time") 735 | 736 | def test_fifteen(self, connection): 737 | t = self.tables.t 738 | self._test( 739 | connection, 740 | datetime.timedelta(days=5) + t.c.dtme, 741 | overrides={"day": 15}, 742 | ) 743 | -------------------------------------------------------------------------------- /test/test_reflection.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import BigInteger 3 | from sqlalchemy import Column 4 | from sqlalchemy import exc 5 | from sqlalchemy import ForeignKey 6 | from sqlalchemy import Identity 7 | from sqlalchemy import Index 8 | from sqlalchemy import inspect 9 | from sqlalchemy import Integer 10 | from sqlalchemy import join 11 | from sqlalchemy import MetaData 12 | from sqlalchemy import PrimaryKeyConstraint 13 | from sqlalchemy import Sequence 14 | from sqlalchemy import SmallInteger 15 | from sqlalchemy import String 16 | from sqlalchemy import Table 17 | from sqlalchemy import testing 18 | from sqlalchemy import UniqueConstraint 19 | from sqlalchemy.schema import CreateIndex 20 | from sqlalchemy.sql.schema import CheckConstraint 21 | from sqlalchemy.testing import AssertsCompiledSQL 22 | from sqlalchemy.testing import fixtures 23 | from sqlalchemy.testing.assertions import AssertsExecutionResults 24 | from sqlalchemy.testing.assertions import ComparesIndexes 25 | from sqlalchemy.testing.assertions import eq_ 26 | from sqlalchemy.testing.assertions import is_ 27 | from sqlalchemy.testing.assertions import is_true 28 | 29 | from sqlalchemy import MetaData 30 | from sqlalchemy import Table 31 | from sqlalchemy import testing 32 | from sqlalchemy.testing import AssertsExecutionResults 33 | from sqlalchemy.testing import eq_ 34 | from sqlalchemy.testing import fixtures 35 | 36 | from sqlalchemy_firebird.types import FBINTEGER 37 | 38 | # 39 | # Tests from postgresql/test_reflection.py 40 | # 41 | 42 | 43 | class ReflectionFixtures: 44 | @testing.fixture( 45 | params=[ 46 | ("engine", True), 47 | ("connection", True), 48 | ("engine", False), 49 | ("connection", False), 50 | ] 51 | ) 52 | def inspect_fixture(self, request, metadata, testing_engine): 53 | engine, future = request.param 54 | 55 | eng = testing_engine(future=future) 56 | 57 | conn = eng.connect() 58 | 59 | if engine == "connection": 60 | yield inspect(eng), conn 61 | else: 62 | yield inspect(conn), conn 63 | 64 | conn.close() 65 | 66 | 67 | class DomainReflectionTest(fixtures.TestBase, AssertsExecutionResults): 68 | """Test Firebird domains""" 69 | 70 | __backend__ = True 71 | 72 | @classmethod 73 | def setup_test_class(cls): 74 | with testing.db.begin() as con: 75 | for ddl in [ 76 | "CREATE DOMAIN testdomain AS INTEGER DEFAULT 42 NOT NULL", 77 | "CREATE DOMAIN testdomain2 AS INTEGER DEFAULT 0", 78 | 'CREATE DOMAIN "Quoted.Domain" AS INTEGER DEFAULT 0', 79 | "CREATE DOMAIN nullable_domain AS VARCHAR(30) CHECK (VALUE IN('FOO', 'BAR'))", 80 | "CREATE DOMAIN 
not_nullable_domain AS VARCHAR(30) NOT NULL", 81 | "CREATE DOMAIN my_int AS int CHECK (VALUE > 1)", 82 | ]: 83 | try: 84 | con.exec_driver_sql(ddl) 85 | except exc.DBAPIError as e: 86 | if "already exists" not in str(e): 87 | raise e 88 | con.exec_driver_sql( 89 | "CREATE TABLE testtable (question integer, answer " 90 | "testdomain)" 91 | ) 92 | con.exec_driver_sql( 93 | "CREATE TABLE testtable2(question " 94 | "integer, answer testdomain2, anything integer)" 95 | ) 96 | con.exec_driver_sql( 97 | 'CREATE TABLE quote_test (id integer, data "Quoted.Domain")' 98 | ) 99 | con.exec_driver_sql( 100 | "CREATE TABLE nullable_domain_test " 101 | "(not_nullable_domain_col nullable_domain not null," 102 | "nullable_local not_nullable_domain)" 103 | ) 104 | 105 | @classmethod 106 | def teardown_test_class(cls): 107 | with testing.db.begin() as con: 108 | con.exec_driver_sql("DROP TABLE nullable_domain_test") 109 | con.exec_driver_sql("DROP TABLE quote_test") 110 | con.exec_driver_sql("DROP TABLE testtable2") 111 | con.exec_driver_sql("DROP TABLE testtable") 112 | con.exec_driver_sql("DROP DOMAIN my_int") 113 | con.exec_driver_sql("DROP DOMAIN not_nullable_domain") 114 | con.exec_driver_sql("DROP DOMAIN nullable_domain") 115 | con.exec_driver_sql('DROP DOMAIN "Quoted.Domain"') 116 | con.exec_driver_sql("DROP DOMAIN testdomain2") 117 | con.exec_driver_sql("DROP DOMAIN testdomain") 118 | 119 | def test_table_is_reflected(self, connection): 120 | metadata = MetaData() 121 | table1 = Table("testtable", metadata, autoload_with=connection) 122 | eq_( 123 | set(table1.columns.keys()), 124 | {"question", "answer"}, 125 | "Columns of reflected table didn't equal expected columns", 126 | ) 127 | assert isinstance(table1.c.answer.type, Integer) 128 | 129 | table2 = Table( 130 | "testtable2", 131 | metadata, 132 | autoload_with=connection, 133 | ) 134 | eq_( 135 | set(table2.columns.keys()), 136 | {"question", "answer", "anything"}, 137 | "Columns of reflected table didn't equal expected columns", 138 | ) 139 | assert isinstance(table2.c.anything.type, Integer) 140 | 141 | def test_nullable_from_domain(self, connection): 142 | metadata = MetaData() 143 | table = Table( 144 | "nullable_domain_test", metadata, autoload_with=connection 145 | ) 146 | is_(table.c.not_nullable_domain_col.nullable, False) 147 | is_(table.c.nullable_local.nullable, False) 148 | 149 | def test_domain_is_reflected(self, connection): 150 | metadata = MetaData() 151 | table1 = Table("testtable", metadata, autoload_with=connection) 152 | eq_( 153 | str(table1.columns.answer.server_default.arg), 154 | "42", 155 | "Reflected default value didn't equal expected value", 156 | ) 157 | assert ( 158 | not table1.columns.answer.nullable 159 | ), "Expected reflected column to not be nullable." 160 | 161 | table2 = Table( 162 | "testtable2", 163 | metadata, 164 | autoload_with=connection, 165 | ) 166 | eq_( 167 | str(table2.columns.answer.server_default.arg), 168 | "0", 169 | "Reflected default value didn't equal expected value", 170 | ) 171 | assert ( 172 | table2.columns.answer.nullable 173 | ), "Expected reflected column to be nullable." 
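    # To summarize the mapping exercised above (from the DDL in setup_test_class):
    # "testdomain AS INTEGER DEFAULT 42 NOT NULL" reflects as an Integer column
    # with nullable=False and server_default '42', while "testdomain2 ... DEFAULT 0"
    # (no NOT NULL) reflects as nullable with server_default '0'.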
174 | 175 | def test_quoted_domain_is_reflected(self, connection): 176 | metadata = MetaData() 177 | table = Table("quote_test", metadata, autoload_with=connection) 178 | eq_(table.c.data.type.__class__, FBINTEGER) 179 | 180 | @property 181 | def all_domains(self): 182 | return [ 183 | { 184 | "name": "my_int", 185 | "nullable": True, 186 | "default": None, 187 | "check": "VALUE > 1", 188 | "comment": None, 189 | }, 190 | { 191 | "name": "not_nullable_domain", 192 | "nullable": False, 193 | "default": None, 194 | "check": None, 195 | "comment": None, 196 | }, 197 | { 198 | "name": "nullable_domain", 199 | "nullable": True, 200 | "default": None, 201 | "check": "VALUE IN('FOO', 'BAR')", 202 | "comment": None, 203 | }, 204 | { 205 | "name": "Quoted.Domain", 206 | "nullable": True, 207 | "default": "0", 208 | "check": None, 209 | "comment": None, 210 | }, 211 | { 212 | "name": "testdomain", 213 | "nullable": False, 214 | "default": "42", 215 | "check": None, 216 | "comment": None, 217 | }, 218 | { 219 | "name": "testdomain2", 220 | "nullable": True, 221 | "default": "0", 222 | "check": None, 223 | "comment": None, 224 | }, 225 | ] 226 | 227 | def test_inspect_domains(self, connection): 228 | inspector = inspect(connection) 229 | ds = inspector.get_domains() 230 | eq_(ds, self.all_domains) 231 | 232 | 233 | class ReflectionTest( 234 | ReflectionFixtures, AssertsCompiledSQL, ComparesIndexes, fixtures.TestBase 235 | ): 236 | __backend__ = True 237 | 238 | def test_reflected_primary_key_order(self, metadata, connection): 239 | meta1 = metadata 240 | subject = Table( 241 | "subject", 242 | meta1, 243 | Column("p1", Integer, primary_key=True), 244 | Column("p2", Integer, primary_key=True), 245 | PrimaryKeyConstraint("p2", "p1"), 246 | ) 247 | meta1.create_all(connection) 248 | meta2 = MetaData() 249 | subject = Table("subject", meta2, autoload_with=connection) 250 | eq_(subject.primary_key.columns.keys(), ["p2", "p1"]) 251 | 252 | def test_pg_weirdchar_reflection(self, metadata, connection): 253 | meta1 = metadata 254 | subject = Table( 255 | "subject", meta1, Column("id$", Integer, primary_key=True) 256 | ) 257 | referer = Table( 258 | "referer", 259 | meta1, 260 | Column("id", Integer, primary_key=True), 261 | Column("ref", Integer, ForeignKey("subject.id$")), 262 | ) 263 | meta1.create_all(connection) 264 | meta2 = MetaData() 265 | subject = Table("subject", meta2, autoload_with=connection) 266 | referer = Table("referer", meta2, autoload_with=connection) 267 | self.assert_( 268 | (subject.c["id$"] == referer.c.ref).compare( 269 | subject.join(referer).onclause 270 | ) 271 | ) 272 | 273 | # This test fails on Firebird 2.5/fdb 274 | @testing.requires.firebird_3_or_higher 275 | def test_reflect_default_over_128_chars(self, metadata, connection): 276 | Table( 277 | "t", 278 | metadata, 279 | Column("x", String(200), server_default="abcd" * 40), 280 | ).create(connection) 281 | 282 | m = MetaData() 283 | t = Table("t", m, autoload_with=connection) 284 | eq_( 285 | t.c.x.server_default.arg.text, 286 | "'%s'" % ("abcd" * 40), 287 | ) 288 | 289 | def test_has_temporary_table(self, metadata, connection): 290 | assert not inspect(connection).has_table("some_temp_table") 291 | user_tmp = Table( 292 | "some_temp_table", 293 | metadata, 294 | Column("id", Integer, primary_key=True), 295 | Column("name", String(50)), 296 | prefixes=["GLOBAL TEMPORARY"], 297 | ) 298 | user_tmp.create(connection) 299 | assert inspect(connection).has_table("some_temp_table") 300 | 301 | def 
test_cross_schema_reflection_one(self, metadata, connection): 302 | meta1 = metadata 303 | 304 | users = Table( 305 | "test_schema$users", 306 | meta1, 307 | Column("user_id", Integer, primary_key=True), 308 | Column("user_name", String(30), nullable=False), 309 | ) 310 | addresses = Table( 311 | "test_schema$email_addresses", 312 | meta1, 313 | Column("address_id", Integer, primary_key=True), 314 | Column("remote_user_id", Integer, ForeignKey(users.c.user_id)), 315 | Column("email_address", String(20)), 316 | ) 317 | meta1.create_all(connection) 318 | meta2 = MetaData() 319 | addresses = Table( 320 | "test_schema$email_addresses", 321 | meta2, 322 | autoload_with=connection, 323 | ) 324 | users = Table("test_schema$users", meta2, must_exist=True) 325 | j = join(users, addresses) 326 | self.assert_( 327 | (users.c.user_id == addresses.c.remote_user_id).compare(j.onclause) 328 | ) 329 | 330 | def test_cross_schema_reflection_two(self, metadata, connection): 331 | meta1 = metadata 332 | subject = Table( 333 | "subject", meta1, Column("id", Integer, primary_key=True) 334 | ) 335 | referer = Table( 336 | "referer", 337 | meta1, 338 | Column("id", Integer, primary_key=True), 339 | Column("ref", Integer, ForeignKey("subject.id")), 340 | schema="test_schema", 341 | ) 342 | meta1.create_all(connection) 343 | meta2 = MetaData() 344 | subject = Table("subject", meta2, autoload_with=connection) 345 | referer = Table( 346 | "referer", meta2, schema="test_schema", autoload_with=connection 347 | ) 348 | self.assert_( 349 | (subject.c.id == referer.c.ref).compare( 350 | subject.join(referer).onclause 351 | ) 352 | ) 353 | 354 | def test_cross_schema_reflection_three(self, metadata, connection): 355 | meta1 = metadata 356 | subject = Table( 357 | "test_schema_2$subject", 358 | meta1, 359 | Column("id", Integer, primary_key=True), 360 | ) 361 | referer = Table( 362 | "test_schema$referer", 363 | meta1, 364 | Column("id", Integer, primary_key=True), 365 | Column("ref", Integer, ForeignKey("test_schema_2$subject.id")), 366 | ) 367 | meta1.create_all(connection) 368 | meta2 = MetaData() 369 | subject = Table( 370 | "test_schema_2$subject", 371 | meta2, 372 | autoload_with=connection, 373 | ) 374 | referer = Table( 375 | "test_schema$referer", 376 | meta2, 377 | autoload_with=connection, 378 | ) 379 | self.assert_( 380 | (subject.c.id == referer.c.ref).compare( 381 | subject.join(referer).onclause 382 | ) 383 | ) 384 | 385 | def test_cross_schema_reflection_four(self, metadata, connection): 386 | meta1 = metadata 387 | subject = Table( 388 | "test_schema_2$subject", 389 | meta1, 390 | Column("id", Integer, primary_key=True), 391 | ) 392 | referer = Table( 393 | "test_schema$referer", 394 | meta1, 395 | Column("id", Integer, primary_key=True), 396 | Column("ref", Integer, ForeignKey("test_schema_2$subject.id")), 397 | ) 398 | meta1.create_all(connection) 399 | 400 | connection.detach() 401 | 402 | meta2 = MetaData() 403 | subject = Table( 404 | "test_schema_2$subject", 405 | meta2, 406 | autoload_with=connection, 407 | ) 408 | referer = Table( 409 | "test_schema$referer", 410 | meta2, 411 | autoload_with=connection, 412 | ) 413 | self.assert_( 414 | (subject.c.id == referer.c.ref).compare( 415 | subject.join(referer).onclause 416 | ) 417 | ) 418 | 419 | # This test fails on Firebird 2.5/fdb 420 | @testing.requires.firebird_3_or_higher 421 | def test_cross_schema_reflection_metadata_uses_schema( 422 | self, metadata, connection 423 | ): 424 | # test [ticket:3716] 425 | 426 | Table( 427 | 
"test_schema$some_table", 428 | metadata, 429 | Column("id", Integer, primary_key=True), 430 | Column("sid", Integer, ForeignKey("some_other_table.id")), 431 | ) 432 | Table( 433 | "some_other_table", 434 | metadata, 435 | Column("id", Integer, primary_key=True), 436 | ) 437 | metadata.create_all(connection) 438 | meta2 = MetaData() 439 | meta2.reflect(connection) 440 | 441 | eq_( 442 | set(meta2.tables), 443 | {"some_other_table", "test_schema$some_table"}, 444 | ) 445 | 446 | @pytest.mark.skip( 447 | reason="Wait for https://github.com/sqlalchemy/sqlalchemy/issues/10789 in SQLAlchemy 2.1" 448 | ) 449 | def test_uppercase_lowercase_table(self, metadata, connection): 450 | a_table = Table("a", metadata, Column("x", Integer)) 451 | A_table = Table("A", metadata, Column("x", Integer)) 452 | 453 | A_table.create(connection, checkfirst=True) 454 | assert inspect(connection).has_table("A") 455 | a_table.create(connection) 456 | assert inspect(connection).has_table("a") 457 | assert not inspect(connection).has_table("A") 458 | 459 | @pytest.mark.skip( 460 | reason="Wait for https://github.com/sqlalchemy/sqlalchemy/issues/10789 in SQLAlchemy 2.1" 461 | ) 462 | def test_uppercase_lowercase_sequence(self, connection): 463 | a_seq = Sequence("a") 464 | A_seq = Sequence("A") 465 | 466 | a_seq.create(connection) 467 | assert connection.dialect.has_sequence(connection, "a") 468 | assert not connection.dialect.has_sequence(connection, "A") 469 | A_seq.create(connection, checkfirst=True) 470 | assert connection.dialect.has_sequence(connection, "A") 471 | 472 | a_seq.drop(connection) 473 | A_seq.drop(connection) 474 | 475 | def test_index_reflection(self, metadata, connection): 476 | """Reflecting expression-based indexes works""" 477 | 478 | Table( 479 | "party", 480 | metadata, 481 | Column("id", String(10), nullable=False), 482 | Column("name", String(20), index=True), 483 | Column("aname", String(20)), 484 | Column("other", String(20)), 485 | ) 486 | metadata.create_all(connection) 487 | connection.exec_driver_sql( 488 | """ 489 | CREATE DESCENDING INDEX idx3 ON party 490 | COMPUTED BY (LOWER(name)||other||LOWER(aname)) 491 | """ 492 | ) 493 | connection.exec_driver_sql( 494 | "CREATE INDEX idx1 ON party COMPUTED BY (id||name||other||CAST(id AS VARCHAR(30)))" 495 | ) 496 | 497 | if testing.requires.partial_indices.enabled: 498 | connection.exec_driver_sql( 499 | "CREATE UNIQUE INDEX idx2 ON party (id) WHERE name = 'test'" 500 | ) 501 | 502 | expected = [ 503 | { 504 | "name": "idx1", 505 | "column_names": ["id", "name", "other", None], 506 | "unique": False, 507 | "expressions": [ 508 | "id", 509 | "name", 510 | "other", 511 | "CAST(id AS VARCHAR(30))", 512 | ], 513 | "dialect_options": { 514 | "firebird_descending": False, 515 | "firebird_where": None, 516 | }, 517 | }, 518 | { 519 | "name": "idx2", 520 | "column_names": ["id"], 521 | "unique": True, 522 | "dialect_options": { 523 | "firebird_descending": False, 524 | "firebird_where": "name = 'test'", 525 | }, 526 | }, 527 | { 528 | "name": "idx3", 529 | "column_names": [None, "other", None], 530 | "unique": False, 531 | "expressions": ["LOWER(name)", "other", "LOWER(aname)"], 532 | "dialect_options": { 533 | "firebird_descending": True, 534 | "firebird_where": None, 535 | }, 536 | }, 537 | { 538 | "name": "ix_party_name", 539 | "column_names": ["name"], 540 | "unique": False, 541 | "dialect_options": { 542 | "firebird_descending": False, 543 | "firebird_where": None, 544 | }, 545 | }, 546 | ] 547 | 548 | if not 
testing.requires.partial_indices.enabled: 549 | expected.pop(1) 550 | 551 | insp = inspect(connection) 552 | eq_(insp.get_indexes("party"), expected) 553 | 554 | m2 = MetaData() 555 | t2 = Table("party", m2, autoload_with=connection) 556 | self.compare_table_index_with_expected(t2, expected, "firebird") 557 | 558 | @testing.requires.partial_indices 559 | def test_index_reflection_partial(self, metadata, connection): 560 | """Reflect the filter definition on partial indexes""" 561 | 562 | metadata = metadata 563 | 564 | t1 = Table( 565 | "table1", 566 | metadata, 567 | Column("id", Integer, primary_key=True), 568 | Column("name", String(20)), 569 | Column("x", Integer), 570 | ) 571 | Index("idx1", t1.c.id, firebird_where=t1.c.name == "test") 572 | Index("idx2", t1.c.id, firebird_where=t1.c.x >= 5) 573 | 574 | metadata.create_all(connection) 575 | 576 | ind = connection.dialect.get_indexes(connection, t1.name, None) 577 | 578 | partial_definitions = [] 579 | for ix in ind: 580 | if "dialect_options" in ix: 581 | partial_definitions.append( 582 | ix["dialect_options"]["firebird_where"] 583 | ) 584 | 585 | eq_( 586 | sorted(partial_definitions), 587 | ["name = 'test'", "x >= 5"], 588 | ) 589 | 590 | t2 = Table("table1", MetaData(), autoload_with=connection) 591 | idx = list(sorted(t2.indexes, key=lambda idx: idx.name))[0] 592 | 593 | self.assert_compile( 594 | CreateIndex(idx), 595 | "CREATE INDEX idx1 ON table1 (id) " "WHERE name = 'test'", 596 | ) 597 | 598 | def test_foreign_key_option_inspection(self, metadata, connection): 599 | Table( 600 | "person", 601 | metadata, 602 | Column("id", String(length=32), nullable=False, primary_key=True), 603 | Column( 604 | "company_id", 605 | ForeignKey( 606 | "company.id", 607 | name="person_company_id_fkey", 608 | onupdate="NO ACTION", # None 609 | ondelete="CASCADE", 610 | ), 611 | ), 612 | ) 613 | Table( 614 | "company", 615 | metadata, 616 | Column("id", String(length=32), nullable=False, primary_key=True), 617 | Column("name", String(length=255)), 618 | Column( 619 | "industry_id", 620 | ForeignKey( 621 | "industry.id", 622 | name="company_industry_id_fkey", 623 | onupdate="SET DEFAULT", 624 | ondelete="SET NULL", 625 | ), 626 | ), 627 | ) 628 | Table( 629 | "industry", 630 | metadata, 631 | Column("id", Integer(), nullable=False, primary_key=True), 632 | Column("name", String(length=255)), 633 | ) 634 | fk_ref = { 635 | "person_company_id_fkey": { 636 | "name": "person_company_id_fkey", 637 | "constrained_columns": ["company_id"], 638 | "referred_schema": None, 639 | "referred_table": "company", 640 | "referred_columns": ["id"], 641 | "options": { 642 | "ondelete": "CASCADE", 643 | }, 644 | }, 645 | "company_industry_id_fkey": { 646 | "name": "company_industry_id_fkey", 647 | "constrained_columns": ["industry_id"], 648 | "referred_schema": None, 649 | "referred_table": "industry", 650 | "referred_columns": ["id"], 651 | "options": {"onupdate": "SET DEFAULT", "ondelete": "SET NULL"}, 652 | }, 653 | } 654 | metadata.create_all(connection) 655 | inspector = inspect(connection) 656 | fks = inspector.get_foreign_keys( 657 | "person" 658 | ) + inspector.get_foreign_keys("company") 659 | for fk in fks: 660 | eq_(fk, fk_ref[fk["name"]]) 661 | 662 | def test_reflection_with_unique_constraint(self, metadata, connection): 663 | insp = inspect(connection) 664 | 665 | meta = metadata 666 | uc_table = Table( 667 | "fbsql_uc", 668 | meta, 669 | Column("a", String(10)), 670 | UniqueConstraint("a", name="uc_a"), 671 | ) 672 | 673 | uc_table.create(connection) 674 
| 675 | indexes = {i["name"] for i in insp.get_indexes("fbsql_uc")} 676 | constraints = { 677 | i["name"] for i in insp.get_unique_constraints("fbsql_uc") 678 | } 679 | 680 | self.assert_("uc_a" in indexes) 681 | self.assert_("uc_a" in constraints) 682 | 683 | reflected = Table("fbsql_uc", MetaData(), autoload_with=connection) 684 | 685 | indexes = {i.name for i in reflected.indexes} 686 | constraints = {uc.name for uc in reflected.constraints} 687 | 688 | self.assert_("uc_a" in indexes) 689 | self.assert_("uc_a" in constraints) 690 | 691 | def test_reflect_unique_index(self, metadata, connection): 692 | insp = inspect(connection) 693 | 694 | meta = metadata 695 | 696 | # a unique index OTOH we are able to detect is an index 697 | # and not a unique constraint 698 | uc_table = Table( 699 | "fbsql_uc", 700 | meta, 701 | Column("a", String(10)), 702 | Index("ix_a", "a", unique=True), 703 | ) 704 | 705 | uc_table.create(connection) 706 | 707 | indexes = {i["name"]: i for i in insp.get_indexes("fbsql_uc")} 708 | constraints = { 709 | i["name"] for i in insp.get_unique_constraints("fbsql_uc") 710 | } 711 | 712 | self.assert_("ix_a" in indexes) 713 | assert indexes["ix_a"]["unique"] 714 | self.assert_("ix_a" not in constraints) 715 | 716 | reflected = Table("fbsql_uc", MetaData(), autoload_with=connection) 717 | 718 | indexes = {i.name: i for i in reflected.indexes} 719 | constraints = {uc.name for uc in reflected.constraints} 720 | 721 | self.assert_("ix_a" in indexes) 722 | assert indexes["ix_a"].unique 723 | self.assert_("ix_a" not in constraints) 724 | 725 | def test_reflect_check_constraint(self, metadata, connection): 726 | meta = metadata 727 | 728 | Table( 729 | "fbsql_cc", 730 | meta, 731 | Column("a", Integer()), 732 | Column("b", String), 733 | CheckConstraint("a > 1 AND a < 5", name="cc1"), 734 | CheckConstraint("a = 1 OR (a > 2 AND a < 5)", name="cc2"), 735 | CheckConstraint("b <> 'hi\nim a name \nyup\n'", name="cc4"), 736 | ) 737 | 738 | meta.create_all(connection) 739 | 740 | reflected = Table("fbsql_cc", MetaData(), autoload_with=connection) 741 | 742 | check_constraints = { 743 | uc.name: uc.sqltext.text 744 | for uc in reflected.constraints 745 | if isinstance(uc, CheckConstraint) 746 | } 747 | 748 | eq_( 749 | check_constraints, 750 | { 751 | "cc1": "a > 1 AND a < 5", 752 | "cc2": "a = 1 OR (a > 2 AND a < 5)", 753 | "cc4": "b <> 'hi\nim a name \nyup\n'", 754 | }, 755 | ) 756 | 757 | 758 | class IdentityReflectionTest(fixtures.TablesTest): 759 | __backend__ = True 760 | __requires__ = ("identity_columns",) 761 | 762 | _names = ("t1", "T2", "MiXeDCaSe!") 763 | 764 | @classmethod 765 | def define_tables(cls, metadata): 766 | for name in cls._names: 767 | Table( 768 | name, 769 | metadata, 770 | Column( 771 | "id1", 772 | Integer, 773 | Identity( 774 | always=True, 775 | start=2, 776 | increment=3, 777 | ), 778 | ), 779 | Column("id2", Integer, Identity()), 780 | Column("id3", BigInteger, Identity()), 781 | Column("id4", SmallInteger, Identity()), 782 | ) 783 | 784 | @testing.combinations(*_names, argnames="name") 785 | def test_reflect_identity(self, connection, name): 786 | firebird_4_or_higher = testing.requires.firebird_4_or_higher.enabled 787 | 788 | insp = inspect(connection) 789 | expected = dict( 790 | always=True if firebird_4_or_higher else False, 791 | start=2, 792 | increment=3 if firebird_4_or_higher else 1, 793 | ) 794 | 795 | default = dict( 796 | always=False, 797 | start=1 if firebird_4_or_higher else 0, 798 | increment=1, 799 | ) 800 | cols = 
insp.get_columns(name) 801 | for col in cols: 802 | if col["name"] == "id1": 803 | is_true("identity" in col) 804 | eq_( 805 | col["identity"], 806 | expected, 807 | ) 808 | elif col["name"] == "id2": 809 | is_true("identity" in col) 810 | eq_(col["identity"], default) 811 | elif col["name"] == "id3": 812 | is_true("identity" in col) 813 | eq_(col["identity"], default) 814 | elif col["name"] == "id4": 815 | is_true("identity" in col) 816 | eq_(col["identity"], default) 817 | -------------------------------------------------------------------------------- /test/test_suite.py: -------------------------------------------------------------------------------- 1 | import operator 2 | import pytest 3 | 4 | import sqlalchemy as sa 5 | 6 | from packaging import version 7 | from sqlalchemy import __version__ as SQLALCHEMY_VERSION 8 | from sqlalchemy import Index 9 | from sqlalchemy.testing import is_false 10 | from sqlalchemy.testing.suite import * # noqa: F401, F403 11 | 12 | from sqlalchemy.testing.suite import ( 13 | CTETest as _CTETest, 14 | ComponentReflectionTest as _ComponentReflectionTest, 15 | ComponentReflectionTestExtra as _ComponentReflectionTestExtra, 16 | CompoundSelectTest as _CompoundSelectTest, 17 | DeprecatedCompoundSelectTest as _DeprecatedCompoundSelectTest, 18 | IdentityColumnTest as _IdentityColumnTest, 19 | IdentityReflectionTest as _IdentityReflectionTest, 20 | StringTest as _StringTest, 21 | InsertBehaviorTest as _InsertBehaviorTest, 22 | RowCountTest as _RowCountTest, 23 | SimpleUpdateDeleteTest as _SimpleUpdateDeleteTest, 24 | ) 25 | 26 | 27 | @pytest.mark.skip( 28 | reason="These tests fail in Firebird because a DELETE FROM with a self-referencing FK raises integrity errors." 29 | ) 30 | class CTETest(_CTETest): 31 | pass 32 | 33 | 34 | class ComponentReflectionTest(_ComponentReflectionTest): 35 | def test_get_unique_constraints(self, metadata, connection): 36 | # Clone of super().test_get_unique_constraints() adapted for Firebird. 37 | 38 | schema = None 39 | uniques = sorted( 40 | [ 41 | {"name": "unique_a", "column_names": ["a"]}, 42 | # Firebird won't allow two unique indexes with the same set of columns. 43 | {"name": "unique_a_b_c", "column_names": ["a", "b", "c"]}, 44 | {"name": "unique_c_a", "column_names": ["c", "a"]}, 45 | {"name": "unique_asc_key", "column_names": ["asc", "key"]}, 46 | {"name": "i.have.dots", "column_names": ["b"]}, 47 | {"name": "i have spaces", "column_names": ["c"]}, 48 | ], 49 | key=operator.itemgetter("name"), 50 | ) 51 | table = Table( 52 | "testtbl", 53 | metadata, 54 | Column("a", sa.String(20)), 55 | Column("b", sa.String(30)), 56 | Column("c", sa.Integer), 57 | # reserved identifiers 58 | Column("asc", sa.String(30)), 59 | Column("key", sa.String(30)), 60 | schema=schema, 61 | ) 62 | for uc in uniques: 63 | table.append_constraint( 64 | sa.UniqueConstraint(*uc["column_names"], name=uc["name"]) 65 | ) 66 | table.create(connection) 67 | 68 | inspector = inspect(connection) 69 | reflected = sorted( 70 | inspector.get_unique_constraints("testtbl", schema=schema), 71 | key=operator.itemgetter("name"), 72 | ) 73 | 74 | eq_(uniques, reflected) 75 | 76 | def test_get_temp_table_indexes(self, connection): 77 | # Clone of super().test_get_temp_table_indexes() adapted for Firebird.
78 | insp = inspect(connection) 79 | table_name = self.temp_table_name() 80 | indexes = insp.get_indexes(table_name) 81 | 82 | expected = [ 83 | { 84 | "unique": False, 85 | "column_names": ["foo"], 86 | "name": "user_tmp_ix", 87 | "dialect_options": { 88 | "firebird_descending": False, 89 | "firebird_where": None, 90 | }, 91 | } 92 | ] 93 | eq_( 94 | [idx for idx in indexes if idx["name"] == "user_tmp_ix"], 95 | expected, 96 | ) 97 | 98 | 99 | class ComponentReflectionTestExtra(_ComponentReflectionTestExtra): 100 | def test_reflect_descending_indexes(self, metadata, connection): 101 | t = Table( 102 | "t", 103 | metadata, 104 | Column("x", String(30)), 105 | Column("y", String(30)), 106 | Column("z", String(30)), 107 | ) 108 | 109 | Index("t_idx_2", t.c.x, firebird_descending=True) 110 | 111 | metadata.create_all(connection) 112 | 113 | insp = inspect(connection) 114 | 115 | expected = [ 116 | { 117 | "name": "t_idx_2", 118 | "column_names": ["x"], 119 | "unique": False, 120 | "dialect_options": { 121 | "firebird_descending": True, 122 | "firebird_where": None, 123 | }, 124 | } 125 | ] 126 | 127 | eq_(insp.get_indexes("t"), expected) 128 | m2 = MetaData() 129 | t2 = Table("t", m2, autoload_with=connection) 130 | 131 | self.compare_table_index_with_expected( 132 | t2, expected, connection.engine.name 133 | ) 134 | 135 | def test_reflect_expression_based_indexes(self, metadata, connection): 136 | # Clone of super().test_reflect_expression_based_indexes adapted for Firebird. 137 | 138 | using_sqlalchemy2 = version.parse(SQLALCHEMY_VERSION).major >= 2 139 | if not using_sqlalchemy2: 140 | # Test from SQLAlchemy 1.4 141 | t = Table( 142 | "t", 143 | metadata, 144 | Column("x", String(30)), 145 | Column("y", String(30)), 146 | ) 147 | 148 | Index("t_idx", func.lower(t.c.x), func.lower(t.c.y)) 149 | 150 | Index("t_idx_2", t.c.x) 151 | 152 | metadata.create_all(connection) 153 | 154 | insp = inspect(connection) 155 | 156 | expected = [ 157 | { 158 | "name": "t_idx", 159 | "column_names": [None, None], 160 | "unique": False, 161 | "expressions": ["lower(x)", "lower(y)"], 162 | "dialect_options": {}, 163 | "descending": False, 164 | }, 165 | { 166 | "name": "t_idx_2", 167 | "column_names": ["x"], 168 | "unique": False, 169 | "dialect_options": {}, 170 | "descending": False, 171 | }, 172 | ] 173 | 174 | eq_(insp.get_indexes("t"), expected) 175 | return 176 | 177 | # Test from SQLAlchemy 2.0 178 | 179 | t = Table( 180 | "t", 181 | metadata, 182 | Column("x", String(30)), 183 | Column("y", String(30)), 184 | Column("z", String(30)), 185 | ) 186 | 187 | Index("t_idx", func.lower(t.c.x), t.c.z, func.lower(t.c.y)) 188 | # Maximum allowed for database page size = 8K 189 | long_str = "long string " * 42 190 | Index("t_idx_long", func.coalesce(t.c.x, long_str)) 191 | Index("t_idx_2", t.c.x) 192 | 193 | metadata.create_all(connection) 194 | 195 | insp = inspect(connection) 196 | 197 | expected = [ 198 | { 199 | "name": "t_idx_2", 200 | "column_names": ["x"], 201 | } 202 | ] 203 | 204 | def completeIndex(entry): 205 | entry.setdefault("unique", False) 206 | entry.setdefault( 207 | "dialect_options", 208 | { 209 | "firebird_descending": False, 210 | "firebird_where": None, 211 | }, 212 | ) 213 | 214 | completeIndex(expected[0]) 215 | 216 | class lower_index_str(str): 217 | def __eq__(self, other): 218 | # test that lower and x or y are in the string 219 | return "lower" in other and ("x" in other or "y" in other) 220 | 221 | class coalesce_index_str(str): 222 | def __eq__(self, other): 223 | # test that 
coalesce and the string is in other 224 | return "coalesce" in other.lower() and long_str in other 225 | 226 | expr_index = { 227 | "name": "t_idx", 228 | "column_names": [None, "z", None], 229 | "unique": False, 230 | "expressions": [ 231 | lower_index_str("lower(x)"), 232 | "z", 233 | lower_index_str("lower(y)"), 234 | ], 235 | } 236 | completeIndex(expr_index) 237 | expected.insert(0, expr_index) 238 | 239 | expr_index_long = { 240 | "name": "t_idx_long", 241 | "column_names": [None], 242 | "expressions": [coalesce_index_str(f"coalesce(x, '{long_str}')")], 243 | } 244 | completeIndex(expr_index_long) 245 | expected.append(expr_index_long) 246 | 247 | eq_(insp.get_indexes("t"), expected) 248 | m2 = MetaData() 249 | t2 = Table("t", m2, autoload_with=connection) 250 | 251 | self.compare_table_index_with_expected( 252 | t2, expected, connection.engine.name 253 | ) 254 | 255 | 256 | class CompoundSelectTest(_CompoundSelectTest): 257 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 258 | def test_distinct_selectable_in_unions(self): 259 | super().test_distinct_selectable_in_unions() 260 | 261 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 262 | def test_limit_offset_aliased_selectable_in_unions(self): 263 | super().test_limit_offset_aliased_selectable_in_unions() 264 | 265 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 266 | def test_plain_union(self): 267 | super().test_plain_union() 268 | 269 | 270 | class DeprecatedCompoundSelectTest(_DeprecatedCompoundSelectTest): 271 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 272 | def test_distinct_selectable_in_unions(self): 273 | super().test_distinct_selectable_in_unions() 274 | 275 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 276 | def test_limit_offset_aliased_selectable_in_unions(self): 277 | super().test_limit_offset_aliased_selectable_in_unions() 278 | 279 | @pytest.mark.skip(reason="Firebird does not support ORDER BY alias") 280 | def test_plain_union(self): 281 | super().test_plain_union() 282 | 283 | 284 | class IdentityColumnTest(_IdentityColumnTest): 285 | @testing.requires.firebird_4_or_higher 286 | def test_select_all(self, connection): 287 | super().test_select_all(connection) 288 | 289 | @testing.requires.firebird_4_or_higher 290 | def test_insert_always_error(self, connection): 291 | super().test_insert_always_error(connection) 292 | 293 | def test_select_columns(self, connection): 294 | # Clone of super().test_select_columns adjusted for Firebird. 295 | expected = [(42,), (43,)] 296 | 297 | if config.db.dialect.server_version_info < (4,): 298 | # Firebird 3 has distinct START WITH semantic. 299 | # https://firebirdsql.org/file/documentation/release_notes/html/en/4_0/rlsnotes40.html#rnfb40-compat-sql-sequence-start-value 300 | expected = [(43,), (44,)] 301 | 302 | res = connection.execute( 303 | select(self.tables.tbl_a.c.id).order_by(self.tables.tbl_a.c.id) 304 | ).fetchall() 305 | eq_(res, expected) 306 | 307 | 308 | class IdentityReflectionTest(_IdentityReflectionTest): 309 | # Clone of IdentityReflectionTest adapted for Firebird. 
310 | 311 | @classmethod 312 | def define_tables(cls, metadata): 313 | firebird_4_or_higher = config.db.dialect.server_version_info >= (4, 0) 314 | 315 | Table( 316 | "t1", 317 | metadata, 318 | Column("normal", Integer), 319 | Column("id1", Integer, Identity()), 320 | ) 321 | 322 | Table( 323 | "t2", 324 | metadata, 325 | Column( 326 | "id2", 327 | Integer, 328 | Identity( 329 | always=firebird_4_or_higher, 330 | start=2, 331 | increment=3 if firebird_4_or_higher else None, 332 | ), 333 | ), 334 | ) 335 | 336 | def test_reflect_identity(self): 337 | firebird_4_or_higher = config.db.dialect.server_version_info >= (4, 0) 338 | 339 | insp = inspect(config.db) 340 | 341 | cols = insp.get_columns("t1") 342 | for col in cols: 343 | if col["name"] == "normal": 344 | is_false("identity" in col) 345 | elif col["name"] == "id1": 346 | if "autoincrement" in col: 347 | is_true(col["autoincrement"]) 348 | eq_(col["default"], None) 349 | is_true("identity" in col) 350 | self.check( 351 | col["identity"], 352 | dict( 353 | always=False, 354 | start=1 if firebird_4_or_higher else 0, 355 | increment=1, 356 | ), 357 | approx=True, 358 | ) 359 | 360 | @testing.requires.firebird_4_or_higher 361 | def test_reflect_identity_v4(self): 362 | insp = inspect(config.db) 363 | 364 | cols = insp.get_columns("t2") 365 | for col in cols: 366 | if col["name"] == "id2": 367 | if "autoincrement" in col: 368 | is_true(col["autoincrement"]) 369 | eq_(col["default"], None) 370 | is_true("identity" in col) 371 | self.check( 372 | col["identity"], 373 | dict( 374 | always=True, 375 | start=2, 376 | increment=3, 377 | ), 378 | approx=False, 379 | ) 380 | 381 | 382 | class StringTest(_StringTest): 383 | @pytest.mark.skip( 384 | reason="Firebird does not accept a LIKE 'A%C%Z' in a VARCHAR(2) column" 385 | ) 386 | def test_dont_truncate_rightside( 387 | self, metadata, connection, expr, expected 388 | ): 389 | super().test_dont_truncate_rightside( 390 | metadata, connection, expr, expected 391 | ) 392 | 393 | 394 | class InsertBehaviorTest(_InsertBehaviorTest): 395 | @testing.skip_if( 396 | lambda config: config.db.dialect.driver == "fdb", 397 | "Driver fdb returns erroneous 'returns_rows = True'.", 398 | ) 399 | @testing.variation("style", ["plain", "return_defaults"]) 400 | @testing.variation("executemany", [True, False]) 401 | def test_no_results_for_non_returning_insert( 402 | self, connection, style, executemany 403 | ): 404 | super().test_no_results_for_non_returning_insert( 405 | connection, style, executemany 406 | ) 407 | 408 | @requirements.autoincrement_insert # missing in SQLAlchemy 409 | def test_autoclose_on_insert_implicit_returning(self, connection): 410 | super().test_autoclose_on_insert_implicit_returning(connection) 411 | 412 | 413 | class RowCountTest(_RowCountTest): 414 | @testing.requires.firebird_5_or_higher 415 | @testing.variation("implicit_returning", [True, False]) 416 | @testing.variation( 417 | "dml", 418 | [ 419 | ("update", testing.requires.update_returning), 420 | ("delete", testing.requires.delete_returning), 421 | ], 422 | ) 423 | def test_update_delete_rowcount_return_defaults( 424 | self, connection, implicit_returning, dml 425 | ): 426 | super().test_update_delete_rowcount_return_defaults( 427 | connection, implicit_returning, dml, None 428 | ) 429 | 430 | 431 | class SimpleUpdateDeleteTest(_SimpleUpdateDeleteTest): 432 | @testing.requires.firebird_5_or_higher 433 | @testing.variation("criteria", ["rows", "norows", "emptyin"]) 434 | @testing.requires.update_returning 435 | def
test_update_returning(self, connection, criteria): 436 | super().test_update_returning(connection, criteria) 437 | 438 | @testing.requires.firebird_5_or_higher 439 | @testing.variation("criteria", ["rows", "norows", "emptyin"]) 440 | @testing.requires.delete_returning 441 | def test_delete_returning(self, connection, criteria): 442 | super().test_delete_returning(connection, criteria) 443 | -------------------------------------------------------------------------------- /test/test_types.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy import Column 2 | from sqlalchemy import Float 3 | from sqlalchemy import MetaData 4 | from sqlalchemy import Table 5 | from sqlalchemy import testing 6 | from sqlalchemy.testing import eq_ 7 | from sqlalchemy.testing import fixtures 8 | from sqlalchemy.testing import is_instance_of 9 | from sqlalchemy.testing import is_not_none 10 | 11 | import sqlalchemy.types as sa_types 12 | import sqlalchemy_firebird.types as fb_types 13 | 14 | 15 | TEST_CHARSET = "UTF8" 16 | TEST_COLLATION = "UNICODE_CI" 17 | 18 | 19 | def eq_col(col: Column, expected_type, **expected_options): 20 | is_instance_of(col.type, expected_type) 21 | 22 | if expected_options: 23 | for k, v in expected_options.items(): 24 | actual = getattr(col.type, k, None) 25 | is_not_none(actual, f'"{col}" type does not have "{k}".') 26 | eq_(actual, v) 27 | 28 | 29 | class TypesTest(fixtures.TestBase): 30 | @testing.provide_metadata 31 | def test_infinite_float(self, connection): 32 | t = Table("test_infinite_float", self.metadata, Column("data", Float)) 33 | self.metadata.create_all(testing.db) 34 | connection.execute(t.insert(), dict(data=float("inf"))) 35 | eq_(connection.execute(t.select()).fetchall(), [(float("inf"),)]) 36 | 37 | @testing.provide_metadata 38 | def test_blob_types(self, connection): 39 | t = Table( 40 | "test_blob_types", 41 | self.metadata, 42 | Column("b", sa_types.BLOB), 43 | Column("fb", fb_types.FBBLOB), 44 | Column("fbs", fb_types.FBBLOB(segment_size=100)), 45 | Column("t", sa_types.TEXT), 46 | Column("ft", fb_types.FBTEXT), 47 | Column("fts", fb_types.FBTEXT(segment_size=200)), 48 | Column("ftc", fb_types.FBTEXT(charset=TEST_CHARSET)), 49 | Column( 50 | "ftcc", 51 | fb_types.FBTEXT( 52 | charset=TEST_CHARSET, collation=TEST_COLLATION 53 | ), 54 | ), 55 | ) 56 | self.metadata.create_all(testing.db) 57 | 58 | rm = MetaData() 59 | rt = Table("test_blob_types", rm, autoload_with=testing.db) 60 | 61 | eq_col(rt.columns["b"], fb_types.FBBLOB) 62 | eq_col(rt.columns["fb"], fb_types.FBBLOB) 63 | eq_col(rt.columns["fbs"], fb_types.FBBLOB, segment_size=100) 64 | eq_col(rt.columns["t"], sa_types.TEXT) 65 | eq_col(rt.columns["ft"], fb_types.FBTEXT) 66 | eq_col(rt.columns["fts"], fb_types.FBTEXT, segment_size=200) 67 | eq_col(rt.columns["ftc"], fb_types.FBTEXT, charset=TEST_CHARSET) 68 | eq_col( 69 | rt.columns["ftcc"], 70 | fb_types.FBTEXT, 71 | charset=TEST_CHARSET, 72 | collation=TEST_COLLATION, 73 | ) 74 | 75 | @testing.provide_metadata 76 | def test_character_types(self, connection): 77 | t = Table( 78 | "test_character_types", 79 | self.metadata, 80 | Column("c", sa_types.CHAR), 81 | Column("cl", sa_types.CHAR(length=10)), 82 | Column("nc", sa_types.NCHAR), 83 | Column("ncl", sa_types.NCHAR(length=11)), 84 | Column("vc", sa_types.VARCHAR), 85 | Column("vcl", sa_types.VARCHAR(length=12)), 86 | Column("nvc", sa_types.NVARCHAR), 87 | Column("nvcl", sa_types.NVARCHAR(length=13)), 88 | Column("fc", fb_types.FBCHAR), 89 | Column("fcl", 
fb_types.FBCHAR(length=20)), 90 | Column("fclc", fb_types.FBCHAR(length=21, charset=TEST_CHARSET)), 91 | Column( 92 | "fclcc", 93 | fb_types.FBCHAR( 94 | length=22, charset=TEST_CHARSET, collation=TEST_COLLATION 95 | ), 96 | ), 97 | Column("fb", fb_types.FBBINARY), 98 | Column("fbl", fb_types.FBBINARY(length=31)), 99 | Column("fnc", fb_types.FBNCHAR), 100 | Column("fncl", fb_types.FBNCHAR(length=32)), 101 | Column("fvc", fb_types.FBVARCHAR), 102 | Column("fvcl", fb_types.FBVARCHAR(length=33)), 103 | Column( 104 | "fvclc", fb_types.FBVARCHAR(length=34, charset=TEST_CHARSET) 105 | ), 106 | Column( 107 | "fvclcc", 108 | fb_types.FBVARCHAR( 109 | length=35, charset=TEST_CHARSET, collation=TEST_COLLATION 110 | ), 111 | ), 112 | Column("fvb", fb_types.FBVARBINARY), 113 | Column("fvbl", fb_types.FBVARBINARY(length=36)), 114 | Column("fnvc", fb_types.FBNVARCHAR), 115 | Column("fnvcl", fb_types.FBNVARCHAR(length=37)), 116 | ) 117 | self.metadata.create_all(testing.db) 118 | 119 | rm = MetaData() 120 | rt = Table("test_character_types", rm, autoload_with=testing.db) 121 | 122 | eq_col(rt.columns["c"], fb_types.FBCHAR), 123 | eq_col(rt.columns["cl"], fb_types.FBCHAR, length=10), 124 | eq_col(rt.columns["nc"], fb_types.FBNCHAR), 125 | eq_col(rt.columns["ncl"], fb_types.FBNCHAR, length=11), 126 | eq_col(rt.columns["vc"], fb_types.FBTEXT), 127 | eq_col(rt.columns["vcl"], fb_types.FBVARCHAR, length=12), 128 | eq_col( 129 | rt.columns["nvc"], 130 | fb_types.FBTEXT, 131 | charset=fb_types.NATIONAL_CHARSET, 132 | ), 133 | eq_col(rt.columns["nvcl"], fb_types.FBNVARCHAR, length=13), 134 | eq_col(rt.columns["fc"], fb_types.FBCHAR), 135 | eq_col(rt.columns["fcl"], fb_types.FBCHAR, length=20), 136 | eq_col( 137 | rt.columns["fclc"], 138 | fb_types.FBCHAR, 139 | length=21, 140 | charset=TEST_CHARSET, 141 | ), 142 | eq_col( 143 | rt.columns["fclcc"], 144 | fb_types.FBCHAR, 145 | length=22, 146 | charset=TEST_CHARSET, 147 | collation=TEST_COLLATION, 148 | ), 149 | eq_col(rt.columns["fb"], fb_types.FBBINARY), 150 | eq_col(rt.columns["fbl"], fb_types.FBBINARY, length=31), 151 | eq_col(rt.columns["fnc"], fb_types.FBNCHAR), 152 | eq_col(rt.columns["fncl"], fb_types.FBNCHAR, length=32), 153 | eq_col(rt.columns["fvc"], fb_types.FBTEXT, charset=TEST_CHARSET) 154 | eq_col(rt.columns["fvcl"], fb_types.FBVARCHAR, length=33), 155 | eq_col( 156 | rt.columns["fvclc"], 157 | fb_types.FBVARCHAR, 158 | length=34, 159 | charset=TEST_CHARSET, 160 | ), 161 | eq_col( 162 | rt.columns["fvclcc"], 163 | fb_types.FBVARCHAR, 164 | length=35, 165 | charset=TEST_CHARSET, 166 | collation=TEST_COLLATION, 167 | ), 168 | eq_col( 169 | rt.columns["fvb"], 170 | fb_types.FBTEXT, 171 | charset=fb_types.BINARY_CHARSET, 172 | ) 173 | eq_col(rt.columns["fvbl"], fb_types.FBVARBINARY, length=36), 174 | eq_col( 175 | rt.columns["fnvc"], 176 | fb_types.FBTEXT, 177 | charset=fb_types.NATIONAL_CHARSET, 178 | ) 179 | eq_col(rt.columns["fnvcl"], fb_types.FBNVARCHAR, length=37), 180 | 181 | @testing.provide_metadata 182 | def test_integer_types(self, connection): 183 | t = Table( 184 | "test_integer_types", 185 | self.metadata, 186 | Column("si", sa_types.SMALLINT), 187 | Column("i", sa_types.INTEGER), 188 | Column("bi", sa_types.BIGINT), 189 | Column("fsi", fb_types.FBSMALLINT), 190 | Column("fi", fb_types.FBINTEGER), 191 | Column("fbi", fb_types.FBBIGINT), 192 | ) 193 | self.metadata.create_all(testing.db) 194 | 195 | rm = MetaData() 196 | rt = Table("test_integer_types", rm, autoload_with=testing.db) 197 | 198 | eq_col(rt.columns["si"], fb_types.FBSMALLINT), 
199 | eq_col(rt.columns["i"], fb_types.FBINTEGER), 200 | eq_col(rt.columns["bi"], fb_types.FBBIGINT), 201 | eq_col(rt.columns["fsi"], fb_types.FBSMALLINT), 202 | eq_col(rt.columns["fi"], fb_types.FBINTEGER), 203 | eq_col(rt.columns["fbi"], fb_types.FBBIGINT), 204 | 205 | @testing.provide_metadata 206 | @testing.requires.firebird_3_or_lower 207 | def test_float_types_v3(self, connection): 208 | # Firebird 2.5 and 3.0 have only two possible FLOAT data types 209 | t = Table( 210 | "test_float_types_v3", 211 | self.metadata, 212 | Column("f", sa_types.FLOAT), 213 | Column("r", sa_types.REAL), 214 | Column("dp", sa_types.DOUBLE_PRECISION), 215 | ) 216 | self.metadata.create_all(testing.db) 217 | 218 | rm = MetaData() 219 | rt = Table("test_float_types_v3", rm, autoload_with=testing.db) 220 | 221 | eq_col(rt.columns["f"], fb_types.FBFLOAT), 222 | eq_col(rt.columns["r"], fb_types.FBFLOAT), 223 | eq_col(rt.columns["dp"], fb_types.FBDOUBLE_PRECISION), 224 | 225 | @testing.provide_metadata 226 | @testing.requires.firebird_4_or_higher 227 | def test_float_types(self, connection): 228 | t = Table( 229 | "test_float_types", 230 | self.metadata, 231 | Column("f", sa_types.FLOAT), 232 | Column("f24", sa_types.FLOAT(precision=24)), 233 | Column("f53", sa_types.FLOAT(precision=53)), 234 | Column("r", sa_types.REAL), 235 | Column("dp", sa_types.DOUBLE_PRECISION), 236 | Column("ff", fb_types.FBFLOAT), 237 | Column("ff24", fb_types.FBFLOAT(precision=24)), 238 | Column("ff53", fb_types.FBFLOAT(precision=53)), 239 | Column("fr", fb_types.FBREAL), 240 | Column("fdp", fb_types.FBDOUBLE_PRECISION), 241 | Column("fdf", fb_types.FBDECFLOAT), 242 | Column("fdf16", fb_types.FBDECFLOAT(precision=16)), 243 | Column("fdf34", fb_types.FBDECFLOAT(precision=34)), 244 | ) 245 | self.metadata.create_all(testing.db) 246 | 247 | rm = MetaData() 248 | rt = Table("test_float_types", rm, autoload_with=testing.db) 249 | 250 | eq_col(rt.columns["f"], fb_types.FBFLOAT), 251 | eq_col(rt.columns["f24"], fb_types.FBFLOAT), 252 | eq_col(rt.columns["f53"], fb_types.FBDOUBLE_PRECISION), 253 | eq_col(rt.columns["r"], fb_types.FBFLOAT), 254 | eq_col(rt.columns["dp"], fb_types.FBDOUBLE_PRECISION), 255 | eq_col(rt.columns["ff"], fb_types.FBFLOAT), 256 | eq_col(rt.columns["ff24"], fb_types.FBFLOAT), 257 | eq_col(rt.columns["ff53"], fb_types.FBDOUBLE_PRECISION), 258 | eq_col(rt.columns["fr"], fb_types.FBFLOAT), 259 | eq_col(rt.columns["fdp"], fb_types.FBDOUBLE_PRECISION), 260 | eq_col(rt.columns["fdf"], fb_types.FBDECFLOAT, precision=34), 261 | eq_col(rt.columns["fdf16"], fb_types.FBDECFLOAT, precision=16), 262 | eq_col(rt.columns["fdf34"], fb_types.FBDECFLOAT, precision=34), 263 | 264 | @testing.provide_metadata 265 | def test_fixed_types(self, connection): 266 | t = Table( 267 | "test_fixed_types", 268 | self.metadata, 269 | Column("n4", sa_types.NUMERIC(precision=4, scale=2)), 270 | Column("d4", sa_types.DECIMAL(precision=4, scale=2)), 271 | Column("n9", sa_types.NUMERIC(precision=9, scale=3)), 272 | Column("d9", sa_types.DECIMAL(precision=9, scale=3)), 273 | Column("n18", sa_types.NUMERIC(precision=18, scale=4)), 274 | Column("d18", sa_types.DECIMAL(precision=18, scale=4)), 275 | Column("fn4", fb_types.FBNUMERIC(precision=4, scale=2)), 276 | Column("fd4", fb_types.FBDECIMAL(precision=4, scale=2)), 277 | Column("fn9", fb_types.FBNUMERIC(precision=9, scale=3)), 278 | Column("fd9", fb_types.FBDECIMAL(precision=9, scale=3)), 279 | Column("fn18", fb_types.FBNUMERIC(precision=18, scale=4)), 280 | Column("fd18", fb_types.FBDECIMAL(precision=18, 
scale=4)), 281 | ) 282 | self.metadata.create_all(testing.db) 283 | 284 | rm = MetaData() 285 | rt = Table("test_fixed_types", rm, autoload_with=testing.db) 286 | 287 | eq_col(rt.columns["n4"], fb_types.FBNUMERIC, precision=4, scale=2), 288 | eq_col(rt.columns["d4"], fb_types.FBDECIMAL, precision=4, scale=2), 289 | eq_col(rt.columns["n9"], fb_types.FBNUMERIC, precision=9, scale=3), 290 | eq_col(rt.columns["d9"], fb_types.FBDECIMAL, precision=9, scale=3), 291 | eq_col(rt.columns["n18"], fb_types.FBNUMERIC, precision=18, scale=4), 292 | eq_col(rt.columns["d18"], fb_types.FBDECIMAL, precision=18, scale=4), 293 | eq_col(rt.columns["fn4"], fb_types.FBNUMERIC, precision=4, scale=2), 294 | eq_col(rt.columns["fd4"], fb_types.FBDECIMAL, precision=4, scale=2), 295 | eq_col(rt.columns["fn9"], fb_types.FBNUMERIC, precision=9, scale=3), 296 | eq_col(rt.columns["fd9"], fb_types.FBDECIMAL, precision=9, scale=3), 297 | eq_col(rt.columns["fn18"], fb_types.FBNUMERIC, precision=18, scale=4), 298 | eq_col(rt.columns["fd18"], fb_types.FBDECIMAL, precision=18, scale=4), 299 | 300 | @testing.provide_metadata 301 | @testing.requires.firebird_4_or_higher 302 | def test_fb4_types(self, connection): 303 | t = Table( 304 | "test_fb4_types", 305 | self.metadata, 306 | Column("n38", sa_types.NUMERIC(precision=38, scale=8)), 307 | Column("d38", sa_types.DECIMAL(precision=38, scale=8)), 308 | Column("fli", fb_types.FBINT128), 309 | Column("fn38", fb_types.FBNUMERIC(precision=38, scale=8)), 310 | Column("fd38", fb_types.FBDECIMAL(precision=38, scale=8)), 311 | ) 312 | self.metadata.create_all(testing.db) 313 | 314 | rm = MetaData() 315 | rt = Table("test_fb4_types", rm, autoload_with=testing.db) 316 | 317 | eq_col(rt.columns["n38"], fb_types.FBNUMERIC, precision=38, scale=8), 318 | eq_col(rt.columns["d38"], fb_types.FBDECIMAL, precision=38, scale=8), 319 | eq_col(rt.columns["fli"], fb_types.FBINT128), 320 | eq_col(rt.columns["fn38"], fb_types.FBNUMERIC, precision=38, scale=8), 321 | eq_col(rt.columns["fd38"], fb_types.FBDECIMAL, precision=38, scale=8), 322 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [testenv:pep8] 2 | deps = 3 | flake8 4 | flake8-import-order 5 | flake8-blind-except 6 | flake8-builtins 7 | flake8-docstrings 8 | flake8-rst-docstrings 9 | flake8-logging-format 10 | black 11 | commands = flake8 12 | 13 | [flake8] 14 | # max-line-length = 88 15 | max-line-length = 79 16 | extend-ignore = E203,F405,D400,D100,D101,D102,D104,I100,I101,I201,G002,G010 17 | exclude = 18 | .idea, 19 | .tox, 20 | _venv, 21 | max-complexity = 10 22 | show-source = true 23 | enable-extensions=G 24 | application-import-names = 25 | # options are: pep257, google, numpy 26 | docstring-convention=numpy --------------------------------------------------------------------------------