├── .dockerignore ├── docs ├── contributing.rst ├── examples.rst ├── tuning.rst ├── glossary.rst ├── index.rst ├── Makefile ├── make.bat └── conf.py ├── asyncodbc ├── log.py ├── __init__.py ├── utils.py ├── pool.py ├── connection.py └── cursor.py ├── CHANGELOG.rst ├── examples ├── example_simple.py ├── example_context_managers.py ├── example_pool.py └── example_complex_queries.py ├── .github └── workflows │ ├── pypi.yml │ └── ci.yml ├── tests ├── test_slow.py ├── test_connection.py ├── test_cursor.py └── test_pool.py ├── .gitignore ├── Makefile ├── pyproject.toml ├── conftest.py ├── CONTRIBUTING.rst ├── README.rst └── LICENSE /.dockerignore: -------------------------------------------------------------------------------- 1 | venv* 2 | virtualenv* 3 | *.log 4 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. _aioodbc-contributing: 2 | 3 | .. include:: ../CONTRIBUTING.rst 4 | -------------------------------------------------------------------------------- /asyncodbc/log.py: -------------------------------------------------------------------------------- 1 | """Logging configuration.""" 2 | 3 | import logging 4 | 5 | # Name the logger after the package. 6 | logger = logging.getLogger(__package__) 7 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: 2 | :no-search: 3 | 4 | ========= 5 | Changelog 6 | ========= 7 | 8 | .. 
rst-class:: emphasize-children 9 | 10 | 0.2 11 | ==== 12 | 13 | 0.2.0 (unreleased) 14 | ------------------- 15 | Added 16 | ^^^^^ 17 | - Run bandit check in ci 18 | 19 | Changed 20 | ^^^^^^^ 21 | - feat: migrate from poetry to uv (#3) 22 | - Drop support for Python3.8 23 | 24 | Fixed 25 | ^^^^^ 26 | - Pool freezes when connection is interrupted (#2) 27 | -------------------------------------------------------------------------------- /examples/example_simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def example(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | conn = await asyncodbc.connect( 9 | dsn=dsn, 10 | ) 11 | 12 | cur = await conn.cursor() 13 | await cur.execute("SELECT 42 AS age;") 14 | rows = await cur.fetchall() 15 | print(rows) 16 | print(rows[0]) 17 | print(rows[0].age) 18 | await cur.close() 19 | await conn.close() 20 | 21 | 22 | if __name__ == "__main__": 23 | asyncio.run(example()) 24 | -------------------------------------------------------------------------------- /examples/example_context_managers.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def example(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | 9 | async with asyncodbc.create_pool(dsn=dsn) as pool: 10 | async with pool.acquire() as conn: 11 | async with conn.cursor() as cur: 12 | await cur.execute("SELECT 42 AS age;") 13 | val = await cur.fetchone() 14 | print(val) 15 | print(val.age) 16 | 17 | 18 | if __name__ == "__main__": 19 | asyncio.run(example()) 20 | -------------------------------------------------------------------------------- /examples/example_pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import asyncodbc 4 | 5 | 6 | async def pool(): 7 | dsn = "Driver=SQLite;Database=sqlite.db" 8 | pool = await 
asyncodbc.create_pool(dsn=dsn) 9 | 10 | async with pool.acquire() as conn: 11 | cur = await conn.cursor() 12 | await cur.execute("SELECT 42;") 13 | r = await cur.fetchall() 14 | print(r) 15 | await cur.close() 16 | await conn.close() 17 | pool.close() 18 | await pool.wait_closed() 19 | 20 | 21 | if __name__ == "__main__": 22 | asyncio.run(pool()) 23 | -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: pypi 2 | on: 3 | release: 4 | types: 5 | - created 6 | jobs: 7 | publish: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v5 11 | - uses: actions/setup-python@v6 12 | with: 13 | python-version: '3.x' 14 | - name: Install dependencies 15 | run: | 16 | python -m pip install --upgrade pip 17 | pip install build 18 | - name: Build package 19 | run: python -m build 20 | - name: Pypi Publish 21 | uses: pypa/gh-action-pypi-publish@release/v1 22 | with: 23 | user: __token__ 24 | password: ${{ secrets.pypi_password }} 25 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | Examples of asyncodbc usage 2 | ========================= 3 | 4 | Below is a list of examples from `asyncodbc/examples 5 | `_ 6 | 7 | Every example is a correct tiny python program. 8 | 9 | .. _aioodbc-examples-simple: 10 | 11 | Basic Usage 12 | ----------- 13 | 14 | Basic example, executes query that return important number 42. 15 | 16 | .. literalinclude:: ../examples/example_simple.py 17 | 18 | Example of query execution in connection pool. 19 | 20 | .. literalinclude:: ../examples/example_pool.py 21 | 22 | Example of using async context managers with Pool, Connection and Cursor 23 | objects. 24 | 25 | .. 
literalinclude:: ../examples/example_context_managers.py 26 | -------------------------------------------------------------------------------- /asyncodbc/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from pyodbc import dataSources as _dataSources 4 | 5 | from .connection import Connection, connect 6 | from .pool import Pool, create_pool 7 | 8 | __version__ = "0.1.1" 9 | __all__ = ["connect", "Connection", "create_pool", "Pool", "data_sources"] 10 | 11 | 12 | async def data_sources(executor=None): 13 | """Returns a dictionary mapping available DSNs to their descriptions. 14 | 15 | :param executor: instance of custom ThreadPoolExecutor, if not supplied 16 | default executor will be used 17 | :return dict: mapping of dsn to driver description 18 | """ 19 | loop = asyncio.get_event_loop() 20 | sources = await loop.run_in_executor(executor, _dataSources) 21 | return sources 22 | -------------------------------------------------------------------------------- /tests/test_slow.py: -------------------------------------------------------------------------------- 1 | import gc 2 | from unittest import mock 3 | 4 | import pytest 5 | 6 | import asyncodbc 7 | 8 | 9 | @pytest.mark.asyncio 10 | async def test___del__(dsn, recwarn, executor): 11 | conn = await asyncodbc.connect(dsn=dsn, executor=executor) 12 | exc_handler = mock.Mock() 13 | loop = conn.loop 14 | loop.set_exception_handler(exc_handler) 15 | 16 | del conn 17 | gc.collect() 18 | w = recwarn.pop() 19 | assert issubclass(w.category, ResourceWarning) 20 | 21 | msg = {"connection": mock.ANY, "message": "Unclosed connection"} # conn was deleted 22 | if loop.get_debug(): 23 | msg["source_traceback"] = mock.ANY 24 | exc_handler.assert_called_with(loop, msg) 25 | assert not loop.is_closed() 26 | -------------------------------------------------------------------------------- /docs/tuning.rst: 
-------------------------------------------------------------------------------- 1 | .. _tuning: 2 | 3 | 4 | ******************** 5 | Configuration Tuning 6 | ******************** 7 | 8 | 9 | after_created 10 | 11 | When calling ``asyncodbc.connect`` it is possible to pass an async 12 | unary function as a parameter for ``after_created``. This allows 13 | you to configure additional attributes on the underlying 14 | pyodbc connection such as ``.setencoding`` or ``.setdecoding``. 15 | 16 | ThreadPoolExecutor 17 | 18 | When using ``asyncodbc.create_pool`` it is considered a 19 | good practice to use ``ThreadPoolExecutor`` from 20 | ``concurrent.futures`` to create worker threads that 21 | are dedicated for database work allowing default threads 22 | to do other work and prevent competition between database 23 | and default workers. 24 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | pyvenv/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # PyInstaller 27 | # Usually these files are written by a python script from a template 28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest 30 | *.spec 31 | 32 | # Installer logs 33 | pip-log.txt 34 | pip-delete-this-directory.txt 35 | 36 | # Unit test / coverage reports 37 | htmlcov/ 38 | .tox/ 39 | .coverage 40 | .cache 41 | nosetests.xml 42 | coverage.xml 43 | cover 44 | 45 | # Translations 46 | *.mo 47 | *.pot 48 | 49 | # Django stuff: 50 | *.log 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | # PyBuilder 56 | target/ 57 | 58 | # PyCharm 59 | .idea 60 | *.iml 61 | # rope 62 | *.swp 63 | .ropeproject 64 | 65 | # Project 66 | tags 67 | ci/asyncodbc 68 | sqlite.db 69 | 70 | # virtual envs 71 | venv*/ 72 | virtualenv*/ 73 | .venv*/ 74 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | checkfiles = asyncodbc/ examples/ tests/ conftest.py 2 | py_warn = PYTHONDEVMODE=1 3 | pytest_opts = -n auto --cov=asyncodbc --tb=native -q 4 | 5 | up: 6 | @uv lock --upgrade 7 | 8 | deps: 9 | @uv sync --all-extras --all-groups --no-group docs $(options) 10 | 11 | _style: 12 | @ruff format $(checkfiles) 13 | @ruff check --fix $(checkfiles) 14 | style: deps _style 15 | 16 | _codeqc: 17 | #mypy $(checkfiles) 18 | bandit -c pyproject.toml -r $(checkfiles) 19 | twine check dist/* 20 | codeqc: build _codeqc 21 | 22 | _check: _build 23 | @ruff format --check $(checkfiles) || (echo "Please run 'make style' to auto-fix style issues" && false) 24 | @ruff check $(checkfiles) 25 | $(MAKE) _codeqc 26 | check: deps _check 27 | 28 | _lint: _build _style _codeqc 29 | lint: deps _lint 30 | 31 | test: deps test_mssql 32 | 33 | test_mssql: 34 | $(py_warn) TEST_DSN="DRIVER=ODBC Driver 18 for SQL Server;SERVER=127.0.0.1,1433;UID=sa;PWD=$(TEST_MSSQL_PASS);TrustServerCertificate=YES;MARS_Connection=YES" pytest $(pytest_opts) 35 | 36 | _testall: test_mssql 37 | 38 | testall: deps _testall 39 | coverage report 40 | 41 | ci: check _testall 42 | 43 | docs: deps 44 | uv pip install --group docs 
45 | rm -fR ./build 46 | sphinx-build -M html docs build 47 | 48 | _build: 49 | rm -fR dist/ 50 | uv build 51 | build: deps _build 52 | 53 | publish: deps build 54 | twine upload dist/* 55 | -------------------------------------------------------------------------------- /docs/glossary.rst: -------------------------------------------------------------------------------- 1 | .. _glossary: 2 | 3 | 4 | ******** 5 | Glossary 6 | ******** 7 | 8 | .. if you add new entries, keep the alphabetical sorting! 9 | 10 | .. glossary:: 11 | 12 | DBAPI 13 | 14 | :pep:`249` -- Python Database API Specification v2.0 15 | 16 | ipdb 17 | 18 | ipdb exports functions to access the IPython debugger, which 19 | features tab completion, syntax highlighting, better tracebacks, 20 | better introspection with the same interface as the pdb module. 21 | 22 | MySQL 23 | 24 | A popular database server. 25 | 26 | http://www.mysql.com/ 27 | 28 | ODBC 29 | 30 | Open Database Connectivity (ODBC) is a standard programming language 31 | middleware application programming interface (API) for accessing 32 | database management systems (DBMS) 33 | 34 | pep8 35 | 36 | Python style guide checker 37 | 38 | *pep8* is a tool to check your Python code against some of the 39 | style conventions in :pep:`8` -- Style Guide for Python Code. 40 | 41 | pyflakes 42 | 43 | passive checker of Python programs 44 | 45 | A simple program which checks Python source files for errors. 46 | 47 | Pyflakes analyzes programs and detects various errors. It works 48 | by parsing the source file, not importing it, so it is safe to 49 | use on modules with side effects. It's also much faster. 
50 | 51 | https://pypi.python.org/pypi/pyflakes 52 | 53 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | on: 3 | push: 4 | paths-ignore: 5 | - "docs/**" 6 | - "*.rst" 7 | pull_request: 8 | branches: 9 | - main 10 | paths-ignore: 11 | - "docs/**" 12 | - "*.rst" 13 | 14 | env: 15 | UV_SYSTEM_PYTHON: 1 16 | 17 | jobs: 18 | test: 19 | runs-on: ubuntu-22.04 20 | services: 21 | mssql: 22 | image: mcr.microsoft.com/mssql/server:2022-CU15-ubuntu-22.04 23 | ports: 24 | - 1433:1433 25 | env: 26 | ACCEPT_EULA: Y 27 | SA_PASSWORD: Abcd12345678 28 | options: >- 29 | --health-cmd "/opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P Abcd12345678 -Q 'SELECT 1' -b -o /dev/null" 30 | --health-interval 10s 31 | --health-timeout 5s 32 | --health-retries 5 33 | env: 34 | TEST_MSSQL_PASS: Abcd12345678 35 | strategy: 36 | matrix: 37 | python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13", "3.14" ] 38 | steps: 39 | - uses: actions/checkout@v5 40 | - uses: actions/setup-python@v6 41 | with: 42 | python-version: ${{ matrix.python-version }} 43 | allow-prereleases: true 44 | - name: Install ODBC driver 45 | run: | 46 | curl -sSL -O https://packages.microsoft.com/config/ubuntu/$(grep VERSION_ID /etc/os-release | cut -d '"' -f 2)/packages-microsoft-prod.deb 47 | sudo dpkg -i packages-microsoft-prod.deb 48 | sudo apt-get update 49 | sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18 50 | - uses: astral-sh/setup-uv@v7 51 | with: 52 | enable-cache: true 53 | activate-environment: true 54 | - name: Install requirements 55 | run: make deps 56 | - name: Check style 57 | run: make _check 58 | - name: Run tests 59 | run: make _testall 60 | - name: Upload Coverage 61 | run: uvx coveralls --service=github 62 | env: 63 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 64 | COVERALLS_FLAG_NAME: ${{ matrix.python-version }} 65 | COVERALLS_PARALLEL: 
true 66 | 67 | coveralls: 68 | name: Finish Coveralls 69 | needs: test 70 | runs-on: ubuntu-22.04 71 | steps: 72 | - name: Finished 73 | run: | 74 | pip3 install --upgrade coveralls 75 | coveralls --finish 76 | env: 77 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 78 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. asyncodbc documentation master file, created by 2 | sphinx-quickstart on Sun Jan 18 22:02:31 2015. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to asyncodbc's documentation! 7 | =================================== 8 | 9 | .. _GitHub: https://github.com/tortoise/asyncodbc 10 | .. _asyncio: http://docs.python.org/3.14/library/asyncio.html 11 | .. _aioodbc: https://github.com/aio-libs/aioodbc 12 | .. _pyodbc: https://github.com/mkleehammer/pyodbc 13 | .. _PEP492: https://www.python.org/dev/peps/pep-0492/ 14 | .. _ODBC: https://learn.microsoft.com/en-us/sql/odbc/microsoft-open-database-connectivity-odbc 15 | .. _unixODBC: http://www.unixodbc.org/ 16 | .. _threads: http://techspot.zzzeek.org/2015/02/15/asynchronous-python-and-databases/ 17 | 18 | **asyncodbc** is Python 3.9+ module that makes possible accessing ODBC_ databases 19 | with asyncio_. It is rely on awesome pyodbc_ library, preserve same look and 20 | feel. *asyncodbc* was written using `async/await` syntax (PEP492_) and only support 21 | Python that is not end-of-life(EOL). Internally *asyncodbc* employs threads 22 | to avoid blocking the event loop, btw threads_ are not that bad as you think :) 23 | 24 | 25 | Features 26 | -------- 27 | * Implements `asyncio` :term:`DBAPI` *like* interface for 28 | :term:`ODBC`. It includes :ref:`asyncodbc-connection`, 29 | :ref:`asyncodbc-cursor` and :ref:`asyncodbc-pool` objects. 30 | * Support connection pooling. 
31 | 32 | 33 | Source code 34 | ----------- 35 | 36 | The project is hosted on GitHub_ 37 | 38 | Please feel free to file an issue on `bug tracker 39 | `_ if you have found a bug 40 | or have some suggestion for library improvement. 41 | 42 | The library uses `Github Action `_ for 43 | Continious Integration and `Coveralls 44 | `_ for 45 | coverage reports. 46 | 47 | 48 | Dependencies 49 | ------------ 50 | 51 | - Python 3.9+ (PEP492_ coroutines) 52 | - pyodbc_ 53 | - unixODBC_ 54 | 55 | 56 | Authors and License 57 | ------------------- 58 | 59 | The ``asyncodbc`` package is inspired by aioodbc_. 60 | It's Apache-2.0 licensed. 61 | 62 | Feel free to improve this package and send a pull request to GitHub_. 63 | 64 | Contents: 65 | --------- 66 | 67 | .. toctree:: 68 | :maxdepth: 2 69 | 70 | examples 71 | tuning 72 | glossary 73 | contributing 74 | 75 | Indices and tables 76 | ================== 77 | 78 | * :ref:`genindex` 79 | * :ref:`modindex` 80 | * :ref:`search` 81 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "asyncodbc" 3 | dynamic = ["version"] 4 | description = "Forked from aioodbc and make improvement" 5 | authors = [{name="long2ice", email="long2ice@gmail.com"}] 6 | license = "Apache-2.0" 7 | readme = "README.rst" 8 | keywords = ["sql", "async", "asyncio", "aio", "mssql", "odbc"] 9 | requires-python = ">=3.9" 10 | dependencies = ["pyodbc"] 11 | 12 | [project.urls] 13 | homepage = "https://github.com/tortoise/asyncodbc" 14 | repository = "https://github.com/tortoise/asyncodbc.git" 15 | documentation = "https://github.com/tortoise/asyncodbc" 16 | 17 | [dependency-groups] 18 | dev = [ 19 | "ruff >=0.14.1", 20 | "bandit >=1.8.6", 21 | "mypy >=1.18.2", 22 | "twine >=6.2.0", 23 | ] 24 | test = [ 25 | "pytest >=8.4.2", 26 | "pytest-mock >=3.15.1", 27 | "pytest-cov >=7.0.0", 28 | "pytest-xdist >=3.8.0", 29 | 
"pytest-asyncio >=1.2.0", 30 | ] 31 | docs = [ 32 | "sphinx>=7.4.7", 33 | ] 34 | 35 | [build-system] 36 | requires = ["pdm-backend"] 37 | build-backend = "pdm.backend" 38 | 39 | [tool.pdm] 40 | version = {source="file", path="asyncodbc/__init__.py"} 41 | 42 | [tool.pdm.build] 43 | excludes = ["./**/.git", "./**/.*_cache", "./**/*.pyc", "./**/*.swp"] 44 | include = ["LICENSE", "README.rst"] 45 | 46 | [tool.pytest.ini_options] 47 | asyncio_mode = "auto" 48 | asyncio_default_fixture_loop_scope = "session" 49 | 50 | [tool.flake8] 51 | ignore = "E501,W503,DAR101,DAR201,DAR402" 52 | max-line-length = 100 53 | docstring_style = "sphinx" 54 | 55 | [tool.ruff] 56 | line-length = 100 57 | 58 | [tool.ruff.lint] 59 | extend-select = [ 60 | "I", # https://docs.astral.sh/ruff/rules/#isort-i 61 | "FA", # https://docs.astral.sh/ruff/rules/#flake8-future-annotations-fa 62 | "UP", # https://docs.astral.sh/ruff/rules/#pyupgrade-up 63 | "RUF100", # https://docs.astral.sh/ruff/rules/#ruff-specific-rules-ruf 64 | ] 65 | ignore = ["UP032"] # https://docs.astral.sh/ruff/rules/f-string/ 66 | 67 | [tool.ruff.lint.pydocstyle] 68 | convention = "pep257" 69 | 70 | [tool.ruff.lint.per-file-ignores] 71 | "docs/conf.py" = ["E401","F401","I001","UP009"] 72 | 73 | [tool.ruff.format] 74 | exclude = ["docs/conf.py"] 75 | 76 | [tool.bandit] 77 | exclude_dirs = [".venv", "examples", "tests", "conftest.py"] 78 | 79 | [tool.coverage.run] 80 | branch = true 81 | source = ["asyncodbc"] 82 | 83 | [tool.coverage.report] 84 | show_missing = true 85 | 86 | [tool.mypy] 87 | pretty = true 88 | ignore_missing_imports = true 89 | check_untyped_defs = true 90 | disallow_subclassing_any = true 91 | disallow_untyped_calls = true 92 | disallow_untyped_defs = false 93 | disallow_incomplete_defs = false 94 | disallow_untyped_decorators = true 95 | no_implicit_optional = true 96 | warn_redundant_casts = true 97 | warn_unused_ignores = true 98 | warn_no_return = true 99 | warn_return_any = false 100 | warn_unused_configs 
= true 101 | warn_unreachable = true 102 | allow_redefinition = true 103 | strict_equality = true 104 | show_error_context = true 105 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import os 3 | import uuid 4 | from concurrent.futures import ThreadPoolExecutor 5 | 6 | import pytest 7 | import pytest_asyncio 8 | 9 | import asyncodbc 10 | 11 | 12 | @pytest_asyncio.fixture 13 | async def conn(connection_maker, database): 14 | connection = await connection_maker() 15 | await connection.execute(f"USE {database};") 16 | await connection.commit() 17 | return connection 18 | 19 | 20 | @pytest.fixture(scope="session") 21 | def event_loop(): 22 | return asyncio.get_event_loop() 23 | 24 | 25 | @pytest_asyncio.fixture(scope="session", autouse=True) 26 | async def database(): 27 | connection = await asyncodbc.connect(dsn=os.getenv("TEST_DSN"), autocommit=True) 28 | db = f"test_{uuid.uuid4()}".replace("-", "") 29 | await connection.execute(f"CREATE DATABASE {db};") 30 | yield db 31 | await connection.execute(f"DROP DATABASE {db};") 32 | await connection.close() 33 | 34 | 35 | @pytest.fixture 36 | async def connection_maker(dsn, database): 37 | cleanup = [] 38 | 39 | async def make(**kw): 40 | if kw.get("executor", None) is None: 41 | executor = ThreadPoolExecutor(max_workers=1) 42 | kw["executor"] = executor 43 | else: 44 | executor = kw["executor"] 45 | 46 | conn = await asyncodbc.connect(dsn=dsn, database=database, **kw) 47 | cleanup.append((conn, executor)) 48 | return conn 49 | 50 | try: 51 | yield make 52 | finally: 53 | for conn, executor in cleanup: 54 | await conn.close() 55 | executor.shutdown(True) 56 | 57 | 58 | @pytest_asyncio.fixture 59 | async def pool(dsn): 60 | p = await asyncodbc.create_pool(dsn=dsn) 61 | 62 | try: 63 | yield p 64 | finally: 65 | p.close() 66 | await p.wait_closed() 67 | 68 | 69 | 
@pytest.fixture 70 | def dsn(): 71 | return os.getenv("TEST_DSN") 72 | 73 | 74 | @pytest_asyncio.fixture 75 | async def pool_maker(): 76 | pool_list = [] 77 | 78 | async def make(**kw): 79 | pool = await asyncodbc.create_pool(**kw) 80 | pool_list.append(pool) 81 | return pool 82 | 83 | try: 84 | yield make 85 | finally: 86 | for pool in pool_list: 87 | pool.close() 88 | await pool.wait_closed() 89 | 90 | 91 | @pytest.fixture 92 | def executor(): 93 | return ThreadPoolExecutor(max_workers=10) 94 | 95 | 96 | @pytest_asyncio.fixture 97 | async def table(conn): 98 | cur = await conn.cursor() 99 | await cur.execute("CREATE TABLE t1(n INT, v VARCHAR(10));") 100 | await cur.execute("INSERT INTO t1 VALUES (1, '123.45');") 101 | await cur.execute("INSERT INTO t1 VALUES (2, 'foo');") 102 | await conn.commit() 103 | await cur.close() 104 | 105 | try: 106 | yield "t1" 107 | finally: 108 | cur = await conn.cursor() 109 | await cur.execute("DROP TABLE t1;") 110 | await cur.commit() 111 | await cur.close() 112 | -------------------------------------------------------------------------------- /asyncodbc/utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from collections.abc import Coroutine 3 | 4 | from pyodbc import Error 5 | 6 | PY_352 = sys.version_info >= (3, 5, 2) 7 | 8 | # Issue #195. 
Don't pollute the pool with bad conns 9 | # Unfortunately occasionally sqlite will return 'HY000' for invalid query, 10 | # so we need specialize the check 11 | _CONN_CLOSE_ERRORS = { 12 | # [Microsoft][ODBC Driver 17 for SQL Server]Communication link failure 13 | "08S01": None, 14 | # [HY000] server closed the connection unexpectedly 15 | "HY000": "[HY000] server closed the connection unexpectedly", 16 | } 17 | 18 | 19 | def _is_conn_close_error(e): 20 | if not isinstance(e, Error) or len(e.args) < 2: 21 | return False 22 | 23 | sqlstate, msg = e.args[0], e.args[1] 24 | if sqlstate not in _CONN_CLOSE_ERRORS: 25 | return False 26 | 27 | check_msg = _CONN_CLOSE_ERRORS[sqlstate] 28 | if not check_msg: 29 | return True 30 | 31 | return msg.startswith(check_msg) 32 | 33 | 34 | class _ContextManager(Coroutine): 35 | __slots__ = ("_coro", "_obj") 36 | 37 | def __init__(self, coro): 38 | self._coro = coro 39 | self._obj = None 40 | 41 | def send(self, value): 42 | return self._coro.send(value) 43 | 44 | def throw(self, typ, val=None, tb=None): 45 | if val is None: 46 | return self._coro.throw(typ) 47 | elif tb is None: 48 | return self._coro.throw(typ, val) 49 | else: 50 | return self._coro.throw(typ, val, tb) 51 | 52 | def close(self): 53 | return self._coro.close() 54 | 55 | @property 56 | def gi_frame(self): 57 | return self._coro.gi_frame 58 | 59 | @property 60 | def gi_running(self): 61 | return self._coro.gi_running 62 | 63 | @property 64 | def gi_code(self): 65 | return self._coro.gi_code 66 | 67 | def __next__(self): 68 | return self.send(None) 69 | 70 | def __iter__(self): 71 | return self._coro.__await__() 72 | 73 | def __await__(self): 74 | return self._coro.__await__() 75 | 76 | async def __aenter__(self): 77 | self._obj = await self._coro 78 | return self._obj 79 | 80 | async def __aexit__(self, exc_type, exc, tb): 81 | await self._obj.close() 82 | self._obj = None 83 | 84 | 85 | class _PoolContextManager(_ContextManager): 86 | async def __aexit__(self, 
exc_type, exc, tb): 87 | self._obj.close() 88 | await self._obj.wait_closed() 89 | self._obj = None 90 | 91 | 92 | class _PoolAcquireContextManager(_ContextManager): 93 | __slots__ = ("_coro", "_conn", "_pool") 94 | 95 | def __init__(self, coro, pool): 96 | super().__init__(coro) 97 | self._coro = coro 98 | self._conn = None 99 | self._pool = pool 100 | 101 | async def __aenter__(self): 102 | self._conn = await self._coro 103 | return self._conn 104 | 105 | async def __aexit__(self, exc_type, exc, tb): 106 | try: 107 | await self._pool.release(self._conn) 108 | finally: 109 | self._pool = None 110 | self._conn = None 111 | 112 | 113 | class _ConnectionContextManager(_ContextManager): 114 | async def __aexit__(self, exc_type, exc, tb): 115 | await self._obj.close() 116 | self._obj = None 117 | -------------------------------------------------------------------------------- /examples/example_complex_queries.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from functools import partial 3 | 4 | import asyncodbc 5 | 6 | dsn = "Driver=SQLite;Database=sqlite.db" 7 | 8 | 9 | # Sometimes you may want to reuse same connection parameters multiple times. 10 | # This can be accomplished in a way below using partial function 11 | connect = partial(asyncodbc.connect, dsn=dsn, echo=True, autocommit=True) 12 | 13 | 14 | async def init_database(): 15 | """ 16 | Initialize test database with sample schema/data to reuse in other tests. 17 | Make sure that in real applications you have database initialization 18 | file as separate *.sql script or rely on autogenerated code provided 19 | by your ORM. 
20 | """ 21 | async with connect(loop=loop) as conn: 22 | async with conn.cursor() as cur: 23 | sql = "CREATE TABLE IF NOT EXISTS t1(n INTEGER, v TEXT);" 24 | await cur.execute(sql) 25 | 26 | 27 | async def error_without_context_managers(): 28 | """ 29 | When not using context manager you may end up having unclosed connections 30 | in case of any error which lead to resource leakage. To avoid 31 | `Unclosed connection` errors in your code always close after yourself. 32 | """ 33 | conn = await asyncodbc.connect(dsn=dsn) 34 | cur = await conn.cursor() 35 | 36 | try: 37 | await cur.execute("SELECT 42 AS;") 38 | rows = await cur.fetchall() 39 | print(rows) 40 | except Exception: 41 | pass 42 | finally: 43 | await cur.close() 44 | await conn.close() 45 | 46 | 47 | async def insert_with_values(): 48 | """ 49 | When providing data to your SQL statement make sure to parametrize it with 50 | question marks placeholders. Do not use string formatting or make sure 51 | your data is escaped to prevent sql injections. 52 | 53 | NOTE: pyodbc does not support named placeholders syntax. 54 | """ 55 | async with connect(loop=loop) as conn: 56 | async with conn.cursor() as cur: 57 | # Substitute sql markers with variables 58 | await cur.execute("INSERT INTO t1(n, v) VALUES(?, ?);", ("2", "test 2")) 59 | # NOTE: make sure to pass variables as tuple of strings even if 60 | # your data types are different to prevent 61 | # pyodbc.ProgrammingError errors. You can even do like this 62 | values = (3, "test 3") 63 | await cur.execute("INSERT INTO t1(n, v) VALUES(?, ?);", *map(str, values)) 64 | 65 | # Retrieve id of last inserted row 66 | await cur.execute("SELECT last_insert_rowid();") 67 | result = await cur.fetchone() 68 | print(result[0]) 69 | 70 | 71 | async def commit(): 72 | """ 73 | When not using `autocommit` parameter do not forget to explicitly call 74 | this method for your changes to persist within database. 
75 | """ 76 | async with asyncodbc.connect(dsn=dsn, loop=loop) as conn: 77 | async with conn.cursor() as cur: 78 | sql = 'INSERT INTO t1 VALUES(1, "test");' 79 | await cur.execute(sql) 80 | # Make sure your changes will be actually saved into database 81 | await cur.commit() 82 | 83 | async with asyncodbc.connect(dsn=dsn, loop=loop) as conn: 84 | async with conn.cursor() as cur: 85 | sql_select = "SELECT * FROM t1;" 86 | await cur.execute(sql_select) 87 | # At this point without autocommiting you will not see 88 | # the data inserted above 89 | print(await cur.fetchone()) 90 | 91 | 92 | if __name__ == "__main__": 93 | loop = asyncio.get_event_loop() 94 | loop.run_until_complete(init_database()) 95 | loop.run_until_complete(commit()) 96 | loop.run_until_complete(insert_with_values()) 97 | loop.run_until_complete(error_without_context_managers()) 98 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | Thanks for your interest in contributing to ``asyncodbc``, there are multiple 5 | ways and places you can contribute. 6 | 7 | Reporting an Issue 8 | ------------------ 9 | If you have found issue with `asyncodbc` please do 10 | not hesitate to file an issue on the GitHub_ project. When filing your 11 | issue please make sure you can express the issue with a reproducible test 12 | case. 13 | 14 | When reporting an issue we also need as much information about your environment 15 | that you can include. We never know what information will be pertinent when 16 | trying narrow down the issue. Please include at least the following 17 | information: 18 | 19 | * Version of `asyncodbc` and `python`. 20 | * Version of your ODBC database 21 | * Version of database ODBC driver 22 | * Version of unixODBC_ 23 | * Platform you're running on (OS X, Linux, Windows). 
24 | 25 | 26 | Instructions for contributors 27 | ----------------------------- 28 | 29 | 30 | In order to make a clone of the GitHub_ repo: open the link and press the 31 | "Fork" button on the upper-right menu of the web page. 32 | 33 | I hope everybody knows how to work with git and github nowadays :) 34 | 35 | Work flow is pretty straightforward: 36 | 37 | 1. Clone the GitHub_ repo 38 | 39 | 2. Make a change 40 | 41 | 3. Make sure all tests passed 42 | 43 | 4. Commit changes to own asyncodbc clone 44 | 45 | 5. Make pull request from github page for your clone 46 | 47 | Preconditions for running asyncodbc test suite 48 | --------------------------------------------- 49 | 50 | We expect you to use a python virtual environment and docker_ to run 51 | our tests. 52 | 53 | There are several ways to make a virtual environment. 54 | 55 | If you like to use *virtualenv* please run:: 56 | 57 | $ cd asyncodbc 58 | $ virtualenv --python=`which python3.13` venv 59 | 60 | For standard python *venv*:: 61 | 62 | $ cd asyncodbc 63 | $ python3.13 -m venv venv 64 | 65 | For *virtualenvwrapper*:: 66 | 67 | $ cd asyncodbc 68 | $ mkvirtualenv --python=`which python3.13` asyncodbc 69 | 70 | For *uv*:: 71 | 72 | $ cd asyncodbc 73 | $ uv venv --python=3.13 --prompt=asyncodbc-py3.13 74 | 75 | There are other tools like *pyvenv* but you know the rule of thumb 76 | now: create a python3 virtual environment and activate it. 77 | 78 | After that please install libraries required for development:: 79 | 80 | $ pip install -r pyproject.toml --group dev --group test -e . 81 | 82 | We also recommend to install *ipdb* but it's on your own:: 83 | 84 | $ pip install ipdb 85 | 86 | Congratulations, you are ready to run the test suite 87 | 88 | 89 | Install database 90 | ---------------- 91 | You do not need to install any databases, docker_ will pull images and create 92 | containers for you automatically, after the tests, containers will be removed. 
93 | 94 | 95 | Run asyncodbc test suite 96 | ------------------------ 97 | 98 | After all the preconditions are met you can run the tests by typing the 99 | following command:: 100 | 101 | $ make ci 102 | 103 | Or if you want to run only one particular test:: 104 | 105 | $ pytest tests/test_connection.py -k test_basic_cursor 106 | 107 | The command at first will run the static and style checkers (sorry, we don't 108 | accept pull requests with `pep8` or `pyflakes` errors). 109 | 110 | On `ruff` success the tests will be run. 111 | 112 | Please take a look at the produced output. 113 | 114 | Any extra text (print statements and so on) should be removed. 115 | 116 | 117 | Test coverage 118 | -------------- 119 | 120 | We are trying hard to have good test coverage; please don't make it worse. 121 | 122 | Use:: 123 | 124 | $ make testall 125 | 126 | to run the test suite and collect coverage information. Once the command 127 | has finished, check your coverage at the file that appears in the last 128 | line of the output: 129 | ``open file:///.../asyncodbc/htmlcov/index.html`` 130 | 131 | Please go to the link and make sure that your code change is covered. 132 | 133 | 134 | Documentation 135 | ------------- 136 | 137 | We encourage documentation improvements. 138 | 139 | Before making a Pull Request with documentation changes, please run:: 140 | 141 | $ make docs 142 | 143 | Once it finishes it will output the index HTML page 144 | ``open file:///.../asyncodbc/build/html/index.html``. 145 | 146 | Go to the link and make sure your doc changes look good. 147 | 148 | The End 149 | ------- 150 | 151 | After finishing all steps make a GitHub_ Pull Request, thanks. 152 | 153 | 154 | .. _unixODBC: http://www.unixodbc.org/ 155 | .. _GitHub: https://github.com/aio-libs/aioodbc 156 | .. 
_docker: https://docs.docker.com/engine/installation/ 157 | -------------------------------------------------------------------------------- /tests/test_connection.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import gc 3 | from unittest import mock 4 | 5 | import pyodbc 6 | import pytest 7 | 8 | import asyncodbc 9 | 10 | 11 | def test_connect(conn): 12 | assert not conn.autocommit 13 | assert conn.timeout == 0 14 | assert not conn.closed 15 | 16 | 17 | @pytest.mark.asyncio 18 | async def test_connect_hook(connection_maker): 19 | raw_conn = None 20 | 21 | async def hook(conn): 22 | nonlocal raw_conn 23 | raw_conn = conn 24 | 25 | connection = await connection_maker(after_created=hook) 26 | assert connection._conn == raw_conn 27 | 28 | 29 | @pytest.mark.asyncio 30 | async def test_basic_cursor(conn): 31 | cursor = await conn.cursor() 32 | sql = "SELECT 10;" 33 | await cursor.execute(sql) 34 | (resp,) = await cursor.fetchone() 35 | assert resp == 10 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_default_loop(dsn): 40 | conn = await asyncodbc.connect(dsn=dsn) 41 | assert conn._loop is asyncio.get_running_loop() 42 | await conn.close() 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_close_twice(conn): 47 | await conn.close() 48 | await conn.close() 49 | assert conn.closed 50 | 51 | 52 | @pytest.mark.asyncio 53 | async def test_execute(conn): 54 | cur = await conn.execute("SELECT 10;") 55 | (resp,) = await cur.fetchone() 56 | await conn.close() 57 | assert resp == 10 58 | assert conn.closed 59 | 60 | 61 | @pytest.mark.asyncio 62 | async def test_output_conversion(conn, table): 63 | def convert(value): 64 | # value will be a string. We'll simply add an X at the 65 | # beginning at the end. 
66 | if isinstance(value, str): 67 | return "X" + value + "X" 68 | return b"X" + value + b"X" 69 | 70 | await conn.add_output_converter(pyodbc.SQL_VARCHAR, convert) 71 | cur = await conn.cursor() 72 | 73 | await cur.execute("INSERT INTO t1 VALUES (3, '123.45')") 74 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 75 | (value,) = await cur.fetchone() 76 | 77 | assert value in (b"X123.45X", "X123.45X") 78 | 79 | # Now clear the conversions and try again. There should be 80 | # no Xs this time. 81 | await conn.clear_output_converters() 82 | await cur.execute("SELECT v FROM t1") 83 | (value,) = await cur.fetchone() 84 | assert value == "123.45" 85 | await cur.close() 86 | 87 | 88 | @pytest.mark.asyncio 89 | async def test_autocommit(connection_maker): 90 | conn = await connection_maker(autocommit=True) 91 | assert conn.autocommit, True 92 | 93 | 94 | @pytest.mark.asyncio 95 | async def test_rollback(conn): 96 | assert not conn.autocommit 97 | 98 | cur = await conn.cursor() 99 | await cur.execute("CREATE TABLE t1(n INT, v VARCHAR(10));") 100 | 101 | await conn.commit() 102 | 103 | await cur.execute("INSERT INTO t1 VALUES (1, '123.45');") 104 | await cur.execute("SELECT v FROM t1") 105 | (value,) = await cur.fetchone() 106 | assert value == "123.45" 107 | 108 | await conn.rollback() 109 | await cur.execute("SELECT v FROM t1;") 110 | value = await cur.fetchone() 111 | assert value is None 112 | await cur.execute("DROP TABLE t1;") 113 | await conn.commit() 114 | 115 | await conn.close() 116 | 117 | 118 | @pytest.mark.asyncio 119 | async def test_custom_executor(dsn, executor): 120 | conn = await asyncodbc.connect( 121 | dsn=dsn, 122 | executor=executor, 123 | ) 124 | assert conn._executor is executor 125 | cur = await conn.execute("SELECT 10;") 126 | (resp,) = await cur.fetchone() 127 | await conn.close() 128 | assert resp == 10 129 | assert conn.closed 130 | 131 | 132 | @pytest.mark.asyncio 133 | async def test_data_sources(executor): 134 | data = await 
asyncodbc.data_sources(executor) 135 | assert isinstance(data, dict) 136 | 137 | 138 | @pytest.mark.asyncio 139 | async def test_connection_simple_with(conn): 140 | assert not conn.closed 141 | async with conn: 142 | pass 143 | 144 | assert conn.closed 145 | 146 | 147 | @pytest.mark.asyncio 148 | async def test_connect_context_manager(dsn): 149 | async with asyncodbc.connect(dsn=dsn, echo=True) as conn: 150 | assert not conn.closed 151 | assert conn.echo 152 | 153 | cur = await conn.execute("SELECT 10;") 154 | assert cur.echo 155 | (resp,) = await cur.fetchone() 156 | assert resp == 10 157 | await cur.close() 158 | 159 | assert conn.closed 160 | 161 | 162 | @pytest.mark.asyncio 163 | async def test___del__(dsn, recwarn, executor): 164 | conn = await asyncodbc.connect(dsn=dsn, executor=executor) 165 | exc_handler = mock.Mock() 166 | loop = conn._loop 167 | loop.set_exception_handler(exc_handler) 168 | 169 | del conn 170 | gc.collect() 171 | w = recwarn.pop() 172 | assert issubclass(w.category, ResourceWarning) 173 | 174 | msg = {"connection": mock.ANY, "message": "Unclosed connection"} # conn was deleted 175 | if loop.get_debug(): 176 | msg["source_traceback"] = mock.ANY 177 | exc_handler.assert_called_with(loop, msg) 178 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | asyncodbc 2 | ========= 3 | .. image:: https://github.com/tortoise/asyncodbc/actions/workflows/ci.yml/badge.svg?branch=main 4 | :target: https://github.com/tortoise/asyncodbc/actions?query=workflow:ci 5 | .. image:: https://coveralls.io/repos/tortoise/asyncodbc/badge.svg?branch=main&service=github 6 | :target: https://coveralls.io/github/tortoise/asyncodbc?branch=main 7 | .. 
image:: https://img.shields.io/pypi/v/asyncodbc.svg 8 | :target: https://pypi.python.org/pypi/asyncodbc 9 | 10 | **asyncodbc** is a Python 3.9+ module that makes it possible to access ODBC_ databases 11 | with asyncio_. It relies on the awesome pyodbc_ library and preserves the same look and 12 | feel. *asyncodbc* was written using `async/await` syntax (PEP492_) and only supports 13 | Python versions that are not end-of-life (EOL). Internally *asyncodbc* employs threads to avoid 14 | blocking the event loop; threads_ are not as bad as you think! Other 15 | drivers like motor_ use the same approach. 16 | 17 | **asyncodbc** is fully compatible and tested with uvloop_. Take a look at the test 18 | suite; all tests are executed with both the default event loop and uvloop_. 19 | 20 | Supported Databases 21 | ------------------- 22 | 23 | **asyncodbc** should work with all databases supported by pyodbc_. But for now the 24 | library has been tested with: **SQLite**, **MySQL** and **PostgreSQL**. Feel 25 | free to add other databases to the test suite by submitting a PR. 26 | 27 | Basic Example 28 | ------------- 29 | 30 | **asyncodbc** is based on pyodbc_ and provides the same API; you just need 31 | to use ``yield from conn.f()`` or ``await conn.f()`` instead of ``conn.f()``. 32 | 33 | Properties are unchanged, so ``conn.prop`` is correct as well as 34 | ``conn.prop = val``. 35 | 36 | 37 | .. 
code:: python 38 | 39 | import asyncio 40 | import asyncodbc 41 | 42 | async def test_example(): 43 | dsn = 'Driver=SQLite;Database=sqlite.db' 44 | conn = await asyncodbc.connect(dsn=dsn, loop=loop) 45 | 46 | cur = await conn.cursor() 47 | await cur.execute("SELECT 42 AS age;") 48 | rows = await cur.fetchall() 49 | print(rows) 50 | print(rows[0]) 51 | print(rows[0].age) 52 | await cur.close() 53 | await conn.close() 54 | 55 | asyncio.run(test_example()) 56 | 57 | 58 | Connection Pool 59 | --------------- 60 | Connection pooling is ported from aiopg_ and relies on PEP492_ features: 61 | 62 | .. code:: python 63 | 64 | import asyncio 65 | import asyncodbc 66 | 67 | async def test_pool(): 68 | dsn = 'Driver=SQLite;Database=sqlite.db' 69 | pool = await asyncodbc.create_pool(dsn=dsn, loop=loop) 70 | 71 | async with pool.acquire() as conn: 72 | cur = await conn.cursor() 73 | await cur.execute("SELECT 42;") 74 | r = await cur.fetchall() 75 | print(r) 76 | await cur.close() 77 | await conn.close() 78 | pool.close() 79 | await pool.wait_closed() 80 | 81 | asyncio.run(test_pool()) 82 | 83 | 84 | Context Managers 85 | ---------------- 86 | `Pool`, `Connection` and `Cursor` objects support the context management 87 | protocol: 88 | 89 | .. code:: python 90 | 91 | import asyncio 92 | import asyncodbc 93 | 94 | async def test_example(): 95 | dsn = 'Driver=SQLite;Database=sqlite.db' 96 | 97 | async with asyncodbc.create_pool(dsn=dsn, loop=loop) as pool: 98 | async with pool.acquire() as conn: 99 | async with conn.cursor() as cur: 100 | await cur.execute('SELECT 42 AS age;') 101 | val = await cur.fetchone() 102 | print(val) 103 | print(val.age) 104 | 105 | asyncio.run(test_example()) 106 | 107 | 108 | Installation 109 | ------------ 110 | 111 | In a linux environment pyodbc_ (hence *asyncodbc*) requires the unixODBC_ library. 
112 | You can install it using your package manager, for example:: 113 | 114 | $ sudo apt-get install unixodbc 115 | $ sudo apt-get install unixodbc-dev 116 | 117 | then:: 118 | 119 | pip install asyncodbc 120 | 121 | 122 | Run tests 123 | --------- 124 | 125 | For testing purposes you need to install the test group requirements:: 126 | 127 | $ uv pip install -r pyproject.toml --group test -e . 128 | 129 | Then just execute:: 130 | 131 | $ make test_mssql 132 | 133 | *NOTE:* Running tests requires Python 3.9 or higher. 134 | 135 | 136 | Other SQL Drivers 137 | ----------------- 138 | 139 | * asyncpg_ - asyncio client for PostgreSQL 140 | * aiomysql_ - asyncio client for MySQL 141 | 142 | 143 | Requirements 144 | ------------ 145 | 146 | * Python_ 3.9+ 147 | * pyodbc_ 148 | * uvloop_ (optional) 149 | 150 | 151 | .. _Python: https://www.python.org 152 | .. _asyncio: http://docs.python.org/3.14/library/asyncio.html 153 | .. _pyodbc: https://github.com/mkleehammer/pyodbc 154 | .. _uvloop: https://github.com/MagicStack/uvloop 155 | .. _ODBC: https://en.wikipedia.org/wiki/Open_Database_Connectivity 156 | .. _asyncpg: https://github.com/MagicStack/asyncpg 157 | .. _aiopg: https://github.com/aio-libs/aiopg 158 | .. _aiomysql: https://github.com/aio-libs/aiomysql 159 | .. _PEP492: https://www.python.org/dev/peps/pep-0492/ 160 | .. _unixODBC: http://www.unixodbc.org/ 161 | .. _threads: http://techspot.zzzeek.org/2015/02/15/asynchronous-python-and-databases/ 162 | .. _docker: https://docs.docker.com/engine/installation/ 163 | .. 
_motor: https://emptysqua.re/blog/motor-0-7-beta/ 164 | -------------------------------------------------------------------------------- /tests/test_cursor.py: -------------------------------------------------------------------------------- 1 | import pyodbc 2 | import pytest 3 | 4 | 5 | @pytest.mark.asyncio 6 | async def test_cursor_with(conn, table): 7 | ret = [] 8 | 9 | # regular cursor usage 10 | cur = await conn.cursor() 11 | await cur.execute("SELECT * FROM t1;") 12 | assert not cur.closed 13 | assert not cur.echo 14 | 15 | # cursor should be closed 16 | async with cur: 17 | assert not cur.echo 18 | async for i in cur: 19 | ret.append(i) 20 | expected = [tuple(r) for r in ret] 21 | assert [(1, "123.45"), (2, "foo")] == expected 22 | assert cur.closed 23 | 24 | 25 | @pytest.mark.asyncio 26 | async def test_cursor_lightweight(conn, table): 27 | cur = await conn.cursor() 28 | ex_cursor = await cur.execute("SELECT * FROM t1;") 29 | assert ex_cursor is cur 30 | 31 | assert not cur.closed 32 | async with cur: 33 | pass 34 | 35 | assert cur.closed 36 | 37 | 38 | @pytest.mark.asyncio 39 | async def test_cursor_await(conn, table): 40 | async with conn.cursor() as cur: 41 | await cur.execute("SELECT * FROM t1;") 42 | assert not cur.closed 43 | 44 | assert cur.closed 45 | 46 | 47 | @pytest.mark.asyncio 48 | async def test_cursor(conn): 49 | cur = await conn.cursor() 50 | assert cur.connection is conn 51 | assert cur._loop, conn.loop 52 | assert cur.arraysize == 1 53 | assert cur.rowcount == -1 54 | 55 | r = await cur.setinputsizes() 56 | assert r is None 57 | 58 | await cur.setoutputsize() 59 | assert r is None 60 | await cur.close() 61 | 62 | 63 | @pytest.mark.asyncio 64 | async def test_execute_on_closed_cursor(conn): 65 | cur = await conn.cursor() 66 | await cur.close() 67 | with pytest.raises(pyodbc.OperationalError): 68 | await cur.execute("SELECT 1;") 69 | 70 | 71 | @pytest.mark.asyncio 72 | async def test_close(conn): 73 | cur = await conn.cursor() 74 | assert 
not cur.closed 75 | await cur.close() 76 | await cur.close() 77 | assert cur.closed 78 | 79 | 80 | @pytest.mark.asyncio 81 | async def test_description(conn): 82 | cur = await conn.cursor() 83 | assert cur.description is None 84 | await cur.execute("SELECT 1;") 85 | expected = (("", int, None, 10, 10, 0, False),) 86 | assert cur.description == expected 87 | await cur.close() 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_description_with_real_table(conn, table): 92 | cur = await conn.cursor() 93 | await cur.execute("SELECT * FROM t1;") 94 | 95 | expected = (("n", int, None, 10, 10, 0, True), ("v", str, None, 10, 10, 0, True)) 96 | assert cur.description == expected 97 | await cur.close() 98 | 99 | 100 | @pytest.mark.asyncio 101 | async def test_rowcount_with_table(conn, table): 102 | cur = await conn.cursor() 103 | await cur.execute("SELECT * FROM t1;") 104 | await cur.fetchall() 105 | assert cur.rowcount == -1 106 | await cur.close() 107 | 108 | 109 | @pytest.mark.asyncio 110 | async def test_arraysize(conn): 111 | cur = await conn.cursor() 112 | assert 1 == cur.arraysize 113 | cur.arraysize = 10 114 | assert 10 == cur.arraysize 115 | await cur.close() 116 | 117 | 118 | @pytest.mark.asyncio 119 | async def test_fetchall(conn, table): 120 | cur = await conn.cursor() 121 | await cur.execute("SELECT * FROM t1;") 122 | resp = await cur.fetchall() 123 | expected = [(1, "123.45"), (2, "foo")] 124 | 125 | for row, exp in zip(resp, expected): 126 | assert exp == tuple(row) 127 | 128 | await cur.close() 129 | 130 | 131 | @pytest.mark.asyncio 132 | async def test_fetchmany(conn, table): 133 | cur = await conn.cursor() 134 | await cur.execute("SELECT * FROM t1;") 135 | resp = await cur.fetchmany(1) 136 | expected = [(1, "123.45")] 137 | 138 | for row, exp in zip(resp, expected): 139 | assert exp == tuple(row) 140 | 141 | await cur.close() 142 | 143 | 144 | @pytest.mark.asyncio 145 | async def test_fetchone(conn, table): 146 | cur = await conn.cursor() 147 | await 
cur.execute("SELECT * FROM t1;") 148 | resp = await cur.fetchone() 149 | expected = (1, "123.45") 150 | 151 | assert expected == tuple(resp) 152 | await cur.close() 153 | 154 | 155 | @pytest.mark.asyncio 156 | async def test_cursor_rollback(conn, table): 157 | cur = await conn.cursor() 158 | await cur.execute("INSERT INTO t1 VALUES (3, '123.45');") 159 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 160 | (value,) = await cur.fetchone() 161 | assert value == "123.45" 162 | 163 | await cur.rollback() 164 | await cur.execute("SELECT v FROM t1 WHERE n=3;") 165 | value = await cur.fetchone() 166 | assert value is None 167 | 168 | 169 | @pytest.mark.asyncio 170 | async def test_executemany(conn): 171 | cur = await conn.cursor() 172 | await cur.execute("CREATE TABLE t1(a int, b VARCHAR(10))") 173 | # TODO: figure out why it is possible to insert only strings... but not int 174 | params = [(str(i), str(i)) for i in range(1, 6)] 175 | await cur.executemany("INSERT INTO t1(a, b) VALUES (?, ?)", params) 176 | await cur.execute("SELECT COUNT(*) FROM t1") 177 | count = await cur.fetchone() 178 | assert count[0] == len(params) 179 | 180 | await cur.execute("SELECT a, b FROM t1 ORDER BY a") 181 | rows = await cur.fetchall() 182 | assert count[0] == len(rows) 183 | 184 | for param, row in zip(params, rows): 185 | assert int(param[0]) == row[0] 186 | assert param[1] == row[1] 187 | await cur.execute("DROP TABLE t1;") 188 | 189 | 190 | @pytest.mark.asyncio 191 | async def test_primaryKeys_empty(conn, table): 192 | cur = await conn.cursor() 193 | await cur.primaryKeys("t1", "t1", "t1") 194 | resp = await cur.fetchall() 195 | assert resp == [] 196 | 197 | 198 | @pytest.mark.asyncio 199 | async def test_foreignKeys_empty(conn, table): 200 | cur = await conn.cursor() 201 | await cur.foreignKeys("t1") 202 | resp = await cur.fetchall() 203 | assert resp == [] 204 | 205 | 206 | @pytest.mark.asyncio 207 | async def test_getTypeInfo_empty(conn, table): 208 | cur = await conn.cursor() 209 
| await cur.getTypeInfo(pyodbc.SQL_CHAR) 210 | resp = await cur.fetchall() 211 | expected = [ 212 | ( 213 | "char", 214 | 1, 215 | 8000, 216 | "'", 217 | "'", 218 | "length", 219 | 1, 220 | 0, 221 | 3, 222 | None, 223 | 0, 224 | None, 225 | "char", 226 | None, 227 | None, 228 | 1, 229 | None, 230 | None, 231 | None, 232 | 1, 233 | ) 234 | ] 235 | type_info = [tuple(r) for r in resp] 236 | assert type_info == expected 237 | -------------------------------------------------------------------------------- /asyncodbc/pool.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import collections 3 | 4 | from .connection import Connection, connect 5 | from .utils import _PoolAcquireContextManager, _PoolContextManager 6 | 7 | __all__ = ["create_pool", "Pool"] 8 | 9 | 10 | def create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, **kwargs): 11 | return _PoolContextManager( 12 | _create_pool( 13 | minsize=minsize, maxsize=maxsize, echo=echo, pool_recycle=pool_recycle, **kwargs 14 | ) 15 | ) 16 | 17 | 18 | async def _create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, **kwargs): 19 | pool = Pool(minsize=minsize, maxsize=maxsize, echo=echo, pool_recycle=pool_recycle, **kwargs) 20 | if minsize > 0: 21 | async with pool.cond: 22 | await pool.fill_free_pool(False) 23 | return pool 24 | 25 | 26 | class Pool(asyncio.AbstractServer): 27 | """Connection pool, just from aiomysql""" 28 | 29 | def __init__( 30 | self, minsize: int, maxsize: int, pool_recycle: int, echo: bool = False, **kwargs 31 | ) -> None: 32 | if minsize < 0: 33 | raise ValueError("minsize should be zero or greater") 34 | if maxsize < minsize: 35 | raise ValueError("maxsize should be not less than minsize") 36 | self._minsize = minsize 37 | self._loop = asyncio.get_event_loop() 38 | self._conn_kwargs = kwargs 39 | self._acquiring = 0 40 | self._free: collections.deque[Connection] = collections.deque(maxlen=maxsize) 41 | self._cond = 
asyncio.Condition() 42 | self._used: set[Connection] = set() 43 | self._terminated: set[Connection] = set() 44 | self._closing = False 45 | self._closed = False 46 | self._echo = echo 47 | self._recycle = pool_recycle 48 | 49 | @property 50 | def echo(self): 51 | return self._echo 52 | 53 | @property 54 | def cond(self): 55 | return self._cond 56 | 57 | @property 58 | def minsize(self): 59 | return self._minsize 60 | 61 | @property 62 | def maxsize(self): 63 | return self._free.maxlen 64 | 65 | @property 66 | def size(self): 67 | return self.freesize + len(self._used) + self._acquiring 68 | 69 | @property 70 | def freesize(self): 71 | return len(self._free) 72 | 73 | @property 74 | def closed(self): 75 | return self._closed 76 | 77 | async def clear(self): 78 | """Close all free connections in pool.""" 79 | async with self._cond: 80 | while self._free: 81 | conn = self._free.popleft() 82 | await conn.close() 83 | self._cond.notify() 84 | 85 | def close(self): 86 | """Close pool. 87 | 88 | Mark all pool connections to be closed on getting back to pool. 89 | Closed pool doesn't allow to acquire new connections. 90 | """ 91 | if self._closed: 92 | return 93 | self._closing = True 94 | 95 | def terminate(self): 96 | """Terminate pool. 97 | 98 | Close pool with instantly closing all acquired connections also. 99 | """ 100 | 101 | self.close() 102 | 103 | for conn in list(self._used): 104 | conn.close() 105 | self._terminated.add(conn) 106 | 107 | self._used.clear() 108 | 109 | async def wait_closed(self): 110 | """ 111 | Wait for closing all pool's connections. 
112 | 113 | :raises RuntimeError: if pool is not closing 114 | """ 115 | 116 | if self._closed: 117 | return 118 | if not self._closing: 119 | raise RuntimeError(".wait_closed() should be called after .close()") 120 | 121 | while self._free: 122 | conn = self._free.popleft() 123 | await conn.close() 124 | 125 | async with self._cond: 126 | while self.size > self.freesize: 127 | await self._cond.wait() 128 | 129 | self._closed = True 130 | 131 | def acquire(self): 132 | """Acquire free connection from the pool.""" 133 | coro = self._acquire() 134 | return _PoolAcquireContextManager(coro, self) 135 | 136 | async def _acquire(self): 137 | if self._closing: 138 | raise RuntimeError("Cannot acquire connection after closing pool") 139 | async with self._cond: 140 | while True: 141 | await self.fill_free_pool(True) 142 | if self._free: 143 | conn = self._free.popleft() 144 | self._used.add(conn) 145 | return conn 146 | else: 147 | await self._cond.wait() 148 | 149 | async def fill_free_pool(self, override_min: bool = False): 150 | # iterate over free connections and remove timeouted ones 151 | free_size = len(self._free) 152 | n = 0 153 | while n < free_size: 154 | conn = self._free[-1] 155 | if conn.expired or ( 156 | self._recycle > -1 and self._loop.time() - conn.last_usage > self._recycle 157 | ): 158 | self._free.pop() 159 | await conn.close() 160 | else: 161 | self._free.rotate() 162 | n += 1 163 | 164 | while self.size < self.minsize: 165 | self._acquiring += 1 166 | try: 167 | conn = await connect(echo=self._echo, **self._conn_kwargs) 168 | # raise exception if pool is closing 169 | self._free.append(conn) 170 | self._cond.notify() 171 | finally: 172 | self._acquiring -= 1 173 | if self._free: 174 | return 175 | 176 | if override_min and self.size < self.maxsize: 177 | self._acquiring += 1 178 | try: 179 | conn = await connect(echo=self._echo, **self._conn_kwargs) 180 | # raise exception if pool is closing 181 | self._free.append(conn) 182 | self._cond.notify() 
183 | finally: 184 | self._acquiring -= 1 185 | 186 | async def _wakeup(self): 187 | async with self._cond: 188 | self._cond.notify() 189 | 190 | async def release(self, conn): 191 | if conn in self._terminated: 192 | self._terminated.remove(conn) 193 | return 194 | self._used.remove(conn) 195 | if conn.connected and not conn.closed: 196 | if self._closing: 197 | await conn.close() 198 | else: 199 | self._free.append(conn) 200 | await self._wakeup() 201 | 202 | async def __aenter__(self): 203 | return self 204 | 205 | async def __aexit__(self, exc_type, exc_val, exc_tb): 206 | self.close() 207 | await self.wait_closed() 208 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiomysql.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiomysql.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/aiomysql" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiomysql" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. 
gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 
92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiomysql.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiomysql.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 
221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /asyncodbc/connection.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | import traceback 4 | import warnings 5 | from functools import partial 6 | 7 | import pyodbc 8 | 9 | from .cursor import Cursor 10 | from .utils import _ConnectionContextManager, _ContextManager, _is_conn_close_error 11 | 12 | __all__ = ["connect", "Connection"] 13 | 14 | 15 | def connect( 16 | *, 17 | dsn, 18 | autocommit=False, 19 | ansi=False, 20 | timeout=0, 21 | executor=None, 22 | echo=False, 23 | after_created=None, 24 | **kwargs, 25 | ): 26 | """Accepts an ODBC connection string and returns a new Connection object. 27 | 28 | The connection string can be passed as the string `str`, as a list of 29 | keywords,or a combination of the two. Any keywords except autocommit, 30 | ansi, and timeout are simply added to the connection string. 31 | 32 | param autocommit bool: False or zero, the default, if True or non-zero, 33 | the connection is put into ODBC autocommit mode and statements are 34 | committed automatically. 35 | param ansi bool: By default, pyodbc first attempts to connect using 36 | the Unicode version of SQLDriverConnectW. 
If the driver returns IM001 37 | indicating it does not support the Unicode version, the ANSI version 38 | is tried. 39 | param timeout int: An integer login timeout in seconds, used to set 40 | the SQL_ATTR_LOGIN_TIMEOUT attribute of the connection. The default is 41 | 0 which means the database's default timeout, if any, is use 42 | param after_created callable: support customize configuration after 43 | connection is connected. Must be an async unary function, or leave it 44 | as None. 45 | param ansi bool: If True, use the ANSI version of SQLDriverConnectW. 46 | """ 47 | return _ConnectionContextManager( 48 | _connect( 49 | dsn=dsn, 50 | autocommit=autocommit, 51 | ansi=ansi, 52 | timeout=timeout, 53 | executor=executor, 54 | echo=echo, 55 | after_created=after_created, 56 | **kwargs, 57 | ) 58 | ) 59 | 60 | 61 | async def _connect( 62 | *, 63 | dsn, 64 | autocommit=False, 65 | ansi=False, 66 | timeout=0, 67 | executor=None, 68 | echo=False, 69 | after_created=None, 70 | **kwargs, 71 | ): 72 | conn = Connection( 73 | dsn=dsn, 74 | autocommit=autocommit, 75 | ansi=ansi, 76 | timeout=timeout, 77 | echo=echo, 78 | executor=executor, 79 | after_created=after_created, 80 | **kwargs, 81 | ) 82 | await conn._connect() 83 | return conn 84 | 85 | 86 | class Connection: 87 | """Connection objects manage connections to the database. 88 | 89 | Connections should only be created by the asyncodbc.connect function. 
90 | """ 91 | 92 | _source_traceback = None 93 | 94 | def __init__( 95 | self, 96 | *, 97 | dsn, 98 | autocommit=False, 99 | ansi=None, 100 | timeout=0, 101 | executor=None, 102 | echo=False, 103 | after_created=None, 104 | **kwargs, 105 | ): 106 | self._executor = executor 107 | self._loop = asyncio.get_event_loop() 108 | self._conn = None 109 | self._expired = False 110 | self._timeout = timeout 111 | self._last_usage = self._loop.time() 112 | self._autocommit = autocommit 113 | self._ansi = ansi 114 | self._dsn = dsn 115 | self._echo = echo 116 | self._posthook = after_created 117 | self._kwargs = kwargs 118 | self._connected = False 119 | if self.loop.get_debug(): 120 | self._source_traceback = traceback.extract_stack(sys._getframe(1)) 121 | 122 | def _execute(self, func, *args, **kwargs): 123 | # execute function with args and kwargs in thread pool 124 | func = partial(func, *args, **kwargs) 125 | future = asyncio.get_event_loop().run_in_executor(self._executor, func) 126 | return future 127 | 128 | async def _connect(self): 129 | # create pyodbc connection 130 | f = self._execute( 131 | pyodbc.connect, 132 | self._dsn, 133 | autocommit=self._autocommit, 134 | ansi=self._ansi, 135 | timeout=self._timeout, 136 | **self._kwargs, 137 | ) 138 | self._conn = await f 139 | self._connected = True 140 | if self._posthook is not None: 141 | await self._posthook(self._conn) 142 | 143 | @property 144 | def connected(self): 145 | return self._connected 146 | 147 | @property 148 | def expired(self): 149 | return self._expired 150 | 151 | @property 152 | def loop(self): 153 | return self._loop 154 | 155 | @property 156 | def closed(self): 157 | if self._conn: 158 | return False 159 | return True 160 | 161 | @property 162 | def autocommit(self): 163 | """Show autocommit mode for current database session. True if the 164 | connection is in autocommit mode; False otherwise. 
The default 165 | is False 166 | """ 167 | return self._conn.autocommit 168 | 169 | @property 170 | def timeout(self): 171 | return self._conn.timeout 172 | 173 | @property 174 | def last_usage(self): 175 | return self._last_usage 176 | 177 | @property 178 | def echo(self): 179 | return self._echo 180 | 181 | async def _cursor(self): 182 | c = await self._execute(self._conn.cursor) 183 | self._last_usage = self._loop.time() 184 | return Cursor(c, self, echo=self._echo) 185 | 186 | def cursor(self): 187 | return _ContextManager(self._cursor()) 188 | 189 | async def close(self): 190 | """Close pyodbc connection""" 191 | if not self._conn: 192 | return 193 | c = await self._execute(self._conn.close) 194 | self._conn = None 195 | return c 196 | 197 | def commit(self): 198 | """Commit any pending transaction to the database.""" 199 | fut = self._execute(self._conn.commit) 200 | return fut 201 | 202 | def rollback(self): 203 | """Causes the database to roll back to the start of any pending 204 | transaction. 205 | """ 206 | fut = self._execute(self._conn.rollback) 207 | return fut 208 | 209 | async def execute(self, sql, *args): 210 | """Create a new Cursor object, call its execute method, and return it. 211 | 212 | See Cursor.execute for more details.This is a convenience method 213 | that is not part of the DB API. Since a new Cursor is allocated 214 | by each call, this should not be used if more than one SQL 215 | statement needs to be executed. 
216 | 217 | :raises pyodbc.Error: When an error is encountered during execution 218 | """ 219 | try: 220 | _cursor = await self._execute(self._conn.execute, sql, *args) 221 | connection = self 222 | cursor = Cursor(_cursor, connection, echo=self._echo) 223 | return cursor 224 | except pyodbc.Error as e: 225 | if _is_conn_close_error(e): 226 | await self.close() 227 | raise 228 | 229 | def getinfo(self, type_): 230 | """Returns general information about the driver and data source 231 | associated with a connection by calling SQLGetInfo and returning its 232 | results. See Microsoft's SQLGetInfo documentation for the types of 233 | information available. 234 | 235 | :param type_: int, pyodbc.SQL_* constant 236 | """ 237 | fut = self._execute(self._conn.getinfo, type_) 238 | return fut 239 | 240 | def add_output_converter(self, sqltype, func): 241 | """Register an output converter function that will be called whenever 242 | a value with the given SQL type is read from the database. 243 | 244 | :param sqltype: the integer SQL type value to convert, which can 245 | be one of the defined standard constants (pyodbc.SQL_VARCHAR) 246 | or a database-specific value (e.g. -151 for the SQL Server 2008 247 | geometry data type). 248 | :param func: the converter function which will be called with a 249 | single parameter, the value, and should return the converted 250 | value. If the value is NULL, the parameter will be None. 251 | Otherwise it will be a Python string. 252 | """ 253 | fut = self._execute(self._conn.add_output_converter, sqltype, func) 254 | return fut 255 | 256 | def clear_output_converters(self): 257 | """Remove all output converter functions added by 258 | add_output_converter. 259 | """ 260 | fut = self._execute(self._conn.clear_output_converters) 261 | return fut 262 | 263 | def set_attr(self, attr_id, value): 264 | """Calls SQLSetConnectAttr with the given values. 265 | 266 | param attr_id: the attribute ID (integer) to set. 
These are ODBC or 267 | driver constants. 268 | param value: the connection attribute value to set. At this time 269 | only integer values are supported. 270 | """ 271 | fut = self._execute(self._conn.set_attr, attr_id, value) 272 | return fut 273 | 274 | def __del__(self): 275 | if not self.closed: 276 | # This will block the loop, please use close 277 | # coroutine to close connection 278 | self._conn.close() 279 | self._conn = None 280 | 281 | warnings.warn("Unclosed connection {!r}".format(self), ResourceWarning) 282 | 283 | context = {"connection": self, "message": "Unclosed connection"} 284 | if self._source_traceback is not None: 285 | context["source_traceback"] = self._source_traceback 286 | self._loop.call_exception_handler(context) 287 | 288 | async def __aenter__(self): 289 | return self 290 | 291 | async def __aexit__(self, exc_type, exc_val, exc_tb): 292 | await self.close() 293 | return 294 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # aiomysql documentation build configuration file, created by 5 | # sphinx-quickstart on Sun Jan 18 22:02:31 2015. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import sys 17 | import os 18 | 19 | # If extensions (or modules to document with autodoc) are in another directory, 20 | # add these directories to sys.path here. If the directory is relative to the 21 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
22 | #sys.path.insert(0, os.path.abspath('.')) 23 | 24 | # -- General configuration ------------------------------------------------ 25 | 26 | # If your documentation needs a minimal Sphinx version, state it here. 27 | #needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | 33 | import re, os.path 34 | 35 | def get_release(): 36 | regexp = re.compile(r"^__version__\W*=\W*[\"']([\d.abrc]+)[\"']") 37 | here = os.path.dirname(__file__) 38 | root = os.path.dirname(here) 39 | init_py = os.path.join(root, 'asyncodbc', '__init__.py') 40 | with open(init_py) as f: 41 | for line in f: 42 | match = regexp.match(line) 43 | if match is not None: 44 | return match.group(1) 45 | else: 46 | raise RuntimeError('Cannot find version in asyncodbc/__init__.py') 47 | 48 | 49 | def get_version(release): 50 | parts = release.split('.') 51 | return '.'.join(parts[:2]) 52 | 53 | extensions = [ 54 | 'sphinx.ext.autodoc', 55 | 'sphinx.ext.intersphinx', 56 | 'sphinx.ext.viewcode', 57 | ] 58 | 59 | intersphinx_mapping = {'python': ('http://docs.python.org/3', None)} 60 | 61 | # Add any paths that contain templates here, relative to this directory. 62 | templates_path = ['_templates'] 63 | 64 | # The suffix of source filenames. 65 | source_suffix = '.rst' 66 | 67 | # The encoding of source files. 68 | #source_encoding = 'utf-8-sig' 69 | 70 | # The master toctree document. 71 | master_doc = 'index' 72 | 73 | # General information about the project. 74 | project = 'asyncodbc' 75 | copyright = '2015,2016 Nikolay Novik' 76 | 77 | # The version info for the project you're documenting, acts as replacement for 78 | # |version| and |release|, also used in various other places throughout the 79 | # built documents. 80 | # 81 | release = get_release() 82 | version = get_version(release) 83 | 84 | # The language for content autogenerated by Sphinx. 
Refer to documentation 85 | # for a list of supported languages. 86 | #language = None 87 | 88 | # There are two options for replacing |today|: either, you set today to some 89 | # non-false value, then it is used: 90 | #today = '' 91 | # Else, today_fmt is used as the format for a strftime call. 92 | #today_fmt = '%B %d, %Y' 93 | 94 | # List of patterns, relative to source directory, that match files and 95 | # directories to ignore when looking for source files. 96 | exclude_patterns = ['_build'] 97 | 98 | # The reST default role (used for this markup: `text`) to use for all 99 | # documents. 100 | #default_role = None 101 | 102 | # If true, '()' will be appended to :func: etc. cross-reference text. 103 | #add_function_parentheses = True 104 | 105 | # If true, the current module name will be prepended to all description 106 | # unit titles (such as .. function::). 107 | #add_module_names = True 108 | 109 | # If true, sectionauthor and moduleauthor directives will be shown in the 110 | # output. They are ignored by default. 111 | #show_authors = False 112 | 113 | # The name of the Pygments (syntax highlighting) style to use. 114 | pygments_style = 'sphinx' 115 | 116 | # A list of ignored prefixes for module index sorting. 117 | #modindex_common_prefix = [] 118 | 119 | # If true, keep warnings as "system message" paragraphs in the built documents. 120 | #keep_warnings = False 121 | highlight_language = 'python3' 122 | 123 | # The theme to use for HTML and HTML Help pages. See the documentation for 124 | # a list of builtin themes. 125 | on_rtd = os.environ.get('READTHEDOCS', None) == 'True' 126 | 127 | if on_rtd: 128 | html_theme = 'default' 129 | else: 130 | html_theme = 'pyramid' 131 | 132 | # -- Options for HTML output ---------------------------------------------- 133 | 134 | # The theme to use for HTML and HTML Help pages. See the documentation for 135 | # a list of builtin themes. 
136 | html_theme = 'default' 137 | 138 | # Theme options are theme-specific and customize the look and feel of a theme 139 | # further. For a list of options available for each theme, see the 140 | # documentation. 141 | #html_theme_options = {} 142 | 143 | # Add any paths that contain custom themes here, relative to this directory. 144 | #html_theme_path = [] 145 | 146 | # The name for this set of Sphinx documents. If None, it defaults to 147 | # " v documentation". 148 | #html_title = None 149 | 150 | # A shorter title for the navigation bar. Default is the same as html_title. 151 | #html_short_title = None 152 | 153 | # The name of an image file (relative to this directory) to place at the top 154 | # of the sidebar. 155 | #html_logo = None 156 | 157 | # The name of an image file (within the static path) to use as favicon of the 158 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 159 | # pixels large. 160 | #html_favicon = None 161 | 162 | # Add any paths that contain custom static files (such as style sheets) here, 163 | # relative to this directory. They are copied after the builtin static files, 164 | # so a file named "default.css" will overwrite the builtin "default.css". 165 | html_static_path = ['_static'] 166 | 167 | # Add any extra paths that contain custom files (such as robots.txt or 168 | # .htaccess) here, relative to this directory. These files are copied 169 | # directly to the root of the documentation. 170 | #html_extra_path = [] 171 | 172 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 173 | # using the given strftime format. 174 | #html_last_updated_fmt = '%b %d, %Y' 175 | 176 | # If true, SmartyPants will be used to convert quotes and dashes to 177 | # typographically correct entities. 178 | #html_use_smartypants = True 179 | 180 | # Custom sidebar templates, maps document names to template names. 
181 | #html_sidebars = {} 182 | 183 | # Additional templates that should be rendered to pages, maps page names to 184 | # template names. 185 | #html_additional_pages = {} 186 | 187 | # If false, no module index is generated. 188 | #html_domain_indices = True 189 | 190 | # If false, no index is generated. 191 | #html_use_index = True 192 | 193 | # If true, the index is split into individual pages for each letter. 194 | #html_split_index = False 195 | 196 | # If true, links to the reST sources are added to the pages. 197 | #html_show_sourcelink = True 198 | 199 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 200 | #html_show_sphinx = True 201 | 202 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 203 | #html_show_copyright = True 204 | 205 | # If true, an OpenSearch description file will be output, and all pages will 206 | # contain a tag referring to it. The value of this option must be the 207 | # base URL from which the finished HTML is served. 208 | #html_use_opensearch = '' 209 | 210 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 211 | #html_file_suffix = None 212 | 213 | # Output file base name for HTML help builder. 214 | htmlhelp_basename = 'aioodbcdoc' 215 | 216 | 217 | # -- Options for LaTeX output --------------------------------------------- 218 | 219 | latex_elements = { 220 | # The paper size ('letterpaper' or 'a4paper'). 221 | #'papersize': 'letterpaper', 222 | 223 | # The font size ('10pt', '11pt' or '12pt'). 224 | #'pointsize': '10pt', 225 | 226 | # Additional stuff for the LaTeX preamble. 227 | #'preamble': '', 228 | } 229 | 230 | # Grouping the document tree into LaTeX files. List of tuples 231 | # (source start file, target name, title, 232 | # author, documentclass [howto, manual, or own class]). 
233 | latex_documents = [ 234 | ('index', 'asyncodbc.tex', 'asyncodbc Documentation', 235 | 'Nikolay Novik', 'manual'), 236 | ] 237 | 238 | # The name of an image file (relative to this directory) to place at the top of 239 | # the title page. 240 | #latex_logo = None 241 | 242 | # For "manual" documents, if this is true, then toplevel headings are parts, 243 | # not chapters. 244 | #latex_use_parts = False 245 | 246 | # If true, show page references after internal links. 247 | #latex_show_pagerefs = False 248 | 249 | # If true, show URL addresses after external links. 250 | #latex_show_urls = False 251 | 252 | # Documents to append as an appendix to all manuals. 253 | #latex_appendices = [] 254 | 255 | # If false, no module index is generated. 256 | #latex_domain_indices = True 257 | 258 | 259 | # -- Options for manual page output --------------------------------------- 260 | 261 | # One entry per manual page. List of tuples 262 | # (source start file, name, description, authors, manual section). 263 | man_pages = [ 264 | ('index', 'asyncodbc', 'asyncodbc Documentation', 265 | ['Nikolay Novik'], 1) 266 | ] 267 | 268 | # If true, show URL addresses after external links. 269 | #man_show_urls = False 270 | 271 | 272 | # -- Options for Texinfo output ------------------------------------------- 273 | 274 | # Grouping the document tree into Texinfo files. List of tuples 275 | # (source start file, target name, title, author, 276 | # dir menu entry, description, category) 277 | texinfo_documents = [ 278 | ('index', 'asyncodbc', 'asyncodbc Documentation', 279 | 'Nikolay Novik', 'asyncodbc', 'One line description of project.', 280 | 'Miscellaneous'), 281 | ] 282 | 283 | # Documents to append as an appendix to all manuals. 284 | #texinfo_appendices = [] 285 | 286 | # If false, no module index is generated. 287 | #texinfo_domain_indices = True 288 | 289 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 
290 | #texinfo_show_urls = 'footnote' 291 | 292 | # If true, do not generate a @detailmenu in the "Top" node's menu. 293 | #texinfo_no_detailmenu = False 294 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
import pyodbc

from .log import logger
from .utils import PY_352, _is_conn_close_error

__all__ = ["Cursor"]


class Cursor:
    """Cursors represent a database cursor (and map to ODBC HSTMTs), which
    is used to manage the context of a fetch operation.

    Cursors created from the same connection are not isolated, i.e., any
    changes made to the database by a cursor are immediately visible by
    the other cursors.
    """

    def __init__(self, pyodbc_cursor, connection, echo=False):
        self._conn = connection
        self._impl = pyodbc_cursor
        self._loop = connection.loop
        self._echo = echo

    async def _run_operation(self, func, *args, **kwargs):
        # Execute func in the thread pool of the connection attached to
        # this cursor.  If the driver reports that the underlying
        # connection has died, close the connection eagerly so it cannot
        # be handed out again, then re-raise the original error.
        if not self._conn:
            raise pyodbc.OperationalError("Cursor is closed.")

        try:
            result = await self._conn._execute(func, *args, **kwargs)
            return result
        except pyodbc.Error as e:
            if self._conn and _is_conn_close_error(e):
                await self._conn.close()
            raise

    @property
    def echo(self):
        """Return echo mode status."""
        return self._echo

    @property
    def connection(self):
        """Cursor's database connection."""
        return self._conn

    @property
    def autocommit(self):
        """Show autocommit mode for current database session. True if
        connection is in autocommit mode; False otherwise. The default
        is False.
        """
        return self._conn.autocommit

    @property
    def rowcount(self):
        """The number of rows modified by the previous DML statement.

        This is -1 if no SQL has been executed or if the number of rows is
        unknown. Note that it is not uncommon for databases to report -1
        after a select statement for performance reasons. (The exact number
        may not be known before the first records are returned to the
        application.)
        """
        return self._impl.rowcount

    @property
    def description(self):
        """This read-only attribute is a list of 7-item tuples, each
        containing (name, type_code, display_size, internal_size, precision,
        scale, null_ok).

        pyodbc only provides values for name, type_code, internal_size,
        and null_ok. The other values are set to None.

        This attribute will be None for operations that do not return rows
        or if one of the execute methods has not been called.

        The type_code member is the class type used to create the Python
        objects when reading rows. For example, a varchar column's type will
        be str.
        """
        return self._impl.description

    @property
    def closed(self):
        """Read only property indicates if cursor has been closed"""
        return self._conn is None

    @property
    def arraysize(self):
        """This read/write attribute specifies the number of rows to fetch
        at a time with .fetchmany() . It defaults to 1 meaning to fetch a
        single row at a time.
        """
        return self._impl.arraysize

    @arraysize.setter
    def arraysize(self, size):
        self._impl.arraysize = size

    async def close(self):
        """Close the cursor now (rather than whenever __del__ is called).

        The cursor will be unusable from this point forward; an Error
        (or subclass) exception will be raised if any operation is attempted
        with the cursor.
        """
        if self._conn is None:
            # Already closed; closing twice is a no-op.
            return
        await self._run_operation(self._impl.close)
        self._conn = None

    async def execute(self, sql, *params):
        """Executes the given operation substituting any markers with
        the given parameters.

        :param sql: the SQL statement to execute with optional ? parameter
            markers. Note that pyodbc never modifies the SQL statement.
        :param params: optional parameters for the markers in the SQL. They
            can be passed in a single sequence as defined by the DB API.
            For convenience, however, they can also be passed individually.
        """
        if self._echo:
            # Log the statement exactly once, using lazy %-style args so the
            # SQL text is never interpreted as a logging format string.
            logger.info("%r", sql)

        await self._run_operation(self._impl.execute, sql, *params)
        return self

    def executemany(self, sql, *params):
        """Prepare a database query or command and then execute it against
        all parameter sequences found in the sequence seq_of_params.

        :param sql: the SQL statement to execute with optional ? parameters
        :param params: sequence parameters for the markers in the SQL.
        """
        fut = self._run_operation(self._impl.executemany, sql, *params)
        return fut

    def callproc(self, procname, args=()):
        raise NotImplementedError

    async def setinputsizes(self, *args, **kwargs):
        """Does nothing, required by DB API."""
        return None

    async def setoutputsize(self, *args, **kwargs):
        """Does nothing, required by DB API."""
        return None

    def fetchone(self):
        """Returns the next row or None when no more data is available.

        A ProgrammingError exception is raised if no SQL has been executed
        or if it did not return a result set (e.g. was not a SELECT
        statement).
        """
        fut = self._run_operation(self._impl.fetchone)
        return fut

    def fetchall(self):
        """Returns a list of all remaining rows.

        Since this reads all rows into memory, it should not be used if
        there are a lot of rows. Consider iterating over the rows instead.
        However, it is useful for freeing up a Cursor so you can perform a
        second query before processing the resulting rows.

        A ProgrammingError exception is raised if no SQL has been executed
        or if it did not return a result set (e.g. was not a SELECT statement)
        """
        fut = self._run_operation(self._impl.fetchall)
        return fut

    def fetchmany(self, size):
        """Returns a list of remaining rows, containing no more than size
        rows, used to process results in chunks. The list will be empty when
        there are no more rows.

        The default for cursor.arraysize is 1 which is no different than
        calling fetchone().

        A ProgrammingError exception is raised if no SQL has been executed
        or if it did not return a result set (e.g. was not a SELECT
        statement).

        :param size: int, max number of rows to return
        """
        fut = self._run_operation(self._impl.fetchmany, size)
        return fut

    def nextset(self):
        """This method will make the cursor skip to the next available
        set, discarding any remaining rows from the current set.

        If there are no more sets, the method returns None. Otherwise,
        it returns a true value and subsequent calls to the fetch methods
        will return rows from the next result set.

        This method is primarily used if you have stored procedures that
        return multiple results.
        """
        fut = self._run_operation(self._impl.nextset)
        return fut

    def tables(self, **kw):
        """Creates a result set of tables in the database that match the
        given criteria.
        """
        fut = self._run_operation(self._impl.tables, **kw)
        return fut

    def columns(self, **kw):
        """Creates a results set of column names in specified tables by
        executing the ODBC SQLColumns function. Each row fetched has the
        following columns.
        """
        fut = self._run_operation(self._impl.columns, **kw)
        return fut

    def statistics(self, catalog=None, schema=None, unique=False, quick=True):
        """Creates a results set of statistics about a single table and
        the indexes associated with the table by executing SQLStatistics.

        :param catalog: the catalog name
        :param schema: the schema name
        :param unique: if True, only unique indexes are returned. Otherwise
            all indexes are returned.
        :param quick: if True, CARDINALITY and PAGES are returned only if
            they are readily available from the server
        """
        fut = self._run_operation(
            self._impl.statistics,
            catalog=catalog,
            schema=schema,
            unique=unique,
            quick=quick,
        )
        return fut

    def rowIdColumns(self, table, catalog=None, schema=None, nullable=True):  # nopep8
        """Executes SQLSpecialColumns with SQL_BEST_ROWID which creates a
        result set of columns that uniquely identify a row
        """
        fut = self._run_operation(
            self._impl.rowIdColumns,
            table,
            catalog=catalog,
            schema=schema,
            nullable=nullable,
        )
        return fut

    def rowVerColumns(self, table, catalog=None, schema=None, nullable=True):  # nopep8
        """Executes SQLSpecialColumns with SQL_ROWVER which creates a
        result set of columns that are automatically updated when any
        value in the row is updated.
        """
        fut = self._run_operation(
            self._impl.rowVerColumns,
            table,
            catalog=catalog,
            schema=schema,
            nullable=nullable,
        )
        return fut

    def primaryKeys(self, table, catalog=None, schema=None):  # nopep8
        """Creates a result set of column names that make up the primary key
        for a table by executing the SQLPrimaryKeys function."""
        fut = self._run_operation(self._impl.primaryKeys, table, catalog=catalog, schema=schema)
        return fut

    def foreignKeys(self, *a, **kw):  # nopep8
        """Executes the SQLForeignKeys function and creates a result set
        of column names that are foreign keys in the specified table (columns
        in the specified table that refer to primary keys in other tables)
        or foreign keys in other tables that refer to the primary key in
        the specified table.
        """
        fut = self._run_operation(self._impl.foreignKeys, *a, **kw)
        return fut

    def getTypeInfo(self, sql_type):  # nopep8
        """Executes SQLGetTypeInfo a creates a result set with information
        about the specified data type or all data types supported by the
        ODBC driver if not specified.
        """
        fut = self._run_operation(self._impl.getTypeInfo, sql_type)
        return fut

    def procedures(self, *a, **kw):
        """Executes SQLProcedures and creates a result set of information
        about the procedures in the data source.
        """
        fut = self._run_operation(self._impl.procedures, *a, **kw)
        return fut

    def procedureColumns(self, *a, **kw):  # nopep8
        fut = self._run_operation(self._impl.procedureColumns, *a, **kw)
        return fut

    def skip(self, count):
        fut = self._run_operation(self._impl.skip, count)
        return fut

    def commit(self):
        fut = self._run_operation(self._impl.commit)
        return fut

    def rollback(self):
        fut = self._run_operation(self._impl.rollback)
        return fut

    # Prior to Python 3.5.2, __aiter__ had to be a coroutine; keep both
    # spellings for compatibility with the project's supported versions.
    if PY_352:

        def __aiter__(self):
            return self

    else:

        async def __aiter__(self):
            return self

    async def __anext__(self):
        ret = await self.fetchone()
        if ret is not None:
            return ret
        else:
            raise StopAsyncIteration

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()
import asyncio

import pytest
from pyodbc import Error

import asyncodbc
from asyncodbc import Connection, Pool


@pytest.mark.asyncio
async def test_create_pool(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)
    assert isinstance(pool, Pool)
    assert 1 == pool.minsize
    assert 10 == pool.maxsize
    assert 1 == pool.size
    assert 1 == pool.freesize
    assert not pool.echo


@pytest.mark.asyncio
async def test_create_pool2(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, maxsize=20)
    assert isinstance(pool, Pool)
    assert 1 == pool.minsize
    assert 20 == pool.maxsize
    assert 1 == pool.size
    assert 1 == pool.freesize


@pytest.mark.asyncio
async def test_acquire(pool):
    conn = await pool.acquire()
    try:
        assert isinstance(conn, Connection)
        assert not conn.closed
        cur = await conn.cursor()
        await cur.execute("SELECT 1")
        val = await cur.fetchone()
        assert (1,) == tuple(val)
    finally:
        await pool.release(conn)


@pytest.mark.asyncio
async def test_release(pool):
    conn = await pool.acquire()
    try:
        assert 0 == pool.freesize
        assert {conn} == pool._used
    finally:
        await pool.release(conn)
    assert 1 == pool.freesize
    assert not pool._used


@pytest.mark.asyncio
async def test_release_closed(pool):
    conn = await pool.acquire()
    assert 0 == pool.freesize
    await conn.close()
    await pool.release(conn)
    assert 0 == pool.freesize
    assert not pool._used
    assert 0 == pool.size

    conn2 = await pool.acquire()
    assert 0 == pool.freesize
    assert 1 == pool.size
    await pool.release(conn2)


@pytest.mark.asyncio
async def test_context_manager(pool):
    conn = await pool.acquire()
    try:
        assert isinstance(conn, Connection)
        assert 0 == pool.freesize
        assert {conn} == pool._used
    finally:
        await pool.release(conn)
    assert 1 == pool.freesize


@pytest.mark.asyncio
async def test_clear(pool):
    await pool.clear()
    assert 0 == pool.freesize


@pytest.mark.asyncio
async def test_initial_empty(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=0)

    assert 10 == pool.maxsize
    assert 0 == pool.minsize
    assert 0 == pool.size
    assert 0 == pool.freesize

    conn = await pool.acquire()
    try:
        assert 1 == pool.size
        assert 0 == pool.freesize
    finally:
        await pool.release(conn)
    assert 1 == pool.size
    assert 1 == pool.freesize

    conn1 = await pool.acquire()
    assert 1 == pool.size
    assert 0 == pool.freesize

    conn2 = await pool.acquire()
    assert 2 == pool.size
    assert 0 == pool.freesize

    await pool.release(conn1)
    assert 2 == pool.size
    assert 1 == pool.freesize

    await pool.release(conn2)
    assert 2 == pool.size
    assert 2 == pool.freesize


@pytest.mark.asyncio
async def test_parallel_tasks(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=0, maxsize=2)

    assert 2 == pool.maxsize
    assert 0 == pool.minsize
    assert 0 == pool.size
    assert 0 == pool.freesize

    fut1 = pool.acquire()
    fut2 = pool.acquire()

    conn1, conn2 = await asyncio.gather(fut1, fut2)
    assert 2 == pool.size
    assert 0 == pool.freesize
    assert {conn1, conn2} == pool._used

    await pool.release(conn1)
    assert 2 == pool.size
    assert 1 == pool.freesize
    assert {conn2} == pool._used

    await pool.release(conn2)
    assert 2 == pool.size
    assert 2 == pool.freesize
    assert not conn1.closed
    assert not conn2.closed

    conn3 = await pool.acquire()
    assert conn3 is conn1
    await pool.release(conn3)


@pytest.mark.asyncio
async def test_parallel_tasks_more(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=0, maxsize=3)

    fut1 = pool.acquire()
    fut2 = pool.acquire()
    fut3 = pool.acquire()

    conn1, conn2, conn3 = await asyncio.gather(fut1, fut2, fut3)
    assert 3 == pool.size
    assert 0 == pool.freesize
    assert {conn1, conn2, conn3} == pool._used

    await pool.release(conn1)
    assert 3 == pool.size
    assert 1 == pool.freesize
    assert {conn2, conn3} == pool._used

    await pool.release(conn2)
    assert 3 == pool.size
    assert 2 == pool.freesize
    assert {conn3} == pool._used
    assert not conn1.closed
    assert not conn2.closed

    await pool.release(conn3)
    assert 3 == pool.size
    assert 3 == pool.freesize
    assert not pool._used
    assert not conn1.closed
    assert not conn2.closed
    assert not conn3.closed

    conn4 = await pool.acquire()
    assert conn4 is conn1
    await pool.release(conn4)


@pytest.mark.asyncio
async def test__fill_free(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=1)

    first_conn = await pool.acquire()
    try:
        assert 0 == pool.freesize
        assert 1 == pool.size

        conn = await asyncio.wait_for(pool.acquire(), timeout=0.5)
        assert 0 == pool.freesize
        assert 2 == pool.size
        await pool.release(conn)
        assert 1 == pool.freesize
        assert 2 == pool.size
    finally:
        await pool.release(first_conn)
    assert 2 == pool.freesize
    assert 2 == pool.size


@pytest.mark.asyncio
async def test_connect_from_acquire(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=0)

    assert 0 == pool.freesize
    assert 0 == pool.size
    conn = await pool.acquire()
    try:
        assert 1 == pool.size
        assert 0 == pool.freesize
    finally:
        await pool.release(conn)
    assert 1 == pool.size
    assert 1 == pool.freesize


@pytest.mark.asyncio
async def test_pool_with_connection_recycling(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=1, maxsize=1, pool_recycle=3)
    async with pool.acquire() as conn:
        conn1 = conn

    # Sleep past pool_recycle so the idle connection is considered stale.
    await asyncio.sleep(5)

    assert 1 == pool.freesize
    async with pool.acquire() as conn:
        conn2 = conn

    assert conn1 is not conn2


@pytest.mark.asyncio
async def test_concurrency(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=2, maxsize=4)

    c1 = await pool.acquire()
    c2 = await pool.acquire()
    assert 0 == pool.freesize
    assert 2 == pool.size
    await pool.release(c1)
    await pool.release(c2)


@pytest.mark.asyncio
async def test_invalid_minsize_and_maxsize(dsn):
    with pytest.raises(ValueError):
        await asyncodbc.create_pool(dsn=dsn, minsize=-1)

    with pytest.raises(ValueError):
        await asyncodbc.create_pool(dsn=dsn, minsize=5, maxsize=2)


@pytest.mark.asyncio
async def test_true_parallel_tasks(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, minsize=0, maxsize=1)

    assert 1 == pool.maxsize
    assert 0 == pool.minsize
    assert 0 == pool.size
    assert 0 == pool.freesize

    maxsize = 0
    minfreesize = 100

    async def inner():
        nonlocal maxsize, minfreesize
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)
        conn = await pool.acquire()
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)
        await asyncio.sleep(0.01)
        await pool.release(conn)
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)

    await asyncio.gather(inner(), inner())

    assert 1 == maxsize
    assert 0 == minfreesize


@pytest.mark.asyncio
async def test_cannot_acquire_after_closing(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)

    pool.close()

    with pytest.raises(RuntimeError):
        await pool.acquire()


@pytest.mark.asyncio
async def test_wait_closed(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)

    c1 = await pool.acquire()
    c2 = await pool.acquire()
    assert 2 == pool.size
    assert 0 == pool.freesize

    ops = []

    async def do_release(conn):
        await asyncio.sleep(0)
        await pool.release(conn)
        ops.append("release")

    async def wait_closed():
        await pool.wait_closed()
        ops.append("wait_closed")

    pool.close()
    await asyncio.gather(wait_closed(), do_release(c1), do_release(c2))
    assert sorted(["release", "release", "wait_closed"]) == sorted(ops)
    assert 0 == pool.freesize


@pytest.mark.asyncio
async def test_echo(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn, echo=True)

    assert pool.echo
    conn = await pool.acquire()
    assert conn.echo
    await pool.release(conn)


@pytest.mark.asyncio
async def test_release_closed_connection(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)

    conn = await pool.acquire()
    await conn.close()

    await pool.release(conn)
    pool.close()


@pytest.mark.asyncio
async def test_wait_closing_on_not_closed(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)

    with pytest.raises(RuntimeError):
        await pool.wait_closed()
    pool.close()


@pytest.mark.asyncio
async def test_close_with_acquired_connections(pool_maker, dsn):
    pool = await pool_maker(dsn=dsn)

    conn = await pool.acquire()
    pool.close()

    with pytest.raises(asyncio.TimeoutError):
        await asyncio.wait_for(pool.wait_closed(), 0.1)
    await conn.close()
    await pool.release(conn)


@pytest.mark.asyncio
async def test_pool_with_executor(pool_maker, dsn, executor):
    pool = await pool_maker(executor=executor, dsn=dsn, minsize=2, maxsize=2)

    conn = await pool.acquire()
    try:
        assert isinstance(conn, Connection)
        assert not conn.closed
        assert conn._executor is executor
        cur = await conn.cursor()
        await cur.execute("SELECT 1")
        val = await cur.fetchone()
        assert (1,) == tuple(val)
    finally:
        await pool.release(conn)
    # Close the pool here rather than in a finalizer, because the pool
    # must be closed before its executor is shut down.
    pool.close()
    await pool.wait_closed()


@pytest.mark.asyncio
async def test_pool_context_manager(pool):
    assert not pool.closed
    async with pool:
        assert not pool.closed
    assert pool.closed


@pytest.mark.asyncio
async def test_pool_context_manager2(pool):
    async with pool.acquire() as conn:
        assert not conn.closed
        cur = await conn.cursor()
        await cur.execute("SELECT 1")
        val = await cur.fetchone()
        assert (1,) == tuple(val)


@pytest.mark.asyncio
async def test_all_context_managers(dsn, executor):
    kw = {"dsn": dsn, "executor": executor}
    async with asyncodbc.create_pool(**kw) as pool:
        async with pool.acquire() as conn:
            async with conn.cursor() as cur:
                assert not pool.closed
                assert not conn.closed
                assert not cur.closed

                await cur.execute("SELECT 1")
                val = await cur.fetchone()
                assert (1,) == tuple(val)

    assert pool.closed
    assert conn.closed
    assert cur.closed


@pytest.mark.asyncio
async def test_context_manager_aexit(connection_maker):
    async def aexit_context_manager(conn):
        # commit on exit if no error
        params = (1, "123.45")
        async with conn.cursor() as cur:
            await cur.execute("CREATE TABLE cmt(n int, v VARCHAR(10))")
            await cur.execute("INSERT INTO cmt VALUES (?,?)", params)
        async with conn.cursor() as cur:
            await cur.execute("SELECT v FROM cmt WHERE n=1;")
            (value,) = await cur.fetchone()
            assert value == params[1]

        # rollback on exit if error
        with pytest.raises(Error):
            async with conn.cursor() as cur:
                await cur.execute("ins INTO cmt VALUES (2, '666');")
        async with conn.cursor() as cur:
            await cur.execute("SELECT v FROM cmt WHERE n=2")
            row = await cur.fetchone()
            assert row is None

        async with conn.cursor() as cur:
            await cur.execute("DROP TABLE cmt")

    conn = await connection_maker(autocommit=False)
    assert not conn.autocommit
    await aexit_context_manager(conn)
    await conn.commit()

    conn = await connection_maker(autocommit=True)
    assert conn.autocommit
    await aexit_context_manager(conn)