├── tests ├── __init__.py ├── trio │ ├── __init__.py │ ├── README │ ├── test_worker.py │ ├── test_result_proxy.py │ ├── test_connection.py │ └── test_engine.py ├── asyncio │ ├── __init__.py │ ├── test_worker.py │ ├── test_result_proxy.py │ ├── test_connection.py │ └── test_engine.py └── conftest.py ├── dev-requirements.txt ├── .gitignore ├── .gitlab-ci.yml ├── docs ├── contributing.rst ├── limitations.rst ├── api-reference.rst ├── ddl.rst ├── index.rst └── conf.py ├── sqlalchemy_aio ├── __init__.py ├── strategy.py ├── exc.py ├── asyncio.py ├── trio.py └── base.py ├── pyproject.toml ├── tasks.py ├── tox.ini ├── .github └── workflows │ ├── python-publish.yml │ └── main.yml ├── LICENSE ├── setup.py ├── CHANGELOG.md └── README.rst /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/trio/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/asyncio/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | -e .[test] 2 | invoke 3 | sphinx 4 | sphinx-autobuild 5 | twine 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | *.py[cod] 3 | build/ 4 | *.egg-info/ 5 | _build/ 6 | dist/ 7 | htmlcov/ 8 | .cache/ 9 | .coverage 10 | .tox/ 11 | .pytest_cache/ 12 | -------------------------------------------------------------------------------- /tests/trio/README: -------------------------------------------------------------------------------- 1 | The trio tests are a copy of the asyncio tests with minimal changes, unless 2 | I can use pytest magic to abstract away whether asyncio or trio is running. 3 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | cache: 2 | paths: 3 | - pip_cache 4 | 5 | before_script: 6 | - export PIP_CACHE_DIR="pip_cache" 7 | - pip install .[test] 8 | 9 | test:3.5: 10 | image: python:3.5 11 | script: 12 | - pytest 13 | 14 | test:3.6: 15 | image: python:3.6 16 | script: 17 | - pytest 18 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | Contributing 2 | ============ 3 | 4 | As an open source project, sqlalchemy_aio welcomes contributions of many forms. 5 | 6 | Examples of contributions include: 7 | 8 | * Code patches 9 | * Documentation improvements 10 | * Bug reports and patch reviews 11 | 12 | We welcome pull requests and tickets on `github`_! 13 | 14 | .. 
_`github`: https://github.com/RazerM/sqlalchemy_aio 15 | -------------------------------------------------------------------------------- /sqlalchemy_aio/__init__.py: -------------------------------------------------------------------------------- 1 | from .exc import AlreadyQuit, BlockingWarning, SQLAlchemyAioDeprecationWarning 2 | from .strategy import ASYNCIO_STRATEGY, TRIO_STRATEGY 3 | 4 | __all__ = [ 5 | 'ASYNCIO_STRATEGY', 6 | 'TRIO_STRATEGY', 7 | ] 8 | 9 | __author__ = 'Frazer McLean ' 10 | __version__ = '0.17.0' 11 | __license__ = 'MIT' 12 | __description__ = 'Async support for SQLAlchemy.' 13 | -------------------------------------------------------------------------------- /docs/limitations.rst: -------------------------------------------------------------------------------- 1 | Limitations 2 | =========== 3 | 4 | There are two reasons stuff isn't implemented in ``sqlalchemy_aio``. 5 | 6 | First, because we haven't gotten there yet. For these items you should 7 | :doc:`file bugs or send pull requests <contributing>`. 8 | 9 | Second, some items can't be implemented because of limitations in SQLAlchemy; 10 | there is almost always a workaround, though. 11 | 12 | * :doc:`Table creation <ddl>` 13 | -------------------------------------------------------------------------------- /tests/trio/test_worker.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from sqlalchemy_aio import AlreadyQuit 4 | 5 | 6 | @pytest.mark.trio 7 | async def test_already_quit(): 8 | from sqlalchemy_aio.trio import TrioThreadWorker 9 | worker = TrioThreadWorker() 10 | await worker.quit() 11 | 12 | with pytest.raises(AlreadyQuit): 13 | await worker.run(lambda: None) 14 | 15 | with pytest.raises(AlreadyQuit): 16 | await worker.quit() 17 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.coverage.run] 2 | branch = true 3 | source = [ 4 | "sqlalchemy_aio", 5 | "tests/", 6 | ] 7 | 8 | [tool.coverage.paths] 9 | source = [ 10 | "sqlalchemy_aio", 11 | ".tox/*/lib/python*/site-packages/sqlalchemy_aio", 12 | ] 13 | 14 | [tool.coverage.report] 15 | exclude_lines = [ 16 | "if __name__ == '__main__':", 17 | "pass", 18 | "raise NotImplementedError", 19 | ] 20 | 21 | [tool.pytest.ini_options] 22 | addopts = "-r s" 23 | markers = [ 24 | "noextras", 25 | ] 26 | asyncio_mode = "strict" 27 | -------------------------------------------------------------------------------- /sqlalchemy_aio/strategy.py: -------------------------------------------------------------------------------- 1 | from sqlalchemy.engine.strategies import DefaultEngineStrategy 2 | 3 | from .asyncio import AsyncioEngine 4 | try: 5 | from .trio import TrioEngine 6 | except ImportError: 7 | TrioEngine = None 8 | 9 | ASYNCIO_STRATEGY = '_asyncio' 10 | TRIO_STRATEGY = '_trio' 11 | 12 | 13 | class AsyncioEngineStrategy(DefaultEngineStrategy): 14 | name = ASYNCIO_STRATEGY 15 | engine_cls = AsyncioEngine 16 | 17 | 18 | AsyncioEngineStrategy() 19 | 20 | 21 | if TrioEngine is not None: 22 | class TrioEngineStrategy(DefaultEngineStrategy): 23 | name = TRIO_STRATEGY 24 | engine_cls = TrioEngine 25 | 26 | TrioEngineStrategy() 27 | -------------------------------------------------------------------------------- /tasks.py: -------------------------------------------------------------------------------- 1 | from invoke import Collection, task 2 | 3 | 4 | @task 5 | def build_docs(ctx, watch=False): 6 | 
if watch: 7 | ctx.run('sphinx-autobuild --open-browser --watch sqlalchemy_aio ' 8 | '-b html docs docs/_build/html') 9 | else: 10 | ctx.run('sphinx-build -b html -W -E docs docs/_build/html') 11 | 12 | 13 | @task 14 | def release(ctx, version): 15 | ctx.run("git tag {0} -m '{0} release'".format(version)) 16 | ctx.run('git push --tags') 17 | ctx.run('rm dist/*') 18 | ctx.run('python setup.py sdist bdist_wheel') 19 | ctx.run('twine upload dist/*') 20 | 21 | 22 | docs = Collection('docs') 23 | docs.add_task(build_docs, 'build') 24 | 25 | namespace = Collection(release, docs) 26 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist=py{36,37,38,39,310}{,-noextras} 3 | [testenv] 4 | deps= 5 | coverage[toml] 6 | !noextras: .[trio,test] 7 | noextras: .[test-noextras] 8 | commands= 9 | # We use parallel mode and then combine here so that coverage.py will take 10 | # the paths like 11 | # .tox/py34/lib/python3.4/site-packages/sqlalchemy_aio/__init__.py and 12 | # collapse them into sqlalchemy_aio/__init__.py. 13 | !noextras: coverage run --parallel-mode -m pytest {posargs} 14 | noextras: coverage run --parallel-mode -m pytest -m noextras {posargs} 15 | coverage combine 16 | coverage report -m 17 | 18 | [gh-actions] 19 | python = 20 | 3.6: py36 21 | 3.7: py37 22 | 3.8: py38 23 | 3.9: py39 24 | 3.10: py310 25 | -------------------------------------------------------------------------------- /docs/api-reference.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============= 3 | 4 | .. currentmodule:: sqlalchemy_aio.base 5 | 6 | .. autoclass:: AsyncEngine 7 | :members: 8 | 9 | .. autoclass:: AsyncConnection 10 | :members: 11 | 12 | .. autoclass:: AsyncResultProxy 13 | :members: 14 | 15 | .. autoclass:: AsyncTransaction 16 | :members: 17 | 18 | .. currentmodule:: sqlalchemy_aio.asyncio 19 | 20 | .. autoclass:: AsyncioEngine 21 | :members: 22 | 23 | .. currentmodule:: sqlalchemy_aio.trio 24 | 25 | .. autoclass:: TrioEngine 26 | :members: 27 | 28 | .. currentmodule:: sqlalchemy_aio.exc 29 | 30 | .. autoclass:: AlreadyQuit 31 | :members: 32 | 33 | .. autoclass:: BlockingWarning 34 | :members: 35 | 36 | .. 
autoclass:: SQLAlchemyAioDeprecationWarning 37 | :members: 38 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - uses: actions/checkout@v2 14 | - name: Set up Python 15 | uses: actions/setup-python@v2 16 | with: 17 | python-version: "3.x" 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install setuptools wheel twine 22 | - name: Build and publish 23 | env: 24 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 25 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 26 | run: | 27 | python setup.py sdist bdist_wheel 28 | twine upload dist/* 29 | -------------------------------------------------------------------------------- /sqlalchemy_aio/exc.py: -------------------------------------------------------------------------------- 1 | class AlreadyQuit(Exception): 2 | """Raised by :class:`~sqlalchemy_aio.base.ThreadWorker` if an attempt is 3 | made to use it after its thread has quit. 4 | """ 5 | 6 | 7 | class BlockingWarning(RuntimeWarning): 8 | """Emitted when an :class:`~sqlalchemy_aio.base.AsyncEngine` or 9 | :class:`~sqlalchemy_aio.base.AsyncConnection` is used in a blocking 10 | fashion accidentally. 11 | 12 | For example, it is emitted in this case: 13 | 14 | .. code-block:: python 15 | 16 | engine = create_engine(..., strategy=TRIO_STRATEGY) 17 | Table(..., autoload_with=engine) 18 | """ 19 | 20 | 21 | # DeprecationWarning is ignored by default on Python < 3.7, so use UserWarning 22 | class SQLAlchemyAioDeprecationWarning(UserWarning): 23 | """Emitted for deprecated functionality.""" 24 | -------------------------------------------------------------------------------- /tests/asyncio/test_worker.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import pytest 3 | 4 | from sqlalchemy_aio import AlreadyQuit 5 | from sqlalchemy_aio.asyncio import AsyncioThreadWorker 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_already_quit(): 10 | worker = AsyncioThreadWorker() 11 | await worker.quit() 12 | 13 | with pytest.raises(AlreadyQuit): 14 | await worker.run(lambda: None) 15 | 16 | with pytest.raises(AlreadyQuit): 17 | await worker.quit() 18 | 19 | 20 | @pytest.mark.asyncio 21 | async def test_interrupted_run(): 22 | worker = AsyncioThreadWorker() 23 | 24 | loop = asyncio.get_event_loop() 25 | event = asyncio.Event() 26 | 27 | async def set_event(): 28 | event.set() 29 | 30 | def returns_number(number): 31 | asyncio.run_coroutine_threadsafe(set_event(), loop) 32 | return number 33 | 34 | task = asyncio.ensure_future(worker.run(returns_number, [2])) 35 | await event.wait() 36 | task.cancel() 37 | value = await worker.run(returns_number, [3]) 38 | assert 3 == value 39 | await worker.quit() 40 | -------------------------------------------------------------------------------- /docs/ddl.rst: -------------------------------------------------------------------------------- 1 | DDL 2 | === 3 | 4 | Because of some of the limitations in the SQLAlchemy API, it’s not possible to 5 | asynchronously create tables using :meth:`sqlalchemy.schema.Table.create` or 6 | :meth:`sqlalchemy.schema.MetaData.create_all`. 7 | 8 | Instead of: 9 | 10 | .. 
code-block:: python 11 | 12 | users = Table('users', metadata, 13 | Column('id', Integer, primary_key=True), 14 | Column('name', String), 15 | ) 16 | 17 | users.create(engine) 18 | 19 | 20 | you can use :class:`sqlalchemy.schema.CreateTable` or 21 | :meth:`AsyncEngine.run_in_thread`: 22 | 23 | .. code-block:: python 24 | 25 | await engine.execute(CreateTable(users)) 26 | 27 | .. code-block:: python 28 | 29 | await engine.run_in_thread(users.create, engine.sync_engine) 30 | 31 | 32 | For :meth:`MetaData.create_all() `, 33 | instead of: 34 | 35 | .. code-block:: python 36 | 37 | metadata.create_all(engine) 38 | 39 | you have to do: 40 | 41 | .. code-block:: python 42 | 43 | await engine.run_in_thread(metadata.create_all, engine.sync_engine) 44 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: 5 | push: 6 | branches: ["master"] 7 | pull_request: 8 | branches: ["master"] 9 | workflow_dispatch: 10 | 11 | jobs: 12 | tests: 13 | name: "Python ${{ matrix.python-version }}" 14 | runs-on: "ubuntu-latest" 15 | 16 | strategy: 17 | matrix: 18 | python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] 19 | 20 | steps: 21 | - uses: "actions/checkout@v2" 22 | - uses: "actions/setup-python@v2" 23 | with: 24 | python-version: "${{ matrix.python-version }}" 25 | - name: "Install dependencies" 26 | run: | 27 | set -xe 28 | python -VV 29 | python -m site 30 | python -m pip install --upgrade pip setuptools wheel 31 | python -m pip install --upgrade coverage[toml] virtualenv tox tox-gh-actions 32 | 33 | - name: "Run tox targets for ${{ matrix.python-version }}" 34 | run: "python -m tox" 35 | - name: "Convert coverage" 36 | run: "python -m coverage xml" 37 | - name: "Upload coverage to Codecov" 38 | uses: "codecov/codecov-action@v1" 39 | with: 40 | fail_ci_if_error: true 41 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Frazer McLean 4 | Derived from alchimia (c) Alex Gaynor and David Reid 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 
23 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from setuptools import setup, find_packages 4 | 5 | 6 | INIT_FILE = 'sqlalchemy_aio/__init__.py' 7 | init_data = open(INIT_FILE).read() 8 | 9 | metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", init_data)) 10 | 11 | AUTHOR_EMAIL = metadata['author'] 12 | VERSION = metadata['version'] 13 | LICENSE = metadata['license'] 14 | DESCRIPTION = metadata['description'] 15 | 16 | AUTHOR, EMAIL = re.match(r'(.*) <(.*)>', AUTHOR_EMAIL).groups() 17 | 18 | requires = [ 19 | 'represent>=1.4', 20 | 'sqlalchemy<1.4', 21 | 'outcome', 22 | ] 23 | 24 | extras_require = dict() 25 | 26 | extras_require['test-noextras'] = [ 27 | 'pytest >= 5.4', 28 | 'pytest-asyncio >= 0.14', 29 | ] 30 | 31 | extras_require['test'] = extras_require['test-noextras'] + [ 32 | 'pytest-trio >= 0.6', 33 | ] 34 | 35 | extras_require['trio'] = [ 36 | 'trio >= 0.15', 37 | ] 38 | 39 | 40 | setup( 41 | name='sqlalchemy_aio', 42 | version=VERSION, 43 | description=DESCRIPTION, 44 | long_description=open('README.rst').read(), 45 | author=AUTHOR, 46 | author_email=EMAIL, 47 | url='https://github.com/RazerM/sqlalchemy_aio', 48 | packages=find_packages(exclude=['tests']), 49 | classifiers=[ 50 | 'Development Status :: 4 - Beta', 51 | 'License :: OSI Approved :: MIT License', 52 | 'Programming Language :: Python :: 3', 53 | ], 54 | license=LICENSE, 55 | install_requires=requires, 56 | extras_require=extras_require, 57 | python_requires='>=3.6', 58 | ) 59 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import Column, Integer, MetaData, Table, create_engine, event 3 | 4 | from sqlalchemy_aio import ASYNCIO_STRATEGY, TRIO_STRATEGY 5 | 6 | 7 | def fix_pysqlite_transactions(engine): 8 | """See http://docs.sqlalchemy.org/en/latest/dialects/ 9 | sqlite.html#serializable-isolation-savepoints-transactional-ddl 10 | """ 11 | 12 | @event.listens_for(engine, 'connect') 13 | def connect(dbapi_connection, connection_record): 14 | # disable pysqlite's emitting of the BEGIN statement entirely. 15 | # also stops it from emitting COMMIT before any DDL. 16 | dbapi_connection.isolation_level = None 17 | 18 | @event.listens_for(engine, 'begin') 19 | def begin(conn): 20 | # emit our own BEGIN 21 | conn.execute('BEGIN') 22 | 23 | 24 | @pytest.fixture(params=[True, False], ids=['memory', 'file']) 25 | def engine_url(request, tmpdir): 26 | # sqlite has different behaviour when used with multiple threads with an 27 | # in-memory or file database. 
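    # A ':memory:' SQLite database is private to the connection that created
    # it, so only the file-backed variant is visible from other threads.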
28 | if request.param: 29 | url = 'sqlite:///:memory:' 30 | else: 31 | file = tmpdir.join('test.db') 32 | url = 'sqlite:///' + str(file) 33 | return url 34 | 35 | 36 | @pytest.fixture 37 | def asyncio_engine(engine_url, event_loop): 38 | engine = create_engine(engine_url, strategy=ASYNCIO_STRATEGY) 39 | fix_pysqlite_transactions(engine.sync_engine) 40 | return engine 41 | 42 | 43 | @pytest.fixture 44 | def trio_engine(engine_url): 45 | engine = create_engine(engine_url, strategy=TRIO_STRATEGY) 46 | fix_pysqlite_transactions(engine.sync_engine) 47 | yield engine 48 | 49 | 50 | @pytest.fixture 51 | def mytable(): 52 | metadata = MetaData() 53 | mytable = Table( 54 | 'mytable', metadata, 55 | Column('id', Integer, primary_key=True), 56 | ) 57 | return mytable 58 | -------------------------------------------------------------------------------- /sqlalchemy_aio/asyncio.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import threading 3 | import warnings 4 | from concurrent.futures import CancelledError 5 | from functools import partial 6 | 7 | import outcome 8 | 9 | from .base import AsyncEngine, ThreadWorker 10 | from .exc import AlreadyQuit, SQLAlchemyAioDeprecationWarning 11 | 12 | 13 | class Request: 14 | def __init__(self, func): 15 | self.func = func 16 | self.finished = asyncio.Event() 17 | self.response = None 18 | 19 | def set_finished(self): 20 | """Needed to be executed in the same thread as the loop. 21 | Since Event() is not thread-safe. 22 | """ 23 | self.finished.set() 24 | 25 | 26 | class AsyncioThreadWorker(ThreadWorker): 27 | def __init__(self, *, branch_from=None): 28 | self._loop = asyncio.get_event_loop() 29 | 30 | if branch_from is None: 31 | self._request_queue = asyncio.Queue(1) 32 | self._thread = threading.Thread(target=self.thread_fn, daemon=True) 33 | self._thread.start() 34 | else: 35 | self._request_queue = branch_from._request_queue 36 | self._thread = branch_from._thread 37 | 38 | self._branched = branch_from is not None 39 | self._has_quit = False 40 | 41 | def thread_fn(self): 42 | while True: 43 | fut = asyncio.run_coroutine_threadsafe( 44 | self._request_queue.get(), self._loop) 45 | try: 46 | request = fut.result() 47 | except CancelledError: 48 | continue 49 | 50 | if request.func is not None: 51 | request.response = outcome.capture(request.func) 52 | 53 | self._loop.call_soon_threadsafe(request.set_finished) 54 | else: 55 | self._loop.call_soon_threadsafe(request.set_finished) 56 | break 57 | 58 | async def run(self, func, args=(), kwargs=None): 59 | if self._has_quit: 60 | raise AlreadyQuit 61 | 62 | if kwargs: 63 | func = partial(func, *args, **kwargs) 64 | elif args: 65 | func = partial(func, *args) 66 | 67 | request = Request(func) 68 | await self._request_queue.put(request) 69 | await request.finished.wait() 70 | return request.response.unwrap() 71 | 72 | async def quit(self): 73 | if self._has_quit: 74 | raise AlreadyQuit 75 | 76 | self._has_quit = True 77 | 78 | if self._branched: 79 | return 80 | 81 | stop = Request(None) 82 | await self._request_queue.put(stop) 83 | await stop.finished.wait() 84 | 85 | 86 | class AsyncioEngine(AsyncEngine): 87 | """Mostly like :class:`sqlalchemy.engine.Engine` except some of the methods 88 | are coroutines.""" 89 | def __init__(self, pool, dialect, url, logging_name=None, echo=None, 90 | execution_options=None, **kwargs): 91 | 92 | super().__init__( 93 | pool, dialect, url, logging_name, echo, execution_options, **kwargs) 94 | 95 | def _make_worker(self, 
*, branch_from=None): 96 | return AsyncioThreadWorker(branch_from=branch_from) 97 | -------------------------------------------------------------------------------- /sqlalchemy_aio/trio.py: -------------------------------------------------------------------------------- 1 | import threading 2 | from contextlib import suppress 3 | from functools import partial 4 | 5 | import outcome 6 | import trio 7 | from trio import Cancelled, RunFinishedError 8 | 9 | from .base import AsyncEngine, ThreadWorker 10 | from .exc import AlreadyQuit 11 | 12 | _STOP = object() 13 | 14 | 15 | class TrioThreadWorker(ThreadWorker): 16 | def __init__(self, *, branch_from=None): 17 | if branch_from is None: 18 | self._trio_token = trio.lowlevel.current_trio_token() 19 | send_to_thread, receive_from_trio = trio.open_memory_channel(1) 20 | send_to_trio, receive_from_thread = trio.open_memory_channel(1) 21 | 22 | self._send_to_thread = send_to_thread 23 | self._send_to_trio = send_to_trio 24 | self._receive_from_trio = receive_from_trio 25 | self._receive_from_thread = receive_from_thread 26 | 27 | self._thread = threading.Thread(target=self.thread_fn, daemon=True) 28 | self._thread.start() 29 | else: 30 | self._send_to_thread = branch_from._send_to_thread 31 | self._send_to_trio = branch_from._send_to_trio 32 | self._receive_from_trio = branch_from._receive_from_trio 33 | self._receive_from_thread = branch_from._receive_from_thread 34 | self._thread = branch_from._thread 35 | 36 | self._branched = branch_from is not None 37 | self._has_quit = False 38 | 39 | def thread_fn(self): 40 | while True: 41 | try: 42 | request = trio.from_thread.run( 43 | self._receive_from_trio.receive, trio_token=self._trio_token 44 | ) 45 | except (Cancelled, RunFinishedError): 46 | break 47 | except trio.EndOfChannel: 48 | with suppress(Cancelled, RunFinishedError): 49 | trio.from_thread.run( 50 | self._send_to_trio.aclose, trio_token=self._trio_token 51 | ) 52 | break 53 | 54 | response = outcome.capture(request) 55 | trio.from_thread.run( 56 | self._send_to_trio.send, response, trio_token=self._trio_token 57 | ) 58 | 59 | async def run(self, func, args=(), kwargs=None): 60 | if self._has_quit: 61 | raise AlreadyQuit 62 | 63 | if kwargs: 64 | func = partial(func, *args, **kwargs) 65 | elif args: 66 | func = partial(func, *args) 67 | 68 | await self._send_to_thread.send(func) 69 | resp = await self._receive_from_thread.receive() 70 | return resp.unwrap() 71 | 72 | async def quit(self): 73 | if self._has_quit: 74 | raise AlreadyQuit 75 | 76 | self._has_quit = True 77 | 78 | if self._branched: 79 | return 80 | 81 | await self._send_to_thread.aclose() 82 | 83 | 84 | class TrioEngine(AsyncEngine): 85 | """Mostly like :class:`sqlalchemy.engine.Engine` except some of the methods 86 | are coroutines.""" 87 | def _make_worker(self, *, branch_from=None): 88 | return TrioThreadWorker(branch_from=branch_from) 89 | -------------------------------------------------------------------------------- /tests/trio/test_result_proxy.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import func, select 3 | from sqlalchemy.schema import CreateTable 4 | 5 | from sqlalchemy_aio.base import AsyncResultProxy 6 | 7 | 8 | @pytest.mark.trio 9 | async def test_result_proxy(trio_engine): 10 | result = await trio_engine.execute(select([1])) 11 | assert isinstance(result, AsyncResultProxy) 12 | await result.close() 13 | 14 | 15 | @pytest.mark.trio 16 | async def test_fetchone(trio_engine): 17 | 
result = await trio_engine.execute(select([1])) 18 | assert await result.fetchone() == (1,) 19 | await result.close() 20 | 21 | 22 | @pytest.mark.trio 23 | async def test_fetchmany_value(trio_engine): 24 | result = await trio_engine.execute(select([1])) 25 | assert await result.fetchmany() == [(1,)] 26 | await result.close() 27 | 28 | 29 | @pytest.mark.trio 30 | async def test_fetchmany_quantity(trio_engine, mytable): 31 | await trio_engine.execute(CreateTable(mytable)) 32 | await trio_engine.execute(mytable.insert()) 33 | await trio_engine.execute(mytable.insert()) 34 | result = await trio_engine.execute(select([mytable])) 35 | rows = await result.fetchmany(1) 36 | assert len(rows) == 1 37 | await result.close() 38 | await trio_engine.execute(mytable.delete()) 39 | 40 | 41 | @pytest.mark.trio 42 | async def test_fetchmany_all(trio_engine, mytable): 43 | await trio_engine.execute(CreateTable(mytable)) 44 | await trio_engine.execute(mytable.insert()) 45 | await trio_engine.execute(mytable.insert()) 46 | await trio_engine.execute(mytable.insert()) 47 | result = await trio_engine.execute(select([mytable])) 48 | rows = await result.fetchmany(100) 49 | assert len(rows) == 3 50 | await result.close() 51 | await trio_engine.execute(mytable.delete()) 52 | 53 | 54 | @pytest.mark.trio 55 | async def test_fetchall(trio_engine): 56 | result = await trio_engine.execute(select([1])) 57 | assert await result.fetchall() == [(1,)] 58 | 59 | 60 | @pytest.mark.trio 61 | async def test_scalar(trio_engine): 62 | result = await trio_engine.execute(select([1])) 63 | assert await result.scalar() == 1 64 | 65 | 66 | @pytest.mark.trio 67 | async def test_first(trio_engine): 68 | result = await trio_engine.execute(select([1])) 69 | assert await result.first() == (1,) 70 | await result.close() 71 | 72 | 73 | @pytest.mark.trio 74 | async def test_keys(trio_engine): 75 | result = await trio_engine.execute(select([func.now().label('time')])) 76 | assert await result.keys() == ['time'] 77 | await result.close() 78 | 79 | 80 | @pytest.mark.trio 81 | async def test_returns_rows(trio_engine, mytable): 82 | result = await trio_engine.execute(select([1])) 83 | assert result.returns_rows 84 | await result.close() 85 | result = await trio_engine.execute(CreateTable(mytable)) 86 | assert not result.returns_rows 87 | await result.close() 88 | 89 | 90 | @pytest.mark.trio 91 | async def test_rowcount(trio_engine, mytable): 92 | await trio_engine.execute(CreateTable(mytable)) 93 | await trio_engine.execute(mytable.insert()) 94 | await trio_engine.execute(mytable.insert()) 95 | result = await trio_engine.execute(mytable.delete()) 96 | assert result.rowcount == 2 97 | 98 | 99 | @pytest.mark.trio 100 | async def test_inserted_primary_key(trio_engine, mytable): 101 | await trio_engine.execute(CreateTable(mytable)) 102 | result = await trio_engine.execute(mytable.insert()) 103 | assert result.inserted_primary_key == [1] 104 | 105 | 106 | @pytest.mark.trio 107 | async def test_aiter(trio_engine, mytable): 108 | await trio_engine.execute(CreateTable(mytable)) 109 | await trio_engine.execute(mytable.insert()) 110 | await trio_engine.execute(mytable.insert()) 111 | result = await trio_engine.execute(select([mytable])) 112 | fetched = [] 113 | async for row in result: 114 | fetched.append(row) 115 | await trio_engine.execute(mytable.delete()) 116 | assert len(fetched) == 2 117 | -------------------------------------------------------------------------------- /tests/asyncio/test_result_proxy.py: 
-------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import func, select 3 | from sqlalchemy.schema import CreateTable 4 | 5 | from sqlalchemy_aio.base import AsyncResultProxy 6 | 7 | pytestmark = pytest.mark.noextras 8 | 9 | 10 | @pytest.mark.asyncio 11 | async def test_result_proxy(asyncio_engine): 12 | result = await asyncio_engine.execute(select([1])) 13 | assert isinstance(result, AsyncResultProxy) 14 | await result.close() 15 | 16 | 17 | @pytest.mark.asyncio 18 | async def test_fetchone(asyncio_engine): 19 | result = await asyncio_engine.execute(select([1])) 20 | assert await result.fetchone() == (1,) 21 | await result.close() 22 | 23 | 24 | @pytest.mark.asyncio 25 | async def test_fetchmany_value(asyncio_engine): 26 | result = await asyncio_engine.execute(select([1])) 27 | assert await result.fetchmany() == [(1,)] 28 | await result.close() 29 | 30 | 31 | @pytest.mark.asyncio 32 | async def test_fetchmany_quantity(asyncio_engine, mytable): 33 | await asyncio_engine.execute(CreateTable(mytable)) 34 | await asyncio_engine.execute(mytable.insert()) 35 | await asyncio_engine.execute(mytable.insert()) 36 | result = await asyncio_engine.execute(select([mytable])) 37 | rows = await result.fetchmany(1) 38 | assert len(rows) == 1 39 | await result.close() 40 | await asyncio_engine.execute(mytable.delete()) 41 | 42 | 43 | @pytest.mark.asyncio 44 | async def test_fetchmany_all(asyncio_engine, mytable): 45 | await asyncio_engine.execute(CreateTable(mytable)) 46 | await asyncio_engine.execute(mytable.insert()) 47 | await asyncio_engine.execute(mytable.insert()) 48 | await asyncio_engine.execute(mytable.insert()) 49 | result = await asyncio_engine.execute(select([mytable])) 50 | rows = await result.fetchmany(100) 51 | assert len(rows) == 3 52 | await result.close() 53 | await asyncio_engine.execute(mytable.delete()) 54 | 55 | 56 | @pytest.mark.asyncio 57 | async def test_fetchall(asyncio_engine): 58 | result = await asyncio_engine.execute(select([1])) 59 | assert await result.fetchall() == [(1,)] 60 | 61 | 62 | @pytest.mark.asyncio 63 | async def test_scalar(asyncio_engine): 64 | result = await asyncio_engine.execute(select([1])) 65 | assert await result.scalar() == 1 66 | 67 | 68 | @pytest.mark.asyncio 69 | async def test_first(asyncio_engine): 70 | result = await asyncio_engine.execute(select([1])) 71 | assert await result.first() == (1,) 72 | await result.close() 73 | 74 | 75 | @pytest.mark.asyncio 76 | async def test_keys(asyncio_engine): 77 | result = await asyncio_engine.execute(select([func.now().label('time')])) 78 | assert await result.keys() == ['time'] 79 | await result.close() 80 | 81 | 82 | @pytest.mark.asyncio 83 | async def test_returns_rows(asyncio_engine, mytable): 84 | result = await asyncio_engine.execute(select([1])) 85 | assert result.returns_rows 86 | await result.close() 87 | result = await asyncio_engine.execute(CreateTable(mytable)) 88 | assert not result.returns_rows 89 | await result.close() 90 | 91 | 92 | @pytest.mark.asyncio 93 | async def test_rowcount(asyncio_engine, mytable): 94 | await asyncio_engine.execute(CreateTable(mytable)) 95 | await asyncio_engine.execute(mytable.insert()) 96 | await asyncio_engine.execute(mytable.insert()) 97 | result = await asyncio_engine.execute(mytable.delete()) 98 | assert result.rowcount == 2 99 | 100 | 101 | @pytest.mark.asyncio 102 | async def test_inserted_primary_key(asyncio_engine, mytable): 103 | await asyncio_engine.execute(CreateTable(mytable)) 104 | result = 
await asyncio_engine.execute(mytable.insert()) 105 | assert result.inserted_primary_key == [1] 106 | 107 | 108 | @pytest.mark.asyncio 109 | async def test_aiter(asyncio_engine, mytable): 110 | await asyncio_engine.execute(CreateTable(mytable)) 111 | await asyncio_engine.execute(mytable.insert()) 112 | await asyncio_engine.execute(mytable.insert()) 113 | result = await asyncio_engine.execute(select([mytable])) 114 | fetched = [] 115 | async for row in result: 116 | fetched.append(row) 117 | await asyncio_engine.execute(mytable.delete()) 118 | assert len(fetched) == 2 119 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | ## [Unreleased][unreleased] 3 | 4 | N/A 5 | 6 | ## [0.17.0] 7 | 8 | ### Added 9 | - Python 3.9 and 3.10 support. 10 | 11 | ### Removed 12 | - The previously deprecated loop argument to `AsyncioEngine`. 13 | 14 | ### Fixed 15 | - Added `sqlalchemy<1.4` version constraint since SQLAlchemy 1.4 is not supported. 16 | - Added `AsyncEngine.hide_parameters` property. 17 | 18 | ## [0.16.0] 19 | 20 | ### Changed 21 | - Trio support requires trio 0.15+ 22 | 23 | ### Deprecated 24 | - The loop argument to `AsyncioEngine`. 25 | 26 | ### Removed 27 | - Support for Python 3.5 28 | 29 | ## [0.15.0] 30 | ### Fixed 31 | - Concurrency problems in `AsyncioThreadWorker` ([#20][]). 32 | - Wait for worker to quit if `AsyncConnection.connect` fails. 33 | 34 | ### Changed 35 | - Trio support requires trio 0.12+ 36 | 37 | [#20]: https://github.com/RazerM/sqlalchemy_aio/issues/20 38 | 39 | ## [0.14.1] 40 | ### Fixed 41 | - Wait for worker to quit if `AsyncEngine.connect` fails ([#18][]). 42 | 43 | [#18]: https://github.com/RazerM/sqlalchemy_aio/issues/18 44 | 45 | ## [0.14.0] 46 | ### Added 47 | - `AsyncConnection.dialect` property. 48 | - `AsyncEngine.sync_engine` property. 49 | - `AsyncConnection.sync_connection` property. 50 | - Blocking method `run_callable` has been implemented for 51 | `AsyncConnection` and `AsyncEngine`. This allows 52 | `Table(..., autoload_with=engine)`, which emits a `BlockingWarning` ([#10][]). 53 | - Detects attempts to use `Table().create(bind=engine)` or 54 | `MetaData().create_all()` and raises a helpful error message. 55 | - Detects attempts to use `MetaData().reflect()` and raises a helpful 56 | error message ([#13][]). 57 | - `AsyncConnection.connect()` method. 58 | - Public `run_in_thread()` async method has been added to `AsyncConnection` 59 | and `AsyncEngine`. 60 | - Detects attempts to use `event.listen()` with `AsyncConnection` or 61 | `AsyncEngine` and raises a more helpful error message ([#1][]). 62 | 63 | ### Changed 64 | - Trio support requires trio 0.9+ 65 | 66 | ### Fixed 67 | - `ThreadWorker.quit()` will raise `AlreadyQuit` instead of blocking. 68 | This is only called internally. 69 | - Connections created using `AsyncEngine.begin()` now create their own 70 | worker, like `AsyncEngine.connect()`. 71 | - Passing `echo=True` to `create_engine` was broken ([#12][]). 72 | 73 | [#1]: https://github.com/RazerM/sqlalchemy_aio/issues/1 74 | [#10]: https://github.com/RazerM/sqlalchemy_aio/issues/10 75 | [#12]: https://github.com/RazerM/sqlalchemy_aio/issues/12 76 | [#13]: https://github.com/RazerM/sqlalchemy_aio/issues/13 77 | 78 | ## [0.13.0] 79 | ### Added 80 | - [Trio] support with `TRIO_STRATEGY`. 
81 | 82 | ### Changed 83 | - A new `ThreadWorker` class is used internally to defer work to instead 84 | of using a `ThreadPoolExecutor`. 85 | 86 | [Trio]: https://github.com/python-trio/trio 87 | 88 | ## [0.12.0] - 2018-02-06 89 | ### Added 90 | - `AsyncioResultProxy.fetchmany` 91 | - `AsyncioResultProxy.__aiter__` 92 | 93 | ## [0.11.0] - 2017-03-12 94 | ### Added 95 | - `AsyncioEngine.scalar()` 96 | - `AsyncioConnection.scalar()` 97 | 98 | ### Fixed 99 | - Connections now get their own thread. Now threadsafe DBAPI modules are more 100 | useful without passing a custom executor as in 0.10.0 101 | 102 | ### Changed 103 | - **Backwards incompatible:** removed `executor` argument, since the engine 104 | takes care of threads now. 105 | 106 | 107 | ## [0.10.0] - 2016-12-19 108 | Initial release. 109 | 110 | [unreleased]: https://github.com/RazerM/sqlalchemy_aio/compare/0.17.0...HEAD 111 | [0.17.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.16.0...0.17.0 112 | [0.16.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.15.0...0.16.0 113 | [0.15.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.14.1...0.15.0 114 | [0.14.1]: https://github.com/RazerM/sqlalchemy_aio/compare/0.14.0...0.14.1 115 | [0.14.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.13.0...0.14.0 116 | [0.13.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.12.0...0.13.0 117 | [0.12.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.11.0...0.12.0 118 | [0.11.0]: https://github.com/RazerM/sqlalchemy_aio/compare/0.10.0...0.11.0 119 | [0.10.0]: https://github.com/RazerM/sqlalchemy_aio/compare/458d37d8...0.10.0 120 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | sqlalchemy_aio 2 | ============== 3 | 4 | |PyPI Version| |Documentation| |Travis| |Coverage| |MIT License| 5 | 6 | ``sqlalchemy_aio`` adds asyncio and `Trio`_ support to SQLAlchemy core, derived 7 | from `alchimia`_. 8 | 9 | +-------------------------------------------------------------------------------+ 10 | | ⚠️ **Compatibility Note** | 11 | +===============================================================================+ 12 | | **SQLAlchemy 1.3** is the latest supported version. **SQLAlchemy 1.4** | 13 | | brings `native asyncio support`_, so you should consider using that instead. | 14 | +-------------------------------------------------------------------------------+ 15 | 16 | .. _alchimia: https://github.com/alex/alchimia 17 | .. _Trio: https://github.com/python-trio/trio 18 | .. _`native asyncio support`: https://docs.sqlalchemy.org/en/14/orm/extensions/asyncio.html 19 | 20 | 21 | Getting started 22 | --------------- 23 | 24 | .. code-block:: python 25 | 26 | import asyncio 27 | 28 | from sqlalchemy_aio import ASYNCIO_STRATEGY 29 | 30 | from sqlalchemy import ( 31 | Column, Integer, MetaData, Table, Text, create_engine, select) 32 | from sqlalchemy.schema import CreateTable, DropTable 33 | 34 | 35 | async def main(): 36 | engine = create_engine( 37 | # In-memory sqlite database cannot be accessed from different 38 | # threads, use file. 
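        # (sqlalchemy_aio runs queries on a separate worker thread, and an
        # in-memory SQLite database is private to a single connection.)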
39 | 'sqlite:///test.db', strategy=ASYNCIO_STRATEGY 40 | ) 41 | 42 | metadata = MetaData() 43 | users = Table( 44 | 'users', metadata, 45 | Column('id', Integer, primary_key=True), 46 | Column('name', Text), 47 | ) 48 | 49 | # Create the table 50 | await engine.execute(CreateTable(users)) 51 | 52 | conn = await engine.connect() 53 | 54 | # Insert some users 55 | await conn.execute(users.insert().values(name='Jeremy Goodwin')) 56 | await conn.execute(users.insert().values(name='Natalie Hurley')) 57 | await conn.execute(users.insert().values(name='Dan Rydell')) 58 | await conn.execute(users.insert().values(name='Casey McCall')) 59 | await conn.execute(users.insert().values(name='Dana Whitaker')) 60 | 61 | result = await conn.execute(users.select(users.c.name.startswith('D'))) 62 | d_users = await result.fetchall() 63 | 64 | await conn.close() 65 | 66 | # Print out the users 67 | for user in d_users: 68 | print('Username: %s' % user[users.c.name]) 69 | 70 | # Supports context async managers 71 | async with engine.connect() as conn: 72 | async with conn.begin() as trans: 73 | assert await conn.scalar(select([1])) == 1 74 | 75 | await engine.execute(DropTable(users)) 76 | 77 | 78 | if __name__ == '__main__': 79 | loop = asyncio.get_event_loop() 80 | loop.run_until_complete(main()) 81 | 82 | Getting started with Trio 83 | ------------------------- 84 | 85 | To use the above example with `Trio`_, just change the following: 86 | 87 | .. code-block:: python 88 | 89 | import trio 90 | from sqlalchemy_aio import TRIO_STRATEGY 91 | 92 | async def main(): 93 | engine = create_engine('sqlite:///test.db', strategy=TRIO_STRATEGY) 94 | 95 | ... 96 | 97 | trio.run(main) 98 | 99 | What is this? 100 | ------------- 101 | 102 | It's *not* an ``asyncio`` implementation of SQLAlchemy or the drivers it uses. 103 | ``sqlalchemy_aio`` lets you use SQLAlchemy by running operations in a separate 104 | thread. 105 | 106 | If you're already using `run_in_executor`_ to execute SQLAlchemy tasks, 107 | ``sqlalchemy_aio`` will work well with similar performance. If performance is 108 | critical, perhaps `asyncpg`_ can help. 109 | 110 | .. _asyncpg: https://github.com/MagicStack/asyncpg 111 | .. _`run_in_executor`: https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.AbstractEventLoop.run_in_executor 112 | 113 | Documentation 114 | ------------- 115 | 116 | `The documentation`_ has more information, including limitations of the API. 117 | 118 | .. _The documentation: https://sqlalchemy-aio.readthedocs.io/en/latest/ 119 | 120 | 121 | .. |PyPI Version| image:: https://img.shields.io/pypi/v/sqlalchemy_aio.svg?style=flat-square 122 | :target: https://pypi.python.org/pypi/sqlalchemy_aio/ 123 | .. |Documentation| image:: https://img.shields.io/badge/docs-latest-brightgreen.svg?style=flat-square 124 | :target: https://sqlalchemy-aio.readthedocs.io/en/latest/ 125 | .. |Travis| image:: http://img.shields.io/travis/RazerM/sqlalchemy_aio/master.svg?style=flat-square&label=travis 126 | :target: https://travis-ci.org/RazerM/sqlalchemy_aio 127 | .. |Coverage| image:: https://img.shields.io/codecov/c/github/RazerM/sqlalchemy_aio/master.svg?style=flat-square 128 | :target: https://codecov.io/github/RazerM/sqlalchemy_aio?branch=master 129 | .. 
|MIT License| image:: http://img.shields.io/badge/license-MIT-blue.svg?style=flat-square 130 | :target: https://raw.githubusercontent.com/RazerM/sqlalchemy_aio/master/LICENSE 131 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | sqlalchemy_aio 2 | ============== 3 | 4 | What is this? 5 | ------------- 6 | 7 | It's *not* an ``asyncio`` implementation of SQLAlchemy or the drivers it uses. 8 | ``sqlalchemy_aio`` lets you use SQLAlchemy by running operations in a separate 9 | thread. 10 | 11 | If you're already using :meth:`run_in_executor` to execute SQLAlchemy tasks, 12 | ``sqlalchemy_aio`` will work well with similar performance. If performance is 13 | critical, perhaps `asyncpg`_ can help. 14 | 15 | .. _asyncpg: https://github.com/MagicStack/asyncpg 16 | 17 | Threading Model 18 | --------------- 19 | 20 | Explicit connections (:meth:`engine.connect()\ 21 | `) each run in their own thread. 22 | The engine uses a single worker thread, including implicit connections (e.g. 23 | :meth:`engine.execute() `). 24 | 25 | Getting started 26 | --------------- 27 | 28 | Asyncio 29 | ~~~~~~~ 30 | 31 | .. code-block:: python 32 | 33 | import asyncio 34 | 35 | from sqlalchemy_aio import ASYNCIO_STRATEGY 36 | 37 | from sqlalchemy import ( 38 | Column, Integer, MetaData, Table, Text, create_engine, select) 39 | from sqlalchemy.schema import CreateTable, DropTable 40 | 41 | 42 | async def main(): 43 | engine = create_engine( 44 | # In-memory sqlite database cannot be accessed from different 45 | # threads, use file. 46 | 'sqlite:///test.db', strategy=ASYNCIO_STRATEGY 47 | ) 48 | 49 | metadata = MetaData() 50 | users = Table( 51 | 'users', metadata, 52 | Column('id', Integer, primary_key=True), 53 | Column('name', Text), 54 | ) 55 | 56 | # Create the table 57 | await engine.execute(CreateTable(users)) 58 | 59 | conn = await engine.connect() 60 | 61 | # Insert some users 62 | await conn.execute(users.insert().values(name='Jeremy Goodwin')) 63 | await conn.execute(users.insert().values(name='Natalie Hurley')) 64 | await conn.execute(users.insert().values(name='Dan Rydell')) 65 | await conn.execute(users.insert().values(name='Casey McCall')) 66 | await conn.execute(users.insert().values(name='Dana Whitaker')) 67 | 68 | result = await conn.execute(users.select(users.c.name.startswith('D'))) 69 | d_users = await result.fetchall() 70 | 71 | await conn.close() 72 | 73 | # Print out the users 74 | for user in d_users: 75 | print('Username: %s' % user[users.c.name]) 76 | 77 | # Supports context async managers 78 | async with engine.connect() as conn: 79 | async with conn.begin() as trans: 80 | assert await conn.scalar(select([1])) == 1 81 | 82 | await engine.execute(DropTable(users)) 83 | 84 | 85 | if __name__ == '__main__': 86 | loop = asyncio.get_event_loop() 87 | loop.run_until_complete(main()) 88 | 89 | Trio 90 | ~~~~ 91 | 92 | .. code-block:: python 93 | 94 | import trio 95 | from sqlalchemy_aio import TRIO_STRATEGY 96 | 97 | from sqlalchemy import ( 98 | Column, Integer, MetaData, Table, Text, create_engine, select) 99 | from sqlalchemy.schema import CreateTable, DropTable 100 | 101 | 102 | async def main(): 103 | engine = create_engine( 104 | # In-memory sqlite database cannot be accessed from different 105 | # threads, use file. 
106 | 'sqlite:///test.db', strategy=TRIO_STRATEGY 107 | ) 108 | 109 | metadata = MetaData() 110 | users = Table( 111 | 'users', metadata, 112 | Column('id', Integer, primary_key=True), 113 | Column('name', Text), 114 | ) 115 | 116 | # Create the table 117 | await engine.execute(CreateTable(users)) 118 | 119 | conn = await engine.connect() 120 | 121 | # Insert some users 122 | await conn.execute(users.insert().values(name='Jeremy Goodwin')) 123 | await conn.execute(users.insert().values(name='Natalie Hurley')) 124 | await conn.execute(users.insert().values(name='Dan Rydell')) 125 | await conn.execute(users.insert().values(name='Casey McCall')) 126 | await conn.execute(users.insert().values(name='Dana Whitaker')) 127 | 128 | result = await conn.execute(users.select(users.c.name.startswith('D'))) 129 | d_users = await result.fetchall() 130 | 131 | await conn.close() 132 | 133 | # Print out the users 134 | for user in d_users: 135 | print('Username: %s' % user[users.c.name]) 136 | 137 | # Supports context async managers 138 | async with engine.connect() as conn: 139 | async with conn.begin() as trans: 140 | assert await conn.scalar(select([1])) == 1 141 | 142 | await engine.execute(DropTable(users)) 143 | 144 | 145 | if __name__ == '__main__': 146 | trio.run(main) 147 | 148 | 149 | Contents 150 | ======== 151 | 152 | .. toctree:: 153 | :maxdepth: 2 154 | 155 | ddl 156 | api-reference 157 | limitations 158 | contributing 159 | 160 | 161 | 162 | Indices and tables 163 | ================== 164 | 165 | * :ref:`genindex` 166 | * :ref:`modindex` 167 | * :ref:`search` 168 | 169 | -------------------------------------------------------------------------------- /tests/trio/test_connection.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from contextlib import suppress 3 | 4 | import pytest 5 | from sqlalchemy import MetaData, Table, event, select 6 | from sqlalchemy.exc import NoSuchTableError, StatementError 7 | from sqlalchemy.schema import CreateTable 8 | 9 | from sqlalchemy_aio.base import AsyncTransaction 10 | from sqlalchemy_aio.exc import BlockingWarning 11 | 12 | 13 | @pytest.mark.trio 14 | async def test_execute(trio_engine): 15 | conn = await trio_engine.connect() 16 | result = await conn.execute(select([1])) 17 | assert await result.scalar() == 1 18 | await conn.close() 19 | 20 | 21 | @pytest.mark.trio 22 | async def test_scalar(trio_engine): 23 | async with trio_engine.connect() as conn: 24 | assert await conn.scalar(select([1])) == 1 25 | 26 | 27 | @pytest.mark.trio 28 | async def test_close(trio_engine): 29 | conn = await trio_engine.connect() 30 | assert not conn.closed 31 | 32 | result = await conn.execute(select([1])) 33 | assert await result.scalar() == 1 34 | 35 | await conn.close() 36 | assert conn.closed 37 | 38 | with pytest.raises(StatementError, match='This Connection is closed'): 39 | await conn.close() 40 | 41 | with pytest.raises(StatementError, match='This Connection is closed'): 42 | await conn.execute(select([1])) 43 | 44 | with pytest.raises(StatementError, match='This Connection is closed'): 45 | await conn.begin() 46 | 47 | with pytest.raises(StatementError, match='This Connection is closed'): 48 | await conn.begin_nested() 49 | 50 | 51 | @pytest.mark.trio 52 | async def test_in_transaction(trio_engine): 53 | conn = await trio_engine.connect() 54 | assert not conn.in_transaction() 55 | 56 | trans = await conn.begin() 57 | assert isinstance(trans, AsyncTransaction) 58 | assert conn.in_transaction() 59 | 60 | 
await trans.close() 61 | assert not conn.in_transaction() 62 | 63 | await conn.close() 64 | 65 | 66 | @pytest.mark.trio 67 | async def test_transaction_commit(trio_engine, mytable): 68 | async with trio_engine.connect() as conn: 69 | trans = await conn.begin() 70 | await conn.execute(CreateTable(mytable)) 71 | await conn.execute(mytable.insert()) 72 | 73 | result = await conn.execute(mytable.select()) 74 | rows = await result.fetchall() 75 | assert len(rows) == 1 76 | 77 | await trans.commit() 78 | 79 | result = await conn.execute(mytable.select()) 80 | rows = await result.fetchall() 81 | assert len(rows) == 1 82 | 83 | 84 | @pytest.mark.trio 85 | async def test_transaction_rollback(trio_engine, mytable): 86 | async with trio_engine.connect() as conn: 87 | await conn.execute(CreateTable(mytable)) 88 | 89 | trans = await conn.begin() 90 | await conn.execute(mytable.insert()) 91 | 92 | result = await conn.execute(mytable.select()) 93 | rows = await result.fetchall() 94 | assert len(rows) == 1 95 | 96 | await trans.rollback() 97 | 98 | result = await conn.execute(mytable.select()) 99 | rows = await result.fetchall() 100 | assert len(rows) == 0 101 | 102 | 103 | @pytest.mark.trio 104 | async def test_transaction_context_manager_success(trio_engine, mytable): 105 | async with trio_engine.connect() as conn: 106 | await conn.execute(CreateTable(mytable)) 107 | 108 | async with conn.begin() as trans: 109 | await conn.execute(mytable.insert()) 110 | 111 | result = await conn.execute(mytable.select()) 112 | rows = await result.fetchall() 113 | assert len(rows) == 1 114 | 115 | result = await conn.execute(mytable.select()) 116 | rows = await result.fetchall() 117 | assert len(rows) == 1 118 | 119 | 120 | @pytest.mark.trio 121 | async def test_transaction_context_manager_failure(trio_engine, mytable): 122 | async with trio_engine.connect() as conn: 123 | await conn.execute(CreateTable(mytable)) 124 | 125 | with pytest.raises(RuntimeError): 126 | async with conn.begin() as trans: 127 | await conn.execute(mytable.insert()) 128 | 129 | result = await conn.execute(mytable.select()) 130 | rows = await result.fetchall() 131 | assert len(rows) == 1 132 | 133 | raise RuntimeError 134 | 135 | result = await conn.execute(mytable.select()) 136 | rows = await result.fetchall() 137 | assert len(rows) == 0 138 | 139 | 140 | @pytest.mark.trio 141 | async def test_begin_nested(trio_engine, mytable): 142 | async with trio_engine.connect() as conn: 143 | await conn.execute(CreateTable(mytable)) 144 | 145 | async with conn.begin() as trans1: 146 | await conn.execute(mytable.insert()) 147 | 148 | async with conn.begin_nested() as trans2: 149 | assert isinstance(trans2, AsyncTransaction) 150 | await conn.execute(mytable.insert()) 151 | await trans2.rollback() 152 | 153 | await trans1.commit() 154 | 155 | result = await conn.execute(mytable.select()) 156 | rows = await result.fetchall() 157 | assert len(rows) == 1 158 | 159 | 160 | @pytest.mark.trio 161 | async def test_run_callable_warning(trio_engine): 162 | meta = MetaData() 163 | thread_called = False 164 | 165 | # we must use sqlite connections in the same thread they were created in, 166 | # hence the indirection here. 
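    # conn.run_in_thread() runs thread_fn on the worker thread that owns the
    # underlying sqlite connection.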
167 | 168 | def thread_fn(conn): 169 | nonlocal thread_called 170 | 171 | with pytest.warns(BlockingWarning, match='sync_connection') as record: 172 | with suppress(NoSuchTableError): 173 | Table('sometable', meta, autoload_with=conn) 174 | 175 | assert len(record) == 1 176 | 177 | with warnings.catch_warnings(): 178 | warnings.simplefilter('error') 179 | with suppress(NoSuchTableError): 180 | Table('sometable', meta, autoload_with=conn.sync_connection) 181 | 182 | thread_called = True 183 | 184 | async with trio_engine.connect() as conn: 185 | await conn.run_in_thread(thread_fn, conn) 186 | assert thread_called 187 | 188 | 189 | @pytest.mark.trio 190 | async def test_run_visitor_exception(trio_engine, mytable): 191 | thread_called = False 192 | 193 | def thread_fn(conn): 194 | nonlocal thread_called 195 | 196 | with pytest.raises(AttributeError, match='Did you try to use'): 197 | mytable.create(conn) 198 | 199 | mytable.create(conn.sync_connection) 200 | 201 | thread_called = True 202 | 203 | async with trio_engine.connect() as conn: 204 | await conn.run_in_thread(thread_fn, conn) 205 | assert thread_called 206 | 207 | 208 | @pytest.mark.trio 209 | async def test_sync_cm_exception(trio_engine): 210 | thread_called = False 211 | 212 | def thread_fn(conn): 213 | nonlocal thread_called 214 | 215 | meta = MetaData() 216 | with warnings.catch_warnings(): 217 | # ignore warning caused by creating a runtime that is never awaited 218 | warnings.simplefilter('ignore', RuntimeWarning) 219 | with pytest.raises(TypeError, match='Use async with'): 220 | meta.reflect(conn) 221 | 222 | meta.reflect(conn.sync_connection) 223 | 224 | thread_called = True 225 | 226 | async with trio_engine.connect() as conn: 227 | await conn.run_in_thread(thread_fn, conn) 228 | assert thread_called 229 | 230 | 231 | @pytest.mark.trio 232 | async def test_event_listen_exception(trio_engine): 233 | async with trio_engine.connect() as conn: 234 | with pytest.raises(AttributeError, match='Did you try to use'): 235 | event.listen(conn, 'connect', None) 236 | 237 | 238 | @pytest.mark.trio 239 | async def test_connection_connect(trio_engine): 240 | async with trio_engine.connect() as conn1: 241 | assert await conn1.scalar(select([1])) == 1 242 | async with conn1.connect() as conn2: 243 | assert await conn2.scalar(select([1])) == 1 244 | 245 | assert not conn1.closed 246 | assert conn2.closed 247 | assert not conn1._worker._has_quit 248 | assert conn2._worker._has_quit 249 | 250 | assert conn1.closed 251 | assert conn1._worker._has_quit 252 | 253 | 254 | @pytest.mark.trio 255 | async def test_attribute_error(trio_engine): 256 | async with trio_engine.connect() as conn: 257 | with pytest.raises(AttributeError): 258 | conn.spam 259 | -------------------------------------------------------------------------------- /tests/asyncio/test_connection.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from contextlib import suppress 3 | 4 | import pytest 5 | from sqlalchemy import MetaData, Table, event, select 6 | from sqlalchemy.exc import NoSuchTableError, StatementError 7 | from sqlalchemy.schema import CreateTable 8 | 9 | from sqlalchemy_aio.base import AsyncTransaction 10 | from sqlalchemy_aio.exc import BlockingWarning 11 | 12 | pytestmark = pytest.mark.noextras 13 | 14 | 15 | @pytest.mark.asyncio 16 | async def test_execute(asyncio_engine): 17 | conn = await asyncio_engine.connect() 18 | result = await conn.execute(select([1])) 19 | assert await result.scalar() == 1 20 | 
await conn.close() 21 | 22 | 23 | @pytest.mark.asyncio 24 | async def test_scalar(asyncio_engine): 25 | async with asyncio_engine.connect() as conn: 26 | assert await conn.scalar(select([1])) == 1 27 | 28 | 29 | @pytest.mark.asyncio 30 | async def test_close(asyncio_engine): 31 | conn = await asyncio_engine.connect() 32 | assert not conn.closed 33 | 34 | result = await conn.execute(select([1])) 35 | assert await result.scalar() == 1 36 | 37 | await conn.close() 38 | assert conn.closed 39 | 40 | with pytest.raises(StatementError, match='This Connection is closed'): 41 | await conn.close() 42 | 43 | with pytest.raises(StatementError, match='This Connection is closed'): 44 | await conn.execute(select([1])) 45 | 46 | with pytest.raises(StatementError, match='This Connection is closed'): 47 | await conn.begin() 48 | 49 | with pytest.raises(StatementError, match='This Connection is closed'): 50 | await conn.begin_nested() 51 | 52 | 53 | @pytest.mark.asyncio 54 | async def test_in_transaction(asyncio_engine): 55 | conn = await asyncio_engine.connect() 56 | assert not conn.in_transaction() 57 | 58 | trans = await conn.begin() 59 | assert isinstance(trans, AsyncTransaction) 60 | assert conn.in_transaction() 61 | 62 | await trans.close() 63 | assert not conn.in_transaction() 64 | 65 | await conn.close() 66 | 67 | 68 | @pytest.mark.asyncio 69 | async def test_transaction_commit(asyncio_engine, mytable): 70 | async with asyncio_engine.connect() as conn: 71 | trans = await conn.begin() 72 | await conn.execute(CreateTable(mytable)) 73 | await conn.execute(mytable.insert()) 74 | 75 | result = await conn.execute(mytable.select()) 76 | rows = await result.fetchall() 77 | assert len(rows) == 1 78 | 79 | await trans.commit() 80 | 81 | result = await conn.execute(mytable.select()) 82 | rows = await result.fetchall() 83 | assert len(rows) == 1 84 | 85 | 86 | @pytest.mark.asyncio 87 | async def test_transaction_rollback(asyncio_engine, mytable): 88 | async with asyncio_engine.connect() as conn: 89 | await conn.execute(CreateTable(mytable)) 90 | 91 | trans = await conn.begin() 92 | await conn.execute(mytable.insert()) 93 | 94 | result = await conn.execute(mytable.select()) 95 | rows = await result.fetchall() 96 | assert len(rows) == 1 97 | 98 | await trans.rollback() 99 | 100 | result = await conn.execute(mytable.select()) 101 | rows = await result.fetchall() 102 | assert len(rows) == 0 103 | 104 | 105 | @pytest.mark.asyncio 106 | async def test_transaction_context_manager_success(asyncio_engine, mytable): 107 | async with asyncio_engine.connect() as conn: 108 | await conn.execute(CreateTable(mytable)) 109 | 110 | async with conn.begin() as trans: 111 | await conn.execute(mytable.insert()) 112 | 113 | result = await conn.execute(mytable.select()) 114 | rows = await result.fetchall() 115 | assert len(rows) == 1 116 | 117 | result = await conn.execute(mytable.select()) 118 | rows = await result.fetchall() 119 | assert len(rows) == 1 120 | 121 | 122 | @pytest.mark.asyncio 123 | async def test_transaction_context_manager_failure(asyncio_engine, mytable): 124 | async with asyncio_engine.connect() as conn: 125 | await conn.execute(CreateTable(mytable)) 126 | 127 | with pytest.raises(RuntimeError): 128 | async with conn.begin() as trans: 129 | await conn.execute(mytable.insert()) 130 | 131 | result = await conn.execute(mytable.select()) 132 | rows = await result.fetchall() 133 | assert len(rows) == 1 134 | 135 | raise RuntimeError 136 | 137 | result = await conn.execute(mytable.select()) 138 | rows = await 
result.fetchall() 139 | assert len(rows) == 0 140 | 141 | 142 | @pytest.mark.asyncio 143 | async def test_begin_nested(asyncio_engine, mytable): 144 | async with asyncio_engine.connect() as conn: 145 | await conn.execute(CreateTable(mytable)) 146 | 147 | async with conn.begin() as trans1: 148 | await conn.execute(mytable.insert()) 149 | 150 | async with conn.begin_nested() as trans2: 151 | assert isinstance(trans2, AsyncTransaction) 152 | await conn.execute(mytable.insert()) 153 | await trans2.rollback() 154 | 155 | await trans1.commit() 156 | 157 | result = await conn.execute(mytable.select()) 158 | rows = await result.fetchall() 159 | assert len(rows) == 1 160 | 161 | 162 | @pytest.mark.asyncio 163 | async def test_run_callable_warning(asyncio_engine): 164 | meta = MetaData() 165 | thread_called = False 166 | 167 | # we must use sqlite connections in the same thread they were created in, 168 | # hence the indirection here. 169 | 170 | def thread_fn(conn): 171 | nonlocal thread_called 172 | 173 | with pytest.warns(BlockingWarning, match='sync_connection') as record: 174 | with suppress(NoSuchTableError): 175 | Table('sometable', meta, autoload_with=conn) 176 | 177 | assert len(record) == 1 178 | 179 | with warnings.catch_warnings(): 180 | warnings.simplefilter('error') 181 | with suppress(NoSuchTableError): 182 | Table('sometable', meta, autoload_with=conn.sync_connection) 183 | 184 | thread_called = True 185 | 186 | async with asyncio_engine.connect() as conn: 187 | await conn.run_in_thread(thread_fn, conn) 188 | assert thread_called 189 | 190 | 191 | @pytest.mark.asyncio 192 | async def test_run_visitor_exception(asyncio_engine, mytable): 193 | thread_called = False 194 | 195 | def thread_fn(conn): 196 | nonlocal thread_called 197 | 198 | with pytest.raises(AttributeError, match='Did you try to use'): 199 | mytable.create(conn) 200 | 201 | mytable.create(conn.sync_connection) 202 | 203 | thread_called = True 204 | 205 | async with asyncio_engine.connect() as conn: 206 | await conn.run_in_thread(thread_fn, conn) 207 | assert thread_called 208 | 209 | 210 | @pytest.mark.asyncio 211 | async def test_sync_cm_exception(asyncio_engine): 212 | thread_called = False 213 | 214 | def thread_fn(conn): 215 | nonlocal thread_called 216 | 217 | meta = MetaData() 218 | with warnings.catch_warnings(): 219 | # ignore warning caused by creating a runtime that is never awaited 220 | warnings.simplefilter('ignore', RuntimeWarning) 221 | with pytest.raises(TypeError, match='Use async with'): 222 | meta.reflect(conn) 223 | 224 | meta.reflect(conn.sync_connection) 225 | 226 | thread_called = True 227 | 228 | async with asyncio_engine.connect() as conn: 229 | await conn.run_in_thread(thread_fn, conn) 230 | assert thread_called 231 | 232 | 233 | @pytest.mark.asyncio 234 | async def test_event_listen_exception(asyncio_engine): 235 | async with asyncio_engine.connect() as conn: 236 | with pytest.raises(AttributeError, match='Did you try to use'): 237 | event.listen(conn, 'connect', None) 238 | 239 | 240 | @pytest.mark.asyncio 241 | async def test_connection_connect(asyncio_engine): 242 | async with asyncio_engine.connect() as conn1: 243 | assert await conn1.scalar(select([1])) == 1 244 | async with conn1.connect() as conn2: 245 | assert await conn2.scalar(select([1])) == 1 246 | 247 | assert not conn1.closed 248 | assert conn2.closed 249 | assert not conn1._worker._has_quit 250 | assert conn2._worker._has_quit 251 | 252 | assert conn1.closed 253 | assert conn1._worker._has_quit 254 | 255 | 256 | 
@pytest.mark.asyncio 257 | async def test_attribute_error(asyncio_engine): 258 | async with asyncio_engine.connect() as conn: 259 | with pytest.raises(AttributeError): 260 | conn.spam 261 | -------------------------------------------------------------------------------- /tests/trio/test_engine.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from contextlib import suppress 3 | from functools import partial 4 | from unittest.mock import Mock, patch 5 | 6 | import pytest 7 | from sqlalchemy import MetaData, Table, create_engine, event, select 8 | from sqlalchemy.exc import NoSuchTableError 9 | from sqlalchemy.schema import CreateTable 10 | 11 | from sqlalchemy_aio import TRIO_STRATEGY 12 | from sqlalchemy_aio.base import AsyncConnection, AsyncTransaction 13 | from sqlalchemy_aio.exc import BlockingWarning 14 | 15 | 16 | def test_create_engine(): 17 | from sqlalchemy_aio.trio import TrioEngine 18 | engine = create_engine('sqlite://', strategy=TRIO_STRATEGY) 19 | assert isinstance(engine, TrioEngine) 20 | 21 | 22 | @pytest.mark.trio 23 | async def test_implicit_loop(): 24 | engine = create_engine('sqlite://', strategy=TRIO_STRATEGY) 25 | assert await engine.scalar(select([1])) == 1 26 | 27 | 28 | @pytest.mark.trio 29 | async def test_run_in_thread(trio_engine): 30 | def fn(*args, **kwargs): 31 | return args, kwargs 32 | 33 | assert await trio_engine._run_in_thread(fn) == ((), {}) 34 | assert await trio_engine._run_in_thread(fn, 1, 2, a=3) == ((1, 2), {'a': 3}) 35 | assert await trio_engine._run_in_thread(fn, 1) == ((1,), {}) 36 | 37 | # Test that self is passed to the function rather than consumed by the 38 | # method. 39 | assert await trio_engine._run_in_thread(fn, self=1) == ((), {'self': 1}) 40 | 41 | 42 | @pytest.mark.trio 43 | async def test_connect(trio_engine): 44 | conn = await trio_engine.connect() 45 | assert isinstance(conn, AsyncConnection) 46 | await conn.close() 47 | 48 | 49 | @pytest.mark.trio 50 | async def test_connect_context_manager(trio_engine): 51 | async with trio_engine.connect() as conn: 52 | assert isinstance(conn, AsyncConnection) 53 | assert conn.closed 54 | 55 | 56 | @pytest.mark.trio 57 | async def test_implicit_transaction_success(trio_engine, mytable): 58 | if ':memory:' in str(trio_engine.sync_engine.url): 59 | pytest.skip(":memory: connections don't persist across threads") 60 | 61 | async with trio_engine.begin() as conn: 62 | assert isinstance(conn, AsyncConnection) 63 | 64 | await conn.execute(CreateTable(mytable)) 65 | await conn.execute(mytable.insert()) 66 | result = await conn.execute(mytable.select()) 67 | rows = await result.fetchall() 68 | assert len(rows) == 1 69 | 70 | # Transaction should have been committed automatically 71 | result = await trio_engine.execute(mytable.select()) 72 | rows = await result.fetchall() 73 | assert len(rows) == 1 74 | 75 | 76 | @pytest.mark.trio 77 | async def test_implicit_transaction_failure(trio_engine, mytable): 78 | if ':memory:' in str(trio_engine.sync_engine.url): 79 | pytest.skip(":memory: connections don't persist across threads") 80 | 81 | await trio_engine.execute(CreateTable(mytable)) 82 | 83 | with pytest.raises(RuntimeError): 84 | async with trio_engine.begin() as conn: 85 | assert isinstance(conn, AsyncConnection) 86 | 87 | await conn.execute(mytable.insert()) 88 | result = await conn.execute(mytable.select()) 89 | rows = await result.fetchall() 90 | assert len(rows) == 1 91 | 92 | raise RuntimeError 93 | 94 | # Transaction should have been 
rolled back automatically 95 | result = await trio_engine.execute(mytable.select()) 96 | rows = await result.fetchall() 97 | assert len(rows) == 0 98 | 99 | 100 | @pytest.mark.trio 101 | async def test_implicit_transaction_commit_failure(trio_engine, mytable): 102 | # Patch commit to raise an exception. We can then check that a) the 103 | # transaction is rolled back, and b) that the exception is reraised. 104 | patch_commit = patch.object( 105 | AsyncTransaction, 'commit', side_effect=RuntimeError) 106 | 107 | # Patch a coroutine in place of AsyncioTransaction.rollback that calls 108 | # a Mock which we can later check. 109 | mock_rollback = Mock() 110 | 111 | async def mock_coro(*args, **kwargs): 112 | mock_rollback(*args, **kwargs) 113 | 114 | patch_rollback = patch.object(AsyncTransaction, 'rollback', mock_coro) 115 | 116 | with pytest.raises(RuntimeError): 117 | with patch_commit, patch_rollback: 118 | 119 | async with trio_engine.connect() as conn: 120 | await conn.execute(CreateTable(mytable)) 121 | 122 | async with conn.begin() as trans: 123 | await conn.execute(mytable.insert()) 124 | 125 | assert mock_rollback.call_count == 1 126 | 127 | 128 | @pytest.mark.trio 129 | async def test_execute(trio_engine): 130 | result = await trio_engine.execute(select([1])) 131 | assert await result.scalar() == 1 132 | 133 | 134 | @pytest.mark.trio 135 | async def test_scalar(trio_engine): 136 | assert await trio_engine.scalar(select([1])) == 1 137 | 138 | 139 | @pytest.mark.trio 140 | async def test_has_table(trio_engine, mytable): 141 | assert not await trio_engine.has_table('mytable') 142 | await trio_engine.execute(CreateTable(mytable)) 143 | assert await trio_engine.has_table('mytable') 144 | 145 | 146 | @pytest.mark.trio 147 | async def test_table_names(trio_engine, mytable): 148 | assert await trio_engine.table_names() == [] 149 | await trio_engine.execute(CreateTable(mytable)) 150 | assert await trio_engine.table_names() == ['mytable'] 151 | 152 | 153 | @pytest.mark.trio 154 | async def test_table_names_with_connection(trio_engine, mytable): 155 | conn = await trio_engine.connect() 156 | 157 | # spy on connection to make sure .execute is called 158 | patch_conn = patch.object(conn, '_connection', wraps=conn._connection) 159 | 160 | with patch_conn as mock_conn: 161 | assert await trio_engine.table_names(connection=conn) == [] 162 | await conn.execute(CreateTable(mytable)) 163 | assert await trio_engine.table_names(connection=conn) == ['mytable'] 164 | assert mock_conn.execute.called 165 | 166 | await conn.close() 167 | 168 | 169 | def test_repr(): 170 | trio_engine = create_engine('sqlite://', strategy=TRIO_STRATEGY) 171 | assert repr(trio_engine) == 'TrioEngine' 172 | 173 | 174 | def test_engine_keywords(): 175 | # SQLAlchemy checks which keywords AsyncioEngine expects, so check that 176 | # echo, logging_name, and execution_options are accepted and then passed on 177 | # by AsyncioEngine. 
178 | 179 | with patch('sqlalchemy_aio.base.Engine') as mock_engine: 180 | create_engine('sqlite://', strategy=TRIO_STRATEGY, echo=True, 181 | logging_name='myengine', execution_options=dict()) 182 | 183 | kwargs = mock_engine.call_args[1] 184 | assert {'echo', 'logging_name', 'execution_options'} <= set(kwargs) 185 | 186 | 187 | def test_logger(trio_engine): 188 | assert trio_engine.logger 189 | 190 | 191 | @pytest.mark.xfail(reason='capsys not working with Trio test') 192 | @pytest.mark.trio 193 | async def test_echo(capsys): 194 | trio_engine = create_engine( 195 | 'sqlite://', strategy=TRIO_STRATEGY, echo=True) 196 | await trio_engine.scalar(select([98465])) 197 | captured = capsys.readouterr() 198 | assert '98465' in captured.out 199 | 200 | 201 | @pytest.mark.trio 202 | async def test_run_callable_warning(trio_engine): 203 | meta = MetaData() 204 | with pytest.warns(BlockingWarning, match='sync_engine') as record: 205 | with suppress(NoSuchTableError): 206 | Table('sometable', meta, autoload_with=trio_engine) 207 | 208 | assert len(record) == 1 209 | 210 | with warnings.catch_warnings(): 211 | warnings.simplefilter('error') 212 | with suppress(NoSuchTableError): 213 | Table('sometable', meta, autoload_with=trio_engine.sync_engine) 214 | 215 | 216 | @pytest.mark.trio 217 | async def test_run_visitor_exception(trio_engine, mytable): 218 | with pytest.raises(AttributeError, match='Did you try to use'): 219 | mytable.create(trio_engine) 220 | 221 | mytable.create(trio_engine.sync_engine) 222 | 223 | 224 | @pytest.mark.trio 225 | async def test_sync_cm_exception(trio_engine): 226 | meta = MetaData() 227 | with warnings.catch_warnings(): 228 | # ignore warning caused by creating a runtime that is never awaited 229 | warnings.simplefilter('ignore', RuntimeWarning) 230 | with pytest.raises(TypeError, match='Use async with'): 231 | meta.reflect(trio_engine) 232 | 233 | meta.reflect(trio_engine.sync_engine) 234 | 235 | 236 | @pytest.mark.trio 237 | async def test_event_listen_exception(trio_engine): 238 | with pytest.raises(AttributeError, match='Did you try to use'): 239 | event.listen(trio_engine, 'connect', None) 240 | 241 | 242 | @pytest.mark.trio 243 | async def test_public_run_in_thread(trio_engine): 244 | def fn(*args, **kwargs): 245 | return args, kwargs 246 | 247 | pfn = partial(fn, 1, 2, a=3) 248 | 249 | assert await trio_engine.run_in_thread(pfn) == ((1, 2), {'a': 3}) 250 | assert await trio_engine.run_in_thread(fn, 1) == ((1,), {}) 251 | 252 | # doesn't accept kwargs 253 | with pytest.raises(TypeError): 254 | await trio_engine.run_in_thread(fn, a=1) 255 | 256 | 257 | def test_attribute_error(trio_engine): 258 | with pytest.raises(AttributeError): 259 | trio_engine.spam 260 | -------------------------------------------------------------------------------- /tests/asyncio/test_engine.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | from contextlib import suppress 3 | from functools import partial 4 | from unittest.mock import Mock, patch 5 | 6 | import pytest 7 | from sqlalchemy import MetaData, Table, create_engine, event, select 8 | from sqlalchemy.exc import NoSuchTableError 9 | from sqlalchemy.schema import CreateTable 10 | 11 | from sqlalchemy_aio import ASYNCIO_STRATEGY 12 | from sqlalchemy_aio.asyncio import AsyncioEngine 13 | from sqlalchemy_aio.base import AsyncConnection, AsyncTransaction 14 | from sqlalchemy_aio.exc import BlockingWarning 15 | 16 | pytestmark = pytest.mark.noextras 17 | 18 | 19 | def 
test_create_engine(): 20 | engine = create_engine('sqlite://', strategy=ASYNCIO_STRATEGY) 21 | assert isinstance(engine, AsyncioEngine) 22 | 23 | 24 | @pytest.mark.asyncio 25 | async def test_implicit_loop(): 26 | engine = create_engine('sqlite://', strategy=ASYNCIO_STRATEGY) 27 | assert await engine.scalar(select([1])) == 1 28 | 29 | 30 | @pytest.mark.asyncio 31 | async def test_run_in_thread(asyncio_engine): 32 | def fn(*args, **kwargs): 33 | return args, kwargs 34 | 35 | assert await asyncio_engine._run_in_thread(fn) == ((), {}) 36 | assert await asyncio_engine._run_in_thread(fn, 1, 2, a=3) == ((1, 2), {'a': 3}) 37 | assert await asyncio_engine._run_in_thread(fn, 1) == ((1,), {}) 38 | 39 | # Test that self is passed to the function rather than consumed by the 40 | # method. 41 | assert await asyncio_engine._run_in_thread(fn, self=1) == ((), {'self': 1}) 42 | 43 | 44 | @pytest.mark.asyncio 45 | async def test_connect(asyncio_engine): 46 | conn = await asyncio_engine.connect() 47 | assert isinstance(conn, AsyncConnection) 48 | await conn.close() 49 | 50 | 51 | @pytest.mark.asyncio 52 | async def test_connect_context_manager(asyncio_engine): 53 | async with asyncio_engine.connect() as conn: 54 | assert isinstance(conn, AsyncConnection) 55 | assert conn.closed 56 | 57 | 58 | @pytest.mark.asyncio 59 | async def test_implicit_transaction_success(asyncio_engine, mytable): 60 | if ':memory:' in str(asyncio_engine.sync_engine.url): 61 | pytest.skip(":memory: connections don't persist across threads") 62 | 63 | async with asyncio_engine.begin() as conn: 64 | assert isinstance(conn, AsyncConnection) 65 | 66 | await conn.execute(CreateTable(mytable)) 67 | await conn.execute(mytable.insert()) 68 | result = await conn.execute(mytable.select()) 69 | rows = await result.fetchall() 70 | assert len(rows) == 1 71 | 72 | # Transaction should have been committed automatically 73 | result = await asyncio_engine.execute(mytable.select()) 74 | rows = await result.fetchall() 75 | assert len(rows) == 1 76 | 77 | 78 | @pytest.mark.asyncio 79 | async def test_implicit_transaction_failure(asyncio_engine, mytable): 80 | if ':memory:' in str(asyncio_engine.sync_engine.url): 81 | pytest.skip(":memory: connections don't persist across threads") 82 | 83 | await asyncio_engine.execute(CreateTable(mytable)) 84 | 85 | with pytest.raises(RuntimeError): 86 | async with asyncio_engine.begin() as conn: 87 | assert isinstance(conn, AsyncConnection) 88 | 89 | await conn.execute(mytable.insert()) 90 | result = await conn.execute(mytable.select()) 91 | rows = await result.fetchall() 92 | assert len(rows) == 1 93 | 94 | raise RuntimeError 95 | 96 | # Transaction should have been rolled back automatically 97 | result = await asyncio_engine.execute(mytable.select()) 98 | rows = await result.fetchall() 99 | assert len(rows) == 0 100 | 101 | 102 | @pytest.mark.asyncio 103 | async def test_implicit_transaction_commit_failure(asyncio_engine, mytable): 104 | # Patch commit to raise an exception. We can then check that a) the 105 | # transaction is rolled back, and b) that the exception is reraised. 106 | patch_commit = patch.object( 107 | AsyncTransaction, 'commit', side_effect=RuntimeError) 108 | 109 | # Patch a coroutine in place of AsyncioTransaction.rollback that calls 110 | # a Mock which we can later check. 
111 | mock_rollback = Mock() 112 | 113 | async def mock_coro(*args, **kwargs): 114 | mock_rollback(*args, **kwargs) 115 | 116 | patch_rollback = patch.object(AsyncTransaction, 'rollback', mock_coro) 117 | 118 | with pytest.raises(RuntimeError): 119 | with patch_commit, patch_rollback: 120 | 121 | async with asyncio_engine.connect() as conn: 122 | await conn.execute(CreateTable(mytable)) 123 | 124 | async with conn.begin() as trans: 125 | await conn.execute(mytable.insert()) 126 | 127 | assert mock_rollback.call_count == 1 128 | 129 | 130 | @pytest.mark.asyncio 131 | async def test_execute(asyncio_engine): 132 | result = await asyncio_engine.execute(select([1])) 133 | assert await result.scalar() == 1 134 | 135 | 136 | @pytest.mark.asyncio 137 | async def test_scalar(asyncio_engine): 138 | assert await asyncio_engine.scalar(select([1])) == 1 139 | 140 | 141 | @pytest.mark.asyncio 142 | async def test_has_table(asyncio_engine, mytable): 143 | assert not await asyncio_engine.has_table('mytable') 144 | await asyncio_engine.execute(CreateTable(mytable)) 145 | assert await asyncio_engine.has_table('mytable') 146 | 147 | 148 | @pytest.mark.asyncio 149 | async def test_table_names(asyncio_engine, mytable): 150 | assert await asyncio_engine.table_names() == [] 151 | await asyncio_engine.execute(CreateTable(mytable)) 152 | assert await asyncio_engine.table_names() == ['mytable'] 153 | 154 | 155 | @pytest.mark.asyncio 156 | async def test_table_names_with_connection(asyncio_engine, mytable): 157 | conn = await asyncio_engine.connect() 158 | 159 | # spy on connection to make sure .execute is called 160 | patch_conn = patch.object(conn, '_connection', wraps=conn._connection) 161 | 162 | with patch_conn as mock_conn: 163 | assert await asyncio_engine.table_names(connection=conn) == [] 164 | await conn.execute(CreateTable(mytable)) 165 | assert await asyncio_engine.table_names(connection=conn) == ['mytable'] 166 | assert mock_conn.execute.called 167 | 168 | await conn.close() 169 | 170 | 171 | def test_repr(): 172 | asyncio_engine = create_engine('sqlite://', strategy=ASYNCIO_STRATEGY) 173 | assert repr(asyncio_engine) == 'AsyncioEngine' 174 | 175 | 176 | def test_engine_keywords(): 177 | # SQLAlchemy checks which keywords AsyncioEngine expects, so check that 178 | # echo, logging_name, and execution_options are accepted and then passed on 179 | # by AsyncioEngine. 
180 | 181 | with patch('sqlalchemy_aio.base.Engine') as mock_engine: 182 | create_engine('sqlite://', strategy=ASYNCIO_STRATEGY, echo=True, 183 | logging_name='myengine', execution_options=dict()) 184 | 185 | kwargs = mock_engine.call_args[1] 186 | assert {'echo', 'logging_name', 'execution_options'} <= set(kwargs) 187 | 188 | 189 | def test_logger(asyncio_engine): 190 | assert asyncio_engine.logger 191 | 192 | 193 | @pytest.mark.asyncio 194 | async def test_echo(capsys): 195 | asyncio_engine = create_engine( 196 | 'sqlite://', strategy=ASYNCIO_STRATEGY, echo=True) 197 | await asyncio_engine.scalar(select([98465])) 198 | captured = capsys.readouterr() 199 | assert '98465' in captured.out 200 | 201 | 202 | @pytest.mark.asyncio 203 | async def test_run_callable_warning(asyncio_engine): 204 | meta = MetaData() 205 | with pytest.warns(BlockingWarning, match='sync_engine') as record: 206 | with suppress(NoSuchTableError): 207 | Table('sometable', meta, autoload_with=asyncio_engine) 208 | 209 | assert len(record) == 1 210 | 211 | with warnings.catch_warnings(): 212 | warnings.simplefilter('error') 213 | with suppress(NoSuchTableError): 214 | Table('sometable', meta, autoload_with=asyncio_engine.sync_engine) 215 | 216 | 217 | @pytest.mark.asyncio 218 | async def test_run_visitor_exception(asyncio_engine, mytable): 219 | with pytest.raises(AttributeError, match='Did you try to use'): 220 | mytable.create(asyncio_engine) 221 | 222 | mytable.create(asyncio_engine.sync_engine) 223 | 224 | 225 | @pytest.mark.asyncio 226 | async def test_sync_cm_exception(asyncio_engine): 227 | meta = MetaData() 228 | with warnings.catch_warnings(): 229 | # ignore warning caused by creating a runtime that is never awaited 230 | warnings.simplefilter('ignore', RuntimeWarning) 231 | with pytest.raises(TypeError, match='Use async with'): 232 | meta.reflect(asyncio_engine) 233 | 234 | meta.reflect(asyncio_engine.sync_engine) 235 | 236 | 237 | @pytest.mark.asyncio 238 | async def test_event_listen_exception(asyncio_engine): 239 | with pytest.raises(AttributeError, match='Did you try to use'): 240 | event.listen(asyncio_engine, 'connect', None) 241 | 242 | 243 | @pytest.mark.asyncio 244 | async def test_public_run_in_thread(asyncio_engine): 245 | def fn(*args, **kwargs): 246 | return args, kwargs 247 | 248 | pfn = partial(fn, 1, 2, a=3) 249 | 250 | assert await asyncio_engine.run_in_thread(pfn) == ((1, 2), {'a': 3}) 251 | assert await asyncio_engine.run_in_thread(fn, 1) == ((1,), {}) 252 | 253 | # doesn't accept kwargs 254 | with pytest.raises(TypeError): 255 | await asyncio_engine.run_in_thread(fn, a=1) 256 | 257 | 258 | def test_attribute_error(asyncio_engine): 259 | with pytest.raises(AttributeError): 260 | asyncio_engine.spam 261 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # sqlalchemy_aio documentation build configuration file, created by 4 | # sphinx-quickstart on Wed Oct 12 23:59:14 2016. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | # If extensions (or modules to document with autodoc) are in another directory, 16 | # add these directories to sys.path here. 
If the directory is relative to the 17 | # documentation root, use os.path.abspath to make it absolute, like shown here. 18 | # 19 | # import os 20 | # import sys 21 | # sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | # 27 | # needs_sphinx = '1.0' 28 | 29 | # Add any Sphinx extension module names here, as strings. They can be 30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 31 | # ones. 32 | extensions = [ 33 | 'sphinx.ext.autodoc', 34 | 'sphinx.ext.intersphinx', 35 | 'sphinx.ext.viewcode', 36 | 'sphinx.ext.napoleon', 37 | ] 38 | 39 | # Add any paths that contain templates here, relative to this directory. 40 | templates_path = ['_templates'] 41 | 42 | # The suffix(es) of source filenames. 43 | # You can specify multiple suffix as a list of string: 44 | # 45 | # source_suffix = ['.rst', '.md'] 46 | source_suffix = '.rst' 47 | 48 | # The encoding of source files. 49 | # 50 | # source_encoding = 'utf-8-sig' 51 | 52 | # The master toctree document. 53 | master_doc = 'index' 54 | 55 | # General information about the project. 56 | project = 'sqlalchemy_aio' 57 | copyright = '2018, Frazer McLean, Alex Gaynor, and David Reid' 58 | author = 'Frazer McLean' 59 | 60 | # The version info for the project you're documenting, acts as replacement for 61 | # |version| and |release|, also used in various other places throughout the 62 | # built documents. 63 | # 64 | # The short X.Y version. 65 | version = '0.1' 66 | # The full version, including alpha/beta/rc tags. 67 | release = '0.1' 68 | 69 | # The language for content autogenerated by Sphinx. Refer to documentation 70 | # for a list of supported languages. 71 | # 72 | # This is also used if you do content translation via gettext catalogs. 73 | # Usually you set "language" from the command line for these cases. 74 | language = None 75 | 76 | # There are two options for replacing |today|: either, you set today to some 77 | # non-false value, then it is used: 78 | # 79 | # today = '' 80 | # 81 | # Else, today_fmt is used as the format for a strftime call. 82 | # 83 | # today_fmt = '%B %d, %Y' 84 | 85 | # List of patterns, relative to source directory, that match files and 86 | # directories to ignore when looking for source files. 87 | # This patterns also effect to html_static_path and html_extra_path 88 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 89 | 90 | # The reST default role (used for this markup: `text`) to use for all 91 | # documents. 92 | # 93 | # default_role = None 94 | 95 | # If true, '()' will be appended to :func: etc. cross-reference text. 96 | # 97 | # add_function_parentheses = True 98 | 99 | # If true, the current module name will be prepended to all description 100 | # unit titles (such as .. function::). 101 | # 102 | # add_module_names = True 103 | 104 | # If true, sectionauthor and moduleauthor directives will be shown in the 105 | # output. They are ignored by default. 106 | # 107 | # show_authors = False 108 | 109 | # The name of the Pygments (syntax highlighting) style to use. 110 | pygments_style = 'sphinx' 111 | 112 | # A list of ignored prefixes for module index sorting. 113 | # modindex_common_prefix = [] 114 | 115 | # If true, keep warnings as "system message" paragraphs in the built documents. 116 | # keep_warnings = False 117 | 118 | # If true, `todo` and `todoList` produce output, else they produce nothing. 
119 | todo_include_todos = False 120 | 121 | 122 | # -- Options for HTML output ---------------------------------------------- 123 | 124 | # The theme to use for HTML and HTML Help pages. See the documentation for 125 | # a list of builtin themes. 126 | # 127 | import sphinx_rtd_theme 128 | html_theme = 'sphinx_rtd_theme' 129 | 130 | # Theme options are theme-specific and customize the look and feel of a theme 131 | # further. For a list of options available for each theme, see the 132 | # documentation. 133 | # 134 | # html_theme_options = {} 135 | 136 | # Add any paths that contain custom themes here, relative to this directory. 137 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 138 | 139 | # The name for this set of Sphinx documents. 140 | # " v documentation" by default. 141 | # 142 | # html_title = 'sqlalchemy_aio v0.1' 143 | 144 | # A shorter title for the navigation bar. Default is the same as html_title. 145 | # 146 | # html_short_title = None 147 | 148 | # The name of an image file (relative to this directory) to place at the top 149 | # of the sidebar. 150 | # 151 | # html_logo = None 152 | 153 | # The name of an image file (relative to this directory) to use as a favicon of 154 | # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 155 | # pixels large. 156 | # 157 | # html_favicon = None 158 | 159 | # Add any paths that contain custom static files (such as style sheets) here, 160 | # relative to this directory. They are copied after the builtin static files, 161 | # so a file named "default.css" will overwrite the builtin "default.css". 162 | html_static_path = ['_static'] 163 | 164 | # Add any extra paths that contain custom files (such as robots.txt or 165 | # .htaccess) here, relative to this directory. These files are copied 166 | # directly to the root of the documentation. 167 | # 168 | # html_extra_path = [] 169 | 170 | # If not None, a 'Last updated on:' timestamp is inserted at every page 171 | # bottom, using the given strftime format. 172 | # The empty string is equivalent to '%b %d, %Y'. 173 | # 174 | # html_last_updated_fmt = None 175 | 176 | # If true, SmartyPants will be used to convert quotes and dashes to 177 | # typographically correct entities. 178 | # 179 | # html_use_smartypants = True 180 | 181 | # Custom sidebar templates, maps document names to template names. 182 | # 183 | # html_sidebars = {} 184 | 185 | # Additional templates that should be rendered to pages, maps page names to 186 | # template names. 187 | # 188 | # html_additional_pages = {} 189 | 190 | # If false, no module index is generated. 191 | # 192 | # html_domain_indices = True 193 | 194 | # If false, no index is generated. 195 | # 196 | # html_use_index = True 197 | 198 | # If true, the index is split into individual pages for each letter. 199 | # 200 | # html_split_index = False 201 | 202 | # If true, links to the reST sources are added to the pages. 203 | # 204 | # html_show_sourcelink = True 205 | 206 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 207 | # 208 | # html_show_sphinx = True 209 | 210 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 211 | # 212 | # html_show_copyright = True 213 | 214 | # If true, an OpenSearch description file will be output, and all pages will 215 | # contain a tag referring to it. The value of this option must be the 216 | # base URL from which the finished HTML is served. 
217 | # 218 | # html_use_opensearch = '' 219 | 220 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 221 | # html_file_suffix = None 222 | 223 | # Language to be used for generating the HTML full-text search index. 224 | # Sphinx supports the following languages: 225 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' 226 | # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh' 227 | # 228 | # html_search_language = 'en' 229 | 230 | # A dictionary with options for the search language support, empty by default. 231 | # 'ja' uses this config value. 232 | # 'zh' user can custom change `jieba` dictionary path. 233 | # 234 | # html_search_options = {'type': 'default'} 235 | 236 | # The name of a javascript file (relative to the configuration directory) that 237 | # implements a search results scorer. If empty, the default will be used. 238 | # 239 | # html_search_scorer = 'scorer.js' 240 | 241 | # Output file base name for HTML help builder. 242 | htmlhelp_basename = 'sqlalchemy_aiodoc' 243 | 244 | # -- Options for LaTeX output --------------------------------------------- 245 | 246 | latex_elements = { 247 | # The paper size ('letterpaper' or 'a4paper'). 248 | # 249 | # 'papersize': 'letterpaper', 250 | 251 | # The font size ('10pt', '11pt' or '12pt'). 252 | # 253 | # 'pointsize': '10pt', 254 | 255 | # Additional stuff for the LaTeX preamble. 256 | # 257 | # 'preamble': '', 258 | 259 | # Latex figure (float) alignment 260 | # 261 | # 'figure_align': 'htbp', 262 | } 263 | 264 | # Grouping the document tree into LaTeX files. List of tuples 265 | # (source start file, target name, title, 266 | # author, documentclass [howto, manual, or own class]). 267 | latex_documents = [ 268 | (master_doc, 'sqlalchemy_aio.tex', 'sqlalchemy\\_aio Documentation', 269 | 'Frazer McLean', 'manual'), 270 | ] 271 | 272 | # The name of an image file (relative to this directory) to place at the top of 273 | # the title page. 274 | # 275 | # latex_logo = None 276 | 277 | # For "manual" documents, if this is true, then toplevel headings are parts, 278 | # not chapters. 279 | # 280 | # latex_use_parts = False 281 | 282 | # If true, show page references after internal links. 283 | # 284 | # latex_show_pagerefs = False 285 | 286 | # If true, show URL addresses after external links. 287 | # 288 | # latex_show_urls = False 289 | 290 | # Documents to append as an appendix to all manuals. 291 | # 292 | # latex_appendices = [] 293 | 294 | # It false, will not define \strong, \code, itleref, \crossref ... but only 295 | # \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added 296 | # packages. 297 | # 298 | # latex_keep_old_macro_names = True 299 | 300 | # If false, no module index is generated. 301 | # 302 | # latex_domain_indices = True 303 | 304 | 305 | # -- Options for manual page output --------------------------------------- 306 | 307 | # One entry per manual page. List of tuples 308 | # (source start file, name, description, authors, manual section). 309 | man_pages = [ 310 | (master_doc, 'sqlalchemy_aio', 'sqlalchemy_aio Documentation', 311 | [author], 1) 312 | ] 313 | 314 | # If true, show URL addresses after external links. 315 | # 316 | # man_show_urls = False 317 | 318 | 319 | # -- Options for Texinfo output ------------------------------------------- 320 | 321 | # Grouping the document tree into Texinfo files. 
List of tuples 322 | # (source start file, target name, title, author, 323 | # dir menu entry, description, category) 324 | texinfo_documents = [ 325 | (master_doc, 'sqlalchemy_aio', 'sqlalchemy_aio Documentation', 326 | author, 'sqlalchemy_aio', 'Asyncio strategy for SQLAlchemy.', 327 | 'Miscellaneous'), 328 | ] 329 | 330 | # Documents to append as an appendix to all manuals. 331 | # 332 | # texinfo_appendices = [] 333 | 334 | # If false, no module index is generated. 335 | # 336 | # texinfo_domain_indices = True 337 | 338 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 339 | # 340 | # texinfo_show_urls = 'footnote' 341 | 342 | # If true, do not generate a @detailmenu in the "Top" node's menu. 343 | # 344 | # texinfo_no_detailmenu = False 345 | 346 | 347 | # Example configuration for intersphinx: refer to the Python standard library. 348 | intersphinx_mapping = { 349 | 'https://docs.python.org/': None, 350 | 'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None), 351 | } 352 | -------------------------------------------------------------------------------- /sqlalchemy_aio/base.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | import weakref 3 | from abc import ABC, abstractmethod 4 | from collections.abc import Coroutine 5 | 6 | from represent import ReprHelper 7 | from sqlalchemy.engine import Engine 8 | from sqlalchemy.exc import StatementError 9 | from sqlalchemy import util 10 | from sqlalchemy.log import Identified 11 | 12 | from .exc import AlreadyQuit, BlockingWarning 13 | 14 | 15 | class AsyncEngine(Identified, ABC): 16 | def __init__(self, pool, dialect, url, logging_name=None, echo=None, 17 | execution_options=None, **kwargs): 18 | self._engine = Engine( 19 | pool, dialect, url, logging_name=logging_name, echo=echo, 20 | execution_options=execution_options, **kwargs) 21 | 22 | self._engine_worker = None 23 | 24 | @abstractmethod 25 | def _make_worker(self, *, branch_from=None): 26 | raise NotImplementedError 27 | 28 | async def _run_in_thread(_self, _func, *args, **kwargs): 29 | """Unlike the public-facing `run_in_thread` method, we want this one 30 | to let us call SQLAlchemy methods like normal internally. 31 | """ 32 | if _self._engine_worker is None: 33 | _self._engine_worker = _self._make_worker() 34 | 35 | return await _self._engine_worker.run(_func, args, kwargs) 36 | 37 | async def run_in_thread(self, func, *args): 38 | """Run a synchronous function in the engine's worker thread. 39 | 40 | Example: 41 | The following blocking function: 42 | 43 | .. code-block:: python 44 | 45 | some_fn(engine.sync_engine) 46 | 47 | can be called like this instead: 48 | 49 | .. code-block:: python 50 | 51 | await engine.run_in_thread(some_fn, engine.sync_engine) 52 | 53 | Parameters: 54 | func: A synchronous function. 55 | args: Positional arguments to be passed to `func`. If you need to 56 | pass keyword arguments, then use :func:`functools.partial`. 
57 | """ 58 | if self._engine_worker is None: 59 | self._engine_worker = self._make_worker() 60 | 61 | return await self._engine_worker.run(func, args) 62 | 63 | @property 64 | def dialect(self): 65 | return self._engine.dialect 66 | 67 | @property 68 | def hide_parameters(self): 69 | return self._engine.hide_parameters 70 | 71 | @property 72 | def _has_events(self): 73 | return self._engine._has_events 74 | 75 | @property 76 | def logger(self): 77 | return self._engine.logger 78 | 79 | @property 80 | def _execution_options(self): 81 | return self._engine._execution_options 82 | 83 | @property 84 | def sync_engine(self): 85 | """Public property of the underlying SQLAlchemy engine.""" 86 | return self._engine 87 | 88 | def connect(self): 89 | """Like :meth:`Engine.connect `, but 90 | returns an awaitable that can also be used as an asynchronous context 91 | manager. 92 | 93 | Examples: 94 | .. code-block:: python 95 | 96 | conn = await engine.connect() 97 | await conn.execute(...) 98 | await conn.close() 99 | 100 | .. code-block:: python 101 | 102 | async with engine.connect() as conn: 103 | await conn.execute(...) 104 | """ 105 | return _ConnectionContextManager(self._make_async_connection()) 106 | 107 | async def _make_async_connection(self): 108 | worker = self._make_worker() 109 | try: 110 | connection = await worker.run(self._engine.connect) 111 | except Exception: 112 | await worker.quit() 113 | raise 114 | return AsyncConnection(connection, worker, self) 115 | 116 | def begin(self, close_with_result=False): 117 | """Like :meth:`Engine.begin `, but 118 | returns an asynchronous context manager. 119 | 120 | Example: 121 | .. code-block:: python 122 | 123 | async with engine.begin(): 124 | await engine.execute(...) 125 | """ 126 | return _EngineTransactionContextManager(self, close_with_result) 127 | 128 | async def execute(self, *args, **kwargs): 129 | """Like :meth:`Engine.execute `, but 130 | is a coroutine that returns an :class:`AsyncioResultProxy`. 131 | 132 | Example: 133 | .. code-block:: python 134 | 135 | result = await engine.execute(...) 136 | data = await result.fetchall() 137 | 138 | .. warning:: 139 | 140 | Make sure to explicitly call :meth:`AsyncioResultProxy.close` if the 141 | :class:`~sqlalchemy.engine.ResultProxy` has pending rows remaining 142 | otherwise it will be closed during garbage collection. With SQLite, 143 | this will raise an exception since the DBAPI connection was created 144 | in a different thread. 145 | """ 146 | rp = await self._run_in_thread(self._engine.execute, *args, **kwargs) 147 | return AsyncResultProxy(rp, self._run_in_thread) 148 | 149 | async def scalar(self, *args, **kwargs): 150 | """Like :meth:`Connection.scalar `, 151 | but is a coroutine. 152 | """ 153 | rp = await self.execute(*args, **kwargs) 154 | return await rp.scalar() 155 | 156 | async def has_table(self, table_name, schema=None): 157 | """Like :meth:`Engine.has_table `, 158 | but is a coroutine. 159 | """ 160 | return await self._run_in_thread( 161 | self._engine.has_table, table_name, schema) 162 | 163 | async def table_names( 164 | self, schema=None, connection: 'AsyncConnection' = None): 165 | """Like :meth:`Engine.table_names `, 166 | but is a coroutine. 
167 | """ 168 | run_in_thread = self._run_in_thread 169 | 170 | if connection is not None: 171 | run_in_thread = connection._run_in_thread 172 | connection = connection._connection 173 | 174 | return await run_in_thread(self._engine.table_names, schema, connection) 175 | 176 | def run_callable(self, callable_, *args, **kwargs): 177 | """Like :meth:`Engine.run_callable\ 178 | `. 179 | 180 | .. warning:: 181 | 182 | This method blocks. It exists so that we can warn the user if 183 | they try to use an async engine for table reflection: 184 | 185 | .. code-block:: python 186 | 187 | Table(..., autoload_with=engine) 188 | """ 189 | warnings.warn( 190 | 'The AsyncEngine has been called in a blocking fashion, e.g. with ' 191 | 'Table(..., autoload_with=engine). You may wish to run it in a ' 192 | 'separate thread to avoid blocking the event loop. You can use ' 193 | 'Table(..., autoload_with=engine.sync_engine) to opt out of the ' 194 | 'warning for this blocking behaviour.', 195 | BlockingWarning) 196 | self._engine.run_callable(callable_, *args, **kwargs) 197 | 198 | def __repr__(self): 199 | r = ReprHelper(self) 200 | r.parantheses = ('<', '>') 201 | r.positional_from_attr('_engine') 202 | return str(r) 203 | 204 | def __getattr__(self, item): 205 | msg = '{!r} object has no attribute {!r}.'.format( 206 | self.__class__.__name__, item) 207 | 208 | if item == '_run_visitor': 209 | raise AttributeError( 210 | msg + ' Did you try to use Table.create(engine) or similar? ' 211 | 'You must use Table.create(engine.sync_engine) instead, which' 212 | 'is a blocking function. Consider using sqlalchemy.schema.' 213 | 'CreateTable instead' 214 | ) 215 | elif item == 'dispatch': 216 | raise AttributeError( 217 | msg + ' Did you try to use event.listen(engine, ...)? You must ' 218 | 'use event.listen(engine.sync_engine, ...) instead.' 219 | ) 220 | 221 | raise AttributeError(msg) 222 | 223 | 224 | class AsyncConnection: 225 | """Mostly like :class:`sqlalchemy.engine.Connection` except some of the 226 | methods are coroutines. 227 | """ 228 | def __init__(self, connection, worker, engine): 229 | self._connection = connection 230 | self._worker = worker 231 | self._engine_ref = weakref.ref(engine) 232 | 233 | async def _run_in_thread(_self, _func, *args, **kwargs): 234 | return await _self._worker.run(_func, args, kwargs) 235 | 236 | async def run_in_thread(self, func, *args): 237 | """Run a synchronous function in the connection's worker thread. 238 | 239 | Example: 240 | The following blocking function: 241 | 242 | .. code-block:: python 243 | 244 | some_fn(conn.sync_connection) 245 | 246 | can be called like this instead: 247 | 248 | .. code-block:: python 249 | 250 | await engine.run_in_thread(some_fn, conn.sync_connection) 251 | 252 | Parameters: 253 | func: A synchronous function. 254 | args: Positional arguments to be passed to `func`. If you need to 255 | pass keyword arguments, then use :func:`functools.partial`. 256 | """ 257 | return await self._worker.run(func, args) 258 | 259 | @property 260 | def _engine(self): 261 | return self._engine_ref() 262 | 263 | @property 264 | def sync_connection(self): 265 | """Public property of the underlying SQLAlchemy connection.""" 266 | return self._connection 267 | 268 | @property 269 | def dialect(self): 270 | return self._connection.dialect 271 | 272 | async def execute(self, *args, **kwargs): 273 | """Like :meth:`Connection.execute `, 274 | but is a coroutine that returns an :class:`AsyncioResultProxy`. 275 | 276 | Example: 277 | .. 
code-block:: python 278 | 279 | result = await conn.execute(...) 280 | data = await result.fetchall() 281 | 282 | .. warning:: 283 | 284 | Make sure to explicitly call :meth:`AsyncioResultProxy.close` if the 285 | :class:`~sqlalchemy.engine.ResultProxy` has pending rows remaining 286 | otherwise it will be closed during garbage collection. With SQLite, 287 | this will raise an exception since the DBAPI connection was created 288 | in a different thread. 289 | """ 290 | try: 291 | rp = await self._run_in_thread( 292 | self._connection.execute, *args, **kwargs) 293 | except AlreadyQuit: 294 | raise StatementError("This Connection is closed.", None, None, None) 295 | 296 | return AsyncResultProxy(rp, self._run_in_thread) 297 | 298 | def connect(self): 299 | """Like :meth:`Connection.connect `, 300 | but is a coroutine. 301 | """ 302 | return _ConnectionContextManager(self._make_async_connection()) 303 | 304 | async def _make_async_connection(self): 305 | worker = self._engine._make_worker(branch_from=self._worker) 306 | try: 307 | connection = await worker.run(self._connection.connect) 308 | except Exception: 309 | await worker.quit() 310 | raise 311 | return AsyncConnection(connection, worker, self._engine) 312 | 313 | async def scalar(self, *args, **kwargs): 314 | """Like :meth:`Connection.scalar `, 315 | but is a coroutine. 316 | """ 317 | rp = await self.execute(*args, **kwargs) 318 | return await rp.scalar() 319 | 320 | async def close(self, *args, **kwargs): 321 | """Like :meth:`Connection.close `, 322 | but is a coroutine. 323 | """ 324 | try: 325 | res = await self._run_in_thread( 326 | self._connection.close, *args, **kwargs) 327 | await self._worker.quit() 328 | except AlreadyQuit: 329 | raise StatementError("This Connection is closed.", None, None, None) 330 | 331 | return res 332 | 333 | @property 334 | def closed(self): 335 | """Like the :attr:`Connection.closed\ 336 | ` attribute. 337 | """ 338 | return self._connection.closed 339 | 340 | def begin(self): 341 | """Like :meth:`Connection.begin `, 342 | but returns an awaitable that can also be used as an asynchronous 343 | context manager. 344 | 345 | Examples: 346 | .. code-block:: python 347 | 348 | async with conn.begin() as trans: 349 | await conn.execute(...) 350 | await conn.execute(...) 351 | 352 | .. code-block:: python 353 | 354 | trans = await conn.begin(): 355 | await conn.execute(...) 356 | await conn.execute(...) 357 | await trans.commit() 358 | """ 359 | return _TransactionContextManager(self._begin()) 360 | 361 | async def _begin(self): 362 | try: 363 | transaction = await self._run_in_thread(self._connection.begin) 364 | except AlreadyQuit: 365 | raise StatementError("This Connection is closed.", None, None, None) 366 | 367 | return AsyncTransaction(transaction, self._run_in_thread) 368 | 369 | def begin_nested(self): 370 | """Like :meth:`Connection.begin_nested\ 371 | `, but returns an awaitable 372 | that can also be used as an asynchronous context manager. 373 | 374 | .. seealso:: :meth:`begin` for examples. 375 | """ 376 | return _TransactionContextManager(self._begin_nested()) 377 | 378 | async def _begin_nested(self): 379 | try: 380 | transaction = await self._run_in_thread(self._connection.begin_nested) 381 | except AlreadyQuit: 382 | raise StatementError("This Connection is closed.", None, None, None) 383 | 384 | return AsyncTransaction(transaction, self._run_in_thread) 385 | 386 | def in_transaction(self): 387 | """Like :meth:`Connection.in_transaction\ 388 | `. 
389 | """ 390 | return self._connection.in_transaction() 391 | 392 | def run_callable(self, callable_, *args, **kwargs): 393 | """Like :meth:`Connection.run_callable\ 394 | `. 395 | 396 | .. warning:: 397 | 398 | This method blocks. It exists so that we can warn the user if 399 | they try to use an async connection for table reflection: 400 | 401 | .. code-block:: python 402 | 403 | Table(..., autoload_with=connection) 404 | """ 405 | warnings.warn( 406 | 'The AsyncConnection has been called in a blocking fashion, e.g. ' 407 | 'with Table(..., autoload_with=connection). You may wish to run it ' 408 | 'in a separate thread to avoid blocking the event loop. You can ' 409 | 'use Table(..., autoload_with=connection.sync_connection) to opt ' 410 | 'out of the warning for this blocking behaviour.', 411 | BlockingWarning) 412 | self._connection.run_callable(callable_, *args, **kwargs) 413 | 414 | def __getattr__(self, item): 415 | msg = '{!r} object has no attribute {!r}.'.format( 416 | self.__class__.__name__, item) 417 | 418 | if item == '_run_visitor': 419 | raise AttributeError( 420 | msg + ' Did you try to use Table.create(connection) or ' 421 | 'similar? You must use Table.create(connection.sync_connection' 422 | ') instead, which is a blocking function. Consider using ' 423 | 'sqlalchemy.schema.CreateTable instead.' 424 | ) 425 | elif item == 'dispatch': 426 | raise AttributeError( 427 | msg + ' Did you try to use event.listen(connection, ...)? You' 428 | 'must use event.listen(connection.sync_connection, ...) instead.' 429 | ) 430 | 431 | raise AttributeError(msg) 432 | 433 | 434 | class AsyncTransaction: 435 | """Mostly like :class:`sqlalchemy.engine.Transaction` except some of the 436 | methods are coroutines. 437 | """ 438 | def __init__(self, transaction, run_in_thread): 439 | self._transaction = transaction 440 | self._run_in_thread = run_in_thread 441 | 442 | async def commit(self): 443 | """Like :meth:`Transaction.commit `, 444 | but is a coroutine. 445 | """ 446 | return await self._run_in_thread(self._transaction.commit) 447 | 448 | async def rollback(self): 449 | """Like :meth:`Transaction.rollback `, 450 | but is a coroutine. 451 | """ 452 | return await self._run_in_thread(self._transaction.rollback) 453 | 454 | async def close(self): 455 | """Like :meth:`Transaction.close `, 456 | but is a coroutine. 457 | """ 458 | return await self._run_in_thread(self._transaction.close) 459 | 460 | 461 | class _AsyncResultProxyIterator: 462 | def __init__(self, result_proxy, run_in_thread): 463 | self._result_proxy = result_proxy 464 | self._run_in_thread = run_in_thread 465 | 466 | def __aiter__(self): 467 | return self 468 | 469 | async def __anext__(self): 470 | row = await self._run_in_thread(self._result_proxy.fetchone) 471 | if row is None: 472 | raise StopAsyncIteration() 473 | else: 474 | return row 475 | 476 | 477 | class AsyncResultProxy: 478 | """Mostly like :class:`sqlalchemy.engine.ResultProxy` except some of the 479 | methods are coroutines. 480 | """ 481 | def __init__(self, result_proxy, run_in_thread): 482 | self._result_proxy = result_proxy 483 | self._run_in_thread = run_in_thread 484 | 485 | def __aiter__(self): 486 | return _AsyncResultProxyIterator( 487 | self._result_proxy, 488 | self._run_in_thread) 489 | 490 | async def fetchone(self): 491 | """Like :meth:`ResultProxy.fetchone\ 492 | `, but is a coroutine. 
493 | """ 494 | return await self._run_in_thread(self._result_proxy.fetchone) 495 | 496 | async def fetchmany(self, size=None): 497 | """Like :meth:`ResultProxy.fetchmany\ 498 | `, but is a coroutine. 499 | """ 500 | return await self._run_in_thread(self._result_proxy.fetchmany, size=size) 501 | 502 | async def fetchall(self): 503 | """Like :meth:`ResultProxy.fetchall\ 504 | `, but is a coroutine. 505 | """ 506 | return await self._run_in_thread(self._result_proxy.fetchall) 507 | 508 | async def scalar(self): 509 | """Like :meth:`ResultProxy.scalar\ 510 | `, but is a coroutine. 511 | """ 512 | return await self._run_in_thread(self._result_proxy.scalar) 513 | 514 | async def first(self): 515 | """Like :meth:`ResultProxy.first\ 516 | `, but is a coroutine. 517 | """ 518 | return await self._run_in_thread(self._result_proxy.first) 519 | 520 | async def keys(self): 521 | """Like :meth:`ResultProxy.keys\ 522 | `, but is a coroutine. 523 | """ 524 | return await self._run_in_thread(self._result_proxy.keys) 525 | 526 | async def close(self): 527 | """Like :meth:`ResultProxy.close\ 528 | `, but is a coroutine. 529 | """ 530 | return await self._run_in_thread(self._result_proxy.close) 531 | 532 | @property 533 | def returns_rows(self): 534 | """Like the :attr:`ResultProxy.returns_rows\ 535 | ` attribute. 536 | """ 537 | return self._result_proxy.returns_rows 538 | 539 | @property 540 | def rowcount(self): 541 | """Like the :attr:`ResultProxy.rowcount\ 542 | ` attribute. 543 | """ 544 | return self._result_proxy.rowcount 545 | 546 | @property 547 | def inserted_primary_key(self): 548 | """Like the :attr:`ResultProxy.inserted_primary_key\ 549 | ` attribute. 550 | """ 551 | return self._result_proxy.inserted_primary_key 552 | 553 | 554 | class _BaseContextManager(Coroutine): 555 | """Allow ``async with `` or ``await ``.""" 556 | __slots__ = ('_coro', '_result') 557 | 558 | def __init__(self, coro): 559 | self._coro = coro 560 | self._result = None 561 | 562 | def send(self, value): 563 | return self._coro.send(value) 564 | 565 | def throw(self, typ, val=None, tb=None): 566 | if val is None: 567 | return self._coro.throw(typ) 568 | elif tb is None: 569 | return self._coro.throw(typ, val) 570 | else: 571 | return self._coro.throw(typ, val, tb) 572 | 573 | def close(self): 574 | return self._coro.close() 575 | 576 | def __await__(self): 577 | return self._coro.__await__() 578 | 579 | async def __aenter__(self): 580 | self._result = await self._coro 581 | return self._result 582 | 583 | def __enter__(self): 584 | raise TypeError( 585 | 'Use async with instead. This error can occur when trying to ' 586 | 'pass the AsyncEngine to something that expects a normal engine, ' 587 | 'e.g. MetaData.reflect(). You can use engine.sync_engine, but be ' 588 | 'aware that the function will block. You should run it in a ' 589 | 'separate thread. 
See also connection.sync_connection' 590 | ) 591 | 592 | def __exit__(self, exc_type, exc_val, exc_tb): 593 | pass 594 | 595 | 596 | class _ConnectionContextManager(_BaseContextManager): 597 | async def __aexit__(self, exc_type, exc_val, exc_tb): 598 | await self._result.close() 599 | 600 | 601 | class _TransactionContextManager(_BaseContextManager): 602 | async def __aexit__(self, exc_type, exc_val, exc_tb): 603 | if exc_type is None and self._result._transaction.is_active: 604 | try: 605 | await self._result.commit() 606 | except: 607 | with util.safe_reraise(): 608 | await self._result.rollback() 609 | else: 610 | await self._result.rollback() 611 | 612 | 613 | class _EngineTransactionContextManager: 614 | __slots__ = ('_engine', '_close_with_result', '_context', '_worker') 615 | 616 | def __init__(self, engine: AsyncEngine, close_with_result): 617 | self._engine = engine 618 | self._close_with_result = close_with_result 619 | self._worker = self._engine._make_worker() 620 | 621 | async def _run_in_thread(_self, _func, *args, **kwargs): 622 | return await _self._worker.run(_func, args, kwargs) 623 | 624 | async def __aenter__(self): 625 | self._context = await self._run_in_thread( 626 | self._engine._engine.begin, self._close_with_result) 627 | 628 | conn = await self._run_in_thread(self._context.__enter__) 629 | return AsyncConnection(conn, self._worker, self._engine) 630 | 631 | async def __aexit__(self, exc_type, exc_val, exc_tb): 632 | return await self._run_in_thread( 633 | self._context.__exit__, exc_type, exc_val, exc_tb) 634 | 635 | 636 | class ThreadWorker(ABC): 637 | @abstractmethod 638 | async def run(self, func, args=(), kwargs=None): 639 | raise NotImplementedError 640 | 641 | @abstractmethod 642 | async def quit(self): 643 | raise NotImplementedError 644 | --------------------------------------------------------------------------------
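
For reference, the pieces above fit together as in the following minimal, end-to-end sketch of the public API exercised by tests/asyncio/test_engine.py and documented in sqlalchemy_aio/base.py. It is illustrative only: it assumes sqlalchemy_aio is installed against a SQLAlchemy 1.3.x release that still supports the ``strategy`` keyword to create_engine, and Python 3.7+ for asyncio.run. The file name example.db, the users table, and main() are made up for the example; a file-backed database is used because, as the tests note, sqlite :memory: connections don't persist across worker threads.

import asyncio

from sqlalchemy import Column, Integer, MetaData, Table, create_engine, select
from sqlalchemy.schema import CreateTable

from sqlalchemy_aio import ASYNCIO_STRATEGY


async def main():
    # The strategy keyword selects the AsyncioEngine wrapper around a normal
    # SQLAlchemy Engine (see sqlalchemy_aio/strategy.py).
    engine = create_engine('sqlite:///example.db', strategy=ASYNCIO_STRATEGY)

    metadata = MetaData()
    users = Table('users', metadata, Column('id', Integer, primary_key=True))

    async with engine.connect() as conn:
        # DDL goes through the async connection like any other statement.
        await conn.execute(CreateTable(users))

        # AsyncTransaction: commits on normal exit, rolls back on exception.
        async with conn.begin():
            await conn.execute(users.insert().values(id=1))

        # AsyncResultProxy: the fetch methods are coroutines.
        result = await conn.execute(select([users.c.id]))
        print(await result.fetchall())


asyncio.run(main())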