├── aiopg ├── py.typed ├── sa │ ├── utils.py │ ├── __init__.py │ ├── exc.py │ ├── transaction.py │ └── engine.py ├── log.py ├── __init__.py └── utils.py ├── docs ├── contributing.rst ├── essays.rst ├── _static │ └── aiopg-icon.png ├── changes.rst ├── misc.rst ├── team.rst ├── run_loop.rst ├── examples.rst ├── glossary.rst ├── index.rst ├── Makefile ├── make.bat └── conf.py ├── .pyup.yml ├── setup.cfg ├── .coveragerc ├── MANIFEST.in ├── .isort.cfg ├── MAINTAINERS.txt ├── .gitignore ├── .readthedocs.yml ├── .github ├── dependabot.yml ├── ISSUE_TEMPLATE │ ├── config.yml │ ├── feature_request.yml │ └── bug_report.yml └── workflows │ └── ci.yml ├── CONTRIBUTORS.txt ├── requirements.txt ├── examples ├── simple.py ├── simple_sa.py ├── transaction.py ├── notify.py ├── named_field_sa.py ├── default_field_sa.py ├── types_field_sa.py ├── simple_sa_transaction.py ├── sa.py └── isolation_sa_transaction.py ├── tests ├── test_version.py ├── test_sa_result.py ├── test_extended_types.py ├── dump.txt ├── test_sa_cursor.py ├── test_sa_distil.py ├── test_isolation_level.py ├── test_utils.py ├── test_sa_priority_name.py ├── test_sa_default.py ├── test_sa_types.py ├── test_sa_engine.py ├── test_async_transaction.py ├── test_transaction.py ├── test_async_await.py ├── test_cursor.py ├── test_sa_connection.py └── test_sa_transaction.py ├── LICENSE ├── Makefile ├── README.rst ├── CONTRIBUTING.rst ├── setup.py └── CHANGES.txt /aiopg/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/contributing.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-contributing: 2 | 3 | .. 
include:: ../CONTRIBUTING.rst 4 | -------------------------------------------------------------------------------- /aiopg/sa/utils.py: -------------------------------------------------------------------------------- 1 | import sqlalchemy 2 | 3 | SQLALCHEMY_VERSION = sqlalchemy.__version__.split(".") 4 | -------------------------------------------------------------------------------- /docs/essays.rst: -------------------------------------------------------------------------------- 1 | Essays 2 | ====== 3 | 4 | 5 | .. toctree:: 6 | 7 | one_cursor 8 | run_loop 9 | -------------------------------------------------------------------------------- /.pyup.yml: -------------------------------------------------------------------------------- 1 | # Label PRs with `deps-update` label 2 | label_prs: deps-update 3 | 4 | schedule: every week 5 | -------------------------------------------------------------------------------- /docs/_static/aiopg-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aio-libs/aiopg/HEAD/docs/_static/aiopg-icon.png -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [tool:pytest] 2 | timeout = 300 3 | 4 | [coverage:run] 5 | branch = true 6 | source = aiopg,tests 7 | -------------------------------------------------------------------------------- /docs/changes.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-changes: 2 | 3 | ========= 4 | Changelog 5 | ========= 6 | 7 | 8 | .. 
include:: ../CHANGES.txt 9 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = aiopg, tests 4 | omit = site-packages 5 | 6 | [html] 7 | directory = htmlcov 8 | -------------------------------------------------------------------------------- /aiopg/log.py: -------------------------------------------------------------------------------- 1 | """Logging configuration.""" 2 | 3 | import logging 4 | 5 | # Name the logger after the package. 6 | logger = logging.getLogger(__package__) 7 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include CHANGES.txt 3 | include README.rst 4 | include MAINTAINERS.txt 5 | graft aiopg 6 | global-exclude *.pyc 7 | exclude tests/** 8 | -------------------------------------------------------------------------------- /.isort.cfg: -------------------------------------------------------------------------------- 1 | [settings] 2 | line_length=79 3 | multi_line_output=3 4 | include_trailing_comma=True 5 | indent=' ' 6 | sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -------------------------------------------------------------------------------- /MAINTAINERS.txt: -------------------------------------------------------------------------------- 1 | * Andrew Svetlov 2 | * Alexey Firsov 3 | * Alexey Popravka 4 | * Yury Pliner 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | *.pyc 3 | __pycache__ 4 | dist 5 | build 6 | *.egg-info 7 | .coverage 8 | .coverage.* 9 | htmlcov 10 | docs/_build 11 | .cache 12 | .pytest_cache 13 | .python-version 14 | venv 15 | 
-------------------------------------------------------------------------------- /docs/misc.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-misc: 2 | 3 | Miscellaneous 4 | ============= 5 | 6 | Helpful pages. 7 | 8 | .. toctree:: 9 | :name: misc 10 | 11 | essays 12 | team 13 | glossary 14 | changes 15 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | image: latest 5 | 6 | python: 7 | version: 3.7 8 | install: 9 | - requirements: requirements.txt 10 | - method: pip 11 | path: . 12 | system_packages: true 13 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: pip 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | ignore: 9 | - dependency-name: sqlalchemy[postgresql_psycopg2binary] 10 | versions: 11 | - 1.4.0 12 | - 1.4.1 13 | - 1.4.2 14 | -------------------------------------------------------------------------------- /CONTRIBUTORS.txt: -------------------------------------------------------------------------------- 1 | * Alexander 2 | * Eugene Krevenets 3 | * Fantix King 4 | * Lena Kryvonos 5 | * Low Kian Seong 6 | * Marco Paolini 7 | * Michal Kuffa 8 | * Nikolay Novik 9 | * Petr Viktorin 10 | * R\. 
David Murray 11 | * Ryan Hodge 12 | * Theron Luhn 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | coverage==5.5 2 | docker==6.1.2 3 | flake8==3.9.2 4 | isort==5.9.3 5 | -e .[sa] 6 | sphinx==4.1.2 7 | pytest==6.2.4 8 | pytest-cov==2.12.1 9 | pytest-sugar==0.9.4 10 | pytest-timeout==1.4.2 11 | sphinxcontrib-asyncio==0.3.0 12 | psycopg2-binary==2.9.5 13 | sqlalchemy[postgresql_psycopg2binary]==2.0.20 14 | async-timeout==4.0.0 15 | mypy==0.910 16 | black==22.3.0 17 | six==1.16.0 18 | -------------------------------------------------------------------------------- /docs/team.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-team: 2 | 3 | Team AIOPG 4 | ========== 5 | 6 | Maintainers 7 | ----------- 8 | 9 | Main library developers and those who support: 10 | 11 | .. include:: ../MAINTAINERS.txt 12 | 13 | Contributors 14 | ------------ 15 | 16 | A number of people have contributed to *aiopg* by reporting problems, 17 | suggesting improvements or submitting changes. Some of these people are: 18 | 19 | .. 
include:: ../CONTRIBUTORS.txt 20 | -------------------------------------------------------------------------------- /examples/simple.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import aiopg 4 | 5 | dsn = "dbname=aiopg user=aiopg password=passwd host=127.0.0.1" 6 | 7 | 8 | async def test_select(): 9 | async with aiopg.create_pool(dsn) as pool: 10 | async with pool.acquire() as conn: 11 | async with conn.cursor() as cur: 12 | await cur.execute("SELECT 1") 13 | ret = [] 14 | async for row in cur: 15 | ret.append(row) 16 | assert ret == [(1,)] 17 | print("ALL DONE") 18 | 19 | 20 | asyncio.run(test_select()) 21 | -------------------------------------------------------------------------------- /docs/run_loop.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-run-loop: 2 | 3 | Only use get_running_loop 4 | ========================= 5 | 6 | Rationale 7 | --------- 8 | 9 | :func:`asyncio.get_event_loop()` returns the 10 | running loop :class:`asyncio.AbstractEventLoop` instead of **default**, 11 | which may be different, e.g. 12 | 13 | .. code-block:: py3 14 | 15 | loop = asyncio.new_event_loop() 16 | loop.run_until_complete(f()) 17 | 18 | .. note:: 19 | 20 | :func:`asyncio.set_event_loop` was not called and default 21 | loop :class:`asyncio.AbstractEventLoop` 22 | is not equal to actually executed one. 
23 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | # Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser 2 | blank_issues_enabled: false # default: true 3 | contact_links: 4 | - name: 🤷💻🤦 StackOverflow 5 | url: https://stackoverflow.com/questions/tagged/aiopg 6 | about: Please ask typical Q&A here 7 | - name: 💬 Discourse 8 | url: https://aio-libs.discourse.group/ 9 | about: Please start usage discussions here 10 | - name: 💬 Gitter Chat 11 | url: https://gitter.im/aio-libs/Lobby 12 | about: Chat with devs and community 13 | -------------------------------------------------------------------------------- /aiopg/sa/__init__.py: -------------------------------------------------------------------------------- 1 | """Optional support for sqlalchemy.sql dynamic query generation.""" 2 | from .connection import SAConnection 3 | from .engine import Engine, create_engine 4 | from .exc import ( 5 | ArgumentError, 6 | Error, 7 | InvalidRequestError, 8 | NoSuchColumnError, 9 | ResourceClosedError, 10 | ) 11 | 12 | __all__ = ( 13 | "create_engine", 14 | "SAConnection", 15 | "Error", 16 | "ArgumentError", 17 | "InvalidRequestError", 18 | "NoSuchColumnError", 19 | "ResourceClosedError", 20 | "Engine", 21 | ) 22 | 23 | 24 | ( 25 | SAConnection, 26 | Error, 27 | ArgumentError, 28 | InvalidRequestError, 29 | NoSuchColumnError, 30 | ResourceClosedError, 31 | create_engine, 32 | Engine, 33 | ) 34 | -------------------------------------------------------------------------------- /aiopg/sa/exc.py: -------------------------------------------------------------------------------- 1 | class Error(Exception): 2 | """Generic error class.""" 3 | 4 | 5 | class ArgumentError(Error): 6 | """Raised when an invalid or conflicting function argument is 
supplied. 7 | 8 | This error generally corresponds to construction time state errors. 9 | 10 | """ 11 | 12 | 13 | class InvalidRequestError(ArgumentError): 14 | """aiopg.sa was asked to do something it can't do. 15 | 16 | This error generally corresponds to runtime state errors. 17 | 18 | """ 19 | 20 | 21 | class NoSuchColumnError(KeyError, InvalidRequestError): 22 | """A nonexistent column is requested from a ``RowProxy``.""" 23 | 24 | 25 | class ResourceClosedError(InvalidRequestError): 26 | """An operation was requested from a connection, cursor, or other 27 | object that's in a closed state.""" 28 | -------------------------------------------------------------------------------- /tests/test_version.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from aiopg import _parse_version 4 | 5 | 6 | def test_alpha(): 7 | assert (0, 1, 2, "alpha", 2) == _parse_version("0.1.2a2") 8 | assert (1, 2, 3, "alpha", 0) == _parse_version("1.2.3a") 9 | 10 | 11 | def test_beta(): 12 | assert (0, 1, 2, "beta", 2) == _parse_version("0.1.2b2") 13 | assert (0, 1, 2, "beta", 0) == _parse_version("0.1.2b") 14 | 15 | 16 | def test_rc(): 17 | assert (0, 1, 2, "candidate", 5) == _parse_version("0.1.2rc5") 18 | assert (0, 1, 2, "candidate", 0) == _parse_version("0.1.2rc") 19 | 20 | 21 | def test_final(): 22 | assert (0, 1, 2, "final", 0) == _parse_version("0.1.2") 23 | 24 | 25 | def test_invalid(): 26 | pytest.raises(ImportError, _parse_version, "0.1") 27 | pytest.raises(ImportError, _parse_version, "0.1.1.2") 28 | pytest.raises(ImportError, _parse_version, "0.1.1z2") 29 | -------------------------------------------------------------------------------- /examples/simple_sa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import sqlalchemy as sa 4 | 5 | from aiopg.sa import create_engine 6 | 7 | metadata = sa.MetaData() 8 | 9 | tbl = sa.Table( 10 | "tbl", 11 | metadata, 
12 | sa.Column("id", sa.Integer, primary_key=True), 13 | sa.Column("val", sa.String(255)), 14 | ) 15 | 16 | 17 | async def create_table(conn): 18 | await conn.execute("DROP TABLE IF EXISTS tbl") 19 | await conn.execute( 20 | """CREATE TABLE tbl ( 21 | id serial PRIMARY KEY, 22 | val varchar(255))""" 23 | ) 24 | 25 | 26 | async def go(): 27 | async with create_engine( 28 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 29 | ) as engine: 30 | async with engine.acquire() as conn: 31 | await create_table(conn) 32 | async with engine.acquire() as conn: 33 | await conn.execute(tbl.insert().values(val="abc")) 34 | 35 | async for row in conn.execute(tbl.select()): 36 | print(row.id, row.val) 37 | 38 | 39 | asyncio.run(go()) 40 | -------------------------------------------------------------------------------- /tests/test_sa_result.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from sqlalchemy import Column, Integer, MetaData, String, Table 3 | 4 | meta = MetaData() 5 | tbl = Table( 6 | "sa_tbl", 7 | meta, 8 | Column("id", Integer, nullable=False, primary_key=True), 9 | Column("name", String(255)), 10 | ) 11 | 12 | 13 | @pytest.fixture 14 | def connect(make_sa_connection, loop): 15 | async def start(): 16 | conn = await make_sa_connection() 17 | await conn.execute("DROP TABLE IF EXISTS sa_tbl") 18 | await conn.execute( 19 | "CREATE TABLE sa_tbl (id serial, name varchar(255))" 20 | ) 21 | 22 | await conn.execute(tbl.insert().values(name="test_name")) 23 | 24 | return conn 25 | 26 | return loop.run_until_complete(start()) 27 | 28 | 29 | async def test_non_existing_column_error(connect): 30 | ret = await connect.execute(tbl.select()) 31 | row = await ret.fetchone() 32 | with pytest.raises(AttributeError) as excinfo: 33 | row.non_existing_column 34 | assert excinfo.value.args == ( 35 | "Could not locate column in row for column 'non_existing_column'", 36 | ) 37 | 
-------------------------------------------------------------------------------- /examples/transaction.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from aiopg import IsolationLevel, Transaction, create_pool 4 | 5 | dsn = "dbname=aiopg user=aiopg password=passwd host=127.0.0.1" 6 | 7 | 8 | async def transaction(cur, isolation_level, readonly=False, deferrable=False): 9 | async with Transaction( 10 | cur, isolation_level, readonly, deferrable 11 | ) as transaction: 12 | await cur.execute("insert into tbl values (1)") 13 | 14 | async with transaction.point(): 15 | await cur.execute("insert into tbl values (3)") 16 | 17 | await cur.execute("insert into tbl values (4)") 18 | 19 | 20 | async def main(): 21 | async with create_pool(dsn) as pool: 22 | async with pool.acquire() as conn: 23 | async with conn.cursor() as cur: 24 | await cur.execute("DROP TABLE IF EXISTS tbl") 25 | await cur.execute("CREATE TABLE tbl (id int)") 26 | await transaction(cur, IsolationLevel.repeatable_read) 27 | await transaction(cur, IsolationLevel.read_committed) 28 | await transaction(cur, IsolationLevel.serializable) 29 | 30 | await cur.execute("select * from tbl") 31 | 32 | 33 | asyncio.run(main()) 34 | -------------------------------------------------------------------------------- /tests/test_extended_types.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from psycopg2.extras import Json 4 | 5 | 6 | async def test_uuid(make_connection): 7 | conn = await make_connection() 8 | _id = uuid.uuid1() 9 | cur = await conn.cursor() 10 | try: 11 | await cur.execute("DROP TABLE IF EXISTS tbl") 12 | await cur.execute("""CREATE TABLE tbl (id UUID)""") 13 | await cur.execute("INSERT INTO tbl (id) VALUES (%s)", [_id]) 14 | await cur.execute("SELECT * FROM tbl") 15 | item = await cur.fetchone() 16 | assert (_id,) == item 17 | finally: 18 | cur.close() 19 | 20 | 21 | async def 
test_json(make_connection): 22 | conn = await make_connection() 23 | data = {"a": 1, "b": "str"} 24 | cur = await conn.cursor() 25 | try: 26 | await cur.execute("DROP TABLE IF EXISTS tbl") 27 | await cur.execute( 28 | """CREATE TABLE tbl ( 29 | id SERIAL, 30 | val JSON)""" 31 | ) 32 | await cur.execute("INSERT INTO tbl (val) VALUES (%s)", [Json(data)]) 33 | await cur.execute("SELECT * FROM tbl") 34 | item = await cur.fetchone() 35 | assert (1, {"b": "str", "a": 1}) == item 36 | finally: 37 | cur.close() 38 | -------------------------------------------------------------------------------- /examples/notify.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import psycopg2 4 | 5 | import aiopg 6 | 7 | dsn = "dbname=aiopg user=aiopg password=passwd host=127.0.0.1" 8 | 9 | 10 | async def notify(conn): 11 | async with conn.cursor() as cur: 12 | for i in range(5): 13 | msg = f"message {i}" 14 | print("Send ->", msg) 15 | await cur.execute("NOTIFY channel, %s", (msg,)) 16 | 17 | await cur.execute("NOTIFY channel, 'finish'") 18 | 19 | 20 | async def listen(conn): 21 | async with conn.cursor() as cur: 22 | await cur.execute("LISTEN channel") 23 | while True: 24 | try: 25 | msg = await conn.notifies.get() 26 | except psycopg2.Error as ex: 27 | print("ERROR: ", ex) 28 | return 29 | if msg.payload == "finish": 30 | return 31 | else: 32 | print("Receive <-", msg.payload) 33 | 34 | 35 | async def main(): 36 | async with aiopg.connect(dsn) as listenConn: 37 | async with aiopg.create_pool(dsn) as notifyPool: 38 | async with notifyPool.acquire() as notifyConn: 39 | listener = listen(listenConn) 40 | notifier = notify(notifyConn) 41 | await asyncio.gather(listener, notifier) 42 | print("ALL DONE") 43 | 44 | 45 | asyncio.run(main()) 46 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 
2014, 2015, Andrew Svetlov 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are 6 | met: 7 | 8 | 1. Redistributions of source code must retain the above copyright 9 | notice, this list of conditions and the following disclaimer. 10 | 11 | 2. Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | 15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
26 | -------------------------------------------------------------------------------- /examples/named_field_sa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | 4 | import sqlalchemy as sa 5 | 6 | from aiopg.sa import create_engine 7 | 8 | metadata = sa.MetaData() 9 | 10 | now = datetime.datetime.now 11 | 12 | tbl = sa.Table( 13 | "tbl", 14 | metadata, 15 | sa.Column("MyIDField", sa.Integer, key="id", primary_key=True), 16 | sa.Column("NaMe", sa.String(255), key="name", default="default name"), 17 | ) 18 | 19 | 20 | async def insert_tbl(conn, **kwargs): 21 | await conn.execute(tbl.insert().values(**kwargs)) 22 | row = await (await conn.execute(tbl.select())).first() 23 | 24 | for name, val in kwargs.items(): 25 | assert row[name] == val 26 | 27 | await conn.execute(sa.delete(tbl)) 28 | 29 | 30 | async def create_table(conn): 31 | await conn.execute("DROP TABLE IF EXISTS tbl") 32 | await conn.execute( 33 | "CREATE TABLE tbl (" 34 | '"MyIDField" INTEGER NOT NULL, ' 35 | '"NaMe" VARCHAR(255), ' 36 | 'PRIMARY KEY ("MyIDField"))' 37 | ) 38 | 39 | 40 | async def go(): 41 | async with create_engine( 42 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 43 | ) as engine: 44 | async with engine.acquire() as conn: 45 | await create_table(conn) 46 | await insert_tbl(conn, id=1) 47 | await insert_tbl(conn, id=2, name="test") 48 | 49 | 50 | asyncio.run(go()) 51 | -------------------------------------------------------------------------------- /tests/dump.txt: -------------------------------------------------------------------------------- 1 | -- 2 | -- PostgreSQL database dump 3 | -- 4 | 5 | SET statement_timeout = 0; 6 | SET lock_timeout = 0; 7 | SET client_encoding = 'UTF8'; 8 | SET standard_conforming_strings = on; 9 | SET check_function_bodies = false; 10 | SET client_min_messages = warning; 11 | 12 | -- 13 | -- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: 14 | -- 15 | 16 | 
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; 17 | 18 | 19 | -- 20 | -- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: 21 | -- 22 | 23 | COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; 24 | 25 | 26 | SET search_path = public, pg_catalog; 27 | 28 | SET default_tablespace = ''; 29 | 30 | SET default_with_oids = false; 31 | 32 | -- 33 | -- Name: tbl; Type: TABLE; Schema: public; Owner: aiopg; Tablespace: 34 | -- 35 | 36 | CREATE TABLE tbl ( 37 | a integer, 38 | b character varying(255) 39 | ); 40 | 41 | 42 | ALTER TABLE public.tbl OWNER TO aiopg; 43 | 44 | -- 45 | -- Data for Name: tbl; Type: TABLE DATA; Schema: public; Owner: aiopg 46 | -- 47 | 48 | COPY tbl (a, b) FROM stdin; 49 | 1 a 50 | 2 b 51 | 3 c 52 | \. 53 | 54 | 55 | -- 56 | -- Name: public; Type: ACL; Schema: -; Owner: postgres 57 | -- 58 | 59 | REVOKE ALL ON SCHEMA public FROM PUBLIC; 60 | REVOKE ALL ON SCHEMA public FROM postgres; 61 | GRANT ALL ON SCHEMA public TO postgres; 62 | GRANT ALL ON SCHEMA public TO PUBLIC; 63 | 64 | 65 | -- 66 | -- PostgreSQL database dump complete 67 | -- 68 | 69 | -------------------------------------------------------------------------------- /examples/default_field_sa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | import uuid 4 | 5 | import sqlalchemy as sa 6 | from sqlalchemy.sql.ddl import CreateTable 7 | 8 | from aiopg.sa import create_engine 9 | 10 | metadata = sa.MetaData() 11 | 12 | now = datetime.datetime.now 13 | 14 | tbl = sa.Table( 15 | "tbl", 16 | metadata, 17 | sa.Column("id", sa.Integer, autoincrement=True, primary_key=True), 18 | sa.Column("uuid", sa.String, default=lambda: str(uuid.uuid4())), 19 | sa.Column("name", sa.String(255), default="default name"), 20 | sa.Column("date", sa.DateTime, default=datetime.datetime.now), 21 | sa.Column("flag", sa.Integer, default=0), 22 | sa.Column("count_str", sa.Integer, 
default=sa.func.length("default")), 23 | sa.Column("is_active", sa.Boolean, default=True), 24 | ) 25 | 26 | 27 | async def insert_tbl(conn, pk, **kwargs): 28 | await conn.execute(tbl.insert().values(**kwargs)) 29 | row = await (await conn.execute(tbl.select())).first() 30 | 31 | assert row.id == pk 32 | 33 | for name, val in kwargs.items(): 34 | assert row[name] == val 35 | 36 | await conn.execute(sa.delete(tbl)) 37 | 38 | 39 | async def create_table(conn): 40 | await conn.execute("DROP TABLE IF EXISTS tbl") 41 | await conn.execute(CreateTable(tbl)) 42 | 43 | 44 | async def go(): 45 | async with create_engine( 46 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 47 | ) as engine: 48 | async with engine.acquire() as conn: 49 | await create_table(conn) 50 | async with engine.acquire() as conn: 51 | await insert_tbl(conn, 1) 52 | await insert_tbl(conn, 2, name="test", is_active=False, date=now()) 53 | 54 | 55 | asyncio.run(go()) 56 | -------------------------------------------------------------------------------- /tests/test_sa_cursor.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sqlalchemy as sa 3 | 4 | meta = sa.MetaData() 5 | tbl = sa.Table( 6 | "sa_tbl5", 7 | meta, 8 | sa.Column("ID", sa.String, primary_key=True, key="id"), 9 | sa.Column("Name", sa.String(255), key="name"), 10 | ) 11 | 12 | 13 | @pytest.fixture 14 | def connect(make_sa_connection, loop): 15 | async def start(): 16 | conn = await make_sa_connection() 17 | await conn.execute("DROP TABLE IF EXISTS sa_tbl5") 18 | await conn.execute( 19 | "CREATE TABLE sa_tbl5 (" 20 | '"ID" VARCHAR(255) NOT NULL, ' 21 | '"Name" VARCHAR(255), ' 22 | 'PRIMARY KEY ("ID"))' 23 | ) 24 | 25 | await conn.execute(tbl.insert().values(id="test1", name="test_name")) 26 | await conn.execute(tbl.insert().values(id="test2", name="test_name")) 27 | await conn.execute(tbl.insert().values(id="test3", name="test_name")) 28 | 29 | return conn 30 | 31 
| return loop.run_until_complete(start()) 32 | 33 | 34 | async def test_insert(connect): 35 | await connect.execute(tbl.insert().values(id="test-4", name="test_name")) 36 | await connect.execute(tbl.insert().values(id="test-5", name="test_name")) 37 | assert 5 == len(await (await connect.execute(tbl.select())).fetchall()) 38 | 39 | 40 | async def test_two_cursor_create_context_manager(make_engine, connect): 41 | engine = await make_engine(maxsize=1) 42 | 43 | async with engine.acquire() as conn: 44 | r1 = await conn.execute(tbl.insert().values(id="1", name="test")) 45 | 46 | r2 = await conn.execute(tbl.select()) 47 | await r2.fetchone() 48 | assert not r2.closed 49 | 50 | r3 = await conn.execute(tbl.insert().values(id="3", name="test")) 51 | 52 | assert r1.closed 53 | assert r2.closed 54 | assert r3.closed 55 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🚀 Feature request 3 | description: Suggest an idea for this project. 4 | labels: enhancement 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | **Thanks for taking a minute to file a feature for aiopg!** 10 | 11 | ⚠ 12 | Verify first that your feature request is not [already reported on 13 | GitHub][issue search]. 14 | 15 | _Please fill out the form below with as many precise 16 | details as possible._ 17 | 18 | [issue search]: ../search?q=is%3Aissue&type=issues 19 | 20 | - type: textarea 21 | attributes: 22 | label: Is your feature request related to a problem? 23 | description: >- 24 | Please add a clear and concise description of what 25 | the problem is. _Ex. I'm always frustrated when [...]_ 26 | 27 | - type: textarea 28 | attributes: 29 | label: Describe the solution you'd like 30 | description: >- 31 | A clear and concise description of what you want to happen. 
32 | validations: 33 | required: true 34 | 35 | - type: textarea 36 | attributes: 37 | label: Describe alternatives you've considered 38 | description: >- 39 | A clear and concise description of any alternative solutions 40 | or features you've considered. 41 | validations: 42 | required: true 43 | 44 | - type: textarea 45 | attributes: 46 | label: Additional context 47 | description: >- 48 | Add any other context or screenshots about 49 | the feature request here. 50 | 51 | - type: checkboxes 52 | attributes: 53 | label: Code of Conduct 54 | description: | 55 | Read the [aio-libs Code of Conduct][CoC] first. 56 | 57 | [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md 58 | options: 59 | - label: I agree to follow the aio-libs Code of Conduct 60 | required: true 61 | ... 62 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | .. _aiopg-examples: 2 | 3 | ======================== 4 | Examples of aiopg usage 5 | ======================== 6 | 7 | Below is a list of examples from `aiopg/examples 8 | `_ 9 | 10 | Every example is a correct tiny python program. 11 | 12 | .. _aiopg-examples-simple: 13 | 14 | Low-level API 15 | ------------- 16 | 17 | .. literalinclude:: ../examples/simple.py 18 | 19 | 20 | .. _aiopg-examples-notify: 21 | 22 | Usage of LISTEN/NOTIFY commands 23 | ------------------------------- 24 | 25 | .. literalinclude:: ../examples/notify.py 26 | 27 | 28 | .. _aiopg-examples-sa-simple: 29 | 30 | Simple sqlalchemy usage 31 | ----------------------- 32 | 33 | .. literalinclude:: ../examples/simple_sa.py 34 | 35 | 36 | .. _aiopg-examples-sa-default-field: 37 | 38 | Default value field sqlalchemy usage 39 | ------------------------------------ 40 | 41 | .. literalinclude:: ../examples/default_field_sa.py 42 | 43 | 44 | .. 
_aiopg-examples-sa-types-field: 45 | 46 | Types field sqlalchemy usage 47 | ---------------------------- 48 | 49 | .. literalinclude:: ../examples/types_field_sa.py 50 | 51 | 52 | .. _aiopg-examples-sa-named-field: 53 | 54 | Named field sqlalchemy usage 55 | ---------------------------- 56 | 57 | .. literalinclude:: ../examples/named_field_sa.py 58 | 59 | 60 | .. _aiopg-examples-sa-complex: 61 | 62 | Complex sqlalchemy queries 63 | --------------------------- 64 | 65 | .. literalinclude:: ../examples/sa.py 66 | 67 | 68 | .. _aiopg-examples-sa-simple-transaction: 69 | 70 | Simple transaction in sqlalchemy 71 | -------------------------------- 72 | 73 | .. literalinclude:: ../examples/simple_sa_transaction.py 74 | 75 | 76 | .. _aiopg-examples-sa-isolation-transaction: 77 | 78 | Isolation transaction in sqlalchemy 79 | ----------------------------------- 80 | 81 | .. literalinclude:: ../examples/isolation_sa_transaction.py 82 | -------------------------------------------------------------------------------- /tests/test_sa_distil.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from aiopg.sa.connection import _distill_params 4 | 5 | pytest.importorskip("aiopg.sa") # noqa 6 | 7 | 8 | def test_distill_none(): 9 | assert _distill_params(None, None) == [] 10 | 11 | 12 | def test_distill_no_multi_no_param(): 13 | assert _distill_params((), {}) == [] 14 | 15 | 16 | def test_distill_dict_multi_none_param(): 17 | assert _distill_params(None, {"foo": "bar"}) == [{"foo": "bar"}] 18 | 19 | 20 | def test_distill_dict_multi_empty_param(): 21 | assert _distill_params((), {"foo": "bar"}) == [{"foo": "bar"}] 22 | 23 | 24 | def test_distill_single_dict(): 25 | assert _distill_params(({"foo": "bar"},), {}) == [{"foo": "bar"}] 26 | 27 | 28 | def test_distill_single_list_strings(): 29 | assert _distill_params((["foo", "bar"],), {}) == [["foo", "bar"]] 30 | 31 | 32 | def test_distill_single_list_tuples(): 33 | v1 = 
_distill_params(([("foo", "bar"), ("bat", "hoho")],), {}) 34 | v2 = [("foo", "bar"), ("bat", "hoho")] 35 | assert v1 == v2 36 | 37 | 38 | def test_distill_single_list_tuple(): 39 | v1 = _distill_params(([("foo", "bar")],), {}) 40 | v2 = [("foo", "bar")] 41 | assert v1 == v2 42 | 43 | 44 | def test_distill_multi_list_tuple(): 45 | v1 = _distill_params(([("foo", "bar")], [("bar", "bat")]), {}) 46 | v2 = ([("foo", "bar")], [("bar", "bat")]) 47 | assert v1 == v2 48 | 49 | 50 | def test_distill_multi_strings(): 51 | assert _distill_params(("foo", "bar"), {}) == [("foo", "bar")] 52 | 53 | 54 | def test_distill_single_list_dicts(): 55 | v1 = _distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {}) 56 | assert v1 == [{"foo": "bar"}, {"foo": "hoho"}] 57 | 58 | 59 | def test_distill_single_string(): 60 | assert _distill_params(("arg",), {}) == [["arg"]] 61 | 62 | 63 | def test_distill_multi_string_tuple(): 64 | v1 = _distill_params((("arg", "arg"),), {}) 65 | assert v1 == [("arg", "arg")] 66 | -------------------------------------------------------------------------------- /tests/test_isolation_level.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from aiopg import IsolationLevel 4 | 5 | 6 | @pytest.mark.parametrize( 7 | "isolation_level, name", 8 | [ 9 | (IsolationLevel.default, "Default"), 10 | (IsolationLevel.read_committed, "Read committed"), 11 | (IsolationLevel.repeatable_read, "Repeatable read"), 12 | (IsolationLevel.serializable, "Serializable"), 13 | ], 14 | ) 15 | def test_isolation_level_name(isolation_level, name): 16 | assert isolation_level(False, False).name == name 17 | 18 | 19 | @pytest.mark.parametrize( 20 | "isolation_level, readonly, deferred, expected_begin", 21 | [ 22 | (IsolationLevel.default, False, False, "BEGIN"), 23 | (IsolationLevel.default, True, False, "BEGIN READ ONLY"), 24 | ( 25 | IsolationLevel.read_committed, 26 | False, 27 | False, 28 | "BEGIN ISOLATION LEVEL READ COMMITTED", 
29 | ), 30 | ( 31 | IsolationLevel.read_committed, 32 | True, 33 | False, 34 | "BEGIN ISOLATION LEVEL READ COMMITTED READ ONLY", 35 | ), 36 | ( 37 | IsolationLevel.repeatable_read, 38 | False, 39 | False, 40 | "BEGIN ISOLATION LEVEL REPEATABLE READ", 41 | ), 42 | ( 43 | IsolationLevel.repeatable_read, 44 | True, 45 | False, 46 | "BEGIN ISOLATION LEVEL REPEATABLE READ READ ONLY", 47 | ), 48 | ( 49 | IsolationLevel.serializable, 50 | False, 51 | False, 52 | "BEGIN ISOLATION LEVEL SERIALIZABLE", 53 | ), 54 | ( 55 | IsolationLevel.serializable, 56 | True, 57 | False, 58 | "BEGIN ISOLATION LEVEL SERIALIZABLE READ ONLY", 59 | ), 60 | ( 61 | IsolationLevel.serializable, 62 | True, 63 | True, 64 | "BEGIN ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE", 65 | ), 66 | ], 67 | ) 68 | def test_isolation_level_begin( 69 | isolation_level, readonly, deferred, expected_begin 70 | ): 71 | assert isolation_level(readonly, deferred).begin() == expected_begin 72 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Some simple testing tasks (sorry, UNIX only). 2 | 3 | clean-docs: 4 | cd docs && rm -rf _build/html 5 | 6 | doc: clean-docs 7 | cd docs && make html 8 | @echo "open file://`pwd`/docs/_build/html/index.html" 9 | 10 | isort: 11 | isort --use-parentheses --multi-line 3 --combine-as --trailing-comma aiopg 12 | isort --use-parentheses --multi-line 3 --combine-as --trailing-comma tests 13 | isort --use-parentheses --multi-line 3 --combine-as --trailing-comma examples 14 | 15 | black: 16 | black --line-length 79 aiopg 17 | black --line-length 79 tests 18 | black --line-length 79 examples 19 | 20 | lint: .lint 21 | 22 | .lint: $(shell find aiopg -type f) \ 23 | $(shell find tests -type f) \ 24 | $(shell find examples -type f) 25 | flake8 aiopg tests examples 26 | python setup.py check -rms 27 | @if ! 
black --line-length 79 --check aiopg tests examples; then \ 28 | echo "Format errors, run 'make black' to fix them!!!"; \ 29 | false; \ 30 | fi 31 | @if ! isort --use-parentheses --multi-line 3 --combine-as --trailing-comma -c aiopg tests examples; then \ 32 | echo "Import sort errors, run 'make isort' to fix them!!!"; \ 33 | isort --diff aiopg tests examples; \ 34 | false; \ 35 | fi 36 | @if ! mypy --strict --ignore-missing-imports --exclude sa aiopg; then \ 37 | echo "Typing errors"; \ 38 | false; \ 39 | fi 40 | 41 | test: flake 42 | pytest -q tests 43 | 44 | vtest: flake 45 | pytest tests 46 | 47 | cov cover coverage: flake 48 | py.test -svvv -rs --cov=aiopg --cov-report=html --cov-report=term tests 49 | @echo "open file://`pwd`/htmlcov/index.html" 50 | 51 | cov-ci: flake 52 | py.test -svvv -rs --cov=aiopg --cov-report=term tests --pg_tag all 53 | 54 | clean-pip: 55 | pip freeze | grep -v "^-e" | xargs pip uninstall -y 56 | 57 | clean: 58 | find . -name __pycache__ |xargs rm -rf 59 | find . -type f -name '*.py[co]' -delete 60 | find . -type f -name '*~' -delete 61 | find . -type f -name '.*~' -delete 62 | find . -type f -name '@*' -delete 63 | find . -type f -name '#*#' -delete 64 | find . -type f -name '*.orig' -delete 65 | find . 
-type f -name '*.rej' -delete 66 | rm -f .coverage 67 | rm -rf coverage 68 | rm -rf docs/_build 69 | 70 | .PHONY: all isort flake test vtest cov clean clean-pip clean-docs 71 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from aiopg.utils import ClosableQueue 6 | 7 | 8 | async def test_closable_queue_noclose(loop): 9 | the_queue = asyncio.Queue() 10 | queue = ClosableQueue(the_queue, loop) 11 | assert queue.empty() 12 | assert queue.qsize() == 0 13 | 14 | await the_queue.put(1) 15 | assert not queue.empty() 16 | assert queue.qsize() == 1 17 | v = await queue.get() 18 | assert v == 1 19 | 20 | await the_queue.put(2) 21 | v = queue.get_nowait() 22 | assert v == 2 23 | 24 | 25 | async def test_closable_queue_close(loop): 26 | the_queue = asyncio.Queue() 27 | queue = ClosableQueue(the_queue, loop) 28 | v1 = None 29 | 30 | async def read(): 31 | nonlocal v1 32 | v1 = await queue.get() 33 | await queue.get() 34 | 35 | reader = loop.create_task(read()) 36 | await the_queue.put(1) 37 | await asyncio.sleep(0.1) 38 | assert v1 == 1 39 | 40 | queue.close(RuntimeError("connection closed")) 41 | with pytest.raises(RuntimeError) as excinfo: 42 | await reader 43 | assert excinfo.value.args == ("connection closed",) 44 | 45 | 46 | async def test_closable_queue_close_get_nowait(loop): 47 | the_queue = asyncio.Queue() 48 | queue = ClosableQueue(the_queue, loop) 49 | 50 | await the_queue.put(1) 51 | queue.close(RuntimeError("connection closed")) 52 | 53 | # even when the queue is closed, while there are items in the queu, we 54 | # allow reading them. 
55 | assert queue.get_nowait() == 1 56 | 57 | # when there are no more items in the queue, if there is a close exception 58 | # then it will get raises here 59 | with pytest.raises(RuntimeError) as excinfo: 60 | queue.get_nowait() 61 | assert excinfo.value.args == ("connection closed",) 62 | 63 | 64 | async def test_closable_queue_get_nowait_noclose(loop): 65 | the_queue = asyncio.Queue() 66 | queue = ClosableQueue(the_queue, loop) 67 | await the_queue.put(1) 68 | assert queue.get_nowait() == 1 69 | with pytest.raises(asyncio.QueueEmpty): 70 | queue.get_nowait() 71 | 72 | 73 | async def test_closable_queue_get_cancellation(loop): 74 | queue = ClosableQueue(asyncio.Queue(), loop) 75 | get_task = loop.create_task(queue.get()) 76 | await asyncio.sleep(0.1) 77 | get_task.cancel() 78 | with pytest.raises(asyncio.CancelledError): 79 | await get_task 80 | -------------------------------------------------------------------------------- /aiopg/__init__.py: -------------------------------------------------------------------------------- 1 | import re 2 | import sys 3 | import warnings 4 | from collections import namedtuple 5 | 6 | from .connection import ( 7 | TIMEOUT as DEFAULT_TIMEOUT, 8 | Connection, 9 | Cursor, 10 | DefaultCompiler, 11 | IsolationCompiler, 12 | IsolationLevel, 13 | ReadCommittedCompiler, 14 | RepeatableReadCompiler, 15 | SerializableCompiler, 16 | Transaction, 17 | connect, 18 | ) 19 | from .pool import Pool, create_pool 20 | from .utils import get_running_loop 21 | 22 | warnings.filterwarnings( 23 | "always", 24 | ".*", 25 | category=ResourceWarning, 26 | module=r"aiopg(\.\w+)+", 27 | append=False, 28 | ) 29 | 30 | __all__ = ( 31 | "connect", 32 | "create_pool", 33 | "get_running_loop", 34 | "Connection", 35 | "Cursor", 36 | "Pool", 37 | "version", 38 | "version_info", 39 | "DEFAULT_TIMEOUT", 40 | "IsolationLevel", 41 | "Transaction", 42 | ) 43 | 44 | __version__ = "1.5.0a1" 45 | 46 | version = f"{__version__}, Python {sys.version}" 47 | 48 | 
VersionInfo = namedtuple( 49 | "VersionInfo", "major minor micro releaselevel serial" 50 | ) 51 | 52 | 53 | def _parse_version(ver: str) -> VersionInfo: 54 | RE = ( 55 | r"^" 56 | r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<micro>\d+)" 57 | r"((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?" 58 | r"$" 59 | ) 60 | match = re.match(RE, ver) 61 | if not match: 62 | raise ImportError(f"Invalid package version {ver}") 63 | try: 64 | major = int(match.group("major")) 65 | minor = int(match.group("minor")) 66 | micro = int(match.group("micro")) 67 | levels = {"rc": "candidate", "a": "alpha", "b": "beta", None: "final"} 68 | releaselevel = levels[match.group("releaselevel")] 69 | serial = int(match.group("serial")) if match.group("serial") else 0 70 | return VersionInfo(major, minor, micro, releaselevel, serial) 71 | except Exception as e: 72 | raise ImportError(f"Invalid package version {ver}") from e 73 | 74 | 75 | version_info = _parse_version(__version__) 76 | 77 | # make pyflakes happy 78 | ( 79 | connect, 80 | create_pool, 81 | Connection, 82 | Cursor, 83 | Pool, 84 | DEFAULT_TIMEOUT, 85 | IsolationLevel, 86 | Transaction, 87 | get_running_loop, 88 | IsolationCompiler, 89 | DefaultCompiler, 90 | ReadCommittedCompiler, 91 | RepeatableReadCompiler, 92 | SerializableCompiler, 93 | ) 94 | -------------------------------------------------------------------------------- /examples/types_field_sa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import sqlalchemy as sa 4 | from sqlalchemy.dialects.postgresql import ARRAY, ENUM, JSON 5 | from sqlalchemy.sql.ddl import CreateTable 6 | 7 | from aiopg.sa import create_engine 8 | 9 | metadata = sa.MetaData() 10 | 11 | 12 | class CustomStrList(sa.types.TypeDecorator): 13 | impl = sa.types.String 14 | 15 | def __init__(self, sep=",", *args, **kwargs): 16 | self._sep = sep 17 | self._args = args 18 | self._kwargs = kwargs 19 | super().__init__(*args, **kwargs) 20 | 21 | def process_bind_param(self, value, dialect): 22 | 
return f"{self._sep}".join(map(str, value)) 23 | 24 | def process_result_value(self, value, dialect): 25 | if value is None: 26 | return value 27 | 28 | return value.split(self._sep) 29 | 30 | def copy(self): 31 | return CustomStrList(self._sep, *self._args, **self._kwargs) 32 | 33 | 34 | tbl = sa.Table( 35 | "tbl", 36 | metadata, 37 | sa.Column("id", sa.Integer, autoincrement=True, primary_key=True), 38 | sa.Column("json", JSON, default=None), 39 | sa.Column("array_int", ARRAY(sa.Integer), default=list), 40 | sa.Column("enum", ENUM("f", "s", name="s_enum"), default="s"), 41 | sa.Column("custom_list", CustomStrList(), default=list), 42 | ) 43 | 44 | 45 | async def insert_tbl(conn, pk, **kwargs): 46 | await conn.execute(tbl.insert().values(**kwargs)) 47 | row = await (await conn.execute(tbl.select())).first() 48 | 49 | assert row.id == pk 50 | 51 | for name, val in kwargs.items(): 52 | assert row[name] == val 53 | 54 | await conn.execute(sa.delete(tbl)) 55 | 56 | 57 | async def create_table(conn): 58 | await conn.execute("DROP TABLE IF EXISTS tbl") 59 | await conn.execute("DROP TYPE IF EXISTS s_enum CASCADE") 60 | await conn.execute("CREATE TYPE s_enum AS ENUM ('f', 's')") 61 | await conn.execute(CreateTable(tbl)) 62 | 63 | 64 | async def go(): 65 | async with create_engine( 66 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 67 | ) as engine: 68 | async with engine.acquire() as conn: 69 | await create_table(conn) 70 | async with engine.acquire() as conn: 71 | await insert_tbl(conn, 1) 72 | await insert_tbl(conn, 2, json={"data": 123}) 73 | await insert_tbl(conn, 3, array_int=[1, 3, 4]) 74 | await insert_tbl(conn, 4, enum="f") 75 | await insert_tbl(conn, 5, custom_list=["1", "test", "4"]) 76 | 77 | 78 | asyncio.run(go()) 79 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 
5 | branches: 6 | - master 7 | - '[0-9].[0-9]+' 8 | tags: [ 'v*' ] 9 | pull_request: 10 | branches: 11 | - master 12 | - '[0-9].[0-9]+' 13 | 14 | jobs: 15 | test: 16 | name: Test on Python ${{ matrix.python }} 17 | strategy: 18 | matrix: 19 | python: ['3.8', '3.9', '3.10', '3.11'] 20 | sqlalchemy: ['sqlalchemy[postgresql_psycopg2binary]>=1.4,<1.5', 'sqlalchemy[postgresql_psycopg2binary]>=2.0,<2.1'] 21 | fail-fast: false 22 | runs-on: ubuntu-latest 23 | timeout-minutes: 20 24 | steps: 25 | - name: Checkout 26 | uses: actions/checkout@v2 27 | - name: Setup Python 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: ${{ matrix.python }} 31 | - name: Install dependencies 32 | run: | 33 | pip install -e . 34 | pip install -r requirements.txt 35 | pip install codecov 36 | - name: Install ${{ matrix.sqlalchemy }} 37 | run: | 38 | pip install "${{ matrix.sqlalchemy }}" 39 | - name: Run tests 40 | run: | 41 | make cov-ci 42 | python setup.py check -rms 43 | codecov 44 | 45 | lint: 46 | name: Run linters 47 | runs-on: ubuntu-latest 48 | steps: 49 | - name: Checkout 50 | uses: actions/checkout@v2 51 | - name: Setup Python 52 | uses: actions/setup-python@v2 53 | with: 54 | python-version: 3.9 55 | - name: Install dependencies 56 | run: | 57 | pip install -e . 
58 | pip install -r requirements.txt 59 | - name: Run linters 60 | run: | 61 | make lint 62 | 63 | deploy: 64 | name: Deploy 65 | runs-on: ubuntu-latest 66 | needs: 67 | - test 68 | - lint 69 | # Run only on pushing a tag 70 | if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') 71 | steps: 72 | - name: Checkout 73 | uses: actions/checkout@v2 74 | - name: Setup Python 3.9 75 | uses: actions/setup-python@v2 76 | with: 77 | python-version: 3.9 78 | - name: Install dependencies 79 | run: 80 | python -m pip install -U pip wheel twine 81 | - name: Make dists 82 | run: 83 | python setup.py sdist bdist_wheel 84 | - name: Check dists 85 | run: 86 | twine check dist/* 87 | - name: PyPI upload 88 | env: 89 | TWINE_USERNAME: __token__ 90 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} 91 | run: | 92 | twine upload dist/* 93 | -------------------------------------------------------------------------------- /tests/test_sa_priority_name.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import sqlalchemy as sa 3 | from sqlalchemy import LABEL_STYLE_TABLENAME_PLUS_COL 4 | 5 | meta = sa.MetaData() 6 | tbl = sa.Table( 7 | "sa_tbl5", 8 | meta, 9 | sa.Column("ID", sa.String, primary_key=True, key="id"), 10 | sa.Column("Name", sa.String(255), key="name"), 11 | ) 12 | 13 | 14 | @pytest.fixture 15 | def connect(make_sa_connection, loop): 16 | async def start(): 17 | conn = await make_sa_connection() 18 | await conn.execute("DROP TABLE IF EXISTS sa_tbl5") 19 | await conn.execute( 20 | "CREATE TABLE sa_tbl5 (" 21 | '"ID" VARCHAR(255) NOT NULL, ' 22 | '"Name" VARCHAR(255), ' 23 | 'PRIMARY KEY ("ID"))' 24 | ) 25 | 26 | return conn 27 | 28 | return loop.run_until_complete(start()) 29 | 30 | 31 | async def test_priority_name(connect): 32 | await connect.execute(tbl.insert().values(id="test_id", name="test_name")) 33 | row = await (await connect.execute(tbl.select())).first() 34 | assert row.name == "test_name" 35 | assert 
row.id == "test_id" 36 | 37 | 38 | async def test_priority_name_label(connect): 39 | await connect.execute(tbl.insert().values(id="test_id", name="test_name")) 40 | query = sa.select(tbl.c.name.label("test_label_name"), tbl.c.id) 41 | query = query.select_from(tbl) 42 | row = await (await connect.execute(query)).first() 43 | assert row.test_label_name == "test_name" 44 | assert row.id == "test_id" 45 | 46 | 47 | async def test_priority_name_and_label(connect): 48 | await connect.execute(tbl.insert().values(id="test_id", name="test_name")) 49 | query = sa.select( 50 | tbl.c.name.label("test_label_name"), tbl.c.name, tbl.c.id 51 | ) 52 | query = query.select_from(tbl) 53 | row = await (await connect.execute(query)).first() 54 | assert row.test_label_name == "test_name" 55 | assert row.name == "test_name" 56 | assert row.id == "test_id" 57 | 58 | 59 | async def test_priority_name_all_get(connect): 60 | await connect.execute(tbl.insert().values(id="test_id", name="test_name")) 61 | query = sa.select(tbl.c.name) 62 | query = query.select_from(tbl) 63 | row = await (await connect.execute(query)).first() 64 | assert row.name == "test_name" 65 | assert row["name"] == "test_name" 66 | assert row[0] == "test_name" 67 | assert row[tbl.c.name] == "test_name" 68 | 69 | 70 | async def test_use_labels(connect): 71 | """key property is ignored""" 72 | await connect.execute(tbl.insert().values(id="test_id", name="test_name")) 73 | query = tbl.select().set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL) 74 | row = await (await connect.execute(query)).first() 75 | assert row.sa_tbl5_Name == "test_name" 76 | assert row.sa_tbl5_ID == "test_id" 77 | 78 | assert not hasattr(row, "name") 79 | assert not hasattr(row, "id") 80 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | aiopg 2 | ===== 3 | .. 
image:: https://github.com/aio-libs/aiopg/workflows/CI/badge.svg 4 | :target: https://github.com/aio-libs/aiopg/actions?query=workflow%3ACI 5 | .. image:: https://codecov.io/gh/aio-libs/aiopg/branch/master/graph/badge.svg 6 | :target: https://codecov.io/gh/aio-libs/aiopg 7 | .. image:: https://badges.gitter.im/Join%20Chat.svg 8 | :target: https://gitter.im/aio-libs/Lobby 9 | :alt: Chat on Gitter 10 | 11 | **aiopg** is a library for accessing a PostgreSQL_ database 12 | from the asyncio_ (PEP-3156/tulip) framework. It wraps 13 | asynchronous features of the Psycopg database driver. 14 | 15 | Example 16 | ------- 17 | 18 | .. code:: python 19 | 20 | import asyncio 21 | import aiopg 22 | 23 | dsn = 'dbname=aiopg user=aiopg password=passwd host=127.0.0.1' 24 | 25 | async def go(): 26 | pool = await aiopg.create_pool(dsn) 27 | async with pool.acquire() as conn: 28 | async with conn.cursor() as cur: 29 | await cur.execute("SELECT 1") 30 | ret = [] 31 | async for row in cur: 32 | ret.append(row) 33 | assert ret == [(1,)] 34 | 35 | loop = asyncio.get_event_loop() 36 | loop.run_until_complete(go()) 37 | 38 | 39 | Example of SQLAlchemy optional integration 40 | ------------------------------------------ 41 | 42 | .. 
code:: python 43 | 44 | import asyncio 45 | from aiopg.sa import create_engine 46 | import sqlalchemy as sa 47 | 48 | metadata = sa.MetaData() 49 | 50 | tbl = sa.Table('tbl', metadata, 51 | sa.Column('id', sa.Integer, primary_key=True), 52 | sa.Column('val', sa.String(255))) 53 | 54 | async def create_table(engine): 55 | async with engine.acquire() as conn: 56 | await conn.execute('DROP TABLE IF EXISTS tbl') 57 | await conn.execute('''CREATE TABLE tbl ( 58 | id serial PRIMARY KEY, 59 | val varchar(255))''') 60 | 61 | async def go(): 62 | async with create_engine(user='aiopg', 63 | database='aiopg', 64 | host='127.0.0.1', 65 | password='passwd') as engine: 66 | 67 | async with engine.acquire() as conn: 68 | await conn.execute(tbl.insert().values(val='abc')) 69 | 70 | async for row in conn.execute(tbl.select()): 71 | print(row.id, row.val) 72 | 73 | loop = asyncio.get_event_loop() 74 | loop.run_until_complete(go()) 75 | 76 | .. _PostgreSQL: http://www.postgresql.org/ 77 | .. _asyncio: https://docs.python.org/3/library/asyncio.html 78 | 79 | Please use:: 80 | 81 | $ make test 82 | 83 | for executing the project's unittests. 84 | See https://aiopg.readthedocs.io/en/stable/contributing.html for details 85 | on how to set up your environment to run the tests. 86 | -------------------------------------------------------------------------------- /CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | Instruction for contributors 2 | ============================ 3 | 4 | Developer environment 5 | --------------------- 6 | 7 | First clone the git repo: 8 | 9 | .. code-block:: shell 10 | 11 | $ git clone git@github.com:aio-libs/aiopg.git 12 | $ cd aiopg 13 | 14 | After that you need to create and activate a virtual environment. I 15 | recommend using :term:`virtualenvwrapper` but just :term:`virtualenv` or 16 | :term:`venv` will also work. For :term:`virtualenvwrapper`: 17 | 18 | .. 
code-block:: shell 19 | 20 | $ mkvirtualenv aiopg -p `which python3` 21 | 22 | For `venv` (for example; put the directory wherever you want): 23 | 24 | .. code-block:: shell 25 | 26 | $ python3 -m venv ../venv_directory 27 | $ source ../venv_directory/bin/activate 28 | 29 | Just as when doing a normal install, you need the :term:`libpq` library: 30 | 31 | .. code-block:: shell 32 | 33 | $ sudo apt-get install libpq-dev 34 | 35 | **UPD** 36 | 37 | The latest ``aiopg`` test suite uses docker container for running 38 | Postgres server. See 39 | https://docs.docker.com/engine/installation/linux/ubuntulinux/ for 40 | instructions for Docker installing. 41 | 42 | No local Postgres server needed. 43 | 44 | In the virtual environment you need to install *aiopg* itself and some 45 | additional development tools (the development tools are needed for running 46 | the test suite and other development tasks) 47 | 48 | .. code-block:: shell 49 | 50 | $ pip install -Ue . 51 | $ pip install -Ur requirements.txt 52 | 53 | That's all. 54 | 55 | To run all of the *aiopg* tests do: 56 | 57 | .. code-block:: shell 58 | 59 | $ make test 60 | 61 | This command runs :term:`pep8` and :term:`pyflakes` first and then executes 62 | the *aiopg* unit tests. 63 | 64 | 65 | When you are working on solving an issue you will probably want to run 66 | some specific test, not the whole suite: 67 | 68 | .. code-block:: shell 69 | 70 | $ py.test -s -k test_initial_empty 71 | 72 | For debug sessions I prefer to use :term:`ipdb`, which is installed 73 | as part of the development tools. Insert the following line into your 74 | code in the place where you want to start interactively debugging the 75 | execution process: 76 | 77 | .. code-block:: py3 78 | 79 | import ipdb; ipdb.set_trace() 80 | 81 | The library is reasonably well covered by tests. There is a make 82 | target for generating the coverage report: 83 | 84 | .. 
code-block:: shell 85 | 86 | $ make cov 87 | 88 | 89 | Contribution 90 | ------------ 91 | 92 | I like to get well-formed Pull Requests on github_. The pull request 93 | should include both the code fix and tests for the bug. 94 | 95 | If you cannot make a good test yourself or want to report a problem, 96 | please open an issue at https://github.com/aio-libs/aiopg/issues. 97 | 98 | 99 | 100 | .. _github: https://github.com/ 101 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import re 2 | from pathlib import Path 3 | 4 | from setuptools import setup, find_packages 5 | 6 | install_requires = ["psycopg2-binary>=2.9.5", "async_timeout>=3.0,<5.0"] 7 | extras_require = {"sa": ["sqlalchemy[postgresql_psycopg2binary]>=1.4,<2.1"]} 8 | 9 | 10 | def read(*parts): 11 | return Path(__file__).resolve().parent.joinpath(*parts).read_text().strip() 12 | 13 | 14 | def get_maintainers(path="MAINTAINERS.txt"): 15 | return ", ".join(x.strip().strip("*").strip() for x in read(path).splitlines()) 16 | 17 | 18 | def read_version(): 19 | regexp = re.compile(r"^__version__\W*=\W*\"([\d.abrc]+)\"") 20 | for line in read("aiopg", "__init__.py").splitlines(): 21 | match = regexp.match(line) 22 | if match is not None: 23 | return match.group(1) 24 | 25 | raise RuntimeError("Cannot find version in aiopg/__init__.py") 26 | 27 | 28 | def read_changelog(path="CHANGES.txt"): 29 | return f"Changelog\n---------\n\n{read(path)}" 30 | 31 | 32 | classifiers = [ 33 | "License :: OSI Approved :: BSD License", 34 | "Intended Audience :: Developers", 35 | "Programming Language :: Python :: 3", 36 | "Programming Language :: Python :: 3 :: Only", 37 | "Programming Language :: Python :: 3.8", 38 | "Programming Language :: Python :: 3.9", 39 | "Programming Language :: Python :: 3.10", 40 | "Programming Language :: Python :: 3.11", 41 | "Operating System :: POSIX", 42 | "Operating 
System :: MacOS :: MacOS X", 43 | "Operating System :: Microsoft :: Windows", 44 | "Environment :: Web Environment", 45 | "Development Status :: 5 - Production/Stable", 46 | "Topic :: Database", 47 | "Topic :: Database :: Front-Ends", 48 | "Framework :: AsyncIO", 49 | ] 50 | 51 | setup( 52 | name="aiopg", 53 | version=read_version(), 54 | description="Postgres integration with asyncio.", 55 | long_description="\n\n".join((read("README.rst"), read_changelog())), 56 | long_description_content_type="text/x-rst", 57 | classifiers=classifiers, 58 | platforms=["macOS", "POSIX", "Windows"], 59 | author="Andrew Svetlov", 60 | python_requires=">=3.8", 61 | project_urls={ 62 | "Chat: Gitter": "https://gitter.im/aio-libs/Lobby", 63 | "CI: GA": "https://github.com/aio-libs/aiopg/actions?query=workflow%3ACI", 64 | "Coverage: codecov": "https://codecov.io/gh/aio-libs/aiopg", 65 | "Docs: RTD": "https://aiopg.readthedocs.io", 66 | "GitHub: issues": "https://github.com/aio-libs/aiopg/issues", 67 | "GitHub: repo": "https://github.com/aio-libs/aiopg", 68 | }, 69 | author_email="andrew.svetlov@gmail.com", 70 | maintainer=get_maintainers(), 71 | maintainer_email="virmir49@gmail.com", 72 | url="https://aiopg.readthedocs.io", 73 | download_url="https://pypi.python.org/pypi/aiopg", 74 | license="BSD", 75 | packages=find_packages(), 76 | install_requires=install_requires, 77 | extras_require=extras_require, 78 | include_package_data=True, 79 | ) 80 | -------------------------------------------------------------------------------- /tests/test_sa_default.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | 3 | import pytest 4 | import sqlalchemy as sa 5 | from sqlalchemy.sql.ddl import CreateTable 6 | 7 | meta = sa.MetaData() 8 | tbl = sa.Table( 9 | "sa_tbl4", 10 | meta, 11 | sa.Column("id", sa.Integer, nullable=False, primary_key=True), 12 | sa.Column( 13 | "id_sequence", 14 | sa.Integer, 15 | nullable=False, 16 | 
default=sa.Sequence("id_sequence_seq"), 17 | ), 18 | sa.Column("name", sa.String(255), nullable=False, default="default test"), 19 | sa.Column("count", sa.Integer, default=100, nullable=None), 20 | sa.Column("date", sa.DateTime, default=datetime.datetime.now), 21 | sa.Column("count_str", sa.Integer, default=sa.func.length("abcdef")), 22 | sa.Column("is_active", sa.Boolean, default=True), 23 | ) 24 | 25 | 26 | @pytest.fixture 27 | def engine(make_engine, loop): 28 | async def start(): 29 | engine = await make_engine() 30 | with (await engine) as conn: 31 | await conn.execute("DROP TABLE IF EXISTS sa_tbl4") 32 | await conn.execute("DROP SEQUENCE IF EXISTS id_sequence_seq") 33 | await conn.execute(CreateTable(tbl)) 34 | await conn.execute("CREATE SEQUENCE id_sequence_seq") 35 | 36 | return engine 37 | 38 | return loop.run_until_complete(start()) 39 | 40 | 41 | async def test_default_fields(engine): 42 | with (await engine) as conn: 43 | await conn.execute(tbl.insert().values()) 44 | 45 | res = await conn.execute(tbl.select()) 46 | row = await res.fetchone() 47 | assert row.count == 100 48 | assert row.id_sequence == 1 49 | assert row.count_str == 6 50 | assert row.name == "default test" 51 | assert row.is_active is True 52 | assert type(row.date) == datetime.datetime 53 | 54 | 55 | async def test_default_fields_isnull(engine): 56 | with (await engine) as conn: 57 | await conn.execute( 58 | tbl.insert().values( 59 | is_active=False, 60 | date=None, 61 | ) 62 | ) 63 | 64 | res = await conn.execute(tbl.select()) 65 | row = await res.fetchone() 66 | assert row.count == 100 67 | assert row.id_sequence == 1 68 | assert row.count_str == 6 69 | assert row.name == "default test" 70 | assert row.is_active is False 71 | assert row.date is None 72 | 73 | 74 | async def test_default_fields_edit(engine): 75 | with (await engine) as conn: 76 | date = datetime.datetime.now() 77 | await conn.execute( 78 | tbl.insert().values( 79 | name="edit name", 80 | is_active=False, 81 | 
date=date, 82 | count=1, 83 | ) 84 | ) 85 | 86 | res = await conn.execute(tbl.select()) 87 | row = await res.fetchone() 88 | assert row.count == 1 89 | assert row.id_sequence == 1 90 | assert row.count_str == 6 91 | assert row.name == "edit name" 92 | assert row.is_active is False 93 | assert row.date == date 94 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | description: Create a report to help us improve. 4 | labels: [bug] 5 | assignees: aio-libs/triagers 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: | 10 | **Thanks for taking a minute to file a bug report!** 11 | 12 | ⚠ 13 | Verify first that your issue is not [already reported on 14 | GitHub][issue search]. 15 | 16 | _Please fill out the form below with as many precise 17 | details as possible._ 18 | 19 | [issue search]: ../search?q=is%3Aissue&type=issues 20 | 21 | - type: textarea 22 | attributes: 23 | label: Describe the bug 24 | description: >- 25 | A clear and concise description of what the bug is. 26 | validations: 27 | required: true 28 | 29 | - type: textarea 30 | attributes: 31 | label: To Reproduce 32 | description: >- 33 | Describe the steps to reproduce this bug. 34 | placeholder: | 35 | 1. Have certain environment 36 | 2. Run given code snippet in a certain way 37 | 3. See some behavior described 38 | validations: 39 | required: true 40 | 41 | - type: textarea 42 | attributes: 43 | label: Expected behavior 44 | description: >- 45 | A clear and concise description of what you expected to happen. 46 | validations: 47 | required: true 48 | 49 | - type: textarea 50 | attributes: 51 | label: Logs/tracebacks 52 | description: | 53 | If applicable, add logs/tracebacks to help explain your problem. 54 | Paste the output of the steps above, including the commands 55 | themselves and their output/traceback etc. 
56 | render: python-traceback 57 | validations: 58 | required: true 59 | 60 | - type: textarea 61 | attributes: 62 | label: Python Version 63 | description: Attach your version of Python. 64 | render: console 65 | value: | 66 | $ python --version 67 | validations: 68 | required: true 69 | - type: textarea 70 | attributes: 71 | label: aiopg Version 72 | description: Attach your version of aiopg. 73 | render: console 74 | value: | 75 | $ python -m pip show aiopg 76 | validations: 77 | required: true 78 | 79 | - type: textarea 80 | attributes: 81 | label: OS 82 | placeholder: >- 83 | For example, Arch Linux, Windows, macOS, etc. 84 | validations: 85 | required: true 86 | 87 | - type: textarea 88 | attributes: 89 | label: Additional context 90 | description: | 91 | Add any other context about the problem here. 92 | 93 | Describe the environment you have that lead to your issue. 94 | This includes proxy server and other bits that are related to your case. 95 | 96 | - type: checkboxes 97 | attributes: 98 | label: Code of Conduct 99 | description: | 100 | Read the [aio-libs Code of Conduct][CoC] first. 101 | 102 | [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md 103 | options: 104 | - label: I agree to follow the aio-libs Code of Conduct 105 | required: true 106 | ... 107 | -------------------------------------------------------------------------------- /docs/glossary.rst: -------------------------------------------------------------------------------- 1 | .. _glossary: 2 | 3 | 4 | ======== 5 | Glossary 6 | ======== 7 | 8 | .. if you add new entries, keep the alphabetical sorting! 9 | 10 | .. glossary:: 11 | 12 | DBAPI 13 | 14 | :pep:`249` -- Python Database API Specification v2.0 15 | 16 | ipdb 17 | 18 | ipdb exports functions to access the IPython debugger, which 19 | features tab completion, syntax highlighting, better tracebacks, 20 | better introspection with the same interface as the pdb module. 
21 | 22 | libpq 23 | 24 | The standard C library to communicate with :term:`PostgreSQL` server. 25 | 26 | http://www.postgresql.org/docs/9.3/interactive/libpq.html 27 | 28 | pep8 29 | 30 | Python style guide checker 31 | 32 | *pep8* is a tool to check your Python code against some of the 33 | style conventions in :pep:`8` -- Style Guide for Python Code. 34 | 35 | PostgreSQL 36 | 37 | A popular database server. 38 | 39 | http://www.postgresql.org/ 40 | 41 | PostgreSQL Error Codes 42 | 43 | All messages emitted by the PostgreSQL server are assigned 44 | five-character error codes that follow the 45 | SQL standard's conventions for “SQLSTATE” codes. 46 | 47 | https://www.postgresql.org/docs/current/errcodes-appendix.html#ERRCODES-TABLE 48 | 49 | psycopg2-binary 50 | 51 | Psycopg is the most popular PostgreSQL database adapter for 52 | the Python programming language. 53 | Its main features are the complete implementation of 54 | the Python DB API 2.0 specification and the thread safety 55 | (several threads can share the same connection). 56 | 57 | https://pypi.org/project/psycopg2-binary/ 58 | 59 | pyflakes 60 | 61 | passive checker of Python programs 62 | 63 | A simple program which checks Python source files for errors. 64 | 65 | Pyflakes analyzes programs and detects various errors. It works 66 | by parsing the source file, not importing it, so it is safe to 67 | use on modules with side effects. It's also much faster. 68 | 69 | https://pypi.python.org/pypi/pyflakes 70 | 71 | sqlalchemy 72 | 73 | The Python SQL Toolkit and Object Relational Mapper. 74 | 75 | http://www.sqlalchemy.org/ 76 | 77 | venv 78 | 79 | standard python module for creating lightweight “virtual 80 | environments” with their own site directories, optionally 81 | isolated from system site directories. 
Each virtual environment 82 | has its own Python binary (allowing creation of environments 83 | with various Python versions) and can have its own independent 84 | set of installed Python packages in its site directories. 85 | 86 | https://docs.python.org/dev/library/venv.html 87 | 88 | virtualenv 89 | 90 | The tool to create isolated Python environments. It's not 91 | included in the Python standard library, but it is a very popular tool. 92 | 93 | :term:`venv` and :term:`virtualenv` do almost the same thing; *venv* 94 | was developed after *virtualenv*. 95 | 96 | https://virtualenv.readthedocs.io/en/latest/ 97 | 98 | virtualenvwrapper 99 | 100 | virtualenvwrapper is a set of extensions to Ian Bicking’s 101 | :term:`virtualenv` tool. The extensions include wrappers for 102 | creating and deleting virtual environments and otherwise 103 | managing your development workflow, making it easier to work on 104 | more than one project at a time without introducing conflicts in 105 | their dependencies. 106 | 107 | virtualenvwrapper is my choice; I highly recommend the tool to everyone.
108 | 109 | https://virtualenvwrapper.readthedocs.io/en/latest/ 110 | -------------------------------------------------------------------------------- /examples/simple_sa_transaction.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import sqlalchemy as sa 4 | from sqlalchemy.schema import CreateTable, DropTable 5 | 6 | from aiopg.sa import create_engine 7 | 8 | metadata = sa.MetaData() 9 | 10 | users = sa.Table( 11 | "users_sa_transaction", 12 | metadata, 13 | sa.Column("id", sa.Integer, primary_key=True), 14 | sa.Column("name", sa.String(255)), 15 | ) 16 | 17 | 18 | async def create_sa_transaction_tables(conn): 19 | await conn.execute(DropTable(users, if_exists=True)) 20 | await conn.execute(CreateTable(users)) 21 | 22 | 23 | async def check_count_users(conn, *, count): 24 | s_query = sa.select(users).select_from(users) 25 | assert count == len(list(await (await conn.execute(s_query)).fetchall())) 26 | 27 | 28 | async def success_transaction(conn): 29 | await check_count_users(conn, count=0) 30 | 31 | async with conn.begin(): 32 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 33 | await conn.execute(sa.insert(users).values(id=2, name="test2")) 34 | 35 | await check_count_users(conn, count=2) 36 | 37 | async with conn.begin(): 38 | await conn.execute(sa.delete(users).where(users.c.id == 1)) 39 | await conn.execute(sa.delete(users).where(users.c.id == 2)) 40 | 41 | await check_count_users(conn, count=0) 42 | 43 | 44 | async def fail_transaction(conn): 45 | await check_count_users(conn, count=0) 46 | 47 | trans = await conn.begin() 48 | 49 | try: 50 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 51 | raise RuntimeError() 52 | 53 | except RuntimeError: 54 | await trans.rollback() 55 | else: 56 | await trans.commit() 57 | 58 | await check_count_users(conn, count=0) 59 | 60 | 61 | async def success_nested_transaction(conn): 62 | await check_count_users(conn, count=0) 63 | 
64 | async with conn.begin_nested(): 65 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 66 | 67 | async with conn.begin_nested(): 68 | await conn.execute(sa.insert(users).values(id=2, name="test2")) 69 | 70 | await check_count_users(conn, count=2) 71 | 72 | async with conn.begin(): 73 | await conn.execute(sa.delete(users).where(users.c.id == 1)) 74 | await conn.execute(sa.delete(users).where(users.c.id == 2)) 75 | 76 | await check_count_users(conn, count=0) 77 | 78 | 79 | async def fail_nested_transaction(conn): 80 | await check_count_users(conn, count=0) 81 | 82 | async with conn.begin_nested(): 83 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 84 | 85 | tr_f = await conn.begin_nested() 86 | try: 87 | await conn.execute(sa.insert(users).values(id=2, name="test2")) 88 | raise RuntimeError() 89 | 90 | except RuntimeError: 91 | await tr_f.rollback() 92 | else: 93 | await tr_f.commit() 94 | 95 | async with conn.begin_nested(): 96 | await conn.execute(sa.insert(users).values(id=2, name="test2")) 97 | 98 | await check_count_users(conn, count=2) 99 | 100 | async with conn.begin(): 101 | await conn.execute(sa.delete(users).where(users.c.id == 1)) 102 | await conn.execute(sa.delete(users).where(users.c.id == 2)) 103 | 104 | await check_count_users(conn, count=0) 105 | 106 | 107 | async def fail_first_nested_transaction(conn): 108 | trans = await conn.begin_nested() 109 | 110 | try: 111 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 112 | 113 | async with conn.begin_nested(): 114 | await conn.execute(sa.insert(users).values(id=2, name="test2")) 115 | 116 | async with conn.begin_nested(): 117 | await conn.execute(sa.insert(users).values(id=3, name="test3")) 118 | 119 | raise RuntimeError() 120 | 121 | except RuntimeError: 122 | await trans.rollback() 123 | else: 124 | await trans.commit() 125 | 126 | await check_count_users(conn, count=0) 127 | 128 | 129 | async def go(): 130 | engine = await create_engine( 131 | 
user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 132 | ) 133 | async with engine: 134 | async with engine.acquire() as conn: 135 | await create_sa_transaction_tables(conn) 136 | 137 | await success_transaction(conn) 138 | await fail_transaction(conn) 139 | 140 | await success_nested_transaction(conn) 141 | await fail_nested_transaction(conn) 142 | await fail_first_nested_transaction(conn) 143 | 144 | 145 | asyncio.run(go()) 146 | -------------------------------------------------------------------------------- /examples/sa.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import datetime 3 | import random 4 | 5 | import sqlalchemy as sa 6 | 7 | from aiopg.sa import create_engine 8 | 9 | metadata = sa.MetaData() 10 | 11 | users = sa.Table( 12 | "users", 13 | metadata, 14 | sa.Column("id", sa.Integer, primary_key=True), 15 | sa.Column("name", sa.String(255)), 16 | sa.Column("birthday", sa.DateTime), 17 | ) 18 | 19 | emails = sa.Table( 20 | "emails", 21 | metadata, 22 | sa.Column("id", sa.Integer, primary_key=True), 23 | sa.Column("user_id", None, sa.ForeignKey("users.id")), 24 | sa.Column("email", sa.String(255), nullable=False), 25 | sa.Column("private", sa.Boolean, nullable=False), 26 | ) 27 | 28 | 29 | async def create_tables(conn): 30 | await conn.execute("DROP TABLE IF EXISTS emails") 31 | await conn.execute("DROP TABLE IF EXISTS users") 32 | await conn.execute( 33 | """CREATE TABLE users ( 34 | id serial PRIMARY KEY, 35 | name varchar(255), 36 | birthday timestamp)""" 37 | ) 38 | await conn.execute( 39 | """CREATE TABLE emails ( 40 | id serial, 41 | user_id int references users(id), 42 | email varchar(253), 43 | private bool)""" 44 | ) 45 | 46 | 47 | names = { 48 | "Andrew", 49 | "Bob", 50 | "John", 51 | "Vitaly", 52 | "Alex", 53 | "Lina", 54 | "Olga", 55 | "Doug", 56 | "Julia", 57 | "Matt", 58 | "Jessica", 59 | "Nick", 60 | "Dave", 61 | "Martin", 62 | "Abbi", 63 | "Eva", 64 | "Lori", 
65 | "Rita", 66 | "Rosa", 67 | "Ivy", 68 | "Clare", 69 | "Maria", 70 | "Jenni", 71 | "Margo", 72 | "Anna", 73 | } 74 | 75 | 76 | def gen_birthday(): 77 | now = datetime.datetime.now() 78 | year = random.randint(now.year - 30, now.year - 20) 79 | month = random.randint(1, 12) 80 | day = random.randint(1, 28) 81 | return datetime.datetime(year, month, day) 82 | 83 | 84 | async def fill_data(conn): 85 | async with conn.begin(): 86 | for name in random.sample(names, len(names)): 87 | uid = await conn.scalar( 88 | users.insert().values(name=name, birthday=gen_birthday()) 89 | ) 90 | emails_count = int(random.paretovariate(2)) 91 | for num in random.sample(range(10000), emails_count): 92 | is_private = random.uniform(0, 1) < 0.8 93 | await conn.execute( 94 | emails.insert().values( 95 | user_id=uid, 96 | email=f"{name}+{num}@gmail.com", 97 | private=is_private, 98 | ) 99 | ) 100 | 101 | 102 | async def count(conn): 103 | c1 = await conn.scalar(sa.select(sa.func.count(users.c.id))) 104 | c2 = await conn.scalar(sa.select(sa.func.count(emails.c.id))) 105 | print("Population consists of", c1, "people with", c2, "emails in total") 106 | join = sa.join(emails, users, users.c.id == emails.c.user_id) 107 | query = ( 108 | sa.select(users.c.name) 109 | .select_from(join) 110 | .where(emails.c.private == False) # noqa 111 | .group_by(users.c.name) 112 | .having(sa.func.count(emails.c.private) > 0) 113 | ) 114 | 115 | print("Users with public emails:") 116 | async for row in conn.execute(query): 117 | print(row.name) 118 | 119 | print() 120 | 121 | 122 | async def show_julia(conn): 123 | print("Lookup for Julia:") 124 | query = ( 125 | sa.select(users, emails) 126 | .join(emails, users.c.id == emails.c.user_id) 127 | .where(users.c.name == "Julia") 128 | .set_label_style(sa.LABEL_STYLE_TABLENAME_PLUS_COL) 129 | ) 130 | async for row in conn.execute(query): 131 | print( 132 | row.users_name, 133 | row.users_birthday, 134 | row.emails_email, 135 | row.emails_private, 136 | ) 137 | 
print() 138 | 139 | 140 | async def ave_age(conn): 141 | query = sa.select(sa.func.avg(sa.func.age(users.c.birthday))) 142 | ave = await conn.scalar(query) 143 | print( 144 | "Average age of population is", 145 | ave, 146 | "or ~", 147 | int(ave.days / 365), 148 | "years", 149 | ) 150 | print() 151 | 152 | 153 | async def go(): 154 | engine = await create_engine( 155 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 156 | ) 157 | async with engine: 158 | async with engine.acquire() as conn: 159 | await create_tables(conn) 160 | await fill_data(conn) 161 | await count(conn) 162 | await show_julia(conn) 163 | await ave_age(conn) 164 | 165 | 166 | asyncio.run(go()) 167 | -------------------------------------------------------------------------------- /tests/test_sa_types.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | import psycopg2 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, Table, types 6 | from sqlalchemy.dialects.postgresql import ARRAY, ENUM, HSTORE, JSON 7 | from sqlalchemy.schema import CreateTable, DropTable 8 | 9 | sa = pytest.importorskip("aiopg.sa") # noqa 10 | 11 | meta = MetaData() 12 | 13 | 14 | class SimpleEnum(Enum): 15 | first = "first" 16 | second = "second" 17 | 18 | 19 | class PythonEnum(types.TypeDecorator): 20 | impl = types.Enum 21 | 22 | def __init__(self, python_enum_type, **kwargs): 23 | self.python_enum_type = python_enum_type 24 | self.kwargs = kwargs 25 | enum_args = [x.value for x in python_enum_type] 26 | super().__init__(*enum_args, **self.kwargs) 27 | 28 | def process_bind_param(self, value, dialect): 29 | return value.value 30 | 31 | def process_result_value(self, value: str, dialect): 32 | for __, case in self.python_enum_type.__members__.items(): 33 | if case.value == value: 34 | return case 35 | raise TypeError( 36 | f"Cannot map Enum value {value!r} " 37 | f"to Python's {self.python_enum_type}" 38 | ) 39 | 40 | 
def copy(self): 41 | return PythonEnum(self.python_enum_type, **self.kwargs) 42 | 43 | 44 | tbl = Table( 45 | "sa_tbl_types", 46 | meta, 47 | Column("id", Integer, nullable=False, primary_key=True), 48 | Column("json_val", JSON), 49 | Column("array_val", ARRAY(Integer)), 50 | Column("hstore_val", HSTORE), 51 | Column("pyt_enum_val", PythonEnum(SimpleEnum, name="simple_enum")), 52 | Column("enum_val", ENUM("first", "second", name="simple_enum")), 53 | ) 54 | 55 | tbl2 = Table( 56 | "sa_tbl_types2", 57 | meta, 58 | Column("id", Integer, nullable=False, primary_key=True), 59 | Column("json_val", JSON), 60 | Column("array_val", ARRAY(Integer)), 61 | Column("pyt_enum_val", PythonEnum(SimpleEnum, name="simple_enum")), 62 | Column("enum_val", ENUM("first", "second", name="simple_enum")), 63 | ) 64 | 65 | 66 | @pytest.fixture 67 | def connect(make_engine): 68 | async def go(**kwargs): 69 | engine = await make_engine(**kwargs) 70 | with (await engine) as conn: 71 | try: 72 | await conn.execute(DropTable(tbl)) 73 | except psycopg2.ProgrammingError: 74 | pass 75 | try: 76 | await conn.execute(DropTable(tbl2)) 77 | except psycopg2.ProgrammingError: 78 | pass 79 | await conn.execute("DROP TYPE IF EXISTS simple_enum CASCADE;") 80 | await conn.execute( 81 | """CREATE TYPE simple_enum AS ENUM 82 | ('first', 'second');""" 83 | ) 84 | try: 85 | await conn.execute(CreateTable(tbl)) 86 | ret_tbl = tbl 87 | has_hstore = True 88 | except psycopg2.ProgrammingError: 89 | await conn.execute(CreateTable(tbl2)) 90 | ret_tbl = tbl2 91 | has_hstore = False 92 | return engine, ret_tbl, has_hstore 93 | 94 | yield go 95 | 96 | 97 | async def test_json(connect): 98 | engine, tbl, has_hstore = await connect() 99 | data = {"a": 1, "b": "name"} 100 | with (await engine) as conn: 101 | await conn.execute(tbl.insert().values(json_val=data)) 102 | 103 | ret = await conn.execute(tbl.select()) 104 | item = await ret.fetchone() 105 | assert data == item["json_val"] 106 | 107 | 108 | async def 
test_array(connect): 109 | engine, tbl, has_hstore = await connect() 110 | data = [1, 2, 3] 111 | with (await engine) as conn: 112 | await conn.execute(tbl.insert().values(array_val=data)) 113 | 114 | ret = await conn.execute(tbl.select()) 115 | item = await ret.fetchone() 116 | assert data == item["array_val"] 117 | 118 | 119 | async def test_hstore(connect): 120 | engine, tbl, has_hstore = await connect() 121 | if not has_hstore: 122 | raise pytest.skip("hstore is not supported") 123 | data = {"a": "str", "b": "name"} 124 | with (await engine) as conn: 125 | await conn.execute(tbl.insert().values(hstore_val=data)) 126 | 127 | ret = await conn.execute(tbl.select()) 128 | item = await ret.fetchone() 129 | assert data == item["hstore_val"] 130 | 131 | 132 | async def test_enum(connect): 133 | engine, tbl, has_hstore = await connect() 134 | with (await engine) as conn: 135 | await conn.execute(tbl.insert().values(enum_val="second")) 136 | 137 | ret = await conn.execute(tbl.select()) 138 | item = await ret.fetchone() 139 | assert "second" == item["enum_val"] 140 | 141 | 142 | async def test_pyenum(connect): 143 | engine, tbl, has_hstore = await connect() 144 | with (await engine) as conn: 145 | await conn.execute(tbl.insert().values(pyt_enum_val=SimpleEnum.first)) 146 | 147 | ret = await conn.execute(tbl.select()) 148 | item = await ret.fetchone() 149 | assert SimpleEnum.first == item.pyt_enum_val 150 | -------------------------------------------------------------------------------- /examples/isolation_sa_transaction.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import sqlalchemy as sa 4 | from psycopg2 import InternalError 5 | from psycopg2.extensions import TransactionRollbackError 6 | from sqlalchemy.sql.ddl import CreateTable, DropTable 7 | 8 | from aiopg.sa import create_engine 9 | 10 | metadata = sa.MetaData() 11 | 12 | users = sa.Table( 13 | "users_sa_isolation_transaction", 14 | metadata, 15 | 
sa.Column("id", sa.Integer, primary_key=True), 16 | sa.Column("name", sa.String(255)), 17 | ) 18 | 19 | 20 | async def create_sa_transaction_tables(conn): 21 | await conn.execute(DropTable(users, if_exists=True)) 22 | await conn.execute(CreateTable(users)) 23 | 24 | 25 | async def repea_sa_transaction(conn, conn2): 26 | isolation_level = "REPEATABLE READ" 27 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 28 | t1 = await conn.begin(isolation_level=isolation_level) 29 | 30 | where = users.c.id == 1 31 | q_user = users.select().where(where) 32 | user = await (await conn.execute(q_user)).fetchone() 33 | 34 | assert await (await conn2.execute(q_user)).fetchone() == user 35 | 36 | await conn.execute(sa.update(users).values({"name": "name2"}).where(where)) 37 | 38 | t2 = await conn2.begin(isolation_level=isolation_level) 39 | assert await (await conn2.execute(q_user)).fetchone() == user 40 | 41 | await t1.commit() 42 | 43 | await conn2.execute(users.insert().values({"id": 2, "name": "test"})) 44 | 45 | try: 46 | await conn2.execute( 47 | sa.update(users).values({"name": "t"}).where(where) 48 | ) 49 | except TransactionRollbackError as e: 50 | assert e.pgcode == "40001" 51 | 52 | await t2.commit() 53 | 54 | assert len(await (await conn2.execute(q_user)).fetchall()) == 1 55 | await conn.execute(sa.delete(users)) 56 | assert len(await (await conn.execute(users.select())).fetchall()) == 0 57 | 58 | 59 | async def serializable_sa_transaction(conn, conn2): 60 | isolation_level = "SERIALIZABLE" 61 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 62 | t1 = await conn.begin(isolation_level=isolation_level) 63 | 64 | where = users.c.id == 1 65 | q_user = users.select().where(where) 66 | user = await (await conn.execute(q_user)).fetchone() 67 | 68 | assert await (await conn2.execute(q_user)).fetchone() == user 69 | 70 | await conn.execute(sa.update(users).values({"name": "name2"}).where(where)) 71 | 72 | t2 = await 
conn2.begin(isolation_level=isolation_level) 73 | assert await (await conn2.execute(q_user)).fetchone() == user 74 | 75 | await t1.commit() 76 | 77 | try: 78 | await conn2.execute(users.insert().values({"id": 2, "name": "test"})) 79 | except TransactionRollbackError as e: 80 | assert e.pgcode == "40001" 81 | 82 | try: 83 | await conn2.execute(users.update().values({"name": "t"}).where(where)) 84 | except InternalError as e: 85 | assert e.pgcode == "25P02" 86 | 87 | await t2.commit() 88 | 89 | user = dict(await (await conn2.execute(q_user)).fetchone()) 90 | assert user == {"name": "name2", "id": 1} 91 | 92 | await conn.execute(sa.delete(users)) 93 | assert len(await (await conn.execute(users.select())).fetchall()) == 0 94 | 95 | 96 | async def read_only_read_sa_transaction(conn, deferrable): 97 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 98 | t1 = await conn.begin( 99 | isolation_level="SERIALIZABLE", readonly=True, deferrable=deferrable 100 | ) 101 | 102 | where = users.c.id == 1 103 | 104 | try: 105 | await conn.execute(sa.update(users).values({"name": "t"}).where(where)) 106 | except InternalError as e: 107 | assert e.pgcode == "25006" 108 | 109 | await t1.commit() 110 | 111 | await conn.execute(sa.delete(users)) 112 | assert len(await (await conn.execute(users.select())).fetchall()) == 0 113 | 114 | 115 | async def isolation_read_sa_transaction(conn, conn2): 116 | await conn.execute(sa.insert(users).values(id=1, name="test1")) 117 | t1 = await conn.begin() 118 | 119 | where = users.c.id == 1 120 | q_user = users.select().where(where) 121 | user = await (await conn.execute(q_user)).fetchone() 122 | 123 | assert await (await conn2.execute(q_user)).fetchone() == user 124 | 125 | await conn.execute(sa.update(users).values({"name": "name2"}).where(where)) 126 | 127 | t2 = await conn2.begin() 128 | assert await (await conn2.execute(q_user)).fetchone() == user 129 | 130 | await t1.commit() 131 | 132 | await 
conn2.execute(sa.update(users).values(user).where(where)) 133 | await t2.commit() 134 | 135 | assert await (await conn2.execute(q_user)).fetchone() == user 136 | 137 | await conn.execute(sa.delete(users)) 138 | assert len(await (await conn.execute(users.select())).fetchall()) == 0 139 | 140 | 141 | async def go(): 142 | engine = await create_engine( 143 | user="aiopg", database="aiopg", host="127.0.0.1", password="passwd" 144 | ) 145 | async with engine: 146 | async with engine.acquire() as conn: 147 | await create_sa_transaction_tables(conn) 148 | 149 | async with engine.acquire() as conn: 150 | await read_only_read_sa_transaction(conn, True) 151 | await read_only_read_sa_transaction(conn, False) 152 | 153 | async with engine.acquire() as conn2: 154 | await repea_sa_transaction(conn, conn2) 155 | await serializable_sa_transaction(conn, conn2) 156 | await isolation_read_sa_transaction(conn, conn2) 157 | 158 | 159 | asyncio.run(go()) 160 | -------------------------------------------------------------------------------- /tests/test_sa_engine.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import psycopg2 4 | import pytest 5 | from psycopg2.extensions import parse_dsn 6 | from sqlalchemy import Column, Integer, MetaData, String, Table 7 | 8 | from aiopg.connection import TIMEOUT 9 | 10 | sa = pytest.importorskip("aiopg.sa") # noqa 11 | 12 | 13 | meta = MetaData() 14 | tbl = Table( 15 | "sa_tbl3", 16 | meta, 17 | Column("id", Integer, nullable=False, primary_key=True), 18 | Column("name", String(255)), 19 | ) 20 | 21 | 22 | @pytest.fixture 23 | def engine(make_engine, loop): 24 | async def start(): 25 | engine = await make_engine() 26 | async with engine.acquire() as conn: 27 | await conn.execute("DROP TABLE IF EXISTS sa_tbl3") 28 | await conn.execute( 29 | "CREATE TABLE sa_tbl3 " "(id serial, name varchar(255))" 30 | ) 31 | return engine 32 | 33 | return loop.run_until_complete(start()) 34 | 35 | 36 | def 
test_dialect(engine): 37 | assert sa.engine._dialect is engine.dialect 38 | 39 | 40 | def test_name(engine): 41 | assert "postgresql" == engine.name 42 | 43 | 44 | def test_driver(engine): 45 | assert "psycopg2" == engine.driver 46 | 47 | 48 | def test_dsn(engine, pg_params): 49 | params = pg_params.copy() 50 | params["password"] = "xxx" 51 | params["dbname"] = params.pop("database") 52 | params["port"] = str(params["port"]) 53 | assert parse_dsn(engine.dsn) == params 54 | 55 | 56 | def test_minsize(engine): 57 | assert 1 == engine.minsize 58 | 59 | 60 | def test_maxsize(engine): 61 | assert 10 == engine.maxsize 62 | 63 | 64 | def test_size(engine): 65 | assert 1 == engine.size 66 | 67 | 68 | def test_freesize(engine): 69 | assert 1 == engine.freesize 70 | 71 | 72 | async def test_make_engine_with_default_loop(make_engine, loop): 73 | asyncio.set_event_loop(loop) 74 | engine = await make_engine() 75 | engine.close() 76 | await engine.wait_closed() 77 | 78 | 79 | def test_not_context_manager(engine): 80 | with pytest.raises(RuntimeError): 81 | with engine: 82 | pass 83 | 84 | 85 | async def test_release_transacted(engine): 86 | conn = await engine.acquire() 87 | tr = await conn.begin() 88 | with pytest.warns(ResourceWarning, match="Invalid transaction status"): 89 | await engine.release(conn) 90 | del tr 91 | assert conn.closed 92 | 93 | 94 | def test_timeout(engine): 95 | assert TIMEOUT == engine.timeout 96 | 97 | 98 | async def test_timeout_override(make_engine): 99 | timeout = 1 100 | engine = await make_engine(timeout=timeout) 101 | assert timeout == engine.timeout 102 | conn = await engine.acquire() 103 | with pytest.raises(asyncio.TimeoutError): 104 | await conn.execute("SELECT pg_sleep(10)") 105 | 106 | engine.terminate() 107 | await engine.wait_closed() 108 | 109 | 110 | async def test_cannot_acquire_after_closing(make_engine): 111 | engine = await make_engine() 112 | engine.close() 113 | 114 | with pytest.raises(RuntimeError): 115 | await engine.acquire() 
116 | 117 | await engine.wait_closed() 118 | 119 | 120 | async def test_wait_closed(make_engine): 121 | engine = await make_engine(minsize=10) 122 | 123 | c1 = await engine.acquire() 124 | c2 = await engine.acquire() 125 | assert 10 == engine.size 126 | assert 8 == engine.freesize 127 | 128 | ops = [] 129 | 130 | async def do_release(conn): 131 | await asyncio.sleep(0) 132 | engine.release(conn) 133 | ops.append("release") 134 | 135 | async def wait_closed(): 136 | await engine.wait_closed() 137 | ops.append("wait_closed") 138 | 139 | engine.close() 140 | await asyncio.gather(wait_closed(), do_release(c1), do_release(c2)) 141 | assert ["release", "release", "wait_closed"] == ops 142 | assert 0 == engine.freesize 143 | 144 | 145 | async def test_terminate_with_acquired_connections(make_engine): 146 | engine = await make_engine() 147 | conn = await engine.acquire() 148 | engine.terminate() 149 | await engine.wait_closed() 150 | 151 | assert conn.closed 152 | 153 | 154 | async def test_release_after_connection_disconnected_before_select( 155 | tcp_proxy, unused_port, pg_params, make_engine 156 | ): 157 | server_port = pg_params["port"] 158 | proxy_port = unused_port() 159 | 160 | tcp_proxy = await tcp_proxy(proxy_port, server_port) 161 | engine = await make_engine(port=proxy_port) 162 | 163 | with pytest.raises((psycopg2.InterfaceError, psycopg2.OperationalError)): 164 | with pytest.warns(ResourceWarning, match="Invalid transaction status"): 165 | async with engine.acquire() as conn, conn.begin(): 166 | await conn.execute("SELECT 1;") 167 | await tcp_proxy.disconnect() 168 | await conn.execute("SELECT 1;") 169 | 170 | assert engine.size == 0 171 | 172 | 173 | async def test_release_after_connection_disconnected_before_begin( 174 | tcp_proxy, unused_port, pg_params, make_engine 175 | ): 176 | server_port = pg_params["port"] 177 | proxy_port = unused_port() 178 | 179 | tcp_proxy = await tcp_proxy(proxy_port, server_port) 180 | engine = await make_engine(port=proxy_port) 
181 | 182 | with pytest.raises((psycopg2.InterfaceError, psycopg2.OperationalError)): 183 | with pytest.warns(ResourceWarning, match="Invalid transaction status"): 184 | async with engine.acquire() as conn: 185 | await conn.execute("SELECT 1;") 186 | await tcp_proxy.disconnect() 187 | async with conn.begin(): 188 | pytest.fail("Should not be here") 189 | 190 | assert engine.size == 0 191 | -------------------------------------------------------------------------------- /aiopg/utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from types import TracebackType 3 | from typing import ( 4 | Any, 5 | Awaitable, 6 | Callable, 7 | Coroutine, 8 | Generator, 9 | Generic, 10 | Optional, 11 | Type, 12 | TypeVar, 13 | Union, 14 | ) 15 | 16 | 17 | def get_running_loop() -> asyncio.AbstractEventLoop: 18 | return asyncio.get_running_loop() 19 | 20 | 21 | def create_completed_future( 22 | loop: asyncio.AbstractEventLoop, 23 | ) -> "asyncio.Future[Any]": 24 | future = loop.create_future() 25 | future.set_result(None) 26 | return future 27 | 28 | 29 | _TObj = TypeVar("_TObj") 30 | _Release = Callable[[_TObj], Awaitable[None]] 31 | 32 | 33 | class _ContextManager(Coroutine[Any, None, _TObj], Generic[_TObj]): 34 | __slots__ = ("_coro", "_obj", "_release", "_release_on_exception") 35 | 36 | def __init__( 37 | self, 38 | coro: Coroutine[Any, None, _TObj], 39 | release: _Release[_TObj], 40 | release_on_exception: Optional[_Release[_TObj]] = None, 41 | ): 42 | self._coro = coro 43 | self._obj: Optional[_TObj] = None 44 | self._release = release 45 | self._release_on_exception = ( 46 | release if release_on_exception is None else release_on_exception 47 | ) 48 | 49 | def send(self, value: Any) -> "Any": 50 | return self._coro.send(value) 51 | 52 | def throw( # type: ignore 53 | self, 54 | typ: Type[BaseException], 55 | val: Optional[Union[BaseException, object]] = None, 56 | tb: Optional[TracebackType] = None, 57 | ) -> Any: 58 
| if val is None: 59 | return self._coro.throw(typ) 60 | if tb is None: 61 | return self._coro.throw(typ, val) 62 | return self._coro.throw(typ, val, tb) 63 | 64 | def close(self) -> None: 65 | self._coro.close() 66 | 67 | def __await__(self) -> Generator[Any, None, _TObj]: 68 | return self._coro.__await__() 69 | 70 | async def __aenter__(self) -> _TObj: 71 | self._obj = await self._coro 72 | assert self._obj 73 | return self._obj 74 | 75 | async def __aexit__( 76 | self, 77 | exc_type: Optional[Type[BaseException]], 78 | exc: Optional[BaseException], 79 | tb: Optional[TracebackType], 80 | ) -> None: 81 | if self._obj is None: 82 | return 83 | 84 | try: 85 | if exc_type is not None: 86 | await self._release_on_exception(self._obj) 87 | else: 88 | await self._release(self._obj) 89 | finally: 90 | self._obj = None 91 | 92 | 93 | class _IterableContextManager(_ContextManager[_TObj]): 94 | __slots__ = () 95 | 96 | def __init__(self, *args: Any, **kwargs: Any): 97 | super().__init__(*args, **kwargs) 98 | 99 | def __aiter__(self) -> "_IterableContextManager[_TObj]": 100 | return self 101 | 102 | async def __anext__(self) -> _TObj: 103 | if self._obj is None: 104 | self._obj = await self._coro 105 | 106 | try: 107 | return await self._obj.__anext__() # type: ignore 108 | except StopAsyncIteration: 109 | try: 110 | await self._release(self._obj) 111 | finally: 112 | self._obj = None 113 | raise 114 | 115 | 116 | class ClosableQueue: 117 | """ 118 | Proxy object for an asyncio.Queue that is "closable" 119 | 120 | When the ClosableQueue is closed, with an exception object as parameter, 121 | subsequent or ongoing attempts to read from the queue will result in that 122 | exception being result in that exception being raised. 123 | 124 | Note: closing a queue with exception will still allow to read any items 125 | pending in the queue. The close exception is raised only once all items 126 | are consumed. 
127 | """ 128 | 129 | __slots__ = ("_loop", "_queue", "_close_event") 130 | 131 | def __init__( 132 | self, 133 | queue: asyncio.Queue, # type: ignore 134 | loop: asyncio.AbstractEventLoop, 135 | ): 136 | self._loop = loop 137 | self._queue = queue 138 | self._close_event = loop.create_future() 139 | # suppress Future exception was never retrieved 140 | self._close_event.add_done_callback(lambda f: f.exception()) 141 | 142 | def close(self, exception: Exception) -> None: 143 | if self._close_event.done(): 144 | return 145 | self._close_event.set_exception(exception) 146 | 147 | async def get(self) -> Any: 148 | if self._close_event.done(): 149 | try: 150 | return self._queue.get_nowait() 151 | except asyncio.QueueEmpty: 152 | return self._close_event.result() 153 | 154 | get = asyncio.ensure_future(self._queue.get(), loop=self._loop) 155 | try: 156 | await asyncio.wait( 157 | [get, self._close_event], return_when=asyncio.FIRST_COMPLETED 158 | ) 159 | except asyncio.CancelledError: 160 | get.cancel() 161 | raise 162 | 163 | if get.done(): 164 | return get.result() 165 | 166 | try: 167 | return self._close_event.result() 168 | finally: 169 | get.cancel() 170 | 171 | def empty(self) -> bool: 172 | return self._queue.empty() 173 | 174 | def qsize(self) -> int: 175 | return self._queue.qsize() 176 | 177 | def get_nowait(self) -> Any: 178 | if self._close_event.done(): 179 | try: 180 | return self._queue.get_nowait() 181 | except asyncio.QueueEmpty: 182 | return self._close_event.result() 183 | 184 | return self._queue.get_nowait() 185 | -------------------------------------------------------------------------------- /aiopg/sa/transaction.py: -------------------------------------------------------------------------------- 1 | from . import exc 2 | 3 | 4 | class Transaction: 5 | """Represent a database transaction in progress. 
class Transaction:
    """A database transaction that is currently in progress.

    Obtained from ``SAConnection.begin()``:

        async with engine as conn:
            trans = await conn.begin()
            try:
                await conn.execute("insert into x (a, b) values (1, 2)")
            except Exception:
                await trans.rollback()
            else:
                await trans.commit()

    Transaction boundaries are controlled through the ``.commit()`` and
    ``.rollback()`` coroutines.

    See also: SAConnection.begin(), SAConnection.begin_twophase(),
    SAConnection.begin_nested().
    """

    __slots__ = ("_connection", "_parent", "_is_active")

    def __init__(self, connection, parent) -> None:
        self._connection = connection
        # A root transaction acts as its own parent.
        self._parent = parent if parent else self
        self._is_active = True

    @property
    def is_active(self) -> bool:
        """``True`` until the transaction is committed, rolled back or closed."""
        return self._is_active

    @property
    def connection(self):
        """The SAConnection this transaction belongs to."""
        return self._connection

    async def close(self) -> None:
        """Cancel this transaction without affecting any enclosing one.

        Only the outermost (root) transaction actually rolls back; a
        nested level is merely marked inactive.
        """
        if not self._parent._is_active:
            return
        if self._parent is not self:
            self._is_active = False
            return
        await self.rollback()

    async def rollback(self) -> None:
        """Roll back this transaction (no-op when already inactive)."""
        if self._parent._is_active:
            await self._do_rollback()
            self._is_active = False

    async def _do_rollback(self) -> None:
        # Default behaviour: delegate to the enclosing transaction.
        await self._parent.rollback()

    async def commit(self) -> None:
        """Commit this transaction.

        Raises:
            exc.InvalidRequestError: when the transaction is inactive.
        """
        if not self._parent._is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        await self._do_commit()
        self._is_active = False

    async def _do_commit(self) -> None:
        # Subclasses override this; the base class itself does nothing.
        pass

    async def __aenter__(self) -> "Transaction":
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        # Roll back when an exception escaped the body; otherwise commit
        # unless the transaction was already finished inside the body.
        if exc_type is not None:
            await self.rollback()
            return
        if self._is_active:
            await self.commit()


class RootTransaction(Transaction):
    """Outermost transaction: commits/rolls back directly on the connection."""

    __slots__ = ()

    def __init__(self, connection) -> None:
        super().__init__(connection, None)

    async def _do_rollback(self):
        await self._connection._rollback_impl()

    async def _do_commit(self):
        await self._connection._commit_impl()
112 | """ 113 | 114 | __slots__ = ("_savepoint",) 115 | 116 | def __init__(self, connection, parent) -> None: 117 | super().__init__(connection, parent) 118 | self._savepoint = None 119 | 120 | async def _do_rollback(self): 121 | assert self._savepoint is not None, "Broken transaction logic" 122 | if self._is_active: 123 | await self._connection._rollback_to_savepoint_impl( 124 | self._savepoint, self._parent 125 | ) 126 | 127 | async def _do_commit(self): 128 | assert self._savepoint is not None, "Broken transaction logic" 129 | if self._is_active: 130 | await self._connection._release_savepoint_impl( 131 | self._savepoint, self._parent 132 | ) 133 | 134 | 135 | class TwoPhaseTransaction(Transaction): 136 | """Represent a two-phase transaction. 137 | 138 | A new TwoPhaseTransaction object may be procured 139 | using the SAConnection.begin_twophase() method. 140 | 141 | The interface is the same as that of Transaction class 142 | with the addition of the .prepare() method. 143 | """ 144 | 145 | __slots__ = ("_is_prepared", "_xid") 146 | 147 | def __init__(self, connection, xid) -> None: 148 | super().__init__(connection, None) 149 | self._is_prepared = False 150 | self._xid = xid 151 | 152 | @property 153 | def xid(self): 154 | """Returns twophase transaction id.""" 155 | return self._xid 156 | 157 | async def prepare(self): 158 | """Prepare this TwoPhaseTransaction. 159 | 160 | After a PREPARE, the transaction can be committed. 
161 | """ 162 | 163 | if not self._parent.is_active: 164 | raise exc.InvalidRequestError("This transaction is inactive") 165 | await self._connection._prepare_twophase_impl(self._xid) 166 | self._is_prepared = True 167 | 168 | async def _do_rollback(self): 169 | await self._connection._rollback_twophase_impl( 170 | self._xid, is_prepared=self._is_prepared 171 | ) 172 | 173 | async def _do_commit(self): 174 | await self._connection._commit_twophase_impl( 175 | self._xid, is_prepared=self._is_prepared 176 | ) 177 | -------------------------------------------------------------------------------- /tests/test_async_transaction.py: -------------------------------------------------------------------------------- 1 | import psycopg2 2 | import pytest 3 | 4 | from aiopg import IsolationLevel, Transaction 5 | 6 | 7 | @pytest.fixture 8 | def engine(make_connection, loop): 9 | async def start(): 10 | engine = await make_connection() 11 | async with engine.cursor() as cur: 12 | await cur.execute("DROP TABLE IF EXISTS tbl") 13 | await cur.execute("CREATE TABLE tbl (id int, name varchar(255))") 14 | await cur.execute("insert into tbl values(22, 'read only')") 15 | return engine 16 | 17 | return loop.run_until_complete(start()) 18 | 19 | 20 | @pytest.mark.parametrize( 21 | "isolation_level", 22 | [ 23 | IsolationLevel.default, 24 | IsolationLevel.read_committed, 25 | IsolationLevel.repeatable_read, 26 | IsolationLevel.serializable, 27 | ], 28 | ) 29 | @pytest.mark.parametrize( 30 | "deferrable", 31 | [ 32 | False, 33 | True, 34 | ], 35 | ) 36 | async def test_transaction(engine, isolation_level, deferrable): 37 | async with engine.cursor() as cur: 38 | async with Transaction( 39 | cur, isolation_level, readonly=False, deferrable=deferrable 40 | ): 41 | await cur.execute("insert into tbl values(1, 'data')") 42 | await cur.execute("select * from tbl where id = 1") 43 | row = await cur.fetchone() 44 | 45 | assert row[0] == 1 46 | assert row[1] == "data" 47 | 48 | 49 | 
@pytest.mark.parametrize(
    "isolation_level",
    [
        IsolationLevel.default,
        IsolationLevel.read_committed,
        IsolationLevel.repeatable_read,
        IsolationLevel.serializable,
    ],
)
@pytest.mark.parametrize(
    "deferrable",
    [
        False,
        True,
    ],
)
async def test_transaction_readonly_insert(
    engine, isolation_level, deferrable
):
    # A read-only transaction must reject writes for every isolation
    # level / deferrable combination.
    async with engine.cursor() as cur:
        async with Transaction(
            cur, isolation_level, readonly=True, deferrable=deferrable
        ):
            with pytest.raises(psycopg2.InternalError):
                await cur.execute("insert into tbl values(1, 'data')")


@pytest.mark.parametrize(
    "isolation_level",
    [
        IsolationLevel.default,
        IsolationLevel.read_committed,
        IsolationLevel.repeatable_read,
        IsolationLevel.serializable,
    ],
)
@pytest.mark.parametrize(
    "deferrable",
    [
        False,
        True,
    ],
)
async def test_transaction_readonly(engine, isolation_level, deferrable):
    # Reads must still work inside a read-only transaction; the fixture
    # seeds row (22, 'read only').
    async with engine.cursor() as cur:
        async with Transaction(
            cur, isolation_level, readonly=True, deferrable=deferrable
        ):
            await cur.execute("select * from tbl where id = 22")
            row = await cur.fetchone()

            assert row[0] == 22
            assert row[1] == "read only"


async def test_transaction_rollback(engine):
    # A statement error (division by zero) escaping the block must roll
    # the whole transaction back, leaving only the seeded row.
    async with engine.cursor() as cur:
        with pytest.raises(psycopg2.DataError):
            async with Transaction(cur, IsolationLevel.read_committed):
                await cur.execute("insert into tbl values(1/0, 'no data')")

        await cur.execute("select * from tbl")
        row = await cur.fetchall()
        assert row == [
            (22, "read only"),
        ]
async def test_begin(engine):
    # cursor.begin() commits on clean exit: the insert from the first
    # block is visible in the second.
    async with engine.cursor() as cur:
        async with cur.begin():
            await cur.execute("insert into tbl values(1, 'data')")

        async with cur.begin():
            await cur.execute("select * from tbl")
            row = await cur.fetchall()
            assert row == [
                (22, "read only"),
                (1, "data"),
            ]


async def test_begin_nested(engine):
    # Savepoint blocks are independent: a failing sibling savepoint rolls
    # back only its own work, not that of the surrounding/other blocks.
    async with engine.cursor() as cur:
        async with cur.begin_nested():
            await cur.execute("insert into tbl values(1, 'data')")

            with pytest.raises(psycopg2.DataError):
                async with cur.begin_nested():
                    await cur.execute("insert into tbl values(1/0, 'no data')")

            async with cur.begin_nested():
                await cur.execute("insert into tbl values(2, 'data')")

        async with cur.begin_nested():
            await cur.execute("select * from tbl")
            row = await cur.fetchall()
            assert row == [
                (22, "read only"),
                (1, "data"),
                (2, "data"),
            ]


async def test_begin_nested_fail(engine):
    # When the outermost savepoint block itself fails, nothing is
    # persisted beyond the fixture's seed row.
    async with engine.cursor() as cur:
        with pytest.raises(psycopg2.DataError):
            async with cur.begin_nested():
                await cur.execute("insert into tbl values(1/0, 'data')")

        async with cur.begin_nested():
            await cur.execute("select * from tbl")
            row = await cur.fetchall()
            assert row == [(22, "read only")]
-------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. aiopg documentation master file, created by 2 | sphinx-quickstart on Sat Apr 5 00:00:44 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | ================ 7 | Welcome to AIOPG 8 | ================ 9 | 10 | 11 | **aiopg** is a library for accessing a :term:`PostgreSQL` database 12 | from the asyncio_ (PEP-3156/tulip) framework. It wraps 13 | asynchronous features of the Psycopg database driver. 14 | 15 | Current version is |release|. 16 | 17 | .. image:: https://travis-ci.com/aio-libs/aiopg.svg?branch=master 18 | :target: https://travis-ci.com/aio-libs/aiopg 19 | :alt: Travis CI status 20 | 21 | .. image:: https://codecov.io/github/aio-libs/aiopg/coverage.svg?branch=master 22 | :target: https://codecov.io/github/aio-libs/aiopg 23 | :alt: Code coverage status 24 | 25 | .. image:: https://badge.fury.io/py/aiopg.svg 26 | :target: https://badge.fury.io/py/aiopg 27 | :alt: Latest PyPI package version 28 | 29 | .. _GitHub: https://github.com/aio-libs/aiopg 30 | .. _asyncio: http://docs.python.org/3.4/library/asyncio.html 31 | 32 | .. warning:: 33 | 1. Removing await the before :meth:`Cursor.mogrify` function 34 | 35 | 2. Only supports ``python >= 3.8`` 36 | 37 | 3. Only support syntax ``async/await`` 38 | 39 | 4. :ref:`aiopg-run-loop` 40 | 41 | 42 | Features 43 | -------- 44 | 45 | - Implements *asyncio* :term:`DBAPI` *like* interface for 46 | :term:`PostgreSQL`. It includes :ref:`aiopg-core-connection`, 47 | :ref:`aiopg-core-cursor` and :ref:`aiopg-core-pool` objects. 48 | - Implements *optional* support for charming :term:`sqlalchemy` 49 | functional sql layer. 50 | 51 | 52 | Basics 53 | ------ 54 | 55 | The library uses :mod:`psycopg2-binary` connections in **asynchronous** mode 56 | internally. 
57 | 58 | Literally it is an (almost) transparent wrapper for psycopg2-binary 59 | connection and cursor, but with only exception. 60 | 61 | You should use ``await conn.f()`` instead of just call ``conn.f()`` for 62 | every method. 63 | 64 | Properties are unchanged, so ``conn.prop`` is correct as well as 65 | ``conn.prop = val``. 66 | 67 | See example:: 68 | 69 | import asyncio 70 | import aiopg 71 | 72 | dsn = 'dbname=aiopg user=aiopg password=passwd host=127.0.0.1' 73 | 74 | async def go(): 75 | async with aiopg.create_pool(dsn) as pool: 76 | async with pool.acquire() as conn: 77 | async with conn.cursor() as cur: 78 | await cur.execute("SELECT 1") 79 | ret = [] 80 | async for row in cur: 81 | ret.append(row) 82 | assert ret == [(1,)] 83 | 84 | loop = asyncio.get_event_loop() 85 | loop.run_until_complete(go()) 86 | 87 | For documentation about connection and cursor methods/properties 88 | please go to psycopg docs: http://initd.org/psycopg/docs/ 89 | 90 | .. note:: psycopg2-binary creates new connections with ``autocommit=True`` 91 | option in asynchronous mode. Autocommitting cannot be disabled. 92 | 93 | See :ref:`aiopg-core-transactions` about transaction usage 94 | in *autocommit mode*. 95 | 96 | 97 | SQLAlchemy and aiopg 98 | -------------------- 99 | 100 | :ref:`aiopg-core` provides core support for :term:`PostgreSQL` connections. 
101 | 102 | We have found it to be very annoying to write raw SQL queries manually, 103 | so we introduce support for :term:`sqlalchemy` query builders:: 104 | 105 | import asyncio 106 | from aiopg.sa import create_engine 107 | import sqlalchemy as sa 108 | 109 | 110 | metadata = sa.MetaData() 111 | 112 | tbl = sa.Table('tbl', metadata, 113 | sa.Column('id', sa.Integer, primary_key=True), 114 | sa.Column('val', sa.String(255))) 115 | 116 | async def go(): 117 | async with create_engine(user='aiopg', 118 | database='aiopg', 119 | host='127.0.0.1', 120 | password='passwd') as engine: 121 | async with engine.acquire() as conn: 122 | await conn.execute(tbl.insert().values(val='abc')) 123 | 124 | async for row in conn.execute(tbl.select().where(tbl.c.val=='abc')): 125 | print(row.id, row.val) 126 | 127 | 128 | loop = asyncio.get_event_loop() 129 | loop.run_until_complete(go()) 130 | 131 | We believe constructions like ``tbl.insert().values(val='abc')`` and 132 | ``tbl.select().where(tbl.c.val=='abc')`` to be very handy and 133 | convenient. 134 | 135 | 136 | Installation 137 | -------------------- 138 | 139 | .. code:: 140 | 141 | pip3 install aiopg 142 | 143 | .. note:: :mod:`aiopg` requires :term:`psycopg2-binary` library. 144 | 145 | You can use global environment or you use like to use virtual environments 146 | (:term:`virtualenvwrapper`, :term:`virtualenv` or :term:`venv`) you 147 | probably have to install :term:`libpq` development package 148 | 149 | .. code-block:: shell 150 | 151 | $ sudo apt-get install libpq-dev 152 | 153 | Also you probably want to use :mod:`aiopg.sa`. 154 | 155 | .. _aiozmq-install-sqlalchemy: 156 | 157 | :mod:`aiopg.sa` module is **optional** and requires 158 | :term:`sqlalchemy`. You can install *sqlalchemy* by running 159 | 160 | .. 
code-block:: shell 161 | 162 | $ pip3 install aiopg[sa] 163 | 164 | Source code 165 | ----------- 166 | 167 | The project is hosted on GitHub_ 168 | 169 | Please feel free to file an issue on `bug tracker 170 | `_ if you have found a bug 171 | or have some suggestion for library improvement. 172 | 173 | The library uses `Travis `_ for 174 | Continious Integration. 175 | 176 | Discussion list 177 | --------------- 178 | 179 | *aio-libs* google group: https://groups.google.com/forum/#!forum/aio-libs 180 | 181 | Feel free to post your questions and ideas here. 182 | 183 | 184 | Dependencies 185 | ------------ 186 | 187 | - Python 3.6+ 188 | - psycopg2-binary 189 | - aiopg.sa requires :term:`sqlalchemy`. 190 | 191 | Authors and License 192 | ------------------- 193 | 194 | The ``aiopg`` package is written by Andrew Svetlov. It's BSD 195 | licensed and freely available. 196 | 197 | Feel free to improve this package and send a pull request to GitHub_. 198 | 199 | Contents: 200 | 201 | .. toctree:: 202 | :maxdepth: 2 203 | 204 | core 205 | sa 206 | examples 207 | contributing 208 | glossary 209 | misc 210 | 211 | Indices and tables 212 | ================== 213 | 214 | * :ref:`genindex` 215 | * :ref:`modindex` 216 | * :ref:`search` 217 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests 
embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/aiopg.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/aiopg.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/aiopg" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/aiopg" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. 
The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 
152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. 
htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 
76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\aiopg.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\aiopg.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 
137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 
205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /aiopg/sa/engine.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import asyncio 4 | import json 5 | 6 | from sqlalchemy.dialects.postgresql.base import PGDialect 7 | 8 | import aiopg 9 | 10 | from ..connection import TIMEOUT 11 | from ..utils import _ContextManager, get_running_loop 12 | from .connection import SAConnection 13 | 14 | try: 15 | from sqlalchemy import __version__ 16 | 17 | sa_version = tuple(map(int, __version__.split("."))) 18 | if sa_version[0] < 2: 19 | from sqlalchemy.dialects.postgresql.psycopg2 import ( 20 | PGCompiler_psycopg2, 21 | PGDialect_psycopg2, 22 | ) 23 | else: 24 | from sqlalchemy.dialects.postgresql.base import ( 25 | PGCompiler as PGCompiler_psycopg2, 26 | ) 27 | from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 28 | except ImportError: # pragma: no cover 29 | raise ImportError("aiopg.sa requires sqlalchemy") 30 | 31 | 32 | class APGCompiler_psycopg2(PGCompiler_psycopg2): 33 | def construct_params(self, *args, **kwargs): 34 | pd = super().construct_params(*args, **kwargs) 35 | 36 | for column in self.prefetch: 37 | pd[column.key] = self._exec_default(column.default) 38 | 39 | return pd 40 | 41 | def _exec_default(self, default): 42 | if default.is_callable: 43 | return default.arg(self.dialect) 44 | else: 45 | return default.arg 46 | 47 | 48 | def get_dialect( 49 | json_serializer=json.dumps, json_deserializer=lambda x: x 50 | ) -> PGDialect: 51 | dialect = PGDialect_psycopg2( 52 | json_serializer=json_serializer, json_deserializer=json_deserializer 53 | ) 54 | 55 | dialect.statement_compiler = APGCompiler_psycopg2 56 | dialect.implicit_returning = True 57 | dialect.supports_native_enum = True 58 | dialect.supports_smallserial = True # 9.2+ 59 | dialect._backslash_escapes = False 60 | 
_dialect = get_dialect()


def create_engine(
    dsn=None,
    *,
    minsize=1,
    maxsize=10,
    dialect=_dialect,
    timeout=TIMEOUT,
    pool_recycle=-1,
    **kwargs,
) -> _ContextManager[Engine]:
    """Create an :class:`Engine` with an embedded connection pool.

    Returns an awaitable wrapper usable as ``async with``; the pool
    keeps *minsize* opened connections to the PostgreSQL server.
    """
    engine_coro = _create_engine(
        dsn=dsn,
        minsize=minsize,
        maxsize=maxsize,
        dialect=dialect,
        timeout=timeout,
        pool_recycle=pool_recycle,
        **kwargs,
    )
    return _ContextManager(engine_coro, _close_engine)


async def _create_engine(
    dsn=None,
    *,
    minsize=1,
    maxsize=10,
    dialect=_dialect,
    timeout=TIMEOUT,
    pool_recycle=-1,
    **kwargs,
) -> Engine:
    # Build the pool first, then borrow one connection briefly so the
    # Engine can record the fully-resolved DSN.
    pool = await aiopg.create_pool(
        dsn,
        minsize=minsize,
        maxsize=maxsize,
        timeout=timeout,
        pool_recycle=pool_recycle,
        **kwargs,
    )
    connection = await pool.acquire()
    try:
        return Engine(dialect, pool, connection.dsn)
    finally:
        await pool.release(connection)


async def _close_engine(engine: Engine) -> None:
    # Finalizer used by the create_engine() context manager.
    engine.close()
    await engine.wait_closed()


async def _close_connection(c: SAConnection) -> None:
    # Finalizer used by Engine.acquire()'s context manager.
    await c.close()


class Engine:
    """Glue between an aiopg.Pool and an SQLAlchemy dialect.

    Provides a source of database connectivity and behavior.  An Engine
    is instantiated publicly through the ``create_engine`` coroutine.
    """

    __slots__ = ("_dialect", "_pool", "_dsn", "_loop")

    def __init__(self, dialect, pool, dsn) -> None:
        self._dialect = dialect
        self._pool = pool
        self._dsn = dsn
        self._loop = get_running_loop()

    @property
    def dialect(self) -> PGDialect:
        """SQLAlchemy dialect used by the engine."""
        return self._dialect

    @property
    def name(self) -> str:
        """Name of the dialect."""
        return self._dialect.name

    @property
    def driver(self):
        """Driver of the dialect."""
        return self._dialect.driver

    @property
    def dsn(self):
        """DSN connection info."""
        return self._dsn

    @property
    def timeout(self):
        return self._pool.timeout

    @property
    def minsize(self):
        return self._pool.minsize

    @property
    def maxsize(self):
        return self._pool.maxsize

    @property
    def size(self):
        return self._pool.size

    @property
    def freesize(self):
        return self._pool.freesize

    @property
    def closed(self):
        return self._pool.closed

    def close(self) -> None:
        """Close the engine.

        Marks every pooled connection for closing when it returns to the
        pool; a closed engine refuses to hand out new connections.
        """
        self._pool.close()

    def terminate(self) -> None:
        """Terminate the engine pool, instantly closing acquired
        connections as well."""
        self._pool.terminate()

    async def wait_closed(self):
        """Wait until every engine connection has been closed."""
        await self._pool.wait_closed()

    def acquire(self) -> _ContextManager[SAConnection]:
        """Check a connection out of the pool."""
        return _ContextManager[SAConnection](
            self._acquire(), _close_connection
        )

    async def _acquire(self) -> SAConnection:
        raw = await self._pool.acquire()
        return SAConnection(raw, self)

    def release(self, conn):
        return self._pool.release(conn.connection)

    def __enter__(self):
        raise RuntimeError(
            '"await" should be used as context manager expression'
        )

    def __exit__(self, *args):
        # Required because __enter__ exists (and always raises); the
        # with-statement machinery looks both methods up eagerly.
        pass  # pragma: nocover

    def __await__(self):
        # Not a coroutine.  Supports the legacy idiom
        #
        #     with (await engine) as conn:
        #
        # as an alternative to acquire()/release() or "async with".
        conn = yield from self._acquire().__await__()
        return _ConnectionContextManager(conn, self._loop)

    async def __aenter__(self) -> Engine:
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        self.close()
        await self.wait_closed()


class _ConnectionContextManager:
    """Synchronous context manager around an acquired SAConnection.

    Enables the idiom::

        with (await engine) as conn:
            ...

    while failing loudly when accidentally writing ``with engine:``.
    """

    __slots__ = ("_conn", "_loop")

    def __init__(self, conn: SAConnection, loop: asyncio.AbstractEventLoop):
        self._conn = conn
        self._loop = loop

    def __enter__(self) -> SAConnection:
        return self._conn

    def __exit__(self, *args) -> None:
        # close() is a coroutine but __exit__ is sync, so schedule it.
        asyncio.ensure_future(self._conn.close(), loop=self._loop)
        self._conn = None
async def two_begin(cur):
    # begin() twice in a row must raise; roll back before re-raising.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.begin()
    try:
        await transaction.begin()
    except psycopg2.ProgrammingError as err:
        await transaction.rollback()
        raise err


async def two_commit(cur):
    # Double commit: the second commit must fail.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.begin()
    await transaction.commit()
    await transaction.commit()


async def two_rollback(cur):
    # Double rollback: the second rollback must fail.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.begin()
    await transaction.rollback()
    await transaction.rollback()


async def e_rollback_savepoint(cur):
    # rollback_savepoint() without an open transaction must fail.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.rollback_savepoint()


async def e_release_savepoint(cur):
    # release_savepoint() without an open transaction must fail.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.release_savepoint()


async def two_rollback_savepoint(cur):
    # release_savepoint() without a prior savepoint must fail; commit to
    # clean up before re-raising.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.begin()
    try:
        await transaction.release_savepoint()
    except psycopg2.ProgrammingError as err:
        await transaction.commit()
        raise err


async def e_savepoint(cur):
    # savepoint() without an open transaction must fail.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.savepoint()


async def e_commit_savepoint(cur):
    # Nested savepoint() must fail; unwind the savepoint and commit
    # before re-raising.
    transaction = Transaction(cur, IsolationLevel.read_committed)
    await transaction.begin()
    await transaction.savepoint()
    try:
        await transaction.savepoint()
    except psycopg2.ProgrammingError as err:
        await transaction.rollback_savepoint()
        await transaction.commit()
        raise err
async def test_transaction_finalization_warning(engine, monkeypatch):
    async with engine.acquire() as cur:
        transaction = Transaction(cur, IsolationLevel.read_committed)

        def valid(message, _):
            # Only these warnings are expected from finalization.
            assert message in [
                "Invalid transaction status on released connection: 2",
                f"You have not closed transaction {transaction!r}",
                f"You have not closed savepoint {transaction!r}",
            ]

        monkeypatch.setattr("aiopg.warnings.warn", valid)
        await transaction.begin()
        await transaction.savepoint()


async def test_transaction_readonly_insert_oldstyle(engine):
    # INSERT inside a read-only transaction must be rejected.
    async with engine.acquire() as cur:
        transaction = Transaction(
            cur, IsolationLevel.serializable, readonly=True
        )

        await transaction.begin()
        with pytest.raises(psycopg2.InternalError):
            await cur.execute("insert into tbl values(1, 'data')")
        await transaction.rollback()


async def test_transaction_readonly_oldstyle(engine):
    # SELECT still works inside a read-only transaction.
    async with engine.acquire() as cur:
        transaction = Transaction(
            cur, IsolationLevel.serializable, readonly=True
        )

        await transaction.begin()
        result = await cur.execute("select * from tbl where id = 22")
        row = await result.fetchone()

        assert row.id == 22
        assert row.name == "read only"
        await transaction.commit()
async def test_timeout_in_transaction_context_manager(make_engine):
    # The engine timeout must fire inside a transaction block too.
    engine = await make_engine(timeout=1)
    with pytest.raises(asyncio.TimeoutError):
        async with engine.acquire() as conn:
            async with Transaction(conn, IsolationLevel.read_committed):
                await conn.execute("SELECT pg_sleep(10)")

    engine.terminate()
    await engine.wait_closed()


async def test_timeout_in_savepoint_context_manager(make_engine):
    # Same timeout behavior one level deeper, inside a savepoint block.
    engine = await make_engine(timeout=1)
    with pytest.raises(asyncio.TimeoutError):
        async with engine.acquire() as conn:
            async with Transaction(
                conn, IsolationLevel.read_committed
            ) as transaction:
                async with transaction.point():
                    await conn.execute("SELECT pg_sleep(10)")

    engine.terminate()
    await engine.wait_closed()


async def test_cancel_in_transaction_context_manager(engine, loop):
    # Cancelling a running statement propagates CancelledError out of
    # the transaction context manager.
    with pytest.raises(asyncio.CancelledError):
        async with engine.acquire() as conn:
            async with Transaction(conn, IsolationLevel.read_committed):
                task = loop.create_task(
                    conn.execute("SELECT pg_sleep(10)")
                )

                async def cancel_soon():
                    await asyncio.sleep(1)
                    task.cancel()

                loop.create_task(cancel_soon())
                await task
async def test_cursor_await(make_connection):
    conn = await make_connection()

    cur = await conn.cursor()
    await cur.execute("SELECT 42;")
    assert (42,) == await cur.fetchone()
    cur.close()


async def test_connect_context_manager(pg_params):
    # Leaving the connect() context closes the connection.
    async with aiopg.connect(**pg_params) as conn:
        cur = await conn.cursor()
        await cur.execute("SELECT 42")
        assert (42,) == await cur.fetchone()
        cur.close()
    assert conn.closed


async def test_connection_context_manager(make_connection):
    conn = await make_connection()
    assert not conn.closed
    async with conn:
        cur = await conn.cursor()
        await cur.execute("SELECT 42;")
        assert (42,) == await cur.fetchone()
        cur.close()
    assert conn.closed


async def test_cursor_create_with_context_manager(make_connection):
    conn = await make_connection()

    async with conn.cursor() as cur:
        await cur.execute("SELECT 42;")
        assert (42,) == await cur.fetchone()
        assert not cur.closed

    assert cur.closed
async def test_cursor_with_context_manager(make_connection):
    conn = await make_connection()
    cur = await conn.cursor()
    await cur.execute("SELECT 42;")

    assert not cur.closed
    async with cur:
        assert (42,) == await cur.fetchone()
    assert cur.closed


async def test_cursor_lightweight(make_connection):
    # Even an empty "async with cursor" body closes the cursor on exit.
    conn = await make_connection()
    cur = await conn.cursor()
    await cur.execute("SELECT 42;")

    assert not cur.closed
    async with cur:
        pass
    assert cur.closed


async def test_pool_context_manager(pg_params):
    pool = await aiopg.create_pool(**pg_params)

    async with pool:
        conn = await pool.acquire()
        async with conn.cursor() as cur:
            await cur.execute("SELECT 42;")
            assert (42,) == await cur.fetchone()
        pool.release(conn)
    assert cur.closed
    assert pool.closed
async def test_cursor_aiter(make_connection):
    rows = []
    conn = await make_connection()
    assert not conn.closed
    async with conn:
        cur = await conn.cursor()
        await cur.execute("SELECT generate_series(1, 5);")
        async for row in cur:
            rows.append(row)
        assert rows == [(1,), (2,), (3,), (4,), (5,)]
        cur.close()
    assert conn.closed


async def test_engine_context_manager(pg_params):
    engine = await aiopg.sa.create_engine(**pg_params)
    async with engine:
        conn = await engine.acquire()
        assert isinstance(conn, SAConnection)
        engine.release(conn)
    assert engine.closed


async def test_create_engine_context_manager(pg_params):
    async with aiopg.sa.create_engine(**pg_params) as engine:
        async with engine.acquire() as conn:
            assert isinstance(conn, SAConnection)
    assert engine.closed


async def test_result_proxy_aiter(pg_params):
    query = "SELECT generate_series(1, 5);"
    rows = []
    async with aiopg.sa.create_engine(**pg_params) as engine:
        async with engine.acquire() as conn:
            async with conn.execute(query) as cursor:
                async for row in cursor:
                    rows.append(row)
            assert rows == [(1,), (2,), (3,), (4,), (5,)]
            assert cursor.closed
    assert conn.closed
async def test_transaction_context_manager_error(pg_params):
    # An exception inside the transaction block propagates out and
    # deactivates the transaction.
    async with aiopg.sa.create_engine(**pg_params) as engine:
        async with engine.acquire() as conn:
            with pytest.raises(RuntimeError) as ctx:
                async with conn.begin() as tr:
                    assert tr.is_active
                    raise RuntimeError("boom")
            assert str(ctx.value) == "boom"
            assert not tr.is_active
    assert conn.closed


async def test_transaction_context_manager_commit_once(pg_params):
    async with aiopg.sa.create_engine(**pg_params) as engine:
        async with engine.acquire() as conn:
            async with conn.begin() as tr:
                # The context manager must not commit a second time
                # after an explicit commit: two commits in a row would
                # raise InvalidRequestError.
                await tr.commit()
            assert not tr.is_active

            second = await conn.begin()
            async with second:
                assert second.is_active
                # check for double commit one more time
                await second.commit()
            assert not second.is_active
    assert conn.closed
async def test_sa_connection_execute(pg_params):
    # conn.execute(...) can be iterated directly with "async for".
    query = "SELECT generate_series(1, 5);"
    rows = []
    async with aiopg.sa.create_engine(**pg_params) as engine:
        async with engine.acquire() as conn:
            async for row in conn.execute(query):
                rows.append(row)
    assert rows == [(1,), (2,), (3,), (4,), (5,)]
    assert conn.closed
async def test_description(cursor):
    async with cursor as cur:
        assert cur.description is None
        await cur.execute("SELECT * from tbl;")

        description = cur.description
        assert len(description) == 2, (
            "cursor.description describes too many columns"
        )
        assert len(description[0]) == 7, (
            "cursor.description[x] tuples must have 7 elements"
        )
        assert description[0][0].lower() == "id", (
            "cursor.description[x][0] must return column name"
        )
        assert description[1][0].lower() == "name", (
            "cursor.description[x][0] must return column name"
        )

        # description resets to None after a non-resultset (DDL) query.
        await cur.execute("DROP TABLE IF EXISTS foobar;")
        assert cur.description is None


async def test_raw(cursor):
    assert cursor.raw is cursor._impl


async def test_close(cursor):
    cursor.close()
    assert cursor.closed
    with pytest.raises(psycopg2.InterfaceError):
        await cursor.execute("SELECT 1")


async def test_close_twice(connect):
    conn = await connect()
    cur = await conn.cursor()
    cur.close()
    cur.close()  # second close is a no-op
    assert cur.closed
    with pytest.raises(psycopg2.InterfaceError):
        await cur.execute("SELECT 1")
    assert conn._waiter is None


async def test_connection(connect):
    conn = await connect()
    cur = await conn.cursor()
    assert cur.connection is conn


async def test_name(cursor):
    assert cursor.name is None


async def test_scrollable(cursor):
    assert cursor.scrollable is None
    with pytest.raises(psycopg2.ProgrammingError):
        cursor.scrollable = True


async def test_withhold(cursor):
    assert not cursor.withhold
    with pytest.raises(psycopg2.ProgrammingError):
        cursor.withhold = True
    assert not cursor.withhold


async def test_execute(cursor):
    await cursor.execute("SELECT 1")
    assert await cursor.fetchone() == (1,)


async def test_executemany(cursor):
    with pytest.raises(psycopg2.ProgrammingError):
        await cursor.executemany("SELECT %s", ["1", "2"])


def test_mogrify(cursor):
    assert cursor.mogrify("SELECT %s", ["1"]) == b"SELECT '1'"


async def test_setinputsizes(cursor):
    await cursor.setinputsizes(10)


async def test_fetchmany(cursor):
    await cursor.execute("SELECT * from tbl;")
    assert await cursor.fetchmany() == [(1, "a")]

    await cursor.execute("SELECT * from tbl;")
    assert await cursor.fetchmany(2) == [(1, "a"), (2, "b")]


async def test_fetchall(cursor):
    await cursor.execute("SELECT * from tbl;")
    assert await cursor.fetchall() == [(1, "a"), (2, "b"), (3, "c")]


async def test_scroll(cursor):
    await cursor.execute("SELECT * from tbl;")
    await cursor.scroll(1)
    assert await cursor.fetchone() == (2, "b")


async def test_arraysize(cursor):
    assert cursor.arraysize == 1

    cursor.arraysize = 10
    assert cursor.arraysize == 10


async def test_itersize(cursor):
    assert cursor.itersize == 2000

    cursor.itersize = 10
    assert cursor.itersize == 10


async def test_rows(cursor):
    await cursor.execute("SELECT * from tbl")
    assert cursor.rowcount == 3
    assert cursor.rownumber == 0
    await cursor.fetchone()
    assert cursor.rownumber == 1


async def test_query(cursor):
    await cursor.execute("SELECT 1")
    assert cursor.query == b"SELECT 1"
async def test_statusmessage(cursor):
    await cursor.execute("SELECT 1")
    assert cursor.statusmessage == "SELECT 1"


async def test_tzinfo_factory(cursor):
    assert cursor.tzinfo_factory is datetime.timezone

    cursor.tzinfo_factory = psycopg2.tz.LocalTimezone
    assert cursor.tzinfo_factory is psycopg2.tz.LocalTimezone


async def test_nextset(cursor):
    with pytest.raises(psycopg2.NotSupportedError):
        await cursor.nextset()


async def test_setoutputsize(cursor):
    await cursor.setoutputsize(4, 1)


async def test_copy_family(connect):
    # copy_from/copy_to/copy_expert are unsupported by async cursors.
    conn = await connect()
    cur = await conn.cursor()

    with pytest.raises(psycopg2.ProgrammingError):
        await cur.copy_from("file", "table")

    with pytest.raises(psycopg2.ProgrammingError):
        await cur.copy_to("file", "table")

    with pytest.raises(psycopg2.ProgrammingError):
        await cur.copy_expert("sql", "table")


async def test_callproc(connect):
    conn = await connect()
    cur = await conn.cursor()
    await cur.callproc("inc", [1])
    assert await cur.fetchone() == (2,)

    cur.close()
    with pytest.raises(psycopg2.InterfaceError):
        await cur.callproc("inc", [1])
    assert conn._waiter is None


async def test_execute_timeout(connect):
    timeout = 0.1
    conn = await connect()
    cur = await conn.cursor(timeout=timeout)
    assert cur.timeout == timeout

    started = time.time()
    with pytest.raises(asyncio.TimeoutError):
        await cur.execute("SELECT pg_sleep(1)")
    elapsed = time.time() - started
    assert 0.08 <= elapsed <= 0.15, elapsed


async def test_execute_override_timeout(connect):
    timeout = 0.1
    conn = await connect()
    cur = await conn.cursor()
    assert cur.timeout == TIMEOUT

    started = time.time()
    with pytest.raises(asyncio.TimeoutError):
        await cur.execute("SELECT pg_sleep(1)", timeout=timeout)
    elapsed = time.time() - started
    assert 0.08 <= elapsed <= 0.15, elapsed


async def test_callproc_timeout(connect):
    timeout = 0.1
    conn = await connect()
    cur = await conn.cursor(timeout=timeout)
    assert cur.timeout == timeout

    started = time.time()
    with pytest.raises(asyncio.TimeoutError):
        await cur.callproc("pg_sleep", [1])
    elapsed = time.time() - started
    assert 0.08 <= elapsed <= 0.15, elapsed


async def test_callproc_override_timeout(connect):
    timeout = 0.1
    conn = await connect()
    cur = await conn.cursor()
    assert cur.timeout == TIMEOUT

    started = time.time()
    with pytest.raises(asyncio.TimeoutError):
        await cur.callproc("pg_sleep", [1], timeout=timeout)
    elapsed = time.time() - started
    assert 0.08 <= elapsed <= 0.15, elapsed


async def test_echo(connect):
    conn = await connect(echo=True)
    cur = await conn.cursor()
    assert cur.echo


async def test_echo_false(connect):
    conn = await connect()
    cur = await conn.cursor()
    assert not cur.echo


async def test_isolation_level(connect):
    conn = await connect()
    cur = await conn.cursor(isolation_level=IsolationLevel.read_committed)
    assert isinstance(cur._transaction._isolation, ReadCommittedCompiler)


async def test_iter(connect):
    conn = await connect()
    cur = await conn.cursor()
    await cur.execute("SELECT * FROM tbl")
    rows = [row async for row in cur]

    expected = [(1, "a"), (2, "b"), (3, "c")]
    for got, want in zip(rows, expected):
        assert got == want
cur.callproc("inc", [1]) 334 | ret = await cur.fetchone() 335 | assert (2,) == ret 336 | cur.close() 337 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # aiopg documentation build configuration file, created by 4 | # sphinx-quickstart on Sat Apr 5 00:00:44 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | import datetime 15 | import re 16 | 17 | from pathlib import Path 18 | 19 | 20 | def get_release(): 21 | regexp = re.compile(r"^__version__\W*=\W*\"([\d.abrc]+)\"") 22 | init_py = Path(__file__).resolve().parent.parent / 'aiopg' / '__init__.py' 23 | with init_py.open() as f: 24 | for line in f: 25 | match = regexp.match(line) 26 | if match is not None: 27 | return match.group(1) 28 | else: 29 | raise RuntimeError('Cannot find version in aiopg/__init__.py') 30 | 31 | 32 | def get_version(release): 33 | parts = release.split('.') 34 | return '.'.join(parts[:2]) 35 | 36 | 37 | # If extensions (or modules to document with autodoc) are in another directory, 38 | # add these directories to sys.path here. If the directory is relative to the 39 | # documentation root, use Path.resolve to make it absolute, like shown here. 40 | # sys.path.insert(0, str(Path('.').resolve())) 41 | 42 | # -- General configuration ------------------------------------------------ 43 | 44 | # If your documentation needs a minimal Sphinx version, state it here. 45 | # needs_sphinx = '1.0' 46 | 47 | # Add any Sphinx extension module names here, as strings. 
They can be 48 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 49 | # ones. 50 | extensions = ['sphinx.ext.viewcode', 51 | 'sphinx.ext.autodoc', 52 | 'sphinx.ext.intersphinx', 53 | 'sphinxcontrib.asyncio'] 54 | 55 | intersphinx_mapping = { 56 | 'python': ('http://docs.python.org/3', None), 57 | 'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest', None), 58 | 'psycopg2-binary': ('http://initd.org/psycopg/docs/', None), 59 | } 60 | 61 | # Add any paths that contain templates here, relative to this directory. 62 | templates_path = ['_templates'] 63 | 64 | # The suffix of source filenames. 65 | source_suffix = '.rst' 66 | 67 | # The encoding of source files. 68 | # source_encoding = 'utf-8-sig' 69 | 70 | # The master toctree document. 71 | master_doc = 'index' 72 | 73 | # General information about the project. 74 | project = 'aiopg' 75 | date = datetime.date.today() 76 | 77 | copyright = f'2014-{date.year}, Andrew Svetlov, Alexey Firsov' 78 | 79 | # The version info for the project you're documenting, acts as replacement for 80 | # |version| and |release|, also used in various other places throughout the 81 | # built documents. 82 | # 83 | # The short X.Y version. 84 | # The full version, including alpha/beta/rc tags. 85 | release = get_release() 86 | version = get_version(release) 87 | 88 | # The language for content autogenerated by Sphinx. Refer to documentation 89 | # for a list of supported languages. 90 | # language = None 91 | 92 | # There are two options for replacing |today|: either, you set today to some 93 | # non-false value, then it is used: 94 | # today = '' 95 | # Else, today_fmt is used as the format for a strftime call. 96 | # today_fmt = '%B %d, %Y' 97 | 98 | # List of patterns, relative to source directory, that match files and 99 | # directories to ignore when looking for source files. 100 | exclude_patterns = ['_build'] 101 | 102 | # The reST default role (used for this markup: `text`) to use for all 103 | # documents. 
104 | # default_role = None 105 | 106 | # If true, '()' will be appended to :func: etc. cross-reference text. 107 | # add_function_parentheses = True 108 | 109 | # If true, the current module name will be prepended to all description 110 | # unit titles (such as .. function::). 111 | # add_module_names = True 112 | 113 | # If true, sectionauthor and moduleauthor directives will be shown in the 114 | # output. They are ignored by default. 115 | # show_authors = False 116 | 117 | # The name of the Pygments (syntax highlighting) style to use. 118 | pygments_style = 'sphinx' 119 | 120 | # A list of ignored prefixes for module index sorting. 121 | # modindex_common_prefix = [] 122 | 123 | # If true, keep warnings as "system message" paragraphs in the built documents. 124 | # keep_warnings = False 125 | 126 | highlight_language = 'python3' 127 | 128 | # -- Options for HTML output ---------------------------------------------- 129 | 130 | # The theme to use for HTML and HTML Help pages. See the documentation for 131 | # a list of builtin themes. 132 | html_theme = 'alabaster' 133 | 134 | # Theme options are theme-specific and customize the look and feel of a theme 135 | # further. For a list of options available for each theme, see the 136 | # documentation. 137 | html_theme_options = { 138 | 'logo': 'aiopg-icon.png', 139 | 'description': 'aiopg - Postgres integration with asyncio', 140 | 'github_user': 'aio-libs', 141 | 'github_repo': 'aiopg', 142 | 'github_button': True, 143 | 'github_type': 'star', 144 | 'github_banner': True, 145 | 'canonical_url': 'https://aiopg.readthedocs.io/en/stable/', 146 | 147 | } 148 | # Add any paths that contain custom themes here, relative to this directory. 149 | # html_theme_path = [] 150 | 151 | # The name for this set of Sphinx documents. If None, it defaults to 152 | # " v documentation". 153 | html_title = 'Welcome to AIOPG' 154 | 155 | # A shorter title for the navigation bar. Default is the same as html_title. 
156 | # html_short_title = None 157 | 158 | # The name of an image file (relative to this directory) to place at the top 159 | # of the sidebar. 160 | # html_logo = None 161 | 162 | # The name of an image file (within the static path) to use as favicon of the 163 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 164 | # pixels large. 165 | # html_favicon = None 166 | 167 | # Add any paths that contain custom static files (such as style sheets) here, 168 | # relative to this directory. They are copied after the builtin static files, 169 | # so a file named "default.css" will overwrite the builtin "default.css". 170 | html_static_path = ['_static'] 171 | 172 | # Add any extra paths that contain custom files (such as robots.txt or 173 | # .htaccess) here, relative to this directory. These files are copied 174 | # directly to the root of the documentation. 175 | # html_extra_path = [] 176 | 177 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 178 | # using the given strftime format. 179 | # html_last_updated_fmt = '%b %d, %Y' 180 | 181 | # If true, SmartyPants will be used to convert quotes and dashes to 182 | # typographically correct entities. 183 | # html_use_smartypants = True 184 | 185 | # Custom sidebar templates, maps document names to template names. 186 | html_sidebars = { 187 | '**': [ 188 | 'about.html', 'navigation.html', 'searchbox.html', 189 | ] 190 | } 191 | # Additional templates that should be rendered to pages, maps page names to 192 | # template names. 193 | # html_additional_pages = {} 194 | 195 | # If false, no module index is generated. 196 | # html_domain_indices = True 197 | 198 | # If false, no index is generated. 199 | # html_use_index = True 200 | 201 | # If true, the index is split into individual pages for each letter. 202 | # html_split_index = False 203 | 204 | # If true, links to the reST sources are added to the pages. 
205 | # html_show_sourcelink = True 206 | 207 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 208 | # html_show_sphinx = True 209 | 210 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 211 | # html_show_copyright = True 212 | 213 | # If true, an OpenSearch description file will be output, and all pages will 214 | # contain a tag referring to it. The value of this option must be the 215 | # base URL from which the finished HTML is served. 216 | # html_use_opensearch = '' 217 | 218 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 219 | # html_file_suffix = None 220 | 221 | # Output file base name for HTML help builder. 222 | htmlhelp_basename = 'aiopgdoc' 223 | 224 | # -- Options for LaTeX output --------------------------------------------- 225 | 226 | latex_elements = { 227 | # The paper size ('letterpaper' or 'a4paper'). 228 | # 'papersize': 'letterpaper', 229 | 230 | # The font size ('10pt', '11pt' or '12pt'). 231 | # 'pointsize': '10pt', 232 | 233 | # Additional stuff for the LaTeX preamble. 234 | # 'preamble': '', 235 | } 236 | 237 | # Grouping the document tree into LaTeX files. List of tuples 238 | # (source start file, target name, title, 239 | # author, documentclass [howto, manual, or own class]). 240 | latex_documents = [ 241 | ('index', 'aiopg.tex', 'aiopg Documentation', 242 | 'Andrew Svetlov, Alexey Firsov', 'manual'), 243 | ] 244 | 245 | # The name of an image file (relative to this directory) to place at the top of 246 | # the title page. 247 | # latex_logo = None 248 | 249 | # For "manual" documents, if this is true, then toplevel headings are parts, 250 | # not chapters. 251 | # latex_use_parts = False 252 | 253 | # If true, show page references after internal links. 254 | # latex_show_pagerefs = False 255 | 256 | # If true, show URL addresses after external links. 257 | # latex_show_urls = False 258 | 259 | # Documents to append as an appendix to all manuals. 
260 | # latex_appendices = [] 261 | 262 | # If false, no module index is generated. 263 | # latex_domain_indices = True 264 | 265 | 266 | # -- Options for manual page output --------------------------------------- 267 | 268 | # One entry per manual page. List of tuples 269 | # (source start file, name, description, authors, manual section). 270 | man_pages = [ 271 | ('index', 'aiopg', 'aiopg Documentation', 272 | ['Andrew Svetlov', 'Alexey Firsov'], 1) 273 | ] 274 | 275 | # If true, show URL addresses after external links. 276 | # man_show_urls = False 277 | 278 | 279 | # -- Options for Texinfo output ------------------------------------------- 280 | 281 | # Grouping the document tree into Texinfo files. List of tuples 282 | # (source start file, target name, title, author, 283 | # dir menu entry, description, category) 284 | texinfo_documents = [ 285 | ('index', 'aiopg', 'aiopg Documentation', 286 | 'Andrew Svetlov, Alexey Firsov', 287 | 'aiopg', 'One line description of project.', 288 | 'Miscellaneous'), 289 | ] 290 | 291 | # Documents to append as an appendix to all manuals. 292 | # texinfo_appendices = [] 293 | 294 | # If false, no module index is generated. 295 | # texinfo_domain_indices = True 296 | 297 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 298 | # texinfo_show_urls = 'footnote' 299 | 300 | # If true, do not generate a @detailmenu in the "Top" node's menu. 
301 | # texinfo_no_detailmenu = False 302 | -------------------------------------------------------------------------------- /CHANGES.txt: -------------------------------------------------------------------------------- 1 | 1.5.0a1 (2023-08-29) 2 | ^^^^^^^^^^^^^^^^^^^^ 3 | 4 | * SQLAlchemy 2.0 compatibility `#901 `_ 5 | 6 | 7 | 1.4.0 (2022-10-26) 8 | ^^^^^^^^^^^^^^^^^^ 9 | 10 | * Add python 3.11 and drop python 3.6 support `#892 `_ 11 | 12 | 13 | 1.3.5 (2022-09-25) 14 | ^^^^^^^^^^^^^^^^^^ 15 | 16 | * Fix pool size limit check for unlimited pools `#888 `_ 17 | 18 | 19 | 1.3.4 (2022-06-30) 20 | ^^^^^^^^^^^^^^^^^^ 21 | 22 | 23 | 1.3.4b3 (2022-06-29) 24 | ^^^^^^^^^^^^^^^^^^^^ 25 | 26 | 27 | 1.3.4b2 (2022-06-29) 28 | ^^^^^^^^^^^^^^^^^^^^ 29 | 30 | 31 | 1.3.4b1 (2022-06-29) 32 | ^^^^^^^^^^^^^^^^^^^^ 33 | 34 | * Fix compatibility with SA 1.4.38 `#891 `_ 35 | * Add py.typed marker `#878 `_ 36 | 37 | 38 | 1.3.3 (2021-11-01) 39 | ^^^^^^^^^^^^^^^^^^ 40 | 41 | * Support async-timeout 4.0+ 42 | 43 | 44 | 1.3.2 (2021-10-07) 45 | ^^^^^^^^^^^^^^^^^^ 46 | 47 | 48 | 1.3.2b2 (2021-10-07) 49 | ^^^^^^^^^^^^^^^^^^^^ 50 | 51 | * Respect use_labels for select statement `#882 `_ 52 | 53 | 54 | 1.3.2b1 (2021-07-11) 55 | ^^^^^^^^^^^^^^^^^^^^ 56 | 57 | * Fix compatibility with SQLAlchemy >= 1.4 `#870 `_ 58 | 59 | 60 | 1.3.1 (2021-07-08) 61 | ^^^^^^^^^^^^^^^^^^ 62 | 63 | 64 | 1.3.1b2 (2021-07-06) 65 | ^^^^^^^^^^^^^^^^^^^^ 66 | 67 | * Suppress "Future exception was never retrieved" `#862 `_ 68 | 69 | 70 | 1.3.1b1 (2021-07-05) 71 | ^^^^^^^^^^^^^^^^^^^^ 72 | 73 | * Fix ClosableQueue.get on cancellation, close it on Connection.close `#859 `_ 74 | 75 | 76 | 1.3.0 (2021-06-30) 77 | ^^^^^^^^^^^^^^^^^^ 78 | 79 | 80 | 1.3.0b4 (2021-06-28) 81 | ^^^^^^^^^^^^^^^^^^^^ 82 | 83 | * Fix "Unable to detect disconnect when using NOTIFY/LISTEN" `#559 `_ 84 | 85 | 86 | 1.3.0b3 (2021-04-03) 87 | ^^^^^^^^^^^^^^^^^^^^ 88 | 89 | * Reformat using black `#814 `_ 90 | 91 | 92 | 1.3.0b2 (2021-04-02) 93 | 
^^^^^^^^^^^^^^^^^^^^ 94 | 95 | * Type annotations `#813 `_ 96 | 97 | 98 | 1.3.0b1 (2021-03-30) 99 | ^^^^^^^^^^^^^^^^^^^^ 100 | 101 | * Raise ResourceClosedError if we try to open a cursor on a closed SAConnection `#811 `_ 102 | 103 | 104 | 1.3.0b0 (2021-03-25) 105 | ^^^^^^^^^^^^^^^^^^^^ 106 | 107 | * Fix compatibility with SA 1.4 for IN statement `#806 `_ 108 | 109 | 110 | 1.2.1 (2021-03-23) 111 | ^^^^^^^^^^^^^^^^^^ 112 | 113 | * Pop loop in connection init due to backward compatibility `#808 `_ 114 | 115 | 116 | 1.2.0b4 (2021-03-23) 117 | ^^^^^^^^^^^^^^^^^^^^ 118 | 119 | * Set max supported sqlalchemy version `#805 `_ 120 | 121 | 122 | 1.2.0b3 (2021-03-22) 123 | ^^^^^^^^^^^^^^^^^^^^ 124 | 125 | * Don't run ROLLBACK when the connection is closed `#778 `_ 126 | 127 | * Multiple cursors support `#801 `_ 128 | 129 | 130 | 1.2.0b2 (2020-12-21) 131 | ^^^^^^^^^^^^^^^^^^^^ 132 | 133 | * Fix IsolationLevel.read_committed and introduce IsolationLevel.default `#770 `_ 134 | 135 | * Fix python 3.8 warnings in tests `#771 `_ 136 | 137 | 138 | 1.2.0b1 (2020-12-16) 139 | ^^^^^^^^^^^^^^^^^^^^ 140 | 141 | * Deprecate blocking connection.cancel() method `#570 `_ 142 | 143 | 144 | 1.2.0b0 (2020-12-15) 145 | ^^^^^^^^^^^^^^^^^^^^ 146 | 147 | * Implement timeout on acquiring connection from pool `#766 `_ 148 | 149 | 150 | 1.1.0 (2020-12-10) 151 | ^^^^^^^^^^^^^^^^^^ 152 | 153 | 154 | 1.1.0b2 (2020-12-09) 155 | ^^^^^^^^^^^^^^^^^^^^ 156 | 157 | * Added missing slots to context managers `#763 `_ 158 | 159 | 160 | 1.1.0b1 (2020-12-07) 161 | ^^^^^^^^^^^^^^^^^^^^ 162 | 163 | * Fix on_connect multiple call on acquire `#552 `_ 164 | 165 | * Fix python 3.8 warnings `#622 `_ 166 | 167 | * Bump minimum psycopg version to 2.8.4 `#754 `_ 168 | 169 | * Fix Engine.release method to release connection in any way `#756 `_ 170 | 171 | 172 | 1.0.0 (2019-09-20) 173 | ^^^^^^^^^^^^^^^^^^ 174 | 175 | * Removal of an asynchronous call in favor of issues # 550 176 | 177 | * Big editing of documentation and 
minor bugs #534 178 | 179 | 180 | 0.16.0 (2019-01-25) 181 | ^^^^^^^^^^^^^^^^^^^ 182 | 183 | * Fix select priority name `#525 `_ 184 | 185 | * Rename `psycopg2` to `psycopg2-binary` to fix deprecation warning `#507 `_ 186 | 187 | * Fix `#189 `_ hstore when using ReadDictCursor `#512 `_ 188 | 189 | * close cannot be used while an asynchronous query is underway `#452 `_ 190 | 191 | * sqlalchemy adapter trx begin allow transaction_mode `#498 `_ 192 | 193 | 194 | 0.15.0 (2018-08-14) 195 | ^^^^^^^^^^^^^^^^^^^ 196 | 197 | * Support Python 3.7 `#437 `_ 198 | 199 | 200 | 0.14.0 (2018-05-10) 201 | ^^^^^^^^^^^^^^^^^^^ 202 | 203 | * Add ``get_dialect`` func to have ability to pass ``json_serializer`` `#451 `_ 204 | 205 | 206 | 0.13.2 (2018-01-03) 207 | ^^^^^^^^^^^^^^^^^^^ 208 | 209 | * Fixed compatibility with SQLAlchemy 1.2.0 `#412 `_ 210 | 211 | * Added support for transaction isolation levels `#219 `_ 212 | 213 | 214 | 0.13.1 (2017-09-10) 215 | ^^^^^^^^^^^^^^^^^^^ 216 | 217 | * Added connection poll recycling logic `#373 `_ 218 | 219 | 220 | 0.13.0 (2016-12-02) 221 | ^^^^^^^^^^^^^^^^^^^ 222 | 223 | * Add `async with` support to `.begin_nested()` `#208 `_ 224 | 225 | * Fix connection.cancel() `#212 `_ `#223 `_ 226 | 227 | * Raise informative error on unexpected connection closing `#191 `_ 228 | 229 | * Added support for python types columns issues `#217 `_ 230 | 231 | * Added support for default values in SA table issues `#206 `_ 232 | 233 | 234 | 0.12.0 (2016-10-09) 235 | ^^^^^^^^^^^^^^^^^^^ 236 | 237 | * Add an on_connect callback parameter to pool `#141 `_ 238 | 239 | * Fixed connection to work under both windows and posix based systems `#142 `_ 240 | 241 | 242 | 0.11.0 (2016-09-12) 243 | ^^^^^^^^^^^^^^^^^^^ 244 | 245 | * Immediately remove callbacks from a closed file descriptor `#139 `_ 246 | 247 | * Drop Python 3.3 support 248 | 249 | 250 | 0.10.0 (2016-07-16) 251 | ^^^^^^^^^^^^^^^^^^^ 252 | 253 | * Refactor tests to use dockerized Postgres server `#107 `_ 254 | 255 | 
* Reduce default pool minsize to 1 `#106 `_ 256 | 257 | * Explicitly enumerate packages in setup.py `#85 `_ 258 | 259 | * Remove expired connections from pool on acquire `#116 `_ 260 | 261 | * Don't crash when Connection is GC'ed `#124 `_ 262 | 263 | * Use loop.create_future() if available 264 | 265 | 266 | 0.9.2 (2016-01-31) 267 | ^^^^^^^^^^^^^^^^^^ 268 | 269 | * Make pool.release return asyncio.Future, so we can wait on it in 270 | `__aexit__` `#102 `_ 271 | 272 | * Add support for uuid type `#103 `_ 273 | 274 | 275 | 0.9.1 (2016-01-17) 276 | ^^^^^^^^^^^^^^^^^^ 277 | 278 | * Documentation update `#101 `_ 279 | 280 | 281 | 0.9.0 (2016-01-14) 282 | ^^^^^^^^^^^^^^^^^^ 283 | 284 | * Add async context managers for transactions `#91 `_ 285 | 286 | * Support async iterator in ResultProxy `#92 `_ 287 | 288 | * Add async with for engine `#90 `_ 289 | 290 | 291 | 0.8.0 (2015-12-31) 292 | ^^^^^^^^^^^^^^^^^^ 293 | 294 | * Add PostgreSQL notification support `#58 `_ 295 | 296 | * Support pools with unlimited size `#59 `_ 297 | 298 | * Cancel current DB operation on asyncio timeout `#66 `_ 299 | 300 | * Add async with support for Pool, Connection, Cursor `#88 `_ 301 | 302 | 303 | 0.7.0 (2015-04-22) 304 | ^^^^^^^^^^^^^^^^^^ 305 | 306 | * Get rid of resource leak on connection failure. 307 | 308 | * Report ResourceWarning on non-closed connections. 309 | 310 | * Deprecate iteration protocol support in cursor and ResultProxy. 311 | 312 | * Release sa connection to pool on `connection.close()`. 313 | 314 | 315 | 0.6.0 (2015-02-03) 316 | ^^^^^^^^^^^^^^^^^^ 317 | 318 | * Accept dict, list, tuple, named and positional parameters in 319 | `SAConnection.execute()` 320 | 321 | 322 | 0.5.2 (2014-12-08) 323 | ^^^^^^^^^^^^^^^^^^ 324 | 325 | * Minor release, fixes a bug that leaves connection in broken state 326 | after `cursor.execute()` failure. 327 | 328 | 329 | 0.5.1 (2014-10-31) 330 | ^^^^^^^^^^^^^^^^^^ 331 | 332 | * Fix a bug for processing transactions in line. 
333 | 334 | 335 | 0.5.0 (2014-10-31) 336 | ^^^^^^^^^^^^^^^^^^ 337 | 338 | * Add .terminate() to Pool and Engine 339 | 340 | * Reimplement connection pool (now pool size cannot be greater than pool.maxsize) 341 | 342 | * Add .close() and .wait_closed() to Pool and Engine 343 | 344 | * Add minsize, maxsize, size and freesize properties to sa.Engine 345 | 346 | * Support *echo* parameter for logging executed SQL commands 347 | 348 | * Connection.close() is not a coroutine (but we keep backward compatibility). 349 | 350 | 351 | 0.4.1 (2014-10-02) 352 | ^^^^^^^^^^^^^^^^^^ 353 | 354 | * make cursor iterable 355 | 356 | * update docs 357 | 358 | 359 | 0.4.0 (2014-10-02) 360 | ^^^^^^^^^^^^^^^^^^ 361 | 362 | * add timeouts for database operations. 363 | 364 | * Autoregister psycopg2 support for json data type. 365 | 366 | * Support JSON in aiopg.sa 367 | 368 | * Support ARRAY in aiopg.sa 369 | 370 | * Autoregister hstore support if present in connected DB 371 | 372 | * Support HSTORE in aiopg.sa 373 | 374 | 375 | 0.3.2 (2014-07-07) 376 | ^^^^^^^^^^^^^^^^^^ 377 | 378 | * change signature to cursor.execute(operation, parameters=None) to 379 | follow psycopg2 convention. 380 | 381 | 382 | 0.3.1 (2014-07-04) 383 | ^^^^^^^^^^^^^^^^^^ 384 | 385 | * Forward arguments to cursor constructor for pooled connections. 386 | 387 | 388 | 0.3.0 (2014-06-22) 389 | ^^^^^^^^^^^^^^^^^^ 390 | 391 | * Allow executing SQLAlchemy DDL statements. 392 | 393 | * Fix bug with race conditions on acquiring/releasing connections from pool. 394 | 395 | 396 | 0.2.3 (2014-06-12) 397 | ^^^^^^^^^^^^^^^^^^ 398 | 399 | * Fix bug in connection pool. 400 | 401 | 402 | 0.2.2 (2014-06-07) 403 | ^^^^^^^^^^^^^^^^^^ 404 | 405 | * Fix bug with passing parameters into SAConnection.execute when 406 | executing raw SQL expression. 407 | 408 | 409 | 0.2.1 (2014-05-08) 410 | ^^^^^^^^^^^^^^^^^^ 411 | 412 | * Close connection with invalid transaction status on returning to pool. 
413 | 414 | 415 | 0.2.0 (2014-05-04) 416 | ^^^^^^^^^^^^^^^^^^ 417 | 418 | * Implemented optional support for sqlalchemy functional sql layer. 419 | 420 | 421 | 0.1.0 (2014-04-06) 422 | ^^^^^^^^^^^^^^^^^^ 423 | 424 | * Implemented plain connections: connect, Connection, Cursor. 425 | 426 | * Implemented database pools: create_pool and Pool. 427 | -------------------------------------------------------------------------------- /tests/test_sa_connection.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import psycopg2 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, String, Table, func, select 6 | from sqlalchemy.schema import CreateTable, DropTable 7 | 8 | from aiopg import Cursor, sa 9 | 10 | meta = MetaData() 11 | tbl = Table( 12 | "sa_tbl", 13 | meta, 14 | Column("id", Integer, nullable=False, primary_key=True), 15 | Column("name", String(255)), 16 | ) 17 | 18 | 19 | @pytest.fixture 20 | def connect(make_connection): 21 | async def go(**kwargs): 22 | conn = await make_connection(**kwargs) 23 | cur = await conn.cursor() 24 | await cur.execute("DROP TABLE IF EXISTS sa_tbl") 25 | await cur.execute( 26 | "CREATE TABLE sa_tbl " "(id serial, name varchar(255))" 27 | ) 28 | await cur.execute("INSERT INTO sa_tbl (name)" "VALUES ('first')") 29 | cur.close() 30 | 31 | engine = mock.Mock(from_spec=sa.engine.Engine) 32 | engine.dialect = sa.engine._dialect 33 | return sa.SAConnection(conn, engine) 34 | 35 | yield go 36 | 37 | 38 | async def test_execute_text_select(connect): 39 | conn = await connect() 40 | res = await conn.execute("SELECT * FROM sa_tbl;") 41 | assert isinstance(res.cursor, Cursor) 42 | assert ("id", "name") == res.keys() 43 | rows = await res.fetchall() 44 | assert res.closed 45 | assert res.cursor is None 46 | assert 1 == len(rows) 47 | row = rows[0] 48 | assert 1 == row[0] 49 | assert 1 == row["id"] 50 | assert 1 == row.id 51 | assert "first" == row[1] 52 | 
assert "first" == row["name"] 53 | assert "first" == row.name 54 | 55 | 56 | async def test_execute_sa_select(connect): 57 | conn = await connect() 58 | res = await conn.execute(tbl.select()) 59 | assert isinstance(res.cursor, Cursor) 60 | assert ("id", "name") == res.keys() 61 | rows = await res.fetchall() 62 | assert res.closed 63 | assert res.cursor is None 64 | assert res.returns_rows 65 | 66 | assert 1 == len(rows) 67 | row = rows[0] 68 | assert 1 == row[0] 69 | assert 1 == row["id"] 70 | assert 1 == row.id 71 | assert "first" == row[1] 72 | assert "first" == row["name"] 73 | assert "first" == row.name 74 | 75 | 76 | async def test_execute_sa_select_with_in(connect): 77 | conn = await connect() 78 | await conn.execute(tbl.insert(), 2, "second") 79 | await conn.execute(tbl.insert(), 3, "third") 80 | 81 | res = await conn.execute( 82 | tbl.select().where(tbl.c.name.in_(["first", "second"])) 83 | ) 84 | rows = await res.fetchall() 85 | assert 2 == len(rows) 86 | assert (1, "first") == rows[0] 87 | assert (2, "second") == rows[1] 88 | 89 | 90 | async def test_execute_sa_insert_with_dict(connect): 91 | conn = await connect() 92 | await conn.execute(tbl.insert(), {"id": 2, "name": "second"}) 93 | 94 | res = await conn.execute(tbl.select()) 95 | rows = await res.fetchall() 96 | assert 2 == len(rows) 97 | assert (1, "first") == rows[0] 98 | assert (2, "second") == rows[1] 99 | 100 | 101 | async def test_execute_sa_insert_with_tuple(connect): 102 | conn = await connect() 103 | await conn.execute(tbl.insert(), (2, "second")) 104 | 105 | res = await conn.execute(tbl.select()) 106 | rows = await res.fetchall() 107 | assert 2 == len(rows) 108 | assert (1, "first") == rows[0] 109 | assert (2, "second") == rows[1] 110 | 111 | 112 | async def test_execute_sa_insert_named_params(connect): 113 | conn = await connect() 114 | await conn.execute(tbl.insert(), id=2, name="second") 115 | 116 | res = await conn.execute(tbl.select()) 117 | rows = await res.fetchall() 118 | assert 2 == 
len(rows) 119 | assert (1, "first") == rows[0] 120 | assert (2, "second") == rows[1] 121 | 122 | 123 | async def test_execute_sa_insert_positional_params(connect): 124 | conn = await connect() 125 | await conn.execute(tbl.insert(), 2, "second") 126 | 127 | res = await conn.execute(tbl.select()) 128 | rows = await res.fetchall() 129 | assert 2 == len(rows) 130 | assert (1, "first") == rows[0] 131 | assert (2, "second") == rows[1] 132 | 133 | 134 | async def test_scalar(connect): 135 | conn = await connect() 136 | res = await conn.scalar(select(func.count()).select_from(tbl)) 137 | assert 1, res 138 | 139 | 140 | async def test_scalar_None(connect): 141 | conn = await connect() 142 | await conn.execute(tbl.delete()) 143 | res = await conn.scalar(tbl.select()) 144 | assert res is None 145 | 146 | 147 | async def test_row_proxy(connect): 148 | conn = await connect() 149 | res = await conn.execute(tbl.select()) 150 | rows = await res.fetchall() 151 | row = rows[0] 152 | row2 = await (await conn.execute(tbl.select())).first() 153 | assert 2 == len(row) 154 | assert ["id", "name"] == list(row) 155 | assert "id" in row 156 | assert "unknown" not in row 157 | assert "first" == row.name 158 | assert "first" == row[tbl.c.name] 159 | with pytest.raises(AttributeError): 160 | row.unknown 161 | assert "(1, 'first')" == repr(row) 162 | assert (1, "first") == row.as_tuple() 163 | assert (555, "other") != row.as_tuple() 164 | assert row2 == row 165 | assert not (row2 != row) 166 | assert 5 != row 167 | 168 | 169 | async def test_insert(connect): 170 | conn = await connect() 171 | res = await conn.execute(tbl.insert().values(name="second")) 172 | assert ("id",) == res.keys() 173 | assert 1 == res.rowcount 174 | assert res.returns_rows 175 | 176 | rows = await res.fetchall() 177 | assert 1 == len(rows) 178 | assert 2 == rows[0].id 179 | 180 | 181 | async def test_raw_insert(connect): 182 | conn = await connect() 183 | await conn.execute("INSERT INTO sa_tbl (name) VALUES ('third')") 
184 | res = await conn.execute(tbl.select()) 185 | assert 2 == res.rowcount 186 | assert ("id", "name") == res.keys() 187 | assert res.returns_rows 188 | 189 | rows = await res.fetchall() 190 | assert 2 == len(rows) 191 | assert 2 == rows[1].id 192 | 193 | 194 | async def test_raw_insert_with_params(connect): 195 | conn = await connect() 196 | res = await conn.execute( 197 | "INSERT INTO sa_tbl (id, name) VALUES (%s, %s)", 2, "third" 198 | ) 199 | res = await conn.execute(tbl.select()) 200 | assert 2 == res.rowcount 201 | assert ("id", "name") == res.keys() 202 | assert res.returns_rows 203 | 204 | rows = await res.fetchall() 205 | assert 2 == len(rows) 206 | assert 2 == rows[1].id 207 | 208 | 209 | async def test_raw_insert_with_params_dict(connect): 210 | conn = await connect() 211 | res = await conn.execute( 212 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", 213 | {"id": 2, "name": "third"}, 214 | ) 215 | res = await conn.execute(tbl.select()) 216 | assert 2 == res.rowcount 217 | assert ("id", "name") == res.keys() 218 | assert res.returns_rows 219 | 220 | rows = await res.fetchall() 221 | assert 2 == len(rows) 222 | assert 2 == rows[1].id 223 | 224 | 225 | async def test_raw_insert_with_named_params(connect): 226 | conn = await connect() 227 | res = await conn.execute( 228 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", 229 | id=2, 230 | name="third", 231 | ) 232 | res = await conn.execute(tbl.select()) 233 | assert 2 == res.rowcount 234 | assert ("id", "name") == res.keys() 235 | assert res.returns_rows 236 | 237 | rows = await res.fetchall() 238 | assert 2 == len(rows) 239 | assert 2 == rows[1].id 240 | 241 | 242 | async def test_raw_insert_with_executemany(connect): 243 | conn = await connect() 244 | with pytest.raises(sa.ArgumentError): 245 | await conn.execute( 246 | "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", 247 | [(2, "third"), (3, "forth")], 248 | ) 249 | 250 | 251 | async def test_delete(connect): 252 | conn = 
await connect() 253 | res = await conn.execute(tbl.delete().where(tbl.c.id == 1)) 254 | assert () == res.keys() 255 | assert 1 == res.rowcount 256 | assert not res.returns_rows 257 | assert res.closed 258 | assert res.cursor is None 259 | 260 | 261 | async def test_double_close(connect): 262 | conn = await connect() 263 | res = await conn.execute("SELECT 1") 264 | res.close() 265 | assert res.closed 266 | assert res.cursor is None 267 | res.close() 268 | assert res.closed 269 | assert res.cursor is None 270 | 271 | 272 | async def test_fetchall(connect): 273 | conn = await connect() 274 | await conn.execute(tbl.insert().values(name="second")) 275 | 276 | res = await conn.execute(tbl.select()) 277 | rows = await res.fetchall() 278 | assert 2 == len(rows) 279 | assert res.closed 280 | assert res.returns_rows 281 | assert [(1, "first") == (2, "second")], rows 282 | 283 | 284 | async def test_fetchall_closed(connect): 285 | conn = await connect() 286 | await conn.execute(tbl.insert().values(name="second")) 287 | 288 | res = await conn.execute(tbl.select()) 289 | res.close() 290 | with pytest.raises(sa.ResourceClosedError): 291 | await res.fetchall() 292 | 293 | 294 | async def test_fetchall_not_returns_rows(connect): 295 | conn = await connect() 296 | res = await conn.execute(tbl.delete()) 297 | with pytest.raises(sa.ResourceClosedError): 298 | await res.fetchall() 299 | 300 | 301 | async def test_fetchone_closed(connect): 302 | conn = await connect() 303 | await conn.execute(tbl.insert().values(name="second")) 304 | 305 | res = await conn.execute(tbl.select()) 306 | res.close() 307 | with pytest.raises(sa.ResourceClosedError): 308 | await res.fetchone() 309 | 310 | 311 | async def test_first_not_returns_rows(connect): 312 | conn = await connect() 313 | res = await conn.execute(tbl.delete()) 314 | with pytest.raises(sa.ResourceClosedError): 315 | await res.first() 316 | 317 | 318 | async def test_fetchmany(connect): 319 | conn = await connect() 320 | await 
conn.execute(tbl.insert().values(name="second")) 321 | 322 | res = await conn.execute(tbl.select()) 323 | rows = await res.fetchmany() 324 | assert 1 == len(rows) 325 | assert not res.closed 326 | assert res.returns_rows 327 | assert [(1, "first")] == rows 328 | 329 | 330 | async def test_fetchmany_with_size(connect): 331 | conn = await connect() 332 | await conn.execute(tbl.insert().values(name="second")) 333 | 334 | res = await conn.execute(tbl.select()) 335 | rows = await res.fetchmany(100) 336 | assert 2 == len(rows) 337 | assert not res.closed 338 | assert res.returns_rows 339 | assert [(1, "first") == (2, "second")], rows 340 | 341 | 342 | async def test_fetchmany_closed(connect): 343 | conn = await connect() 344 | await conn.execute(tbl.insert().values(name="second")) 345 | 346 | res = await conn.execute(tbl.select()) 347 | res.close() 348 | with pytest.raises(sa.ResourceClosedError): 349 | await res.fetchmany() 350 | 351 | 352 | async def test_fetchmany_with_size_closed(connect): 353 | conn = await connect() 354 | await conn.execute(tbl.insert().values(name="second")) 355 | 356 | res = await conn.execute(tbl.select()) 357 | res.close() 358 | with pytest.raises(sa.ResourceClosedError): 359 | await res.fetchmany(5555) 360 | 361 | 362 | async def test_fetchmany_not_returns_rows(connect): 363 | conn = await connect() 364 | res = await conn.execute(tbl.delete()) 365 | with pytest.raises(sa.ResourceClosedError): 366 | await res.fetchmany() 367 | 368 | 369 | async def test_fetchmany_close_after_last_read(connect): 370 | conn = await connect() 371 | 372 | res = await conn.execute(tbl.select()) 373 | rows = await res.fetchmany() 374 | assert 1 == len(rows) 375 | assert not res.closed 376 | assert res.returns_rows 377 | assert [(1, "first")] == rows 378 | rows2 = await res.fetchmany() 379 | assert 0 == len(rows2) 380 | assert res.closed 381 | 382 | 383 | async def test_create_table(connect): 384 | conn = await connect() 385 | res = await conn.execute(DropTable(tbl)) 
386 | with pytest.raises(sa.ResourceClosedError): 387 | await res.fetchmany() 388 | 389 | with pytest.raises(psycopg2.ProgrammingError): 390 | await conn.execute("SELECT * FROM sa_tbl") 391 | 392 | res = await conn.execute(CreateTable(tbl)) 393 | with pytest.raises(sa.ResourceClosedError): 394 | await res.fetchmany() 395 | 396 | res = await conn.execute("SELECT * FROM sa_tbl") 397 | assert 0 == len(await res.fetchall()) 398 | 399 | 400 | async def test_execute_when_closed(connect): 401 | conn = await connect() 402 | await conn.close() 403 | 404 | with pytest.raises(sa.ResourceClosedError): 405 | await conn.execute(tbl.select()) 406 | -------------------------------------------------------------------------------- /tests/test_sa_transaction.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from unittest import mock 3 | 4 | import pytest 5 | from sqlalchemy import Column, Integer, MetaData, String, Table, func, select 6 | 7 | from aiopg import sa 8 | 9 | meta = MetaData() 10 | tbl = Table( 11 | "sa_tbl2", 12 | meta, 13 | Column("id", Integer, nullable=False, primary_key=True), 14 | Column("name", String(255)), 15 | ) 16 | 17 | 18 | @pytest.fixture 19 | def connect(make_connection): 20 | async def go(**kwargs): 21 | conn = await make_connection(**kwargs) 22 | cur = await conn.cursor() 23 | await cur.execute("DROP TABLE IF EXISTS sa_tbl2") 24 | await cur.execute( 25 | "CREATE TABLE sa_tbl2 " "(id serial, name varchar(255))" 26 | ) 27 | await cur.execute("INSERT INTO sa_tbl2 (name)" "VALUES ('first')") 28 | cur.close() 29 | 30 | engine = mock.Mock(from_spec=sa.engine.Engine) 31 | engine.dialect = sa.engine._dialect 32 | return sa.SAConnection(conn, engine) 33 | 34 | yield go 35 | 36 | 37 | @pytest.fixture 38 | def xa_connect(connect): 39 | async def go(**kwargs): 40 | conn = await connect(**kwargs) 41 | val = await conn.scalar("show max_prepared_transactions") 42 | if not int(val): 43 | raise pytest.skip( 44 | 
"Twophase transacions are not supported. " 45 | "Set max_prepared_transactions to " 46 | "a nonzero value" 47 | ) 48 | return conn 49 | 50 | yield go 51 | 52 | 53 | async def test_without_transactions(connect): 54 | conn1 = await connect() 55 | conn2 = await connect() 56 | res1 = await conn1.scalar(select(func.count()).select_from(tbl)) 57 | assert 1 == res1 58 | 59 | await conn2.execute(tbl.delete()) 60 | 61 | res2 = await conn1.scalar(select(func.count()).select_from(tbl)) 62 | assert 0 == res2 63 | 64 | 65 | async def test_connection_attr(connect): 66 | conn = await connect() 67 | tr = await conn.begin() 68 | assert tr.connection is conn 69 | 70 | 71 | async def test_root_transaction(connect): 72 | conn1 = await connect() 73 | conn2 = await connect() 74 | 75 | tr = await conn1.begin() 76 | assert tr.is_active 77 | await conn1.execute(tbl.delete()) 78 | 79 | res1 = await conn2.scalar(select(func.count()).select_from(tbl)) 80 | assert 1 == res1 81 | 82 | await tr.commit() 83 | 84 | assert not tr.is_active 85 | assert not conn1.in_transaction 86 | res2 = await conn2.scalar(select(func.count()).select_from(tbl)) 87 | assert 0 == res2 88 | 89 | 90 | async def test_root_transaction_rollback(connect): 91 | conn1 = await connect() 92 | conn2 = await connect() 93 | 94 | tr = await conn1.begin() 95 | assert tr.is_active 96 | await conn1.execute(tbl.delete()) 97 | 98 | res1 = await conn2.scalar(select(func.count()).select_from(tbl)) 99 | assert 1 == res1 100 | 101 | await tr.rollback() 102 | 103 | assert not tr.is_active 104 | res2 = await conn2.scalar(select(func.count()).select_from(tbl)) 105 | assert 1 == res2 106 | 107 | 108 | async def test_root_transaction_close(connect): 109 | conn1 = await connect() 110 | conn2 = await connect() 111 | 112 | tr = await conn1.begin() 113 | assert tr.is_active 114 | await conn1.execute(tbl.delete()) 115 | 116 | res1 = await conn2.scalar(select(func.count()).select_from(tbl)) 117 | assert 1 == res1 118 | 119 | await tr.close() 120 | 
121 | assert not tr.is_active 122 | res2 = await conn2.scalar(select(func.count()).select_from(tbl)) 123 | assert 1 == res2 124 | 125 | 126 | async def test_root_transaction_commit_inactive(connect): 127 | conn = await connect() 128 | tr = await conn.begin() 129 | assert tr.is_active 130 | await tr.commit() 131 | assert not tr.is_active 132 | with pytest.raises(sa.InvalidRequestError): 133 | await tr.commit() 134 | 135 | 136 | async def test_root_transaction_rollback_inactive(connect): 137 | conn = await connect() 138 | tr = await conn.begin() 139 | assert tr.is_active 140 | await tr.rollback() 141 | assert not tr.is_active 142 | await tr.rollback() 143 | assert not tr.is_active 144 | 145 | 146 | async def test_root_transaction_double_close(connect): 147 | conn = await connect() 148 | tr = await conn.begin() 149 | assert tr.is_active 150 | await tr.close() 151 | assert not tr.is_active 152 | await tr.close() 153 | assert not tr.is_active 154 | 155 | 156 | async def test_inner_transaction_commit(connect): 157 | conn = await connect() 158 | tr1 = await conn.begin() 159 | tr2 = await conn.begin() 160 | assert tr2.is_active 161 | 162 | await tr2.commit() 163 | assert not tr2.is_active 164 | assert tr1.is_active 165 | 166 | await tr1.commit() 167 | assert not tr2.is_active 168 | assert not tr1.is_active 169 | 170 | 171 | async def test_rollback_on_connection_close(connect): 172 | conn1 = await connect() 173 | conn2 = await connect() 174 | 175 | tr = await conn1.begin() 176 | await conn1.execute(tbl.delete()) 177 | 178 | res1 = await conn2.scalar(select(func.count()).select_from(tbl)) 179 | assert 1 == res1 180 | 181 | await conn1.close() 182 | 183 | res2 = await conn2.scalar(select(func.count()).select_from(tbl)) 184 | assert 1 == res2 185 | del tr 186 | 187 | 188 | async def test_inner_transaction_rollback(connect): 189 | conn = await connect() 190 | tr1 = await conn.begin() 191 | tr2 = await conn.begin() 192 | assert tr2.is_active 193 | await 
conn.execute(tbl.insert().values(name="aaaa")) 194 | 195 | await tr2.rollback() 196 | assert not tr2.is_active 197 | assert not tr1.is_active 198 | 199 | res = await conn.scalar(select(func.count()).select_from(tbl)) 200 | assert 1 == res 201 | 202 | 203 | async def test_inner_transaction_close(connect): 204 | conn = await connect() 205 | tr1 = await conn.begin() 206 | tr2 = await conn.begin() 207 | assert tr2.is_active 208 | await conn.execute(tbl.insert().values(name="aaaa")) 209 | 210 | await tr2.close() 211 | assert not tr2.is_active 212 | assert tr1.is_active 213 | await tr1.commit() 214 | 215 | res = await conn.scalar(select(func.count()).select_from(tbl)) 216 | assert 2 == res 217 | 218 | 219 | async def test_nested_transaction_commit(connect): 220 | conn = await connect() 221 | tr1 = await conn.begin_nested() 222 | tr2 = await conn.begin_nested() 223 | assert tr1.is_active 224 | assert tr2.is_active 225 | 226 | await conn.execute(tbl.insert().values(name="aaaa")) 227 | await tr2.commit() 228 | assert not tr2.is_active 229 | assert tr1.is_active 230 | 231 | res = await conn.scalar(select(func.count()).select_from(tbl)) 232 | assert 2 == res 233 | 234 | await tr1.commit() 235 | assert not tr2.is_active 236 | assert not tr1.is_active 237 | 238 | res = await conn.scalar(select(func.count()).select_from(tbl)) 239 | assert 2 == res 240 | 241 | 242 | async def test_nested_transaction_commit_twice(connect): 243 | conn = await connect() 244 | tr1 = await conn.begin_nested() 245 | tr2 = await conn.begin_nested() 246 | 247 | await conn.execute(tbl.insert().values(name="aaaa")) 248 | await tr2.commit() 249 | assert not tr2.is_active 250 | assert tr1.is_active 251 | 252 | await tr2.commit() 253 | assert not tr2.is_active 254 | assert tr1.is_active 255 | 256 | res = await conn.scalar(select(func.count()).select_from(tbl)) 257 | assert 2 == res 258 | 259 | await tr1.close() 260 | 261 | 262 | async def test_nested_transaction_rollback(connect): 263 | conn = await connect() 
264 | tr1 = await conn.begin_nested() 265 | tr2 = await conn.begin_nested() 266 | assert tr1.is_active 267 | assert tr2.is_active 268 | 269 | await conn.execute(tbl.insert().values(name="aaaa")) 270 | await tr2.rollback() 271 | assert not tr2.is_active 272 | assert tr1.is_active 273 | 274 | res = await conn.scalar(select(func.count()).select_from(tbl)) 275 | assert 1 == res 276 | 277 | await tr1.commit() 278 | assert not tr2.is_active 279 | assert not tr1.is_active 280 | 281 | res = await conn.scalar(select(func.count()).select_from(tbl)) 282 | assert 1 == res 283 | 284 | 285 | async def test_nested_transaction_rollback_twice(connect): 286 | conn = await connect() 287 | tr1 = await conn.begin_nested() 288 | tr2 = await conn.begin_nested() 289 | 290 | await conn.execute(tbl.insert().values(name="aaaa")) 291 | await tr2.rollback() 292 | assert not tr2.is_active 293 | assert tr1.is_active 294 | 295 | await tr2.rollback() 296 | assert not tr2.is_active 297 | assert tr1.is_active 298 | 299 | await tr1.commit() 300 | res = await conn.scalar(select(func.count()).select_from(tbl)) 301 | assert 1 == res 302 | 303 | 304 | async def test_twophase_transaction_commit(xa_connect): 305 | conn = await xa_connect() 306 | tr = await conn.begin_twophase() 307 | await conn.execute(tbl.insert().values(name="aaaa")) 308 | 309 | await tr.prepare() 310 | assert tr.is_active 311 | 312 | await tr.commit() 313 | assert not tr.is_active 314 | 315 | res = await conn.scalar(select(func.count()).select_from(tbl)) 316 | assert 2 == res 317 | 318 | 319 | async def test_twophase_transaction_twice(xa_connect): 320 | conn = await xa_connect() 321 | tr = await conn.begin_twophase() 322 | with pytest.raises(sa.InvalidRequestError): 323 | await conn.begin_twophase() 324 | 325 | assert tr.is_active 326 | await tr.prepare() 327 | await tr.commit() 328 | 329 | 330 | async def test_transactions_sequence(xa_connect): 331 | conn = await xa_connect() 332 | 333 | await conn.execute(tbl.delete()) 334 | 335 | 
assert conn._transaction is None 336 | 337 | tr1 = await conn.begin() 338 | assert tr1 is conn._transaction 339 | await conn.execute(tbl.insert().values(name="a")) 340 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 341 | assert 1 == res1 342 | 343 | await tr1.commit() 344 | assert conn._transaction is None 345 | 346 | tr2 = await conn.begin() 347 | assert tr2 is conn._transaction 348 | await conn.execute(tbl.insert().values(name="b")) 349 | res2 = await conn.scalar(select(func.count()).select_from(tbl)) 350 | assert 2 == res2 351 | 352 | await tr2.rollback() 353 | assert conn._transaction is None 354 | 355 | tr3 = await conn.begin() 356 | assert tr3 is conn._transaction 357 | await conn.execute(tbl.insert().values(name="b")) 358 | res3 = await conn.scalar(select(func.count()).select_from(tbl)) 359 | assert 2 == res3 360 | 361 | await tr3.commit() 362 | assert conn._transaction is None 363 | 364 | 365 | async def test_transaction_mode(connect): 366 | conn = await connect() 367 | 368 | await conn.execute(tbl.delete()) 369 | 370 | tr1 = await conn.begin(isolation_level="SERIALIZABLE") 371 | await conn.execute(tbl.insert().values(name="a")) 372 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 373 | assert 1 == res1 374 | await tr1.commit() 375 | 376 | tr2 = await conn.begin(isolation_level="REPEATABLE READ") 377 | await conn.execute(tbl.insert().values(name="b")) 378 | res2 = await conn.scalar(select(func.count()).select_from(tbl)) 379 | assert 2 == res2 380 | await tr2.commit() 381 | 382 | tr3 = await conn.begin(isolation_level="READ UNCOMMITTED") 383 | await conn.execute(tbl.insert().values(name="c")) 384 | res3 = await conn.scalar(select(func.count()).select_from(tbl)) 385 | assert 3 == res3 386 | await tr3.commit() 387 | 388 | tr4 = await conn.begin(readonly=True) 389 | assert tr4 is conn._transaction 390 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 391 | assert 3 == res1 392 | await tr4.commit() 393 | 394 | 
tr5 = await conn.begin(isolation_level="READ UNCOMMITTED", readonly=True) 395 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 396 | assert 3 == res1 397 | await tr5.commit() 398 | 399 | tr6 = await conn.begin(deferrable=True) 400 | await conn.execute(tbl.insert().values(name="f")) 401 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 402 | assert 4 == res1 403 | await tr6.commit() 404 | 405 | tr7 = await conn.begin(isolation_level="REPEATABLE READ", deferrable=True) 406 | await conn.execute(tbl.insert().values(name="g")) 407 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 408 | assert 5 == res1 409 | await tr7.commit() 410 | 411 | tr8 = await conn.begin( 412 | isolation_level="SERIALIZABLE", readonly=True, deferrable=True 413 | ) 414 | assert tr8 is conn._transaction 415 | res1 = await conn.scalar(select(func.count()).select_from(tbl)) 416 | assert 5 == res1 417 | await tr8.commit() 418 | 419 | 420 | async def test_timeout_in_transaction_context_manager(make_engine): 421 | engine = await make_engine(timeout=1) 422 | with pytest.raises(asyncio.TimeoutError): 423 | async with engine.acquire() as connection: 424 | async with connection.begin(): 425 | await connection.execute("SELECT pg_sleep(10)") 426 | 427 | engine.terminate() 428 | await engine.wait_closed() 429 | 430 | 431 | async def test_timeout_in_nested_transaction_context_manager(make_engine): 432 | engine = await make_engine(timeout=1) 433 | with pytest.raises(asyncio.TimeoutError): 434 | async with engine.acquire() as connection: 435 | async with connection.begin(): 436 | async with connection.begin_nested(): 437 | await connection.execute("SELECT pg_sleep(10)") 438 | 439 | engine.terminate() 440 | await engine.wait_closed() 441 | 442 | 443 | async def test_cancel_in_transaction_context_manager(make_engine, loop): 444 | engine = await make_engine() 445 | 446 | with pytest.raises(asyncio.CancelledError): 447 | async with engine.acquire() as connection: 448 | 
async with connection.begin(): 449 | task = loop.create_task( 450 | connection.execute("SELECT pg_sleep(10)") 451 | ) 452 | 453 | async def cancel_soon(): 454 | await asyncio.sleep(1) 455 | task.cancel() 456 | 457 | loop.create_task(cancel_soon()) 458 | await task 459 | 460 | engine.terminate() 461 | await engine.wait_closed() 462 | 463 | 464 | async def test_cancel_in_savepoint_context_manager(make_engine, loop): 465 | engine = await make_engine() 466 | 467 | with pytest.raises(asyncio.CancelledError): 468 | async with engine.acquire() as connection: 469 | async with connection.begin(): 470 | async with connection.begin_nested(): 471 | task = loop.create_task( 472 | connection.execute("SELECT pg_sleep(10)") 473 | ) 474 | 475 | async def cancel_soon(): 476 | await asyncio.sleep(1) 477 | task.cancel() 478 | 479 | loop.create_task(cancel_soon()) 480 | await task 481 | 482 | engine.terminate() 483 | await engine.wait_closed() 484 | --------------------------------------------------------------------------------