├── tests ├── __init__.py ├── crdb │ ├── __init__.py │ ├── test_conninfo.py │ ├── test_no_crdb.py │ ├── test_typing.py │ ├── test_cursor_async.py │ ├── test_cursor.py │ ├── test_connection.py │ ├── test_connection_async.py │ └── test_adapt.py ├── pool │ ├── __init__.py │ ├── fix_pool.py │ ├── test_pool_async_noasyncio.py │ └── test_sched.py ├── pq │ ├── __init__.py │ ├── test_pq.py │ ├── test_conninfo.py │ └── test_misc.py ├── types │ ├── __init__.py │ ├── test_none.py │ ├── test_uuid.py │ ├── test_bool.py │ └── test_hstore.py ├── test_windows.py ├── constraints.txt ├── test_dns.py ├── adapters_example.py ├── scripts │ └── dectest.py ├── test_encodings.py ├── test_module.py ├── fix_mypy.py ├── fix_psycopg.py ├── conftest.py ├── README.rst ├── fix_crdb.py ├── fix_proxy.py └── fix_pq.py ├── docs ├── _templates │ └── .keep ├── _static │ └── psycopg.css ├── advanced │ ├── index.rst │ └── prepare.rst ├── api │ ├── index.rst │ ├── conninfo.rst │ ├── module.rst │ ├── rows.rst │ ├── abc.rst │ ├── adapt.rst │ └── copy.rst ├── lib │ ├── sql_role.py │ └── ticket_role.py ├── basic │ └── index.rst ├── Makefile ├── release.rst ├── index.rst ├── news_pool.rst └── conf.py ├── psycopg ├── psycopg │ ├── py.typed │ ├── types │ │ ├── __init__.py │ │ ├── none.py │ │ ├── bool.py │ │ ├── uuid.py │ │ ├── shapely.py │ │ └── hstore.py │ ├── version.py │ ├── crdb │ │ └── __init__.py │ ├── _cmodule.py │ ├── _tz.py │ ├── _enums.py │ ├── _compat.py │ ├── _struct.py │ ├── client_cursor.py │ ├── pq │ │ └── _debug.py │ ├── __init__.py │ ├── dbapi20.py │ ├── _wrappers.py │ └── _tpc.py ├── pyproject.toml ├── .flake8 ├── setup.py ├── README.rst └── setup.cfg ├── psycopg_c ├── psycopg_c │ ├── py.typed │ ├── .gitignore │ ├── pq │ │ ├── __init__.pxd │ │ ├── pgcancel.pyx │ │ ├── conninfo.pyx │ │ └── pqbuffer.pyx │ ├── _psycopg │ │ ├── __init__.pxd │ │ └── oids.pxd │ ├── version.py │ ├── _compat.py │ ├── __init__.py │ ├── pq.pyx │ ├── _psycopg.pyx │ ├── types │ │ └── bool.pyx │ ├── pq.pxd │ └── 
_psycopg.pyi ├── .flake8 ├── pyproject.toml ├── README-binary.rst ├── README.rst ├── setup.cfg └── setup.py ├── psycopg_pool ├── psycopg_pool │ ├── py.typed │ ├── version.py │ ├── errors.py │ ├── __init__.py │ └── _compat.py ├── .flake8 ├── pyproject.toml ├── setup.py ├── README.rst └── setup.cfg ├── .github ├── FUNDING.yml └── workflows │ ├── docs.yml │ ├── lint.yml │ ├── packages-pool.yml │ └── packages-src.yml ├── tools ├── build │ ├── wheel_win32_before_build.bat │ ├── wheel_macos_before_all.sh │ ├── ci_test.sh │ ├── print_so_versions.sh │ ├── run_build_macos_arm64.sh │ ├── copy_to_binary.py │ ├── strip_wheel.sh │ ├── wheel_linux_before_all.sh │ ├── ci_install_libpq.sh │ └── build_macos_arm64.sh └── update_backer.py ├── .codespellrc ├── .yamllint.yaml ├── .flake8 ├── .gitignore ├── pyproject.toml ├── README.rst └── BACKERS.yaml /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_templates/.keep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/crdb/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pool/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/pq/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/types/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/psycopg/psycopg/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /psycopg_pool/psycopg_pool/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/.gitignore: -------------------------------------------------------------------------------- 1 | /*.so 2 | _psycopg.c 3 | pq.c 4 | *.html 5 | -------------------------------------------------------------------------------- /psycopg_c/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | ignore = W503, E203 4 | -------------------------------------------------------------------------------- /psycopg_pool/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | ignore = W503, E203 4 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: 2 | - dvarrazzo 3 | custom: 4 | - "https://www.paypal.me/dvarrazzo" 5 | -------------------------------------------------------------------------------- /tools/build/wheel_win32_before_build.bat: -------------------------------------------------------------------------------- 1 | @echo on 2 | pip install delvewheel 3 | choco upgrade postgresql 4 | -------------------------------------------------------------------------------- /.codespellrc: -------------------------------------------------------------------------------- 1 | [codespell] 2 | 
ignore-words-list = alot,ans,ba,fo,te 3 | skip = docs/_build,.tox,.mypy_cache,.venv,pq.c 4 | -------------------------------------------------------------------------------- /psycopg/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=49.2.0", "wheel>=0.37"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /psycopg_pool/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=49.2.0", "wheel>=0.37"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /.yamllint.yaml: -------------------------------------------------------------------------------- 1 | extends: default 2 | 3 | rules: 4 | truthy: 5 | check-keys: false 6 | document-start: disable 7 | line-length: 8 | max: 85 9 | -------------------------------------------------------------------------------- /psycopg_c/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=49.2.0", "wheel>=0.37", "Cython>=3.0.0a11"] 3 | build-backend = "setuptools.build_meta" 4 | -------------------------------------------------------------------------------- /psycopg/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | ignore = W503, E203 4 | per-file-ignores = 5 | # Autogenerated section 6 | psycopg/errors.py: E125, E128, E302 7 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | ignore = W503, E203 4 | extend-exclude = .venv build 5 | per-file-ignores = 6 | # Autogenerated 
section 7 | psycopg/psycopg/errors.py: E125, E128, E302 8 | -------------------------------------------------------------------------------- /psycopg/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | PostgreSQL database adapter for Python - pure Python package 4 | """ 5 | 6 | # Copyright (C) 2020 The Psycopg Team 7 | 8 | from setuptools import setup 9 | 10 | setup() 11 | -------------------------------------------------------------------------------- /psycopg_pool/setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | PostgreSQL database adapter for Python - Connection Pool 4 | """ 5 | 6 | # Copyright (C) 2020 The Psycopg Team 7 | 8 | from setuptools import setup 9 | 10 | setup() 11 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/pq/__init__.pxd: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg_c.pq cython module. 3 | 4 | This file is necessary to allow c-importing pxd files from this directory. 5 | """ 6 | 7 | # Copyright (C) 2020 The Psycopg Team 8 | 9 | from psycopg_c.pq cimport libpq 10 | -------------------------------------------------------------------------------- /psycopg/psycopg/types/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg types package 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from .. 
import _typeinfo 8 | 9 | # Exposed here 10 | TypeInfo = _typeinfo.TypeInfo 11 | TypesRegistry = _typeinfo.TypesRegistry 12 | -------------------------------------------------------------------------------- /docs/_static/psycopg.css: -------------------------------------------------------------------------------- 1 | /* style rubric in furo (too small IMO) */ 2 | p.rubric { 3 | font-size: 1.2rem; 4 | font-weight: bold; 5 | } 6 | 7 | /* override a silly default */ 8 | table.align-default td, 9 | table.align-default th { 10 | text-align: left; 11 | } 12 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/_psycopg/__init__.pxd: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg_c._psycopg cython module. 3 | 4 | This file is necessary to allow c-importing pxd files from this directory. 5 | """ 6 | 7 | # Copyright (C) 2020 The Psycopg Team 8 | 9 | from psycopg_c._psycopg cimport oids 10 | -------------------------------------------------------------------------------- /psycopg/psycopg/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg distribution version file. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from ._compat import metadata 8 | 9 | try: 10 | __version__ = metadata.version("psycopg") 11 | except metadata.PackageNotFoundError: 12 | __version__ = "0.0.0.0" 13 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg-c distribution version file. 
3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from ._compat import metadata 8 | 9 | try: 10 | __version__ = metadata.version("psycopg-c") 11 | except metadata.PackageNotFoundError: 12 | __version__ = "0.0.0.0" 13 | -------------------------------------------------------------------------------- /psycopg_pool/psycopg_pool/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg pool version file. 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | from ._compat import metadata 8 | 9 | try: 10 | __version__ = metadata.version("psycopg-pool") 11 | except metadata.PackageNotFoundError: 12 | __version__ = "0.0.0.0" 13 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/_compat.py: -------------------------------------------------------------------------------- 1 | """ 2 | compatibility functions for different Python versions 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | import sys 8 | 9 | if sys.version_info < (3, 8): 10 | import importlib_metadata as metadata 11 | else: 12 | from importlib import metadata 13 | 14 | 15 | __all__ = ["metadata"] 16 | -------------------------------------------------------------------------------- /tests/types/test_none.py: -------------------------------------------------------------------------------- 1 | from psycopg import sql 2 | from psycopg.adapt import Transformer, PyFormat 3 | 4 | 5 | def test_quote_none(conn): 6 | 7 | tx = Transformer() 8 | assert tx.get_dumper(None, PyFormat.TEXT).quote(None) == b"NULL" 9 | 10 | cur = conn.cursor() 11 | cur.execute(sql.SQL("select {v}").format(v=sql.Literal(None))) 12 | assert cur.fetchone()[0] is None 13 | -------------------------------------------------------------------------------- /tests/pool/fix_pool.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | def 
pytest_configure(config): 5 | config.addinivalue_line("markers", "pool: test related to the psycopg_pool package") 6 | 7 | 8 | def pytest_collection_modifyitems(items): 9 | # Add the pool markers to all the tests in the pool package 10 | for item in items: 11 | if "/pool/" in item.nodeid: 12 | item.add_marker(pytest.mark.pool) 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.egg-info/ 2 | .tox 3 | *.pstats 4 | *.swp 5 | .mypy_cache 6 | __pycache__/ 7 | /docs/_build/ 8 | *.html 9 | /psycopg_binary/ 10 | .vscode 11 | .venv 12 | .coverage 13 | htmlcov 14 | 15 | .eggs/ 16 | dist/ 17 | wheelhouse/ 18 | # Spelling these explicitly because we have /scripts/build/ to not ignore 19 | # but I still want 'ag' to avoid looking here. 20 | /build/ 21 | /psycopg/build/ 22 | /psycopg_c/build/ 23 | /psycopg_pool/build/ 24 | -------------------------------------------------------------------------------- /docs/advanced/index.rst: -------------------------------------------------------------------------------- 1 | .. _advanced: 2 | 3 | More advanced topics 4 | ==================== 5 | 6 | Once you have familiarised yourself with the :ref:`Psycopg basic operations 7 | `, you can take a look at the chapter of this section for more advanced 8 | usages. 9 | 10 | .. 
toctree:: 11 | :maxdepth: 2 12 | :caption: Contents: 13 | 14 | async 15 | typing 16 | rows 17 | pool 18 | cursors 19 | adapt 20 | prepare 21 | pipeline 22 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg -- PostgreSQL database adapter for Python -- C optimization package 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | import sys 8 | 9 | # This package shouldn't be imported before psycopg itself, or weird things 10 | # will happen 11 | if "psycopg" not in sys.modules: 12 | raise ImportError("the psycopg package should be imported before psycopg_c") 13 | 14 | from .version import __version__ as __version__ # noqa 15 | -------------------------------------------------------------------------------- /tests/crdb/test_conninfo.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | pytestmark = pytest.mark.crdb 4 | 5 | 6 | def test_vendor(conn): 7 | assert conn.info.vendor == "CockroachDB" 8 | 9 | 10 | def test_server_version(conn): 11 | assert conn.info.server_version > 200000 12 | 13 | 14 | @pytest.mark.crdb("< 22") 15 | def test_backend_pid_pre_22(conn): 16 | assert conn.info.backend_pid == 0 17 | 18 | 19 | @pytest.mark.crdb(">= 22") 20 | def test_backend_pid(conn): 21 | assert conn.info.backend_pid > 0 22 | -------------------------------------------------------------------------------- /psycopg/psycopg/crdb/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | CockroachDB support package. 3 | """ 4 | 5 | # Copyright (C) 2022 The Psycopg Team 6 | 7 | from . 
import _types 8 | from .connection import CrdbConnection, AsyncCrdbConnection, CrdbConnectionInfo 9 | 10 | adapters = _types.adapters # exposed by the package 11 | connect = CrdbConnection.connect 12 | 13 | _types.register_crdb_adapters(adapters) 14 | 15 | __all__ = [ 16 | "AsyncCrdbConnection", 17 | "CrdbConnection", 18 | "CrdbConnectionInfo", 19 | ] 20 | -------------------------------------------------------------------------------- /docs/api/index.rst: -------------------------------------------------------------------------------- 1 | Psycopg 3 API 2 | ============= 3 | 4 | .. _api: 5 | 6 | This sections is a reference for all the public objects exposed by the 7 | `psycopg` module. For a more conceptual description you can take a look at 8 | :ref:`basic` and :ref:`advanced`. 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Contents: 13 | 14 | module 15 | connections 16 | cursors 17 | copy 18 | objects 19 | sql 20 | rows 21 | errors 22 | pool 23 | conninfo 24 | adapt 25 | types 26 | abc 27 | pq 28 | crdb 29 | dns 30 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Build documentation 2 | 3 | on: 4 | push: 5 | branches: 6 | # This should match the DOC3_BRANCH value in the psycopg-website Makefile 7 | - master 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.ref_name }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | docs: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Trigger docs build 18 | uses: peter-evans/repository-dispatch@v1 19 | with: 20 | repository: psycopg/psycopg-website 21 | event-type: psycopg3-commit 22 | token: ${{ secrets.ACCESS_TOKEN }} 23 | -------------------------------------------------------------------------------- /psycopg_pool/psycopg_pool/errors.py: -------------------------------------------------------------------------------- 1 | """ 2 | Connection 
pool errors. 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | from psycopg import errors as e 8 | 9 | 10 | class PoolClosed(e.OperationalError): 11 | """Attempt to get a connection from a closed pool.""" 12 | 13 | __module__ = "psycopg_pool" 14 | 15 | 16 | class PoolTimeout(e.OperationalError): 17 | """The pool couldn't provide a connection in acceptable time.""" 18 | 19 | __module__ = "psycopg_pool" 20 | 21 | 22 | class TooManyRequests(e.OperationalError): 23 | """Too many requests in the queue waiting for a connection from the pool.""" 24 | 25 | __module__ = "psycopg_pool" 26 | -------------------------------------------------------------------------------- /psycopg_pool/psycopg_pool/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg connection pool package 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | from .pool import ConnectionPool 8 | from .pool_async import AsyncConnectionPool 9 | from .null_pool import NullConnectionPool 10 | from .null_pool_async import AsyncNullConnectionPool 11 | from .errors import PoolClosed, PoolTimeout, TooManyRequests 12 | from .version import __version__ as __version__ # noqa: F401 13 | 14 | __all__ = [ 15 | "AsyncConnectionPool", 16 | "AsyncNullConnectionPool", 17 | "ConnectionPool", 18 | "NullConnectionPool", 19 | "PoolClosed", 20 | "PoolTimeout", 21 | "TooManyRequests", 22 | ] 23 | -------------------------------------------------------------------------------- /docs/lib/sql_role.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | sql role 4 | ~~~~~~~~ 5 | 6 | An interpreted text role to style SQL syntax in Psycopg documentation. 7 | 8 | :copyright: Copyright 2010 by Daniele Varrazzo. 9 | :copyright: Copyright 2020 The Psycopg Team. 
10 | """ 11 | 12 | from docutils import nodes, utils 13 | from docutils.parsers.rst import roles 14 | 15 | 16 | def sql_role(name, rawtext, text, lineno, inliner, options={}, content=[]): 17 | text = utils.unescape(text) 18 | options["classes"] = ["sql"] 19 | return [nodes.literal(rawtext, text, **options)], [] 20 | 21 | 22 | def setup(app): 23 | roles.register_local_role("sql", sql_role) 24 | -------------------------------------------------------------------------------- /docs/basic/index.rst: -------------------------------------------------------------------------------- 1 | .. _basic: 2 | 3 | Getting started with Psycopg 3 4 | ============================== 5 | 6 | This section of the documentation will explain :ref:`how to install Psycopg 7 | ` and how to perform normal activities such as :ref:`querying 8 | the database ` or :ref:`loading data using COPY `. 9 | 10 | .. important:: 11 | 12 | If you are familiar with psycopg2 please take a look at 13 | :ref:`from-psycopg2` to see what is changed. 14 | 15 | .. toctree:: 16 | :maxdepth: 2 17 | :caption: Contents: 18 | 19 | install 20 | usage 21 | params 22 | adapt 23 | pgtypes 24 | transactions 25 | copy 26 | from_pg2 27 | -------------------------------------------------------------------------------- /docs/api/conninfo.rst: -------------------------------------------------------------------------------- 1 | .. _psycopg.conninfo: 2 | 3 | `conninfo` -- manipulate connection strings 4 | =========================================== 5 | 6 | This module contains a few utility functions to manipulate database 7 | connection strings. 8 | 9 | .. module:: psycopg.conninfo 10 | 11 | .. autofunction:: conninfo_to_dict 12 | 13 | .. code:: python 14 | 15 | >>> conninfo_to_dict("postgres://jeff@example.com/db", user="piro") 16 | {'user': 'piro', 'dbname': 'db', 'host': 'example.com'} 17 | 18 | 19 | .. autofunction:: make_conninfo 20 | 21 | .. 
code:: python 22 | 23 | >>> make_conninfo("dbname=db user=jeff", user="piro", port=5432) 24 | 'dbname=db user=piro port=5432' 25 | -------------------------------------------------------------------------------- /tests/test_windows.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import asyncio 3 | import sys 4 | 5 | from psycopg.errors import InterfaceError 6 | 7 | 8 | @pytest.mark.skipif(sys.platform != "win32", reason="windows only test") 9 | def test_windows_error(aconn_cls, dsn): 10 | loop = asyncio.ProactorEventLoop() # type: ignore[attr-defined] 11 | 12 | async def go(): 13 | with pytest.raises( 14 | InterfaceError, 15 | match="Psycopg cannot use the 'ProactorEventLoop'", 16 | ): 17 | await aconn_cls.connect(dsn) 18 | 19 | try: 20 | loop.run_until_complete(go()) 21 | finally: 22 | loop.run_until_complete(loop.shutdown_asyncgens()) 23 | loop.close() 24 | -------------------------------------------------------------------------------- /psycopg/psycopg/types/none.py: -------------------------------------------------------------------------------- 1 | """ 2 | Adapters for None. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from ..abc import AdaptContext, NoneType 8 | from ..adapt import Dumper 9 | 10 | 11 | class NoneDumper(Dumper): 12 | """ 13 | Not a complete dumper as it doesn't implement dump(), but it implements 14 | quote(), so it can be used in sql composition. 
15 | """ 16 | 17 | def dump(self, obj: None) -> bytes: 18 | raise NotImplementedError("NULL is passed to Postgres in other ways") 19 | 20 | def quote(self, obj: None) -> bytes: 21 | return b"NULL" 22 | 23 | 24 | def register_default_adapters(context: AdaptContext) -> None: 25 | context.adapters.register_dumper(NoneType, NoneDumper) 26 | -------------------------------------------------------------------------------- /tests/constraints.txt: -------------------------------------------------------------------------------- 1 | # This is a constraint file forcing the minimum allowed version to be 2 | # installed. 3 | # 4 | # https://pip.pypa.io/en/stable/user_guide/#constraints-files 5 | 6 | # From install_requires 7 | backports.zoneinfo == 0.2.0 8 | typing-extensions == 4.1.0 9 | 10 | # From the 'test' extra 11 | mypy == 0.981 12 | pproxy == 2.7.0 13 | pytest == 6.2.5 14 | pytest-asyncio == 0.17.0 15 | pytest-cov == 3.0.0 16 | pytest-randomly == 3.10.0 17 | 18 | # From the 'dev' extra 19 | black == 22.3.0 20 | dnspython == 2.1.0 21 | flake8 == 4.0.0 22 | mypy == 0.981 23 | types-setuptools == 57.4.0 24 | wheel == 0.37 25 | 26 | # From the 'docs' extra 27 | Sphinx == 4.2.0 28 | furo == 2021.11.23 29 | sphinx-autobuild == 2021.3.14 30 | sphinx-autodoc-typehints == 1.12.0 31 | dnspython == 2.1.0 32 | shapely == 1.7.0 33 | -------------------------------------------------------------------------------- /tests/test_dns.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import psycopg 4 | from psycopg.conninfo import conninfo_to_dict 5 | 6 | pytestmark = [pytest.mark.dns] 7 | 8 | 9 | @pytest.mark.asyncio 10 | async def test_resolve_hostaddr_async_warning(recwarn): 11 | import_dnspython() 12 | conninfo = "dbname=foo" 13 | params = conninfo_to_dict(conninfo) 14 | params = await psycopg._dns.resolve_hostaddr_async( # type: ignore[attr-defined] 15 | params 16 | ) 17 | assert conninfo_to_dict(conninfo) == params 18 | 
assert "resolve_hostaddr_async" in str(recwarn.pop(DeprecationWarning).message) 19 | 20 | 21 | def import_dnspython(): 22 | try: 23 | import dns.rdtypes.IN.A # noqa: F401 24 | except ImportError: 25 | pytest.skip("dnspython package not available") 26 | 27 | import psycopg._dns # noqa: F401 28 | -------------------------------------------------------------------------------- /tools/build/wheel_macos_before_all.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Configure the environment needed to build wheel packages on Mac OS. 4 | # This script is designed to be used by cibuildwheel as CIBW_BEFORE_ALL_MACOS 5 | 6 | set -euo pipefail 7 | set -x 8 | 9 | dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 10 | 11 | brew update 12 | brew install gnu-sed postgresql@14 13 | # Fetch 14.1 if 14.0 is still the default version 14 | brew reinstall postgresql 15 | 16 | # Start the database for testing 17 | brew services start postgresql 18 | 19 | # Wait for postgres to come up 20 | for i in $(seq 10 -1 0); do 21 | eval pg_isready && break 22 | if [ $i == 0 ]; then 23 | echo "PostgreSQL service not ready, giving up" 24 | exit 1 25 | fi 26 | echo "PostgreSQL service not ready, waiting a bit, attempts left: $i" 27 | sleep 5 28 | done 29 | -------------------------------------------------------------------------------- /psycopg_pool/README.rst: -------------------------------------------------------------------------------- 1 | Psycopg 3: PostgreSQL database adapter for Python - Connection Pool 2 | =================================================================== 3 | 4 | This distribution contains the optional connection pool package 5 | `psycopg_pool`__. 6 | 7 | .. __: https://www.psycopg.org/psycopg3/docs/advanced/pool.html 8 | 9 | This package is kept separate from the main ``psycopg`` package because it is 10 | likely that it will follow a different release cycle. 
11 | 12 | You can also install this package using:: 13 | 14 | pip install "psycopg[pool]" 15 | 16 | Please read `the project readme`__ and `the installation documentation`__ for 17 | more details. 18 | 19 | .. __: https://github.com/psycopg/psycopg#readme 20 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html 21 | #installing-the-connection-pool 22 | 23 | 24 | Copyright (C) 2020 The Psycopg Team 25 | -------------------------------------------------------------------------------- /tools/build/ci_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Run the tests in Github Action 4 | # 5 | # Failed tests run up to three times, to take into account flakey tests. 6 | # Of course the random generator is not re-seeded between runs, in order to 7 | # repeat the same result. 8 | 9 | set -euo pipefail 10 | set -x 11 | 12 | # Assemble a markers expression from the MARKERS and NOT_MARKERS env vars 13 | markers="" 14 | for m in ${MARKERS:-}; do 15 | [[ "$markers" != "" ]] && markers="$markers and" 16 | markers="$markers $m" 17 | done 18 | for m in ${NOT_MARKERS:-}; do 19 | [[ "$markers" != "" ]] && markers="$markers and" 20 | markers="$markers not $m" 21 | done 22 | 23 | pytest="python -bb -m pytest --color=yes" 24 | 25 | $pytest -m "$markers" "$@" && exit 0 26 | 27 | $pytest -m "$markers" --lf --randomly-seed=last "$@" && exit 0 28 | 29 | $pytest -m "$markers" --lf --randomly-seed=last "$@" 30 | -------------------------------------------------------------------------------- /psycopg/psycopg/_cmodule.py: -------------------------------------------------------------------------------- 1 | """ 2 | Simplify access to the _psycopg module 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | from typing import Optional 8 | 9 | from . 
import pq 10 | 11 | __version__: Optional[str] = None 12 | 13 | # Note: "c" must the first attempt so that mypy associates the variable the 14 | # right module interface. It will not result Optional, but hey. 15 | if pq.__impl__ == "c": 16 | from psycopg_c import _psycopg as _psycopg 17 | from psycopg_c import __version__ as __version__ # noqa: F401 18 | elif pq.__impl__ == "binary": 19 | from psycopg_binary import _psycopg as _psycopg # type: ignore 20 | from psycopg_binary import __version__ as __version__ # type: ignore # noqa: F401 21 | elif pq.__impl__ == "python": 22 | _psycopg = None # type: ignore 23 | else: 24 | raise ImportError(f"can't find _psycopg optimised module in {pq.__impl__!r}") 25 | -------------------------------------------------------------------------------- /psycopg/README.rst: -------------------------------------------------------------------------------- 1 | Psycopg 3: PostgreSQL database adapter for Python 2 | ================================================= 3 | 4 | Psycopg 3 is a modern implementation of a PostgreSQL adapter for Python. 5 | 6 | This distribution contains the pure Python package ``psycopg``. 7 | 8 | 9 | Installation 10 | ------------ 11 | 12 | In short, run the following:: 13 | 14 | pip install --upgrade pip # to upgrade pip 15 | pip install "psycopg[binary,pool]" # to install package and dependencies 16 | 17 | If something goes wrong, and for more information about installation, please 18 | check out the `Installation documentation`__. 19 | 20 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html# 21 | 22 | 23 | Hacking 24 | ------- 25 | 26 | For development information check out `the project readme`__. 27 | 28 | .. 
__: https://github.com/psycopg/psycopg#readme 29 | 30 | 31 | Copyright (C) 2020 The Psycopg Team 32 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/pq.pyx: -------------------------------------------------------------------------------- 1 | """ 2 | libpq Python wrapper using cython bindings. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from psycopg_c.pq cimport libpq 8 | 9 | import logging 10 | 11 | from psycopg import errors as e 12 | from psycopg.pq import Format 13 | from psycopg.pq.misc import error_message 14 | 15 | logger = logging.getLogger("psycopg") 16 | 17 | __impl__ = 'c' 18 | __build_version__ = libpq.PG_VERSION_NUM 19 | 20 | 21 | def version(): 22 | return libpq.PQlibVersion() 23 | 24 | 25 | include "pq/pgconn.pyx" 26 | include "pq/pgresult.pyx" 27 | include "pq/pgcancel.pyx" 28 | include "pq/conninfo.pyx" 29 | include "pq/escaping.pyx" 30 | include "pq/pqbuffer.pyx" 31 | 32 | 33 | # importing the ssl module sets up Python's libcrypto callbacks 34 | import ssl # noqa 35 | 36 | # disable libcrypto setup in libpq, so it won't stomp on the callbacks 37 | # that have already been set up 38 | libpq.PQinitOpenSSL(1, 0) 39 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | PYTHON ?= python3 11 | 12 | # Put it first so that "make" without argument is like "make help". 13 | help: 14 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) || true 15 | 16 | serve: 17 | PSYCOPG_IMPL=python sphinx-autobuild . 
_build/html/ 18 | 19 | .PHONY: help serve env Makefile 20 | 21 | env: .venv 22 | 23 | .venv: 24 | $(PYTHON) -m venv .venv 25 | ./.venv/bin/pip install -e "../psycopg[docs]" -e ../psycopg_pool 26 | 27 | # Catch-all target: route all unknown targets to Sphinx using the new 28 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 29 | %: Makefile 30 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 31 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/pq/pgcancel.pyx: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg_c.pq.PGcancel object implementation. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | 8 | cdef class PGcancel: 9 | def __cinit__(self): 10 | self.pgcancel_ptr = NULL 11 | 12 | @staticmethod 13 | cdef PGcancel _from_ptr(libpq.PGcancel *ptr): 14 | cdef PGcancel rv = PGcancel.__new__(PGcancel) 15 | rv.pgcancel_ptr = ptr 16 | return rv 17 | 18 | def __dealloc__(self) -> None: 19 | self.free() 20 | 21 | def free(self) -> None: 22 | if self.pgcancel_ptr is not NULL: 23 | libpq.PQfreeCancel(self.pgcancel_ptr) 24 | self.pgcancel_ptr = NULL 25 | 26 | def cancel(self) -> None: 27 | cdef char buf[256] 28 | cdef int res = libpq.PQcancel(self.pgcancel_ptr, buf, sizeof(buf)) 29 | if not res: 30 | raise e.OperationalError( 31 | f"cancel failed: {buf.decode('utf8', 'ignore')}" 32 | ) 33 | -------------------------------------------------------------------------------- /psycopg_c/README-binary.rst: -------------------------------------------------------------------------------- 1 | Psycopg 3: PostgreSQL database adapter for Python - binary package 2 | ================================================================== 3 | 4 | This distribution contains the precompiled optimization package 5 | ``psycopg_binary``. 
6 | 7 | You shouldn't install this package directly: use instead :: 8 | 9 | pip install "psycopg[binary]" 10 | 11 | to install a version of the optimization package matching the ``psycopg`` 12 | version installed. 13 | 14 | Installing this package requires pip >= 20.3 or newer installed. 15 | 16 | This package is not available for every platform: check out `Binary 17 | installation`__ in the documentation. 18 | 19 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html 20 | #binary-installation 21 | 22 | Please read `the project readme`__ and `the installation documentation`__ for 23 | more details. 24 | 25 | .. __: https://github.com/psycopg/psycopg#readme 26 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html 27 | 28 | 29 | Copyright (C) 2020 The Psycopg Team 30 | -------------------------------------------------------------------------------- /tools/build/print_so_versions.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Take a .so file as input and print the Debian packages and versions of the 4 | # libraries it links. 5 | 6 | set -euo pipefail 7 | # set -x 8 | 9 | source /etc/os-release 10 | 11 | sofile="$1" 12 | 13 | case "$ID" in 14 | alpine) 15 | depfiles=$( (ldd "$sofile" 2>/dev/null || true) | grep '=>' | sed 's/.*=> \(.*\) (.*)/\1/') 16 | (for depfile in $depfiles; do 17 | echo "$(basename "$depfile") => $(apk info --who-owns "${depfile}" | awk '{print $(NF)}')" 18 | done) | sort | uniq 19 | ;; 20 | 21 | debian) 22 | depfiles=$(ldd "$sofile" | grep '=>' | sed 's/.*=> \(.*\) (.*)/\1/') 23 | (for depfile in $depfiles; do 24 | pkgname=$(dpkg -S "${depfile}" | sed 's/\(\): .*/\1/') 25 | dpkg -l "${pkgname}" | grep '^ii' | awk '{print $2 " => " $3}' 26 | done) | sort | uniq 27 | ;; 28 | 29 | centos) 30 | echo "TODO!" 
def test_is_crdb(conn):
    # This module is marked crdb("skip"), so here the server is not
    # CockroachDB: the detection helper must be negative both for a
    # psycopg Connection and for a raw libpq PGconn.
    assert not CrdbConnection.is_crdb(conn)
    assert not CrdbConnection.is_crdb(conn.pgconn)
__: https://github.com/psycopg/psycopg/actions/workflows/tests.yml 17 | 18 | - Build the packages by triggering manually the `Build packages workflow`__. 19 | 20 | .. __: https://github.com/psycopg/psycopg/actions/workflows/packages.yml 21 | 22 | - If all went fine, create a tag named after the version:: 23 | 24 | git tag -a -s 3.0.dev1 25 | git push --tags 26 | 27 | - Download the ``artifacts.zip`` package from the last Packages workflow run. 28 | 29 | - Unpack the packages locally:: 30 | 31 | mkdir tmp 32 | cd tmp 33 | unzip ~/Downloads/artifact.zip 34 | 35 | - If the package is a testing one, upload it on TestPyPI with:: 36 | 37 | $ twine upload -s -r testpypi * 38 | 39 | - If the package is stable, omit ``-r testpypi``. 40 | -------------------------------------------------------------------------------- /tools/build/run_build_macos_arm64.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Build psycopg-binary wheel packages for Apple M1 (cpNNN-macosx_arm64) 4 | # 5 | # This script is designed to run on a local machine: it will clone the repos 6 | # remotely and execute the `build_macos_arm64.sh` script remotely, then will 7 | # download the built packages. A tag to build must be specified. 8 | # 9 | # In order to run the script, the `m1` host must be specified in 10 | # `~/.ssh/config`; for instance: 11 | # 12 | # Host m1 13 | # User m1 14 | # HostName 1.2.3.4 15 | 16 | set -euo pipefail 17 | # set -x 18 | 19 | tag=${1:-} 20 | 21 | if [[ ! 
"${tag}" ]]; then 22 | echo "Usage: $0 TAG" >&2 23 | exit 2 24 | fi 25 | 26 | rdir=psycobuild 27 | 28 | # Clone the repos 29 | ssh m1 rm -rf "${rdir}" 30 | ssh m1 git clone https://github.com/psycopg/psycopg.git --branch ${tag} "${rdir}" 31 | 32 | # Allow sudoing without password, to allow brew to install 33 | ssh -t m1 bash -c \ 34 | 'test -f /etc/sudoers.d/m1 || echo "m1 ALL=(ALL) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/m1' 35 | 36 | # Build the wheel packages 37 | ssh m1 "${rdir}/tools/build/build_macos_arm64.sh" 38 | 39 | # Transfer the packages locally 40 | scp -r "m1:${rdir}/wheelhouse" . 41 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | push: 5 | # This should disable running the workflow on tags, according to the 6 | # on.. GitHub Actions docs. 7 | branches: 8 | - "*" 9 | pull_request: 10 | 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref_name }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | lint: 17 | runs-on: ubuntu-latest 18 | if: true 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | 23 | - uses: actions/setup-python@v4 24 | with: 25 | python-version: "3.10" 26 | 27 | - name: install packages to tests 28 | run: pip install ./psycopg[dev,test] codespell 29 | 30 | - name: Run black 31 | run: black --check --diff . 
32 | 33 | - name: Run flake8 34 | run: flake8 35 | 36 | - name: Run mypy 37 | run: mypy 38 | 39 | - name: Check spelling 40 | run: codespell 41 | 42 | - name: Install requirements to generate docs 43 | run: sudo apt-get install -y libgeos-dev 44 | 45 | - name: Install Python packages to generate docs 46 | run: pip install ./psycopg[docs] ./psycopg_pool 47 | 48 | - name: Check documentation 49 | run: sphinx-build -W -T -b html docs docs/_build/html 50 | -------------------------------------------------------------------------------- /psycopg_c/README.rst: -------------------------------------------------------------------------------- 1 | Psycopg 3: PostgreSQL database adapter for Python - optimisation package 2 | ======================================================================== 3 | 4 | This distribution contains the optional optimization package ``psycopg_c``. 5 | 6 | You shouldn't install this package directly: use instead :: 7 | 8 | pip install "psycopg[c]" 9 | 10 | to install a version of the optimization package matching the ``psycopg`` 11 | version installed. 12 | 13 | Installing this package requires some prerequisites: check `Local 14 | installation`__ in the documentation. Without a C compiler and some library 15 | headers install *will fail*: this is not a bug. 16 | 17 | If you are unable to meet the prerequisite needed you might want to install 18 | ``psycopg[binary]`` instead: look for `Binary installation`__ in the 19 | documentation. 20 | 21 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html 22 | #local-installation 23 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html 24 | #binary-installation 25 | 26 | Please read `the project readme`__ and `the installation documentation`__ for 27 | more details. 28 | 29 | .. __: https://github.com/psycopg/psycopg#readme 30 | .. 
__: https://www.psycopg.org/psycopg3/docs/basic/install.html 31 | 32 | 33 | Copyright (C) 2020 The Psycopg Team 34 | -------------------------------------------------------------------------------- /tests/adapters_example.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | from psycopg import pq 4 | from psycopg.abc import Dumper, Loader, AdaptContext, PyFormat, Buffer 5 | 6 | 7 | def f() -> None: 8 | d: Dumper = MyStrDumper(str, None) 9 | assert d.dump("abc") == b"abcabc" 10 | assert d.quote("abc") == b"'abcabc'" 11 | 12 | lo: Loader = MyTextLoader(0, None) 13 | assert lo.load(b"abc") == "abcabc" 14 | 15 | 16 | class MyStrDumper: 17 | format = pq.Format.TEXT 18 | oid = 25 # text 19 | 20 | def __init__(self, cls: type, context: Optional[AdaptContext] = None): 21 | self._cls = cls 22 | 23 | def dump(self, obj: str) -> bytes: 24 | return (obj * 2).encode() 25 | 26 | def quote(self, obj: str) -> bytes: 27 | value = self.dump(obj) 28 | esc = pq.Escaping() 29 | return b"'%s'" % esc.escape_string(value.replace(b"h", b"q")) 30 | 31 | def get_key(self, obj: str, format: PyFormat) -> type: 32 | return self._cls 33 | 34 | def upgrade(self, obj: str, format: PyFormat) -> "MyStrDumper": 35 | return self 36 | 37 | 38 | class MyTextLoader: 39 | format = pq.Format.TEXT 40 | 41 | def __init__(self, oid: int, context: Optional[AdaptContext] = None): 42 | pass 43 | 44 | def load(self, data: Buffer) -> str: 45 | return (bytes(data) * 2).decode() 46 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=49.2.0", "wheel>=0.37"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.pytest.ini_options] 6 | asyncio_mode = "auto" 7 | filterwarnings = [ 8 | "error", 9 | ] 10 | testpaths=[ 11 | "tests", 12 | ] 13 | # Note: On Travis they these 
options seem to leak objects 14 | # log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(name)s:%(filename)s:%(lineno)d %(message)s" 15 | # log_level = "DEBUG" 16 | 17 | [tool.coverage.run] 18 | source = [ 19 | "psycopg/psycopg", 20 | "psycopg_pool/psycopg_pool", 21 | ] 22 | [tool.coverage.report] 23 | exclude_lines = [ 24 | "if TYPE_CHECKING:", 25 | '\.\.\.$', 26 | ] 27 | 28 | [tool.mypy] 29 | files = [ 30 | "psycopg/psycopg", 31 | "psycopg_pool/psycopg_pool", 32 | "psycopg_c/psycopg_c", 33 | "tests", 34 | ] 35 | warn_unused_ignores = true 36 | show_error_codes = true 37 | disable_bytearray_promotion = true 38 | disable_memoryview_promotion = true 39 | strict = true 40 | 41 | [[tool.mypy.overrides]] 42 | module = [ 43 | "shapely.*", 44 | ] 45 | ignore_missing_imports = true 46 | 47 | [[tool.mypy.overrides]] 48 | module = "uvloop" 49 | ignore_missing_imports = true 50 | 51 | [[tool.mypy.overrides]] 52 | module = "tests.*" 53 | check_untyped_defs = true 54 | disallow_untyped_defs = false 55 | disallow_untyped_calls = false 56 | -------------------------------------------------------------------------------- /psycopg/psycopg/types/bool.py: -------------------------------------------------------------------------------- 1 | """ 2 | Adapters for booleans. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from .. 
class BoolDumper(Dumper):
    """Dump Python ``bool`` values in PostgreSQL text format."""

    # OID of the builtin "bool" type, looked up in the postgres types registry
    oid = postgres.types["bool"].oid

    def dump(self, obj: bool) -> bytes:
        # Wire (text-format) representation: 't' / 'f'
        return b"t" if obj else b"f"

    def quote(self, obj: bool) -> bytes:
        # SQL literal spelling, for inline query composition
        return b"true" if obj else b"false"
def get_tzinfo(pgconn: Optional[PGconn]) -> tzinfo:
    """Return the Python timezone info of the connection's timezone.

    Results are memoised in the module-level `_timezones` map, keyed by the
    raw ``TimeZone`` parameter value (``None`` — i.e. no connection — maps
    to UTC via the pre-seeded entries).
    """
    tzname = pgconn.parameter_status(b"TimeZone") if pgconn else None
    try:
        return _timezones[tzname]
    except KeyError:
        # Cache miss: resolve the name via ZoneInfo, falling back to UTC on
        # any failure, then remember the result for subsequent calls.
        sname = tzname.decode() if tzname else "UTC"
        try:
            zi: tzinfo = ZoneInfo(sname)
        except (KeyError, OSError):
            # Timezone name unknown to the tz database (or tz data unreadable)
            logger.warning("unknown PostgreSQL timezone: %r; will use UTC", sname)
            zi = timezone.utc
        except Exception as ex:
            # Anything else unexpected: log, but never fail the caller over
            # a timezone problem.
            logger.warning(
                "error handling PostgreSQL timezone: %r; will use UTC (%s - %s)",
                sname,
                type(ex).__name__,
                ex,
            )
            zi = timezone.utc

        _timezones[tzname] = zi
        return zi
def sed_i(pattern: str, repl: str, filename: Union[str, Path]) -> None:
    """Apply the regex substitution *pattern* -> *repl* to *filename* in place.

    The file is processed as bytes (pattern and replacement are encoded as
    UTF-8) and is rewritten only when the substitution actually changed
    its content.
    """
    path = Path(filename)
    original = path.read_bytes()
    patched = re.sub(pattern.encode("utf8"), repl.encode("utf8"), original)
    if patched == original:
        return
    path.write_bytes(patched)
14 | 15 | set -euo pipefail 16 | # set -x 17 | 18 | source /etc/os-release 19 | dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 20 | 21 | wheel=$(realpath "$1") 22 | shift 23 | 24 | tmpdir=$(mktemp -d) 25 | trap "rm -r ${tmpdir}" EXIT 26 | 27 | cd "${tmpdir}" 28 | python -m zipfile -e "${wheel}" . 29 | 30 | echo " 31 | Libs before:" 32 | # Busybox doesn't have "find -ls" 33 | find . -name \*.so | xargs ls -l 34 | 35 | # On Debian, print the package versions libraries come from 36 | echo " 37 | Dependencies versions of '_psycopg.so' library:" 38 | "${dir}/print_so_versions.sh" "$(find . -name \*_psycopg\*.so)" 39 | 40 | find . -name \*.so -exec strip "$@" {} \; 41 | 42 | echo " 43 | Libs after:" 44 | find . -name \*.so | xargs ls -l 45 | 46 | python -m zipfile -c ${wheel} * 47 | 48 | cd - 49 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/_psycopg.pyx: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg_c._psycopg optimization module. 3 | 4 | The module contains optimized C code used in preference to Python code 5 | if a compiler is available. 
6 | """ 7 | 8 | # Copyright (C) 2020 The Psycopg Team 9 | 10 | from psycopg_c cimport pq 11 | from psycopg_c.pq cimport libpq 12 | from psycopg_c._psycopg cimport oids 13 | 14 | import logging 15 | 16 | from psycopg.pq import Format as _pq_Format 17 | from psycopg._enums import PyFormat as _py_Format 18 | 19 | logger = logging.getLogger("psycopg") 20 | 21 | PQ_TEXT = _pq_Format.TEXT 22 | PQ_BINARY = _pq_Format.BINARY 23 | 24 | PG_AUTO = _py_Format.AUTO 25 | PG_TEXT = _py_Format.TEXT 26 | PG_BINARY = _py_Format.BINARY 27 | 28 | 29 | cdef extern from *: 30 | """ 31 | /* Include this early to avoid a warning about redefined ARRAYSIZE in winnt.h */ 32 | #ifdef MS_WINDOWS 33 | #define WIN32_LEAN_AND_MEAN 34 | #include 35 | #endif 36 | 37 | #ifndef ARRAYSIZE 38 | #define ARRAYSIZE(a) ((sizeof(a) / sizeof(*(a)))) 39 | #endif 40 | """ 41 | int ARRAYSIZE(void *array) 42 | 43 | 44 | include "_psycopg/adapt.pyx" 45 | include "_psycopg/copy.pyx" 46 | include "_psycopg/generators.pyx" 47 | include "_psycopg/transform.pyx" 48 | include "_psycopg/waiting.pyx" 49 | 50 | include "types/array.pyx" 51 | include "types/datetime.pyx" 52 | include "types/numeric.pyx" 53 | include "types/bool.pyx" 54 | include "types/string.pyx" 55 | -------------------------------------------------------------------------------- /tests/scripts/dectest.py: -------------------------------------------------------------------------------- 1 | """ 2 | A quick and rough performance comparison of text vs. 
binary Decimal adaptation 3 | """ 4 | from random import randrange 5 | from decimal import Decimal 6 | import psycopg 7 | from psycopg import sql 8 | 9 | ncols = 10 10 | nrows = 500000 11 | format = psycopg.pq.Format.BINARY 12 | test = "copy" 13 | 14 | 15 | def main() -> None: 16 | cnn = psycopg.connect() 17 | 18 | cnn.execute( 19 | sql.SQL("create table testdec ({})").format( 20 | sql.SQL(", ").join( 21 | [ 22 | sql.SQL("{} numeric(10,2)").format(sql.Identifier(f"t{i}")) 23 | for i in range(ncols) 24 | ] 25 | ) 26 | ) 27 | ) 28 | cur = cnn.cursor() 29 | 30 | if test == "copy": 31 | with cur.copy(f"copy testdec from stdin (format {format.name})") as copy: 32 | for j in range(nrows): 33 | copy.write_row( 34 | [Decimal(randrange(10000000000)) / 100 for i in range(ncols)] 35 | ) 36 | 37 | elif test == "insert": 38 | ph = ["%t", "%b"][format] 39 | cur.executemany( 40 | "insert into testdec values (%s)" % ", ".join([ph] * ncols), 41 | ( 42 | [Decimal(randrange(10000000000)) / 100 for i in range(ncols)] 43 | for j in range(nrows) 44 | ), 45 | ) 46 | else: 47 | raise Exception(f"bad test: {test}") 48 | 49 | 50 | if __name__ == "__main__": 51 | main() 52 | -------------------------------------------------------------------------------- /docs/lib/ticket_role.py: -------------------------------------------------------------------------------- 1 | # type: ignore 2 | """ 3 | ticket role 4 | ~~~~~~~~~~~ 5 | 6 | An interpreted text role to link docs to tickets issues. 7 | 8 | :copyright: Copyright 2013 by Daniele Varrazzo. 
def ticket_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Interpreted text role rendering ticket references as links.

    Splits *text* into ticket numbers (``#123`` or ``123``) and connecting
    noise (commas, "and", ...); each number becomes a reference node whose
    URL is built from the ``ticket_url`` pattern configured in ``conf.py``.

    Returns the usual role tuple ``(nodes, system_messages)``.
    """
    # Bug fix: the previous signature used mutable defaults
    # (options={}, content=[]). `roles.set_classes()` mutates the options
    # dict in place, so a shared default dict leaked state between
    # invocations of the role. Use the None-sentinel idiom instead.
    if options is None:
        options = {}
    if content is None:
        content = []

    cfg = inliner.document.settings.env.app.config
    if cfg.ticket_url is None:
        msg = inliner.reporter.warning(
            "ticket not configured: please configure ticket_url in conf.py"
        )
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]

    # Loop-invariant: normalise the "class" option once, not per ticket.
    roles.set_classes(options)

    rv = [nodes.Text(name + " ")]
    for ticket, noise in re.findall(r"(#?\d+)|([^\d#]+)", text):
        if ticket:
            num = int(ticket.replace("#", ""))
            url = cfg.ticket_url % num
            rv.append(
                nodes.reference(ticket, utils.unescape(ticket), refuri=url, **options)
            )
        else:
            assert noise
            rv.append(nodes.Text(noise))

    return rv, []
= None 25 | ) -> "asyncio.Future[T]": 26 | return asyncio.create_task(coro) 27 | 28 | Task = asyncio.Future 29 | 30 | if sys.version_info >= (3, 9): 31 | from collections import Counter, deque as Deque 32 | else: 33 | from typing import Counter, Deque 34 | 35 | if sys.version_info < (3, 8): 36 | import importlib_metadata as metadata 37 | else: 38 | from importlib import metadata 39 | 40 | __all__ = [ 41 | "Counter", 42 | "Deque", 43 | "Task", 44 | "create_task", 45 | "metadata", 46 | ] 47 | 48 | # Workaround for psycopg < 3.0.8. 49 | # Timeout on NullPool connection mignt not work correctly. 50 | try: 51 | ConnectionTimeout: Type[e.OperationalError] = e.ConnectionTimeout 52 | except AttributeError: 53 | 54 | class DummyConnectionTimeout(e.OperationalError): 55 | pass 56 | 57 | ConnectionTimeout = DummyConnectionTimeout 58 | -------------------------------------------------------------------------------- /tests/types/test_uuid.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from uuid import UUID 3 | import subprocess as sp 4 | 5 | import pytest 6 | 7 | from psycopg import pq 8 | from psycopg import sql 9 | from psycopg.adapt import PyFormat 10 | 11 | 12 | @pytest.mark.parametrize("fmt_in", PyFormat) 13 | def test_uuid_dump(conn, fmt_in): 14 | val = "12345678123456781234567812345679" 15 | cur = conn.cursor() 16 | cur.execute(f"select %{fmt_in.value} = %s::uuid", (UUID(val), val)) 17 | assert cur.fetchone()[0] is True 18 | 19 | 20 | @pytest.mark.crdb_skip("copy") 21 | @pytest.mark.parametrize("fmt_out", pq.Format) 22 | def test_uuid_load(conn, fmt_out): 23 | cur = conn.cursor(binary=fmt_out) 24 | val = "12345678123456781234567812345679" 25 | cur.execute("select %s::uuid", (val,)) 26 | assert cur.fetchone()[0] == UUID(val) 27 | 28 | stmt = sql.SQL("copy (select {}::uuid) to stdout (format {})").format( 29 | val, sql.SQL(fmt_out.name) 30 | ) 31 | with cur.copy(stmt) as copy: 32 | copy.set_types(["uuid"]) 33 | 
(res,) = copy.read_row() 34 | 35 | assert res == UUID(val) 36 | 37 | 38 | @pytest.mark.slow 39 | @pytest.mark.subprocess 40 | def test_lazy_load(dsn): 41 | script = f"""\ 42 | import sys 43 | import psycopg 44 | 45 | assert 'uuid' not in sys.modules 46 | 47 | conn = psycopg.connect({dsn!r}) 48 | with conn.cursor() as cur: 49 | cur.execute("select repeat('1', 32)::uuid") 50 | cur.fetchone() 51 | 52 | conn.close() 53 | assert 'uuid' in sys.modules 54 | """ 55 | 56 | sp.check_call([sys.executable, "-c", script]) 57 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | =================================================== 2 | Psycopg 3 -- PostgreSQL database adapter for Python 3 | =================================================== 4 | 5 | Psycopg 3 is a newly designed PostgreSQL_ database adapter for the Python_ 6 | programming language. 7 | 8 | Psycopg 3 presents a familiar interface for everyone who has used 9 | `Psycopg 2`_ or any other `DB-API 2.0`_ database adapter, but allows to use 10 | more modern PostgreSQL and Python features, such as: 11 | 12 | - :ref:`Asynchronous support ` 13 | - :ref:`COPY support from Python objects ` 14 | - :ref:`A redesigned connection pool ` 15 | - :ref:`Support for static typing ` 16 | - :ref:`Server-side parameters binding ` 17 | - :ref:`Prepared statements ` 18 | - :ref:`Statements pipeline ` 19 | - :ref:`Binary communication ` 20 | - :ref:`Direct access to the libpq functionalities ` 21 | 22 | .. _Python: https://www.python.org/ 23 | .. _PostgreSQL: https://www.postgresql.org/ 24 | .. _Psycopg 2: https://www.psycopg.org/docs/ 25 | .. _DB-API 2.0: https://www.python.org/dev/peps/pep-0249/ 26 | 27 | 28 | Documentation 29 | ============= 30 | 31 | .. toctree:: 32 | :maxdepth: 2 33 | 34 | basic/index 35 | advanced/index 36 | api/index 37 | 38 | Release notes 39 | ------------- 40 | 41 | .. 
def _test_reveal_crdb(stmts, type, mypy):
    """Prefix *stmts* with the crdb import and delegate to ``_test_reveal``."""
    wrapped = "\n".join(["import psycopg.crdb", stmts, ""])
    _test_reveal(wrapped, type, mypy)
def test_quote_none(conn):
    """None must quote to the SQL NULL literal, both via the Transformer
    API and when composed through sql.Literal."""
    transformer = Transformer()
    dumper = transformer.get_dumper(None, PyFormat.TEXT)
    assert dumper.quote(None) == b"NULL"

    query = sql.SQL("select {v}").format(v=sql.Literal(None))
    cursor = conn.cursor()
    cursor.execute(query)
    row = cursor.fetchone()
    assert row[0] is None
@pytest.mark.parametrize("pgenc", ["MULE_INTERNAL", "EUC_TW"])
def test_pg2py_missing(pgenc):
    """PostgreSQL encodings with no Python codec must raise NotSupportedError."""
    encoded = pgenc.encode()
    with pytest.raises(psycopg.NotSupportedError):
        encodings.pg2pyenc(encoded)
def test_version(mypy):
    """Mypy must accept importing and using ``psycopg.__version__``."""
    source = "from psycopg import __version__\nassert __version__\n"
    result = mypy.run_on_source(source)
    # No stdout means no mypy errors reported.
    assert not result.stdout
# Note that the pgdg doesn't have an aarch64 repository so wheels are
# built with the libpq packaged with Debian 9, which is 9.6.
@pytest.mark.libpq(">= 10")
def test_conninfo_parse():
    """Parsing a multi-host URI must expose the expected keyword/value pairs."""
    uri = (
        b"postgresql://host1:123,host2:456/somedb"
        b"?target_session_attrs=any&application_name=myapp"
    )
    opts = pq.Conninfo.parse(uri)
    parsed = {}
    for opt in opts:
        if opt.val is not None:
            parsed[opt.keyword] = opt.val
    assert parsed[b"host"] == b"host1,host2"
    assert parsed[b"port"] == b"123,456"
    assert parsed[b"dbname"] == b"somedb"
    assert parsed[b"application_name"] == b"myapp"
# Install the desired libpq in github action (Linux runner)
def pytest_collection_modifyitems(items):
    """Tag every test using the ``mypy`` fixture as both ``mypy`` and ``slow``."""
    mypy_items = (it for it in items if "mypy" in it.fixturenames)
    for it in mypy_items:
        # The "mypy" marker lets us address these tests only.
        it.add_marker(pytest.mark.mypy)
        # All the tests using mypy are slow.
        it.add_marker(pytest.mark.slow)
self.run_on_file(str(fn)) 47 | 48 | def get_revealed(self, line): 49 | """return the type from an output of reveal_type""" 50 | return re.sub( 51 | r".*Revealed type is (['\"])([^']+)\1.*", r"\2", line 52 | ).replace("*", "") 53 | 54 | return MypyRunner() 55 | -------------------------------------------------------------------------------- /psycopg/psycopg/types/uuid.py: -------------------------------------------------------------------------------- 1 | """ 2 | Adapters for the UUID type. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | from typing import Callable, Optional, TYPE_CHECKING 8 | 9 | from .. import postgres 10 | from ..pq import Format 11 | from ..abc import AdaptContext 12 | from ..adapt import Buffer, Dumper, Loader 13 | 14 | if TYPE_CHECKING: 15 | import uuid 16 | 17 | # Importing the uuid module is slow, so import it only on request. 18 | UUID: Callable[..., "uuid.UUID"] = None # type: ignore[assignment] 19 | 20 | 21 | class UUIDDumper(Dumper): 22 | 23 | oid = postgres.types["uuid"].oid 24 | 25 | def dump(self, obj: "uuid.UUID") -> bytes: 26 | return obj.hex.encode() 27 | 28 | 29 | class UUIDBinaryDumper(UUIDDumper): 30 | 31 | format = Format.BINARY 32 | 33 | def dump(self, obj: "uuid.UUID") -> bytes: 34 | return obj.bytes 35 | 36 | 37 | class UUIDLoader(Loader): 38 | def __init__(self, oid: int, context: Optional[AdaptContext] = None): 39 | super().__init__(oid, context) 40 | global UUID 41 | if UUID is None: 42 | from uuid import UUID 43 | 44 | def load(self, data: Buffer) -> "uuid.UUID": 45 | if isinstance(data, memoryview): 46 | data = bytes(data) 47 | return UUID(data.decode()) 48 | 49 | 50 | class UUIDBinaryLoader(UUIDLoader): 51 | 52 | format = Format.BINARY 53 | 54 | def load(self, data: Buffer) -> "uuid.UUID": 55 | if isinstance(data, memoryview): 56 | data = bytes(data) 57 | return UUID(bytes=data) 58 | 59 | 60 | def register_default_adapters(context: AdaptContext) -> None: 61 | adapters = context.adapters 62 | 
adapters.register_dumper("uuid.UUID", UUIDDumper) 63 | adapters.register_dumper("uuid.UUID", UUIDBinaryDumper) 64 | adapters.register_loader("uuid", UUIDLoader) 65 | adapters.register_loader("uuid", UUIDBinaryLoader) 66 | -------------------------------------------------------------------------------- /.github/workflows/packages-pool.yml: -------------------------------------------------------------------------------- 1 | name: Build pool packages 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: '28 6 * * sun' 7 | 8 | jobs: 9 | 10 | sdist: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | include: 17 | - {package: psycopg_pool, format: sdist, impl: python} 18 | - {package: psycopg_pool, format: wheel, impl: python} 19 | 20 | steps: 21 | - uses: actions/checkout@v3 22 | 23 | - uses: actions/setup-python@v4 24 | with: 25 | python-version: 3.9 26 | 27 | - name: Create the sdist packages 28 | run: |- 29 | python ${{ matrix.package }}/setup.py sdist -d `pwd`/dist/ 30 | if: ${{ matrix.format == 'sdist' }} 31 | 32 | - name: Create the wheel packages 33 | run: |- 34 | pip install wheel 35 | python ${{ matrix.package }}/setup.py bdist_wheel -d `pwd`/dist/ 36 | if: ${{ matrix.format == 'wheel' }} 37 | 38 | - name: Install the Python pool package and test requirements 39 | run: |- 40 | pip install dist/* 41 | pip install ./psycopg[test] 42 | 43 | - name: Test the sdist package 44 | run: pytest -m 'not slow and not flakey' --color yes 45 | env: 46 | PSYCOPG_IMPL: ${{ matrix.impl }} 47 | PSYCOPG_TEST_DSN: "host=127.0.0.1 user=postgres" 48 | PGPASSWORD: password 49 | 50 | - uses: actions/upload-artifact@v3 51 | with: 52 | path: ./dist/* 53 | 54 | services: 55 | postgresql: 56 | image: postgres:14 57 | env: 58 | POSTGRES_PASSWORD: password 59 | ports: 60 | - 5432:5432 61 | # Set health checks to wait until postgres has started 62 | options: >- 63 | --health-cmd pg_isready 64 | --health-interval 10s 65 | --health-timeout 5s 66 | 
--health-retries 5 67 | -------------------------------------------------------------------------------- /psycopg_pool/setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = psycopg-pool 3 | description = Connection Pool for Psycopg 4 | url = https://psycopg.org/psycopg3/ 5 | author = Daniele Varrazzo 6 | author_email = daniele.varrazzo@gmail.com 7 | license = GNU Lesser General Public License v3 (LGPLv3) 8 | 9 | # STOP AND READ! if you change: 10 | version = 3.2.0.dev1 11 | # also change: 12 | # - `docs/news_pool.rst` to declare this version current or unreleased 13 | 14 | project_urls = 15 | Homepage = https://psycopg.org/ 16 | Documentation = https://www.psycopg.org/psycopg3/docs/advanced/pool.html 17 | Changes = https://psycopg.org/psycopg3/docs/news_pool.html 18 | Code = https://github.com/psycopg/psycopg 19 | Issue Tracker = https://github.com/psycopg/psycopg/issues 20 | Download = https://pypi.org/project/psycopg-pool/ 21 | 22 | classifiers = 23 | Development Status :: 5 - Production/Stable 24 | Intended Audience :: Developers 25 | License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) 26 | Operating System :: MacOS :: MacOS X 27 | Operating System :: Microsoft :: Windows 28 | Operating System :: POSIX 29 | Programming Language :: Python :: 3 30 | Programming Language :: Python :: 3.7 31 | Programming Language :: Python :: 3.8 32 | Programming Language :: Python :: 3.9 33 | Programming Language :: Python :: 3.10 34 | Programming Language :: Python :: 3.11 35 | Topic :: Database 36 | Topic :: Database :: Front-Ends 37 | Topic :: Software Development 38 | Topic :: Software Development :: Libraries :: Python Modules 39 | 40 | long_description = file: README.rst 41 | long_description_content_type = text/x-rst 42 | license_files = LICENSE.txt 43 | 44 | [options] 45 | python_requires = >= 3.7 46 | packages = find: 47 | zip_safe = False 48 | install_requires = 49 | typing-extensions >= 
3.10 50 | importlib-metadata >= 1.4; python_version < "3.8" 51 | 52 | [options.package_data] 53 | psycopg_pool = py.typed 54 | -------------------------------------------------------------------------------- /docs/news_pool.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: psycopg_pool 2 | 3 | .. index:: 4 | single: Release notes 5 | single: News 6 | 7 | ``psycopg_pool`` release notes 8 | ============================== 9 | 10 | Current release 11 | --------------- 12 | 13 | psycopg_pool 3.1.5 14 | ^^^^^^^^^^^^^^^^^^ 15 | 16 | - Make sure that `!ConnectionPool.check()` refills an empty pool 17 | (:ticket:`#438`). 18 | - Avoid error in Pyright caused by aliasing `!TypeAlias` (:ticket:`#439`). 19 | 20 | 21 | psycopg_pool 3.1.4 22 | ^^^^^^^^^^^^^^^^^^ 23 | 24 | - Fix async pool exhausting connections, happening if the pool is created 25 | before the event loop is started (:ticket:`#219`). 26 | 27 | 28 | psycopg_pool 3.1.3 29 | ^^^^^^^^^^^^^^^^^^ 30 | 31 | - Add support for Python 3.11 (:ticket:`#305`). 32 | 33 | 34 | psycopg_pool 3.1.2 35 | ^^^^^^^^^^^^^^^^^^ 36 | 37 | - Fix possible failure to reconnect after losing connection from the server 38 | (:ticket:`#370`). 39 | 40 | 41 | psycopg_pool 3.1.1 42 | ^^^^^^^^^^^^^^^^^^ 43 | 44 | - Fix race condition on pool creation which might result in the pool not 45 | filling (:ticket:`#230`). 46 | 47 | 48 | psycopg_pool 3.1.0 49 | ------------------ 50 | 51 | - Add :ref:`null-pool` (:ticket:`#148`). 52 | - Add `ConnectionPool.open()` and ``open`` parameter to the pool init 53 | (:ticket:`#151`). 54 | - Drop support for Python 3.6. 55 | 56 | 57 | psycopg_pool 3.0.3 58 | ^^^^^^^^^^^^^^^^^^ 59 | 60 | - Raise `!ValueError` if `ConnectionPool` `!min_size` and `!max_size` are both 61 | set to 0 (instead of hanging). 62 | - Raise `PoolClosed` calling `~ConnectionPool.wait()` on a closed pool. 
63 | 64 | 65 | psycopg_pool 3.0.2 66 | ^^^^^^^^^^^^^^^^^^ 67 | 68 | - Remove dependency on the internal `!psycopg._compat` module. 69 | 70 | 71 | psycopg_pool 3.0.1 72 | ^^^^^^^^^^^^^^^^^^ 73 | 74 | - Don't leave connections idle in transaction after calling 75 | `~ConnectionPool.check()` (:ticket:`#144`). 76 | 77 | 78 | psycopg_pool 3.0 79 | ---------------- 80 | 81 | - First release on PyPI. 82 | -------------------------------------------------------------------------------- /docs/api/module.rst: -------------------------------------------------------------------------------- 1 | The `!psycopg` module 2 | ===================== 3 | 4 | Psycopg implements the `Python Database DB API 2.0 specification`__. As such 5 | it also exposes the `module-level objects`__ required by the specifications. 6 | 7 | .. __: https://www.python.org/dev/peps/pep-0249/ 8 | .. __: https://www.python.org/dev/peps/pep-0249/#module-interface 9 | 10 | .. module:: psycopg 11 | 12 | .. autofunction:: connect 13 | 14 | This is an alias of the class method `Connection.connect`: see its 15 | documentation for details. 16 | 17 | If you need an asynchronous connection use `AsyncConnection.connect` 18 | instead. 19 | 20 | 21 | .. rubric:: Exceptions 22 | 23 | The standard `DBAPI exceptions`__ are exposed both by the `!psycopg` module 24 | and by the `psycopg.errors` module. The latter also exposes more specific 25 | exceptions, mapping to the database error states (see 26 | :ref:`sqlstate-exceptions`). 27 | 28 | .. __: https://www.python.org/dev/peps/pep-0249/#exceptions 29 | 30 | .. parsed-literal:: 31 | 32 | `!Exception` 33 | \|__ `Warning` 34 | \|__ `Error` 35 | \|__ `InterfaceError` 36 | \|__ `DatabaseError` 37 | \|__ `DataError` 38 | \|__ `OperationalError` 39 | \|__ `IntegrityError` 40 | \|__ `InternalError` 41 | \|__ `ProgrammingError` 42 | \|__ `NotSupportedError` 43 | 44 | 45 | .. 
data:: adapters 46 | 47 | The default adapters map establishing how Python and PostgreSQL types are 48 | converted into each other. 49 | 50 | This map is used as a template when new connections are created, using 51 | `psycopg.connect()`. Its `~psycopg.adapt.AdaptersMap.types` attribute is a 52 | `~psycopg.types.TypesRegistry` containing information about every 53 | PostgreSQL builtin type, useful for adaptation customisation (see 54 | :ref:`adaptation`):: 55 | 56 | >>> psycopg.adapters.types["int4"] 57 | 58 | 59 | :type: `~psycopg.adapt.AdaptersMap` 60 | -------------------------------------------------------------------------------- /psycopg/psycopg/_enums.py: -------------------------------------------------------------------------------- 1 | """ 2 | Enum values for psycopg 3 | 4 | These values are defined by us and are not necessarily dependent on 5 | libpq-defined enums. 6 | """ 7 | 8 | # Copyright (C) 2020 The Psycopg Team 9 | 10 | from enum import Enum, IntEnum 11 | from selectors import EVENT_READ, EVENT_WRITE 12 | 13 | from . import pq 14 | 15 | 16 | class Wait(IntEnum): 17 | R = EVENT_READ 18 | W = EVENT_WRITE 19 | RW = EVENT_READ | EVENT_WRITE 20 | 21 | 22 | class Ready(IntEnum): 23 | R = EVENT_READ 24 | W = EVENT_WRITE 25 | RW = EVENT_READ | EVENT_WRITE 26 | 27 | 28 | class PyFormat(str, Enum): 29 | """ 30 | Enum representing the format wanted for a query argument. 31 | 32 | The value `AUTO` allows psycopg to choose the best format for a certain 33 | parameter. 
class IsolationLevel(IntEnum):
    """Transaction isolation levels accepted by psycopg connections."""

    # Presented to users as part of the top-level psycopg namespace.
    __module__ = "psycopg"

    READ_UNCOMMITTED = 1
    """The :sql:`READ UNCOMMITTED` isolation level."""
    READ_COMMITTED = 2
    """The :sql:`READ COMMITTED` isolation level."""
    REPEATABLE_READ = 3
    """The :sql:`REPEATABLE READ` isolation level."""
    SERIALIZABLE = 4
    """The :sql:`SERIALIZABLE` isolation level."""
@cython.final
cdef class BoolLoader(CLoader):
    # Load a PostgreSQL bool in text format into a Python bool.
    # Text output is a single byte: "t" for true, "f" for false.

    format = PQ_TEXT

    cdef object cload(self, const char *data, size_t length):
        # this creates better C than `return data[0] == b't'`
        return True if data[0] == b't' else False
is portable and can be included 3 | # in the sdist. 4 | cdef extern from * nogil: 5 | """ 6 | #if defined(_WIN32) || defined(WIN32) || defined(MS_WINDOWS) 7 | typedef signed pid_t; 8 | #else 9 | #include 10 | #endif 11 | """ 12 | ctypedef signed pid_t 13 | 14 | from psycopg_c.pq cimport libpq 15 | 16 | ctypedef char *(*conn_bytes_f) (const libpq.PGconn *) 17 | ctypedef int(*conn_int_f) (const libpq.PGconn *) 18 | 19 | 20 | cdef class PGconn: 21 | cdef libpq.PGconn* _pgconn_ptr 22 | cdef object __weakref__ 23 | cdef public object notice_handler 24 | cdef public object notify_handler 25 | cdef pid_t _procpid 26 | 27 | @staticmethod 28 | cdef PGconn _from_ptr(libpq.PGconn *ptr) 29 | 30 | cpdef int flush(self) except -1 31 | cpdef object notifies(self) 32 | 33 | 34 | cdef class PGresult: 35 | cdef libpq.PGresult* _pgresult_ptr 36 | 37 | @staticmethod 38 | cdef PGresult _from_ptr(libpq.PGresult *ptr) 39 | 40 | 41 | cdef class PGcancel: 42 | cdef libpq.PGcancel* pgcancel_ptr 43 | 44 | @staticmethod 45 | cdef PGcancel _from_ptr(libpq.PGcancel *ptr) 46 | 47 | 48 | cdef class Escaping: 49 | cdef PGconn conn 50 | 51 | cpdef escape_literal(self, data) 52 | cpdef escape_identifier(self, data) 53 | cpdef escape_string(self, data) 54 | cpdef escape_bytea(self, data) 55 | cpdef unescape_bytea(self, const unsigned char *data) 56 | 57 | 58 | cdef class PQBuffer: 59 | cdef unsigned char *buf 60 | cdef Py_ssize_t len 61 | 62 | @staticmethod 63 | cdef PQBuffer _from_buffer(unsigned char *buf, Py_ssize_t length) 64 | 65 | 66 | cdef class ViewBuffer: 67 | cdef unsigned char *buf 68 | cdef Py_ssize_t len 69 | cdef object obj 70 | 71 | @staticmethod 72 | cdef ViewBuffer _from_buffer( 73 | object obj, unsigned char *buf, Py_ssize_t length) 74 | 75 | 76 | cdef int _buffer_as_string_and_size( 77 | data: "Buffer", char **ptr, Py_ssize_t *length 78 | ) except -1 79 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/pq/conninfo.pyx: 
class Conninfo:
    """Utility object to manipulate connection strings (classmethods only)."""

    @classmethod
    def get_defaults(cls) -> List[ConninfoOption]:
        """Return the list of default values of the connection options."""
        cdef libpq.PQconninfoOption *opts = libpq.PQconndefaults()
        if opts is NULL:
            raise MemoryError("couldn't allocate connection defaults")
        rv = _options_from_array(opts)
        libpq.PQconninfoFree(opts)
        return rv

    @classmethod
    def parse(cls, const char *conninfo) -> List[ConninfoOption]:
        """Parse a connection string into a list of options.

        Raise OperationalError if the string is not a valid conninfo.
        """
        cdef char *errmsg = NULL
        cdef libpq.PQconninfoOption *opts = libpq.PQconninfoParse(conninfo, &errmsg)
        if opts is NULL:
            if errmsg is NULL:
                raise MemoryError("couldn't allocate on conninfo parse")
            else:
                exc = e.OperationalError(errmsg.decode("utf8", "replace"))
                libpq.PQfreemem(errmsg)
                raise exc

        rv = _options_from_array(opts)
        libpq.PQconninfoFree(opts)
        return rv

    def __repr__(self):
        # Fix: the previous implementation read self.keyword, an attribute
        # that Conninfo does not have (it belongs to ConninfoOption), so
        # repr() on an instance would always raise AttributeError.
        return f"<{type(self).__name__}>"
--------------------------------------------------------------------------------
"""
compatibility functions for different Python versions
"""

# Copyright (C) 2021 The Psycopg Team

import sys
import asyncio
from typing import Any, Awaitable, Generator, Optional, Sequence, Union, TypeVar

# NOTE: TypeAlias cannot be exported by this module, as pyright special-cases it.
# For this reason it must be imported directly from typing_extensions where used.
# See https://github.com/microsoft/pyright/issues/4197
from typing_extensions import TypeAlias

if sys.version_info >= (3, 8):
    from typing import Protocol
else:
    from typing_extensions import Protocol

T = TypeVar("T")
# Anything accepted by the create_task() shim below.
FutureT: TypeAlias = Union["asyncio.Future[T]", Generator[Any, None, T], Awaitable[T]]

if sys.version_info >= (3, 8):
    create_task = asyncio.create_task
    from math import prod

else:

    def create_task(
        coro: FutureT[T], name: Optional[str] = None
    ) -> "asyncio.Future[T]":
        # asyncio.create_task() gained the 'name' parameter only in Python
        # 3.8: accept and ignore it so callers can pass it unconditionally.
        return asyncio.create_task(coro)

    from functools import reduce

    def prod(seq: Sequence[int]) -> int:
        """Backport of math.prod() (added in Python 3.8) for int sequences."""
        return reduce(int.__mul__, seq, 1)


if sys.version_info >= (3, 9):
    from zoneinfo import ZoneInfo
    from functools import cache
    from collections import Counter, deque as Deque
else:
    from typing import Counter, Deque
    from functools import lru_cache
    from backports.zoneinfo import ZoneInfo

    # functools.cache appeared in 3.9; an unbounded lru_cache is equivalent.
    cache = lru_cache(maxsize=None)

if sys.version_info >= (3, 10):
    from typing import TypeGuard
else:
    from typing_extensions import TypeGuard

if sys.version_info >= (3, 11):
    from typing import LiteralString
else:
    from typing_extensions import LiteralString

if sys.version_info < (3, 8):
    import importlib_metadata as metadata
else:
    from importlib import metadata
67 | __all__ = [ 68 | "Counter", 69 | "Deque", 70 | "LiteralString", 71 | "Protocol", 72 | "TypeGuard", 73 | "ZoneInfo", 74 | "cache", 75 | "create_task", 76 | "prod", 77 | "metadata", 78 | ] 79 | -------------------------------------------------------------------------------- /psycopg_c/setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = psycopg-c 3 | description = PostgreSQL database adapter for Python -- C optimisation distribution 4 | url = https://psycopg.org/psycopg3/ 5 | author = Daniele Varrazzo 6 | author_email = daniele.varrazzo@gmail.com 7 | license = GNU Lesser General Public License v3 (LGPLv3) 8 | version = 3.2.0.dev1 9 | 10 | project_urls = 11 | Homepage = https://psycopg.org/ 12 | Documentation = https://psycopg.org/psycopg3/docs/ 13 | Changes = https://psycopg.org/psycopg3/docs/news.html 14 | Code = https://github.com/psycopg/psycopg 15 | Issue Tracker = https://github.com/psycopg/psycopg/issues 16 | Download = https://pypi.org/project/psycopg-c/ 17 | 18 | classifiers = 19 | Development Status :: 5 - Production/Stable 20 | Intended Audience :: Developers 21 | License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) 22 | Operating System :: MacOS :: MacOS X 23 | Operating System :: Microsoft :: Windows 24 | Operating System :: POSIX 25 | Programming Language :: Python :: 3 26 | Programming Language :: Python :: 3.7 27 | Programming Language :: Python :: 3.8 28 | Programming Language :: Python :: 3.9 29 | Programming Language :: Python :: 3.10 30 | Programming Language :: Python :: 3.11 31 | Topic :: Database 32 | Topic :: Database :: Front-Ends 33 | Topic :: Software Development 34 | Topic :: Software Development :: Libraries :: Python Modules 35 | 36 | long_description = file: README.rst 37 | long_description_content_type = text/x-rst 38 | license_files = LICENSE.txt 39 | 40 | [options] 41 | python_requires = >= 3.7 42 | setup_requires = Cython >= 3.0.0a11 43 | 
packages = find: 44 | zip_safe = False 45 | install_requires = 46 | importlib-metadata >= 1.4; python_version < "3.8" 47 | 48 | [options.package_data] 49 | # NOTE: do not include .pyx files: they shouldn't be in the sdist 50 | # package, so that build is only performed from the .c files (which are 51 | # distributed instead). 52 | psycopg_c = 53 | py.typed 54 | *.pyi 55 | *.pxd 56 | _psycopg/*.pxd 57 | pq/*.pxd 58 | 59 | # In the psycopg-binary distribution don't include cython-related files. 60 | psycopg_binary = 61 | py.typed 62 | *.pyi 63 | -------------------------------------------------------------------------------- /psycopg/psycopg/_struct.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility functions to deal with binary structs. 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | import struct 8 | from typing import Callable, cast, Optional, Tuple 9 | from typing_extensions import TypeAlias 10 | 11 | from .abc import Buffer 12 | from . import errors as e 13 | from ._compat import Protocol 14 | 15 | PackInt: TypeAlias = Callable[[int], bytes] 16 | UnpackInt: TypeAlias = Callable[[Buffer], Tuple[int]] 17 | PackFloat: TypeAlias = Callable[[float], bytes] 18 | UnpackFloat: TypeAlias = Callable[[Buffer], Tuple[float]] 19 | 20 | 21 | class UnpackLen(Protocol): 22 | def __call__(self, data: Buffer, start: Optional[int]) -> Tuple[int]: 23 | ... 
# Pre-compiled (de)serializers for PostgreSQL binary values, all in network
# byte order ("!"). Each format is compiled once and its pack/unpack methods
# are exposed under the names the rest of the package uses.
_int2 = struct.Struct("!h")
_uint2 = struct.Struct("!H")
_int4 = struct.Struct("!i")
_uint4 = struct.Struct("!I")
_int8 = struct.Struct("!q")
_float4 = struct.Struct("!f")
_float8 = struct.Struct("!d")

pack_int2 = cast(PackInt, _int2.pack)
pack_uint2 = cast(PackInt, _uint2.pack)
pack_int4 = cast(PackInt, _int4.pack)
pack_uint4 = cast(PackInt, _uint4.pack)
pack_int8 = cast(PackInt, _int8.pack)
pack_float4 = cast(PackFloat, _float4.pack)
pack_float8 = cast(PackFloat, _float8.pack)

unpack_int2 = cast(UnpackInt, _int2.unpack)
unpack_uint2 = cast(UnpackInt, _uint2.unpack)
unpack_int4 = cast(UnpackInt, _int4.unpack)
unpack_uint4 = cast(UnpackInt, _uint4.unpack)
unpack_int8 = cast(UnpackInt, _int8.unpack)
unpack_float4 = cast(UnpackFloat, _float4.unpack)
unpack_float8 = cast(UnpackFloat, _float8.unpack)

# Length prefixes are signed 4-byte ints; unpack_from allows an offset.
_struct_len = struct.Struct("!i")
pack_len = cast(Callable[[int], bytes], _struct_len.pack)
unpack_len = cast(UnpackLen, _struct_len.unpack_from)


def pack_float4_bug_304(x: float) -> bytes:
    # Replacement dumper installed when the interpreter is affected by
    # https://github.com/psycopg/psycopg/issues/304: fail loudly rather
    # than emit corrupted float4 data.
    raise e.InterfaceError(
        "cannot dump Float4: Python affected by bug #304. Note that the psycopg-c"
        " and psycopg-binary packages are not affected by this issue."
        " See https://github.com/psycopg/psycopg/issues/304"
    )


# If issue #304 is detected, raise an error instead of dumping wrong data.
56 | if struct.Struct("!f").pack(1.0) != bytes.fromhex("3f800000"): 57 | pack_float4 = pack_float4_bug_304 58 | -------------------------------------------------------------------------------- /tests/crdb/test_cursor_async.py: -------------------------------------------------------------------------------- 1 | import json 2 | import asyncio 3 | from typing import Any 4 | from asyncio.queues import Queue 5 | 6 | import pytest 7 | from psycopg import pq, errors as e 8 | from psycopg.rows import namedtuple_row 9 | from psycopg._compat import create_task 10 | 11 | from .test_cursor import testfeed 12 | 13 | testfeed # fixture 14 | 15 | pytestmark = [pytest.mark.crdb, pytest.mark.asyncio] 16 | 17 | 18 | @pytest.mark.slow 19 | @pytest.mark.parametrize("fmt_out", pq.Format) 20 | async def test_changefeed(aconn_cls, dsn, aconn, testfeed, fmt_out): 21 | await aconn.set_autocommit(True) 22 | q: "Queue[Any]" = Queue() 23 | 24 | async def worker(): 25 | try: 26 | async with await aconn_cls.connect(dsn, autocommit=True) as conn: 27 | cur = conn.cursor(binary=fmt_out, row_factory=namedtuple_row) 28 | try: 29 | async for row in cur.stream( 30 | f"experimental changefeed for {testfeed}" 31 | ): 32 | q.put_nowait(row) 33 | except e.QueryCanceled: 34 | assert conn.info.transaction_status == conn.TransactionStatus.IDLE 35 | q.put_nowait(None) 36 | except Exception as ex: 37 | q.put_nowait(ex) 38 | 39 | t = create_task(worker()) 40 | 41 | cur = aconn.cursor() 42 | await cur.execute(f"insert into {testfeed} (data) values ('hello') returning id") 43 | (key,) = await cur.fetchone() 44 | row = await asyncio.wait_for(q.get(), 1.0) 45 | assert row.table == testfeed 46 | assert json.loads(row.key) == [key] 47 | assert json.loads(row.value)["after"] == {"id": key, "data": "hello"} 48 | 49 | await cur.execute(f"delete from {testfeed} where id = %s", [key]) 50 | row = await asyncio.wait_for(q.get(), 1.0) 51 | assert row.table == testfeed 52 | assert json.loads(row.key) == [key] 53 | assert 
json.loads(row.value)["after"] is None 54 | 55 | await cur.execute("select query_id from [show statements] where query !~ 'show'") 56 | (qid,) = await cur.fetchone() 57 | await cur.execute("cancel query %s", [qid]) 58 | assert cur.statusmessage == "CANCEL QUERIES 1" 59 | 60 | assert await asyncio.wait_for(q.get(), 1.0) is None 61 | await asyncio.gather(t) 62 | -------------------------------------------------------------------------------- /tests/crdb/test_cursor.py: -------------------------------------------------------------------------------- 1 | import json 2 | import threading 3 | from uuid import uuid4 4 | from queue import Queue 5 | from typing import Any 6 | 7 | import pytest 8 | from psycopg import pq, errors as e 9 | from psycopg.rows import namedtuple_row 10 | 11 | pytestmark = pytest.mark.crdb 12 | 13 | 14 | @pytest.fixture 15 | def testfeed(svcconn): 16 | name = f"test_feed_{str(uuid4()).replace('-', '')}" 17 | svcconn.execute("set cluster setting kv.rangefeed.enabled to true") 18 | svcconn.execute(f"create table {name} (id serial primary key, data text)") 19 | yield name 20 | svcconn.execute(f"drop table {name}") 21 | 22 | 23 | @pytest.mark.slow 24 | @pytest.mark.parametrize("fmt_out", pq.Format) 25 | def test_changefeed(conn_cls, dsn, conn, testfeed, fmt_out): 26 | conn.autocommit = True 27 | q: "Queue[Any]" = Queue() 28 | 29 | def worker(): 30 | try: 31 | with conn_cls.connect(dsn, autocommit=True) as conn: 32 | cur = conn.cursor(binary=fmt_out, row_factory=namedtuple_row) 33 | try: 34 | for row in cur.stream(f"experimental changefeed for {testfeed}"): 35 | q.put(row) 36 | except e.QueryCanceled: 37 | assert conn.info.transaction_status == conn.TransactionStatus.IDLE 38 | q.put(None) 39 | except Exception as ex: 40 | q.put(ex) 41 | 42 | t = threading.Thread(target=worker) 43 | t.start() 44 | 45 | cur = conn.cursor() 46 | cur.execute(f"insert into {testfeed} (data) values ('hello') returning id") 47 | (key,) = cur.fetchone() 48 | row = 
q.get(timeout=1) 49 | assert row.table == testfeed 50 | assert json.loads(row.key) == [key] 51 | assert json.loads(row.value)["after"] == {"id": key, "data": "hello"} 52 | 53 | cur.execute(f"delete from {testfeed} where id = %s", [key]) 54 | row = q.get(timeout=1) 55 | assert row.table == testfeed 56 | assert json.loads(row.key) == [key] 57 | assert json.loads(row.value)["after"] is None 58 | 59 | cur.execute("select query_id from [show statements] where query !~ 'show'") 60 | (qid,) = cur.fetchone() 61 | cur.execute("cancel query %s", [qid]) 62 | assert cur.statusmessage == "CANCEL QUERIES 1" 63 | 64 | assert q.get(timeout=1) is None 65 | t.join() 66 | -------------------------------------------------------------------------------- /.github/workflows/packages-src.yml: -------------------------------------------------------------------------------- 1 | name: Build source packages 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: '37 6 * * sun' 7 | 8 | jobs: 9 | 10 | sdist: 11 | runs-on: ubuntu-latest 12 | if: true 13 | 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | include: 18 | - {package: psycopg, format: sdist, impl: python} 19 | - {package: psycopg, format: wheel, impl: python} 20 | - {package: psycopg_c, format: sdist, impl: c} 21 | 22 | steps: 23 | - uses: actions/checkout@v3 24 | 25 | - uses: actions/setup-python@v4 26 | with: 27 | python-version: 3.9 28 | 29 | - name: Create the sdist packages 30 | run: |- 31 | python ${{ matrix.package }}/setup.py sdist -d `pwd`/dist/ 32 | if: ${{ matrix.format == 'sdist' }} 33 | 34 | - name: Create the wheel packages 35 | run: |- 36 | pip install wheel 37 | python ${{ matrix.package }}/setup.py bdist_wheel -d `pwd`/dist/ 38 | if: ${{ matrix.format == 'wheel' }} 39 | 40 | - name: Install the Python package and test requirements 41 | run: |- 42 | pip install `ls dist/*`[test] 43 | pip install ./psycopg_pool 44 | if: ${{ matrix.package == 'psycopg' }} 45 | 46 | - name: Install the C package and test 
requirements 47 | run: |- 48 | pip install dist/* 49 | pip install ./psycopg[test] 50 | pip install ./psycopg_pool 51 | if: ${{ matrix.package == 'psycopg_c' }} 52 | 53 | - name: Test the sdist package 54 | run: pytest -m 'not slow and not flakey' --color yes 55 | env: 56 | PSYCOPG_IMPL: ${{ matrix.impl }} 57 | PSYCOPG_TEST_DSN: "host=127.0.0.1 user=postgres" 58 | PGPASSWORD: password 59 | 60 | - uses: actions/upload-artifact@v3 61 | with: 62 | path: ./dist/* 63 | 64 | services: 65 | postgresql: 66 | image: postgres:14 67 | env: 68 | POSTGRES_PASSWORD: password 69 | ports: 70 | - 5432:5432 71 | # Set health checks to wait until postgres has started 72 | options: >- 73 | --health-cmd pg_isready 74 | --health-interval 10s 75 | --health-timeout 5s 76 | --health-retries 5 77 | -------------------------------------------------------------------------------- /psycopg_c/psycopg_c/_psycopg/oids.pxd: -------------------------------------------------------------------------------- 1 | """ 2 | Constants to refer to OIDS in C 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | # Use tools/update_oids.py to update this data. 
8 | 9 | cdef enum: 10 | INVALID_OID = 0 11 | 12 | # autogenerated: start 13 | 14 | # Generated from PostgreSQL 15.0 15 | 16 | ACLITEM_OID = 1033 17 | BIT_OID = 1560 18 | BOOL_OID = 16 19 | BOX_OID = 603 20 | BPCHAR_OID = 1042 21 | BYTEA_OID = 17 22 | CHAR_OID = 18 23 | CID_OID = 29 24 | CIDR_OID = 650 25 | CIRCLE_OID = 718 26 | DATE_OID = 1082 27 | DATEMULTIRANGE_OID = 4535 28 | DATERANGE_OID = 3912 29 | FLOAT4_OID = 700 30 | FLOAT8_OID = 701 31 | GTSVECTOR_OID = 3642 32 | INET_OID = 869 33 | INT2_OID = 21 34 | INT2VECTOR_OID = 22 35 | INT4_OID = 23 36 | INT4MULTIRANGE_OID = 4451 37 | INT4RANGE_OID = 3904 38 | INT8_OID = 20 39 | INT8MULTIRANGE_OID = 4536 40 | INT8RANGE_OID = 3926 41 | INTERVAL_OID = 1186 42 | JSON_OID = 114 43 | JSONB_OID = 3802 44 | JSONPATH_OID = 4072 45 | LINE_OID = 628 46 | LSEG_OID = 601 47 | MACADDR_OID = 829 48 | MACADDR8_OID = 774 49 | MONEY_OID = 790 50 | NAME_OID = 19 51 | NUMERIC_OID = 1700 52 | NUMMULTIRANGE_OID = 4532 53 | NUMRANGE_OID = 3906 54 | OID_OID = 26 55 | OIDVECTOR_OID = 30 56 | PATH_OID = 602 57 | PG_LSN_OID = 3220 58 | POINT_OID = 600 59 | POLYGON_OID = 604 60 | RECORD_OID = 2249 61 | REFCURSOR_OID = 1790 62 | REGCLASS_OID = 2205 63 | REGCOLLATION_OID = 4191 64 | REGCONFIG_OID = 3734 65 | REGDICTIONARY_OID = 3769 66 | REGNAMESPACE_OID = 4089 67 | REGOPER_OID = 2203 68 | REGOPERATOR_OID = 2204 69 | REGPROC_OID = 24 70 | REGPROCEDURE_OID = 2202 71 | REGROLE_OID = 4096 72 | REGTYPE_OID = 2206 73 | TEXT_OID = 25 74 | TID_OID = 27 75 | TIME_OID = 1083 76 | TIMESTAMP_OID = 1114 77 | TIMESTAMPTZ_OID = 1184 78 | TIMETZ_OID = 1266 79 | TSMULTIRANGE_OID = 4533 80 | TSQUERY_OID = 3615 81 | TSRANGE_OID = 3908 82 | TSTZMULTIRANGE_OID = 4534 83 | TSTZRANGE_OID = 3910 84 | TSVECTOR_OID = 3614 85 | TXID_SNAPSHOT_OID = 2970 86 | UUID_OID = 2950 87 | VARBIT_OID = 1562 88 | VARCHAR_OID = 1043 89 | XID_OID = 28 90 | XID8_OID = 5069 91 | XML_OID = 142 92 | # autogenerated: end 93 | 
-------------------------------------------------------------------------------- /psycopg/psycopg/types/shapely.py: -------------------------------------------------------------------------------- 1 | """ 2 | Adapters for PostGIS geometries 3 | """ 4 | 5 | from typing import Optional 6 | 7 | from .. import postgres 8 | from ..abc import AdaptContext, Buffer 9 | from ..adapt import Dumper, Loader 10 | from ..pq import Format 11 | from .._typeinfo import TypeInfo 12 | 13 | 14 | try: 15 | from shapely.wkb import loads, dumps 16 | from shapely.geometry.base import BaseGeometry 17 | 18 | except ImportError: 19 | raise ImportError( 20 | "The module psycopg.types.shapely requires the package 'Shapely'" 21 | " to be installed" 22 | ) 23 | 24 | 25 | class GeometryBinaryLoader(Loader): 26 | format = Format.BINARY 27 | 28 | def load(self, data: Buffer) -> "BaseGeometry": 29 | if not isinstance(data, bytes): 30 | data = bytes(data) 31 | return loads(data) 32 | 33 | 34 | class GeometryLoader(Loader): 35 | def load(self, data: Buffer) -> "BaseGeometry": 36 | # it's a hex string in binary 37 | if isinstance(data, memoryview): 38 | data = bytes(data) 39 | return loads(data.decode(), hex=True) 40 | 41 | 42 | class BaseGeometryBinaryDumper(Dumper): 43 | format = Format.BINARY 44 | 45 | def dump(self, obj: "BaseGeometry") -> bytes: 46 | return dumps(obj) # type: ignore 47 | 48 | 49 | class BaseGeometryDumper(Dumper): 50 | def dump(self, obj: "BaseGeometry") -> bytes: 51 | return dumps(obj, hex=True).encode() # type: ignore 52 | 53 | 54 | def register_shapely(info: TypeInfo, context: Optional[AdaptContext] = None) -> None: 55 | """Register Shapely dumper and loaders.""" 56 | 57 | # A friendly error warning instead of an AttributeError in case fetch() 58 | # failed and it wasn't noticed. 59 | if not info: 60 | raise TypeError("no info passed. 
Is the 'postgis' extension loaded?") 61 | 62 | info.register(context) 63 | adapters = context.adapters if context else postgres.adapters 64 | 65 | class GeometryDumper(BaseGeometryDumper): 66 | oid = info.oid 67 | 68 | class GeometryBinaryDumper(BaseGeometryBinaryDumper): 69 | oid = info.oid 70 | 71 | adapters.register_loader(info.oid, GeometryBinaryLoader) 72 | adapters.register_loader(info.oid, GeometryLoader) 73 | # Default binary dump 74 | adapters.register_dumper(BaseGeometry, GeometryDumper) 75 | adapters.register_dumper(BaseGeometry, GeometryBinaryDumper) 76 | -------------------------------------------------------------------------------- /docs/api/rows.rst: -------------------------------------------------------------------------------- 1 | .. _psycopg.rows: 2 | 3 | `rows` -- row factory implementations 4 | ===================================== 5 | 6 | .. module:: psycopg.rows 7 | 8 | The module exposes a few generic `~psycopg.RowFactory` implementation, which 9 | can be used to retrieve data from the database in more complex structures than 10 | the basic tuples. 11 | 12 | Check out :ref:`row-factories` for information about how to use these objects. 13 | 14 | .. autofunction:: tuple_row 15 | .. autofunction:: dict_row 16 | .. autofunction:: namedtuple_row 17 | .. autofunction:: class_row 18 | 19 | This is not a row factory, but rather a factory of row factories. 20 | Specifying `!row_factory=class_row(MyClass)` will create connections and 21 | cursors returning `!MyClass` objects on fetch. 
22 | 23 | Example:: 24 | 25 | from dataclasses import dataclass 26 | import psycopg 27 | from psycopg.rows import class_row 28 | 29 | @dataclass 30 | class Person: 31 | first_name: str 32 | last_name: str 33 | age: int = None 34 | 35 | conn = psycopg.connect() 36 | cur = conn.cursor(row_factory=class_row(Person)) 37 | 38 | cur.execute("select 'John' as first_name, 'Smith' as last_name").fetchone() 39 | # Person(first_name='John', last_name='Smith', age=None) 40 | 41 | .. autofunction:: args_row 42 | .. autofunction:: kwargs_row 43 | 44 | 45 | Formal rows protocols 46 | --------------------- 47 | 48 | These objects can be used to describe your own rows adapter for static typing 49 | checks, such as mypy_. 50 | 51 | .. _mypy: https://mypy.readthedocs.io/ 52 | 53 | 54 | .. autoclass:: psycopg.rows.RowMaker() 55 | 56 | .. method:: __call__(values: Sequence[Any]) -> Row 57 | 58 | Convert a sequence of values from the database to a finished object. 59 | 60 | 61 | .. autoclass:: psycopg.rows.RowFactory() 62 | 63 | .. method:: __call__(cursor: Cursor[Row]) -> RowMaker[Row] 64 | 65 | Inspect the result on a cursor and return a `RowMaker` to convert rows. 66 | 67 | .. autoclass:: psycopg.rows.AsyncRowFactory() 68 | 69 | .. autoclass:: psycopg.rows.BaseRowFactory() 70 | 71 | Note that it's easy to implement an object implementing both `!RowFactory` and 72 | `!AsyncRowFactory`: usually, everything you need to implement a row factory is 73 | to access the cursor's `~psycopg.Cursor.description`, which is provided by 74 | both the cursor flavours. 75 | -------------------------------------------------------------------------------- /docs/advanced/prepare.rst: -------------------------------------------------------------------------------- 1 | .. currentmodule:: psycopg 2 | 3 | .. index:: 4 | single: Prepared statements 5 | 6 | .. _prepared-statements: 7 | 8 | Prepared statements 9 | =================== 10 | 11 | Psycopg uses an automatic system to manage *prepared statements*. 
When a 12 | query is prepared, its parsing and planning is stored in the server session, 13 | so that further executions of the same query on the same connection (even with 14 | different parameters) are optimised. 15 | 16 | A query is prepared automatically after it is executed more than 17 | `~Connection.prepare_threshold` times on a connection. `!psycopg` will make 18 | sure that no more than `~Connection.prepared_max` statements are planned: if 19 | further queries are executed, the least recently used ones are deallocated and 20 | the associated resources freed. 21 | 22 | Statement preparation can be controlled in several ways: 23 | 24 | - You can decide to prepare a query immediately by passing `!prepare=True` to 25 | `Connection.execute()` or `Cursor.execute()`. The query is prepared, if it 26 | wasn't already, and executed as prepared from its first use. 27 | 28 | - Conversely, passing `!prepare=False` to `!execute()` will avoid to prepare 29 | the query, regardless of the number of times it is executed. The default for 30 | the parameter is `!None`, meaning that the query is prepared if the 31 | conditions described above are met. 32 | 33 | - You can disable the use of prepared statements on a connection by setting 34 | its `~Connection.prepare_threshold` attribute to `!None`. 35 | 36 | .. versionchanged:: 3.1 37 | You can set `!prepare_threshold` as a `~Connection.connect()` keyword 38 | parameter too. 39 | 40 | .. seealso:: 41 | 42 | The `PREPARE`__ PostgreSQL documentation contains plenty of details about 43 | prepared statements in PostgreSQL. 44 | 45 | Note however that Psycopg doesn't use SQL statements such as 46 | :sql:`PREPARE` and :sql:`EXECUTE`, but protocol level commands such as the 47 | ones exposed by :pq:`PQsendPrepare`, :pq:`PQsendQueryPrepared`. 48 | 49 | .. __: https://www.postgresql.org/docs/current/sql-prepare.html 50 | 51 | .. 
warning:: 52 | 53 | Using external connection poolers, such as PgBouncer, is not compatible 54 | with prepared statements, because the same client connection may change 55 | the server session it refers to. If such middleware is used you should 56 | disable prepared statements, by setting the `Connection.prepare_threshold` 57 | attribute to `!None`. 58 | -------------------------------------------------------------------------------- /tests/pool/test_pool_async_noasyncio.py: -------------------------------------------------------------------------------- 1 | # These tests relate to AsyncConnectionPool, but are not marked asyncio 2 | # because they rely on the pool initialization outside the asyncio loop. 3 | 4 | import asyncio 5 | 6 | import pytest 7 | 8 | from ..utils import gc_collect 9 | 10 | try: 11 | import psycopg_pool as pool 12 | except ImportError: 13 | # Tests should have been skipped if the package is not available 14 | pass 15 | 16 | 17 | @pytest.mark.slow 18 | def test_reconnect_after_max_lifetime(dsn, asyncio_run): 19 | # See issue #219, pool created before the loop. 
20 | p = pool.AsyncConnectionPool(dsn, min_size=1, max_lifetime=0.2, open=False) 21 | 22 | async def test(): 23 | try: 24 | await p.open() 25 | ns = [] 26 | for i in range(5): 27 | async with p.connection() as conn: 28 | cur = await conn.execute("select 1") 29 | ns.append(await cur.fetchone()) 30 | await asyncio.sleep(0.2) 31 | assert len(ns) == 5 32 | finally: 33 | await p.close() 34 | 35 | asyncio_run(asyncio.wait_for(test(), timeout=2.0)) 36 | 37 | 38 | @pytest.mark.slow 39 | def test_working_created_before_loop(dsn, asyncio_run): 40 | p = pool.AsyncNullConnectionPool(dsn, open=False) 41 | 42 | async def test(): 43 | try: 44 | await p.open() 45 | ns = [] 46 | for i in range(5): 47 | async with p.connection() as conn: 48 | cur = await conn.execute("select 1") 49 | ns.append(await cur.fetchone()) 50 | await asyncio.sleep(0.2) 51 | assert len(ns) == 5 52 | finally: 53 | await p.close() 54 | 55 | asyncio_run(asyncio.wait_for(test(), timeout=2.0)) 56 | 57 | 58 | def test_cant_create_open_outside_loop(dsn): 59 | with pytest.raises(RuntimeError): 60 | pool.AsyncConnectionPool(dsn, open=True) 61 | 62 | 63 | @pytest.fixture 64 | def asyncio_run(recwarn): 65 | """Fixture reuturning asyncio.run, but managing resources at exit. 66 | 67 | In certain runs, fd objects are leaked and the error will only be caught 68 | downstream, by some innocent test calling gc_collect(). 
69 | """ 70 | recwarn.clear() 71 | try: 72 | yield asyncio.run 73 | finally: 74 | gc_collect() 75 | if recwarn: 76 | warn = recwarn.pop(ResourceWarning) 77 | assert "unclosed event loop" in str(warn.message) 78 | assert not recwarn 79 | -------------------------------------------------------------------------------- /tests/crdb/test_connection.py: -------------------------------------------------------------------------------- 1 | import time 2 | import threading 3 | 4 | import psycopg.crdb 5 | from psycopg import errors as e 6 | from psycopg.crdb import CrdbConnection 7 | 8 | import pytest 9 | 10 | pytestmark = pytest.mark.crdb 11 | 12 | 13 | def test_is_crdb(conn): 14 | assert CrdbConnection.is_crdb(conn) 15 | assert CrdbConnection.is_crdb(conn.pgconn) 16 | 17 | 18 | def test_connect(dsn): 19 | with CrdbConnection.connect(dsn) as conn: 20 | assert isinstance(conn, CrdbConnection) 21 | 22 | with psycopg.crdb.connect(dsn) as conn: 23 | assert isinstance(conn, CrdbConnection) 24 | 25 | 26 | def test_xid(dsn): 27 | with CrdbConnection.connect(dsn) as conn: 28 | with pytest.raises(e.NotSupportedError): 29 | conn.xid(1, "gtrid", "bqual") 30 | 31 | 32 | def test_tpc_begin(dsn): 33 | with CrdbConnection.connect(dsn) as conn: 34 | with pytest.raises(e.NotSupportedError): 35 | conn.tpc_begin("foo") 36 | 37 | 38 | def test_tpc_recover(dsn): 39 | with CrdbConnection.connect(dsn) as conn: 40 | with pytest.raises(e.NotSupportedError): 41 | conn.tpc_recover() 42 | 43 | 44 | @pytest.mark.slow 45 | def test_broken_connection(conn): 46 | cur = conn.cursor() 47 | (session_id,) = cur.execute("select session_id from [show session_id]").fetchone() 48 | with pytest.raises(psycopg.DatabaseError): 49 | cur.execute("cancel session %s", [session_id]) 50 | assert conn.closed 51 | 52 | 53 | @pytest.mark.slow 54 | def test_broken(conn): 55 | (session_id,) = conn.execute("show session_id").fetchone() 56 | with pytest.raises(psycopg.OperationalError): 57 | conn.execute("cancel session %s", 
[session_id]) 58 | 59 | assert conn.closed 60 | assert conn.broken 61 | conn.close() 62 | assert conn.closed 63 | assert conn.broken 64 | 65 | 66 | @pytest.mark.slow 67 | def test_identify_closure(conn_cls, dsn): 68 | with conn_cls.connect(dsn, autocommit=True) as conn: 69 | with conn_cls.connect(dsn, autocommit=True) as conn2: 70 | (session_id,) = conn.execute("show session_id").fetchone() 71 | 72 | def closer(): 73 | time.sleep(0.2) 74 | conn2.execute("cancel session %s", [session_id]) 75 | 76 | t = threading.Thread(target=closer) 77 | t.start() 78 | t0 = time.time() 79 | try: 80 | with pytest.raises(psycopg.OperationalError): 81 | conn.execute("select pg_sleep(3.0)") 82 | dt = time.time() - t0 83 | # CRDB seems to take not less than 1s 84 | assert 0.2 < dt < 2 85 | finally: 86 | t.join() 87 | -------------------------------------------------------------------------------- /tests/pq/test_misc.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import psycopg 4 | from psycopg import pq 5 | 6 | 7 | def test_error_message(pgconn): 8 | res = pgconn.exec_(b"wat") 9 | assert res.status == pq.ExecStatus.FATAL_ERROR 10 | msg = pq.error_message(pgconn) 11 | assert "wat" in msg 12 | assert msg == pq.error_message(res) 13 | primary = res.error_field(pq.DiagnosticField.MESSAGE_PRIMARY) 14 | assert primary.decode("ascii") in msg 15 | 16 | with pytest.raises(TypeError): 17 | pq.error_message(None) # type: ignore[arg-type] 18 | 19 | res.clear() 20 | assert pq.error_message(res) == "no details available" 21 | pgconn.finish() 22 | assert "NULL" in pq.error_message(pgconn) 23 | 24 | 25 | @pytest.mark.crdb_skip("encoding") 26 | def test_error_message_encoding(pgconn): 27 | res = pgconn.exec_(b"set client_encoding to latin9") 28 | assert res.status == pq.ExecStatus.COMMAND_OK 29 | 30 | res = pgconn.exec_('select 1 from "foo\u20acbar"'.encode("latin9")) 31 | assert res.status == pq.ExecStatus.FATAL_ERROR 32 | 33 | msg = 
pq.error_message(pgconn) 34 | assert "foo\u20acbar" in msg 35 | 36 | msg = pq.error_message(res) 37 | assert "foo\ufffdbar" in msg 38 | 39 | msg = pq.error_message(res, encoding="latin9") 40 | assert "foo\u20acbar" in msg 41 | 42 | msg = pq.error_message(res, encoding="ascii") 43 | assert "foo\ufffdbar" in msg 44 | 45 | 46 | def test_make_empty_result(pgconn): 47 | pgconn.exec_(b"wat") 48 | res = pgconn.make_empty_result(pq.ExecStatus.FATAL_ERROR) 49 | assert res.status == pq.ExecStatus.FATAL_ERROR 50 | assert b"wat" in res.error_message 51 | 52 | pgconn.finish() 53 | res = pgconn.make_empty_result(pq.ExecStatus.FATAL_ERROR) 54 | assert res.status == pq.ExecStatus.FATAL_ERROR 55 | assert res.error_message == b"" 56 | 57 | 58 | def test_result_set_attrs(pgconn): 59 | res = pgconn.make_empty_result(pq.ExecStatus.COPY_OUT) 60 | assert res.status == pq.ExecStatus.COPY_OUT 61 | 62 | attrs = [ 63 | pq.PGresAttDesc(b"an_int", 0, 0, 0, 23, 0, 0), 64 | pq.PGresAttDesc(b"a_num", 0, 0, 0, 1700, 0, 0), 65 | pq.PGresAttDesc(b"a_bin_text", 0, 0, 1, 25, 0, 0), 66 | ] 67 | res.set_attributes(attrs) 68 | assert res.nfields == 3 69 | 70 | assert res.fname(0) == b"an_int" 71 | assert res.fname(1) == b"a_num" 72 | assert res.fname(2) == b"a_bin_text" 73 | 74 | assert res.fformat(0) == 0 75 | assert res.fformat(1) == 0 76 | assert res.fformat(2) == 1 77 | 78 | assert res.ftype(0) == 23 79 | assert res.ftype(1) == 1700 80 | assert res.ftype(2) == 25 81 | 82 | with pytest.raises(psycopg.OperationalError): 83 | res.set_attributes(attrs) 84 | -------------------------------------------------------------------------------- /docs/api/abc.rst: -------------------------------------------------------------------------------- 1 | `!abc` -- Psycopg abstract classes 2 | ================================== 3 | 4 | The module exposes Psycopg definitions which can be used for static type 5 | checking. 6 | 7 | .. module:: psycopg.abc 8 | 9 | .. 
autoclass:: Dumper(cls, context=None) 10 | 11 | :param cls: The type that will be managed by this dumper. 12 | :type cls: type 13 | :param context: The context where the transformation is performed. If not 14 | specified the conversion might be inaccurate, for instance it will not 15 | be possible to know the connection encoding or the server date format. 16 | :type context: `AdaptContext` or None 17 | 18 | A partial implementation of this protocol (implementing everything except 19 | `dump()`) is available as `psycopg.adapt.Dumper`. 20 | 21 | .. autoattribute:: format 22 | 23 | .. automethod:: dump 24 | 25 | The format returned by dump shouldn't contain quotes or escaped 26 | values. 27 | 28 | .. automethod:: quote 29 | 30 | .. tip:: 31 | 32 | This method will be used by `~psycopg.sql.Literal` to convert a 33 | value client-side. 34 | 35 | This method only makes sense for text dumpers; the result of calling 36 | it on a binary dumper is undefined. It might scratch your car, or burn 37 | your cake. Don't tell me I didn't warn you. 38 | 39 | .. autoattribute:: oid 40 | 41 | If the OID is not specified, PostgreSQL will try to infer the type 42 | from the context, but this may fail in some contexts and may require a 43 | cast (e.g. specifying :samp:`%s::{type}` for its placeholder). 44 | 45 | You can use the `psycopg.adapters`\ ``.``\ 46 | `~psycopg.adapt.AdaptersMap.types` registry to find the OID of builtin 47 | types, and you can use `~psycopg.types.TypeInfo` to extend the 48 | registry to custom types. 49 | 50 | .. automethod:: get_key 51 | .. automethod:: upgrade 52 | 53 | 54 | .. autoclass:: Loader(oid, context=None) 55 | 56 | :param oid: The type that will be managed by this dumper. 57 | :type oid: int 58 | :param context: The context where the transformation is performed. If not 59 | specified the conversion might be inaccurate, for instance it will not 60 | be possible to know the connection encoding or the server date format. 
import time
import asyncio

import psycopg.crdb
from psycopg import errors as e
from psycopg.crdb import AsyncCrdbConnection
from psycopg._compat import create_task

import pytest

# Every test in this module targets CockroachDB and runs under asyncio.
pytestmark = [pytest.mark.crdb, pytest.mark.asyncio]


async def test_is_crdb(aconn):
    # Detection works both on the wrapper connection and on the raw pgconn.
    assert AsyncCrdbConnection.is_crdb(aconn)
    assert AsyncCrdbConnection.is_crdb(aconn.pgconn)


async def test_connect(dsn):
    async with await AsyncCrdbConnection.connect(dsn) as conn:
        assert isinstance(conn, psycopg.crdb.AsyncCrdbConnection)


async def test_xid(dsn):
    # Two-phase commit is not supported by CockroachDB.
    async with await AsyncCrdbConnection.connect(dsn) as conn:
        with pytest.raises(e.NotSupportedError):
            conn.xid(1, "gtrid", "bqual")


async def test_tpc_begin(dsn):
    async with await AsyncCrdbConnection.connect(dsn) as conn:
        with pytest.raises(e.NotSupportedError):
            await conn.tpc_begin("foo")


async def test_tpc_recover(dsn):
    async with await AsyncCrdbConnection.connect(dsn) as conn:
        with pytest.raises(e.NotSupportedError):
            await conn.tpc_recover()


@pytest.mark.slow
async def test_broken_connection(aconn):
    # Cancelling our own session from within breaks the connection.
    cur = aconn.cursor()
    await cur.execute("select session_id from [show session_id]")
    (session_id,) = await cur.fetchone()
    with pytest.raises(psycopg.DatabaseError):
        await cur.execute("cancel session %s", [session_id])
    assert aconn.closed


@pytest.mark.slow
async def test_broken(aconn):
    cur = await aconn.execute("show session_id")
    (session_id,) = await cur.fetchone()
    with pytest.raises(psycopg.OperationalError):
        await aconn.execute("cancel session %s", [session_id])

    assert aconn.closed
    assert aconn.broken
    # Closing an already broken connection is a no-op and leaves it broken.
    await aconn.close()
    assert aconn.closed
    assert aconn.broken


@pytest.mark.slow
async def test_identify_closure(aconn_cls, dsn):
    async with await aconn_cls.connect(dsn) as conn:
        async with await aconn_cls.connect(dsn) as conn2:
            cur = await conn.execute("show session_id")
            (session_id,) = await cur.fetchone()

            async def closer():
                # Kill the first session from the second connection while
                # the first one is blocked in a long query.
                await asyncio.sleep(0.2)
                await conn2.execute("cancel session %s", [session_id])

            t = create_task(closer())
            t0 = time.time()
            try:
                with pytest.raises(psycopg.OperationalError):
                    await conn.execute("select pg_sleep(3.0)")
                # The error must surface promptly, well before the sleep ends.
                dt = time.time() - t0
                assert 0.2 < dt < 2
            finally:
                await asyncio.gather(t)
__: https://www.psycopg.org/psycopg3/docs/basic/install.html 18 | 19 | 20 | Hacking 21 | ------- 22 | 23 | In order to work on the Psycopg source code you need to have the ``libpq`` 24 | PostgreSQL client library installed in the system. For instance, on Debian 25 | systems, you can obtain it by running:: 26 | 27 | sudo apt install libpq5 28 | 29 | After which you can clone this repository:: 30 | 31 | git clone https://github.com/psycopg/psycopg.git 32 | cd psycopg 33 | 34 | Please note that the repository contains the source code of several Python 35 | packages: that's why you don't see a ``setup.py`` here. The packages may have 36 | different requirements: 37 | 38 | - The ``psycopg`` directory contains the pure python implementation of 39 | ``psycopg``. The package has only a runtime dependency on the ``libpq``, the 40 | PostgreSQL client library, which should be installed in your system. 41 | 42 | - The ``psycopg_c`` directory contains an optimization module written in 43 | C/Cython. In order to build it you will need a few development tools: please 44 | look at `Local installation`__ in the docs for the details. 45 | 46 | .. __: https://www.psycopg.org/psycopg3/docs/basic/install.html#local-installation 47 | 48 | - The ``psycopg_pool`` directory contains the `connection pools`__ 49 | implementations. This is kept as a separate package to allow a different 50 | release cycle. 51 | 52 | .. __: https://www.psycopg.org/psycopg3/docs/advanced/pool.html 53 | 54 | You can create a local virtualenv and install there the packages `in 55 | development mode`__, together with their development and testing 56 | requirements:: 57 | 58 | python -m venv .venv 59 | source .venv/bin/activate 60 | pip install -e "./psycopg[dev,test]" # for the base Python package 61 | pip install -e ./psycopg_pool # for the connection pool 62 | pip install ./psycopg_c # for the C speedup module 63 | 64 | .. 
__: https://pip.pypa.io/en/stable/reference/pip_install/#install-editable 65 | 66 | Please add ``--config-settings editable_mode=strict`` to the ``pip install 67 | -e`` above if you experience `editable mode broken`__. 68 | 69 | .. __: https://github.com/pypa/setuptools/issues/3557 70 | 71 | Now hack away! You can run the tests using:: 72 | 73 | psql -c 'create database psycopg_test' 74 | export PSYCOPG_TEST_DSN="dbname=psycopg_test" 75 | pytest 76 | -------------------------------------------------------------------------------- /psycopg/setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = psycopg 3 | description = PostgreSQL database adapter for Python 4 | url = https://psycopg.org/psycopg3/ 5 | author = Daniele Varrazzo 6 | author_email = daniele.varrazzo@gmail.com 7 | license = GNU Lesser General Public License v3 (LGPLv3) 8 | 9 | # Use a versioning scheme as defined in 10 | # https://www.python.org/dev/peps/pep-0440/ 11 | 12 | # STOP AND READ! if you change: 13 | version = 3.2.0.dev1 14 | # also change: 15 | # - `docs/news.rst` to declare this as the current version or an unreleased one 16 | # - `psycopg_c/setup.cfg`, and the extra dependencies below, to the same version. 17 | # 18 | # Check out tools/bump_version.py to help maintaining versions. 
19 | 20 | project_urls = 21 | Homepage = https://psycopg.org/ 22 | Documentation = https://psycopg.org/psycopg3/docs/ 23 | Changes = https://psycopg.org/psycopg3/docs/news.html 24 | Code = https://github.com/psycopg/psycopg 25 | Issue Tracker = https://github.com/psycopg/psycopg/issues 26 | Download = https://pypi.org/project/psycopg/ 27 | 28 | classifiers = 29 | Development Status :: 5 - Production/Stable 30 | Intended Audience :: Developers 31 | License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) 32 | Operating System :: MacOS :: MacOS X 33 | Operating System :: Microsoft :: Windows 34 | Operating System :: POSIX 35 | Programming Language :: Python :: 3 36 | Programming Language :: Python :: 3.7 37 | Programming Language :: Python :: 3.8 38 | Programming Language :: Python :: 3.9 39 | Programming Language :: Python :: 3.10 40 | Programming Language :: Python :: 3.11 41 | Topic :: Database 42 | Topic :: Database :: Front-Ends 43 | Topic :: Software Development 44 | Topic :: Software Development :: Libraries :: Python Modules 45 | 46 | long_description = file: README.rst 47 | long_description_content_type = text/x-rst 48 | license_files = LICENSE.txt 49 | 50 | [options] 51 | python_requires = >= 3.7 52 | packages = find: 53 | zip_safe = False 54 | install_requires = 55 | backports.zoneinfo >= 0.2.0; python_version < "3.9" 56 | typing-extensions >= 4.1 57 | tzdata; sys_platform == "win32" 58 | importlib-metadata >= 1.4; python_version < "3.8" 59 | 60 | [options.extras_require] 61 | c = 62 | psycopg-c == 3.2.0.dev1 63 | binary = 64 | psycopg-binary == 3.2.0.dev1 65 | pool = 66 | psycopg-pool 67 | test = 68 | mypy >= 0.990 69 | pproxy >= 2.7 70 | pytest >= 6.2.5 71 | pytest-asyncio >= 0.17 72 | pytest-cov >= 3.0 73 | pytest-randomly >= 3.10 74 | dev = 75 | black >= 22.3.0 76 | dnspython >= 2.1 77 | flake8 >= 4.0 78 | mypy >= 0.990 79 | types-setuptools >= 57.4 80 | wheel >= 0.37 81 | docs = 82 | Sphinx >= 5.0 83 | furo == 2022.6.21 84 | 
sphinx-autobuild >= 2021.3.14 85 | sphinx-autodoc-typehints >= 1.12 86 | 87 | [options.package_data] 88 | psycopg = py.typed 89 | -------------------------------------------------------------------------------- /docs/api/adapt.rst: -------------------------------------------------------------------------------- 1 | `adapt` -- Types adaptation 2 | =========================== 3 | 4 | .. module:: psycopg.adapt 5 | 6 | The `!psycopg.adapt` module exposes a set of objects useful for the 7 | configuration of *data adaptation*, which is the conversion of Python objects 8 | to PostgreSQL data types and back. 9 | 10 | These objects are useful if you need to configure data adaptation, i.e. 11 | if you need to change the default way that Psycopg converts between types or 12 | if you want to adapt custom data types and objects. You don't need this object 13 | in the normal use of Psycopg. 14 | 15 | See :ref:`adaptation` for an overview of the Psycopg adaptation system. 16 | 17 | .. _abstract base class: https://docs.python.org/glossary.html#term-abstract-base-class 18 | 19 | 20 | Dumpers and loaders 21 | ------------------- 22 | 23 | .. autoclass:: Dumper(cls, context=None) 24 | 25 | This is an `abstract base class`_, partially implementing the 26 | `~psycopg.abc.Dumper` protocol. Subclasses *must* at least implement the 27 | `.dump()` method and optionally override other members. 28 | 29 | .. automethod:: dump 30 | 31 | .. attribute:: format 32 | :type: psycopg.pq.Format 33 | :value: TEXT 34 | 35 | Class attribute. Set it to `~psycopg.pq.Format.BINARY` if the class 36 | `dump()` methods converts the object to binary format. 37 | 38 | .. automethod:: quote 39 | 40 | .. automethod:: get_key 41 | 42 | .. automethod:: upgrade 43 | 44 | 45 | .. autoclass:: Loader(oid, context=None) 46 | 47 | This is an `abstract base class`_, partially implementing the 48 | `~psycopg.abc.Loader` protocol. 
Subclasses *must* at least implement the 49 | `.load()` method and optionally override other members. 50 | 51 | .. automethod:: load 52 | 53 | .. attribute:: format 54 | :type: psycopg.pq.Format 55 | :value: TEXT 56 | 57 | Class attribute. Set it to `~psycopg.pq.Format.BINARY` if the class 58 | `load()` methods converts the object from binary format. 59 | 60 | 61 | Other objects used in adaptations 62 | --------------------------------- 63 | 64 | .. autoclass:: PyFormat 65 | :members: 66 | 67 | 68 | .. autoclass:: AdaptersMap 69 | 70 | .. seealso:: :ref:`adaptation` for an explanation about how contexts are 71 | connected. 72 | 73 | .. automethod:: register_dumper 74 | .. automethod:: register_loader 75 | 76 | .. attribute:: types 77 | 78 | The object where to look up for types information (such as the mapping 79 | between type names and oids in the specified context). 80 | 81 | :type: `~psycopg.types.TypesRegistry` 82 | 83 | .. automethod:: get_dumper 84 | .. automethod:: get_dumper_by_oid 85 | .. automethod:: get_loader 86 | 87 | 88 | .. autoclass:: Transformer(context=None) 89 | 90 | :param context: The context where the transformer should operate. 
from copy import deepcopy

import pytest


@pytest.fixture
def global_adapters():
    """Restore the global adapters after a test has changed them."""
    from psycopg import adapters

    # Snapshot the mutable registries before the test touches them.
    saved_dumpers = deepcopy(adapters._dumpers)
    saved_dumpers_by_oid = deepcopy(adapters._dumpers_by_oid)
    saved_loaders = deepcopy(adapters._loaders)
    saved_types = list(adapters.types)

    yield None

    # Put every registry back exactly as it was.
    adapters._dumpers = saved_dumpers
    adapters._dumpers_by_oid = saved_dumpers_by_oid
    adapters._loaders = saved_loaders
    adapters.types.clear()
    for info in saved_types:
        adapters.types.add(info)


@pytest.fixture
@pytest.mark.crdb_skip("2-phase commit")
def tpc(svcconn):
    # Provide a helper for two-phase commit tests, making sure no prepared
    # transaction is left around before or after each test.
    helper = Tpc(svcconn)
    helper.check_tpc()
    helper.clear_test_xacts()
    helper.make_test_table()
    yield helper
    helper.clear_test_xacts()


class Tpc:
    """Helper object to test two-phase transactions"""

    def __init__(self, conn):
        # tpc operations require an autocommit service connection.
        assert conn.autocommit
        self.conn = conn

    def check_tpc(self):
        """Skip the test if the server cannot run two-phase transactions."""
        from .fix_crdb import is_crdb, crdb_skip_message

        if is_crdb(self.conn):
            pytest.skip(crdb_skip_message("2-phase commit"))

        val = int(self.conn.execute("show max_prepared_transactions").fetchone()[0])
        if not val:
            pytest.skip("prepared transactions disabled in the database")

    def clear_test_xacts(self):
        """Rollback all the prepared transaction in the testing db."""
        from psycopg import sql

        cur = self.conn.execute(
            "select gid from pg_prepared_xacts where database = %s",
            (self.conn.info.dbname,),
        )
        # Consume the cursor before issuing more statements on the connection.
        for (gid,) in cur.fetchall():
            self.conn.execute(sql.SQL("rollback prepared {}").format(gid))

    def make_test_table(self):
        # Create the working table once and empty it for the current test.
        self.conn.execute("CREATE TABLE IF NOT EXISTS test_tpc (data text)")
        self.conn.execute("TRUNCATE test_tpc")

    def count_xacts(self):
        """Return the number of prepared xacts currently in the test db."""
        cur = self.conn.execute(
            """
            select count(*) from pg_prepared_xacts
            where database = %s""",
            (self.conn.info.dbname,),
        )
        return cur.fetchone()[0]

    def count_test_records(self):
        """Return the number of records in the test table."""
        cur = self.conn.execute("select count(*) from test_tpc")
        return cur.fetchone()[0]


@pytest.fixture(scope="module")
def generators():
    """Return the 'generators' module for selected psycopg implementation."""
    from psycopg import pq

    if pq.__impl__ == "c":
        from psycopg._cmodule import _psycopg

        return _psycopg

    import psycopg.generators

    return psycopg.generators
def pytest_configure(config):
    # Register custom markers so that pytest doesn't warn about (or, with
    # --strict-markers, reject) them.
    # Fix: the "flakey" description had a stray "')" left from an edit.
    markers = [
        "slow: this test is kinda slow (skip with -m 'not slow')",
        "flakey(reason): this test may fail unpredictably",
        # There are troubles on travis with these kind of tests and I cannot
        # catch the exception for my life.
        "subprocess: the test import psycopg after subprocess",
        "timing: the test is timing based and can fail on cheese hardware",
        "dns: the test requires dnspython to run",
        "postgis: the test requires the PostGIS extension to run",
    ]

    for marker in markers:
        config.addinivalue_line("markers", marker)


def pytest_addoption(parser):
    parser.addoption(
        "--loop",
        choices=["default", "uvloop"],
        default="default",
        help="The asyncio loop to use for async tests.",
    )


def pytest_report_header(config):
    # Show the selector implementation in use and, if not the default,
    # the asyncio loop selected with --loop.
    rv = [f"default selector: {selectors.DefaultSelector.__name__}"]

    loop = config.getoption("--loop")
    if loop != "default":
        rv.append(f"asyncio loop: {loop}")

    return rv


def pytest_sessionstart(session):
    # Detect if there was a segfault in the previous run.
    #
    # In case of segfault, pytest doesn't get a chance to write failed tests
    # in the cache. As a consequence, retries would find no test failed and
    # assume that all tests passed in the previous run, making the whole test pass.
    cache = session.config.cache
    if cache.get("segfault", False):
        session.warn(Warning("Previous run resulted in segfault! Not running any test"))
        session.warn(Warning("(delete '.pytest_cache/v/segfault' to clear this state)"))
        raise session.Failed
    # Assume the worst: pytest_sessionfinish() will clear the flag if we
    # manage to terminate cleanly.
    cache.set("segfault", True)

    # Configure the async loop.
    loop = session.config.getoption("--loop")
    if loop == "uvloop":
        import uvloop

        uvloop.install()
    else:
        assert loop == "default"

    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())


# Messages accumulated by tests whose failure was deliberately ignored,
# reported in the terminal summary at the end of the run.
allow_fail_messages: List[str] = []


def pytest_sessionfinish(session, exitstatus):
    # Mark the test run successful (in the sense -weak- that we didn't segfault).
    session.config.cache.set("segfault", False)


def pytest_terminal_summary(terminalreporter, exitstatus, config):
    if allow_fail_messages:
        terminalreporter.section("failed tests ignored")
        for msg in allow_fail_messages:
            terminalreporter.line(msg)
Otherwise just make sure it's in the path 26 | if [[ -x /opt/homebrew/bin/brew ]]; then 27 | eval "$(/opt/homebrew/bin/brew shellenv)" 28 | else 29 | command -v brew > /dev/null || ( 30 | # Not necessary: already installed 31 | # xcode-select --install 32 | NONINTERACTIVE=1 /bin/bash -c "$(curl -fsSL \ 33 | https://raw.githubusercontent.com/Homebrew/install/master/install.sh)" 34 | ) 35 | eval "$(/opt/homebrew/bin/brew shellenv)" 36 | fi 37 | 38 | export PGDATA=/opt/homebrew/var/postgresql@${pg_version} 39 | 40 | # Install PostgreSQL, if necessary 41 | command -v pg_config > /dev/null || ( 42 | brew install postgresql@${pg_version} 43 | ) 44 | 45 | # After PostgreSQL 15, the bin path is not in the path. 46 | export PATH=$(ls -d1 /opt/homebrew/Cellar/postgresql@${pg_version}/*/bin):$PATH 47 | 48 | # Make sure the server is running 49 | 50 | # Currently not working 51 | # brew services start postgresql@${pg_version} 52 | 53 | if ! pg_ctl status; then 54 | pg_ctl -l /opt/homebrew/var/log/postgresql@${pg_version}.log start 55 | fi 56 | 57 | 58 | # Install the Python versions we want to build 59 | for ver3 in $python_versions; do 60 | ver2=$(echo $ver3 | sed 's/\([^\.]*\)\(\.[^\.]*\)\(.*\)/\1\2/') 61 | command -v python${ver2} > /dev/null || ( 62 | (cd /tmp && 63 | curl -fsSl -O \ 64 | https://www.python.org/ftp/python/${ver3}/python-${ver3}-macos11.pkg) 65 | sudo installer -pkg /tmp/python-${ver3}-macos11.pkg -target / 66 | ) 67 | done 68 | 69 | # Create a virtualenv where to work 70 | if [[ ! 
-x .venv/bin/python ]]; then 71 | python3 -m venv .venv 72 | fi 73 | 74 | source .venv/bin/activate 75 | pip install cibuildwheel 76 | 77 | # Create the psycopg_binary source package 78 | rm -rf psycopg_binary 79 | python tools/build/copy_to_binary.py 80 | 81 | # Build the binary packages 82 | export CIBW_PLATFORM=macos 83 | export CIBW_ARCHS=arm64 84 | export CIBW_BUILD='cp{38,39,310,311}-*' 85 | export CIBW_TEST_REQUIRES="./psycopg[test] ./psycopg_pool" 86 | export CIBW_TEST_COMMAND="pytest {project}/tests -m 'not slow and not flakey' --color yes" 87 | 88 | export PSYCOPG_IMPL=binary 89 | export PSYCOPG_TEST_DSN="dbname=postgres" 90 | export PSYCOPG_TEST_WANT_LIBPQ_BUILD=">= ${pg_version}" 91 | export PSYCOPG_TEST_WANT_LIBPQ_IMPORT=">= ${pg_version}" 92 | 93 | cibuildwheel psycopg_binary 94 | -------------------------------------------------------------------------------- /psycopg/psycopg/client_cursor.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg client-side binding cursors 3 | """ 4 | 5 | # Copyright (C) 2022 The Psycopg Team 6 | 7 | from typing import Optional, Tuple, TYPE_CHECKING 8 | from functools import partial 9 | 10 | from ._queries import PostgresQuery, PostgresClientQuery 11 | 12 | from . import pq 13 | from . import adapt 14 | from . 
TEXT = pq.Format.TEXT
BINARY = pq.Format.BINARY


class ClientCursorMixin(BaseCursor[ConnectionType, Row]):
    """Mixin implementing client-side parameter binding for cursors."""

    def mogrify(self, query: Query, params: Optional[Params] = None) -> str:
        """
        Return the query and parameters merged.

        Parameters are adapted and merged to the query the same way that
        `!execute()` would do.

        """
        self._tx = adapt.Transformer(self)
        pgq = self._convert_query(query, params)
        return pgq.query.decode(self._tx.encoding)

    def _execute_send(
        self,
        query: PostgresQuery,
        *,
        force_extended: bool = False,
        binary: Optional[bool] = None,
    ) -> None:
        # Resolve the result format; client-side cursors only support text.
        if binary is None:
            fmt = self.format
        else:
            fmt = BINARY if binary else TEXT

        if fmt == BINARY:
            raise e.NotSupportedError(
                "client-side cursors don't support binary results"
            )

        self._query = query

        if self._conn._pipeline:
            # In pipeline mode always use PQsendQueryParams - see #314
            # Multiple statements in the same query are not allowed anyway.
            self._conn._pipeline.command_queue.append(
                partial(self._pgconn.send_query_params, query.query, None)
            )
        elif force_extended:
            self._pgconn.send_query_params(query.query, None)
        else:
            # If we can, let's use simple query protocol,
            # as it can execute more than one statement in a single query.
            self._pgconn.send_query(query.query)

    def _convert_query(
        self, query: Query, params: Optional[Params] = None
    ) -> PostgresQuery:
        # Merge the parameters into the query client-side.
        pgq = PostgresClientQuery(self._tx)
        pgq.convert(query, params)
        return pgq

    def _get_prepared(
        self, pgq: PostgresQuery, prepare: Optional[bool] = None
    ) -> Tuple[Prepare, bytes]:
        # Client-side binding never uses server-side prepared statements.
        return (Prepare.NO, b"")


class ClientCursor(ClientCursorMixin["Connection[Any]", Row], Cursor[Row]):
    __module__ = "psycopg"


class AsyncClientCursor(
    ClientCursorMixin["AsyncConnection[Any]", Row], AsyncCursor[Row]
):
    __module__ = "psycopg"
def test_str_list_dumper_text(conn):
    # A list of strings is dumped as text[], with a text-oid sub-dumper.
    tx = Transformer(conn)
    dumper = tx.get_dumper([""], PyFormat.TEXT)
    assert isinstance(dumper, ListDumper)
    assert dumper.oid == builtins["text"].array_oid
    assert dumper.sub_dumper and dumper.sub_dumper.oid == builtins["text"].oid


@pytest.fixture
def crdb_adapters():
    """Restore the crdb adapters after a test has changed them."""
    # Snapshot the mutable registries before the test runs.
    saved_dumpers = deepcopy(adapters._dumpers)
    saved_dumpers_by_oid = deepcopy(adapters._dumpers_by_oid)
    saved_loaders = deepcopy(adapters._loaders)
    saved_types = list(adapters.types)

    yield None

    # Restore everything after the test.
    adapters._dumpers = saved_dumpers
    adapters._dumpers_by_oid = saved_dumpers_by_oid
    adapters._loaders = saved_loaders
    adapters.types.clear()
    for info in saved_types:
        adapters.types.add(info)


def test_dump_global_ctx(dsn, crdb_adapters, pgconn):
    # Dumpers registered on the global crdb adapters map are picked up by
    # new connections, both for explicit (%t, %b) and automatic (%s) formats.
    adapters.register_dumper(MyStr, make_bin_dumper("gb"))
    adapters.register_dumper(MyStr, make_dumper("gt"))
    with CrdbConnection.connect(dsn) as conn:
        cur = conn.execute("select %s", [MyStr("hello")])
        assert cur.fetchone() == ("hellogt",)
        cur = conn.execute("select %b", [MyStr("hello")])
        assert cur.fetchone() == ("hellogb",)
        cur = conn.execute("select %t", [MyStr("hello")])
        assert cur.fetchone() == ("hellogt",)
#!/usr/bin/env python3
"""
PostgreSQL database adapter for Python - optimisation package
"""

# Copyright (C) 2020 The Psycopg Team

import os
import sys
import subprocess as sp

from setuptools import setup, Extension
from distutils.command.build_ext import build_ext
from distutils import log

# Move to the directory of setup.py: executing this file from another location
# (e.g. from the project root) will fail
here = os.path.abspath(os.path.dirname(__file__))
if os.path.abspath(os.getcwd()) != here:
    os.chdir(here)


def get_config(what: str) -> str:
    """Return a build configuration value obtained from pg_config."""
    pg_config = "pg_config"
    try:
        out = sp.run([pg_config, f"--{what}"], stdout=sp.PIPE, check=True)
    except Exception as e:
        log.error(f"couldn't run {pg_config!r} --{what}: %s", e)
        raise
    return out.stdout.strip().decode()


class psycopg_build_ext(build_ext):
    """build_ext command adding libpq paths and optional Cython compilation."""

    def finalize_options(self) -> None:
        self._setup_ext_build()
        super().finalize_options()

    def _setup_ext_build(self) -> None:
        cythonize = None

        # In the sdist there are not .pyx, only c, so we don't need Cython.
        # Otherwise Cython is a requirement and it is used to compile pyx to c.
        if os.path.exists("psycopg_c/_psycopg.pyx"):
            from Cython.Build import cythonize

        # Add include and lib dir for the libpq.
        includedir = get_config("includedir")
        libdir = get_config("libdir")
        for ext in self.distribution.ext_modules:
            ext.include_dirs.append(includedir)
            ext.library_dirs.append(libdir)

            if sys.platform == "win32":
                # For __imp_htons and others
                ext.libraries.append("ws2_32")

        if cythonize is not None:
            # Compile from .pyx sources where they are available.
            for ext in self.distribution.ext_modules:
                for i in range(len(ext.sources)):
                    base, fext = os.path.splitext(ext.sources[i])
                    if fext == ".c" and os.path.exists(base + ".pyx"):
                        ext.sources[i] = base + ".pyx"

            self.distribution.ext_modules = cythonize(
                self.distribution.ext_modules,
                language_level=3,
                compiler_directives={
                    "always_allow_keywords": False,
                },
                annotate=False,  # enable to get an html view of the C module
            )
        else:
            self.distribution.ext_modules = [pgext, pqext]


# MSVC requires an explicit "libpq"
libpq = "pq" if sys.platform != "win32" else "libpq"

# Some details missing, to be finished by psycopg_build_ext.finalize_options
pgext = Extension(
    "psycopg_c._psycopg",
    [
        "psycopg_c/_psycopg.c",
        "psycopg_c/types/numutils.c",
    ],
    libraries=[libpq],
    include_dirs=[],
)

pqext = Extension(
    "psycopg_c.pq",
    ["psycopg_c/pq.c"],
    libraries=[libpq],
    include_dirs=[],
)

setup(
    ext_modules=[pgext, pqext],
    cmdclass={"build_ext": psycopg_build_ext},
)
"""

# Copyright (C) 2020 The Psycopg Team

from typing import Any, Iterable, List, Optional, Sequence, Tuple

from psycopg import pq
from psycopg import abc
from psycopg.rows import Row, RowMaker
from psycopg.adapt import AdaptersMap, PyFormat
from psycopg.pq.abc import PGconn, PGresult
from psycopg.connection import BaseConnection
from psycopg._compat import Deque

# Stub of the C implementation of the adaptation transformer; signatures must
# mirror psycopg_c/_psycopg.pyx exactly.
class Transformer(abc.AdaptContext):
    types: Optional[Tuple[int, ...]]
    formats: Optional[List[pq.Format]]
    def __init__(self, context: Optional[abc.AdaptContext] = None): ...
    @classmethod
    def from_context(cls, context: Optional[abc.AdaptContext]) -> "Transformer": ...
    @property
    def connection(self) -> Optional[BaseConnection[Any]]: ...
    @property
    def encoding(self) -> str: ...
    @property
    def adapters(self) -> AdaptersMap: ...
    @property
    def pgresult(self) -> Optional[PGresult]: ...
    def set_pgresult(
        self,
        result: Optional["PGresult"],
        *,
        set_loaders: bool = True,
        format: Optional[pq.Format] = None,
    ) -> None: ...
    def set_dumper_types(self, types: Sequence[int], format: pq.Format) -> None: ...
    def set_loader_types(self, types: Sequence[int], format: pq.Format) -> None: ...
    def dump_sequence(
        self, params: Sequence[Any], formats: Sequence[PyFormat]
    ) -> Sequence[Optional[abc.Buffer]]: ...
    def as_literal(self, obj: Any) -> bytes: ...
    def get_dumper(self, obj: Any, format: PyFormat) -> abc.Dumper: ...
    def load_rows(self, row0: int, row1: int, make_row: RowMaker[Row]) -> List[Row]: ...
    def load_row(self, row: int, make_row: RowMaker[Row]) -> Optional[Row]: ...
    def load_sequence(
        self, record: Sequence[Optional[abc.Buffer]]
    ) -> Tuple[Any, ...]: ...
    def get_loader(self, oid: int, format: pq.Format) -> abc.Loader: ...
"""
PQbuffer object implementation.
"""

# Copyright (C) 2020 The Psycopg Team

cimport cython
from cpython.bytes cimport PyBytes_AsStringAndSize
from cpython.buffer cimport PyObject_CheckBuffer, PyBUF_SIMPLE
from cpython.buffer cimport PyObject_GetBuffer, PyBuffer_Release


# freelist(32): recycle up to 32 instances to avoid allocator churn, as these
# wrappers are created for every result datum.
@cython.freelist(32)
cdef class PQBuffer:
    """
    Wrap a chunk of memory allocated by the libpq and expose it as memoryview.
    """
    # This wrapper OWNS the memory: it is returned to the libpq allocator
    # via PQfreemem() on deallocation.
    @staticmethod
    cdef PQBuffer _from_buffer(unsigned char *buf, Py_ssize_t length):
        # Alternate constructor taking ownership of `buf` (libpq-allocated,
        # `length` bytes). __new__ is used so no Python-level init runs.
        cdef PQBuffer rv = PQBuffer.__new__(PQBuffer)
        rv.buf = buf
        rv.len = length
        return rv

    def __cinit__(self):
        # Start empty so __dealloc__ is safe even if _from_buffer never ran.
        self.buf = NULL
        self.len = 0

    def __dealloc__(self):
        # Free the libpq-owned chunk (PQfreemem is the matching deallocator).
        if self.buf:
            libpq.PQfreemem(self.buf)

    def __repr__(self):
        return (
            f"{self.__class__.__module__}.{self.__class__.__qualname__}"
            f"({bytes(self)})"
        )

    def __getbuffer__(self, Py_buffer *buffer, int flags):
        # Expose the chunk through the buffer protocol: 1-dimensional,
        # read-only array of unsigned char.
        buffer.buf = self.buf
        buffer.obj = self
        buffer.len = self.len
        buffer.itemsize = sizeof(unsigned char)
        buffer.readonly = 1
        buffer.ndim = 1
        buffer.format = NULL  # unsigned char
        buffer.shape = &self.len
        buffer.strides = NULL
        buffer.suboffsets = NULL
        buffer.internal = NULL

    def __releasebuffer__(self, Py_buffer *buffer):
        # Nothing to release: the memory lives until __dealloc__.
        pass


@cython.freelist(32)
cdef class ViewBuffer:
    """
    Wrap a chunk of memory owned by a different object.
    """
    # Unlike PQBuffer this wrapper does NOT own the memory: it stores a
    # reference to `obj` (the owner) so the pointed memory stays alive,
    # and defines no __dealloc__ because the owner frees it.
    @staticmethod
    cdef ViewBuffer _from_buffer(
        object obj, unsigned char *buf, Py_ssize_t length
    ):
        cdef ViewBuffer rv = ViewBuffer.__new__(ViewBuffer)
        rv.obj = obj
        rv.buf = buf
        rv.len = length
        return rv

    def __cinit__(self):
        self.buf = NULL
        self.len = 0

    def __repr__(self):
        return (
            f"{self.__class__.__module__}.{self.__class__.__qualname__}"
            f"({bytes(self)})"
        )

    def __getbuffer__(self, Py_buffer *buffer, int flags):
        # Same layout as PQBuffer: read-only 1-D array of unsigned char.
        buffer.buf = self.buf
        buffer.obj = self
        buffer.len = self.len
        buffer.itemsize = sizeof(unsigned char)
        buffer.readonly = 1
        buffer.ndim = 1
        buffer.format = NULL  # unsigned char
        buffer.shape = &self.len
        buffer.strides = NULL
        buffer.suboffsets = NULL
        buffer.internal = NULL

    def __releasebuffer__(self, Py_buffer *buffer):
        pass


cdef int _buffer_as_string_and_size(
    data: "Buffer", char **ptr, Py_ssize_t *length
) except -1:
    # Extract a (pointer, length) pair from a bytes object or any object
    # supporting the buffer protocol. Returns -1 (via exception) on failure.
    cdef Py_buffer buf

    if isinstance(data, bytes):
        # Fast path: borrow the internal bytes pointer directly.
        PyBytes_AsStringAndSize(data, ptr, length)
    elif PyObject_CheckBuffer(data):
        PyObject_GetBuffer(data, &buf, PyBUF_SIMPLE)
        ptr[0] = buf.buf
        length[0] = buf.len
        # NOTE(review): the Py_buffer is released before the caller consumes
        # ptr/length, so the pointer remains valid only while `data` is kept
        # alive and not resized — confirm all callers hold a reference.
        PyBuffer_Release(&buf)
    else:
        raise TypeError(f"bytes or buffer expected, got {type(data)}")
Func = TypeVar("Func", bound=Callable[..., Any])

logger = logging.getLogger("psycopg.debug")


class PGconnDebug:
    """Proxy for a PGconn logging all its access.

    Plain attribute reads and writes are logged immediately; callable
    attributes are returned wrapped by `debugging()`, so their arguments
    and return value are logged at call time instead.
    """

    _Self = TypeVar("_Self", bound="PGconnDebug")
    _pgconn: "abc.PGconn"

    def __init__(self, pgconn: "abc.PGconn"):
        # Bypass our own __setattr__, which would proxy (and log) the write
        # to the wrapped connection instead of storing it on the proxy.
        super().__setattr__("_pgconn", pgconn)

    def __repr__(self) -> str:
        cls = f"{self.__class__.__module__}.{self.__class__.__qualname__}"
        info = connection_summary(self._pgconn)
        return f"<{cls} {info} at 0x{id(self):x}>"

    def __getattr__(self, attr: str) -> Any:
        value = getattr(self._pgconn, attr)
        if callable(value):
            # Log at call time: name, arguments, return value.
            return debugging(value)
        else:
            logger.info("PGconn.%s -> %s", attr, value)
            return value

    def __setattr__(self, attr: str, value: Any) -> None:
        setattr(self._pgconn, attr, value)
        logger.info("PGconn.%s <- %s", attr, value)

    @classmethod
    def connect(cls: Type[_Self], conninfo: bytes) -> _Self:
        return cls(debugging(PGconn.connect)(conninfo))

    @classmethod
    def connect_start(cls: Type[_Self], conninfo: bytes) -> _Self:
        return cls(debugging(PGconn.connect_start)(conninfo))

    @classmethod
    def ping(cls, conninfo: bytes) -> int:
        return debugging(PGconn.ping)(conninfo)


def debugging(f: Func) -> Func:
    """Wrap a function in order to log its arguments and return value on call."""

    @wraps(f)
    def debugging_(*args: Any, **kwargs: Any) -> Any:
        reprs = [f"{arg!r}" for arg in args]
        reprs.extend(f"{k}={v!r}" for k, v in kwargs.items())

        logger.info("PGconn.%s(%s)", f.__name__, ", ".join(reprs))
        rv = f(*args, **kwargs)
        # Display the return value only if the function is declared to return
        # something else than None.
        ra = inspect.signature(f).return_annotation
        if ra is not None or rv is not None:
            logger.info(" <- %r", rv)
        return rv

    return debugging_  # type: ignore
# Note: defining the exported methods helps Sphinx document that this is the
# canonical place to obtain them; MyPy should use it too, so that function
# signatures are consistent with the documentation.
# Inputs rejected by the hstore parser: unquoted tokens, truncated escapes,
# unterminated keys and a misspelled NULL.
@pytest.mark.parametrize(
    "s",
    [
        "a",
        '"a"',
        r'"a\\""=>"1"',
        r'"a\\\\""=>"1"',
        '"a=>"1"',
        '"a"=>"1", "b"=>NUL',
    ],
)
def test_parse_bad(s):
    # Malformed hstore text must raise DataError rather than return garbage.
    with pytest.raises(psycopg.DataError):
        loader = HstoreLoader(0, None)
        loader.load(s.encode())


def test_register_conn(hstore, conn):
    # Registering on a connection customizes that connection's adapters map.
    info = TypeInfo.fetch(conn, "hstore")
    register_hstore(info, conn)
    assert conn.adapters.types[info.oid].name == "hstore"

    cur = conn.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
    assert cur.fetchone() == (None, {}, {"a": "b"})


def test_register_curs(hstore, conn):
    # Registering on a cursor must not leak into the parent connection.
    info = TypeInfo.fetch(conn, "hstore")
    cur = conn.cursor()
    register_hstore(info, cur)
    assert conn.adapters.types.get(info.oid) is None
    assert cur.adapters.types[info.oid].name == "hstore"

    cur.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
    assert cur.fetchone() == (None, {}, {"a": "b"})


def test_register_globally(conn_cls, hstore, dsn, svcconn, global_adapters):
    # Global registration affects connections created afterwards, not
    # connections that already exist (svcconn).
    info = TypeInfo.fetch(svcconn, "hstore")
    register_hstore(info)
    assert psycopg.adapters.types[info.oid].name == "hstore"

    assert svcconn.adapters.types.get(info.oid) is None
    conn = conn_cls.connect(dsn)
    assert conn.adapters.types[info.oid].name == "hstore"

    cur = conn.execute("select null::hstore, ''::hstore, 'a => b'::hstore")
    assert cur.fetchone() == (None, {}, {"a": "b"})
    conn.close()


# Roundtrip samples: empty dict, a NULL value, all printable ASCII as both
# keys and values, and one long key/value pair.
ab = list(map(chr, range(32, 128)))
samp = [
    {},
    {"a": "b", "c": None},
    dict(zip(ab, ab)),
    {"".join(ab): "".join(ab)},
]
@pytest.mark.parametrize("d", samp)
def test_roundtrip(hstore, conn, d):
    # Dump a dict as hstore and load it back: must be lossless.
    register_hstore(TypeInfo.fetch(conn, "hstore"), conn)
    d1 = conn.execute("select %s", [d]).fetchone()[0]
    assert d == d1


def test_roundtrip_array(hstore, conn):
    # Arrays of hstore roundtrip too (exercises the array adapters).
    register_hstore(TypeInfo.fetch(conn, "hstore"), conn)
    samp1 = conn.execute("select %s", (samp,)).fetchone()[0]
    assert samp1 == samp


def test_no_info_error(conn):
    # Passing no TypeInfo (i.e. the extension is not installed) is a usage
    # error, reported as TypeError.
    with pytest.raises(TypeError, match="hstore.*extension"):
        register_hstore(None, conn)  # type: ignore[arg-type]
In order to test a 32 | different implementation, use the normal `pq module selection mechanism`__ 33 | of the ``PSYCOPG_IMPL`` env var:: 34 | 35 | $ PSYCOPG_IMPL=python pytest 36 | ========================= test session starts ========================= 37 | [...] 38 | libpq available: 130002 39 | libpq wrapper implementation: python 40 | 41 | .. __: https://www.psycopg.org/psycopg/docs/api/pq.html#pq-module-implementations 42 | 43 | 44 | - Slow tests have a ``slow`` marker which can be selected to reduce test 45 | runtime to a few seconds only. Please add a ``@pytest.mark.slow`` marker to 46 | any test needing an arbitrary wait. At the time of writing:: 47 | 48 | $ pytest 49 | ========================= test session starts ========================= 50 | [...] 51 | ======= 1983 passed, 3 skipped, 110 xfailed in 78.21s (0:01:18) ======= 52 | 53 | $ pytest -m "not slow" 54 | ========================= test session starts ========================= 55 | [...] 56 | ==== 1877 passed, 2 skipped, 169 deselected, 48 xfailed in 13.47s ===== 57 | 58 | - ``pytest`` option ``--pq-trace={TRACEFILE,STDERR}`` can be used to capture 59 | libpq trace. When using ``stderr``, the output will only be shown for 60 | failing or in-error tests, unless ``-s/--capture=no`` option is used. 61 | 62 | - ``pytest`` option ``--pq-debug`` can be used to log access to libpq's 63 | ``PGconn`` functions. 64 | 65 | 66 | Testing in docker 67 | ----------------- 68 | 69 | Useful to test different Python versions without installing them. Can be used 70 | to replicate GitHub actions failures, specifying the ``--randomly-seed`` used 71 | in the test run. 
The following ``PG*`` env vars are an example to adjust the 72 | test dsn in order to connect to a database running on the docker host: specify 73 | a set of env vars working for your setup:: 74 | 75 | $ docker run -ti --rm --volume `pwd`:/src --workdir /src \ 76 | -e PSYCOPG_TEST_DSN -e PGHOST=172.17.0.1 -e PGUSER=`whoami` \ 77 | python:3.7 bash 78 | 79 | # pip install -e "./psycopg[test]" ./psycopg_pool ./psycopg_c 80 | # pytest 81 | 82 | 83 | Testing with CockroachDB 84 | ======================== 85 | 86 | You can run CRDB in a docker container using:: 87 | 88 | docker run -p 26257:26257 --name crdb --rm \ 89 | cockroachdb/cockroach:v22.1.3 start-single-node --insecure 90 | 91 | And use the following connection string to run the tests:: 92 | 93 | export PSYCOPG_TEST_DSN="host=localhost port=26257 user=root dbname=defaultdb" 94 | pytest ... 95 | -------------------------------------------------------------------------------- /psycopg/psycopg/dbapi20.py: -------------------------------------------------------------------------------- 1 | """ 2 | Compatibility objects with DBAPI 2.0 3 | """ 4 | 5 | # Copyright (C) 2020 The Psycopg Team 6 | 7 | import time 8 | import datetime as dt 9 | from math import floor 10 | from typing import Any, Sequence, Union 11 | 12 | from . 
class DBAPITypeObject:
    """A DBAPI-2.0 type singleton, comparing equal to a group of type OIDs."""

    def __init__(self, name: str, type_names: Sequence[str]):
        self.name = name
        self.values = tuple(postgres.types[n].oid for n in type_names)

    def __repr__(self) -> str:
        return f"psycopg.{self.name}"

    def __eq__(self, other: Any) -> bool:
        # Only comparisons with type OIDs (ints) are meaningful.
        if not isinstance(other, int):
            return NotImplemented
        return other in self.values

    def __ne__(self, other: Any) -> bool:
        if not isinstance(other, int):
            return NotImplemented
        return other not in self.values
({len(sobj)} byteschars)" 55 | return f"{self.__class__.__name__}({sobj})" 56 | 57 | 58 | class BinaryBinaryDumper(BytesBinaryDumper): 59 | def dump(self, obj: Union[Buffer, Binary]) -> Buffer: 60 | if isinstance(obj, Binary): 61 | return super().dump(obj.obj) 62 | else: 63 | return super().dump(obj) 64 | 65 | 66 | class BinaryTextDumper(BytesDumper): 67 | def dump(self, obj: Union[Buffer, Binary]) -> Buffer: 68 | if isinstance(obj, Binary): 69 | return super().dump(obj.obj) 70 | else: 71 | return super().dump(obj) 72 | 73 | 74 | def Date(year: int, month: int, day: int) -> dt.date: 75 | return dt.date(year, month, day) 76 | 77 | 78 | def DateFromTicks(ticks: float) -> dt.date: 79 | return TimestampFromTicks(ticks).date() 80 | 81 | 82 | def Time(hour: int, minute: int, second: int) -> dt.time: 83 | return dt.time(hour, minute, second) 84 | 85 | 86 | def TimeFromTicks(ticks: float) -> dt.time: 87 | return TimestampFromTicks(ticks).time() 88 | 89 | 90 | def Timestamp( 91 | year: int, month: int, day: int, hour: int, minute: int, second: int 92 | ) -> dt.datetime: 93 | return dt.datetime(year, month, day, hour, minute, second) 94 | 95 | 96 | def TimestampFromTicks(ticks: float) -> dt.datetime: 97 | secs = floor(ticks) 98 | frac = ticks - secs 99 | t = time.localtime(ticks) 100 | tzinfo = dt.timezone(dt.timedelta(seconds=t.tm_gmtoff)) 101 | rv = dt.datetime(*t[:6], round(frac * 1_000_000), tzinfo=tzinfo) 102 | return rv 103 | 104 | 105 | def register_dbapi20_adapters(context: AdaptContext) -> None: 106 | adapters = context.adapters 107 | adapters.register_dumper(Binary, BinaryTextDumper) 108 | adapters.register_dumper(Binary, BinaryBinaryDumper) 109 | 110 | # Make them also the default dumpers when dumping by bytea oid 111 | adapters.register_dumper(None, BinaryTextDumper) 112 | adapters.register_dumper(None, BinaryBinaryDumper) 113 | -------------------------------------------------------------------------------- /docs/api/copy.rst: 
-------------------------------------------------------------------------------- 1 | .. currentmodule:: psycopg 2 | 3 | COPY-related objects 4 | ==================== 5 | 6 | The main objects (`Copy`, `AsyncCopy`) present the main interface to exchange 7 | data during a COPY operations. These objects are normally obtained by the 8 | methods `Cursor.copy()` and `AsyncCursor.copy()`; however, they can be also 9 | created directly, for instance to write to a destination which is not a 10 | database (e.g. using a `~psycopg.copy.FileWriter`). 11 | 12 | See :ref:`copy` for details. 13 | 14 | 15 | Main Copy objects 16 | ----------------- 17 | 18 | .. autoclass:: Copy() 19 | 20 | The object is normally returned by `!with` `Cursor.copy()`. 21 | 22 | .. automethod:: write_row 23 | 24 | The data in the tuple will be converted as configured on the cursor; 25 | see :ref:`adaptation` for details. 26 | 27 | .. automethod:: write 28 | .. automethod:: read 29 | 30 | Instead of using `!read()` you can iterate on the `!Copy` object to 31 | read its data row by row, using ``for row in copy: ...``. 32 | 33 | .. automethod:: rows 34 | 35 | Equivalent of iterating on `read_row()` until it returns `!None` 36 | 37 | .. automethod:: read_row 38 | .. automethod:: set_types 39 | 40 | 41 | .. autoclass:: AsyncCopy() 42 | 43 | The object is normally returned by ``async with`` `AsyncCursor.copy()`. 44 | Its methods are similar to the ones of the `Copy` object but offering an 45 | `asyncio` interface (`await`, `async for`, `async with`). 46 | 47 | .. automethod:: write_row 48 | .. automethod:: write 49 | .. automethod:: read 50 | 51 | Instead of using `!read()` you can iterate on the `!AsyncCopy` object 52 | to read its data row by row, using ``async for row in copy: ...``. 53 | 54 | .. automethod:: rows 55 | 56 | Use it as `async for record in copy.rows():` ... 57 | 58 | .. automethod:: read_row 59 | 60 | 61 | .. _copy-writers: 62 | 63 | Writer objects 64 | -------------- 65 | 66 | .. 
      with open("target-file.pgcopy", "wb") as f:
          with Copy(cur, writer=FileWriter(f)) as copy:
              for record in records:
                  copy.write_row(record)
logger = logging.getLogger()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")


def fetch_user(username):
    """Return the GitHub profile (decoded JSON) for *username*.

    Raises `requests.HTTPError` if the API request fails.
    """
    logger.info("fetching %s", username)
    resp = requests.get(
        f"https://api.github.com/users/{username}",
        headers={"Accept": "application/vnd.github.v3+json"},
    )
    resp.raise_for_status()
    return resp.json()


def get_user_data(data):
    """
    Get the data to save in the backers file from the GitHub profile *data*.

    Returns a dict with "username", "avatar", "name" and, if the profile has
    a blog entry, a "website" (with an http scheme prepended if missing).
    """
    out = {
        "username": data["login"],
        "avatar": data["avatar_url"],
        "name": data["name"],
    }
    if data["blog"]:
        website = data["blog"]
        # GitHub users often enter the website without a scheme.
        if not website.startswith("http"):
            website = "http://" + website

        out["website"] = website

    return out


def add_entry(opt, filedata, username):
    """Append a new backer entry for *username* to *filedata*."""
    userdata = get_user_data(fetch_user(username))
    if opt.top:
        userdata["tier"] = "top"

    filedata.append(userdata)


def update_entry(opt, filedata, entry):
    """Refresh an existing backer entry with fresh data from GitHub.

    *entry* is a username or a user entry data (dict); ``keep_<field>``
    flags in the entry preserve manually-edited fields.
    """
    if isinstance(entry, str):
        username = entry
        entry = [e for e in filedata if e["username"] == username]
        if not entry:
            raise Exception(f"{username} not found")
        entry = entry[0]
    else:
        username = entry["username"]

    userdata = get_user_data(fetch_user(username))
    for k, v in userdata.items():
        if entry.get("keep_" + k):
            continue
        entry[k] = v
def parse_cmdline():
    """Parse the command line; return the options namespace."""
    # Imported locally: argparse is only needed when run as a script.
    from argparse import ArgumentParser

    parser = ArgumentParser(description=__doc__)
    parser.add_argument(
        "--file",
        help="the file to update [default: %(default)s]",
        # Default: the BACKERS.yaml at the repository root (parent of tools/).
        default=Path(__file__).parent.parent / "BACKERS.yaml",
        type=Path,
    )
    parser.add_argument(
        "--add",
        metavar="USERNAME",
        nargs="+",
        help="add USERNAME to the backers",
    )

    parser.add_argument(
        "--top",
        action="store_true",
        help="add to the top tier",
    )

    parser.add_argument(
        "--update",
        metavar="USERNAME",
        nargs="+",
        help="update USERNAME data",
    )

    parser.add_argument(
        "--update-all",
        action="store_true",
        help="update all the existing backers data",
    )

    opt = parser.parse_args()

    return opt
# These wrappers are exposed by `psycopg.types.numeric`: they are defined
# here to avoid a circular import, and advertise that module as their home.
_MODULE = "psycopg.types.numeric"


class Int2(int):
    """
    Force dumping a Python `!int` as a PostgreSQL :sql:`smallint/int2`.
    """

    __module__ = _MODULE
    __slots__ = ()

    def __new__(cls, arg: int) -> "Int2":
        return int.__new__(cls, arg)

    def __str__(self) -> str:
        # Plain int representation, without the wrapper class name.
        return int.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({int.__repr__(self)})"


class Int4(int):
    """
    Force dumping a Python `!int` as a PostgreSQL :sql:`integer/int4`.
    """

    __module__ = _MODULE
    __slots__ = ()

    def __new__(cls, arg: int) -> "Int4":
        return int.__new__(cls, arg)

    def __str__(self) -> str:
        return int.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({int.__repr__(self)})"


class Int8(int):
    """
    Force dumping a Python `!int` as a PostgreSQL :sql:`bigint/int8`.
    """

    __module__ = _MODULE
    __slots__ = ()

    def __new__(cls, arg: int) -> "Int8":
        return int.__new__(cls, arg)

    def __str__(self) -> str:
        return int.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({int.__repr__(self)})"


class IntNumeric(int):
    """
    Force dumping a Python `!int` as a PostgreSQL :sql:`numeric/decimal`.
    """

    __module__ = _MODULE
    __slots__ = ()

    def __new__(cls, arg: int) -> "IntNumeric":
        return int.__new__(cls, arg)

    def __str__(self) -> str:
        return int.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({int.__repr__(self)})"
class Float8(float):
    """
    Force dumping a Python `!float` as a PostgreSQL :sql:`float8/double precision`.
    """

    # Advertised under the public module (psycopg.types.numeric) where this
    # wrapper is exposed, rather than the private module defining it.
    __module__ = "psycopg.types.numeric"
    __slots__ = ()

    def __new__(cls, arg: float) -> "Float8":
        return float.__new__(cls, arg)

    def __str__(self) -> str:
        # Plain float representation, without the wrapper class name.
        return float.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({float.__repr__(self)})"


class Oid(int):
    """
    Force dumping a Python `!int` as a PostgreSQL :sql:`oid`.
    """

    __module__ = "psycopg.types.numeric"
    __slots__ = ()

    def __new__(cls, arg: int) -> "Oid":
        return int.__new__(cls, arg)

    def __str__(self) -> str:
        return int.__repr__(self)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({int.__repr__(self)})"
12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | import sys 18 | from pathlib import Path 19 | 20 | import psycopg 21 | 22 | docs_dir = Path(__file__).parent 23 | sys.path.append(str(docs_dir / "lib")) 24 | 25 | 26 | # -- Project information ----------------------------------------------------- 27 | 28 | project = "psycopg" 29 | copyright = "2020, Daniele Varrazzo and The Psycopg Team" 30 | author = "Daniele Varrazzo" 31 | release = psycopg.__version__ 32 | 33 | 34 | # -- General configuration --------------------------------------------------- 35 | 36 | # Add any Sphinx extension module names here, as strings. They can be 37 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 38 | # ones. 39 | extensions = [ 40 | "sphinx.ext.autodoc", 41 | "sphinx.ext.intersphinx", 42 | "sphinx_autodoc_typehints", 43 | "sql_role", 44 | "ticket_role", 45 | "pg3_docs", 46 | "libpq_docs", 47 | ] 48 | 49 | # Add any paths that contain templates here, relative to this directory. 50 | templates_path = ["_templates"] 51 | 52 | # List of patterns, relative to source directory, that match files and 53 | # directories to ignore when looking for source files. 54 | # This pattern also affects html_static_path and html_extra_path. 55 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", ".venv"] 56 | 57 | 58 | # -- Options for HTML output ------------------------------------------------- 59 | 60 | # The announcement may be in the website but not shipped with the docs 61 | ann_file = docs_dir / "../../templates/docs3-announcement.html" 62 | if ann_file.exists(): 63 | with ann_file.open() as f: 64 | announcement = f.read() 65 | else: 66 | announcement = "" 67 | 68 | html_css_files = ["psycopg.css"] 69 | 70 | # The name of the Pygments (syntax highlighting) style to use. 
71 | # Some that I've check don't suck: 72 | # default lovelace tango algol_nu 73 | # list: from pygments.styles import STYLE_MAP; print(sorted(STYLE_MAP.keys())) 74 | pygments_style = "tango" 75 | 76 | # The theme to use for HTML and HTML Help pages. See the documentation for 77 | # a list of builtin themes. 78 | html_theme = "furo" 79 | html_show_sphinx = True 80 | html_show_sourcelink = False 81 | html_theme_options = { 82 | "announcement": announcement, 83 | "sidebar_hide_name": False, 84 | "light_logo": "psycopg.svg", 85 | "dark_logo": "psycopg.svg", 86 | "light_css_variables": { 87 | "admonition-font-size": "1rem", 88 | }, 89 | } 90 | 91 | # Add any paths that contain custom static files (such as style sheets) here, 92 | # relative to this directory. They are copied after the builtin static files, 93 | # so a file named "default.css" will overwrite the builtin "default.css". 94 | html_static_path = ["_static"] 95 | 96 | # The reST default role (used for this markup: `text`) to use for all documents. 97 | default_role = "obj" 98 | 99 | intersphinx_mapping = { 100 | "py": ("https://docs.python.org/3", None), 101 | "pg2": ("https://www.psycopg.org/docs/", None), 102 | } 103 | 104 | autodoc_member_order = "bysource" 105 | 106 | # PostgreSQL docs version to link libpq functions to 107 | libpq_docs_version = "14" 108 | 109 | # Where to point on :ticket: role 110 | ticket_url = "https://github.com/psycopg/psycopg/issues/%s" 111 | -------------------------------------------------------------------------------- /tests/fix_crdb.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | import pytest 4 | 5 | from .utils import VersionCheck 6 | from psycopg.crdb import CrdbConnection 7 | 8 | 9 | def pytest_configure(config): 10 | # register libpq marker 11 | config.addinivalue_line( 12 | "markers", 13 | "crdb(version_expr, reason=detail): run/skip the test with matching CockroachDB" 14 | " (e.g. 
'>= 21.2.10', '< 22.1', 'skip < 22')", 15 | ) 16 | config.addinivalue_line( 17 | "markers", 18 | "crdb_skip(reason): skip the test for known CockroachDB reasons", 19 | ) 20 | 21 | 22 | def check_crdb_version(got, mark): 23 | if mark.name == "crdb": 24 | assert len(mark.args) <= 1 25 | assert not (set(mark.kwargs) - {"reason"}) 26 | spec = mark.args[0] if mark.args else "only" 27 | reason = mark.kwargs.get("reason") 28 | elif mark.name == "crdb_skip": 29 | assert len(mark.args) == 1 30 | assert not mark.kwargs 31 | reason = mark.args[0] 32 | assert reason in _crdb_reasons, reason 33 | spec = _crdb_reason_version.get(reason, "skip") 34 | else: 35 | assert False, mark.name 36 | 37 | pred = VersionCheck.parse(spec) 38 | pred.whose = "CockroachDB" 39 | 40 | msg = pred.get_skip_message(got) 41 | if not msg: 42 | return None 43 | 44 | reason = crdb_skip_message(reason) 45 | if reason: 46 | msg = f"{msg}: {reason}" 47 | 48 | return msg 49 | 50 | 51 | # Utility functions which can be imported in the test suite 52 | 53 | is_crdb = CrdbConnection.is_crdb 54 | 55 | 56 | def crdb_skip_message(reason: Optional[str]) -> str: 57 | msg = "" 58 | if reason: 59 | msg = reason 60 | if _crdb_reasons.get(reason): 61 | url = ( 62 | "https://github.com/cockroachdb/cockroach/" 63 | f"issues/{_crdb_reasons[reason]}" 64 | ) 65 | msg = f"{msg} ({url})" 66 | 67 | return msg 68 | 69 | 70 | def skip_crdb(*args, reason=None): 71 | return pytest.param(*args, marks=pytest.mark.crdb("skip", reason=reason)) 72 | 73 | 74 | def crdb_encoding(*args): 75 | """Mark tests that fail on CockroachDB because of missing encodings""" 76 | return skip_crdb(*args, reason="encoding") 77 | 78 | 79 | def crdb_time_precision(*args): 80 | """Mark tests that fail on CockroachDB because time doesn't support precision""" 81 | return skip_crdb(*args, reason="time precision") 82 | 83 | 84 | def crdb_scs_off(*args): 85 | return skip_crdb(*args, reason="standard_conforming_strings=off") 86 | 87 | 88 | # mapping from reason 
description to ticket number 89 | _crdb_reasons = { 90 | "2-phase commit": 22329, 91 | "backend pid": 35897, 92 | "batch statements": 44803, 93 | "begin_read_only": 87012, 94 | "binary decimal": 82492, 95 | "cancel": 41335, 96 | "cast adds tz": 51692, 97 | "cidr": 18846, 98 | "composite": 27792, 99 | "copy array": 82792, 100 | "copy canceled": 81559, 101 | "copy": 41608, 102 | "cursor invalid name": 84261, 103 | "cursor with hold": 77101, 104 | "deferrable": 48307, 105 | "do": 17511, 106 | "encoding": 35882, 107 | "geometric types": 21286, 108 | "hstore": 41284, 109 | "inet": 94192, 110 | "infinity date": 41564, 111 | "interval style": 35807, 112 | "json array": 23468, 113 | "large objects": 243, 114 | "negative interval": 81577, 115 | "nested array": 32552, 116 | "no col query": None, 117 | "notify": 41522, 118 | "password_encryption": 42519, 119 | "pg_terminate_backend": 35897, 120 | "range": 41282, 121 | "scroll cursor": 77102, 122 | "server-side cursor": 41412, 123 | "severity_nonlocalized": 81794, 124 | "stored procedure": 1751, 125 | } 126 | 127 | _crdb_reason_version = { 128 | "backend pid": "skip < 22", 129 | "inet": "skip == 22.2.1", 130 | "cancel": "skip < 22", 131 | "server-side cursor": "skip < 22.1.3", 132 | "severity_nonlocalized": "skip < 22.1.3", 133 | } 134 | -------------------------------------------------------------------------------- /psycopg/psycopg/_tpc.py: -------------------------------------------------------------------------------- 1 | """ 2 | psycopg two-phase commit support 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | import re 8 | import datetime as dt 9 | from base64 import b64encode, b64decode 10 | from typing import Optional, Union 11 | from dataclasses import dataclass, replace 12 | 13 | _re_xid = re.compile(r"^(\d+)_([^_]*)_([^_]*)$") 14 | 15 | 16 | @dataclass(frozen=True) 17 | class Xid: 18 | """A two-phase commit transaction identifier. 
19 | 20 | The object can also be unpacked as a 3-item tuple (`format_id`, `gtrid`, 21 | `bqual`). 22 | 23 | """ 24 | 25 | format_id: Optional[int] 26 | gtrid: str 27 | bqual: Optional[str] 28 | prepared: Optional[dt.datetime] = None 29 | owner: Optional[str] = None 30 | database: Optional[str] = None 31 | 32 | @classmethod 33 | def from_string(cls, s: str) -> "Xid": 34 | """Try to parse an XA triple from the string. 35 | 36 | This may fail for several reasons. In such case return an unparsed Xid. 37 | """ 38 | try: 39 | return cls._parse_string(s) 40 | except Exception: 41 | return Xid(None, s, None) 42 | 43 | def __str__(self) -> str: 44 | return self._as_tid() 45 | 46 | def __len__(self) -> int: 47 | return 3 48 | 49 | def __getitem__(self, index: int) -> Union[int, str, None]: 50 | return (self.format_id, self.gtrid, self.bqual)[index] 51 | 52 | @classmethod 53 | def _parse_string(cls, s: str) -> "Xid": 54 | m = _re_xid.match(s) 55 | if not m: 56 | raise ValueError("bad Xid format") 57 | 58 | format_id = int(m.group(1)) 59 | gtrid = b64decode(m.group(2)).decode() 60 | bqual = b64decode(m.group(3)).decode() 61 | return cls.from_parts(format_id, gtrid, bqual) 62 | 63 | @classmethod 64 | def from_parts( 65 | cls, format_id: Optional[int], gtrid: str, bqual: Optional[str] 66 | ) -> "Xid": 67 | if format_id is not None: 68 | if bqual is None: 69 | raise TypeError("if format_id is specified, bqual must be too") 70 | if not 0 <= format_id < 0x80000000: 71 | raise ValueError("format_id must be a non-negative 32-bit integer") 72 | if len(bqual) > 64: 73 | raise ValueError("bqual must be not longer than 64 chars") 74 | if len(gtrid) > 64: 75 | raise ValueError("gtrid must be not longer than 64 chars") 76 | 77 | elif bqual is None: 78 | raise TypeError("if format_id is None, bqual must be None too") 79 | 80 | return Xid(format_id, gtrid, bqual) 81 | 82 | def _as_tid(self) -> str: 83 | """ 84 | Return the PostgreSQL transaction_id for this XA xid. 
85 | 86 | PostgreSQL wants just a string, while the DBAPI supports the XA 87 | standard and thus a triple. We use the same conversion algorithm 88 | implemented by JDBC in order to allow some form of interoperation. 89 | 90 | see also: the pgjdbc implementation 91 | http://cvs.pgfoundry.org/cgi-bin/cvsweb.cgi/jdbc/pgjdbc/org/ 92 | postgresql/xa/RecoveredXid.java?rev=1.2 93 | """ 94 | if self.format_id is None or self.bqual is None: 95 | # Unparsed xid: return the gtrid. 96 | return self.gtrid 97 | 98 | # XA xid: mash together the components. 99 | egtrid = b64encode(self.gtrid.encode()).decode() 100 | ebqual = b64encode(self.bqual.encode()).decode() 101 | 102 | return f"{self.format_id}_{egtrid}_{ebqual}" 103 | 104 | @classmethod 105 | def _get_recover_query(cls) -> str: 106 | return "SELECT gid, prepared, owner, database FROM pg_prepared_xacts" 107 | 108 | @classmethod 109 | def _from_record( 110 | cls, gid: str, prepared: dt.datetime, owner: str, database: str 111 | ) -> "Xid": 112 | xid = Xid.from_string(gid) 113 | return replace(xid, prepared=prepared, owner=owner, database=database) 114 | 115 | 116 | Xid.__module__ = "psycopg" 117 | -------------------------------------------------------------------------------- /tests/pool/test_sched.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from time import time, sleep 3 | from functools import partial 4 | from threading import Thread 5 | 6 | import pytest 7 | 8 | try: 9 | from psycopg_pool.sched import Scheduler 10 | except ImportError: 11 | # Tests should have been skipped if the package is not available 12 | pass 13 | 14 | pytestmark = [pytest.mark.timing] 15 | 16 | 17 | @pytest.mark.slow 18 | def test_sched(): 19 | s = Scheduler() 20 | results = [] 21 | 22 | def worker(i): 23 | results.append((i, time())) 24 | 25 | t0 = time() 26 | s.enter(0.1, partial(worker, 1)) 27 | s.enter(0.4, partial(worker, 3)) 28 | s.enter(0.3, None) 29 | s.enter(0.2, 
partial(worker, 2)) 30 | s.run() 31 | assert len(results) == 2 32 | assert results[0][0] == 1 33 | assert results[0][1] - t0 == pytest.approx(0.1, 0.1) 34 | assert results[1][0] == 2 35 | assert results[1][1] - t0 == pytest.approx(0.2, 0.1) 36 | 37 | 38 | @pytest.mark.slow 39 | def test_sched_thread(): 40 | s = Scheduler() 41 | t = Thread(target=s.run, daemon=True) 42 | t.start() 43 | 44 | results = [] 45 | 46 | def worker(i): 47 | results.append((i, time())) 48 | 49 | t0 = time() 50 | s.enter(0.1, partial(worker, 1)) 51 | s.enter(0.4, partial(worker, 3)) 52 | s.enter(0.3, None) 53 | s.enter(0.2, partial(worker, 2)) 54 | 55 | t.join() 56 | t1 = time() 57 | assert t1 - t0 == pytest.approx(0.3, 0.2) 58 | 59 | assert len(results) == 2 60 | assert results[0][0] == 1 61 | assert results[0][1] - t0 == pytest.approx(0.1, 0.2) 62 | assert results[1][0] == 2 63 | assert results[1][1] - t0 == pytest.approx(0.2, 0.2) 64 | 65 | 66 | @pytest.mark.slow 67 | def test_sched_error(caplog): 68 | caplog.set_level(logging.WARNING, logger="psycopg") 69 | s = Scheduler() 70 | t = Thread(target=s.run, daemon=True) 71 | t.start() 72 | 73 | results = [] 74 | 75 | def worker(i): 76 | results.append((i, time())) 77 | 78 | def error(): 79 | 1 / 0 80 | 81 | t0 = time() 82 | s.enter(0.1, partial(worker, 1)) 83 | s.enter(0.4, None) 84 | s.enter(0.3, partial(worker, 2)) 85 | s.enter(0.2, error) 86 | 87 | t.join() 88 | t1 = time() 89 | assert t1 - t0 == pytest.approx(0.4, 0.1) 90 | 91 | assert len(results) == 2 92 | assert results[0][0] == 1 93 | assert results[0][1] - t0 == pytest.approx(0.1, 0.1) 94 | assert results[1][0] == 2 95 | assert results[1][1] - t0 == pytest.approx(0.3, 0.1) 96 | 97 | assert len(caplog.records) == 1 98 | assert "ZeroDivisionError" in caplog.records[0].message 99 | 100 | 101 | @pytest.mark.slow 102 | def test_empty_queue_timeout(): 103 | s = Scheduler() 104 | 105 | t0 = time() 106 | times = [] 107 | 108 | wait_orig = s._event.wait 109 | 110 | def 
wait_logging(timeout=None): 111 | rv = wait_orig(timeout) 112 | times.append(time() - t0) 113 | return rv 114 | 115 | setattr(s._event, "wait", wait_logging) 116 | s.EMPTY_QUEUE_TIMEOUT = 0.2 117 | 118 | t = Thread(target=s.run) 119 | t.start() 120 | sleep(0.5) 121 | s.enter(0.5, None) 122 | t.join() 123 | times.append(time() - t0) 124 | for got, want in zip(times, [0.2, 0.4, 0.5, 1.0]): 125 | assert got == pytest.approx(want, 0.2), times 126 | 127 | 128 | @pytest.mark.slow 129 | def test_first_task_rescheduling(): 130 | s = Scheduler() 131 | 132 | t0 = time() 133 | times = [] 134 | 135 | wait_orig = s._event.wait 136 | 137 | def wait_logging(timeout=None): 138 | rv = wait_orig(timeout) 139 | times.append(time() - t0) 140 | return rv 141 | 142 | setattr(s._event, "wait", wait_logging) 143 | s.EMPTY_QUEUE_TIMEOUT = 0.1 144 | 145 | s.enter(0.4, lambda: None) 146 | t = Thread(target=s.run) 147 | t.start() 148 | s.enter(0.6, None) # this task doesn't trigger a reschedule 149 | sleep(0.1) 150 | s.enter(0.1, lambda: None) # this triggers a reschedule 151 | t.join() 152 | times.append(time() - t0) 153 | for got, want in zip(times, [0.1, 0.2, 0.4, 0.6, 0.6]): 154 | assert got == pytest.approx(want, 0.2), times 155 | -------------------------------------------------------------------------------- /tests/fix_proxy.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import socket 4 | import logging 5 | import subprocess as sp 6 | from shutil import which 7 | 8 | import pytest 9 | 10 | import psycopg 11 | from psycopg import conninfo 12 | 13 | 14 | def pytest_collection_modifyitems(items): 15 | for item in items: 16 | # TODO: there is a race condition on macOS and Windows in the CI: 17 | # listen returns before really listening and tests based on 'deaf_port' 18 | # fail 50% of the times. Just add the 'proxy' mark on these tests 19 | # because they are already skipped in the CI. 
20 | if "proxy" in item.fixturenames or "deaf_port" in item.fixturenames: 21 | item.add_marker(pytest.mark.proxy) 22 | 23 | 24 | def pytest_configure(config): 25 | config.addinivalue_line( 26 | "markers", 27 | "proxy: the test uses pproxy (the marker is set automatically" 28 | " on tests using the fixture)", 29 | ) 30 | 31 | 32 | @pytest.fixture 33 | def proxy(dsn): 34 | """Return a proxy to the --test-dsn database""" 35 | p = Proxy(dsn) 36 | yield p 37 | p.stop() 38 | 39 | 40 | @pytest.fixture 41 | def deaf_port(dsn): 42 | """Return a port number with a socket open but not answering""" 43 | with socket.socket(socket.AF_INET) as s: 44 | s.bind(("", 0)) 45 | port = s.getsockname()[1] 46 | s.listen(0) 47 | yield port 48 | 49 | 50 | class Proxy: 51 | """ 52 | Proxy a Postgres service for testing purpose. 53 | 54 | Allow to lose connectivity and restart it using stop/start. 55 | """ 56 | 57 | def __init__(self, server_dsn): 58 | cdict = conninfo.conninfo_to_dict(server_dsn) 59 | 60 | # Get server params 61 | host = cdict.get("host") or os.environ.get("PGHOST") 62 | self.server_host = host if host and not host.startswith("/") else "localhost" 63 | self.server_port = cdict.get("port", "5432") 64 | 65 | # Get client params 66 | self.client_host = "localhost" 67 | self.client_port = self._get_random_port() 68 | 69 | # Make a connection string to the proxy 70 | cdict["host"] = self.client_host 71 | cdict["port"] = self.client_port 72 | cdict["sslmode"] = "disable" # not supported by the proxy 73 | self.client_dsn = conninfo.make_conninfo(**cdict) 74 | 75 | # The running proxy process 76 | self.proc = None 77 | 78 | def start(self): 79 | if self.proc: 80 | logging.info("proxy already started") 81 | return 82 | 83 | logging.info("starting proxy") 84 | pproxy = which("pproxy") 85 | if not pproxy: 86 | raise ValueError("pproxy program not found") 87 | cmdline = [pproxy, "--reuse"] 88 | cmdline.extend(["-l", f"tunnel://:{self.client_port}"]) 89 | cmdline.extend(["-r", 
f"tunnel://{self.server_host}:{self.server_port}"]) 90 | 91 | self.proc = sp.Popen(cmdline, stdout=sp.DEVNULL) 92 | logging.info("proxy started") 93 | self._wait_listen() 94 | 95 | # verify that the proxy works 96 | try: 97 | with psycopg.connect(self.client_dsn): 98 | pass 99 | except Exception as e: 100 | pytest.fail(f"failed to create a working proxy: {e}") 101 | 102 | def stop(self): 103 | if not self.proc: 104 | return 105 | 106 | logging.info("stopping proxy") 107 | self.proc.terminate() 108 | self.proc.wait() 109 | logging.info("proxy stopped") 110 | self.proc = None 111 | 112 | @classmethod 113 | def _get_random_port(cls): 114 | with socket.socket() as s: 115 | s.bind(("", 0)) 116 | return s.getsockname()[1] 117 | 118 | def _wait_listen(self): 119 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: 120 | for i in range(20): 121 | if 0 == sock.connect_ex((self.client_host, self.client_port)): 122 | break 123 | time.sleep(0.1) 124 | else: 125 | raise ValueError("the proxy didn't start listening in time") 126 | 127 | logging.info("proxy listening") 128 | -------------------------------------------------------------------------------- /BACKERS.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # You can find our sponsors at https://www.psycopg.org/sponsors/ Thank you! 3 | 4 | - username: postgrespro 5 | tier: top 6 | avatar: https://avatars.githubusercontent.com/u/12005770?v=4 7 | name: Postgres Professional 8 | website: https://postgrespro.com/ 9 | 10 | - username: commandprompt 11 | by: jdatcmd 12 | tier: top 13 | avatar: https://avatars.githubusercontent.com/u/339156?v=4 14 | name: Command Prompt, Inc. 
15 | website: https://www.commandprompt.com 16 | 17 | - username: bitdotioinc 18 | tier: top 19 | avatar: https://avatars.githubusercontent.com/u/56135630?v=4 20 | name: bit.io 21 | website: https://bit.io/?utm_campaign=sponsorship&utm_source=psycopg2&utm_medium=web 22 | keep_website: true 23 | 24 | 25 | - username: yougov 26 | tier: mid 27 | avatar: https://avatars.githubusercontent.com/u/378494?v=4 28 | name: YouGov 29 | website: https://www.yougov.com 30 | 31 | - username: phenopolis 32 | by: pontikos 33 | tier: mid 34 | avatar: https://avatars.githubusercontent.com/u/20042742?v=4 35 | name: Phenopolis 36 | website: http://www.phenopolis.co.uk 37 | 38 | - username: MaterializeInc 39 | tier: mid 40 | avatar: https://avatars.githubusercontent.com/u/47674186?v=4 41 | name: Materialize, Inc. 42 | website: http://materialize.com 43 | 44 | - username: getsentry 45 | tier: mid 46 | avatar: https://avatars.githubusercontent.com/u/1396951?v=4 47 | name: Sentry 48 | website: https://sentry.io 49 | 50 | - username: 20tab 51 | tier: mid 52 | avatar: https://avatars.githubusercontent.com/u/1843159?v=4 53 | name: 20tab srl 54 | website: http://www.20tab.com 55 | 56 | - username: genropy 57 | by: gporcari 58 | tier: mid 59 | avatar: https://avatars.githubusercontent.com/u/7373189?v=4 60 | name: genropy 61 | website: http://www.genropy.org 62 | 63 | - username: svennek 64 | tier: mid 65 | avatar: https://avatars.githubusercontent.com/u/37837?v=4 66 | name: Svenne Krap 67 | website: http://www.svenne.dk 68 | 69 | - username: mailupinc 70 | tier: mid 71 | avatar: https://avatars.githubusercontent.com/u/72260631?v=4 72 | name: BEE 73 | website: https://beefree.io 74 | 75 | 76 | - username: taifu 77 | avatar: https://avatars.githubusercontent.com/u/115712?v=4 78 | name: Marco Beri 79 | website: http:/beri.it 80 | 81 | - username: la-mar 82 | avatar: https://avatars.githubusercontent.com/u/16618300?v=4 83 | name: Brock Friedrich 84 | 85 | - username: xarg 86 | avatar: 
https://avatars.githubusercontent.com/u/94721?v=4 87 | name: Alex Plugaru 88 | website: https://plugaru.org 89 | 90 | - username: dalibo 91 | avatar: https://avatars.githubusercontent.com/u/182275?v=4 92 | name: Dalibo 93 | website: http://www.dalibo.com 94 | 95 | - username: rafmagns-skepa-dreag 96 | avatar: https://avatars.githubusercontent.com/u/7447491?v=4 97 | name: Richard H 98 | 99 | - username: rustprooflabs 100 | avatar: https://avatars.githubusercontent.com/u/3085224?v=4 101 | name: Ryan Lambert 102 | website: https://www.rustprooflabs.com 103 | 104 | - username: logilab 105 | avatar: https://avatars.githubusercontent.com/u/446566?v=4 106 | name: Logilab 107 | website: http://www.logilab.org 108 | 109 | - username: asqui 110 | avatar: https://avatars.githubusercontent.com/u/174182?v=4 111 | name: Daniel Fortunov 112 | 113 | - username: iqbalabd 114 | avatar: https://avatars.githubusercontent.com/u/14254614?v=4 115 | name: Iqbal Abdullah 116 | website: https://info.xoxzo.com/ 117 | 118 | - username: healthchecks 119 | avatar: https://avatars.githubusercontent.com/u/13053880?v=4 120 | name: Healthchecks 121 | website: https://healthchecks.io 122 | 123 | - username: c-rindi 124 | avatar: https://avatars.githubusercontent.com/u/7826876?v=4 125 | name: C~+ 126 | 127 | - username: Intevation 128 | by: bernhardreiter 129 | avatar: https://avatars.githubusercontent.com/u/2050405?v=4 130 | name: Intevation 131 | website: https://www.intevation.de/ 132 | 133 | - username: abegerho 134 | avatar: https://avatars.githubusercontent.com/u/5734243?v=4 135 | name: Abhishek Begerhotta 136 | 137 | - username: ferpection 138 | avatar: https://avatars.githubusercontent.com/u/6997008?v=4 139 | name: Ferpection 140 | website: https://ferpection.com 141 | -------------------------------------------------------------------------------- /tests/fix_pq.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import ctypes 
4 | from typing import Iterator, List, NamedTuple 5 | from tempfile import TemporaryFile 6 | 7 | import pytest 8 | 9 | from .utils import check_libpq_version 10 | 11 | try: 12 | from psycopg import pq 13 | except ImportError: 14 | pq = None # type: ignore 15 | 16 | 17 | def pytest_report_header(config): 18 | try: 19 | from psycopg import pq 20 | except ImportError: 21 | return [] 22 | 23 | return [ 24 | f"libpq wrapper implementation: {pq.__impl__}", 25 | f"libpq used: {pq.version()}", 26 | f"libpq compiled: {pq.__build_version__}", 27 | ] 28 | 29 | 30 | def pytest_configure(config): 31 | # register libpq marker 32 | config.addinivalue_line( 33 | "markers", 34 | "libpq(version_expr): run the test only with matching libpq" 35 | " (e.g. '>= 10', '< 9.6')", 36 | ) 37 | 38 | 39 | def pytest_runtest_setup(item): 40 | for m in item.iter_markers(name="libpq"): 41 | assert len(m.args) == 1 42 | msg = check_libpq_version(pq.version(), m.args[0]) 43 | if msg: 44 | pytest.skip(msg) 45 | 46 | 47 | @pytest.fixture 48 | def libpq(): 49 | """Return a ctypes wrapper to access the libpq.""" 50 | try: 51 | from psycopg.pq.misc import find_libpq_full_path 52 | 53 | # Not available when testing the binary package 54 | libname = find_libpq_full_path() 55 | assert libname, "libpq libname not found" 56 | return ctypes.pydll.LoadLibrary(libname) 57 | except Exception as e: 58 | if pq.__impl__ == "binary": 59 | pytest.skip(f"can't load libpq for testing: {e}") 60 | else: 61 | raise 62 | 63 | 64 | @pytest.fixture 65 | def setpgenv(monkeypatch): 66 | """Replace the PG* env vars with the vars provided.""" 67 | 68 | def setpgenv_(env): 69 | ks = [k for k in os.environ if k.startswith("PG")] 70 | for k in ks: 71 | monkeypatch.delenv(k) 72 | 73 | if env: 74 | for k, v in env.items(): 75 | monkeypatch.setenv(k, v) 76 | 77 | return setpgenv_ 78 | 79 | 80 | @pytest.fixture 81 | def trace(libpq): 82 | pqver = pq.__build_version__ 83 | if pqver < 140000: 84 | pytest.skip(f"trace not available on 
libpq {pqver}") 85 | if sys.platform != "linux": 86 | pytest.skip(f"trace not available on {sys.platform}") 87 | 88 | yield Tracer() 89 | 90 | 91 | class Tracer: 92 | def trace(self, conn): 93 | pgconn: "pq.abc.PGconn" 94 | 95 | if hasattr(conn, "exec_"): 96 | pgconn = conn 97 | elif hasattr(conn, "cursor"): 98 | pgconn = conn.pgconn 99 | else: 100 | raise Exception() 101 | 102 | return TraceLog(pgconn) 103 | 104 | 105 | class TraceLog: 106 | def __init__(self, pgconn: "pq.abc.PGconn"): 107 | self.pgconn = pgconn 108 | self.tempfile = TemporaryFile(buffering=0) 109 | pgconn.trace(self.tempfile.fileno()) 110 | pgconn.set_trace_flags(pq.Trace.SUPPRESS_TIMESTAMPS) 111 | 112 | def __del__(self): 113 | if self.pgconn.status == pq.ConnStatus.OK: 114 | self.pgconn.untrace() 115 | self.tempfile.close() 116 | 117 | def __iter__(self) -> "Iterator[TraceEntry]": 118 | self.tempfile.seek(0) 119 | data = self.tempfile.read() 120 | for entry in self._parse_entries(data): 121 | yield entry 122 | 123 | def _parse_entries(self, data: bytes) -> "Iterator[TraceEntry]": 124 | for line in data.splitlines(): 125 | direction, length, type, *content = line.split(b"\t") 126 | yield TraceEntry( 127 | direction=direction.decode(), 128 | length=int(length.decode()), 129 | type=type.decode(), 130 | # Note: the items encoding is not very solid: no escaped 131 | # backslash, no escaped quotes. 132 | # At the moment we don't need a proper parser. 
133 | content=[content[0]] if content else [], 134 | ) 135 | 136 | 137 | class TraceEntry(NamedTuple): 138 | direction: str 139 | length: int 140 | type: str 141 | content: List[bytes] 142 | -------------------------------------------------------------------------------- /psycopg/psycopg/types/hstore.py: -------------------------------------------------------------------------------- 1 | """ 2 | Dict to hstore adaptation 3 | """ 4 | 5 | # Copyright (C) 2021 The Psycopg Team 6 | 7 | import re 8 | from typing import Dict, List, Optional 9 | from typing_extensions import TypeAlias 10 | 11 | from .. import errors as e 12 | from .. import postgres 13 | from ..abc import Buffer, AdaptContext 14 | from ..adapt import PyFormat, RecursiveDumper, RecursiveLoader 15 | from ..postgres import TEXT_OID 16 | from .._typeinfo import TypeInfo 17 | 18 | _re_escape = re.compile(r'(["\\])') 19 | _re_unescape = re.compile(r"\\(.)") 20 | 21 | _re_hstore = re.compile( 22 | r""" 23 | # hstore key: 24 | # a string of normal or escaped chars 25 | "((?: [^"\\] | \\. )*)" 26 | \s*=>\s* # hstore value 27 | (?: 28 | NULL # the value can be null - not caught 29 | # or a quoted string like the key 30 | | "((?: [^"\\] | \\. )*)" 31 | ) 32 | (?:\s*,\s*|$) # pairs separated by comma or end of string. 
33 | """, 34 | re.VERBOSE, 35 | ) 36 | 37 | 38 | Hstore: TypeAlias = Dict[str, Optional[str]] 39 | 40 | 41 | class BaseHstoreDumper(RecursiveDumper): 42 | def dump(self, obj: Hstore) -> Buffer: 43 | if not obj: 44 | return b"" 45 | 46 | tokens: List[str] = [] 47 | 48 | def add_token(s: str) -> None: 49 | tokens.append('"') 50 | tokens.append(_re_escape.sub(r"\\\1", s)) 51 | tokens.append('"') 52 | 53 | for k, v in obj.items(): 54 | 55 | if not isinstance(k, str): 56 | raise e.DataError("hstore keys can only be strings") 57 | add_token(k) 58 | 59 | tokens.append("=>") 60 | 61 | if v is None: 62 | tokens.append("NULL") 63 | elif not isinstance(v, str): 64 | raise e.DataError("hstore keys can only be strings") 65 | else: 66 | add_token(v) 67 | 68 | tokens.append(",") 69 | 70 | del tokens[-1] 71 | data = "".join(tokens) 72 | dumper = self._tx.get_dumper(data, PyFormat.TEXT) 73 | return dumper.dump(data) 74 | 75 | 76 | class HstoreLoader(RecursiveLoader): 77 | def load(self, data: Buffer) -> Hstore: 78 | loader = self._tx.get_loader(TEXT_OID, self.format) 79 | s: str = loader.load(data) 80 | 81 | rv: Hstore = {} 82 | start = 0 83 | for m in _re_hstore.finditer(s): 84 | if m is None or m.start() != start: 85 | raise e.DataError(f"error parsing hstore pair at char {start}") 86 | k = _re_unescape.sub(r"\1", m.group(1)) 87 | v = m.group(2) 88 | if v is not None: 89 | v = _re_unescape.sub(r"\1", v) 90 | 91 | rv[k] = v 92 | start = m.end() 93 | 94 | if start < len(s): 95 | raise e.DataError(f"error parsing hstore: unparsed data after char {start}") 96 | 97 | return rv 98 | 99 | 100 | def register_hstore(info: TypeInfo, context: Optional[AdaptContext] = None) -> None: 101 | """Register the adapters to load and dump hstore. 102 | 103 | :param info: The object with the information about the hstore type. 104 | :param context: The context where to register the adapters. If `!None`, 105 | register it globally. 106 | 107 | .. 
note:: 108 | 109 | Registering the adapters doesn't affect objects already created, even 110 | if they are children of the registered context. For instance, 111 | registering the adapter globally doesn't affect already existing 112 | connections. 113 | """ 114 | # A friendly error warning instead of an AttributeError in case fetch() 115 | # failed and it wasn't noticed. 116 | if not info: 117 | raise TypeError("no info passed. Is the 'hstore' extension loaded?") 118 | 119 | # Register arrays and type info 120 | info.register(context) 121 | 122 | adapters = context.adapters if context else postgres.adapters 123 | 124 | # Generate and register a customized text dumper 125 | class HstoreDumper(BaseHstoreDumper): 126 | oid = info.oid 127 | 128 | adapters.register_dumper(dict, HstoreDumper) 129 | 130 | # register the text loader on the oid 131 | adapters.register_loader(info.oid, HstoreLoader) 132 | --------------------------------------------------------------------------------