├── .flake8 ├── .github ├── ISSUE_TEMPLATE.md ├── RELEASING.rst ├── release_log.py └── workflows │ ├── install-krb5.sh │ ├── install-postgres.sh │ ├── release.yml │ └── tests.yml ├── .gitignore ├── .gitmodules ├── AUTHORS ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.rst ├── asyncpg ├── .gitignore ├── __init__.py ├── _asyncio_compat.py ├── _testbase │ ├── __init__.py │ └── fuzzer.py ├── _version.py ├── cluster.py ├── compat.py ├── connect_utils.py ├── connection.py ├── connresource.py ├── cursor.py ├── exceptions │ ├── __init__.py │ └── _base.py ├── introspection.py ├── pool.py ├── prepared_stmt.py ├── protocol │ ├── .gitignore │ ├── __init__.py │ ├── codecs │ │ ├── __init__.py │ │ ├── array.pyx │ │ ├── base.pxd │ │ ├── base.pyx │ │ ├── pgproto.pyx │ │ ├── range.pyx │ │ ├── record.pyx │ │ └── textutils.pyx │ ├── consts.pxi │ ├── coreproto.pxd │ ├── coreproto.pyx │ ├── cpythonx.pxd │ ├── encodings.pyx │ ├── pgtypes.pxi │ ├── prepared_stmt.pxd │ ├── prepared_stmt.pyx │ ├── protocol.pxd │ ├── protocol.pyi │ ├── protocol.pyx │ ├── record │ │ ├── __init__.pxd │ │ ├── recordobj.c │ │ └── recordobj.h │ ├── scram.pxd │ ├── scram.pyx │ ├── settings.pxd │ └── settings.pyx ├── serverversion.py ├── transaction.py ├── types.py └── utils.py ├── docs ├── .gitignore ├── Makefile ├── _static │ └── theme_overrides.css ├── api │ └── index.rst ├── conf.py ├── faq.rst ├── index.rst ├── installation.rst ├── requirements.txt └── usage.rst ├── performance.png ├── pyproject.toml ├── setup.py ├── tests ├── __init__.py ├── certs │ ├── ca.cert.pem │ ├── ca.crl.pem │ ├── ca.key.pem │ ├── client.cert.pem │ ├── client.csr.pem │ ├── client.key.pem │ ├── client.key.protected.pem │ ├── client_ca.cert.pem │ ├── client_ca.cert.srl │ ├── client_ca.key.pem │ ├── gen.py │ ├── server.cert.pem │ ├── server.crl.pem │ └── server.key.pem ├── test__environment.py ├── test__sourcecode.py ├── test_adversity.py ├── test_cache_invalidation.py ├── test_cancellation.py ├── test_codecs.py ├── test_connect.py 
├── test_copy.py ├── test_cursor.py ├── test_exceptions.py ├── test_execute.py ├── test_introspection.py ├── test_listeners.py ├── test_logging.py ├── test_pool.py ├── test_prepare.py ├── test_record.py ├── test_test.py ├── test_timeout.py ├── test_transaction.py ├── test_types.py └── test_utils.py └── tools ├── generate_exceptions.py └── generate_type_map.py /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | select = C90,E,F,W,Y0 3 | ignore = E402,E731,W503,W504,E252 4 | exclude = .git,__pycache__,build,dist,.eggs,.github,.local,.venv*,.tox 5 | per-file-ignores = *.pyi: F401,F403,F405,F811,E127,E128,E203,E266,E301,E302,E305,E501,E701,E704,E741,B303,W503,W504 6 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 12 | 13 | * **asyncpg version**: 14 | * **PostgreSQL version**: 15 | * **Do you use a PostgreSQL SaaS? If so, which? Can you reproduce 16 | the issue with a local PostgreSQL install?**: 17 | * **Python version**: 18 | * **Platform**: 19 | * **Do you use pgbouncer?**: 20 | * **Did you install asyncpg with pip?**: 21 | * **If you built asyncpg locally, which version of Cython did you use?**: 22 | * **Can the issue be reproduced under both asyncio and 23 | [uvloop](https://github.com/magicstack/uvloop)?**: 24 | 25 | 26 | -------------------------------------------------------------------------------- /.github/RELEASING.rst: -------------------------------------------------------------------------------- 1 | Releasing asyncpg 2 | ================= 3 | 4 | When making an asyncpg release follow the below checklist. 5 | 6 | 1. Remove the ``.dev0`` suffix from ``__version__`` in ``asyncpg/__init__.py``. 7 | 8 | 2. Make a release commit: 9 | 10 | .. 
code-block:: shell 11 | 12 | $ git commit -a -m "asyncpg vX.Y.0" 13 | 14 | Here, X.Y.0 is the ``__version__`` in ``asyncpg/__init__.py``. 15 | 16 | 3. Force push into the "releases" branch on Github: 17 | 18 | .. code-block:: shell 19 | 20 | $ git push --force origin master:releases 21 | 22 | 4. Wait for CI to make the release build. If there are errors, 23 | investigate, fix and repeat steps 2 through 4. 24 | 25 | 5. Prepare the release changelog by cleaning and categorizing the output of 26 | ``.github/release_log.py``. Look at previous releases for examples 27 | of changelog formatting: 28 | 29 | .. code-block:: shell 30 | 31 | $ .github/release_log.py 32 | 33 | 6. Make an annotated, signed git tag and use the changelog as the tag 34 | annotation: 35 | 36 | .. code-block:: shell 37 | 38 | $ git tag -s vX.Y.0 39 | 40 | 41 | 7. Push the release commit and the new tag to master on Github: 42 | 43 | .. code-block:: shell 44 | 45 | $ git push --follow-tags 46 | 47 | 8. Wait for CI to publish the build to PyPI. 48 | 49 | 9. Edit the release on Github and paste the same content you used for 50 | the tag annotation (Github treats tag annotations as plain text, 51 | rather than Markdown.) 52 | 53 | 10. Open master for development by bumping the minor component of 54 | ``__version__`` in ``asyncpg/__init__.py`` and appending the ``.dev0`` 55 | suffix. 
56 | -------------------------------------------------------------------------------- /.github/release_log.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # Copyright (C) 2016-present the asyncpg authors and contributors 4 | # 5 | # 6 | # This module is part of asyncpg and is released under 7 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | 10 | import json 11 | import requests 12 | import re 13 | import sys 14 | 15 | 16 | BASE_URL = 'https://api.github.com/repos/magicstack/asyncpg/compare' 17 | 18 | 19 | def main(): 20 | if len(sys.argv) < 2: 21 | print('pass a sha1 hash as a first argument') 22 | sys.exit(1) 23 | 24 | from_hash = sys.argv[1] 25 | if len(sys.argv) > 2: 26 | to_hash = sys.argv[2] 27 | 28 | r = requests.get(f'{BASE_URL}/{from_hash}...{to_hash}') 29 | data = json.loads(r.text) 30 | 31 | for commit in data['commits']: 32 | message = commit['commit']['message'] 33 | first_line = message.partition('\n\n')[0] 34 | if commit.get('author'): 35 | username = '@{}'.format(commit['author']['login']) 36 | else: 37 | username = commit['commit']['author']['name'] 38 | sha = commit["sha"][:8] 39 | 40 | m = re.search(r'\#(?P\d+)\b', message) 41 | if m: 42 | issue_num = m.group('num') 43 | else: 44 | issue_num = None 45 | 46 | print(f'* {first_line}') 47 | print(f' (by {username} in {sha}', end='') 48 | print(')') 49 | print() 50 | 51 | 52 | if __name__ == '__main__': 53 | main() 54 | -------------------------------------------------------------------------------- /.github/workflows/install-krb5.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -Eexuo pipefail 4 | shopt -s nullglob 5 | 6 | if [[ $OSTYPE == linux* ]]; then 7 | if [ "$(id -u)" = "0" ]; then 8 | SUDO= 9 | else 10 | SUDO=sudo 11 | fi 12 | 13 | if [ -e /etc/os-release ]; then 14 | source /etc/os-release 15 | elif [ -e /etc/centos-release ]; 
then 16 | ID="centos" 17 | VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) 18 | else 19 | echo "install-krb5.sh: cannot determine which Linux distro this is" >&2 20 | exit 1 21 | fi 22 | 23 | if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then 24 | export DEBIAN_FRONTEND=noninteractive 25 | 26 | $SUDO apt-get update 27 | $SUDO apt-get install -y --no-install-recommends \ 28 | libkrb5-dev krb5-user krb5-kdc krb5-admin-server 29 | elif [ "${ID}" = "almalinux" ]; then 30 | $SUDO dnf install -y krb5-server krb5-workstation krb5-libs krb5-devel 31 | elif [ "${ID}" = "centos" ]; then 32 | $SUDO yum install -y krb5-server krb5-workstation krb5-libs krb5-devel 33 | elif [ "${ID}" = "alpine" ]; then 34 | $SUDO apk add krb5 krb5-server krb5-dev 35 | else 36 | echo "install-krb5.sh: Unsupported linux distro: ${distro}" >&2 37 | exit 1 38 | fi 39 | else 40 | echo "install-krb5.sh: unsupported OS: ${OSTYPE}" >&2 41 | exit 1 42 | fi 43 | -------------------------------------------------------------------------------- /.github/workflows/install-postgres.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -Eexuo pipefail 4 | shopt -s nullglob 5 | 6 | if [[ $OSTYPE == linux* ]]; then 7 | PGVERSION=${PGVERSION:-12} 8 | 9 | if [ -e /etc/os-release ]; then 10 | source /etc/os-release 11 | elif [ -e /etc/centos-release ]; then 12 | ID="centos" 13 | VERSION_ID=$(cat /etc/centos-release | cut -f3 -d' ' | cut -f1 -d.) 
14 | else 15 | echo "install-postgres.sh: cannot determine which Linux distro this is" >&2 16 | exit 1 17 | fi 18 | 19 | if [ "${ID}" = "debian" -o "${ID}" = "ubuntu" ]; then 20 | export DEBIAN_FRONTEND=noninteractive 21 | 22 | apt-get install -y --no-install-recommends curl gnupg ca-certificates 23 | curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - 24 | mkdir -p /etc/apt/sources.list.d/ 25 | echo "deb https://apt.postgresql.org/pub/repos/apt/ ${VERSION_CODENAME}-pgdg main" \ 26 | >> /etc/apt/sources.list.d/pgdg.list 27 | apt-get update 28 | apt-get install -y --no-install-recommends \ 29 | "postgresql-${PGVERSION}" \ 30 | "postgresql-contrib-${PGVERSION}" 31 | elif [ "${ID}" = "almalinux" ]; then 32 | yum install -y \ 33 | "postgresql-server" \ 34 | "postgresql-devel" \ 35 | "postgresql-contrib" 36 | elif [ "${ID}" = "centos" ]; then 37 | el="EL-${VERSION_ID%.*}-$(arch)" 38 | baseurl="https://download.postgresql.org/pub/repos/yum/reporpms" 39 | yum install -y "${baseurl}/${el}/pgdg-redhat-repo-latest.noarch.rpm" 40 | if [ ${VERSION_ID%.*} -ge 8 ]; then 41 | dnf -qy module disable postgresql 42 | fi 43 | yum install -y \ 44 | "postgresql${PGVERSION}-server" \ 45 | "postgresql${PGVERSION}-contrib" 46 | ln -s "/usr/pgsql-${PGVERSION}/bin/pg_config" "/usr/local/bin/pg_config" 47 | elif [ "${ID}" = "alpine" ]; then 48 | apk add shadow postgresql postgresql-dev postgresql-contrib 49 | else 50 | echo "install-postgres.sh: unsupported Linux distro: ${distro}" >&2 51 | exit 1 52 | fi 53 | 54 | useradd -m -s /bin/bash apgtest 55 | 56 | elif [[ $OSTYPE == darwin* ]]; then 57 | brew install postgresql 58 | 59 | else 60 | echo "install-postgres.sh: unsupported OS: ${OSTYPE}" >&2 61 | exit 1 62 | fi 63 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | pull_request: 5 | branches: 
6 | - "master" 7 | - "ci" 8 | - "[0-9]+.[0-9x]+*" 9 | paths: 10 | - "asyncpg/_version.py" 11 | 12 | jobs: 13 | validate-release-request: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Validate release PR 17 | uses: edgedb/action-release/validate-pr@master 18 | id: checkver 19 | with: 20 | require_team: Release Managers 21 | require_approval: no 22 | github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} 23 | version_file: asyncpg/_version.py 24 | version_line_pattern: | 25 | __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) 26 | 27 | - name: Stop if not approved 28 | if: steps.checkver.outputs.approved != 'true' 29 | run: | 30 | echo ::error::PR is not approved yet. 31 | exit 1 32 | 33 | - name: Store release version for later use 34 | env: 35 | VERSION: ${{ steps.checkver.outputs.version }} 36 | run: | 37 | mkdir -p dist/ 38 | echo "${VERSION}" > dist/VERSION 39 | 40 | - uses: actions/upload-artifact@v4 41 | with: 42 | name: dist-version 43 | path: dist/VERSION 44 | 45 | build-sdist: 46 | needs: validate-release-request 47 | runs-on: ubuntu-latest 48 | 49 | env: 50 | PIP_DISABLE_PIP_VERSION_CHECK: 1 51 | 52 | steps: 53 | - uses: actions/checkout@v4 54 | with: 55 | fetch-depth: 50 56 | submodules: true 57 | 58 | - name: Set up Python 59 | uses: actions/setup-python@v5 60 | with: 61 | python-version: "3.x" 62 | 63 | - name: Build source distribution 64 | run: | 65 | pip install -U setuptools wheel pip 66 | python setup.py sdist 67 | 68 | - uses: actions/upload-artifact@v4 69 | with: 70 | name: dist-sdist 71 | path: dist/*.tar.* 72 | 73 | build-wheels-matrix: 74 | needs: validate-release-request 75 | runs-on: ubuntu-latest 76 | outputs: 77 | include: ${{ steps.set-matrix.outputs.include }} 78 | steps: 79 | - uses: actions/checkout@v4 80 | - uses: actions/setup-python@v5 81 | with: 82 | python-version: "3.x" 83 | - run: pip install cibuildwheel==2.21.3 84 | - id: set-matrix 85 | run: | 86 | MATRIX_INCLUDE=$( 87 | { 88 | cibuildwheel 
--print-build-identifiers --platform linux --arch x86_64,aarch64 | grep cp | jq -nRc '{"only": inputs, "os": "ubuntu-latest"}' \ 89 | && cibuildwheel --print-build-identifiers --platform macos --arch x86_64,arm64 | grep cp | jq -nRc '{"only": inputs, "os": "macos-latest"}' \ 90 | && cibuildwheel --print-build-identifiers --platform windows --arch x86,AMD64 | grep cp | jq -nRc '{"only": inputs, "os": "windows-latest"}' 91 | } | jq -sc 92 | ) 93 | echo "include=$MATRIX_INCLUDE" >> $GITHUB_OUTPUT 94 | 95 | build-wheels: 96 | needs: build-wheels-matrix 97 | runs-on: ${{ matrix.os }} 98 | name: Build ${{ matrix.only }} 99 | 100 | strategy: 101 | fail-fast: false 102 | matrix: 103 | include: ${{ fromJson(needs.build-wheels-matrix.outputs.include) }} 104 | 105 | defaults: 106 | run: 107 | shell: bash 108 | 109 | env: 110 | PIP_DISABLE_PIP_VERSION_CHECK: 1 111 | 112 | steps: 113 | - uses: actions/checkout@v4 114 | with: 115 | fetch-depth: 50 116 | submodules: true 117 | 118 | - name: Set up QEMU 119 | if: runner.os == 'Linux' 120 | uses: docker/setup-qemu-action@v2 121 | 122 | - uses: pypa/cibuildwheel@7940a4c0e76eb2030e473a5f864f291f63ee879b # v2.21.3 123 | with: 124 | only: ${{ matrix.only }} 125 | env: 126 | CIBW_BUILD_VERBOSITY: 1 127 | 128 | - uses: actions/upload-artifact@v4 129 | with: 130 | name: dist-wheels-${{ matrix.only }} 131 | path: wheelhouse/*.whl 132 | 133 | merge-artifacts: 134 | runs-on: ubuntu-latest 135 | needs: [build-sdist, build-wheels] 136 | steps: 137 | - name: Merge Artifacts 138 | uses: actions/upload-artifact/merge@v4 139 | with: 140 | name: dist 141 | delete-merged: true 142 | 143 | publish-docs: 144 | needs: [build-sdist, build-wheels] 145 | runs-on: ubuntu-latest 146 | 147 | env: 148 | PIP_DISABLE_PIP_VERSION_CHECK: 1 149 | 150 | steps: 151 | - name: Checkout source 152 | uses: actions/checkout@v4 153 | with: 154 | fetch-depth: 5 155 | submodules: true 156 | 157 | - name: Set up Python 158 | uses: actions/setup-python@v5 159 | with: 160 | 
python-version: "3.x" 161 | 162 | - name: Build docs 163 | run: | 164 | pip install -e .[docs] 165 | make htmldocs 166 | 167 | - name: Checkout gh-pages 168 | uses: actions/checkout@v4 169 | with: 170 | fetch-depth: 5 171 | ref: gh-pages 172 | path: docs/gh-pages 173 | 174 | - name: Sync docs 175 | run: | 176 | rsync -a docs/_build/html/ docs/gh-pages/current/ 177 | 178 | - name: Commit and push docs 179 | uses: magicstack/gha-commit-and-push@master 180 | with: 181 | target_branch: gh-pages 182 | workdir: docs/gh-pages 183 | commit_message: Automatic documentation update 184 | github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} 185 | ssh_key: ${{ secrets.RELEASE_BOT_SSH_KEY }} 186 | gpg_key: ${{ secrets.RELEASE_BOT_GPG_KEY }} 187 | gpg_key_id: "5C468778062D87BF!" 188 | 189 | publish: 190 | needs: [build-sdist, build-wheels, publish-docs] 191 | runs-on: ubuntu-latest 192 | 193 | environment: 194 | name: pypi 195 | url: https://pypi.org/p/asyncpg 196 | permissions: 197 | id-token: write 198 | attestations: write 199 | contents: write 200 | deployments: write 201 | 202 | steps: 203 | - uses: actions/checkout@v4 204 | with: 205 | fetch-depth: 5 206 | submodules: false 207 | 208 | - uses: actions/download-artifact@v4 209 | with: 210 | name: dist 211 | path: dist/ 212 | 213 | - name: Extract Release Version 214 | id: relver 215 | run: | 216 | set -e 217 | echo "version=$(cat dist/VERSION)" >> $GITHUB_OUTPUT 218 | rm dist/VERSION 219 | 220 | - name: Merge and tag the PR 221 | uses: edgedb/action-release/merge@master 222 | with: 223 | github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} 224 | ssh_key: ${{ secrets.RELEASE_BOT_SSH_KEY }} 225 | gpg_key: ${{ secrets.RELEASE_BOT_GPG_KEY }} 226 | gpg_key_id: "5C468778062D87BF!" 
227 | tag_name: v${{ steps.relver.outputs.version }} 228 | 229 | - name: Publish Github Release 230 | uses: elprans/gh-action-create-release@master 231 | env: 232 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 233 | with: 234 | tag_name: v${{ steps.relver.outputs.version }} 235 | release_name: v${{ steps.relver.outputs.version }} 236 | target: ${{ github.event.pull_request.base.ref }} 237 | body: ${{ github.event.pull_request.body }} 238 | 239 | - run: | 240 | ls -al dist/ 241 | 242 | - name: Upload to PyPI 243 | uses: pypa/gh-action-pypi-publish@release/v1 244 | with: 245 | attestations: true 246 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - ci 8 | pull_request: 9 | branches: 10 | - master 11 | 12 | jobs: 13 | test-platforms: 14 | # NOTE: this matrix is for testing various combinations of Python and OS 15 | # versions on the system-installed PostgreSQL version (which is usually 16 | # fairly recent). For a PostgreSQL version matrix see the test-postgres 17 | # job. 18 | strategy: 19 | matrix: 20 | python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] 21 | os: [ubuntu-latest, macos-latest, windows-latest] 22 | loop: [asyncio, uvloop] 23 | exclude: 24 | # uvloop does not support windows 25 | - loop: uvloop 26 | os: windows-latest 27 | 28 | runs-on: ${{ matrix.os }} 29 | 30 | defaults: 31 | run: 32 | shell: bash 33 | 34 | env: 35 | PIP_DISABLE_PIP_VERSION_CHECK: 1 36 | 37 | steps: 38 | - uses: actions/checkout@v4 39 | with: 40 | fetch-depth: 50 41 | submodules: true 42 | 43 | - name: Check if release PR. 
44 | uses: edgedb/action-release/validate-pr@master 45 | id: release 46 | with: 47 | github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} 48 | missing_version_ok: yes 49 | version_file: asyncpg/_version.py 50 | version_line_pattern: | 51 | __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) 52 | 53 | - name: Setup PostgreSQL 54 | if: "!steps.release.outputs.is_release && matrix.os == 'macos-latest'" 55 | run: | 56 | brew install postgresql 57 | 58 | - name: Set up Python ${{ matrix.python-version }} 59 | uses: actions/setup-python@v5 60 | if: "!steps.release.outputs.is_release" 61 | with: 62 | python-version: ${{ matrix.python-version }} 63 | 64 | - name: Install Python Deps 65 | if: "!steps.release.outputs.is_release" 66 | run: | 67 | [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh 68 | python -m pip install -U pip setuptools wheel 69 | python -m pip install -e .[test] 70 | 71 | - name: Test 72 | if: "!steps.release.outputs.is_release" 73 | env: 74 | LOOP_IMPL: ${{ matrix.loop }} 75 | run: | 76 | if [ "${LOOP_IMPL}" = "uvloop" ]; then 77 | env USE_UVLOOP=1 python -m unittest -v tests.suite 78 | else 79 | python -m unittest -v tests.suite 80 | fi 81 | 82 | test-postgres: 83 | strategy: 84 | matrix: 85 | postgres-version: ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16", "17"] 86 | 87 | runs-on: ubuntu-latest 88 | 89 | env: 90 | PIP_DISABLE_PIP_VERSION_CHECK: 1 91 | 92 | steps: 93 | - uses: actions/checkout@v4 94 | with: 95 | fetch-depth: 50 96 | submodules: true 97 | 98 | - name: Check if release PR. 
99 | uses: edgedb/action-release/validate-pr@master 100 | id: release 101 | with: 102 | github_token: ${{ secrets.RELEASE_BOT_GITHUB_TOKEN }} 103 | missing_version_ok: yes 104 | version_file: asyncpg/_version.py 105 | version_line_pattern: | 106 | __version__(?:\s*:\s*typing\.Final)?\s*=\s*(?:['"])([[:PEP440:]])(?:['"]) 107 | 108 | - name: Set up PostgreSQL 109 | if: "!steps.release.outputs.is_release" 110 | env: 111 | PGVERSION: ${{ matrix.postgres-version }} 112 | DISTRO_NAME: focal 113 | run: | 114 | sudo env DISTRO_NAME="${DISTRO_NAME}" PGVERSION="${PGVERSION}" \ 115 | .github/workflows/install-postgres.sh 116 | echo PGINSTALLATION="/usr/lib/postgresql/${PGVERSION}/bin" \ 117 | >> "${GITHUB_ENV}" 118 | 119 | - name: Set up Python ${{ matrix.python-version }} 120 | uses: actions/setup-python@v5 121 | if: "!steps.release.outputs.is_release" 122 | with: 123 | python-version: "3.x" 124 | 125 | - name: Install Python Deps 126 | if: "!steps.release.outputs.is_release" 127 | run: | 128 | [ "$RUNNER_OS" = "Linux" ] && .github/workflows/install-krb5.sh 129 | python -m pip install -U pip setuptools wheel 130 | python -m pip install -e .[test] 131 | 132 | - name: Test 133 | if: "!steps.release.outputs.is_release" 134 | env: 135 | PGVERSION: ${{ matrix.postgres-version }} 136 | run: | 137 | python -m unittest -v tests.suite 138 | 139 | # This job exists solely to act as the test job aggregate to be 140 | # targeted by branch policies. 
141 | regression-tests: 142 | name: "Regression Tests" 143 | needs: [test-platforms, test-postgres] 144 | runs-on: ubuntu-latest 145 | 146 | steps: 147 | - run: echo OK 148 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *._* 2 | *.pyc 3 | *.pyo 4 | *.ymlc 5 | *.ymlc~ 6 | *.scssc 7 | *.so 8 | *.pyd 9 | *~ 10 | .#* 11 | .DS_Store 12 | .project 13 | .pydevproject 14 | .settings 15 | .idea 16 | /.ropeproject 17 | \#*# 18 | /pub 19 | /test*.py 20 | /.local 21 | /perf.data* 22 | /config_local.yml 23 | /build 24 | __pycache__/ 25 | .d8_history 26 | /*.egg 27 | /*.egg-info 28 | /dist 29 | /.cache 30 | docs/_build 31 | *,cover 32 | .coverage 33 | /.pytest_cache/ 34 | /.eggs 35 | /.vscode 36 | /.mypy_cache 37 | /.venv* 38 | /.tox 39 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "asyncpg/pgproto"] 2 | path = asyncpg/pgproto 3 | url = https://github.com/MagicStack/py-pgproto.git 4 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | Main contributors 2 | ================= 3 | 4 | MagicStack Inc.: 5 | Elvis Pranskevichus 6 | Yury Selivanov 7 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | recursive-include docs *.py *.rst Makefile *.css 2 | recursive-include examples *.py 3 | recursive-include tests *.py *.pem 4 | recursive-include asyncpg *.pyx *.pxd *.pxi *.py *.pyi *.c *.h 5 | include LICENSE README.rst Makefile performance.png .flake8 6 | -------------------------------------------------------------------------------- /Makefile: 
-------------------------------------------------------------------------------- 1 | .PHONY: compile debug test quicktest clean all 2 | 3 | 4 | PYTHON ?= python 5 | ROOT = $(dir $(realpath $(firstword $(MAKEFILE_LIST)))) 6 | 7 | 8 | all: compile 9 | 10 | 11 | clean: 12 | rm -fr dist/ doc/_build/ 13 | rm -fr asyncpg/pgproto/*.c asyncpg/pgproto/*.html 14 | rm -fr asyncpg/pgproto/codecs/*.html 15 | rm -fr asyncpg/pgproto/*.so 16 | rm -fr asyncpg/protocol/*.c asyncpg/protocol/*.html 17 | rm -fr asyncpg/protocol/*.so build *.egg-info 18 | rm -fr asyncpg/protocol/codecs/*.html 19 | find . -name '__pycache__' | xargs rm -rf 20 | 21 | 22 | compile: 23 | env ASYNCPG_BUILD_CYTHON_ALWAYS=1 $(PYTHON) -m pip install -e . 24 | 25 | 26 | debug: 27 | env ASYNCPG_DEBUG=1 $(PYTHON) -m pip install -e . 28 | 29 | test: 30 | PYTHONASYNCIODEBUG=1 $(PYTHON) -m unittest -v tests.suite 31 | $(PYTHON) -m unittest -v tests.suite 32 | USE_UVLOOP=1 $(PYTHON) -m unittest -v tests.suite 33 | 34 | 35 | testinstalled: 36 | cd "$${HOME}" && $(PYTHON) $(ROOT)/tests/__init__.py 37 | 38 | 39 | quicktest: 40 | $(PYTHON) -m unittest -v tests.suite 41 | 42 | 43 | htmldocs: 44 | $(PYTHON) -m pip install -e .[docs] 45 | $(MAKE) -C docs html 46 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | asyncpg -- A fast PostgreSQL Database Client Library for Python/asyncio 2 | ======================================================================= 3 | 4 | .. image:: https://github.com/MagicStack/asyncpg/workflows/Tests/badge.svg 5 | :target: https://github.com/MagicStack/asyncpg/actions?query=workflow%3ATests+branch%3Amaster 6 | :alt: GitHub Actions status 7 | .. image:: https://img.shields.io/pypi/v/asyncpg.svg 8 | :target: https://pypi.python.org/pypi/asyncpg 9 | 10 | **asyncpg** is a database interface library designed specifically for 11 | PostgreSQL and Python/asyncio. 
asyncpg is an efficient, clean implementation 12 | of PostgreSQL server binary protocol for use with Python's ``asyncio`` 13 | framework. You can read more about asyncpg in an introductory 14 | `blog post `_. 15 | 16 | asyncpg requires Python 3.8 or later and is supported for PostgreSQL 17 | versions 9.5 to 17. Other PostgreSQL versions or other databases 18 | implementing the PostgreSQL protocol *may* work, but are not being 19 | actively tested. 20 | 21 | 22 | Documentation 23 | ------------- 24 | 25 | The project documentation can be found 26 | `here `_. 27 | 28 | 29 | Performance 30 | ----------- 31 | 32 | In our testing asyncpg is, on average, **5x** faster than psycopg3. 33 | 34 | .. image:: https://raw.githubusercontent.com/MagicStack/asyncpg/master/performance.png?fddca40ab0 35 | :target: https://gistpreview.github.io/?0ed296e93523831ea0918d42dd1258c2 36 | 37 | The above results are a geometric mean of benchmarks obtained with PostgreSQL 38 | `client driver benchmarking toolbench `_ 39 | in June 2023 (click on the chart to see full details). 40 | 41 | 42 | Features 43 | -------- 44 | 45 | asyncpg implements PostgreSQL server protocol natively and exposes its 46 | features directly, as opposed to hiding them behind a generic facade 47 | like DB-API. 48 | 49 | This enables asyncpg to have easy-to-use support for: 50 | 51 | * **prepared statements** 52 | * **scrollable cursors** 53 | * **partial iteration** on query results 54 | * automatic encoding and decoding of composite types, arrays, 55 | and any combination of those 56 | * straightforward support for custom data types 57 | 58 | 59 | Installation 60 | ------------ 61 | 62 | asyncpg is available on PyPI. When not using GSSAPI/SSPI authentication it 63 | has no dependencies. Use pip to install:: 64 | 65 | $ pip install asyncpg 66 | 67 | If you need GSSAPI/SSPI authentication, use:: 68 | 69 | $ pip install 'asyncpg[gssauth]' 70 | 71 | For more details, please `see the documentation 72 | `_. 
73 | 74 | 75 | Basic Usage 76 | ----------- 77 | 78 | .. code-block:: python 79 | 80 | import asyncio 81 | import asyncpg 82 | 83 | async def run(): 84 | conn = await asyncpg.connect(user='user', password='password', 85 | database='database', host='127.0.0.1') 86 | values = await conn.fetch( 87 | 'SELECT * FROM mytable WHERE id = $1', 88 | 10, 89 | ) 90 | await conn.close() 91 | 92 | asyncio.run(run()) 93 | 94 | 95 | License 96 | ------- 97 | 98 | asyncpg is developed and distributed under the Apache 2.0 license. 99 | -------------------------------------------------------------------------------- /asyncpg/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /asyncpg/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | from __future__ import annotations 8 | 9 | from .connection import connect, Connection # NOQA 10 | from .exceptions import * # NOQA 11 | from .pool import create_pool, Pool # NOQA 12 | from .protocol import Record # NOQA 13 | from .types import * # NOQA 14 | 15 | 16 | from ._version import __version__ # NOQA 17 | 18 | from . import exceptions 19 | 20 | 21 | __all__: tuple[str, ...] 
= ( 22 | 'connect', 'create_pool', 'Pool', 'Record', 'Connection' 23 | ) 24 | __all__ += exceptions.__all__ # NOQA 25 | -------------------------------------------------------------------------------- /asyncpg/_asyncio_compat.py: -------------------------------------------------------------------------------- 1 | # Backports from Python/Lib/asyncio for older Pythons 2 | # 3 | # Copyright (c) 2001-2023 Python Software Foundation; All Rights Reserved 4 | # 5 | # SPDX-License-Identifier: PSF-2.0 6 | 7 | from __future__ import annotations 8 | 9 | import asyncio 10 | import functools 11 | import sys 12 | import typing 13 | 14 | if typing.TYPE_CHECKING: 15 | from . import compat 16 | 17 | if sys.version_info < (3, 11): 18 | from async_timeout import timeout as timeout_ctx 19 | else: 20 | from asyncio import timeout as timeout_ctx 21 | 22 | _T = typing.TypeVar('_T') 23 | 24 | 25 | async def wait_for(fut: compat.Awaitable[_T], timeout: float | None) -> _T: 26 | """Wait for the single Future or coroutine to complete, with timeout. 27 | 28 | Coroutine will be wrapped in Task. 29 | 30 | Returns result of the Future or coroutine. When a timeout occurs, 31 | it cancels the task and raises TimeoutError. To avoid the task 32 | cancellation, wrap it in shield(). 33 | 34 | If the wait is cancelled, the task is also cancelled. 35 | 36 | If the task supresses the cancellation and returns a value instead, 37 | that value is returned. 38 | 39 | This function is a coroutine. 
40 | """ 41 | # The special case for timeout <= 0 is for the following case: 42 | # 43 | # async def test_waitfor(): 44 | # func_started = False 45 | # 46 | # async def func(): 47 | # nonlocal func_started 48 | # func_started = True 49 | # 50 | # try: 51 | # await asyncio.wait_for(func(), 0) 52 | # except asyncio.TimeoutError: 53 | # assert not func_started 54 | # else: 55 | # assert False 56 | # 57 | # asyncio.run(test_waitfor()) 58 | 59 | if timeout is not None and timeout <= 0: 60 | fut = asyncio.ensure_future(fut) 61 | 62 | if fut.done(): 63 | return fut.result() 64 | 65 | await _cancel_and_wait(fut) 66 | try: 67 | return fut.result() 68 | except asyncio.CancelledError as exc: 69 | raise TimeoutError from exc 70 | 71 | async with timeout_ctx(timeout): 72 | return await fut 73 | 74 | 75 | async def _cancel_and_wait(fut: asyncio.Future[_T]) -> None: 76 | """Cancel the *fut* future or task and wait until it completes.""" 77 | 78 | loop = asyncio.get_running_loop() 79 | waiter = loop.create_future() 80 | cb = functools.partial(_release_waiter, waiter) 81 | fut.add_done_callback(cb) 82 | 83 | try: 84 | fut.cancel() 85 | # We cannot wait on *fut* directly to make 86 | # sure _cancel_and_wait itself is reliably cancellable. 87 | await waiter 88 | finally: 89 | fut.remove_done_callback(cb) 90 | 91 | 92 | def _release_waiter(waiter: asyncio.Future[typing.Any], *args: object) -> None: 93 | if not waiter.done(): 94 | waiter.set_result(None) 95 | -------------------------------------------------------------------------------- /asyncpg/_version.py: -------------------------------------------------------------------------------- 1 | # This file MUST NOT contain anything but the __version__ assignment. 2 | # 3 | # When making a release, change the value of __version__ 4 | # to an appropriate value, and open a pull request against 5 | # the correct branch (master if making a new feature release). 
6 | # The commit message MUST contain a properly formatted release 7 | # log, and the commit must be signed. 8 | # 9 | # The release automation will: build and test the packages for the 10 | # supported platforms, publish the packages on PyPI, merge the PR 11 | # to the target branch, create a Git tag pointing to the commit. 12 | 13 | from __future__ import annotations 14 | 15 | import typing 16 | 17 | __version__: typing.Final = '0.30.0' 18 | -------------------------------------------------------------------------------- /asyncpg/compat.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | from __future__ import annotations 8 | 9 | import enum 10 | import pathlib 11 | import platform 12 | import typing 13 | import sys 14 | 15 | if typing.TYPE_CHECKING: 16 | import asyncio 17 | 18 | SYSTEM: typing.Final = platform.uname().system 19 | 20 | 21 | if sys.platform == 'win32': 22 | import ctypes.wintypes 23 | 24 | CSIDL_APPDATA: typing.Final = 0x001a 25 | 26 | def get_pg_home_directory() -> pathlib.Path | None: 27 | # We cannot simply use expanduser() as that returns the user's 28 | # home directory, whereas Postgres stores its config in 29 | # %AppData% on Windows. 30 | buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH) 31 | r = ctypes.windll.shell32.SHGetFolderPathW(0, CSIDL_APPDATA, 0, 0, buf) 32 | if r: 33 | return None 34 | else: 35 | return pathlib.Path(buf.value) / 'postgresql' 36 | 37 | else: 38 | def get_pg_home_directory() -> pathlib.Path | None: 39 | try: 40 | return pathlib.Path.home() 41 | except (RuntimeError, KeyError): 42 | return None 43 | 44 | 45 | async def wait_closed(stream: asyncio.StreamWriter) -> None: 46 | # Not all asyncio versions have StreamWriter.wait_closed(). 
47 | if hasattr(stream, 'wait_closed'): 48 | try: 49 | await stream.wait_closed() 50 | except ConnectionResetError: 51 | # On Windows wait_closed() sometimes propagates 52 | # ConnectionResetError which is totally unnecessary. 53 | pass 54 | 55 | 56 | if sys.version_info < (3, 12): 57 | def markcoroutinefunction(c): # type: ignore 58 | pass 59 | else: 60 | from inspect import markcoroutinefunction # noqa: F401 61 | 62 | 63 | if sys.version_info < (3, 12): 64 | from ._asyncio_compat import wait_for as wait_for # noqa: F401 65 | else: 66 | from asyncio import wait_for as wait_for # noqa: F401 67 | 68 | 69 | if sys.version_info < (3, 11): 70 | from ._asyncio_compat import timeout_ctx as timeout # noqa: F401 71 | else: 72 | from asyncio import timeout as timeout # noqa: F401 73 | 74 | if sys.version_info < (3, 9): 75 | from typing import ( # noqa: F401 76 | Awaitable as Awaitable, 77 | ) 78 | else: 79 | from collections.abc import ( # noqa: F401 80 | Awaitable as Awaitable, 81 | ) 82 | 83 | if sys.version_info < (3, 11): 84 | class StrEnum(str, enum.Enum): 85 | __str__ = str.__str__ 86 | __repr__ = enum.Enum.__repr__ 87 | else: 88 | from enum import StrEnum as StrEnum # noqa: F401 89 | -------------------------------------------------------------------------------- /asyncpg/connresource.py: -------------------------------------------------------------------------------- 1 | 2 | # Copyright (C) 2016-present the asyncpg authors and contributors 3 | # 4 | # 5 | # This module is part of asyncpg and is released under 6 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 7 | 8 | 9 | import functools 10 | 11 | from . 
import exceptions 12 | 13 | 14 | def guarded(meth): 15 | """A decorator to add a sanity check to ConnectionResource methods.""" 16 | 17 | @functools.wraps(meth) 18 | def _check(self, *args, **kwargs): 19 | self._check_conn_validity(meth.__name__) 20 | return meth(self, *args, **kwargs) 21 | 22 | return _check 23 | 24 | 25 | class ConnectionResource: 26 | __slots__ = ('_connection', '_con_release_ctr') 27 | 28 | def __init__(self, connection): 29 | self._connection = connection 30 | self._con_release_ctr = connection._pool_release_ctr 31 | 32 | def _check_conn_validity(self, meth_name): 33 | con_release_ctr = self._connection._pool_release_ctr 34 | if con_release_ctr != self._con_release_ctr: 35 | raise exceptions.InterfaceError( 36 | 'cannot call {}.{}(): ' 37 | 'the underlying connection has been released back ' 38 | 'to the pool'.format(self.__class__.__name__, meth_name)) 39 | 40 | if self._connection.is_closed(): 41 | raise exceptions.InterfaceError( 42 | 'cannot call {}.{}(): ' 43 | 'the underlying connection is closed'.format( 44 | self.__class__.__name__, meth_name)) 45 | -------------------------------------------------------------------------------- /asyncpg/protocol/.gitignore: -------------------------------------------------------------------------------- 1 | /*.c 2 | -------------------------------------------------------------------------------- /asyncpg/protocol/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | # flake8: NOQA 8 | 9 | from __future__ import annotations 10 | 11 | from .protocol import Protocol, Record, NO_TIMEOUT, BUILTIN_TYPE_NAME_MAP 12 | -------------------------------------------------------------------------------- /asyncpg/protocol/codecs/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MagicStack/asyncpg/5b14653e0b447d956aa01ec658562138e19f0293/asyncpg/protocol/codecs/__init__.py -------------------------------------------------------------------------------- /asyncpg/protocol/codecs/base.pxd: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | ctypedef object (*encode_func)(ConnectionSettings settings, 9 | WriteBuffer buf, 10 | object obj) 11 | 12 | ctypedef object (*decode_func)(ConnectionSettings settings, 13 | FRBuffer *buf) 14 | 15 | ctypedef object (*codec_encode_func)(Codec codec, 16 | ConnectionSettings settings, 17 | WriteBuffer buf, 18 | object obj) 19 | 20 | ctypedef object (*codec_decode_func)(Codec codec, 21 | ConnectionSettings settings, 22 | FRBuffer *buf) 23 | 24 | 25 | cdef enum CodecType: 26 | CODEC_UNDEFINED = 0 27 | CODEC_C = 1 28 | CODEC_PY = 2 29 | CODEC_ARRAY = 3 30 | CODEC_COMPOSITE = 4 31 | CODEC_RANGE = 5 32 | CODEC_MULTIRANGE = 6 33 | 34 | 35 | cdef enum ServerDataFormat: 36 | PG_FORMAT_ANY = -1 37 | PG_FORMAT_TEXT = 0 38 | PG_FORMAT_BINARY = 1 39 | 40 | 41 | cdef enum ClientExchangeFormat: 42 | PG_XFORMAT_OBJECT = 1 43 | PG_XFORMAT_TUPLE = 2 44 | 45 | 46 | cdef class Codec: 47 | cdef: 48 | uint32_t oid 49 | 50 | str name 51 | str schema 52 | str kind 53 | 54 | CodecType type 55 | ServerDataFormat format 56 | ClientExchangeFormat xformat 57 | 58 | encode_func c_encoder 59 | decode_func c_decoder 60 | Codec base_codec 61 | 62 | object py_encoder 63 | object py_decoder 64 | 65 | # arrays 66 | Codec element_codec 67 | Py_UCS4 element_delimiter 68 | 69 | # composite types 70 | tuple element_type_oids 71 | object element_names 72 | object record_desc 73 | list 
element_codecs 74 | 75 | # Pointers to actual encoder/decoder functions for this codec 76 | codec_encode_func encoder 77 | codec_decode_func decoder 78 | 79 | cdef init(self, str name, str schema, str kind, 80 | CodecType type, ServerDataFormat format, 81 | ClientExchangeFormat xformat, 82 | encode_func c_encoder, decode_func c_decoder, 83 | Codec base_codec, 84 | object py_encoder, object py_decoder, 85 | Codec element_codec, tuple element_type_oids, 86 | object element_names, list element_codecs, 87 | Py_UCS4 element_delimiter) 88 | 89 | cdef encode_scalar(self, ConnectionSettings settings, WriteBuffer buf, 90 | object obj) 91 | 92 | cdef encode_array(self, ConnectionSettings settings, WriteBuffer buf, 93 | object obj) 94 | 95 | cdef encode_array_text(self, ConnectionSettings settings, WriteBuffer buf, 96 | object obj) 97 | 98 | cdef encode_range(self, ConnectionSettings settings, WriteBuffer buf, 99 | object obj) 100 | 101 | cdef encode_multirange(self, ConnectionSettings settings, WriteBuffer buf, 102 | object obj) 103 | 104 | cdef encode_composite(self, ConnectionSettings settings, WriteBuffer buf, 105 | object obj) 106 | 107 | cdef encode_in_python(self, ConnectionSettings settings, WriteBuffer buf, 108 | object obj) 109 | 110 | cdef decode_scalar(self, ConnectionSettings settings, FRBuffer *buf) 111 | 112 | cdef decode_array(self, ConnectionSettings settings, FRBuffer *buf) 113 | 114 | cdef decode_array_text(self, ConnectionSettings settings, FRBuffer *buf) 115 | 116 | cdef decode_range(self, ConnectionSettings settings, FRBuffer *buf) 117 | 118 | cdef decode_multirange(self, ConnectionSettings settings, FRBuffer *buf) 119 | 120 | cdef decode_composite(self, ConnectionSettings settings, FRBuffer *buf) 121 | 122 | cdef decode_in_python(self, ConnectionSettings settings, FRBuffer *buf) 123 | 124 | cdef inline encode(self, 125 | ConnectionSettings settings, 126 | WriteBuffer buf, 127 | object obj) 128 | 129 | cdef inline decode(self, ConnectionSettings 
settings, FRBuffer *buf) 130 | 131 | cdef has_encoder(self) 132 | cdef has_decoder(self) 133 | cdef is_binary(self) 134 | 135 | cdef inline Codec copy(self) 136 | 137 | @staticmethod 138 | cdef Codec new_array_codec(uint32_t oid, 139 | str name, 140 | str schema, 141 | Codec element_codec, 142 | Py_UCS4 element_delimiter) 143 | 144 | @staticmethod 145 | cdef Codec new_range_codec(uint32_t oid, 146 | str name, 147 | str schema, 148 | Codec element_codec) 149 | 150 | @staticmethod 151 | cdef Codec new_multirange_codec(uint32_t oid, 152 | str name, 153 | str schema, 154 | Codec element_codec) 155 | 156 | @staticmethod 157 | cdef Codec new_composite_codec(uint32_t oid, 158 | str name, 159 | str schema, 160 | ServerDataFormat format, 161 | list element_codecs, 162 | tuple element_type_oids, 163 | object element_names) 164 | 165 | @staticmethod 166 | cdef Codec new_python_codec(uint32_t oid, 167 | str name, 168 | str schema, 169 | str kind, 170 | object encoder, 171 | object decoder, 172 | encode_func c_encoder, 173 | decode_func c_decoder, 174 | Codec base_codec, 175 | ServerDataFormat format, 176 | ClientExchangeFormat xformat) 177 | 178 | 179 | cdef class DataCodecConfig: 180 | cdef: 181 | dict _derived_type_codecs 182 | dict _custom_type_codecs 183 | 184 | cdef inline Codec get_codec(self, uint32_t oid, ServerDataFormat format, 185 | bint ignore_custom_codec=*) 186 | cdef inline Codec get_custom_codec(self, uint32_t oid, 187 | ServerDataFormat format) 188 | -------------------------------------------------------------------------------- /asyncpg/protocol/codecs/range.pyx: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | from asyncpg import types as apg_types 9 | 10 | from collections.abc import Sequence as 
SequenceABC 11 | 12 | # defined in postgresql/src/include/utils/rangetypes.h 13 | DEF RANGE_EMPTY = 0x01 # range is empty 14 | DEF RANGE_LB_INC = 0x02 # lower bound is inclusive 15 | DEF RANGE_UB_INC = 0x04 # upper bound is inclusive 16 | DEF RANGE_LB_INF = 0x08 # lower bound is -infinity 17 | DEF RANGE_UB_INF = 0x10 # upper bound is +infinity 18 | 19 | 20 | cdef enum _RangeArgumentType: 21 | _RANGE_ARGUMENT_INVALID = 0 22 | _RANGE_ARGUMENT_TUPLE = 1 23 | _RANGE_ARGUMENT_RANGE = 2 24 | 25 | 26 | cdef inline bint _range_has_lbound(uint8_t flags): 27 | return not (flags & (RANGE_EMPTY | RANGE_LB_INF)) 28 | 29 | 30 | cdef inline bint _range_has_ubound(uint8_t flags): 31 | return not (flags & (RANGE_EMPTY | RANGE_UB_INF)) 32 | 33 | 34 | cdef inline _RangeArgumentType _range_type(object obj): 35 | if cpython.PyTuple_Check(obj) or cpython.PyList_Check(obj): 36 | return _RANGE_ARGUMENT_TUPLE 37 | elif isinstance(obj, apg_types.Range): 38 | return _RANGE_ARGUMENT_RANGE 39 | else: 40 | return _RANGE_ARGUMENT_INVALID 41 | 42 | 43 | cdef range_encode(ConnectionSettings settings, WriteBuffer buf, 44 | object obj, uint32_t elem_oid, 45 | encode_func_ex encoder, const void *encoder_arg): 46 | cdef: 47 | ssize_t obj_len 48 | uint8_t flags = 0 49 | object lower = None 50 | object upper = None 51 | WriteBuffer bounds_data = WriteBuffer.new() 52 | _RangeArgumentType arg_type = _range_type(obj) 53 | 54 | if arg_type == _RANGE_ARGUMENT_INVALID: 55 | raise TypeError( 56 | 'list, tuple or Range object expected (got type {})'.format( 57 | type(obj))) 58 | 59 | elif arg_type == _RANGE_ARGUMENT_TUPLE: 60 | obj_len = len(obj) 61 | if obj_len == 2: 62 | lower = obj[0] 63 | upper = obj[1] 64 | 65 | if lower is None: 66 | flags |= RANGE_LB_INF 67 | 68 | if upper is None: 69 | flags |= RANGE_UB_INF 70 | 71 | flags |= RANGE_LB_INC | RANGE_UB_INC 72 | 73 | elif obj_len == 1: 74 | lower = obj[0] 75 | flags |= RANGE_LB_INC | RANGE_UB_INF 76 | 77 | elif obj_len == 0: 78 | flags |= RANGE_EMPTY 79 | 
80 | else: 81 | raise ValueError( 82 | 'expected 0, 1 or 2 elements in range (got {})'.format( 83 | obj_len)) 84 | 85 | else: 86 | if obj.isempty: 87 | flags |= RANGE_EMPTY 88 | else: 89 | lower = obj.lower 90 | upper = obj.upper 91 | 92 | if obj.lower_inc: 93 | flags |= RANGE_LB_INC 94 | elif lower is None: 95 | flags |= RANGE_LB_INF 96 | 97 | if obj.upper_inc: 98 | flags |= RANGE_UB_INC 99 | elif upper is None: 100 | flags |= RANGE_UB_INF 101 | 102 | if _range_has_lbound(flags): 103 | encoder(settings, bounds_data, lower, encoder_arg) 104 | 105 | if _range_has_ubound(flags): 106 | encoder(settings, bounds_data, upper, encoder_arg) 107 | 108 | buf.write_int32(1 + bounds_data.len()) 109 | buf.write_byte(flags) 110 | buf.write_buffer(bounds_data) 111 | 112 | 113 | cdef range_decode(ConnectionSettings settings, FRBuffer *buf, 114 | decode_func_ex decoder, const void *decoder_arg): 115 | cdef: 116 | uint8_t flags = frb_read(buf, 1)[0] 117 | int32_t bound_len 118 | object lower = None 119 | object upper = None 120 | FRBuffer bound_buf 121 | 122 | if _range_has_lbound(flags): 123 | bound_len = hton.unpack_int32(frb_read(buf, 4)) 124 | if bound_len == -1: 125 | lower = None 126 | else: 127 | frb_slice_from(&bound_buf, buf, bound_len) 128 | lower = decoder(settings, &bound_buf, decoder_arg) 129 | 130 | if _range_has_ubound(flags): 131 | bound_len = hton.unpack_int32(frb_read(buf, 4)) 132 | if bound_len == -1: 133 | upper = None 134 | else: 135 | frb_slice_from(&bound_buf, buf, bound_len) 136 | upper = decoder(settings, &bound_buf, decoder_arg) 137 | 138 | return apg_types.Range(lower=lower, upper=upper, 139 | lower_inc=(flags & RANGE_LB_INC) != 0, 140 | upper_inc=(flags & RANGE_UB_INC) != 0, 141 | empty=(flags & RANGE_EMPTY) != 0) 142 | 143 | 144 | cdef multirange_encode(ConnectionSettings settings, WriteBuffer buf, 145 | object obj, uint32_t elem_oid, 146 | encode_func_ex encoder, const void *encoder_arg): 147 | cdef: 148 | WriteBuffer elem_data 149 | ssize_t 
elem_data_len 150 | ssize_t elem_count 151 | 152 | if not isinstance(obj, SequenceABC): 153 | raise TypeError( 154 | 'expected a sequence (got type {!r})'.format(type(obj).__name__) 155 | ) 156 | 157 | elem_data = WriteBuffer.new() 158 | 159 | for elem in obj: 160 | range_encode(settings, elem_data, elem, elem_oid, encoder, encoder_arg) 161 | 162 | elem_count = len(obj) 163 | if elem_count > INT32_MAX: 164 | raise OverflowError(f'too many elements in multirange value') 165 | 166 | elem_data_len = elem_data.len() 167 | if elem_data_len > INT32_MAX - 4: 168 | raise OverflowError( 169 | f'size of encoded multirange datum exceeds the maximum allowed' 170 | f' {INT32_MAX - 4} bytes') 171 | 172 | # Datum length 173 | buf.write_int32(4 + elem_data_len) 174 | # Number of elements in multirange 175 | buf.write_int32(elem_count) 176 | buf.write_buffer(elem_data) 177 | 178 | 179 | cdef multirange_decode(ConnectionSettings settings, FRBuffer *buf, 180 | decode_func_ex decoder, const void *decoder_arg): 181 | cdef: 182 | int32_t nelems = hton.unpack_int32(frb_read(buf, 4)) 183 | FRBuffer elem_buf 184 | int32_t elem_len 185 | int i 186 | list result 187 | 188 | if nelems == 0: 189 | return [] 190 | 191 | if nelems < 0: 192 | raise exceptions.ProtocolError( 193 | 'unexpected multirange size value: {}'.format(nelems)) 194 | 195 | result = cpython.PyList_New(nelems) 196 | for i in range(nelems): 197 | elem_len = hton.unpack_int32(frb_read(buf, 4)) 198 | if elem_len == -1: 199 | raise exceptions.ProtocolError( 200 | 'unexpected NULL element in multirange value') 201 | else: 202 | frb_slice_from(&elem_buf, buf, elem_len) 203 | elem = range_decode(settings, &elem_buf, decoder, decoder_arg) 204 | cpython.Py_INCREF(elem) 205 | cpython.PyList_SET_ITEM(result, i, elem) 206 | 207 | return result 208 | -------------------------------------------------------------------------------- /asyncpg/protocol/codecs/record.pyx: 
-------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | from asyncpg import exceptions 9 | 10 | 11 | cdef inline record_encode_frame(ConnectionSettings settings, WriteBuffer buf, 12 | WriteBuffer elem_data, int32_t elem_count): 13 | buf.write_int32(4 + elem_data.len()) 14 | # attribute count 15 | buf.write_int32(elem_count) 16 | # encoded attribute data 17 | buf.write_buffer(elem_data) 18 | 19 | 20 | cdef anonymous_record_decode(ConnectionSettings settings, FRBuffer *buf): 21 | cdef: 22 | tuple result 23 | ssize_t elem_count 24 | ssize_t i 25 | int32_t elem_len 26 | uint32_t elem_typ 27 | Codec elem_codec 28 | FRBuffer elem_buf 29 | 30 | elem_count = hton.unpack_int32(frb_read(buf, 4)) 31 | result = cpython.PyTuple_New(elem_count) 32 | 33 | for i in range(elem_count): 34 | elem_typ = hton.unpack_int32(frb_read(buf, 4)) 35 | elem_len = hton.unpack_int32(frb_read(buf, 4)) 36 | 37 | if elem_len == -1: 38 | elem = None 39 | else: 40 | elem_codec = settings.get_data_codec(elem_typ) 41 | if elem_codec is None or not elem_codec.has_decoder(): 42 | raise exceptions.InternalClientError( 43 | 'no decoder for composite type element in ' 44 | 'position {} of type OID {}'.format(i, elem_typ)) 45 | elem = elem_codec.decode(settings, 46 | frb_slice_from(&elem_buf, buf, elem_len)) 47 | 48 | cpython.Py_INCREF(elem) 49 | cpython.PyTuple_SET_ITEM(result, i, elem) 50 | 51 | return result 52 | 53 | 54 | cdef anonymous_record_encode(ConnectionSettings settings, WriteBuffer buf, obj): 55 | raise exceptions.UnsupportedClientFeatureError( 56 | 'input of anonymous composite types is not supported', 57 | hint=( 58 | 'Consider declaring an explicit composite type and ' 59 | 'using it to cast the argument.' 
60 | ), 61 | detail='PostgreSQL does not implement anonymous composite type input.' 62 | ) 63 | 64 | 65 | cdef init_record_codecs(): 66 | register_core_codec(RECORDOID, 67 | anonymous_record_encode, 68 | anonymous_record_decode, 69 | PG_FORMAT_BINARY) 70 | 71 | init_record_codecs() 72 | -------------------------------------------------------------------------------- /asyncpg/protocol/codecs/textutils.pyx: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | cdef inline uint32_t _apg_tolower(uint32_t c): 9 | if c >= 'A' and c <= 'Z': 10 | return c + 'a' - 'A' 11 | else: 12 | return c 13 | 14 | 15 | cdef int apg_strcasecmp(const Py_UCS4 *s1, const Py_UCS4 *s2): 16 | cdef: 17 | uint32_t c1 18 | uint32_t c2 19 | int i = 0 20 | 21 | while True: 22 | c1 = s1[i] 23 | c2 = s2[i] 24 | 25 | if c1 != c2: 26 | c1 = _apg_tolower(c1) 27 | c2 = _apg_tolower(c2) 28 | if c1 != c2: 29 | return c1 - c2 30 | 31 | if c1 == 0 or c2 == 0: 32 | break 33 | 34 | i += 1 35 | 36 | return 0 37 | 38 | 39 | cdef int apg_strcasecmp_char(const char *s1, const char *s2): 40 | cdef: 41 | uint8_t c1 42 | uint8_t c2 43 | int i = 0 44 | 45 | while True: 46 | c1 = s1[i] 47 | c2 = s2[i] 48 | 49 | if c1 != c2: 50 | c1 = _apg_tolower(c1) 51 | c2 = _apg_tolower(c2) 52 | if c1 != c2: 53 | return c1 - c2 54 | 55 | if c1 == 0 or c2 == 0: 56 | break 57 | 58 | i += 1 59 | 60 | return 0 61 | 62 | 63 | cdef inline bint apg_ascii_isspace(Py_UCS4 ch): 64 | return ( 65 | ch == ' ' or 66 | ch == '\n' or 67 | ch == '\r' or 68 | ch == '\t' or 69 | ch == '\v' or 70 | ch == '\f' 71 | ) 72 | 73 | 74 | cdef Py_UCS4 *apg_parse_int32(Py_UCS4 *buf, int32_t *num): 75 | cdef: 76 | Py_UCS4 *p 77 | int32_t n = 0 78 | int32_t neg = 0 79 | 80 | if buf[0] == '-': 81 | neg = 1 82 | buf 
+= 1 83 | elif buf[0] == '+': 84 | buf += 1 85 | 86 | p = buf 87 | while p[0] >= '0' and p[0] <= '9': 88 | n = 10 * n - (p[0] - '0') 89 | p += 1 90 | 91 | if p == buf: 92 | return NULL 93 | 94 | if not neg: 95 | n = -n 96 | 97 | num[0] = n 98 | 99 | return p 100 | -------------------------------------------------------------------------------- /asyncpg/protocol/consts.pxi: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | DEF _MAXINT32 = 2**31 - 1 9 | DEF _COPY_BUFFER_SIZE = 524288 10 | DEF _COPY_SIGNATURE = b"PGCOPY\n\377\r\n\0" 11 | DEF _EXECUTE_MANY_BUF_NUM = 4 12 | DEF _EXECUTE_MANY_BUF_SIZE = 32768 13 | -------------------------------------------------------------------------------- /asyncpg/protocol/coreproto.pxd: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | include "scram.pxd" 9 | 10 | 11 | cdef enum ConnectionStatus: 12 | CONNECTION_OK = 1 13 | CONNECTION_BAD = 2 14 | CONNECTION_STARTED = 3 # Waiting for connection to be made. 
15 | 16 | 17 | cdef enum ProtocolState: 18 | PROTOCOL_IDLE = 0 19 | 20 | PROTOCOL_FAILED = 1 21 | PROTOCOL_ERROR_CONSUME = 2 22 | PROTOCOL_CANCELLED = 3 23 | PROTOCOL_TERMINATING = 4 24 | 25 | PROTOCOL_AUTH = 10 26 | PROTOCOL_PREPARE = 11 27 | PROTOCOL_BIND_EXECUTE = 12 28 | PROTOCOL_BIND_EXECUTE_MANY = 13 29 | PROTOCOL_CLOSE_STMT_PORTAL = 14 30 | PROTOCOL_SIMPLE_QUERY = 15 31 | PROTOCOL_EXECUTE = 16 32 | PROTOCOL_BIND = 17 33 | PROTOCOL_COPY_OUT = 18 34 | PROTOCOL_COPY_OUT_DATA = 19 35 | PROTOCOL_COPY_OUT_DONE = 20 36 | PROTOCOL_COPY_IN = 21 37 | PROTOCOL_COPY_IN_DATA = 22 38 | 39 | 40 | cdef enum AuthenticationMessage: 41 | AUTH_SUCCESSFUL = 0 42 | AUTH_REQUIRED_KERBEROS = 2 43 | AUTH_REQUIRED_PASSWORD = 3 44 | AUTH_REQUIRED_PASSWORDMD5 = 5 45 | AUTH_REQUIRED_SCMCRED = 6 46 | AUTH_REQUIRED_GSS = 7 47 | AUTH_REQUIRED_GSS_CONTINUE = 8 48 | AUTH_REQUIRED_SSPI = 9 49 | AUTH_REQUIRED_SASL = 10 50 | AUTH_SASL_CONTINUE = 11 51 | AUTH_SASL_FINAL = 12 52 | 53 | 54 | cdef enum ResultType: 55 | RESULT_OK = 1 56 | RESULT_FAILED = 2 57 | 58 | 59 | cdef enum TransactionStatus: 60 | PQTRANS_IDLE = 0 # connection idle 61 | PQTRANS_ACTIVE = 1 # command in progress 62 | PQTRANS_INTRANS = 2 # idle, within transaction block 63 | PQTRANS_INERROR = 3 # idle, within failed transaction 64 | PQTRANS_UNKNOWN = 4 # cannot determine status 65 | 66 | 67 | ctypedef object (*decode_row_method)(object, const char*, ssize_t) 68 | 69 | 70 | cdef class CoreProtocol: 71 | cdef: 72 | ReadBuffer buffer 73 | bint _skip_discard 74 | bint _discard_data 75 | 76 | # executemany support data 77 | object _execute_iter 78 | str _execute_portal_name 79 | str _execute_stmt_name 80 | 81 | ConnectionStatus con_status 82 | ProtocolState state 83 | TransactionStatus xact_status 84 | 85 | str encoding 86 | 87 | object transport 88 | 89 | object address 90 | # Instance of _ConnectionParameters 91 | object con_params 92 | # Instance of SCRAMAuthentication 93 | SCRAMAuthentication scram 94 | # Instance of 
gssapi.SecurityContext or sspilib.SecurityContext 95 | object gss_ctx 96 | 97 | readonly int32_t backend_pid 98 | readonly int32_t backend_secret 99 | 100 | ## Result 101 | ResultType result_type 102 | object result 103 | bytes result_param_desc 104 | bytes result_row_desc 105 | bytes result_status_msg 106 | 107 | # True - completed, False - suspended 108 | bint result_execute_completed 109 | 110 | cpdef is_in_transaction(self) 111 | cdef _process__auth(self, char mtype) 112 | cdef _process__prepare(self, char mtype) 113 | cdef _process__bind_execute(self, char mtype) 114 | cdef _process__bind_execute_many(self, char mtype) 115 | cdef _process__close_stmt_portal(self, char mtype) 116 | cdef _process__simple_query(self, char mtype) 117 | cdef _process__bind(self, char mtype) 118 | cdef _process__copy_out(self, char mtype) 119 | cdef _process__copy_out_data(self, char mtype) 120 | cdef _process__copy_in(self, char mtype) 121 | cdef _process__copy_in_data(self, char mtype) 122 | 123 | cdef _parse_msg_authentication(self) 124 | cdef _parse_msg_parameter_status(self) 125 | cdef _parse_msg_notification(self) 126 | cdef _parse_msg_backend_key_data(self) 127 | cdef _parse_msg_ready_for_query(self) 128 | cdef _parse_data_msgs(self) 129 | cdef _parse_copy_data_msgs(self) 130 | cdef _parse_msg_error_response(self, is_error) 131 | cdef _parse_msg_command_complete(self) 132 | 133 | cdef _write_copy_data_msg(self, object data) 134 | cdef _write_copy_done_msg(self) 135 | cdef _write_copy_fail_msg(self, str cause) 136 | 137 | cdef _auth_password_message_cleartext(self) 138 | cdef _auth_password_message_md5(self, bytes salt) 139 | cdef _auth_password_message_sasl_initial(self, list sasl_auth_methods) 140 | cdef _auth_password_message_sasl_continue(self, bytes server_response) 141 | cdef _auth_gss_init_gssapi(self) 142 | cdef _auth_gss_init_sspi(self, bint negotiate) 143 | cdef _auth_gss_get_service(self) 144 | cdef _auth_gss_step(self, bytes server_response) 145 | 146 | cdef 
_write(self, buf) 147 | cdef _writelines(self, list buffers) 148 | 149 | cdef _read_server_messages(self) 150 | 151 | cdef _push_result(self) 152 | cdef _reset_result(self) 153 | cdef _set_state(self, ProtocolState new_state) 154 | 155 | cdef _ensure_connected(self) 156 | 157 | cdef WriteBuffer _build_parse_message(self, str stmt_name, str query) 158 | cdef WriteBuffer _build_bind_message(self, str portal_name, 159 | str stmt_name, 160 | WriteBuffer bind_data) 161 | cdef WriteBuffer _build_empty_bind_data(self) 162 | cdef WriteBuffer _build_execute_message(self, str portal_name, 163 | int32_t limit) 164 | 165 | 166 | cdef _connect(self) 167 | cdef _prepare_and_describe(self, str stmt_name, str query) 168 | cdef _send_parse_message(self, str stmt_name, str query) 169 | cdef _send_bind_message(self, str portal_name, str stmt_name, 170 | WriteBuffer bind_data, int32_t limit) 171 | cdef _bind_execute(self, str portal_name, str stmt_name, 172 | WriteBuffer bind_data, int32_t limit) 173 | cdef bint _bind_execute_many(self, str portal_name, str stmt_name, 174 | object bind_data, bint return_rows) 175 | cdef bint _bind_execute_many_more(self, bint first=*) 176 | cdef _bind_execute_many_fail(self, object error, bint first=*) 177 | cdef _bind(self, str portal_name, str stmt_name, 178 | WriteBuffer bind_data) 179 | cdef _execute(self, str portal_name, int32_t limit) 180 | cdef _close(self, str name, bint is_portal) 181 | cdef _simple_query(self, str query) 182 | cdef _copy_out(self, str copy_stmt) 183 | cdef _copy_in(self, str copy_stmt) 184 | cdef _terminate(self) 185 | 186 | cdef _decode_row(self, const char* buf, ssize_t buf_len) 187 | 188 | cdef _on_result(self) 189 | cdef _on_notification(self, pid, channel, payload) 190 | cdef _on_notice(self, parsed) 191 | cdef _set_server_parameter(self, name, val) 192 | cdef _on_connection_lost(self, exc) 193 | -------------------------------------------------------------------------------- /asyncpg/protocol/cpythonx.pxd: 
-------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | cdef extern from "Python.h": 9 | int PyByteArray_Check(object) 10 | 11 | int PyMemoryView_Check(object) 12 | Py_buffer *PyMemoryView_GET_BUFFER(object) 13 | object PyMemoryView_GetContiguous(object, int buffertype, char order) 14 | 15 | Py_UCS4* PyUnicode_AsUCS4Copy(object) except NULL 16 | object PyUnicode_FromKindAndData( 17 | int kind, const void *buffer, Py_ssize_t size) 18 | 19 | int PyUnicode_4BYTE_KIND 20 | -------------------------------------------------------------------------------- /asyncpg/protocol/encodings.pyx: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | '''Map PostgreSQL encoding names to Python encoding names 9 | 10 | https://www.postgresql.org/docs/current/static/multibyte.html#CHARSET-TABLE 11 | ''' 12 | 13 | cdef dict ENCODINGS_MAP = { 14 | 'abc': 'cp1258', 15 | 'alt': 'cp866', 16 | 'euc_cn': 'euccn', 17 | 'euc_jp': 'eucjp', 18 | 'euc_kr': 'euckr', 19 | 'koi8r': 'koi8_r', 20 | 'koi8u': 'koi8_u', 21 | 'shift_jis_2004': 'euc_jis_2004', 22 | 'sjis': 'shift_jis', 23 | 'sql_ascii': 'ascii', 24 | 'vscii': 'cp1258', 25 | 'tcvn': 'cp1258', 26 | 'tcvn5712': 'cp1258', 27 | 'unicode': 'utf_8', 28 | 'win': 'cp1521', 29 | 'win1250': 'cp1250', 30 | 'win1251': 'cp1251', 31 | 'win1252': 'cp1252', 32 | 'win1253': 'cp1253', 33 | 'win1254': 'cp1254', 34 | 'win1255': 'cp1255', 35 | 'win1256': 'cp1256', 36 | 'win1257': 'cp1257', 37 | 'win1258': 'cp1258', 38 | 'win866': 'cp866', 39 | 'win874': 'cp874', 40 | 
'win932': 'cp932', 41 | 'win936': 'cp936', 42 | 'win949': 'cp949', 43 | 'win950': 'cp950', 44 | 'windows1250': 'cp1250', 45 | 'windows1251': 'cp1251', 46 | 'windows1252': 'cp1252', 47 | 'windows1253': 'cp1253', 48 | 'windows1254': 'cp1254', 49 | 'windows1255': 'cp1255', 50 | 'windows1256': 'cp1256', 51 | 'windows1257': 'cp1257', 52 | 'windows1258': 'cp1258', 53 | 'windows866': 'cp866', 54 | 'windows874': 'cp874', 55 | 'windows932': 'cp932', 56 | 'windows936': 'cp936', 57 | 'windows949': 'cp949', 58 | 'windows950': 'cp950', 59 | } 60 | 61 | 62 | cdef get_python_encoding(pg_encoding): 63 | return ENCODINGS_MAP.get(pg_encoding.lower(), pg_encoding.lower()) 64 | -------------------------------------------------------------------------------- /asyncpg/protocol/pgtypes.pxi: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | # GENERATED FROM pg_catalog.pg_type 9 | # DO NOT MODIFY, use tools/generate_type_map.py to update 10 | 11 | DEF INVALIDOID = 0 12 | DEF MAXBUILTINOID = 9999 13 | DEF MAXSUPPORTEDOID = 5080 14 | 15 | DEF BOOLOID = 16 16 | DEF BYTEAOID = 17 17 | DEF CHAROID = 18 18 | DEF NAMEOID = 19 19 | DEF INT8OID = 20 20 | DEF INT2OID = 21 21 | DEF INT4OID = 23 22 | DEF REGPROCOID = 24 23 | DEF TEXTOID = 25 24 | DEF OIDOID = 26 25 | DEF TIDOID = 27 26 | DEF XIDOID = 28 27 | DEF CIDOID = 29 28 | DEF PG_DDL_COMMANDOID = 32 29 | DEF JSONOID = 114 30 | DEF XMLOID = 142 31 | DEF PG_NODE_TREEOID = 194 32 | DEF SMGROID = 210 33 | DEF TABLE_AM_HANDLEROID = 269 34 | DEF INDEX_AM_HANDLEROID = 325 35 | DEF POINTOID = 600 36 | DEF LSEGOID = 601 37 | DEF PATHOID = 602 38 | DEF BOXOID = 603 39 | DEF POLYGONOID = 604 40 | DEF LINEOID = 628 41 | DEF CIDROID = 650 42 | DEF FLOAT4OID = 700 43 | DEF FLOAT8OID = 701 44 | DEF 
ABSTIMEOID = 702 45 | DEF RELTIMEOID = 703 46 | DEF TINTERVALOID = 704 47 | DEF UNKNOWNOID = 705 48 | DEF CIRCLEOID = 718 49 | DEF MACADDR8OID = 774 50 | DEF MONEYOID = 790 51 | DEF MACADDROID = 829 52 | DEF INETOID = 869 53 | DEF _TEXTOID = 1009 54 | DEF _OIDOID = 1028 55 | DEF ACLITEMOID = 1033 56 | DEF BPCHAROID = 1042 57 | DEF VARCHAROID = 1043 58 | DEF DATEOID = 1082 59 | DEF TIMEOID = 1083 60 | DEF TIMESTAMPOID = 1114 61 | DEF TIMESTAMPTZOID = 1184 62 | DEF INTERVALOID = 1186 63 | DEF TIMETZOID = 1266 64 | DEF BITOID = 1560 65 | DEF VARBITOID = 1562 66 | DEF NUMERICOID = 1700 67 | DEF REFCURSOROID = 1790 68 | DEF REGPROCEDUREOID = 2202 69 | DEF REGOPEROID = 2203 70 | DEF REGOPERATOROID = 2204 71 | DEF REGCLASSOID = 2205 72 | DEF REGTYPEOID = 2206 73 | DEF RECORDOID = 2249 74 | DEF CSTRINGOID = 2275 75 | DEF ANYOID = 2276 76 | DEF ANYARRAYOID = 2277 77 | DEF VOIDOID = 2278 78 | DEF TRIGGEROID = 2279 79 | DEF LANGUAGE_HANDLEROID = 2280 80 | DEF INTERNALOID = 2281 81 | DEF OPAQUEOID = 2282 82 | DEF ANYELEMENTOID = 2283 83 | DEF ANYNONARRAYOID = 2776 84 | DEF UUIDOID = 2950 85 | DEF TXID_SNAPSHOTOID = 2970 86 | DEF FDW_HANDLEROID = 3115 87 | DEF PG_LSNOID = 3220 88 | DEF TSM_HANDLEROID = 3310 89 | DEF PG_NDISTINCTOID = 3361 90 | DEF PG_DEPENDENCIESOID = 3402 91 | DEF ANYENUMOID = 3500 92 | DEF TSVECTOROID = 3614 93 | DEF TSQUERYOID = 3615 94 | DEF GTSVECTOROID = 3642 95 | DEF REGCONFIGOID = 3734 96 | DEF REGDICTIONARYOID = 3769 97 | DEF JSONBOID = 3802 98 | DEF ANYRANGEOID = 3831 99 | DEF EVENT_TRIGGEROID = 3838 100 | DEF JSONPATHOID = 4072 101 | DEF REGNAMESPACEOID = 4089 102 | DEF REGROLEOID = 4096 103 | DEF REGCOLLATIONOID = 4191 104 | DEF ANYMULTIRANGEOID = 4537 105 | DEF ANYCOMPATIBLEMULTIRANGEOID = 4538 106 | DEF PG_BRIN_BLOOM_SUMMARYOID = 4600 107 | DEF PG_BRIN_MINMAX_MULTI_SUMMARYOID = 4601 108 | DEF PG_MCV_LISTOID = 5017 109 | DEF PG_SNAPSHOTOID = 5038 110 | DEF XID8OID = 5069 111 | DEF ANYCOMPATIBLEOID = 5077 112 | DEF ANYCOMPATIBLEARRAYOID = 5078 113 | 
DEF ANYCOMPATIBLENONARRAYOID = 5079 114 | DEF ANYCOMPATIBLERANGEOID = 5080 115 | 116 | cdef ARRAY_TYPES = (_TEXTOID, _OIDOID,) 117 | 118 | BUILTIN_TYPE_OID_MAP = { 119 | ABSTIMEOID: 'abstime', 120 | ACLITEMOID: 'aclitem', 121 | ANYARRAYOID: 'anyarray', 122 | ANYCOMPATIBLEARRAYOID: 'anycompatiblearray', 123 | ANYCOMPATIBLEMULTIRANGEOID: 'anycompatiblemultirange', 124 | ANYCOMPATIBLENONARRAYOID: 'anycompatiblenonarray', 125 | ANYCOMPATIBLEOID: 'anycompatible', 126 | ANYCOMPATIBLERANGEOID: 'anycompatiblerange', 127 | ANYELEMENTOID: 'anyelement', 128 | ANYENUMOID: 'anyenum', 129 | ANYMULTIRANGEOID: 'anymultirange', 130 | ANYNONARRAYOID: 'anynonarray', 131 | ANYOID: 'any', 132 | ANYRANGEOID: 'anyrange', 133 | BITOID: 'bit', 134 | BOOLOID: 'bool', 135 | BOXOID: 'box', 136 | BPCHAROID: 'bpchar', 137 | BYTEAOID: 'bytea', 138 | CHAROID: 'char', 139 | CIDOID: 'cid', 140 | CIDROID: 'cidr', 141 | CIRCLEOID: 'circle', 142 | CSTRINGOID: 'cstring', 143 | DATEOID: 'date', 144 | EVENT_TRIGGEROID: 'event_trigger', 145 | FDW_HANDLEROID: 'fdw_handler', 146 | FLOAT4OID: 'float4', 147 | FLOAT8OID: 'float8', 148 | GTSVECTOROID: 'gtsvector', 149 | INDEX_AM_HANDLEROID: 'index_am_handler', 150 | INETOID: 'inet', 151 | INT2OID: 'int2', 152 | INT4OID: 'int4', 153 | INT8OID: 'int8', 154 | INTERNALOID: 'internal', 155 | INTERVALOID: 'interval', 156 | JSONBOID: 'jsonb', 157 | JSONOID: 'json', 158 | JSONPATHOID: 'jsonpath', 159 | LANGUAGE_HANDLEROID: 'language_handler', 160 | LINEOID: 'line', 161 | LSEGOID: 'lseg', 162 | MACADDR8OID: 'macaddr8', 163 | MACADDROID: 'macaddr', 164 | MONEYOID: 'money', 165 | NAMEOID: 'name', 166 | NUMERICOID: 'numeric', 167 | OIDOID: 'oid', 168 | OPAQUEOID: 'opaque', 169 | PATHOID: 'path', 170 | PG_BRIN_BLOOM_SUMMARYOID: 'pg_brin_bloom_summary', 171 | PG_BRIN_MINMAX_MULTI_SUMMARYOID: 'pg_brin_minmax_multi_summary', 172 | PG_DDL_COMMANDOID: 'pg_ddl_command', 173 | PG_DEPENDENCIESOID: 'pg_dependencies', 174 | PG_LSNOID: 'pg_lsn', 175 | PG_MCV_LISTOID: 'pg_mcv_list', 
176 | PG_NDISTINCTOID: 'pg_ndistinct', 177 | PG_NODE_TREEOID: 'pg_node_tree', 178 | PG_SNAPSHOTOID: 'pg_snapshot', 179 | POINTOID: 'point', 180 | POLYGONOID: 'polygon', 181 | RECORDOID: 'record', 182 | REFCURSOROID: 'refcursor', 183 | REGCLASSOID: 'regclass', 184 | REGCOLLATIONOID: 'regcollation', 185 | REGCONFIGOID: 'regconfig', 186 | REGDICTIONARYOID: 'regdictionary', 187 | REGNAMESPACEOID: 'regnamespace', 188 | REGOPERATOROID: 'regoperator', 189 | REGOPEROID: 'regoper', 190 | REGPROCEDUREOID: 'regprocedure', 191 | REGPROCOID: 'regproc', 192 | REGROLEOID: 'regrole', 193 | REGTYPEOID: 'regtype', 194 | RELTIMEOID: 'reltime', 195 | SMGROID: 'smgr', 196 | TABLE_AM_HANDLEROID: 'table_am_handler', 197 | TEXTOID: 'text', 198 | TIDOID: 'tid', 199 | TIMEOID: 'time', 200 | TIMESTAMPOID: 'timestamp', 201 | TIMESTAMPTZOID: 'timestamptz', 202 | TIMETZOID: 'timetz', 203 | TINTERVALOID: 'tinterval', 204 | TRIGGEROID: 'trigger', 205 | TSM_HANDLEROID: 'tsm_handler', 206 | TSQUERYOID: 'tsquery', 207 | TSVECTOROID: 'tsvector', 208 | TXID_SNAPSHOTOID: 'txid_snapshot', 209 | UNKNOWNOID: 'unknown', 210 | UUIDOID: 'uuid', 211 | VARBITOID: 'varbit', 212 | VARCHAROID: 'varchar', 213 | VOIDOID: 'void', 214 | XID8OID: 'xid8', 215 | XIDOID: 'xid', 216 | XMLOID: 'xml', 217 | _OIDOID: 'oid[]', 218 | _TEXTOID: 'text[]' 219 | } 220 | 221 | BUILTIN_TYPE_NAME_MAP = {v: k for k, v in BUILTIN_TYPE_OID_MAP.items()} 222 | 223 | BUILTIN_TYPE_NAME_MAP['smallint'] = \ 224 | BUILTIN_TYPE_NAME_MAP['int2'] 225 | 226 | BUILTIN_TYPE_NAME_MAP['int'] = \ 227 | BUILTIN_TYPE_NAME_MAP['int4'] 228 | 229 | BUILTIN_TYPE_NAME_MAP['integer'] = \ 230 | BUILTIN_TYPE_NAME_MAP['int4'] 231 | 232 | BUILTIN_TYPE_NAME_MAP['bigint'] = \ 233 | BUILTIN_TYPE_NAME_MAP['int8'] 234 | 235 | BUILTIN_TYPE_NAME_MAP['decimal'] = \ 236 | BUILTIN_TYPE_NAME_MAP['numeric'] 237 | 238 | BUILTIN_TYPE_NAME_MAP['real'] = \ 239 | BUILTIN_TYPE_NAME_MAP['float4'] 240 | 241 | BUILTIN_TYPE_NAME_MAP['double precision'] = \ 242 | 
BUILTIN_TYPE_NAME_MAP['float8'] 243 | 244 | BUILTIN_TYPE_NAME_MAP['timestamp with timezone'] = \ 245 | BUILTIN_TYPE_NAME_MAP['timestamptz'] 246 | 247 | BUILTIN_TYPE_NAME_MAP['timestamp without timezone'] = \ 248 | BUILTIN_TYPE_NAME_MAP['timestamp'] 249 | 250 | BUILTIN_TYPE_NAME_MAP['time with timezone'] = \ 251 | BUILTIN_TYPE_NAME_MAP['timetz'] 252 | 253 | BUILTIN_TYPE_NAME_MAP['time without timezone'] = \ 254 | BUILTIN_TYPE_NAME_MAP['time'] 255 | 256 | BUILTIN_TYPE_NAME_MAP['char'] = \ 257 | BUILTIN_TYPE_NAME_MAP['bpchar'] 258 | 259 | BUILTIN_TYPE_NAME_MAP['character'] = \ 260 | BUILTIN_TYPE_NAME_MAP['bpchar'] 261 | 262 | BUILTIN_TYPE_NAME_MAP['character varying'] = \ 263 | BUILTIN_TYPE_NAME_MAP['varchar'] 264 | 265 | BUILTIN_TYPE_NAME_MAP['bit varying'] = \ 266 | BUILTIN_TYPE_NAME_MAP['varbit'] 267 | -------------------------------------------------------------------------------- /asyncpg/protocol/prepared_stmt.pxd: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | cdef class PreparedStatementState: 9 | cdef: 10 | readonly str name 11 | readonly str query 12 | readonly bint closed 13 | readonly bint prepared 14 | readonly int refs 15 | readonly type record_class 16 | readonly bint ignore_custom_codec 17 | 18 | 19 | list row_desc 20 | list parameters_desc 21 | 22 | ConnectionSettings settings 23 | 24 | int16_t args_num 25 | bint have_text_args 26 | tuple args_codecs 27 | 28 | int16_t cols_num 29 | object cols_desc 30 | bint have_text_cols 31 | tuple rows_codecs 32 | 33 | cdef _encode_bind_msg(self, args, int seqno = ?) 
cdef class BaseProtocol(CoreProtocol):
    # Connection-level protocol state layered on top of the low-level
    # PostgreSQL wire protocol implemented by CoreProtocol.

    cdef:
        object loop                      # asyncio event loop
        ConnectionSettings settings
        object cancel_sent_waiter
        object cancel_waiter
        object waiter                    # future for the currently pending operation
        bint return_extra
        object create_future
        object timeout_handle
        object conref                    # weak reference back to the owning Connection
        type record_class
        bint is_reading

        str last_query

        bint writing_paused              # transport flow-control flags
        bint closing

        readonly uint64_t queries_count

        bint _is_ssl

        PreparedStatementState statement

    cdef get_connection(self)

    cdef _get_timeout_impl(self, timeout)
    cdef _check_state(self)
    cdef _new_waiter(self, timeout)
    cdef _coreproto_error(self)

    # Completion callbacks, one per in-flight operation kind.
    cdef _on_result__connect(self, object waiter)
    cdef _on_result__prepare(self, object waiter)
    cdef _on_result__bind_and_exec(self, object waiter)
    cdef _on_result__close_stmt_or_portal(self, object waiter)
    cdef _on_result__simple_query(self, object waiter)
    cdef _on_result__bind(self, object waiter)
    cdef _on_result__copy_out(self, object waiter)
    cdef _on_result__copy_in(self, object waiter)

    cdef _handle_waiter_on_connection_lost(self, cause)

    cdef _dispatch_result(self)

    cdef inline resume_reading(self)
    cdef inline pause_reading(self)
26 | 27 | /* ob_item contains space for 'ob_size' elements. 28 | * Items must normally not be NULL, except during construction when 29 | * the record is not yet visible outside the function that builds it. 30 | */ 31 | } ApgRecordObject; 32 | 33 | 34 | extern PyTypeObject ApgRecord_Type; 35 | extern PyTypeObject ApgRecordIter_Type; 36 | extern PyTypeObject ApgRecordItems_Type; 37 | 38 | extern PyTypeObject ApgRecordDesc_Type; 39 | 40 | #define ApgRecord_Check(self) PyObject_TypeCheck(self, &ApgRecord_Type) 41 | #define ApgRecord_CheckExact(o) (Py_TYPE(o) == &ApgRecord_Type) 42 | #define ApgRecordDesc_CheckExact(o) (Py_TYPE(o) == &ApgRecordDesc_Type) 43 | 44 | #define ApgRecord_SET_ITEM(op, i, v) \ 45 | (((ApgRecordObject *)(op))->ob_item[i] = v) 46 | #define ApgRecord_GET_ITEM(op, i) \ 47 | (((ApgRecordObject *)(op))->ob_item[i]) 48 | 49 | PyTypeObject *ApgRecord_InitTypes(void); 50 | PyObject *ApgRecord_New(PyTypeObject *, PyObject *, Py_ssize_t); 51 | PyObject *ApgRecordDesc_New(PyObject *, PyObject *); 52 | 53 | #endif 54 | -------------------------------------------------------------------------------- /asyncpg/protocol/scram.pxd: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | cdef class SCRAMAuthentication: 9 | cdef: 10 | readonly bytes authentication_method 11 | readonly bytes authorization_message 12 | readonly bytes client_channel_binding 13 | readonly bytes client_first_message_bare 14 | readonly bytes client_nonce 15 | readonly bytes client_proof 16 | readonly bytes password_salt 17 | readonly int password_iterations 18 | readonly bytes server_first_message 19 | # server_key is an instance of hmac.HAMC 20 | readonly object server_key 21 | readonly bytes server_nonce 22 | 23 | cdef 
@cython.final
cdef class ConnectionSettings(pgproto.CodecContext):
    # Per-connection server settings plus the codec registry used to
    # encode/decode data values.  ``exceptions`` is imported at module
    # level (``from asyncpg import exceptions``).

    def __cinit__(self):
        # Default to UTF-8 until the server reports a client_encoding.
        self._encoding = 'utf-8'
        self._is_utf8 = True
        self._settings = {}
        self._codec = codecs.lookup('utf-8')
        self._data_codecs = DataCodecConfig()

    cdef add_setting(self, str name, str val):
        # Record a server-reported setting; switch the active text codec
        # when client_encoding changes.
        self._settings[name] = val
        if name == 'client_encoding':
            py_enc = get_python_encoding(val)
            self._codec = codecs.lookup(py_enc)
            self._encoding = self._codec.name
            self._is_utf8 = self._encoding == 'utf-8'

    cdef is_encoding_utf8(self):
        return self._is_utf8

    cpdef get_text_codec(self):
        return self._codec

    cpdef inline register_data_types(self, types):
        self._data_codecs.add_types(types)

    cpdef inline add_python_codec(self, typeoid, typename, typeschema,
                                  typeinfos, typekind, encoder, decoder,
                                  format):
        # Translate the user-facing `format` string into the internal
        # server/client exchange-format enums before registering.
        cdef:
            ServerDataFormat _format
            ClientExchangeFormat xformat

        if format == 'binary':
            _format = PG_FORMAT_BINARY
            xformat = PG_XFORMAT_OBJECT
        elif format == 'text':
            _format = PG_FORMAT_TEXT
            xformat = PG_XFORMAT_OBJECT
        elif format == 'tuple':
            _format = PG_FORMAT_ANY
            xformat = PG_XFORMAT_TUPLE
        else:
            raise exceptions.InterfaceError(
                'invalid `format` argument, expected {}, got {!r}'.format(
                    "'text', 'binary' or 'tuple'", format
                ))

        self._data_codecs.add_python_codec(typeoid, typename, typeschema,
                                           typekind, typeinfos,
                                           encoder, decoder,
                                           _format, xformat)

    cpdef inline remove_python_codec(self, typeoid, typename, typeschema):
        self._data_codecs.remove_python_codec(typeoid, typename, typeschema)

    cpdef inline clear_type_cache(self):
        self._data_codecs.clear_type_cache()

    cpdef inline set_builtin_type_codec(self, typeoid, typename, typeschema,
                                        typekind, alias_to, format):
        cdef:
            ServerDataFormat _format

        if format is None:
            _format = PG_FORMAT_ANY
        elif format == 'binary':
            _format = PG_FORMAT_BINARY
        elif format == 'text':
            _format = PG_FORMAT_TEXT
        else:
            raise exceptions.InterfaceError(
                'invalid `format` argument, expected {}, got {!r}'.format(
                    "'text' or 'binary'", format
                ))

        self._data_codecs.set_builtin_type_codec(typeoid, typename, typeschema,
                                                 typekind, alias_to, _format)

    cpdef inline Codec get_data_codec(self, uint32_t oid,
                                      ServerDataFormat format=PG_FORMAT_ANY,
                                      bint ignore_custom_codec=False):
        return self._data_codecs.get_codec(oid, format, ignore_custom_codec)

    def __getattr__(self, name):
        # Expose server settings as attributes (e.g. settings.server_version);
        # private names fall through to normal attribute lookup.
        if not name.startswith('_'):
            try:
                return self._settings[name]
            except KeyError:
                raise AttributeError(name) from None

        return object.__getattribute__(self, name)

    def __repr__(self):
        # Fix: the format string had been reduced to '' (always returning
        # the empty string); include the class tag and the settings dict.
        return '<ConnectionSettings {!r}>'.format(self._settings)
67 | 68 | cpdef inline clear_type_cache(self): 69 | self._data_codecs.clear_type_cache() 70 | 71 | cpdef inline set_builtin_type_codec(self, typeoid, typename, typeschema, 72 | typekind, alias_to, format): 73 | cdef: 74 | ServerDataFormat _format 75 | 76 | if format is None: 77 | _format = PG_FORMAT_ANY 78 | elif format == 'binary': 79 | _format = PG_FORMAT_BINARY 80 | elif format == 'text': 81 | _format = PG_FORMAT_TEXT 82 | else: 83 | raise exceptions.InterfaceError( 84 | 'invalid `format` argument, expected {}, got {!r}'.format( 85 | "'text' or 'binary'", format 86 | )) 87 | 88 | self._data_codecs.set_builtin_type_codec(typeoid, typename, typeschema, 89 | typekind, alias_to, _format) 90 | 91 | cpdef inline Codec get_data_codec(self, uint32_t oid, 92 | ServerDataFormat format=PG_FORMAT_ANY, 93 | bint ignore_custom_codec=False): 94 | return self._data_codecs.get_codec(oid, format, ignore_custom_codec) 95 | 96 | def __getattr__(self, name): 97 | if not name.startswith('_'): 98 | try: 99 | return self._settings[name] 100 | except KeyError: 101 | raise AttributeError(name) from None 102 | 103 | return object.__getattribute__(self, name) 104 | 105 | def __repr__(self): 106 | return ''.format(self._settings) 107 | -------------------------------------------------------------------------------- /asyncpg/serverversion.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | from __future__ import annotations 8 | 9 | import re 10 | import typing 11 | 12 | from .types import ServerVersion 13 | 14 | version_regex: typing.Final = re.compile( 15 | r"(Postgre[^\s]*)?\s*" 16 | r"(?P[0-9]+)\.?" 17 | r"((?P[0-9]+)\.?)?" 18 | r"(?P[0-9]+)?" 19 | r"(?P[a-z]+)?" 20 | r"(?P[0-9]+)?" 
21 | ) 22 | 23 | 24 | class _VersionDict(typing.TypedDict): 25 | major: int 26 | minor: int | None 27 | micro: int | None 28 | releaselevel: str | None 29 | serial: int | None 30 | 31 | 32 | def split_server_version_string(version_string: str) -> ServerVersion: 33 | version_match = version_regex.search(version_string) 34 | 35 | if version_match is None: 36 | raise ValueError( 37 | "Unable to parse Postgres " 38 | f'version from "{version_string}"' 39 | ) 40 | 41 | version: _VersionDict = version_match.groupdict() # type: ignore[assignment] # noqa: E501 42 | for ver_key, ver_value in version.items(): 43 | # Cast all possible versions parts to int 44 | try: 45 | version[ver_key] = int(ver_value) # type: ignore[literal-required, call-overload] # noqa: E501 46 | except (TypeError, ValueError): 47 | pass 48 | 49 | if version["major"] < 10: 50 | return ServerVersion( 51 | version["major"], 52 | version.get("minor") or 0, 53 | version.get("micro") or 0, 54 | version.get("releaselevel") or "final", 55 | version.get("serial") or 0, 56 | ) 57 | 58 | # Since PostgreSQL 10 the versioning scheme has changed. 59 | # 10.x really means 10.0.x. While parsing 10.1 60 | # as (10, 1) may seem less confusing, in practice most 61 | # version checks are written as version[:2], and we 62 | # want to keep that behaviour consistent, i.e not fail 63 | # a major version check due to a bugfix release. 
class TransactionState(enum.Enum):
    # Lifecycle states; transitions happen in start(), __commit() and
    # __rollback() below.  FAILED marks a transaction whose control
    # statement itself raised.
    NEW = 0
    STARTED = 1
    COMMITTED = 2
    ROLLEDBACK = 3
    FAILED = 4


# Isolation-level spellings accepted by Transaction(isolation=...).
ISOLATION_LEVELS = {
    'read_committed',
    'read_uncommitted',
    'serializable',
    'repeatable_read',
}
# Maps PostgreSQL's "SHOW transaction_isolation" output (space-separated)
# to the snake_case spellings used in this module.
ISOLATION_LEVELS_BY_VALUE = {
    'read committed': 'read_committed',
    'read uncommitted': 'read_uncommitted',
    'serializable': 'serializable',
    'repeatable read': 'repeatable_read',
}


class Transaction(connresource.ConnectionResource):
    """Represents a transaction or savepoint block.

    Transactions are created by calling the
    :meth:`Connection.transaction() <connection.Connection.transaction>`
    function.
    """

    __slots__ = ('_connection', '_isolation', '_readonly', '_deferrable',
                 '_state', '_nested', '_id', '_managed')

    def __init__(self, connection, isolation, readonly, deferrable):
        super().__init__(connection)

        if isolation and isolation not in ISOLATION_LEVELS:
            raise ValueError(
                'isolation is expected to be either of {}, '
                'got {!r}'.format(ISOLATION_LEVELS, isolation))

        self._isolation = isolation
        self._readonly = readonly
        self._deferrable = deferrable
        self._state = TransactionState.NEW
        self._nested = False          # becomes True when used as a SAVEPOINT
        self._id = None               # savepoint identifier, if nested
        self._managed = False         # True while inside `async with`

    async def __aenter__(self):
        if self._managed:
            raise apg_errors.InterfaceError(
                'cannot enter context: already in an `async with` block')
        self._managed = True
        await self.start()

    async def __aexit__(self, extype, ex, tb):
        try:
            self._check_conn_validity('__aexit__')
        except apg_errors.InterfaceError:
            if extype is GeneratorExit:
                # When a PoolAcquireContext is being exited, and there
                # is an open transaction in an async generator that has
                # not been iterated fully, there is a possibility that
                # Pool.release() would race with this __aexit__(), since
                # both would be in concurrent tasks.  In such case we
                # yield to Pool.release() to do the ROLLBACK for us.
                # See https://github.com/MagicStack/asyncpg/issues/232
                # for an example.
                return
            else:
                raise

        try:
            # Roll back on any in-block exception, commit otherwise.
            if extype is not None:
                await self.__rollback()
            else:
                await self.__commit()
        finally:
            self._managed = False

    @connresource.guarded
    async def start(self):
        """Enter the transaction or savepoint block."""
        self.__check_state_base('start')
        if self._state is TransactionState.STARTED:
            raise apg_errors.InterfaceError(
                'cannot start; the transaction is already started')

        con = self._connection

        if con._top_xact is None:
            if con._protocol.is_in_transaction():
                raise apg_errors.InterfaceError(
                    'cannot use Connection.transaction() in '
                    'a manually started transaction')
            con._top_xact = self
        else:
            # Nested transaction block
            if self._isolation:
                top_xact_isolation = con._top_xact._isolation
                if top_xact_isolation is None:
                    # The outer transaction did not specify a level;
                    # ask the server for the effective one.
                    top_xact_isolation = ISOLATION_LEVELS_BY_VALUE[
                        await self._connection.fetchval(
                            'SHOW transaction_isolation;')]
                if self._isolation != top_xact_isolation:
                    raise apg_errors.InterfaceError(
                        'nested transaction has a different isolation level: '
                        'current {!r} != outer {!r}'.format(
                            self._isolation, top_xact_isolation))
            self._nested = True

        if self._nested:
            # Nested blocks are implemented with savepoints.
            self._id = con._get_unique_id('savepoint')
            query = 'SAVEPOINT {};'.format(self._id)
        else:
            query = 'BEGIN'
            if self._isolation == 'read_committed':
                query += ' ISOLATION LEVEL READ COMMITTED'
            elif self._isolation == 'read_uncommitted':
                query += ' ISOLATION LEVEL READ UNCOMMITTED'
            elif self._isolation == 'repeatable_read':
                query += ' ISOLATION LEVEL REPEATABLE READ'
            elif self._isolation == 'serializable':
                query += ' ISOLATION LEVEL SERIALIZABLE'
            if self._readonly:
                query += ' READ ONLY'
            if self._deferrable:
                query += ' DEFERRABLE'
            query += ';'

        try:
            await self._connection.execute(query)
        except BaseException:
            self._state = TransactionState.FAILED
            raise
        else:
            self._state = TransactionState.STARTED

    def __check_state_base(self, opname):
        # Reject operations on a transaction that has already reached a
        # terminal state.
        if self._state is TransactionState.COMMITTED:
            raise apg_errors.InterfaceError(
                'cannot {}; the transaction is already committed'.format(
                    opname))
        if self._state is TransactionState.ROLLEDBACK:
            raise apg_errors.InterfaceError(
                'cannot {}; the transaction is already rolled back'.format(
                    opname))
        if self._state is TransactionState.FAILED:
            raise apg_errors.InterfaceError(
                'cannot {}; the transaction is in error state'.format(
                    opname))

    def __check_state(self, opname):
        # Like __check_state_base, but additionally requires STARTED.
        if self._state is not TransactionState.STARTED:
            if self._state is TransactionState.NEW:
                raise apg_errors.InterfaceError(
                    'cannot {}; the transaction is not yet started'.format(
                        opname))
            self.__check_state_base(opname)

    async def __commit(self):
        self.__check_state('commit')

        if self._connection._top_xact is self:
            self._connection._top_xact = None

        if self._nested:
            query = 'RELEASE SAVEPOINT {};'.format(self._id)
        else:
            query = 'COMMIT;'

        try:
            await self._connection.execute(query)
        except BaseException:
            self._state = TransactionState.FAILED
            raise
        else:
            self._state = TransactionState.COMMITTED

    async def __rollback(self):
        self.__check_state('rollback')

        if self._connection._top_xact is self:
            self._connection._top_xact = None

        if self._nested:
            query = 'ROLLBACK TO {};'.format(self._id)
        else:
            query = 'ROLLBACK;'

        try:
            await self._connection.execute(query)
        except BaseException:
            self._state = TransactionState.FAILED
            raise
        else:
            self._state = TransactionState.ROLLEDBACK

    @connresource.guarded
    async def commit(self):
        """Exit the transaction or savepoint block and commit changes."""
        if self._managed:
            raise apg_errors.InterfaceError(
                'cannot manually commit from within an `async with` block')
        await self.__commit()

    @connresource.guarded
    async def rollback(self):
        """Exit the transaction or savepoint block and rollback changes."""
        if self._managed:
            raise apg_errors.InterfaceError(
                'cannot manually rollback from within an `async with` block')
        await self.__rollback()

    def __repr__(self):
        attrs = []
        attrs.append('state:{}'.format(self._state.name.lower()))

        if self._isolation is not None:
            attrs.append(self._isolation)
        if self._readonly:
            attrs.append('readonly')
        if self._deferrable:
            attrs.append('deferrable')

        if self.__class__.__module__.startswith('asyncpg.'):
            mod = 'asyncpg'
        else:
            mod = self.__class__.__module__

        return '<{}.{} {} {:#x}>'.format(
            mod, self.__class__.__name__, ' '.join(attrs), id(self))
'ServerVersion', 23 | ) 24 | 25 | 26 | class Type(typing.NamedTuple): 27 | oid: int 28 | name: str 29 | kind: str 30 | schema: str 31 | 32 | 33 | Type.__doc__ = 'Database data type.' 34 | Type.oid.__doc__ = 'OID of the type.' 35 | Type.name.__doc__ = 'Type name. For example "int2".' 36 | Type.kind.__doc__ = \ 37 | 'Type kind. Can be "scalar", "array", "composite" or "range".' 38 | Type.schema.__doc__ = 'Name of the database schema that defines the type.' 39 | 40 | 41 | class Attribute(typing.NamedTuple): 42 | name: str 43 | type: Type 44 | 45 | 46 | Attribute.__doc__ = 'Database relation attribute.' 47 | Attribute.name.__doc__ = 'Attribute name.' 48 | Attribute.type.__doc__ = 'Attribute data type :class:`asyncpg.types.Type`.' 49 | 50 | 51 | class ServerVersion(typing.NamedTuple): 52 | major: int 53 | minor: int 54 | micro: int 55 | releaselevel: str 56 | serial: int 57 | 58 | 59 | ServerVersion.__doc__ = 'PostgreSQL server version tuple.' 60 | 61 | 62 | class _RangeValue(typing.Protocol): 63 | def __eq__(self, __value: object) -> bool: 64 | ... 65 | 66 | def __lt__(self, __other: Self, /) -> bool: 67 | ... 68 | 69 | def __gt__(self, __other: Self, /) -> bool: 70 | ... 
class _RangeValue(typing.Protocol):
    # Minimal interface required of range bounds: equality plus ordering.
    def __eq__(self, __value: object) -> bool:
        ...

    def __lt__(self, __other: Self, /) -> bool:
        ...

    def __gt__(self, __other: Self, /) -> bool:
        ...


_RV = typing.TypeVar('_RV', bound=_RangeValue)


class Range(typing.Generic[_RV]):
    """Immutable representation of PostgreSQL `range` type.

    A bound of ``None`` means the range is unbounded (infinite) on that
    side; ``empty=True`` produces the canonical empty range.
    """

    __slots__ = ('_lower', '_upper', '_lower_inc', '_upper_inc', '_empty')

    _lower: _RV | None
    _upper: _RV | None
    _lower_inc: bool
    _upper_inc: bool
    _empty: bool

    def __init__(
        self,
        lower: _RV | None = None,
        upper: _RV | None = None,
        *,
        lower_inc: bool = True,
        upper_inc: bool = False,
        empty: bool = False
    ) -> None:
        self._empty = empty
        if empty:
            # An empty range has no bounds at all.
            self._lower = self._upper = None
            self._lower_inc = self._upper_inc = False
        else:
            self._lower = lower
            self._upper = upper
            # An infinite bound is never inclusive.
            self._lower_inc = lower is not None and lower_inc
            self._upper_inc = upper is not None and upper_inc

    @property
    def lower(self) -> _RV | None:
        return self._lower

    @property
    def lower_inc(self) -> bool:
        return self._lower_inc

    @property
    def lower_inf(self) -> bool:
        return self._lower is None and not self._empty

    @property
    def upper(self) -> _RV | None:
        return self._upper

    @property
    def upper_inc(self) -> bool:
        return self._upper_inc

    @property
    def upper_inf(self) -> bool:
        return self._upper is None and not self._empty

    @property
    def isempty(self) -> bool:
        return self._empty

    def _issubset_lower(self, other: Self) -> bool:
        # True if self's lower bound is within other's lower bound.
        if other._lower is None:
            return True
        if self._lower is None:
            return False

        return self._lower > other._lower or (
            self._lower == other._lower
            and (other._lower_inc or not self._lower_inc)
        )

    def _issubset_upper(self, other: Self) -> bool:
        # True if self's upper bound is within other's upper bound.
        if other._upper is None:
            return True
        if self._upper is None:
            return False

        return self._upper < other._upper or (
            self._upper == other._upper
            and (other._upper_inc or not self._upper_inc)
        )

    def issubset(self, other: Self) -> bool:
        # The empty range is a subset of everything; nothing non-empty is
        # a subset of the empty range.
        if self._empty:
            return True
        if other._empty:
            return False

        return self._issubset_lower(other) and self._issubset_upper(other)

    def issuperset(self, other: Self) -> bool:
        return other.issubset(self)

    def __bool__(self) -> bool:
        return not self._empty

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Range):
            return NotImplemented

        return (
            self._lower,
            self._upper,
            self._lower_inc,
            self._upper_inc,
            self._empty
        ) == (
            other._lower,  # pyright: ignore [reportUnknownMemberType]
            other._upper,  # pyright: ignore [reportUnknownMemberType]
            other._lower_inc,
            other._upper_inc,
            other._empty
        )

    def __hash__(self) -> int:
        return hash((
            self._lower,
            self._upper,
            self._lower_inc,
            self._upper_inc,
            self._empty
        ))

    def __repr__(self) -> str:
        if self._empty:
            desc = 'empty'
        else:
            if self._lower is None or not self._lower_inc:
                lb = '('
            else:
                lb = '['

            if self._lower is not None:
                lb += repr(self._lower)

            if self._upper is not None:
                ub = repr(self._upper)
            else:
                ub = ''

            if self._upper is None or not self._upper_inc:
                ub += ')'
            else:
                ub += ']'

            desc = '{}, {}'.format(lb, ub)

        # Fix: the format string had been reduced to '' (discarding `desc`
        # and always returning the empty string).
        return '<Range {}>'.format(desc)

    __str__ = __repr__
and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import re 9 | 10 | 11 | def _quote_ident(ident): 12 | return '"{}"'.format(ident.replace('"', '""')) 13 | 14 | 15 | def _quote_literal(string): 16 | return "'{}'".format(string.replace("'", "''")) 17 | 18 | 19 | async def _mogrify(conn, query, args): 20 | """Safely inline arguments to query text.""" 21 | # Introspect the target query for argument types and 22 | # build a list of safely-quoted fully-qualified type names. 23 | ps = await conn.prepare(query) 24 | paramtypes = [] 25 | for t in ps.get_parameters(): 26 | if t.name.endswith('[]'): 27 | pname = '_' + t.name[:-2] 28 | else: 29 | pname = t.name 30 | 31 | paramtypes.append('{}.{}'.format( 32 | _quote_ident(t.schema), _quote_ident(pname))) 33 | del ps 34 | 35 | # Use Postgres to convert arguments to text representation 36 | # by casting each value to text. 37 | cols = ['quote_literal(${}::{}::text)'.format(i, t) 38 | for i, t in enumerate(paramtypes, start=1)] 39 | 40 | textified = await conn.fetchrow( 41 | 'SELECT {cols}'.format(cols=', '.join(cols)), *args) 42 | 43 | # Finally, replace $n references with text values. 44 | return re.sub( 45 | r"\$(\d+)\b", 46 | lambda m: ( 47 | textified[int(m.group(1)) - 1] 48 | if textified[int(m.group(1)) - 1] is not None 49 | else "NULL" 50 | ), 51 | query, 52 | ) 53 | -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | _build 2 | _templates 3 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = python -m sphinx 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 16 | 17 | .PHONY: help 18 | help: 19 | @echo "Please use \`make ' where is one of" 20 | @echo " html to make standalone HTML files" 21 | @echo " dirhtml to make HTML files named index.html in directories" 22 | @echo " singlehtml to make a single large HTML file" 23 | @echo " pickle to make pickle files" 24 | @echo " json to make JSON files" 25 | @echo " htmlhelp to make HTML files and a HTML help project" 26 | @echo " qthelp to make HTML files and a qthelp project" 27 | @echo " applehelp to make an Apple Help Book" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " epub3 to make an epub3" 31 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 32 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 33 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 34 | @echo " text to make text files" 35 | @echo " man to make manual pages" 36 | @echo " texinfo to make Texinfo files" 37 | @echo " info to make Texinfo files and run them through makeinfo" 38 | @echo " gettext to make PO message catalogs" 39 | @echo " changes to make an overview of all changed/added/deprecated items" 40 | @echo " xml to make Docutils-native XML files" 41 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 42 | @echo " linkcheck to check all external links for integrity" 43 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 44 | @echo " coverage to run coverage check of the documentation (if enabled)" 
45 | @echo " dummy to check syntax errors of document sources" 46 | 47 | .PHONY: clean 48 | clean: 49 | rm -rf $(BUILDDIR)/* 50 | 51 | .PHONY: html 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | .PHONY: dirhtml 58 | dirhtml: 59 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 60 | @echo 61 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 62 | 63 | .PHONY: singlehtml 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | .PHONY: pickle 70 | pickle: 71 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 72 | @echo 73 | @echo "Build finished; now you can process the pickle files." 74 | 75 | .PHONY: json 76 | json: 77 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 78 | @echo 79 | @echo "Build finished; now you can process the JSON files." 80 | 81 | .PHONY: htmlhelp 82 | htmlhelp: 83 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 84 | @echo 85 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 86 | ".hhp project file in $(BUILDDIR)/htmlhelp." 87 | 88 | .PHONY: qthelp 89 | qthelp: 90 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 91 | @echo 92 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 93 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 94 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/asyncpg.qhcp" 95 | @echo "To view the help file:" 96 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/asyncpg.qhc" 97 | 98 | .PHONY: applehelp 99 | applehelp: 100 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 101 | @echo 102 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 103 | @echo "N.B. 
You won't be able to view it unless you put it in" \ 104 | "~/Library/Documentation/Help or install it in your application" \ 105 | "bundle." 106 | 107 | .PHONY: devhelp 108 | devhelp: 109 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 110 | @echo 111 | @echo "Build finished." 112 | @echo "To view the help file:" 113 | @echo "# mkdir -p $$HOME/.local/share/devhelp/asyncpg" 114 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/asyncpg" 115 | @echo "# devhelp" 116 | 117 | .PHONY: epub 118 | epub: 119 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 120 | @echo 121 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 122 | 123 | .PHONY: epub3 124 | epub3: 125 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 126 | @echo 127 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." 128 | 129 | .PHONY: latex 130 | latex: 131 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 132 | @echo 133 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 134 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 135 | "(use \`make latexpdf' here to do that automatically)." 136 | 137 | .PHONY: latexpdf 138 | latexpdf: 139 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 140 | @echo "Running LaTeX files through pdflatex..." 141 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 142 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 143 | 144 | .PHONY: latexpdfja 145 | latexpdfja: 146 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 147 | @echo "Running LaTeX files through platex and dvipdfmx..." 148 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 149 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 150 | 151 | .PHONY: text 152 | text: 153 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 154 | @echo 155 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 
156 | 157 | .PHONY: man 158 | man: 159 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 160 | @echo 161 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 162 | 163 | .PHONY: texinfo 164 | texinfo: 165 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 166 | @echo 167 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 168 | @echo "Run \`make' in that directory to run these through makeinfo" \ 169 | "(use \`make info' here to do that automatically)." 170 | 171 | .PHONY: info 172 | info: 173 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 174 | @echo "Running Texinfo files through makeinfo..." 175 | make -C $(BUILDDIR)/texinfo info 176 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 177 | 178 | .PHONY: gettext 179 | gettext: 180 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 181 | @echo 182 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 183 | 184 | .PHONY: changes 185 | changes: 186 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 187 | @echo 188 | @echo "The overview file is in $(BUILDDIR)/changes." 189 | 190 | .PHONY: linkcheck 191 | linkcheck: 192 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 193 | @echo 194 | @echo "Link check complete; look for any errors in the above output " \ 195 | "or in $(BUILDDIR)/linkcheck/output.txt." 196 | 197 | .PHONY: doctest 198 | doctest: 199 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 200 | @echo "Testing of doctests in the sources finished, look at the " \ 201 | "results in $(BUILDDIR)/doctest/output.txt." 202 | 203 | .PHONY: coverage 204 | coverage: 205 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 206 | @echo "Testing of coverage in the sources finished, look at the " \ 207 | "results in $(BUILDDIR)/coverage/python.txt." 
208 | 209 | .PHONY: xml 210 | xml: 211 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 212 | @echo 213 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 214 | 215 | .PHONY: pseudoxml 216 | pseudoxml: 217 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 218 | @echo 219 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 220 | 221 | .PHONY: dummy 222 | dummy: 223 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy 224 | @echo 225 | @echo "Build finished. Dummy builder generates no files." 226 | -------------------------------------------------------------------------------- /docs/_static/theme_overrides.css: -------------------------------------------------------------------------------- 1 | /* override table width restrictions */ 2 | @media screen and (min-width: 767px) { 3 | 4 | .wy-table-responsive table td { 5 | white-space: normal !important; 6 | vertical-align: top !important; 7 | } 8 | 9 | .wy-table-responsive { 10 | overflow: visible !important; 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import sys 5 | 6 | sys.path.insert(0, os.path.abspath('..')) 7 | 8 | version_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), 9 | 'asyncpg', '_version.py') 10 | 11 | with open(version_file, 'r') as f: 12 | for line in f: 13 | if line.startswith('__version__: typing.Final ='): 14 | _, _, version = line.partition('=') 15 | version = version.strip(" \n'\"") 16 | break 17 | else: 18 | raise RuntimeError( 19 | 'unable to read the version from asyncpg/_version.py') 20 | 21 | # -- General configuration ------------------------------------------------ 22 | 23 | extensions = [ 24 | 'sphinx.ext.autodoc', 25 | 'sphinx.ext.doctest', 26 | 'sphinx.ext.viewcode', 27 | 'sphinx.ext.githubpages', 28 | 
'sphinx.ext.intersphinx', 29 | ] 30 | 31 | add_module_names = False 32 | 33 | templates_path = ['_templates'] 34 | source_suffix = '.rst' 35 | master_doc = 'index' 36 | project = 'asyncpg' 37 | copyright = '2016-present, the asyncpg authors and contributors' 38 | author = '' 39 | release = version 40 | language = "en" 41 | exclude_patterns = ['_build'] 42 | pygments_style = 'sphinx' 43 | todo_include_todos = False 44 | suppress_warnings = ['image.nonlocal_uri'] 45 | 46 | # -- Options for HTML output ---------------------------------------------- 47 | 48 | html_theme = 'sphinx_rtd_theme' 49 | html_title = 'asyncpg Documentation' 50 | html_short_title = 'asyncpg' 51 | html_static_path = ['_static'] 52 | html_sidebars = { 53 | '**': [ 54 | 'about.html', 55 | 'navigation.html', 56 | ] 57 | } 58 | html_show_sourcelink = False 59 | html_show_sphinx = False 60 | html_show_copyright = True 61 | htmlhelp_basename = 'asyncpgdoc' 62 | 63 | 64 | # -- Options for LaTeX output --------------------------------------------- 65 | 66 | latex_elements = {} 67 | 68 | latex_documents = [ 69 | (master_doc, 'asyncpg.tex', 'asyncpg Documentation', 70 | author, 'manual'), 71 | ] 72 | 73 | 74 | # -- Options for manual page output --------------------------------------- 75 | 76 | man_pages = [ 77 | (master_doc, 'asyncpg', 'asyncpg Documentation', 78 | [author], 1) 79 | ] 80 | 81 | 82 | # -- Options for Texinfo output ------------------------------------------- 83 | 84 | texinfo_documents = [ 85 | (master_doc, 'asyncpg', 'asyncpg Documentation', 86 | author, 'asyncpg', 87 | 'asyncpg is a fast PostgreSQL client library for the ' 88 | 'Python asyncio framework', 89 | 'Miscellaneous'), 90 | ] 91 | 92 | # -- Options for intersphinx ---------------------------------------------- 93 | 94 | intersphinx_mapping = {'python': ('https://docs.python.org/3', None)} 95 | -------------------------------------------------------------------------------- /docs/faq.rst: 
-------------------------------------------------------------------------------- 1 | .. _asyncpg-faq: 2 | 3 | 4 | Frequently Asked Questions 5 | ========================== 6 | 7 | Does asyncpg support DB-API? 8 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 9 | 10 | No. DB-API is a synchronous API, while asyncpg is based 11 | around an asynchronous I/O model. Thus, full drop-in compatibility 12 | with DB-API is not possible and we decided to design asyncpg API 13 | in a way that is better aligned with PostgreSQL architecture and 14 | terminology. We will release a synchronous DB-API-compatible version 15 | of asyncpg at some point in the future. 16 | 17 | 18 | Can I use asyncpg with SQLAlchemy ORM? 19 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 20 | 21 | Yes. SQLAlchemy version 1.4 and later supports the asyncpg dialect natively. 22 | Please refer to its documentation for details. Older SQLAlchemy versions 23 | may be used in tandem with a third-party adapter such as 24 | asyncpgsa_ or databases_. 25 | 26 | 27 | Can I use dot-notation with :class:`asyncpg.Record`? It looks cleaner. 28 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 29 | 30 | We decided against making :class:`asyncpg.Record` a named tuple 31 | because we want to keep the ``Record`` method namespace separate 32 | from the column namespace. That said, you can provide a custom ``Record`` 33 | class that implements dot-notation via the ``record_class`` argument to 34 | :func:`connect() ` or any of the Record-returning 35 | methods. 36 | 37 | .. code-block:: python 38 | 39 | class MyRecord(asyncpg.Record): 40 | def __getattr__(self, name): 41 | return self[name] 42 | 43 | 44 | Why can't I use a :ref:`cursor ` outside of a transaction? 45 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 46 | 47 | Cursors created by a call to 48 | :meth:`Connection.cursor() ` or 49 | :meth:`PreparedStatement.cursor() \ 50 | ` 51 | cannot be used outside of a transaction. 
Any such attempt will result in 52 | ``InterfaceError``. 53 | To create a cursor usable outside of a transaction, use the 54 | ``DECLARE ... CURSOR WITH HOLD`` SQL statement directly. 55 | 56 | 57 | .. _asyncpg-prepared-stmt-errors: 58 | 59 | Why am I getting prepared statement errors? 60 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 61 | 62 | If you are getting intermittent ``prepared statement "__asyncpg_stmt_xx__" 63 | does not exist`` or ``prepared statement “__asyncpg_stmt_xx__” 64 | already exists`` errors, you are most likely not connecting to the 65 | PostgreSQL server directly, but via 66 | `pgbouncer `_. pgbouncer, when 67 | in the ``"transaction"`` or ``"statement"`` pooling mode, does not support 68 | prepared statements. You have several options: 69 | 70 | * if you are using pgbouncer only to reduce the cost of new connections 71 | (as opposed to using pgbouncer for connection pooling from 72 | a large number of clients in the interest of better scalability), 73 | switch to the :ref:`connection pool ` 74 | functionality provided by asyncpg, it is a much better option for this 75 | purpose; 76 | 77 | * disable automatic use of prepared statements by passing 78 | ``statement_cache_size=0`` 79 | to :func:`asyncpg.connect() ` and 80 | :func:`asyncpg.create_pool() ` 81 | (and, obviously, avoid the use of 82 | :meth:`Connection.prepare() `); 83 | 84 | * switch pgbouncer's ``pool_mode`` to ``session``. 85 | 86 | 87 | Why do I get ``PostgresSyntaxError`` when using ``expression IN $1``? 88 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 89 | 90 | ``expression IN $1`` is not a valid PostgreSQL syntax. To check 91 | a value against a sequence use ``expression = any($1::mytype[])``, 92 | where ``mytype`` is the array element type. 93 | 94 | .. _asyncpgsa: https://github.com/CanopyTax/asyncpgsa 95 | .. 
_databases: https://github.com/encode/databases 96 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://github.com/MagicStack/asyncpg/workflows/Tests/badge.svg 2 | :target: https://github.com/MagicStack/asyncpg/actions?query=workflow%3ATests+branch%3Amaster 3 | :alt: GitHub Actions status 4 | 5 | .. image:: https://img.shields.io/pypi/status/asyncpg.svg?maxAge=2592000?style=plastic 6 | :target: https://pypi.python.org/pypi/asyncpg 7 | 8 | 9 | ======= 10 | asyncpg 11 | ======= 12 | 13 | **asyncpg** is a database interface library designed specifically for 14 | PostgreSQL and Python/asyncio. asyncpg is an efficient, clean implementation 15 | of PostgreSQL server binary protocol for use with Python's ``asyncio`` 16 | framework. 17 | 18 | **asyncpg** requires Python 3.8 or later and is supported for PostgreSQL 19 | versions 9.5 to 17. Other PostgreSQL versions or other databases implementing 20 | the PostgreSQL protocol *may* work, but are not being actively tested. 21 | 22 | Contents 23 | -------- 24 | 25 | .. toctree:: 26 | :maxdepth: 2 27 | 28 | installation 29 | usage 30 | api/index 31 | faq 32 | -------------------------------------------------------------------------------- /docs/installation.rst: -------------------------------------------------------------------------------- 1 | .. _asyncpg-installation: 2 | 3 | 4 | Installation 5 | ============ 6 | 7 | **asyncpg** has no external dependencies when not using GSSAPI/SSPI 8 | authentication. The recommended way to install it is to use **pip**: 9 | 10 | .. code-block:: bash 11 | 12 | $ pip install asyncpg 13 | 14 | If you need GSSAPI/SSPI authentication, the recommended way is to use 15 | 16 | .. code-block:: bash 17 | 18 | $ pip install 'asyncpg[gssauth]' 19 | 20 | This installs SSPI support on Windows and GSSAPI support on non-Windows 21 | platforms. 
SSPI and GSSAPI interoperate as clients and servers: an SSPI 22 | client can authenticate to a GSSAPI server and vice versa. 23 | 24 | On Linux installing GSSAPI requires a working C compiler and Kerberos 5 25 | development files. The latter can be obtained by installing **libkrb5-dev** 26 | package on Debian/Ubuntu or **krb5-devel** on RHEL/Fedora. (This is needed 27 | because PyPI does not have Linux wheels for **gssapi**. See `here for the 28 | details `_.) 29 | 30 | It is also possible to use GSSAPI on Windows: 31 | 32 | * `pip install gssapi` 33 | * Install `Kerberos for Windows `_. 34 | * Set the ``gsslib`` parameter or the ``PGGSSLIB`` environment variable to 35 | `gssapi` when connecting. 36 | 37 | 38 | Building from source 39 | -------------------- 40 | 41 | If you want to build **asyncpg** from a Git checkout you will need: 42 | 43 | * To have cloned the repo with `--recurse-submodules`. 44 | * A working C compiler. 45 | * CPython header files. These can usually be obtained by installing 46 | the relevant Python development package: **python3-dev** on Debian/Ubuntu, 47 | **python3-devel** on RHEL/Fedora. 48 | 49 | Once the above requirements are satisfied, run the following command 50 | in the root of the source checkout: 51 | 52 | .. code-block:: bash 53 | 54 | $ pip install -e . 55 | 56 | A debug build containing more runtime checks can be created by setting 57 | the ``ASYNCPG_DEBUG`` environment variable when building: 58 | 59 | .. code-block:: bash 60 | 61 | $ env ASYNCPG_DEBUG=1 pip install -e . 62 | 63 | 64 | Running tests 65 | ------------- 66 | 67 | 68 | If you want to run tests you must have PostgreSQL installed. 69 | 70 | To execute the testsuite run: 71 | 72 | .. 
code-block:: bash 73 | 74 | $ python setup.py test 75 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinxcontrib-asyncio 2 | sphinx_rtd_theme 3 | -------------------------------------------------------------------------------- /performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MagicStack/asyncpg/5b14653e0b447d956aa01ec658562138e19f0293/performance.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "asyncpg" 3 | description = "An asyncio PostgreSQL driver" 4 | authors = [{name = "MagicStack Inc", email = "hello@magic.io"}] 5 | requires-python = '>=3.8.0' 6 | readme = "README.rst" 7 | license = {text = "Apache License, Version 2.0"} 8 | dynamic = ["version"] 9 | keywords = [ 10 | "database", 11 | "postgres", 12 | ] 13 | classifiers = [ 14 | "Development Status :: 5 - Production/Stable", 15 | "Framework :: AsyncIO", 16 | "Intended Audience :: Developers", 17 | "License :: OSI Approved :: Apache Software License", 18 | "Operating System :: POSIX", 19 | "Operating System :: MacOS :: MacOS X", 20 | "Operating System :: Microsoft :: Windows", 21 | "Programming Language :: Python :: 3 :: Only", 22 | "Programming Language :: Python :: 3.8", 23 | "Programming Language :: Python :: 3.9", 24 | "Programming Language :: Python :: 3.10", 25 | "Programming Language :: Python :: 3.11", 26 | "Programming Language :: Python :: 3.12", 27 | "Programming Language :: Python :: Implementation :: CPython", 28 | "Topic :: Database :: Front-Ends", 29 | ] 30 | dependencies = [ 31 | 'async_timeout>=4.0.3; python_version < "3.11.0"', 32 | ] 33 | 34 | [project.urls] 35 | github = 
"https://github.com/MagicStack/asyncpg" 36 | 37 | [project.optional-dependencies] 38 | gssauth = [ 39 | 'gssapi; platform_system != "Windows"', 40 | 'sspilib; platform_system == "Windows"', 41 | ] 42 | test = [ 43 | 'flake8~=6.1', 44 | 'flake8-pyi~=24.1.0', 45 | 'distro~=1.9.0', 46 | 'uvloop>=0.15.3; platform_system != "Windows" and python_version < "3.14.0"', 47 | 'gssapi; platform_system == "Linux"', 48 | 'k5test; platform_system == "Linux"', 49 | 'sspilib; platform_system == "Windows"', 50 | 'mypy~=1.8.0', 51 | ] 52 | docs = [ 53 | 'Sphinx~=8.1.3', 54 | 'sphinx_rtd_theme>=1.2.2', 55 | ] 56 | 57 | [build-system] 58 | requires = [ 59 | "setuptools>=60", 60 | "wheel", 61 | "Cython(>=0.29.24,<4.0.0)" 62 | ] 63 | build-backend = "setuptools.build_meta" 64 | 65 | [tool.setuptools] 66 | zip-safe = false 67 | 68 | [tool.setuptools.packages.find] 69 | include = ["asyncpg", "asyncpg.*"] 70 | 71 | [tool.setuptools.exclude-package-data] 72 | "*" = ["*.c", "*.h"] 73 | 74 | [tool.cibuildwheel] 75 | build-frontend = "build" 76 | test-extras = "test" 77 | 78 | [tool.cibuildwheel.macos] 79 | before-all = ".github/workflows/install-postgres.sh" 80 | test-command = "python {project}/tests/__init__.py" 81 | 82 | [tool.cibuildwheel.windows] 83 | test-command = "python {project}\\tests\\__init__.py" 84 | 85 | [tool.cibuildwheel.linux] 86 | before-all = """ 87 | .github/workflows/install-postgres.sh \ 88 | && .github/workflows/install-krb5.sh \ 89 | """ 90 | test-command = """\ 91 | PY=`which python` \ 92 | && chmod -R go+rX "$(dirname $(dirname $(dirname $PY)))" \ 93 | && su -l apgtest -c "$PY {project}/tests/__init__.py" \ 94 | """ 95 | 96 | [tool.pytest.ini_options] 97 | addopts = "--capture=no --assert=plain --strict-markers --tb=native --import-mode=importlib" 98 | testpaths = "tests" 99 | filterwarnings = "default" 100 | 101 | [tool.coverage.run] 102 | branch = true 103 | plugins = ["Cython.Coverage"] 104 | parallel = true 105 | source = ["asyncpg/", "tests/"] 106 | omit = 
["*.pxd"] 107 | 108 | [tool.coverage.report] 109 | exclude_lines = [ 110 | "pragma: no cover", 111 | "def __repr__", 112 | "if debug", 113 | "raise NotImplementedError", 114 | "if __name__ == .__main__.", 115 | ] 116 | show_missing = true 117 | 118 | [tool.mypy] 119 | exclude = [ 120 | "^.eggs", 121 | "^.github", 122 | "^.vscode", 123 | "^build", 124 | "^dist", 125 | "^docs", 126 | "^tests", 127 | ] 128 | incremental = true 129 | strict = true 130 | implicit_reexport = true 131 | 132 | [[tool.mypy.overrides]] 133 | module = [ 134 | "asyncpg._testbase", 135 | "asyncpg._testbase.*", 136 | "asyncpg.cluster", 137 | "asyncpg.connect_utils", 138 | "asyncpg.connection", 139 | "asyncpg.connresource", 140 | "asyncpg.cursor", 141 | "asyncpg.exceptions", 142 | "asyncpg.exceptions.*", 143 | "asyncpg.pool", 144 | "asyncpg.prepared_stmt", 145 | "asyncpg.transaction", 146 | "asyncpg.utils", 147 | ] 148 | ignore_errors = true 149 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import sys 9 | 10 | if sys.version_info < (3, 8): 11 | raise RuntimeError('asyncpg requires Python 3.8 or greater') 12 | 13 | import os 14 | import os.path 15 | import pathlib 16 | import platform 17 | import re 18 | import subprocess 19 | 20 | # We use vanilla build_ext, to avoid importing Cython via 21 | # the setuptools version. 
22 | import setuptools 23 | from setuptools.command import build_py as setuptools_build_py 24 | from setuptools.command import sdist as setuptools_sdist 25 | from setuptools.command import build_ext as setuptools_build_ext 26 | 27 | 28 | CYTHON_DEPENDENCY = 'Cython(>=0.29.24,<4.0.0)' 29 | 30 | CFLAGS = ['-O2'] 31 | LDFLAGS = [] 32 | 33 | if platform.uname().system != 'Windows': 34 | CFLAGS.extend(['-fsigned-char', '-Wall', '-Wsign-compare', '-Wconversion']) 35 | 36 | 37 | _ROOT = pathlib.Path(__file__).parent 38 | 39 | 40 | with open(str(_ROOT / 'README.rst')) as f: 41 | readme = f.read() 42 | 43 | 44 | with open(str(_ROOT / 'asyncpg' / '_version.py')) as f: 45 | for line in f: 46 | if line.startswith('__version__: typing.Final ='): 47 | _, _, version = line.partition('=') 48 | VERSION = version.strip(" \n'\"") 49 | break 50 | else: 51 | raise RuntimeError( 52 | 'unable to read the version from asyncpg/_version.py') 53 | 54 | 55 | if (_ROOT / '.git').is_dir() and 'dev' in VERSION: 56 | # This is a git checkout, use git to 57 | # generate a precise version. 58 | def git_commitish(): 59 | env = {} 60 | v = os.environ.get('PATH') 61 | if v is not None: 62 | env['PATH'] = v 63 | 64 | git = subprocess.run(['git', 'rev-parse', 'HEAD'], env=env, 65 | cwd=str(_ROOT), stdout=subprocess.PIPE) 66 | if git.returncode == 0: 67 | commitish = git.stdout.strip().decode('ascii') 68 | else: 69 | commitish = 'unknown' 70 | 71 | return commitish 72 | 73 | VERSION += '+' + git_commitish()[:7] 74 | 75 | 76 | class VersionMixin: 77 | 78 | def _fix_version(self, filename): 79 | # Replace asyncpg.__version__ with the actual version 80 | # of the distribution (possibly inferred from git). 
81 | 82 | with open(str(filename)) as f: 83 | content = f.read() 84 | 85 | version_re = r"(.*__version__\s*=\s*)'[^']+'(.*)" 86 | repl = r"\1'{}'\2".format(self.distribution.metadata.version) 87 | content = re.sub(version_re, repl, content) 88 | 89 | with open(str(filename), 'w') as f: 90 | f.write(content) 91 | 92 | 93 | class sdist(setuptools_sdist.sdist, VersionMixin): 94 | 95 | def make_release_tree(self, base_dir, files): 96 | super().make_release_tree(base_dir, files) 97 | self._fix_version(pathlib.Path(base_dir) / 'asyncpg' / '_version.py') 98 | 99 | 100 | class build_py(setuptools_build_py.build_py, VersionMixin): 101 | 102 | def build_module(self, module, module_file, package): 103 | outfile, copied = super().build_module(module, module_file, package) 104 | 105 | if module == '__init__' and package == 'asyncpg': 106 | self._fix_version(outfile) 107 | 108 | return outfile, copied 109 | 110 | 111 | class build_ext(setuptools_build_ext.build_ext): 112 | 113 | user_options = setuptools_build_ext.build_ext.user_options + [ 114 | ('cython-always', None, 115 | 'run cythonize() even if .c files are present'), 116 | ('cython-annotate', None, 117 | 'Produce a colorized HTML version of the Cython source.'), 118 | ('cython-directives=', None, 119 | 'Cython compiler directives'), 120 | ] 121 | 122 | def initialize_options(self): 123 | # initialize_options() may be called multiple times on the 124 | # same command object, so make sure not to override previously 125 | # set options. 
126 | if getattr(self, '_initialized', False): 127 | return 128 | 129 | super(build_ext, self).initialize_options() 130 | 131 | if os.environ.get('ASYNCPG_DEBUG'): 132 | self.cython_always = True 133 | self.cython_annotate = True 134 | self.cython_directives = "linetrace=True" 135 | self.define = 'PG_DEBUG,CYTHON_TRACE,CYTHON_TRACE_NOGIL' 136 | self.debug = True 137 | else: 138 | self.cython_always = False 139 | self.cython_annotate = None 140 | self.cython_directives = None 141 | 142 | def finalize_options(self): 143 | # finalize_options() may be called multiple times on the 144 | # same command object, so make sure not to override previously 145 | # set options. 146 | if getattr(self, '_initialized', False): 147 | return 148 | 149 | if not self.cython_always: 150 | self.cython_always = bool(os.environ.get( 151 | "ASYNCPG_BUILD_CYTHON_ALWAYS")) 152 | 153 | if self.cython_annotate is None: 154 | self.cython_annotate = os.environ.get( 155 | "ASYNCPG_BUILD_CYTHON_ANNOTATE") 156 | 157 | if self.cython_directives is None: 158 | self.cython_directives = os.environ.get( 159 | "ASYNCPG_BUILD_CYTHON_DIRECTIVES") 160 | 161 | need_cythonize = self.cython_always 162 | cfiles = {} 163 | 164 | for extension in self.distribution.ext_modules: 165 | for i, sfile in enumerate(extension.sources): 166 | if sfile.endswith('.pyx'): 167 | prefix, ext = os.path.splitext(sfile) 168 | cfile = prefix + '.c' 169 | 170 | if os.path.exists(cfile) and not self.cython_always: 171 | extension.sources[i] = cfile 172 | else: 173 | if os.path.exists(cfile): 174 | cfiles[cfile] = os.path.getmtime(cfile) 175 | else: 176 | cfiles[cfile] = 0 177 | need_cythonize = True 178 | 179 | if need_cythonize: 180 | import pkg_resources 181 | 182 | # Double check Cython presence in case setup_requires 183 | # didn't go into effect (most likely because someone 184 | # imported Cython before setup_requires injected the 185 | # correct egg into sys.path. 
186 | try: 187 | import Cython 188 | except ImportError: 189 | raise RuntimeError( 190 | 'please install {} to compile asyncpg from source'.format( 191 | CYTHON_DEPENDENCY)) 192 | 193 | cython_dep = pkg_resources.Requirement.parse(CYTHON_DEPENDENCY) 194 | if Cython.__version__ not in cython_dep: 195 | raise RuntimeError( 196 | 'asyncpg requires {}, got Cython=={}'.format( 197 | CYTHON_DEPENDENCY, Cython.__version__ 198 | )) 199 | 200 | from Cython.Build import cythonize 201 | 202 | directives = { 203 | 'language_level': '3', 204 | } 205 | 206 | if self.cython_directives: 207 | for directive in self.cython_directives.split(','): 208 | k, _, v = directive.partition('=') 209 | if v.lower() == 'false': 210 | v = False 211 | if v.lower() == 'true': 212 | v = True 213 | 214 | directives[k] = v 215 | 216 | self.distribution.ext_modules[:] = cythonize( 217 | self.distribution.ext_modules, 218 | compiler_directives=directives, 219 | annotate=self.cython_annotate) 220 | 221 | super(build_ext, self).finalize_options() 222 | 223 | 224 | setup_requires = [] 225 | 226 | if ( 227 | not (_ROOT / 'asyncpg' / 'protocol' / 'protocol.c').exists() 228 | or os.environ.get("ASYNCPG_BUILD_CYTHON_ALWAYS") 229 | ): 230 | # No Cython output, require Cython to build. 
231 | setup_requires.append(CYTHON_DEPENDENCY) 232 | 233 | 234 | setuptools.setup( 235 | version=VERSION, 236 | ext_modules=[ 237 | setuptools.extension.Extension( 238 | "asyncpg.pgproto.pgproto", 239 | ["asyncpg/pgproto/pgproto.pyx"], 240 | extra_compile_args=CFLAGS, 241 | extra_link_args=LDFLAGS), 242 | 243 | setuptools.extension.Extension( 244 | "asyncpg.protocol.protocol", 245 | ["asyncpg/protocol/record/recordobj.c", 246 | "asyncpg/protocol/protocol.pyx"], 247 | include_dirs=['asyncpg/pgproto/'], 248 | extra_compile_args=CFLAGS, 249 | extra_link_args=LDFLAGS), 250 | ], 251 | cmdclass={'build_ext': build_ext, 'build_py': build_py, 'sdist': sdist}, 252 | setup_requires=setup_requires, 253 | ) 254 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import pathlib 9 | import sys 10 | import unittest 11 | 12 | 13 | def suite(): 14 | test_loader = unittest.TestLoader() 15 | test_suite = test_loader.discover(str(pathlib.Path(__file__).parent), 16 | pattern='test_*.py') 17 | return test_suite 18 | 19 | 20 | if __name__ == '__main__': 21 | runner = unittest.runner.TextTestRunner(verbosity=2) 22 | result = runner.run(suite()) 23 | sys.exit(not result.wasSuccessful()) 24 | -------------------------------------------------------------------------------- /tests/certs/ca.cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIGJjCCBA6gAwIBAgIICJCUmtkcj2MwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNV 3 | BAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYD 4 | VQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAb 5 | 
BgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxs 6 | b0BtYWdpYy5pbzAeFw0yNDEwMTYxNzIzNTZaFw00MzEyMTcxNzIzNTZaMIGhMQsw 7 | CQYDVQQGEwJDQTEQMA4GA1UECAwHT250YXJpbzEQMA4GA1UEBwwHVG9yb250bzEY 8 | MBYGA1UECgwPTWFnaWNTdGFjayBJbmMuMRYwFAYDVQQLDA1hc3luY3BnIHRlc3Rz 9 | MR0wGwYDVQQDDBRhc3luY3BnIHRlc3Qgcm9vdCBjYTEdMBsGCSqGSIb3DQEJARYO 10 | aGVsbG9AbWFnaWMuaW8wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCP 11 | +oCl0qrReSlWj+yvfGz68UQqm6joL9VgeA0Tvc8S23Ia3S73wcTTdGhIQwMOaIuW 12 | y+m3J3js2wtpF0fmULYHr1ED7vQ+QOWarTyv/cGxSCyOYo4KVPHBfT6lYQTJk5NW 13 | Oc2wr5ff/9nhdO61sGxZa2GVBjmbLOJ9IBKTvRcmNgLmPo60wMHtF4L5/PuwVPuu 14 | +zRoETfEh12avtY7Y2G+0i4ZRm4uBmw7hmByWzWCwqrV619BaFHaJUf2bEh5eCbz 15 | 1nhF7WHVjBfnSJOgDxmZbKZZPmNzTVm8UxN22g9Ao6cZSxjbFAdpIhlQhAT6sjlW 16 | hvI6b58A3AJKi7zo+a7lnbPIeckduSkgbil3LZ4KxWgx6fPCBLqGH1XN6I8MQnX/ 17 | e1ewiFXwuZMb+FgoKxaQBseuPVaA3ViYefysjvLjP7U9eRzv6qRimOmH5efaplbD 18 | zGhRUKA8GgmN/B+S3ofqDhpp3zz7gFxjkE1f4/XNACqXt79iGaH+EscV4znxlsZj 19 | gUQYAcExpAmKrJg5kmxagHcgu0pVKlyUvSba/kKQ/aYDgdddgPutH+UHs5pssc69 20 | YBpEXQTG9CMeRh6ZUgcrR0foJLM5g2k53xpG1oTHiJcCKARFZPRpDoZ6NjCIuFKY 21 | 6+HMcpFRVDsDnUXmFah9bUhsSQbc6MHHX/iTbpMGNwIDAQABo2AwXjAPBgNVHRMB 22 | Af8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUhGQbAW97KXQs68Z3efEj 23 | 55zsc4UwHwYDVR0jBBgwFoAUhGQbAW97KXQs68Z3efEj55zsc4UwDQYJKoZIhvcN 24 | AQELBQADggIBADsy7jhBmwGbOZPox0XvB2XzWjOPl3uI3Ys3uGaAXVbGVnP3nDtU 25 | waGg7Fhf/ibQVAOkWLfm9FCJEO6bEojF4CjCa//iMqXgnPJaWeYceb8+CzuF5Ukg 26 | n/kfbj04dVvOnPa8KYkMOWQ6zsBgKuNaA5jOKWYwoHFgQNjKRiVikyOp6zF3aPu0 27 | wW7M7FOVHn0ZhMRBcJG8dGbQ8vaeu8z4i04tlvpQaFgtY66ECeUwhTIrvVuqtQOl 28 | jR//w70TUTIH3JzzYmyCubOCjdqcNRYPRRiA/L+mdzrE7honSTQfo0iupT/5bJcu 29 | GRjLHL/aRvYrq8ogqQKIYW0EbVuFzHfb+kPV61Bf5APbA26GU/14XkA4KwzJnDMR 30 | d2wr0RivSceXtY2ZakYP6+2cqjuhk6Y0tl0FBuyQXqAbe1L7X2VctLJMi5UgksVB 31 | q5rdHSJ3fbHRoCUpj4/rSafqJNHlAf2MEE/q8l0D8JhYoN69RhvyFQJLFEU4c74b 32 | XHdFt6bfyxm4+ZzUdj/TXadPAUO1YfQCn9Tf7QOoR68acSvQxEDbChZlJYkdAE+C 33 | 
zxNcoHVc6XIpk7NIr09qTQ5viz736fV6EI6OIoUaqrz9u+NZ3sPPD2Gf+rOinVFQ 34 | R2Q5kxHYo8Kt1DK0fFcUe1cOZk3df7seQWw1OdJngp5S7gEWBiWg8zr7 35 | -----END CERTIFICATE----- 36 | -------------------------------------------------------------------------------- /tests/certs/ca.crl.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN X509 CRL----- 2 | MIIDAjCB6wIBATANBgkqhkiG9w0BAQsFADCBoTELMAkGA1UEBhMCQ0ExEDAOBgNV 3 | BAgMB09udGFyaW8xEDAOBgNVBAcMB1Rvcm9udG8xGDAWBgNVBAoMD01hZ2ljU3Rh 4 | Y2sgSW5jLjEWMBQGA1UECwwNYXN5bmNwZyB0ZXN0czEdMBsGA1UEAwwUYXN5bmNw 5 | ZyB0ZXN0IHJvb3QgY2ExHTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvFw0y 6 | MTA5MTQxNjA2MDFaFw0yMTA5MTUxNjA2MDFaMBUwEwICEAAXDTIxMDkxNDE2MDYw 7 | MVowDQYJKoZIhvcNAQELBQADggIBAL4yfNmvGS8SkIVbRzdAC9+XJPw/dBJOUJwr 8 | EgERICAz7OTqG1PkmMhPL00Dm9fe52+KnSwHgL749W0S/X5rTNMSwLyGiiJ5HYbH 9 | GFRKQ/cvXLi4jYpSI1Ac94kk0japf3SfwEw3+122oba8SiAVP0nY3bHpHvNfOaDV 10 | fhbFTwb5bFm6ThqlKLZxGCKP0fGeQ4homuwgRiLE/UOiue5ted1ph0PkKVui208k 11 | FnhNYXSllakTGT8ZZZZVid/4tSHqJEY9vbdMXNv1GX8mhjoU1Gv9dOuyFGgUc9Vx 12 | e7gzf/Wf36vKI29o8QGkkTslRZpMG59z3sG4Y0vJEoqXMB6eQLOr5iUCyj2CyDha 13 | 66pwrdc1fRt3EvNXUWkdHfY3EHb7DxueedDEgtmfSNbEaZTXa5RaZRavNGNTaPDf 14 | UcrDU4w1N0wkYLQxPqd+VPcf1iKyfkAydpeOq9CChqRD0Tx58eTn6N/lLGFPPRfs 15 | x47BA4FmefBeXZzd5HiXCUouk3qHIHs2yCzFs+TEBkx5eV42cP++HxjirPydLf6Y 16 | G/o/TKRnc/2Lw+dCzvUV/p3geuw4+vq1BIFanwB9jp4tGaBrffIAyle8vPQLw6bp 17 | 1o1O39pdxniz+c9r0Kw/ETxTqRLbasSib5FHq5G/G9a+QxPsLAzKgwLWhR4fXvbu 18 | YPbhYhRP 19 | -----END X509 CRL----- 20 | -------------------------------------------------------------------------------- /tests/certs/ca.key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIJKAIBAAKCAgEAj/qApdKq0XkpVo/sr3xs+vFEKpuo6C/VYHgNE73PEttyGt0u 3 | 98HE03RoSEMDDmiLlsvptyd47NsLaRdH5lC2B69RA+70PkDlmq08r/3BsUgsjmKO 4 | ClTxwX0+pWEEyZOTVjnNsK+X3//Z4XTutbBsWWthlQY5myzifSASk70XJjYC5j6O 5 | 
tMDB7ReC+fz7sFT7rvs0aBE3xIddmr7WO2NhvtIuGUZuLgZsO4Zgcls1gsKq1etf 6 | QWhR2iVH9mxIeXgm89Z4Re1h1YwX50iToA8ZmWymWT5jc01ZvFMTdtoPQKOnGUsY 7 | 2xQHaSIZUIQE+rI5VobyOm+fANwCSou86Pmu5Z2zyHnJHbkpIG4pdy2eCsVoMenz 8 | wgS6hh9VzeiPDEJ1/3tXsIhV8LmTG/hYKCsWkAbHrj1WgN1YmHn8rI7y4z+1PXkc 9 | 7+qkYpjph+Xn2qZWw8xoUVCgPBoJjfwfkt6H6g4aad88+4BcY5BNX+P1zQAql7e/ 10 | Yhmh/hLHFeM58ZbGY4FEGAHBMaQJiqyYOZJsWoB3ILtKVSpclL0m2v5CkP2mA4HX 11 | XYD7rR/lB7OabLHOvWAaRF0ExvQjHkYemVIHK0dH6CSzOYNpOd8aRtaEx4iXAigE 12 | RWT0aQ6GejYwiLhSmOvhzHKRUVQ7A51F5hWofW1IbEkG3OjBx1/4k26TBjcCAwEA 13 | AQKCAgABseW8zf+TyrTZX4VeRX008Q0n4UA6R4HgClnBDz12T94Gge8RHJdYE+k8 14 | XImXLFTkWA8uyEispSF7wbnndLDH42D1RmVarEHnsb1ipv6WOy7HGFLqvThBWluX 15 | 783yH4oe/Dw3JcIIcYcbl9hNjD+iR9jUu8eG057w8SU21wWEPiOHmVntt80woNO6 16 | ZKeD2mRCGZPy260H474O2ctE1LUsXWYMhx857HpusvTEs90r5mXDcetjpjo8cq7n 17 | sDukLm1q9m3hCNvbezQ21UxjmHnpK/XDXDAohdMWG/ZBMmz2ilanvhITVieGLdAV 18 | ehBi8SEqqxkD5hd9l5lxTjbRmUrdRZilnUKqup9WcOTQYeAZ2WAazyYuFqWAwSf+ 19 | dU+SzMTG+7ts9y4RbnWL9H6hN2GWMeNdLRVqE4aECMv7kAIJZ2u6VyNXSEoVueBM 20 | CJ7CU075QgxNL1REDWRBaUaflBhdwQFnMXBULw2E01KZFmQvZLe06SI/xjkB7oGU 21 | HdqWRDx0YP8lrFG35ukA2t+EswJxcbZHsagEdrz0jjz0a87vjgHnff1XpowhZU6M 22 | 4OgtQpoM4t4O7xg/sl80c0WwVvsOHVkGwUARCfZ4F2fXnocpYOCWQQbsA/SH/qJ8 23 | l+ChM4XkBNzKAUtpwkozqisKURJKTAJyeuAKD4fXRX/IwcPUYQKCAQEAyp1iiuTX 24 | pXzDso+3WPxLr3kwYJSUxpxSP4EjZZvzJoVflFBttUOoLURPEMrK5tEqWHqRrJto 25 | 73s3yQt4xWUtUql5eCB69nIVjseRhsbXjNzMIC41u65aflfIqQztHzF2gdFMZh3I 26 | gBp87CzKHSf83ToN3QZtQxIvuPdYdxDIjCMHc5hgRSLNKGhKXs1qWA76ASGNwQKW 27 | 7nUflWfDG3yZ7sWtmz7T2djz2zsmmzppCRRVjHAxQWZ+TxW+KsBOpGzgNvteUese 28 | ZK2ARc6lLSdgS74J5U6j07dOzQZ4eVC/OPHAIbPZxJAZ7/waP7YM+h+ohU+G8kXL 29 | KevnXjsC2oa/FwKCAQEAteoHugnwXvl9VyPceGQeffmQIq095CoD35UVlq60yR/9 30 | zgGN8mrXuEgGyydCYrK0/pUYb1pQhk5Xy1D6t5ou44uYlGuksWDqquRwgl7qMMVE 31 | 0GAwm+3wUmz7u5XD3uEJaGWV+gbvg8Hbvl3V/MzjlI4caAZ3lcNaX/Jf3xG6Gyfi 32 | So0iQzVMN6NR7m+I32YFB3jxu9PlzUTEj+9SCHuERFAozuzwjdLwiYjNMzv0zPWj 33 | 
v3ERO2mX6PE6yN1XkBsCGGG9qVz/ZzvKOz8Dl4TryY0a5eg4QUEZ3nUlnpq9/8M3 34 | xcN6M2yK8XLbTmVhSHX2J5nVI3s+BTbVHBoO0edl4QKCAQBcmMbTUThYkgdh0Jpr 35 | WYpBXHJGgUDo78IK8bq6kiXygdunjYZF4/C1F1XHB9bo28itfP6cUr4HTFm3UL3W 36 | AKJQ99DinH11qbe+c+hHHxKddr73Kgc2ib0jpny2/YhUzCcrtvpiZNQf73sN+H46 37 | Cu9eL0zsqSZAE8ypjKjqaUot+UhLhOTiU8BM6jSq1Nf3/Ig3Ah2lishtnCtd/XjG 38 | VBCJdeAcZf8tvR/dHlBLestL8fYS46cvC2dIP1iUcyS9smBZ4FE/wOM4Aa7wuDr2 39 | wtsYYnZlTKZEeK7TtlRSpRtvK9Sx0l8AnRatfZqFaW7O1K8QlcLHcCwkMYKgpvlr 40 | 407rAoIBAQCi5nqa1xGgCux53wwr5wQDLTssQlS8//7N9ZQKhlIwFOzT0EKLha+9 41 | PwqOW46wEXXQ0DS8anTXgEpQMCkDxxcb/sLYjfhCOxaJh91Ucahnmg+ARdLhn1Xo 42 | id124qsu5/fju6xs5E8RfsTHmQHpypQ1UHkRklD+FJzWdJXzjM1KShHzTqUS6CRj 43 | YmYZDVnVK2dvhJd76knL4jve5KFiJTGRdvLEMhtL9Uwe7RlMOvGBpKpI4fhbarh1 44 | CafpfYRO8FCVAtmzUysHB9yV51zRD1+R8kDXBndxv9lpgx/4AnwID4nfF6hTamyV 45 | wJOwhUpzd+bBGZlql483Xh3Cd3cz8nIhAoIBACs/XIDpXojtWopHXZReNwhqPC1D 46 | q3rjpPrZ8uqDu0Z/iTTO9OSvYaMBTVjXQ7w8T3X3ilMr45kpsHx0TQeh3Jbjy459 47 | S9z+6MtSIM0fbpYBEfa7sirDQM/ZlgZjm7vq/4lBVFGFIw7vxu4m/G0oHtihWRKh 48 | ClGG1Ypm00srgWihhjtRn8hfnLqCi4t9xxW1q8Te01Gem8H0nfNKfs5V8O4cKIZa 49 | izrfne/1Fto1khYFTlP6XdVHPjvl2/qX2WUz4G+2eNWGQVghC70cuV8kiFYlEXVp 50 | a6w2oSx8jo+5qRZrMlUQP5bE7dOBvZuoBmEi/FVfRYuFdxSZ3H2VAZKRgC4= 51 | -----END RSA PRIVATE KEY----- 52 | -------------------------------------------------------------------------------- /tests/certs/client.cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEAzCCAuugAwIBAgIUPfej8IQ/5bCrihqWImrq2vKPOq0wDQYJKoZIhvcNAQEL 3 | BQAwgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdU 4 | b3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5j 5 | cGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0ExHTAbBgkq 6 | hkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMB4XDTIxMDgwOTIxNTA1MloXDTMyMDEw 7 | NDIxNTA1MlowgZUxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD 8 | VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM 9 
| DWFzeW5jcGcgdGVzdHMxETAPBgNVBAMMCHNzbF91c2VyMR0wGwYJKoZIhvcNAQkB 10 | Fg5oZWxsb0BtYWdpYy5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB 11 | AJjiP9Ik/KRRLK9GMvoH8m1LO+Gyrr8Gz36LpmKJMR/PpwTL+1pOkYSGhOyT3Cw9 12 | /kWWLJRCvYqKgFtYtbr4S6ReGm3GdSVW+sfVRYDrRQZLPgQSPeq25g2v8UZ63Ota 13 | lPAyUPUZKpxyWz8PL77lV8psb9yv14yBH2kv9BbxKPksWOU8p8OCn1Z3WFFl0ItO 14 | nzMvCp5os+xFrt4SpoRGTx9x4QleY+zrEsYZtmnV4wC+JuJkNw4fuCdrX5k7dghs 15 | uZkcsAZof1nMdYsYiazeDfQKZtJqh5kO7mpwvCudKUWaLJJUwiQA87BwSlnCd/Hh 16 | TZDbC+zeFNjTS49/4Q72xVECAwEAAaM7MDkwHwYDVR0jBBgwFoAUi1jMmAisuOib 17 | mHIE2n0W2WnnaL0wCQYDVR0TBAIwADALBgNVHQ8EBAMCBPAwDQYJKoZIhvcNAQEL 18 | BQADggEBACbnp5oOp639ko4jn8axF+so91k0vIcgwDg+NqgtSRsuAENGumHAa8ec 19 | YOks0TCTvNN5E6AfNSxRat5CyguIlJ/Vy3KbkkFNXcCIcI/duAJvNphg7JeqYlQM 20 | VIJhrO/5oNQMzzTw8XzTHnciGbrbiZ04hjwrruEkvmIAwgQPhIgq4H6umTZauTvk 21 | DEo7uLm7RuG9hnDyWCdJxLLljefNL/EAuDYpPzgTeEN6JAnOu0ULIbpxpJKiYEId 22 | 8I0U2n0I2NTDOHmsAJiXf8BiHHmpK5SXFyY9s2ZuGkCzvmeZlR81tTXmHZ3v1X2z 23 | 8NajoAZfJ+QD50DrbF5E00yovZbyIB4= 24 | -----END CERTIFICATE----- 25 | -------------------------------------------------------------------------------- /tests/certs/client.csr.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE REQUEST----- 2 | MIIC2zCCAcMCAQAwgZUxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAw 3 | DgYDVQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNV 4 | BAsMDWFzeW5jcGcgdGVzdHMxETAPBgNVBAMMCHNzbF91c2VyMR0wGwYJKoZIhvcN 5 | AQkBFg5oZWxsb0BtYWdpYy5pbzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC 6 | ggEBAJjiP9Ik/KRRLK9GMvoH8m1LO+Gyrr8Gz36LpmKJMR/PpwTL+1pOkYSGhOyT 7 | 3Cw9/kWWLJRCvYqKgFtYtbr4S6ReGm3GdSVW+sfVRYDrRQZLPgQSPeq25g2v8UZ6 8 | 3OtalPAyUPUZKpxyWz8PL77lV8psb9yv14yBH2kv9BbxKPksWOU8p8OCn1Z3WFFl 9 | 0ItOnzMvCp5os+xFrt4SpoRGTx9x4QleY+zrEsYZtmnV4wC+JuJkNw4fuCdrX5k7 10 | dghsuZkcsAZof1nMdYsYiazeDfQKZtJqh5kO7mpwvCudKUWaLJJUwiQA87BwSlnC 11 | d/HhTZDbC+zeFNjTS49/4Q72xVECAwEAAaAAMA0GCSqGSIb3DQEBCwUAA4IBAQCG 12 | 
irI2ph09V/4BMe6QMhjBFUatwmTa/05PYGjvT3LAhRzEb3/o/gca0XFSAFrE6zIY 13 | DsgMk1c8aLr9DQsn9cf22oMFImKdnIZ3WLE9MXjN+s1Bjkiqt7uxDpxPo/DdfUTQ 14 | RQC5i/Z2tn29y9K09lEjp35ZhPp3tOA0V4CH0FThAjRR+amwaBjxQ7TTSNfoMUd7 15 | i/DrylwnNg1iEQmYUwJYopqgxtwseiBUSDXzEvjFPY4AvZKmEQmE5QkybpWIfivt 16 | 1kmKhvKKpn5Cb6c0D3XoYqyPN3TxqjH9L8R+tWUCwhYJeDZj5DumFr3Hw/sx8tOL 17 | EctyS6XfO3S2KbmDiyv8 18 | -----END CERTIFICATE REQUEST----- 19 | -------------------------------------------------------------------------------- /tests/certs/client.key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEAmOI/0iT8pFEsr0Yy+gfybUs74bKuvwbPfoumYokxH8+nBMv7 3 | Wk6RhIaE7JPcLD3+RZYslEK9ioqAW1i1uvhLpF4abcZ1JVb6x9VFgOtFBks+BBI9 4 | 6rbmDa/xRnrc61qU8DJQ9RkqnHJbPw8vvuVXymxv3K/XjIEfaS/0FvEo+SxY5Tyn 5 | w4KfVndYUWXQi06fMy8Knmiz7EWu3hKmhEZPH3HhCV5j7OsSxhm2adXjAL4m4mQ3 6 | Dh+4J2tfmTt2CGy5mRywBmh/Wcx1ixiJrN4N9Apm0mqHmQ7uanC8K50pRZosklTC 7 | JADzsHBKWcJ38eFNkNsL7N4U2NNLj3/hDvbFUQIDAQABAoIBAAIMVeqM0E2rQLwA 8 | ZsJuxNKuBVlauXiZsMHzQQFk8SGJ+KTZzr5A+zYZT0KUIIj/M57fCi3aTwvCG0Ie 9 | CCE/HlRPZm8+D2e2qJlwxAOcI0qYS3ZmgCna1W4tgz/8eWU1y3UEV41RDv8VkR9h 10 | JrSaAfkWRtFgEbUyLaeNGuoLxQ7Bggo9zi1/xDJz/aZ/y4L4y8l1xs2eNVmbRGnj 11 | mPr1daeYhsWgaNiT/Wm3CAxvykptHavyWSsrXzCp0bEw6fAXxBqkeDFGIMVC9q3t 12 | ZRFtqMHi9i7SJtH1XauOC6QxLYgSEmNEie1JYbNx2Zf4h2KvSwDxpTqWhOjJ/m5j 13 | /NSkASECgYEAyHQAqG90yz5QaYnC9lgUhGIMokg9O3LcEbeK7IKIPtC9xINOrnj6 14 | ecCfhfc1aP3wQI+VKC3kiYerfTJvVsU5CEawBQSRiBY/TZZ7hTR7Rkm3s4xeM+o6 15 | 2zADdVUwmTVYwu0gUKCeDKO4iD8Uhh8J54JrKUejuG50VWZQWGVgqo0CgYEAwz+2 16 | VdYcfuQykMA3jQBnXmMMK92/Toq6FPDgsa45guEFD6Zfdi9347/0Ipt+cTNg0sUZ 17 | YBLOnNPwLn+yInfFa88Myf0UxCAOoZKfpJg/J27soUJzpd/CGx+vaAHrxMP6t/qo 18 | JAGMBIyOoqquId7jvErlC/sGBk/duya7IdiT1tUCgYBuvM8EPhaKlVE9DJL9Hmmv 19 | PK94E2poZiq3SutffzkfYpgDcPrNnh3ZlxVJn+kMqITKVcfz226On7mYP32MtQWt 20 | 0cc57m0rfgbYqRJx4y1bBiyK7ze3fGWpYxv1/OsNKJBxlygsAp9toiC2fAqtkYYa 21 | 
NE1ZD6+dmr9/0jb+rnq5nQKBgQCtZvwsp4ePOmOeItgzJdSoAxdgLgQlYRd6WaN0 22 | qeLx1Z6FE6FceTPk1SmhQq+9IYAwMFQk+w78QU3iPg6ahfyTjsMw8M9sj3vvCyU1 23 | LPGJt/34CehjvKHLLQy/NlWJ3vPgSYDi2Wzc7WgQF72m3ykqpOlfBoWHPY8TE4bG 24 | vG4wMQKBgFSq2GDAJ1ovBl7yWYW7w4SM8X96YPOff+OmI4G/8+U7u3dDM1dYeQxD 25 | 7BHLuvr4AXg27LC97u8/eFIBXC1elbco/nAKE1YHj2xcIb/4TsgAqkcysGV08ngi 26 | dULh3q0GpTYyuELZV4bfWE8MjSiGAH+nuMdXYDGuY2QnBq8MdSOH 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/certs/client.key.protected.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | Proc-Type: 4,ENCRYPTED 3 | DEK-Info: AES-256-CBC,B222CD7D00828606A07DBC489D400921 4 | 5 | LRHsNGUsD5bG9+x/1UlzImN0rqEF10sFPBmxKeQpXQ/hy4iR+X/Gagoyagi23wOn 6 | EZf0sCLJx95ixG+4fXJDX0jgBtqeziVNS4FLWHIuf3+blja8nf4tkmmH9pF8jFQ0 7 | i1an3TP6KRyDKa17gioOdtSsS51BZmPkp3MByJQsrMhyB0txEUsGtUMaBTYmVN/5 8 | uYHf9MsmfcfQy30nt2t6St6W82QupHHMOx5xyhPJo8cqQncZC7Dwo4hyDV3h3vWn 9 | UjaRZiEMmQ3IgCwfJd1VmMECvrwXd/sTOXNhofWwDQIqmQ3GGWdrRnmgD863BQT3 10 | V8RVyPLkutOnrZ/kiMSAuiXGsSYK0TV8F9TaP/abLob4P8jbKYLcuR7ws3cu1xBl 11 | XWt9RALxGPUyHIy+BWLXJTYL8T+TVJpiKsAGCQB54j8VQBSArwFL4LnzdUu1txe2 12 | qa6ZEwt4q6SEwOTJpJWz3oJ1j+OTsRCN+4dlyo7sEZMeyTRp9nUzwulhd+fOdIhY 13 | 2UllMG71opKfNxZzEW7lq6E/waf0MmxwjUJmgwVO218yag9oknHnoFwewF42DGY7 14 | 072h23EJeKla7sI+MAB18z01z6C/yHWXLybOlXaGqk6zOm3OvTUFnUXtKzlBO2v3 15 | FQwrOE5U/VEyQkNWzHzh4j4LxYEL9/B08PxaveUwvNVGn9I3YknE6uMfcU7VuxDq 16 | +6bgM6r+ez+9QLFSjH/gQuPs2DKX0h3b9ppQNx+MANX0DEGbGabJiBp887f8pG6Q 17 | tW0i0+rfzYz3JwnwIuMZjYz6qUlP4bJMEmmDfod3fbnvg3MoCSMTUvi1Tq3Iiv4L 18 | GM5/YNkL0V3PhOI686aBfU7GLGXQFhdbQ9xrSoQRBmmNBqTCSf+iIEoTxlBac8GQ 19 | vSzDO+A+ovBP36K13Yn7gzuN/3PLZXH2TZ8t2b/OkEXOciH5KbycGHQA7gqxX1P4 20 | J55gpqPAWe8e7wKheWj3BMfmbWuH4rpiEkrLpqbTSfTwIKqplk253chmJj5I82XI 21 | ioFLS5vCi9JJsTrQ720O+VQPVB5xeA80WL8NxamWQb/KkvVnb4dTmaV30RCgLLZC 22 | 
tuMx8YSW71ALLT15qFB2zlMDKZO1jjunNE71BUFBPIkTKEOCyMAiF60fFeIWezxy 23 | kvBBOg7+MTcZNeW110FqRWNGr2A5KYFN15g+YVpfEoF26slHisSjVW5ndzGh0kaQ 24 | sIOjQitA9JYoLua7sHvsr6H5KdCGjNxv7O7y8wLGBVApRhU0wxZtbClqqEUvCLLP 25 | UiLDp9L34wDL7sGrfNgWA4UuN29XQzTxI5kbv/EPKhyt2oVHLqUiE+eGyvnuYm+X 26 | KqFi016nQaxTU5Kr8Pl0pSHbJMLFDWLSpsbbTB6YJpdEGxJoj3JB3VncOpwcuK+G 27 | xZ1tV2orPt1s/6m+/ihzRgoEkyLwcLRPN7ojgD/sqS679ZGf1IkDMgFCQe4g0UWm 28 | Fw7v816MNCgypUM5hQaU+Jp8vSlEc29RbrdSHbcxrKj/xPCLWrAbvmI5tgonKmuJ 29 | J1LW8AXyh/EUp/uUh++jqVGx+8pFfcmJw6V6JrJzQ7HMlakkry7N1eAGrIJGtYCW 30 | -----END RSA PRIVATE KEY----- 31 | -------------------------------------------------------------------------------- /tests/certs/client_ca.cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIEKTCCAxGgAwIBAgIUKmL8tfNS9LIB6GLB9RpZpTyk3uIwDQYJKoZIhvcNAQEL 3 | BQAwgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdU 4 | b3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5j 5 | cGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0ExHTAbBgkq 6 | hkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMB4XDTIxMDgwOTIxNDQxM1oXDTQxMDgw 7 | NDIxNDQxM1owgaMxCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD 8 | VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM 9 | DWFzeW5jcGcgdGVzdHMxHzAdBgNVBAMMFmFzeW5jcGcgdGVzdCBjbGllbnQgQ0Ex 10 | HTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMIIBIjANBgkqhkiG9w0BAQEF 11 | AAOCAQ8AMIIBCgKCAQEAptRYfxKiWExfZguQDva53bIqYa4lJwZA86Qu0peBUcsd 12 | E6zyHNgVv4XSMim1FH12KQ4KPKuQAcVqRMCRAHqB96kUfWQqF//fLajr0umdzcbx 13 | +UTgNux8TkScTl9KNAxhiR/oOGbKFcNSs4raaG8puwwEN66uMhoKk2pN2NwDVfHa 14 | bTekJ3jouTcTCnqCynx4qwI4WStJkuW4IPCmDRVXxOOauT7YalElYLWYtAOqGEvf 15 | noDK2Imhc0h6B5XW8nI54rVCXWwhW1v3RLAJGP+LwSy++bf08xmpHXdKkAj5BmUO 16 | QwJRiJ33Xa17rmi385egx8KpqV04YEAPdV1Z4QM6PQIDAQABo1MwUTAdBgNVHQ4E 17 | FgQUi1jMmAisuOibmHIE2n0W2WnnaL0wHwYDVR0jBBgwFoAUi1jMmAisuOibmHIE 18 | 
2n0W2WnnaL0wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAifNE 19 | ZLZXxECp2Sl6jCViZxgFf2+OHDvRORgI6J0heckYyYF/JHvLaDphh6TkSJAdT6Y3 20 | hAb7jueTMI+6RIdRzIjTKCGdJqUetiSfAbnQyIp2qmVqdjeFoXTvQL7BdkIE+kOW 21 | 0iomMqDB3czTl//LrgVQCYqKM0D/Ytecpg2mbshLfpPxdHyliCJcb4SqfdrDnKoV 22 | HUduBjOVot+6bkB5SEGCrrB4KMFTzbAu+zriKWWz+uycIyeVMLEyhDs59vptOK6e 23 | gWkraG43LZY3cHPiVeN3tA/dWdyJf9rgK21zQDSMB8OSH4yQjdQmkkvRQBjp3Fcy 24 | w2SZIP4o9l1Y7+hMMw== 25 | -----END CERTIFICATE----- 26 | -------------------------------------------------------------------------------- /tests/certs/client_ca.cert.srl: -------------------------------------------------------------------------------- 1 | 3DF7A3F0843FE5B0AB8A1A96226AEADAF28F3AAD 2 | -------------------------------------------------------------------------------- /tests/certs/client_ca.key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAptRYfxKiWExfZguQDva53bIqYa4lJwZA86Qu0peBUcsdE6zy 3 | HNgVv4XSMim1FH12KQ4KPKuQAcVqRMCRAHqB96kUfWQqF//fLajr0umdzcbx+UTg 4 | Nux8TkScTl9KNAxhiR/oOGbKFcNSs4raaG8puwwEN66uMhoKk2pN2NwDVfHabTek 5 | J3jouTcTCnqCynx4qwI4WStJkuW4IPCmDRVXxOOauT7YalElYLWYtAOqGEvfnoDK 6 | 2Imhc0h6B5XW8nI54rVCXWwhW1v3RLAJGP+LwSy++bf08xmpHXdKkAj5BmUOQwJR 7 | iJ33Xa17rmi385egx8KpqV04YEAPdV1Z4QM6PQIDAQABAoIBABQrKcO7CftoyEO6 8 | 9CCK/W9q4arLddxg6itKVwrInC66QnqlduO7z+1GjWHZHvYqMMXH17778r30EuPa 9 | 7+zB4sKBI2QBXwFlwqJvgIsQCS7edVRwWjbpoiGIM+lZpcvjD0uXmuhurNGyumXQ 10 | TJVBkyb0zfG5YX/XHB40RNMJzjFuiMPDLVQmmDE//FOuWqBG88MgJP9Ghk3J7wA2 11 | JfDPavb49EzOCSh74zJWP7/QyybzF3ABCMu4OFkaOdqso8FS659XI55QReBbUppu 12 | FRkOgao1BclJhbBdrdtLNjlETM82tfVgW56vaIrrU2z7HskihEyMdB4c+CYbBnPx 13 | QqIhkhUCgYEA0SLVExtNy5Gmi6/ZY9tcd3QIuxcN6Xiup+LgIhWK3+GIoVOPsOjN 14 | 27dlVRINPKhrCfVbrLxUtDN5PzphwSA2Qddm4jg3d5FzX+FgKHQpoaU1WjtRPP+w 15 | K+t6W/NbZ8Rn4JyhZQ3Yqj264NA2l3QmuTfZSUQ5m4x7EUakfGU7G1sCgYEAzDaU 16 | jHsovn0FedOUaaYl6pgzjFV8ByPeT9usN54PZyuzyc+WunjJkxCQqD88J9jyG8XB 17 | 
3V3tQj/CNbMczrS2ZaJ29aI4b/8NwBNR9e6t01bY3B90GJi8S4B4Hf8tYyIlVdeL 18 | tCC4FCZhvl4peaK3AWBj4NhjvdB32ThDXSGxLEcCgYEAiA5tKHz+44ziGMZSW1B+ 19 | m4f1liGtf1Jv7fD/d60kJ/qF9M50ENej9Wkel3Wi/u9ik5v4BCyRvpouKyBEMGxQ 20 | YA1OdaW1ECikMqBg+nB4FR1x1D364ABIEIqlk+SCdsOkANBlf2S+rCJ0zYUnvuhl 21 | uOHIjo3AHJ4MAnU+1V7WUTkCgYBkMedioc7U34x/QJNR3sY9ux2Xnh2zdyLNdc+i 22 | njeafDPDMcoXhcoJERiYpCYEuwnXHIlI7pvJZHUKWe4pcTsI1NSfIk+ki7SYaCJP 23 | kyLQTY0rO3d/1fiU5tyIgzomqIs++fm+kEsg/8/3UkXxOyelUkDPAfy2FgGnn1ZV 24 | 7ID8YwKBgQCeZCapdGJ6Iu5oYB17TyE5pLwb+QzaofR5uO8H4pXGVQyilKVCG9Dp 25 | GMnlXD7bwXPVKa8Icow2OIbmgrZ2mzOo9BSY3BlkKbpJDy7UNtAhzsHHN5/AEk8z 26 | YycWQtMiXI+cRsYO0eyHhJeSS2hX+JTe++iZX65twV53agzCHWRIbg== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /tests/certs/gen.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | 4 | from cryptography import x509 5 | from cryptography.hazmat import backends 6 | from cryptography.hazmat.primitives import hashes 7 | from cryptography.hazmat.primitives import serialization 8 | from cryptography.hazmat.primitives.asymmetric import rsa 9 | from cryptography.x509 import oid 10 | 11 | 12 | def _new_cert(issuer=None, is_issuer=False, serial_number=None, **subject): 13 | backend = backends.default_backend() 14 | private_key = rsa.generate_private_key( 15 | public_exponent=65537, key_size=4096, backend=backend 16 | ) 17 | public_key = private_key.public_key() 18 | subject = x509.Name( 19 | [ 20 | x509.NameAttribute(getattr(oid.NameOID, key.upper()), value) 21 | for key, value in subject.items() 22 | ] 23 | ) 24 | builder = ( 25 | x509.CertificateBuilder() 26 | .subject_name(subject) 27 | .public_key(public_key) 28 | .serial_number(serial_number or int.from_bytes(os.urandom(8), "big")) 29 | ) 30 | if issuer: 31 | issuer_cert, signing_key = issuer 32 | builder = ( 33 | builder.issuer_name(issuer_cert.subject) 34 | 
.not_valid_before(issuer_cert.not_valid_before) 35 | .not_valid_after(issuer_cert.not_valid_after) 36 | ) 37 | aki_ext = x509.AuthorityKeyIdentifier( 38 | key_identifier=issuer_cert.extensions.get_extension_for_class( 39 | x509.SubjectKeyIdentifier 40 | ).value.digest, 41 | authority_cert_issuer=[x509.DirectoryName(issuer_cert.subject)], 42 | authority_cert_serial_number=issuer_cert.serial_number, 43 | ) 44 | else: 45 | signing_key = private_key 46 | builder = ( 47 | builder.issuer_name(subject) 48 | .not_valid_before( 49 | datetime.datetime.today() - datetime.timedelta(days=1) 50 | ) 51 | .not_valid_after( 52 | datetime.datetime.today() + datetime.timedelta(weeks=1000) 53 | ) 54 | ) 55 | aki_ext = x509.AuthorityKeyIdentifier.from_issuer_public_key( 56 | public_key 57 | ) 58 | if is_issuer: 59 | builder = ( 60 | builder.add_extension( 61 | x509.BasicConstraints(ca=True, path_length=None), 62 | critical=True, 63 | ) 64 | .add_extension( 65 | x509.KeyUsage( 66 | digital_signature=False, 67 | content_commitment=False, 68 | key_encipherment=False, 69 | data_encipherment=False, 70 | key_agreement=False, 71 | key_cert_sign=True, 72 | crl_sign=True, 73 | encipher_only=False, 74 | decipher_only=False, 75 | ), 76 | critical=False, 77 | ) 78 | .add_extension( 79 | x509.SubjectKeyIdentifier.from_public_key(public_key), 80 | critical=False, 81 | ) 82 | .add_extension( 83 | aki_ext, 84 | critical=False, 85 | ) 86 | ) 87 | else: 88 | builder = ( 89 | builder.add_extension( 90 | x509.KeyUsage( 91 | digital_signature=True, 92 | content_commitment=False, 93 | key_encipherment=True, 94 | data_encipherment=False, 95 | key_agreement=False, 96 | key_cert_sign=False, 97 | crl_sign=False, 98 | encipher_only=False, 99 | decipher_only=False, 100 | ), 101 | critical=False, 102 | ) 103 | .add_extension( 104 | x509.BasicConstraints(ca=False, path_length=None), 105 | critical=True, 106 | ) 107 | .add_extension( 108 | x509.ExtendedKeyUsage([oid.ExtendedKeyUsageOID.SERVER_AUTH]), 109 | 
critical=False, 110 | ) 111 | .add_extension( 112 | x509.SubjectAlternativeName([x509.DNSName("localhost")]), 113 | critical=False, 114 | ) 115 | .add_extension( 116 | x509.SubjectKeyIdentifier.from_public_key(public_key), 117 | critical=False, 118 | ) 119 | .add_extension( 120 | aki_ext, 121 | critical=False, 122 | ) 123 | ) 124 | certificate = builder.sign( 125 | private_key=signing_key, 126 | algorithm=hashes.SHA256(), 127 | backend=backend, 128 | ) 129 | return certificate, private_key 130 | 131 | 132 | def _write_cert(path, cert_key_pair, password=None): 133 | certificate, private_key = cert_key_pair 134 | if password: 135 | encryption = serialization.BestAvailableEncryption(password) 136 | else: 137 | encryption = serialization.NoEncryption() 138 | with open(path + ".key.pem", "wb") as f: 139 | f.write( 140 | private_key.private_bytes( 141 | encoding=serialization.Encoding.PEM, 142 | format=serialization.PrivateFormat.TraditionalOpenSSL, 143 | encryption_algorithm=encryption, 144 | ) 145 | ) 146 | with open(path + ".cert.pem", "wb") as f: 147 | f.write( 148 | certificate.public_bytes( 149 | encoding=serialization.Encoding.PEM, 150 | ) 151 | ) 152 | 153 | 154 | def new_ca(path, **subject): 155 | cert_key_pair = _new_cert(is_issuer=True, **subject) 156 | _write_cert(path, cert_key_pair) 157 | return cert_key_pair 158 | 159 | 160 | def new_cert( 161 | path, ca_cert_key_pair, password=None, is_issuer=False, **subject 162 | ): 163 | cert_key_pair = _new_cert( 164 | issuer=ca_cert_key_pair, is_issuer=is_issuer, **subject 165 | ) 166 | _write_cert(path, cert_key_pair, password) 167 | return cert_key_pair 168 | 169 | 170 | def new_crl(path, issuer, cert): 171 | issuer_cert, signing_key = issuer 172 | revoked_cert = ( 173 | x509.RevokedCertificateBuilder() 174 | .serial_number(cert[0].serial_number) 175 | .revocation_date(datetime.datetime.today()) 176 | .build() 177 | ) 178 | builder = ( 179 | x509.CertificateRevocationListBuilder() 180 | 
.issuer_name(issuer_cert.subject) 181 | .last_update(datetime.datetime.today()) 182 | .next_update(datetime.datetime.today() + datetime.timedelta(days=1)) 183 | .add_revoked_certificate(revoked_cert) 184 | ) 185 | crl = builder.sign(private_key=signing_key, algorithm=hashes.SHA256()) 186 | with open(path + ".crl.pem", "wb") as f: 187 | f.write(crl.public_bytes(encoding=serialization.Encoding.PEM)) 188 | 189 | 190 | def main(): 191 | ca = new_ca( 192 | "ca", 193 | country_name="CA", 194 | state_or_province_name="Ontario", 195 | locality_name="Toronto", 196 | organization_name="MagicStack Inc.", 197 | organizational_unit_name="asyncpg tests", 198 | common_name="asyncpg test root ca", 199 | email_address="hello@magic.io", 200 | ) 201 | server = new_cert( 202 | "server", 203 | ca, 204 | country_name="CA", 205 | state_or_province_name="Ontario", 206 | organization_name="MagicStack Inc.", 207 | organizational_unit_name="asyncpg tests", 208 | common_name="localhost", 209 | email_address="hello@magic.io", 210 | serial_number=4096, 211 | ) 212 | new_crl('server', ca, server) 213 | 214 | 215 | if __name__ == "__main__": 216 | main() 217 | -------------------------------------------------------------------------------- /tests/certs/server.cert.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIG5jCCBM6gAwIBAgICEAAwDQYJKoZIhvcNAQELBQAwgaExCzAJBgNVBAYTAkNB 3 | MRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYDVQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9N 4 | YWdpY1N0YWNrIEluYy4xFjAUBgNVBAsMDWFzeW5jcGcgdGVzdHMxHTAbBgNVBAMM 5 | FGFzeW5jcGcgdGVzdCByb290IGNhMR0wGwYJKoZIhvcNAQkBFg5oZWxsb0BtYWdp 6 | Yy5pbzAeFw0yNDEwMTYxNzIzNTZaFw00MzEyMTcxNzIzNTZaMIGEMQswCQYDVQQG 7 | EwJDQTEQMA4GA1UECAwHT250YXJpbzEYMBYGA1UECgwPTWFnaWNTdGFjayBJbmMu 8 | MRYwFAYDVQQLDA1hc3luY3BnIHRlc3RzMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAb 9 | BgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvMIICIjANBgkqhkiG9w0BAQEFAAOC 10 | Ag8AMIICCgKCAgEA3F017q/obCM1SsHY5dFz72pFgVMhBIZ6kdIInbFv7RmEykZz 11 | 
ubbJnrgwgYDO5FKGUNO+a80AbjIvBrtPtXs9Ip/QDg0jqgw/MOADCxCzYnAQ2Ew2 12 | y1PfspGtdPhLNTmrO8+AxU2XmjsYY0+ysgUQQttOs9hJ79pIsKGBEES8g9oJTiIf 13 | tKgCxCIuhiZC+AgjeIQZUB9ccifmOGrCJYrD6LBuNGoQNW2/ykqjuHE8219dv1hV 14 | do8azcp/WmejjQguZyU3S/AofnyyNE24rWpXbbFs+9FFaUXd8g/fWCwrRmcXpOaE 15 | lvkmMZyuT9kuglHsvpzzGGNSUpvVoPfldk/4JY/kJrA2G5pgTX6mGRYGEN0jmlCa 16 | yg/ZFn36G0mA5ZBH4Qln+lKUSjJH8bhlFXvXlE3Mc34OCdOAp1TRfOT/qCRKo9A5 17 | KCjVOvG5MAKE8TZnTFLCSx5gK/EdQ2iV7Sm3aVc2P4eEJh+nvv1LDVLQEAak6U+u 18 | sZN5+Wnu7wDKSlh80vTTtoqls5Uo3gIxHYnqX5Fj6nwCzGjjXISNE4OKZLuk3can 19 | mciEES3plUrut+O6a2JWiDoCrwX4blYXhtL92Xaer/Mk1TSf2JsmL6pODoapsA0S 20 | CHtpcgoodxdKriy1qUGsiNlPNVWjASGyKXoEZdv49wyoZuysudl1aS1w42UCAwEA 21 | AaOCAUEwggE9MAsGA1UdDwQEAwIFoDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoG 22 | CCsGAQUFBwMBMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHQ4EFgQUO/cXg1uX 23 | 2oHZodbw6F3/HakLdaQwgdUGA1UdIwSBzTCByoAUhGQbAW97KXQs68Z3efEj55zs 24 | c4WhgaekgaQwgaExCzAJBgNVBAYTAkNBMRAwDgYDVQQIDAdPbnRhcmlvMRAwDgYD 25 | VQQHDAdUb3JvbnRvMRgwFgYDVQQKDA9NYWdpY1N0YWNrIEluYy4xFjAUBgNVBAsM 26 | DWFzeW5jcGcgdGVzdHMxHTAbBgNVBAMMFGFzeW5jcGcgdGVzdCByb290IGNhMR0w 27 | GwYJKoZIhvcNAQkBFg5oZWxsb0BtYWdpYy5pb4IICJCUmtkcj2MwDQYJKoZIhvcN 28 | AQELBQADggIBAD4Ti52nEttUNay+sqqbDLtnSyMRsJI8agPqiHz6bYifSf530rlh 29 | qlHYUY5tgfrd8yDZNIe9Ib7Q1WQjgR8c/T9SoFnLl/tff1CVOAYQ/ffCZGTdBOSc 30 | KfdKEEvObWxWsqv31ZAMWVzfPsF7rwbTbZ8YdH2CNjxbZxrSEn2IrjplsoP5WMsE 31 | 6t7Q+J5wpi2yiEI9PoY2wH5WBB8ONWvZfj9r6OrczlTEZ+L6eiip5kMiw5R9EVt6 32 | ju2aMWqbZTI49Mu/qvXRAkwYvX7mrhuW/4mPHOW/zSnN7hOyjntx1fdnpPD5BTT6 33 | CoJ7nhWgnntw2kk2V9UBCYpVeqidDRrs+nr1xSpduuM1ve3SDkIpd6EGEUqZJ12s 34 | 5xpCUFK67atCZOXbJXqanm+3N9kbqYuwkWoqnPjOfMYW7oABmUy8elVGGwTuiTI0 35 | sXS3aQJ+Bm7oqSXrIxUTjOUUaYNhhaqZdXaO/29vI2+i975Pt1ZLLPUkp0hsUgTT 36 | kryN02TlNTxxQafTWad6YdzyrwvMpV7vxf7JQkOKRwLinqLCDVxjBt66O9mLIpQF 37 | WIfWQG+X4sgobB0NTtBWeGkrIgnhUtsT0ibVm4JAC1cbxdLOq2dfcURC8UFWJXok 38 | yFr/uaDZiKKbUFXbalZwnx6H6ucfl5No3hheexadyIbPNcHhFJ9zGXot 39 | -----END CERTIFICATE----- 40 | 
-------------------------------------------------------------------------------- /tests/certs/server.crl.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN X509 CRL----- 2 | MIIDAjCB6wIBATANBgkqhkiG9w0BAQsFADCBoTELMAkGA1UEBhMCQ0ExEDAOBgNV 3 | BAgMB09udGFyaW8xEDAOBgNVBAcMB1Rvcm9udG8xGDAWBgNVBAoMD01hZ2ljU3Rh 4 | Y2sgSW5jLjEWMBQGA1UECwwNYXN5bmNwZyB0ZXN0czEdMBsGA1UEAwwUYXN5bmNw 5 | ZyB0ZXN0IHJvb3QgY2ExHTAbBgkqhkiG9w0BCQEWDmhlbGxvQG1hZ2ljLmlvFw0y 6 | NDEwMTcxNzIzNTZaFw0yNDEwMTgxNzIzNTZaMBUwEwICEAAXDTI0MTAxNzE3MjM1 7 | NlowDQYJKoZIhvcNAQELBQADggIBAEVNX72KK6etoZQOXzPgd8ZJNrYcsOwjNZFL 8 | ZxC47uX+yrxjv7Wrrk4feyakFi5bL9n8/JMggcpxC6yxMQH/sdOZJ0BzKw3GUAxj 9 | m53i1GGO1lGdKH5a7uDPZVW362JwCVE81ROCdb1SL/yYmIwhD4w2bqjOQuI63Xe1 10 | MDfVZBqcIwzzkA5PEjTSFQIsBcHU+rDrWggkz/XJh5alRav8Gnj7KTE8U1z5UeKV 11 | LUk8L8+ZLW6XlrTnyjOn3qT7sZw2C/R46GCyHWwT5tbLhJhm2u1EuX3Iids02vIP 12 | w9bYf7+Uu2lsse9TuFNXtW0UFLdvVezomHjNBCaMI/MIvG4wSWnAo5bTtlowzxSy 13 | 7rpQQYBebcl5somUAhHqs4dsxbEwCXMPDdapiXkhxR9R4nDvkfsgwyqIRsWsIEq6 14 | PFjjRySNFUg5/vqhVQrg0hV7ygzXfd/kIlud3ZkKnli51TuFMWKD5sMN0r8ITLdG 15 | usoJQiF6G3ByLQBnsiQoHbipWkWTOKmfB/cfaPXdagPZH6rQmJeeNq0vBy6VqbFi 16 | 7D+BqABs+yIT6uJEEqyPGJttkUZP+0ziaK+DZF4MgJtiERtz2GjKMeh3h/YSqA27 17 | 8El6na7hPA3k1pANkaOaKuxZYzrPsl3P91ISGL6E0dgd6f9NZMOxbhfNKoDsBJnd 18 | Hjb3RTY4 19 | -----END X509 CRL----- 20 | -------------------------------------------------------------------------------- /tests/certs/server.key.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIJKQIBAAKCAgEA3F017q/obCM1SsHY5dFz72pFgVMhBIZ6kdIInbFv7RmEykZz 3 | ubbJnrgwgYDO5FKGUNO+a80AbjIvBrtPtXs9Ip/QDg0jqgw/MOADCxCzYnAQ2Ew2 4 | y1PfspGtdPhLNTmrO8+AxU2XmjsYY0+ysgUQQttOs9hJ79pIsKGBEES8g9oJTiIf 5 | tKgCxCIuhiZC+AgjeIQZUB9ccifmOGrCJYrD6LBuNGoQNW2/ykqjuHE8219dv1hV 6 | do8azcp/WmejjQguZyU3S/AofnyyNE24rWpXbbFs+9FFaUXd8g/fWCwrRmcXpOaE 7 | 
lvkmMZyuT9kuglHsvpzzGGNSUpvVoPfldk/4JY/kJrA2G5pgTX6mGRYGEN0jmlCa 8 | yg/ZFn36G0mA5ZBH4Qln+lKUSjJH8bhlFXvXlE3Mc34OCdOAp1TRfOT/qCRKo9A5 9 | KCjVOvG5MAKE8TZnTFLCSx5gK/EdQ2iV7Sm3aVc2P4eEJh+nvv1LDVLQEAak6U+u 10 | sZN5+Wnu7wDKSlh80vTTtoqls5Uo3gIxHYnqX5Fj6nwCzGjjXISNE4OKZLuk3can 11 | mciEES3plUrut+O6a2JWiDoCrwX4blYXhtL92Xaer/Mk1TSf2JsmL6pODoapsA0S 12 | CHtpcgoodxdKriy1qUGsiNlPNVWjASGyKXoEZdv49wyoZuysudl1aS1w42UCAwEA 13 | AQKCAgAXD9TfxfPCXWzrsJ3NGhPSr9crpvzYRw/3cs5esn3O3Sd92SGuAz3WfoWV 14 | CAX0SdlaBs7xjo1yUDjbsNQGtNRmaz3lj+Ug8WcrlkYQl7mDnnbPgX+6h8HsI5LO 15 | SwM+mWpyN/p3Vkd8vJ0wx4Z2sFD4rjruV2m60FK11DEi+A6X6JmmCQGIcTeDjzrk 16 | jzHdrfxdqyAlt80qT+1Sui7XVE5sa7Uc3HzAcAaXr81dNXyeThIMPxJdS1y4F258 17 | kkbA27pU0Rrtt5SFUvIoxyQsrJRkcSJsDYVWHxm7MNi5luXF2G7WXcmX2JCcCz8I 18 | MZJ3JlvAbGyEgOB8r2e2u5AoHEu7xjpjJ0/6smmig7LDe96uNpg6zDwS3xl6rAup 19 | qgwJ5TTwY8BydVOtDqe5Na8yqLtwMr0yA+k2Hz856mzCTJEOI9TaOq/jtq+n4AXW 20 | lkBai762oVKSKYCVJSK6eslTf2bAqjT3jakbgqJLKmMo5XvCnYUWWIve0RhQMNT4 21 | 0tiLCxKurYa7xPqgW26c/fEHvdBDrU1JAablcAjsW9sJ+KIlilK02M9DqF0RnBBI 22 | wK7Ql76ugsYbp8WBXkpFjMMyciMhqH8xJiyi7MuiCwpBGQwxBHHaX7f9OqDWOClR 23 | mVGjrZuk9oiI3waUjGG50SzLBlMbeIzMdXgRuM7fByq6DG0VgQKCAQEA8d2YCODh 24 | ApCM7GB/tmANfVQ0tnfxUT3ceEAOH7XkI+nz87Zv/1k6NOklCMi+nUwoGQfM5CxU 25 | NdWC0I7wI1ATdllPStUAJ4c8xtdEdlrLHBcGNvhYbbqMWRsNGITstnAx3tZ4X32H 26 | duhS5wfPE/X25YMN+8Dtm7jifEMqoCUV55iZxfYs+LXxQF03KVAJ5Ie5a1ac5UCz 27 | zzu9fbYSs70ByJsHWt4ZOsPkJVmkmuXzUPvr72otUYYSdju0PgbJqRoEyTbCh3HT 28 | zo0emKl8jj7oTSzVNjb6AaB6nsKco6wQLQSlaxBzo0j7TBRylVtG81CYjr5LFpp0 29 | UQrHjLZnSTvC5wKCAQEA6T3yH6bFc9FcJGOW1jYozQ5y+NWkXv3MVFIf3IqPT76p 30 | rMEI6krmGUKi+otOaV2Axy36kOcbntzENMg++LPCe0SczK14+pwUrI91cp/Ega6K 31 | +/4sKvh8WDZhzVYkWs76UiRj7Ef4MvtsaPAcFN/Ek+fItDHFRoSGdm+vx+j3ZDxx 32 | tdRudTs0kYyhmdlM0kZTbXsmz37x6+45uO16s+D2lvX2PXM9Lve9z/Ti6nn9QvIF 33 | kM9ZmAU6epmMPsGKM9WOK/sTcPUnd3Ife9tmi3BRAAygDk6hFx67kAsc124oLeZ3 34 | 0CJGshA+50hBAL7wiybLrBMRzHrElzsicppVbn3p0wKCAQAldmRBI8vWYNtjFYNS 35 | 
lUghnHRZuvRG2CUY/xrw8HR415jwq9ZnH8PzRBV3adiUdqJTVjD3OqKEgCC1+x3Y 36 | 6mNJVoYAmkNe3ASe6+LvzhpdrHdK9maEAHwSpSz/Gj+r9m7TDDcy2zerRErq+/uo 37 | JNXsMMNutjBXiWiTRLgKfBQLfkh7MClBELVgec+8d2hA3IDszkqY+8+eDqvIF/aH 38 | noPzNYgLHBGeV48z9dGYKHvqlEq0F6cTVIfxhkfhv51msuAA5pl07z2WZadSkBX5 39 | 1maW5ZXUwukwbVHw20X12AXdYzXYAoFWzkwWOaiR18SClX47xd/NjXjswJWuBuay 40 | oi4LAoIBAQDirP0+nYmQAYwXIWJaVNBaWQyLoLXaS7XkzNuCLncQ/S9RYVkUui3d 41 | ptFVxUUzSVf6O0kkwjYpskxNL79jXPBJdGke0gidJktBWTq/Z15G2ibguCicqlnO 42 | MSvjrzAtwLGuWwdxfpBMm+TEJ3ZjIwWc6Mo5tZUP74PuXqTrGBI2LDgmiom/DQcN 43 | 3SrAplrukMJLyD/zsF/U9vTKMKHrZ1q/Y9Mn7XMszkB+dnSBhIUKJsQZ9CoSgCJR 44 | PCD8bIOv1IATZjOCt/7fKt5GNPf30/QkpCB5RxlvqsKGPwaMp9YMpcsTT/x82SUJ 45 | CUODQg3sbovKc838d+PPRf04e51DgMNZAoIBAQC2uiJjluIKRabFSeSfu4+I6cEY 46 | kXI0F65UAudFmyXVfaQbO9DR0Y4bWPDfXAUimRvxixEhSrSIBZ/itVxzhOvqZrl1 47 | XRCZsTOVoz7Z8lcd8opxPBnWDk1m2nyajwPXp8ZLo67FG0bWbayVBBRxyvirrZjG 48 | PatRKMTyVLTCD+WlQiP4b4kShKdWA4ZH6pHUIviAotWqXMTsEKfupg9avxEk8GtH 49 | GZnXAmpnBqmbU4+3rNOaCZLdekVCoEtW0NGZEYEV5UQnZoWY6AiUUxGGE/qionKH 50 | sdKN+8CowudMH02bo1a0akS+eh+D/SGc/MLofH7uPWtX7l8sTvQivzDIkZeu 51 | -----END RSA PRIVATE KEY----- 52 | -------------------------------------------------------------------------------- /tests/test__environment.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import os 9 | import unittest 10 | 11 | import asyncpg 12 | import asyncpg.serverversion 13 | 14 | from asyncpg import _testbase as tb 15 | 16 | 17 | class TestEnvironment(tb.ConnectedTestCase): 18 | @unittest.skipIf(not os.environ.get('PGVERSION'), 19 | "environ[PGVERSION] is not set") 20 | async def test_environment_server_version(self): 21 | pgver = os.environ.get('PGVERSION') 22 | env_ver = 
asyncpg.serverversion.split_server_version_string(pgver) 23 | srv_ver = self.con.get_server_version() 24 | 25 | self.assertEqual( 26 | env_ver[:2], srv_ver[:2], 27 | 'Expecting PostgreSQL version {pgver}, got {maj}.{min}.'.format( 28 | pgver=pgver, maj=srv_ver.major, min=srv_ver.minor) 29 | ) 30 | 31 | @unittest.skipIf(not os.environ.get('ASYNCPG_VERSION'), 32 | "environ[ASYNCPG_VERSION] is not set") 33 | @unittest.skipIf("dev" in asyncpg.__version__, 34 | "development version with git commit data") 35 | async def test_environment_asyncpg_version(self): 36 | apgver = os.environ.get('ASYNCPG_VERSION') 37 | self.assertEqual( 38 | asyncpg.__version__, apgver, 39 | 'Expecting asyncpg version {}, got {}.'.format( 40 | apgver, asyncpg.__version__) 41 | ) 42 | -------------------------------------------------------------------------------- /tests/test__sourcecode.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | import os 8 | import subprocess 9 | import sys 10 | import unittest 11 | 12 | 13 | def find_root(): 14 | return os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 15 | 16 | 17 | class TestCodeQuality(unittest.TestCase): 18 | 19 | def test_flake8(self): 20 | try: 21 | import flake8 # NoQA 22 | except ImportError: 23 | raise unittest.SkipTest('flake8 module is missing') 24 | 25 | root_path = find_root() 26 | config_path = os.path.join(root_path, '.flake8') 27 | if not os.path.exists(config_path): 28 | raise RuntimeError('could not locate .flake8 file') 29 | 30 | try: 31 | subprocess.run( 32 | [sys.executable, '-m', 'flake8', '--config', config_path], 33 | check=True, 34 | stdout=subprocess.PIPE, 35 | stderr=subprocess.STDOUT, 36 | cwd=root_path) 37 | except subprocess.CalledProcessError as ex: 38 | output 
= ex.output.decode() 39 | raise AssertionError( 40 | 'flake8 validation failed:\n{}'.format(output)) from None 41 | 42 | def test_mypy(self): 43 | try: 44 | import mypy # NoQA 45 | except ImportError: 46 | raise unittest.SkipTest('mypy module is missing') 47 | 48 | root_path = find_root() 49 | config_path = os.path.join(root_path, 'pyproject.toml') 50 | if not os.path.exists(config_path): 51 | raise RuntimeError('could not locate mypy.ini file') 52 | 53 | try: 54 | subprocess.run( 55 | [ 56 | sys.executable, 57 | '-m', 58 | 'mypy', 59 | '--config-file', 60 | config_path, 61 | 'asyncpg' 62 | ], 63 | check=True, 64 | stdout=subprocess.PIPE, 65 | stderr=subprocess.STDOUT, 66 | cwd=root_path 67 | ) 68 | except subprocess.CalledProcessError as ex: 69 | output = ex.output.decode() 70 | raise AssertionError( 71 | 'mypy validation failed:\n{}'.format(output)) from None 72 | -------------------------------------------------------------------------------- /tests/test_adversity.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | """Tests how asyncpg behaves in non-ideal conditions.""" 8 | 9 | import asyncio 10 | import os 11 | import platform 12 | import unittest 13 | 14 | from asyncpg import _testbase as tb 15 | 16 | 17 | @unittest.skipIf(os.environ.get('PGHOST'), 'using remote cluster for testing') 18 | @unittest.skipIf( 19 | platform.system() == 'Windows', 20 | 'not compatible with ProactorEventLoop which is default in Python 3.8+') 21 | class TestConnectionLoss(tb.ProxiedClusterTestCase): 22 | @tb.with_timeout(30.0) 23 | async def test_connection_close_timeout(self): 24 | con = await self.connect() 25 | self.proxy.trigger_connectivity_loss() 26 | with self.assertRaises(asyncio.TimeoutError): 27 | await 
con.close(timeout=0.5) 28 | 29 | @tb.with_timeout(30.0) 30 | async def test_pool_acquire_timeout(self): 31 | pool = await self.create_pool( 32 | database='postgres', min_size=2, max_size=2) 33 | try: 34 | self.proxy.trigger_connectivity_loss() 35 | for _ in range(2): 36 | with self.assertRaises(asyncio.TimeoutError): 37 | async with pool.acquire(timeout=0.5): 38 | pass 39 | self.proxy.restore_connectivity() 40 | async with pool.acquire(timeout=0.5): 41 | pass 42 | finally: 43 | self.proxy.restore_connectivity() 44 | pool.terminate() 45 | 46 | @tb.with_timeout(30.0) 47 | async def test_pool_release_timeout(self): 48 | pool = await self.create_pool( 49 | database='postgres', min_size=2, max_size=2) 50 | try: 51 | with self.assertRaises(asyncio.TimeoutError): 52 | async with pool.acquire(timeout=0.5): 53 | self.proxy.trigger_connectivity_loss() 54 | finally: 55 | self.proxy.restore_connectivity() 56 | pool.terminate() 57 | 58 | @tb.with_timeout(30.0) 59 | async def test_pool_handles_abrupt_connection_loss(self): 60 | pool_size = 3 61 | query_runtime = 0.5 62 | pool_timeout = cmd_timeout = 1.0 63 | concurrency = 9 64 | pool_concurrency = (concurrency - 1) // pool_size + 1 65 | 66 | # Worst expected runtime + 20% to account for other latencies. 
67 | worst_runtime = (pool_timeout + cmd_timeout) * pool_concurrency * 1.2 68 | 69 | async def worker(pool): 70 | async with pool.acquire(timeout=pool_timeout) as con: 71 | await con.fetch('SELECT pg_sleep($1)', query_runtime) 72 | 73 | def kill_connectivity(): 74 | self.proxy.trigger_connectivity_loss() 75 | 76 | new_pool = self.create_pool( 77 | database='postgres', min_size=pool_size, max_size=pool_size, 78 | timeout=cmd_timeout, command_timeout=cmd_timeout) 79 | 80 | with self.assertRunUnder(worst_runtime): 81 | pool = await new_pool 82 | try: 83 | workers = [worker(pool) for _ in range(concurrency)] 84 | self.loop.call_later(1, kill_connectivity) 85 | await asyncio.gather( 86 | *workers, return_exceptions=True) 87 | finally: 88 | pool.terminate() 89 | -------------------------------------------------------------------------------- /tests/test_cancellation.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncio 9 | import asyncpg 10 | 11 | from asyncpg import _testbase as tb 12 | 13 | 14 | class TestCancellation(tb.ConnectedTestCase): 15 | 16 | async def test_cancellation_01(self): 17 | st1000 = await self.con.prepare('SELECT 1000') 18 | 19 | async def test0(): 20 | val = await self.con.execute('SELECT 42') 21 | self.assertEqual(val, 'SELECT 1') 22 | 23 | async def test1(): 24 | val = await self.con.fetchval('SELECT 42') 25 | self.assertEqual(val, 42) 26 | 27 | async def test2(): 28 | val = await self.con.fetchrow('SELECT 42') 29 | self.assertEqual(val, (42,)) 30 | 31 | async def test3(): 32 | val = await self.con.fetch('SELECT 42') 33 | self.assertEqual(val, [(42,)]) 34 | 35 | async def test4(): 36 | val = await self.con.prepare('SELECT 42') 37 | self.assertEqual(await val.fetchval(), 42) 38 | 
39 | async def test5(): 40 | self.assertEqual(await st1000.fetchval(), 1000) 41 | 42 | async def test6(): 43 | self.assertEqual(await st1000.fetchrow(), (1000,)) 44 | 45 | async def test7(): 46 | self.assertEqual(await st1000.fetch(), [(1000,)]) 47 | 48 | async def test8(): 49 | cur = await st1000.cursor() 50 | self.assertEqual(await cur.fetchrow(), (1000,)) 51 | 52 | for test in {test0, test1, test2, test3, test4, test5, 53 | test6, test7, test8}: 54 | 55 | with self.subTest(testfunc=test), self.assertRunUnder(1): 56 | st = await self.con.prepare('SELECT pg_sleep(20)') 57 | task = self.loop.create_task(st.fetch()) 58 | await asyncio.sleep(0.05) 59 | task.cancel() 60 | 61 | with self.assertRaises(asyncio.CancelledError): 62 | await task 63 | 64 | async with self.con.transaction(): 65 | await test() 66 | 67 | async def test_cancellation_02(self): 68 | st = await self.con.prepare('SELECT 1') 69 | task = self.loop.create_task(st.fetch()) 70 | await asyncio.sleep(0.05) 71 | task.cancel() 72 | self.assertEqual(await task, [(1,)]) 73 | 74 | async def test_cancellation_03(self): 75 | with self.assertRaises(asyncpg.InFailedSQLTransactionError): 76 | async with self.con.transaction(): 77 | task = self.loop.create_task( 78 | self.con.fetch('SELECT pg_sleep(20)')) 79 | await asyncio.sleep(0.05) 80 | task.cancel() 81 | 82 | with self.assertRaises(asyncio.CancelledError): 83 | await task 84 | 85 | await self.con.fetch('SELECT generate_series(0, 100)') 86 | 87 | self.assertEqual( 88 | await self.con.fetchval('SELECT 42'), 89 | 42) 90 | 91 | async def test_cancellation_04(self): 92 | await self.con.fetchval('SELECT pg_sleep(0)') 93 | waiter = asyncio.Future() 94 | self.con._cancel_current_command(waiter) 95 | await waiter 96 | self.assertEqual(await self.con.fetchval('SELECT 42'), 42) 97 | -------------------------------------------------------------------------------- /tests/test_cursor.py: -------------------------------------------------------------------------------- 1 | # 
Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncpg 9 | import inspect 10 | 11 | from asyncpg import _testbase as tb 12 | 13 | 14 | class TestIterableCursor(tb.ConnectedTestCase): 15 | 16 | async def test_cursor_iterable_01(self): 17 | st = await self.con.prepare('SELECT generate_series(0, 20)') 18 | expected = await st.fetch() 19 | 20 | for prefetch in range(1, 25): 21 | with self.subTest(prefetch=prefetch): 22 | async with self.con.transaction(): 23 | result = [] 24 | async for rec in st.cursor(prefetch=prefetch): 25 | result.append(rec) 26 | 27 | self.assertEqual( 28 | result, expected, 29 | 'result != expected for prefetch={}'.format(prefetch)) 30 | 31 | async def test_cursor_iterable_02(self): 32 | # Test that it's not possible to create a cursor without hold 33 | # outside of a transaction 34 | s = await self.con.prepare( 35 | 'DECLARE t BINARY CURSOR WITHOUT HOLD FOR SELECT 1') 36 | with self.assertRaises(asyncpg.NoActiveSQLTransactionError): 37 | await s.fetch() 38 | 39 | # Now test that statement.cursor() does not let you 40 | # iterate over it outside of a transaction 41 | st = await self.con.prepare('SELECT generate_series(0, 20)') 42 | 43 | it = st.cursor(prefetch=5).__aiter__() 44 | if inspect.isawaitable(it): 45 | it = await it 46 | 47 | with self.assertRaisesRegex(asyncpg.NoActiveSQLTransactionError, 48 | 'cursor cannot be created.*transaction'): 49 | await it.__anext__() 50 | 51 | async def test_cursor_iterable_03(self): 52 | st = await self.con.prepare('SELECT generate_series(0, 20)') 53 | 54 | it = st.cursor().__aiter__() 55 | if inspect.isawaitable(it): 56 | it = await it 57 | 58 | st._state.mark_closed() 59 | 60 | with self.assertRaisesRegex(asyncpg.InterfaceError, 61 | 'statement is closed'): 62 | async for _ in it: # NOQA 63 | pass 64 | 65 | async def 
test_cursor_iterable_04(self): 66 | st = await self.con.prepare('SELECT generate_series(0, 20)') 67 | st._state.mark_closed() 68 | 69 | with self.assertRaisesRegex(asyncpg.InterfaceError, 70 | 'statement is closed'): 71 | async for _ in st.cursor(): # NOQA 72 | pass 73 | 74 | async def test_cursor_iterable_05(self): 75 | st = await self.con.prepare('SELECT generate_series(0, 20)') 76 | for prefetch in range(-1, 1): 77 | with self.subTest(prefetch=prefetch): 78 | with self.assertRaisesRegex(asyncpg.InterfaceError, 79 | 'must be greater than zero'): 80 | async for _ in st.cursor(prefetch=prefetch): # NOQA 81 | pass 82 | 83 | async def test_cursor_iterable_06(self): 84 | recs = [] 85 | 86 | async with self.con.transaction(): 87 | await self.con.execute(''' 88 | CREATE TABLE cursor_iterable_06 (id int); 89 | INSERT INTO cursor_iterable_06 VALUES (0), (1); 90 | ''') 91 | try: 92 | cur = self.con.cursor('SELECT * FROM cursor_iterable_06') 93 | async for rec in cur: 94 | recs.append(rec) 95 | finally: 96 | # Check that after iteration has exhausted the cursor, 97 | # its associated portal is closed properly, unlocking 98 | # the table. 
99 | await self.con.execute('DROP TABLE cursor_iterable_06') 100 | 101 | self.assertEqual(recs, [(i,) for i in range(2)]) 102 | 103 | 104 | class TestCursor(tb.ConnectedTestCase): 105 | 106 | async def test_cursor_01(self): 107 | st = await self.con.prepare('SELECT generate_series(0, 20)') 108 | with self.assertRaisesRegex(asyncpg.NoActiveSQLTransactionError, 109 | 'cursor cannot be created.*transaction'): 110 | await st.cursor() 111 | 112 | async def test_cursor_02(self): 113 | st = await self.con.prepare('SELECT generate_series(0, 20)') 114 | async with self.con.transaction(): 115 | cur = await st.cursor() 116 | 117 | for i in range(-1, 1): 118 | with self.assertRaisesRegex(asyncpg.InterfaceError, 119 | 'greater than zero'): 120 | await cur.fetch(i) 121 | 122 | res = await cur.fetch(2) 123 | self.assertEqual(res, [(0,), (1,)]) 124 | 125 | rec = await cur.fetchrow() 126 | self.assertEqual(rec, (2,)) 127 | 128 | r = repr(cur) 129 | self.assertTrue(r.startswith(' 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncpg 9 | from asyncpg import _testbase as tb 10 | 11 | 12 | class TestExceptions(tb.ConnectedTestCase): 13 | 14 | def test_exceptions_exported(self): 15 | for err in ('PostgresError', 'SubstringError', 'InterfaceError'): 16 | self.assertTrue(hasattr(asyncpg, err)) 17 | self.assertIn(err, asyncpg.__all__) 18 | 19 | for err in ('PostgresMessage',): 20 | self.assertFalse(hasattr(asyncpg, err)) 21 | self.assertNotIn(err, asyncpg.__all__) 22 | 23 | self.assertIsNone(asyncpg.PostgresError.schema_name) 24 | 25 | async def test_exceptions_unpacking(self): 26 | try: 27 | await self.con.execute('SELECT * FROM _nonexistent_') 28 | except asyncpg.UndefinedTableError as e: 29 | self.assertEqual(e.sqlstate, '42P01') 30 | self.assertEqual(e.position, '15') 31 | self.assertEqual(e.query, 'SELECT * FROM _nonexistent_') 32 | self.assertIsNotNone(e.severity) 33 | else: 
34 | self.fail('UndefinedTableError not raised') 35 | 36 | async def test_exceptions_str(self): 37 | try: 38 | await self.con.execute(''' 39 | CREATE FUNCTION foo() RETURNS bool AS $$ $$ LANGUAGE SQL; 40 | ''') 41 | except asyncpg.InvalidFunctionDefinitionError as e: 42 | if self.server_version < (17, 0): 43 | detail = ( 44 | "Function's final statement must be SELECT or " 45 | "INSERT/UPDATE/DELETE RETURNING." 46 | ) 47 | else: 48 | detail = ( 49 | "Function's final statement must be SELECT or " 50 | "INSERT/UPDATE/DELETE/MERGE RETURNING." 51 | ) 52 | 53 | self.assertEqual(e.detail, detail) 54 | self.assertIn('DETAIL: Function', str(e)) 55 | else: 56 | self.fail('InvalidFunctionDefinitionError not raised') 57 | -------------------------------------------------------------------------------- /tests/test_introspection.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncio 9 | import json 10 | 11 | from asyncpg import _testbase as tb 12 | from asyncpg import connection as apg_con 13 | 14 | 15 | MAX_RUNTIME = 0.25 16 | 17 | 18 | class SlowIntrospectionConnection(apg_con.Connection): 19 | """Connection class to test introspection races.""" 20 | introspect_count = 0 21 | 22 | async def _introspect_types(self, *args, **kwargs): 23 | self.introspect_count += 1 24 | await asyncio.sleep(0.4) 25 | return await super()._introspect_types(*args, **kwargs) 26 | 27 | 28 | class TestIntrospection(tb.ConnectedTestCase): 29 | @classmethod 30 | def setUpClass(cls): 31 | super().setUpClass() 32 | cls.adminconn = cls.loop.run_until_complete(cls.connect()) 33 | cls.loop.run_until_complete( 34 | cls.adminconn.execute('CREATE DATABASE asyncpg_intro_test')) 35 | 36 | @classmethod 37 | def tearDownClass(cls): 38 | 
cls.loop.run_until_complete( 39 | cls.adminconn.execute('DROP DATABASE asyncpg_intro_test')) 40 | 41 | cls.loop.run_until_complete(cls.adminconn.close()) 42 | cls.adminconn = None 43 | 44 | super().tearDownClass() 45 | 46 | @classmethod 47 | def get_server_settings(cls): 48 | settings = super().get_server_settings() 49 | settings.pop('jit', None) 50 | return settings 51 | 52 | def setUp(self): 53 | super().setUp() 54 | self.loop.run_until_complete(self._add_custom_codec(self.con)) 55 | 56 | async def _add_custom_codec(self, conn): 57 | # mess up with the codec - builtin introspection shouldn't be affected 58 | await conn.set_type_codec( 59 | "oid", 60 | schema="pg_catalog", 61 | encoder=lambda value: None, 62 | decoder=lambda value: None, 63 | format="text", 64 | ) 65 | 66 | @tb.with_connection_options(database='asyncpg_intro_test') 67 | async def test_introspection_on_large_db(self): 68 | await self.con.execute( 69 | 'CREATE TABLE base ({})'.format( 70 | ','.join('c{:02} varchar'.format(n) for n in range(50)) 71 | ) 72 | ) 73 | for n in range(1000): 74 | await self.con.execute( 75 | 'CREATE TABLE child_{:04} () inherits (base)'.format(n) 76 | ) 77 | 78 | with self.assertRunUnder(MAX_RUNTIME): 79 | await self.con.fetchval('SELECT $1::int[]', [1, 2]) 80 | 81 | @tb.with_connection_options(statement_cache_size=0) 82 | async def test_introspection_no_stmt_cache_01(self): 83 | old_uid = apg_con._uid 84 | 85 | self.assertEqual(self.con._stmt_cache.get_max_size(), 0) 86 | await self.con.fetchval('SELECT $1::int[]', [1, 2]) 87 | 88 | await self.con.execute(''' 89 | CREATE EXTENSION IF NOT EXISTS hstore 90 | ''') 91 | 92 | try: 93 | await self.con.set_builtin_type_codec( 94 | 'hstore', codec_name='pg_contrib.hstore') 95 | finally: 96 | await self.con.execute(''' 97 | DROP EXTENSION hstore 98 | ''') 99 | 100 | self.assertEqual(apg_con._uid, old_uid) 101 | 102 | @tb.with_connection_options(max_cacheable_statement_size=1) 103 | async def 
test_introspection_no_stmt_cache_02(self): 104 | # max_cacheable_statement_size will disable caching both for 105 | # the user query and for the introspection query. 106 | old_uid = apg_con._uid 107 | 108 | await self.con.fetchval('SELECT $1::int[]', [1, 2]) 109 | 110 | await self.con.execute(''' 111 | CREATE EXTENSION IF NOT EXISTS hstore 112 | ''') 113 | 114 | try: 115 | await self.con.set_builtin_type_codec( 116 | 'hstore', codec_name='pg_contrib.hstore') 117 | finally: 118 | await self.con.execute(''' 119 | DROP EXTENSION hstore 120 | ''') 121 | 122 | self.assertEqual(apg_con._uid, old_uid) 123 | 124 | @tb.with_connection_options(max_cacheable_statement_size=10000) 125 | async def test_introspection_no_stmt_cache_03(self): 126 | # max_cacheable_statement_size will disable caching for 127 | # the user query but not for the introspection query. 128 | old_uid = apg_con._uid 129 | 130 | await self.con.fetchval( 131 | "SELECT $1::int[], '{foo}'".format(foo='a' * 10000), [1, 2]) 132 | 133 | self.assertGreater(apg_con._uid, old_uid) 134 | 135 | async def test_introspection_sticks_for_ps(self): 136 | # Test that the introspected codec pipeline for a prepared 137 | # statement is not affected by a subsequent codec cache bust. 138 | 139 | ps = await self.con._prepare('SELECT $1::json[]', use_cache=True) 140 | 141 | try: 142 | # Setting a custom codec blows the codec cache for derived types. 143 | await self.con.set_type_codec( 144 | 'json', encoder=lambda v: v, decoder=json.loads, 145 | schema='pg_catalog', format='text' 146 | ) 147 | 148 | # The originally prepared statement should still be OK and 149 | # use the previously selected codec. 150 | self.assertEqual(await ps.fetchval(['{"foo": 1}']), ['{"foo": 1}']) 151 | 152 | # The new query uses the custom codec. 
153 | v = await self.con.fetchval('SELECT $1::json[]', ['{"foo": 1}']) 154 | self.assertEqual(v, [{'foo': 1}]) 155 | 156 | finally: 157 | await self.con.reset_type_codec( 158 | 'json', schema='pg_catalog') 159 | 160 | async def test_introspection_retries_after_cache_bust(self): 161 | # Test that codec cache bust racing with the introspection 162 | # query would cause introspection to retry. 163 | slow_intro_conn = await self.connect( 164 | connection_class=SlowIntrospectionConnection) 165 | await self._add_custom_codec(slow_intro_conn) 166 | try: 167 | await self.con.execute(''' 168 | CREATE DOMAIN intro_1_t AS int; 169 | CREATE DOMAIN intro_2_t AS int; 170 | ''') 171 | 172 | await slow_intro_conn.fetchval(''' 173 | SELECT $1::intro_1_t 174 | ''', 10) 175 | # slow_intro_conn cache is now populated with intro_1_t 176 | 177 | async def wait_and_drop(): 178 | await asyncio.sleep(0.1) 179 | await slow_intro_conn.reload_schema_state() 180 | 181 | # Now, in parallel, run another query that 182 | # references both intro_1_t and intro_2_t. 183 | await asyncio.gather( 184 | slow_intro_conn.fetchval(''' 185 | SELECT $1::intro_1_t, $2::intro_2_t 186 | ''', 10, 20), 187 | wait_and_drop() 188 | ) 189 | 190 | # Initial query + two tries for the second query. 
191 | self.assertEqual(slow_intro_conn.introspect_count, 3) 192 | 193 | finally: 194 | await self.con.execute(''' 195 | DROP DOMAIN intro_1_t; 196 | DROP DOMAIN intro_2_t; 197 | ''') 198 | await slow_intro_conn.close() 199 | 200 | @tb.with_connection_options(database='asyncpg_intro_test') 201 | async def test_introspection_loads_basetypes_of_domains(self): 202 | # Test that basetypes of domains are loaded to the 203 | # client encode/decode cache 204 | await self.con.execute(''' 205 | DROP TABLE IF EXISTS test; 206 | DROP DOMAIN IF EXISTS num_array; 207 | CREATE DOMAIN num_array numeric[]; 208 | CREATE TABLE test ( 209 | num num_array 210 | ); 211 | ''') 212 | 213 | try: 214 | # if domain basetypes are not loaded, this insert will fail 215 | await self.con.execute( 216 | 'INSERT INTO test (num) VALUES ($1)', ([1, 2],)) 217 | finally: 218 | await self.con.execute(''' 219 | DROP TABLE IF EXISTS test; 220 | DROP DOMAIN IF EXISTS num_array; 221 | ''') 222 | -------------------------------------------------------------------------------- /tests/test_logging.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from asyncpg import _testbase as tb 4 | from asyncpg import exceptions 5 | 6 | 7 | class LogCollector: 8 | def __init__(self): 9 | self.records = [] 10 | 11 | def __call__(self, record): 12 | self.records.append(record) 13 | 14 | 15 | class TestQueryLogging(tb.ConnectedTestCase): 16 | 17 | async def test_logging_context(self): 18 | queries = asyncio.Queue() 19 | 20 | def query_saver(record): 21 | queries.put_nowait(record) 22 | 23 | log = LogCollector() 24 | 25 | with self.con.query_logger(query_saver): 26 | self.assertEqual(len(self.con._query_loggers), 1) 27 | await self.con.execute("SELECT 1") 28 | with self.con.query_logger(log): 29 | self.assertEqual(len(self.con._query_loggers), 2) 30 | await self.con.execute("SELECT 2") 31 | 32 | r1 = await queries.get() 33 | r2 = await queries.get() 34 | 
self.assertEqual(r1.query, "SELECT 1") 35 | self.assertEqual(r2.query, "SELECT 2") 36 | self.assertEqual(len(log.records), 1) 37 | self.assertEqual(log.records[0].query, "SELECT 2") 38 | self.assertEqual(len(self.con._query_loggers), 0) 39 | 40 | async def test_error_logging(self): 41 | log = LogCollector() 42 | with self.con.query_logger(log): 43 | with self.assertRaises(exceptions.UndefinedColumnError): 44 | await self.con.execute("SELECT x") 45 | 46 | await asyncio.sleep(0) # wait for logging 47 | self.assertEqual(len(log.records), 1) 48 | self.assertEqual( 49 | type(log.records[0].exception), 50 | exceptions.UndefinedColumnError 51 | ) 52 | -------------------------------------------------------------------------------- /tests/test_test.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncio 9 | import types 10 | import unittest 11 | 12 | 13 | from asyncpg import _testbase as tb 14 | 15 | 16 | class BaseSimpleTestCase: 17 | 18 | async def test_tests_zero_error(self): 19 | await asyncio.sleep(0.01) 20 | 1 / 0 21 | 22 | 23 | class TestTests(unittest.TestCase): 24 | 25 | def test_tests_fail_1(self): 26 | SimpleTestCase = types.new_class('SimpleTestCase', 27 | (BaseSimpleTestCase, tb.TestCase)) 28 | 29 | suite = unittest.TestSuite() 30 | suite.addTest(SimpleTestCase('test_tests_zero_error')) 31 | 32 | result = unittest.TestResult() 33 | suite.run(result) 34 | 35 | self.assertIn('ZeroDivisionError', result.errors[0][1]) 36 | 37 | 38 | class TestHelpers(tb.TestCase): 39 | 40 | async def test_tests_assertLoopErrorHandlerCalled_01(self): 41 | with self.assertRaisesRegex(AssertionError, r'no message.*was logged'): 42 | with self.assertLoopErrorHandlerCalled('aa'): 43 | 
self.loop.call_exception_handler({'message': 'bb a bb'}) 44 | 45 | with self.assertLoopErrorHandlerCalled('aa'): 46 | self.loop.call_exception_handler({'message': 'bbaabb'}) 47 | -------------------------------------------------------------------------------- /tests/test_timeout.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2016-present the asyncpg authors and contributors 2 | # 3 | # 4 | # This module is part of asyncpg and is released under 5 | # the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0 6 | 7 | 8 | import asyncio 9 | 10 | import asyncpg 11 | from asyncpg import connection as pg_connection 12 | from asyncpg import _testbase as tb 13 | 14 | 15 | MAX_RUNTIME = 0.5 16 | 17 | 18 | class TestTimeout(tb.ConnectedTestCase): 19 | 20 | async def test_timeout_01(self): 21 | for methname in {'fetch', 'fetchrow', 'fetchval', 'execute'}: 22 | with self.assertRaises(asyncio.TimeoutError), \ 23 | self.assertRunUnder(MAX_RUNTIME): 24 | meth = getattr(self.con, methname) 25 | await meth('select pg_sleep(10)', timeout=0.02) 26 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 27 | 28 | async def test_timeout_02(self): 29 | st = await self.con.prepare('select pg_sleep(10)') 30 | 31 | for methname in {'fetch', 'fetchrow', 'fetchval'}: 32 | with self.assertRaises(asyncio.TimeoutError), \ 33 | self.assertRunUnder(MAX_RUNTIME): 34 | meth = getattr(st, methname) 35 | await meth(timeout=0.02) 36 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 37 | 38 | async def test_timeout_03(self): 39 | task = self.loop.create_task( 40 | self.con.fetch('select pg_sleep(10)', timeout=0.2)) 41 | await asyncio.sleep(0.05) 42 | task.cancel() 43 | with self.assertRaises(asyncio.CancelledError), \ 44 | self.assertRunUnder(MAX_RUNTIME): 45 | await task 46 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 47 | 48 | async def test_timeout_04(self): 49 | st = await self.con.prepare('select 
pg_sleep(10)', timeout=0.1) 50 | with self.assertRaises(asyncio.TimeoutError), \ 51 | self.assertRunUnder(MAX_RUNTIME): 52 | async with self.con.transaction(): 53 | async for _ in st.cursor(timeout=0.1): # NOQA 54 | pass 55 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 56 | 57 | st = await self.con.prepare('select pg_sleep(10)', timeout=0.1) 58 | async with self.con.transaction(): 59 | cur = await st.cursor() 60 | with self.assertRaises(asyncio.TimeoutError), \ 61 | self.assertRunUnder(MAX_RUNTIME): 62 | await cur.fetch(1, timeout=0.1) 63 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 64 | 65 | async def test_timeout_05(self): 66 | # Stress-test timeouts - try to trigger a race condition 67 | # between a cancellation request to Postgres and next 68 | # query (SELECT 1) 69 | for _ in range(500): 70 | with self.assertRaises(asyncio.TimeoutError): 71 | await self.con.fetch('SELECT pg_sleep(1)', timeout=1e-10) 72 | self.assertEqual(await self.con.fetch('SELECT 1'), [(1,)]) 73 | 74 | async def test_timeout_06(self): 75 | async with self.con.transaction(): 76 | with self.assertRaises(asyncio.TimeoutError), \ 77 | self.assertRunUnder(MAX_RUNTIME): 78 | async for _ in self.con.cursor( # NOQA 79 | 'select pg_sleep(10)', timeout=0.1): 80 | pass 81 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 82 | 83 | async with self.con.transaction(): 84 | cur = await self.con.cursor('select pg_sleep(10)') 85 | with self.assertRaises(asyncio.TimeoutError), \ 86 | self.assertRunUnder(MAX_RUNTIME): 87 | await cur.fetch(1, timeout=0.1) 88 | 89 | async with self.con.transaction(): 90 | cur = await self.con.cursor('select pg_sleep(10)') 91 | with self.assertRaises(asyncio.TimeoutError), \ 92 | self.assertRunUnder(MAX_RUNTIME): 93 | await cur.forward(1, timeout=1e-10) 94 | 95 | async with self.con.transaction(): 96 | cur = await self.con.cursor('select pg_sleep(10)') 97 | with self.assertRaises(asyncio.TimeoutError), \ 98 | 
self.assertRunUnder(MAX_RUNTIME): 99 | await cur.fetchrow(timeout=0.1) 100 | 101 | async with self.con.transaction(): 102 | cur = await self.con.cursor('select pg_sleep(10)') 103 | with self.assertRaises(asyncio.TimeoutError), \ 104 | self.assertRunUnder(MAX_RUNTIME): 105 | await cur.fetchrow(timeout=0.1) 106 | 107 | with self.assertRaises(asyncpg.InFailedSQLTransactionError): 108 | await cur.fetch(1) 109 | 110 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 111 | 112 | async def test_invalid_timeout(self): 113 | for command_timeout in ('a', False, -1): 114 | with self.subTest(command_timeout=command_timeout): 115 | with self.assertRaisesRegex(ValueError, 116 | 'invalid command_timeout'): 117 | await self.connect(command_timeout=command_timeout) 118 | 119 | # Note: negative timeouts are OK for method calls. 120 | for methname in {'fetch', 'fetchrow', 'fetchval', 'execute'}: 121 | for timeout in ('a', False): 122 | with self.subTest(timeout=timeout): 123 | with self.assertRaisesRegex(ValueError, 'invalid timeout'): 124 | await self.con.execute('SELECT 1', timeout=timeout) 125 | 126 | 127 | class TestConnectionCommandTimeout(tb.ConnectedTestCase): 128 | 129 | @tb.with_connection_options(command_timeout=0.2) 130 | async def test_command_timeout_01(self): 131 | for methname in {'fetch', 'fetchrow', 'fetchval', 'execute'}: 132 | with self.assertRaises(asyncio.TimeoutError), \ 133 | self.assertRunUnder(MAX_RUNTIME): 134 | meth = getattr(self.con, methname) 135 | await meth('select pg_sleep(10)') 136 | self.assertEqual(await self.con.fetch('select 1'), [(1,)]) 137 | 138 | 139 | class SlowPrepareConnection(pg_connection.Connection): 140 | """Connection class to test timeouts.""" 141 | async def _get_statement(self, query, timeout, **kwargs): 142 | await asyncio.sleep(0.3) 143 | return await super()._get_statement(query, timeout, **kwargs) 144 | 145 | 146 | class TestTimeoutCoversPrepare(tb.ConnectedTestCase): 147 | 148 | 
    @tb.with_connection_options(connection_class=SlowPrepareConnection,
                                command_timeout=0.3)
    async def test_timeout_covers_prepare_01(self):
        # SlowPrepareConnection delays _get_statement() by 0.3s, so the
        # 0.3s command_timeout must expire even though the query itself
        # (pg_sleep(0.2)) would fit within the budget — i.e. the timeout
        # covers the prepare step, not just execution.
        for methname in {'fetch', 'fetchrow', 'fetchval', 'execute'}:
            with self.assertRaises(asyncio.TimeoutError):
                meth = getattr(self.con, methname)
                await meth('select pg_sleep($1)', 0.2)
--------------------------------------------------------------------------------
/tests/test_types.py:
--------------------------------------------------------------------------------
# Copyright (C) 2016-present the asyncpg authors and contributors
#
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0

from itertools import product

from asyncpg.types import Range
from asyncpg import _testbase as tb


class TestTypes(tb.TestCase):

    def test_range_issubset(self):
        # Candidate subset ranges: all four bound-inclusivity variants of
        # [1, 5], plus empty, wider, narrower, half-unbounded and fully
        # unbounded ranges.
        subs = [
            Range(empty=True),
            Range(lower=1, upper=5, lower_inc=True, upper_inc=False),
            Range(lower=1, upper=5, lower_inc=True, upper_inc=True),
            Range(lower=1, upper=5, lower_inc=False, upper_inc=True),
            Range(lower=1, upper=5, lower_inc=False, upper_inc=False),
            Range(lower=-5, upper=10),
            Range(lower=2, upper=3),
            Range(lower=1, upper=None),
            Range(lower=None, upper=None)
        ]

        # Candidate superset ranges.
        sups = [
            Range(empty=True),
            Range(lower=1, upper=5, lower_inc=True, upper_inc=False),
            Range(lower=1, upper=5, lower_inc=True, upper_inc=True),
            Range(lower=1, upper=5, lower_inc=False, upper_inc=True),
            Range(lower=1, upper=5, lower_inc=False, upper_inc=False),
            Range(lower=None, upper=None)
        ]

        # Each row is 1 subs with all sups
        results = [
            True, True, True, True, True, True,
            False, True, True, False, False, True,
            False, False, True, False, False, True,
            False, False, True, True, False, True,
            False, True, True, True, True, True,
            False, False, False, False, False, True,
            False, True, True, True, True, True,
            False, False, False, False, False, True,
            False, False, False, False, False, True
        ]

        # issubset() and issuperset() are mirror views of the same
        # relation, so both are checked against the same expected value.
        for (sub, sup), res in zip(product(subs, sups), results):
            self.assertIs(
                sub.issubset(sup), res, "Sub:{}, Sup:{}".format(sub, sup)
            )
            self.assertIs(
                sup.issuperset(sub), res, "Sub:{}, Sup:{}".format(sub, sup)
            )
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
# Copyright (C) 2016-present the asyncpg authors and contributors
#
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0


import datetime

from asyncpg import utils
from asyncpg import _testbase as tb


class TestUtils(tb.ConnectedTestCase):

    async def test_mogrify_simple(self):
        # (typename, python value, expected mogrified SQL) triples.
        cases = [
            ('timestamp',
             datetime.datetime(2016, 10, 10),
             "SELECT '2016-10-10 00:00:00'::timestamp"),
            ('int[]',
             [[1, 2], [3, 4]],
             "SELECT '{{1,2},{3,4}}'::int[]"),
        ]

        for typename, data, expected in cases:
            with self.subTest(value=data, type=typename):
                mogrified = await utils._mogrify(
                    self.con, 'SELECT $1::{}'.format(typename), [data])
                self.assertEqual(mogrified, expected)

    async def test_mogrify_multiple(self):
        # Multiple placeholders are substituted in a single statement.
        mogrified = await utils._mogrify(
            self.con, 'SELECT $1::int, $2::int[]',
            [1, [2, 3, 4, 5]])
        expected = "SELECT '1'::int, '{2,3,4,5}'::int[]"
        self.assertEqual(mogrified, expected)
--------------------------------------------------------------------------------
/tools/generate_exceptions.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
#
# Copyright (C)
2016-present the asyncpg authors and contributors
#
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0


import argparse
import builtins
import re
import string
import textwrap

from asyncpg.exceptions import _base as apg_exc


# Explicit class-name overrides for SQLSTATEs whose mechanically derived
# names would be awkward or ambiguous.
_namemap = {
    '08001': 'ClientCannotConnectError',
    '08004': 'ConnectionRejectionError',
    '08006': 'ConnectionFailureError',
    '38002': 'ModifyingExternalRoutineSQLDataNotPermittedError',
    '38003': 'ProhibitedExternalRoutineSQLStatementAttemptedError',
    '38004': 'ReadingExternalRoutineSQLDataNotPermittedError',
    '39004': 'NullValueInExternalRoutineNotAllowedError',
    '42000': 'SyntaxOrAccessError',
    'XX000': 'InternalServerError',
}


_subclassmap = {
    # Special subclass of FeatureNotSupportedError
    # raised by Postgres in RevalidateCachedQuery.
    '0A000': ['InvalidCachedStatementError']
}


def _get_error_name(sqlstatename, msgtype, sqlstate):
    """Derive an exception class name from an errcodes.txt entry.

    The condition name is CamelCased; a trailing Exception/Failure is
    normalized to Error; non-warning names are suffixed with Error; a
    few acronyms (FDW, IO, PLPGSQL, SQL) are upper-cased; names that
    collide with builtins are prefixed with 'Postgres'.  Entries in
    _namemap bypass all of the above.
    """
    if sqlstate in _namemap:
        return _namemap[sqlstate]

    parts = string.capwords(sqlstatename.replace('_', ' ')).split(' ')
    if parts[-1] in {'Exception', 'Failure'}:
        parts[-1] = 'Error'

    # Warnings ('W' message type) keep their name as-is; everything else
    # gets the conventional Error suffix.
    if parts[-1] != 'Error' and msgtype != 'W':
        parts.append('Error')

    for i, part in enumerate(parts):
        if part == 'Fdw':
            parts[i] = 'FDW'
        elif part == 'Io':
            parts[i] = 'IO'
        elif part == 'Plpgsql':
            parts[i] = 'PLPGSQL'
        elif part == 'Sql':
            parts[i] = 'SQL'

    errname = ''.join(parts)

    # Avoid shadowing builtin exception names (e.g. Warning).
    if hasattr(builtins, errname):
        errname = 'Postgres' + errname

    return errname


def main():
    parser = argparse.ArgumentParser(
        description='generate _exceptions.py from postgres/errcodes.txt')
    parser.add_argument('errcodesfile', type=str,
                        help='path to errcodes.txt in PostgreSQL source')

    args = parser.parse_args()

    with open(args.errcodesfile, 'r') as errcodes_f:
        errcodes = errcodes_f.read()

    # errcodes.txt groups related codes under 'Section:' headers.
    section_re = re.compile(r'^Section: .*')

    tpl = """\
class {clsname}({base}):
    {docstring}{sqlstate}"""

    # The first non-skipped code of each section becomes the base class
    # for the remaining codes in that section.
    new_section = True
    section_class = None

    buf = '# GENERATED FROM postgresql/src/backend/utils/errcodes.txt\n' + \
          '# DO NOT MODIFY, use tools/generate_exceptions.py to update\n\n' + \
          'from ._base import *  # NOQA\nfrom . import _base\n\n\n'

    classes = []
    clsnames = set()

    def _add_class(clsname, base, sqlstate, docstring):
        # Render one exception class definition and record its name.
        if sqlstate:
            sqlstate = "sqlstate = '{}'".format(sqlstate)
        else:
            sqlstate = ''

        txt = tpl.format(clsname=clsname, base=base, sqlstate=sqlstate,
                         docstring=docstring)

        # An empty class body still needs a statement.
        if not sqlstate and not docstring:
            txt += 'pass'

        # Wrap over-long class headers after the opening parenthesis.
        if len(txt.splitlines()[0]) > 79:
            txt = txt.replace('(', '(\n        ', 1)

        classes.append(txt)
        clsnames.add(clsname)

    for line in errcodes.splitlines():
        if not line.strip() or line.startswith('#'):
            continue

        if section_re.match(line):
            new_section = True
            continue

        # Fields: sqlstate, message type flag (e.g. 'E'/'W'), errcode
        # macro, condition name — presumably in that order; only fields
        # 0, 1 and 3 are used here.
        parts = re.split(r'\s+', line)

        if len(parts) < 4:
            continue

        sqlstate = parts[0]
        msgtype = parts[1]
        name = parts[3]

        clsname = _get_error_name(name, msgtype, sqlstate)

        # 00000 (successful completion) is not an error; skip it.
        if clsname in {'SuccessfulCompletionError'}:
            continue

        if clsname in clsnames:
            raise ValueError(
                'duplicate exception class name: {}'.format(clsname))

        if new_section:
            section_class = clsname
            if clsname == 'PostgresWarning':
                base = '_base.PostgresLogMessage, Warning'
            else:
                if msgtype == 'W':
                    base = 'PostgresWarning'
                else:
                    base = '_base.PostgresError'

            new_section = False
        else:
            base = section_class

        # Preserve the docstring of an already-defined class so that
        # regeneration does not drop hand-written documentation.
        existing = apg_exc.PostgresMessageMeta.get_message_class_for_sqlstate(
            sqlstate)

        if (existing and existing is not apg_exc.UnknownPostgresError and
                existing.__doc__):
            docstring = '"""{}"""\n\n    '.format(existing.__doc__)
        else:
            docstring = ''

        _add_class(clsname=clsname, base=base, sqlstate=sqlstate,
                   docstring=docstring)

        # Emit any extra hand-curated subclasses for this SQLSTATE.
        subclasses = _subclassmap.get(sqlstate, [])
        for subclass in subclasses:
            existing = getattr(apg_exc, subclass, None)
            if existing and existing.__doc__:
                docstring = '"""{}"""\n\n    '.format(existing.__doc__)
            else:
                docstring = ''

            _add_class(clsname=subclass, base=clsname, sqlstate=None,
                       docstring=docstring)

    buf += '\n\n\n'.join(classes)

    _all = textwrap.wrap(', '.join('{!r}'.format(c) for c in sorted(clsnames)))
    buf += '\n\n\n__all__ = (\n    {}\n)'.format(
        '\n    '.join(_all))

    buf += '\n\n__all__ += _base.__all__'

    # The generated module source is written to stdout; the caller
    # redirects it into asyncpg/exceptions/__init__.py.
    print(buf)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/tools/generate_type_map.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
#
# Copyright (C) 2016-present the asyncpg authors and contributors
#
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0


import argparse
import asyncio

import asyncpg


# Array types with builtin codecs, necessary for codec
# bootstrap to work
#
_BUILTIN_ARRAYS = ('_text', '_oid')

_INVALIDOID = 0

# postgresql/src/include/access/transam.h: FirstBootstrapObjectId
_MAXBUILTINOID = 10000 - 1

# A list of alternative names for builtin types.
27 | _TYPE_ALIASES = { 28 | 'smallint': 'int2', 29 | 'int': 'int4', 30 | 'integer': 'int4', 31 | 'bigint': 'int8', 32 | 'decimal': 'numeric', 33 | 'real': 'float4', 34 | 'double precision': 'float8', 35 | 'timestamp with timezone': 'timestamptz', 36 | 'timestamp without timezone': 'timestamp', 37 | 'time with timezone': 'timetz', 38 | 'time without timezone': 'time', 39 | 'char': 'bpchar', 40 | 'character': 'bpchar', 41 | 'character varying': 'varchar', 42 | 'bit varying': 'varbit' 43 | } 44 | 45 | 46 | async def runner(args): 47 | conn = await asyncpg.connect(host=args.pghost, port=args.pgport, 48 | user=args.pguser) 49 | 50 | buf = ( 51 | '# Copyright (C) 2016-present the asyncpg authors and contributors\n' 52 | '# \n' 53 | '#\n' 54 | '# This module is part of asyncpg and is released under\n' 55 | '# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0' 56 | '\n\n\n' 57 | '# GENERATED FROM pg_catalog.pg_type\n' + 58 | '# DO NOT MODIFY, use tools/generate_type_map.py to update\n\n' + 59 | 'DEF INVALIDOID = {}\n'.format(_INVALIDOID) + 60 | 'DEF MAXBUILTINOID = {}\n'.format(_MAXBUILTINOID) 61 | ) 62 | 63 | pg_types = await conn.fetch(''' 64 | SELECT 65 | oid, 66 | typname 67 | FROM 68 | pg_catalog.pg_type 69 | WHERE 70 | typtype IN ('b', 'p') 71 | AND (typelem = 0 OR typname = any($1) OR typlen > 0) 72 | AND oid <= $2 73 | ORDER BY 74 | oid 75 | ''', _BUILTIN_ARRAYS, _MAXBUILTINOID) 76 | 77 | defs = [] 78 | typemap = {} 79 | array_types = [] 80 | 81 | for pg_type in pg_types: 82 | typeoid = pg_type['oid'] 83 | typename = pg_type['typname'] 84 | 85 | defname = '{}OID'.format(typename.upper()) 86 | defs.append('DEF {name} = {oid}'.format(name=defname, oid=typeoid)) 87 | 88 | if typename in _BUILTIN_ARRAYS: 89 | array_types.append(defname) 90 | typename = typename[1:] + '[]' 91 | 92 | typemap[defname] = typename 93 | 94 | buf += 'DEF MAXSUPPORTEDOID = {}\n\n'.format(pg_types[-1]['oid']) 95 | 96 | buf += '\n'.join(defs) 97 | 98 | buf += '\n\ncdef 
ARRAY_TYPES = ({},)'.format(', '.join(array_types)) 99 | 100 | f_typemap = ('{}: {!r}'.format(dn, n) for dn, n in sorted(typemap.items())) 101 | buf += '\n\nBUILTIN_TYPE_OID_MAP = {{\n {}\n}}'.format( 102 | ',\n '.join(f_typemap)) 103 | buf += ('\n\nBUILTIN_TYPE_NAME_MAP = ' + 104 | '{v: k for k, v in BUILTIN_TYPE_OID_MAP.items()}') 105 | 106 | for k, v in _TYPE_ALIASES.items(): 107 | buf += ('\n\nBUILTIN_TYPE_NAME_MAP[{!r}] = \\\n ' 108 | 'BUILTIN_TYPE_NAME_MAP[{!r}]'.format(k, v)) 109 | 110 | print(buf) 111 | 112 | 113 | def main(): 114 | parser = argparse.ArgumentParser( 115 | description='generate protocol/pgtypes.pxi from pg_catalog.pg_types') 116 | parser.add_argument( 117 | '--pghost', type=str, default='127.0.0.1', 118 | help='PostgreSQL server host') 119 | parser.add_argument( 120 | '--pgport', type=int, default=5432, 121 | help='PostgreSQL server port') 122 | parser.add_argument( 123 | '--pguser', type=str, default='postgres', 124 | help='PostgreSQL server user') 125 | 126 | args = parser.parse_args() 127 | asyncio.run(runner(args)) 128 | 129 | 130 | if __name__ == '__main__': 131 | main() 132 | --------------------------------------------------------------------------------