├── tests ├── __init__.py ├── settings.py └── test_core.py ├── channels_postgres ├── migrations │ ├── __init__.py │ ├── 0002_create_triggers_and_functions.py │ ├── 0001_initial.py │ ├── 0003_notify_smaller_payload.py │ └── 0004_notify_ensure_payload_not_too_big.py ├── __init__.py ├── apps.py ├── models.py ├── db.py └── core.py ├── codecov.yml ├── .gitignore ├── .github ├── workflows │ ├── coverage_main_branch.yml │ ├── tests.yml │ ├── python-publish.yml │ ├── create_tag.yml │ └── shared-tests.yml └── dependabot.yml ├── package.json ├── Pipfile ├── tox.ini ├── .release-it.json ├── LICENSE ├── pyproject.toml ├── README.md └── Pipfile.lock /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /channels_postgres/migrations/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | - channels_postgres/migrations/** 3 | -------------------------------------------------------------------------------- /channels_postgres/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Channels Postgres 3 | """ 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | *.pyc 3 | .tox/ 4 | .coverage/ 5 | .DS_Store 6 | coverage.xml 7 | -------------------------------------------------------------------------------- /.github/workflows/coverage_main_branch.yml: -------------------------------------------------------------------------------- 1 | name: Main branch coverage 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | upload-coverage: 10 | name: Run tests and upload coverage reports 11 | uses: ./.github/workflows/shared-tests.yml 12 | -------------------------------------------------------------------------------- /channels_postgres/apps.py: -------------------------------------------------------------------------------- 1 | "Channels postgres app config" 2 | 3 | from django.apps import AppConfig 4 | 5 | 6 | class ChannelsPostgresConfig(AppConfig): 7 | """App Config.""" 8 | 9 | name = 'channels_postgres' 10 | default_auto_field = 'django.db.models.AutoField' 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "channels_postgres", 3 | "version": "1.1.3", 4 | "devDependencies": { 5 | "@release-it/bumper": "6.0.1", 6 | "@release-it/conventional-changelog": "8.0.1", 7 | "release-it": "17.0.0" 8 | }, 9 | "description": "Postgres backend for Django channels" 10 | } 11 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | lint: 8 | name: Lint 9 | runs-on: ubuntu-22.04 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: Set up Python 13 | uses: actions/setup-python@v5 14 | with: 15 | python-version: "3.13" 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip tox 19 | - name: Run lint 20 | run: tox -e qa 21 | tests: 22 | name: Run tests 23 | uses: ./.github/workflows/shared-tests.yml 24 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | asgiref = "==3.*" 8 | channels = "==4.*" 9 | msgpack = "==1.*" 10 | psycopg = {extras = ["pool"], version = "==3.*"} 11 | 12 | [dev-packages] 13 | async-timeout = "==5.*" 14 | django = "==5.*" 15 | django-stubs = {extras = ["compatible-mypy"], version = "==5.*"} 16 | pylint = "==3.*" 17 | pylint-django = "==2.*" 18 | pytest = "==8.*" 19 | pytest-asyncio = "==0.*" 20 | pytest-cov = "==6.*" 21 | ruff = "0.*" 22 | tox = "==4.*" 23 | 24 | [requires] 25 | python_version = "3.13" 26 | 27 | [scripts] 28 | tests = "tox" 29 | format = "ruff format ." 30 | start_postgres = "docker run --rm --name channels_postgres_db -e POSTGRES_PASSWORD=password -e POSTGRES_DB=postgres -e POSTGRES_USER=postgres -p 5432:5432 postgres:17-alpine" 31 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = 3 | py39-django{42}, 4 | py310-django{42,50,51}, 5 | py311-django{42,50,51,52}, 6 | py312-django{42,50,51,52,main}, 7 | py313-django{42,50,51,52,main}, 8 | qa 9 | 10 | [testenv] 11 | usedevelop = true 12 | extras = tests 13 | deps = 14 | django42: Django >=4.2, <5.0 15 | django50: Django >=5.0, <5.1 16 | django51: Django >=5.1, <5.2 17 | django52: Django >=5.2, <5.3 18 | djangomain: https://github.com/django/django/archive/main.tar.gz 19 | commands = 20 | python -m django migrate --database channels_postgres 21 | pytest -s -vvv {posargs} 22 | setenv = 23 | DJANGO_SETTINGS_MODULE = tests.settings 24 | 25 | [testenv:qa] 26 | commands = 27 | ruff check channels_postgres tests 28 | pylint --load-plugins pylint_django channels_postgres tests 29 | mypy channels_postgres tests 30 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | name: Upload Python Package 2 | 3 | on: 4 | workflow_run: 5 | workflows: [Tag and Release (Github)] 6 | types: 7 | - completed 8 | 9 | jobs: 10 | pypi-publish: 11 | name: upload release to PyPI 12 | runs-on: ubuntu-22.04 13 | # Specifying a GitHub environment is optional, but strongly encouraged 14 | environment: release 15 | permissions: 16 | # IMPORTANT: this permission is mandatory for trusted publishing 17 | id-token: write 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Set up Python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: '3.13' 24 | - name: Install dependencies 25 | run: python -m pip install build==1.2.2 26 | - name: Build package 27 | run: python -m build 28 | - name: Publish package distributions to PyPI 29 | uses: pypa/gh-action-pypi-publish@release/v1 30 | -------------------------------------------------------------------------------- /.release-it.json: -------------------------------------------------------------------------------- 1 | { 2 | "npm": { 3 | "publish": false, 4 | "versionArgs": ["--allow-same-version"] 5 | }, 6 | "git": { 7 | "commitMessage": "chore: Release ${version}" 8 | }, 9 | "github": { 10 | "release": true 11 | }, 12 | "plugins": { 13 | "@release-it/conventional-changelog": { 14 | "infile": false, 15 | "preset": { 16 | "name": "conventionalcommits", 17 | "types": [ 18 | { 19 | "type": "chore", 20 | "section": "Chore and depedencies" 21 | }, 22 | { 23 | "type": "feat", 24 | "section": "Features" 25 | }, 26 | { 27 | "type": "fix", 28 | "section": "Bug Fixes" 29 | } 30 | ] 31 | } 32 | }, 33 | "@release-it/bumper": { 34 | "out": [ 35 | "channels_postgres/__init__.py", 36 | "package.json" 37 | ] 38 | } 39 | } 40 | } -------------------------------------------------------------------------------- /.github/workflows/create_tag.yml: -------------------------------------------------------------------------------- 1 | name: Tag and Release (Github) 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | release: 8 | name: Tag and create github release 9 | runs-on: ubuntu-22.04 10 | permissions: 11 | contents: write # to be able to publish a GitHub release 12 | issues: write # to be able to comment on released issues 13 | pull-requests: write # to be able to comment on released pull requests 14 | id-token: write # to enable use of OIDC for npm provenance 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | fetch-depth: 0 19 | - uses: actions/setup-node@v4 20 | with: 21 | node-version: 18 22 | cache: 'yarn' 23 | - run: yarn install 24 | - run: git config --global user.name "${GITHUB_ACTOR}" 25 | - run: git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" 26 | - name: Release 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | run: npx release-it --ci 30 | -------------------------------------------------------------------------------- /channels_postgres/models.py: -------------------------------------------------------------------------------- 1 | """channels_postgres models""" 2 | 3 | from datetime import datetime, timedelta 4 | 5 | from django.db import models 6 | from django.utils import timezone 7 | 8 | 9 | def _default_channel_expiry_time() -> datetime: 10 | return timezone.now() + timedelta(seconds=86400) 11 | 12 | 13 | def _default_message_expiry_time() -> datetime: 14 | return timezone.now() + timedelta(minutes=1) 15 | 16 | 17 | class GroupChannel(models.Model): 18 | """ 19 | A model that represents a group channel. 20 | 21 | Groups are used to send messages to multiple channels. 22 | """ 23 | 24 | group_key = models.CharField(max_length=100, null=False) 25 | channel = models.CharField(max_length=100, null=False) 26 | expire = models.DateTimeField(default=_default_channel_expiry_time) 27 | 28 | 29 | class Message(models.Model): 30 | """ 31 | A model that represents a message. 32 | 33 | Messages are used to send messages to a specific channel. 34 | E.g for user to user private messages. 35 | """ 36 | 37 | channel = models.CharField(max_length=100) 38 | message = models.BinaryField(max_length=1000) 39 | expire = models.DateTimeField(default=_default_message_expiry_time) 40 | -------------------------------------------------------------------------------- /channels_postgres/migrations/0002_create_triggers_and_functions.py: -------------------------------------------------------------------------------- 1 | from django.db import migrations 2 | 3 | 4 | class Migration(migrations.Migration): 5 | dependencies = [('channels_postgres', '0001_initial')] 6 | 7 | setup_database_sql = """ 8 | CREATE OR REPLACE FUNCTION channels_postgres_notify() 9 | RETURNS trigger AS $$ 10 | DECLARE 11 | BEGIN 12 | PERFORM pg_notify(NEW.channel, NEW.id::text); 13 | RETURN NEW; 14 | END; 15 | $$ LANGUAGE plpgsql; 16 | 17 | DO $$ BEGIN 18 | CREATE TRIGGER channels_postgres_notify_trigger 19 | AFTER INSERT ON channels_postgres_message 20 | FOR EACH ROW 21 | EXECUTE PROCEDURE channels_postgres_notify(); 22 | EXCEPTION 23 | WHEN others THEN null; 24 | END $$; 25 | """ 26 | 27 | reverse_setup_database_sql = """ 28 | DROP TRIGGER IF EXISTS channels_postgres_notify_trigger ON channels_postgres_message; 29 | DROP FUNCTION IF EXISTS channels_postgres_notify; 30 | """ # noqa 31 | 32 | operations = [migrations.RunSQL(sql=setup_database_sql, reverse_sql=reverse_setup_database_sql)] 33 | -------------------------------------------------------------------------------- /.github/workflows/shared-tests.yml: -------------------------------------------------------------------------------- 1 | name: Shared tests 2 | 3 | on: 4 | workflow_call: 5 | 6 | jobs: 7 | tests: 8 | name: Python ${{ matrix.python-version }} 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | # os: [ubuntu-22.04, windows-2022]: services (containers) are not supported on windows 14 | os: [ubuntu-22.04] 15 | python-version: [3.9, "3.10", "3.11", "3.12", "3.13"] 16 | 17 | services: 18 | postgres: 19 | image: postgres:17-alpine 20 | env: 21 | POSTGRES_DB: postgres 22 | POSTGRES_USER: postgres 23 | POSTGRES_PASSWORD: password 24 | ports: 25 | - 5432:5432 26 | steps: 27 | - uses: actions/checkout@v4 28 | - name: Set up Python ${{ matrix.python-version }} 29 | uses: actions/setup-python@v5 30 | with: 31 | python-version: ${{ matrix.python-version }} 32 | - name: Install dependencies 33 | run: | 34 | python -m pip install --upgrade pip wheel setuptools tox 35 | - name: Run tox targets for ${{ matrix.python-version }} 36 | run: | 37 | ENV_PREFIX=$(tr -C -d "0-9" <<< "${{ matrix.python-version }}") 38 | TOXENV=$(tox --listenvs | grep "^py$ENV_PREFIX" | tr '\n' ',') python -m tox 39 | - name: Upload coverage reports to Codecov 40 | if: github.actor != 'dependabot[bot]' 41 | uses: codecov/codecov-action@v5 42 | with: 43 | token: ${{ secrets.CODECOV_TOKEN }} 44 | 45 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, Osaetin Daniel 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /channels_postgres/migrations/0001_initial.py: -------------------------------------------------------------------------------- 1 | from django.db import migrations, models 2 | 3 | import channels_postgres.models 4 | 5 | 6 | class Migration(migrations.Migration): 7 | initial = True 8 | 9 | dependencies = [] 10 | 11 | operations = [ 12 | migrations.CreateModel( 13 | name='GroupChannel', 14 | fields=[ 15 | ( 16 | 'id', 17 | models.AutoField( 18 | auto_created=True, primary_key=True, serialize=False, verbose_name='ID' 19 | ), 20 | ), # noqa 21 | ('group_key', models.CharField(max_length=100)), 22 | ('channel', models.CharField(max_length=100)), 23 | ( 24 | 'expire', 25 | models.DateTimeField( 26 | default=channels_postgres.models._default_channel_expiry_time 27 | ), 28 | ), # noqa 29 | ], 30 | ), 31 | migrations.CreateModel( 32 | name='Message', 33 | fields=[ 34 | ( 35 | 'id', 36 | models.AutoField( 37 | auto_created=True, primary_key=True, serialize=False, verbose_name='ID' 38 | ), 39 | ), # noqa 40 | ('channel', models.CharField(max_length=100)), 41 | ('message', models.BinaryField(max_length=1000)), 42 | ( 43 | 'expire', 44 | models.DateTimeField( 45 | default=channels_postgres.models._default_message_expiry_time 46 | ), 47 | ), # noqa 48 | ], 49 | ), 50 | ] 51 | -------------------------------------------------------------------------------- /tests/settings.py: -------------------------------------------------------------------------------- 1 | """channels_postgres django test settings""" 2 | 3 | USE_TZ = False 4 | 5 | DATABASES = { 6 | 'default': { 7 | 'ENGINE': 'django.db.backends.postgresql', 8 | 'NAME': 'postgres', 9 | 'USER': 'postgres', 10 | 'PASSWORD': 'password', 11 | 'HOST': '127.0.0.1', 12 | 'PORT': '5432', 13 | # 'PYSCOPG_OPTIONS': { 14 | # 'min_size': 10, 15 | # 'max_size': 20, 16 | # }, 17 | } 18 | } 19 | 20 | DATABASES['channels_postgres'] = DATABASES['default'] 21 | 22 | 23 | INSTALLED_APPS = [ 24 | 'django.contrib.admin', 25 | 'django.contrib.messages', 26 | 'django.contrib.auth', 27 | 'django.contrib.contenttypes', 28 | 'django.contrib.sessions', 29 | 'django.contrib.sites', 30 | 'channels_postgres', 31 | ] 32 | 33 | SECRET_KEY = 'secret' 34 | 35 | MIDDLEWARE = [ 36 | 'django.middleware.security.SecurityMiddleware', 37 | 'django.contrib.sessions.middleware.SessionMiddleware', 38 | 'corsheaders.middleware.CorsMiddleware', 39 | 'django.middleware.common.CommonMiddleware', 40 | 'django.middleware.csrf.CsrfViewMiddleware', 41 | 'django.contrib.auth.middleware.AuthenticationMiddleware', 42 | 'django.contrib.messages.middleware.MessageMiddleware', 43 | 'django.middleware.clickjacking.XFrameOptionsMiddleware', 44 | ] 45 | 46 | TEMPLATES = [ 47 | { 48 | 'BACKEND': 'django.template.backends.django.DjangoTemplates', 49 | 'DIRS': [], 50 | 'APP_DIRS': True, 51 | 'OPTIONS': { 52 | 'context_processors': [ 53 | 'django.template.context_processors.debug', 54 | 'django.template.context_processors.request', 55 | 'django.contrib.auth.context_processors.auth', 56 | 'django.contrib.messages.context_processors.messages', 57 | ], 58 | }, 59 | }, 60 | ] 61 | -------------------------------------------------------------------------------- /channels_postgres/migrations/0003_notify_smaller_payload.py: -------------------------------------------------------------------------------- 1 | from django.db import migrations 2 | 3 | 4 | class Migration(migrations.Migration): 5 | dependencies = [('channels_postgres', '0002_create_triggers_and_functions')] 6 | 7 | # For messages than are smaller than 7168 bytes, we can send the whole message in the payload 8 | # Otherwise, we send only the message id 9 | 10 | # Postgres' NOTIFY messages are limited to 8000 bytes, so we can't always send the whole message 11 | # in the payload. 12 | setup_database_sql = """ 13 | CREATE OR REPLACE FUNCTION channels_postgres_notify() 14 | RETURNS trigger AS $$ 15 | DECLARE 16 | payload text; 17 | BEGIN 18 | IF octet_length(NEW.message) <= 7168 THEN 19 | payload := NEW.id::text || ':' || NEW.channel::text || ':' || encode(NEW.message, 'base64') || ':' || extract(epoch from NEW.expire)::text; 20 | ELSE 21 | payload := NEW.id::text || ':' || NEW.channel::text; 22 | END IF; 23 | 24 | PERFORM pg_notify('channels_postgres_messages', payload); 25 | RETURN NEW; 26 | END; 27 | $$ LANGUAGE plpgsql; 28 | 29 | DO $$ BEGIN 30 | CREATE TRIGGER channels_postgres_notify_trigger 31 | AFTER INSERT ON channels_postgres_message 32 | FOR EACH ROW 33 | EXECUTE PROCEDURE channels_postgres_notify(); 34 | EXCEPTION 35 | WHEN others THEN null; 36 | END $$; 37 | """ 38 | 39 | reverse_setup_database_sql = """ 40 | DROP TRIGGER IF EXISTS channels_postgres_notify_trigger ON channels_postgres_message; 41 | DROP FUNCTION IF EXISTS channels_postgres_notify; 42 | """ # noqa 43 | 44 | operations = [ 45 | migrations.RunSQL(sql=setup_database_sql, reverse_sql=reverse_setup_database_sql), 46 | ] 47 | -------------------------------------------------------------------------------- /channels_postgres/migrations/0004_notify_ensure_payload_not_too_big.py: -------------------------------------------------------------------------------- 1 | from django.db import migrations 2 | 3 | 4 | class Migration(migrations.Migration): 5 | dependencies = [('channels_postgres', '0003_notify_smaller_payload')] 6 | # For messages than are smaller than 8000 bytes, we can send the whole message in the payload 7 | # Otherwise, we send only the message id in the payload. 8 | setup_database_sql = """ 9 | CREATE OR REPLACE FUNCTION channels_postgres_notify() 10 | RETURNS trigger AS $$ 11 | DECLARE 12 | payload text; 13 | encoded_message text; 14 | epoch text; 15 | BEGIN 16 | encoded_message := encode(NEW.message, 'base64'); 17 | epoch := extract(epoch from NEW.expire)::text; 18 | 19 | payload := NEW.id::text || ':' || NEW.channel::text || ':' || encoded_message || ':' || epoch; 20 | IF octet_length(payload) > 8000 THEN 21 | payload := NEW.id::text || ':' || NEW.channel::text; 22 | END IF; 23 | 24 | PERFORM pg_notify('channels_postgres_messages', payload); 25 | 26 | RETURN NEW; 27 | END; 28 | $$ LANGUAGE plpgsql; 29 | 30 | DO $$ BEGIN 31 | CREATE TRIGGER channels_postgres_notify_trigger 32 | AFTER INSERT ON channels_postgres_message 33 | FOR EACH ROW 34 | EXECUTE PROCEDURE channels_postgres_notify(); 35 | EXCEPTION 36 | WHEN others THEN null; 37 | END $$; 38 | """ 39 | 40 | reverse_setup_database_sql = """ 41 | DROP TRIGGER IF EXISTS channels_postgres_notify_trigger ON channels_postgres_message; 42 | DROP FUNCTION IF EXISTS channels_postgres_notify; 43 | """ # noqa 44 | 45 | operations = [ 46 | migrations.RunSQL(sql=setup_database_sql, reverse_sql=reverse_setup_database_sql), 47 | ] 48 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "channels_postgres" 7 | version = "1.1.4" 8 | description = "PostgreSQL-backed ASGI channel layer implementation" 9 | readme = "README.md" 10 | requires-python = ">=3.9" 11 | license = "MIT" 12 | authors = [ 13 | { name = "Daniel Osaetin", email = "f805nqs6j@relay.firefox.com" } 14 | ] 15 | dependencies = [ 16 | "msgpack>=1.0.7,<2", 17 | "asgiref>=3.7.2,<4", 18 | "channels>=4.0.0,<5", 19 | "psycopg[pool]>=3,<4", 20 | ] 21 | 22 | [project.urls] 23 | Homepage = "http://github.com/danidee10/channels_postgres/" 24 | 25 | [project.optional-dependencies] 26 | cryptography = [ 27 | "cryptography>=44.0.2,<47" 28 | ] 29 | tests = [ 30 | "async-timeout", 31 | "cryptography>=44.0.2,<47", 32 | "django-stubs>=5.1.3,<6", 33 | "mypy>=1.15.0,<2", 34 | "pylint>=3.3.6,<4", 35 | "pylint-django>=2.6.1,<3", 36 | "pytest>=8.3.5,<9", 37 | "pytest-asyncio>=0.26.0,<1", 38 | "pytest-cov>=6.1.1,<7", 39 | "ruff>=0.11.4,<1" 40 | ] 41 | 42 | 43 | [tool.setuptools] 44 | include-package-data = true 45 | 46 | [tool.setuptools.packages.find] 47 | exclude = ["tests"] 48 | 49 | [tool.wheel] 50 | universal = true 51 | 52 | [tool.pylint.MAIN] 53 | ignore-paths = ["channels_postgres/migrations/*"] 54 | 55 | [tool.pytest.ini_options] 56 | addopts = "-p no:django tests/ --cov=channels_postgres --cov-branch --cov-report xml --cov-report html" 57 | asyncio_mode = "auto" 58 | # log_cli = true 59 | # log_level = "NOTSET" 60 | 61 | [tool.ruff] 62 | line-length = 100 63 | 64 | [tool.ruff.format] 65 | quote-style = "single" 66 | indent-style = "space" 67 | docstring-code-format = true 68 | 69 | [tool.mypy] 70 | plugins = ["mypy_django_plugin.main"] 71 | exclude = ["channels_postgres/migrations/*"] 72 | strict = true 73 | 74 | [tool.django-stubs] 75 | django_settings_module = "tests.settings" 76 | 77 | [[tool.mypy.overrides]] 78 | module = ["channels.*"] 79 | ignore_missing_imports = true 80 | 81 | [[tool.mypy.overrides]] 82 | module = ["psycopg.*"] 83 | ignore_missing_imports = true 84 | 85 | [[tool.mypy.overrides]] 86 | module = ["pytest.*"] 87 | ignore_missing_imports = true 88 | 89 | [[tool.mypy.overrides]] 90 | module = ["asgiref.*"] 91 | ignore_missing_imports = true 92 | 93 | [[tool.mypy.overrides]] 94 | module = ["msgpack.*"] 95 | ignore_missing_imports = true 96 | 97 | [[tool.mypy.overrides]] 98 | module = ["cryptography.*"] 99 | ignore_missing_imports = true 100 | 101 | [[tool.mypy.overrides]] 102 | module = ["async_timeout.*"] 103 | ignore_missing_imports = true 104 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # channels_postgres 2 | 3 | [![codecov](https://codecov.io/gh/danidee10/channels_postgres/graph/badge.svg?token=47JCO35QEB)](https://codecov.io/gh/danidee10/channels_postgres) 4 | 5 | [![Tests](https://github.com/danidee10/channels_postgres/actions/workflows/tests.yml/badge.svg)](https://github.com/danidee10/channels_postgres/actions/workflows/tests.yml) 6 | 7 | [![channels_postgres pypi](https://img.shields.io/pypi/v/channels_postgres.svg)](https://pypi.python.org/pypi/channels_postgres) 8 | 9 | [![Downloads](https://pepy.tech/badge/channels-postgres)](https://pepy.tech/project/channels-postgres) 10 | 11 | A Django Channels channel layer that uses PostgreSQL as its backing store 12 | 13 | ## Installation 14 | 15 | ```bash 16 | pip install channels_postgres 17 | ``` 18 | 19 | ### Update INSTALLED_APPS 20 | 21 | ```python 22 | INSTALLED_APPS = ( 23 | ... 24 | 'channels', 25 | 'channels_postgres', 26 | ... 27 | ) 28 | ``` 29 | 30 | ### Run migrations for internal tables 31 | ```bash 32 | python manage.py migrate channels_postgres 33 | ``` 34 | 35 | ### Update DATABASES dictionary 36 | 37 | Add the 'channels_postgres' entry to the DATABASES dictionary in your Django settings file like so: 38 | 39 | ```python 40 | DATABASES = { 41 | 'default': { 42 | ... 43 | }, 44 | 'channels_postgres': { 45 | 'ENGINE': 'django.db.backends.postgresql', 46 | 'NAME': 'postgres', 47 | 'USER': 'postgres', 48 | 'PASSWORD': 'password', 49 | 'HOST': '127.0.0.1', 50 | 'PORT': '5432', 51 | } 52 | } 53 | ``` 54 | 55 | ## Usage 56 | 57 | Set up the channel layer in your Django settings file like so: 58 | 59 | ```python 60 | CHANNEL_LAYERS = { 61 | 'default': { 62 | 'BACKEND': 'channels_postgres.core.PostgresChannelLayer', 63 | 'CONFIG': { 64 | 'ENGINE': 'django.db.backends.postgresql', 65 | 'NAME': 'postgres', 66 | 'USER': 'postgres', 67 | 'PASSWORD': 'password', 68 | 'HOST': '127.0.0.1', 69 | 'PORT': '5432', 70 | ..., 71 | # Optional configuration for psycopg_pool 72 | 'PSYCOPG_OPTIONS': { 73 | 'min_size': 10, 74 | 'max_size': 10, 75 | }, 76 | }, 77 | }, 78 | } 79 | ``` 80 | 81 | The Config object is exactly the same as the standard config object for Django's PostgreSQL database. See the django documentation for more information. 82 | 83 | The django-channels config parameters are described below: 84 | 85 | ### prefix 86 | 87 | Prefix to add to all database group keys. Defaults to asgi:. In most cases, you don't need to change this setting because it's only used internally. 88 | 89 | ### expiry 90 | 91 | Message expiry in seconds. Defaults to 60. You generally shouldn't need to change this, but you may want to turn it down if you have peaky traffic you wish to drop, or up if you have peaky traffic you want to backlog until you get to it. 92 | group_expiry 93 | 94 | ### Group expiry 95 | 96 | Defaults to 0. 97 | 98 | `0 means disabled!` 99 | 100 | Channels will be removed from the group after this amount of time; it's recommended you reduce it for a healthier system that encourages disconnections. This value should not be lower than the relevant timeouts in the interface server (e.g. the --websocket_timeout to daphne). 101 | capacity 102 | 103 | ### symmetric_encryption_keys 104 | 105 | Pass this to enable the optional symmetric encryption mode of the backend. To use it, make sure you have the cryptography package installed, or specify the cryptography extra when you install channels_postgres: 106 | 107 | pip install channels_postgres[cryptography] 108 | 109 | symmetric_encryption_keys should be a list of strings, with each string being an encryption key. The first key is always used for encryption; all are considered for decryption, so you can rotate keys without downtime - just add a new key at the start and move the old one down, then remove the old one after the message expiry time has passed. 110 | 111 | Data is encrypted both on the wire and at rest in Postgres, though we advise you also route your Postgres connections over TLS for higher security. 112 | 113 | Keys should have at least 32 bytes of entropy - they are passed through the SHA256 hash function before being used as an encryption key. Any string will work, but the shorter the string, the easier the encryption is to break. 114 | 115 | If you're using Django, you may also wish to set this to your site's SECRET_KEY setting via the CHANNEL_LAYERS setting: 116 | 117 | ```python 118 | CHANNEL_LAYERS = { 119 | 'default': { 120 | 'BACKEND': 'channels_postgres.core.PostgresChannelLayer', 121 | 'CONFIG': { 122 | ..., 123 | 'symmetric_encryption_keys': [SECRET_KEY], 124 | }, 125 | }, 126 | } 127 | ``` 128 | 129 | ### Pyscopg pool 130 | 131 | The `channels_postgres` makes use of a connection pool (via `psycopg_pool`) to efficiently manage concurrent connections to the database. You can pass additional options to the underlying `psycopg_pool` connection pool by setting the `PSYCOPG_OPTIONS` setting. 132 | 133 | See the [psycopg_pool documentation](https://www.psycopg.org/psycopg3/docs/api/pool.html#null-connection-pools) for more information. 134 | 135 | This might come in handy if you have lots of consumers in your channels application with increased latency between sending a message and the consumer(s) processing it. 136 | Increasing the `min_size` of the connection pool might help. 137 | 138 | ## Deviations from the channels spec 139 | 140 | ### group_expiry 141 | 142 | Defaults to 0 (which means disabled). This option is tied too closely to `daphne` (The official ASGI interface server for `django-channels`). It makes no sense if you're using an alternate `ASGI` server (like `Uvicorn`) which doesn't disconnect WebSockets automatically. 143 | 144 | Setting it to a non zero value enables the expected behaviour. 145 | 146 | ### channel_capacity 147 | 148 | RDBMS' like `PostgreSQL` were specifically built to handle considerable amounts of data. Hence, there's no channel capacity. Although, it should be noted that the `channels_postgres` channel layer uses an internal `asyncio.Queue` to store messages and deliver them to consumers as quickly as possible. 149 | It is quite possible to see increased memory usage on the server if you send a lot of messages without consumers to process them. 150 | 151 | Your database should be able to handle thousands of messages with ease. If you're still worried about the database table growing out of hand, you can reduce the `expiry` time of the individual messages so they will be purged if a consumer cannot process them on time. 152 | 153 | ## Dependencies 154 | 155 | Python >= 3.9 is required for `channels_postgres` 156 | -------------------------------------------------------------------------------- /channels_postgres/db.py: -------------------------------------------------------------------------------- 1 | """common db methods.""" 2 | 3 | import asyncio 4 | import hashlib 5 | import logging 6 | import random 7 | import typing 8 | from datetime import datetime, timedelta 9 | 10 | import psycopg 11 | import psycopg_pool 12 | import psycopg_pool.base 13 | from psycopg import sql 14 | 15 | from .models import GroupChannel, Message 16 | 17 | try: 18 | from datetime import UTC 19 | except ImportError: 20 | UTC = None # type: ignore 21 | 22 | 23 | if typing.TYPE_CHECKING: 24 | from logging import Logger 25 | 26 | 27 | # Enable pool logging 28 | # logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(name)s: %(message)s') 29 | # logging.getLogger('psycopg.pool').setLevel(logging.DEBUG) 30 | 31 | 32 | # A global variable is used to ensure only one connection pool is created 33 | # regardless of the amount of threads 34 | # And also to prevent RuntimeErrors when the event loop is closed after running tests 35 | # and psycopg Async workers are not cleaned up properly 36 | is_creating_connection_pool = asyncio.Lock() 37 | connection_pool: typing.Optional[psycopg_pool.AsyncConnectionPool] = None 38 | 39 | MESSAGE_TABLE = Message._meta.db_table 40 | GROUP_CHANNEL_TABLE = GroupChannel._meta.db_table 41 | 42 | 43 | def utc_now() -> datetime: 44 | """ 45 | Return the current datetime in UTC 46 | 47 | It is compatible with python<=3.10 and python>=3.11 48 | """ 49 | if UTC: 50 | return datetime.now(UTC) 51 | 52 | return datetime.utcnow() 53 | 54 | 55 | class DatabaseLayer: 56 | """ 57 | Encapsulates database operations 58 | 59 | A connection pool is used for efficient management of database operations 60 | This is also the reason why psycopg is used directly instead of django's ORM 61 | which doesn't support connection pooling 62 | """ 63 | 64 | def __init__( 65 | self, 66 | psycopg_options: dict[str, typing.Any], 67 | db_params: dict[str, typing.Any], 68 | using: str = 'channels_postgres', 69 | logger: 'Logger' = logging.getLogger('channels_postgres.database'), 70 | ) -> None: 71 | self.logger = logger 72 | self.using = using 73 | self.db_params = db_params 74 | self.psycopg_options = psycopg_options 75 | 76 | async def get_db_pool( 77 | self, db_params: dict[str, typing.Any] 78 | ) -> psycopg_pool.AsyncConnectionPool: 79 | """ 80 | Returns a connection pool for the database 81 | 82 | Uses a `Lock` to ensure that only one coroutine can create the connection pool 83 | Others have to wait until the connection pool is created 84 | """ 85 | global connection_pool # pylint: disable=W0603 86 | 87 | async def _configure_connection(conn: psycopg.AsyncConnection) -> None: 88 | await conn.set_autocommit(True) 89 | conn.prepare_threshold = 0 # All statements should be prepared 90 | conn.prepared_max = None # No limit on the number of prepared statements 91 | 92 | async with is_creating_connection_pool: 93 | if connection_pool is not None: 94 | self.logger.debug('Pool %s already exists', connection_pool.name) 95 | 96 | pool_stats = connection_pool.get_stats() 97 | self.logger.debug('Pool stats: %s', pool_stats) 98 | 99 | return connection_pool 100 | 101 | conn_info = psycopg.conninfo.make_conninfo(conninfo='', **db_params) 102 | 103 | connection_pool = psycopg_pool.AsyncConnectionPool( 104 | conninfo=conn_info, 105 | open=False, 106 | configure=_configure_connection, 107 | **self.psycopg_options, 108 | ) 109 | await connection_pool.open(wait=True) 110 | 111 | self.logger.debug('Pool %s created', connection_pool.name) 112 | 113 | return connection_pool 114 | 115 | async def retrieve_group_channels(self, group_key: str) -> list[str]: 116 | """Retrieves all channels for a group""" 117 | retrieve_channels_sql = sql.SQL( 118 | 'SELECT DISTINCT group_key,channel FROM {table} WHERE group_key=%s' 119 | ).format(table=sql.Identifier(GROUP_CHANNEL_TABLE)) 120 | 121 | db_pool = await self.get_db_pool(db_params=self.db_params) 122 | async with db_pool.connection() as conn: 123 | async with conn.cursor() as cursor: 124 | await cursor.execute(retrieve_channels_sql, (group_key,)) 125 | result = await cursor.fetchall() 126 | return [row[1] for row in result] 127 | 128 | async def send_to_channel( 129 | self, 130 | group_key: str, 131 | message: bytes, 132 | expire: int, 133 | channel: typing.Optional[str] = None, 134 | ) -> None: 135 | """Send a message to a channel/channels (if no channel is specified).""" 136 | message_add_sql = sql.SQL( 137 | 'INSERT INTO {table} (channel, message, expire) VALUES (%s, %s, %s)' 138 | ).format(table=sql.Identifier(MESSAGE_TABLE)) 139 | 140 | if channel is None: 141 | channels = await self.retrieve_group_channels(group_key) 142 | if not channels: 143 | self.logger.warning('Group: %s does not exist, did you call group_add?', group_key) 144 | return 145 | else: 146 | channels = [channel] 147 | 148 | expiry_datetime = utc_now() + timedelta(seconds=expire) 149 | db_pool = await self.get_db_pool(db_params=self.db_params) 150 | async with db_pool.connection() as conn: 151 | async with conn.cursor() as cursor: 152 | if len(channels) == 1: 153 | # single insert 154 | data = (channels[0], message, expiry_datetime) 155 | await cursor.execute(message_add_sql, data) 156 | else: 157 | # Bulk insert messages 158 | multi_data = [(channel, message, expiry_datetime) for channel in channels] 159 | await cursor.executemany(message_add_sql, multi_data) 160 | 161 | async def add_channel_to_group(self, group_key: str, channel: str, expire: int) -> None: 162 | """Adds a channel to a group""" 163 | expiry_datetime = utc_now() + timedelta(seconds=expire) 164 | group_add_sql = sql.SQL( 165 | 'INSERT INTO {table} (group_key, channel, expire) VALUES (%s, %s, %s)' 166 | ).format(table=sql.Identifier(GROUP_CHANNEL_TABLE)) 167 | 168 | db_pool = await self.get_db_pool(db_params=self.db_params) 169 | async with db_pool.connection() as conn: 170 | async with conn.cursor() as cursor: 171 | data = (group_key, channel, expiry_datetime) 172 | await cursor.execute(group_add_sql, data) 173 | 174 | self.logger.debug('Channel %s added to Group %s', channel, group_key) 175 | 176 | async def delete_expired_groups(self) -> None: 177 | """Deletes expired groups after a random delay""" 178 | delete_expired_groups_sql = sql.SQL('DELETE FROM {table} WHERE expire < %s').format( 179 | table=sql.Identifier(GROUP_CHANNEL_TABLE) 180 | ) 181 | 182 | expire = 60 * random.randint(10, 20) 183 | self.logger.debug('Deleting expired groups in %s seconds...', expire) 184 | await asyncio.sleep(expire) 185 | 186 | now = utc_now() 187 | db_pool = await self.get_db_pool(db_params=self.db_params) 188 | async with db_pool.connection() as conn: 189 | async with conn.cursor() as cursor: 190 | await cursor.execute(delete_expired_groups_sql, (now,)) 191 | 192 | async def delete_expired_messages(self, expire: typing.Optional[int] = None) -> None: 193 | """Deletes expired messages after a set time or random delay""" 194 | delete_expired_messages_sql = sql.SQL('DELETE FROM {table} WHERE expire < %s').format( 195 | table=sql.Identifier(MESSAGE_TABLE) 196 | ) 197 | 198 | if expire is None: 199 | expire = 60 * random.randint(10, 20) 200 | self.logger.debug('Deleting expired messages in %s seconds...', expire) 201 | await asyncio.sleep(expire) 202 | 203 | now = utc_now() 204 | db_pool = await self.get_db_pool(db_params=self.db_params) 205 | async with db_pool.connection() as conn: 206 | async with conn.cursor() as cursor: 207 | await cursor.execute(delete_expired_messages_sql, (now,)) 208 | 209 | async def retrieve_non_expired_queued_messages(self) -> list[tuple[str, str, bytes, str]]: 210 | """ 211 | Retrieves all non-expired messages from the database 212 | 213 | NOTE: Postgres doesn't support ORDER BY for `RETURNING` 214 | queries. Even if the inner query is ordered, the returning 215 | clause is not guaranteed to be ordered 216 | """ 217 | retrieve_queued_messages_sql = sql.SQL( 218 | """ 219 | DELETE FROM {table} 220 | WHERE id IN ( 221 | SELECT id 222 | FROM {table} 223 | WHERE expire > %s 224 | FOR UPDATE SKIP LOCKED 225 | ) 226 | RETURNING id::text, channel, message, extract(epoch from expire)::text 227 | """ 228 | ).format(table=sql.Identifier(MESSAGE_TABLE)) 229 | 230 | now = utc_now() 231 | db_pool = await self.get_db_pool(db_params=self.db_params) 232 | async with db_pool.connection() as conn: 233 | async with conn.cursor() as cursor: 234 | await cursor.execute(retrieve_queued_messages_sql, (now,)) 235 | 236 | return await cursor.fetchall() 237 | 238 | async def retrieve_non_expired_queued_message_from_channel( 239 | self, channel: str 240 | ) -> typing.Optional[tuple[bytes]]: 241 | """Retrieves a non-expired message from a channel""" 242 | retrieve_queued_messages_sql = sql.SQL( 243 | """ 244 | DELETE FROM {table} 245 | WHERE id = ( 246 | SELECT id 247 | FROM {table} 248 | WHERE channel=%s AND expire > %s 249 | ORDER BY id 250 | FOR UPDATE SKIP LOCKED 251 | LIMIT 1 252 | ) 253 | RETURNING message 254 | """ 255 | ).format(table=sql.Identifier(MESSAGE_TABLE)) 256 | 257 | now = utc_now() 258 | db_pool = await self.get_db_pool(db_params=self.db_params) 259 | async with db_pool.connection() as conn: 260 | async with conn.cursor() as cursor: 261 | await cursor.execute(retrieve_queued_messages_sql, (channel, now)) 262 | message = await cursor.fetchone() 263 | 264 | return typing.cast(typing.Optional[tuple[bytes]], message) 265 | 266 | def _channel_to_constant_bigint(self, channel: str) -> int: 267 | """ 268 | Converts a channel name to a constant bigint. 269 | """ 270 | # Hash the character (SHA-256 gives consistent output) 271 | hash_bytes = hashlib.sha256(channel.encode('utf-8')).digest() 272 | # Convert to a large int 273 | hash_int = int.from_bytes(hash_bytes, byteorder='big') 274 | 275 | # Fit into signed 64-bit bigint range 276 | signed_bigint = hash_int % (2**64) 277 | if signed_bigint >= 2**63: 278 | signed_bigint -= 2**64 # Convert to negative if above max signed 279 | 280 | return signed_bigint 281 | 282 | async def acquire_advisory_lock(self, channel: str) -> bool: 283 | """Acquires an advisory lock from the database""" 284 | advisory_lock_id = self._channel_to_constant_bigint(channel) 285 | acquire_advisory_lock_sql = sql.SQL('SELECT pg_try_advisory_lock(%s::bigint)').format( 286 | advisory_lock_id=advisory_lock_id 287 | ) 288 | 289 | db_pool = await self.get_db_pool(db_params=self.db_params) 290 | async with db_pool.connection() as conn: 291 | async with conn.cursor() as cursor: 292 | await cursor.execute(acquire_advisory_lock_sql, (advisory_lock_id,)) 293 | 294 | result = await cursor.fetchone() 295 | return result[0] if result else False 296 | 297 | async def delete_message_returning_message( 298 | self, message_id: int 299 | ) -> typing.Optional[tuple[bytes]]: 300 | """Deletes a message from the database and returns the message""" 301 | delete_message_returning_message_sql = sql.SQL( 302 | 'DELETE FROM {table} WHERE id=%s RETURNING message' 303 | ).format(table=sql.Identifier(MESSAGE_TABLE)) 304 | 305 | db_pool = await self.get_db_pool(db_params=self.db_params) 306 | async with db_pool.connection() as conn: 307 | async with conn.cursor() as cursor: 308 | await cursor.execute(delete_message_returning_message_sql, (message_id,)) 309 | 310 | return await cursor.fetchone() 311 | 312 | async def delete_channel_group(self, group_key: str, channel: str) -> None: 313 | """Deletes a channel from a group""" 314 | await ( 315 | GroupChannel.objects.using(self.using) 316 | .filter(group_key=group_key, channel=channel) 317 | .adelete() 318 | ) 319 | 320 | async def flush(self) -> None: 321 | """ 322 | Flushes the channel layer by truncating the message and group tables 323 | """ 324 | db_pool = await self.get_db_pool(db_params=self.db_params) 325 | async with db_pool.connection() as conn: 326 | await conn.execute( 327 | sql.SQL('TRUNCATE TABLE {table}').format(table=sql.Identifier(MESSAGE_TABLE)) 328 | ) 329 | await conn.execute( 330 | sql.SQL('TRUNCATE TABLE {table}').format(table=sql.Identifier(GROUP_CHANNEL_TABLE)) 331 | ) 332 | -------------------------------------------------------------------------------- /channels_postgres/core.py: -------------------------------------------------------------------------------- 1 | """channels_postgres core""" 2 | 3 | import asyncio 4 | import base64 5 | import hashlib 6 | import logging 7 | import platform 8 | import time 9 | import typing 10 | import uuid 11 | 12 | import msgpack 13 | import psycopg 14 | from channels.layers import BaseChannelLayer 15 | from django.db.backends.postgresql.base import DatabaseWrapper 16 | 17 | from .db import DatabaseLayer 18 | 19 | try: 20 | from cryptography.fernet import Fernet, MultiFernet 21 | except ImportError: 22 | MultiFernet = Fernet = None # type: ignore 23 | 24 | # ProactorEventLoop is not supported by psycopg3 on windows 25 | # https://www.psycopg.org/psycopg3/docs/advanced/async.html 26 | if platform.system() == 'Windows': 27 | asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) # type: ignore 28 | 29 | from asyncio import create_task # pylint: disable=C0411,C0413 30 | 31 | logger = logging.getLogger(__name__) 32 | 33 | 34 | ASYNCIO_EVENT_CHANNEL_MAPPING: dict[str, asyncio.Queue[str]] = {} 35 | 36 | 37 | class PostgresChannelLayer(BaseChannelLayer): # type: ignore # pylint: disable=R0902 38 | """ 39 | Postgres channel layer. 40 | 41 | It uses the NOTIFY/LISTEN functionality of postgres to broadcast messages 42 | 43 | It also makes use of an internal message table to overcome the 44 | 8000bytes limit of Postgres' NOTIFY messages. 45 | Which is a far cry from the channels standard of 1MB 46 | This table has a trigger that sends out the `NOTIFY` signal. 47 | 48 | Using a database also means messages are durable and will always be 49 | available to consumers (as long as they're not expired). 50 | """ 51 | 52 | def __init__( # pylint: disable=R0913,R0917 53 | self, 54 | prefix: str = 'asgi', 55 | expiry: int = 60, 56 | group_expiry: int = 0, 57 | symmetric_encryption_keys: typing.Any = None, 58 | **kwargs: dict[str, typing.Any], 59 | ): 60 | super().__init__(expiry=expiry) 61 | 62 | self.event_loop: typing.Optional[asyncio.AbstractEventLoop] = None 63 | self.listener_task_is_running: typing.Optional[asyncio.Event] = None 64 | 65 | self.prefix = prefix 66 | self.group_expiry = group_expiry 67 | self.client_prefix = uuid.uuid4().hex[:5] 68 | self.crypter: typing.Optional['MultiFernet'] = None 69 | self._setup_encryption(symmetric_encryption_keys) 70 | self._channel_advisory_locks: dict[str, bool] = {} 71 | 72 | try: 73 | kwargs['OPTIONS'] 74 | except KeyError: 75 | kwargs['OPTIONS'] = {} 76 | 77 | db_wrapper = DatabaseWrapper(kwargs) 78 | 79 | # Prevent psycopg from using the custom synchronous cursor factory from django 80 | db_params = db_wrapper.get_connection_params() 81 | db_params.pop('cursor_factory') 82 | db_params.pop('context') 83 | self.db_params = db_params 84 | 85 | psycopg_options = kwargs.get('PSYCOPG_OPTIONS', {}) 86 | self.django_db = DatabaseLayer(psycopg_options, self.db_params, logger=logger) 87 | 88 | def _setup_encryption( 89 | self, symmetric_encryption_keys: typing.Union[list[bytes], list[str]] 90 | ) -> None: 91 | # See if we can do encryption if they asked 92 | if symmetric_encryption_keys: 93 | if isinstance(symmetric_encryption_keys, (str, bytes)): 94 | raise ValueError('symmetric_encryption_keys must be a list of possible keys') 95 | if MultiFernet is None: 96 | raise ValueError('Cannot run with encryption without `cryptography` installed.') 97 | 98 | sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys] 99 | self.crypter = MultiFernet(sub_fernets) 100 | 101 | def _get_or_create_listener_task(self) -> tuple[asyncio.Event, bool]: 102 | if not self.listener_task_is_running: 103 | self.listener_task_is_running = asyncio.Event() 104 | 105 | if not self.event_loop: 106 | self.event_loop = asyncio.get_running_loop() 107 | 108 | # self.event_loop and running_event_loop can be different if a different thread is started 109 | running_event_loop = asyncio.get_running_loop() 110 | if self.listener_task_is_running.is_set() and self.event_loop == running_event_loop: 111 | return self.listener_task_is_running, False 112 | 113 | return self.listener_task_is_running, True 114 | 115 | async def listen_to_all_channels(self) -> None: 116 | """ 117 | Listens for messages in all channels from the database 118 | and sends them to the respective queue(s) 119 | """ 120 | # Retrieve all non-expired messages for all channels from the database 121 | # and send them to the respective queues 122 | # The results need to be ordered by id 123 | # as the database returns them in an arbitrary order 124 | returning = await self.django_db.retrieve_non_expired_queued_messages() 125 | returning.sort(key=lambda x: int(x[0])) 126 | 127 | for returning_message in returning: 128 | message_id, channel, message, timestamp = returning_message 129 | base64_message = base64.b64encode(message).decode('utf-8') 130 | event_payload = f'{message_id}:{channel}:{base64_message}:{timestamp}' 131 | self._get_or_create_queue(channel).put_nowait(event_payload) 132 | 133 | conn_info = psycopg.conninfo.make_conninfo(conninfo='', **self.db_params) 134 | async with await psycopg.AsyncConnection.connect( 135 | conninfo=conn_info, autocommit=True 136 | ) as connection: 137 | await connection.execute('LISTEN channels_postgres_messages;') 138 | 139 | # The db connection is open and now listening for events 140 | assert self.listener_task_is_running is not None 141 | self.listener_task_is_running.set() 142 | 143 | # This is a blocking call that will wait for events 144 | # until the generator is closed 145 | async for event in connection.notifies(): 146 | if event.payload == '1:shutdown': 147 | logger.debug('Shutting down listener task') 148 | self.listener_task_is_running.clear() 149 | await connection.notifies().aclose() 150 | break 151 | 152 | split_payload = event.payload.split(':') 153 | channel = split_payload[1] 154 | 155 | # If we don't have the advisory lock, discard the event 156 | # as another consumer is already processing it 157 | lock = self._channel_advisory_locks.get(channel, False) 158 | if lock is False: 159 | logger.debug( 160 | "Discarding event for channel %s because we don't have the advisory lock", 161 | channel, 162 | ) 163 | continue 164 | 165 | self._get_or_create_queue(channel).put_nowait(event.payload) 166 | 167 | def make_fernet(self, key: typing.Union[bytes, str]) -> 'Fernet': 168 | """ 169 | Given a single encryption key, returns a Fernet instance using it. 170 | """ 171 | if Fernet is None: 172 | raise ValueError('Cannot run with encryption without `cryptography` installed.') 173 | 174 | if isinstance(key, str): 175 | key = key.encode('utf-8') 176 | formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest()) 177 | 178 | return Fernet(formatted_key) 179 | 180 | # ============================================================== 181 | # Channel layer API 182 | # ============================================================== 183 | 184 | extensions = ['groups', 'flush'] 185 | 186 | def _get_or_create_queue(self, channel: str) -> asyncio.Queue[str]: 187 | queue = ASYNCIO_EVENT_CHANNEL_MAPPING.get(channel, None) 188 | if not queue: 189 | queue = asyncio.Queue() 190 | ASYNCIO_EVENT_CHANNEL_MAPPING[channel] = queue 191 | 192 | return queue 193 | 194 | async def send(self, channel: str, message: dict[str, typing.Any]) -> None: 195 | """Send a message onto a (general or specific) channel.""" 196 | self._get_or_create_queue(channel) 197 | 198 | # Typecheck 199 | assert isinstance(message, dict), 'message is not a dict' 200 | try: 201 | assert self.require_valid_channel_name(channel), 'Channel name not valid' 202 | except AttributeError: 203 | assert self.valid_channel_name(channel), 'Channel name not valid' 204 | 205 | # Make sure the message does not contain reserved keys 206 | assert '__asgi_channel__' not in message 207 | serialized_message = self.serialize(message) 208 | 209 | await self.django_db.send_to_channel('', serialized_message, self.expiry, channel=channel) 210 | 211 | async def _get_message_from_channel( 212 | self, channel: str, queue: asyncio.Queue[str] 213 | ) -> dict[str, typing.Any]: 214 | logger.debug('Getting message from channel %s', channel) 215 | message: typing.Optional[tuple[bytes]] = None 216 | while True: 217 | # Receive the message and remove the future from the mapping 218 | event_payload = await queue.get() 219 | 220 | split_payload = event_payload.split(':') 221 | 222 | # Smaller messages (> 8000 bytes) are available in the queue directly and 223 | # don't need to be fetched from the database. 224 | # Also, the message doesn't need to be deleted from the database immediately because: 225 | # 1. We never retrieve expired messages from the database. 226 | # 2. It will be cleaned up later by the `delete_expired_messages` coroutine. 227 | if len(split_payload) == 4: 228 | message_id, _, base64_message, timestamp = split_payload 229 | if float(timestamp) < time.time(): 230 | continue 231 | message = (base64.b64decode(base64_message),) 232 | else: 233 | message_id = split_payload[0] 234 | message = await self.django_db.delete_message_returning_message(int(message_id)) 235 | assert message is not None 236 | 237 | break 238 | 239 | deserialized_message = self.deserialize(message[0]) 240 | 241 | return deserialized_message 242 | 243 | async def _get_or_create_channel_advisory_lock(self, channel: str) -> bool: 244 | # Try to acquire the advisory lock from the database 245 | lock = self._channel_advisory_locks.get(channel, False) 246 | if lock is False: 247 | lock = await self.django_db.acquire_advisory_lock(channel) 248 | self._channel_advisory_locks[channel] = lock 249 | 250 | return lock 251 | 252 | async def receive(self, channel: str) -> dict[str, typing.Any]: 253 | """ 254 | Receive the first message that arrives on the channel. 255 | If more than one coroutine waits on the same channel, the first waiter 256 | will be given the message when it arrives. 257 | 258 | This is done by acquiring an `advistory_lock` from the database 259 | based on the channel name. 260 | 261 | If the lock is acquired succesfully, subsequent calls to this method 262 | will not try to acquire the lock again. 263 | _The lock is session based and should be released by postgres when 264 | the session is closed_ 265 | 266 | If the lock is already acquired by another coroutine, 267 | subsequent calls to this method will repeatedly try to acquire the lock 268 | before proceeding to wait for a message. 269 | """ 270 | queue = self._get_or_create_queue(channel) 271 | await self._get_or_create_channel_advisory_lock(channel) 272 | listener_task, is_new_task = self._get_or_create_listener_task() 273 | if is_new_task: 274 | asyncio.create_task(self.listen_to_all_channels()) 275 | await listener_task.wait() 276 | 277 | try: 278 | assert self.require_valid_channel_name(channel), 'Channel name not valid' 279 | except AttributeError: 280 | assert self.valid_channel_name(channel), 'Channel name not valid' 281 | if '!' in channel: 282 | real_channel = self.non_local_name(channel) 283 | assert real_channel.endswith(self.client_prefix + '!'), 'Wrong client prefix' 284 | 285 | # Get the message from the channel 286 | return await self._get_message_from_channel(channel, queue) 287 | 288 | async def new_channel(self, prefix: str = 'specific') -> str: 289 | """ 290 | Returns a new channel name that can be used by something in our 291 | process as a specific channel. 292 | """ 293 | return f'{prefix}.{self.client_prefix}!{uuid.uuid4().hex}' 294 | 295 | # ============================================================== 296 | # Flush extension 297 | # ============================================================== 298 | 299 | async def flush(self) -> None: 300 | """ 301 | Deletes all messages and groups in the database 302 | """ 303 | await self.django_db.flush() 304 | 305 | # ============================================================== 306 | # Groups extension 307 | # ============================================================== 308 | 309 | async def group_add(self, group: str, channel: str) -> None: 310 | """Adds the channel name to a group to the Postgres table.""" 311 | group_key = self._group_key(group) 312 | 313 | await self.django_db.add_channel_to_group(group_key, channel, self.group_expiry) 314 | 315 | async def group_discard( 316 | self, group: str, channel: str, expire: typing.Optional[int] = None 317 | ) -> None: 318 | """ 319 | Removes the channel from the named group if it is in the group; 320 | does nothing otherwise (does not error) 321 | """ 322 | try: 323 | assert self.require_valid_group_name(group), 'Group name not valid' 324 | except AttributeError: 325 | assert self.valid_group_name(group), 'Group name not valid' 326 | try: 327 | assert self.require_valid_channel_name(channel), 'Channel name not valid' 328 | except AttributeError: 329 | assert self.valid_channel_name(channel), 'Channel name not valid' 330 | 331 | group_key = self._group_key(group) 332 | 333 | await self.django_db.delete_channel_group(group_key, channel) 334 | 335 | # Delete expired groups (if enabled) and messages 336 | if self.group_expiry > 0: 337 | create_task(self.django_db.delete_expired_groups()) 338 | 339 | create_task(self.django_db.delete_expired_messages(expire)) 340 | 341 | async def group_send(self, group: str, message: dict[str, typing.Any]) -> None: 342 | """Sends a message to the entire group.""" 343 | channels = await self.django_db.retrieve_group_channels(group) 344 | for channel in channels: 345 | self._get_or_create_queue(channel) 346 | 347 | try: 348 | assert self.require_valid_group_name(group), 'Group name not valid' 349 | except AttributeError: 350 | assert self.valid_group_name(group), 'Group name not valid' 351 | 352 | group_key = self._group_key(group) 353 | # Retrieve list of all channel names related to the group 354 | serialized_message = self.serialize(message) 355 | 356 | await self.django_db.send_to_channel(group_key, serialized_message, self.expiry) 357 | 358 | def _group_key(self, group: str) -> str: 359 | """Common function to make the storage key for the group.""" 360 | return f'{self.prefix}:group:{group}' 361 | 362 | # ============================================================== 363 | # Serialization 364 | # ============================================================== 365 | 366 | def serialize(self, message: dict[str, typing.Any]) -> bytes: 367 | """Serializes message to a byte string.""" 368 | value: bytes = msgpack.packb(message, use_bin_type=True) 369 | if self.crypter: 370 | value = self.crypter.encrypt(value) 371 | 372 | return value 373 | 374 | def deserialize(self, message: bytes) -> dict[str, typing.Any]: 375 | """Deserializes from a byte string.""" 376 | if self.crypter: 377 | message = self.crypter.decrypt(message, self.expiry + 10) 378 | 379 | deserialized_message: dict[str, typing.Any] = msgpack.unpackb(message, raw=False) 380 | 381 | return deserialized_message 382 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | """channels_postgres tests""" 2 | 3 | import asyncio 4 | import random 5 | import typing 6 | from unittest.mock import patch 7 | 8 | import async_timeout 9 | import django 10 | import psycopg 11 | import pytest 12 | from asgiref.sync import async_to_sync 13 | from django.conf import settings 14 | 15 | django.setup() 16 | 17 | from asyncio import create_task # noqa: E402 pylint: disable=C0411,C0413 18 | 19 | from django.db.backends.postgresql.base import ( # noqa: E402 pylint: disable=C0411,C0413 20 | DatabaseWrapper, 21 | ) 22 | 23 | from channels_postgres.core import PostgresChannelLayer # noqa: E402 pylint: disable=C0411,C0413 24 | 25 | default_layer_config: dict[str, typing.Any] = { 26 | 'prefix': 'asgi', 27 | 'expiry': 60, 28 | 'group_expiry': 0, 29 | 'symmetric_encryption_keys': None, 30 | 'config': None, 31 | } 32 | 33 | 34 | @pytest.fixture(scope='function', autouse=True) 35 | async def shutdown_listener() -> typing.AsyncGenerator[None, None]: 36 | """ 37 | Fixture that shuts down the listener 38 | """ 39 | yield 40 | 41 | db_wrapper = DatabaseWrapper(settings.DATABASES['channels_postgres']) 42 | db_params = db_wrapper.get_connection_params() 43 | db_params.pop('cursor_factory') 44 | db_params.pop('context') 45 | conn_info = psycopg.conninfo.make_conninfo(conninfo='', **db_params) 46 | async with await psycopg.AsyncConnection.connect(conninfo=conn_info, autocommit=True) as conn: 47 | await conn.execute("NOTIFY channels_postgres_messages, '1:shutdown';") 48 | 49 | 50 | @pytest.fixture(name='channel_layer') 51 | async def channel_layer_fixture() -> typing.AsyncGenerator[PostgresChannelLayer, None]: 52 | """Channel layer fixture that flushes automatically.""" 53 | db_params: dict[str, typing.Any] | None = settings.DATABASES.get('channels_postgres', None) 54 | assert db_params is not None 55 | 56 | channel_layer = PostgresChannelLayer(**default_layer_config, **db_params) 57 | 58 | yield channel_layer 59 | 60 | await channel_layer.flush() 61 | 62 | 63 | async def send_three_messages_with_delay( 64 | channel_name: str, channel_layer: PostgresChannelLayer, delay: int 65 | ) -> None: 66 | """ 67 | Sends three messages to a channel with a delay between each message. 68 | 69 | The messages should be sent without errors. 70 | """ 71 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': 'First!'}) 72 | 73 | await asyncio.sleep(delay) 74 | 75 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': 'Second!'}) 76 | 77 | await asyncio.sleep(delay) 78 | 79 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': 'Third!'}) 80 | 81 | 82 | async def group_send_three_messages_with_delay( 83 | group_name: str, channel_layer: PostgresChannelLayer, delay: int 84 | ) -> None: 85 | """ 86 | Sends three messages to a group with a delay between each message. 87 | 88 | The messages should be sent without errors. 89 | """ 90 | await channel_layer.group_send(group_name, {'type': 'test.message', 'text': 'First!'}) 91 | 92 | await asyncio.sleep(delay) 93 | 94 | await channel_layer.group_send(group_name, {'type': 'test.message', 'text': 'Second!'}) 95 | 96 | await asyncio.sleep(delay) 97 | 98 | await channel_layer.group_send(group_name, {'type': 'test.message', 'text': 'Third!'}) 99 | 100 | 101 | @pytest.mark.asyncio 102 | async def test_send_receive_basic(channel_layer: PostgresChannelLayer) -> None: 103 | """Makes sure we can send a message to a normal channel and receive it.""" 104 | await channel_layer.send('test-channel-1', {'type': 'test.message', 'text': 'Ahoy-hoy!'}) 105 | message = await channel_layer.receive('test-channel-1') 106 | 107 | assert message['type'] == 'test.message' 108 | assert message['text'] == 'Ahoy-hoy!' 109 | 110 | 111 | @pytest.mark.asyncio 112 | async def test_send_received(channel_layer: PostgresChannelLayer) -> None: 113 | """ 114 | Similar to the `test_send_receive_basic` 115 | but mimics a real world scenario where clients connect first 116 | and wait for messages 117 | """ 118 | 119 | task = create_task(channel_layer.receive('test-channel-2')) 120 | 121 | async def chained_tasks() -> None: 122 | await asyncio.sleep(1) 123 | await channel_layer.send( 124 | 'test-channel-2', {'type': 'test.message_connect_wait', 'text': 'Hello world!'} 125 | ) 126 | 127 | await asyncio.wait([task, create_task(chained_tasks())]) 128 | 129 | message = task.result() 130 | assert message['type'] == 'test.message_connect_wait' 131 | assert message['text'] == 'Hello world!' 132 | 133 | 134 | def test_double_receive() -> None: 135 | """ 136 | Makes sure we can receive from two different event loops using 137 | process-local channel names. 138 | 139 | We can't reuse the channel_layer fixture here because it's async 140 | """ 141 | db_params: dict[str, typing.Any] | None = settings.DATABASES.get('channels_postgres', None) 142 | assert db_params is not None 143 | channel_layer = PostgresChannelLayer(**default_layer_config, **db_params) 144 | 145 | channel_name_1 = async_to_sync(channel_layer.new_channel)() 146 | channel_name_2 = async_to_sync(channel_layer.new_channel)() 147 | async_to_sync(channel_layer.send)(channel_name_1, {'type': 'test.message.1'}) 148 | async_to_sync(channel_layer.send)(channel_name_2, {'type': 'test.message.2'}) 149 | 150 | # Define listeners 151 | async def listen1() -> None: 152 | message = await channel_layer.receive(channel_name_1) 153 | assert message['type'] == 'test.message.1' 154 | 155 | async def listen2() -> None: 156 | message = await channel_layer.receive(channel_name_2) 157 | assert message['type'] == 'test.message.2' 158 | 159 | # Run them inside threads to ensure that they are running in different event loops 160 | async_to_sync(listen2)() 161 | async_to_sync(listen1)() 162 | 163 | # Clean up 164 | async_to_sync(channel_layer.flush)() 165 | 166 | 167 | @pytest.mark.asyncio 168 | async def test_process_local_send_receive(channel_layer: PostgresChannelLayer) -> None: 169 | """ 170 | Makes sure we can send a message to a process-local channel then receive it. 171 | """ 172 | channel_name = await channel_layer.new_channel() 173 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': 'Local only please'}) 174 | message = await channel_layer.receive(channel_name) 175 | assert message['type'] == 'test.message' 176 | assert message['text'] == 'Local only please' 177 | 178 | 179 | @pytest.mark.asyncio 180 | async def test_multi_send_receive(channel_layer: PostgresChannelLayer) -> None: 181 | """Tests overlapping sends and receives, and ordering.""" 182 | await channel_layer.send('test-channel-3', {'type': 'message.1'}) 183 | await channel_layer.send('test-channel-3', {'type': 'message.2'}) 184 | await channel_layer.send('test-channel-3', {'type': 'message.3'}) 185 | assert (await channel_layer.receive('test-channel-3'))['type'] == 'message.1' 186 | assert (await channel_layer.receive('test-channel-3'))['type'] == 'message.2' 187 | assert (await channel_layer.receive('test-channel-3'))['type'] == 'message.3' 188 | 189 | 190 | @pytest.mark.asyncio 191 | async def test_reject_bad_channel(channel_layer: PostgresChannelLayer) -> None: 192 | """ 193 | Makes sure sending/receiving on an invalic channel name fails. 194 | """ 195 | with pytest.raises(TypeError): 196 | await channel_layer.send('=+135!', {'type': 'foom'}) 197 | with pytest.raises(TypeError): 198 | await channel_layer.receive('=+135!') 199 | 200 | 201 | @pytest.mark.asyncio 202 | async def test_reject_bad_client_prefix(channel_layer: PostgresChannelLayer) -> None: 203 | """ 204 | Makes sure receiving on a non-prefixed local channel is not allowed. 205 | """ 206 | with pytest.raises(AssertionError): 207 | await channel_layer.receive('not-client-prefix!local_part') 208 | 209 | 210 | @pytest.mark.asyncio 211 | async def test_non_existent_group(channel_layer: PostgresChannelLayer) -> None: 212 | """sending on Non-existent groups shoudldn't raise any Exceptions or send any message.""" 213 | try: 214 | await channel_layer.group_send('non-existent', {'type': 'message.1'}) 215 | except Exception as e: # pylint: disable=W0718 216 | pytest.fail(f'Unexpected exception {e}') 217 | 218 | 219 | @pytest.mark.asyncio 220 | async def test_groups_basic(channel_layer: PostgresChannelLayer) -> None: 221 | """ 222 | Tests basic group operation. 223 | """ 224 | channel_name1 = await channel_layer.new_channel(prefix='test-gr-chan-1') 225 | channel_name2 = await channel_layer.new_channel(prefix='test-gr-chan-2') 226 | channel_name3 = await channel_layer.new_channel(prefix='test-gr-chan-3') 227 | await channel_layer.group_add('test-group', channel_name1) 228 | await channel_layer.group_add('test-group', channel_name2) 229 | await channel_layer.group_add('test-group', channel_name3) 230 | await channel_layer.group_discard('test-group', channel_name2, expire=1) 231 | await channel_layer.group_send('test-group', {'type': 'message.1'}) 232 | # Make sure we get the message on the two channels that were in 233 | async with async_timeout.timeout(1): 234 | assert (await channel_layer.receive(channel_name1))['type'] == 'message.1' 235 | assert (await channel_layer.receive(channel_name3))['type'] == 'message.1' 236 | # Make sure the removed channel did not get the message 237 | with pytest.raises(asyncio.TimeoutError): 238 | async with async_timeout.timeout(1): 239 | await channel_layer.receive(channel_name2) 240 | 241 | 242 | @pytest.mark.asyncio 243 | async def test_groups_same_prefix(channel_layer: PostgresChannelLayer) -> None: 244 | """ 245 | Tests group_send with multiple channels with same channel prefix 246 | """ 247 | channel_name1 = await channel_layer.new_channel(prefix='test-gr-chan') 248 | channel_name2 = await channel_layer.new_channel(prefix='test-gr-chan') 249 | channel_name3 = await channel_layer.new_channel(prefix='test-gr-chan') 250 | await channel_layer.group_add('test-group', channel_name1) 251 | await channel_layer.group_add('test-group', channel_name2) 252 | await channel_layer.group_add('test-group', channel_name3) 253 | await channel_layer.group_send('test-group', {'type': 'message.1'}) 254 | 255 | # Make sure we get the message on the channels that were in 256 | async with async_timeout.timeout(1): 257 | assert (await channel_layer.receive(channel_name1))['type'] == 'message.1' 258 | assert (await channel_layer.receive(channel_name2))['type'] == 'message.1' 259 | assert (await channel_layer.receive(channel_name3))['type'] == 'message.1' 260 | 261 | 262 | @pytest.mark.asyncio 263 | async def test_receive_cancel(channel_layer: PostgresChannelLayer) -> None: 264 | """ 265 | Makes sure we can cancel a receive without blocking 266 | """ 267 | channel = await channel_layer.new_channel() 268 | delay = 0.0 269 | while delay < 0.01: 270 | await channel_layer.send(channel, {'type': 'test.message', 'text': 'Ahoy-hoy!'}) 271 | 272 | task = asyncio.ensure_future(channel_layer.receive(channel)) 273 | await asyncio.sleep(delay) 274 | task.cancel() 275 | delay += 0.0001 276 | 277 | try: 278 | await asyncio.wait_for(task, None) 279 | except asyncio.CancelledError: 280 | pass 281 | 282 | 283 | @pytest.mark.asyncio 284 | async def test_random_reset__channel_name(channel_layer: PostgresChannelLayer) -> None: 285 | """ 286 | Makes sure resetting random seed does not make us reuse channel names. 287 | """ 288 | random.seed(1) 289 | channel_name_1 = await channel_layer.new_channel() 290 | random.seed(1) 291 | channel_name_2 = await channel_layer.new_channel() 292 | 293 | assert channel_name_1 != channel_name_2 294 | 295 | 296 | @pytest.mark.asyncio 297 | async def test_random_reset__client_prefix() -> None: 298 | """ 299 | Makes sure resetting random seed does not make us reuse client_prefixes. 300 | """ 301 | random.seed(1) 302 | db_params: dict[str, typing.Any] | None = settings.DATABASES.get('channels_postgres', None) 303 | assert db_params is not None 304 | 305 | channel_layer_1 = PostgresChannelLayer(**default_layer_config, **db_params) 306 | random.seed(1) 307 | channel_layer_2 = PostgresChannelLayer(**default_layer_config, **db_params) 308 | assert channel_layer_1.client_prefix != channel_layer_2.client_prefix 309 | 310 | 311 | @pytest.mark.asyncio 312 | async def test_message_expiry__earliest_message_expires( 313 | channel_layer: PostgresChannelLayer, 314 | ) -> None: 315 | """ 316 | Tests message expiry 317 | 318 | The channel layer should not return expired messages. 319 | """ 320 | expiry = 3 321 | delay = 2 322 | channel_layer.expiry = expiry 323 | channel_name = await channel_layer.new_channel() 324 | 325 | task = asyncio.ensure_future(send_three_messages_with_delay(channel_name, channel_layer, delay)) 326 | await asyncio.wait_for(task, None) 327 | 328 | # the first message should have expired, we should only see the second message and the third 329 | message = await channel_layer.receive(channel_name) 330 | assert message['type'] == 'test.message' 331 | assert message['text'] == 'Second!' 332 | 333 | message = await channel_layer.receive(channel_name) 334 | assert message['type'] == 'test.message' 335 | assert message['text'] == 'Third!' 336 | 337 | # Make sure there's no third message even out of order 338 | with pytest.raises(asyncio.TimeoutError): 339 | async with async_timeout.timeout(1): 340 | await channel_layer.receive(channel_name) 341 | 342 | 343 | @pytest.mark.asyncio 344 | async def test_message_expiry__all_messages_under_expiration_time( 345 | channel_layer: PostgresChannelLayer, 346 | ) -> None: 347 | """ 348 | Tests message expiry 349 | 350 | The channel layer should return all messages if they are not expired. 351 | """ 352 | expiry = 3 353 | delay = 1 354 | channel_layer.expiry = expiry 355 | channel_name = await channel_layer.new_channel() 356 | 357 | task = asyncio.ensure_future(send_three_messages_with_delay(channel_name, channel_layer, delay)) 358 | await asyncio.wait_for(task, None) 359 | 360 | # expiry = 3, total delay under 3, all messages there 361 | message = await channel_layer.receive(channel_name) 362 | assert message['type'] == 'test.message' 363 | assert message['text'] == 'First!' 364 | 365 | message = await channel_layer.receive(channel_name) 366 | assert message['type'] == 'test.message' 367 | assert message['text'] == 'Second!' 368 | 369 | message = await channel_layer.receive(channel_name) 370 | assert message['type'] == 'test.message' 371 | assert message['text'] == 'Third!' 372 | 373 | 374 | @pytest.mark.asyncio 375 | async def test_message_expiry__group_send(channel_layer: PostgresChannelLayer) -> None: 376 | """ 377 | Tests group messages expiry 378 | 379 | The channel layer should not return expired group messages. 380 | """ 381 | expiry = 3 382 | delay = 2 383 | channel_layer.expiry = expiry 384 | channel_name = await channel_layer.new_channel() 385 | 386 | await channel_layer.group_add('test-group', channel_name) 387 | 388 | task = asyncio.ensure_future( 389 | group_send_three_messages_with_delay('test-group', channel_layer, delay) 390 | ) 391 | await asyncio.wait_for(task, None) 392 | 393 | # the first message should have expired, we should only see the second message and the third 394 | message = await channel_layer.receive(channel_name) 395 | assert message['type'] == 'test.message' 396 | assert message['text'] == 'Second!' 397 | 398 | message = await channel_layer.receive(channel_name) 399 | assert message['type'] == 'test.message' 400 | assert message['text'] == 'Third!' 401 | 402 | # Make sure there's no third message even out of order 403 | with pytest.raises(asyncio.TimeoutError): 404 | async with async_timeout.timeout(1): 405 | await channel_layer.receive(channel_name) 406 | 407 | 408 | @pytest.mark.asyncio 409 | async def test_message_expiry__group_send__one_channel_expires_message( 410 | channel_layer: PostgresChannelLayer, 411 | ) -> None: 412 | """ 413 | Tests group messages expiry 414 | 415 | The channel layer should not return expired group messages. 416 | """ 417 | expiry = 4 418 | delay = 1 419 | channel_layer.expiry = expiry 420 | channel_1 = await channel_layer.new_channel() 421 | channel_2 = await channel_layer.new_channel(prefix='channel_2') 422 | 423 | await channel_layer.group_add('test-group', channel_1) 424 | await channel_layer.group_add('test-group', channel_2) 425 | 426 | # Let's give channel_1 one additional message and then sleep 427 | await channel_layer.send(channel_1, {'type': 'test.message', 'text': 'Zero!'}) 428 | await asyncio.sleep(2) 429 | 430 | task = asyncio.ensure_future( 431 | group_send_three_messages_with_delay('test-group', channel_layer, delay) 432 | ) 433 | await asyncio.wait_for(task, None) 434 | 435 | # message Zero! was sent about 2 + 1 + 1 seconds ago and it should have expired 436 | message = await channel_layer.receive(channel_1) 437 | assert message['type'] == 'test.message' 438 | assert message['text'] == 'First!' 439 | 440 | message = await channel_layer.receive(channel_1) 441 | assert message['type'] == 'test.message' 442 | assert message['text'] == 'Second!' 443 | 444 | message = await channel_layer.receive(channel_1) 445 | assert message['type'] == 'test.message' 446 | assert message['text'] == 'Third!' 447 | 448 | # Make sure there's no fourth message even out of order 449 | with pytest.raises(asyncio.TimeoutError): 450 | async with async_timeout.timeout(1): 451 | await channel_layer.receive(channel_1) 452 | 453 | # channel_2 should receive all three messages from group_send 454 | message = await channel_layer.receive(channel_2) 455 | assert message['type'] == 'test.message' 456 | assert message['text'] == 'First!' 457 | 458 | # the first message should have expired, we should only see the second message and the third 459 | message = await channel_layer.receive(channel_2) 460 | assert message['type'] == 'test.message' 461 | assert message['text'] == 'Second!' 462 | 463 | message = await channel_layer.receive(channel_2) 464 | assert message['type'] == 'test.message' 465 | assert message['text'] == 'Third!' 466 | 467 | 468 | @pytest.mark.asyncio 469 | async def test_guarantee_at_most_once_delivery() -> None: 470 | """ 471 | Tests that at most once delivery is guaranteed. 472 | 473 | If two consumers are listening on the same channel, 474 | the message should be delivered to only one of them. 475 | """ 476 | db_params: dict[str, typing.Any] | None = settings.DATABASES.get('channels_postgres', None) 477 | assert db_params is not None 478 | 479 | channel_name = 'same-channel' 480 | loop = asyncio.get_running_loop() 481 | 482 | channel_layer = PostgresChannelLayer(**default_layer_config, **db_params) 483 | channel_layer_2 = PostgresChannelLayer(**default_layer_config, **db_params) 484 | future_channel_layer = loop.create_future() 485 | future_channel_layer_2 = loop.create_future() 486 | 487 | async def receive_task( 488 | channel_layer: PostgresChannelLayer, future: asyncio.Future[typing.Any] 489 | ) -> None: 490 | message = await channel_layer.receive(channel_name) 491 | future.set_result(message) 492 | 493 | # Ensure that receive_task_2 is scheduled first and accquires the advisory lock 494 | create_task(receive_task(channel_layer_2, future_channel_layer_2)) 495 | while channel_layer_2.listener_task_is_running is None: 496 | await asyncio.sleep(0.1) 497 | await channel_layer_2.listener_task_is_running.wait() 498 | 499 | create_task(receive_task(channel_layer, future_channel_layer)) 500 | while channel_layer.listener_task_is_running is None: 501 | await asyncio.sleep(0.1) 502 | await channel_layer.listener_task_is_running.wait() 503 | 504 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': 'Hello!'}) 505 | 506 | result = await future_channel_layer_2 507 | assert result['type'] == 'test.message' 508 | assert result['text'] == 'Hello!' 509 | 510 | # Channel layer 1 should not receive the message 511 | # as it is already consumed by channel layer 2 512 | with pytest.raises(asyncio.TimeoutError): 513 | async with async_timeout.timeout(1): 514 | await future_channel_layer 515 | 516 | await channel_layer.flush() 517 | await channel_layer_2.flush() 518 | 519 | 520 | def test_default_group_key_format(channel_layer: PostgresChannelLayer) -> None: 521 | """ 522 | Tests the default group key format. 523 | """ 524 | group_name = channel_layer._group_key('test_group') # pylint: disable=W0212 525 | assert group_name == 'asgi:group:test_group' 526 | 527 | 528 | def test_custom_group_key_format(channel_layer: PostgresChannelLayer) -> None: 529 | """ 530 | Tests the custom group key format. 531 | """ 532 | channel_layer.prefix = 'test_prefix' 533 | 534 | group_name = channel_layer._group_key('test_group') # pylint: disable=W0212 535 | assert group_name == 'test_prefix:group:test_group' 536 | 537 | 538 | @pytest.mark.asyncio 539 | async def test_small_message(channel_layer: PostgresChannelLayer) -> None: 540 | """ 541 | Makes sure we can send a message smaller than 8000 bytes and receive it. 542 | 543 | without INSERTING and retrieving the message from the database. 544 | The message should be sent directly via `pg_notify`. 545 | """ 546 | channel_name = await channel_layer.new_channel() 547 | text = random.randbytes(1_000) 548 | 549 | async def chained_tasks() -> None: 550 | await asyncio.sleep(1) 551 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': text}) 552 | 553 | with patch.object( 554 | channel_layer.django_db, 555 | 'delete_message_returning_message', 556 | wraps=channel_layer.django_db.delete_message_returning_message, 557 | ) as spy_delete_message_returning_message: 558 | task = create_task(channel_layer.receive(channel_name)) 559 | await asyncio.wait([task, create_task(chained_tasks())]) 560 | message = task.result() 561 | 562 | spy_delete_message_returning_message.assert_not_called() 563 | assert message['type'] == 'test.message' 564 | assert message['text'] == text 565 | 566 | 567 | @pytest.mark.asyncio 568 | async def test_big_message(channel_layer: PostgresChannelLayer) -> None: 569 | """ 570 | Makes sure we can send a message bigger than `8000` bytes and receive it. 571 | 572 | Postgres has a limit of 8000 bytes for NOTIFY messages. 573 | So the message should be inserted into the database and then retrieved. 574 | """ 575 | channel_name = await channel_layer.new_channel() 576 | text = random.randbytes(10_000) 577 | 578 | async def chained_tasks() -> None: 579 | await asyncio.sleep(1) 580 | await channel_layer.send(channel_name, {'type': 'test.message', 'text': text}) 581 | 582 | with patch.object( 583 | channel_layer.django_db, 584 | 'delete_message_returning_message', 585 | wraps=channel_layer.django_db.delete_message_returning_message, 586 | ) as spy_delete_message_returning_message: 587 | task = create_task(channel_layer.receive(channel_name)) 588 | await asyncio.wait([task, create_task(chained_tasks())]) 589 | message = task.result() 590 | 591 | spy_delete_message_returning_message.assert_called_once() 592 | assert message['type'] == 'test.message' 593 | assert message['text'] == text 594 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "be4e2115baebf5a9d83c9d01e497af7634648157960460fed76f28a52c2901d0" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.13" 9 | }, 10 | "sources": [ 11 | { 12 | "name": "pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "asgiref": { 20 | "hashes": [ 21 | "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", 22 | "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e" 23 | ], 24 | "index": "pypi", 25 | "markers": "python_version >= '3.9'", 26 | "version": "==3.10.0" 27 | }, 28 | "channels": { 29 | "hashes": [ 30 | "sha256:97413ffd674542db08e16a9ef09cd86ec0113e5f8125fbd33cf0854adcf27cdb", 31 | "sha256:b091d4b26f91d807de3e84aead7ba785314f27eaf5bac31dd51b1c956b883859" 32 | ], 33 | "index": "pypi", 34 | "markers": "python_version >= '3.9'", 35 | "version": "==4.3.1" 36 | }, 37 | "django": { 38 | "hashes": [ 39 | "sha256:59a13a6515f787dec9d97a0438cd2efac78c8aca1c80025244b0fe507fe0754b", 40 | "sha256:e0f6f12e2551b1716a95a63a1366ca91bbcd7be059862c1b18f989b1da356cdd" 41 | ], 42 | "markers": "python_version >= '3.10'", 43 | "version": "==5.2.7" 44 | }, 45 | "msgpack": { 46 | "hashes": [ 47 | "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2", 48 | "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", 49 | "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", 50 | "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", 51 | "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", 52 | "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", 53 | "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", 54 | "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", 55 | "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", 56 | "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", 57 | "sha256:350ad5353a467d9e3b126d8d1b90fe05ad081e2e1cef5753f8c345217c37e7b8", 58 | "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f", 59 | "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a", 60 | "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", 61 | "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", 62 | "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f", 63 | "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", 64 | "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", 65 | "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", 66 | "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", 67 | "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", 68 | "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", 69 | "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", 70 | "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", 71 | "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", 72 | "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", 73 | "sha256:67016ae8c8965124fdede9d3769528ad8284f14d635337ffa6a713a580f6c030", 74 | "sha256:6bde749afe671dc44893f8d08e83bf475a1a14570d67c4bb5cec5573463c8833", 75 | "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", 76 | "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", 77 | "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", 78 | "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", 79 | "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", 80 | "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", 81 | "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", 82 | "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251", 83 | "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", 84 | "sha256:94fd7dc7d8cb0a54432f296f2246bc39474e017204ca6f4ff345941d4ed285a7", 85 | "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", 86 | "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", 87 | "sha256:9fba231af7a933400238cb357ecccf8ab5d51535ea95d94fc35b7806218ff844", 88 | "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", 89 | "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87", 90 | "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", 91 | "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", 92 | "sha256:a8f6e7d30253714751aa0b0c84ae28948e852ee7fb0524082e6716769124bc23", 93 | "sha256:ad09b984828d6b7bb52d1d1d0c9be68ad781fa004ca39216c8a1e63c0f34ba3c", 94 | "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", 95 | "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", 96 | "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", 97 | "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", 98 | "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", 99 | "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", 100 | "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", 101 | "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa", 102 | "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", 103 | "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9", 104 | "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", 105 | "sha256:ea5405c46e690122a76531ab97a079e184c0daf491e588592d6a23d3e32af99e", 106 | "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", 107 | "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", 108 | "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162" 109 | ], 110 | "index": "pypi", 111 | "markers": "python_version >= '3.9'", 112 | "version": "==1.1.2" 113 | }, 114 | "psycopg": { 115 | "hashes": [ 116 | "sha256:0bce99269d16ed18401683a8569b2c5abd94f72f8364856d56c0389bcd50972a", 117 | "sha256:ab5caf09a9ec42e314a21f5216dbcceac528e0e05142e42eea83a3b28b320ac3" 118 | ], 119 | "index": "pypi", 120 | "markers": "python_version >= '3.8'", 121 | "version": "==3.2.10" 122 | }, 123 | "psycopg-pool": { 124 | "hashes": [ 125 | "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", 126 | "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7" 127 | ], 128 | "markers": "python_version >= '3.8'", 129 | "version": "==3.2.6" 130 | }, 131 | "sqlparse": { 132 | "hashes": [ 133 | "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", 134 | "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca" 135 | ], 136 | "markers": "python_version >= '3.8'", 137 | "version": "==0.5.3" 138 | }, 139 | "typing-extensions": { 140 | "hashes": [ 141 | "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", 142 | "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" 143 | ], 144 | "markers": "python_version >= '3.8'", 145 | "version": "==4.13.2" 146 | } 147 | }, 148 | "develop": { 149 | "asgiref": { 150 | "hashes": [ 151 | "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", 152 | "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e" 153 | ], 154 | "markers": "python_version >= '3.9'", 155 | "version": "==3.10.0" 156 | }, 157 | "astroid": { 158 | "hashes": [ 159 | "sha256:1e5a5011af2920c7c67a53f65d536d65bfa7116feeaf2354d8b94f29573bb0ce", 160 | "sha256:54c760ae8322ece1abd213057c4b5bba7c49818853fc901ef09719a60dbf9dec" 161 | ], 162 | "markers": "python_full_version >= '3.9.0'", 163 | "version": "==3.3.11" 164 | }, 165 | "async-timeout": { 166 | "hashes": [ 167 | "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", 168 | "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3" 169 | ], 170 | "index": "pypi", 171 | "markers": "python_version >= '3.8'", 172 | "version": "==5.0.1" 173 | }, 174 | "cachetools": { 175 | "hashes": [ 176 | "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4", 177 | "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a" 178 | ], 179 | "markers": "python_version >= '3.7'", 180 | "version": "==5.5.2" 181 | }, 182 | "chardet": { 183 | "hashes": [ 184 | "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", 185 | "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970" 186 | ], 187 | "markers": "python_version >= '3.7'", 188 | "version": "==5.2.0" 189 | }, 190 | "colorama": { 191 | "hashes": [ 192 | "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", 193 | "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" 194 | ], 195 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", 196 | "version": "==0.4.6" 197 | }, 198 | "coverage": { 199 | "extras": [ 200 | "toml" 201 | ], 202 | "hashes": [ 203 | "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f", 204 | "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3", 205 | "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05", 206 | "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25", 207 | "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe", 208 | "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257", 209 | "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78", 210 | "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada", 211 | "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64", 212 | "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6", 213 | "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28", 214 | "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067", 215 | "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733", 216 | "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676", 217 | "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23", 218 | "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008", 219 | "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd", 220 | "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3", 221 | "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82", 222 | "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545", 223 | "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00", 224 | "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47", 225 | "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501", 226 | "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d", 227 | "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814", 228 | "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd", 229 | "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a", 230 | "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318", 231 | "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3", 232 | "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c", 233 | "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42", 234 | "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a", 235 | "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6", 236 | "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a", 237 | "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7", 238 | "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487", 239 | "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4", 240 | "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2", 241 | "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9", 242 | "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd", 243 | "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73", 244 | "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc", 245 | "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f", 246 | "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea", 247 | "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899", 248 | "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a", 249 | "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543", 250 | "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1", 251 | "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7", 252 | "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d", 253 | "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502", 254 | "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b", 255 | "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040", 256 | "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c", 257 | "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27", 258 | "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c", 259 | "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d", 260 | "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4", 261 | "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe", 262 | "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323", 263 | "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883", 264 | "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f", 265 | "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f" 266 | ], 267 | "markers": "python_version >= '3.9'", 268 | "version": "==7.8.0" 269 | }, 270 | "dill": { 271 | "hashes": [ 272 | "sha256:0633f1d2df477324f53a895b02c901fb961bdbf65a17122586ea7019292cbcf0", 273 | "sha256:44f54bf6412c2c8464c14e8243eb163690a9800dbe2c367330883b19c7561049" 274 | ], 275 | "markers": "python_version >= '3.8'", 276 | "version": "==0.4.0" 277 | }, 278 | "distlib": { 279 | "hashes": [ 280 | "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", 281 | "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403" 282 | ], 283 | "version": "==0.3.9" 284 | }, 285 | "django": { 286 | "hashes": [ 287 | "sha256:23254866a5bb9a2cfa6004e8b809ec6246eba4b58a7589bc2772f1bcc8456c7f", 288 | "sha256:37e687f7bd73ddf043e2b6b97cfe02fcbb11f2dbb3adccc6a2b18c6daa054d7f" 289 | ], 290 | "index": "pypi", 291 | "markers": "python_version >= '3.10'", 292 | "version": "==5.2.8" 293 | }, 294 | "django-stubs": { 295 | "hashes": [ 296 | "sha256:991dc36c10fefd6bf0cfd163db3dabd15787112f96cfedf6e8388ba1930040fb", 297 | "sha256:bb517a60355474a2c8403a48d16ff906c8bc46d604f419206de10ac018a5f4bc" 298 | ], 299 | "index": "pypi", 300 | "markers": "python_version >= '3.10'", 301 | "version": "==5.2.6" 302 | }, 303 | "django-stubs-ext": { 304 | "hashes": [ 305 | "sha256:1165a596138a54edac12616af126332db03b812c8272ada724b4e89188a3c41f", 306 | "sha256:c3736dcea49666140c92a1471dda83c31e8d5e71dc364ab3cedc7698dbf01cee" 307 | ], 308 | "markers": "python_version >= '3.10'", 309 | "version": "==5.2.6" 310 | }, 311 | "filelock": { 312 | "hashes": [ 313 | "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", 314 | "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de" 315 | ], 316 | "markers": "python_version >= '3.9'", 317 | "version": "==3.18.0" 318 | }, 319 | "iniconfig": { 320 | "hashes": [ 321 | "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", 322 | "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" 323 | ], 324 | "markers": "python_version >= '3.8'", 325 | "version": "==2.1.0" 326 | }, 327 | "isort": { 328 | "hashes": [ 329 | "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784", 330 | "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481" 331 | ], 332 | "markers": "python_full_version >= '3.9.0'", 333 | "version": "==6.1.0" 334 | }, 335 | "mccabe": { 336 | "hashes": [ 337 | "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", 338 | "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" 339 | ], 340 | "markers": "python_version >= '3.6'", 341 | "version": "==0.7.0" 342 | }, 343 | "mypy": { 344 | "hashes": [ 345 | "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", 346 | "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", 347 | "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f", 348 | "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2", 349 | "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f", 350 | "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b", 351 | "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5", 352 | "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", 353 | "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", 354 | "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", 355 | "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c", 356 | "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828", 357 | "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba", 358 | "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", 359 | "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", 360 | "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b", 361 | "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", 362 | "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e", 363 | "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13", 364 | "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", 365 | "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", 366 | "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", 367 | "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", 368 | "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b", 369 | "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", 370 | "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559", 371 | "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3", 372 | "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f", 373 | "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", 374 | "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980", 375 | "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078", 376 | "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5" 377 | ], 378 | "markers": "python_version >= '3.9'", 379 | "version": "==1.15.0" 380 | }, 381 | "mypy-extensions": { 382 | "hashes": [ 383 | "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", 384 | "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" 385 | ], 386 | "markers": "python_version >= '3.5'", 387 | "version": "==1.0.0" 388 | }, 389 | "packaging": { 390 | "hashes": [ 391 | "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", 392 | "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" 393 | ], 394 | "markers": "python_version >= '3.8'", 395 | "version": "==24.2" 396 | }, 397 | "platformdirs": { 398 | "hashes": [ 399 | "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", 400 | "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf" 401 | ], 402 | "markers": "python_version >= '3.9'", 403 | "version": "==4.4.0" 404 | }, 405 | "pluggy": { 406 | "hashes": [ 407 | "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", 408 | "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" 409 | ], 410 | "markers": "python_version >= '3.8'", 411 | "version": "==1.5.0" 412 | }, 413 | "pylint": { 414 | "hashes": [ 415 | "sha256:01f9b0462c7730f94786c283f3e52a1fbdf0494bbe0971a78d7277ef46a751e7", 416 | "sha256:d312737d7b25ccf6b01cc4ac629b5dcd14a0fcf3ec392735ac70f137a9d5f83a" 417 | ], 418 | "index": "pypi", 419 | "markers": "python_full_version >= '3.9.0'", 420 | "version": "==3.3.9" 421 | }, 422 | "pylint-django": { 423 | "hashes": [ 424 | "sha256:19e8c85a8573a04e3de7be2ba91e9a7c818ebf05e1b617be2bbae67a906b725f", 425 | "sha256:359f68fe8c810ee6bc8e1ab4c83c19b15a43b234a24b08978f47a23462b5ce28" 426 | ], 427 | "index": "pypi", 428 | "markers": "python_version >= '3.9' and python_version < '4.0'", 429 | "version": "==2.6.1" 430 | }, 431 | "pylint-plugin-utils": { 432 | "hashes": [ 433 | "sha256:ae11664737aa2effbf26f973a9e0b6779ab7106ec0adc5fe104b0907ca04e507", 434 | "sha256:d3cebf68a38ba3fba23a873809155562571386d4c1b03e5b4c4cc26c3eee93e4" 435 | ], 436 | "markers": "python_version >= '3.7' and python_version < '4.0'", 437 | "version": "==0.8.2" 438 | }, 439 | "pyproject-api": { 440 | "hashes": [ 441 | "sha256:326df9d68dea22d9d98b5243c46e3ca3161b07a1b9b18e213d1e24fd0e605766", 442 | "sha256:7e8a9854b2dfb49454fae421cb86af43efbb2b2454e5646ffb7623540321ae6e" 443 | ], 444 | "markers": "python_version >= '3.9'", 445 | "version": "==1.9.0" 446 | }, 447 | "pytest": { 448 | "hashes": [ 449 | "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", 450 | "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845" 451 | ], 452 | "index": "pypi", 453 | "markers": "python_version >= '3.8'", 454 | "version": "==8.3.5" 455 | }, 456 | "pytest-asyncio": { 457 | "hashes": [ 458 | "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0", 459 | "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f" 460 | ], 461 | "index": "pypi", 462 | "markers": "python_version >= '3.9'", 463 | "version": "==0.26.0" 464 | }, 465 | "pytest-cov": { 466 | "hashes": [ 467 | "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a", 468 | "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde" 469 | ], 470 | "index": "pypi", 471 | "markers": "python_version >= '3.9'", 472 | "version": "==6.1.1" 473 | }, 474 | "ruff": { 475 | "hashes": [ 476 | "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", 477 | "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", 478 | "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", 479 | "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", 480 | "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", 481 | "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", 482 | "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", 483 | "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", 484 | "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", 485 | "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", 486 | "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", 487 | "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", 488 | "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", 489 | "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", 490 | "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", 491 | "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", 492 | "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", 493 | "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", 494 | "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a" 495 | ], 496 | "index": "pypi", 497 | "markers": "python_version >= '3.7'", 498 | "version": "==0.14.2" 499 | }, 500 | "sqlparse": { 501 | "hashes": [ 502 | "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", 503 | "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca" 504 | ], 505 | "markers": "python_version >= '3.8'", 506 | "version": "==0.5.3" 507 | }, 508 | "tomlkit": { 509 | "hashes": [ 510 | "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", 511 | "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0" 512 | ], 513 | "markers": "python_version >= '3.8'", 514 | "version": "==0.13.3" 515 | }, 516 | "tox": { 517 | "hashes": [ 518 | "sha256:4dfdc7ba2cc6fdc6688dde1b21e7b46ff6c41795fb54586c91a3533317b5255c", 519 | "sha256:dd67f030317b80722cf52b246ff42aafd3ed27ddf331c415612d084304cf5e52" 520 | ], 521 | "index": "pypi", 522 | "markers": "python_version >= '3.8'", 523 | "version": "==4.25.0" 524 | }, 525 | "types-pyyaml": { 526 | "hashes": [ 527 | "sha256:0f8b54a528c303f0e6f7165687dd33fafa81c807fcac23f632b63aa624ced1d3", 528 | "sha256:e7d4d9e064e89a3b3cae120b4990cd370874d2bf12fa5f46c97018dd5d3c9ab6" 529 | ], 530 | "markers": "python_version >= '3.9'", 531 | "version": "==6.0.12.20250915" 532 | }, 533 | "typing-extensions": { 534 | "hashes": [ 535 | "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", 536 | "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548" 537 | ], 538 | "markers": "python_version >= '3.9'", 539 | "version": "==4.15.0" 540 | }, 541 | "virtualenv": { 542 | "hashes": [ 543 | "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8", 544 | "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6" 545 | ], 546 | "markers": "python_version >= '3.8'", 547 | "version": "==20.30.0" 548 | } 549 | } 550 | } 551 | --------------------------------------------------------------------------------